submit_nova_art.py
Go to the documentation of this file.
1 #!/bin/env python
2 
3 import os, sys, stat, pwd, re
4 import argparse
5 import datetime
6 import samweb_client
7 import string
8 import tokenize
9 import cStringIO
10 import subprocess
11 import NovaGridUtils
12 from NovaGridUtils import *
13 
# Identity of the submitting user; grid jobs inherit it via GRID_USER.
user=os.getenv("USER")
os.environ["GRID_USER"]=user

sam_user=user
# SAM station the project will attach to; must already be set in the shell.
sam_station=os.getenv("SAM_STATION")
os.system("echo SAM_STATION DEFINED AS $SAM_STATION")

# Offsite OSG sites known to work for NOvA (consumed by --recommended_sites).
# Commented-out entries are currently broken but may return.
recommended_sites=["BNL",
 "Caltech",
 "Clemson",
 "Cornell",
 "FZU",
# "Harvard",
 "Hyak_CE",
 "Michigan",
# "MIT",
 "MWT2",
 "Nebraska",
 "NotreDame",
 "Omaha",
 "OSC",
 "SMU_HPC",
 "SU-OG",
 "SU-ITS",
 "UChicago",
 "UCSD",
# "TTU",
 "Wisconsin"]
# Not working now, may return: "Harvard", "MIT", "MWT2", "UChicago", "TTU"
# FIFE maintains a list, too. Find it here:
# https://cdcvs.fnal.gov/redmine/projects/fife/wiki/Information_about_job_submission_to_OSG_sites

# Where NOvA software is distributed on CVMFS.
cvmfs_distro_base = "/cvmfs/nova.opensciencegrid.org"
novasoft_cvmfs = "%s/novasoft/slf6/novasoft" % cvmfs_distro_base
build_location_arguments = "" # Becomes more specific with --cvmfs
setup_location = ""

# Options accumulated for the jobsub_submit command line.
jobsub_opts = []

# Worker-node payload script and its option/export accumulators.
run_nova_sam_cmd="runNovaSAM.py"
run_nova_sam_opts= []
export_to_run_nova_sam = []

# SAM wrapper that drives the payload on the worker node.
art_sam_wrap_cmd="$NOVAGRIDUTILS_DIR/bin/art_sam_wrap.sh"
art_sam_wrap_opts= []
# Environment variables forwarded into the job via jobsub -e.
export_to_art_sam_wrap=[]
export_to_art_sam_wrap.append("SAM_PROJECT_NAME")
export_to_art_sam_wrap.append("SAM_STATION")
export_to_art_sam_wrap.append("IFDH_BASE_URI")
if "IFDH_DEBUG" in os.environ:
    export_to_art_sam_wrap.append("IFDH_DEBUG")
if "G4NEUTRONHP_USE_ONLY_PHOTONEVAPORATION" in os.environ:
    export_to_art_sam_wrap.append("G4NEUTRONHP_USE_ONLY_PHOTONEVAPORATION")

export_to_art_sam_wrap.append("EXPERIMENT")
export_to_art_sam_wrap.append("GRID_USER")

os.environ["CVMFS_DISTRO_BASE"]=cvmfs_distro_base
export_to_art_sam_wrap.append("CVMFS_DISTRO_BASE")

# Condor usage models; OPPORTUNISTIC/OFFSITE are appended later from args.
#there must be a better name for this variable
usage_models=["DEDICATED"]

# Extra files copied to the worker node (filled from --inputfile).
input_files=[]

# User-supplied scripts run at the various stages of the job lifecycle.
veryearly_scripts=[]
early_scripts=[]
source_scripts=[]
pre_scripts=[]
inter_scripts=[]
post_scripts=[]
def check_env(vname):
    """Abort the submission (via NovaGridUtils.fail) unless environment
    variable *vname* is set to a non-empty value.

    Returns None on success.
    """
    value = os.getenv(vname)
    # getenv returns None when unset; an empty string is equally unusable,
    # and `not value` covers both cases.
    if not value:
        fail("Environment variable %s not defined" % vname)
90 
91 def check_fcl(tag,fcl):
92  if "/" in fcl[:1] and os.path.isfile(fcl):
93  return fcl # hey I'm ok!
94  # Loop over dirs in FHICL_FILE_PATH
95  fclPaths = os.environ["FHICL_FILE_PATH"].split(":")
96  for path in fclPaths:
97  fullPath = os.path.join(path, fcl)
98  if os.path.isfile(fullPath):
99  return fcl # hey you're ok!
100  # Check if they are copying the fhicl file to the worker node.
101  for input_file in input_files:
102  if os.path.basename(input_file) == fcl:
103  return fcl # Passing fhicl as argument, all good.
104  elif os.path.basename(input_file) == os.path.basename(fcl):
105  print ""
106  print "The fhicl will be copied to $PWD on the worker node."
107  print "You specified some sort of file path which isn't needed. Fixing it for you :)"
108  print fcl + " --> " + os.path.basename(fcl)
109  print ""
110  return os.path.basename(fcl) # User incorrectly left file path there.
111 
112  fail("fcl file %s does not exist" %fcl)
113 
114 
116  """
117  This reads tokens using tokenize.generate_tokens and recombines them
118  using tokenize.untokenize, and skipping comment/docstring tokens in between
119  """
120  f = cStringIO.StringIO(src)
121  class SkipException(Exception): pass
122  processed_tokens = []
123  last_token = None
124  # go thru all the tokens and try to skip comments and docstrings
125  for tok in tokenize.generate_tokens(f.readline):
126  t_type, t_string, t_srow_scol, t_erow_ecol, t_line = tok
127 
128  try:
129  if t_type == tokenize.COMMENT:
130  raise SkipException()
131 
132  elif t_type == tokenize.STRING:
133 
134  if last_token is None or last_token[0] in [tokenize.INDENT]:
135  pass
136 
137  except SkipException:
138  pass
139  else:
140  processed_tokens.append(tok)
141 
142  last_token = tok
143 
144  return tokenize.untokenize(processed_tokens)
145 
def find_file(paths, filename):
    """Locate *filename* and return a usable path to it.

    If *filename* already names an existing file it is returned untouched;
    otherwise each entry of *paths* (environment variables expanded) is
    walked recursively for a file of that name. Aborts via fail() when
    nothing matches.
    """
    if os.path.isfile(filename):
        return filename
    for search_path in paths:
        expanded = os.path.expandvars(search_path)
        for dirpath, _dirnames, filenames in os.walk(expanded):
            if filename in filenames:
                return os.path.join(dirpath, filename)
    fail("Cannot find file "+filename)
154 
def find_file_in_list(filepath, pathlist):
    """Return True if any entry of *pathlist* has the same basename as
    *filepath*, else False.

    Only basenames are compared; directory parts are ignored.
    """
    # Hoist the target basename out of the loop and let any() do the scan.
    target = os.path.basename(filepath)
    return any(os.path.basename(candidate) == target for candidate in pathlist)
160 
161 
    # Start with jobsub_submit and its options
    jobsub_cmd = "jobsub_submit \\\n"
    for opt in jobsub_opts:
        jobsub_cmd += " " + opt + " \\\n"
    # Forward the selected environment variables into the job with -e.
    for export in export_to_art_sam_wrap:
        jobsub_cmd += " -e " + export
        jobsub_cmd += " \\\n"

    # Add art_sam_wrap wrapper script and its options
    if args.testrel:
        if args.reuse_tarball:
            # Reuse a tarball already uploaded to the dropbox area.
            jobsub_cmd += " --tar_file_name dropbox://" + os.path.basename(args.testrel) + ".tar \\\n"
        else:
            # Let jobsub tar up the test-release directory itself.
            jobsub_cmd += " --tar_file_name tardir://" + args.testrel + " \\\n"
    elif args.user_tarball:
        if not os.path.isfile(args.user_tarball):
            # NOTE(review): "exit" is a typo for "exist" in this runtime
            # message; left unchanged here since this edit is doc-only.
            print "Tarball filename passed to --user_tarball does not exit:", args.user_tarball
            sys.exit(5)
        jobsub_cmd += " --tar_file_name dropbox://" + args.user_tarball + " \\\n"

    # The SAM wrapper script is the actual jobsub payload executable.
    jobsub_cmd += " file://" + art_sam_wrap_cmd + " \\\n"
    for opt in art_sam_wrap_opts:
        jobsub_cmd += " " + opt + " \\\n"
    for export in export_to_run_nova_sam:
        jobsub_cmd += " --export " + export + " \\\n"



    # Now set the main program and its options
    if not mcgen:
        jobsub_cmd += " -X " + run_nova_sam_cmd + " \\\n"
        for opt in run_nova_sam_opts:
            jobsub_cmd += " " + opt + " \\\n"
    else:
        # MC generation mode runs the bare nova executable with SAM
        # metadata options instead of runNovaSAM.py.
        jobsub_cmd += " -X nova \\\n"
        mcgen_opts = ["--sam-file-type=importedSimulated", "--sam-application-family=nova","--sam-data-tier=" + mcouttier,"--sam-application-version=" + tag]
        if nevts>0 :
            mcgen_opts += ["-n %d" % nevts]
        for opt in mcgen_opts:
            jobsub_cmd += " " + opt + " \\\n"

    # Strip the trailing " \\\n" continuation left by the last option.
    jobsub_cmd = jobsub_cmd[:-2]
    return jobsub_cmd
206 
207 
208 #######################################################################################
209 
if __name__=='__main__':

    prog=os.path.basename(sys.argv[0])
    NovaGridUtils.prog=prog


    # Pre-pass: expand every -f/--file argument into inline command-line
    # arguments before the real parser runs.
    while "-f" in sys.argv or "--file" in sys.argv:
        ### Allow args to be passed in as a plain text file.
        ### We make a preliminary parser get these arguments out for two reasons:
        ### 1) Maintain standard -h, --help functionality
        ### 2) Avoid necessity required arguments in initial parsing,
        ###    allow them to be missing, but find them in the file.
        preliminary_parser = argparse.ArgumentParser(prog=prog, description='Submit nova art job')

        preliminary_parser.add_argument('-f', '--file',
                        help="""Text file containing any arguments to this utility. Multiple allowed.
                        Arguments should look just like they would on the command line,
                        but the parsing of this file is whitespace insenstive.
                        Commented lines will be identified with the # character and removed. """, type=str, action='append')
        pre_args, unknown = preliminary_parser.parse_known_args()

        # Remove pre_args from sys.argv so they are not processed again
        sys.argv = filter(lambda x: x not in [ "-f", "--file"], sys.argv)

        if pre_args.file:
            for filepath in pre_args.file:
                # Remember where the filename sat so the expanded arguments
                # can be spliced back into the same position.
                index = sys.argv.index(filepath)
                sys.argv.remove(filepath)
                if os.path.isfile(filepath):
                    fullpath = filepath
                else:
                    # Fall back to the stock configs shipped with NovaGridUtils.
                    fullpath = find_file(["$NOVAGRIDUTILS_DIR/configs/"],filepath)
                text = open(fullpath, 'r').read()
                text = remove_comments(text) # Strip out commented lines
                newargs = []
                for line in text.splitlines():
                    # Insert arguments into list in order
                    # where the -f appeared
                    newargs += line.split()
                sys.argv[index:index] = newargs
250 
251  parser = argparse.ArgumentParser(prog=prog, description='Submit nova art job', add_help=False)
252 
253  ###required options
254  required_args = parser.add_argument_group("Required arguments", "These arguments must be supplied.")
255 
256  required_args.add_argument('--jobname',
257  required=True,
258  help='Job name',
259  type=str)
260 
261  required_args.add_argument('--defname',
262  required=True,
263  help='SAM dataset definition to run over',
264  type=str)
265 
266  required_args.add_argument('--config', '-c',
267  required=True,
268  help='FHiCL file to use as configuration for nova executable. The path given should be relative to the $SRT_PRIVATE_CONTEXT of any test release you submit using',
269  type=str)
270 
271  required_args.add_argument("--tag",
272  required=True,
273  help="Tag of novasoft to use",
274  type=str)
275 
276  required_args.add_argument("--dest",
277  required=True,
278  help="Destination for output files",
279  type=str)
280 
281 
282  ###debugging
283  debugging_args = parser.add_argument_group("Debugging options", "These optional arguments can help debug your submission.")
284 
285  debugging_args.add_argument('--print_jobsub',
286  help='Print jobsub command',
287  action='store_true',default=False)
288 
289  debugging_args.add_argument('--printenv',
290  help='Print environment variables',
291  action='store_true',default=False)
292 
293  debugging_args.add_argument('--test',
294  help='Do not actually do anything, just run tests and print jobsub cmd',
295  action='store_true',default=False)
296 
297  debugging_args.add_argument('--gdb',
298  help='Run nova executable under gdb, print full stack trace, then quit gdb.',
299  action='store_true',default=False)
300 
301  debugging_args.add_argument('--test_submission',
302  help='Override other arguments given to submit a test to the grid. It will run 1 job with 3 events and write the output to /pnfs/nova/scratch/users/<user>/test_jobs/<date>_<time>',
303  action='store_true',default=False)
304 
305  debugging_args.add_argument('--jobsub_server',
306  help='Submit using the specified jobsub server',
307  default="")
308 
309  debugging_args.add_argument('--test_queue',
310  help='Submit jobs to the test jobsub queue for higher starting priority. NB NOvA is limited to 10 jobs at a time on this queue.',
311  action='store_true', default=False)
312 
313  debugging_args.add_argument("--kill_after",
314  metavar="SEC",
315  help="If job is still running after this many seconds, kill in such a way that a log will be returned",
316  type=int)
317 
318  ###job control
319  job_control_args = parser.add_argument_group("Job control options", "These optional arguments help control where and how your jobs land.")
320  ###number of jobs
321  job_control_args.add_argument('--njobs',
322  help='Number of jobs to submit',
323  type=int, default = 0)
324 
325  job_control_args.add_argument('--maxConcurrent',
326  help='Run a maximum of N jobs simultaneously',
327  metavar='N',
328  type=int, default=0)
329 
330  job_control_args.add_argument('--files_per_job',
331  help='Number of files per job - if zero, calculate from number of jobs', metavar='N',
332  type=int, default = 0)
333 
334  job_control_args.add_argument('--nevts',
335  help='Number of events per file to process',
336  type=int, default = 0)
337 
338 
339  job_control_args.add_argument('--no_multifile',
340  help='Do not use art_sam_wrap.sh multifile mode, which is on by default',
341  action='store_true')
342 
343  job_control_args.add_argument('--txtfiledef',
344  help='Use if the input definition is made up of text files, each containing a list of file names',
345  action='store_true',default=False)
346 
347  ###general job control
348  job_control_args.add_argument('--opportunistic',
349  help='Run opportunistically on the fermigrid',
350  action='store_true',default=False)
351 
352  job_control_args.add_argument('--offsite',
353  help='Allow to run on offsite resources as well. Implies --opportunistic.',
354  action='store_true',default=False)
355 
356  job_control_args.add_argument('--offsite_only',
357  help='Allow to run solely on offsite resources.',
358  action='store_true',default=False)
359 
360  job_control_args.add_argument('--amazon',
361  help='Run at amazon.',
362  action='store_true',default=False)
363 
364  job_control_args.add_argument('--site',
365  help='Specify allowed offsite locations. Omit to allow running at any offsite location',
366  type=str,action='append')
367 
368  job_control_args.add_argument('--exclude_site',
369  help='Specify an offsite location to exclude.',
370  metavar='SITE',
371  type=str,action='append')
372 
373  job_control_args.add_argument('--recommended_sites',
374  help='Specify known working offsite locations.',
375  action='store_true',default=False)
376 
377  job_control_args.add_argument('--autoDropbox',
378  help='Use automatic dropbox location based on site',
379  action='store_true',default=False)
380 
381  job_control_args.add_argument('--os',
382  help='Specify OS version of worker node',
383  type=str,action='append')
384 
385  job_control_args.add_argument('--disk',
386  help='Local disk space requirement for worker node in MB.',
387  type=int, default=10000)
388 
389  job_control_args.add_argument('--memory',
390  help='Local memory requirement for worker node in MB.',
391  type=int, default=1900)
392 
393  job_control_args.add_argument('--expected_lifetime',
394  help='Expected job lifetime (default is 10800s=3h). Valid values are an integer number of seconds or one of \"short\" (6h), \"medium\" (12h) or \"long\" (24h, jobsub default)', metavar='LIFETIME',
395  type=str, default="10800")
396 
397  job_control_args.add_argument('--dynamic_lifetime',
398  help="Dynamically determine whether a new file should be started based on glidein lifetime. Specify the maximum length expected for a single file to take to process in seconds.", metavar="LIFETIME",
399  type=str)
400 
401  job_control_args.add_argument('--cpu',
402  help="Request worker nodes that have at least NUMBER cpus",
403  type=int, default=1)
404 
405 
406  job_control_args.add_argument('--group', '-G',
407  help="Specify batch group GROUP -- mainly used to set job priority. At present, only supportable value is nova",
408  type=str, default="nova")
409 
410  job_control_args.add_argument('--subgroup',
411  help='Production subgroup',
412  type=str)
413 
414  job_control_args.add_argument("--role",
415  help="Specify role to run on the grid. Can be Analysis (default) or Production. This option is no longer supported",
416  type=str,default="Analysis")
417 
418  job_control_args.add_argument('--continue_project',
419  help="Don't start a new samweb project, instead continue this one.", metavar='PROJECT',
420  type=str,default="")
421 
422  job_control_args.add_argument("--snapshot_id",
423  help="Use this existing snapshot instead of creating a new one.", metavar="ID",
424  type=int,default=0)
425 
426  job_control_args.add_argument("--poms",
427  help="Start/continue a poms campaign and task for this submission",
428  default=False, action="store_true")
429 
430  job_control_args.add_argument("--mix",
431  help="Pass a mixing script to the job to pull in a files for job mixing.",
432  default="", type=str)
433 
434  job_control_args.add_argument("--mail_always",
435  help="Do you want an email whenever every jobs finishes?",
436  default=False, action="store_true")
437 
438  job_control_args.add_argument("--mail_on_error",
439  help="Do you want an email whenever a job fails on an error?",
440  default=False, action="store_true")
441 
442 # job_control_args.add_argument("--poms_definition",
443 # help="POMS definition to use",
444 # default="GenercicSubmitNOvAArt", type=str)
445 
446  job_control_args.add_argument('--user_priority',
447  help='Priority (integer) within a user\'s jobs (default = 0)',
448  type=int, default=0)
449 
450  job_control_args.add_argument('--singularity',
451  help='Location in CVMFS of a singularity container to launch the job into',
452  type=str,default='')
453 
454  job_control_args.add_argument('--jobfactory',
455  help='Use the specified JobFactoryType.',
456  default="")
457 
458  job_control_args.add_argument("--gpu",
459  help="Request a node with a GPU",
460  default=False, action="store_true")
461 
462  add_node_features_arg(job_control_args)
463 
464  ###software control
465  novasoft_args = parser.add_argument_group("NOvA software options", "These options control the novasoft setup.")
466  novasoft_args.add_argument('--maxopt',
467  help='Run in maxopt mode',
468  action='store_true',default=True)
469 
470  testrel_gp = novasoft_args.add_mutually_exclusive_group(required=False)
471  testrel_gp.add_argument("--testrel",
472  help="Use a test release at location TESTREL. It will be tarred up, and sent to the worker node.",
473  type=str)
474  testrel_gp.add_argument("--user_tarball",
475  help="Use existing test release tarball in specified location rather than having jobsub make one for you (conflicts with --testrel)",
476  type=str)
477 
478  novasoft_args.add_argument('--reuse_tarball',
479  help='Do you want to reuse a tarball that is already in resilient space? If using this option avoid trailing slash in --testrel option. (conflicts with --user_tarball)',
480  action='store_true',default=False)
481 
482  novasoft_args.add_argument('--cvmfs',
483  help='Does nothing (always true), but retained for compatibility: pull software from CVMFS.',
484  action='store_true')
485 
486 
487  novasoft_args.add_argument('--disable_cvmfs_version_matching',
488  help="Don't perform a CVMFS-is-up-to-date check on target nodes via Condor requirements. (For advanced debugging use.) ",
489  action="store_true",
490  default=False)
491 
492  novasoft_args.add_argument('--novasoftups',
493  help='Use the ups build of novasoft, must be used with source to setup.',
494  action='store_true')
495 
496  novasoft_args.add_argument('--ngu_test',
497  help='Setup the test version of NovaGridUtils in the grid jobs.',
498  action='store_true')
499 
500  novasoft_args.add_argument('--ngu_version',
501  help='Setup a specific NovaGridUtils version in the grid jobs.', metavar='VERSION',
502  type=str)
503 
504  novasoft_args.add_argument('--testrel_ngu',
505  help="Must be used with --testrel, with NGU checked out. After unpacking tarball will setup the local version of NGU you are using on the work.",
506  action='store_true')
507 
508  novasoft_args.add_argument('--lemBalance',
509  help='Choose lem server based on (CLUSTER+PROCESS)%%2 to balance load',
510  action='store_true', default=False)
511 
512  novasoft_args.add_argument('--lemServer',
513  help='Specify lem server',
514  type=str)
515 
516  ###output control
517  output_args = parser.add_argument_group("Output file options", "Note that you must specify either --copyOut or --copyOutScript.")
518 
519 
520  output_args.add_argument('--copyOutScript',
521  help='Use script COPYOUTSCRIPT to copy back your output',
522  type=str)
523 
524  output_args.add_argument('--copyOut',
525  help='Use the built in copy out mechanism. If used, you must specify --outTier, --cafTier, --flatTier, --h5Tier or --histTier',
526  action='store_true')
527 
528  output_args.add_argument('--logs',
529  help='Return .log files corresponding to every output',
530  action='store_true')
531  output_args.add_argument('--zipLogs',
532  help='Format logs as .bz2 files. Implies --logs',
533  action='store_true')
534 
535  output_args.add_argument('--outTier',
536  help='Data tier of the output file, multiple allowed, formatted as <name_in_fcl_outputs>:<data_tier>',
537  type=str, action='append')
538 
539  output_args.add_argument('--cafTier',
540  help='Module label for CAF output, multiple allowed. Format as <cafmaker_module_label>:<data_tier>',
541  type=str, action='append')
542 
543  output_args.add_argument('--flatTier',
544  help='Module label for FlatCAF output, multiple allowed. Format as <flatmaker_module_label>:<data_tier>',
545  type=str, action='append')
546 
547  output_args.add_argument('--histTier',
548  help='File identifier string for TFileService output, only one allowed. Supply as --histTier <id> for output_name.<id>.root, where output_name is assembled based on the input file.',
549  type=str)
550 
551  output_args.add_argument('--h5Tier',
552  help='File identifier for h5 output, multiple allowed. Format as <hdf5maker_module>:<data_tier>',
553  type=str, action='append')
554 
555  output_args.add_argument('--outputNumuDeCAF',
556  help='Make standard numu decafs for all CAF files produced during the job',
557  action='store_true',default=False)
558 
559  output_args.add_argument('--outputNueDeCAF',
560  help='Make standard nue decafs for all CAF files produced during the job',
561  action='store_true',default=False)
562 
563  output_args.add_argument('--outputNumuOrNueDeCAF',
564  help='Make standard nue or numu decafs for all CAF files produced during the job',
565  action='store_true',default=False)
566 
567  output_args.add_argument('--outputNusDeCAF',
568  help='Make standard nus decafs for all CAF files produced during the job',
569  action='store_true',default=False)
570 
571  output_args.add_argument('--outputValidationDeCAF',
572  help='Make validation (nue_or_numu_or_nus) decafs for all CAF files produced during the job',
573  action='store_true',default=False)
574 
575 
576  output_args.add_argument('--cosmicsPolarity',
577  help='To specify a horn polarity for the cosmics output file name',
578  type=str)
579 
580  output_args.add_argument('--npass',
581  help='To specify npass (aka nova.subversion)',
582  type=str)
583 
584  output_args.add_argument('--skim',
585  help='To specify nova.skim (does not work with mc gen)',
586  type=str)
587 
588  output_args.add_argument('--systematic',
589  help='To specify nova.systematic (does not work with mc gen)', metavar='SYST',
590  type=str)
591 
592  output_args.add_argument('--specialName',
593  help='To specify nova.special name (does not work with mc gen)',
594  type=str)
595 
596  output_args.add_argument('--genietune',
597  help='To specify nova.genietune (does not work with mc gen)', metavar='TUNE',
598  type=str)
599 
600  output_args.add_argument('--NPPFX',
601  help='To specify number of PPFX universes',
602  type=str)
603 
604  output_args.add_argument('--hashDirs',
605  help='Use hash directory structure in destination directory.',
606  action='store_true')
607 
608  output_args.add_argument('--runDirs',
609  help='Use run directory structure in destination directory, 000XYZ/XYZUW for run number XYZUW.',
610  action='store_true')
611 
612  output_args.add_argument('--noCleanup',
613  help='Pass --noCleanup argument to runNovaSAM.py. Necessary when using a postscript for copyout.',
614  action='store_true')
615 
616  output_args.add_argument('--jsonMetadata', help='Create JSON files with metadata corresponding to each output file, and copy them to the same destinations', action='store_true')
617 
618  output_args.add_argument('--declareFiles',
619  help='Declare files with metadata on worker node',
620  action='store_true')
621 
622  output_args.add_argument('--production',
623  help='Submit production style jobs. Implies \"--role=Production --hashDirs --jsonMetadata --zipLogs\", and checks that other settings needed for production are specified',
624  action='store_true')
625 
626  output_args.add_argument('--calibration',
627  help='Submit calibration style jobs. Implies \"--role=Production\", and checks that other settings needed for calibration are specified',
628  action='store_true')
629 
630  output_args.add_argument('--declareLocations',
631  help='Declare the file output locations to SAM during the copy back of the files',
632  action='store_true')
633 
634  ###environment configuration
635  environment_args = parser.add_argument_group("Environment options", "These optional arguments allow control of the grid running environment.")
636 
637  environment_args.add_argument('--export',
638  help='Export variable EXPORT to art_sam_wrap.sh',
639  type=str, action='append')
640 
641  environment_args.add_argument('--veryearlyscript',
642  help='Source script EARLYSCRIPT before any environment setup or sourcing',
643  type=str, action='append')
644 
645  environment_args.add_argument('--source',
646  help='Source script SOURCE',
647  type=str, action='append')
648 
649  environment_args.add_argument('--earlyscript',
650  help='Execute script EARLYSCRIPT before any environment setup',
651  type=str, action='append')
652 
653  environment_args.add_argument('--prescript',
654  help='Execute script PRESCRIPT before executing runNovaSAM.py',
655  type=str, action='append')
656 
657  environment_args.add_argument('--precopyscript',
658  help='Execute script PRECOPYSCRIPT within runNovaSAM.py, after running the nova -c command.',
659  type=str, action='append')
660 
661  environment_args.add_argument('--postscript',
662  help='Execute script POSTSCRIPT after executing runNovaSAM.py',
663  type=str, action='append')
664 
665  environment_args.add_argument('--inputfile',
666  help='Copy this extra input file into job area before running executable',
667  type=str, action='append')
668 
669  ###support options
670  support_args = parser.add_argument_group("Support options", "These optional arguments using this submission utility easier.")
671 
672  support_args.add_argument("-h", "--help", action="help", help="Show this help message and exit")
673 
674  support_args.add_argument('-f', '--file',
675  help="""Text file containing any arguments to this utility. Multiple allowed.
676  Arguments should look just like they would on the command line,
677  but the parsing of this file is whitespace insenstive.
678  Comments will be identified with the # character and removed. """, type=str, action='append')
679 
680 
681 
682 
    ############################################
    # Process and check command line arguments #
    ############################################

    args = parser.parse_args()
    # Timestamp used for test-submission scratch directories.
    timestamp=datetime.datetime.now().strftime("%Y%m%d_%H%M")

    # Load POMS if we need to:
    if args.poms:
        try:
            import poms_client
        except ImportError:
            print "POMS not setup. Run this and try again:"
            print
            print " setup poms_client"
            print
            sys.exit(1)

    # Check for test submission. Has to be first to override other arguments
    if args.test_submission:
        # Force a single small (3 event) job writing to a per-user scratch
        # area, regardless of what the user asked for.
        test_njobs = 1
        test_nevts = 3
        test_dest = "/pnfs/nova/scratch/users/%s/test_jobs/%s" % (os.environ["USER"], timestamp)
        if not os.path.exists(test_dest):
            os.makedirs(test_dest)
            # Make the fresh scratch area group writable/searchable.
            mode = os.stat(test_dest).st_mode | stat.S_IXGRP | stat.S_IWGRP
            os.chmod(test_dest, mode)
        test_expected_lifetime = "0"
        test_dynamic_lifetime = "500"
        test_files_per_job = 1

        print "Running a test submission. Overwriting:"

        print " njobs", args.njobs, "-->", test_njobs
        args.njobs = test_njobs
        print " nevts", args.nevts, "-->", test_nevts
        args.nevts = test_nevts
        print " dest", args.dest, "-->", test_dest
        args.dest = test_dest
        print " expected_lifetime", args.expected_lifetime, "-->", test_expected_lifetime
        args.expected_lifetime = test_expected_lifetime
        print " dynamic_lifetime", args.dynamic_lifetime, "-->", test_dynamic_lifetime
        args.dynamic_lifetime = test_dynamic_lifetime
        print " files_per_job", args.files_per_job, "-->", test_files_per_job
        args.files_per_job = test_files_per_job
        # Features that make no sense for a throwaway test job are disabled.
        if args.declareFiles:
            print " don't declareFiles"
            args.declareFiles = False
        if args.declareLocations:
            print " don't declareLocations"
            args.declareLocations = False
        if args.autoDropbox:
            print " don't use autoDropbox"
            args.autoDropbox = False
        if args.poms:
            print " don't use poms"
            args.poms = False

        #print " use the test jobsub queue, so OnSite only."
        #args.test_queue = True
        args.offsite = False
744 
    jobname=args.jobname
    defname=args.defname
    snapshot_id=args.snapshot_id
    print_jobsub=args.print_jobsub

    if args.printenv :
        print "Will print environment vars "
        printenv=True

    test=args.test
    if test :
        # --test implies printing the jobsub command; nothing is submitted.
        print_jobsub=True
        print ""
        warn("--test was specified, so all we do is run checks and print jobsub cmd.")

    # Required ups products must already be set up in the caller's shell.
    check_env("SETUP_IFDH_ART")
    check_env("SETUP_SAM_WEB_CLIENT")
    check_env("SETUP_JOBSUB_CLIENT")
    check_env("SAM_STATION")
    tag=check_tag(args.tag)

    # Build qualifier: debug unless --maxopt (default True) selects maxopt.
    srt_qual="debug"
    maxopt=args.maxopt
    maxopt_opt=""
    if maxopt:
        maxopt_opt += "-b:maxopt"
        srt_qual="maxopt"

    if args.reuse_tarball and not args.testrel:
        fail("--reuse_tarball specified without --testrel??")

    if args.testrel:
        check_dir(args.testrel)
        # The test release must have been built for the chosen qualifier.
        if not os.path.isdir(args.testrel+'/lib/'+os.getenv('SRT_ARCH')+'-GCC-'+srt_qual):
            fail(args.testrel+' has never been built '+srt_qual)

    if args.inputfile:
        input_files += args.inputfile

    # Input files must exist and live in dCache so workers can fetch them.
    for input_file in input_files:
        if not os.path.isfile(os.path.expandvars(input_file)):
            fail("Input file %s does not exist!" % input_file)
        if os.path.expandvars(input_file).startswith("/nova/"):
            fail("Input file %s cannot be on /nova/app or /nova/ana/ or /nova/data/ it must be in dCache /pnfs/nova/" % input_file)
        elif os.path.expandvars(input_file).startswith("/grid/"):
            fail("Input file %s cannot be on /grid/ it must be in dCache /pnfs/nova/" % input_file)

    if args.singularity and not os.path.exists(args.singularity):
        fail("Requested singularity image cannot be found: %s" % args.singularity)

    if args.gpu and not args.singularity:
        warn("Requested GPU, but did not request singularity. This is not likely to succeed.")

    if args.gpu and not args.offsite_only:
        warn("GPUs are only available offsite, and you have not chosen --offsite_only")



    fcl=args.config

    # The literal config name "mcgen" selects MC-generation mode (bare nova
    # executable); any other value must resolve to a real fcl file.
    mcgen = (fcl == "mcgen")

    if not mcgen:
        fcl = check_fcl(tag,fcl)

    dest=args.dest
    if not dest.startswith("s3://") :
        check_dir(dest)

    # Like input files, the destination must be in dCache, not NFS areas.
    if os.path.expandvars(dest).startswith("/nova/"):
        fail("Destination directory %s cannot be on /nova/app or /nova/ana/ or /nova/data/ it must be in dCache /pnfs/nova/" % dest)
    elif os.path.expandvars(dest).startswith("/grid/"):
        fail("Destination directory %s cannot be on /grid/ it must be in dCache /pnfs/nova/" % dest)

    export_to_run_nova_sam.append("DEST=%s"%dest)

    is_production_arg = args.production

    if "Production" in args.role and not is_production_arg:
        fail("You specified --role=Production but not --production. This is no longer supported")

    # Apply role-specific defaults and validation (defined in NovaGridUtils).
    if args.production:
        setup_production(args)
    elif args.calibration:
        setup_calibration(args)
    else:
        setup_analysis(args)

    # Check for test submission. Has to be first to override other arguments
    if args.test_submission:
        print "Running a test submission, turning off hashDirs"
        args.hashDirs = False

    if args.hashDirs and args.runDirs:
        fail("Cannot specify both --hashDirs and --runDirs (note that hashDirs is implied by --production)")

    role=args.role

    njobs=args.njobs
    files_per_job = args.files_per_job

    print "Definition name: %s" % defname
    if snapshot_id:
        print " with snapshot_id: %d" % snapshot_id
852 
    ################
    # Setup jobsub #
    ################

    if args.jobsub_server:
        jobsub_opts += ["--jobsub-server=%s"%args.jobsub_server]

    if files_per_job > 0 and njobs > 0 :
        ## both njobs and files per job are specified. Just pass
        ## the settings through to jobsub and art_sam_wrap
        jobsub_opts += ["-N %d" %njobs]
        art_sam_wrap_opts += ["--limit %d" % files_per_job]

    elif files_per_job > 0:
        ##files/job specified, but not njobs. Calculate njobs
        ## on the fly

        ##get files in dataset
        samweb = samweb_client.SAMWebClient(experiment='nova')
        if not snapshot_id:
            num_project_files=samweb.countFiles(defname=defname)
        else:
            num_project_files=samweb.countFiles(dimensions="snapshot_id {0:d}".format(snapshot_id))
        print "Definition file count %d" % num_project_files

        # Integer division (Python 2); +1 so remainder files still get a job.
        njobs=(num_project_files / files_per_job) +1
        jobsub_opts += ["-N %d" %njobs]
        art_sam_wrap_opts += ["--limit %d" % files_per_job]

    elif njobs > 0 :
        ##njobs specified, but not files/job. Just set njobs
        ## and don't force limits on files per jobs
        jobsub_opts += ["-N %d" %njobs]

    else :
        warn("Neither --njobs nor --files_per_job specified. Did you really want to do this? Sleeping for 5 seconds")
        # NOTE(review): `sleep` is presumably re-exported by the
        # `from NovaGridUtils import *` at the top of the file; `time` is
        # not imported here directly -- confirm, else this is a NameError.
        sleep(5)

    # allow a little bit of grace -- 5500 rather than 5000
    if njobs > 5500:
        print >> sys.stderr, """
 Error: cannot submit more than 5000 jobs in one cluster.
 Please break your submission into multiple batches of 5000 (or less) jobs,
 and after submitting the first batch, use --continue_project with the project
 that results from the first submission for the remaining batches.

 Please separate submissions by 5 minutes.
 """
        sys.exit(1)
902 
if args.maxConcurrent:
    jobsub_opts.append("--maxConcurrent=%d" % args.maxConcurrent)

# Accumulate the usage_model list for the resource-provides option.
if args.opportunistic or args.offsite:
    usage_models.append("OPPORTUNISTIC")
if args.offsite:
    usage_models.append("OFFSITE")
if args.offsite_only:
    if args.offsite:
        fail("Both --offsite and --offsite_only specified, these arguments conflict")
    if args.opportunistic:
        fail("Both --opportunistic and --offsite_only specified, these arguments conflict")
    # Offsite-only replaces whatever was accumulated above.
    usage_models = ["OFFSITE"]

if args.amazon:
    usage_models = ["AWS_HEPCLOUD"]
    awsfilepath = os.path.expandvars("/cvmfs/nova.opensciencegrid.org/externals/NovaGridUtils/$NOVAGRIDUTILS_VERSION/NULL/utils/aws_setup.sh")
    source_scripts.append(awsfilepath)

if args.autoDropbox:
    run_nova_sam_opts.append("--autoDropbox")

# Check OS in off-site submissions: a singularity image pins its own OS,
# so --os is forbidden with --singularity and required without it.
if args.offsite or args.offsite_only:
    if args.singularity:
        if args.os:
            fail("Don't specify OS when submitting with --singularity")
    elif not args.os:
        fail("Running offsite, but OS version not specified!")

jobsub_opts.append("--resource-provides=usage_model=" + string.join(usage_models, ","))
938 
# Site selection: the recommended list and/or an explicit user list.
if args.recommended_sites or args.site:
    chosen_sites = []
    if args.recommended_sites:
        chosen_sites.extend(recommended_sites)
    if args.site:
        for isite in args.site:
            if isite not in recommended_sites:
                warn("Site " + isite + " is not known to work. Your jobs may fail at that site. Sleeping for 5 seconds")
                sleep(5)
            chosen_sites.append(isite)
    jobsub_opts.append("--site=" + ",".join(chosen_sites))

if args.exclude_site:
    for isite in args.exclude_site:
        jobsub_opts.append("--append_condor_requirements='(TARGET.GLIDEIN_Site\ isnt\ \\\"%s\\\")'" % isite)

if args.os:
    allowed_os = ["SL6"]
    for ios in args.os:
        if ios not in allowed_os:
            fail("Invalid OS %s" % ios)

    jobsub_opts.append("--OS=" + string.join(args.os, ","))

if args.disk:
    jobsub_opts.append("--disk=%sMB" % (args.disk))

if args.memory:
    jobsub_opts.append("--memory=%sMB" % (args.memory))

if args.cpu:
    jobsub_opts.append("--cpu=%d" % (args.cpu))

# Exactly one mail disposition is forwarded to jobsub.
if args.mail_always:
    jobsub_opts.append("--mail_always")
elif args.mail_on_error:
    jobsub_opts.append("--mail_on_error")
else:
    jobsub_opts.append("--mail_never")

# The default jobsub_submit priority is 0
#production_priority_max = 100 # Reserved for keepup processing
if args.user_priority != 0:
    #if args.production and args.user_priority >= production_priority_max :
    #    fail( "Priority for production must be < %d" % production_priority_max )
    jobsub_opts.append('-l "priority=%d"' % args.user_priority)

if args.kill_after:
    # Self-destruct is handled by the wrapper, not by jobsub.
    art_sam_wrap_opts.append("--self-destruct-timer %d" % (args.kill_after))
997 
if args.dynamic_lifetime:
    # Dynamic lifetime overrides any expected-lifetime setting.
    args.expected_lifetime = "0"
    if args.files_per_job > 1:
        warn("You have limited number of files per job to " + str(args.files_per_job) + " but this argument should not be necessary with dynamic_lifetime.")
    art_sam_wrap_opts.append("--dynamic_lifetime " + args.dynamic_lifetime)
    jobsub_opts.append("--append_condor_requirements='(((TARGET.GLIDEIN_ToDie-CurrentTime)>%s)||isUndefined(TARGET.GLIDEIN_ToDie))'" % args.dynamic_lifetime)

# expected_lifetime can be an int (number of secs) or one of a few
# symbolic strings; try the numeric interpretation first.
try:
    dummy = string.atoi(args.expected_lifetime)
    jobsub_opts.append("--expected-lifetime=%ss" % (args.expected_lifetime))
except:
    # Not numeric (bare except kept deliberately: any atoi failure means
    # "treat it as symbolic"): it must be one of the allowed keywords.
    allowed_lifetimes = ["short", "medium", "long"]
    if args.expected_lifetime not in allowed_lifetimes:
        fail("Invalid expected_lifetime %s" % args.expected_lifetime)
    else:
        jobsub_opts.append("--expected-lifetime=%s" % (args.expected_lifetime))
1018 
# All software comes from CVMFS now, since /nova/data is no longer mounted
# on Fermigrid.
build_location_arguments = ""  # ":-e:%s/externals:/cvmfs/fermilab.opensciencegrid.org/products/common/db" % cvmfs_distro_base
source_scripts.append("/cvmfs/fermilab.opensciencegrid.org/products/common/etc/setups.sh")

# Assumes this job is being submitted on an up-to-date CVMFS install;
# best we can do...
cvmfs_rev = subprocess.check_output(["attr", "-qg", "revision", "/cvmfs/nova.opensciencegrid.org"]).strip()
if cvmfs_rev:
    art_sam_wrap_opts.append("--cvmfs-revision %s" % cvmfs_rev)
    if not args.disable_cvmfs_version_matching:
        # If the NOvA CVMFS revision classad is available, select on it;
        # if not, just accept the slot anyway.
        jobsub_opts.append("--append_condor_requirements='ifThenElse(isUndefined(TARGET.HAS_CVMFS_nova_opensciencegrid_org)==FALSE,TARGET.CVMFS_nova_opensciencegrid_org_REVISION>=%s,TRUE)'" % cvmfs_rev)

# Development-like tags live on their own CVMFS repository.
if tag == "development" or tag[0] == 'N':
    build_location_arguments += ":-6:/cvmfs/nova-development.opensciencegrid.org/novasoft"
else:
    build_location_arguments += ":-6:%(dir)s/novasoft/slf6/novasoft" % {"dir": cvmfs_distro_base}

setup_location = "%s/novasoft/slf6/novasoft/setup/setup_nova.sh" % (cvmfs_distro_base)
1035 
1036 
# Accounting group / subgroup selection for jobsub.
group = args.group
# Probably others are ok as well.
allowed_groups = ['nova', 'fermilab']
#allowed_groups=['nova_high_prio', 'nova_medium_prio', 'nova_low_prio', 'nova']
if group not in allowed_groups:
    # BUG FIX: this previously referenced the undefined name
    # "allowed_group", so an invalid --group raised a NameError instead
    # of printing the intended failure message.
    fail("The only valid args for --group are " + " ".join(allowed_groups))
jobsub_opts += ["-G %s" % group]

if args.test_queue:
    jobsub_opts += ["--subgroup test"]
elif args.subgroup:
    subgroup = args.subgroup
    if is_production_arg:
        allowed_subgroups = ["keepup_prio", "prod_high_prio", "prod_prio"]
    else:
        # Non-production submitters cannot use subgroups at all (yet).
        # (Message typo fixed: "reqire" -> "require".)
        fail("Only production subgroups are available at this time and require production credentials")
        #allowed_subgroups = [ "ana_prio" ]

    if subgroup in allowed_subgroups:
        jobsub_opts += ["--subgroup %s" % subgroup]
    else:
        fail("Allowed subgroups: " + ", ".join(allowed_subgroups))

jobsub_opts += ["--role=%s" % (role)]
1061 
1062 
# Singularity: pass the image via a raw classad line and require a
# singularity-capable slot.
if args.singularity:
    jobsub_opts.append("--line='+SingularityImage=\\\"%s\\\"'" % args.singularity)
    jobsub_opts.append("--append_condor_requirements='(TARGET.HAS_SINGULARITY=?=true)'")
    if args.jobfactory:
        jobsub_opts.append("--line='+JobFactoryType=\\\"%s\\\"'" % args.jobfactory)

# GPUs
if args.gpu:
    jobsub_opts.append("--line='+RequestGPUs=1'")

if args.node_features:
    jobsub_opts.append(make_jobsub_node_features_arg(args.node_features))
1077 
1078 
1079 
####################
# Setup runNovaSAM #
####################

nevts = args.nevts
if nevts > 0:
    run_nova_sam_opts.append("-n %d" % nevts)

# Zipping the logs implies keeping them.
if args.zipLogs:
    args.logs = True

if args.lemBalance and args.lemServer:
    fail("Cannot specify both --lemServer and --lemBalance")

# Toggled options accepted by runNovaSAM with exactly the same syntax as
# our own options.
passThru = ['gdb', 'hashDirs', 'runDirs', 'noCleanup', 'jsonMetadata', 'copyOut', 'logs', 'zipLogs', 'outputNumuDeCAF', 'outputNueDeCAF', 'outputNumuOrNueDeCAF','outputNusDeCAF', 'outputValidationDeCAF', 'lemBalance']

va = vars(args)
run_nova_sam_opts.extend('--' + opt for opt in passThru if va[opt])

# Could consider doing a similar loop for specialName, outTier, cafTier,
# histTier, but gets a little complicated, and there are fewer of them to
# begin with to make it worth the effort.

# Valued options forwarded verbatim when set.
for name in ("lemServer", "cosmicsPolarity", "npass", "skim",
             "systematic", "specialName", "genietune", "NPPFX"):
    value = getattr(args, name)
    if value:
        run_nova_sam_opts.append("--" + name + " " + value)

if args.declareFiles:
    run_nova_sam_opts.append("--declareFiles")

if args.declareLocations:
    run_nova_sam_opts.append("--declareLocations")
1135 
out_tiers = args.outTier

# For mcgen jobs the first outTier entry encodes what to copy back,
# in the form "<n>:<tier>".
if fcl == "mcgen":
    outnum, mcouttier = out_tiers[0].split(":")
    if mcouttier == "artdaq":
        copyback = '"*.daq.root"'
    else:
        copyback = '"*.' + mcouttier + '.root"'

if out_tiers is not None:
    for tier in out_tiers:
        run_nova_sam_opts.append("--outTier " + tier)

caf_tiers = args.cafTier
if caf_tiers is not None:
    for tier in caf_tiers:
        run_nova_sam_opts.append("--cafTier " + tier)

flat_tiers = args.flatTier
if flat_tiers is not None:
    for tier in flat_tiers:
        run_nova_sam_opts.append("--flatTier " + tier)

hist_tier = args.histTier
if hist_tier is not None:
    run_nova_sam_opts.append("--histTier " + hist_tier)

h5_tiers = args.h5Tier
if h5_tiers is not None:
    for tier in h5_tiers:
        run_nova_sam_opts.append("--h5Tier " + tier)

# Copy-out requires at least one output tier, and exactly one copy method.
if args.copyOut and (hist_tier is None and caf_tiers is None and flat_tiers is None and out_tiers is None and h5_tiers is None):
    fail("You specified --copyOut but did not specify --outTier, --cafTier, --flatTier, --h5Tier or --histTier")

if not (args.copyOut or args.copyOutScript):
    fail("Did not specify a method to copy back output (--copyOut or --copyOutScript)")

if args.copyOut and args.copyOutScript:
    fail("The options --copyOut and --copyOutScript conflict")
1176 
# Fold user-supplied env exports and hook scripts into the running lists.
if args.export:
    export_to_art_sam_wrap.extend(args.export)

if args.veryearlyscript:
    veryearly_scripts.extend(args.veryearlyscript)

if args.source:
    source_scripts.extend(args.source)

if args.earlyscript:
    early_scripts.extend(args.earlyscript)

if args.prescript:
    pre_scripts.extend(args.prescript)

if args.precopyscript:
    inter_scripts.extend(args.precopyscript)

if args.postscript:
    post_scripts.extend(args.postscript)

if args.copyOutScript:
    post_scripts.append(args.copyOutScript)

# Every script must exist either among the input files or on $PATH.
# A script entry may carry ":args" after its path.
for script in veryearly_scripts + early_scripts + source_scripts + pre_scripts + post_scripts + inter_scripts:
    script_path = script.split(":")[0] if ":" in script else script

    expanded = os.path.expandvars(script_path)
    if not find_file_in_list(expanded, input_files):
        if not find_file(os.environ["PATH"].split(os.pathsep), expanded):
            fail("Script %s does not exist!" % script_path)
1210 
# SAM project bookkeeping: either resume an existing project or mint a
# fresh, timestamped name and remember to start it later.
if args.continue_project:
    project_name = args.continue_project
    start_project = False
else:
    project_name = user + "-" + jobname + "-" + timestamp
    if args.test_submission:
        project_name += "-testjobs"
    start_project = True

#sam_station=os.getenv("SAM_STATION")

if args.novasoftups:
    art_sam_wrap_cmd = "$NOVASOFT_FQ_DIR/bin/art_sam_wrap.sh"
1225 
1226 
1227 
1228 
1229 
1230  #########################
1231  # Start the SAM project #
1232  #########################
1233 
1234 
1235  start_proj_command ="samweb start-project "
1236  if not snapshot_id:
1237  start_proj_command+=" --defname=%s" %defname
1238  else:
1239  start_proj_command+=" --snapshot_id=%d" % snapshot_id
1240  start_proj_command+=" --group=nova"
1241  start_proj_command+=" --station=%s" % sam_station
1242 
1243  start_proj_command+= " %s" %project_name
1244  if start_project and not test:
1245  start_proj_retval=os.system(start_proj_command)
1246  print "start proj returned %d" % start_proj_retval
1247  if start_proj_retval != 0:
1248  fail("Couldn't start project")
1249 
1250  print "Station monitor: http://samweb.fnal.gov:8480/station_monitor/nova/stations/" + sam_station +"/projects/" + project_name
1251  os.putenv("SAM_PROJECT_NAME",project_name)
1252 
1253  check_make_dir("${CONDOR_EXEC}")
1254  # Ensure unique job command
1255  job_cmd=os.path.expandvars("${CONDOR_EXEC}/%s.sh" % project_name)
1256  scriptcount=1
1257  while os.path.exists(job_cmd):
1258  job_cmd=os.path.expandvars("${CONDOR_EXEC}/%s_%i.sh" % (project_name, scriptcount) )
1259  scriptcount += 1
1260 
1261 
1262 
1263 
1264  ######################
1265  # Setup art_sam_wrap #
1266  ######################
1267 
1268  if not test:
1269  #create symlink so that jobs get a better name
1270  os.symlink(os.path.expandvars(art_sam_wrap_cmd),job_cmd)
1271  art_sam_wrap_cmd=job_cmd
1272 
1273  sys.stdout.flush()
1274  sys.stderr.flush()
1275 
1276 
1277 
1278  if not args.no_multifile:
1279  art_sam_wrap_opts += ["--multifile"]
1280 
1281  if args.printenv:
1282  art_sam_wrap_opts += ["--printenv"]
1283 
1284  if args.txtfiledef:
1285  run_nova_sam_opts += ["--txtfiledef"]
1286  print "Passing --txtfiledef from submit_nova_art.py to runNovaSAM"
1287 
1288  if not mcgen:
1289  art_sam_wrap_opts += ["--config " + fcl]
1290 
1291  if mcgen:
1292  jobsub_opts += ['-l "+JobType="MC""']
1293  art_sam_wrap_opts += ["--getconfig"]
1294 
1295  if args.mix:
1296  art_sam_wrap_opts += ["--mix",args.mix]
1297 
if not args.novasoftups:
    # Source the CVMFS novasoft setup with release tag, maxopt flag and
    # build-location suffixes.
    art_sam_wrap_opts.append("--source %s:-r:%s:%s%s" % (setup_location, tag, maxopt_opt, build_location_arguments))
    if args.testrel or args.user_tarball:
        art_sam_wrap_opts.append("--setup_testrel")

    if args.ngu_test:
        art_sam_wrap_opts.append("--source setup_test_product:NovaGridUtils")

    if args.ngu_version:
        art_sam_wrap_opts.append("--source setup_product:NovaGridUtils:%s" % (args.ngu_version))

    if args.testrel_ngu:
        art_sam_wrap_opts.append("--testrel_ngu")

# Forward the accumulated hook scripts and input files to the wrapper
# (precopy scripts go to runNovaSAM instead).
art_sam_wrap_opts.extend("--veryearlyscript " + s for s in veryearly_scripts)
art_sam_wrap_opts.extend("--earlyscript " + s for s in early_scripts)
art_sam_wrap_opts.extend("--source " + s for s in source_scripts)
art_sam_wrap_opts.extend("--prescript " + s for s in pre_scripts)
run_nova_sam_opts.extend("--precopyscript " + s for s in inter_scripts)
art_sam_wrap_opts.extend("--postscript " + s for s in post_scripts)
art_sam_wrap_opts.extend("--inputfile " + f for f in input_files)

if mcgen:
    # copyback was derived from the first outTier above.
    art_sam_wrap_opts.append("--addoutput " + copyback)
    if args.hashDirs == True:
        art_sam_wrap_opts.append("--hash")
    art_sam_wrap_opts.append("--dest " + dest)
1338 
1339  if args.poms and not test:
1340  poms_campaign_id = poms_client.register_poms_campaign(
1341  jobname,
1342  user = user,
1343  experiment = 'nova',
1344  version = tag,
1345  dataset = defname)
1346 # campaign_definition = args.poms_definition )
1347 
1348  poms_task_id = poms_client.get_task_id_for(
1349  poms_campaign_id,
1350  user = user,
1351  command_executed = build_jobsub_cmd() )
1352 
1353  export_to_art_sam_wrap += ["POMS_CAMPAIGN_ID={0}".format(poms_campaign_id),
1354  "POMS_TASK_ID={0}".format(poms_task_id)]
1355  print "POMS Campaign: https://pomsgpvm01.fnal.gov/poms/campaign_info?campaign_id={0}".format(poms_campaign_id)
1356 
1357 
1358 
1359 
1360 
1361  ############################
1362  # Actually launch the jobs #
1363  ############################
1364 
1365  jobsub_cmd = build_jobsub_cmd()
1366 
1367  if print_jobsub:
1368  print jobsub_cmd
1369  sys.stdout.flush()
1370  sys.stderr.flush()
1371 
1372 
1373  if not test:
1374  os.system(jobsub_cmd)
1375 
1376  if njobs > 1000:
1377  print
1378  print "Please note: if you intend to submit any more jobs,"
1379  print " please wait", njobs/1000, "minutes before your next submission"
1380  print " so as to avoid overloading the jobsub server."
void split(double tt, double *fr)
def setup_production(args)
def check_dir(output_dir, prefix='')
Definition: check_jobs.py:127
def find_file(paths, filename)
Module that skips a configurable number of events between each that it allows through. Note that this module really skips (N-1) events, it uses a simple modular division as its critera. This module will cut down the data sample to 1/N of its original size.
def remove_comments(src)
def setup_analysis(args)
def make_jobsub_node_features_arg(features)
def check_tag(tag)
const std::map< std::pair< std::string, std::string >, Variable > vars
def find_file_in_list(filepath, pathlist)
def fail(msg)
print a failure message, from: https://cdcvs.fnal.gov/redmine/projects/novaart/repository/entry/trunk...
Definition: common_tools.py:7
std::string format(const int32_t &value, const int &ndigits=8)
Definition: HexUtils.cpp:14
def warn(msg)
print a warning message, from: https://cdcvs.fnal.gov/redmine/projects/novaart/repository/entry/trunk...
Definition: common_tools.py:16
procfile open("FD_BRL_v0.txt")
cet::coded_exception< errors::ErrorCodes, ExceptionDetail::translate > Exception
Definition: Exception.h:66
def check_fcl(tag, fcl)
def add_node_features_arg(parser)
def check_env(vname)
def setup_calibration(args)
def check_make_dir(dname)