submit_nova_art.py
#!/usr/bin/env python

import os, sys, stat, pwd, re
import argparse
import datetime
import samweb_client
import string
import tokenize
import cStringIO
import subprocess
import NovaGridUtils
from NovaGridUtils import *
from time import sleep  # sleep() is used below when pausing before risky submissions
user=os.getenv("USER")
os.environ["GRID_USER"]=user

sam_user=user
sam_station=os.getenv("SAM_STATION")
os.system("echo SAM_STATION DEFINED AS $SAM_STATION")

recommended_sites=["BNL",
                   "Caltech",
                   "Clemson",
                   "Cornell",
                   "FZU",
#                  "Harvard",
                   "Hyak_CE",
                   "Michigan",
#                  "MIT",
                   "MWT2",
                   "Nebraska",
                   "NotreDame",
                   "Omaha",
                   "OSC",
                   "SMU_HPC",
                   "SU-OG",
                   "SU-ITS",
                   "UChicago",
                   "UCSD",
#                  "TTU",
                   "Wisconsin"]
# Not working now, may return: "Harvard", "MIT", "MWT2", "UChicago", "TTU"
# FIFE maintains a list, too. Find it here:
# https://cdcvs.fnal.gov/redmine/projects/fife/wiki/Information_about_job_submission_to_OSG_sites

cvmfs_distro_base = "/cvmfs/nova.opensciencegrid.org"
novasoft_cvmfs = "%s/novasoft/slf6/novasoft" % cvmfs_distro_base
build_location_arguments = "" # Becomes more specific with --cvmfs
setup_location = ""

jobsub_opts = []

run_nova_sam_cmd="runNovaSAM.py"
run_nova_sam_opts= []
export_to_run_nova_sam = []

art_sam_wrap_cmd="$NOVAGRIDUTILS_DIR/bin/art_sam_wrap.sh"
art_sam_wrap_opts= []
export_to_art_sam_wrap=[]
export_to_art_sam_wrap.append("SAM_PROJECT_NAME")
export_to_art_sam_wrap.append("SAM_STATION")
export_to_art_sam_wrap.append("IFDH_BASE_URI")
if "IFDH_DEBUG" in os.environ:
    export_to_art_sam_wrap.append("IFDH_DEBUG")
if "G4NEUTRONHP_USE_ONLY_PHOTONEVAPORATION" in os.environ:
    export_to_art_sam_wrap.append("G4NEUTRONHP_USE_ONLY_PHOTONEVAPORATION")

export_to_art_sam_wrap.append("EXPERIMENT")
export_to_art_sam_wrap.append("GRID_USER")

os.environ["CVMFS_DISTRO_BASE"]=cvmfs_distro_base
export_to_art_sam_wrap.append("CVMFS_DISTRO_BASE")

# there must be a better name for this variable
usage_models=["DEDICATED"]

input_files=[]

veryearly_scripts=[]
early_scripts=[]
source_scripts=[]
pre_scripts=[]
inter_scripts=[]
post_scripts=[]

def check_env(vname):
    value=os.getenv(vname)
    if None == value or "" == value:
        fail("Environment variable %s not defined" % vname)

def check_fcl(tag,fcl):
    if "/" in fcl[:1] and os.path.isfile(fcl):
        return fcl # hey I'm ok!
    # Loop over dirs in FHICL_FILE_PATH
    fclPaths = os.environ["FHICL_FILE_PATH"].split(":")
    for path in fclPaths:
        fullPath = os.path.join(path, fcl)
        if os.path.isfile(fullPath):
            return fcl # hey you're ok!
    # Check if they are copying the fhicl file to the worker node.
    for input_file in input_files:
        if os.path.basename(input_file) == fcl:
            return fcl # Passing fhicl as argument, all good.
        elif os.path.basename(input_file) == os.path.basename(fcl):
            print ""
            print "The fhicl will be copied to $PWD on the worker node."
            print "You specified some sort of file path which isn't needed. Fixing it for you :)"
            print fcl + " --> " + os.path.basename(fcl)
            print ""
            return os.path.basename(fcl) # User incorrectly left file path there.

    fail("fcl file %s does not exist" % fcl)


def remove_comments(src):
    """
    This reads tokens using tokenize.generate_tokens and recombines them
    using tokenize.untokenize, skipping comment/docstring tokens in between
    """
    f = cStringIO.StringIO(src)
    class SkipException(Exception): pass
    processed_tokens = []
    last_token = None
    # go thru all the tokens and try to skip comments and docstrings
    for tok in tokenize.generate_tokens(f.readline):
        t_type, t_string, t_srow_scol, t_erow_ecol, t_line = tok

        try:
            if t_type == tokenize.COMMENT:
                raise SkipException()

            elif t_type == tokenize.STRING:

                if last_token is None or last_token[0] in [tokenize.INDENT]:
                    pass

        except SkipException:
            pass
        else:
            processed_tokens.append(tok)

        last_token = tok

    return tokenize.untokenize(processed_tokens)
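
# Illustration (hypothetical input): remove_comments() is applied below to the
# plain-text argument files passed with -f/--file, so a file containing
#   --jobname myjob   # this comment is dropped
#   --njobs 10
# comes back with the '#' comments stripped before being split into argv tokens.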

def find_file(paths, filename):
    if os.path.isfile(filename):
        return filename
    for path in paths:
        for root, dirs, files in os.walk(os.path.expandvars(path)):
            if filename in files:
                return os.path.join(root, filename)
    fail("Cannot find file "+filename)

def find_file_in_list(filepath, pathlist):
    for testpath in pathlist:
        if os.path.basename(filepath) == os.path.basename(testpath):
            return True
    return False
161 
163  # Start with jobsub_submit and its options
164  jobsub_cmd = "jobsub_submit \\\n"
165  for opt in jobsub_opts:
166  jobsub_cmd += " " + opt + " \\\n"
167  for export in export_to_art_sam_wrap:
168  jobsub_cmd += " -e " + export
169  jobsub_cmd += " \\\n"
170 
171  # Add art_sam_wrap wrapper script and its options
172  if args.testrel:
173  if args.reuse_tarball:
174  jobsub_cmd += " --tar_file_name dropbox://" + os.path.basename(args.testrel) +".tar \\\n"
175  else:
176  jobsub_cmd += " --tar_file_name tardir://" + args.testrel +" \\\n"
177  elif args.user_tarball:
178  if not os.path.isfile(args.user_tarball):
179  print "Tarball filename passed to --user_tarball does not exit:", args.user_tarball
180  sys.exit(5)
181  jobsub_cmd += " --tar_file_name dropbox://" + args.user_tarball + " \\\n"
182 
183  jobsub_cmd += " file://" + art_sam_wrap_cmd + " \\\n"
184  for opt in art_sam_wrap_opts:
185  jobsub_cmd += " " + opt + " \\\n"
186  for export in export_to_run_nova_sam :
187  jobsub_cmd += " --export " + export + " \\\n"
188 
189 
190 
191  # Now set the main program and its options
192  if not mcgen:
193  jobsub_cmd += " -X " + run_nova_sam_cmd + " \\\n"
194  for opt in run_nova_sam_opts:
195  jobsub_cmd += " " + opt + " \\\n"
196  else:
197  jobsub_cmd += " -X nova \\\n"
198  mcgen_opts = ["--sam-file-type=importedSimulated", "--sam-application-family=nova","--sam-data-tier=" + mcouttier,"--sam-application-version=" + tag]
199  if nevts>0 :
200  mcgen_opts += ["-n %d" % nevts]
201  for opt in mcgen_opts:
202  jobsub_cmd += " " + opt + " \\\n"
203 
204  jobsub_cmd = jobsub_cmd[:-2]
205  return jobsub_cmd
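
# Illustrative sketch only (actual output depends on the chosen options): the
# assembled command looks roughly like
#
#   jobsub_submit \
#    -N 100 \
#    --resource-provides=usage_model=DEDICATED \
#    -e SAM_PROJECT_NAME \
#    ...
#    file://$NOVAGRIDUTILS_DIR/bin/art_sam_wrap.sh \
#    --multifile \
#    -X runNovaSAM.py \
#    --outTier out1:reco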


#######################################################################################

if __name__=='__main__':

    prog=os.path.basename(sys.argv[0])
    NovaGridUtils.prog=prog


    while "-f" in sys.argv or "--file" in sys.argv:
        ### Allow args to be passed in as a plain text file.
        ### We make a preliminary parser to pull these arguments out, for two reasons:
        ###   1) Maintain standard -h, --help functionality
        ###   2) Avoid requiring the required arguments during this initial parsing:
        ###      allow them to be missing here, and find them in the file instead.
        preliminary_parser = argparse.ArgumentParser(prog=prog, description='Submit nova art job')

        preliminary_parser.add_argument('-f', '--file',
            help="""Text file containing any arguments to this utility. Multiple allowed.
            Arguments should look just like they would on the command line,
            but the parsing of this file is whitespace insensitive.
            Commented lines will be identified with the # character and removed. """,
            type=str, action='append')
        pre_args, unknown = preliminary_parser.parse_known_args()

        # Remove pre_args from sys.argv so they are not processed again
        sys.argv = filter(lambda x: x not in ["-f", "--file"], sys.argv)

        if pre_args.file:
            for filepath in pre_args.file:
                index = sys.argv.index(filepath)
                sys.argv.remove(filepath)
                if os.path.isfile(filepath):
                    fullpath = filepath
                else:
                    fullpath = find_file(["$NOVAGRIDUTILS_DIR/configs/"], filepath)
                text = open(fullpath, 'r').read()
                text = remove_comments(text) # Strip out commented lines
                newargs = []
                for line in text.splitlines():
                    # Insert arguments into the list at the position
                    # where the -f appeared
                    newargs += line.split()
                sys.argv[index:index] = newargs

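    # Hypothetical example of such an argument file (placeholder values):
    #
    #   --jobname  myjob
    #   --defname  my_sam_dataset      # comments are stripped
    #   --config   myjob.fcl
    #   --tag      development
    #   --dest     /pnfs/nova/scratch/users/$USER/output
    #
    # Each whitespace-separated token is spliced into sys.argv above.
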
    parser = argparse.ArgumentParser(prog=prog, description='Submit nova art job', add_help=False)

    ### required options
    required_args = parser.add_argument_group("Required arguments", "These arguments must be supplied.")

    required_args.add_argument('--jobname',
        required=True,
        help='Job name',
        type=str)

    required_args.add_argument('--defname',
        required=True,
        help='SAM dataset definition to run over',
        type=str)

    required_args.add_argument('--config', '-c',
        required=True,
        help='FHiCL file to use as configuration for the nova executable. The path given should be relative to the $SRT_PRIVATE_CONTEXT of any test release you submit with',
        type=str)

    required_args.add_argument("--tag",
        required=True,
        help="Tag of novasoft to use",
        type=str)

    required_args.add_argument("--dest",
        required=True,
        help="Destination for output files",
        type=str)

    ### debugging
    debugging_args = parser.add_argument_group("Debugging options", "These optional arguments can help debug your submission.")

    debugging_args.add_argument('--print_jobsub',
        help='Print jobsub command',
        action='store_true', default=False)

    debugging_args.add_argument('--printenv',
        help='Print environment variables',
        action='store_true', default=False)

    debugging_args.add_argument('--test',
        help='Do not actually do anything, just run tests and print the jobsub cmd',
        action='store_true', default=False)

    debugging_args.add_argument('--gdb',
        help='Run the nova executable under gdb, print a full stack trace, then quit gdb.',
        action='store_true', default=False)

    debugging_args.add_argument('--test_submission',
        help='Override other arguments given to submit a test to the grid. It will run 1 job with 3 events and write the output to /pnfs/nova/scratch/users/<user>/test_jobs/<date>_<time>',
        action='store_true', default=False)

    debugging_args.add_argument('--jobsub_server',
        help='Submit using the specified jobsub server',
        default="")

    debugging_args.add_argument('--test_queue',
        help='Submit jobs to the test jobsub queue for higher starting priority. NB: NOvA is limited to 10 jobs at a time on this queue.',
        action='store_true', default=False)

    debugging_args.add_argument("--kill_after",
        metavar="SEC",
        help="If the job is still running after this many seconds, kill it in such a way that a log will be returned",
        type=int)

    ### job control
    job_control_args = parser.add_argument_group("Job control options", "These optional arguments help control where and how your jobs land.")

    ### number of jobs
    job_control_args.add_argument('--njobs',
        help='Number of jobs to submit',
        type=int, default=0)

    job_control_args.add_argument('--maxConcurrent',
        help='Run a maximum of N jobs simultaneously',
        metavar='N',
        type=int, default=0)

    job_control_args.add_argument('--files_per_job',
        help='Number of files per job - if zero, calculate from the number of jobs',
        metavar='N',
        type=int, default=0)

    job_control_args.add_argument('--nevts',
        help='Number of events per file to process',
        type=int, default=0)


    job_control_args.add_argument('--no_multifile',
        help='Do not use art_sam_wrap.sh multifile mode, which is on by default',
        action='store_true')

    job_control_args.add_argument('--txtfiledef',
        help='Use if the input definition is made up of text files, each containing a list of file names',
        action='store_true', default=False)

    ### general job control
    job_control_args.add_argument('--opportunistic',
        help='Run opportunistically on Fermigrid',
        action='store_true', default=False)

    job_control_args.add_argument('--offsite',
        help='Allow running on offsite resources as well. Implies --opportunistic.',
        action='store_true', default=False)

    job_control_args.add_argument('--offsite_only',
        help='Run solely on offsite resources.',
        action='store_true', default=False)

    job_control_args.add_argument('--amazon',
        help='Run at Amazon (AWS).',
        action='store_true', default=False)

    job_control_args.add_argument('--site',
        help='Specify allowed offsite locations. Omit to allow running at any offsite location',
        type=str, action='append')

    job_control_args.add_argument('--exclude_site',
        help='Specify an offsite location to exclude.',
        metavar='SITE',
        type=str, action='append')

    job_control_args.add_argument('--recommended_sites',
        help='Submit to the list of known working offsite locations.',
        action='store_true', default=False)

    job_control_args.add_argument('--autoDropbox',
        help='Use automatic dropbox location based on site',
        action='store_true', default=False)

    job_control_args.add_argument('--os',
        help='Specify OS version of worker node',
        type=str, action='append')

    job_control_args.add_argument('--disk',
        help='Local disk space requirement for worker node in MB.',
        type=int, default=10000)

    job_control_args.add_argument('--memory',
        help='Local memory requirement for worker node in MB.',
        type=int, default=1900)

    job_control_args.add_argument('--expected_lifetime',
        help='Expected job lifetime (default is 10800s=3h). Valid values are an integer number of seconds or one of "short" (6h), "medium" (12h) or "long" (24h, jobsub default)',
        metavar='LIFETIME',
        type=str, default="10800")

    job_control_args.add_argument('--dynamic_lifetime',
        help="Dynamically determine whether a new file should be started based on glidein lifetime. Specify the maximum time a single file is expected to take to process, in seconds.",
        metavar="LIFETIME",
        type=str)

    job_control_args.add_argument('--cpu',
        help="Request worker nodes that have at least NUMBER cpus",
        type=int, default=1)


    job_control_args.add_argument('--group', '-G',
        help="Specify batch group GROUP -- mainly used to set job priority. At present the only supported value is nova",
        type=str, default="nova")

    job_control_args.add_argument('--subgroup',
        help='Production subgroup',
        type=str)

    job_control_args.add_argument("--role",
        help="Specify role to run on the grid. Can be Analysis (default) or Production. Setting this option directly is no longer supported",
        type=str, default="Analysis")

    job_control_args.add_argument('--continue_project',
        help="Don't start a new samweb project, instead continue this one.",
        metavar='PROJECT',
        type=str, default="")

    job_control_args.add_argument("--snapshot_id",
        help="Use this existing snapshot instead of creating a new one.",
        metavar="ID",
        type=int, default=0)

    job_control_args.add_argument("--poms",
        help="Start/continue a POMS campaign and task for this submission",
        default=False, action="store_true")

    job_control_args.add_argument("--mix",
        help="Pass a mixing script to the job to pull in files for job mixing.",
        default="", type=str)

    job_control_args.add_argument("--mail_always",
        help="Do you want an email whenever every job finishes?",
        default=False, action="store_true")

    job_control_args.add_argument("--mail_on_error",
        help="Do you want an email whenever a job fails on an error?",
        default=False, action="store_true")

#    job_control_args.add_argument("--poms_definition",
#        help="POMS definition to use",
#        default="GenercicSubmitNOvAArt", type=str)

    job_control_args.add_argument('--user_priority',
        help='Priority (integer) within a user\'s jobs (default = 0)',
        type=int, default=0)

    job_control_args.add_argument('--singularity',
        help='Location in CVMFS of a singularity container to launch the job into',
        type=str, default='')

    job_control_args.add_argument('--jobfactory',
        help='Use the specified JobFactoryType.',
        default="")

    job_control_args.add_argument("--gpu",
        help="Request a node with a GPU",
        default=False, action="store_true")

    add_node_features_arg(job_control_args)

    ### software control
    novasoft_args = parser.add_argument_group("NOvA software options", "These options control the novasoft setup.")
    novasoft_args.add_argument('--maxopt',
        help='Run in maxopt mode',
        action='store_true', default=True)

    testrel_gp = novasoft_args.add_mutually_exclusive_group(required=False)
    testrel_gp.add_argument("--testrel",
        help="Use a test release at location TESTREL. It will be tarred up, and sent to the worker node.",
        type=str)
    testrel_gp.add_argument("--user_tarball",
        help="Use an existing test release tarball in the specified location rather than having jobsub make one for you (conflicts with --testrel)",
        type=str)

    novasoft_args.add_argument('--reuse_tarball',
        help='Reuse a tarball that is already in resilient space. If using this option, avoid a trailing slash in the --testrel option. (conflicts with --user_tarball)',
        action='store_true', default=False)

    novasoft_args.add_argument('--cvmfs',
        help='Does nothing (always true), but retained for compatibility: pull software from CVMFS.',
        action='store_true')


    novasoft_args.add_argument('--disable_cvmfs_version_matching',
        help="Don't perform a CVMFS-is-up-to-date check on target nodes via Condor requirements. (For advanced debugging use.)",
        action="store_true",
        default=False)

    novasoft_args.add_argument('--novasoftups',
        help='Use the ups build of novasoft; must be used with --source to do the setup.',
        action='store_true')

    novasoft_args.add_argument('--ngu_test',
        help='Set up the test version of NovaGridUtils in the grid jobs.',
        action='store_true')

    novasoft_args.add_argument('--ngu_version',
        help='Set up a specific NovaGridUtils version in the grid jobs.',
        metavar='VERSION',
        type=str)

    novasoft_args.add_argument('--testrel_ngu',
        help="Must be used with --testrel, with NGU checked out. After unpacking the tarball, the local version of NGU you are using will be set up on the worker.",
        action='store_true')

    novasoft_args.add_argument('--lemBalance',
        help='Choose lem server based on (CLUSTER+PROCESS)%%2 to balance load',
        action='store_true', default=False)

    novasoft_args.add_argument('--lemServer',
        help='Specify lem server',
        type=str)

    ### output control
    output_args = parser.add_argument_group("Output file options", "Note that you must specify either --copyOut or --copyOutScript.")


    output_args.add_argument('--copyOutScript',
        help='Use script COPYOUTSCRIPT to copy back your output',
        type=str)

    output_args.add_argument('--copyOut',
        help='Use the built-in copy out mechanism. If used, you must specify --outTier, --cafTier, --flatTier, --h5Tier or --histTier',
        action='store_true')

    output_args.add_argument('--logs',
        help='Return .log files corresponding to every output',
        action='store_true')
    output_args.add_argument('--zipLogs',
        help='Format logs as .bz2 files. Implies --logs',
        action='store_true')

    output_args.add_argument('--outTier',
        help='Data tier of the output file, multiple allowed, formatted as <name_in_fcl_outputs>:<data_tier>',
        type=str, action='append')

    output_args.add_argument('--cafTier',
        help='Module label for CAF output, multiple allowed. Format as <cafmaker_module_label>:<data_tier>',
        type=str, action='append')

    output_args.add_argument('--flatTier',
        help='Module label for FlatCAF output, multiple allowed. Format as <flatmaker_module_label>:<data_tier>',
        type=str, action='append')

    output_args.add_argument('--histTier',
        help='File identifier string for TFileService output, only one allowed. Supply as --histTier <id> for output_name.<id>.root, where output_name is assembled based on the input file.',
        type=str)

    output_args.add_argument('--h5Tier',
        help='File identifier for h5 output, multiple allowed. Format as <hdf5maker_module>:<data_tier>',
        type=str, action='append')
    output_args.add_argument('--second_config',
        help="""Second configuration fcl executed after the main process.
        nova is executed with the art file that is output from the main process.
        Files produced by this second process that are named identically to
        files produced by the main process and are among the requested outputs
        are ignored, and the file produced by the first process is returned""",
        type=str)

    output_args.add_argument('--outputNumuDeCAF',
        help='Make standard numu decafs for all CAF files produced during the job',
        action='store_true', default=False)

    output_args.add_argument('--outputNueDeCAF',
        help='Make standard nue decafs for all CAF files produced during the job',
        action='store_true', default=False)

    output_args.add_argument('--outputNumuOrNueDeCAF',
        help='Make standard nue or numu decafs for all CAF files produced during the job',
        action='store_true', default=False)

    output_args.add_argument('--outputNusDeCAF',
        help='Make standard nus decafs for all CAF files produced during the job',
        action='store_true', default=False)

    output_args.add_argument('--outputValidationDeCAF',
        help='Make validation (nue_or_numu_or_nus) decafs for all CAF files produced during the job',
        action='store_true', default=False)


    output_args.add_argument('--cosmicsPolarity',
        help='To specify a horn polarity for the cosmics output file name',
        type=str)

    output_args.add_argument('--npass',
        help='To specify npass (aka nova.subversion)',
        type=str)

    output_args.add_argument('--skim',
        help='To specify nova.skim (does not work with mc gen)',
        type=str)

    output_args.add_argument('--systematic',
        help='To specify nova.systematic (does not work with mc gen)',
        metavar='SYST',
        type=str)

    output_args.add_argument('--specialName',
        help='To specify nova.special name (does not work with mc gen)',
        type=str)

    output_args.add_argument('--genietune',
        help='To specify nova.genietune (does not work with mc gen)',
        metavar='TUNE',
        type=str)

    output_args.add_argument('--NPPFX',
        help='To specify the number of PPFX universes',
        type=str)

    output_args.add_argument('--hashDirs',
        help='Use hash directory structure in destination directory.',
        action='store_true')

    output_args.add_argument('--runDirs',
        help='Use run directory structure in destination directory, 000XYZ/XYZUW for run number XYZUW.',
        action='store_true')

    output_args.add_argument('--noCleanup',
        help='Pass --noCleanup argument to runNovaSAM.py. Necessary when using a postscript for copyout.',
        action='store_true')

    output_args.add_argument('--jsonMetadata',
        help='Create JSON files with metadata corresponding to each output file, and copy them to the same destinations',
        action='store_true')

    output_args.add_argument('--declareFiles',
        help='Declare files with metadata on the worker node',
        action='store_true')

    output_args.add_argument('--production',
        help='Submit production style jobs. Implies "--role=Production --hashDirs --jsonMetadata --zipLogs", and checks that other settings needed for production are specified',
        action='store_true')

    output_args.add_argument('--calibration',
        help='Submit calibration style jobs. Implies "--role=Production", and checks that other settings needed for calibration are specified',
        action='store_true')

    output_args.add_argument('--declareLocations',
        help='Declare the file output locations to SAM during the copy back of the files',
        action='store_true')

    ### environment configuration
    environment_args = parser.add_argument_group("Environment options", "These optional arguments allow control of the grid running environment.")

    environment_args.add_argument('--export',
        help='Export variable EXPORT to art_sam_wrap.sh',
        type=str, action='append')

    environment_args.add_argument('--veryearlyscript',
        help='Source script VERYEARLYSCRIPT before any environment setup or sourcing',
        type=str, action='append')

    environment_args.add_argument('--source',
        help='Source script SOURCE',
        type=str, action='append')

    environment_args.add_argument('--earlyscript',
        help='Execute script EARLYSCRIPT before any environment setup',
        type=str, action='append')

    environment_args.add_argument('--prescript',
        help='Execute script PRESCRIPT before executing runNovaSAM.py',
        type=str, action='append')

    environment_args.add_argument('--precopyscript',
        help='Execute script PRECOPYSCRIPT within runNovaSAM.py, after running the nova -c command.',
        type=str, action='append')

    environment_args.add_argument('--postscript',
        help='Execute script POSTSCRIPT after executing runNovaSAM.py',
        type=str, action='append')

    environment_args.add_argument('--inputfile',
        help='Copy this extra input file into the job area before running the executable',
        type=str, action='append')


    ### support options
    support_args = parser.add_argument_group("Support options", "These optional arguments make using this submission utility easier.")

    support_args.add_argument("-h", "--help", action="help", help="Show this help message and exit")

    support_args.add_argument('-f', '--file',
        help="""Text file containing any arguments to this utility. Multiple allowed.
        Arguments should look just like they would on the command line,
        but the parsing of this file is whitespace insensitive.
        Comments will be identified with the # character and removed. """,
        type=str, action='append')


    ############################################
    # Process and check command line arguments #
    ############################################

    args = parser.parse_args()
    timestamp=datetime.datetime.now().strftime("%Y%m%d_%H%M")

    # Load POMS if we need to:
    if args.poms:
        try:
            import poms_client
        except ImportError:
            print "POMS not setup. Run this and try again:"
            print
            print " setup poms_client"
            print
            sys.exit(1)

    # Check for test submission. Has to be first to override other arguments
    if args.test_submission:
        test_njobs = 1
        test_nevts = 3
        test_dest = "/pnfs/nova/scratch/users/%s/test_jobs/%s" % (os.environ["USER"], timestamp)
        if not os.path.exists(test_dest):
            os.makedirs(test_dest)
            mode = os.stat(test_dest).st_mode | stat.S_IXGRP | stat.S_IWGRP
            os.chmod(test_dest, mode)
        test_expected_lifetime = "0"
        test_dynamic_lifetime = "500"
        test_files_per_job = 1

        print "Running a test submission. Overwriting:"

        print " njobs", args.njobs, "-->", test_njobs
        args.njobs = test_njobs
        print " nevts", args.nevts, "-->", test_nevts
        args.nevts = test_nevts
        print " dest", args.dest, "-->", test_dest
        args.dest = test_dest
        print " expected_lifetime", args.expected_lifetime, "-->", test_expected_lifetime
        args.expected_lifetime = test_expected_lifetime
        print " dynamic_lifetime", args.dynamic_lifetime, "-->", test_dynamic_lifetime
        args.dynamic_lifetime = test_dynamic_lifetime
        print " files_per_job", args.files_per_job, "-->", test_files_per_job
        args.files_per_job = test_files_per_job
        if args.declareFiles:
            print " don't declareFiles"
            args.declareFiles = False
        if args.declareLocations:
            print " don't declareLocations"
            args.declareLocations = False
        if args.autoDropbox:
            print " don't use autoDropbox"
            args.autoDropbox = False
        if args.poms:
            print " don't use poms"
            args.poms = False

        #print " use the test jobsub queue, so OnSite only."
        #args.test_queue = True
        args.offsite = False

    jobname=args.jobname
    defname=args.defname
    snapshot_id=args.snapshot_id
    print_jobsub=args.print_jobsub

    if args.printenv:
        print "Will print environment vars "
        printenv=True

    test=args.test
    if test:
        print_jobsub=True
        print ""
        warn("--test was specified, so all we do is run checks and print the jobsub cmd.")

    check_env("SETUP_IFDH_ART")
    check_env("SETUP_SAM_WEB_CLIENT")
    check_env("SETUP_JOBSUB_CLIENT")
    check_env("SAM_STATION")
    tag=check_tag(args.tag)

    srt_qual="debug"
    maxopt=args.maxopt
    maxopt_opt=""
    if maxopt:
        maxopt_opt += "-b:maxopt"
        srt_qual="maxopt"

    if args.reuse_tarball and not args.testrel:
        fail("--reuse_tarball specified without --testrel??")

    if args.testrel:
        check_dir(args.testrel)
        if not os.path.isdir(args.testrel+'/lib/'+os.getenv('SRT_ARCH')+'-GCC-'+srt_qual):
            fail(args.testrel+' has never been built '+srt_qual)

    if args.inputfile:
        input_files += args.inputfile

    for input_file in input_files:
        if not os.path.isfile(os.path.expandvars(input_file)):
            fail("Input file %s does not exist!" % input_file)
        if os.path.expandvars(input_file).startswith("/nova/"):
            fail("Input file %s cannot be on /nova/app, /nova/ana/ or /nova/data/; it must be in dCache /pnfs/nova/" % input_file)
        elif os.path.expandvars(input_file).startswith("/grid/"):
            fail("Input file %s cannot be on /grid/; it must be in dCache /pnfs/nova/" % input_file)

    if args.singularity and not os.path.exists(args.singularity):
        fail("Requested singularity image cannot be found: %s" % args.singularity)

    if args.gpu and not args.singularity:
        warn("Requested a GPU, but did not request singularity. This is not likely to succeed.")

    if args.gpu and not args.offsite_only:
        warn("GPUs are only available offsite, and you have not chosen --offsite_only")


    fcl=args.config

    mcgen = (fcl == "mcgen")

    if not mcgen:
        fcl = check_fcl(tag,fcl)

    dest=args.dest
    if not dest.startswith("s3://"):
        check_dir(dest)

    if os.path.expandvars(dest).startswith("/nova/"):
        fail("Destination directory %s cannot be on /nova/app, /nova/ana/ or /nova/data/; it must be in dCache /pnfs/nova/" % dest)
    elif os.path.expandvars(dest).startswith("/grid/"):
        fail("Destination directory %s cannot be on /grid/; it must be in dCache /pnfs/nova/" % dest)

    export_to_run_nova_sam.append("DEST=%s"%dest)

    is_production_arg = args.production

    if "Production" in args.role and not is_production_arg:
        fail("You specified --role=Production but not --production. This is no longer supported")

    if args.production:
        setup_production(args)
    elif args.calibration:
        setup_calibration(args)
    else:
        setup_analysis(args)

    # Test submissions also turn off hashDirs (which setup_production may have
    # just switched on)
    if args.test_submission:
        print "Running a test submission, turning off hashDirs"
        args.hashDirs = False

    if args.hashDirs and args.runDirs:
        fail("Cannot specify both --hashDirs and --runDirs (note that hashDirs is implied by --production)")

    role=args.role

    njobs=args.njobs
    files_per_job = args.files_per_job

    print "Definition name: %s" % defname
    if snapshot_id:
        print " with snapshot_id: %d" % snapshot_id



    ################
    # Setup jobsub #
    ################

    if args.jobsub_server:
        jobsub_opts += ["--jobsub-server=%s" % args.jobsub_server]

    if files_per_job > 0 and njobs > 0:
        ## both njobs and files per job are specified. Just pass
        ## the settings through to jobsub and art_sam_wrap
        jobsub_opts += ["-N %d" % njobs]
        art_sam_wrap_opts += ["--limit %d" % files_per_job]

    elif files_per_job > 0:
        ## files/job specified, but not njobs. Calculate njobs
        ## on the fly

        ## get files in dataset
        samweb = samweb_client.SAMWebClient(experiment='nova')
        if not snapshot_id:
            num_project_files=samweb.countFiles(defname=defname)
        else:
            num_project_files=samweb.countFiles(dimensions="snapshot_id {0:d}".format(snapshot_id))
        print "Definition file count %d" % num_project_files

        njobs=(num_project_files / files_per_job) + 1
        jobsub_opts += ["-N %d" % njobs]
        art_sam_wrap_opts += ["--limit %d" % files_per_job]

    elif njobs > 0:
        ## njobs specified, but not files/job. Just set njobs
        ## and don't force limits on files per job
        jobsub_opts += ["-N %d" % njobs]

    else:
        warn("Neither --njobs nor --files_per_job specified. Did you really want to do this? Sleeping for 5 seconds")
        sleep(5)

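    # Worked example (illustrative numbers): with --files_per_job=5 and a
    # definition holding 2503 files, njobs = (2503 / 5) + 1 = 501, and each job
    # stops after consuming at most 5 files (art_sam_wrap's --limit 5).
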
    # allow a little bit of grace -- 5500 rather than 5000
    if njobs > 5500:
        print >> sys.stderr, """
    Error: cannot submit more than 5000 jobs in one cluster.
    Please break your submission into multiple batches of 5000 (or fewer) jobs,
    and after submitting the first batch, use --continue_project with the project
    that results from the first submission for the remaining batches.

    Please separate submissions by 5 minutes.
    """
        sys.exit(1)

    if args.maxConcurrent:
        jobsub_opts += ["--maxConcurrent=%d" % args.maxConcurrent]

    if args.opportunistic or args.offsite:
        usage_models.append("OPPORTUNISTIC")
    if args.offsite:
        usage_models.append("OFFSITE")
    if args.offsite_only:
        if args.offsite:
            fail("Both --offsite and --offsite_only specified, these arguments conflict")

        if args.opportunistic:
            fail("Both --opportunistic and --offsite_only specified, these arguments conflict")

        usage_models = ["OFFSITE"]

    if args.amazon:
        usage_models=["AWS_HEPCLOUD"]
        awsfilepath=os.path.expandvars("/cvmfs/nova.opensciencegrid.org/externals/NovaGridUtils/$NOVAGRIDUTILS_VERSION/NULL/utils/aws_setup.sh")
        source_scripts.append(awsfilepath)

    if args.autoDropbox:
        run_nova_sam_opts += ["--autoDropbox"]

    # Check OS in off-site submissions
    if args.offsite or args.offsite_only:
        if args.singularity:
            if args.os:
                fail("Don't specify OS when submitting with --singularity")
        else:
            if not args.os:
                fail("Running offsite, but OS version not specified!")

    resource_opt="--resource-provides=usage_model=" + string.join(usage_models,",")
    jobsub_opts += [resource_opt]
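    # e.g. (illustrative) with --offsite this becomes
    #   --resource-provides=usage_model=DEDICATED,OPPORTUNISTIC,OFFSITE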

    if args.recommended_sites or args.site:
        site_opt="--site="

        if args.recommended_sites:
            for isite in recommended_sites:
                site_opt += isite + ","
        if args.site:
            for isite in args.site:
                if isite not in recommended_sites:
                    warn("Site "+isite+" is not known to work. Your jobs may fail at that site. Sleeping for 5 seconds")
                    sleep(5)
                site_opt += isite + ","

        site_opt=site_opt[:-1]
        jobsub_opts += [ site_opt ]

    if args.exclude_site:
        for isite in args.exclude_site:
            jobsub_opts += [ "--append_condor_requirements='(TARGET.GLIDEIN_Site\ isnt\ \\\"%s\\\")'" % isite ]
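    # e.g. (illustrative, escaping aside) --exclude_site Caltech appends
    #   --append_condor_requirements='(TARGET.GLIDEIN_Site isnt "Caltech")'
    # so jobs refuse to match slots advertised at that site.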

    if args.os:
        allowed_os=["SL6"]
        for ios in args.os:
            if ios not in allowed_os:
                fail("Invalid OS %s" % ios)

        os_opt="--OS=" + string.join(args.os,",")
        jobsub_opts += [ os_opt ]

    if args.disk:
        disk_opt="--disk=%sMB" % (args.disk)
        jobsub_opts += [ disk_opt ]

    if args.memory:
        mem_opt="--memory=%sMB" % (args.memory)
        jobsub_opts += [ mem_opt ]

    if args.cpu:
        cpu_opt="--cpu=%d" % (args.cpu)
        jobsub_opts += [ cpu_opt ]

    if args.mail_always:
        jobsub_opts += ["--mail_always"]
    elif args.mail_on_error:
        jobsub_opts += ["--mail_on_error"]
    else:
        jobsub_opts += ["--mail_never"]

    # The default jobsub_submit priority is 0
    #production_priority_max = 100 # Reserved for keepup processing
    if args.user_priority != 0:
        #if args.production and args.user_priority >= production_priority_max :
        #    fail( "Priority for production must be < %d" % production_priority_max )
        jobsub_opts += [ '-l "priority=%d"' % args.user_priority ]

    if args.kill_after:
        kill_opt="--self-destruct-timer %d" % (args.kill_after)
        art_sam_wrap_opts += [ kill_opt ]

    if args.dynamic_lifetime:
        # Check other arguments
        args.expected_lifetime = "0"
        if args.files_per_job > 1:
            warn("You have limited the number of files per job to "+str(args.files_per_job)+", but this argument should not be necessary with dynamic_lifetime.")
        art_sam_wrap_opts += [ "--dynamic_lifetime " + args.dynamic_lifetime ]
        jobsub_opts += [ "--append_condor_requirements='(((TARGET.GLIDEIN_ToDie-CurrentTime)>%s)||isUndefined(TARGET.GLIDEIN_ToDie))'" % args.dynamic_lifetime]
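    # GLIDEIN_ToDie is the glidein's advertised shutdown time, so this
    # requirement matches only slots with more than LIFETIME seconds remaining
    # (or slots that don't advertise a shutdown time at all).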

    # expected_lifetime can be an int (number of seconds) or
    # one of a few strings; this should test for either
    # possibility
    try:
        dummy=string.atoi(args.expected_lifetime)
        jobsub_opts += ["--expected-lifetime=%ss" % (args.expected_lifetime)]
    except:
        allowed_lifetimes=["short","medium","long"]
        if args.expected_lifetime not in allowed_lifetimes:
            fail("Invalid expected_lifetime %s" % args.expected_lifetime)
        else:
            jobsub_opts += ["--expected-lifetime=%s" % (args.expected_lifetime)]
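    # e.g. --expected_lifetime 86400 becomes --expected-lifetime=86400s, while
    # --expected_lifetime medium passes the keyword through unchanged
    # (illustrative values).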

    # all software comes from CVMFS now since /nova/data is no longer mounted on Fermigrid
    build_location_arguments = "" # ":-e:%s/externals:/cvmfs/fermilab.opensciencegrid.org/products/common/db" % cvmfs_distro_base
    source_scripts.append( "/cvmfs/fermilab.opensciencegrid.org/products/common/etc/setups.sh" )
    # assumes this job is being submitted on an up-to-date CVMFS install. best we can do...
    cvmfs_rev = subprocess.check_output(["attr", "-qg", "revision", "/cvmfs/nova.opensciencegrid.org"]).strip()
    if cvmfs_rev:
        art_sam_wrap_opts.append("--cvmfs-revision %s" % cvmfs_rev)
        if not args.disable_cvmfs_version_matching:
            # if NOvA CVMFS version is available, select on it; if not, just accept the slot anyway
            jobsub_opts += [ "--append_condor_requirements='ifThenElse(isUndefined(TARGET.HAS_CVMFS_nova_opensciencegrid_org)==FALSE,TARGET.CVMFS_nova_opensciencegrid_org_REVISION>=%s,TRUE)'" % cvmfs_rev ]
    if tag == "development" or tag[0] == 'N':
        # maybe we need to do -6 or -7 depending on what SRT_ARCH is, rather than just having them both?
        build_location_arguments += ":-6:/cvmfs/nova-development.opensciencegrid.org/novasoft:-7:/cvmfs/nova-development.opensciencegrid.org/novasoft"
    else:
        build_location_arguments += ":-6:%(dir)s/novasoft/slf6/novasoft:-7:%(dir)s/novasoft/slf7/novasoft" % {"dir": cvmfs_distro_base}

    setup_location= "%s/novasoft/slf6/novasoft/setup/setup_nova.sh" % (cvmfs_distro_base)


    group=args.group
    # probably others are ok as well
    allowed_groups=['nova','fermilab']
    #allowed_groups=['nova_high_prio', 'nova_medium_prio', 'nova_low_prio', 'nova']
    if group not in allowed_groups:
        fail("The only valid args for --group are " + " ".join(allowed_groups))
    jobsub_opts += [ "-G %s" % group ]

    if args.test_queue:
        jobsub_opts += ["--subgroup test"]
    elif args.subgroup:
        subgroup = args.subgroup
        if is_production_arg:
            allowed_subgroups = [ "keepup_prio", "prod_high_prio", "prod_prio" ]
        else:
            fail( "Only production subgroups are available at this time and require production credentials" )
            #allowed_subgroups = [ "ana_prio" ]

        if subgroup in allowed_subgroups:
            jobsub_opts += [ "--subgroup %s" % subgroup ]
        else:
            fail( "Allowed subgroups: " + ", ".join( allowed_subgroups ) )

    jobsub_opts += ["--role=%s" % (role)]


    # Singularity

    if args.singularity:
        jobsub_opts += [ "--line='+SingularityImage=\\\"%s\\\"'" % args.singularity ]
        jobsub_opts += [ "--append_condor_requirements='(TARGET.HAS_SINGULARITY=?=true)'" ]
    if args.jobfactory:
        jobsub_opts += [ "--line='+JobFactoryType=\\\"%s\\\"'" % args.jobfactory ]

    # GPUs
    if args.gpu:
        jobsub_opts += [ "--line='+RequestGPUs=1'" ]

    if args.node_features:
        jobsub_opts += [ make_jobsub_node_features_arg(args.node_features) ]



    ####################
    # Setup runNovaSAM #
    ####################

    nevts=args.nevts
    if nevts > 0:
        run_nova_sam_opts += ["-n %d" % nevts]

    if args.zipLogs: args.logs = True

    if args.lemBalance and args.lemServer:
        fail("Cannot specify both --lemServer and --lemBalance")

    # Toggled options accepted by runNovaSAM with exactly the same syntax as
    # our own options
    passThru = ['gdb', 'hashDirs', 'runDirs', 'noCleanup', 'jsonMetadata', 'copyOut', 'logs', 'zipLogs', 'outputNumuDeCAF', 'outputNueDeCAF', 'outputNumuOrNueDeCAF', 'outputNusDeCAF', 'outputValidationDeCAF', 'lemBalance']

    va = vars(args)
    for opt in passThru:
        if va[opt]:
            run_nova_sam_opts += ['--'+opt]
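    # e.g. (illustrative) submitting with --copyOut --logs appends
    # ['--copyOut', '--logs'] to run_nova_sam_opts via the loop above.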

    # Could consider doing a similar loop for specialName, outTier, cafTier,
    # histTier, but it gets a little complicated, and there are few enough of
    # them that it isn't worth the effort.
    if args.second_config:
        run_nova_sam_opts += ['--second_config ' + args.second_config]

    if args.lemServer:
        run_nova_sam_opts += ["--lemServer " + args.lemServer]

    if args.cosmicsPolarity:
        run_nova_sam_opts += ["--cosmicsPolarity " + args.cosmicsPolarity]

    if args.npass:
        run_nova_sam_opts += ["--npass " + args.npass]

    if args.skim:
        run_nova_sam_opts += ["--skim " + args.skim]

    if args.systematic:
        run_nova_sam_opts += ["--systematic " + args.systematic]

    if args.specialName:
        run_nova_sam_opts += ["--specialName " + args.specialName]

    if args.genietune:
        run_nova_sam_opts += ["--genietune " + args.genietune]

    if args.NPPFX:
        run_nova_sam_opts += ["--NPPFX " + args.NPPFX]

    if args.declareFiles:
        run_nova_sam_opts += ["--declareFiles"]

    if args.declareLocations:
        run_nova_sam_opts += ["--declareLocations"]

    out_tiers=args.outTier

    if fcl == "mcgen":
        outnum,mcouttier = out_tiers[0].split(":")
        if mcouttier == "artdaq":
            copyback = '"*.daq.root"'
        else:
            copyback = '"*.'+ mcouttier +'.root"'

    if None != out_tiers:
        for tier in out_tiers:
            run_nova_sam_opts += ["--outTier " + tier]

    caf_tiers=args.cafTier
    if None != caf_tiers:
        for tier in caf_tiers:
            run_nova_sam_opts += ["--cafTier " + tier]

    flat_tiers=args.flatTier
    if None != flat_tiers:
        for tier in flat_tiers:
            run_nova_sam_opts += ["--flatTier " + tier]

    hist_tier=args.histTier
    if None != hist_tier:
        run_nova_sam_opts += ["--histTier " + hist_tier]

    h5_tiers=args.h5Tier
    if None != h5_tiers:
        for tier in h5_tiers:
            run_nova_sam_opts += ["--h5Tier " + tier]

    if args.copyOut and (None==hist_tier and None==caf_tiers and None==flat_tiers and None==out_tiers and None==h5_tiers):
        fail("You specified --copyOut but did not specify --outTier, --cafTier, --flatTier, --h5Tier or --histTier")

    if not (args.copyOut or args.copyOutScript):
        fail("Did not specify a method to copy back output (--copyOut or --copyOutScript)")

    if args.copyOut and args.copyOutScript:
        fail("The options --copyOut and --copyOutScript conflict")

    if args.export:
        export_to_art_sam_wrap += args.export

    if args.veryearlyscript:
        veryearly_scripts += args.veryearlyscript

    if args.source:
        source_scripts += args.source

    if args.earlyscript:
        early_scripts += args.earlyscript

    if args.prescript:
        pre_scripts += args.prescript

    if args.precopyscript:
        inter_scripts += args.precopyscript

    if args.postscript:
        post_scripts += args.postscript

    if args.copyOutScript:
        post_scripts.append(args.copyOutScript)

    for script in veryearly_scripts + early_scripts + source_scripts + pre_scripts + post_scripts + inter_scripts:
        if ":" in script:
            script_path = script.split(":")[0]
        else:
            script_path = script

        if not find_file_in_list(os.path.expandvars(script_path), input_files):
            if not find_file(os.environ["PATH"].split(os.pathsep), os.path.expandvars(script_path)):
                fail("Script %s does not exist!" % script_path)

    if not args.continue_project:
        ## start a sam project
        project_name = user + "-" + jobname + "-" + timestamp
        if args.test_submission:
            project_name += "-testjobs"
        start_project = True
    else:
        project_name = args.continue_project
        start_project = False

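    # e.g. (illustrative) user "anovauser" running --jobname myjob at noon on
    # 2019-01-31 gets project_name "anovauser-myjob-20190131_1200".
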
    #sam_station=os.getenv("SAM_STATION")

    if args.novasoftups:
        art_sam_wrap_cmd="$NOVASOFT_FQ_DIR/bin/art_sam_wrap.sh"



    #########################
    # Start the SAM project #
    #########################


    start_proj_command = "samweb start-project "
    if not snapshot_id:
        start_proj_command += " --defname=%s" % defname
    else:
        start_proj_command += " --snapshot_id=%d" % snapshot_id
    start_proj_command += " --group=nova"
    start_proj_command += " --station=%s" % sam_station

    start_proj_command += " %s" % project_name
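    # Illustrative result (values depend on the submission):
    #   samweb start-project --defname=my_sam_dataset --group=nova \
    #       --station=nova anovauser-myjob-20190131_1200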
    if start_project and not test:
        start_proj_retval=os.system(start_proj_command)
        print "start proj returned %d" % start_proj_retval
        if start_proj_retval != 0:
            fail("Couldn't start project")

    print "Station monitor: http://samweb.fnal.gov:8480/station_monitor/nova/stations/" + sam_station + "/projects/" + project_name
    os.putenv("SAM_PROJECT_NAME",project_name)

    check_make_dir("${CONDOR_EXEC}")
    # Ensure a unique job command name
    job_cmd=os.path.expandvars("${CONDOR_EXEC}/%s.sh" % project_name)
    scriptcount=1
    while os.path.exists(job_cmd):
        job_cmd=os.path.expandvars("${CONDOR_EXEC}/%s_%i.sh" % (project_name, scriptcount))
        scriptcount += 1
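    # i.e. if <project>.sh is taken, fall back to <project>_1.sh,
    # <project>_2.sh, and so on until an unused name is found.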




    ######################
    # Setup art_sam_wrap #
    ######################

    if not test:
        # create a symlink so that jobs get a better name
        os.symlink(os.path.expandvars(art_sam_wrap_cmd), job_cmd)
        art_sam_wrap_cmd=job_cmd

    sys.stdout.flush()
    sys.stderr.flush()



    if not args.no_multifile:
        art_sam_wrap_opts += ["--multifile"]

    if args.printenv:
        art_sam_wrap_opts += ["--printenv"]

    if args.txtfiledef:
        run_nova_sam_opts += ["--txtfiledef"]
        print "Passing --txtfiledef from submit_nova_art.py to runNovaSAM"

    if not mcgen:
        art_sam_wrap_opts += ["--config " + fcl]

    if mcgen:
        jobsub_opts += ['-l "+JobType="MC""']
        art_sam_wrap_opts += ["--getconfig"]

    if args.mix:
        art_sam_wrap_opts += ["--mix",args.mix]

    if not args.novasoftups:
        art_sam_wrap_opts += ["--source %s:-r:%s:%s%s" % (setup_location, tag, maxopt_opt, build_location_arguments)]
        if args.testrel or args.user_tarball:
            art_sam_wrap_opts += ["--setup_testrel"]

        if args.ngu_test:
            art_sam_wrap_opts += ["--source setup_test_product:NovaGridUtils"]

        if args.ngu_version:
            art_sam_wrap_opts += ["--source setup_product:NovaGridUtils:%s" % (args.ngu_version)]

        if args.testrel_ngu:
            art_sam_wrap_opts += ["--testrel_ngu"]

    for veryearly_script in veryearly_scripts:
        art_sam_wrap_opts += ["--veryearlyscript " + veryearly_script]

    for early_script in early_scripts:
        art_sam_wrap_opts += ["--earlyscript " + early_script]

    for source_script in source_scripts:
        art_sam_wrap_opts += ["--source " + source_script]

    for pre_script in pre_scripts:
        art_sam_wrap_opts += ["--prescript " + pre_script]

    for inter_script in inter_scripts:
        run_nova_sam_opts += ["--precopyscript " + inter_script]

    for post_script in post_scripts:
        art_sam_wrap_opts += ["--postscript " + post_script]

    for input_file in input_files:
        art_sam_wrap_opts += ["--inputfile " + input_file]

    if mcgen:
        art_sam_wrap_opts += ["--addoutput " + copyback]
        if args.hashDirs==True:
            art_sam_wrap_opts += ["--hash"]
        art_sam_wrap_opts += ["--dest " + dest]

    if args.poms and not test:
        poms_campaign_id = poms_client.register_poms_campaign(
            jobname,
            user = user,
            experiment = 'nova',
            version = tag,
            dataset = defname)
#            campaign_definition = args.poms_definition )

        poms_task_id = poms_client.get_task_id_for(
            poms_campaign_id,
            user = user,
            command_executed = build_jobsub_cmd() )

        export_to_art_sam_wrap += ["POMS_CAMPAIGN_ID={0}".format(poms_campaign_id),
                                   "POMS_TASK_ID={0}".format(poms_task_id)]
        print "POMS Campaign: https://pomsgpvm01.fnal.gov/poms/campaign_info?campaign_id={0}".format(poms_campaign_id)



    ############################
    # Actually launch the jobs #
    ############################

    jobsub_cmd = build_jobsub_cmd()

    if print_jobsub:
        print jobsub_cmd
        sys.stdout.flush()
        sys.stderr.flush()


    if not test:
        os.system(jobsub_cmd)

    if njobs > 1000:
        print
        print "Please note: if you intend to submit any more jobs,"
        print " please wait", njobs/1000, "minutes before your next submission"
        print " so as to avoid overloading the jobsub server."