runNovaSAM.py
#!/bin/env python

import os, sys
import shutil
import samweb_client, ifdh
import pprint
import argparse
import subprocess
import re
import md5
import json
import resource
import string
import datetime
from string import atoi
from samweb_client.utility import fileEnstoreChecksum
from samweb_client.exceptions import *

import MetadataUtils
import NovaGridUtils as NGU

## Do a little bit of trickery with sys.argv to stop ROOT from gobbling it up and killing --help
argvCopy = sys.argv[:] # Make a copy
sys.argv = sys.argv[:1] # Replace it with just the first argument, i.e. script name
import ROOT
# The weird thing is that you have to do something with ROOT in order to make it parse args. Ok, we would have done this anyway.
ROOT.gErrorIgnoreLevel = 3000 # Stop ROOT from complaining about not having dictionaries, 3000 is below kError but above kWarning.
sys.argv = argvCopy # Restore sys.argv. Thanks Wim, that's a real "solution"!

#files
_skip_pattern = re.compile(r"^.*(RootOutput).*\.root", re.IGNORECASE)
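# Files matching this pattern (presumably art's temporary RootOutput*.root
# intermediates) are never treated as requested outputs: checkAndMoveFiles and
# copyOutFiles skip them, and the cleanup section at the bottom deletes them.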

def make_temp_fcl(fclFile, inFileBase):
    # make a temporary copy of the fcl file
    fclPath = resolveFclPath(fclFile)
    print "Found fcl file here: ", fclPath
    tmpFclName = os.path.basename(fclPath).replace(".fcl", "_" + os.path.splitext(inFileBase)[0] + ".fcl")
    print "Creating local copy : ", tmpFclName
    shutil.copy(fclPath, tmpFclName)

    # determine if we're using the metadata module
    # add parameters that describe this job
    # NOTE:
    # fhicl-expand (and other FCL tools) don't allow you
    # to give the absolute path to a FCL. Instead FCLs have to
    # live in $FHICL_FILE_PATH. $FHICL_FILE_PATH always begins
    # with './:', i.e., search current dir first.
    # so just make sure you don't cd() away from this dir
    # between when the FCL is copied to this dir above and the check below.
    isRunningMetadataModule = True
    try:
        subprocess.check_call("fhicl-expand %s | grep -q ^physics.analyzers.metadata.params." % tmpFclName, shell=True)
    except subprocess.CalledProcessError:
        isRunningMetadataModule = False

    if isRunningMetadataModule:
        with open(tmpFclName, 'a') as fclFileObj:
            if not fileMetaDataMgr.isSam4Users():
                if args.npass != None:
                    MetadataUtils.addMetadataToFCL(fclFileObj, "NOVA.subversion", '"' + fileMetaDataMgr.subversion + '"')

                if fileMetaDataMgr.dataFlag == "sim" and fileMetaDataMgr.generator in MetadataUtils.neutrinoGenerators:
                    MetadataUtils.addMetadataToFCL(fclFileObj, "NOVA.flux_version", '"' + fileMetaDataMgr.fluxVersion + '"')

            MetadataUtils.addMetadataToFCL(fclFileObj, "NOVA.skim", '"' + fileMetaDataMgr.skim + '"')
            MetadataUtils.addMetadataToFCL(fclFileObj, "NOVA.systematic", '"' + fileMetaDataMgr.systematic + '"')
            MetadataUtils.addMetadataToFCL(fclFileObj, "NOVA.Special", '"' + fileMetaDataMgr.special + '"')

    return tmpFclName
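# Example (illustrative names): make_temp_fcl("reco.fcl", "nd_r00012345_s01.root")
# resolves reco.fcl via resolveFclPath() and copies it into the CWD as
# "reco_nd_r00012345_s01.fcl", so each job gets a private fcl that it can
# safely append output names to.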

def setVMemLimit():
    #memLimit = 3.9*1024**3
    memLimit = 3.9*1000**3
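    # (Assumption: using 1000**3 rather than 1024**3 keeps the cap slightly
    # below a true 4 GiB, so this rlimit trips before a grid slot's 4 GB
    # memory monitor does.)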
    print "Old virtual memory limit:", resource.getrlimit(resource.RLIMIT_AS)
    print "Limiting the virtual memory of the nova job to 3.9 GB"
    resource.setrlimit(resource.RLIMIT_AS, (memLimit, memLimit))
    print "New virtual memory limit:", resource.getrlimit(resource.RLIMIT_AS)

def makeDirSafely(dir):
    if "/pnfs" != dir[0:5]:
        makeDirIfNeeded(dir)
        return

    print "runNovaSAM is making a directory with IFDH"
    dh = ifdh.ifdh("http://samweb.fnal.gov:8480/sam/nova/api")
    try:
        print "Checking if directory ", dir, "exists"
        dh.ls(dir, 1, "")
        dh.chmod(774, dir, "")

    except (RuntimeError, IOError) as e:
        try:
            print "It doesn't - make directory", dir
            dh.mkdir(dir)
        except:
            print "Tried to make directory and couldn't. Perhaps it already exists?"

def makeDirIfNeeded(dir):
    os.umask(002) # (rw-rw-r--) for files and (rwxrwxr-x) for directories.
    if not os.path.isdir(dir):
        print "runNovaSAM is making a directory: ", dir
        try:
            os.mkdir(dir)
        except:
            print "Couldn't make the directory... some other job perhaps did, or permissions did not allow it."
        if not os.path.isdir(dir):
            raise Exception("Failed to make directory " + dir)


def getOutDir(pathname, dest, hashDirs=False, runDirs=False, runNum=0):
    dirs = [dest]
    if hashDirs:
        head, tail = os.path.split(pathname)
        hash = md5.new(tail)
        dirs += list(hash.hexdigest()[:3])
    if runDirs:
        head, tail = os.path.split(pathname)
        runStr = str(runNum)
        multiRunDir = runStr[:3].zfill(6)
        makeDirSafely(os.path.join(dest, multiRunDir))
        makeDirSafely(os.path.join(dest, multiRunDir, runStr))
        dirs += [multiRunDir, runStr]
    return os.path.join(*dirs)
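# Illustrative layouts (made-up names): with hashDirs, a file whose md5 hex
# digest begins "a3f" is copied to <dest>/a/3/f/; with runDirs, run 12345 goes
# to <dest>/000123/12345/ (cf. the --runDirs help text below).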

def checkAndMoveFiles(inFile, outputs, noCleanup=False, copyOnly=False):
    """Checks all root files in the current directories for zombie or recovered status. Bad files are deleted while good files are moved to the results subdirectory for copy out ease"""
    makeDirIfNeeded('./results')
    inFileBase = os.path.basename(inFile)
    baseDir = "."

    print 'Looking for requested outputs: '
    for o in outputs: print o

    # If declaring files have to do non-decafs first, since they're the parents
    # of the decafs. If not, it doesn't hurt to do it in that order anyway.
    for secondPass in [False, True]:
        for root, dirs, filenames in os.walk(baseDir):
            if root == baseDir:
                for file in filenames:
                    if file.endswith('decaf.root') != secondPass: continue
                    # If file not in outputs, then delete.
                    if (file.endswith(".root") or file.endswith(".h5")) and file != inFileBase:
                        fileWPath = os.path.join(root, file)
                        # If the file isn't in my outputs then delete it.
                        if file not in outputs:
                            if not noCleanup:
                                # Do not copy over, just delete
                                print "File", fileWPath, " is not among requested outputs, removing"
                                os.remove(fileWPath)
                            continue
                        # Now that I know that this file is in my output list, let's check it is valid...
                        # First, if it is a root file.
                        if file.endswith(".root"):
                            print "In checkAndMoveFiles, fileWPath is %s" % fileWPath
                            rootFile = ROOT.TFile(fileWPath)
                            if rootFile.IsZombie() or rootFile.TestBit(ROOT.TFile.kRecovered):
                                # do not copy over, just delete
                                print "File", fileWPath, "is Zombie - remove it"
                                os.remove(fileWPath)
                            else:
                                newFilePath = os.path.join(root, "results", file)
                                print "New file name is %s" % newFilePath
                                if copyOnly:
                                    shutil.copyfile(fileWPath, newFilePath)
                                else:
                                    os.renames(fileWPath, newFilePath)
                            rootFile.Close()
                        # Next, if it is a h5 file.
                        elif file.endswith(".h5"):
                            # Only import h5py if have h5 files.
                            import h5py
                            print "In checkAndMoveFiles, fileWPath is %s" % fileWPath
                            # If a valid HDF5 file.
                            if h5py.is_hdf5(fileWPath):
                                newFilePath = os.path.join(root, "results", file)
                                print "New file name is %s" % newFilePath
                                if copyOnly:
                                    shutil.copyfile(fileWPath, newFilePath)
                                else:
                                    os.renames(fileWPath, newFilePath)

                            # If not a valid HDF5 file.
                            else:
                                print "File", fileWPath, "is not a valid HDF5 file - remove it"
                                os.remove(fileWPath)
    return

#def h5MetaHack(inFile):
#    print "Doing a hack to get HDF5 metadata...Will take CAF metadata and change subtly."
#    ### Make my client and get my CAF metadata
#    samweb = samweb_client.SAMWebClient(experiment='nova')
#    md = MetadataUtils.createMetadata( inFile.replace('.h5caf.h5', '.caf.root') )
#    ### Change some parameters.
#    md['file_name'] = inFile
#    md['data_tier'] = unicode('h5')
#    md['file_size'] = os.path.getsize( inFile )
#    return md

def declareFile(fileWPath):
    """Checks file for a TKey of RootFileDB. If it exists, run sam_metadata_dumper and construct appropriate metadata for the file. Use that metadata to declare the file to SAM"""
    samweb = samweb_client.SAMWebClient(experiment='nova')

    filename = os.path.basename(fileWPath)
    print filename

    rootFileDB = False
    if filename.endswith(".root"):
        rootFile = ROOT.TFile(fileWPath)
        rootFileDB = rootFile.FindKey("RootFileDB")
    else:
        rootFile = None

    olddir = os.getcwd()
    os.chdir(os.path.dirname(fileWPath))
    if rootFileDB or filename.endswith("caf.root") or filename.endswith(".h5"):
        md = MetadataUtils.createMetadata(filename)
        # Check that md exists, and then declare!
        if md == None:
            print "No metadata found!"
        else:
            # If I have a transpose file, want to add the plane (and cell) number.
            if "cell" in filename:
                try:
                    plane = re.search(r'^.*outplane(\d*).', filename).group(1)
                    cell = re.search(r'^.*cell(\d*).', filename).group(1)
                    print "Plane number:", plane, "Cell number:", cell, ". Is a transpose file"
                    md['Calibration.PlaneNumber'] = plane
                    md['Calibration.CellNumber'] = cell
                except:
                    print "No cell number found - could be a plane mode transpose file"

            elif "outplane" in filename:
                print filename
                try:
                    plane = re.search(r'^.*outplane(\d*).', filename).group(1)
                    print "Plane number:", plane, ". Is a transpose file"
                    md['Calibration.PlaneNumber'] = plane
                except:
                    print "No plane number found - not a transpose file"
            ### Make sure that the txtfile is in the parent list.
            if args.txtfiledef:
                md['parents'].append({u'file_name': unicode(inFile)})

            # Print out the metadata before trying to declare
            pprint.pprint(md)
            print ''
            print "Declaring", fileWPath, "to SAM"
            try:
                samweb.declareFile(md)
                #samweb.validateFileMetadata(md=md)
            except Exception as inst:
                #print fileWPath, "already exists in SAM"
                print inst
    else:
        print fileWPath, "does not contain RootFileDB, do not try to declare"

    if rootFile:
        rootFile.Close()
    os.chdir(olddir)
    return


def declareLogFile(logName, logFile, rootName):
    # This is what NovaFTS/plugins/nova_log_metadata.py does
    md = {
        'file_name': os.path.basename(logName),
        'file_size': os.path.getsize(logFile),
        'data_tier': 'log',
        'file_format': 'log',
        'file_type': 'unknown', # maybe 'nonPhysicsGeneric'?
        'parents': [{'file_name': os.path.basename(rootName)}]
    }
    print "declareLogFile(%s, %s, %s)" % (logName, logFile, rootName)
    print "Declaring", logName, "to SAM"
    try:
        samweb = samweb_client.SAMWebClient(experiment='nova')
        samweb.declareFile(md)
    except Exception as inst:
        print inst


def fileExists(outPath, dh):
    try:
        # Check to see if you can ls the file.
        # This works in ifdh 1_7_0 and newer
        # If it exists, dh.ls() returns a tuple with one entry, converts to True
        # If it doesn't, dh.ls() returns an empty tuple, converts to False
        return bool(dh.ls(outPath, 1, ""))
    except (RuntimeError, IOError) as e:
        # Some versions of ifdh throw an exception when the file was not found
        # But that means it is not there.
        return False
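# Illustrative (hypothetical) call site, skipping a copy when the target is
# already at the destination:
#   if not fileExists(os.path.join(outDir, file), dh):
#       dh.cp(["-D", fileWPath, outDir])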

def listFiles(outPath, dh):
    try:
        return dh.ls(outPath, 1, "")
    except:
        return "Exception while trying to ls, OutPath = " + outPath


def makeMetadataJSONs():
    """For every ROOT file, try to extract its metadata into a matching .json
    file in the same directory"""
    baseDir = "./results"
    # If using --txtfiledef, want to pass multiple files at a time to sam_metadata_dumper.
    # This is because it simply takes too long to call it for each indiv file
    # when there are 800+ outputs. 5 s each -> ~1 hour!
    TransposeList = ""
    # Declare the samweb client within this function
    samweb = samweb_client.SAMWebClient(experiment='nova')
    # Loop through directories to search for files to make json files for.
    for root, dirs, filenames in os.walk(baseDir):
        if root == baseDir:
            # Push the h5 files to the front of the list so that the CAF files remain
            # available to hack in the metadata.
            for ifile in range(len(filenames)):
                if "h5" in filenames[ifile]:
                    filenames = [filenames[ifile]] + filenames[:ifile] + filenames[ifile+1:]
            for file in filenames:
                if (file.endswith(".root") or file.endswith(".h5")) and file != inFileBase:
                    skip_match = _skip_pattern.match(file)
                    if skip_match == None:
                        # Set some quick and useful variables.
                        olddir = os.getcwd()
                        fileWPath = os.path.join(root, file)
                        # If a transpose file, want to make the json subtly differently.
                        if args.txtfiledef and "outplane" in file:
                            # This works for the cell mode of the transposer as well, since those filenames include both outplane and cell.
                            print "Adding %s to TransposeList" % file
                            TransposeList += "%s " % file
                            continue
                        # Which extractor am I using?
                        extractor = 'sam_metadata_dumper'
                        if file.endswith('caf.root'):
                            extractor = 'extractCAFMetadata'
                        elif file.endswith('.h5'):
                            extractor = 'extractHDF5Metadata'
                        try:
                            # sam_metadata_dumper doesn't apply basename() to filename. https://cdcvs.fnal.gov/redmine/issues/8987
                            os.chdir(os.path.dirname(fileWPath))
                            meta = subprocess.check_output([extractor, os.path.basename(fileWPath)])
                            if file.endswith(".root"):
                                jsonf = open(file.replace('.root', '.json'), 'w')
                                jsonf.write(meta)
                                jsonf.close()
                                print "Made metadata for %s" % file
                            elif file.endswith(".h5"):
                                print "\nNow to make my json file for my h5...\n"
                                jsonf = open(file.replace('.h5caf.h5', '.h5caf.json'), 'w')
                                jsonf.write(meta)
                                jsonf.close()
                                print "Made metadata for %s" % file
                            else:
                                print "I'm not sure what file extension you have..."
                        except:
                            print "Error extracting metadata from file."
                        finally:
                            os.chdir(olddir)
    # Make the Transpose json files.
    # Again same argument: "outplane" is already in the filenames for cell mode.
    if args.txtfiledef and "outplane" in file:
        olddir = os.getcwd()
        os.chdir(baseDir)
        MakeTransposeJson( TransposeList )
        os.chdir(olddir)

def MakeTransposeJson( TransposeList ):
    """Transpose files need some extra tweaking when making .json files, largely because there are so many of them.
    This takes in a list of files, and makes appropriate .json files in the same directory"""
    # With --txtfiledef, the whole file list can be passed to sam_metadata_dumper in one call.
    print "Is MakeTransposeJson called without a txt def, if you see this then yes."
    MetaListFile = "AllMetaJson.txt"
    meta_cmd = "sam_metadata_dumper -s " + TransposeList + " > " + MetaListFile
    os.system(meta_cmd)
    # Now want to open the file and split on the " }," separator.
    MetaFile = open( MetaListFile )
    MetaLines = MetaFile.read().split(" },")
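    # Assumed shape of the combined dumper output being split here, e.g.:
    #   { "fileA.root": { ...metadata... },
    #     "fileB.root": { ...metadata... } }
    # Splitting on " }," leaves one fragment per file; the brace patching
    # below restores each fragment to a standalone JSON object.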
    # Loop through lines, and appropriately separate out json files.
    for i in range(0, len(MetaLines)):
        meta = MetaLines[i]
        # Figure out file name...this is assuming that file name is always the first entry...
        StName = re.search('"(.+?).root"', meta).group(1)
        filename = StName + ".json" # Effectively replacing .root with .json
        # If transpose file, add PlaneNumber (and CellNumber if run in cell mode)
        if "cell" in filename:
            try:
                plane = re.search(r'^.*outplane(\d*).', filename).group(1)
                cell = re.search(r'^.*cell(\d*).', filename).group(1)
                print "Plane number:", plane, "Cell number:", cell, ". Is a transpose file"
                meta = meta.replace('"calibration.base_release"',
                                    '"calibration.PlaneNumber": "%s",\n "calibration.CellNumber": "%s",\n "calibration.base_release"' % (plane, cell))
            except:
                print "No cell number found - could be a plane mode transpose file"

        elif "outplane" in filename:
            try:
                plane = re.search(r'^.*outplane(\d*).', filename).group(1)
                print "Plane number:", plane, ". Is a transpose file"
                meta = meta.replace('"calibration.base_release"',
                                    '"calibration.PlaneNumber": "%s",\n "calibration.base_release"' % plane)
            except:
                print "Error extracting plane number from transpose file."

        ### Make sure that the txtfile is in the parent list. meta is still a
        ### JSON string at this point, not a dict, so splice the parent in
        ### textually (a dict-style append would raise a TypeError here); this
        ### assumes the dumped metadata already contains a "parents" list.
        meta = meta.replace('"parents": [', '"parents": [ { "file_name": "%s" },' % inFile)

        # Now open the json file
        fout = open(filename, 'w')
        # Want to make sure that the json starts with '{'
        if meta[0] != "{":
            meta = '{\n' + meta[1:]
        # Want to make sure that the json ends with a double '}'
        if i < len(MetaLines) - 1:
            meta += "}\n}\n"
        # Write and close the json file
        fout.write(meta)
        fout.close()


def copyOutFiles(dest, hashDirs=False, runDirs=False, runNum=0, noCleanup=False, declareLocation=False, declareLogs=False):
    """Builtin facility to copy out art files. This adds in subdirectories, a single hex digit each, corresponding to the first three digits in the hash of the output file name. This splits up files into 4096 separate subdirectories, preventing overfull directories. Copy out does not happen if the file already exists in the output"""
    dh = ifdh.ifdh("http://samweb.fnal.gov:8480/sam/nova/api")
    baseDir = "./results"
    declareFiles = declareLogs
    for root, dirs, filenames in os.walk(baseDir):
        if root == baseDir:

            # copy out root files before h5 files
            ordered_files = [s for s in filenames if ".root" in s]
            for s in filenames:
                if ".h5" in s:
                    ordered_files.append(s)

            for file in ordered_files:
                if (file.endswith(".root") or file.endswith(".h5")) and file != inFileBase:
                    fileWPath = os.path.join(root, file)
                    outDir = getOutDir(file, dest, hashDirs, runDirs, runNum)
                    skip_match = _skip_pattern.match(file)
                    if skip_match == None:
                        outPath = os.path.join(outDir, file)

                        # note: this will fail if the file already exists
                        returnValue = dh.cp(["-D", fileWPath, outDir])
                        if returnValue != 0:
                            print >> sys.stderr, "Copy out failed for file:", fileWPath
                            print >> sys.stderr, "Skipping it."
                        else:
                            if declareFiles:
                                declareFile(fileWPath)
                            ###################
                            # Declare the file's location to SAM if we have the declareLocation option on
                            if declareLocation == True:
                                loc = string.replace(outDir, 's3://', 's3:/')
                                print "Declaring location %s for file %s\n" % (loc, file)
                                sam = samweb_client.SAMWebClient('nova')
                                ret = sam.addFileLocation(file, loc)
                                if ret.status_code != 200:
                                    print " SAMWEB Unable to declare file location (%s, %s) status code %s" % (file, loc, ret.status_code)
                            if fileWPath.endswith(".root"):
                                jsonPath = fileWPath.replace('.root', '.json')
                            elif fileWPath.endswith(".h5"):
                                jsonPath = fileWPath[:-3] + '.json'
                            if os.path.isfile(jsonPath):
                                returnValue = dh.cp(['-D', jsonPath, outDir])
                                if returnValue != 0:
                                    print >> sys.stderr, "Copy out failed for file: " + jsonPath
                                    print >> sys.stderr, "Skipping it."
                            else:
                                print('JSON not found %s' % jsonPath)

                            for ext in ['.bz2', '']:
                                if os.path.isfile('log.txt' + ext):
                                    if file.endswith(".root"):
                                        logName = file.replace('.root', '.log' + ext)
                                    elif file.endswith(".h5"):
                                        logName = file + '.log' + ext
                                    returnValue = dh.cp(['log.txt' + ext, outDir + '/' + logName])

                                    if returnValue != 0:
                                        print >> sys.stderr, "Copy out failed for file: " + logName
                                        print >> sys.stderr, "Skipping it."
                                    if declareLogs:
                                        declareLogFile(logName, 'log.txt' + ext, file)

                                    # Remove the copied-out log so it's not in
                                    # the way for new log creation.
                                    os.remove('log.txt' + ext)

                                    break

                    else:
                        print "It does exist, not copying."

                    if not noCleanup:
                        print "Removing", fileWPath
                        os.remove(fileWPath)

                        if fileWPath.endswith(".root"):
                            jsonPath = fileWPath.replace('.root', '.json')
                        elif fileWPath.endswith(".h5"):
                            # match the json naming used for the copy above (x.h5 -> x.json)
                            jsonPath = fileWPath[:-3] + ".json"
                        if os.path.isfile(jsonPath):
                            print 'Removing', jsonPath
                            os.remove(jsonPath)
    return

def makeDeCAF(script, fname, special):
    trimname = fname[:-5] # cut off .root from end
    trimidx = trimname.rindex('.') + 1 # find last period before .root
    decaf_tier = trimname[trimidx:-3] + 'de' + trimname[-3:] # properly insert 'de'
    oname = '{0}_{1}.{2}.root'.format(trimname, special, decaf_tier)
    novaSource = os.getenv("SRT_PUBLIC_CONTEXT", "undefined")
    if(novaSource == "undefined"):
        novaSource = os.getenv("NOVASOFT_DIR", "undefined")
        if(novaSource == "undefined"):
            NGU.fail("Unable to locate NOvA source code")
        else:
            novaSource = os.getenv("NOVASOFT_DIR") + "/source"

    os.system('cafe -bq ' + novaSource + '/CAFAna/' + script + ' ' + fname + ' ' + oname + ' 2>&1')
    return oname
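# Worked example (illustrative file name):
#   makeDeCAF('nue/reduce_nue_sa.C', 'nd_data.caf.root', 'nue_contain')
#   trimname   = 'nd_data.caf'
#   decaf_tier = 'decaf'   ('de' spliced in before the trailing 'caf')
#   returns    'nd_data.caf_nue_contain.decaf.root'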

def resolveFclPath(fcl):
    # Check if we have an absolute path name, return it if so
    if fcl[0] == "/":
        return fcl

    # Otherwise, we need to do some searching
    fclPaths = os.environ["FHICL_FILE_PATH"].split(":")
    for path in fclPaths:
        # ensure there is always a trailing "/" on the path
        path += "/"
        if os.path.isfile(path + fcl):
            return path + fcl

    # If we haven't found it, we have a problem.
    raise IOError(sys.argv[0] + ": config file " + fcl + " not found in FHICL_FILE_PATH")
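# Example (illustrative FHICL_FILE_PATH): with FHICL_FILE_PATH="./:/cvmfs/nova/fcl",
# resolveFclPath("reco.fcl") returns "./reco.fcl" when present, otherwise
# "/cvmfs/nova/fcl/reco.fcl", otherwise raises IOError.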


if __name__ == '__main__':

    parser = argparse.ArgumentParser(description='Run the nova command using SAM metadata')
    parser.add_argument('inFile', help='The input file to run over', type=str)
    parser.add_argument('--config', '-c', help='FHiCL file to use as configuration for nova executable', type=str)
    parser.add_argument('--outTier', help="""
            Data tier of the output file, multiple allowed, formatted as
            <name_in_fcl_outputs>:<data_tier>. Optionally, if a second colon is
            included, the third argument will be treated as an additional naming string,
            allowing multiple outputs with the same data_tier but unique file names.
            Example: out1:reco:shifted leads to <file_id>_shifted.reco.root
            """, type=str, action='append')
    parser.add_argument('--cafTier', help="""Module label for CAF output,
            multiple allowed. Format as <cafmaker_module_label>:<data_tier>.
            Optionally, if a second colon is included, the third argument will be
            treated as an additional naming string, allowing multiple outputs
            with the same data_tier but unique file names.
            Example: cafmaker:caf:shifted leads to <file_id>_shifted.caf.root
            """, type=str, action='append')
    parser.add_argument('--flatTier', help="""Module label for FlatCAF output,
            multiple allowed. Format as <flatmaker_module_label>:<data_tier>.
            Optionally, if a second colon is included, the third argument will be
            treated as an additional naming string, allowing multiple outputs
            with the same data_tier but unique file names.
            Example: flatmaker:flatcaf:shifted leads to <file_id>_shifted.flatcaf.root
            """, type=str, action='append')
    parser.add_argument('--histTier', help='File identifier string for TFileService output, only one allowed. Supply as --histTier <id> for output_name.<id>.root, where output_name is assembled based on the input file.', type=str)
    parser.add_argument('--h5Tier', help="""Module label for H5 output,
            multiple allowed. Format as <h5maker_module_label>:<data_tier>.
            Optionally, if a second colon is included, the third argument will be
            treated as an additional naming string, allowing multiple outputs
            with the same data_tier but unique file names.
            Example: h5maker:h5:shifted leads to <file_id>_shifted.h5
            """, type=str, action='append')
    parser.add_argument('--outputNumuDeCAF', help='Make standard numu decafs for all CAF files produced', action='store_true')
    parser.add_argument('--outputNueDeCAF', help='Make standard nue decafs for all CAF files produced', action='store_true')
    parser.add_argument('--outputNumuOrNueDeCAF', help='Make standard numu or nue decafs for all CAF files produced', action='store_true')
    parser.add_argument('--outputNusDeCAF', help='Make standard nus decafs for all CAF files produced', action='store_true')
    parser.add_argument('--outputValidationDeCAF', help='Make validation (nue_or_numu_or_nus) decafs for all CAF files produced during the job', action='store_true')
    parser.add_argument('--cosmicsPolarity', help='.', type=str)
    parser.add_argument('--npass', help='.', type=str)
    parser.add_argument('--skim', help='Specify skimming name.', type=str)
    parser.add_argument('--systematic', help='Flag as systematic variation (append to file name and metadata parameters).', type=str)
    parser.add_argument('--specialName', help='Additional name to add before data tier in output.', type=str)
    parser.add_argument('--genietune', help='Specify the GENIE tune (append to file name and metadata parameters).', type=str)
    parser.add_argument('--NPPFX', help='Number of PPFX universes.', type=str)
    parser.add_argument('-n', help='Number of events to run over', type=int)
    parser.add_argument('--copyOut', help='Use the built in copy out mechanism', action='store_true')
    parser.add_argument('--dest', '-d', help='Output file destination for --copyOut functionality.', type=str)
    parser.add_argument('--hashDirs', help='Use hash directory structure in destination directory.', action='store_true')
    parser.add_argument('--runDirs', help='Use run directory structure in destination directory, 000XYZ/XYZUW for run number XYZUW.', action='store_true')
    parser.add_argument('--autoDropbox', help='Use automatic dropbox location', default=False, action='store_true')
    parser.add_argument('--jsonMetadata', help='Create JSON files with metadata corresponding to each output file, and copy them to the same destinations', action='store_true')
    parser.add_argument('--declareFiles', help='Declare files with metadata on worker node', action='store_true')
    parser.add_argument('--declareLocations', help='Declare the file output locations to SAM during the copy back of the files', action='store_true')
    parser.add_argument('--logs', help='Return .log files corresponding to every output', action='store_true')
    parser.add_argument('--zipLogs', help='Format logs as .bz2 files. Implies --logs', action='store_true')
    parser.add_argument('--noCleanup', help='Skip working directory cleanup step, good for interactive debugging or custom copy-out.', action='store_true')
    parser.add_argument('--gdb', help='Run nova executable under gdb, print full stack trace, then quit gdb.', action='store_true')
    parser.add_argument('--lemBalance', help='Choose lem server based on (CLUSTER+PROCESS)%%2 to balance load', action='store_true')
    parser.add_argument('--lemServer', help='Specify lem server', type=str)
    parser.add_argument('--txtfiledef', help='Use if the input definition is made up of text files, each containing a list of file names', default=False, action='store_true')
    parser.add_argument('--precopyscript', help='Execute script PRECOPYSCRIPT within runNovaSAM.py, after running the nova -c command.', type=str, action='append')
    parser.add_argument('--second_config', help="""Second configuration fcl executed after the main process.
            nova is executed with the art file that is output from the main process.
            Files produced by this second process that are named identically to
            files produced by the main process and are among the requested outputs
            are ignored, and the file produced by the first process is returned.""", type=str)

    args = parser.parse_args()

    # Sanity check for output
    if args.copyOut:
        if not (args.outTier or args.cafTier or args.flatTier or args.histTier or args.h5Tier):
            raise Exception("Copy-out requested with --copyOut, but no outputs specified. Nothing will happen with output, aborting.")
        if not (args.dest or "DEST" in os.environ):
            raise Exception("Copy-out requested with --copyOut, but no output directory specified. Use --dest or $DEST.")

    # No longer set VMem limit -- causes problems on some OSG sites
    #setVMemLimit()

    samweb = samweb_client.SAMWebClient(experiment='nova')

    if "SRT_BASE_RELEASE" in os.environ:
        release = os.environ["SRT_BASE_RELEASE"]
    elif "NOVASOFT_VERSION" in os.environ:
        release = os.environ["NOVASOFT_VERSION"]
    else:
        print "No release set!"
        exit(1)


    inFile = args.inFile
    inFileBase = os.path.basename(inFile)

    # Which file do I want to use to get the metadata?
    if not args.txtfiledef:
        # Normally my infile.
        metadata = samweb.getMetadata(inFileBase)
        fileMetaDataMgr = MetadataUtils.metaDataMgr(inFile, metadata, release, args.systematic, args.skim, args.cosmicsPolarity, args.npass, args.specialName)
    else:
        # However, if using a txtfile def, want to use the first file in the txt file.
        with open( inFile ) as f:
            PassFile = f.readline().strip()
            print "Looking at ", PassFile
            #metadata_cmd = "ifdh_fetch %s" % PassFile
            #os.system(metadata_cmd)
            metadata = samweb.getMetadata(PassFile)
            fileMetaDataMgr = MetadataUtils.metaDataMgr(PassFile, metadata, release, args.systematic, args.skim, args.cosmicsPolarity, args.npass, args.specialName)



    tmpFclName = make_temp_fcl(args.config, inFileBase)

    # Open the fcl file so that we can append output filenames to it
    fclFileObj = open(tmpFclName, 'a')

    print "Opened the fcl file for appending output filenames: fclFileObj =", fclFileObj
    doMeta = True
    if not (args.outTier or args.cafTier or args.flatTier or args.h5Tier):
        doMeta = False

    # Start setting up the nova command, add SAM parameters
    cmdList = []
    cmdList.append('nova')
    cmdList.append('-c')
    cmdList.append(tmpFclName)
    if doMeta:
        cmdList.append('--sam-application-family=nova')
        cmdList.append('--sam-application-version=' + release)
        if not fileMetaDataMgr.isSam4Users():
            cmdList.append('--sam-file-type=' + fileMetaDataMgr.fileType)

    if not args.outTier:
        args.outTier = []
    if not args.cafTier:
        args.cafTier = []
    if not args.flatTier:
        args.flatTier = []
    if not args.h5Tier:
        args.h5Tier = []

    if not args.precopyscript:
        args.precopyscript = []

    outList = [] # list of files to be supplied with -o
    outputs = [] # list of files that will be moved to results directory, includes CAFs
    # Loop over output tiers
    for outTier in args.outTier:

        try:
            output = outTier.split(":")[0]
            tier = outTier.split(":")[1]
        except:
            raise ValueError("Output data tier: " + outTier + " not formatted correctly, should be <output_name>:<data_tier>")

        if "," in output:
            if re.search('cell', output):
                outP, outC = re.findall(',(.+?),', output)
                outXp, outXc = [re.findall('(.+)-', outP)[0], re.findall('(.+)-', outC)[0]] # p stands for plane, c stands for cell
                outYp, outYc = [re.findall('-(.+)', outP)[0], re.findall('-(.+)', outC)[0]]
                for i in range(int(outXp), int(outYp) + 1):
                    for j in range(int(outXc), int(outYc) + 1):
                        NewOMod = re.search('^(.+?),', output).group(1) + str(i) + re.search('cell', output).group() + str(j)
                        cmdList.append('--sam-data-tier=' + ":".join([NewOMod, tier]))
                        if not fileMetaDataMgr.isSam4Users():
                            if fileMetaDataMgr.dataFlag == "data":
                                cmdList.append('--sam-stream-name=' + NewOMod + ':' + str(fileMetaDataMgr.stream))
            else:
                outX = re.search(',(.+?)-', output).group(1)
                outY = re.search('-(.+?),', output).group(1)
                for i in range(int(outX), int(outY) + 1):
                    NewOMod = re.search('^(.+?),', output).group(1) + str(i)
                    cmdList.append('--sam-data-tier=' + ":".join([NewOMod, tier]))
                    if not fileMetaDataMgr.isSam4Users():
                        if fileMetaDataMgr.dataFlag == "data":
                            cmdList.append('--sam-stream-name=' + NewOMod + ':' + str(fileMetaDataMgr.stream))
        else:
            cmdList.append('--sam-data-tier=' + ":".join([output, tier]))
            if not fileMetaDataMgr.isSam4Users():
                if fileMetaDataMgr.dataFlag == "data":
                    cmdList.append('--sam-stream-name=' + output + ':' + str(fileMetaDataMgr.stream))

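        # Illustrative output specs (format assumed from the regexes above):
        #   "outplane,0-3,cell,0-7,"  ->  modules outplane0cell0 ... outplane3cell7
        #   "outplane,0-3,"           ->  modules outplane0 ... outplane3
        #   "out1"                    ->  just the single module out1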
        outNameTemp = fileMetaDataMgr.getOutputFileName(tier)
        if args.txtfiledef:
            FirstRun = re.search('FirstRun-(.+?)_LastRun', os.path.basename(inFile)).group(1).zfill(8)
            LastRun = re.search('LastRun-(.+?)_TotFiles', os.path.basename(inFile)).group(1).zfill(8)
            Index = outNameTemp.find("_r0")
            outNameTemp = outNameTemp[:Index] + "_r" + FirstRun + "_r" + LastRun + outNameTemp[Index + 14:]
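            # e.g. (illustrative) an input list named "..._FirstRun-123_LastRun-456_TotFiles-800.txt"
            # rewrites "..._r00000001_s00.tier.root" into "..._r00000123_r00000456.tier.root";
            # the "+14" skips the 14-character "_rXXXXXXXX_sXX" run/subrun field being replaced.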
        outName = os.path.basename(outNameTemp)

        if "," in output:
            if re.search('cell', output):
                outP, outC = re.findall(',(.+?),', output)
                outXp, outXc = [re.findall('(.+)-', outP)[0], re.findall('(.+)-', outC)[0]] # p stands for plane, c stands for cell
                outYp, outYc = [re.findall('-(.+)', outP)[0], re.findall('-(.+)', outC)[0]]
                for i in range(int(outXp), int(outYp) + 1):
                    for j in range(int(outXc), int(outYc) + 1):
                        NewOMod = re.search('^(.+?),', output).group(1) + str(i) + re.search('cell', output).group() + str(j)
                        tier = outTier.split(":")[1]
                        NewOName = outName.replace(str("." + tier), str("-" + NewOMod + "." + tier))
                        fclFileObj.write("\noutputs." + NewOMod + '.fileName: "' + NewOName + '"\n')
                        outList.append(NewOName)
                        outputs.append(NewOName)
            else:
                print "Running in Plane Mode"
                outX = re.search(',(.+?)-', output).group(1)
                outY = re.search('-(.+?),', output).group(1)
                for i in range(int(outX), int(outY) + 1):
                    NewOMod = re.search('^(.+?),', output).group(1) + str(i)
                    tier = outTier.split(":")[1]
                    NewOName = outName.replace(str("." + tier), str("-" + NewOMod + "." + tier))
                    fclFileObj.write("\noutputs." + NewOMod + '.fileName: "' + NewOName + '"\n')
                    outList.append(NewOName)
                    outputs.append(NewOName)
        else:
            print "Output file name: ", outName, " for tier ", tier, " and output ", output
            fclFileObj.write("\noutputs." + output + '.fileName: "' + outName + '"\n')
            outList.append(outName)
            outputs.append(outName)

    for cafTier in args.cafTier:
        try:
            cafLabel = cafTier.split(":")[0]
            tier = cafTier.split(":")[1]
        except:
            raise ValueError("Output data tier: " + cafTier + " not formatted correctly, should be <output_name>:<data_tier>")

        cafName = fileMetaDataMgr.getOutputFileName(tier)
        print "Adding CAF: ", cafLabel, tier, cafName

        fclFileObj.write("\nphysics.producers." + cafLabel + '.CAFFilename: "' + cafName + '" \n')
        fclFileObj.write("physics.producers." + cafLabel + '.DataTier: "' + tier + '" \n')
        outputs.append(cafName)


    for flatTier in args.flatTier:
        try:
            flatLabel = flatTier.split(":")[0]
            tier = flatTier.split(":")[1]
        except:
            raise ValueError("Output data tier: " + flatTier + " not formatted correctly, should be <output_name>:<data_tier>")

        flatName = fileMetaDataMgr.getOutputFileName(tier)
        print "Adding FlatCAF: ", flatLabel, tier, flatName

        #fclFileObj.write("\nphysics.producers." + flatLabel + '.OutputName: "' + flatName + '" \n')
        #fclFileObj.write("physics.producers." + flatLabel + '.DataTier: "' + tier + '" \n')
        outputs.append(flatName)


    for h5Tier in args.h5Tier:
        try:
            h5Label = h5Tier.split(":")[0]
            tier = h5Tier.split(":")[1]
        except:
            raise ValueError("Output data tier: " + h5Tier + " not formatted correctly, should be <output_name>:<data_tier>")

        h5Name = fileMetaDataMgr.getOutputFileName(tier)
        print "Adding H5: ", h5Label, tier, h5Name

        outputs.append(h5Name + ".h5")

    if args.lemBalance and (atoi(os.environ["PROCESS"]) + atoi(os.environ["CLUSTER"])) % 2 == 0:
        fclFileObj.write("physics.producers.lem.WebSettings.Host: \"lem2.hep.caltech.edu\"\n")
    elif args.lemServer:
        fclFileObj.write("physics.producers.lem.WebSettings.Host: \"%s\"\n" % args.lemServer)


    if args.histTier:
        try:
            tier = str(args.histTier)
        except:
            raise Exception("Histogram identifier supplied by --histTier could not be converted to a string.")

        histName = fileMetaDataMgr.getOutputFileName(tier)

        outputs.append(histName)
        fclFileObj.write("\nservices.TFileService.fileName: " + '"' + histName + '"\n')

    fclFileObj.close()



    print "Config: "
    for line in open(tmpFclName):
        print line.strip()

    if args.n != None:
        cmdList.append('-n')
        cmdList.append(str(args.n))

    if args.txtfiledef:
        print "\nI have a text file definition, InFile is {}".format(inFile)
        ### Are we streaming the files via xrootd?
        #txtcmd = "cat %s | xargs -n1 samweb2xrootd > xrootd_inFile.txt" % inFile
        #os.system(txtcmd)
        #with open("xrootd_inFile.txt") as f:
        #    for line in f:
        #        print line.strip()
        #        cmdList.append( line.strip() )
        #print ""
        ### Are we going to copy the files?
        olddir = os.getcwd()
        os.system("mkdir InFiles")
        allFiles = 0.
        failFiles = 0.
        with open(inFile) as f:
            os.chdir("InFiles")
            for line in f:
                allFiles += 1
                copyfile = "InFiles/%s" % line.strip()
                print "Now copying", line.strip(), "to", copyfile
                ifdhcmd = "ifdh cp -D `samweb2xrootd %s` ." % line.strip()
                print datetime.datetime.now()
                ret = os.system( ifdhcmd )
                if ret == 0:
                    cmdList.append( copyfile )
                else:
                    failFiles += 1
        print("Copy in success ratio: " + str((allFiles - failFiles) / allFiles))
        os.chdir(olddir)
    else:
        cmdList.append(inFile)

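    # Standard gdb CLI flags: -return-child-result makes gdb exit with nova's
    # exit status, each --ex runs one gdb command (run, then a full backtrace,
    # then quit), and --args marks the start of the program and its arguments.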
    if args.gdb:
        gdbArgs = ["gdb", "-return-child-result", "--ex", "run", "--ex", "bt full", "--ex", "q", "--args"]
        cmdList = gdbArgs + cmdList

    cmd = ' '.join(cmdList)

    print 'Running:', cmd
    sys.stdout.flush() # flush the stdout buffer before running the nova executable, cleans up output.

    if args.logs or args.zipLogs:
        with open('log.txt', 'w') as logfile:
            sys.stderr.write('\nnova command runs here. stderr redirected to stdout\n\n')
            retCode = subprocess.call(cmdList, stdout=logfile, stderr=subprocess.STDOUT)
        # Print all the output to the screen as well so that regular condor
        # logs include it too.
        with open('log.txt', 'r') as logfile:
            for line in logfile:
                print line,
        #if args.zipLogs:
        #    os.system('bzip2 -f log.txt')
    else:
        retCode = subprocess.call(cmdList)

    ### If using a txtfiledef make sure to clean up the InputFile list....
    if args.txtfiledef:
        os.system("rm -rf InFiles")

    if retCode != 0:
        print "Want to copy back the logs for this job somewhere...."
    else:

        # determine output destination
        if args.copyOut:
            if args.dest:
                dest = args.dest
            elif "DEST" in os.environ:
                dest = os.environ["DEST"]
            else:
                raise Exception("Copy out requested with --copyOut, but no destination supplied. Use --dest or $DEST")

            if args.autoDropbox:
                dest = NGU.get_prod_dropbox()
                print("Getting automatic dropbox location", dest)

        # If the initial job finished successfully, we may want to run another
        # config over the output; this repeats the nova-command logic above with
        # second_config instead.
        if args.second_config:
            # stage files from first config to be declared
            # before the second config is run
            if args.copyOut:
                # move what we have there
                checkAndMoveFiles(inFile, outputs, noCleanup=True, copyOnly=True)
                if args.jsonMetadata:
                    makeMetadataJSONs()
                copyOutFiles(dest, args.hashDirs, args.runDirs, fileMetaDataMgr.runNum, args.noCleanup, args.declareLocations, args.declareFiles)


            # create a temporary work space so we're careful about overwriting files
            # from the main step
            import random
            import string

            lastdir = os.getcwd()
            tmpdir = os.path.abspath(os.path.join('./', 'tmp' + ''.join(random.choice(string.ascii_letters) for i in range(8))))
            os.mkdir(tmpdir)
            os.chdir(tmpdir)
            print('Changing to %s' % os.getcwd())
            tmpSecondFclName = make_temp_fcl(args.second_config, inFileBase)
            cmdList = []
            cmdList.append('nova')
            cmdList.append('-c')
            cmdList.append(tmpSecondFclName)
            if doMeta:
                cmdList.append('--sam-application-family=nova')
                cmdList.append('--sam-application-version=' + release)
                if not fileMetaDataMgr.isSam4Users():
                    cmdList.append('--sam-file-type=' + fileMetaDataMgr.fileType)

            if args.gdb:
                gdbArgs = ["gdb", "-return-child-result", "--ex", "run", "--ex", "bt full", "--ex", "q", "--args"]
                cmdList = gdbArgs + cmdList


            # run second nova executable
            inPID = fileMetaDataMgr.getOutputFileName('pid')
            if not os.path.isfile(os.path.abspath(os.path.join(lastdir, os.path.basename(inPID)))):
                print 'WARNING: Could not find PID file for second configuration fcl. Skipping', inPID
            else:
                cmdList.append(os.path.abspath(os.path.join(lastdir, os.path.basename(inPID))))
                cmd = ' '.join(cmdList)

                print 'Running:', cmd
                sys.stdout.flush() # flush the stdout buffer before running the nova executable, cleans up output.

                if args.logs or args.zipLogs:
                    with open(os.path.join(lastdir, 'log.txt'), 'a') as logfile:
                        sys.stderr.write('\nnova command runs here. stderr redirected to stdout\n\n')
                        retCode = subprocess.call(cmdList, stdout=logfile, stderr=subprocess.STDOUT)
                    # Print all the output to the screen as well so that regular condor
                    # logs include it too.
                    with open(os.path.join(lastdir, 'log.txt'), 'r') as logfile:
                        for line in logfile:
                            print line,

                else:
                    retCode = subprocess.call(cmdList)

                # handle output of second nova executable:
                # don't copy back files that belong to both the 'outputs' list and
                # the set of files created by the first executable.
                # Warn the user if this happens.
                _first = set(os.listdir(lastdir))
                _second = set(os.listdir(tmpdir))
                _outputs = set(outputs)
                _bad = (_first & _second) & _outputs
                _good = _second - _bad
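                # In words: _bad = requested outputs that both steps produced
                # (the first step's copy wins); _good = files only the second
                # step created, which are safe to move back.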
                if len(_bad) > 0:
                    print 'runNovaSAM.py: [WARNING] First and second processes produced identically named files that are among the requested outputs. Ignoring the files produced by the second process and copying out only the first.'
                    for b in _bad: print b
                for g in _good:
                    print 'Change ', os.path.join(tmpdir, os.path.basename(g)), ' to ', os.path.join(lastdir, os.path.basename(g))
                    os.rename(os.path.join(tmpdir, os.path.basename(g)), os.path.join(lastdir, os.path.basename(g)))

            # go back to previous working directory before deleting the temp
            # work space (we are still inside tmpdir at this point)
            os.chdir(lastdir)

            # remove temp directory; shutil.rmtree rather than os.rmdir since
            # tmpdir still holds the second step's temp fcl (and any ignored
            # duplicate outputs), so it is never empty
            if not args.noCleanup:
                shutil.rmtree(tmpdir)

        # wait until after we possibly run a second config to zip the output up
        if args.zipLogs:
            os.system('bzip2 -f log.txt')


        if args.outputNumuDeCAF or args.outputNueDeCAF or args.outputNumuOrNueDeCAF or args.outputNusDeCAF or args.outputValidationDeCAF:
            decafdir = os.listdir(".")
            for fname in decafdir:
                if fname.endswith("caf.root"):
                    if args.outputNumuDeCAF:
                        outputs.append(makeDeCAF('numu/FirstAnalysis/reduce_numu_fa.C', fname, 'numu_contain'))
                    if args.outputNueDeCAF:
                        outputs.append(makeDeCAF('nue/reduce_nue_sa.C', fname, 'nue_contain'))
                    if args.outputNumuOrNueDeCAF:
                        outputs.append(makeDeCAF('nue/reduce_nue_or_numu_sa.C', fname, 'nue_or_numu_contain'))
                    if args.outputNusDeCAF:
                        outputs.append(makeDeCAF('nus/reduce_nus.C', fname, 'nus_contain'))
                    if args.outputValidationDeCAF:
                        outputs.append(makeDeCAF('nus/reduce_nue_or_numu_or_nus.C', fname, 'nue_or_numu_or_nus_contain'))
        print "\nAt the start of check and move."
        checkAndMoveFiles(inFile, outputs, args.noCleanup)


        if args.copyOut:
            if args.jsonMetadata:
                print "\nMake JSONs"
                makeMetadataJSONs()
                print "\nMade JSONs, now to copy files."

            copyOutFiles(dest, args.hashDirs, args.runDirs, fileMetaDataMgr.runNum, args.noCleanup, args.declareLocations, args.declareFiles)
            print "Copied files."
        else:
            # no copy-out requested: remove output files if they exist
            for file in outList:
                try:
                    os.remove("./" + file)
                except OSError:
                    pass

    #clean up section
    if not args.noCleanup:
        os.remove(tmpFclName)
        os.remove(inFile)
        dirList = os.listdir(".")
        for file in dirList:
            skip_match = _skip_pattern.match(file)
            if skip_match != None:
                print file, "matches RootOutput*.root: clean up"
                os.remove("./" + file)

    dh = ifdh.ifdh("http://samweb.fnal.gov:8480/sam/nova/api")
    dh.cleanup()

    exit(retCode)