runNovaSAM.py
Go to the documentation of this file.
1 #!/bin/env python
2 
3 from __future__ import print_function
4 from __future__ import division
5 from builtins import str
6 from builtins import range
7 from past.utils import old_div
8 import os, sys
9 import shutil
10 import samweb_client, ifdh
11 import pprint
12 import argparse
13 import subprocess
14 import re
15 import hashlib
16 import json
17 import resource
18 import string
19 import datetime
20 from samweb_client.utility import fileEnstoreChecksum
21 from samweb_client.exceptions import *
22 
23 import MetadataUtils
24 import NovaGridUtils as NGU
25 
26 ## Do a little bit of trickery with sys.argv to stop ROOT from gobbling it up and killing --help
27 argvCopy = sys.argv[:] # Make a copy
28 sys.argv = sys.argv[:1] # Replace it with just the first argument, i.e. script name
29 import ROOT
30 # The weird thing is that you have to do something with ROOT in order to make it parse args. Ok, we would have done this anyway.
31 ROOT.gErrorIgnoreLevel=3000 # Stop ROOT from complaining about not having dictionaries, 3000 is below kError but above kWarning.
32 sys.argv = argvCopy # Restore sys.argv. Thanks Wim, that's a real "solution"!
33 
# Module-level MD5 object -- a leftover from the Python 2 `md5` module API.
# NOTE(review): hashlib objects have no .new() method, so the md5.new(...)
# call in getOutDir() cannot work as written; see that function.
md5 = hashlib.md5()

# Files whose names match this pattern (art RootOutput temporaries) are
# skipped during copy-out / declaration.
_skip_pattern = re.compile(r"^.*(RootOutput).*\.root", re.IGNORECASE)
38 
def make_temp_fcl(fclFile, inFileBase):
    """Create a job-local copy of *fclFile*, uniquified with *inFileBase*,
    and append NOvA metadata parameters to it when the FCL configures the
    metadata analyzer module.

    Returns the name of the temporary FCL file (in the current directory).
    Relies on the module-level globals ``fileMetaDataMgr`` and ``args``
    that are set up in ``__main__``.
    """
    # make a temporary copy of the fcl file
    fclPath = resolveFclPath(fclFile)
    print("Found fcl file here: ", fclPath)
    tmpFclName = os.path.basename(fclPath).replace(".fcl", "_" + os.path.splitext(inFileBase)[0] + ".fcl")
    print("Creating local copy : ", tmpFclName)
    shutil.copy(fclPath, tmpFclName)

    # determine if we're using the metadata module
    # add parameters that describe this job
    # NOTE:
    # fhicl-expand (and other FCL tools) don't allow you
    # to give the absolute path to a FCL. Instead FCLs have to
    # live in $FHICL_FILE_PATH. $FHICL_FILE_PATH always begins
    # with './:', i.e., search current dir first.
    # so just make sure you don't cd() away from this dir
    # between when the FCL is copied to this dir above and the check below.
    isRunningMetadataModule=True
    try:
        # grep -q succeeds only if the expanded FCL configures physics.analyzers.metadata
        subprocess.check_call("fhicl-expand %s | grep -q ^physics.analyzers.metadata.params." % tmpFclName, shell=True)
    except subprocess.CalledProcessError:
        isRunningMetadataModule=False

    if isRunningMetadataModule:
        with open(tmpFclName, 'a') as fclFileObj:
            if not fileMetaDataMgr.isSam4Users():
                if args.npass != None:
                    MetadataUtils.addMetadataToFCL(fclFileObj, "NOVA.subversion", '"' + fileMetaDataMgr.subversion + '"')

                if fileMetaDataMgr.dataFlag == "sim" and fileMetaDataMgr.generator in MetadataUtils.neutrinoGenerators:
                    MetadataUtils.addMetadataToFCL(fclFileObj, "NOVA.flux_version", '"' + fileMetaDataMgr.fluxVersion + '"')

            # NOTE(review): indentation was lost in this view; these three calls
            # are placed inside the `with` block but outside the isSam4Users
            # check -- confirm against the original source.
            MetadataUtils.addMetadataToFCL(fclFileObj, "NOVA.skim", '"' + fileMetaDataMgr.skim + '"')
            MetadataUtils.addMetadataToFCL(fclFileObj, "NOVA.systematic", '"' + fileMetaDataMgr.systematic + '"')
            MetadataUtils.addMetadataToFCL(fclFileObj, "NOVA.Special", '"' + fileMetaDataMgr.special + '"')

    return tmpFclName
76 
    # NOTE(review): the enclosing `def` line is missing from this view of the
    # file (presumably `def setVMemLimit():`). The __main__ section comments
    # say this routine is no longer called because it caused problems on some
    # OSG sites.
    #memLimit = 3.9*1024**3
    # Use decimal GB (10**9) rather than GiB for the address-space cap.
    memLimit = 3.9*1000**3
    print("Old virtual memory limit:", resource.getrlimit(resource.RLIMIT_AS))
    print("Limiting the virtual memory of the nova job to 3.9 GB")
    # NOTE(review): memLimit is a float; resource.setrlimit expects integer
    # limits -- verify this does not raise on Python 3.
    resource.setrlimit(resource.RLIMIT_AS, (memLimit, memLimit))
    print("New virtual memory limit:", resource.getrlimit(resource.RLIMIT_AS))
84 
def makeDirSafely(dir):
    """Ensure *dir* exists.

    Non-/pnfs paths are delegated to makeDirIfNeeded(); /pnfs paths are
    checked and created through IFDH, tolerating races with other jobs
    that may create the same directory concurrently.
    """
    # Plain filesystem path: no IFDH needed.
    if not dir.startswith("/pnfs"):
        makeDirIfNeeded(dir)
        return

    print("runNovaSAM is making a directory with IFDH")
    handle = ifdh.ifdh("http://samweb.fnal.gov:8480/sam/nova/api")
    try:
        print("Checking if directory ", dir, "exists")
        handle.ls(dir, 1, "")
        handle.chmod(774, dir, "")
    except (RuntimeError, IOError) as e:
        # ls failed, so the directory presumably doesn't exist yet.
        try:
            print("It doesn't - make directory", dir)
            handle.mkdir(dir)
        except:
            # Another job may have created it between our ls and mkdir.
            print("Tried to make directory and couldn't. Perhaps it already exists?")
103 
104 
    # NOTE(review): the enclosing `def` line is missing from this view of the
    # file (presumably `def makeDirIfNeeded(dir):`).
    os.umask(0o02) # (rw-rw-r--) for files and (rwxrwxr-x) for directories.
    if not os.path.isdir(dir):
        print("runNovaSAM is making a directory: ", dir)
        try:
            os.mkdir(dir)
        except:
            # Creation can fail benignly if another job created it first.
            print("Couldn't make the directory... some other job perhaps did, or permissions did not allow ")
        # NOTE(review): indentation lost in this view; this final existence
        # check is placed after the try/except so it runs regardless --
        # confirm against the original source. Also note the message has a
        # stray '+' inside the string literal.
        if not os.path.isdir(dir):
            raise Exception("Failed to make directory + " + dir )
115 
116 
def getOutDir(pathname, dest, hashDirs=False, runDirs=False, runNum=0):
    """Return (and, for runDirs, create) the output directory for *pathname*.

    pathname -- output file name (only its basename is used).
    dest     -- base destination directory.
    hashDirs -- add three nested single-hex-digit subdirectories taken from
                the MD5 of the file name, spreading files over 4096 dirs.
    runDirs  -- add <000XYZ>/<XYZUW> subdirectories for run number XYZUW
                (these are created on disk via makeDirSafely).
    runNum   -- run number used when runDirs is set.
    """
    dirs = [dest]
    if hashDirs:
        head, tail = os.path.split(pathname)
        # Bug fix: the old code called md5.new(tail) on the module-level
        # hashlib object -- hashlib objects have no .new() (that was the
        # Python 2 `md5` module API) and require bytes input. Build a fresh
        # digest of the file's basename instead.
        digest = hashlib.md5(tail.encode()).hexdigest()
        dirs += list(digest[:3])
    if runDirs:
        head, tail = os.path.split(pathname)
        runStr = str(runNum)
        # First three digits of the run, zero-padded to six characters.
        multiRunDir = runStr[:3].zfill(6)
        makeDirSafely(os.path.join(dest, multiRunDir))
        makeDirSafely(os.path.join(dest, multiRunDir, runStr))
        dirs += [multiRunDir, runStr]
    return os.path.join(*dirs)
131 
def checkAndMoveFiles(inFile, outputs, noCleanup=False, copyOnly=False):
    """Checks all root files in the current directories for zombie or
    recovered status. Bad files are deleted while good files are moved to
    the results subdirectory for copy out ease.

    inFile    -- the job's input file; never treated as an output.
    outputs   -- requested output file names; anything else is deleted
                 (unless noCleanup is set).
    noCleanup -- keep unrequested files instead of deleting them.
    copyOnly  -- copy good files into ./results instead of moving them.

    Fixes vs. the original: the docstring used to sit after the first
    statement (where it was a no-op string literal, not a docstring), and
    the ROOT file handle is now closed on the zombie path as well.
    """
    makeDirIfNeeded('./results')
    inFileBase = os.path.basename(inFile)
    baseDir = "."

    print('Looking for requested outputs: ')
    for o in outputs: print(o)

    # If declaring files have to do non-decafs first, since they're the parents
    # of the decafs. If not, it doesn't hurt to do it in that order anyway.
    for secondPass in [False, True]:
        for root, dirs, filenames in os.walk(baseDir):
            if root == baseDir:
                for file in filenames:
                    # Pass 1 handles everything but decafs; pass 2 the decafs.
                    if file.endswith('decaf.root') != secondPass: continue
                    if (file.endswith(".root") or file.endswith(".h5")) and file != inFileBase:
                        fileWPath = os.path.join(root, file)
                        # If the file isn't in my outputs then delete it.
                        if file not in outputs:
                            if not noCleanup:
                                # Do not copy over, just delete
                                print("File", fileWPath, " is not among requested outputs, removing")
                                os.remove(fileWPath)
                            continue
                        # The file is a requested output -- check it is valid.
                        if file.endswith(".root"):
                            print("In checkAndMoveFiles, fileWPath is %s" %fileWPath)
                            rootFile = ROOT.TFile(fileWPath)
                            if rootFile.IsZombie() or rootFile.TestBit(ROOT.TFile.kRecovered):
                                # Do not copy over, just delete.
                                print("File", fileWPath, "is Zombie - remove it")
                                os.remove(fileWPath)
                            else:
                                newFilePath = os.path.join(root, "results", file)
                                print("New file name is %s" %newFilePath)
                                if copyOnly:
                                    shutil.copyfile(fileWPath, newFilePath)
                                else:
                                    os.renames(fileWPath, newFilePath)
                            # Close in both branches so no handle leaks.
                            rootFile.Close()
                        elif file.endswith(".h5"):
                            # Only import h5py if we actually have h5 files.
                            import h5py
                            print("In checkAndMoveFiles, fileWPath is %s" %fileWPath)
                            # If a valid HDF5 file, stage it; otherwise delete.
                            if h5py.is_hdf5(fileWPath):
                                newFilePath = os.path.join(root, "results", file)
                                print("New file name is %s" %newFilePath)
                                if copyOnly:
                                    shutil.copyfile(fileWPath, newFilePath)
                                else:
                                    os.renames(fileWPath, newFilePath)
                            else:
                                print("File", fileWPath, "is Zombie - remove it")
                                os.remove(fileWPath)
    return
194 
195 #def h5MetaHack(inFile):
196 # print "Doing a hack to get HDF5 metadata...Will take CAF metadata and change subtly."
197 # ### Make my client and get my CAF metadata
198 # samweb = samweb_client.SAMWebClient(experiment='nova')
199 # md = MetadataUtils.createMetadata( inFile.replace('.h5caf.h5', '.caf.root') )
200 # ### Change some parameters.
201 # md['file_name'] = inFile
202 # md['data_tier'] = unicode('h5')
203 # md['file_size'] = os.path.getsize( inFile )
204 # return md
205 
def declareFile(fileWPath):
    """Checks file for a TKey of RootFileDB. If it exists, run
    sam_metadata_dumper and construct appropriate metadata for the file.
    Use that metadata to declare the file to SAM.

    Relies on the module-level globals ``args`` and ``inFile`` set up in
    ``__main__``. Temporarily chdir()s into the file's directory; the
    original working directory is restored before returning.
    """
    samweb = samweb_client.SAMWebClient(experiment='nova')

    filename = os.path.basename(fileWPath)
    print(filename)

    # Only plain art ROOT files carry a RootFileDB key; CAF/h5 files don't.
    rootFileDB = False
    if filename.endswith(".root"):
        rootFile = ROOT.TFile(fileWPath)
        rootFileDB = rootFile.FindKey("RootFileDB")
    else:
        rootFile = None

    olddir = os.getcwd()
    os.chdir(os.path.dirname(fileWPath))
    if rootFileDB or filename.endswith("caf.root") or filename.endswith(".h5"):
        md = MetadataUtils.createMetadata(filename)
        # Check that md exists, and then declare!
        if md == None:
            print("No metadata found!")
        else:
            # If I have a transpose file want to add plane (and cell) number.
            if "cell" in filename:
                try:
                    plane = re.search('^.*outplane(\d*).', filename).group(1)
                    cell = re.search('^.*cell(\d*).', filename).group(1)
                    print("Plane number:", plane, "Cell number:", cell, ". Is a transpose file")
                    md['Calibration.PlaneNumber'] = plane
                    md['Calibration.CellNumber'] = cell
                except:
                    # Bare except: the regex may not match for plane-mode files.
                    print("No cell number found - could be a plane mode transpose file")

            elif "outplane" in filename:
                print(filename)
                try:
                    plane = re.search('^.*outplane(\d*).', filename).group(1)
                    print("Plane number:", plane, ". Is a transpose file")
                    md['Calibration.PlaneNumber'] = plane
                except:
                    print("No plane number found - not a transpose file")
            ### Make sure that the txtfile is in the parent list.
            if args.txtfiledef:
                md['parents'].append({u'file_name':str(inFile)})

            # Print out the metadata before trying to declare
            pprint.pprint(md)
            print('')
            print("Declaring", fileWPath, "to SAM")
            try:
                samweb.declareFile(md)
                #samweb.validateFileMetadata(md=md)
            except Exception as inst:
                # Typically means the file already exists in SAM.
                #print fileWPath, "already exists in SAM"
                print(inst)
    else:
        print(fileWPath, "does not contain RootFileDB, do not try to declare")

    if rootFile:
        rootFile.Close()
    os.chdir(olddir)
    return
268 
269 
def declareLogFile(logName, logFile, rootName):
    """Declare a job log file to SAM as a child of its data file.

    logName  -- SAM name for the log file.
    logFile  -- path of the log on disk (used only for its size).
    rootName -- data file the log belongs to (becomes the parent).

    Declaration errors are printed and swallowed.
    """
    # Mirrors what NovaFTS/plugins/nova_log_metadata.py builds.
    metadata = {
        'file_name': os.path.basename(logName),
        'file_size': os.path.getsize(logFile),
        'data_tier': 'log',
        'file_format': 'log',
        'file_type': 'unknown', # maybe 'nonPhysicsGeneric'?
        'parents': [{'file_name': os.path.basename(rootName)}],
    }
    print("16) declareLogFile(logName, logFile, rootName):")
    print("Declaring", logName, "to SAM")
    try:
        client = samweb_client.SAMWebClient(experiment='nova')
        client.declareFile(metadata)
    except Exception as inst:
        print(inst)
287 
288 
def fileExists(outPath, dh):
    """Return True if *outPath* exists according to ifdh, else False.

    ifdh 1_7_0 and newer return a one-element sequence for an existing
    path and an empty one otherwise; some versions instead raise when the
    path is missing, which is likewise treated as "not there".
    """
    try:
        listing = dh.ls(outPath, 1, "")
    except (RuntimeError, IOError) as e:
        # Older ifdh throws when the file is absent.
        return False
    return bool(listing)
300 
def listFiles(outPath, dh):
    """Return the ifdh listing of *outPath*.

    On any failure, returns a (message, path) tuple instead of raising --
    legacy behavior; callers only print the result.
    """
    try:
        listing = dh.ls(outPath, 1, "")
    except:
        return "Exception while trying to ls, OutPath =", outPath
    return listing
306 
307 
309  """For every ROOT file, try to extract its metadata into a matching .json
310  file in the same directory"""
311  baseDir = "./results"
312  # If using --txtfiledef, want to pass multiple files at a time to sam_meta_dumper.
313  # This is because it simply takes too long to call it for each indiv file
314  # when there are 800+ outputs. 5 s each -> ~1 hour!
315  TransposeList = ""
316  # Declare the samweb client within this function
317  samweb = samweb_client.SAMWebClient(experiment='nova')
318  # Loop through directories to search for files to make json files for.
319  for root, dirs, filenames in os.walk(baseDir):
320  if root == baseDir:
321  # Push the h5 files to the front of the list so that the CAF files remain
322  # available to hack in the metadata.
323  for ifile in range(len(filenames)):
324  if("h5" in filenames[ifile]):
325  filenames = [filenames[ifile]] + filenames[:ifile] + filenames[ifile+1:]
326  for file in filenames:
327  if (file.endswith (".root") or file.endswith(".h5") ) and file != inFileBase:
328  skip_match = _skip_pattern.match(file)
329  if skip_match == None:
330  # Set some quick and useful variables.
331  olddir = os.getcwd()
332  fileWPath = os.path.join(root, file)
333  # If a transpose file want to make the json subtly differently.
334  if args.txtfiledef and "outplane" in file:
335  # This works for the cellmode of the transposer as well, due to the filename including outplane and cell.
336  print("Adding %s to TransposeList" %file)
337  TransposeList += "%s " %file
338  continue
339  # Which extractor am I using?
340  extractor = 'sam_metadata_dumper'
341  if file.endswith('caf.root'):
342  extractor = 'extractCAFMetadata'
343  elif file.endswith('.h5'):
344  extractor = 'extractHDF5Metadata'
345  try:
346  # sam_metadata_dumper doesn't apply basename() to filename. https://cdcvs.fnal.gov/redmine/issues/8987
347  os.chdir(os.path.dirname(fileWPath))
348  meta = subprocess.check_output([extractor, os.path.basename(fileWPath)])
349  if file.endswith (".root"):
350 
351  jsonf = open(file.replace('.root', '.json'), 'w')
352  jsonf.write(meta)
353  jsonf.close()
354  print("Made metadata for %s" %file)
355  elif file.endswith(".h5"):
356  print("\nNow to make my json file for my h5...\n")
357  jsonf = open(file.replace('.h5caf.h5', '.h5caf.json'), 'w')
358  jsonf.write(meta)
359  jsonf.close()
360  print("Made metadata for %s" %file)
361  else:
362  print("I'm not sure what file extension you have...")
363  except:
364  print("Error extracting metadata from file.")
365  finally:
366  os.chdir(olddir)
367  # Make the Transpose json files.
368  # Again same argument, outplane is already in the filenames for cell mode
369  if args.txtfiledef and "outplane" in file:
370  olddir = os.getcwd()
371  os.chdir(baseDir)
372  MakeTransposeJson( TransposeList )
373  os.chdir(olddir)
374 
def MakeTransposeJson( TransposeList ):
    """Transpose files need some extra tweaking when making .json files,
    largely because there are so many of them. This takes in a list of
    files, and makes appropriate .json files in the same directory.

    Relies on the module-level global ``inFile`` set up in ``__main__``.
    """
    # If using --txtfiledef, I can now pass my file list to sam_meta_dumper.
    print("Is MakeTransposeJson called without a txt def, if you see this then yes.")
    MetaListFile="AllMetaJson.txt"
    meta_cmd="sam_metadata_dumper -s " + TransposeList + " > " + MetaListFile
    os.system(meta_cmd)
    # Now want to open the file and split by "}," character.
    MetaFile = open( MetaListFile )
    MetaLines = MetaFile.read().split(" },")
    # Loop through lines, and appropriately separate out json files.
    for i in range(0,len( MetaLines ) ):
        meta=MetaLines[i]
        # Figure out file name...this is assuming that file name is always the first entry...
        StName=re.search('"(.+?).root"', meta ).group(1)
        filename=StName+".json" # Effecitvely replacing .root with .json
        # If transpose file add PlaneNumber and CellNumber if run in cell mode
        if "cell" in filename:
            try:
                plane = re.search('^.*outplane(\d*).', filename).group(1)
                cell = re.search('^.*cell(\d*).', filename).group(1)
                print("Plane number:", plane, "Cell number:", cell, ". Is a transpose file")
                # Splice the plane/cell keys into the json text just before
                # the base_release key, then %-format in the numbers.
                meta = meta.replace('"calibration.base_release"', '"calibration.PlaneNumber": "%s",\n "calibration.CellNumber": "%s",\n "calibration.base_release"')%(plane, cell)
            except:
                print("No cell number found - could be a plane mode transpose file")

        elif "outplane" in filename:
            try:
                plane = re.search('^.*outplane(\d*).', filename).group(1)
                print("Plane number:", plane, ". Is a transpose file")
                meta = meta.replace('"calibration.base_release"', '"calibration.PlaneNumber": "%s",\n "calibration.base_release"') %plane
            except:
                print("Error extracting plane number from transpose file.")

        ### Make sure that the txtfile is in the parent list.
        # NOTE(review): BUG -- `meta` is a string here, not a dict, so this
        # subscript/append raises TypeError at runtime. The intent appears to
        # be to splice the input file into the json's "parents" list (compare
        # declareFile()), but a correct fix can't be derived from this view.
        meta['parents'].append({u'file_name':str(inFile)})

        # Now open the json file
        fout=open(filename,'w')
        # Want to make sure that the json starts with '{'
        # NOTE(review): meta[:0] is the empty string, so this actually
        # replaces the first character with '{\n' rather than prepending.
        if meta[0] not in "{":
            meta = meta[:0] + '{\n' + meta[1:]
        # Want to make sure that the json ends with a double '}'
        if i < len(MetaLines)-1:
            meta += "}\n}\n"
        # Write and close the json file
        fout.write(meta)
        fout.close()
424 
def copyOutFiles(dest, hashDirs=False, runDirs=False, runNum=0, noCleanup=False, declareLocation=False, declareLogs=False):
    """Builtin facility to copy out art files. This adds in a subdirectories
    with a single hex digit each corresponding to the first three digits in
    the hash of the output file name. This splits up files into 4096 separate
    subdirectories, preventing overfull directories. Copy out does not happen
    if the file already exists in the output.

    NOTE(review): indentation below is reconstructed from a view that lost
    it -- confirm nesting (especially the `break` and the cleanup block)
    against the original source. Relies on the module-level global
    ``inFileBase`` set up in ``__main__``.
    """
    dh = ifdh.ifdh("http://samweb.fnal.gov:8480/sam/nova/api")
    baseDir = "./results"
    # NOTE(review): file declaration is keyed off declareLogs -- verify this
    # aliasing is intentional.
    declareFiles = declareLogs
    for root, dirs, filenames in os.walk(baseDir):
        if root == baseDir:

            # copy out root files before h5 files
            ordered_files = [s for s in filenames if ".root" in s]
            for s in filenames:
                if ".h5" in s:
                    ordered_files.append(s)

            for file in ordered_files:
                if (file.endswith (".root") or file.endswith(".h5") ) and file != inFileBase:
                    fileWPath = os.path.join(root, file)
                    outDir = getOutDir(file, dest, hashDirs, runDirs, runNum)

                    skip_match = _skip_pattern.match(file)
                    if skip_match == None:
                        outPath = os.path.join(outDir, file)

                        if fileExists(outPath, dh):
                            print('copyOutFiles: ', outPath, 'already moved. Skipping')
                        else:
                            # note: this will fail if the file already exists
                            returnValue = dh.cp(["-D", fileWPath, outDir])
                            if returnValue != 0:
                                print("Copy out failed for file:", fileWPath, file=sys.stderr)
                                print("Skipping it.", file=sys.stderr)
                            else:
                                if declareFiles:
                                    declareFile(fileWPath)
                                ###################
                                # Declare the file's location to SAM if we have the declareLocation option on
                                if declareLocation==True :
                                    # NOTE(review): string.replace() does not exist in
                                    # Python 3 (the module-level function was removed);
                                    # this should be outDir.replace('s3://', 's3:/').
                                    loc = string.replace(outDir, 's3://','s3:/')
                                    print("Declaring location %s for file %s\n" % (loc,file))
                                    sam = samweb_client.SAMWebClient('nova')
                                    ret=sam.addFileLocation(file, loc)
                                    if ret.status_code != 200 :
                                        print(" SAMWEB Unable to declare file location (%s, %s) status code %s" %(file, loc, ret.status_code))
                                # Copy the matching metadata json, if one was made.
                                if fileWPath.endswith (".root"):
                                    jsonPath = fileWPath.replace('.root', '.json')
                                elif fileWPath.endswith (".h5"):
                                    jsonPath = fileWPath[:-3] + '.json'
                                if os.path.isfile(jsonPath):
                                    if fileExists(os.path.join(outDir, os.path.basename(jsonPath)), dh):
                                        print('copyOutFiles: ', os.path.join(outDir, os.path.basename(jsonPath)), 'already moved. Skipping')
                                    else:
                                        returnValue = dh.cp(['-D', jsonPath, outDir])
                                        if returnValue != 0:
                                            print("Copy out failed for file: " + jsonPath, file=sys.stderr)
                                            print("Skipping it.", file=sys.stderr)
                                else:
                                    print('JSON not found %s' % jsonPath)

                                # Copy out the job log (compressed first, then plain),
                                # renamed to match this output file.
                                for ext in ['.bz2', '']:
                                    if os.path.isfile('log.txt'+ext):
                                        if file.endswith (".root"):
                                            logName = file.replace('.root', '.log'+ext)
                                        elif file.endswith (".h5"):
                                            logName = file + '.log'+ext
                                        if fileExists(os.path.join(outDir,logName), dh):
                                            print('copyOutFiles: ', os.path.join(outDir, logName), 'already moved. Skipping')
                                        else:
                                            returnValue = dh.cp(['log.txt'+ext, os.path.join(outDir, logName)])

                                            if returnValue != 0:
                                                print("Copy out failed for file: " + logName, file=sys.stderr)
                                                print("Skipping it.", file=sys.stderr)

                                            if declareLogs:
                                                declareLogFile(logName, 'log.txt'+ext, file)

                                            # Remove the copied-out log so it's not in
                                            # the way for new log creation.
                                            os.remove('log.txt'+ext)

                                        break

                    else:
                        print("It does exist, not copying.")
                    if not noCleanup:
                        print("Removing", fileWPath)
                        os.remove(fileWPath)

                        if fileWPath.endswith(".root"):
                            jsonPath = fileWPath.replace('.root', '.json')
                        elif fileWPath.endswith(".h5"):
                            jsonPath = fileWPath + ".json"
                        if os.path.isfile(jsonPath):
                            print('Removing', jsonPath)
                            os.remove(jsonPath)
    return
521 
def makeDeCAF(script, fname, special):
    """Run a CAFAna decaf macro over *fname* and return the decaf name.

    script  -- CAFAna macro (relative to <novaSource>/CAFAna/).
    fname   -- input CAF file name ending in '.root'.
    special -- tag appended to the base name of the output.

    The output tier gets 'de' spliced in (e.g. .caf.root -> .decaf.root).
    Aborts via NGU.fail() if no NOvA source tree can be located.
    """
    trimname = fname[:-5]                       # drop the trailing '.root'
    dotidx = trimname.rindex('.') + 1           # position after the last '.'
    # Insert 'de' ahead of the final three characters of the tier.
    decaf_tier = trimname[dotidx:-3] + 'de' + trimname[-3:]
    oname = '{0}_{1}.{2}.root'.format(trimname, special, decaf_tier)
    # Locate the NOvA source tree: SRT first, then NOVASOFT.
    novaSource = os.getenv("SRT_PUBLIC_CONTEXT", "undefined")
    if novaSource == "undefined":
        novaSource = os.getenv("NOVASOFT_DIR", "undefined")
        if novaSource == "undefined":
            NGU.fail("Unable to locate NOvA source code")
        else:
            novaSource = os.getenv("NOVASOFT_DIR") + "/source"

    os.system('cafe -bq ' + novaSource + '/CAFAna/' + script + ' ' + fname + ' ' + oname + ' 2>&1')
    return oname
537 
def resolveFclPath(fcl):
    """Resolve *fcl* to a usable path.

    Absolute paths are returned unchanged; otherwise each entry of
    $FHICL_FILE_PATH is searched in order and the first hit is returned.
    Raises IOError when the file cannot be found anywhere.
    """
    # Absolute path: nothing to search for.
    if fcl[0] == "/":
        return fcl

    # Walk FHICL_FILE_PATH entries, keeping a '/' between dir and file.
    for searchDir in os.environ["FHICL_FILE_PATH"].split(":"):
        candidate = searchDir + "/" + fcl
        if os.path.isfile(candidate):
            return candidate

    # Nothing matched anywhere on the path.
    raise IOError(sys.argv[0] + ": config file " + fcl + " not found in FHICL_FILE_PATH")
553 
554 
555 if __name__=='__main__':
556 
557  parser = argparse.ArgumentParser(description='Run the nova command using SAM metadata')
558  parser.add_argument('inFile', help='The input file to run over', type=str)
559  parser.add_argument('--config', '-c', help='FHiCL file to use as configuration for nova executable', type=str)
560  parser.add_argument('--outTier', help="""
561  Data tier of the output file, multiple allowed, formatted as
562  <name_in_fcl_outputs>:<data_tier>.' Optionally, if a second colon is
563  included, the third argument will be treated as an additional naming string,
564  allowing multiple outputs with the same data_tier but unique file names.
565  Example: out1:reco:shifted leads to <file_id>_shifted.reco.root
566  """, type=str, action='append')
567  parser.add_argument('--cafTier', help="""Module label for CAF output,
568  multiple allowed. Format as <cafmaker_module_label>:<data_tier>.
569  Optionally, if a second colon is
570  included, the third argument will be treated as an additional naming string,
571  allowing multiple outputs with the same data_tier but unique file names.
572  Example: cafmaker:caf:shifted leads to <file_id>_shifted.caf.root
573  """, type=str, action='append')
574  parser.add_argument('--flatTier', help="""Module label for FlatCAF output,
575  multiple allowed. Format as <flatmaker_module_label>:<data_tier>.
576  Optionally, if a second colon is
577  included, the third argument will be treated as an additional naming string,
578  allowing multiple outputs with the same data_tier but unique file names.
579  Example: flatmaker:flatcaf:shifted leads to <file_id>_shifted.flatcaf.root
580  """, type=str, action='append')
581  parser.add_argument('--histTier', help='File identifier string for TFileService output, only one allowed. Supply as --histTier <id> for output_name.<id>.root, where output_name is assembled based on the input file.', type=str)
582  parser.add_argument('--h5Tier', help="""Module label for H5 output,
583  multiple allowed. Format as <h5maker_module_label>:<data_tier>.
584  Optionally, if a second colon is
585  included, the third argument will be treated as an additional naming string,
586  allowing multiple outputs with the same data_tier but unique file names.
587  Example: h5maker:h5:shifted leads to <file_id>_shifted.h5
588  """, type=str, action='append')
589  parser.add_argument('--outputNumuDeCAF', help='Make standard numu decafs for all CAF files produced', action='store_true')
590  parser.add_argument('--outputNueDeCAF', help='Make standard nue decafs for all CAF files produced', action='store_true')
591  parser.add_argument('--outputNumuOrNueDeCAF', help='Make standard numu or nue decafs for all CAF files produced', action='store_true')
592  parser.add_argument('--outputNusDeCAF', help='Make standard nus decafs for all CAF files produced', action='store_true')
593  parser.add_argument('--outputValidationDeCAF', help='Make validation (nue_or_numu_or_nus) decafs for all CAF files produced during the job', action='store_true')
594  parser.add_argument('--cosmicsPolarity', help='.', type=str)
595  parser.add_argument('--npass', help='.', type=str)
596  parser.add_argument('--skim', help='Specify skimming name.', type=str)
597  parser.add_argument('--systematic', help='Flag as systematic variation (append to file name and metadata parameters).', type=str)
598  parser.add_argument('--specialName', help='Additional name to add before data tier in output.', type=str)
599  parser.add_argument('--genietune', help='Specify the GENIE tune (append to file name and metadata parameters).', type=str)
600  parser.add_argument('--NPPFX', help='Number of PPFX universes.', type=str)
601  parser.add_argument('-n', help='Number of events to run over', type=int)
602  parser.add_argument('--copyOut', help='Use the built in copy out mechanism', action='store_true')
603  parser.add_argument('--dest', '-d', help='Output file destination for --copyOut functionality.', type=str)
604  parser.add_argument('--hashDirs', help='Use hash directory structure in destination directory.', action='store_true')
605  parser.add_argument('--runDirs', help='Use run directory structure in destination directory, 000XYZ/XYZUW for run number XYZUW.', action='store_true')
606  parser.add_argument('--autoDropbox', help='Use automatic dropox location', default=False, action='store_true')
607  parser.add_argument('--jsonMetadata', help='Create JSON files with metadata corresponding to each output file, and copy them to the same destinations', action='store_true')
608  parser.add_argument('--declareFiles', help='Declare files with metadata on worker node', action='store_true')
609  parser.add_argument('--declareLocations', help='Declare the file output locations to SAM during the copy back of the files', action='store_true')
610  parser.add_argument('--logs', help='Return .log files corresponding to every output', action='store_true')
611  parser.add_argument('--zipLogs', help='Format logs as .bz2 files. Implies --logs', action='store_true')
612  parser.add_argument('--noCleanup', help='Skip working directory cleanup step, good for interactive debugging or custom copy-out.', action='store_true')
613  parser.add_argument('--gdb', help='Run nova executable under gdb, print full stack trace, then quit gdb.', action='store_true')
614  parser.add_argument('--lemBalance', help='Choose lem server based on (CLUSTER+PROCESS)%%2 to balance load', action='store_true')
615  parser.add_argument('--lemServer', help='Specify lem server', type=str)
616  parser.add_argument('--txtfiledef', help='Use if the input definition is made up of text files, each containing a list of file names',default=False, action='store_true')
617  parser.add_argument('--precopyscript', help='Execute script PRECOPYSCRIPT within runNovaSAM.py, after running the nova -c command.', type=str, action='append')
618  parser.add_argument('--second_config', help="""Second configuration fcl executed after main process.
619  nova is executed with art file that is output from the main process.
620  Files that get produced by this process that are named identically
621  to files produced by the main process and are among the requested outputs
622  are ignored and the file produced by the first process is returned""", type=str)
623 
624  args = parser.parse_args()
625 
626  # Sanity check for output
627  if args.copyOut:
628  if not (args.outTier or args.cafTier or args.flatTier or args.histTier or args.h5Tier):
629  raise Exception("Copy-out requested with --copyOut, but no outputs specified. Nothing will happen with output, aborting.")
630  if not (args.dest or "DEST" in os.environ):
631  raise Exception("Copy-out requested with --copyOut, but no output directory specified. Use --dest or $DEST.")
632 
633  # No longer set VMem limit -- causes problems on some OSG sites
634  #setVMemLimit()
635 
636  samweb = samweb_client.SAMWebClient(experiment='nova')
637 
638  if "SRT_BASE_RELEASE" in os.environ:
639  release = os.environ["SRT_BASE_RELEASE"]
640  elif "NOVASOFT_VERSION" in os.environ:
641  release = os.environ["NOVASOFT_VERSION"]
642  else:
643  print("No release set!")
644  exit(1)
645 
646 
647  inFile = args.inFile
648  inFileBase = os.path.basename(inFile)
649 
650  # Which file do I want to use to get the metadata?
651  if not args.txtfiledef:
652  # Normally my infile.
653  metadata = samweb.getMetadata(inFileBase)
654  fileMetaDataMgr = MetadataUtils.metaDataMgr(inFile, metadata, release, args.systematic, args.skim, args.cosmicsPolarity, args.npass, args.specialName)
655  else:
656  # However, if using a txtfile def, want to use the first file in the txt file.
657  with open( inFile ) as f:
658  PassFile = f.readline().strip()
659  print("Looking at ", PassFile)
660  #metadata_cmd = "ifdh_fetch %s" %PassFile
661  #os.system(metadata_cmd)
662  metadata = samweb.getMetadata(PassFile)
663  fileMetaDataMgr = MetadataUtils.metaDataMgr(PassFile, metadata, release, args.systematic, args.skim, args.cosmicsPolarity, args.npass, args.specialName)
664 
665 
666 
667  tmpFclName = make_temp_fcl(args.config, inFileBase)
668 
669  # Open the fcl file so that we can append output filenames to it
670  fclFileObj = open(tmpFclName, 'a')
671 
672  print(" Open the fcl file so that we can append output filenames to it ::::::::::::::::::::::::::: fclFileObj=", fclFileObj)
673  doMeta = True
674  if not (args.outTier or args.cafTier or args.flatTier or args.h5Tier):
675  doMeta = False
676 
677  # Start setting up the nova command, add SAM parameters
678  cmdList = []
679  cmdList.append('nova')
680  cmdList.append('-c')
681  cmdList.append(tmpFclName)
682  if doMeta:
683  cmdList.append('--sam-application-family=nova')
684  cmdList.append('--sam-application-version=' + release)
685  if not fileMetaDataMgr.isSam4Users():
686  cmdList.append('--sam-file-type=' + fileMetaDataMgr.fileType)
687 
688  if not args.outTier:
689  args.outTier = []
690  if not args.cafTier:
691  args.cafTier = []
692  if not args.flatTier:
693  args.flatTier = []
694  if not args.h5Tier:
695  args.h5Tier = []
696 
697  if not args.precopyscript:
698  args.precopyscript = []
699 
700  outList = [] # list of files to be supplied with -o
701  outputs = [] # list of files that will be moved to results directory, includes CAFs
702  # Loop over output tiers
# Each outTier is "<output_module>:<data_tier>". When the module part contains
# commas it encodes a numeric range fan-out: with 'cell' present, a
# plane+cell double range; otherwise a single plane range. One SAM data-tier
# (and, for data, stream-name) option is emitted per expanded module.
703  for outTier in args.outTier:
704
705  try:
706  output = outTier.split(":")[0]
707
708  tier = outTier.split(":")[1]
709
# NOTE(review): bare except hides real errors (e.g. KeyboardInterrupt); an
# IndexError catch would be sufficient. Also the message lacks a space and
# misspells "correctly" ("...outTier + "not formatted corectly...") — fix at
# the real source, not in this generated listing.
710  except:
711  raise ValueError("Output data tier: " + outTier + "not formatted corectly, should be <output_name>:<data_tier>")
712
713  if "," in output:
714  if(re.search('cell',output)):
# Range spec is ",<p1>-<p2>,<c1>-<c2>," style: split out plane and cell ranges.
715  outP, outC = re.findall(',(.+?),', output)
716  outXp, outXc = [re.findall('(.+)-', outP)[0], re.findall('(.+)-', outC)[0]] # p stands for plane, c stands for cell
717  outYp, outYc = [re.findall('-(.+)', outP)[0], re.findall('-(.+)', outC)[0]]
718  for i in range( int(outXp), int(outYp)+1 ):
719  for j in range( int(outXc), int(outYc)+1):
720  NewOMod = re.search('^(.+?),', output).group(1) + repr(i) + re.search('cell',output).group() + repr(j)
721  cmdList.append('--sam-data-tier=' + ":".join([NewOMod, tier]))
722  if not fileMetaDataMgr.isSam4Users():
723  if fileMetaDataMgr.dataFlag == "data":
724  cmdList.append('--sam-stream-name=' + NewOMod + ':' + str(fileMetaDataMgr.stream))
725  else:
# Plane-only range: ",<x>-<y>," appended to the base module name.
726  outX = re.search(',(.+?)-', output).group(1)
727  outY = re.search('-(.+?),', output).group(1)
728  for i in range( int(outX), int(outY)+1 ):
729  NewOMod = re.search('^(.+?),', output).group(1)+repr(i)
730  cmdList.append('--sam-data-tier=' + ":".join([NewOMod, tier]))
731  if not fileMetaDataMgr.isSam4Users():
732  if fileMetaDataMgr.dataFlag == "data":
733  cmdList.append('--sam-stream-name=' + NewOMod + ':' + str(fileMetaDataMgr.stream))
734  else:
735  cmdList.append('--sam-data-tier=' + ":".join([output, tier]))
736  if not fileMetaDataMgr.isSam4Users():
737  if fileMetaDataMgr.dataFlag == "data":
738  cmdList.append('--sam-stream-name=' +output + ':' + str(fileMetaDataMgr.stream))
739
# Derive the output file name for this tier; for text-file definitions the
# run-number span is spliced into the name from the definition file's name.
740  outNameTemp = fileMetaDataMgr.getOutputFileName(tier)
741  if args.txtfiledef:
742  FirstRun = re.search('FirstRun-(.+?)_LastRun', os.path.basename(inFile)).group(1).zfill(8)
743  LastRun = re.search('LastRun-(.+?)_TotFiles', os.path.basename(inFile)).group(1).zfill(8)
# NOTE(review): the "_r0" search plus the hard-coded +14 offset assumes an
# 8-digit zero-padded run field in the template name — confirm against
# getOutputFileName's format before touching this.
744  Index=outNameTemp.find("_r0")
745  outNameTemp=outNameTemp[:Index]+"_r"+FirstRun+"_r"+LastRun+outNameTemp[Index+14:]
746  outName = os.path.basename(outNameTemp)
747
# NOTE(review): this second "," block re-parses the same range spec as lines
# 713-733 to write per-module fileName overrides into the FCL — the regex
# parsing is duplicated and could be factored into a helper at the source.
748  if "," in output:
749  if(re.search('cell',output)):
750  outP, outC = re.findall(',(.+?),', output)
751  outXp, outXc = [re.findall('(.+)-', outP)[0], re.findall('(.+)-', outC)[0]] # p stands for plane, c stands for cell
752  outYp, outYc = [re.findall('-(.+)', outP)[0], re.findall('-(.+)', outC)[0]]
753  for i in range( int(outXp), int(outYp)+1 ):
754  for j in range( int(outXc), int(outYc)+1):
755  NewOMod = re.search('^(.+?),', output).group(1)+repr(i) + re.search('cell',output).group() + repr(j)
756  tier = outTier.split(":")[1]
757  NewOName = outName.replace(str("."+tier), str("-"+NewOMod+"."+tier))
758  fclFileObj.write("\noutputs." + NewOMod + '.fileName: "'+ NewOName + '"\n')
759  outList.append(NewOName)
760  outputs.append(NewOName)
761  else:
762  print("Running in Plane Mode")
763  outX = re.search(',(.+?)-', output).group(1)
764  outY = re.search('-(.+?),', output).group(1)
765  for i in range( int(outX), int(outY)+1 ):
766  NewOMod = re.search('^(.+?),', output).group(1)+repr(i)
767  tier = outTier.split(":")[1]
768  NewOName = outName.replace(str("."+tier), str("-"+NewOMod+"."+tier))
769  fclFileObj.write("\noutputs." + NewOMod + '.fileName: "'+ NewOName + '"\n')
770  outList.append(NewOName)
771  outputs.append(NewOName)
772  else:
773  print("Output file name: ", outName, " for tier ", tier, " and output ", output)
774  fclFileObj.write("\noutputs." + output + '.fileName: "'+ outName + '"\n')
775  outList.append(outName)
776  outputs.append(outName)
777
# For each "<producer_label>:<data_tier>" CAF request, point the CAF producer
# at the derived output name via FCL overrides and record the file for
# copy-out. CAFs are NOT added to outList (they are not art -o outputs).
778  for cafTier in args.cafTier:
779  try:
780  cafLabel = cafTier.split(":")[0]
781  tier = cafTier.split(":")[1]
# NOTE(review): bare except + same "corectly"/missing-space message typo as
# the outTier loop — fix at the real source.
782  except:
783  raise ValueError("Output data tier: " + cafTier + "not formatted corectly, should be <output_name>:<data_tier>")
784
785  cafName = fileMetaDataMgr.getOutputFileName(tier)
786  print("Adding CAF: ", cafLabel, tier, cafName)
787
788  fclFileObj.write("\nphysics.producers." + cafLabel + '.CAFFilename: "' + cafName + '" \n')
789  fclFileObj.write("physics.producers." + cafLabel + '.DataTier: "' + tier + '" \n')
790  outputs.append(cafName)
791
792
# FlatCAF outputs: same "<label>:<tier>" parsing as CAFs, but only the file
# name is recorded for copy-out — the FCL overrides are commented out, so the
# producer is presumably configured elsewhere (TODO confirm).
793  for flatTier in args.flatTier:
794  try:
795  flatLabel = flatTier.split(":")[0]
796  tier = flatTier.split(":")[1]
797  except:
798  raise ValueError("Output data tier: " + flatTier + "not formatted corectly, should be <output_name>:<data_tier>")
799
800  flatName = fileMetaDataMgr.getOutputFileName(tier)
801  print("Adding FlatCAF: ", flatLabel, tier, flatName)
802
803  #fclFileObj.write("\nphysics.producers." + flatLabel + '.OutputName: "' + flatName + '" \n')
804  #fclFileObj.write("physics.producers." + flatLabel + '.DataTier: "' + tier + '" \n')
805  outputs.append(flatName)
806
807
# HDF5 outputs: record "<derived name>.h5" for copy-out. No FCL override is
# written, so the h5 producer's own naming must already match (TODO confirm).
808  for h5Tier in args.h5Tier:
809  try:
810  h5Label = h5Tier.split(":")[0]
811  tier = h5Tier.split(":")[1]
812  except:
813  raise ValueError("Output data tier: " + h5Tier + "not formatted corectly, should be <output_name>:<data_tier>")
814
815  h5Name = fileMetaDataMgr.getOutputFileName(tier)
816  print("Adding H5: ", h5Label, tier, h5Name)
817
818  outputs.append(h5Name+".h5")
819
# LEM server selection: with --lemBalance, alternate between default server
# and lem2 based on parity of PROCESS+CLUSTER; otherwise honor an explicit
# --lemServer. NOTE(review): os.environ["PROCESS"]/["CLUSTER"] raise KeyError
# when run outside the grid — presumably guaranteed by the batch wrapper, but
# verify before running interactively.
820  if args.lemBalance and (int(os.environ["PROCESS"])+int(os.environ["CLUSTER"]))%2==0:
821  fclFileObj.write("physics.producers.lem.WebSettings.Host: \"lem2.hep.caltech.edu\"\n")
822  elif args.lemServer:
823  fclFileObj.write("physics.producers.lem.WebSettings.Host: \"%s\"\n" % args.lemServer)
824
825
# Histogram-tier output: route TFileService to the derived name and record it
# for copy-out. NOTE(review): str() of an argparse string cannot realistically
# raise, so the try/except (and its message) is effectively dead code.
826  if args.histTier:
827  try:
828  tier = str(args.histTier)
829  except:
830  raise Exception("Histogram identifier supplied by --histTier could not be converted to a string.")
831
832  histName = fileMetaDataMgr.getOutputFileName(tier)
833
834  outputs.append(histName)
835  fclFileObj.write("\nservices.TFileService.fileName: " + '"' + histName + '"\n')
836
# All FCL overrides are written; close before nova reads the file.
837  fclFileObj.close()
838
839
840
# Echo the final FCL into the job log for debugging.
# NOTE(review): open() without a with-block leaks the handle until GC; also
# "args.n != None" should be "args.n is not None" (PEP 8) — 0 events would be
# treated the same either way here since the comparison is to None.
841  print("Config: ")
842  for line in open(tmpFclName):
843  print(line.strip())
844
845  if args.n != None:
846  cmdList.append('-n')
847  cmdList.append(str(args.n))
848
# Input handling: for a text-file definition, pre-copy every listed file into
# ./InFiles with ifdh (via samweb2xrootd) and append each successful copy to
# the nova argv; otherwise pass the single input file directly.
849  if args.txtfiledef:
850  print("\nI have a text file definition, InFile is {}".format(inFile))
851  ### Are we streaming the files via xrootd?
852  #txtcmd="cat %s | xargs -n1 samweb2xrootd > xrootd_inFile.txt"%inFile
853  #os.system(txtcmd)
854  #with open("xrootd_inFile.txt") as f:
855  # for line in f:
856  # print line.strip()
857  # cmdList.append( line.strip() )
858  #print ""
859  ### Are we going to copy the files?
860  olddir = os.getcwd()
861  os.system("mkdir InFiles")
# Floats so the success-ratio division below is exact under old_div.
862  allFiles = 0.
863  failFiles = 0.
864  with open(inFile) as f:
865  os.chdir("InFiles")
866  for line in f:
867  allFiles += 1
868  copyfile = "InFiles/%s" %line.strip()
869  print("Now copying",line.strip(),"to ",copyfile)
# NOTE(review): shell command is built by string interpolation from the
# definition file's contents — acceptable only because the input list is
# produced by trusted tooling; a malicious line would be executed verbatim.
870  ifdhcmd = "ifdh cp -D `samweb2xrootd %s` ." %line.strip()
871  print(datetime.datetime.now())
872  ret = os.system( ifdhcmd )
873  if ret == 0:
874  cmdList.append( copyfile )
875  else:
876  failFiles += 1
877  print("Copy in success ratio: " + str(old_div((allFiles-failFiles),allFiles)))
878  os.chdir(olddir)
879  else:
880  cmdList.append(inFile)
881
# Optionally wrap the nova command in gdb so a crash yields a full backtrace
# while still propagating the child's exit status (-return-child-result).
882  if args.gdb:
883  gdbArgs = ["gdb", "-return-child-result", "--ex", "run", "--ex", "bt", "full", "--ex", "q", "--args"]
884  cmdList = gdbArgs + cmdList
885
# cmd is only for display; subprocess.call gets the argv list (no shell).
886  cmd = ' '.join(cmdList)
887
888  print('Running:', cmd)
889  sys.stdout.flush() # flush the stdout buffer before running the nova executable, cleans up output.
890
# With --logs/--zipLogs, capture stdout+stderr to log.txt, then replay it to
# the screen so the condor logs still contain everything.
891  if args.logs or args.zipLogs:
892  with open('log.txt', 'w') as logfile:
893  sys.stderr.write('\nnova command runs here. stderr redirected to stdout\n\n')
894  retCode = subprocess.call(cmdList, stdout=logfile, stderr=subprocess.STDOUT)
895  # Print all the output to the screen as well so that regular condor
896  # logs include it too.
897  with open('log.txt', 'r') as logfile:
898  for line in logfile:
899  print(line, end=' ')
900  if args.zipLogs:
901  os.system('bzip2 -f log.txt')
902  else:
903  retCode = subprocess.call(cmdList)
904
905  ### If using a txtfiledef make sure to clean up the InputFile List....
906  if args.txtfiledef:
907  os.system("rm -rf InFiles")
908
# On failure only a message is printed (logs are not actually copied back);
# on success, resolve the copy-out destination and optionally run a second
# nova configuration over the first pass's PID output.
909  if retCode != 0:
910  print("Want to copy back the logs for this job somehwere....")
911  else:
912
913  # determine output destination
914  if args.copyOut:
915  if args.dest:
916  dest = args.dest
917  elif "DEST" in os.environ:
918  dest = os.environ["DEST"]
919  else:
920  raise Exception("Copy out requested with --copyOut, but no destination supplied. Use --dest or $DEST")
921
# --autoDropbox overrides any destination chosen above.
922  if args.autoDropbox:
923  dest=NGU.get_prod_dropbox()
924  print(("Getting automatic dropbox location", dest))
925  # If the initial job finished successfully, we may want to run another config over the output
926  # copy logic above for executing nova with the second_config instead.
927  if args.second_config:
928  # stage files from first config to be declared
929  # before the second config is run
930  if args.copyOut:
931  # move what we have there
932  checkAndMoveFiles(inFile, outputs, noCleanup=True, copyOnly=True)
933  if args.jsonMetadata:
# NOTE(review): this rendered listing jumps from original line 933 to 935 —
# line 934 (presumably a makeMetadataJSONs(...) call, hyperlinked and dropped
# by the doc generator) is missing here; consult the actual source file.
935  copyOutFiles(dest, args.hashDirs, args.runDirs, fileMetaDataMgr.runNum, args.noCleanup, args.declareLocations, args.declareFiles)
936
937
938  # create a temporary work space so we're careful about overwriting files
939  # from the main step
940  import random
941  import string
942
943  lastdir = os.getcwd()
944  tmpdir = os.path.abspath(os.path.join('./', 'tmp' + ''.join(random.choice(string.ascii_letters) for i in range(8))))
945  os.mkdir(tmpdir)
946  os.chdir(tmpdir)
947  print('Changing to %s' % os.getcwd())
# Rebuild the nova command from scratch for the second configuration, with the
# same SAM bookkeeping and optional gdb wrapping as the first pass.
948  tmpSecondFclName = make_temp_fcl(args.second_config, inFileBase)
949  cmdList = []
950  cmdList.append('nova')
951  cmdList.append('-c')
952  cmdList.append(tmpSecondFclName)
953  if doMeta:
954  cmdList.append('--sam-application-family=nova')
955  cmdList.append('--sam-application-version=' + release)
956  if not fileMetaDataMgr.isSam4Users():
957  cmdList.append('--sam-file-type=' + fileMetaDataMgr.fileType)
958
959  if args.gdb:
960  gdbArgs = ["gdb", "-return-child-result", "--ex", "run", "--ex", "bt", "full", "--ex", "q", "--args"]
961  cmdList = gdbArgs + cmdList
962
963
964  # run second nova executable
# The second pass consumes the 'pid' tier file produced by the first pass in
# lastdir; if absent the second pass is skipped with a warning.
965  inPID= fileMetaDataMgr.getOutputFileName('pid')
966  if not os.path.isfile(os.path.abspath(os.path.join(lastdir, os.path.basename(inPID)))):
967  print('WARNING: Could not find PID file for second configuration fcl. Skipping', inPID)
968  else:
969
970  cmdList.append(os.path.abspath(os.path.join(lastdir, os.path.basename(inPID))))
971  cmd = ' '.join(cmdList)
972
973  print('Running:', cmd)
974  sys.stdout.flush() # flush the stdout buffer before running the nova executable, cleans up output.
975  if args.logs or args.zipLogs:
976  with open(os.path.join(lastdir,'log.txt'), 'w') as logfile:
977  sys.stderr.write('\nsecond nova command runs here. stderr redirected to stdout\n\n')
978  retCode = subprocess.call(cmdList, stdout=logfile, stderr=subprocess.STDOUT)
979
980  # Print all the output to the screen as well so that regular condor
981  # logs include it too.
982  with open(os.path.join(lastdir, 'log.txt'), 'r') as logfile:
983  for line in logfile:
984  print(line, end=' ')
985
986  if args.zipLogs:
# NOTE(review): the log was written to lastdir/log.txt but this bzip2 runs
# with cwd == tmpdir, so it appears to target the wrong path — and it is
# re-zipped again at original line 1024 after chdir back. Verify and fix at
# the real source.
987  os.system('bzip2 -f log.txt')
988
989  else:
990  retCode = subprocess.call(cmdList)
991
992  if retCode == 0:
993  # handle output of second nova executable
994  # don't copy back files that belong to both the 'outputs' list and
995  # the files that were created by the first executable.
996  # Warn the user if this happens
997  _first = set(os.listdir(lastdir))
998  _second = set(os.listdir(tmpdir))
999  _outputs = set(outputs)
1000  _bad = (_first & _second) & _outputs
1001  _good = _second - _bad
1002  if len(_bad) > 0:
1003  print('runNovaSAM.py: [WARNING] First and second processes produced identically named files that among requested outputs. Ignoring file produced by the second and copying out only the first')
1004  for b in _bad: print(b)
1005  for g in _good:
1006  print('Change ', os.path.join(tmpdir, os.path.basename(g)), ' to ', os.path.join(lastdir, os.path.basename(g)))
1007  os.rename(os.path.join(tmpdir, os.path.basename(g)), os.path.join(lastdir, os.path.basename(g)))
1008  else:
1009  sys.stderr.write('\nSecond nova command failed with exit code %d' % retCode)
1010  # remove temp directory
1011  if not args.noCleanup:
1012  # we've moved all of the files we care about at this point.
1013  # remove all contents of tmpdir so we can remove the directory
1014  # itself too
1015  for f in os.listdir(tmpdir):
1016  os.remove(os.path.join(tmpdir, f))
1017  os.rmdir(tmpdir)
1018
1019  # go back to previous working directory
1020  os.chdir(lastdir)
1021
1022  # wait until after we possibly run a second config to zip the output up
1023  if args.zipLogs:
1024  os.system('bzip2 -f log.txt')
1025
1026
# Optionally reduce any *caf.root output into analysis-specific deCAFs; each
# makeDeCAF call returns the reduced file's name, which joins the copy-out set.
1027  if args.outputNumuDeCAF or args.outputNueDeCAF or args.outputNumuOrNueDeCAF or args.outputNusDeCAF or args.outputValidationDeCAF:
1028  decafdir = os.listdir(".")
1029  for fname in decafdir:
1030  if fname.endswith("caf.root"):
1031  if args.outputNumuDeCAF:
1032  outputs.append(makeDeCAF('numu/FirstAnalysis/reduce_numu_fa.C',fname,'numu_contain'))
1033  if args.outputNueDeCAF:
1034  outputs.append(makeDeCAF('nue/reduce_nue_sa.C',fname,'nue_contain'))
1035  if args.outputNumuOrNueDeCAF:
1036  outputs.append(makeDeCAF('nue/reduce_nue_or_numu_sa.C',fname,'nue_or_numu_contain'))
1037  if args.outputNusDeCAF:
1038  outputs.append(makeDeCAF('nus/reduce_nus.C',fname,'nus_contain'))
1039  if args.outputValidationDeCAF:
1040  outputs.append(makeDeCAF('nus/reduce_nue_or_numu_or_nus.C',fname,'nue_or_numu_or_nus_contain'))
1041  print("\nAt the start of check and move.")
1042  checkAndMoveFiles(inFile, outputs, args.noCleanup)
1043
1044
1045  if args.copyOut:
1046  if args.jsonMetadata:
1047  print("\nMake JSONs")
# NOTE(review): the rendered listing jumps from original line 1047 to 1049 —
# line 1048 (presumably the makeMetadataJSONs(...) call, hyperlinked and
# dropped by the doc generator) is missing here; consult the actual source.
1049  print("\nMade JSONs, now to copy files.")
1050
1051  copyOutFiles(dest, args.hashDirs, args.runDirs, fileMetaDataMgr.runNum, args.noCleanup, args.declareLocations, args.declareFiles)
1052  print("Copied files.")
1053  else:
1054  #job didn't succeed, remove output files if they exist
1055  for file in outList:
1056  try:
1057  os.remove("./" + file)
1058  except OSError:
1059  pass
1060
1061  #clean up section
# Remove the temp FCL, the input file, and any leftover RootOutput*.root files
# matched by the module-level _skip_pattern, then release ifdh resources.
1062  if not args.noCleanup:
1063  os.remove(tmpFclName)
1064  os.remove(inFile)
1065  dirList = os.listdir(".")
1066  for file in dirList:
1067  skip_match = _skip_pattern.match(file)
1068  if skip_match != None:
1069  print(file, "contains RootOutput*.root: clean up")
1070  os.remove("./" + file)
1071
1072  dh = ifdh.ifdh("http://samweb.fnal.gov:8480/sam/nova/api")
1073  dh.cleanup()
1074
# Exit with the (last) nova return code so the batch system sees failures.
1075  exit(retCode)
def makeMetadataJSONs()
Definition: runNovaSAM.py:308
void split(double tt, double *fr)
def copyOutFiles(hashDirs=False)
Definition: runNovaSAM.py:149
def makeDirSafely(dir)
Definition: runNovaSAM.py:85
def checkAndMoveFiles(inFile, declareFiles)
Definition: runNovaSAM.py:105
def resolveFclPath(fcl)
Definition: runNovaSAM.py:188
void append()
Definition: append.C:24
def getOutDir(pathname, hashDirs=False)
Definition: runNovaSAM.py:92
if(dump)
bool print
def MakeTransposeJson(TransposeList)
Definition: runNovaSAM.py:375
std::string format(const int32_t &value, const int &ndigits=8)
Definition: HexUtils.cpp:14
procfile open("FD_BRL_v0.txt")
def createMetadata(inFile)
cet::coded_exception< errors::ErrorCodes, ExceptionDetail::translate > Exception
Definition: Exception.h:66
def addMetadataToFCL(fclFile, parName, parValue)
exit(0)
def listFiles(outPath, dh)
Definition: runNovaSAM.py:301
def make_temp_fcl(fclFile, inFileBase)
Definition: runNovaSAM.py:39
def setVMemLimit()
Definition: runNovaSAM.py:77
def fileExists(outPath, dh)
Definition: runNovaSAM.py:289
def makeDirIfNeeded(dir)
Definition: runNovaSAM.py:105
def declareFile(fileWPath)
Definition: runNovaSAM.py:127
def declareLogFile(logName, logFile, rootName)
Definition: runNovaSAM.py:270
def makeDeCAF(script, fname, special)
Definition: runNovaSAM.py:522