runNovaSAM.py
Go to the documentation of this file.
1 #!/bin/env python
2 
3 import os, sys
4 import shutil
5 import samweb_client, ifdh
6 import pprint
7 import argparse
8 import subprocess
9 import re
10 import md5
11 import json
12 import resource
13 import string
14 import datetime
15 from string import atoi
16 from samweb_client.utility import fileEnstoreChecksum
17 from samweb_client.exceptions import *
18 
19 import MetadataUtils
20 import NovaGridUtils as NGU
21 
## Do a little bit of trickery with sys.argv to stop ROOT from gobbling it up and killing --help
argvCopy = sys.argv[:] # Make a copy
sys.argv = sys.argv[:1] # Replace it with just the first argument, i.e. script name
import ROOT
# The weird thing is that you have to do something with ROOT in order to make it parse args. Ok, we would have done this anyway.
ROOT.gErrorIgnoreLevel=3000 # Stop ROOT from complaining about not having dictionaries, 3000 is below kError but above kWarning.
sys.argv = argvCopy # Restore sys.argv. Thanks Wim, that's a real "solution"!

# Files whose names match this pattern (art RootOutput temporaries) are
# skipped during copy-out and metadata extraction below.
_skip_pattern = re.compile(r"^.*(RootOutput).*\.root", re.IGNORECASE)
32 
def make_temp_fcl(fclFile, inFileBase):
    """Make a job-local copy of the FCL and append job metadata to it.

    fclFile    -- FCL name or path, resolved via resolveFclPath().
    inFileBase -- basename of the input file; spliced into the copy's name
                  so concurrent jobs don't clobber each other.
    Returns the name of the temporary FCL copy (in the current directory).

    NOTE(review): reads the module-level globals `fileMetaDataMgr` and `args`
    that are set up in __main__ — this function cannot be called before them.
    """
    # make a temporary copy of the fcl file
    fclPath = resolveFclPath(fclFile)
    print "Found fcl file here: ", fclPath
    tmpFclName = os.path.basename(fclPath).replace(".fcl", "_" + os.path.splitext(inFileBase)[0] + ".fcl")
    print "Creating local copy : ", tmpFclName
    shutil.copy(fclPath, tmpFclName)

    # determine if we're using the metadata module
    # add parameters that describe this job
    # NOTE:
    # fhicl-expand (and other FCL tools) don't allow you
    # to give the absolute path to a FCL. Instead FCLs have to
    # live in $FHICL_FILE_PATH. $FHICL_FILE_PATH always begins
    # with './:', i.e., search current dir first.
    # so just make sure you don't cd() away from this dir
    # between when the FCL is copied to this dir above and the check below.
    isRunningMetadataModule=True
    try:
        # grep -q exits non-zero when no metadata-module params are present.
        subprocess.check_call("fhicl-expand %s | grep -q ^physics.analyzers.metadata.params." % tmpFclName, shell=True)
    except subprocess.CalledProcessError:
        isRunningMetadataModule=False

    if isRunningMetadataModule:
        # Append NOVA.* metadata keys so the metadata module records them.
        with open(tmpFclName, 'a') as fclFileObj:
            if not fileMetaDataMgr.isSam4Users():
                if args.npass != None:
                    MetadataUtils.addMetadataToFCL(fclFileObj, "NOVA.subversion", '"' + fileMetaDataMgr.subversion + '"')

                if fileMetaDataMgr.dataFlag == "sim" and fileMetaDataMgr.generator in MetadataUtils.neutrinoGenerators:
                    MetadataUtils.addMetadataToFCL(fclFileObj, "NOVA.flux_version", '"' + fileMetaDataMgr.fluxVersion + '"')

                MetadataUtils.addMetadataToFCL(fclFileObj, "NOVA.skim", '"' + fileMetaDataMgr.skim + '"')
                MetadataUtils.addMetadataToFCL(fclFileObj, "NOVA.systematic", '"' + fileMetaDataMgr.systematic + '"')
                MetadataUtils.addMetadataToFCL(fclFileObj, "NOVA.Special", '"' + fileMetaDataMgr.special + '"')

    return tmpFclName
70 
72  #memLimit = 3.9*1024**3
73  memLimit = 3.9*1000**3
74  print "Old virtual memory limit:", resource.getrlimit(resource.RLIMIT_AS)
75  print "Limiting the virtual memory of the nova job to 3.9 GB"
76  resource.setrlimit(resource.RLIMIT_AS, (memLimit, memLimit))
77  print "New virtual memory limit:", resource.getrlimit(resource.RLIMIT_AS)
78 
79 def makeDirSafely(dir):
80  if "/pnfs" != dir[0:5]:
81  makeDirIfNeeded(dir)
82  return
83 
84  print "runNovaSAM is making a directory with IFDH"
85  dh = ifdh.ifdh("http://samweb.fnal.gov:8480/sam/nova/api")
86  try:
87  print "Checking if directory ", dir, "exists"
88  dh.ls(dir, 1, "")
89  dh.chmod(774,dir,"")
90 
91  except (RuntimeError, IOError) as e:
92  try:
93  print "It doesn't - make directory", dir
94  dh.mkdir(dir)
95  except:
96  print "Tried to make directory and couldn't. Perhaps it already exists?"
97 
98 
99 def makeDirIfNeeded(dir):
100  os.umask(002) # (rw-rw-r--) for files and (rwxrwxr-x) for directories.
101  if not os.path.isdir(dir):
102  print "runNovaSAM is making a directory: ", dir
103  try:
104  os.mkdir(dir)
105  except:
106  print "Couldn't make the directory... some other job perhaps did, or permissions did not allow "
107  if not os.path.isdir(dir):
108  raise Exception("Failed to make directory + " + dir )
109 
110 
def getOutDir(pathname, dest, hashDirs=False, runDirs=False, runNum=0):
    """Assemble the destination directory for an output file.

    pathname -- output file name (only its basename is used for hashing).
    dest     -- base destination directory.
    hashDirs -- append three single-hex-character subdirectories taken from
                the MD5 of the basename, spreading files over 4096 dirs.
    runDirs  -- append <first-3-run-digits zero-padded to 6>/<run number>
                subdirectories, creating them via makeDirSafely.
    Returns the joined path (directories are only created in runDirs mode).
    """
    import hashlib  # replaces the deprecated Python-2-only `md5` module
    dirs = [dest]
    if hashDirs:
        head, tail = os.path.split(pathname)
        # hashlib.md5(...).hexdigest() is identical to the old md5.new(...).
        digest = hashlib.md5(tail.encode()).hexdigest()
        dirs += list(digest[:3])
    if runDirs:
        head, tail = os.path.split(pathname)
        runStr = str(runNum)
        multiRunDir = runStr[:3].zfill(6)
        makeDirSafely(os.path.join(dest, multiRunDir))
        makeDirSafely(os.path.join(dest, multiRunDir, runStr))
        dirs += [multiRunDir, runStr]
    return os.path.join(*dirs)
125 
def checkAndMoveFiles(inFile, outputs, noCleanup=False, copyOnly=False):
    """Checks all root files in the current directories for zombie or recovered status. Bad files are deleted while good files are moved to the results subdirectory for copy out ease

    inFile   -- the job's input file; never moved or deleted.
    outputs  -- whitelist of file names; anything else (.root/.h5) is removed
                unless noCleanup is set.
    copyOnly -- copy into ./results instead of renaming (original kept).
    """
    makeDirIfNeeded('./results')
    inFileBase = os.path.basename(inFile)
    baseDir = "."

    print 'Looking for requested outputs: '
    for o in outputs: print o

    # If declaring files have to do non-decafs first, since they're the parents
    # of the decafs. If not, it doesn't hurt to do it in that order anyway.
    for secondPass in [False, True]:
        for root, dirs, filenames in os.walk(baseDir):
            if root == baseDir:
                for file in filenames:
                    # Pass 1 handles non-decaf files, pass 2 the decafs.
                    if file.endswith('decaf.root') != secondPass: continue
                    # If file not in outputs, then delete.
                    if (file.endswith (".root") or file.endswith(".h5")) and file != inFileBase:
                        fileWPath = os.path.join(root, file)
                        # If the file isn't in my outputs then delete it.
                        if file not in outputs:
                            if not noCleanup:
                                # Do not copy over, just delete
                                print "File", fileWPath, " is not among requested outputs, removing"
                                os.remove(fileWPath)
                            continue
                        # Now that I know that this file is in my output list, lets check it is valid...
                        # First, if it is a root file.
                        if file.endswith (".root"):
                            print "In checkAndMoveFiles, fileWPath is %s" %fileWPath
                            rootFile = ROOT.TFile(fileWPath)
                            # Zombie or recovered files are corrupt: delete them.
                            if rootFile.IsZombie() or rootFile.TestBit(ROOT.TFile.kRecovered):
                                #do not copy over, just delete
                                print "File", fileWPath, "is Zombie - remove it"
                                os.remove(fileWPath)
                            else:
                                newFilePath = os.path.join(root, "results", file)
                                print "New file name is %s" %newFilePath
                                if copyOnly:
                                    shutil.copyfile(fileWPath, newFilePath)
                                else:
                                    os.renames(fileWPath, newFilePath)
                            rootFile.Close()
                        # Next, if it is a h5 file.
                        elif file.endswith(".h5"):
                            # Only import h5py if have h5 files.
                            import h5py
                            print "In checkAndMoveFiles, fileWPath is %s" %fileWPath
                            # If a valid HDF5 files.
                            if h5py.is_hdf5(fileWPath):
                                newFilePath = os.path.join(root, "results", file)
                                print "New file name is %s" %newFilePath
                                if copyOnly:
                                    shutil.copyfile(fileWPath, newFilePath)
                                else:
                                    os.renames(fileWPath, newFilePath)

                            # If not a valid HDF5 file.
                            else:
                                print "File", fileWPath, "is Zombie - remove it"
                                os.remove(fileWPath)
    return
188 
189 #def h5MetaHack(inFile):
190 # print "Doing a hack to get HDF5 metadata...Will take CAF metadata and change subtly."
191 # ### Make my client and get my CAF metadata
192 # samweb = samweb_client.SAMWebClient(experiment='nova')
193 # md = MetadataUtils.createMetadata( inFile.replace('.h5caf.h5', '.caf.root') )
194 # ### Change some parameters.
195 # md['file_name'] = inFile
196 # md['data_tier'] = unicode('h5')
197 # md['file_size'] = os.path.getsize( inFile )
198 # return md
199 
def declareFile(fileWPath):
    """Checks file for a TKey of RootFileDB. If it exists, run sam_metadata_dumper and construct appropriate metadata for the file. Use that metadata to declare the file to SAM

    CAF (*caf.root) and .h5 files are declared even without a RootFileDB key.
    Temporarily chdir()s into the file's directory (restored before return).
    NOTE(review): reads the module-level globals `args` and `inFile` set in
    __main__ for the --txtfiledef parentage hack.
    """
    samweb = samweb_client.SAMWebClient(experiment='nova')

    filename = os.path.basename(fileWPath)
    print filename

    rootFileDB = False
    if filename.endswith(".root"):
        rootFile = ROOT.TFile(fileWPath)
        rootFileDB = rootFile.FindKey("RootFileDB")
    else:
        rootFile = None

    olddir = os.getcwd()
    os.chdir(os.path.dirname(fileWPath))
    if rootFileDB or filename.endswith("caf.root") or filename.endswith(".h5"):
        md = MetadataUtils.createMetadata(filename)
        # Check that md exists, and then declare!
        if md == None:
            print "No metadata found!"
        else:
            # If I have a transpose file want to add plane (and cell) number.
            if "cell" in filename:
                try:
                    plane = re.search('^.*outplane(\d*).', filename).group(1)
                    cell = re.search('^.*cell(\d*).', filename).group(1)
                    print "Plane number:", plane, "Cell number:", cell, ". Is a transpose file"
                    md['Calibration.PlaneNumber'] = plane
                    md['Calibration.CellNumber'] = cell
                except:
                    print "No cell number found - could be a plane mode transpose file"

            elif "outplane" in filename:
                print filename
                try:
                    plane = re.search('^.*outplane(\d*).', filename).group(1)
                    print "Plane number:", plane, ". Is a transpose file"
                    md['Calibration.PlaneNumber'] = plane
                except:
                    print "No plane number found - not a transpose file"
            ### Make sure that the txtfile is in the parent list.
            if args.txtfiledef:
                md['parents'].append({u'file_name':unicode(inFile)})

            # Print out the metadata before trying to declare
            pprint.pprint(md)
            print ''
            print "Declaring", fileWPath, "to SAM"
            try:
                samweb.declareFile(md)
                #samweb.validateFileMetadata(md=md)
            except Exception as inst:
                # Typically means the file is already declared to SAM.
                #print fileWPath, "already exists in SAM"
                print inst
    else:
        print fileWPath, "does not contain RootFileDB, do not try to declare"

    if rootFile:
        rootFile.Close()
    os.chdir(olddir)
    return
262 
263 
264 def declareLogFile(logName, logFile, rootName):
265  # This is what NovaFTS/plugins/nova_log_metadata.py does
266  md = {
267  'file_name': os.path.basename(logName),
268  'file_size': os.path.getsize(logFile),
269  'data_tier': 'log',
270  'file_format': 'log',
271  'file_type': 'unknown', # maybe 'nonPhysicsGeneric'?
272  'parents': [{'file_name': os.path.basename(rootName)}]
273  }
274  print "16) declareLogFile(logName, logFile, rootName):"
275  print "Declaring", logName, "to SAM"
276  try:
277  samweb = samweb_client.SAMWebClient(experiment='nova')
278  samweb.declareFile(md)
279  except Exception as inst:
280  print inst
281 
282 
def fileExists(outPath, dh):
    """Return True if IFDH can list outPath, False otherwise.

    ifdh 1_7_0+ returns a one-entry tuple for an existing path and an empty
    tuple otherwise; some versions instead raise when the path is missing,
    which we likewise treat as "not there".
    """
    try:
        listing = dh.ls(outPath, 1, "")
    except (RuntimeError, IOError) as e:
        # Some versions of ifdh throw an exception when the file was not found
        # But that means it is not there.
        return False
    return bool(listing)
294 
def listFiles(outPath, dh):
    """Return the IFDH listing of outPath, or a diagnostic string on failure.

    outPath -- path to list.
    dh      -- ifdh handle providing ls(path, depth, opts).
    """
    try:
        return dh.ls(outPath, 1, "")
    # Bug fix: narrowed from a bare `except` (which also caught
    # KeyboardInterrupt/SystemExit).
    except Exception:
        # Bug fix: a stray comma made the original return a
        # ("message", outPath) TUPLE instead of a single message string.
        return "Exception while trying to ls, OutPath = %s" % outPath
300 
301 
303  """For every ROOT file, try to extract its metadata into a matching .json
304  file in the same directory"""
305  baseDir = "./results"
306  # If using --txtfiledef, want to pass multiple files at a time to sam_meta_dumper.
307  # This is because it simply takes too long to call it for each indiv file
308  # when there are 800+ outputs. 5 s each -> ~1 hour!
309  TransposeList = ""
310  # Declare the samweb client within this function
311  samweb = samweb_client.SAMWebClient(experiment='nova')
312  # Loop through directories to search for files to make json files for.
313  for root, dirs, filenames in os.walk(baseDir):
314  if root == baseDir:
315  # Push the h5 files to the front of the list so that the CAF files remain
316  # available to hack in the metadata.
317  for ifile in range(len(filenames)):
318  if("h5" in filenames[ifile]):
319  filenames = [filenames[ifile]] + filenames[:ifile] + filenames[ifile+1:]
320  for file in filenames:
321  if (file.endswith (".root") or file.endswith(".h5") ) and file != inFileBase:
322  skip_match = _skip_pattern.match(file)
323  if skip_match == None:
324  # Set some quick and useful variables.
325  olddir = os.getcwd()
326  fileWPath = os.path.join(root, file)
327  # If a transpose file want to make the json subtly differently.
328  if args.txtfiledef and "outplane" in file:
329  # This works for the cellmode of the transposer as well, due to the filename including outplane and cell.
330  print "Adding %s to TransposeList" %file
331  TransposeList += "%s " %file
332  continue
333  # Which extractor am I using?
334  extractor = 'sam_metadata_dumper'
335  if file.endswith('caf.root'):
336  extractor = 'extractCAFMetadata'
337  elif file.endswith('.h5'):
338  extractor = 'extractHDF5Metadata'
339  try:
340  # sam_metadata_dumper doesn't apply basename() to filename. https://cdcvs.fnal.gov/redmine/issues/8987
341  os.chdir(os.path.dirname(fileWPath))
342  meta = subprocess.check_output([extractor, os.path.basename(fileWPath)])
343  if file.endswith (".root"):
344 
345  jsonf = open(file.replace('.root', '.json'), 'w')
346  jsonf.write(meta)
347  jsonf.close()
348  print "Made metadata for %s" %file
349  elif file.endswith(".h5"):
350  print "\nNow to make my json file for my h5...\n"
351  jsonf = open(file.replace('.h5caf.h5', '.h5caf.json'), 'w')
352  jsonf.write(meta)
353  jsonf.close()
354  print "Made metadata for %s" %file
355  else:
356  print "I'm not sure what file extension you have..."
357  except:
358  print "Error extracting metadata from file."
359  finally:
360  os.chdir(olddir)
361  # Make the Transpose json files.
362  # Again same argument, outplane is already in the filenames for cell mode
363  if args.txtfiledef and "outplane" in file:
364  olddir = os.getcwd()
365  os.chdir(baseDir)
366  MakeTransposeJson( TransposeList )
367  os.chdir(olddir)
368 
def MakeTransposeJson( TransposeList ):
    """Transpose files need some extra tweaking when making .json files, largely because there are so many of them.
    This takes in a list of files, and makes appropriate .json files in the same directory"""
    # If using --txtfiledef, I can now pass my file list to sam_meta_dumper.
    # One dumper call for the whole list: per-file calls take ~5 s each,
    # which is far too slow for 800+ transpose outputs.
    print "Is MakeTransposeJson called without a txt def, if you see this then yes."
    MetaListFile="AllMetaJson.txt"
    meta_cmd="sam_metadata_dumper -s " + TransposeList + " > " + MetaListFile
    os.system(meta_cmd)
    # Now want to open the file and split by "}," character.
    MetaFile = open( MetaListFile )
    MetaLines = MetaFile.read().split(" },")
    # Loop through lines, and appropriately separate out json files.
    for i in range(0,len( MetaLines ) ):
        meta=MetaLines[i]
        # Figure out file name...this is assuming that file name is always the first entry...
        StName=re.search('"(.+?).root"', meta ).group(1)
        filename=StName+".json" # Effectively replacing .root with .json
        # If transpose file add PlaneNumber and CellNumber if run in cell mode
        if "cell" in filename:
            try:
                plane = re.search('^.*outplane(\d*).', filename).group(1)
                cell = re.search('^.*cell(\d*).', filename).group(1)
                print "Plane number:", plane, "Cell number:", cell, ". Is a transpose file"
                # FIXME(review): the '%' is applied to the result of replace(),
                # i.e. the ENTIRE metadata string, not just the inserted
                # template — any other '%' in the metadata text will make this
                # raise or misformat. The '% (plane, cell)' should be inside
                # the replace() call, applied to the template string.
                meta = meta.replace('"calibration.base_release"', '"calibration.PlaneNumber": "%s",\n "calibration.CellNumber": "%s",\n "calibration.base_release"')%(plane, cell)
            except:
                print "No cell number found - could be a plane mode transpose file"

        elif "outplane" in filename:
            try:
                plane = re.search('^.*outplane(\d*).', filename).group(1)
                print "Plane number:", plane, ". Is a transpose file"
                # FIXME(review): same whole-string '%' formatting caveat as the
                # cell-mode branch above.
                meta = meta.replace('"calibration.base_release"', '"calibration.PlaneNumber": "%s",\n "calibration.base_release"') %plane
            except:
                print "Error extracting plane number from transpose file."

        ### Make sure that the txtfile is in the parent list.
        # FIXME(review): `meta` is a str at this point, not a dict — this line
        # raises TypeError at runtime. The parent entry would need to be
        # spliced into the JSON text (or the text json.loads'ed first).
        # `inFile` is also a module-level global set in __main__.
        meta['parents'].append({u'file_name':unicode(inFile)})

        # Now open the json file
        fout=open(filename,'w')
        # Want to make sure that the json starts with '{'
        if meta[0] not in "{":
            meta = meta[:0] + '{\n' + meta[1:]
        # Want to make sure that the json ends with a double '}'
        if i < len(MetaLines)-1:
            meta += "}\n}\n"
        # Write and close the json file
        fout.write(meta)
        fout.close()
418 
def copyOutFiles(dest, hashDirs=False, runDirs=False, runNum=0, noCleanup=False, declareLocation=False, declareLogs=False):
    """Builtin facility to copy out art files. This adds in a subdirectories with a single hex digit each corresponding to the first three digits in the hash of the output file name. This splits up files into 4096 separate subdirectories, preventing overfull directories. Copy out does not happen if the file already exists in the output

    Walks ./results, copies .root/.h5 outputs (root before h5), then their
    .json metadata and the job log. NOTE(review): reads the module-level
    global `inFileBase` set in __main__.
    """
    dh = ifdh.ifdh("http://samweb.fnal.gov:8480/sam/nova/api")
    baseDir = "./results"
    declareFiles = declareLogs
    for root, dirs, filenames in os.walk(baseDir):
        if root == baseDir:

            # copy out root files before h5 files
            ordered_files = [s for s in filenames if ".root" in s]
            for s in filenames:
                if ".h5" in s:
                    ordered_files.append(s)

            for file in ordered_files:
                if (file.endswith (".root") or file.endswith(".h5") ) and file != inFileBase:
                    fileWPath = os.path.join(root, file)
                    outDir = getOutDir(file, dest, hashDirs, runDirs, runNum)

                    skip_match = _skip_pattern.match(file)
                    if skip_match == None:
                        outPath = os.path.join(outDir, file)

                        if fileExists(outPath, dh):
                            print 'copyOutFiles: ', outPath, 'already moved. Skipping'
                        else:
                            # note: this will fail if the file already exists
                            returnValue = dh.cp(["-D", fileWPath, outDir])
                            if returnValue != 0:
                                print >> sys.stderr, "Copy out failed for file:", fileWPath
                                print >> sys.stderr, "Skipping it."
                            else:
                                if declareFiles:
                                    declareFile(fileWPath)
                                ###################
                                # Declare the file's location to SAM if we have the declareLocation option on
                                if declareLocation==True :
                                    # s3:// must be collapsed to s3:/ for SAM location strings.
                                    loc = string.replace(outDir, 's3://','s3:/')
                                    print "Declaring location %s for file %s\n" % (loc,file)
                                    sam = samweb_client.SAMWebClient('nova')
                                    ret=sam.addFileLocation(file, loc)
                                    if ret.status_code != 200 :
                                        print " SAMWEB Unable to declare file location (%s, %s) status code %s" %(file, loc, ret.status_code)
                                # Copy the matching .json metadata file, if one was made.
                                if fileWPath.endswith (".root"):
                                    jsonPath = fileWPath.replace('.root', '.json')
                                elif fileWPath.endswith (".h5"):
                                    # NOTE(review): strips '.h5' then appends '.json'
                                    # (x.h5caf.h5 -> x.h5caf.json); the cleanup branch
                                    # below instead uses fileWPath + '.json' — the two
                                    # naming schemes are inconsistent. Confirm which one
                                    # the json maker actually produces.
                                    jsonPath = fileWPath[:-3] + '.json'
                                if os.path.isfile(jsonPath):
                                    if fileExists(os.path.join(outDir, os.path.basename(jsonPath)), dh):
                                        print 'copyOutFiles: ', os.path.join(outDir, os.path.basename(jsonPath)), 'already moved. Skipping'
                                    else:
                                        returnValue = dh.cp(['-D', jsonPath, outDir])
                                        if returnValue != 0:
                                            print >> sys.stderr, "Copy out failed for file: " + jsonPath
                                            print >> sys.stderr, "Skipping it."
                                else:
                                    print('JSON not found %s' % jsonPath)

                                # Copy the job log (compressed preferred), once per data file.
                                for ext in ['.bz2', '']:
                                    if os.path.isfile('log.txt'+ext):
                                        if file.endswith (".root"):
                                            logName = file.replace('.root', '.log'+ext)
                                        elif file.endswith (".h5"):
                                            logName = file + '.log'+ext
                                        if fileExists(os.path.join(outDir,logName), dh):
                                            print 'copyOutFiles: ', os.path.join(outDir, logName), 'already moved. Skipping'
                                        else:
                                            returnValue = dh.cp(['log.txt'+ext, os.path.join(outDir, logName)])

                                            if returnValue != 0:
                                                print >> sys.stderr, "Copy out failed for file: " + logName
                                                print >> sys.stderr, "Skipping it."

                                            if declareLogs:
                                                declareLogFile(logName, 'log.txt'+ext, file)

                                            # Remove the copied-out log so it's not in
                                            # the way for new log creation.
                                            os.remove('log.txt'+ext)

                                        break

                    else:
                        # File matched the RootOutput skip pattern.
                        print "It does exist, not copying."
                        if not noCleanup:
                            print "Removing", fileWPath
                            os.remove(fileWPath)

                            if fileWPath.endswith(".root"):
                                jsonPath = fileWPath.replace('.root', '.json')
                            elif fileWPath.endswith(".h5"):
                                jsonPath = fileWPath + ".json"
                            if os.path.isfile(jsonPath):
                                print 'Removing', jsonPath
                                os.remove(jsonPath)
    return
515 
def makeDeCAF(script, fname, special):
    """Run a CAFAna decaf script over `fname` and return the decaf file name.

    The output name inserts 'de' into the input's data tier and appends
    `special` (e.g. x.caf.root + 'sp' -> x.caf_sp.decaf.root). The NOvA
    source tree is located via $SRT_PUBLIC_CONTEXT, falling back to
    $NOVASOFT_DIR/source; NGU.fail() is called if neither is set.
    """
    stem = fname[:-5]                                 # drop the trailing '.root'
    tierStart = stem.rindex('.') + 1                  # start of the data tier
    decafTier = stem[tierStart:-3] + 'de' + stem[-3:] # e.g. 'caf' -> 'decaf'
    outName = '{0}_{1}.{2}.root'.format(stem, special, decafTier)

    srcDir = os.getenv("SRT_PUBLIC_CONTEXT", "undefined")
    if srcDir == "undefined":
        srcDir = os.getenv("NOVASOFT_DIR", "undefined")
        if srcDir == "undefined":
            NGU.fail("Unable to locate NOvA source code")
        else:
            srcDir = os.getenv("NOVASOFT_DIR") + "/source"

    os.system('cafe -bq ' + srcDir + '/CAFAna/' + script + ' ' + fname + ' ' + outName + ' 2>&1')
    return outName
531 
def resolveFclPath(fcl):
    """Resolve `fcl` to a usable path.

    Absolute paths are returned unchanged; otherwise each entry of
    $FHICL_FILE_PATH is searched in order and the first hit is returned.
    Raises IOError when the file is found nowhere on the search path.
    """
    # Absolute path: nothing to search for.
    if fcl[0] == "/":
        return fcl

    # Walk the search path; the first directory containing the file wins.
    for searchDir in os.environ["FHICL_FILE_PATH"].split(":"):
        # A doubled "/" is harmless if searchDir already ends with one.
        candidate = searchDir + "/" + fcl
        if os.path.isfile(candidate):
            return candidate

    # If we haven't found it, we have a problem.
    raise IOError(sys.argv[0] + ": config file "+ fcl+" not found in FHICL_FILE_PATH")
547 
548 
549 if __name__=='__main__':
550 
551  parser = argparse.ArgumentParser(description='Run the nova command using SAM metadata')
552  parser.add_argument('inFile', help='The input file to run over', type=str)
553  parser.add_argument('--config', '-c', help='FHiCL file to use as configuration for nova executable', type=str)
554  parser.add_argument('--outTier', help="""
555  Data tier of the output file, multiple allowed, formatted as
556  <name_in_fcl_outputs>:<data_tier>.' Optionally, if a second colon is
557  included, the third argument will be treated as an additional naming string,
558  allowing multiple outputs with the same data_tier but unique file names.
559  Example: out1:reco:shifted leads to <file_id>_shifted.reco.root
560  """, type=str, action='append')
561  parser.add_argument('--cafTier', help="""Module label for CAF output,
562  multiple allowed. Format as <cafmaker_module_label>:<data_tier>.
563  Optionally, if a second colon is
564  included, the third argument will be treated as an additional naming string,
565  allowing multiple outputs with the same data_tier but unique file names.
566  Example: cafmaker:caf:shifted leads to <file_id>_shifted.caf.root
567  """, type=str, action='append')
568  parser.add_argument('--flatTier', help="""Module label for FlatCAF output,
569  multiple allowed. Format as <flatmaker_module_label>:<data_tier>.
570  Optionally, if a second colon is
571  included, the third argument will be treated as an additional naming string,
572  allowing multiple outputs with the same data_tier but unique file names.
573  Example: flatmaker:flatcaf:shifted leads to <file_id>_shifted.flatcaf.root
574  """, type=str, action='append')
575  parser.add_argument('--histTier', help='File identifier string for TFileService output, only one allowed. Supply as --histTier <id> for output_name.<id>.root, where output_name is assembled based on the input file.', type=str)
576  parser.add_argument('--h5Tier', help="""Module label for H5 output,
577  multiple allowed. Format as <h5maker_module_label>:<data_tier>.
578  Optionally, if a second colon is
579  included, the third argument will be treated as an additional naming string,
580  allowing multiple outputs with the same data_tier but unique file names.
581  Example: h5maker:h5:shifted leads to <file_id>_shifted.h5
582  """, type=str, action='append')
583  parser.add_argument('--outputNumuDeCAF', help='Make standard numu decafs for all CAF files produced', action='store_true')
584  parser.add_argument('--outputNueDeCAF', help='Make standard nue decafs for all CAF files produced', action='store_true')
585  parser.add_argument('--outputNumuOrNueDeCAF', help='Make standard numu or nue decafs for all CAF files produced', action='store_true')
586  parser.add_argument('--outputNusDeCAF', help='Make standard nus decafs for all CAF files produced', action='store_true')
587  parser.add_argument('--outputValidationDeCAF', help='Make validation (nue_or_numu_or_nus) decafs for all CAF files produced during the job', action='store_true')
588  parser.add_argument('--cosmicsPolarity', help='.', type=str)
589  parser.add_argument('--npass', help='.', type=str)
590  parser.add_argument('--skim', help='Specify skimming name.', type=str)
591  parser.add_argument('--systematic', help='Flag as systematic variation (append to file name and metadata parameters).', type=str)
592  parser.add_argument('--specialName', help='Additional name to add before data tier in output.', type=str)
593  parser.add_argument('--genietune', help='Specify the GENIE tune (append to file name and metadata parameters).', type=str)
594  parser.add_argument('--NPPFX', help='Number of PPFX universes.', type=str)
595  parser.add_argument('-n', help='Number of events to run over', type=int)
596  parser.add_argument('--copyOut', help='Use the built in copy out mechanism', action='store_true')
597  parser.add_argument('--dest', '-d', help='Output file destination for --copyOut functionality.', type=str)
598  parser.add_argument('--hashDirs', help='Use hash directory structure in destination directory.', action='store_true')
599  parser.add_argument('--runDirs', help='Use run directory structure in destination directory, 000XYZ/XYZUW for run number XYZUW.', action='store_true')
600  parser.add_argument('--autoDropbox', help='Use automatic dropox location', default=False, action='store_true')
601  parser.add_argument('--jsonMetadata', help='Create JSON files with metadata corresponding to each output file, and copy them to the same destinations', action='store_true')
602  parser.add_argument('--declareFiles', help='Declare files with metadata on worker node', action='store_true')
603  parser.add_argument('--declareLocations', help='Declare the file output locations to SAM during the copy back of the files', action='store_true')
604  parser.add_argument('--logs', help='Return .log files corresponding to every output', action='store_true')
605  parser.add_argument('--zipLogs', help='Format logs as .bz2 files. Implies --logs', action='store_true')
606  parser.add_argument('--noCleanup', help='Skip working directory cleanup step, good for interactive debugging or custom copy-out.', action='store_true')
607  parser.add_argument('--gdb', help='Run nova executable under gdb, print full stack trace, then quit gdb.', action='store_true')
608  parser.add_argument('--lemBalance', help='Choose lem server based on (CLUSTER+PROCESS)%%2 to balance load', action='store_true')
609  parser.add_argument('--lemServer', help='Specify lem server', type=str)
610  parser.add_argument('--txtfiledef', help='Use if the input definition is made up of text files, each containing a list of file names',default=False, action='store_true')
611  parser.add_argument('--precopyscript', help='Execute script PRECOPYSCRIPT within runNovaSAM.py, after running the nova -c command.', type=str, action='append')
612  parser.add_argument('--second_config', help="""Second configuration fcl executed after main process.
613  nova is executed with art file that is output from the main process.
614  Files that get produced by this process that are named identically
615  to files produced by the main process and are among the requested outputs
616  are ignored and the file produced by the first process is returned""", type=str)
617 
618  args = parser.parse_args()
619 
620  # Sanity check for output
621  if args.copyOut:
622  if not (args.outTier or args.cafTier or args.flatTier or args.histTier or args.h5Tier):
623  raise Exception("Copy-out requested with --copyOut, but no outputs specified. Nothing will happen with output, aborting.")
624  if not (args.dest or "DEST" in os.environ):
625  raise Exception("Copy-out requested with --copyOut, but no output directory specified. Use --dest or $DEST.")
626 
627  # No longer set VMem limit -- causes problems on some OSG sites
628  #setVMemLimit()
629 
630  samweb = samweb_client.SAMWebClient(experiment='nova')
631 
632  if "SRT_BASE_RELEASE" in os.environ:
633  release = os.environ["SRT_BASE_RELEASE"]
634  elif "NOVASOFT_VERSION" in os.environ:
635  release = os.environ["NOVASOFT_VERSION"]
636  else:
637  print "No release set!"
638  exit(1)
639 
640 
641  inFile = args.inFile
642  inFileBase = os.path.basename(inFile)
643 
644  # Which file do I want to use to get the metadata?
645  if not args.txtfiledef:
646  # Normally my infile.
647  metadata = samweb.getMetadata(inFileBase)
648  fileMetaDataMgr = MetadataUtils.metaDataMgr(inFile, metadata, release, args.systematic, args.skim, args.cosmicsPolarity, args.npass, args.specialName)
649  else:
650  # However, if using a txtfile def, want to use the first file in the txt file.
651  with open( inFile ) as f:
652  PassFile = f.readline().strip()
653  print "Looking at ", PassFile
654  #metadata_cmd = "ifdh_fetch %s" %PassFile
655  #os.system(metadata_cmd)
656  metadata = samweb.getMetadata(PassFile)
657  fileMetaDataMgr = MetadataUtils.metaDataMgr(PassFile, metadata, release, args.systematic, args.skim, args.cosmicsPolarity, args.npass, args.specialName)
658 
659 
660 
661  tmpFclName = make_temp_fcl(args.config, inFileBase)
662 
663  # Open the fcl file so that we can append output filenames to it
664  fclFileObj = open(tmpFclName, 'a')
665 
666  print " Open the fcl file so that we can append output filenames to it ::::::::::::::::::::::::::: fclFileObj=", fclFileObj
667  doMeta = True
668  if not (args.outTier or args.cafTier or args.flatTier or args.h5Tier):
669  doMeta = False
670 
671  # Start setting up the nova command, add SAM parameters
672  cmdList = []
673  cmdList.append('nova')
674  cmdList.append('-c')
675  cmdList.append(tmpFclName)
676  if doMeta:
677  cmdList.append('--sam-application-family=nova')
678  cmdList.append('--sam-application-version=' + release)
679  if not fileMetaDataMgr.isSam4Users():
680  cmdList.append('--sam-file-type=' + fileMetaDataMgr.fileType)
681 
682  if not args.outTier:
683  args.outTier = []
684  if not args.cafTier:
685  args.cafTier = []
686  if not args.flatTier:
687  args.flatTier = []
688  if not args.h5Tier:
689  args.h5Tier = []
690 
691  if not args.precopyscript:
692  args.precopyscript = []
693 
694  outList = [] # list of files to be supplied with -o
695  outputs = [] # list of files that will be moved to results directory, includes CAFs
696  # Loop over output tiers
# Each entry is "<output_module>:<data_tier>". An output_module containing a
# comma encodes a numeric fan-out range (plane mode "name,X-Y," or, when the
# string contains 'cell', a 2-D plane/cell grid) that is expanded into one
# art output module per index, e.g. name3cell7.
697  for outTier in args.outTier:
698 
699  try:
700  output = outTier.split(":")[0]
701 
702  tier = outTier.split(":")[1]
703 
# NOTE(review): bare except hides unrelated errors, and the message string has
# a typo ("corectly") plus a missing space after the tier name — worth fixing
# in the repository (a doc-only pass cannot change runtime strings).
704  except:
705  raise ValueError("Output data tier: " + outTier + "not formatted corectly, should be <output_name>:<data_tier>")
706 
# First pass: register every (expanded) output module as a SAM data tier /
# stream on the nova command line.
707  if "," in output:
708  if(re.search('cell',output)):
# Expect "prefix,Xp-Yp,Xc-Yc," style; findall pulls the two range tokens.
709  outP, outC = re.findall(',(.+?),', output)
710  outXp, outXc = [re.findall('(.+)-', outP)[0], re.findall('(.+)-', outC)[0]] # p stands for plane, c stands for cell
711  outYp, outYc = [re.findall('-(.+)', outP)[0], re.findall('-(.+)', outC)[0]]
712  for i in range( int(outXp), int(outYp)+1 ):
713  for j in range( int(outXc), int(outYc)+1):
# Python 2 backticks == repr(); builds e.g. "<prefix>3cell7".
714  NewOMod = re.search('^(.+?),', output).group(1) + `i` + re.search('cell',output).group() + `j`
715  cmdList.append('--sam-data-tier=' + ":".join([NewOMod, tier]))
716  if not fileMetaDataMgr.isSam4Users():
717  if fileMetaDataMgr.dataFlag == "data":
718  cmdList.append('--sam-stream-name=' + NewOMod + ':' + str(fileMetaDataMgr.stream))
719  else:
# Plane-only fan-out: "prefix,X-Y," -> prefixX .. prefixY.
720  outX = re.search(',(.+?)-', output).group(1)
721  outY = re.search('-(.+?),', output).group(1)
722  for i in range( int(outX), int(outY)+1 ):
723  NewOMod = re.search('^(.+?),', output).group(1)+`i`
724  cmdList.append('--sam-data-tier=' + ":".join([NewOMod, tier]))
725  if not fileMetaDataMgr.isSam4Users():
726  if fileMetaDataMgr.dataFlag == "data":
727  cmdList.append('--sam-stream-name=' + NewOMod + ':' + str(fileMetaDataMgr.stream))
728  else:
# Simple case: a single output module, no fan-out.
729  cmdList.append('--sam-data-tier=' + ":".join([output, tier]))
730  if not fileMetaDataMgr.isSam4Users():
731  if fileMetaDataMgr.dataFlag == "data":
732  cmdList.append('--sam-stream-name=' +output + ':' + str(fileMetaDataMgr.stream))
733 
# Derive the output file name for this tier from the metadata manager.
734  outNameTemp = fileMetaDataMgr.getOutputFileName(tier)
# For text-file definitions, splice the FirstRun/LastRun range (parsed from
# the input file's name) into the output name in place of the single-run
# "_r0…" field (14 chars assumed — TODO confirm that width).
735  if args.txtfiledef:
736  FirstRun = re.search('FirstRun-(.+?)_LastRun', os.path.basename(inFile)).group(1).zfill(8)
737  LastRun = re.search('LastRun-(.+?)_TotFiles', os.path.basename(inFile)).group(1).zfill(8)
738  Index=outNameTemp.find("_r0")
739  outNameTemp=outNameTemp[:Index]+"_r"+FirstRun+"_r"+LastRun+outNameTemp[Index+14:]
740  outName = os.path.basename(outNameTemp)
741 
# Second pass: same fan-out expansion, this time writing per-module
# outputs.<module>.fileName overrides into the FCL and recording the names.
742  if "," in output:
743  if(re.search('cell',output)):
744  outP, outC = re.findall(',(.+?),', output)
745  outXp, outXc = [re.findall('(.+)-', outP)[0], re.findall('(.+)-', outC)[0]] # p stands for plane, c stands for cell
746  outYp, outYc = [re.findall('-(.+)', outP)[0], re.findall('-(.+)', outC)[0]]
747  for i in range( int(outXp), int(outYp)+1 ):
748  for j in range( int(outXc), int(outYc)+1):
749  NewOMod = re.search('^(.+?),', output).group(1)+`i` + re.search('cell',output).group() + `j`
750  tier = outTier.split(":")[1]
751  NewOName = outName.replace(str("."+tier), str("-"+NewOMod+"."+tier))
752  fclFileObj.write("\noutputs." + NewOMod + '.fileName: "'+ NewOName + '"\n')
753  outList.append(NewOName)
754  outputs.append(NewOName)
755  else:
756  print "Running in Plane Mode"
757  outX = re.search(',(.+?)-', output).group(1)
758  outY = re.search('-(.+?),', output).group(1)
759  for i in range( int(outX), int(outY)+1 ):
760  NewOMod = re.search('^(.+?),', output).group(1)+`i`
761  tier = outTier.split(":")[1]
762  NewOName = outName.replace(str("."+tier), str("-"+NewOMod+"."+tier))
763  fclFileObj.write("\noutputs." + NewOMod + '.fileName: "'+ NewOName + '"\n')
764  outList.append(NewOName)
765  outputs.append(NewOName)
766  else:
767  print "Output file name: ", outName, " for tier ", tier, " and output ", output
768  fclFileObj.write("\noutputs." + output + '.fileName: "'+ outName + '"\n')
769  outList.append(outName)
770  outputs.append(outName)
771 
# CAF outputs: "<producer_label>:<data_tier>". The file name is pushed into
# the producer's CAFFilename/DataTier FCL parameters rather than -o.
772  for cafTier in args.cafTier:
773  try:
774  cafLabel = cafTier.split(":")[0]
775  tier = cafTier.split(":")[1]
# NOTE(review): bare except + the same "corectly"/missing-space typo as the
# outTier loop — fix in the repository.
776  except:
777  raise ValueError("Output data tier: " + cafTier + "not formatted corectly, should be <output_name>:<data_tier>")
778 
779  cafName = fileMetaDataMgr.getOutputFileName(tier)
780  print "Adding CAF: ", cafLabel, tier, cafName
781 
782  fclFileObj.write("\nphysics.producers." + cafLabel + '.CAFFilename: "' + cafName + '" \n')
783  fclFileObj.write("physics.producers." + cafLabel + '.DataTier: "' + tier + '" \n')
784  outputs.append(cafName)
785 
786 
# FlatCAF outputs: only tracked for copy-out; the FCL overrides are
# deliberately commented out (the producer presumably names its own file —
# confirm against the producer's configuration).
787  for flatTier in args.flatTier:
788  try:
789  flatLabel = flatTier.split(":")[0]
790  tier = flatTier.split(":")[1]
791  except:
792  raise ValueError("Output data tier: " + flatTier + "not formatted corectly, should be <output_name>:<data_tier>")
793 
794  flatName = fileMetaDataMgr.getOutputFileName(tier)
795  print "Adding FlatCAF: ", flatLabel, tier, flatName
796 
797  #fclFileObj.write("\nphysics.producers." + flatLabel + '.OutputName: "' + flatName + '" \n')
798  #fclFileObj.write("physics.producers." + flatLabel + '.DataTier: "' + tier + '" \n')
799  outputs.append(flatName)
800 
801 
# HDF5 outputs: tracked for copy-out with a ".h5" suffix appended to the
# metadata-derived name; no FCL override is written.
802  for h5Tier in args.h5Tier:
803  try:
804  h5Label = h5Tier.split(":")[0]
805  tier = h5Tier.split(":")[1]
806  except:
807  raise ValueError("Output data tier: " + h5Tier + "not formatted corectly, should be <output_name>:<data_tier>")
808 
809  h5Name = fileMetaDataMgr.getOutputFileName(tier)
810  print "Adding H5: ", h5Label, tier, h5Name
811 
812  outputs.append(h5Name+".h5")
813 
# LEM server selection: with --lemBalance, split jobs between servers by the
# parity of PROCESS+CLUSTER (even -> Caltech host); otherwise honour an
# explicit --lemServer. Uses the deprecated string.atoi (Python 2 only).
814  if args.lemBalance and (atoi(os.environ["PROCESS"])+atoi(os.environ["CLUSTER"]))%2==0:
815  fclFileObj.write("physics.producers.lem.WebSettings.Host: \"lem2.hep.caltech.edu\"\n")
816  elif args.lemServer:
817  fclFileObj.write("physics.producers.lem.WebSettings.Host: \"%s\"\n" % args.lemServer)
818 
819 
# Histogram tier: route TFileService output to a metadata-derived file name.
# NOTE(review): str() on an argparse string cannot realistically raise; the
# try/except is effectively dead code.
820  if args.histTier:
821  try:
822  tier = str(args.histTier)
823  except:
824  raise Exception("Histogram identifier supplied by --histTier could not be converted to a string.")
825 
826  histName = fileMetaDataMgr.getOutputFileName(tier)
827 
828  outputs.append(histName)
829  fclFileObj.write("\nservices.TFileService.fileName: " + '"' + histName + '"\n')
830 
# All FCL overrides written; close before nova reads the file.
831  fclFileObj.close()
832 
833 
834 
# Echo the final (appended-to) FCL into the job log for debugging.
835  print "Config: "
836  for line in open(tmpFclName):
837  print line.strip()
838 
# Optional event-count limit, forwarded as nova -n.
839  if args.n != None:
840  cmdList.append('-n')
841  cmdList.append(str(args.n))
842 
# Input handling. With --txtfiledef, inFile is a text list of SAM file names:
# each is localised with "ifdh cp" (via samweb2xrootd) into ./InFiles and the
# local copies are appended to the nova command. Otherwise inFile is passed
# directly as the art input.
843  if args.txtfiledef:
844  print "\nI have a text file definition, InFile is {}".format(inFile)
845  ### Are we streaming the files via xrootd?
846  #txtcmd="cat %s | xargs -n1 samweb2xrootd > xrootd_inFile.txt"%inFile
847  #os.system(txtcmd)
848  #with open("xrootd_inFile.txt") as f:
849  # for line in f:
850  # print line.strip()
851  # cmdList.append( line.strip() )
852  #print ""
853  ### Are we going to copy the files?
854  olddir = os.getcwd()
855  os.system("mkdir InFiles")
# Counters are floats so the success-ratio division below is non-integer
# under Python 2. NOTE(review): an empty input list would divide by zero.
856  allFiles = 0.
857  failFiles = 0.
858  with open(inFile) as f:
859  os.chdir("InFiles")
860  for line in f:
861  allFiles += 1
862  copyfile = "InFiles/%s" %line.strip()
863  print "Now copying",line.strip(),"to ",copyfile
864  ifdhcmd = "ifdh cp -D `samweb2xrootd %s` ." %line.strip()
865  print datetime.datetime.now()
866  ret = os.system( ifdhcmd )
# Failed copies are skipped (counted) rather than fatal — best effort.
867  if ret == 0:
868  cmdList.append( copyfile )
869  else:
870  failFiles += 1
871  print("Copy in success ratio: " + str((allFiles-failFiles)/allFiles))
872  os.chdir(olddir)
873  else:
874  cmdList.append(inFile)
875 
# Optionally wrap the whole command in gdb so a crash prints a full backtrace
# and gdb exits with the child's return code.
876  if args.gdb:
877  gdbArgs = ["gdb", "-return-child-result", "--ex", "run", "--ex", "bt", "full", "--ex", "q", "--args"]
878  cmdList = gdbArgs + cmdList
879 
880  cmd = ' '.join(cmdList)
881 
882  print 'Running:', cmd
883  sys.stdout.flush() # flush the stdout buffer before running the nova executable, cleans up output.
884 
# Run nova. With --logs/--zipLogs, capture stdout+stderr to log.txt, then
# replay it to stdout so the condor log still contains everything.
885  if args.logs or args.zipLogs:
886  with open('log.txt', 'w') as logfile:
887  sys.stderr.write('\nnova command runs here. stderr redirected to stdout\n\n')
888  retCode = subprocess.call(cmdList, stdout=logfile, stderr=subprocess.STDOUT)
889  # Print all the output to the screen as well so that regular condor
890  # logs include it too.
891  with open('log.txt', 'r') as logfile:
892  for line in logfile:
893  print line,
894  if args.zipLogs:
895  os.system('bzip2 -f log.txt')
896  else:
897  retCode = subprocess.call(cmdList)
898 
899  ### If using a txtfiledef make sure to clean up the InputFile List....
900  if args.txtfiledef:
901  os.system("rm -rf InFiles")
902 
# On failure, nothing is copied out (log copy-back is only an aspiration —
# note the typo "somehwere" in the runtime string; fix in the repository).
903  if retCode != 0:
904  print "Want to copy back the logs for this job somehwere...."
905  else:
906 
907  # determine output destination
# Precedence: --dest flag, then $DEST, else fail loudly. --autoDropbox then
# overrides both with the production dropbox location from NovaGridUtils.
908  if args.copyOut:
909  if args.dest:
910  dest = args.dest
911  elif "DEST" in os.environ:
912  dest = os.environ["DEST"]
913  else:
914  raise Exception("Copy out requested with --copyOut, but no destination supplied. Use --dest or $DEST")
915 
916  if args.autoDropbox:
917  dest=NGU.get_prod_dropbox()
918  print("Getting automatic dropbox location", dest)
919  # If the initial job finished successfully, we may want to run another config over the output
920  # copy logic above for executing nova with the second_config instead.
921  if args.second_config:
922  # stage files from first config to be declared
923  # before the second config is run
924  if args.copyOut:
925  # move what we have there
926  checkAndMoveFiles(inFile, outputs, noCleanup=True, copyOnly=True)
927  if args.jsonMetadata:
# NOTE(review): this listing's own line numbers jump from 927 to 929 here —
# a statement (plausibly the makeMetadataJSONs call indexed at the bottom of
# the page) appears to have been lost in rendering; confirm against the repo.
928 929  copyOutFiles(dest, args.hashDirs, args.runDirs, fileMetaDataMgr.runNum, args.noCleanup, args.declareLocations, args.declareFiles)
930 
931 
932  # create a temporary work space so we're careful about overwriting files
933  # from the main step
934  import random
935  import string
936 
# Random 8-letter tmp dir under the current directory; cd into it so the
# second pass cannot clobber first-pass outputs.
937  lastdir = os.getcwd()
938  tmpdir = os.path.abspath(os.path.join('./', 'tmp' + ''.join(random.choice(string.ascii_letters) for i in range(8))))
939  os.mkdir(tmpdir)
940  os.chdir(tmpdir)
941  print('Changing to %s' % os.getcwd())
# Rebuild the nova command from scratch for the second FCL, mirroring the
# SAM-parameter and gdb logic of the first invocation.
942  tmpSecondFclName = make_temp_fcl(args.second_config, inFileBase)
943  cmdList = []
944  cmdList.append('nova')
945  cmdList.append('-c')
946  cmdList.append(tmpSecondFclName)
947  if doMeta:
948  cmdList.append('--sam-application-family=nova')
949  cmdList.append('--sam-application-version=' + release)
950  if not fileMetaDataMgr.isSam4Users():
951  cmdList.append('--sam-file-type=' + fileMetaDataMgr.fileType)
952 
953  if args.gdb:
954  gdbArgs = ["gdb", "-return-child-result", "--ex", "run", "--ex", "bt", "full", "--ex", "q", "--args"]
955  cmdList = gdbArgs + cmdList
956 
957 
958  # run second nova executable
# The second pass consumes the first pass's 'pid' tier file from lastdir;
# if it is absent the second pass is skipped with a warning.
959  inPID= fileMetaDataMgr.getOutputFileName('pid')
960  if not os.path.isfile(os.path.abspath(os.path.join(lastdir, os.path.basename(inPID)))):
961  print 'WARNING: Could not find PID file for second configuration fcl. Skipping', inPID
962  else:
963 
964  cmdList.append(os.path.abspath(os.path.join(lastdir, os.path.basename(inPID))))
965  cmd = ' '.join(cmdList)
966 
967  print 'Running:', cmd
968  sys.stdout.flush() # flush the stdout buffer before running the nova executable, cleans up output.
# Same log-capture scheme as the first pass; note retCode is overwritten,
# so the final exit status reflects the second nova invocation.
969  if args.logs or args.zipLogs:
970  with open(os.path.join(lastdir,'log.txt'), 'w') as logfile:
971  sys.stderr.write('\nsecond nova command runs here. stderr redirected to stdout\n\n')
972  retCode = subprocess.call(cmdList, stdout=logfile, stderr=subprocess.STDOUT)
973 
974  # Print all the output to the screen as well so that regular condor
975  # logs include it too.
976  with open(os.path.join(lastdir, 'log.txt'), 'r') as logfile:
977  for line in logfile:
978  print line,
979 
980  if args.zipLogs:
981  os.system('bzip2 -f log.txt')
982 
983  else:
984  retCode = subprocess.call(cmdList)
985 
986  if retCode == 0:
987  # handle output of second nova executable
988  # don't copy back files that belong to both the 'outputs' list and
989  # the files that were created by the first executable.
990  # Warn the user if this happens
# Set arithmetic: files present in BOTH dirs and among requested outputs
# are name collisions (_bad); only non-colliding second-pass files (_good)
# are moved back next to the first-pass results.
991  _first = set(os.listdir(lastdir))
992  _second = set(os.listdir(tmpdir))
993  _outputs = set(outputs)
994  _bad = (_first & _second) & _outputs
995  _good = _second - _bad
996  if len(_bad) > 0:
997  print 'runNovaSAM.py: [WARNING] First and second processes produced identically named files that among requested outputs. Ignoring file produced by the second and copying out only the first'
998  for b in _bad: print b
999  for g in _good:
1000  print 'Change ', os.path.join(tmpdir, os.path.basename(g)), ' to ', os.path.join(lastdir, os.path.basename(g))
1001  os.rename(os.path.join(tmpdir, os.path.basename(g)), os.path.join(lastdir, os.path.basename(g)))
1002  else:
1003  sys.stderr.write('\nSecond nova command failed with exit code %d' % retCode)
1004  # remove temp directory
1005  if not args.noCleanup:
1006  # we've moved all of the files we care about at this point.
1007  # remove all contents of tmpdir so we can remove the directory
1008  # itself too
1009  for f in os.listdir(tmpdir):
1010  os.remove(os.path.join(tmpdir, f))
1011  os.rmdir(tmpdir)
1012 
1013  # go back to previous working directory
1014  os.chdir(lastdir)
1015 
1016  # wait until after we possibly run a second config to zip the output up
1017  if args.zipLogs:
1018  os.system('bzip2 -f log.txt')
1019 
1020 
# Optional decafing: run the requested reduction macro(s) over every
# "*caf.root" in the working directory and add the reduced files to the
# copy-out list. A single CAF can yield several decafs if multiple flags are set.
1021  if args.outputNumuDeCAF or args.outputNueDeCAF or args.outputNumuOrNueDeCAF or args.outputNusDeCAF or args.outputValidationDeCAF:
1022  decafdir = os.listdir(".")
1023  for fname in decafdir:
1024  if fname.endswith("caf.root"):
1025  if args.outputNumuDeCAF:
1026  outputs.append(makeDeCAF('numu/FirstAnalysis/reduce_numu_fa.C',fname,'numu_contain'))
1027  if args.outputNueDeCAF:
1028  outputs.append(makeDeCAF('nue/reduce_nue_sa.C',fname,'nue_contain'))
1029  if args.outputNumuOrNueDeCAF:
1030  outputs.append(makeDeCAF('nue/reduce_nue_or_numu_sa.C',fname,'nue_or_numu_contain'))
1031  if args.outputNusDeCAF:
1032  outputs.append(makeDeCAF('nus/reduce_nus.C',fname,'nus_contain'))
1033  if args.outputValidationDeCAF:
1034  outputs.append(makeDeCAF('nus/reduce_nue_or_numu_or_nus.C',fname,'nue_or_numu_or_nus_contain'))
# Validate/relocate all declared outputs, then (optionally) copy them out.
1035  print "\nAt the start of check and move."
1036  checkAndMoveFiles(inFile, outputs, args.noCleanup)
1037 
1038 
1039  if args.copyOut:
1040  if args.jsonMetadata:
1041  print "\nMake JSONs"
# NOTE(review): the listing's line numbers jump 1041 -> 1043 here — the
# JSON-making call itself appears lost in rendering; confirm against the repo.
1043  print "\nMade JSONs, now to copy files."
1044 
1045  copyOutFiles(dest, args.hashDirs, args.runDirs, fileMetaDataMgr.runNum, args.noCleanup, args.declareLocations, args.declareFiles)
1046  print "Copied files."
1047  else:
1048  #job didn't succeed, remove output files if they exist
# Best-effort removal: a file that was never produced raises OSError,
# which is deliberately ignored.
1049  for file in outList:
1050  try:
1051  os.remove("./" + file)
1052  except OSError:
1053  pass
1054 
1055  #clean up section
# Remove the job-local FCL copy, the input file, and any stray
# RootOutput*.root files matched by the module-level _skip_pattern.
1056  if not args.noCleanup:
1057  os.remove(tmpFclName)
1058  os.remove(inFile)
1059  dirList = os.listdir(".")
1060  for file in dirList:
1061  skip_match = _skip_pattern.match(file)
1062  if skip_match != None:
1063  print file, "contains RootOutput*.root: clean up"
1064  os.remove("./" + file)
1065 
# Let ifdh tidy its transfer scratch area, then propagate nova's exit code.
1066  dh = ifdh.ifdh("http://samweb.fnal.gov:8480/sam/nova/api")
1067  dh.cleanup()
1068 
1069  exit(retCode)
def makeMetadataJSONs()
Definition: runNovaSAM.py:302
void split(double tt, double *fr)
def copyOutFiles(hashDirs=False)
Definition: runNovaSAM.py:149
def makeDirSafely(dir)
Definition: runNovaSAM.py:79
def checkAndMoveFiles(inFile, declareFiles)
Definition: runNovaSAM.py:105
def resolveFclPath(fcl)
Definition: runNovaSAM.py:188
void append()
Definition: append.C:24
def getOutDir(pathname, hashDirs=False)
Definition: runNovaSAM.py:92
if(dump)
bool print
def MakeTransposeJson(TransposeList)
Definition: runNovaSAM.py:369
std::string format(const int32_t &value, const int &ndigits=8)
Definition: HexUtils.cpp:14
procfile open("FD_BRL_v0.txt")
def createMetadata(inFile)
cet::coded_exception< errors::ErrorCodes, ExceptionDetail::translate > Exception
Definition: Exception.h:66
def addMetadataToFCL(fclFile, parName, parValue)
exit(0)
def listFiles(outPath, dh)
Definition: runNovaSAM.py:295
def make_temp_fcl(fclFile, inFileBase)
Definition: runNovaSAM.py:33
def setVMemLimit()
Definition: runNovaSAM.py:71
def fileExists(outPath, dh)
Definition: runNovaSAM.py:283
def makeDirIfNeeded(dir)
Definition: runNovaSAM.py:99
def declareFile(fileWPath)
Definition: runNovaSAM.py:127
def declareLogFile(logName, logFile, rootName)
Definition: runNovaSAM.py:264
def makeDeCAF(script, fname, special)
Definition: runNovaSAM.py:516