maintain_parentage = 0
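
# Parse command-line options; each branch below handles one option and its value.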
if args[0] == '--dir' and len(args) > 1:
    checkdir = args[1]
elif args[0] == '--logfiledir' and len(args) > 1:
    logdir = args[1]
elif args[0] == '--outdir' and len(args) > 1:
    outdir = args[1]
elif args[0] == '--declare' and len(args) > 1:
    declare_file = int(args[1])
elif args[0] == '--copy' and len(args) > 1:
    copy_to_dropbox = int(args[1])
elif args[0] == '--maintain_parentage' and len(args) > 1:
    maintain_parentage = int(args[1])
elif args[0] == '--data_file_type' and len(args) > 1:
    data_file_types.append(args[1])
else:
    print('Unknown option %s' % args[0])

if len(data_file_types) == 0:
    data_file_types.append('root')

print("Do declaration in job: %d" % declare_file)
stat_filename = os.path.join(logdir, 'lar.stat')
if project_utilities.safeexist(stat_filename):
    try:
        status = int(project_utilities.saferead(stat_filename)[0].strip())
        if status != 0:
            print('Job in subdirectory %s ended with non-zero exit status %d.' % (checkdir, status))
    except:
        print('Bad file lar.stat in subdirectory %s.' % checkdir)

if checkdir == '':
    print('No directory specified (use the --dir option). Exiting.')
if logdir == '':
    print('No log file directory specified (use the --logfiledir option). Exiting.')
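
# Check the root files and analysis histogram files produced by this job.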
nevts, rootfiles, hists = check_root(checkdir, logdir, data_file_types)

# Flag an analysis-only job: histogram files present, but no artroot files or events.
ana = 0
if len(hists) > 0 and len(rootfiles) == 0 and nevts <= 0:
    ana = 1

if not ana:
    if len(rootfiles) == 0 or nevts < 0:
        print('Problem with root file(s) in %s.' % checkdir)
elif nevts < -1 or len(hists) == 0:
    print('Problem with analysis root file(s) in %s.' % checkdir)
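
# Open the bookkeeping list files written by this job.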
validate_list = open('validate.list', 'w')
file_list = open('files.list', 'w')
ana_file_list = open('filesana.list', 'w')
events_list = open('events.list', 'w')
bad_list = open('bad.list', 'w')
missing_list = open('missing_files.list', 'w')

# Print a summary of the good output.
if ana:
    print("%d processes completed successfully." % nproc)
    print("%d total good histogram files." % len(hists))
else:
    print("%d total good events." % nevts)
    print("%d total good root files." % len(rootfiles))
    print("%d total good histogram files." % len(hists))

file_list_stream = {}
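
# Record each good artroot file in the bookkeeping lists, keyed by output stream.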
for rootfile in rootfiles:

    rootpath = rootfile[0]
    nev = rootfile[1]
    streamname = rootfile[2]

    # Warn about output file names that are too long.
    rootname = os.path.basename(rootpath)
    if len(rootname) >= 200:
        print('Filename %s in subdirectory %s is longer than 200 characters.' % (rootname, checkdir))

    if streamname not in file_list_stream:
        file_list_stream[streamname] = open('files_%s.list' % streamname, 'w')
    validate_list.write(rootpath + '\n')
    file_on_scratch = os.path.join(outdir, os.path.basename(rootpath))
    file_list.write(file_on_scratch + '\n')
    file_list_stream[streamname].write(file_on_scratch + '\n')
    events_list.write('%s %d \n' % (file_on_scratch, nev))
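
# Record each good analysis (histogram) file.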
for histfile in hists:
    validate_list.write(histfile + '\n')
    file_on_scratch = os.path.join(outdir, os.path.basename(histfile))
    ana_file_list.write(file_on_scratch + '\n')

validate_list.close()
file_list.close()
ana_file_list.close()
for streamname in list(file_list_stream.keys()):
    file_list_stream[streamname].close()

missing_list.write('%d \n' % status)
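
# Declare good artroot files to sam, and optionally copy them to the dropbox.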
for rootfile in rootfiles:

    rootpath = rootfile[0]
    fn = os.path.basename(rootpath)
    declare_ok = 0

    # Check whether this file is already declared to sam.
    try:
        md = samweb.getMetadata(fn)
        declare_ok = 1
        print('File %s is already declared.' % fn)
    except samweb_cli.exceptions.FileNotFound:

        # Not yet declared: build experiment-specific metadata for this file.
        print('Declaring %s' % fn)
        expSpecificMetaData = expMetaData(project_utilities.get_experiment(), rootpath)
        md = expSpecificMetaData.getmetadata()

        # Optionally override the file's parentage using the parents and aunts
        # provided by the batch environment.
        if maintain_parentage == 1:
            jobs_parents = os.getenv('JOBS_PARENTS', '').split(" ")
            jobs_aunts = os.getenv('JOBS_AUNTS', '').split(" ")
            if jobs_parents[0] != '':
                md['parents'] = [{'file_name': parent} for parent in jobs_parents]
            if jobs_aunts[0] != '':
                for aunt in jobs_aunts:
                    mixparent_dict = {'file_name': aunt}
                    if 'parents' not in md:
                        md['parents'] = []
                    md['parents'].append(mixparent_dict)

        # Declare the file to sam, if metadata was found.
        if len(md) > 0:
            project_utilities.test_kca()
            try:
                samweb.declareFile(md=md)
                declare_ok = 1
            except samweb_cli.exceptions.SAMWebHTTPError as e:
                print(e)
                print('SAM declare failed.')
            except:
                print('SAM declare failed.')
        else:
            print('No sam metadata found for %s.' % fn)

    # Copy the declared file to its dropbox directory, if requested.
    if copy_to_dropbox == 1 and declare_ok:
        print("Copying to Dropbox")
        dropbox_dir = project_utilities.get_dropbox(fn)
        rootPath = os.path.join(dropbox_dir, fn)
        jsonPath = rootPath + ".json"
for histpath in hists:

    fn = os.path.basename(histpath)
    declare_ok = 0

    # Check whether this analysis file is already declared to sam.
    try:
        md = samweb.getMetadata(fn)
        declare_ok = 1
        print('File %s is already declared.' % fn)
    except samweb_cli.exceptions.FileNotFound:

        # Not yet declared: read the metadata from the corresponding json file.
        print('Declaring %s' % fn)
        json_file = os.path.join(logdir, fn + '.json')
        md = {}
        if project_utilities.safeexist(json_file):
            mdlines = project_utilities.saferead(json_file)
            mdtext = ''
            for line in mdlines:
                mdtext = mdtext + line
            md = json.loads(mdtext)

        # Optionally override the file's parentage using the parents and aunts
        # provided by the batch environment.
        if maintain_parentage == 1:
            jobs_parents = os.getenv('JOBS_PARENTS', '').split(" ")
            jobs_aunts = os.getenv('JOBS_AUNTS', '').split(" ")
            if jobs_parents[0] != '':
                md['parents'] = [{'file_name': parent} for parent in jobs_parents]
            if jobs_aunts[0] != '':
                for aunt in jobs_aunts:
                    mixparent_dict = {'file_name': aunt}
                    if 'parents' not in md:
                        md['parents'] = []
                    md['parents'].append(mixparent_dict)

        # Declare the file to sam, if usable metadata was found.
        if len(md) > 0 and 'file_type' in md:
            project_utilities.test_kca()
            try:
                samweb.declareFile(md=md)
                declare_ok = 1
            except samweb_cli.exceptions.SAMWebHTTPError as e:
                print(e)
                print('SAM declare failed.')
            except:
                print('SAM declare failed.')
        else:
            print('No sam metadata found for %s.' % fn)

    # Copy the declared file to its dropbox directory, if requested.
    if copy_to_dropbox == 1 and declare_ok:
        print("Copying to Dropbox")
        dropbox_dir = project_utilities.get_dropbox(fn)
        rootPath = os.path.join(dropbox_dir, fn)
        jsonPath = rootPath + ".json"
dir_on_scratch = os.path.basename(outdir)
print('Dir on scratch ' + dir_on_scratch)
bad_list.write('%s \n' % dir_on_scratch)