#------------------------------------------------------------------
#
# Purpose: A batch script to fetch root files from sam and combine them
#          using hadd.
#
# Adapted from condor_lBdetMC.sh by E. Church.
#
# Usage:
#
# condor_hadd_sam.sh [options]
#
# -c, --config <arg> - For compatibility (ignored).
# -T, --TFileName <arg> - TFile output file name.
# --nfile <arg> - Number of files to process per worker.
#
# Sam and parallel project options.
#
# --sam_user <arg> - Specify sam user (default $GRID_USER).
# --sam_group <arg> - Specify sam group (default --group option).
# --sam_station <arg> - Specify sam station (default --group option).
# --sam_defname <arg> - Sam dataset definition name.
# --sam_project <arg> - Sam project name.
# --sam_start - Specify that this worker should be responsible for
#   starting and stopping the sam project.
# --recur - Recursive input dataset (force snapshot).
# --sam_schema <arg> - Use this option with argument "root" to stream files
#   using xrootd.
# --os <arg> - A copy of the os argument passed to jobsub. May be used
#   to affect the definition of UPS_OVERRIDE.
# --data_file_type <arg> - Specify data file type (default "root", repeatable).
#
# --ups <arg> - Comma-separated list of top level run-time ups products.
# -r, --release <arg> - Release tag.
# -q, -b, --build <arg> - Release build qualifier (default "debug", or "prof").
# --localdir <arg> - Larsoft local test release directory (default none).
# --localtar <arg> - Tarball of local test release.
# --mrb - Ignored (for compatibility).
# --srt - Exit with error status (SRT run time is no longer supported).
#
# -h, --help - Print help.
# -i, --interactive - For interactive use.
# -g, --grid - Be grid-friendly.
# --group <arg> - Group or experiment (required).
# --workdir <arg> - Work directory (required).
# --outdir <arg> - Output directory (required).
# --logdir <arg> - Log directory (required).
# --scratch <arg> - Scratch directory (only for interactive).
# --cluster <arg> - Job cluster (override $CLUSTER).
# --process <arg> - Process within cluster (override $PROCESS).
# --procmap <arg> - Name of process map file (override $PROCESS).
# --init-script <arg> - User initialization script to execute.
# --init-source <arg> - User initialization script to source (bash).
# --end-script <arg> - User end-of-job script to execute.
# --init <path> - Absolute path of environment initialization script.
#
# End options.
#
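# Example: a typical invocation might look like the following (the release
# version, dataset definition name, project name, and directories here are
# hypothetical and are shown for illustration only):
#
#   condor_hadd_sam.sh -T hist.root --nfile 100 \
#     --sam_defname my_hist_def --sam_project my_hadd_project --sam_start \
#     --group <experiment> -r v1_00_00 -b e5:prof \
#     --workdir /path/to/work --outdir /path/to/out --logdir /path/to/log
#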
# Run time environment setup.
#
# MRB run-time environment setup is controlled by four options:
# --release (-r), --build (-b, -q), --localdir, and --localtar.
#
# a) Use option --release or -r to specify the version of the top-level
#    product(s).
#
# b) Use option --build or -b to specify the full build qualifiers (e.g.
#    "debug:e5" or "e5:prof").
#
# c) Use option --localdir or --localtar to specify your local test release.
#    Use one or the other (not both).
#
#    Use --localdir to specify the location of your local install
#    directory ($MRB_INSTALL).
#
#    Use --localtar to specify the location of a tarball of your install
#    directory (made relative to $MRB_INSTALL).
#
#    Note that --localdir is not grid-friendly.
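#
# For example (the version, qualifiers, and tarball name here are
# hypothetical), a frozen release plus a local test release tarball could be
# selected with:
#
#   -r v1_00_00 -b e5:prof --localtar local_install.tar
#
# where local_install.tar is made relative to $MRB_INSTALL, e.g.
#
#   tar -C $MRB_INSTALL -cf local_install.tar .
#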
# Notes.
#
# 1. Each batch worker is uniquely identified by two numbers stored
#    in environment variables $CLUSTER and $PROCESS (the latter is
#    a small integer that starts from zero and varies for different
#    jobs in a parallel job group). These environment variables are
#    normally set by the batch system, but can be overridden by options
#    --cluster, --process, and --procmap (e.g. to rerun failed jobs).
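#
#    For example, to manually rerun the worker that originally ran as
#    process 2 of cluster 123456 (hypothetical numbers), one could pass:
#
#      --cluster 123456 --process 2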
#
# 2. The work directory must be set to an existing directory owned
#    by the submitter and readable by the batch worker. Files from the
#    work directory are copied to the batch worker scratch directory at
#    the start of the job.
#
# 3. The initialization and end-of-job scripts (options --init-script,
#    --init-source, and --end-script) may be stored in the work directory
#    specified by option --workdir, or they may be specified as absolute
#    paths visible on the worker node.
#
# 4. A local test release may be specified as an absolute path using
#    --localdir, or as a tarball using --localtar. The location of the
#    tarball may be specified as an absolute path visible on the worker,
#    or as a path relative to the work directory.
#
# 5. The output directory must exist and be writable by the batch
#    worker (i.e. be group-writable for grid jobs). The worker
#    makes a new subdirectory called ${CLUSTER}_${PROCESS} in the output
#    directory and copies all files in the batch scratch directory there
#    at the end of the job. If the output directory is not specified, the
#    default is /grid/data/<group>/outstage/<user> (user is defined as
#    the owner of the work directory).
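#
#    For example, with --outdir /pnfs/<group>/scratch/users/<user>/out (a
#    hypothetical path), the worker running as process 0 of cluster 123456
#    copies its output to:
#
#      /pnfs/<group>/scratch/users/<user>/out/123456_0/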
#
# 6. This script reads input files from sam using the standard sam project api.
#    All files are fetched from sam, then they are combined by a single
#    invocation of hadd. This way of working implies an upper limit on
#    the number of files that can be combined in a single worker.
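#
#    Conceptually, the final merge step reduces to a single ROOT hadd call
#    over the fetched files (a sketch, not the literal command line built by
#    this script):
#
#      hadd -f hist.root input1.root input2.root ... inputN.root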
#
# Created: H. Greenlee, 29-Aug-2012
#
#------------------------------------------------------------------
while [ $# -gt 0 ]; do
  case "$1" in

    -h|--help )
      awk '/^# Usage:/,/^# End options/{print $0}' $0 | cut -c3- | head -n -2
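      # The awk range pattern above prints this script's comment header
      # between the "# Usage:" and "# End options" markers; cut -c3- strips
      # the leading "# " from each line, and head -n -2 drops the final two
      # lines so that the end marker itself is not printed.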
    # Config file (for compatibility -- ignored).
      if [ $# -gt 1 ]; then

    # Number of events (for compatibility -- ignored).
      if [ $# -gt 1 ]; then

      if [ $# -gt 1 ]; then

    # Number of files to process.
      if [ $# -gt 1 ]; then

    # Specify data file types (repeatable).
      if [ $# -gt 1 ]; then

      if [ $# -gt 1 ]; then

      if [ $# -gt 1 ]; then

      if [ $# -gt 1 ]; then

    # Sam dataset definition name.
      if [ $# -gt 1 ]; then

      if [ $# -gt 1 ]; then

    # Sam start/stop project flag.

      if [ $# -gt 1 ]; then

      if [ $# -gt 1 ]; then

    # General arguments for hadd command line.
      if [ $# -gt 1 ]; then

    # Top level ups products (comma-separated list).
      if [ $# -gt 1 ]; then

      if [ $# -gt 1 ]; then

    # Release build qualifier.
      if [ $# -gt 1 ]; then

    # Local test release directory.
      if [ $# -gt 1 ]; then

    # Local test release tarball.
      if [ $# -gt 1 ]; then

      echo "SRT run time environment is no longer supported."
    # Grid flag (no effect).

      if [ $# -gt 1 ]; then

      if [ $# -gt 1 ]; then

      if [ $# -gt 1 ]; then

      if [ $# -gt 1 ]; then

      if [ $# -gt 1 ]; then

      if [ $# -gt 1 ]; then

    # Process within cluster.
      if [ $# -gt 1 ]; then

      if [ $# -gt 1 ]; then

    # User initialization script.
      if [ $# -gt 1 ]; then

    # User source initialization script.
      if [ $# -gt 1 ]; then

    # User end-of-job script.
      if [ $# -gt 1 ]; then

    # Specify environment initialization script path.
      if [ $# -gt 1 ]; then

      echo "Unknown option $1"
#echo "LOCALDIR=$LOCALDIR"
#echo "LOCALTAR=$LOCALTAR"
#echo "INTERACTIVE=$INTERACTIVE"
#echo "WORKDIR=$WORKDIR"
#echo "OUTDIR=$OUTDIR"
#echo "LOGDIR=$LOGDIR"
#echo "SCRATCH=$SCRATCH"
#echo "INITSCRIPT=$INITSCRIPT"
#echo "INITSOURCE=$INITSOURCE"
#echo "ENDSCRIPT=$ENDSCRIPT"
# Done with arguments.

echo "Nodename: `hostname -f`"
if [ x$QUAL = x ]; then

if [ x$SAM_GROUP = x ]; then

if [ x$SAM_STATION = x ]; then

# Standardize sam_schema (xrootd -> root, xroot -> root).

if [ x$SAM_SCHEMA = xxrootd ]; then

if [ x$SAM_SCHEMA = xxroot ]; then
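# (A sam_schema of "root" causes input files to be streamed over xrootd
# instead of being copied to the worker; see the --sam_schema option in the
# usage notes above.)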
# Make sure work directory is defined and exists.

if [ x$WORKDIR = x ]; then
  echo "Work directory not specified."

echo "Work directory: $WORKDIR"
# Initialize experiment ups products and mrb.

echo "Initializing ups and mrb."

if [ x$INIT != x ]; then
  if [ ! -f $INIT ]; then
    echo "Environment initialization script $INIT not found."

  echo "Sourcing $INIT"

echo "Sourcing setup_experiment.sh"
source ${CONDOR_DIR_INPUT}/setup_experiment.sh

echo PRODUCTS=$PRODUCTS
# Ifdh may already be set up by the jobsub wrapper.
# If not, set it up here.

echo "IFDHC_DIR=$IFDHC_DIR"
if [ x$IFDHC_DIR = x ]; then
  echo "Setting up ifdhc, because jobsub did not set it up."

echo "IFDHC_DIR=$IFDHC_DIR"
# Set GROUP environment variable.

if [ x$GRP != x ]; then

echo "GROUP not specified."

echo "IFDH_OPT=$IFDH_OPT"

# Make sure output directory exists and is writable.

if [ x$OUTDIR = x ]; then
  echo "Output directory not specified."

echo "Output directory: $OUTDIR"

# Make sure log directory exists and is writable.

if [ x$LOGDIR = x ]; then
  echo "Log directory not specified."

echo "Log directory: $LOGDIR"
# Make sure scratch directory is defined.
# For batch, the scratch directory is always $_CONDOR_SCRATCH_DIR.
# For interactive, the scratch directory is specified by option
# --scratch or --outdir.

if [ $INTERACTIVE -eq 0 ]; then

  if [ x$SCRATCH = x ]; then

if [ x$SCRATCH = x -o ! -d "$SCRATCH" -o ! -w "$SCRATCH" ]; then
  echo "Local scratch directory not defined or not writable."
# Create the scratch directory in the condor scratch directory.
# Copied from condor_lBdetMC.sh.
# Scratch directory path is stored in $TMP.
# Scratch directory is automatically deleted when shell exits.

# Do not change this section.
# It creates a temporary working directory that automatically cleans up all
# leftover files at the end.

TMP=`mktemp -d ${SCRATCH}/working_dir.XXXXXXXXXX`
TMP=${TMP:-${SCRATCH}/working_dir.$$}

{ [[ -n "$TMP" ]] && mkdir -p "$TMP"; } || \
  { echo "ERROR: unable to create temporary directory!" 1>&2; exit 1; }
trap "[[ -n \"$TMP\" ]] && { rm -rf \"$TMP\"; }" 0
# End of the section you should not change.

echo "Scratch directory: $TMP"
# Copy files from work directory to scratch directory.

echo "No longer fetching files from work directory."
echo "That is now done using jobsub -f commands."
cp ${CONDOR_DIR_INPUT}