#!/usr/bin/env bash
#------------------------------------------------------------------------------
# submit_Production: submit CDF Production validation jobs to a CAF farm,
# or run them locally (farm "fcdflnx3").
#
# example:
#
#  cdfopr/scripts/submit_Production [-v debug_mode] -r 5.1.0pre6 -d d0 -s job1:job2
#
#  debug_mode: "debug", "debug_only"
#
# assumptions:
# ------------
# - Production tarball used: Production_5.1.0pre6.tar.gz
# - output goes to fcdfdata033
# - input is defined by the cdfopr/tcl/Production_d0 etc tcl files
# - "-x A=value" on the command line translates into "export A=value"
#
# if dataset="look_area",   assume Stream I on FCDFSGI2,
#                           run number has to be also specified
#
# if dataset="special_run", assume data to be read from DCACHE;
#                           get list of files for the specified
#                           run number from DFC
#
# to run locally on LNX3 :  -f fcdflnx3
#
# Examples:
#
# ./cdfopr/scripts/submit_Production -r 6.0.0int3_maxopt_pass10A \
#                                    -d special_run              \
#                                    -q GCC_3_4-maxopt           \
#                                    -R 190866:h                 \
#                                    -f caf                      \
#                                    -o cdfopr@fcdfdata034.fnal.gov
#
# ./cdfopr/scripts/submit_Production -d 10Ktest                  \
#                                    -q maxopt                   \
#                                    -f fcdflnx3                 \
#                                    -p 1                        \
#                                    -s 1:3
#------------------------------------------------------------------------------
#------------------------------------------------------------------------------
# defaults; many are exported because downstream scripts (run.sh etc) read them
#------------------------------------------------------------------------------
dataset=
output_node=cdfopr@fcdfdata033.fnal.gov
job1=1
job2=
qual=default
dhaccess=DFC
export CAF=caf
queue=long
export WORK_DIR=$PWD
export NEVENTS=
export RUN_NUMBER=
export RAW_STREAM=
export LIST_OF_FILES=
export DEBUG_SCRIPT=1
export NEW_ENV_VAR=
export JOB_NUMBER=1
export PASS=pass1
export tarball=
export SRT_QUAL=default
#------------------------------------------------------------------------------
# print a short usage summary on stdout
#------------------------------------------------------------------------------
usage() {
    echo usage: submit_Production [-v debug] -d dataset [-n nevents] \
         -r release [-o output_node] [-s section1:section2] [-x VAR=value]
    echo example: submit_Production -v -d zmumu -r 5.1.0a_maxopt \
         -s 1:100 -o cdfopr@fcdfdata034.fnal.gov
}
#------------------------------------------------------------------------------
# refresh credentials; presumably renews the kerberos ticket - TODO confirm
#------------------------------------------------------------------------------
$HOME/scripts/ticket

export OPTIND=1
# NOTE: "t:" was missing from the optstring, making the -t (tarball) option
#       unreachable; it is now included
while getopts :d:hn:o:p:q:r:R:s:t:v:x:f: OPT; do
    case $OPT in
        d)  # input dataset name, optionally "name:dsid"
            dataset=${OPTARG}
            dsid=$(echo $dataset | awk -F : '{print $2}')
            # the LOOK area is read via rootd rather than DFC
            if [ .$dataset == ".look_area" ] ; then dhaccess=rootd ; fi
            if [ .$DEBUG_SCRIPT != "." ] ; then
                echo dataset=$dataset
                echo dsid=$dsid
            fi
            ;;
        h)  # help
            usage
            exit
            ;;
        n)  # number of events to process
            export NEVENTS=$OPTARG
            if [ .$DEBUG_SCRIPT != "." ] ; then echo NEVENTS=$NEVENTS ; fi
            ;;
        o)  # output node name (user@host)
            output_node=${OPTARG}
            ;;
        f)  # farm (e.g. caf, cafcondor), optionally "farm:queue"
            farm=${OPTARG}
            export CAF=$(echo $OPTARG | awk -F : '{print $1}')
            x=$(echo $OPTARG | awk -F : '{print $2}')
            if [ .$x != "." ] ; then queue=$x ; fi
            ;;
        p)  # pass number, stored as "passN"
            export PASS=pass${OPTARG}
            if [ .$DEBUG_SCRIPT != "." ] ; then echo PASS=$PASS ; fi
            ;;
        q)  # SRT build qualifier (e.g. maxopt)
            qual=${OPTARG}
            export SRT_QUAL=$OPTARG
            if [ .$DEBUG_SCRIPT != "." ] ; then echo SRT_QUAL=$qual ; fi
            ;;
        r)  # offline version
            export OFFLINE_VERSION=$OPTARG
            if [ .$DEBUG_SCRIPT != "." ] ; then
                echo OFFLINE_VERSION=$OFFLINE_VERSION
            fi
            ;;
        R)  # run number (express mode), optionally "run:raw_stream"
            export RUN_NUMBER=$OPTARG
            if [ .$OPTARG != "." ] ; then
                export RUN_NUMBER=$(echo $OPTARG | sed 's/:/ /g' | awk '{print $1}')
                export RAW_STREAM=$(echo $OPTARG | sed 's/:/ /g' | awk '{print $2}')
            fi
            if [ .$DEBUG_SCRIPT != "." ] ; then
                echo RUN_NUMBER=$RUN_NUMBER
                echo RAW_STREAM=$RAW_STREAM
            fi
            ;;
        s)  # job sections "first[:last]"
            job1=$(echo $OPTARG | sed 's/:/ /g' | awk '{print $1}')
            x=$(echo $OPTARG | sed 's/:/ /g' | awk '{print $2}')
            if [ .$x != "." ] ; then
                job2=$x
            fi
            if [ .$DEBUG_SCRIPT != "." ] ; then echo job1=$job1 , job2=$job2 ; fi
            ;;
        t)  # explicit tarball path (skips the default tarball naming below)
            tarball=$OPTARG
            if [ .$DEBUG_SCRIPT != "." ] ; then echo tarball=$tarball ; fi
            ;;
        v)  # debug script mode; should go first on the command line
            export DEBUG_SCRIPT=$OPTARG
            ;;
        x)  # export an additional VAR=value into the job environment
            export $OPTARG
            export NEW_ENV_VAR=${NEW_ENV_VAR}" -x "$OPTARG
            if [ .$DEBUG_SCRIPT != "." ] ; then
                echo $NEW_ENV_VAR
            fi
            ;;
        *)
            echo "**** wrong input parameter:" $OPTARG
            usage
            exit 1
            ;;
    esac
done
#------------------------------------------------------------------------------
# figure out tarball and output directory; the tarball name encodes the
# offline version plus (non-default) SRT qualifier
#------------------------------------------------------------------------------
if [ .$tarball == "." ] ; then
    label=$OFFLINE_VERSION
    echo check.003: label=$label
    if [ .$SRT_QUAL != ".default" ] ; then label=${OFFLINE_VERSION}_${SRT_QUAL} ; fi
    echo check.004: label=$label SRT_QUAL=$SRT_QUAL
    tarball=/cdf/scratch/cdfopr/tarballs/ProductionExe_$label.tar.gz
    echo check.002: tarball=$tarball
    output_dir=/cdf/scratch/cdfopr/validation_data/$label/$dataset
else
    output_dir=/cdf/scratch/cdfopr/validation_data/$OFFLINE_VERSION/$dataset
fi

tcl_file=$PWD/cdfopr/tcl/Production_$dataset

if [ .$dataset == ."look_area" ] ; then
#------------------------------------------------------------------------------
# assume that the LOOK area is NFS mounted - the case since Dec'04;
# one job section per input file, run number is matched in hex
#------------------------------------------------------------------------------
    rn=$(printf "%x" $RUN_NUMBER)
    export LIST_OF_FILES=$(ls /cdf/data/look/${RAW_STREAM}physr | grep $rn | awk '{print $1}' | sort)
    list=($LIST_OF_FILES)
    job2=${#list[@]}
    output_dir=/cdf/scratch/cdfopr/specialRun/$RUN_NUMBER
    echo LIST_OF_FILES=$LIST_OF_FILES
    echo job2=$job2
elif [ .$dataset == ."special_run" ] ; then
#------------------------------------------------------------------------------
# processing of the special run: number of sections = number of files in DFC
#------------------------------------------------------------------------------
    output_dir=/cdf/scratch/cdfopr/specialRun/$RUN_NUMBER
    if [ .$job2 == "." ] ; then
        job2=$($WORK_DIR/cdfopr/scripts/dfc_get_list_of_files $RUN_NUMBER $RAW_STREAM | wc -l)
        job2=$(echo $job2 | sed 's/:/ /g' | awk '{print $1}')
    fi
elif [ .$dataset == ."10Ktest" ] ; then
#------------------------------------------------------------------------------
# 10K test, expect PASS to be specified
#------------------------------------------------------------------------------
    output_dir=/cdf/scratch/cdfopr/validation_data/${OFFLINE_VERSION}/$dataset/$PASS
else
    # default: number of sections comes from the NJOBS line of the tcl file
    if [ .$job2 == "." ] ; then
        job2=$(cat $tcl_file | grep NJOBS | awk '{print $4}')
    fi
fi
#------------------------------------------------------------------------------
# choose the accounting group; condor farms use "common"
#------------------------------------------------------------------------------
group=opr
if [ .$farm == ."cafcondor" ] ; then
    group=common
fi
if [ .$farm == "." ] ; then
    # no -f given: fall back to the current CAF environment
    if [ .$CAF_CURRENT == ."cafcondor" ] ; then
        group=common
        export CAF="cafcondor"
    fi
fi
#------------------------------------------------------------------------------
# set up the CDF offline environment for the requested base release/qualifier
#------------------------------------------------------------------------------
source ~cdfsoft/cdf2.shrc
setup cdfsoft2 $(cat .base_release)
echo srt_setup -a SRT_QUAL=$qual
srt_setup -a SRT_QUAL=$qual
echo BFARCH=$BFARCH
#------------------------------------------------------------------------------
# build the parameter string handed to cdfopr/scripts/run.sh
#------------------------------------------------------------------------------
parameters="-e bin/$BFARCH/ProductionExe"
parameters=${parameters}" -I cdfopr/tcl/Production_master_jpsi.tcl"
parameters=${parameters}" -d $dataset"
if [ .$dataset == ."10Ktest" ] ; then
    parameters=${parameters}" -j $PASS"
fi
if [ .$RUN_NUMBER != "." ] ; then
    if [ .$RAW_STREAM != "." ] ; then
        parameters=${parameters}" -r ${RUN_NUMBER}:${RAW_STREAM}"
    else
        parameters=${parameters}" -r ${RUN_NUMBER}"
    fi
fi
if [ .$NEVENTS != "." ] ; then parameters=${parameters}" -n $NEVENTS" ; fi
if [ ."$NEW_ENV_VAR" != "." ] ; then parameters=${parameters}$NEW_ENV_VAR ; fi
parameters=${parameters}" -x CAF=$CAF"

if [ "$CAF" == "fcdflnx3" ] ; then
#------------------------------------------------------------------------------
# run locally: one background run.sh per section, staggered by 10 seconds
#------------------------------------------------------------------------------
    for ((job=$job1; job<=$job2; job++)) ; do
        cmd="./cdfopr/scripts/run.sh -J $job $parameters"
        if [ ."$DEBUG_SCRIPT" != "." ] ; then
            echo $cmd -o ${output_node}:$output_dir
        fi
        if [ ".$DEBUG_SCRIPT" != ".debug_only" ] ; then
            $cmd -o ${output_node}:$output_dir &
            sleep 10
        fi
    done
else
#------------------------------------------------------------------------------
# submit to the CAF; the literal "$" in the output filename and run.sh command
# is substituted with the section number by CafSubmit
#------------------------------------------------------------------------------
    cmd="./cdfopr/scripts/run.sh -J $job1 $parameters"
    source ~cdfsoft/cdf2.shrc
    setup cdfsoft2 development
    export CAF_ON_DH_WARNING=ignore
    if [ ."$DEBUG_SCRIPT" != "." ] ; then
        echo CafSubmit --tarFile=$tarball \
             --outLocation=${output_node}:$output_dir/validation_$dataset.$.tgz \
             --dhaccess=$dhaccess \
             --email=aafke@fnal.gov,robson@fnal.gov \
             --procType=$queue \
             --group=$group \
             --farm=$CAF \
             --start=$job1 \
             --end=$job2 "./cdfopr/scripts/run.sh -v -J $ $parameters -o ${output_node}:$output_dir"
        if [ ."$DEBUG_SCRIPT" == ."debug_only" ] ; then exit ; fi
    fi

    CafSubmit --tarFile=$tarball \
              --outLocation=${output_node}:$output_dir/validation_$dataset.$.tgz \
              --dhaccess=$dhaccess \
              --email=aafke@fnal.gov,robson@fnal.gov \
              --procType=$queue \
              --group=$group \
              --farm=$CAF \
              --start=$job1 \
              --end=$job2 "./cdfopr/scripts/run.sh -v -J $ $parameters -o ${output_node}:$output_dir"
fi
#-------------------- end ----------------------------------------------------