#!/usr/bin/env bash
#-----------------------------------------------------------------------
# runs one job
# call: concat_dst dataset_id [max_n_jobs [verbose]]
#
# assume that everything is distributed over the network
#
# Picks the oldest pending request file from $BOOK_DIR/requests, claims
# it by renaming it to .<request>.tmp, runs AC++Dump on it, moves the
# produced output files into $OUTPUT_DIR and hides the consumed input
# files.  At most $max_n_jobs requests are processed concurrently.
#-----------------------------------------------------------------------
# . $HOME/.bashrc
DATA_SERVER=fcdflnx3.fnal.gov
CATALOG_SERVER=fcdflnx3.fnal.gov

export DATASET_ID=$1
# optional overrides: $2 = max number of concurrent jobs, $3 = verbosity
max_n_jobs=2 ; if [ ."${2:-}" != "." ] ; then max_n_jobs=$2 ; fi
VERBOSE=0    ; if [ ."${3:-}" != "." ] ; then VERBOSE=$3    ; fi

. "$HOME/scripts/common_procedures"

RELEASE_DIR=/cdf/scratch/cdfopr/testRel/5.3.1_physics
JOB_OUTPUT_DIR=$RELEASE_DIR/results/$$

# input dataset -> output dataset name (unset for unknown datasets,
# exactly as in the original if-chain)
case "$DATASET_ID" in
  bhel0d) OUTPUT_DATASET=bewk0d ;;
  bhmu0d) OUTPUT_DATASET=bewk2d ;;
  bhel0c) OUTPUT_DATASET=zewk0c ;;
esac

name=concat_dst
remote=""
WORK_DIR=/cdf/opr2/cdfopr/strip/$DATASET_ID
if [ "$(hostname)" != "$CATALOG_SERVER" ] ; then
  # NOTE(review): in remote mode WORK_DIR becomes "host:/path" and is then
  # handed to ls/cat below; presumably common_procedures or the rsh wrapper
  # copes with that form — TODO confirm before touching it.
  WORK_DIR=${CATALOG_SERVER}:$WORK_DIR
  remote="rsh -x -N -n -l cdfopr $CATALOG_SERVER"
fi

export OUTPUT_DIR=/cdf/opr2/cdfopr/strip/$DATASET_ID/.concatenated
export BOOK=cdfpewk
BOOK_DIR=$WORK_DIR/.book

echo remote=$remote WORK_DIR=$WORK_DIR

# $remote is intentionally unquoted: it must word-split into
# "rsh -x ... host" (or into nothing when running locally).
lock=$($remote ls $BOOK_DIR | grep lock)
array_of_requests=($($remote ls $BOOK_DIR/requests))
n_requests=${#array_of_requests[@]}
if [ "$n_requests" -eq 0 ] ; then
  # all done, exit
  exit
fi

request_file=${array_of_requests[0]}
logfile=$BOOK_DIR/log/$request_file.log

echo lock=$lock
if [ ."$lock" != "." ] ; then
  echo "$(date): $name detected $lock , bailing out" >> "$logfile"
  exit 1
fi

echo "$(date): $name started" >> "$logfile"

# claimed-but-unfinished requests are the hidden .<name>.tmp files;
# when there are none, ls fails and wc -l correctly reports 0
n_running_jobs=$($remote ls $BOOK_DIR/requests/.*.tmp | wc -l)
echo " " n_running_jobs=$n_running_jobs
echo " " n_running_jobs=$n_running_jobs >> "$logfile"
if [ "$n_running_jobs" -ge "$max_n_jobs" ] ; then
  # message now matches the -ge test (was "gt")
  echo " " n_running_jobs ge $max_n_jobs , bailing out >> "$logfile"
  exit
fi

# claim the request: rename it to a hidden .tmp so a concurrent
# invocation will not pick the same one (poor man's lock)
$remote mv $BOOK_DIR/requests/$request_file $BOOK_DIR/requests/.$request_file.tmp
export TCLFILE=$BOOK_DIR/requests/.$request_file.tmp

echo BOOK=$BOOK             >> "$logfile"
echo DATASET_ID=$DATASET_ID >> "$logfile"
echo OUTPUT_DIR=$OUTPUT_DIR >> "$logfile"

# set up the CDF offline environment and run the concatenation job
cd "$RELEASE_DIR" || exit 1
source ~cdfsoft/cdf2.shrc
setup cdfsoft2 $(cat .base_release)
srt_setup -a

mkdir -p "$JOB_OUTPUT_DIR"
cd "$JOB_OUTPUT_DIR" || exit 1

# single redirection instead of two independent appends to the same file
AC++Dump "$TCLFILE" >> "$logfile" 2>&1
rc=$?
echo "AC++Dump finished with rc=$rc" >> "$logfile"

if [ "$rc" = "0" ] ; then
  echo "removing original request : $BOOK_DIR/requests/$request_file" >> "$logfile"
  $remote mv $BOOK_DIR/requests/.$request_file.tmp \
             $BOOK_DIR/requests/.$request_file.done
#-----------------------------------------------------------------------
# output files are recognized by chars 2-5 of the output dataset name
#-----------------------------------------------------------------------
  x1=$(echo $OUTPUT_DATASET | awk '{ print substr($1,2,4) }')
  files=$(ls | grep "$x1")
  for f in $files ; do
    echo mv $f $OUTPUT_DIR/. >> "$logfile"
    mv $f $OUTPUT_DIR/.
    rc=$?
    if [ "$rc" != 0 ] ; then
      # now logged like every other diagnostic (redirect was missing)
      echo -- mv $f $OUTPUT_DIR/. failed with rc=$rc, bailing out >> "$logfile"
      exit $rc
    else
#-----------------------------------------------------------------------
# can delete input files, figure what they are: the 'include' lines of
# the request tcl.  NOTE(review): this runs once per output file, so on
# the second pass the inputs are already hidden and the mv's fail
# silently — kept as-is, it is harmless but worth confirming.
#-----------------------------------------------------------------------
      input_list=$(cat $BOOK_DIR/requests/.$request_file.done | \
                   grep include | grep -v show | awk '{ print $3}')
      for ff in $input_list ; do
        echo mv $ff $(dirname $ff)/.$(basename $ff) >> "$logfile"
        mv $ff $(dirname $ff)/.$(basename $ff)
      done
    fi
  done
#-----------------------------------------------------------------------
# everything goes well, remove the working directory (rmdir: only if empty)
#-----------------------------------------------------------------------
  rmdir "$JOB_OUTPUT_DIR"
fi

echo "$(date): $name finished with RC=$rc" >> "$logfile"
exit $rc
#-------------------------------------------------------------------------------