#!/bin/bash
# process QA output into plots and trending
# run without arguments for examples
# origin: Mikolaj Krzewicki, mkrzewic@cern.ch
#
if [ ${BASH_VERSINFO} -lt 4 ]; then
  echo "bash version >= 4 needed, you have ${BASH_VERSION}, exiting..."
  exit 1
fi

main()
{
  if [[ -z $1 ]]; then
    echo "Usage: "
    echo "  ${0##*/} option=value [option=value]"
    echo "  at least inputList should be specified, or configFile containing it:"
    echo "  ${0##*/} inputList=file.list"
    echo "  options override config file (if any), e.g.:"
    echo "  ${0##*/} configFile=runQA.config inputList=file.list outputDirectory=%det"
    return 1
  fi

  if ! parseConfig "$@"; then
    ${0}
    return 1
  fi

  [[ -z $ALICE_ROOT ]] && echo "ALICE_ROOT not defined" && return 1

  ocdbregex='raw://'
  if [[ ${ocdbStorage} =~ ${ocdbregex} ]]; then
    alien-token-init ${alienUserName}
    #this is a hack! alien-token-init alone does not seem to be enough,
    #but sourcing the gclient_env script messes up the LD_LIBRARY_PATH
    while read x; do
      eval ${x};
    done < <(grep -v "LD_LIBRARY_PATH" /tmp/gclient_env_${UID})
  fi

  updateQA "$@"
  return 0
}

updateQA()
{
  umask 0002
  parseConfig "$@"
  #be paranoid and make some full paths
  [[ ! -f ${inputList} ]] && echo "no input list: ${inputList}" && return 1
  inputList=$(get_realpath ${inputList})
  mkdir -p ${workingDirectory}
  workingDirectory=$(workingDirectory=${workingDirectory%/}; cd ${workingDirectory%/*}; echo "${PWD}/${workingDirectory##*/}")
  if [[ ! -d ${workingDirectory} ]]; then
    echo "working dir $workingDirectory does not exist and cannot be created"
    return 1
  fi
  cd ${workingDirectory}

  echo JOB config:
  echo inputList=$inputList
  echo outputDirectory=$outputDirectory
  echo

  dateString=$(date +%Y-%m-%d-%H-%M-%S-%N)
  echo "Start time QA process: $dateString"

  #logging
  mkdir -p $logDirectory
  [[ ! -d $logDirectory ]] && echo "no log dir $logDirectory" && return 1
  logFile="$logDirectory/${0##*/}.${dateString}.log"
  touch ${logFile}
  [[ ! -f ${logFile} ]] && echo "cannot write logfile $logFile" && return 1
  echo "logFile = $logFile"
  #check lock
  lockFile=${workingDirectory}/runQA.lock
  [[ -f ${lockFile} ]] && echo "lock ${lockFile} exists!" | tee ${logFile} && return 1
  touch ${lockFile}
  [[ ! -f ${lockFile} ]] && echo "cannot lock $lockFile" | tee ${logFile} && return 1

  exec &>${logFile}

  ################################################################
  #ze detector loop
  for detectorScript in $ALICE_ROOT/PWGPP/QA/detectorQAscripts/*; do
    echo
    echo "##############################################"
    echo $(date)
    unset planB
    [[ ! ${detectorScript} =~ .*\.sh$ ]] && continue
    detector=${detectorScript%.sh}
    detector=${detector##*/}
    #by default we expect the container in the QA root file to be named like
    #the detector
    detectorQAcontainerName=${detector}

    #skip if excluded
    if [[ "${excludeDetectors}" =~ ${detector} ]]; then
      echo "${detector} is excluded in config, skipping..."
      continue
    fi

    #if includeDetectors is set, only process the detectors specified there
    if [[ -n ${includeDetectors} && ! "${includeDetectors}" =~ ${detector} ]]; then
      echo "${detector} not included in includeDetectors, skipping..."
      continue
    fi

    logSummary=${logDirectory}/summary-${detector}-${dateString}.log
    hostInfo >> ${logSummary}
    outputDir=$(substituteDetectorName ${detector} ${outputDirectory})
    tmpDetectorRunDir=${workingDirectory}/tmpQAtmpRunDir${detector}-${dateString}
    if ! mkdir -p ${tmpDetectorRunDir}; then
      echo "cannot create the temp dir $tmpDetectorRunDir"
      continue
    fi
    cd ${tmpDetectorRunDir}

    tmpPrefix=${tmpDetectorRunDir}/${outputDir}
    echo "running QA for ${detector}"
    echo "  outputDir=$outputDir"
    echo "  tmpPrefix=$tmpPrefix"

    #source the detector script
    #unset the detector functions from previous iterations (detectors)
    unset -f runLevelQA
    unset -f periodLevelQA
    unset -f runLevelEventStatQA
    unset -f runLevelHighPtTreeQA
    unset -f periodLevelHighPtTreeQA
    source ${detectorScript}
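    #NOTE (illustrative sketch, not part of the original script): a detector
    #script sourced here is expected to define some of the hook functions
    #unset above; judging from how they are called below, a minimal example
    #could look like:
    #
    #  runLevelQA()
    #  {
    #    local qaFile=$1
    #    #produce per-run plots and (optionally) a trending.root in ${PWD}
    #  }
    #
    #  periodLevelQA()
    #  {
    #    local trendingFile=$1
    #    #produce period level plots from the merged trending file
    #  }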

    #################################################################
    #produce the QA and trending tree for each file (run)
    unset arrOfTouchedProductions
    declare -A arrOfTouchedProductions
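    #each line of ${inputList} is one input file per run; from the checks
    #below it is typically a QAresults.root, FilterEvents_Trees.root,
    #event_stat.root or a .zip archive containing them, and the path is used
    #by guessRunData() to extract period/run/pass, e.g. (illustrative path
    #only):
    #  /alice/data/2012/LHC12a/000176661/cpass1/QAresults.root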
    while read inputFile; do
      echo
      echo $(date)

      #first check if input file exists
      [[ ! -f ${inputFile%\#*} ]] && echo "file ${inputFile%\#*} not accessible" && continue

      if ! guessRunData ${inputFile}; then
        echo "could not guess run data from ${inputFile}"
        continue
      fi
      echo "anchorYear for ${originalPeriod} is: ${anchorYear}"

      tmpProductionDir=${tmpPrefix}/${dataType}/${year}/${period}/${pass}
      tmpRunDir=${tmpProductionDir}/000${runNumber}
      mkdir -p ${tmpRunDir}
      cd ${tmpRunDir}

      #check what kind of input file we have, default is a zip archive
      #set the inputs accordingly
      qaFile=""
      highPtTree=""
      eventStatFile=""
      #it is possible we get the highPt trees from somewhere else
      #search the list of high pt trees for the proper run number
      if [[ -n ${inputListHighPtTrees} ]]; then
        highPtTree=$(egrep -m1 ${runNumber} ${inputListHighPtTrees})
        echo "loaded the highPtTree ${highPtTree} from external file ${inputListHighPtTrees}"
      fi
      #if the input file is given explicitly it takes precedence
      #over the earlier assignments
      [[ "${inputFile}" =~ QAresults.root$ ]] && qaFile=${inputFile}
      [[ "${inputFile}" =~ FilterEvents_Trees.root$ ]] && highPtTree=${inputFile}
      [[ "${inputFile}" =~ event_stat.root$ ]] && eventStatFile=${inputFile}
      if [[ "${inputFile}" =~ \.zip$ ]]; then
        [[ -z ${qaFile} ]] && qaFile=${inputFile}
        [[ -z ${highPtTree} ]] && highPtTree=${inputFile}
        [[ -z ${eventStatFile} ]] && eventStatFile=${inputFile}
      fi

      #if we have zip archives in the input, extract the proper file name
      #from the archive and append it in a root-like fashion
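      #(the "file.zip#member.root" form should be understood by ROOT's
      # TFile::Open, so the archive member can be read without unpacking it)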
      if [[ "$qaFile" =~ \.zip$ ]]; then
        if unzip -l ${qaFile} | egrep "QAresults.root" &>/dev/null; then
          qaFile+="#QAresults.root"
        elif unzip -l ${qaFile} | egrep "QAresults_barrel.root" &>/dev/null; then
          qaFile+="#QAresults_barrel.root"
        else
          qaFile=""
        fi
      fi
      if [[ "$highPtTree" =~ \.zip$ ]]; then
        if unzip -l ${highPtTree} | egrep "FilterEvents_Trees.root" &>/dev/null; then
          highPtTree+="#FilterEvents_Trees.root"
        else
          highPtTree=""
        fi
      fi
      if [[ "${eventStatFile}" =~ \.zip$ ]]; then
        if unzip -l ${eventStatFile} | egrep "event_stat.root" &>/dev/null; then
          eventStatFile+="#event_stat.root"
        elif unzip -l ${eventStatFile} | egrep "event_stat_barrel.root" &>/dev/null; then
          eventStatFile+="#event_stat_barrel.root"
        else
          eventStatFile=""
        fi
      fi

      echo qaFile=$qaFile
      echo highPtTree=$highPtTree
      echo eventStatFile=$eventStatFile
      echo ocdbStorage=${ocdbStorage}
      echo

      #standard QA based on QAresults.root file (and variants)
      if [[ -n ${qaFile} && $(type -t runLevelQA) =~ "function" ]]; then
        echo running ${detector} runLevelQA for run ${runNumber} from ${qaFile}
        ( runLevelQA "${qaFile}" ) &>> runLevelQA.log
        #cache the touched production + an example file to guarantee consistent run data parsing
        arrOfTouchedProductions[${tmpProductionDir}]="${inputFile%\#*}"
      fi
      #expert QA based on high pt trees
      if [[ -n ${highPtTree} && $(type -t runLevelHighPtTreeQA) =~ "function" ]]; then
        echo running ${detector} runLevelHighPtTreeQA for run ${runNumber} from ${highPtTree}
        ( runLevelHighPtTreeQA "${highPtTree}" ) &>> runLevelQA.log
        #cache the touched production + an example file to guarantee consistent run data parsing
        arrOfTouchedProductions[${tmpProductionDir}]="${inputFile%\#*}"
      fi
      #event stat QA based on event_stat.root file
      if [[ -n ${eventStatFile} && $(type -t runLevelEventStatQA) =~ "function" ]]; then
        echo running ${detector} runLevelEventStatQA for run ${runNumber} from ${eventStatFile}
        ( runLevelEventStatQA "${eventStatFile}" ) &>> runLevelQA.log
        #cache the touched production + an example file to guarantee consistent run data parsing
        arrOfTouchedProductions[${tmpProductionDir}]="${inputFile%\#*}"
      fi

      #perform some default actions:
      #if trending.root not created, create a default one
      if [[ ! -f trending.root ]]; then
        aliroot -b -q -l "$ALICE_ROOT/PWGPP/macros/simpleTrending.C(\"${qaFile}\",${runNumber},\"${detectorQAcontainerName}\",\"trending.root\",\"trending\",\"recreate\")" 2>&1 | tee -a runLevelQA.log
      fi
      if [[ ! -f trending.root ]]; then
        echo "trending.root not created"
      fi

      cd ${tmpDetectorRunDir}

    done < ${inputList}

    #################################################################
    #cache which productions were (re)done
    echo "list of processed productions:"
    echo "    ${!arrOfTouchedProductions[@]}"
    echo

    #################################################################
    #(re)do the merging/trending
    for tmpProductionDir in ${!arrOfTouchedProductions[@]}; do
      cd ${tmpProductionDir}
      echo
      echo "running period level stuff in ${tmpProductionDir}"
      echo $(date)

      productionDir=${outputDir}/${tmpProductionDir#${tmpPrefix}}
      echo productionDir=${productionDir}

      mkdir -p ${productionDir}
      if [[ ! -d ${productionDir} ]]; then
        echo "cannot make productionDir $productionDir" && continue
      fi

      #move runs to final destination
      for dir in ${tmpProductionDir}/000*; do
        echo
        oldRunDir=${outputDir}/${dir#${tmpPrefix}}
        if ! guessRunData "${arrOfTouchedProductions[${tmpProductionDir}]}"; then
          echo "could not guess run data from ${arrOfTouchedProductions[${tmpProductionDir}]}"
          continue
        fi

        #before moving - VALIDATE!!!
        if ! validate ${dir}; then
          continue
        fi

        #moving a dir is an atomic operation, no locking necessary
        if [[ -d ${oldRunDir} ]]; then
          echo "removing old ${oldRunDir}"
          rm -rf ${oldRunDir}
        fi
        echo "moving new ${runNumber} to ${productionDir}"
        mv -f ${dir} ${productionDir}
      done

      #go to a temp dir to do the period level stuff in a completely clean dir
      tmpPeriodLevelQAdir="${tmpProductionDir}/periodLevelQA"
      echo
      echo tmpPeriodLevelQAdir="${tmpPeriodLevelQAdir}"
      if ! mkdir -p ${tmpPeriodLevelQAdir}; then continue; fi
      cd ${tmpPeriodLevelQAdir}

      #link the final list of per-run dirs here, just the dirs
      #to have a clean working directory
      unset linkedStuff
      declare -a linkedStuff
      for x in ${productionDir}/000*; do [[ -d $x ]] && ln -s $x && linkedStuff+=(${x##*/}); done

      #merge trending files if any
      if /bin/ls 000*/trending.root &>/dev/null; then
        hadd trending.root 000*/trending.root &> periodLevelQA.log
      fi

      #run the period level trending/QA
      if [[ -f "trending.root" && $(type -t periodLevelQA) =~ "function" ]]; then
        echo running ${detector} periodLevelQA for production ${period}/${pass}
        ( periodLevelQA trending.root ) &>> periodLevelQA.log
      else
        echo "WARNING: not running ${detector} periodLevelQA for production ${period}/${pass}, no trending.root"
      fi

      if ! validate ${PWD}; then continue; fi

      #here we are validated so move the produced QA to the final place
      #clean up the linked stuff first
      [[ ${#linkedStuff[@]} -gt 0 ]] && rm "${linkedStuff[@]}"
      periodLevelLock=${productionDir}/runQA.lock
      if [[ ! -f ${periodLevelLock} ]]; then
        #some of the output could be a directory, so handle that
        #TODO: maybe use rsync?
        #lock to avoid conflicts:
        echo "${HOSTNAME} ${dateString}" > ${periodLevelLock}
        for x in ${tmpPeriodLevelQAdir}/*; do
          if [[ -d ${x} ]]; then
            echo "removing ${productionDir}/${x##*/}"
            rm -rf ${productionDir}/${x##*/}
            echo "moving ${x} to ${productionDir}"
            mv ${x} ${productionDir}
          fi
          if [[ -f ${x} ]]; then
            echo "moving ${x} to ${productionDir}"
            mv -f ${x} ${productionDir}
          fi
        done
        rm -f ${periodLevelLock}
        #remove the temp dir
        rm -rf ${tmpPeriodLevelQAdir}
      else
        echo "ERROR: cannot move to destination"                     >> ${logSummary}
        echo "production dir ${productionDir} locked!"               >> ${logSummary}
        echo "check and maybe manually do:"                          >> ${logSummary}
        echo " rm ${periodLevelLock}"                                >> ${logSummary}
        echo " rsync -av ${tmpPeriodLevelQAdir}/ ${productionDir}/"  >> ${logSummary}
        planB=1
      fi

    done

    cd ${workingDirectory}

    if [[ -z ${planB} ]]; then
      echo
      echo removing ${tmpDetectorRunDir}
      rm -rf ${tmpDetectorRunDir}
    else
      executePlanB
    fi
  done #end of detector loop

  #remove lock
  rm -f ${lockFile}
  return 0
}

executePlanB()
{
  #in case of emergency
  #first check if we have the email of the detector expert defined,
  #if yes, append to the mailing list
  local mailTo=${MAILTO}
  local detExpertEmailVar="MAILTO_${detector}"
  [[ -n "${!detExpertEmailVar}" ]] && mailTo+=" ${!detExpertEmailVar}"
  if [[ -n ${mailTo} ]]; then
    echo
    echo "trouble detected, sending email to ${mailTo}"
    cat ${logSummary} | mail -s "${detector} QA in need of assistance" ${mailTo}
  fi
  return 0
}

validate()
{
  summarizeLogs ${1} >> ${logSummary}
  logStatus=$?
  if [[ ${logStatus} -ne 0 ]]; then
    echo "WARNING not validated: ${1}"
    planB=1
    return 1
  fi
  return 0
}

summarizeLogs()
{
  local dir=$1
  [[ ! -d ${dir} ]] && dir=${PWD}

  #print a summary of logs
  logFiles=(
      "*.log"
      "stdout"
      "stderr"
  )

  #check logs
  local logstatus=0
  for log in ${logFiles[@]/#/${dir}/}; do
    [[ ! -f ${log} ]] && continue
    errorSummary=$(validateLog ${log})
    validationStatus=$?
    [[ ${validationStatus} -ne 0 ]] && logstatus=1
    if [[ ${validationStatus} -eq 0 ]]; then
      #in pretend mode, randomly report an error for rec.log in some cases
      if [[ -n ${pretend} && "${log}" == "rec.log" ]]; then
        [[ $(( ${RANDOM}%2 )) -ge 1 ]] && echo "${log} BAD random error" || echo "${log} OK"
      else
        echo "${log} OK"
      fi
    elif [[ ${validationStatus} -eq 1 ]]; then
      echo "${log} BAD ${errorSummary}"
    elif [[ ${validationStatus} -eq 2 ]]; then
      echo "${log} OK MWAH ${errorSummary}"
    fi
  done

  #report core files
  while read x; do
    echo ${x}
    chmod 644 ${x}
    gdb --batch --quiet -ex "bt" -ex "quit" aliroot ${x} > stacktrace_${x//\//_}.log
  done < <(/bin/ls ${PWD}/*/core 2>/dev/null; /bin/ls ${PWD}/core 2>/dev/null)

  return ${logstatus}
}

validateLog()
{
  log=${1}
  errorConditions=(
            'There was a crash'
            'floating'
            'error while loading shared libraries'
            'std::bad_alloc'
            's_err_syswatch_'
            'Thread [0-9]* (Thread'
            'AliFatal'
            'core dumped'
            '\.C.*error:.*\.h: No such file'
            'segmentation'
            'Interpreter error recovered'
  )

  warningConditions=(
            'This is serious'
  )

  local logstatus=0
  local errorSummary=""
  local warningSummary=""

  for ((i=0; i<${#errorConditions[@]};i++)); do
    local tmp=$(grep -m1 -e "${errorConditions[${i}]}" ${log})
    [[ -n ${tmp} ]] && tmp+=" : "
    errorSummary+=${tmp}
  done

  for ((i=0; i<${#warningConditions[@]};i++)); do
    local tmp=$(grep -m1 -e "${warningConditions[${i}]}" ${log})
    [[ -n ${tmp} ]] && tmp+=" : "
    warningSummary+=${tmp}
  done

  if [[ -n ${errorSummary} ]]; then
    echo "${errorSummary}"
    return 1
  fi

  if [[ -n ${warningSummary} ]]; then
    echo "${warningSummary}"
    return 2
  fi

  return 0
}

parseConfig()
{
  args=("$@")

  #config file
  configFile=""
  #list of input qa files
  inputList=file.list
  #working directory
  workingDirectory="${PWD}"
  #where to place the final qa plots
  #outputDirectory="/afs/cern.ch/work/a/aliqa%det/www/"
  outputDirectory="${workingDirectory}/%DET"
  #detectors to exclude
  excludeDetectors="EXAMPLE"
  #logs
  logDirectory=${workingDirectory}/logs
  #OCDB storage
  ocdbStorage="raw://"
  #email to
  #MAILTO="fbellini@cern.ch"

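  #a config file (sourced below) simply sets these variables using bash
  #syntax; an illustrative runQA.config could look like this (all values are
  #examples only, not defaults of this script):
  #  inputList=/some/path/qa.list
  #  outputDirectory="/some/www/dir/%det"
  #  logDirectory=/some/path/logs
  #  ocdbStorage="raw://"
  #  alienUserName=someuser
  #  MAILTO="someone@example.com"
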
  #first, check if a config file is specified
  #if yes - source it so that other options can override it
  #if any
  for opt in "${args[@]}"; do
    if [[ ${opt} =~ configFile=.* ]]; then
      eval "${opt}"
      [[ ! -f ${configFile} ]] && echo "configFile ${configFile} not found, exiting..." && return 1
      echo "using config file: ${configFile}"
      source "${configFile}"
      break
    fi
  done

  #then, parse the options as they override the options from file
  for opt in "${args[@]}"; do
    if [[ ! "${opt}" =~ .*=.* ]]; then
      echo "badly formatted option ${opt}, should be: option=value, stopping..."
      return 1
    fi
    local var="${opt%%=*}"
    local value="${opt#*=}"
    echo "${var}=${value}"
    export ${var}="${value}"
  done
  return 0
}

guessRunData()
{
  #guess the period from the path, pick the rightmost one
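  #e.g. (illustrative path only) for
  #  /alice/data/2012/LHC12a/000176661/cpass1/QAresults.root
  #this yields dataType=data, year=2012, period=LHC12a,
  #runNumber=176661 and pass=cpass1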
  period=""
  runNumber=""
  year=""
  pass=""
  legoTrainRunNumber=""
  dataType=""
  originalPass=""
  originalPeriod=""
  anchorYear=""

  shortRunNumber=""
  oldIFS=${IFS}
  local IFS="/"
  declare -a path=( $1 )
  IFS="${oldIFS}"
  local dirDepth=$(( ${#path[*]}-1 ))
  i=0
  for ((x=${dirDepth};x>=0;x--)); do

    [[ $((x-1)) -ge 0 ]] && local fieldPrev=${path[$((x-1))]}
    local field=${path[${x}]}
    local fieldNext=${path[$((x+1))]}

    [[ ${field} =~ ^[0-9]*$ && ${fieldNext} =~ (.*\.zip$|.*\.root$) ]] && legoTrainRunNumber=${field}
    [[ -n ${legoTrainRunNumber} && -z ${pass} ]] && pass=${fieldPrev}
    [[ ${field} =~ ^LHC[0-9][0-9][a-z].*$ ]] && period=${field%_*} && originalPeriod=${field}
    [[ ${field} =~ ^000[0-9][0-9][0-9][0-9][0-9][0-9]$ ]] && runNumber=${field#000}
    [[ ${field} =~ ^[0-9][0-9][0-9][0-9][0-9][0-9]$ ]] && shortRunNumber=${field}
    [[ ${field} =~ ^20[0-9][0-9]$ ]] && year=${field}
    [[ ${field} =~ ^(^sim$|^data$) ]] && dataType=${field}
    (( i++ ))
  done
  originalPass=${pass}
  [[ -n ${shortRunNumber} && "${legoTrainRunNumber}" =~ ${shortRunNumber} ]] && legoTrainRunNumber=""
  [[ -z ${legoTrainRunNumber} ]] && pass=${path[$((dirDepth-1))]}
  [[ "${dataType}" =~ ^sim$ ]] && pass="passMC" && runNumber=${shortRunNumber} && originalPass="" #for MC not from lego, the run number is identified as the lego train number, thus needs to be nulled
  [[ -n ${legoTrainRunNumber} ]] && pass+="_lego${legoTrainRunNumber}"

  #modify the OCDB: set the year
  if [[ ${dataType} =~ sim ]]; then
    anchorYear=$(run2year $runNumber)
    if [[ -z "${anchorYear}" ]]; then
      echo "WARNING: anchorYear not available for this production: ${originalPeriod}, runNumber: ${runNumber}. Cannot set the OCDB."
      return 1
    fi
    ocdbStorage=$(setYear ${anchorYear} ${ocdbStorage})
  else
    ocdbStorage=$(setYear ${year} ${ocdbStorage})
  fi

  #if [[ -z ${dataType} || -z ${year} || -z ${period} || -z ${runNumber}} || -z ${pass} ]];
  if [[ -z ${runNumber} ]]
  then
    #error condition
    return 1
  fi

  #ALL OK
  return 0
}

run2year()
{
  #for a given run print the year.
  #the run-year table is ${runMap} (a string)
  #defined in the config file
  #one line per year, format: year runMin runMax
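  #e.g. (illustrative placeholder values only):
  #  runMap="
  #  2010 105000 139999
  #  2011 140000 170999
  #  "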
  local run=$1
  [[ -z ${run} ]] && return 1
  local year=""
  local runMin=""
  local runMax=""
  while read year runMin runMax; do
    [[ -z ${year} || -z ${runMin} || -z ${runMax} ]] && continue
    [[ ${run} -ge ${runMin} && ${run} -le ${runMax} ]] && echo ${year} && break
  done < <(echo "${runMap}")
  return 0
}

substituteDetectorName()
{
  local det=$1
  local dir=$2
  [[ ${dir} =~ \%det ]] && det=${det,,} && echo ${dir/\%det/${det}}
  [[ ${dir} =~ \%DET ]] && echo ${dir/\%DET/${det}}
  return 0
}
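#e.g. substituteDetectorName TPC /some/dir/%det prints /some/dir/tpc,
#     substituteDetectorName TPC /some/dir/%DET prints /some/dir/TPC
#(illustrative values; %det is replaced by the lowercased detector name)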

get_realpath()
{
  if [[ -f "$1" ]]
  then
    # file *must* exist
    if cd "$(echo "${1%/*}")" &>/dev/null
    then
      # file *may* not be local
      # exception is ./file.ext
      # try 'cd .; cd -;' *works!*
      local tmppwd="$PWD"
      cd - &>/dev/null
    else
      # file *must* be local
      local tmppwd="$PWD"
    fi
  else
    # file *cannot* exist
    return 1 # failure
  fi
  # reassemble realpath
  echo "$tmppwd"/"${1##*/}"
  return 0 # success
}
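#e.g. get_realpath ./file.list prints the absolute path of file.list
#(the file must exist, otherwise the function returns 1)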

setYear()
{
  #set the year
  #  ${1} - year to be set
  #  ${2} - where to set the year
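  #  e.g. (illustrative path only): setYear 2012 alien://folder/2011/OCDB
  #  prints alien://folder/2012/OCDB - the year component in ${2} is swapped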
  local year1=$(guessYear ${1})
  local year2=$(guessYear ${2})
  local path=${2}
  [[ ${year1} -ne ${year2} && -n ${year2} && -n ${year1} ]] && path=${2/\/${year2}\//\/${year1}\/}
  echo ${path}
  return 0
}

guessYear()
{
  #guess the year from the path, pick the rightmost one
  local IFS="/"
  declare -a pathArray=( ${1} )
  local field
  local year
  for field in ${pathArray[@]}; do
    [[ ${field} =~ ^20[0-9][0-9]$ ]] && year=${field}
  done
  echo ${year}
  return 0
}

hostInfo(){
#
# Hello world - Print AliRoot/Root/Alien system info
#

#
# HOST info
#
        echo --------------------------------------
        echo
        echo HOSTINFO
        echo
        echo HOSTINFO HOSTNAME"      "$HOSTNAME
        echo HOSTINFO DATE"          "`date`
        echo HOSTINFO gccpath"       "`which gcc`
        echo HOSTINFO gcc version"   "`gcc --version | grep gcc`
        echo --------------------------------------

#
# ROOT info
#
        echo --------------------------------------
        echo
        echo ROOTINFO
        echo
        echo ROOTINFO ROOT"           "`which root`
        echo ROOTINFO VERSION"        "`root-config --version`
        echo
        echo --------------------------------------


#
# ALIROOT info
#
        echo --------------------------------------
        echo
        echo ALIROOTINFO
        echo
        echo ALIROOTINFO ALIROOT"        "`which aliroot`
        echo ALIROOTINFO VERSION"        "`echo $ALICE_LEVEL`
        echo ALIROOTINFO TARGET"         "`echo $ALICE_TARGET`
        echo
        echo --------------------------------------

#
# Alien info
#
#echo --------------------------------------
#echo
#echo ALIENINFO
#for a in `alien --printenv`; do echo ALIENINFO $a; done
#echo
#echo --------------------------------------

#
# Local Info
#
        echo PWD `pwd`
        echo Dir
        ls -al
        echo
        echo
        echo

  return 0
}

main "$@"