#!/bin/bash
if [ ${BASH_VERSINFO} -lt 4 ]; then
  echo "bash version >= 4 needed, you have ${BASH_VERSION}, exiting..."
  exit 1
fi

main()
{
  if [[ -z $1 ]]; then
    echo "Usage: "
    echo "  ${0##*/} option=value [option=value]"
    echo "  at least inputList should be specified, or configFile containing it:"
    echo "  ${0##*/} inputList=file.list"
    echo "  options override config file (if any), e.g.:"
    echo "  ${0##*/} configFile=runQA.config inputList=file.list outputDirectory=%det"
    return 1
  fi

  if ! parseConfig "$@"; then
    ${0}
    return 1
  fi

  [[ -z $ALICE_ROOT ]] && echo "ALICE_ROOT not defined" && return 1

  ocdbregex='raw://'
  if [[ ${ocdbStorage} =~ ${ocdbregex} ]]; then
    alien-token-init ${alienUserName}
    #this is a hack! alien-token init seems not enough
    #but the gclient_env script messes up the LD_LIBRARY_PATH
    while read x; do
      eval ${x};
    done < <(grep -v "LD_LIBRARY_PATH" /tmp/gclient_env_${UID})
  fi

  updateQA "$@"
  return 0
}

updateQA()
{
  umask 0002
  parseConfig "$@"

  #be paranoid and make some full paths
  [[ ! -f ${inputList} ]] && echo "no input list: ${inputList}" && return 1
  inputList=$(get_realpath ${inputList})
  mkdir -p ${workingDirectory}
  workingDirectory=$(workingDirectory=${workingDirectory%/}; cd ${workingDirectory%/*}; echo "${PWD}/${workingDirectory##*/}")
  if [[ ! -d ${workingDirectory} ]]; then
    echo "working dir $workingDirectory does not exist and cannot be created"
    return 1
  fi
  cd ${workingDirectory}

  echo JOB config:
  echo inputList=$inputList
  echo outputDirectory=$outputDirectory
  echo

  dateString=$(date +%Y-%m-%d-%H-%M-%S-%N)
  echo "Start time QA process: $dateString"

  #logging
  mkdir -p $logDirectory
  [[ ! -d $logDirectory ]] && echo "no log dir $logDirectory" && return 1
  logFile="$logDirectory/${0##*/}.${dateString}.log"
  touch ${logFile}
  [[ ! -f ${logFile} ]] && echo "cannot write logfile $logFile" && return 1
  echo "logFile = $logFile"

  #check lock
  lockFile=${workingDirectory}/runQA.lock
  [[ -f ${lockFile} ]] && echo "lock ${lockFile} exists!" | tee ${logFile} && return 1
  touch ${lockFile}
  [[ ! -f ${lockFile} ]] && echo "cannot lock $lockFile" | tee ${logFile} && return 1

  exec &>${logFile}

  ################################################################
  #ze detector loop
  for detectorScript in $ALICE_ROOT/PWGPP/QA/detectorQAscripts/*; do
    echo
    echo "##############################################"
    echo $(date)
    unset planB
    [[ ! ${detectorScript} =~ .*\.sh$ ]] && continue
    detector=${detectorScript%.sh}
    detector=${detector##*/}
    #by default we expect the container in the QA root file to be named like
    #the detector
    detectorQAcontainerName=${detector}

    #skip if excluded
    if [[ "${excludeDetectors}" =~ ${detector} ]]; then
      echo "${detector} is excluded in config, skipping..."
      continue
    fi

    #if includeDetectors is set, only process the detectors specified there
    if [[ -n ${includeDetectors} && ! "${includeDetectors}" =~ ${detector} ]]; then
      echo "${detector} not included in includeDetectors, skipping..."
      continue
    fi

    logSummary=${logDirectory}/summary-${detector}-${dateString}.log
    hostInfo >> ${logSummary}
    outputDir=$(substituteDetectorName ${detector} ${outputDirectory})
    tmpDetectorRunDir=${workingDirectory}/tmpQAtmpRunDir${detector}-${dateString}
    if ! mkdir -p ${tmpDetectorRunDir}; then
      echo "cannot create the temp dir $tmpDetectorRunDir"
      continue
    fi
    cd ${tmpDetectorRunDir}

    tmpPrefix=${tmpDetectorRunDir}/${outputDir}
    echo "running QA for ${detector}"
    echo "  outputDir=$outputDir"
    echo "  tmpPrefix=$tmpPrefix"

    #unset the detector functions from previous iterations (detectors)
    unset -f runLevelQA
    unset -f periodLevelQA
    unset -f runLevelHighPtTreeQA
    unset -f periodLevelHighPtTreeQA
    source ${detectorScript}

    #################################################################
    #produce the QA and trending tree for each file (run)
    unset arrOfTouchedProductions
    declare -A arrOfTouchedProductions
    while read qaFile; do
      echo
      echo $(date)

      #first check if input file exists
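      #(note: ${qaFile%\#*} strips a possible "#member.root" suffix, so we test the file/archive itself)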
      [[ ! -f ${qaFile%\#*} ]] && echo "file ${qaFile%\#*} not accessible" && continue

      if ! guessRunData ${qaFile}; then
        echo "could not guess run data from ${qaFile}"
        continue
      fi
      echo "anchorYear for ${originalPeriod} is: ${anchorYear}"

      tmpProductionDir=${tmpPrefix}/${dataType}/${year}/${period}/${pass}
      tmpRunDir=${tmpProductionDir}/000${runNumber}
      mkdir -p ${tmpRunDir}
      cd ${tmpRunDir}

      #by default we expect to have everything in the same archive
      highPtTree=${qaFile}

      #maybe the input is not an archive, but a file
      [[ "${qaFile}" =~ QAresults.root$ ]] && highPtTree=""
      [[ "${qaFile}" =~ FilterEvents_Trees.root$ ]] && qaFile=""

      #it is possible we get the highPt trees from somewhere else
      #search the list of high pt trees for the proper run number
      if [[ -n ${inputListHighPtTrees} ]]; then
        highPtTree=$(egrep -m1 ${runNumber} ${inputListHighPtTrees})
        echo "loaded the highPtTree ${highPtTree} from external file ${inputListHighPtTrees}"
      fi

      echo qaFile=$qaFile
      echo highPtTree=$highPtTree
      echo ocdbStorage=${ocdbStorage}
      echo

      #what if we have a zip archive?
      if [[ "$qaFile" =~ .*\.zip$ ]]; then
        if unzip -l ${qaFile} | egrep "QAresults.root" &>/dev/null; then
          qaFile="${qaFile}#QAresults.root"
        elif unzip -l ${qaFile} | egrep "QAresults_barrel.root" &>/dev/null; then
          qaFile="${qaFile}#QAresults_barrel.root"
        else
          qaFile=""
        fi
      fi
      if [[ "$highPtTree" =~ .*\.zip$ ]]; then
        if unzip -l ${highPtTree} | egrep "FilterEvents_Trees.root" &>/dev/null; then
          highPtTree="${highPtTree}#FilterEvents_Trees.root"
        else
          highPtTree=""
        fi
      fi
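      #note: the "archive.zip#object.root" syntax is the ROOT convention (TFile::Open)
      #for addressing a single file inside a zip archive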

      if [[ -n ${qaFile} && $(type -t runLevelQA) =~ "function" ]]; then
        echo running ${detector} runLevelQA for run ${runNumber} from ${qaFile}
        runLevelQA "${qaFile}" &> runLevelQA.log
        #perform some default actions:
        #if trending.root not created, create a default one
        if [[ ! -f trending.root ]]; then
          aliroot -b -q -l "$ALICE_ROOT/PWGPP/macros/simpleTrending.C(\"${qaFile}\",${runNumber},\"${detectorQAcontainerName}\",\"trending.root\",\"trending\",\"recreate\")" 2>&1 | tee -a runLevelQA.log
        fi
        if [[ -f trending.root ]]; then
          #cache the touched production + an example file to guarantee consistent run data parsing
          arrOfTouchedProductions[${tmpProductionDir}]="${qaFile%\#*}"
        else
          echo "trending.root not created"
        fi
      fi
      #expert QA based on high pt trees
      if [[ -n ${highPtTree} && $(type -t runLevelHighPtTreeQA) =~ "function" ]]; then
        echo running ${detector} runLevelHighPtTreeQA for run ${runNumber} from ${highPtTree}
        runLevelHighPtTreeQA "${highPtTree}" &> runLevelHighPtTreeQA.log
        arrOfTouchedProductions[${tmpProductionDir}]=1
      fi

      cd ${tmpDetectorRunDir}

    done < ${inputList}

    #################################################################
    #cache which productions were (re)done
    echo "list of processed productions:"
    echo "    ${!arrOfTouchedProductions[@]}"
    echo

    #################################################################
    #(re)do the merging/trending
    for tmpProductionDir in ${!arrOfTouchedProductions[@]}; do
      cd ${tmpProductionDir}
      echo
      echo "running period level stuff in ${tmpProductionDir}"
      echo $(date)

      productionDir=${outputDir}/${tmpProductionDir#${tmpPrefix}}
      echo productionDir=${outputDir}/${tmpProductionDir#${tmpPrefix}}

      mkdir -p ${productionDir}
      if [[ ! -d ${productionDir} ]]; then
        echo "cannot make productionDir $productionDir" && continue
      fi

      #move runs to final destination
      for dir in ${tmpProductionDir}/000*; do
        echo
        oldRunDir=${outputDir}/${dir#${tmpPrefix}}
        if ! guessRunData "${arrOfTouchedProductions[${tmpProductionDir}]}"; then
          echo "could not guess run data from ${dir}"
          continue
        fi

        #before moving - VALIDATE!!!
        if ! validate ${dir}; then
          continue
        fi

        #moving a dir is an atomic operation, no locking necessary
        if [[ -d ${oldRunDir} ]]; then
          echo "removing old ${oldRunDir}"
          rm -rf ${oldRunDir}
        fi
        echo "moving new ${runNumber} to ${productionDir}"
        mv -f ${dir} ${productionDir}
      done

      #go to a temp dir to do the period level stuff in a completely clean dir
      tmpPeriodLevelQAdir="${tmpProductionDir}/periodLevelQA"
      echo
      echo tmpPeriodLevelQAdir="${tmpPeriodLevelQAdir}"
      if ! mkdir -p ${tmpPeriodLevelQAdir}; then continue; fi
      cd ${tmpPeriodLevelQAdir}

      #link the final list of per-run dirs here, just the dirs
      #to have a clean working directory
      unset linkedStuff
      declare -a linkedStuff
      for x in ${productionDir}/000*; do [[ -d $x ]] && ln -s $x && linkedStuff+=(${x##*/}); done

      #merge trending files if any
      if /bin/ls 000*/trending.root &>/dev/null; then
        hadd trending.root 000*/trending.root &> periodLevelQA.log
      fi

      #run the period level trending/QA
      if [[ -f "trending.root" && $(type -t periodLevelQA) =~ "function" ]]; then
        echo running ${detector} periodLevelQA for production ${period}/${pass}
        periodLevelQA trending.root &>> periodLevelQA.log
      else
        echo "WARNING: not running ${detector} periodLevelQA for production ${period}/${pass}, no trending.root"
      fi

      if ! validate ${PWD}; then continue; fi

      #here we are validated so move the produced QA to the final place
      #clean up linked stuff first
      [[ -n ${linkedStuff[@]} ]] && rm ${linkedStuff[@]}
      periodLevelLock=${productionDir}/runQA.lock
      if [[ ! -f ${periodLevelLock} ]]; then
        #some of the output could be a directory, so handle that
        #TODO: maybe use rsync?
        #lock to avoid conflicts:
        echo "${HOSTNAME} ${dateString}" > ${periodLevelLock}
        for x in ${tmpPeriodLevelQAdir}/*; do
          if [[ -d ${x} ]]; then
            echo "removing ${productionDir}/${x##*/}"
            rm -rf ${productionDir}/${x##*/}
            echo "moving ${x} to ${productionDir}"
            mv ${x} ${productionDir}
          fi
          if [[ -f ${x} ]]; then
            echo "moving ${x} to ${productionDir}"
            mv -f ${x} ${productionDir}
          fi
        done
        rm -f ${periodLevelLock}
        #remove the temp dir
        rm -rf ${tmpPeriodLevelQAdir}
      else
        echo "ERROR: cannot move to destination"                     >> ${logSummary}
        echo "production dir ${productionDir} locked!"               >> ${logSummary}
        echo "check and maybe manually do:"                          >> ${logSummary}
        echo " rm ${periodLevelLock}"                                >> ${logSummary}
        echo " rsync -av ${tmpPeriodLevelQAdir}/ ${productionDir}/"  >> ${logSummary}
        planB=1
      fi

    done

    cd ${workingDirectory}

    if [[ -z ${planB} ]]; then
      echo
      echo removing ${tmpDetectorRunDir}
      rm -rf ${tmpDetectorRunDir}
    else
      executePlanB
    fi
  done #end of detector loop

  #remove lock
  rm -f ${lockFile}
  return 0
}

executePlanB()
{
  #in case of emergency
  #first check if we have the email of the detector expert defined,
  #if yes, append to the mailing list
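  #e.g. in the config file: MAILTO_TPC="tpc.qa.expert@example.com" (illustrative address)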
  local mailTo=${MAILTO}
  local detExpertEmailVar="MAILTO_${detector}"
  [[ -n "${!detExpertEmailVar}" ]] && mailTo+=" ${!detExpertEmailVar}"
  if [[ -n ${mailTo} ]]; then
    echo
    echo "trouble detected, sending email to ${mailTo}"
    cat ${logSummary} | mail -s "${detector} QA in need of assistance" ${mailTo}
  fi
  return 0
}

validate()
{
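  #validate the output in a given directory via summarizeLogs;
  #on failure set planB so that executePlanB can notify the experts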
  summarizeLogs ${1} >> ${logSummary}
  logStatus=$?
  if [[ ${logStatus} -ne 0 ]]; then
    echo "WARNING not validated: ${1}"
    planB=1
    return 1
  fi
  return 0
}

summarizeLogs()
{
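  #check the logs in a directory (default: the current dir), print one verdict per log,
  #dump a gdb backtrace for any core files found; return non-zero if any log is bad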
  local dir=$1
  [[ ! -d ${dir} ]] && dir=${PWD}

  #print a summary of logs
  logFiles=(
      "*.log"
      "stdout"
      "stderr"
  )

  #check logs
  local logstatus=0
  for log in ${logFiles[@]/#/${dir}/}; do
    [[ ! -f ${log} ]] && continue
    errorSummary=$(validateLog ${log})
    validationStatus=$?
    [[ ${validationStatus} -ne 0 ]] && logstatus=1
    if [[ ${validationStatus} -eq 0 ]]; then
      #in pretend mode, randomly report an error in rec.log in some cases
      if [[ -n ${pretend} && "${log##*/}" == "rec.log" ]]; then
        [[ $(( ${RANDOM}%2 )) -ge 1 ]] && echo "${log} BAD random error" || echo "${log} OK"
      else
        echo "${log} OK"
      fi
    elif [[ ${validationStatus} -eq 1 ]]; then
      echo "${log} BAD ${errorSummary}"
    elif [[ ${validationStatus} -eq 2 ]]; then
      echo "${log} OK MWAH ${errorSummary}"
    fi
  done

  #report core files
  while read x; do
    echo ${x}
    chmod 644 ${x}
    gdb --batch --quiet -ex "bt" -ex "quit" aliroot ${x} > stacktrace_${x//\//_}.log
  done < <(/bin/ls ${PWD}/*/core 2>/dev/null; /bin/ls ${PWD}/core 2>/dev/null)

  return ${logstatus}
}

validateLog()
{
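  #grep a single log for known error/warning patterns
  #return codes: 0 = clean, 1 = error found, 2 = warning only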
  log=${1}
  errorConditions=(
            'There was a crash'
            'floating'
            'error while loading shared libraries'
            'std::bad_alloc'
            's_err_syswatch_'
            'Thread [0-9]* (Thread'
            'AliFatal'
            'core dumped'
            '\.C.*error:.*\.h: No such file'
            'segmentation'
            'Interpreter error recovered'
  )

  warningConditions=(
            'This is serious'
  )

  local logstatus=0
  local errorSummary=""
  local warningSummary=""

  for ((i=0; i<${#errorConditions[@]};i++)); do
    local tmp=$(grep -m1 -e "${errorConditions[${i}]}" ${log})
    [[ -n ${tmp} ]] && tmp+=" : "
    errorSummary+=${tmp}
  done

  for ((i=0; i<${#warningConditions[@]};i++)); do
    local tmp=$(grep -m1 -e "${warningConditions[${i}]}" ${log})
    [[ -n ${tmp} ]] && tmp+=" : "
    warningSummary+=${tmp}
  done

  if [[ -n ${errorSummary} ]]; then
    echo "${errorSummary}"
    return 1
  fi

  if [[ -n ${warningSummary} ]]; then
    echo "${warningSummary}"
    return 2
  fi

  return 0
}

parseConfig()
{
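  #typical invocation (illustrative values):
  #  parseConfig configFile=runQA.config inputList=file.list outputDirectory=%det
  #command line options override anything sourced from configFile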
  args=("$@")

  #config file
  configFile=""
  #where to search for qa files
  inputList=file.list
  #working directory
  workingDirectory="${PWD}"
  #where to place the final qa plots
  #outputDirectory="/afs/cern.ch/work/a/aliqa%det/www/"
  outputDirectory="${workingDirectory}/%DET"
  #filter out detector option
  excludeDetectors="EXAMPLE"
  #logs
  logDirectory=${workingDirectory}/logs
  #OCDB storage
  ocdbStorage="raw://"
  #email to
  #MAILTO="fbellini@cern.ch"

  #first, check if a config file is specified
  #if yes - source it so that other options can override it
  #if any
  for opt in "${args[@]}"; do
    if [[ ${opt} =~ configFile=.* ]]; then
      eval "${opt}"
      [[ ! -f ${configFile} ]] && echo "configFile ${configFile} not found, exiting..." && return 1
      echo "using config file: ${configFile}"
      source "${configFile}"
      break
    fi
  done

  #then, parse the options as they override the options from file
  for opt in "${args[@]}"; do
    if [[ ! "${opt}" =~ .*=.* ]]; then
      echo "badly formatted option ${opt}, should be: option=value, stopping..."
      return 1
    fi
    local var="${opt%%=*}"
    local value="${opt#*=}"
    echo "${var}=${value}"
    export ${var}="${value}"
  done
  return 0
}

guessRunData()
{
  #guess the period from the path, pick the rightmost one
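  #example (hypothetical path):
  #  data/2012/LHC12f/000186205/cpass1/QAresults.root
  #  -> dataType=data year=2012 period=LHC12f runNumber=186205 pass=cpass1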
  period=""
  runNumber=""
  year=""
  pass=""
  legoTrainRunNumber=""
  dataType=""
  originalPass=""
  originalPeriod=""
  anchorYear=""

  shortRunNumber=""
  oldIFS=${IFS}
  local IFS="/"
  declare -a path=( $1 )
  IFS="${oldIFS}"
  local dirDepth=$(( ${#path[*]}-1 ))
  i=0
  for ((x=${dirDepth};x>=0;x--)); do

    [[ $((x-1)) -ge 0 ]] && local fieldPrev=${path[$((x-1))]}
    local field=${path[${x}]}
    local fieldNext=${path[$((x+1))]}

    [[ ${field} =~ ^[0-9]*$ && ${fieldNext} =~ (.*\.zip$|.*\.root$) ]] && legoTrainRunNumber=${field}
    [[ -n ${legoTrainRunNumber} && -z ${pass} ]] && pass=${fieldPrev}
    [[ ${field} =~ ^LHC[0-9][0-9][a-z].*$ ]] && period=${field%_*} && originalPeriod=${field}
    [[ ${field} =~ ^000[0-9][0-9][0-9][0-9][0-9][0-9]$ ]] && runNumber=${field#000}
    [[ ${field} =~ ^[0-9][0-9][0-9][0-9][0-9][0-9]$ ]] && shortRunNumber=${field}
    [[ ${field} =~ ^20[0-9][0-9]$ ]] && year=${field}
    [[ ${field} =~ ^(^sim$|^data$) ]] && dataType=${field}
    (( i++ ))
  done
  originalPass=${pass}
  [[ -n ${shortRunNumber} && "${legoTrainRunNumber}" =~ ${shortRunNumber} ]] && legoTrainRunNumber=""
  [[ -z ${legoTrainRunNumber} ]] && pass=${path[$((dirDepth-1))]}
  [[ "${dataType}" =~ ^sim$ ]] && pass="passMC" && runNumber=${shortRunNumber} && originalPass="" #for MC not from lego, the runnumber is identified as lego train number, thus needs to be nulled
  [[ -n ${legoTrainRunNumber} ]] && pass+="_lego${legoTrainRunNumber}"

  #modify the OCDB: set the year
  if [[ ${dataType} =~ sim ]]; then
    anchorYear=$(run2year $runNumber)
    if [[ -z "${anchorYear}" ]]; then
      echo "WARNING: anchorYear not available for this production: ${originalPeriod}, runNumber: ${runNumber}. Cannot set the OCDB."
      return 1
    fi
    ocdbStorage=$(setYear ${anchorYear} ${ocdbStorage})
  else
    ocdbStorage=$(setYear ${year} ${ocdbStorage})
  fi

  #if [[ -z ${dataType} || -z ${year} || -z ${period} || -z ${runNumber}} || -z ${pass} ]];
  if [[ -z ${runNumber} ]]
  then
    #error condition
    return 1
  fi

  #ALL OK
  return 0
}

run2year()
{
  #for a given run print the year.
  #the run-year table is ${runMap} (a string)
  #defined in the config file
  #one line per year, format: year runMin runMax
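  #e.g. (illustrative values only):
  #  runMap="
  #  2010 105000 139999
  #  2011 140000 170999"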
  local run=$1
  [[ -z ${run} ]] && return 1
  local year=""
  local runMin=""
  local runMax=""
  while read year runMin runMax; do
    [[ -z ${year} || -z ${runMin} || -z ${runMax} ]] && continue
    [[ ${run} -ge ${runMin} && ${run} -le ${runMax} ]] && echo ${year} && break
  done < <(echo "${runMap}")
  return 0
}

substituteDetectorName()
{
  local det=$1
  local dir=$2
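  #e.g. (hypothetical): substituteDetectorName TPC /www/%det -> /www/tpc
  #                     substituteDetectorName TPC /www/%DET -> /www/TPC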
  [[ ${dir} =~ \%det ]] && det=${det,,} && echo ${dir/\%det/${det}}
  [[ ${dir} =~ \%DET ]] && echo ${dir/\%DET/${det}}
  return 0
}

get_realpath()
{
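  #print the absolute path of an existing file,
  #e.g. get_realpath ./file.list -> /current/working/dir/file.list (illustrative)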
  if [[ -f "$1" ]]
  then
    # file *must* exist
    if cd "$(echo "${1%/*}")" &>/dev/null
    then
      # file *may* not be local
      # exception is ./file.ext
      # try 'cd .; cd -;' *works!*
      local tmppwd="$PWD"
      cd - &>/dev/null
    else
      # file *must* be local
      local tmppwd="$PWD"
    fi
  else
    # file *cannot* exist
    return 1 # failure
  fi
  # reassemble realpath
  echo "$tmppwd"/"${1##*/}"
  return 0 # success
}

setYear()
{
  #set the year
  #  ${1} - year to be set
  #  ${2} - where to set the year
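  #e.g. (hypothetical): setYear 2011 local://OCDB/2010/data -> local://OCDB/2011/data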
  local year1=$(guessYear ${1})
  local year2=$(guessYear ${2})
  local path=${2}
  [[ ${year1} -ne ${year2} && -n ${year2} && -n ${year1} ]] && path=${2/\/${year2}\//\/${year1}\/}
  echo ${path}
  return 0
}

guessYear()
{
  #guess the year from the path, pick the rightmost one
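  #e.g. guessYear /alice/data/2012/LHC12f/000186205 -> 2012 (hypothetical path)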
  local IFS="/"
  declare -a pathArray=( ${1} )
  local field
  local year
  for field in ${pathArray[@]}; do
    [[ ${field} =~ ^20[0-9][0-9]$ ]] && year=${field}
  done
  echo ${year}
  return 0
}

hostInfo(){
#
# Hello world - Print AliRoot/Root/Alien system info
#

#
# HOST info
#
    echo --------------------------------------
    echo
    echo HOSTINFO
    echo
    echo HOSTINFO HOSTNAME"      "$HOSTNAME
    echo HOSTINFO DATE"          "`date`
    echo HOSTINFO gccpath"       "`which gcc`
    echo HOSTINFO gcc version"   "`gcc --version | grep gcc`
    echo --------------------------------------

#
# ROOT info
#
    echo --------------------------------------
    echo
    echo ROOTINFO
    echo
    echo ROOTINFO ROOT"           "`which root`
    echo ROOTINFO VERSION"        "`root-config --version`
    echo
    echo --------------------------------------


#
# ALIROOT info
#
    echo --------------------------------------
    echo
    echo ALIROOTINFO
    echo
    echo ALIROOTINFO ALIROOT"        "`which aliroot`
    echo ALIROOTINFO VERSION"        "`echo $ALICE_LEVEL`
    echo ALIROOTINFO TARGET"         "`echo $ALICE_TARGET`
    echo
    echo --------------------------------------

#
# Alien info
#
#echo --------------------------------------
#echo
#echo ALIENINFO
#for a in `alien --printenv`; do echo ALIENINFO $a; done
#echo
#echo --------------------------------------

#
# Local Info
#
    echo PWD `pwd`
    echo Dir
    ls -al
    echo
    echo
    echo

    return 0
}

main "$@"