#!/bin/bash
# process QA output into plots and trending
# run without arguments for examples
# origin: Mikolaj Krzewicki, mkrzewic@cern.ch
#
if [ ${BASH_VERSINFO} -lt 4 ]; then
  echo "bash version >= 4 needed, you have ${BASH_VERSION}, exiting..."
  exit 1
fi

main()
{
  if [[ -z $1 ]]; then
    echo "Usage: "
    echo "  ${0##*/} option=value [option=value]"
    echo "  at least inputList should be specified, or configFile containing it:"
    echo "  ${0##*/} inputList=file.list"
    echo "  options override config file (if any), e.g.:"
    echo "  ${0##*/} configFile=runQA.config inputList=file.list outputDirectory=%det"
    return 1
  fi

  if ! parseConfig "$@"; then
    ${0}
    return 1
  fi

  [[ -z $ALICE_ROOT ]] && echo "ALICE_ROOT not defined" && return 1

  ocdbregex='raw://'
  if [[ ${ocdbStorage} =~ ${ocdbregex} ]]; then
    alien-token-init ${alienUserName}
    #this is a hack! alien-token-init alone does not seem to be enough
    #but the gclient_env script messes up the LD_LIBRARY_PATH
    while read x; do
      eval ${x};
    done < <(grep -v "LD_LIBRARY_PATH" /tmp/gclient_env_${UID})
  fi

  updateQA "$@"
  return 0
}

updateQA()
{
  umask 0002
  parseConfig "$@"

  #be paranoid and make some full paths
  [[ ! -f ${inputList} ]] && echo "no input list: ${inputList}" && return 1
  inputList=$(get_realpath ${inputList})
  mkdir -p ${workingDirectory}
  workingDirectory=$(workingDirectory=${workingDirectory%/}; cd ${workingDirectory%/*}; echo "${PWD}/${workingDirectory##*/}")
  if [[ ! -d ${workingDirectory} ]]; then
    echo "working dir $workingDirectory does not exist and cannot be created"
    return 1
  fi
  cd ${workingDirectory}

  echo JOB config:
  echo inputList=$inputList
  echo outputDirectory=$outputDirectory
  echo

  dateString=$(date +%Y-%m-%d-%H-%M-%S-%N)
  echo "Start time QA process: $dateString"

  #logging
  mkdir -p $logDirectory
  [[ ! -d $logDirectory ]] && echo "no log dir $logDirectory" && return 1
  logFile="$logDirectory/${0##*/}.${dateString}.log"
  touch ${logFile}
  [[ ! -f ${logFile} ]] && echo "cannot write logfile $logFile" && return 1
  echo "logFile = $logFile"

  #check lock
  lockFile=${workingDirectory}/runQA.lock
  [[ -f ${lockFile} ]] && echo "lock ${lockFile} exists!" | tee ${logFile} && return 1
  touch ${lockFile}
  [[ ! -f ${lockFile} ]] && echo "cannot lock $lockFile" | tee ${logFile} && return 1

  exec &>${logFile}

  ################################################################
  #ze detector loop
  for detectorScript in $ALICE_ROOT/PWGPP/QA/detectorQAscripts/*; do
    echo
    echo "##############################################"
    echo $(date)
    unset planB
    [[ ! ${detectorScript} =~ .*\.sh$ ]] && continue
    detector=${detectorScript%.sh}
    detector=${detector##*/}
    #by default we expect the container in the QA root file to be named like
    #the detector
    detectorQAcontainerName=${detector}

    #skip if excluded
    if [[ "${excludeDetectors}" =~ ${detector} ]]; then
      echo "${detector} is excluded in config, skipping..."
      continue
    fi

    #if includeDetectors is set, only process the detectors specified there
    if [[ -n ${includeDetectors} && ! "${includeDetectors}" =~ ${detector} ]]; then
      echo "${detector} not included in includeDetectors, skipping..."
      continue
    fi

    logSummary=${logDirectory}/summary-${detector}-${dateString}.log
    hostInfo >> ${logSummary}
    outputDir=$(substituteDetectorName ${detector} ${outputDirectory})
    tmpDetectorRunDir=${workingDirectory}/tmpQAtmpRunDir${detector}-${dateString}
    if ! mkdir -p ${tmpDetectorRunDir}; then
      echo "cannot create the temp dir $tmpDetectorRunDir"
      continue
    fi
    cd ${tmpDetectorRunDir}

    tmpPrefix=${tmpDetectorRunDir}/${outputDir}
    echo "running QA for ${detector}"
    echo "  outputDir=$outputDir"
    echo "  tmpPrefix=$tmpPrefix"

    #unset the detector hook functions defined by the previously sourced detector script
    unset -f runLevelQA
    unset -f periodLevelQA
    unset -f runLevelHighPtTreeQA
    unset -f periodLevelHighPtTreeQA
    source ${detectorScript}
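
    #the sourced detector script is expected to define (some of) the hook
    #functions used below; an illustrative skeleton (hypothetical content,
    #the real scripts live in $ALICE_ROOT/PWGPP/QA/detectorQAscripts/):
    #
    #  runLevelQA()
    #  {
    #    qaFile=$1
    #    #produce per-run plots and a trending.root from ${qaFile}
    #  }
    #  periodLevelQA()
    #  {
    #    trendingFile=$1
    #    #produce period level plots from the merged ${trendingFile}
    #  }
    #  #optionally also runLevelHighPtTreeQA() and periodLevelHighPtTreeQA(),
    #  #and an override of detectorQAcontainerName if the container in the
    #  #QA file is not named like the detector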

    #################################################################
    #produce the QA and trending tree for each file (run)
    unset arrOfTouchedProductions
    declare -A arrOfTouchedProductions
    while read qaFile; do
      echo
      echo $(date)

      #first check if input file exists
      [[ ! -f ${qaFile%\#*} ]] && echo "file ${qaFile%\#*} not accessible" && continue

      if ! guessRunData ${qaFile}; then
        echo "could not guess run data from ${qaFile}"
        continue
      fi
      echo "anchorYear for ${originalPeriod} is: ${anchorYear}"

      tmpProductionDir=${tmpPrefix}/${dataType}/${year}/${period}/${pass}
      tmpRunDir=${tmpProductionDir}/000${runNumber}
      mkdir -p ${tmpRunDir}
      cd ${tmpRunDir}

      #by default we expect to have everything in the same archive
      highPtTree=${qaFile}

      #maybe the input is not an archive, but a file
      [[ "${qaFile}" =~ QAresults\.root$ ]] && highPtTree=""
      [[ "${qaFile}" =~ FilterEvents_Trees\.root$ ]] && qaFile=""

      #it is possible we get the highPt trees from somewhere else
      #search the list of high pt trees for the proper run number
      if [[ -n ${inputListHighPtTrees} ]]; then
        highPtTree=$(egrep -m1 ${runNumber} ${inputListHighPtTrees})
        echo "loaded the highPtTree ${highPtTree} from external file ${inputListHighPtTrees}"
      fi

      echo qaFile=$qaFile
      echo highPtTree=$highPtTree
      echo ocdbStorage=${ocdbStorage}
      echo

      #what if we have a zip archive?
      if [[ "$qaFile" =~ \.zip$ ]]; then
        if unzip -l ${qaFile} | egrep "QAresults.root" &>/dev/null; then
          qaFile="${qaFile}#QAresults.root"
        elif unzip -l ${qaFile} | egrep "QAresults_barrel.root" &>/dev/null; then
          qaFile="${qaFile}#QAresults_barrel.root"
        else
          qaFile=""
        fi
      fi
      if [[ "$highPtTree" =~ \.zip$ ]]; then
        if unzip -l ${highPtTree} | egrep "FilterEvents_Trees.root" &>/dev/null; then
          highPtTree="${highPtTree}#FilterEvents_Trees.root"
        else
          highPtTree=""
        fi
      fi

      if [[ -n ${qaFile} && $(type -t runLevelQA) =~ "function" ]]; then
        echo running ${detector} runLevelQA for run ${runNumber} from ${qaFile}
        runLevelQA "${qaFile}" &> runLevelQA.log
        #perform some default actions:
        #if trending.root was not created, create a default one
        if [[ ! -f trending.root ]]; then
          aliroot -b -q -l "$ALICE_ROOT/PWGPP/macros/simpleTrending.C(\"${qaFile}\",${runNumber},\"${detectorQAcontainerName}\",\"trending.root\",\"trending\",\"recreate\")" 2>&1 | tee -a runLevelQA.log
        fi
        if [[ -f trending.root ]]; then
          #cache the touched production + an example file to guarantee consistent run data parsing
          arrOfTouchedProductions[${tmpProductionDir}]="${qaFile%\#*}"
        else
          echo "trending.root not created"
        fi
      fi
      #expert QA based on high pt trees
      if [[ -n ${highPtTree} && $(type -t runLevelHighPtTreeQA) =~ "function" ]]; then
        echo running ${detector} runLevelHighPtTreeQA for run ${runNumber} from ${highPtTree}
        runLevelHighPtTreeQA "${highPtTree}" &> runLevelHighPtTreeQA.log
        #cache an example file here as well so the later run data parsing stays consistent
        arrOfTouchedProductions[${tmpProductionDir}]="${highPtTree%\#*}"
      fi

      cd ${tmpDetectorRunDir}

    done < ${inputList}

    #################################################################
    #cache which productions were (re)done
    echo "list of processed productions:"
    echo "    ${!arrOfTouchedProductions[@]}"
    echo

    #################################################################
    #(re)do the merging/trending
    for tmpProductionDir in ${!arrOfTouchedProductions[@]}; do
      cd ${tmpProductionDir}
      echo
      echo "running period level stuff in ${tmpProductionDir}"
      echo $(date)

      productionDir=${outputDir}/${tmpProductionDir#${tmpPrefix}}
      echo productionDir=${outputDir}/${tmpProductionDir#${tmpPrefix}}

      mkdir -p ${productionDir}
      if [[ ! -d ${productionDir} ]]; then
        echo "cannot make productionDir $productionDir" && continue
      fi

      #move runs to final destination
      for dir in ${tmpProductionDir}/000*; do
        echo
        oldRunDir=${outputDir}/${dir#${tmpPrefix}}
        if ! guessRunData "${arrOfTouchedProductions[${tmpProductionDir}]}"; then
          echo "could not guess run data from ${dir}"
          continue
        fi

        #before moving - VALIDATE!!!
        if ! validate ${dir}; then
          continue
        fi

        #moving a dir is an atomic operation, no locking necessary
        if [[ -d ${oldRunDir} ]]; then
          echo "removing old ${oldRunDir}"
          rm -rf ${oldRunDir}
        fi
        echo "moving new ${runNumber} to ${productionDir}"
        mv -f ${dir} ${productionDir}
      done

      #go to a temp dir to do the period level stuff in a completely clean dir
      tmpPeriodLevelQAdir="${tmpProductionDir}/periodLevelQA"
      echo
      echo tmpPeriodLevelQAdir="${tmpPeriodLevelQAdir}"
      if ! mkdir -p ${tmpPeriodLevelQAdir}; then continue; fi
      cd ${tmpPeriodLevelQAdir}

      #link the final list of per-run dirs here, just the dirs
      #to have a clean working directory
      unset linkedStuff
      declare -a linkedStuff
      for x in ${productionDir}/000*; do [[ -d $x ]] && ln -s $x && linkedStuff+=(${x##*/}); done

      #merge trending files if any
      if /bin/ls 000*/trending.root &>/dev/null; then
        hadd trending.root 000*/trending.root &> periodLevelQA.log
      fi

      #run the period level trending/QA
      if [[ -f "trending.root" && $(type -t periodLevelQA) =~ "function" ]]; then
        echo running ${detector} periodLevelQA for production ${period}/${pass}
        periodLevelQA trending.root &>> periodLevelQA.log
      else
        echo "WARNING: not running ${detector} periodLevelQA for production ${period}/${pass}, no trending.root"
      fi

      if ! validate ${PWD}; then continue; fi

      #here we are validated so move the produced QA to the final place
      #clean up linked stuff first
      [[ -n ${linkedStuff[@]} ]] && rm ${linkedStuff[@]}
      periodLevelLock=${productionDir}/runQA.lock
      if [[ ! -f ${periodLevelLock} ]]; then
        #some of the output could be a directory, so handle that
        #TODO: maybe use rsync?
        #lock to avoid conflicts:
        echo "${HOSTNAME} ${dateString}" > ${periodLevelLock}
        for x in ${tmpPeriodLevelQAdir}/*; do
          if [[ -d ${x} ]]; then
            echo "removing ${productionDir}/${x##*/}"
            rm -rf ${productionDir}/${x##*/}
            echo "moving ${x} to ${productionDir}"
            mv ${x} ${productionDir}
          fi
          if [[ -f ${x} ]]; then
            echo "moving ${x} to ${productionDir}"
            mv -f ${x} ${productionDir}
          fi
        done
        rm -f ${periodLevelLock}
        #remove the temp dir
        rm -rf ${tmpPeriodLevelQAdir}
      else
        echo "ERROR: cannot move to destination"                     >> ${logSummary}
        echo "production dir ${productionDir} locked!"               >> ${logSummary}
        echo "check and maybe manually do:"                          >> ${logSummary}
        echo " rm ${periodLevelLock}"                                >> ${logSummary}
        echo " rsync -av ${tmpPeriodLevelQAdir}/ ${productionDir}/"  >> ${logSummary}
        planB=1
      fi

    done

    cd ${workingDirectory}

    if [[ -z ${planB} ]]; then
      echo
      echo removing ${tmpDetectorRunDir}
      rm -rf ${tmpDetectorRunDir}
    else
      executePlanB
    fi
  done #end of detector loop

  #remove lock
  rm -f ${lockFile}
  return 0
}

executePlanB()
{
  #in case of emergency
  #first check if we have the email of the detector expert defined,
  #if yes, append to the mailing list
  local mailTo=${MAILTO}
  local detExpertEmailVar="MAILTO_${detector}"
  [[ -n "${!detExpertEmailVar}" ]] && mailTo+=" ${!detExpertEmailVar}"
  if [[ -n ${mailTo} ]]; then
    echo
    echo "trouble detected, sending email to ${mailTo}"
    mail -s "${detector} QA in need of assistance" ${mailTo} < ${logSummary}
  fi
  return 0
}
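
#hypothetical configuration of the notification addresses, normally set in the
#config file or the environment (the addresses are placeholders):
#  MAILTO="qa.operator@example.com"
#  MAILTO_TPC="tpc.expert@example.com"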

validate()
{
  summarizeLogs ${1} >> ${logSummary}
  logStatus=$?
  if [[ ${logStatus} -ne 0 ]]; then
    echo "WARNING not validated: ${1}"
    planB=1
    return 1
  fi
  return 0
}

summarizeLogs()
{
  local dir=$1
  [[ ! -d ${dir} ]] && dir=${PWD}

  #print a summary of logs
  logFiles=(
      "*.log"
      "stdout"
      "stderr"
  )

  #check logs (prefix each pattern with the target dir)
  local logstatus=0
  for log in ${logFiles[@]/#/${dir}/}; do
    [[ ! -f ${log} ]] && continue
    errorSummary=$(validateLog ${log})
    validationStatus=$?
    [[ ${validationStatus} -ne 0 ]] && logstatus=1
    if [[ ${validationStatus} -eq 0 ]]; then
      #in pretend mode randomly report an error in rec.log in some cases
      if [[ -n ${pretend} && "${log##*/}" == "rec.log" ]]; then
        [[ $(( ${RANDOM}%2 )) -ge 1 ]] && echo "${log} BAD random error" || echo "${log} OK"
      else
        echo "${log} OK"
      fi
    elif [[ ${validationStatus} -eq 1 ]]; then
      echo "${log} BAD ${errorSummary}"
    elif [[ ${validationStatus} -eq 2 ]]; then
      echo "${log} OK MWAH ${errorSummary}"
    fi
  done

  #report core files and produce stack traces
  while read x; do
    echo ${x}
    chmod 644 ${x}
    gdb --batch --quiet -ex "bt" -ex "quit" aliroot ${x} > stacktrace_${x//\//_}.log
  done < <(/bin/ls ${PWD}/*/core 2>/dev/null; /bin/ls ${PWD}/core 2>/dev/null)

  return ${logstatus}
}

validateLog()
{
  log=${1}
  errorConditions=(
            'There was a crash'
            'floating'
            'error while loading shared libraries'
            'std::bad_alloc'
            's_err_syswatch_'
            'Thread [0-9]* (Thread'
            'AliFatal'
            'core dumped'
            '\.C.*error:.*\.h: No such file'
            'segmentation'
            'Interpreter error recovered'
  )

  warningConditions=(
            'This is serious'
  )

  local logstatus=0
  local errorSummary=""
  local warningSummary=""

  for ((i=0; i<${#errorConditions[@]};i++)); do
    local tmp=$(grep -m1 -e "${errorConditions[${i}]}" ${log})
    [[ -n ${tmp} ]] && tmp+=" : "
    errorSummary+=${tmp}
  done

  for ((i=0; i<${#warningConditions[@]};i++)); do
    local tmp=$(grep -m1 -e "${warningConditions[${i}]}" ${log})
    [[ -n ${tmp} ]] && tmp+=" : "
    warningSummary+=${tmp}
  done

  if [[ -n ${errorSummary} ]]; then
    echo "${errorSummary}"
    return 1
  fi

  if [[ -n ${warningSummary} ]]; then
    echo "${warningSummary}"
    return 2
  fi

  return 0
}
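
#e.g. a log containing a line that matches 'AliFatal' makes validateLog echo
#that line and return 1 (reported by summarizeLogs as "<log> BAD <line>"),
#while a 'This is serious' match returns 2 ("<log> OK MWAH <line>")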

parseConfig()
{
  args=("$@")

  #config file
  configFile=""
  #where to search for qa files
  inputList=file.list
  #working directory
  workingDirectory="${PWD}"
  #where to place the final qa plots
  #outputDirectory="/afs/cern.ch/work/a/aliqa%det/www/"
  outputDirectory="${workingDirectory}/%DET"
  #filter out detector option
  excludeDetectors="EXAMPLE"
  #logs
  logDirectory=${workingDirectory}/logs
  #OCDB storage
  ocdbStorage="raw://"
  #email to
  #MAILTO="fbellini@cern.ch"

  #first, check if a config file is specified
  #if yes - source it so that the other options (if any)
  #can override it
  for opt in "${args[@]}"; do
    if [[ ${opt} =~ configFile=.* ]]; then
      eval "${opt}"
      [[ ! -f ${configFile} ]] && echo "configFile ${configFile} not found, exiting..." && return 1
      echo "using config file: ${configFile}"
      source "${configFile}"
      break
    fi
  done

  #then, parse the command line options as they override the options from the file
  for opt in "${args[@]}"; do
    if [[ ! "${opt}" =~ .*=.* ]]; then
      echo "badly formatted option ${opt}, should be: option=value, stopping..."
      return 1
    fi
    local var="${opt%%=*}"
    local value="${opt#*=}"
    echo "${var}=${value}"
    export ${var}="${value}"
  done
  return 0
}
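
#an illustrative runQA.config (the option names are the ones used above,
#all values are placeholders):
#  inputList=/some/path/qa.list
#  workingDirectory=/some/path/workdir
#  outputDirectory=/some/path/www/%det
#  excludeDetectors="EXAMPLE"
#  ocdbStorage="raw://"
#  alienUserName=someuser
#  MAILTO="qa.operator@example.com"
#  runMap="..."    #run-to-year table used by run2year() below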

guessRunData()
{
  #guess the run data (period, run number, year, pass, ...) from the path,
  #picking the rightmost matching fields
  period=""
  runNumber=""
  year=""
  pass=""
  legoTrainRunNumber=""
  dataType=""
  originalPass=""
  originalPeriod=""
  anchorYear=""

  shortRunNumber=""
  oldIFS=${IFS}
  local IFS="/"
  declare -a path=( $1 )
  IFS="${oldIFS}"
  local dirDepth=$(( ${#path[*]}-1 ))
  i=0
  for ((x=${dirDepth};x>=0;x--)); do

    [[ $((x-1)) -ge 0 ]] && local fieldPrev=${path[$((x-1))]}
    local field=${path[${x}]}
    local fieldNext=${path[$((x+1))]}

    [[ ${field} =~ ^[0-9]*$ && ${fieldNext} =~ (.*\.zip$|.*\.root$) ]] && legoTrainRunNumber=${field}
    [[ -n ${legoTrainRunNumber} && -z ${pass} ]] && pass=${fieldPrev}
    [[ ${field} =~ ^LHC[0-9][0-9][a-z].*$ ]] && period=${field%_*} && originalPeriod=${field}
    [[ ${field} =~ ^000[0-9][0-9][0-9][0-9][0-9][0-9]$ ]] && runNumber=${field#000}
    [[ ${field} =~ ^[0-9][0-9][0-9][0-9][0-9][0-9]$ ]] && shortRunNumber=${field}
    [[ ${field} =~ ^20[0-9][0-9]$ ]] && year=${field}
    [[ ${field} =~ ^(sim|data)$ ]] && dataType=${field}
    (( i++ ))
  done
  originalPass=${pass}
  [[ -n ${shortRunNumber} && "${legoTrainRunNumber}" =~ ${shortRunNumber} ]] && legoTrainRunNumber=""
  [[ -z ${legoTrainRunNumber} ]] && pass=${path[$((dirDepth-1))]}
  [[ "${dataType}" =~ ^sim$ ]] && pass="passMC" && runNumber=${shortRunNumber} && originalPass="" #for MC not from lego, the run number is identified as the lego train number, thus it needs to be nulled
  [[ -n ${legoTrainRunNumber} ]] && pass+="_lego${legoTrainRunNumber}"

  #modify the OCDB: set the year
  if [[ ${dataType} =~ sim ]]; then
    anchorYear=$(run2year $runNumber)
    if [[ -z "${anchorYear}" ]]; then
      echo "WARNING: anchorYear not available for this production: ${originalPeriod}, runNumber: ${runNumber}. Cannot set the OCDB."
      return 1
    fi
    ocdbStorage=$(setYear ${anchorYear} ${ocdbStorage})
  else
    ocdbStorage=$(setYear ${year} ${ocdbStorage})
  fi

  #if [[ -z ${dataType} || -z ${year} || -z ${period} || -z ${runNumber}} || -z ${pass} ]];
  if [[ -z ${runNumber} ]]
  then
    #error condition
    return 1
  fi

  #ALL OK
  return 0
}
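
#a hypothetical example: for an input file
#  /alice/data/2012/LHC12a/000176000/cpass1/QAresults.root
#the fields are guessed as dataType=data, year=2012, period=LHC12a,
#runNumber=176000, pass=cpass1; for lego train output the numeric directory
#just before the root file becomes legoTrainRunNumber and "_lego<number>"
#is appended to the pass.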

run2year()
{
  #for a given run print the year.
  #the run-year table is ${runMap} (a string)
  #defined in the config file
  #one line per year, format: year runMin runMax
  local run=$1
  [[ -z ${run} ]] && return 1
  local year=""
  local runMin=""
  local runMax=""
  while read year runMin runMax; do
    [[ -z ${year} || -z ${runMin} || -z ${runMax} ]] && continue
    [[ ${run} -ge ${runMin} && ${run} -le ${runMax} ]] && echo ${year} && break
  done < <(echo "${runMap}")
  return 0
}
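
#an illustrative runMap definition for the config file (the run ranges are
#placeholders, not real year boundaries):
#  runMap="
#  2010 105000 140000
#  2011 140001 171000
#  2012 171001 194000
#  "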

substituteDetectorName()
{
  local det=$1
  local dir=$2
  [[ ${dir} =~ \%det ]] && det=${det,,} && echo ${dir/\%det/${det}}
  [[ ${dir} =~ \%DET ]] && echo ${dir/\%DET/${det}}
  return 0
}
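
#e.g. substituteDetectorName TPC /some/www/%det prints /some/www/tpc,
#     substituteDetectorName TPC /some/www/%DET prints /some/www/TPC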

get_realpath()
{
  if [[ -f "$1" ]]
  then
    # file *must* exist
    if cd "$(echo "${1%/*}")" &>/dev/null
    then
      # file *may* not be local
      # exception is ./file.ext
      # try 'cd .; cd -;' *works!*
      local tmppwd="$PWD"
      cd - &>/dev/null
    else
      # file *must* be local
      local tmppwd="$PWD"
    fi
  else
    # file *cannot* exist
    return 1 # failure
  fi
  # reassemble realpath
  echo "$tmppwd"/"${1##*/}"
  return 0 # success
}
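
#e.g. when called from /home/user with an existing ./file.list,
#get_realpath file.list prints /home/user/file.list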

setYear()
{
  #set the year
  #  ${1} - year to be set
  #  ${2} - where to set the year
  local year1=$(guessYear ${1})
  local year2=$(guessYear ${2})
  local path=${2}
  [[ ${year1} -ne ${year2} && -n ${year2} && -n ${year1} ]] && path=${2/\/${year2}\//\/${year1}\/}
  echo ${path}
  return 0
}
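
#e.g. setYear 2011 local://some/OCDB/2010/ prints local://some/OCDB/2011/,
#while a storage without a year in the path (like "raw://") is returned unchanged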

guessYear()
{
  #guess the year from the path, pick the rightmost one
  local IFS="/"
  declare -a pathArray=( ${1} )
  local field
  local year
  for field in ${pathArray[@]}; do
    [[ ${field} =~ ^20[0-9][0-9]$ ]] && year=${field}
  done
  echo ${year}
  return 0
}

hostInfo(){
#
# Hello world - Print AliRoot/Root/Alien system info
#

#
# HOST info
#
        echo --------------------------------------
        echo
        echo HOSTINFO
        echo
        echo HOSTINFO HOSTNAME"      "$HOSTNAME
        echo HOSTINFO DATE"          "`date`
        echo HOSTINFO gccpath"       "`which gcc`
        echo HOSTINFO gcc version"   "`gcc --version | grep gcc`
        echo --------------------------------------

#
# ROOT info
#
        echo --------------------------------------
        echo
        echo ROOTINFO
        echo
        echo ROOTINFO ROOT"           "`which root`
        echo ROOTINFO VERSION"        "`root-config --version`
        echo
        echo --------------------------------------


#
# ALIROOT info
#
        echo --------------------------------------
        echo
        echo ALIROOTINFO
        echo
        echo ALIROOTINFO ALIROOT"        "`which aliroot`
        echo ALIROOTINFO VERSION"        "`echo $ALICE_LEVEL`
        echo ALIROOTINFO TARGET"         "`echo $ALICE_TARGET`
        echo
        echo --------------------------------------

#
# Alien info
#
#echo --------------------------------------
#echo
#echo ALIENINFO
#for a in `alien --printenv`; do echo ALIENINFO $a; done
#echo
#echo --------------------------------------

#
# Local Info
#
        echo PWD `pwd`
        echo Dir
        ls -al
        echo
        echo
        echo

  return 0
}

main "$@"