ATO-98 more verbose output for benchmark
diff --git a/PWGPP/benchmark/benchmark.sh b/PWGPP/benchmark/benchmark.sh
index af210e42d2909752fa5292d14dd51c51e2faf696..5dfcf2988f8f60a4b6ea45dbc544588cb7fbe3ba 100755 (executable)
@@ -1,6 +1,7 @@
 #!/bin/bash
 #include benchmark.config
 
+# blame: Mikolaj Krzewicki, mkrzewic@cern.ch
 # this script runs the CPass0/CPass1 train
 # produced OCDB updates are local
 
@@ -78,7 +79,7 @@ goCPass0()
   jobindex=${7}
   shift 7
   if ! parseConfig ${configFile} "$@"; then return 1; fi
-
+  echo Start: goCPass0
   #record the working directory provided by the batch system
   batchWorkingDirectory=${PWD}
 
@@ -93,8 +94,15 @@ goCPass0()
   fi
 
   [[ -z ${commonOutputPath} ]] && commonOutputPath=${PWD}
-  doneFile="${commonOutputPath}/meta/cpass0.job${jobindex}.run${runNumber}.done"
-  [[ -n ${useProfilingCommand} ]] && doneFile="${commonOutputPath}/meta/profiling.cpass0.job${jobindex}.run${runNumber}.done"
+
+  # This file signals that everything went fine
+  doneFileBase="cpass0.job${jobindex}.run${runNumber}.done"
+  [[ -n ${useProfilingCommand} ]] && doneFileBase="profiling.cpass0.job${jobindex}.run${runNumber}.done"
+
+  # We will have two copies of the file
+  mkdir -p "${commonOutputPath}/meta" || return 1
+  doneFileTmp="${batchWorkingDirectory}/${doneFileBase}"
+  doneFile="${commonOutputPath}/meta/${doneFileBase}"
 
   [[ -f ${alirootSource} && -z ${ALICE_ROOT} ]] && source ${alirootSource}
   
@@ -119,30 +127,39 @@ goCPass0()
   outputDir=${targetDirectory}/${jobindex}_${chunkName%.*}
   mkdir -p ${outputDir}
   if [[ ! -d ${outputDir} ]]; then 
-    touch ${doneFile} 
-    echo "cannot make ${outputDir}" >> ${doneFile}
+    touch ${doneFileTmp}
+    echo "cannot make ${outputDir}" >> ${doneFileTmp}
+    cp "$doneFileTmp" "$doneFile" || rm -f "$doneFileTmp" "$doneFile"
+    [[ -n ${removeTMPdoneFile} ]] && rm -f ${doneFileTmp}
     return 1  
   fi
   
-  #runpath=${PWD}/rundir_cpass0_${runNumber}_${jobindex}
   runpath=${outputDir}
-  #[[ ${reconstructInTemporaryDir} -eq 1 && -n ${TMPDIR} ]] && runpath=${TMPDIR}
   [[ ${reconstructInTemporaryDir} -eq 1 ]] && runpath=$(mktemp -d -t cpass0.XXXXXX)
+  [[ ${reconstructInTemporaryDir} -eq 2 ]] && runpath=${PWD}/rundir_cpass0_${runNumber}_${jobindex}
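+  #reconstructInTemporaryDir: 1 = use a fresh mktemp dir, 2 = use a subdir of the
+  #current (batch) working dir, anything else = run directly in the output dir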
   mkdir -p ${runpath}
   if [[ ! -d ${runpath} ]]; then
-    touch ${doneFile} 
-    echo "cannot make runpath ${runpath}" >> ${doneFile}
+    touch ${doneFileTmp} 
+    echo "cannot make runpath ${runpath}" >> ${doneFileTmp}
+    cp "$doneFileTmp" "$doneFile" || rm -f "$doneFileTmp" "$doneFile"
+    [[ -n ${removeTMPdoneFile} ]] && rm -f ${doneFileTmp}
     return 1
   fi
   if ! cd ${runpath}; then
-    touch ${doneFile}
-    echo "PWD=$PWD is not the runpath=${runpath}" >> ${doneFile}
+    touch ${doneFileTmp}
+    echo "PWD=$PWD is not the runpath=${runpath}" >> ${doneFileTmp}
+    cp "$doneFileTmp" "$doneFile" || rm -f "$doneFileTmp" "$doneFile"
+    [[ -n ${removeTMPdoneFile} ]] && rm -f ${doneFileTmp}
     return 1
   fi
 
   #runCPassX/C expects the raw chunk to be linked in the run dir
   #despite it being accessed by the full path
-  ln -s ${infile} ${runpath}/${chunkName}
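+  #either symlink the raw chunk or copy it locally
+  #(copying may be needed e.g. when the input is not on a locally mounted filesystem)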
+  if [[ $copyInputData == 0 ]]; then
+    ln -s ${infile} ${runpath}/${chunkName}
+  else
+    copyFileToLocal ${infile} ${runpath}/${chunkName}
+  fi
 
   #####MC
   if [[ -n ${generateMC} ]]; then
@@ -178,9 +195,11 @@ goCPass0()
   fi
   ######
   
-  if [[ ! -f ${inputList} && -z ${pretend} ]]; then
-    touch ${doneFile} 
-    echo "input file ${inputList} not found, exiting..." >> ${doneFile}
+  if [[ "${inputList}" == "${inputList%%://*}" && ! -f "${inputList}" && -z ${pretend} ]]; then
+    touch ${doneFileTmp}
+    echo "input file ${inputList} not found, exiting..." >> ${doneFileTmp}
+    cp "$doneFileTmp" "$doneFile" || rm -f "$doneFileTmp" "$doneFile"
+    [[ -n ${removeTMPdoneFile} ]] && rm -f ${doneFileTmp}
     return 1
   fi
 
@@ -193,21 +212,21 @@ goCPass0()
   echo "#####################"
   echo CPass0:
   echo JOB setup
-  echo nEvents            ${nEvents}
-  echo runNumber          ${runNumber}
-  echo ocdbPath           ${ocdbPath}
-  echo infile             ${infile}
-  echo chunkName          ${chunkName}
-  echo jobindex           ${jobindex}
-  echo recoTriggerOptions ${recoTriggerOptions}
-  echo targetDirectory    ${targetDirectory}
-  echo commonOutputPath         ${commonOutputPath}
-  echo doneFile      ${doneFile}
-  echo batchWorkingDirectory=$batchWorkingDirectory
-  echo runpath            ${runpath}  
-  echo outputDir          ${outputDir}
-  echo PWD                ${PWD}
-  echo ALICE_ROOT         ${ALICE_ROOT}
+  echo nEvents               ${nEvents}
+  echo runNumber             ${runNumber}
+  echo ocdbPath              ${ocdbPath}
+  echo infile                ${infile}
+  echo chunkName             ${chunkName}
+  echo jobindex              ${jobindex}
+  echo recoTriggerOptions    ${recoTriggerOptions}
+  echo targetDirectory       ${targetDirectory}
+  echo commonOutputPath      ${commonOutputPath}
+  echo doneFile              ${doneFile}
+  echo batchWorkingDirectory ${batchWorkingDirectory}
+  echo runpath               ${runpath}  
+  echo outputDir             ${outputDir}
+  echo PWD                   ${PWD}
+  echo ALICE_ROOT            ${ALICE_ROOT}
   echo "########## ###########"
 
   alirootInfo > ALICE_ROOT.log
@@ -233,6 +252,9 @@ goCPass0()
   echo
   chmod u+x runCPass0.sh
 
+  #remove spaces from around arguments to root macros
+  #for example this sometimes fails: 
+  #  root 'macro.C(argument1, argument2)'
   sed -i '/.*root .*\.C/ s|\s*,\s*|,|g' *.sh
 
   if [[ -n ${postSetUpActionCPass0} ]]; then
@@ -241,7 +263,7 @@ goCPass0()
   fi
 
   #run CPass0
-  echo "${runpath}/runCPass0.sh ${infile} ${nEvents} ${runNumber} ${ocdbPath} ${recoTriggerOptions}"
+  echo "${runpath}/runCPass0.sh /${infile} ${nEvents} ${runNumber} ${ocdbPath} ${recoTriggerOptions}"
   if [[ -n ${pretend} ]]; then
     sleep ${pretendDelay}
     touch AliESDs.root
@@ -250,8 +272,8 @@ goCPass0()
     touch rec.log
     touch calib.log
   else
-    echo ./runCPass0.sh "${infile}" "${nEvents}" "${runNumber}" "${ocdbPath}" "${recoTriggerOptions}"
-    ./runCPass0.sh "${infile}" "${nEvents}" "${runNumber}" "${ocdbPath}" "${recoTriggerOptions}"
+    #caveat: in the local case, first arg must start with a slash
+    ./runCPass0.sh "/${infile}" "${nEvents}" "${runNumber}" "${ocdbPath}" "${recoTriggerOptions}"
   fi
   
   #move stuff to final destination
@@ -259,23 +281,25 @@ goCPass0()
   /bin/ls
   echo
 
+  # [dberzano] OK this is fine!
   echo rm -f ./${chunkName}
   rm -f ./${chunkName}
-  echo "cp -R ${runpath}/* ${outputDir}"
-  cp -p -R ${runpath}/* ${outputDir}
+  echo "paranoidCp ${runpath}/* ${outputDir}"
+  paranoidCp ${runpath}/* ${outputDir}
   echo
   
   #validate CPass0
   cd ${outputDir}
-  touch ${doneFile}
-  echo "dir ${outputDir}" >> ${doneFile}
-  if summarizeLogs >> ${doneFile}; then
-    [[ -f ${outputDirMC}/galice.root ]] && echo "sim ${outputDirMC}/galice.root" >> ${doneFile}
-    [[ -f AliESDfriends_v1.root ]] && echo "calibfile ${outputDir}/AliESDfriends_v1.root" >> ${doneFile}
-    [[ -f AliESDs.root ]] && echo "esd ${outputDir}/AliESDs.root" >> ${doneFile}
+  if summarizeLogs >> ${doneFileTmp}; then
+    [[ -f ${outputDirMC}/galice.root ]] && echo "sim ${outputDirMC}/galice.root" >> ${doneFileTmp}
+    [[ -f AliESDfriends_v1.root ]] && echo "calibfile ${outputDir}/AliESDfriends_v1.root" >> ${doneFileTmp}
+    [[ -f AliESDs.root ]] && echo "esd ${outputDir}/AliESDs.root" >> ${doneFileTmp}
   fi
 
   [[ "${runpath}" != "${outputDir}" ]] && rm -rf ${runpath} && echo "removing ${runpath}"
+  cp "$doneFileTmp" "$doneFile" || rm -f "$doneFileTmp" "$doneFile"
+  [[ -n ${removeTMPdoneFile} ]] && rm -f ${doneFileTmp}
+  echo End: goCPass0
   return 0
 )
 
@@ -293,6 +317,7 @@ goCPass1()
   shift 7
   extraOpts=("$@")
   if ! parseConfig ${configFile} "$@"; then return 1; fi
+  echo Start: goCPass1
 
   #record the working directory provided by the batch system
   batchWorkingDirectory=${PWD}
@@ -308,8 +333,15 @@ goCPass1()
   fi
 
   [[ -z ${commonOutputPath} ]] && commonOutputPath=${PWD}
-  doneFile="${commonOutputPath}/meta/cpass1.job${jobindex}.run${runNumber}.done"
-  [[ -n ${useProfilingCommand} ]] && doneFile="${commonOutputPath}/meta/profiling.cpass1.job${jobindex}.run${runNumber}.done"
+
+  # This file signals that everything went fine
+  doneFileBase="cpass1.job${jobindex}.run${runNumber}.done"
+  [[ -n ${useProfilingCommand} ]] && doneFileBase="profiling.cpass1.job${jobindex}.run${runNumber}.done"
+
+  # We will have two copies of the file
+  mkdir -p "${commonOutputPath}/meta" || return 1
+  doneFileTmp="${batchWorkingDirectory}/${doneFileBase}"
+  doneFile="${commonOutputPath}/meta/${doneFileBase}"
 
   [[ -f ${alirootSource} && -z ${ALICE_ROOT} ]] && source ${alirootSource}
   
@@ -324,9 +356,11 @@ goCPass1()
   #Packages= ;OutputDir= ;LPMPass= ;TriggerAlias= ;LPMRunNumber= ;LPMProductionType= ;LPMInteractionType= ;LPMProductionTag= ;LPMAnchorRun= ;LPMAnchorProduction= ;LPMAnchorYear= 
   export PRODUCTION_METADATA="OutputDir=cpass1"
 
-  if [[ ! -f ${inputList} && -z ${pretend} ]]; then
-    touch ${doneFile}
-    echo "input file ${inputList} not found, exiting..." >> ${doneFile}
+  if [[ "${inputList}" == "${inputList%%://*}" && ! -f "${inputList}" && -z ${pretend} ]]; then
+    touch ${doneFileTmp}
+    echo "input file ${inputList} not found, exiting..." >> ${doneFileTmp}
+    cp "$doneFileTmp" "$doneFile" || rm -f "$doneFileTmp" "$doneFile"
+    [[ -n ${removeTMPdoneFile} ]] && rm -f ${doneFileTmp}
     return 1
   fi
   if [[ "${inputList}" =~ \.root$ ]]; then
@@ -339,15 +373,16 @@ goCPass1()
   outputDir=${targetDirectory}/${jobindex}_${chunkName%.*}
   mkdir -p ${outputDir}
   if [[ ! -d ${outputDir} ]];then
-    touch ${doneFile}
-    echo "cannot make ${outputDir}" >> ${doneFile}
+    touch ${doneFileTmp}
+    echo "cannot make ${outputDir}" >> ${doneFileTmp}
+    cp "$doneFileTmp" "$doneFile" || rm -f "$doneFileTmp" "$doneFile"
+    [[ -n ${removeTMPdoneFile} ]] && rm -f ${doneFileTmp}
     return 1
   fi
   
-  #runpath=${PWD}/rundir_cpass1_${runNumber}_${jobindex}
   runpath=${outputDir}
-  #[[ ${reconstructInTemporaryDir} -eq 1 && -n ${TMPDIR} ]] && runpath=${TMPDIR}
   [[ ${reconstructInTemporaryDir} -eq 1 ]] && runpath=$(mktemp -d -t cpass1.XXXXXX)
+  [[ ${reconstructInTemporaryDir} -eq 2 ]] && runpath=${PWD}/rundir_cpass1_${runNumber}_${jobindex}
 
   #MC
   if [[ "${infile}" =~ galice\.root ]]; then
@@ -358,18 +393,26 @@ goCPass1()
   #init the running path
   mkdir -p ${runpath}
   if [[ ! -d ${runpath} ]]; then
-   touch ${doneFile}
-   echo "cannot make runpath ${runpath}" >> ${doneFile}
+   touch ${doneFileTmp}
+   echo "cannot make runpath ${runpath}" >> ${doneFileTmp}
+   cp "$doneFileTmp" "$doneFile" || rm -f "$doneFileTmp" "$doneFile"
+   [[ -n ${removeTMPdoneFile} ]] && rm -f ${doneFileTmp}
    return 1
  fi
   if ! cd ${runpath}; then
-    touch ${doneFile}
-    echo "PWD=$PWD is not the runpath=${runpath}" >> ${doneFile}
+    touch ${doneFileTmp}
+    echo "PWD=$PWD is not the runpath=${runpath}" >> ${doneFileTmp}
+    cp "$doneFileTmp" "$doneFile" || rm -f "$doneFileTmp" "$doneFile"
+    [[ -n ${removeTMPdoneFile} ]] && rm -f ${doneFileTmp}
     return 1
   fi
 
   #this is needed for runCPass1.sh
-  ln -s ${infile} ${runpath}/${chunkName}
+  if [[ $copyInputData == 0 ]]; then
+    ln -s ${infile} ${runpath}/${chunkName}
+  else
+    copyFileToLocal ${infile} ${runpath}/${chunkName}
+  fi
 
   logOutputDir=${runpath}
   [[ -n ${logToFinalDestination} ]] && logOutputDir=${outputDir}
@@ -380,22 +423,22 @@ goCPass1()
   echo "#####################"
   echo CPass1:
   echo JOB setup
-  echo nEvents            ${nEvents}
-  echo runNumber          ${runNumber}
-  echo ocdbPath           ${ocdbPath}
-  echo infile             ${infile}
-  echo chunkName          ${chunkName}
-  echo jobindex           ${jobindex}
-  echo recoTriggerOptions ${recoTriggerOptions}
-  echo targetDirectory    ${targetDirectory}
-  echo commonOutputPath         ${commonOutputPath}
-  echo doneFile      ${doneFile}
-  echo runpath            ${runpath}  
-  echo outputDir          ${outputDir}
+  echo nEvents               ${nEvents}
+  echo runNumber             ${runNumber}
+  echo ocdbPath              ${ocdbPath}
+  echo infile                ${infile}
+  echo chunkName             ${chunkName}
+  echo jobindex              ${jobindex}
+  echo recoTriggerOptions    ${recoTriggerOptions}
+  echo targetDirectory       ${targetDirectory}
+  echo commonOutputPath      ${commonOutputPath}
+  echo doneFile              ${doneFile}
+  echo runpath               ${runpath}  
+  echo outputDir             ${outputDir}
   echo batchWorkingDirectory ${batchWorkingDirectory}
-  echo ALICE_ROOT         ${ALICE_ROOT}
-  echo PWD                ${PWD}
-  echo "########## ###########"
+  echo ALICE_ROOT            ${ALICE_ROOT}
+  echo PWD                   ${PWD}
+  echo "#####################"
 
   alirootInfo > ALICE_ROOT.log
 
@@ -426,7 +469,9 @@ goCPass1()
   /bin/ls
   echo
 
-  #remove spaces around commas from calls to root
+  #remove spaces from around arguments to root macros
+  #for example this sometimes fails: 
+  #  root 'macro.C(argument1, argument2)'
   sed -i '/.*root .*\.C/ s|\s*,\s*|,|g' *.sh
 
   if [[ -n ${postSetUpActionCPass1} ]]; then
@@ -444,8 +489,10 @@ goCPass1()
   fi
 
   if [[ ! $(/bin/ls -1 OCDB/*/*/*/*.root 2>/dev/null) ]]; then
-    touch ${doneFile}
-    echo "cpass0 produced no calibration! exiting..." >> ${doneFile}
+    touch ${doneFileTmp}
+    echo "cpass0 produced no calibration! exiting..." >> ${doneFileTmp}
+    cp "$doneFileTmp" "$doneFile" || rm -f "$doneFileTmp" "$doneFile"
+    [[ -n ${removeTMPdoneFile} ]] && rm -f ${doneFileTmp}
     return 1
   fi
 
@@ -471,7 +518,7 @@ goCPass1()
 
   #run CPass1
   chmod u+x runCPass1.sh
-  echo "${runpath}/runCPass1.sh ${infile} ${nEvents} ${runNumber} ${ocdbPath} ${recoTriggerOptions}"
+  echo "${runpath}/runCPass1.sh /${infile} ${nEvents} ${runNumber} ${ocdbPath} ${recoTriggerOptions}"
   if [[ -n ${pretend} ]]; then
     sleep ${pretendDelay}
     touch AliESDs_Barrel.root
@@ -489,7 +536,8 @@ goCPass1()
     touch qa.log
     touch filtering.log FilterEvents_Trees.root
   else
-    ./runCPass1.sh "${infile}" "${nEvents}" "${runNumber}" "${ocdbPath}" "${recoTriggerOptions}"
+    #caveat: in the local case, first arg must start with a slash
+    ./runCPass1.sh "/${infile}" "${nEvents}" "${runNumber}" "${ocdbPath}" "${recoTriggerOptions}"
     
     [[ ! -f AliESDs_Barrel.root && -f Barrel/AliESDs.root ]] && mv Barrel/AliESDs.root AliESDs_Barrel.root
     [[ ! -f AliESDfriends_Barrel.root && -f Barrel/AliESDfriends.root ]] && mv Barrel/AliESDfriends.root AliESDfriends_Barrel.root
@@ -537,37 +585,38 @@ goCPass1()
   /bin/ls
   echo rm -f ./${chunkName}
   rm -f ./${chunkName}
-  echo "cp -R ${runpath}/* ${outputDir}"
-  cp -pf -R ${runpath}/* ${outputDir}
+  echo "paranoidCp ${runpath}/* ${outputDir}"
+  paranoidCp ${runpath}/* ${outputDir}
   echo
 
   #validate CPass1
   cd ${outputDir}
-  touch ${doneFile}
-  echo "dir ${outputDir}" >> ${doneFile}
-  if summarizeLogs >> ${doneFile}; then
-    [[ -f AliESDs_Barrel.root ]] && echo "esd ${outputDir}/AliESDs_Barrel.root" >> ${doneFile}
-    [[ -f AliESDfriends_v1.root ]] && echo "calibfile ${outputDir}/AliESDfriends_v1.root" >> ${doneFile}
-    [[ -f QAresults_Barrel.root ]] && echo "qafile ${outputDir}/QAresults_Barrel.root" >> ${doneFile}
-    [[ -f QAresults_Outer.root ]] && echo "qafile ${outputDir}/QAresults_Outer.root" >> ${doneFile}
-    [[ -f QAresults_barrel.root ]] && echo "qafile ${outputDir}/QAresults_barrel.root" >> ${doneFile}
-    [[ -f QAresults_outer.root ]] && echo "qafile ${outputDir}/QAresults_outer.root" >> ${doneFile}
-    [[ -f FilterEvents_Trees.root ]] && echo "filteredTree ${outputDir}/FilterEvents_Trees.root" >> ${doneFile}
+  if summarizeLogs >> ${doneFileTmp}; then
+    [[ -f AliESDs_Barrel.root ]] && echo "esd ${outputDir}/AliESDs_Barrel.root" >> ${doneFileTmp}
+    [[ -f AliESDfriends_v1.root ]] && echo "calibfile ${outputDir}/AliESDfriends_v1.root" >> ${doneFileTmp}
+    [[ -f QAresults_Barrel.root ]] && echo "qafile ${outputDir}/QAresults_Barrel.root" >> ${doneFileTmp}
+    [[ -f QAresults_Outer.root ]] && echo "qafile ${outputDir}/QAresults_Outer.root" >> ${doneFileTmp}
+    [[ -f QAresults_barrel.root ]] && echo "qafile ${outputDir}/QAresults_barrel.root" >> ${doneFileTmp}
+    [[ -f QAresults_outer.root ]] && echo "qafile ${outputDir}/QAresults_outer.root" >> ${doneFileTmp}
+    [[ -f FilterEvents_Trees.root ]] && echo "filteredTree ${outputDir}/FilterEvents_Trees.root" >> ${doneFileTmp}
   else
-    if grep "qa_outer.log.*OK" ${doneFile} > /dev/null; then
-      [[ -f QAresults_Outer.root ]] && echo "qafile ${outputDir}/QAresults_Outer.root" >> ${doneFile}
-      [[ -f QAresults_outer.root ]] && echo "qafile ${outputDir}/QAresults_outer.root" >> ${doneFile}
+    if grep "qa_outer.log.*OK" ${doneFileTmp} > /dev/null; then
+      [[ -f QAresults_Outer.root ]] && echo "qafile ${outputDir}/QAresults_Outer.root" >> ${doneFileTmp}
+      [[ -f QAresults_outer.root ]] && echo "qafile ${outputDir}/QAresults_outer.root" >> ${doneFileTmp}
     fi
-    if grep "qa_barrel.log.*OK" ${doneFile} > /dev/null; then
-      [[ -f QAresults_Barrel.root ]] && echo "qafile ${outputDir}/QAresults_Barrel.root" >> ${doneFile}
-      [[ -f QAresults_barrel.root ]] && echo "qafile ${outputDir}/QAresults_barrel.root" >> ${doneFile}
+    if grep "qa_barrel.log.*OK" ${doneFileTmp} > /dev/null; then
+      [[ -f QAresults_Barrel.root ]] && echo "qafile ${outputDir}/QAresults_Barrel.root" >> ${doneFileTmp}
+      [[ -f QAresults_barrel.root ]] && echo "qafile ${outputDir}/QAresults_barrel.root" >> ${doneFileTmp}
     fi
-    if grep "filtering.log.*OK" ${doneFile} > /dev/null; then
-      [[ -f FilterEvents_Trees.root ]] && echo "filteredTree ${outputDir}/FilterEvents_Trees.root" >> ${doneFile}
+    if grep "filtering.log.*OK" ${doneFileTmp} > /dev/null; then
+      [[ -f FilterEvents_Trees.root ]] && echo "filteredTree ${outputDir}/FilterEvents_Trees.root" >> ${doneFileTmp}
     fi
   fi
 
   [[ "${runpath}" != "${outputDir}" ]] && rm -rf ${runpath}
+  cp "$doneFileTmp" "$doneFile" || rm -f "$doneFileTmp" "$doneFile"
+  [[ -n ${removeTMPdoneFile} ]] && rm -f ${doneFileTmp}
+  echo End: goCPass1
   return 0
 )
 
@@ -585,32 +634,43 @@ goMergeCPass0()
   calibrationFilesToMerge=${5}  #can be a non-existent file, will then be produced on the fly
   shift 5
   if ! parseConfig ${configFile} "$@"; then return 1; fi
+  echo Start: goMergeCPass0
 
   #record the working directory provided by the batch system
   batchWorkingDirectory=${PWD}
 
   [[ -z ${commonOutputPath} ]] && commonOutputPath=${PWD}
-  doneFile="${commonOutputPath}/meta/merge.cpass0.run${runNumber}.done"
+
+  # This file signals that everything went fine
+  doneFileBase="merge.cpass0.run${runNumber}.done"
+
+  # We will have two copies of the file
+  mkdir -p "${commonOutputPath}/meta" || return 1
+  doneFileTmp="${batchWorkingDirectory}/${doneFileBase}"
+  doneFile="${commonOutputPath}/meta/${doneFileBase}"
 
   umask 0002
   ulimit -c unlimited 
 
   [[ -f ${alirootSource} && -z ${ALICE_ROOT} ]] && source ${alirootSource}
 
-  #runpath=${PWD}/rundir_cpass0_Merge_${runNumber}
   runpath=${outputDir}
-  #[[ ${reconstructInTemporaryDir} -eq 1 && -n ${TMPDIR} ]] && runpath=${TMPDIR}
   [[ ${reconstructInTemporaryDir} -eq 1 ]] && runpath=$(mktemp -d -t mergeCPass0.XXXXXX)
+  [[ ${reconstructInTemporaryDir} -eq 2 ]] && runpath=${PWD}/rundir_mergeCPass0_${runNumber}
 
   mkdir -p ${runpath}
   if [[ ! -d ${runpath} ]]; then
-    touch ${doneFile}
-    echo "not able to make the runpath ${runpath}" >> ${doneFile}
+    touch ${doneFileTmp}
+    echo "not able to make the runpath ${runpath}" >> ${doneFileTmp}
+    cp "$doneFileTmp" "$doneFile" || rm -f "$doneFileTmp" "$doneFile"
+    [[ -n ${removeTMPdoneFile} ]] && rm -f ${doneFileTmp}
     return 1
   fi
   if ! cd ${runpath}; then 
-    touch ${doneFile}
-    echo "PWD=$PWD is not the runpath=${runpath}" >> ${doneFile}
+    touch ${doneFileTmp}
+    echo "PWD=$PWD is not the runpath=${runpath}" >> ${doneFileTmp}
+    cp "$doneFileTmp" "$doneFile" || rm -f "$doneFileTmp" "$doneFile"
+    [[ -n ${removeTMPdoneFile} ]] && rm -f ${doneFileTmp}
     return 1
   fi
 
@@ -646,6 +706,9 @@ goMergeCPass0()
     [[ ${file##*/} =~ .*\.sh ]] && chmod +x ${file##*/}
   done
   
+  #remove spaces from around arguments to root macros
+  #for example this sometimes fails: 
+  #  root 'macro.C(argument1, argument2)'
   sed -i '/.*root .*\.C/ s|\s*,\s*|,|g' *.sh
 
   alirootInfo > ALICE_ROOT.log
@@ -665,7 +728,7 @@ goMergeCPass0()
     /bin/ls -1 ${outputDir}/*/AliESDfriends_v1.root 2>/dev/null > ${calibrationFilesToMerge}
   fi
   
-  echo "${mergingScript} ${calibrationFilesToMerge} ${runNumber} local://./OCDB ${ocdbStorage}"
+  echo "${mergingScript} ${calibrationFilesToMerge} ${runNumber} local://./OCDB defaultOCDB=${ocdbStorage} fileAccessMethod=nocopy"
   if [[ -n ${pretend} ]]; then
     sleep ${pretendDelay}
     touch CalibObjects.root
@@ -677,7 +740,7 @@ goMergeCPass0()
     echo "some calibration" >> ./OCDB/TPC/Calib/TimeGain/someCalibObject_0-999999_cpass0.root
     echo "some calibration" >> ./OCDB/TPC/Calib/TimeDrift/otherCalibObject_0-999999_cpass0.root
   else
-    ./${mergingScript} ${calibrationFilesToMerge} ${runNumber} "local://./OCDB" ${ocdbStorage} >> "mergeMakeOCDB.log"
+    ./${mergingScript} ${calibrationFilesToMerge} ${runNumber} "local://./OCDB" defaultOCDB=${ocdbStorage} fileAccessMethod=nocopy >> "mergeMakeOCDB.log"
 
     #produce the calib trees for expert QA (dcsTime.root)
     goMakeLocalOCDBaccessConfig ./OCDB
@@ -687,12 +750,20 @@ goMergeCPass0()
   
   ### produce the output
   #tar the produced OCDB for reuse
-  tar czf ${commonOutputPath}/meta/cpass0.localOCDB.${runNumber}.tgz ./OCDB
+  #tar czf ${commonOutputPath}/meta/cpass0.localOCDB.${runNumber}.tgz ./OCDB
+
+  # Create tarball with OCDB, store on the shared directory, create signal file on batch directory
+  mkdir -p ${commonOutputPath}/meta
+  baseTar="cpass0.localOCDB.${runNumber}.tgz"
+  tar czf ${batchWorkingDirectory}/${baseTar} ./OCDB && \
+    mv ${batchWorkingDirectory}/${baseTar} ${commonOutputPath}/meta/${baseTar} && \
+    touch ${batchWorkingDirectory}/${baseTar}.done
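+  # the .done marker is what the generated makeflow rules use: it is a target of the
+  # MergeCPass0 rule and a dependency of the CPass1 jobs (see goGenerateMakeflow)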
 
   /bin/ls
 
   #copy all to output dir
-  cp -pf -R ${runpath}/* ${outputDir}
+  echo "paranoidCp ${runpath}/* ${outputDir}"
+  paranoidCp ${runpath}/* ${outputDir}
 
   if [[ -n ${generateMC} ]]; then
     goPrintValues sim ${commonOutputPath}/meta/sim.run${runNumber}.list ${commonOutputPath}/meta/cpass0.job*.run${runNumber}.done
@@ -700,14 +771,15 @@ goMergeCPass0()
 
   #validate merging cpass0
   cd ${outputDir}
-  touch ${doneFile}
-  echo "dir ${outputDir}" >> ${doneFile}
-  if summarizeLogs >> ${doneFile}; then
-    [[ -f CalibObjects.root ]] && echo "calibfile ${outputDir}/CalibObjects.root" >> ${doneFile}
-    [[ -f dcsTime.root ]] && echo "dcsTree ${outputDir}/dcsTime.root" >> ${doneFile}
+  if summarizeLogs >> ${doneFileTmp}; then
+    [[ -f CalibObjects.root ]] && echo "calibfile ${outputDir}/CalibObjects.root" >> ${doneFileTmp}
+    [[ -f dcsTime.root ]] && echo "dcsTree ${outputDir}/dcsTime.root" >> ${doneFileTmp}
   fi
 
   [[ "${runpath}" != "${outputDir}" ]] && rm -rf ${runpath}
+  cp "$doneFileTmp" "$doneFile" || rm -f "$doneFileTmp" "$doneFile"
+  [[ -n ${removeTMPdoneFile} ]] && rm -f ${doneFileTmp}
+  echo End: goMergeCPass0
   return 0
 )
 
@@ -726,12 +798,20 @@ goMergeCPass1()
   filteredFilesToMerge=${7}
   shift 7
   if ! parseConfig ${configFile} "$@"; then return 1; fi
+  echo Start: goMergeCPass1
 
   #record the working directory provided by the batch system
   batchWorkingDirectory=${PWD}
 
   [[ -z ${commonOutputPath} ]] && commonOutputPath=${PWD}
-  doneFile="${commonOutputPath}/meta/merge.cpass1.run${runNumber}.done"
+
+  # This file signals that everything went fine
+  doneFileBase="merge.cpass1.run${runNumber}.done"
+
+  # We will have two copies of the file
+  mkdir -p "${commonOutputPath}/meta" || return 1
+  doneFileTmp="${batchWorkingDirectory}/${doneFileBase}"
+  doneFile="${commonOutputPath}/meta/${doneFileBase}"
 
   umask 0002
   ulimit -c unlimited 
@@ -743,20 +823,23 @@ goMergeCPass1()
 
   [[ -f ${alirootSource} && -z ${ALICE_ROOT} ]] && source ${alirootSource}
 
-  #runpath=${PWD}/rundir_cpass1_Merge_${runNumber}
   runpath=${outputDir}
-  #[[ ${reconstructInTemporaryDir} -eq 1 && -n ${TMPDIR} ]] && runpath=${TMPDIR}
   [[ ${reconstructInTemporaryDir} -eq 1 ]] && runpath=$(mktemp -d -t mergeCPass1.XXXXXX)
+  [[ ${reconstructInTemporaryDir} -eq 2 ]] && runpath=${PWD}/rundir_mergeCPass1_${runNumber}
 
   mkdir -p ${runpath}
   if [[ ! -d ${runpath} ]]; then
-    touch ${doneFile}
-    echo "not able to make the runpath ${runpath}" >> ${doneFile}
+    touch ${doneFileTmp}
+    echo "not able to make the runpath ${runpath}" >> ${doneFileTmp}
+    cp "$doneFileTmp" "$doneFile" || rm -f "$doneFileTmp" "$doneFile"
+    [[ -n ${removeTMPdoneFile} ]] && rm -f ${doneFileTmp}
     return 1
   fi
   if ! cd ${runpath}; then 
-    touch ${doneFile}
-    echo "PWD=$PWD is not the runpath=${runpath}" >> ${doneFile}
+    touch ${doneFileTmp}
+    echo "PWD=$PWD is not the runpath=${runpath}" >> ${doneFileTmp}
+    cp "$doneFileTmp" "$doneFile" || rm -f "$doneFileTmp" "$doneFile"
+    [[ -n ${removeTMPdoneFile} ]] && rm -f ${doneFileTmp}
     return 1
   fi
 
@@ -802,6 +885,9 @@ goMergeCPass1()
     [[ ${file##*/} =~ .*\.sh ]] && chmod +x ${file##*/}
   done
 
+  #remove spaces from around arguments to root macros
+  #for example this sometimes fails: 
+  #  root 'macro.C(argument1, argument2)'
   sed -i '/.*root .*\.C/ s|\s*,\s*|,|g' *.sh
 
   #configure local OCDB storage from CPass0 (creates the localOCDBaccessConfig.C script)
@@ -835,8 +921,8 @@ goMergeCPass1()
     echo "/bin/ls -1 ${outputDir}/*/QAresults*.root | while read x; do echo ${x%/*}; done | sort | uniq > ${qaFilesToMerge}"
     /bin/ls -1 ${outputDir}/*/QAresults*.root | while read x; do echo ${x%/*}; done | sort | uniq > ${qaFilesToMerge}
   fi
-  
-  echo "${mergingScript} ${calibrationFilesToMerge} ${runNumber} local://./OCDB ${ocdbStorage}"
+
+  echo "${mergingScript} ${calibrationFilesToMerge} ${runNumber} local://./OCDB defaultOCDB=${ocdbStorage} fileAccessMethod=nocopy"
   if [[ -n ${pretend} ]]; then
     sleep ${pretendDelay}
     touch ocdb.log
@@ -850,7 +936,7 @@ goMergeCPass1()
     touch ${qaMergedOutputFileName}
     mkdir -p OCDB
   else
-    ./${mergingScript} ${calibrationFilesToMerge} ${runNumber} "local://./OCDB" ${ocdbStorage}
+    ./${mergingScript} ${calibrationFilesToMerge} ${runNumber} "local://./OCDB" defaultOCDB=${ocdbStorage} fileAccessMethod=nocopy
 
     #merge QA (and filtered trees)
     [[ -n ${AliAnalysisTaskFilteredTree_fLowPtTrackDownscaligF} ]] && export AliAnalysisTaskFilteredTree_fLowPtTrackDownscaligF
@@ -872,33 +958,40 @@ goMergeCPass1()
     aliroot -b -q "${ALICE_ROOT}/PWGPP/TPC/macros/CalibSummary.C(${runNumber},\"${ocdbStorage}\")" > calibTree.log
   fi
 
-  tar czf ${commonOutputPath}/meta/cpass1.localOCDB.${runNumber}.tgz ./OCDB
+  # Create tarball with OCDB, store on the shared directory, create signal file on batch directory
+  mkdir -p ${commonOutputPath}/meta
+  baseTar="cpass1.localOCDB.${runNumber}.tgz"
+  tar czf ${batchWorkingDirectory}/${baseTar} ./OCDB && \
+    mv ${batchWorkingDirectory}/${baseTar} ${commonOutputPath}/meta/${baseTar} && \
+    touch ${batchWorkingDirectory}/${baseTar}.done
 
   /bin/ls
 
   #copy all to output dir
-  cp -pf -R ${runpath}/* ${outputDir}
+  echo "paranoidCp ${runpath}/* ${outputDir}"
+  paranoidCp ${runpath}/* ${outputDir}
   
   #validate merge cpass1
   cd ${outputDir}
-  touch ${doneFile}
-  echo "dir ${outputDir}" >> ${doneFile}
-  if summarizeLogs >>  ${doneFile}; then
-    [[ -f CalibObjects.root ]] && echo "calibfile ${outputDir}/CalibObjects.root" >> ${doneFile}
-    [[ -f ${qaMergedOutputFileName} ]] && echo "qafile ${outputDir}/${qaMergedOutputFileName}" >> ${doneFile}
-    [[ -f trending.root ]] && echo "trendingfile ${outputDir}/trending.root" >> ${doneFile}
-    [[ -f dcsTime.root ]] && echo "dcsTree ${outputDir}/dcsTime.root" >> ${doneFile}
-    [[ -f FilterEvents_Trees.root ]] && echo "filteredTree ${outputDir}/FilterEvents_Trees.root" >> ${doneFile}
+  if summarizeLogs >>  ${doneFileTmp}; then
+    [[ -f CalibObjects.root ]] && echo "calibfile ${outputDir}/CalibObjects.root" >> ${doneFileTmp}
+    [[ -f ${qaMergedOutputFileName} ]] && echo "qafile ${outputDir}/${qaMergedOutputFileName}" >> ${doneFileTmp}
+    [[ -f trending.root ]] && echo "trendingfile ${outputDir}/trending.root" >> ${doneFileTmp}
+    [[ -f dcsTime.root ]] && echo "dcsTree ${outputDir}/dcsTime.root" >> ${doneFileTmp}
+    [[ -f FilterEvents_Trees.root ]] && echo "filteredTree ${outputDir}/FilterEvents_Trees.root" >> ${doneFileTmp}
   else
-    if grep "mergeQA.log.*OK" ${doneFile} > /dev/null; then
-      [[ -f ${qaMergedOutputFileName} ]] && echo "qafile ${outputDir}/${qaMergedOutputFileName}" >> ${doneFile}
+    if grep "mergeQA.log.*OK" ${doneFileTmp} > /dev/null; then
+      [[ -f ${qaMergedOutputFileName} ]] && echo "qafile ${outputDir}/${qaMergedOutputFileName}" >> ${doneFileTmp}
     fi
-    if grep "mergeFilteredTrees.log.*OK" ${doneFile} > /dev/null; then
-      [[ -f FilterEvents_Trees.root ]] && echo "filteredTree ${outputDir}/FilterEvents_Trees.root" >> ${doneFile}
+    if grep "mergeFilteredTrees.log.*OK" ${doneFileTmp} > /dev/null; then
+      [[ -f FilterEvents_Trees.root ]] && echo "filteredTree ${outputDir}/FilterEvents_Trees.root" >> ${doneFileTmp}
     fi
   fi
       
   [[ "${runpath}" != "${outputDir}" ]] && rm -rf ${runpath}
+  cp "$doneFileTmp" "$doneFile" || rm -f "$doneFileTmp" "$doneFile"
+  [[ -n ${removeTMPdoneFile} ]] && rm -f ${doneFileTmp}
+  echo End: goMergeCPass1
   return 0
 )
 
@@ -959,9 +1052,9 @@ goSubmitMakeflow()
   #if which greadlink; then self=$(greadlink -f "${0}"); fi
   
   #for reference copy the setup to the output dir
-  cp ${self} ${commonOutputPath}
-  cp ${configFile} ${commonOutputPath}
-  cp ${inputFileList} ${commonOutputPath}
+  paranoidCp ${self} ${commonOutputPath}
+  paranoidCp ${configFile} ${commonOutputPath}
+  paranoidCp ${inputFileList} ${commonOutputPath}
 
   #submit - use makeflow if available, fall back to old stuff when makeflow not there
   if which makeflow; then
@@ -971,6 +1064,15 @@ goSubmitMakeflow()
   else 
     echo "no makeflow!"
   fi
+  
+  #summarize the run based on the makeflow log
+  #and add it to the end of summary log
+  awk '/STARTED/   {startTime=$3} 
+       /COMPLETED/ {endTime=$3} 
+       END         {print "makeflow running time: "(endTime-startTime)/1000000/3600" hours"}' \
+      benchmark.makeflow.makeflowlog | tee -a summary.log
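+  #(the makeflow log timestamps appear to be in microseconds, hence the division
+  # by 1000000 and 3600 to obtain hours)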
+  paranoidCp summary.log ${commonOutputPath}
+
   return 0
 }
 
@@ -1016,6 +1118,7 @@ goGenerateMakeflow()
               "runCalibTrain.C"
               "runCPass0.sh"
               "recCPass0.C"
+              "runQA.sh"
   )
   for file in ${inputFiles[*]}; do
     [[ -f ${file} ]] && copyFiles+=("${file}")
@@ -1039,6 +1142,10 @@ goGenerateMakeflow()
     declare -a arr_cpass0_outputs
     declare -a arr_cpass1_outputs
 
+    #Header
+    echo "### Automatically generated on $(LANG=C date) ###"
+    echo ; echo
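+    #The generated file uses the Make-like makeflow rule syntax:
+    #  target(s): dependencies
+    #      command
+    #commands prefixed with LOCAL run on the submission host instead of being
+    #dispatched to the batch system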
+
     jobindex=0
     inputFile=""
     while read inputFile; do
@@ -1047,56 +1154,87 @@ goGenerateMakeflow()
       if [[ ${autoOCDB} -ne 0 ]]; then
         currentDefaultOCDB=$(setYear ${inputFile} ${defaultOCDB})
       fi
+      guessRunData ${inputFile}
+
+      #Set variables
+      echo "### Variables ###"
+      echo "OUTPATH=\"${commonOutputPath}/${year}/${period}\""
+      echo ; echo
 
       #CPass0
-      arr_cpass0_outputs[${jobindex}]="${commonOutputPath}/meta/cpass0.job${jobindex}.run${runNumber}.done"
-      echo "${arr_cpass0_outputs[${jobindex}]} : benchmark.sh ${configFile} ${copyFiles[@]}"
-      echo " ${alirootEnv} ./benchmark.sh CPass0 ${commonOutputPath}/000${runNumber}/cpass0 ${inputFile} ${nEvents} ${currentDefaultOCDB} ${configFile} ${runNumber} ${jobindex} ${extraOpts[@]}"" "
+      #arr_cpass0_outputs[${jobindex}]="${commonOutputPath}/meta/cpass0.job${jobindex}.run${runNumber}.done"
+      arr_cpass0_outputs[${jobindex}]="cpass0.job${jobindex}.run${runNumber}.done"
+      echo "### CPass0 ###"
+      echo "${arr_cpass0_outputs[${jobindex}]}: benchmark.sh ${configFile} ${copyFiles[@]}"
+      echo "    ${alirootEnv} ./benchmark.sh CPass0 \$OUTPATH/000${runNumber}/cpass0 ${inputFile} ${nEvents} ${currentDefaultOCDB} ${configFile} ${runNumber} ${jobindex} ${extraOpts[@]}"" "
+      echo ; echo
 
       #CPass1
-      arr_cpass1_outputs[${jobindex}]="${commonOutputPath}/meta/cpass1.job${jobindex}.run${runNumber}.done"
-      echo "${arr_cpass1_outputs[${jobindex}]} : benchmark.sh ${configFile} ${commonOutputPath}/meta/cpass0.localOCDB.${runNumber}.tgz ${copyFiles[@]}"
-      echo " ${alirootEnv} ./benchmark.sh CPass1 ${commonOutputPath}/000${runNumber}/cpass1 ${inputFile} ${nEvents} ${currentDefaultOCDB} ${configFile} ${runNumber} ${jobindex} ${extraOpts[@]}"" "
+      #arr_cpass1_outputs[${jobindex}]="${commonOutputPath}/meta/cpass1.job${jobindex}.run${runNumber}.done"
+      arr_cpass1_outputs[${jobindex}]="cpass1.job${jobindex}.run${runNumber}.done"
+      echo "### CPass1 ###"
+      echo "${arr_cpass1_outputs[${jobindex}]}: benchmark.sh ${configFile} cpass0.localOCDB.${runNumber}.tgz.done ${copyFiles[@]}"
+      echo "    ${alirootEnv} ./benchmark.sh CPass1 \$OUTPATH/000${runNumber}/cpass1 ${inputFile} ${nEvents} ${currentDefaultOCDB} ${configFile} ${runNumber} ${jobindex} ${extraOpts[@]}"" "
+      echo ; echo
       ((jobindex++))
 
     done< <(grep "/000${runNumber}/" ${inputFileList})
     
     #CPass0 list of Calib files to merge
-    arr_cpass0_calib_list[${runNumber}]="${commonOutputPath}/meta/cpass0.calib.run${runNumber}.list"
-    echo "${arr_cpass0_calib_list[${runNumber}]} : benchmark.sh ${arr_cpass0_outputs[*]}"
-    echo "  ./benchmark.sh PrintValues calibfile ${arr_cpass0_calib_list[${runNumber}]} ${arr_cpass0_outputs[*]}"
-    echo
+    #arr_cpass0_calib_list[${runNumber}]="${commonOutputPath}/meta/cpass0.calib.run${runNumber}.list"
+    arr_cpass0_calib_list[${runNumber}]="cpass0.calib.run${runNumber}.list"
+    echo "### Produces the list of CPass0 files to merge (executes locally) ###"
+    echo "${arr_cpass0_calib_list[${runNumber}]}: benchmark.sh ${arr_cpass0_outputs[*]}"
+    echo "    LOCAL ./benchmark.sh PrintValues calibfile ${arr_cpass0_calib_list[${runNumber}]} ${arr_cpass0_outputs[*]} && mkdir -p \$OUTPATH/meta && cp ${arr_cpass0_calib_list[${runNumber}]} \$OUTPATH/meta/${arr_cpass0_calib_list[${runNumber}]}"
+    echo ; echo
 
     #CPass0 merging
-    arr_cpass0_merged[${runNumber}]="${commonOutputPath}/meta/merge.cpass0.run${runNumber}.done"
-    echo "${commonOutputPath}/meta/cpass0.localOCDB.${runNumber}.tgz ${arr_cpass0_merged[${runNumber}]} : benchmark.sh ${configFile} ${arr_cpass0_calib_list[${runNumber}]} ${copyFiles[@]}"
-    echo " ${alirootEnv} ./benchmark.sh MergeCPass0 ${commonOutputPath}/000${runNumber}/cpass0 ${currentDefaultOCDB} ${configFile} ${runNumber} ${arr_cpass0_calib_list[${runNumber}]} ${extraOpts[@]}"" "
+    echo "### Merges CPass0 files ###"
+    #arr_cpass0_merged[${runNumber}]="${commonOutputPath}/meta/merge.cpass0.run${runNumber}.done"
+    arr_cpass0_merged[${runNumber}]="merge.cpass0.run${runNumber}.done"
+    echo "cpass0.localOCDB.${runNumber}.tgz.done ${arr_cpass0_merged[${runNumber}]}: benchmark.sh ${configFile} ${arr_cpass0_calib_list[${runNumber}]} ${copyFiles[@]}"
+    echo "    ${alirootEnv} ./benchmark.sh MergeCPass0 \$OUTPATH/000${runNumber}/cpass0 ${currentDefaultOCDB} ${configFile} ${runNumber} ${arr_cpass0_calib_list[${runNumber}]} ${extraOpts[@]}"" "
+    echo ; echo
 
     #CPass1 list of Calib/QA/ESD/filtered files
     # the trick with QA is to have the string "Stage.txt" in the file name of the list of directories with QA output to trigger
     # the production of the QA trending tree (only then the task->Finish() will be called in QAtrain_duo.C, on the grid
     # this corresponds to the last merging stage)
-    arr_cpass1_QA_list[${runNumber}]="${commonOutputPath}/meta/cpass1.QA.run${runNumber}.lastMergingStage.txt.list"
+    #arr_cpass1_QA_list[${runNumber}]="${commonOutputPath}/meta/cpass1.QA.run${runNumber}.lastMergingStage.txt.list"
+    arr_cpass1_QA_list[${runNumber}]="cpass1.QA.run${runNumber}.lastMergingStage.txt.list"
+    echo "### Lists CPass1 QA ###"
     echo "${arr_cpass1_QA_list[${runNumber}]}: benchmark.sh ${arr_cpass1_outputs[*]}"
-    echo "  ./benchmark.sh PrintValues dir ${arr_cpass1_QA_list[${runNumber}]} ${arr_cpass1_outputs[*]}"
-    echo
-    arr_cpass1_calib_list[${runNumber}]="${commonOutputPath}/meta/cpass1.calib.run${runNumber}.list"
-    echo "${arr_cpass1_calib_list[${runNumber}]} : benchmark.sh ${arr_cpass1_outputs[*]}"
-    echo "  ./benchmark.sh PrintValues calibfile ${arr_cpass1_calib_list[${runNumber}]} ${arr_cpass1_outputs[*]};"
-    echo
-    arr_cpass1_ESD_list[${runNumber}]="${commonOutputPath}/meta/cpass1.ESD.run${runNumber}.list"
-    echo "${arr_cpass1_ESD_list[${runNumber}]} : benchmark.sh ${arr_cpass1_outputs[*]}"
-    echo "  ./benchmark.sh PrintValues esd ${arr_cpass1_ESD_list[${runNumber}]} ${arr_cpass1_outputs[*]}"
-    echo
-    arr_cpass1_filtered_list[${runNumber}]="${commonOutputPath}/meta/cpass1.filtered.run${runNumber}.list"
-    echo "${arr_cpass1_filtered_list[${runNumber}]} : benchmark.sh ${arr_cpass1_outputs[*]}"
-    echo "  ./benchmark.sh PrintValues filteredTree ${arr_cpass1_filtered_list[${runNumber}]} ${arr_cpass1_outputs[*]}"
-    echo
+    echo "    LOCAL ./benchmark.sh PrintValues dir ${arr_cpass1_QA_list[${runNumber}]} ${arr_cpass1_outputs[*]} && mkdir -p \$OUTPATH/meta && cp ${arr_cpass1_QA_list[${runNumber}]} \$OUTPATH/meta/${arr_cpass1_QA_list[${runNumber}]}"
+    echo ; echo
+
+    #arr_cpass1_calib_list[${runNumber}]="${commonOutputPath}/meta/cpass1.calib.run${runNumber}.list"
+    arr_cpass1_calib_list[${runNumber}]="cpass1.calib.run${runNumber}.list"
+    echo "### Lists CPass1 Calib ###"
+    echo "${arr_cpass1_calib_list[${runNumber}]}: benchmark.sh ${arr_cpass1_outputs[*]}"
+    echo "    LOCAL ./benchmark.sh PrintValues calibfile ${arr_cpass1_calib_list[${runNumber}]} ${arr_cpass1_outputs[*]} && mkdir -p \$OUTPATH/meta && cp ${arr_cpass1_calib_list[${runNumber}]} \$OUTPATH/meta/${arr_cpass1_calib_list[${runNumber}]}"
+    echo ; echo
+
+    #arr_cpass1_ESD_list[${runNumber}]="${commonOutputPath}/meta/cpass1.ESD.run${runNumber}.list"
+    arr_cpass1_ESD_list[${runNumber}]="cpass1.ESD.run${runNumber}.list"
+    echo "### Lists CPass1 ESDs ###"
+    echo "${arr_cpass1_ESD_list[${runNumber}]}: benchmark.sh ${arr_cpass1_outputs[*]}"
+    echo "    LOCAL ./benchmark.sh PrintValues esd ${arr_cpass1_ESD_list[${runNumber}]} ${arr_cpass1_outputs[*]} && mkdir -p \$OUTPATH/meta && cp ${arr_cpass1_ESD_list[${runNumber}]} \$OUTPATH/meta/${arr_cpass1_ESD_list[${runNumber}]}"
+    echo ; echo
+
+    #arr_cpass1_filtered_list[${runNumber}]="${commonOutputPath}/meta/cpass1.filtered.run${runNumber}.list"
+    arr_cpass1_filtered_list[${runNumber}]="cpass1.filtered.run${runNumber}.list"
+    echo "### Lists CPass1 filtered ###"
+    echo "${arr_cpass1_filtered_list[${runNumber}]}: benchmark.sh ${arr_cpass1_outputs[*]}"
+    echo "    LOCAL ./benchmark.sh PrintValues filteredTree ${arr_cpass1_filtered_list[${runNumber}]} ${arr_cpass1_outputs[*]} && mkdir -p \$OUTPATH/meta && cp ${arr_cpass1_filtered_list[${runNumber}]} \$OUTPATH/meta/${arr_cpass1_filtered_list[${runNumber}]}"
+    echo ; echo
   
     #CPass1 merging
-    arr_cpass1_merged[${runNumber}]="${commonOutputPath}/meta/merge.cpass1.run${runNumber}.done"
-    echo "${commonOutputPath}/meta/cpass1.localOCDB.${runNumber}.tgz ${arr_cpass1_merged[${runNumber}]} :  benchmark.sh ${configFile} ${arr_cpass1_calib_list[${runNumber}]} ${arr_cpass1_QA_list[${runNumber}]} ${copyFiles[@]}"
-    echo " ${alirootEnv} ./benchmark.sh MergeCPass1 ${commonOutputPath}/000${runNumber}/cpass1 ${currentDefaultOCDB} ${configFile} ${runNumber} ${arr_cpass1_calib_list[${runNumber}]} ${arr_cpass1_QA_list[${runNumber}]} ${arr_cpass1_filtered_list[${runNumber}]} ${extraOpts[@]}"" "
+    #arr_cpass1_merged[${runNumber}]="${commonOutputPath}/meta/merge.cpass1.run${runNumber}.done"
+    arr_cpass1_merged[${runNumber}]="merge.cpass1.run${runNumber}.done"
+    echo "### Merges CPass1 files ###"
+    echo "cpass1.localOCDB.${runNumber}.tgz.done ${arr_cpass1_merged[${runNumber}]}: benchmark.sh ${configFile} ${arr_cpass1_calib_list[${runNumber}]} ${arr_cpass1_QA_list[${runNumber}]} ${copyFiles[@]}"
+    echo "    ${alirootEnv} ./benchmark.sh MergeCPass1 \$OUTPATH/000${runNumber}/cpass1 ${currentDefaultOCDB} ${configFile} ${runNumber} ${arr_cpass1_calib_list[${runNumber}]} ${arr_cpass1_QA_list[${runNumber}]} ${arr_cpass1_filtered_list[${runNumber}]} ${extraOpts[@]}"" "
+    echo ; echo
 
     #CPass0 wrapped in a profiling tool (valgrind,....)
     if [[ -n ${profilingCommand} ]]; then
@@ -1105,18 +1243,22 @@ goGenerateMakeflow()
       currentDefaultOCDB=$(setYear ${inputFile} ${defaultOCDB})
       jobindex="profiling"
 
-      arr_cpass0_profiled_outputs[${runNumber}]="${commonOutputPath}/meta/profiling.cpass0.job${jobindex}.run${runNumber}.done"
-      echo "${arr_cpass0_profiled_outputs[${runNumber}]} : benchmark.sh ${configFile} ${copyFiles[@]}"
+      #arr_cpass0_profiled_outputs[${runNumber}]="${commonOutputPath}/meta/profiling.cpass0.job${jobindex}.run${runNumber}.done"
+      arr_cpass0_profiled_outputs[${runNumber}]="profiling.cpass0.job${jobindex}.run${runNumber}.done"
+      echo "### CPass0 in a profiler ###"
+      echo "${arr_cpass0_profiled_outputs[${runNumber}]}: benchmark.sh ${configFile} ${copyFiles[@]}"
       profilingCommand=$(encSpaces "${profilingCommand}")
-      echo " ${alirootEnv} ./benchmark.sh CPass0 ${commonOutputPath}/000${runNumber}/${jobindex} ${inputFile} ${nEventsProfiling} ${currentDefaultOCDB} ${configFile} ${runNumber} ${jobindex} ${extraOpts[@]} useProfilingCommand=${profilingCommand}"
+      echo "    ${alirootEnv} ./benchmark.sh CPass0 \$OUTPATH/000${runNumber}/${jobindex} ${inputFile} ${nEventsProfiling} ${currentDefaultOCDB} ${configFile} ${runNumber} ${jobindex} ${extraOpts[@]} useProfilingCommand=${profilingCommand}"
+      echo ; echo
     fi
 
   done #runs
 
   #Summary
-  echo "${commonOutputPath}/summary.log : benchmark.sh ${configFile} ${arr_cpass1_merged[*]}"
-  echo "  ${alirootEnv} ./benchmark.sh MakeSummary ${configFile} ${extraOpts[@]}"
-  echo
+  echo "### Summary ###"
+  echo "summary.log: benchmark.sh ${configFile} ${arr_cpass1_merged[*]}"
+  echo "     ${alirootEnv} ./benchmark.sh MakeSummary ${configFile} ${extraOpts[@]}"
+  echo ; echo
 
   return 0
 )
@@ -1126,7 +1268,7 @@ goPrintValues()
   #print the values given the key from any number of files (store in output file on second argument)
   if [[ $# -lt 3 ]]; then
     echo "goPrintValues key outputFile inputFiles"
-    echo "if outputFile is \"-\" dont redirect to a file"
+    echo "if outputFile is \"-\" don't redirect to a file"
     return
   fi
   key=${1}
@@ -1161,11 +1303,17 @@ goCreateQAplots()
   cd ${outputDir}
   [[ ! "${PWD}" =~ "${outputDir}" ]] && echo "PWD is not equal to outputDir=${outputDir}" && cd ${olddir} && return 1
 
-  if [[ -z ${pretend} ]]; then
-    ${ALICE_ROOT}/PWGPP/QA/scripts/runQA.sh inputList="${mergedQAfileList}" inputListHighPtTrees="${filteringList}" ocdbStorage=${defaultOCDB}
-  else
-    touch pretendCreateQAplots.done
-  fi
+  inputFiles=(
+              "${batchWorkingDirectory}/runQA.sh"
+              "${ALICE_ROOT}/PWGPP/QA/scripts/runQA.sh"
+  )
+  for file in ${inputFiles[*]}; do
+    [[ ! -f ${file##*/} && -f ${file} ]] && echo "copying ${file}" && cp -f ${file} .
+  done
+
+  echo "running QA with command:"
+  echo ./runQA.sh inputList="${mergedQAfileList}" inputListHighPtTrees="${filteringList}" ocdbStorage="${defaultOCDB}"
+  ./runQA.sh inputList="${mergedQAfileList}" inputListHighPtTrees="${filteringList}" ocdbStorage="${defaultOCDB}"
   cd ${olddir}
   return 0
 )
@@ -1316,6 +1464,9 @@ summarizeLogs()
             "stderr"
   )
 
+  #put dir information in the output
+  echo "dir $PWD"
+
   #check logs
   local logstatus=0
   for log in ${logFiles[*]}; do
@@ -1435,7 +1586,9 @@ goMakeFilteredTrees()
   batchWorkingDirectory=${PWD}
 
   [[ -z ${commonOutputPath} ]] && commonOutputPath=${PWD}
-  doneFile=${commonOutputPath}/meta/filtering.cpass1.run${runNumber}.done
+  doneFileBase=filtering.cpass1.run${runNumber}.done
+  doneFileTmp=${batchWorkingDirectory}/${doneFileBase}
+  doneFile=${commonOutputPath}/meta/${doneFileBase}
 
   cat > filtering.log << EOF
   goMakeFilteredTrees config:
@@ -1470,7 +1623,6 @@ EOF
   fi
   pwd
   /bin/ls
-  touch ${doneFile}
-  summarizeLogs >>  ${doneFile}
+  summarizeLogs >> ${doneFileTmp}
   
   #echo mv -f * ${outputDir}
@@ -1478,7 +1630,10 @@ EOF
   #[[ -f ${outputDir}/FilterEvents_Trees.root ]] && echo "filteredTree ${outputDir}/FilterEvents_Trees.root" >> ${doneFile}
   #cd ${commonOutputPath}
   #[[ "${runpath}" != "${outputDir}" ]] && rm -rf ${runpath}
-  
+  cp "$doneFileTmp" "$doneFile" || rm -f "$doneFileTmp" "$doneFile"
+  [[ -n ${removeTMPdoneFile} ]] && rm -f ${doneFileTmp}
+
   return 0
 )
 
@@ -1559,6 +1714,8 @@ goSubmitBatch()
     extraOpts[i]=$(encSpaces "${extraOpts[i]}")
   done
   extraOpts+=("encodedSpaces=1")
+  #this removes the copy of the done file used by makeflow (in the running dir)
+  extraOpts+=("removeTMPdoneFile=1")
 
   #record the working directory provided by the batch system
   batchWorkingDirectory=${PWD}
@@ -1702,6 +1859,8 @@ goSubmitBatch()
     if [[ ${autoOCDB} -ne 0 ]]; then
       currentDefaultOCDB=$(setYear ${oneInputFile} ${defaultOCDB})
     fi
+    period=$(guessPeriod ${oneInputFile})
+    year=$(guessYear ${oneInputFile})
 
     echo "submitting run ${runNumber} with OCDB ${currentDefaultOCDB}"
 
@@ -1710,7 +1869,7 @@ goSubmitBatch()
     if [[ -n ${profilingCommand} ]]; then
       [[ -z ${nEventsProfiling} ]] && nEventsProfiling=2
       [[ -z ${profilingCommand} ]] && profilingCommand="/usr/bin/valgrind --tool=callgrind --num-callers=40 -v --trace-children=yes"
-      submit "profile-${JOBpostfix}" 1 1 000 "${alirootEnv} ${self}" CPass0 ${commonOutputPath}/000${runNumber}/${jobindex} ${oneInputFile} ${nEventsProfiling} ${currentDefaultOCDB} ${configFile} ${runNumber} ${jobindex} useProfilingCommand=$(encSpaces "${profilingCommand}") "${extraOpts[@]}"
+      submit "profile-${JOBpostfix}" 1 1 000 "${alirootEnv} ${self}" CPass0 ${commonOutputPath}/${year}/${period}/000${runNumber}/${jobindex} ${oneInputFile} ${nEventsProfiling} ${currentDefaultOCDB} ${configFile} ${runNumber} ${jobindex} useProfilingCommand=$(encSpaces "${profilingCommand}") "${extraOpts[@]}"
     fi 
 
     ################################################################################
@@ -1724,7 +1883,7 @@ goSubmitBatch()
       echo
 
       # create directory and copy all files that are needed
-      targetDirectory="${commonOutputPath}/000${runNumber}/cpass0"
+      targetDirectory="${commonOutputPath}/${year}/${period}/000${runNumber}/cpass0"
       mkdir -p ${targetDirectory}
 
       filesCPass0=( 
@@ -1779,7 +1938,7 @@ goSubmitBatch()
       echo "submit CPass0 merging for run ${runNumber}"
       echo
 
-      targetDirectory="${commonOutputPath}/000${runNumber}/cpass0"
+      targetDirectory="${commonOutputPath}/${year}/${period}/000${runNumber}/cpass0"
       mkdir -p ${targetDirectory}
 
       #copy the scripts
@@ -1815,7 +1974,7 @@ goSubmitBatch()
 
     if [ ${runCPass1reco} -eq 1 ]; then
 
-      targetDirectory="${commonOutputPath}/000${runNumber}/cpass1"
+      targetDirectory="${commonOutputPath}/${year}/${period}/000${runNumber}/cpass1"
       rm -f ${commonOutputPath}/meta/cpass1.job*.run${runNumber}.done
 
       # safety feature: if we are re-running for any reason we want to delete the previous output first.
@@ -1883,7 +2042,7 @@ goSubmitBatch()
       echo "submit CPass1 merging for run ${runNumber}"
       echo
 
-      targetDirectory="${commonOutputPath}/000${runNumber}/cpass1"
+      targetDirectory="${commonOutputPath}/${year}/${period}/000${runNumber}/cpass1"
       rm -f ${commonOutputPath}/meta/merge.cpass1.run${runNumber}.done
       mkdir -p ${targetDirectory}
 
@@ -1915,7 +2074,7 @@ goSubmitBatch()
     #  echo submitting filtering for run ${runNumber}
     #  echo
     #  submit "${JOBmakeESDlistCPass1}" 1 1 "${LASTJOB}" "${self}" PrintValues esd ${commonOutputPath}/meta/cpass1.ESD.run${runNumber}.list ${commonOutputPath}/meta/cpass1.job*.run${runNumber}.done 
-    #  submit "${JOBfilterESDcpass1}" 1 1 "${JOBmakeESDlistCPass1}" "${alirootEnv} ${self}" MakeFilteredTrees ${commonOutputPath}/000${runNumber}/cpass1 ${runNumber} ${commonOutputPath}/meta/cpass1.ESD.run${runNumber}.list ${filteringFactorHighPt} ${filteringFactorV0s} ${currentDefaultOCDB} 1000000 0 10000000 0 ${configFile} AliESDs_Barrel.root "${extraOpts[@]}"
+    #  submit "${JOBfilterESDcpass1}" 1 1 "${JOBmakeESDlistCPass1}" "${alirootEnv} ${self}" MakeFilteredTrees ${commonOutputPath}/${year}/${period}/000${runNumber}/cpass1 ${runNumber} ${commonOutputPath}/meta/cpass1.ESD.run${runNumber}.list ${filteringFactorHighPt} ${filteringFactorV0s} ${currentDefaultOCDB} 1000000 0 10000000 0 ${configFile} AliESDs_Barrel.root "${extraOpts[@]}"
     #  LASTJOB=${JOBfilterESDcpass1}
     #fi
 
@@ -1935,7 +2094,8 @@ goSubmitBatch()
   echo "submit make a summary"
   echo
 
-  submit "${JOBID6}" 1 1 "${LASTJOB}" "${alirootEnv} ${self}" MakeSummary ${configFile}
+  [[ -z ${alirootEnvQA} ]] && alirootEnvQA=$(encSpaces "${alirootEnv}")
+  submit "${JOBID6}" 1 1 "${LASTJOB}" "${alirootEnvQA} ${self}" MakeSummary ${configFile} "commonOutputPath=${commonOutputPath}"
   LASTJOB=${JOBID6}
   #################################################################################
   
@@ -1949,6 +2109,7 @@ goWaitForOutput()
 (
   umask 0002
   [[ $# -lt 3 ]] && echo "goWaitForOutput() wrong number of arguments, exiting.." && return 1
+  echo Start: goWaitForOutput
   echo searchPath=${1}
   echo fileName=${2}
   echo numberOfFiles=${3}
@@ -1967,6 +2128,7 @@ goWaitForOutput()
     sleep 60
   done
   echo "DONE! exiting..."
+  echo End: goWaitForOutput
   return 0
 )
 
@@ -2067,16 +2229,30 @@ EOF
 
 stackTraceTree()
 (
-  awk '
+  if [[ $# -lt 1 ]]; then
+    echo 'produces a stack trace tree from standard root crash logs'
+    echo 'input is a (list of) text file(s) containing the stack trace (either gdb output'
+    echo 'produced with e.g. gdb --batch --quiet -ex "bt" -ex "quit" aliroot core,'
+    echo 'or the root crash log); output is a TTree formatted table.'
+    echo 'example usage:'
+    echo 'benchmark.sh stackTraceTree /foo/*/rec.log'
+    echo 'benchmark.sh stackTraceTree $(cat file.list)'
+    echo 'benchmark.sh stackTraceTree `cat file.list`'
+    return 0
+  fi
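+  #the gawk script splits its input on gdb frame markers (RS="#<number>"), so each
+  #record is one stack frame and RT (the matched separator) holds the frame number;
+  #for root crash logs, frames after the "lines below might hint..." marker are skipped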
+  gawk '
        BEGIN { 
-               print "frame/I:method/C:line/C:cpass/I:aliroot/I";
+               print "frame/I:method/C:line/C:cpass/I:aliroot/I:file/C";
                RS="#[0-9]*";
                aliroot=0;
+               read=1;
              } 
-             { 
+      /There was a crash/ {read=1;}
+      /The lines below might hint at the cause of the crash/ {read=0;}
+      read==1 { 
                if ($3 ~ /Ali*/) aliroot=1; else aliroot=0;
                gsub("#","",RT); 
-               if ($NF!="" && RT!="" && $3!="") print RT" "$3" "$NF" "0" "aliroot
+               if ($NF!="" && RT!="" && $3!="") print RT" "$3" "$NF" "0" "aliroot" "FILENAME
              }
       ' "$@" 2>/dev/null
 )
@@ -2087,8 +2263,11 @@ goMakeSummary()
   # summary logs
   # qa plot making
   # final file lists
+  # runs in current dir - in makeflow mode it can run LOCAL, then the QA plots and summaries
+  # will appear in the submission dir.
   #some defaults:
   log="summary.log"
+  jsonLog="summary.json"
   productionID="qa"
 
   configFile=${1}
@@ -2100,6 +2279,9 @@ goMakeSummary()
   
   #record the working directory provided by the batch system
   batchWorkingDirectory=${PWD}
+
+  logTmp="${batchWorkingDirectory}/${log}"
+  jsonLogTmp="${batchWorkingDirectory}/${jsonLog}"
   
   [[ -f ${alirootSource} && -z ${ALICE_ROOT} ]] && source ${alirootSource}
 
@@ -2108,11 +2290,9 @@ goMakeSummary()
   [[ -z ${commonOutputPath} ]] && commonOutputPath=${PWD}
 
   #copy some useful stuff
-  #and go to the commonOutputPath
-  cp ${configFile} ${commonOutputPath}
-  cd ${commonOutputPath}
+  [ -f "${commonOutputPath}/${configFile}" ] || paranoidCp "${configFile}" "${commonOutputPath}"
 
-  exec &> >(tee ${log})
+  exec &> >(tee ${logTmp})
 
   #summarize the global stuff
   echo "env script: ${alirootSource} ${alirootEnv}"
@@ -2120,67 +2300,70 @@ goMakeSummary()
   echo "commonOutputPath=${commonOutputPath}"
 
   #summarize the stacktraces
-  stackTraceTree 000*/cpass0/*/stacktrace* > stacktrace_cpass0.tree
-  stackTraceTree 000*/cpass1/*/stacktrace* > stacktrace_cpass1.tree
+  stackTraceTree ${commonOutputPath}/*/*/000*/cpass0/*/stacktrace* > stacktrace_cpass0.tree
+  stackTraceTree ${commonOutputPath}/*/*/000*/cpass1/*/stacktrace* > stacktrace_cpass1.tree
+
+  # json header: open array of objects
+  echo '[' > "${jsonLogTmp}"
 
   echo total numbers for the production:
   echo
   awk 'BEGIN {nFiles=0;nCore=0;} 
   /^calibfile/ {nFiles++;} 
   /core dumped/ {nCore++i;}
-  END {print     "cpass0 produced "nFiles" calib files, "nCore" core files";}' meta/cpass0.job*done 2>/dev/null
+  END {print     "cpass0 produced "nFiles" calib files, "nCore" core files";}' ${commonOutputPath}/meta/cpass0.job*done 2>/dev/null
   awk 'BEGIN {nOK=0; nBAD=0; } 
   /\/rec.log OK/ {nOK++;} 
   /\/rec.log BAD/ {nBAD++;} 
   /stderr BAD/ {if ($0 ~ /rec.log/){nBAD++;}}
-  END {print     "cpass0 reco:  OK: "nOK"\tBAD: "nBAD;}' meta/cpass0.job*done 2>/dev/null
+  END {print     "cpass0 reco:  OK: "nOK"\tBAD: "nBAD;}' ${commonOutputPath}/meta/cpass0.job*done 2>/dev/null
   awk 'BEGIN {nOK=0; nBAD=0; } 
   /\/calib.log OK/ {nOK++;} 
   /\/calib.log BAD/ {nBAD++;} 
-  END {print "cpass0 calib: OK: "nOK"\tBAD: "nBAD;}' meta/cpass0.job*done 2>/dev/null
+  END {print "cpass0 calib: OK: "nOK"\tBAD: "nBAD;}' ${commonOutputPath}/meta/cpass0.job*done 2>/dev/null
 
   awk 'BEGIN {nOK=0; nBAD=0; } 
   /merge.log OK/ {nOK++;} 
   /merge.log BAD/ {nBAD++;} 
-  END {print "cpass0 merge: OK: "nOK"\tBAD: "nBAD;}' meta/merge.cpass0*done 2>/dev/null
+  END {print "cpass0 merge: OK: "nOK"\tBAD: "nBAD;}' ${commonOutputPath}/meta/merge.cpass0*done 2>/dev/null
   awk 'BEGIN {nOK=0; nBAD=0; } 
   /ocdb.log OK/ {nOK++;} 
   /ocdb.log BAD/ {nBAD++;} 
-  END {print   "cpass0 OCDB:  OK: "nOK"\tBAD: "nBAD;}' meta/merge.cpass0*done 2>/dev/null
+  END {print   "cpass0 OCDB:  OK: "nOK"\tBAD: "nBAD;}' ${commonOutputPath}/meta/merge.cpass0*done 2>/dev/null
 
   echo
   awk 'BEGIN {nFiles=0;nCore=0;} 
   /^calibfile/ {nFiles++;} 
   /core dumped/ {nCore++;}
-  END {print     "cpass1 produced "nFiles" calib files, "nCore" core files";}' meta/cpass1.job*done 2>/dev/null
+  END {print     "cpass1 produced "nFiles" calib files, "nCore" core files";}' ${commonOutputPath}/meta/cpass1.job*done 2>/dev/null
   awk 'BEGIN {nOK=0; nBAD=0; } 
   /\/rec.log OK/ {nOK++;} 
   /\/rec.log BAD/ {nBAD++;} 
   /stderr BAD/ {if ($0 ~ /rec.log/){nBAD++;}}
-  END {print     "cpass1 reco:  OK: "nOK"\tBAD: "nBAD;}' meta/cpass1.job*done 2>/dev/null
+  END {print     "cpass1 reco:  OK: "nOK"\tBAD: "nBAD;}' ${commonOutputPath}/meta/cpass1.job*done 2>/dev/null
   awk 'BEGIN {nOK=0; nBAD=0; } 
   /\/calib.log OK/ {nOK++;} 
   /\/calib.log BAD/ {nBAD++;} 
-  END {print "cpass1 calib: OK: "nOK"\tBAD: "nBAD;}' meta/cpass1.job*done 2>/dev/null
+  END {print "cpass1 calib: OK: "nOK"\tBAD: "nBAD;}' ${commonOutputPath}/meta/cpass1.job*done 2>/dev/null
 
   awk 'BEGIN {nOK=0; nBAD=0; } 
   /merge.log OK/ {nOK++;} 
   /merge.log BAD/ {nBAD++;} 
-  END {print "cpass1 merge: OK: "nOK"\tBAD: "nBAD;}' meta/merge.cpass1*done 2>/dev/null
+  END {print "cpass1 merge: OK: "nOK"\tBAD: "nBAD;}' ${commonOutputPath}/meta/merge.cpass1*done 2>/dev/null
   awk 'BEGIN {nOK=0; nBAD=0; } 
   /ocdb.log OK/ {nOK++;} 
   /ocdb.log BAD/ {nBAD++;} 
-  END {print   "cpass1 OCDB:  OK: "nOK"\tBAD: "nBAD;}' meta/merge.cpass1*done 2>/dev/null
+  END {print   "cpass1 OCDB:  OK: "nOK"\tBAD: "nBAD;}' ${commonOutputPath}/meta/merge.cpass1*done 2>/dev/null
 
   echo
   echo per run stats:
-  /bin/ls -1 meta/merge.cpass0.run*.done | while read x 
+  /bin/ls -1 ${commonOutputPath}/meta/merge.cpass0.run*.done | while read x 
 do
-  dir=$(goPrintValues calibfile - ${x})
+  dir=$(goPrintValues dir - ${x})
   runNumber=$(guessRunNumber ${dir})
   [[ -z ${runNumber} ]] && continue
 
-  if $(/bin/ls meta/cpass0.job*.run${runNumber}.done &> /dev/null); then
+  if $(/bin/ls ${commonOutputPath}/meta/cpass0.job*.run${runNumber}.done &> /dev/null); then
     statusCPass0=( $(
     awk 'BEGIN {nOKrec=0;nBADrec=0;nOKcalib=0;nBADcalib=0;nOKstderr=0;nBADstderr=0;}
     /\/rec.log OK/ {nOKrec++;} 
@@ -2189,11 +2372,11 @@ do
     /stderr OK/ {nOKstderr++;}
     /\/calib.log OK/ {nOKcalib++;}
     /\/calib.log BAD/ {nBADcalib++}
-    END {print ""nOKrec" "nBADrec" "nOKstderr" "nBADstderr" "nOKcalib" "nBADcalib;}' meta/cpass0.job*.run${runNumber}.done 2>/dev/null
+    END {print ""nOKrec" "nBADrec" "nOKstderr" "nBADstderr" "nOKcalib" "nBADcalib;}' ${commonOutputPath}/meta/cpass0.job*.run${runNumber}.done 2>/dev/null
     ) ) 
   fi
 
-  if $(/bin/ls meta/cpass1.job*.run${runNumber}.done &>/dev/null); then
+  if $(/bin/ls ${commonOutputPath}/meta/cpass1.job*.run${runNumber}.done &>/dev/null); then
     statusCPass1=( $(
     awk 'BEGIN {nOKrec=0;nBADrec=0;nOKcalib=0;nBADcalib=0;nOKstderr=0;nBADstderr=0;nQAbarrelOK=0;nQAbarrelBAD=0;nQAouterOK=0;nQAouterBAD=0;}
     /\/rec.log OK/ {nOKrec++;} 
@@ -2206,7 +2389,7 @@ do
     /\/qa_barrel.log BAD/ {nQAbarrelBAD++;}
     /\/qa_outer.log OK/ {nQAouterOK++;}
     /\/qa_outer.log BAD/ {nQAouterBAD++;}
-    END {print ""nOKrec" "nBADrec" "nOKstderr" "nBADstderr" "nOKcalib" "nBADcalib" "nQAbarrelOK" "nQAbarrelBAD" "nQAouterOK" "nQAouterBAD;}' meta/cpass1.job*.run${runNumber}.done 2>/dev/null
+    END {print ""nOKrec" "nBADrec" "nOKstderr" "nBADstderr" "nOKcalib" "nBADcalib" "nQAbarrelOK" "nQAbarrelBAD" "nQAouterOK" "nQAouterBAD;}' ${commonOutputPath}/meta/cpass1.job*.run${runNumber}.done 2>/dev/null
     ) ) 
   fi
 
@@ -2215,8 +2398,38 @@ do
   statusQA=$(awk '/mergeMakeOCDB.log/ {print $2}' ${x/cpass0/cpass1} 2>/dev/null)
 
   printf "%s\t ocdb.log cpass0: %s\t ocdb.log cpass1: %s\tqa.log:%s\t| cpass0: rec:%s/%s stderr:%s/%s calib:%s/%s cpass1: rec:%s/%s stderr:%s/%s calib:%s/%s QAbarrel:%s/%s QAouter:%s/%s\n" ${runNumber} ${statusOCDBcpass0} ${statusOCDBcpass1} ${statusQA} ${statusCPass0[0]} ${statusCPass0[1]} ${statusCPass0[2]} ${statusCPass0[3]} ${statusCPass0[4]} ${statusCPass0[5]} ${statusCPass1[0]} ${statusCPass1[1]} ${statusCPass1[2]} ${statusCPass1[3]} ${statusCPass1[4]} ${statusCPass1[5]} ${statusCPass1[6]} ${statusCPass1[7]} ${statusCPass1[8]} ${statusCPass1[9]}
+
+  # produce json summary
+  statusOCDBcpass0json=false
+  statusOCDBcpass1json=false
+  statusQAjson=false
+  [[ "$statusOCDBcpass0" == 'OK' ]] && statusOCDBcpass0json=true
+  [[ "$statusOCDBcpass1" == 'OK' ]] && statusOCDBcpass1json=true
+  [[ "$statusQA" == 'OK' ]] && statusQAjson=true
+  cat >> "$jsonLogTmp" <<EOF
+  {
+    "run": ${runNumber},
+    "status": { "ocdb_pass0": ${statusOCDBcpass0json}, "ocdb_pass1": ${statusOCDBcpass1json}, "qa": ${statusQAjson} },
+    "cpass0": {
+      "reco": { "n_ok": ${statusCPass0[0]}, "n_bad": ${statusCPass0[1]} },
+      "stderr": { "n_ok": ${statusCPass0[2]}, "n_bad": ${statusCPass0[3]} },
+      "calib": { "n_ok": ${statusCPass0[4]}, "n_bad": ${statusCPass0[5]} }
+    },
+    "cpass1": {
+      "reco": { "n_ok": ${statusCPass1[0]}, "n_bad": ${statusCPass1[1]} },
+      "stderr": { "n_ok": ${statusCPass1[2]}, "n_bad": ${statusCPass1[3]} },
+      "calib": { "n_ok": ${statusCPass1[4]}, "n_bad": ${statusCPass1[5]} },
+      "qabarrel": { "n_ok": ${statusCPass1[6]}, "n_bad": ${statusCPass1[7]} },
+      "qaouter": { "n_ok": ${statusCPass1[8]}, "n_bad": ${statusCPass1[9]} }
+    }
+  },
+EOF
+
 done
 
+  # json footer: strip the trailing comma left by the last entry, then close the array
+  sed -i '$ s/},$/}/' "${jsonLogTmp}"
+  echo ']' >> "${jsonLogTmp}"
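+  # The resulting file should now parse as plain JSON; a quick sanity check
+  # could be (illustrative, requires python):
+  #   python -c 'import json,sys; json.load(open(sys.argv[1]))' "${jsonLogTmp}"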
+
   #make lists with output files - QA, trending, filtering and calibration
   ### wait for the merging of all runs to be over ###
   rm -f qa.list
@@ -2233,8 +2446,8 @@ done
   goPrintValues dcsTree cpass1.dcsTree.list ${commonOutputPath}/meta/merge.cpass1.run*.done &>/dev/null
  
   #merge trending
-  rm -f ${commonOutputPath}/trending_merged.root
-  goMerge trending.list ${commonOutputPath}/trending.root ${configFile} "${extraOpts[@]}" &> mergeTrending.log
+  rm -f trending.root
+  goMerge trending.list trending.root ${configFile} "${extraOpts[@]}" &> mergeTrending.log
 
   goMakeSummaryTree ${commonOutputPath} 0
   goMakeSummaryTree ${commonOutputPath} 1
@@ -2245,7 +2458,19 @@ done
   goMakeMergedSummaryTree
 
   #if set, email the summary
-  [[ -n ${MAILTO} ]] && cat ${log} | mail -s "benchmark ${productionID} done" ${MAILTO}
+  [[ -n ${MAILTO} ]] && cat ${logTmp} | mail -s "benchmark ${productionID} done" ${MAILTO}
+
+  #copy logs to destination
+  paranoidCp "$logTmp" "${commonOutputPath}"
+  paranoidCp "$jsonLogTmp" "${commonOutputPath}"
+  
+  #copy output files
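+  #from this point on, stdout/stderr are also duplicated into fileCopy.log by the tee redirection below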
+  exec &> >(tee fileCopy.log)
+  paranoidCp QAplots ${commonOutputPath}
+  paranoidCp *.list ${commonOutputPath}
+  paranoidCp *.root ${commonOutputPath}
+  paranoidCp *.log ${commonOutputPath}
+  paranoidCp fileCopy.log ${commonOutputPath}
 
   return 0
 )
@@ -2354,6 +2579,7 @@ goMakeSummaryTree()
 
 parseConfig()
 {
+  echo Start: parseConfig
   configFile=${1}
   shift
   args=("$@")
@@ -2368,7 +2594,7 @@ parseConfig()
   #makeflowOptions="-T wq -N alice -C ali-copilot.cern.ch:9097"
   makeflowOptions=""
   #batchCommand="/usr/bin/qsub"
-  batchFlags="-b y -cwd -l h_rt=24:0:0,h_rss=4G "
+  batchFlags=""
   baseOutputDirectory="$PWD/output"
   #alirootEnv="/cvmfs/alice.cern.ch/bin/alienv setenv AliRoot/v5-04-34-AN -c"
   #alirootEnv="/home/mkrzewic/alisoft/balice_master.sh"
@@ -2394,6 +2620,7 @@ parseConfig()
   logToFinalDestination=1
   ALIROOT_FORCE_COREDUMP=1
   pretendDelay=0
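+  #if set to 1 (e.g. in the config file), input files are copied locally via copyFileToLocal instead of being accessed in place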
+  copyInputData=0
 
   #first, source the config file
   if [ -f ${configFile} ]; then
@@ -2427,8 +2654,8 @@ parseConfig()
   [[ -z ${alirootEnv} ]] && echo "alirootEnv not defined!" && return 1
 
   #export the aliroot function if defined to override normal behaviour
-  [[ $(type -t aliroot) =~ "function" ]] && export -f aliroot
-
+  [[ $(type -t aliroot) =~ "function" ]] && export -f aliroot && echo "exporting aliroot() function..."
+  echo End: parseConfig
   return 0
 }
 
@@ -2437,9 +2664,9 @@ aliroot()
   args=("$@")
   if [[ -n ${useProfilingCommand} ]]; then
     profilerLogFile="cpu.txt"
-    [[ "${args}" =~ rec ]] && profilerLogFile="cpu_rec.txt"
-    [[ "${args}}" =~ Calib ]] && profilerLogFile="cpu_calib.txt"
-    echo running "${useProfilingCommand} aliroot ${args} &> ${profilerLogFile}"
+    [[ "${args[@]}" =~ rec ]] && profilerLogFile="cpu_rec.txt"
+    [[ "${args[@]}" =~ Calib ]] && profilerLogFile="cpu_calib.txt"
+    echo running "${useProfilingCommand} aliroot ${args[@]} &> ${profilerLogFile}"
     ${useProfilingCommand} aliroot "${args[@]}" &> ${profilerLogFile}
   else
     #to prevent an infinite recursion use "command aliroot" to disable
@@ -2450,6 +2677,117 @@ aliroot()
   return 0
 }
 
+copyFileToLocal()
+(
+  #copies a single file to a local destination: the file may either come from
+  #a local filesystem or from a remote location (whose protocol must be
+  #supported)
+  #copy is "robust" and it is repeated some times in case of failure before
+  #giving up (1 is returned in that case)
+  src="$1"
+  dst="$2"
+  ok=0
+  [[ -z "${maxCopyTries}" ]] && maxCopyTries=10
+
+  proto="${src%%://*}"
+
+  echo "copy file to local dest started: $src -> $dst"
+
+  for (( i=1 ; i<=maxCopyTries ; i++ )) ; do
+
+    echo "...attempt $i of $maxCopyTries"
+    rm -f "$dst"
+
+    if [[ "$proto" == "$src" ]]; then
+      cp "$src" "$dst"
+    else
+      case "$proto" in
+        root)
+          xrdcp -f "$src" "$dst"
+        ;;
+        http)
+          curl -L "$src" -o "$dst"
+        ;;
+        *)
+          echo "protocol not supported: $proto"
+          return 2
+        ;;
+      esac
+    fi
+
+    if [ $? == 0 ] ; then
+      ok=1
+      break
+    fi
+
+  done
+
+  if [[ "$ok" == 1 ]] ; then
+    echo "copy file to local dest OK after $i attempt(s): $src -> $dst"
+    return 0
+  fi
+
+  echo "copy file to local dest FAILED after $maxCopyTries attempt(s): $src -> $dst"
+  return 1
+)
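+
+# Usage sketch (paths illustrative):
+#   copyFileToLocal /some/local/dir/chunk.root ./chunk.root               # plain cp
+#   copyFileToLocal root://someserver//some/path/chunk.root ./chunk.root  # via xrdcp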
+
+paranoidCp()
+(
+  #recursively copy files and directories;
+  #avoids find and the like since they kill
+  #the performance on some cluster file systems;
+  #symlinks are not copied, to avoid problems
+  sourceFiles=("${@}")
+  destination="${sourceFiles[@]:(-1)}" #last element
+  unset sourceFiles[${#sourceFiles[@]}-1] #remove last element (dst)
+  for src in "${sourceFiles[@]}"; do
+    if [[ -f "${src}" && ! -h  "${src}" ]]; then
+      paranoidCopyFile "${src}" "${destination}"
+    elif [[ -d "${src}" && ! -h "${src}" ]]; then
+      src="${src%/}"
+      dst="${destination}/${src##*/}"
+      mkdir -p "${dst}"
+      paranoidCp "${src}"/* "${dst}"
+    fi
+  done
+)
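+
+# Usage sketch: the last argument is the destination directory, e.g.
+#   paranoidCp QAplots *.list *.root ${commonOutputPath}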
+
+paranoidCopyFile()
+(
+  #copy a single file to a target in an existing dir
+  #repeat a few times if copy fails
+  #returns 1 on failure, 0 on success
+  src="${1}"
+  dst="${2}"
+  ok=0
+  [[ -d "${dst}" ]] && dst="${dst}/${src##*/}"
+  [[ -z "${maxCopyTries}" ]] && maxCopyTries=10
+
+  echo "paranoid copy started: $src -> $dst"
+
+  for (( i=1 ; i<=maxCopyTries ; i++ )) ; do
+
+    echo "...attempt $i of $maxCopyTries"
+    rm -f "$dst"
+    cp "$src" "$dst"
+
+    cmp -s "$src" "$dst"
+    if [ $? == 0 ] ; then
+      ok=1
+      break
+    fi
+
+  done
+
+  if [[ "$ok" == 1 ]] ; then
+    echo "paranoid copy OK after $i attempt(s): $src -> $dst"
+    return 0
+  fi
+
+  echo "paranoid copy FAILED after $maxCopyTries attempt(s): $src -> $dst"
+  return 1
+)
+
 guessRunData()
 {
   #guess the period from the path, pick the rightmost one