#include "Riostream.h"
#include "TEnv.h"
+#include "TBits.h"
#include "TError.h"
#include "TROOT.h"
#include "TSystem.h"
#include "TFile.h"
+#include "TFileCollection.h"
+#include "TChain.h"
#include "TObjString.h"
#include "TObjArray.h"
#include "TGrid.h"
fFastReadOption(0),
fOverwriteMode(1),
fNreplicas(2),
+ fNproofWorkers(0),
+ fNproofWorkersPerSlave(0),
+ fProofReset(0),
fRunNumbers(),
fExecutable(),
fExecutableCommand(),
fJobTag(),
fOutputSingle(),
fRunPrefix(),
+ fProofCluster(),
+ fProofDataSet(),
+ fFileForTestMode(),
+ fRootVersionForProof(),
+ fAliRootMode(),
fInputFiles(0),
fPackages(0)
{
fFastReadOption(0),
fOverwriteMode(1),
fNreplicas(2),
+ fNproofWorkers(0),
+ fNproofWorkersPerSlave(0),
+ fProofReset(0),
fRunNumbers(),
fExecutable(),
fExecutableCommand(),
fJobTag(),
fOutputSingle(),
fRunPrefix(),
+ fProofCluster(),
+ fProofDataSet(),
+ fFileForTestMode(),
+ fRootVersionForProof(),
+ fAliRootMode(),
fInputFiles(0),
fPackages(0)
{
fFastReadOption(other.fFastReadOption),
fOverwriteMode(other.fOverwriteMode),
fNreplicas(other.fNreplicas),
+ fNproofWorkers(other.fNproofWorkers),
+ fNproofWorkersPerSlave(other.fNproofWorkersPerSlave),
+ fProofReset(other.fProofReset),
fRunNumbers(other.fRunNumbers),
fExecutable(other.fExecutable),
fExecutableCommand(other.fExecutableCommand),
fJobTag(other.fJobTag),
fOutputSingle(other.fOutputSingle),
fRunPrefix(other.fRunPrefix),
+ fProofCluster(other.fProofCluster),
+ fProofDataSet(other.fProofDataSet),
+ fFileForTestMode(other.fFileForTestMode),
+ fRootVersionForProof(other.fRootVersionForProof),
+ fAliRootMode(other.fAliRootMode),
fInputFiles(0),
fPackages(0)
{
fFastReadOption = other.fFastReadOption;
fOverwriteMode = other.fOverwriteMode;
fNreplicas = other.fNreplicas;
+ fNproofWorkers = other.fNproofWorkers;
+ fNproofWorkersPerSlave = other.fNproofWorkersPerSlave;
+ fProofReset = other.fProofReset;
fRunNumbers = other.fRunNumbers;
fExecutable = other.fExecutable;
fExecutableCommand = other.fExecutableCommand;
fJobTag = other.fJobTag;
fOutputSingle = other.fOutputSingle;
fRunPrefix = other.fRunPrefix;
+ fProofCluster = other.fProofCluster;
+ fProofDataSet = other.fProofDataSet;
+ fFileForTestMode = other.fFileForTestMode;
+ fRootVersionForProof = other.fRootVersionForProof;
+ fAliRootMode = other.fAliRootMode;
if (other.fInputFiles) {
fInputFiles = new TObjArray();
TIter next(other.fInputFiles);
}
// Work directory not existing - create it
gGrid->Cd(homedir);
- if (gGrid->Mkdir(workdir)) {
+ if (gGrid->Mkdir(workdir, "-p")) {
gGrid->Cd(fGridWorkingDir);
Info("CreateJDL", "\n##### Created alien working directory %s", fGridWorkingDir.Data());
} else {
return kFALSE;
}
Info("CheckInputData", "Analysis will make a single xml for base data directory %s",fGridDataDir.Data());
+ if (fDataPattern.Contains("tag") && TestBit(AliAnalysisGrid::kTest))
+ TObject::SetBit(AliAnalysisGrid::kUseTags, kTRUE); // ADDED (fix problem in determining the tag usage in test mode)
return kTRUE;
}
// Process declared files
msg += " type: xml_collection;";
if (useTags) msg += " using_tags: Yes";
else msg += " using_tags: No";
- Info("CheckDataType", msg.Data());
+ Info("CheckDataType", "%s", msg.Data());
if (fNrunsPerMaster<2) {
AddDataFile(Form("%s.xml", os->GetString().Data()));
} else {
msg += " type: xml_collection;";
if (useTags) msg += " using_tags: Yes";
else msg += " using_tags: No";
- Info("CheckDataType", msg.Data());
+ Info("CheckDataType", "%s", msg.Data());
if (fNrunsPerMaster<2) {
AddDataFile(Form("%s%d.xml",fRunPrefix.Data(),irun));
} else {
} else {
if (!fGridOutputDir.Contains("/")) fGridOutputDir = Form("%s/%s", workdir.Data(), fGridOutputDir.Data());
if (!DirectoryExists(fGridOutputDir)) {
- if (gGrid->Mkdir(fGridOutputDir)) {
+ if (gGrid->Mkdir(fGridOutputDir,"-p")) {
Info("CreateJDL", "\n##### Created alien output directory %s", fGridOutputDir.Data());
} else {
Error("CreateJDL", "Could not create alien output directory %s", fGridOutputDir.Data());
fMergingJDL->AddToInputSandbox(Form("LF:%s/%s", workdir.Data(),mergeExec.Data()), "List of input files to be uploaded to workers");
if (!fArguments.IsNull())
fGridJDL->SetArguments(fArguments, "Arguments for the executable command");
- fMergingJDL->SetArguments("$1");
+ fMergingJDL->SetArguments("$1 $2 $3");
fGridJDL->SetValue("TTL", Form("\"%d\"",fTTL));
fGridJDL->SetDescription("TTL", Form("Time after which the job is killed (%d min.)", fTTL/60));
fMergingJDL->SetValue("TTL", Form("\"%d\"",fTTL));
first = kTRUE;
while ((os=(TObjString*)next2())) {
if (!first) comment = NULL;
- if (!os->GetString().Contains("@") && fCloseSE.Length())
- fMergingJDL->AddToOutputArchive(Form("%s@%s",os->GetString().Data(), fCloseSE.Data()), comment);
+ TString currentfile = os->GetString();
+ currentfile.ReplaceAll(".root", "*.root");
+ currentfile.ReplaceAll(".zip", "-Stage$2_$3.zip");
+ if (!currentfile.Contains("@") && fCloseSE.Length())
+ fMergingJDL->AddToOutputArchive(Form("%s@%s",currentfile.Data(), fCloseSE.Data()), comment);
else
- fMergingJDL->AddToOutputArchive(os->GetString(), comment);
+ fMergingJDL->AddToOutputArchive(currentfile, comment);
first = kFALSE;
}
delete arr;
fMergingJDL->AddToOutputSandbox(Form("%s@%s",os->GetString().Data(), fCloseSE.Data()), comment);
else
fMergingJDL->AddToOutputSandbox(os->GetString(), comment);
+ first = kFALSE;
}
delete arr;
}
} else {
if (!fGridOutputDir.Contains("/")) fGridOutputDir = Form("%s/%s", workdir.Data(), fGridOutputDir.Data());
if (!fProductionMode && !DirectoryExists(fGridOutputDir)) {
- if (gGrid->Mkdir(fGridOutputDir)) {
+ if (gGrid->Mkdir(fGridOutputDir,"-p")) {
Info("CreateJDL", "\n##### Created alien output directory %s", fGridOutputDir.Data());
} else {
Error("CreateJDL", "Could not create alien output directory %s", fGridOutputDir.Data());
else fGridJDL->SetOutputDirectory(Form("%s/$2",fGridOutputDir.Data()), "Output directory");
} else {
fGridJDL->SetOutputDirectory(Form("%s/$2/#alien_counter_03i#", fGridOutputDir.Data()), "Output directory");
- fMergingJDL->SetOutputDirectory(Form("%s/$1", fGridOutputDir.Data()), "Output directory");
+ fMergingJDL->SetOutputDirectory("$1", "Output directory");
}
}
sjdl.Prepend(Form("Jobtag = {\n \"comment:%s\"\n};\n", fJobTag.Data()));
index = sjdl.Index("JDLVariables");
if (index >= 0) sjdl.Insert(index, "\n# JDL variables\n");
+ sjdl += "Workdirectorysize = {\"5000MB\"};";
sjdl1 += "JDLVariables = \n{\n \"Packages\",\n \"OutputDir\"\n};\n";
- sjdl1.Prepend(Form("Jobtag = {\n \"comment:Merging_%s\"\n};\n", fJobTag.Data()));
+ sjdl1.Prepend(Form("Jobtag = {\n \"comment:%s_Merging\"\n};\n", fJobTag.Data()));
+ sjdl1.Prepend("# Generated merging jdl\n# $1 = full alien path to output directory to be merged\n# $2 = merging stage\n# $3 = merged chunk\n");
index = sjdl1.Index("JDLVariables");
if (index >= 0) sjdl1.Insert(index, "\n# JDL variables\n");
+ sjdl1 += "Workdirectorysize = {\"5000MB\"};";
// Write jdl to file
ofstream out;
out.open(fJDLName.Data(), ios::out);
TGridResult *res = gGrid->Command(Form("listFilesFromCollection -z -v %s",lfn), kFALSE);
if (!res) {
msg += " using_tags: No (unknown)";
- Info("CheckDataType", msg.Data());
+ Info("CheckDataType", "%s", msg.Data());
return;
}
const char* typeStr = res->GetKey(0, "origLFN");
if (!typeStr || !strlen(typeStr)) {
msg += " using_tags: No (unknown)";
- Info("CheckDataType", msg.Data());
+ Info("CheckDataType", "%s", msg.Data());
return;
}
TString file = typeStr;
useTags = file.Contains(".tag");
if (useTags) msg += " using_tags: Yes";
else msg += " using_tags: No";
- Info("CheckDataType", msg.Data());
+ Info("CheckDataType", "%s", msg.Data());
return;
}
TString slfn(lfn);
TGridCollection *coll = (TGridCollection*)gROOT->ProcessLine(Form("TAlienCollection::Open(\"alien://%s\",1);",lfn));
if (!coll) {
msg += " using_tags: No (unknown)";
- Info("CheckDataType", msg.Data());
+ Info("CheckDataType", "%s", msg.Data());
return;
}
TMap *map = coll->Next();
if (!map) {
msg += " using_tags: No (unknown)";
- Info("CheckDataType", msg.Data());
+ Info("CheckDataType", "%s", msg.Data());
return;
}
map = (TMap*)map->GetValue("");
delete coll;
if (useTags) msg += " using_tags: Yes";
else msg += " using_tags: No";
- Info("CheckDataType", msg.Data());
+ Info("CheckDataType", "%s", msg.Data());
return;
}
useTags = slfn.Contains(".tag");
else msg += " type: unknown file;";
if (useTags) msg += " using_tags: Yes";
else msg += " using_tags: No";
- Info("CheckDataType", msg.Data());
+ Info("CheckDataType", "%s", msg.Data());
}
//______________________________________________________________________________
fPackages->Add(new TObjString(pkg));
}
+//______________________________________________________________________________
+TChain *AliAnalysisAlien::GetChainForTestMode(const char *treeName) const
+{
+// Build a TChain of trees <treeName> from the data file locations listed
+// (one location per line) in the text file fFileForTestMode. At most
+// fNtestFiles entries are chained; files that cannot be opened are skipped
+// with an error message. Returns NULL if the list file is not set, not
+// found, or if no listed file could be opened.
+// Inspired from JF's CreateESDChain.
+ if (fFileForTestMode.IsNull()) {
+ Error("GetChainForTestMode", "For proof test mode please use SetFileForTestMode() pointing to a file that contains data file locations.");
+ return NULL;
+ }
+ if (gSystem->AccessPathName(fFileForTestMode)) {
+ Error("GetChainForTestMode", "File not found: %s", fFileForTestMode.Data());
+ return NULL;
+ }
+ // Open the file
+ ifstream in;
+ in.open(fFileForTestMode);
+ Int_t count = 0;
+ // Read the input list of files and add them to the chain
+ TString line;
+ TChain *chain = new TChain(treeName);
+ while (in.good())
+ {
+ in >> line;
+ if (line.IsNull()) continue;
+ if (count++ == fNtestFiles) break;
+ TString esdFile(line);
+ // Open each candidate once to verify it is readable before chaining it.
+ TFile *file = TFile::Open(esdFile);
+ if (file) {
+ if (!file->IsZombie()) chain->Add(esdFile);
+ file->Close();
+ delete file; // Close() does not free the TFile object - avoid leaking one per entry
+ } else {
+ Error("GetChainForTestMode", "Skipping un-openable file: %s", esdFile.Data());
+ }
+ }
+ in.close();
+ if (!chain->GetListOfFiles()->GetEntries()) {
+ Error("GetChainForTestMode", "No file from %s could be opened", fFileForTestMode.Data());
+ delete chain;
+ return NULL;
+ }
+// chain->ls();
+ return chain;
+}
+
//______________________________________________________________________________
const char *AliAnalysisAlien::GetJobStatus(Int_t jobidstart, Int_t lastid, Int_t &nrunning, Int_t &nwaiting, Int_t &nerror, Int_t &ndone)
{
}
//______________________________________________________________________________
-Bool_t AliAnalysisAlien::MergeOutput(const char *output, const char *basedir, Int_t nmaxmerge)
+Bool_t AliAnalysisAlien::CheckMergedFiles(const char *filename, const char *aliendir, Int_t nperchunk, Bool_t submit, const char *jdl)
+{
+// Static method that checks the status of merging. This can submit merging jobs that did not produced the expected
+// output. If <submit> is false (checking) returns true only when the final merged file was found. If submit is true returns
+// true if the jobs were successfully submitted.
+// Arguments:
+//   filename  - name of the final merged file (pattern <name>.root)
+//   aliendir  - alien directory holding the original and merged outputs
+//   nperchunk - number of files grouped per merging job
+//   submit    - kFALSE: only check; kTRUE: also submit missing/next-stage merging jobs
+//   jdl       - name of the merging JDL to submit
+ Int_t countOrig = 0;
+ Int_t countStage = 0;
+ Int_t stage = 0;
+ Int_t i;
+ Bool_t doneFinal = kFALSE;
+ TBits chunksDone;
+ TString saliendir(aliendir);
+ TString sfilename, stmp;
+ saliendir.ReplaceAll("//","/");
+ saliendir = saliendir.Strip(TString::kTrailing, '/');
+ if (!gGrid) {
+ ::Error("CheckMergedFiles", "You need to be connected to AliEn.");
+ return kFALSE;
+ }
+ sfilename = filename;
+ sfilename.ReplaceAll(".root", "*.root");
+ printf("Checking directory <%s> for merged files <%s> ...\n", aliendir, sfilename.Data());
+ TString command = Form("find %s/ *%s", saliendir.Data(), sfilename.Data());
+ TGridResult *res = gGrid->Command(command);
+ if (!res) {
+ ::Error("CheckMergedFiles","Error: No result for the find command\n");
+ return kFALSE;
+ }
+ TIter nextmap(res);
+ TMap *map = 0;
+ while ((map=(TMap*)nextmap())) {
+ // Guard against result entries lacking a "turl" key.
+ TObject *oturl = map->GetValue("turl");
+ TString turl = oturl ? oturl->GetName() : "";
+ if (!turl.Length()) {
+ // Nothing found
+ delete res;
+ return kFALSE;
+ }
+ turl.ReplaceAll("alien://", "");
+ turl.ReplaceAll(saliendir, "");
+ sfilename = gSystem->BaseName(turl);
+ turl = turl.Strip(TString::kLeading, '/');
+ // Now check to what the file corresponds to:
+ // original output - aliendir/%03d/filename
+ // merged file (which stage) - aliendir/filename-Stage%02d_%04d
+ // final merged file - aliendir/filename
+ if (sfilename == turl) {
+ if (sfilename == filename) {
+ doneFinal = kTRUE;
+ } else {
+ // check stage
+ Int_t index = sfilename.Index("Stage");
+ if (index<0) continue;
+ stmp = sfilename(index+5,2);
+ Int_t istage = atoi(stmp);
+ stmp = sfilename(index+8,4);
+ Int_t ijob = atoi(stmp);
+ if (istage<stage) continue; // Ignore lower stages
+ if (istage>stage) {
+ countStage = 0;
+ chunksDone.ResetAllBits();
+ stage = istage;
+ }
+ countStage++;
+ chunksDone.SetBitNumber(ijob);
+ }
+ } else {
+ countOrig++;
+ }
+ if (doneFinal) {
+ delete res;
+ printf("=> Removing files from previous stages...\n");
+ gGrid->Rm(Form("%s/*Stage*.root", aliendir));
+ for (i=1; i<stage; i++)
+ gGrid->Rm(Form("%s/*Stage%d*.zip", aliendir, i));
+ return kTRUE;
+ }
+ }
+ delete res;
+ // Compute number of jobs that were submitted for the current stage
+ Int_t ntotstage = countOrig;
+ for (i=1; i<=stage; i++) {
+ if (ntotstage%nperchunk) ntotstage = (ntotstage/nperchunk)+1;
+ else ntotstage = (ntotstage/nperchunk);
+ }
+ // Now compare with the number of set bits in the chunksDone array
+ Int_t nmissing = (stage>0)?(ntotstage - countStage):0;
+ // Print the info
+ printf("*** Found %d original files\n", countOrig);
+ if (stage==0) printf("*** No merging completed so far.\n");
+ else printf("*** Found %d out of %d files merged for stage %d\n", countStage, ntotstage, stage);
+ if (nmissing) printf("*** Number of merged files missing for this stage: %d -> check merging job completion\n", nmissing);
+ if (!submit) return doneFinal;
+ // Submit merging jobs for all missing chunks for the current stage.
+ TString query = Form("submit %s %s", jdl, aliendir);
+ Int_t ichunk = -1;
+ if (nmissing) {
+ for (i=0; i<nmissing; i++) {
+ ichunk = chunksDone.FirstNullBit(ichunk+1);
+ Int_t jobId = SubmitSingleJob(Form("%s %d %d", query.Data(), stage, ichunk));
+ if (!jobId) return kFALSE;
+ }
+ return kTRUE;
+ }
+ // Submit next stage of merging
+ if (stage==0) countStage = countOrig;
+ Int_t nchunks = (countStage/nperchunk);
+ if (countStage%nperchunk) nchunks += 1;
+ for (i=0; i<nchunks; i++) {
+ Int_t jobId = SubmitSingleJob(Form("%s %d %d", query.Data(), stage+1, i));
+ if (!jobId) return kFALSE;
+ }
+ return kTRUE;
+}
+
+//______________________________________________________________________________
+Int_t AliAnalysisAlien::SubmitSingleJob(const char *query)
+{
+// Submit one AliEn job described by <query>. Returns the job id reported by
+// the server, or 0 when there is no grid connection or the submission failed.
+ if (!gGrid) return 0;
+ printf("=> %s ------> ",query);
+ TGridResult *result = gGrid->Command(query);
+ if (!result) return 0;
+ TString sjobid = result->GetKey(0,"jobId");
+ delete result;
+ if (!sjobid.IsNull()) {
+ // Submission accepted - report and convert the id.
+ printf(" Job id: %s\n", sjobid.Data());
+ return atoi(sjobid);
+ }
+ // Submission rejected: dump the server output to help diagnose the reason.
+ printf("submission failed. Reason:\n");
+ gGrid->Stdout();
+ gGrid->Stderr();
+ ::Error("SubmitSingleJob", "Your query %s could not be submitted", query);
+ return 0;
+}
+
+//______________________________________________________________________________
+Bool_t AliAnalysisAlien::MergeOutput(const char *output, const char *basedir, Int_t nmaxmerge, Int_t stage, Int_t ichunk)
{
-// Merge all registered outputs from basedir.
+// Merge given output files from basedir. The file merger will merge nmaxmerge
+// files in a group. Merging can be done in stages:
+// stage=0 : will merge all existing files in a single stage
+// stage=1 : does a find command for all files that do NOT contain the string "Stage".
+// If their number is bigger that nmaxmerge, only the files from
+// ichunk*nmaxmerge to ichunk*(nmaxmerge+1)-1 will get merged as output_stage_<ichunk>
+// stage=n : does a find command for files named <output>Stage<stage-1>_*. If their number is bigger than
+// nmaxmerge, merge just the chunk ichunk, otherwise write the merged output to the file
+// named <output>.
+// Returns kTRUE when the requested merge (or chunk merge) completed successfully.
TString outputFile = output;
TString command;
TString outputChunk;
Bool_t merged = kTRUE;
Int_t index = outputFile.Index("@");
if (index > 0) outputFile.Remove(index);
- command = Form("find %s/ *%s", basedir, outputFile.Data());
+ TString inputFile = outputFile;
+ // For stages > 1 the inputs are the merged chunks produced by the previous stage.
+ if (stage>1) inputFile.ReplaceAll(".root", Form("-Stage%02d_*.root", stage-1));
+ command = Form("find %s/ *%s", basedir, inputFile.Data());
printf("command: %s\n", command.Data());
TGridResult *res = gGrid->Command(command);
if (!res) {
- printf("Error: No result for the find command\n");
+ ::Error("MergeOutput","No result for the find command\n");
return kFALSE;
}
TFileMerger *fm = 0;
TIter nextmap(res);
TMap *map = 0;
- // Check if there is a merge operation to resume
+ // Check if there is a merge operation to resume. Works only for stage 0 or 1.
outputChunk = outputFile;
outputChunk.ReplaceAll(".root", "_*.root");
// Check for existent temporary merge files
// Check overwrite mode and remove previous partial results if needed
- if (!gSystem->Exec(Form("ls %s 2>/dev/null", outputChunk.Data()))) {
- while (1) {
- // Skip as many input files as in a chunk
- for (Int_t counter=0; counter<nmaxmerge; counter++) map = (TMap*)nextmap();
- if (!map) {
- ::Error("MergeOutputs", "Cannot resume merging for <%s>, nentries=%d", outputFile.Data(), res->GetSize());
+ // Preserve old merging functionality for stage 0.
+ if (stage==0) {
+ if (!gSystem->Exec(Form("ls %s 2>/dev/null", outputChunk.Data()))) {
+ while (1) {
+ // Skip as many input files as in a chunk
+ for (Int_t counter=0; counter<nmaxmerge; counter++) map = (TMap*)nextmap();
+ if (!map) {
+ ::Error("MergeOutput", "Cannot resume merging for <%s>, nentries=%d", outputFile.Data(), res->GetSize());
+ delete res;
+ return kFALSE;
+ }
+ outputChunk = outputFile;
+ outputChunk.ReplaceAll(".root", Form("_%04d.root", countChunk));
+ countChunk++;
+ if (gSystem->AccessPathName(outputChunk)) continue;
+ // Merged file with chunks up to <countChunk> found
+ ::Info("MergeOutput", "Resume merging of <%s> from <%s>\n", outputFile.Data(), outputChunk.Data());
+ previousChunk = outputChunk;
+ break;
+ }
+ }
+ countZero = nmaxmerge;
+
+ while ((map=(TMap*)nextmap())) {
+ // Loop 'find' results and get next LFN
+ if (countZero == nmaxmerge) {
+ // First file in chunk - create file merger and add previous chunk if any.
+ fm = new TFileMerger(kFALSE);
+ fm->SetFastMethod(kTRUE);
+ if (previousChunk.Length()) fm->AddFile(previousChunk.Data());
+ outputChunk = outputFile;
+ outputChunk.ReplaceAll(".root", Form("_%04d.root", countChunk));
+ }
+ // If last file found, put merged results in the output file
+ if (map == res->Last()) outputChunk = outputFile;
+ TObjString *objs = dynamic_cast<TObjString*>(map->GetValue("turl"));
+ if (!objs || !objs->GetString().Length()) {
+ // Nothing found - skip this output
delete res;
+ delete fm;
return kFALSE;
+ }
+ // Add file to be merged and decrement chunk counter.
+ fm->AddFile(objs->GetString());
+ countZero--;
+ if (countZero==0 || map == res->Last()) {
+ if (!fm->GetMergeList() || !fm->GetMergeList()->GetSize()) {
+ // Nothing found - skip this output
+ ::Warning("MergeOutput", "No <%s> files found.", inputFile.Data());
+ delete res;
+ delete fm;
+ return kFALSE;
+ }
+ fm->OutputFile(outputChunk);
+ // Merge the outputs, then go to next chunk
+ if (!fm->Merge()) {
+ ::Error("MergeOutput", "Could not merge all <%s> files", outputFile.Data());
+ delete res;
+ delete fm;
+ return kFALSE;
+ } else {
+ ::Info("MergeOutputs", "\n##### Merged %d output files to <%s>", fm->GetMergeList()->GetSize(), outputChunk.Data());
+ gSystem->Unlink(previousChunk);
+ }
+ if (map == res->Last()) {
+ delete res;
+ delete fm;
+ break;
+ }
+ countChunk++;
+ countZero = nmaxmerge;
+ previousChunk = outputChunk;
}
- outputChunk = outputFile;
- outputChunk.ReplaceAll(".root", Form("_%04d.root", countChunk));
- countChunk++;
- if (gSystem->AccessPathName(outputChunk)) continue;
- // Merged file with chunks up to <countChunk> found
- printf("Resume merging of <%s> from <%s>\n", outputFile.Data(), outputChunk.Data());
- previousChunk = outputChunk;
- break;
}
- }
- countZero = nmaxmerge;
-
+ return merged;
+ }
+ // Merging stage different than 0.
+ // Move to the beginning of the requested chunk.
+ outputChunk = outputFile;
+ if (nmaxmerge < res->GetSize()) {
+ if (ichunk*nmaxmerge >= res->GetSize()) {
+ ::Error("MergeOutput", "Cannot merge merge chunk %d grouping %d files from %d total.", ichunk, nmaxmerge, res->GetSize());
+ delete res;
+ return kFALSE;
+ }
+ for (Int_t counter=0; counter<ichunk*nmaxmerge; counter++) map = (TMap*)nextmap();
+ outputChunk.ReplaceAll(".root", Form("-Stage%02d_%04d.root", stage, ichunk));
+ }
+ countZero = nmaxmerge;
+ fm = new TFileMerger(kFALSE);
+ fm->SetFastMethod(kTRUE);
while ((map=(TMap*)nextmap())) {
- // Loop 'find' results and get next LFN
- if (countZero == nmaxmerge) {
- // First file in chunk - create file merger and add previous chunk if any.
- fm = new TFileMerger(kFALSE);
- fm->SetFastMethod(kTRUE);
- if (previousChunk.Length()) fm->AddFile(previousChunk.Data());
- outputChunk = outputFile;
- outputChunk.ReplaceAll(".root", Form("_%04d.root", countChunk));
- }
- // If last file found, put merged results in the output file
- if (map == res->Last()) outputChunk = outputFile;
+ // Loop 'find' results and get next LFN
TObjString *objs = dynamic_cast<TObjString*>(map->GetValue("turl"));
if (!objs || !objs->GetString().Length()) {
// Nothing found - skip this output
// Add file to be merged and decrement chunk counter.
fm->AddFile(objs->GetString());
countZero--;
- if (countZero==0 || map == res->Last()) {
- fm->OutputFile(outputChunk);
- if (!fm->GetMergeList() || !fm->GetMergeList()->GetSize()) {
- // Nothing found - skip this output
- ::Warning("MergeOutputs", "No <%s> files found.", outputFile.Data());
- delete res;
- delete fm;
- return kFALSE;
- }
- // Merge the outputs, then go to next chunk
- if (!fm->Merge()) {
- ::Error("MergeOutputs", "Could not merge all <%s> files", outputFile.Data());
- delete res;
- delete fm;
- merged = kFALSE;
- return kFALSE;
- } else {
- ::Info("MergeOutputs", "\n##### Merged %d output files to <%s>", fm->GetMergeList()->GetSize(), outputChunk.Data());
- gSystem->Unlink(previousChunk);
- }
- if (map == res->Last()) {
- delete res;
- delete fm;
- break;
- }
- countChunk++;
- countZero = nmaxmerge;
- previousChunk = outputChunk;
- }
+ // Stop after collecting nmaxmerge inputs for this chunk.
+ if (countZero==0) break;
+ }
+ delete res;
+ if (!fm->GetMergeList() || !fm->GetMergeList()->GetSize()) {
+ // Nothing found - skip this output
+ ::Warning("MergeOutput", "No <%s> files found.", inputFile.Data());
+ delete fm;
+ return kFALSE;
+ }
+ fm->OutputFile(outputChunk);
+ // Merge the outputs
+ if (!fm->Merge()) {
+ ::Error("MergeOutput", "Could not merge all <%s> files", outputFile.Data());
+ delete fm;
+ return kFALSE;
+ } else {
+ ::Info("MergeOutput", "\n##### Merged %d output files to <%s>", fm->GetMergeList()->GetSize(), outputChunk.Data());
}
- return merged;
+ delete fm;
+ return kTRUE;
}
//______________________________________________________________________________
gEnv->SetValue("XNet.ReconnectTimeout",10);
gEnv->SetValue("XNet.FirstConnectMaxCnt",1);
}
+ // Make sure we change the temporary directory
+ gSystem->Setenv("TMPDIR", gSystem->pwd());
TObjArray *list = fOutputFiles.Tokenize(",");
TIter next(list);
TObjString *str;
if (!merged) {
Error("MergeOutputs", "Terminate() will NOT be executed");
return kFALSE;
- }
+ }
+ TFile *fileOpened = (TFile*)gROOT->GetListOfFiles()->FindObject(outputFile);
+ if (fileOpened) fileOpened->Close();
}
return kTRUE;
}
// Use the output files connected to output containers from the analysis manager
// rather than the files defined by SetOutputFiles
if (flag && !TObject::TestBit(AliAnalysisGrid::kDefaultOutputs))
- Info("SetDefaultOutputs", "Plugin will use the output files taken from \
- analysis manager");
+ Info("SetDefaultOutputs", "Plugin will use the output files taken from analysis manager");
TObject::SetBit(AliAnalysisGrid::kDefaultOutputs, flag);
}
Bool_t AliAnalysisAlien::StartAnalysis(Long64_t /*nentries*/, Long64_t /*firstEntry*/)
{
// Start remote grid analysis.
+ AliAnalysisManager *mgr = AliAnalysisManager::GetAnalysisManager();
+ Bool_t testMode = TestBit(AliAnalysisGrid::kTest);
+ if (!mgr || !mgr->IsInitialized()) {
+ Error("StartAnalysis", "You need an initialized analysis manager for this");
+ return kFALSE;
+ }
+ // Are we in PROOF mode ?
+ if (mgr->IsProofMode()) {
+ Info("StartAnalysis", "##### Starting PROOF analysis on via the plugin #####");
+ if (fProofCluster.IsNull()) {
+ Error("StartAnalysis", "You need to specify the proof cluster name via SetProofCluster");
+ return kFALSE;
+ }
+ if (fProofDataSet.IsNull() && !testMode) {
+ Error("StartAnalysis", "You need to specify a dataset using SetProofDataSet()");
+ return kFALSE;
+ }
+ // Set the needed environment
+ gEnv->SetValue("XSec.GSI.DelegProxy","2");
+ // Do we need to reset PROOF ? The success of the Reset operation cannot be checked
+ if (fProofReset && !testMode) {
+ if (fProofReset==1) {
+ Info("StartAnalysis", "Sending soft reset signal to proof cluster %s", fProofCluster.Data());
+ gROOT->ProcessLine(Form("TProof::Reset(\"%s\", kFALSE);", fProofCluster.Data()));
+ } else {
+ Info("StartAnalysis", "Sending hard reset signal to proof cluster %s", fProofCluster.Data());
+ gROOT->ProcessLine(Form("TProof::Reset(\"%s\", kTRUE);", fProofCluster.Data()));
+ }
+ Info("StartAnalysis", "Stopping the analysis. Please use SetProofReset(0) to resume.");
+ return kFALSE;
+ }
+ // Do we need to change the ROOT version ? The success of this cannot be checked.
+ if (!fRootVersionForProof.IsNull() && !testMode) {
+ gROOT->ProcessLine(Form("TProof::Mgr(\"%s\")->SetROOTVersion(\"%s\");",
+ fProofCluster.Data(), fRootVersionForProof.Data()));
+ }
+ // Connect to PROOF and check the status
+ Long_t proof = 0;
+ TString sworkers;
+ if (fNproofWorkersPerSlave) sworkers = Form("workers=%dx", fNproofWorkersPerSlave);
+ else if (fNproofWorkers) sworkers = Form("workers=%d", fNproofWorkers);
+ if (!testMode) {
+ if (!sworkers.IsNull())
+ proof = gROOT->ProcessLine(Form("TProof::Open(\"%s\", \"%s\");", fProofCluster.Data(), sworkers.Data()));
+ else
+ proof = gROOT->ProcessLine(Form("TProof::Open(\"%s\");", fProofCluster.Data()));
+ } else {
+ proof = gROOT->ProcessLine("TProof::Open(\"\");");
+ if (!proof) {
+ Error("StartAnalysis", "Could not start PROOF in test mode");
+ return kFALSE;
+ }
+ }
+ if (!proof) {
+ Error("StartAnalysis", "Could not connect to PROOF cluster <%s>", fProofCluster.Data());
+ return kFALSE;
+ }
+ if (fNproofWorkersPerSlave*fNproofWorkers > 0)
+ gROOT->ProcessLine(Form("gProof->SetParallel(%d);", fNproofWorkers));
+ // Is dataset existing ?
+ if (!testMode) {
+ TString dataset = fProofDataSet;
+ Int_t index = dataset.Index("#");
+ if (index>=0) dataset.Remove(index);
+// if (!gROOT->ProcessLine(Form("gProof->ExistsDataSet(\"%s\");",fProofDataSet.Data()))) {
+// Error("StartAnalysis", "Dataset %s not existing", fProofDataSet.Data());
+// return kFALSE;
+// }
+// Info("StartAnalysis", "Dataset %s found", dataset.Data());
+ }
+ // Is ClearPackages() needed ?
+ if (TestSpecialBit(kClearPackages)) {
+ Info("StartAnalysis", "ClearPackages signal sent to PROOF. Use SetClearPackages(kFALSE) to reset this.");
+ gROOT->ProcessLine("gProof->ClearPackages();");
+ }
+ // Is a given aliroot mode requested ?
+ TList optionsList;
+ if (!fAliRootMode.IsNull()) {
+ TString alirootMode = fAliRootMode;
+ if (alirootMode == "default") alirootMode = "";
+ Info("StartAnalysis", "You are requesting AliRoot mode: %s", fAliRootMode.Data());
+ optionsList.SetOwner();
+ optionsList.Add(new TNamed("ALIROOT_MODE", alirootMode.Data()));
+ // Check the additional libs to be loaded
+ TString extraLibs;
+ if (!alirootMode.IsNull()) extraLibs = "ANALYSIS:ANALYSISalice";
+ // Parse the extra libs for .so
+ if (fAdditionalLibs.Length()) {
+ TObjArray *list = fAdditionalLibs.Tokenize(" ");
+ TIter next(list);
+ TObjString *str;
+ while((str=(TObjString*)next()) && str->GetString().Contains(".so")) {
+ TString stmp = str->GetName();
+ if (stmp.BeginsWith("lib")) stmp.Remove(0,3);
+ stmp.ReplaceAll(".so","");
+ if (!extraLibs.IsNull()) extraLibs += ":";
+ extraLibs += stmp;
+ }
+ if (list) delete list;
+ }
+ if (!extraLibs.IsNull()) optionsList.Add(new TNamed("ALIROOT_EXTRA_LIBS",extraLibs.Data()));
+ // Check extra includes
+ if (!fIncludePath.IsNull()) {
+ TString includePath = fIncludePath;
+ includePath.ReplaceAll(" ",":");
+ includePath.Strip(TString::kTrailing, ':');
+ Info("StartAnalysis", "Adding extra includes: %s",includePath.Data());
+ optionsList.Add(new TNamed("ALIROOT_EXTRA_INCLUDES",includePath.Data()));
+ }
+ // Check if connection to grid is requested
+ if (TestSpecialBit(kProofConnectGrid))
+ optionsList.Add(new TNamed("ALIROOT_ENABLE_ALIEN", "1"));
+ // Enable AliRoot par
+ if (testMode) {
+ // Enable proof lite package
+ TString alirootLite = gSystem->ExpandPathName("$ALICE_ROOT/ANALYSIS/macros/AliRootProofLite.par");
+ for (Int_t i=0; i<optionsList.GetSize(); i++) {
+ TNamed *obj = (TNamed*)optionsList.At(i);
+ printf("%s %s\n", obj->GetName(), obj->GetTitle());
+ }
+ if (!gROOT->ProcessLine(Form("gProof->UploadPackage(\"%s\");",alirootLite.Data()))
+ && !gROOT->ProcessLine(Form("gProof->EnablePackage(\"%s\", (TList*)0x%lx);",alirootLite.Data(),(ULong_t)&optionsList))) {
+ Info("StartAnalysis", "AliRootProofLite enabled");
+ } else {
+ Error("StartAnalysis", "There was an error trying to enable package AliRootProofLite.par");
+ return kFALSE;
+ }
+ } else {
+ if (gROOT->ProcessLine(Form("gProof->EnablePackage(\"VO_ALICE@AliRoot::%s\", (TList*)0x%lx);",
+ fAliROOTVersion.Data(), (ULong_t)&optionsList))) {
+ Error("StartAnalysis", "There was an error trying to enable package VO_ALICE@AliRoot::%s", fAliROOTVersion.Data());
+ return kFALSE;
+ }
+ }
+ } else {
+ if (fAdditionalLibs.Contains(".so") && !testMode) {
+ Error("StartAnalysis", "You request additional libs to be loaded but did not enabled any AliRoot mode. Please refer to: \
+ \n http://aaf.cern.ch/node/83 and use a parameter for SetAliRootMode()");
+ return kFALSE;
+ }
+ }
+ // Enable par files if requested
+ if (fPackages && fPackages->GetEntries()) {
+ TIter next(fPackages);
+ TObject *package;
+ while ((package=next())) {
+ if (gROOT->ProcessLine(Form("gProof->UploadPackage(\"%s\");", package->GetName()))) {
+ if (gROOT->ProcessLine(Form("gProof->EnablePackage(\"%s\",kTRUE);", package->GetName()))) {
+ Error("StartAnalysis", "There was an error trying to enable package %s", package->GetName());
+ return kFALSE;
+ }
+ } else {
+ Error("StartAnalysis", "There was an error trying to upload package %s", package->GetName());
+ return kFALSE;
+ }
+ }
+ }
+ // Do we need to load analysis source files ?
+ // NOTE: don't load on client since this is anyway done by the user to attach his task.
+ if (fAnalysisSource.Length()) {
+ TObjArray *list = fAnalysisSource.Tokenize(" ");
+ TIter next(list);
+ TObjString *str;
+ while((str=(TObjString*)next())) {
+ gROOT->ProcessLine(Form("gProof->Load(\"%s+g\", kTRUE);", str->GetName()));
+ }
+ if (list) delete list;
+ }
+ if (testMode) {
+ // Register dataset to proof lite.
+ if (fFileForTestMode.IsNull()) {
+ Error("GetChainForTestMode", "For proof test mode please use SetFileForTestMode() pointing to a file that contains data file locations.");
+ return kFALSE;
+ }
+ if (gSystem->AccessPathName(fFileForTestMode)) {
+ Error("GetChainForTestMode", "File not found: %s", fFileForTestMode.Data());
+ return kFALSE;
+ }
+ TFileCollection *coll = new TFileCollection();
+ coll->AddFromFile(fFileForTestMode);
+ gROOT->ProcessLine(Form("gProof->RegisterDataSet(\"test_collection\", (TFileCollection*)0x%lx, \"OV\");", (ULong_t)coll));
+ gROOT->ProcessLine("gProof->ShowDataSets()");
+ }
+ return kTRUE;
+ }
// Check if output files have to be taken from the analysis manager
if (TestBit(AliAnalysisGrid::kDefaultOutputs)) {
- AliAnalysisManager *mgr = AliAnalysisManager::GetAnalysisManager();
- if (!mgr || !mgr->IsInitialized()) {
- Error("StartAnalysis", "You need an initialized analysis manager for this");
- return kFALSE;
- }
fOutputFiles = "";
TIter next(mgr->GetOutputs());
AliAnalysisDataContainer *output;
}
if (!CreateJDL()) return kFALSE;
if (TestBit(AliAnalysisGrid::kOffline)) return kFALSE;
- if (TestBit(AliAnalysisGrid::kTest)) {
+ if (testMode) {
// Locally testing the analysis
Info("StartAnalysis", "\n_______________________________________________________________________ \
\n Running analysis script in a daughter shell as on a worker node \
TString mergeJDLName = fExecutable;
mergeJDLName.ReplaceAll(".sh", "_merge.jdl");
Int_t ntosubmit = fInputFiles->GetEntries();
- printf("### Submitting %d merging jobs...\n", ntosubmit);
for (Int_t i=0; i<ntosubmit; i++) {
- TString query;
TString runOutDir = gSystem->BaseName(fInputFiles->At(i)->GetName());
runOutDir.ReplaceAll(".xml", "");
- if (fOutputToRunNo)
- query = Form("submit %s %s", mergeJDLName.Data(), runOutDir.Data());
- else
- query = Form("submit %s %03d", mergeJDLName.Data(), i);
- printf("********* %s\n",query.Data());
- TGridResult *res = gGrid->Command(query);
- if (res) {
- const char *cjobId = res->GetKey(0,"jobId");
- if (!cjobId) {
- gGrid->Stdout();
- gGrid->Stderr();
- Error("StartAnalysis", "Your JDL %s could not be submitted", mergeJDLName.Data());
- return kFALSE;
- } else {
- Info("StartAnalysis", "\n_______________________________________________________________________ \
- \n##### Your JDL %s was successfully submitted. \nTHE JOB ID IS: %s \
- \n_______________________________________________________________________",
- mergeJDLName.Data(), cjobId);
- }
- delete res;
- } else {
- Error("SubmitMerging", "No grid result after submission !!! Bailing out...");
- return kFALSE;
- }
+ if (fOutputToRunNo) {
+ // The output directory is the run number
+ printf("### Submitting merging job for run <%s>\n", runOutDir.Data());
+ runOutDir = Form("%s/%s", fGridOutputDir.Data(), runOutDir.Data());
+ } else {
+ // The output directory is the master number in 3 digits format
+ printf("### Submitting merging job for master <%03d>\n", i);
+ runOutDir = Form("%s/%03d",fGridOutputDir.Data(), i);
+ }
+ // Check now the number of merging stages.
+ TObjArray *list = fOutputFiles.Tokenize(",");
+ TIter next(list);
+ TObjString *str;
+ TString outputFile;
+ while((str=(TObjString*)next())) {
+ outputFile = str->GetString();
+ Int_t index = outputFile.Index("@");
+ if (index > 0) outputFile.Remove(index);
+ if (!fMergeExcludes.Contains(outputFile)) break;
+ }
+ delete list;
+ Bool_t done = CheckMergedFiles(outputFile, runOutDir, fMaxMergeFiles, kTRUE, mergeJDLName);
+ if (!done) return kFALSE;
}
if (!ntosubmit) return kTRUE;
Info("StartAnalysis", "\n#### STARTING AN ALIEN SHELL FOR YOU. EXIT WHEN YOUR MERGING JOBS HAVE FINISHED. #### \
//______________________________________________________________________________
Bool_t AliAnalysisAlien::SubmitNext()
{
-// Submit next bunch of master jobs if the queue is free.
+// Submit next bunch of master jobs if the queue is free. The first master job is
+// submitted right away, while the next will not be unless the previous was split.
+// The plugin will not submit new master jobs if there are more than 500 jobs in
+// waiting phase.
static Bool_t iscalled = kFALSE;
static Int_t firstmaster = 0;
static Int_t lastmaster = 0;
Int_t ntosubmit = 0;
TGridResult *res;
TString jobID = "";
- if (!fNsubmitted) ntosubmit = 1;
- else {
+ Int_t nmasterjobs = fInputFiles->GetEntries();
+ if (!fNsubmitted) {
+ ntosubmit = 1;
+ if (!IsUseSubmitPolicy()) {
+      if (nmasterjobs>5)
+         Info("SubmitNext","### Warning submit policy not used ! Submitting too many jobs at a time may be prohibited. \
+ \n### You can use SetUseSubmitPolicy() to enable if you have problems.");
+ ntosubmit = nmasterjobs;
+ }
+ } else {
TString status = GetJobStatus(firstmaster, lastmaster, nrunning, nwaiting, nerror, ndone);
printf("=== master %d: %s\n", lastmaster, status.Data());
// If last master not split, just return
if (status != "SPLIT") {iscalled = kFALSE; return kTRUE;}
// No more than 100 waiting jobs
- if (nwaiting>100) {iscalled = kFALSE; return kTRUE;}
+ if (nwaiting>500) {iscalled = kFALSE; return kTRUE;}
npermaster = (nrunning+nwaiting+nerror+ndone)/fNsubmitted;
- if (npermaster) ntosubmit = (100-nwaiting)/npermaster;
+ if (npermaster) ntosubmit = (500-nwaiting)/npermaster;
if (!ntosubmit) ntosubmit = 1;
printf("=== WAITING(%d) RUNNING(%d) DONE(%d) OTHER(%d) NperMaster=%d => to submit %d jobs\n",
nwaiting, nrunning, ndone, nerror, npermaster, ntosubmit);
}
- Int_t nmasterjobs = fInputFiles->GetEntries();
for (Int_t i=0; i<ntosubmit; i++) {
// Submit for a range of enumeration of runs.
if (fNsubmitted>=nmasterjobs) {iscalled = kFALSE; return kTRUE;}
out << "// Automatically generated analysis steering macro executed in grid subjobs" << endl << endl;
out << " TStopwatch timer;" << endl;
out << " timer.Start();" << endl << endl;
+ // Change temp directory to current one
+ out << "// Set temporary merging directory to current one" << endl;
+ out << " gSystem->Setenv(\"TMPDIR\", gSystem->pwd());" << endl << endl;
out << "// load base root libraries" << endl;
out << " gSystem->Load(\"libTree\");" << endl;
out << " gSystem->Load(\"libGeom\");" << endl;
if (fIncludePath.Length()) out << " gSystem->AddIncludePath(\"" << fIncludePath.Data() << "\");" << endl;
out << " gSystem->AddIncludePath(\"-I$ALICE_ROOT/include\");" << endl << endl;
out << "// Load analysis framework libraries" << endl;
+ TString setupPar = "AliAnalysisAlien::SetupPar";
if (!fPackages) {
out << " gSystem->Load(\"libSTEERBase\");" << endl;
out << " gSystem->Load(\"libESD\");" << endl;
TIter next(fPackages);
TObject *obj;
TString pkgname;
- TString setupPar = "AliAnalysisAlien::SetupPar";
while ((obj=next())) {
pkgname = obj->GetName();
if (pkgname == "STEERBase" ||
while((str=(TObjString*)next())) {
if (str->GetString().Contains(".so"))
out << " gSystem->Load(\"" << str->GetString().Data() << "\");" << endl;
+ if (str->GetString().Contains(".par"))
+ out << " if (!" << setupPar << "(\"" << str->GetString() << "\")) return;" << endl;
}
if (list) delete list;
}
if (IsUsingTags()) {
out << " TChain *chain = CreateChainFromTags(\"wn.xml\", anatype);" << endl << endl;
} else {
- if(fFriendChainName!="AliAOD.VertexingHF.root") {
- out << " TChain *chain = CreateChain(\"wn.xml\", anatype);" << endl << endl;
- } else {
- out << " // Check if the macro to create the chain was provided" << endl;
- out << " if (gSystem->AccessPathName(\"MakeAODInputChain.C\")) {" << endl;
- out << " ::Error(\"" << func.Data() << "\", \"File MakeAODInputChain.C not provided. Aborting.\");" << endl;
- out << " return;" << endl;
- out << " }" << endl;
- out << " gROOT->LoadMacro(\"MakeAODInputChain.C\");" << endl;
- out << " TChain *chain = MakeAODInputChain(\"wn.xml\",\"none\");" << endl << endl;
- }
+ out << " TChain *chain = CreateChain(\"wn.xml\", anatype);" << endl << endl;
}
out << "// read the analysis manager from file" << endl;
TString analysisFile = fExecutable;
out << " mgr = (AliAnalysisManager*)file->Get(key->GetName());" << endl;
out << " };" << endl;
out << " if (!mgr) {" << endl;
- out << " ::Error(\"" << func.Data() << "\", \"No analysis manager found in file" << analysisFile <<"\");" << endl;
+ out << " ::Error(\"" << func.Data() << "\", \"No analysis manager found in file " << analysisFile <<"\");" << endl;
out << " return;" << endl;
out << " }" << endl << endl;
out << " mgr->PrintStatus();" << endl;
msg += " AliLHCTagCuts *lhcCuts,\n";
msg += " AliDetectorTagCuts *detCuts,\n";
msg += " AliEventTagCuts *evCuts)";
- Info("WriteAnalysisMacro", msg.Data());
+ Info("WriteAnalysisMacro", "%s", msg.Data());
}
}
if (!IsUsingTags() || fFriendChainName!="") {
TString func = mergingMacro;
TString comment;
func.ReplaceAll(".C", "");
- out << "void " << func.Data() << "(const char *dir)" << endl;
+ out << "void " << func.Data() << "(const char *dir, Int_t stage=0, Int_t ichunk=0)" << endl;
out << "{" << endl;
out << "// Automatically generated merging macro executed in grid subjobs" << endl << endl;
out << " TStopwatch timer;" << endl;
out << " timer.Start();" << endl << endl;
- out << "// load base root libraries" << endl;
- out << " gSystem->Load(\"libTree\");" << endl;
- out << " gSystem->Load(\"libGeom\");" << endl;
- out << " gSystem->Load(\"libVMC\");" << endl;
- out << " gSystem->Load(\"libPhysics\");" << endl << endl;
- out << " gSystem->Load(\"libMinuit\");" << endl << endl;
+ if (!fExecutableCommand.Contains("aliroot")) {
+ out << "// load base root libraries" << endl;
+ out << " gSystem->Load(\"libTree\");" << endl;
+ out << " gSystem->Load(\"libGeom\");" << endl;
+ out << " gSystem->Load(\"libVMC\");" << endl;
+ out << " gSystem->Load(\"libPhysics\");" << endl << endl;
+ out << " gSystem->Load(\"libMinuit\");" << endl << endl;
+ }
if (fAdditionalRootLibs.Length()) {
// in principle libtree /lib geom libvmc etc. can go into this list, too
out << "// Add aditional libraries" << endl;
out << " gSystem->AddIncludePath(\"-I$ALICE_ROOT/include\");" << endl << endl;
out << "// Load analysis framework libraries" << endl;
if (!fPackages) {
- out << " gSystem->Load(\"libSTEERBase\");" << endl;
- out << " gSystem->Load(\"libESD\");" << endl;
- out << " gSystem->Load(\"libAOD\");" << endl;
+ if (!fExecutableCommand.Contains("aliroot")) {
+ out << " gSystem->Load(\"libSTEERBase\");" << endl;
+ out << " gSystem->Load(\"libESD\");" << endl;
+ out << " gSystem->Load(\"libAOD\");" << endl;
+ }
out << " gSystem->Load(\"libANALYSIS\");" << endl;
out << " gSystem->Load(\"libANALYSISalice\");" << endl;
out << " gSystem->Load(\"libCORRFW\");" << endl << endl;
out << " gEnv->SetValue(\"XNet.MaxRedirectCount\",2);" << endl;
out << " gEnv->SetValue(\"XNet.ReconnectTimeout\",10);" << endl;
out << " gEnv->SetValue(\"XNet.FirstConnectMaxCnt\",1);" << endl << endl;
- }
+ }
+ // Change temp directory to current one
+ out << "// Set temporary merging directory to current one" << endl;
+ out << " gSystem->Setenv(\"TMPDIR\", gSystem->pwd());" << endl << endl;
out << "// Connect to AliEn" << endl;
out << " if (!TGrid::Connect(\"alien://\")) return;" << endl;
- out << " TString outputDir = \"" << fGridOutputDir << "/\";" << endl;
- out << " outputDir += dir;" << endl;
+ out << " Bool_t laststage = kFALSE;" << endl;
+ out << " TString outputDir = dir;" << endl;
out << " TString outputFiles = \"" << fOutputFiles << "\";" << endl;
out << " TString mergeExcludes = \"" << fMergeExcludes << "\";" << endl;
out << " mergeExcludes += \"" << AliAnalysisManager::GetAnalysisManager()->GetExtraFiles() << "\";" << endl;
- out << " TObjArray *list = outputFiles.Tokenize(\" \");" << endl;
+ out << " TObjArray *list = outputFiles.Tokenize(\",\");" << endl;
out << " TIter *iter = new TIter(list);" << endl;
out << " TObjString *str;" << endl;
out << " TString outputFile;" << endl;
out << " Bool_t merged = kTRUE;" << endl;
out << " while((str=(TObjString*)iter->Next())) {" << endl;
out << " outputFile = str->GetString();" << endl;
+ out << " if (outputFile.Contains(\"*\")) continue;" << endl;
out << " Int_t index = outputFile.Index(\"@\");" << endl;
out << " if (index > 0) outputFile.Remove(index);" << endl;
out << " // Skip already merged outputs" << endl;
out << " continue;" << endl;
out << " }" << endl;
out << " if (mergeExcludes.Contains(outputFile.Data())) continue;" << endl;
- out << " merged = AliAnalysisAlien::MergeOutput(outputFile, outputDir, " << fMaxMergeFiles << ");" << endl;
+ out << " merged = AliAnalysisAlien::MergeOutput(outputFile, outputDir, " << fMaxMergeFiles << ", stage, ichunk);" << endl;
out << " if (!merged) {" << endl;
out << " printf(\"ERROR: Cannot merge %s\\n\", outputFile.Data());" << endl;
+ out << " return;" << endl;
out << " }" << endl;
+ out << " // Check if this was the last stage. If yes, run terminate for the tasks." << endl;
+ out << " if (!gSystem->AccessPathName(outputFile)) laststage = kTRUE;" << endl;
out << " }" << endl;
- out << "// read the analysis manager from file" << endl;
+ out << " // all outputs merged, validate" << endl;
+ out << " ofstream out;" << endl;
+ out << " out.open(\"outputs_valid\", ios::out);" << endl;
+ out << " out.close();" << endl;
+ out << " // read the analysis manager from file" << endl;
TString analysisFile = fExecutable;
analysisFile.ReplaceAll(".sh", ".root");
+ out << " if (!laststage) return;" << endl;
out << " TFile *file = TFile::Open(\"" << analysisFile << "\");" << endl;
- out << " if (!file) return;" << endl;
+ out << " if (!file) return;" << endl;
out << " TIter nextkey(file->GetListOfKeys());" << endl;
out << " AliAnalysisManager *mgr = 0;" << endl;
out << " TKey *key;" << endl;
//______________________________________________________________________________
Bool_t AliAnalysisAlien::SetupPar(const char *package)
{
-// Compile the par file archive pointed by <package>. This must be present in the current durectory.
+// Compile the par file archive pointed by <package>. This must be present in the current directory.
// Note that for loading the compiled library. The current directory should have precedence in
// LD_LIBRARY_PATH
TString pkgdir = package;
CdWork();
TString workdir = gGrid->GetHomeDirectory();
TString bindir = Form("%s/bin", workdir.Data());
- if (!DirectoryExists(bindir)) gGrid->Mkdir(bindir);
+ if (!DirectoryExists(bindir)) gGrid->Mkdir(bindir,"-p");
workdir += fGridWorkingDir;
TString executable = Form("%s/bin/%s", gGrid->GetHomeDirectory(), fExecutable.Data());
if (FileExists(executable)) gGrid->Rm(executable);
if (TObject::TestBit(AliAnalysisGrid::kUsePars)) out << "export LD_LIBRARY_PATH=.:$LD_LIBRARY_PATH" << endl;
TString mergeMacro = fExecutable;
mergeMacro.ReplaceAll(".sh", "_merge.C");
- out << "export ARG=\"" << mergeMacro << "(\\\"$1\\\")\"" << endl;
+ out << "export ARG=\"" << mergeMacro << "(\\\"$1\\\",$2,$3)\"" << endl;
out << fExecutableCommand << " " << "$ARG" << endl;
out << "echo \"======== " << mergeMacro.Data() << " finished with exit code: $? ========\"" << endl;
out << "echo \"############## memory after: ##############\"" << endl;
CdWork();
TString workdir = gGrid->GetHomeDirectory();
TString bindir = Form("%s/bin", workdir.Data());
- if (!DirectoryExists(bindir)) gGrid->Mkdir(bindir);
+ if (!DirectoryExists(bindir)) gGrid->Mkdir(bindir,"-p");
workdir += fGridWorkingDir;
TString executable = Form("%s/bin/%s", gGrid->GetHomeDirectory(), mergeExec.Data());
if (FileExists(executable)) gGrid->Rm(executable);
AliAnalysisManager *mgr = AliAnalysisManager::GetAnalysisManager();
TString extra = mgr->GetExtraFiles();
while ((os=(TObjString*)next1())) {
+ if (merge) break;
outputFile = os->GetString();
Int_t index = outputFile.Index("@");
if (index > 0) outputFile.Remove(index);
if (outputFile.Contains("*")) continue;
out << "if ! [ -f " << outputFile.Data() << " ] ; then" << endl;
out << " error=1" << endl;
- out << " echo \"Output file(s) not found. Job FAILED !\"" << outStream << endl;
- out << " echo \"Output file(s) not found. Job FAILED !\" >> stderr" << endl;
+ out << " echo \"Output file " << outputFile << " not found. Job FAILED !\"" << outStream << endl;
+ out << " echo \"Output file " << outputFile << " not found. Job FAILED !\" >> stderr" << endl;
out << "fi" << endl;
}
delete arr;
- if (!merge) {
- out << "if ! [ -f outputs_valid ] ; then" << endl;
- out << " error=1" << endl;
- out << " echo \"Output files were not validated by the analysis manager\" >> stdout" << endl;
- out << " echo \"Output files were not validated by the analysis manager\" >> stderr" << endl;
- out << "fi" << endl;
- }
+ out << "if ! [ -f outputs_valid ] ; then" << endl;
+ out << " error=1" << endl;
+ out << " echo \"Output files were not validated by the analysis manager\" >> stdout" << endl;
+ out << " echo \"Output files were not validated by the analysis manager\" >> stderr" << endl;
+ out << "fi" << endl;
out << "if [ $error = 0 ] ; then" << endl;
out << " echo \"* ---------------- Job Validated ------------------*\"" << outStream << endl;