fNtestFiles(0),
fNrunsPerMaster(0),
fMaxMergeFiles(0),
+ fMaxMergeStages(0),
fNsubmitted(0),
fProductionMode(0),
fOutputToRunNo(0),
fNtestFiles(0),
fNrunsPerMaster(0),
fMaxMergeFiles(0),
+ fMaxMergeStages(0),
fNsubmitted(0),
fProductionMode(0),
fOutputToRunNo(0),
fNtestFiles(other.fNtestFiles),
fNrunsPerMaster(other.fNrunsPerMaster),
fMaxMergeFiles(other.fMaxMergeFiles),
+ fMaxMergeStages(other.fMaxMergeStages),
fNsubmitted(other.fNsubmitted),
fProductionMode(other.fProductionMode),
fOutputToRunNo(other.fOutputToRunNo),
fNtestFiles = other.fNtestFiles;
fNrunsPerMaster = other.fNrunsPerMaster;
fMaxMergeFiles = other.fMaxMergeFiles;
+ fMaxMergeStages = other.fMaxMergeStages;
fNsubmitted = other.fNsubmitted;
fProductionMode = other.fProductionMode;
fOutputToRunNo = other.fOutputToRunNo;
if (!fArguments.IsNull())
fGridJDL->SetArguments(fArguments, "Arguments for the executable command");
if (IsOneStageMerging()) fMergingJDL->SetArguments(fGridOutputDir);
- else fMergingJDL->SetArguments("$1 $2 $3");
+ else fMergingJDL->SetArguments("wn.xml $2 $3"); // xml, stage, laststage(0 or 1)
+
fGridJDL->SetValue("TTL", Form("\"%d\"",fTTL));
fGridJDL->SetDescription("TTL", Form("Time after which the job is killed (%d min.)", fTTL/60));
fMergingJDL->SetValue("TTL", Form("\"%d\"",fTTL));
if (fSplitMaxInputFileNumber > 0) {
fGridJDL->SetValue("SplitMaxInputFileNumber", Form("\"%d\"", fSplitMaxInputFileNumber));
fGridJDL->SetDescription("SplitMaxInputFileNumber", "Maximum number of input files to be processed per subjob");
+ }
+ if (!IsOneStageMerging()) {
+ fMergingJDL->SetValue("SplitMaxInputFileNumber", "\"$3\"");
+ fMergingJDL->SetDescription("SplitMaxInputFileNumber", "Maximum number of input files to be merged in one go");
}
if (fSplitMode.Length()) {
fGridJDL->SetValue("Split", Form("\"%s\"", fSplitMode.Data()));
fGridJDL->SetDescription("Split", "We split per SE or file");
- }
+ }
+ fMergingJDL->SetValue("Split", "\"se\"");
+ fMergingJDL->SetDescription("Split", "We split per SE for merging in stages");
if (!fAliROOTVersion.IsNull()) {
fGridJDL->AddToPackages("AliRoot", fAliROOTVersion,"VO_ALICE", "List of requested packages");
fMergingJDL->AddToPackages("AliRoot", fAliROOTVersion, "VO_ALICE", "List of requested packages");
}
fGridJDL->SetInputDataListFormat(fInputFormat, "Format of input data");
fGridJDL->SetInputDataList("wn.xml", "Collection name to be processed on each worker node");
+ fMergingJDL->SetInputDataListFormat(fInputFormat, "Format of input data");
+ fMergingJDL->SetInputDataList("wn.xml", "Collection name to be processed on each worker node");
fGridJDL->AddToInputSandbox(Form("LF:%s/%s", workdir.Data(), fAnalysisMacro.Data()), "List of input files to be uploaded to workers");
TString analysisFile = fExecutable;
analysisFile.ReplaceAll(".sh", ".root");
while ((os=(TObjString*)next2())) {
if (!first) comment = NULL;
TString currentfile = os->GetString();
- if (!IsOneStageMerging()) currentfile.ReplaceAll(".zip", "-Stage$2_$3.zip");
if (!currentfile.Contains("@") && fCloseSE.Length())
fMergingJDL->AddToOutputArchive(Form("%s@%s",currentfile.Data(), fCloseSE.Data()), comment);
else
fGridJDL->AddToOutputSandbox(Form("%s@%s",os->GetString().Data(), fCloseSE.Data()), comment);
else
fGridJDL->AddToOutputSandbox(os->GetString(), comment);
- first = kFALSE;
+ first = kFALSE;
if (fMergeExcludes.Contains(sout)) continue;
if (!os->GetString().Contains("@") && fCloseSE.Length())
fMergingJDL->AddToOutputSandbox(Form("%s@%s",os->GetString().Data(), fCloseSE.Data()), comment);
TString workdir;
if (!fProductionMode && !fGridWorkingDir.BeginsWith("/alice")) workdir = gGrid->GetHomeDirectory();
workdir += fGridWorkingDir;
+ fMergingJDL->AddToInputDataCollection("LF:$1/Stage_$2.xml,nodownload", "Collection of files to be merged for stage $2");
+ fMergingJDL->SetOutputDirectory("$1/Stage_$2/#alien_counter_03i#", "Output directory");
if (fProductionMode) {
TIter next(fInputFiles);
- while ((os=next()))
+ while ((os=next())) {
fGridJDL->AddToInputDataCollection(Form("LF:%s,nodownload", os->GetName()), "Input xml collections");
+ }
fGridJDL->SetOutputDirectory(Form("%s/#alien_counter_04i#", fGridOutputDir.Data()));
- fMergingJDL->SetOutputDirectory(fGridOutputDir);
} else {
if (!fRunNumbers.Length() && !fRunRange[0]) {
// One jdl with no parameters in case input data is specified by name.
else fGridJDL->SetOutputDirectory(Form("%s/$2",fGridOutputDir.Data()), "Output directory");
} else {
fGridJDL->SetOutputDirectory(Form("%s/$2/#alien_counter_03i#", fGridOutputDir.Data()), "Output directory");
- fMergingJDL->SetOutputDirectory("$1", "Output directory");
}
}
}
// Generate the JDL as a string
TString sjdl = fGridJDL->Generate();
TString sjdl1 = fMergingJDL->Generate();
- Int_t index;
+ fMergingJDL->SetOutputDirectory("$1", "Output directory");
+ fMergingJDL->AddToInputSandbox("LF:$1/$4");
+ TString sjdl2 = fMergingJDL->Generate();
+ Int_t index, index1;
sjdl.ReplaceAll("\"LF:", "\n \"LF:");
sjdl.ReplaceAll("(member", "\n (member");
sjdl.ReplaceAll("\",\"VO_", "\",\n \"VO_");
sjdl1.ReplaceAll("{\n \n", "{\n");
sjdl1.ReplaceAll("\n\n", "\n");
sjdl1.ReplaceAll("OutputDirectory", "OutputDir");
+ sjdl2.ReplaceAll("\"LF:", "\n \"LF:");
+ sjdl2.ReplaceAll("(member", "\n (member");
+ sjdl2.ReplaceAll("\",\"VO_", "\",\n \"VO_");
+ sjdl2.ReplaceAll("{", "{\n ");
+ sjdl2.ReplaceAll("};", "\n};");
+ sjdl2.ReplaceAll("{\n \n", "{\n");
+ sjdl2.ReplaceAll("\n\n", "\n");
+ sjdl2.ReplaceAll("OutputDirectory", "OutputDir");
sjdl += "JDLVariables = \n{\n \"Packages\",\n \"OutputDir\"\n};\n";
sjdl.Prepend(Form("Jobtag = {\n \"comment:%s\"\n};\n", fJobTag.Data()));
index = sjdl.Index("JDLVariables");
if (index >= 0) sjdl.Insert(index, "\n# JDL variables\n");
sjdl += "Workdirectorysize = {\"5000MB\"};";
+ sjdl1 += "Workdirectorysize = {\"5000MB\"};";
sjdl1 += "JDLVariables = \n{\n \"Packages\",\n \"OutputDir\"\n};\n";
index = fJobTag.Index(":");
if (index < 0) index = fJobTag.Length();
TString jobTag = fJobTag;
- jobTag.Insert(index, "_Merging");
sjdl1.Prepend(Form("Jobtag = {\n \"comment:%s_Merging\"\n};\n", jobTag.Data()));
- sjdl1.Prepend("# Generated merging jdl\n# $1 = full alien path to output directory to be merged\n# $2 = merging stage\n# $3 = merged chunk\n");
+ sjdl1.Prepend("# Generated merging jdl \
+ \n# $1 = full alien path to output directory to be merged \
+ \n# $2 = merging stage \
+ \n# $3 = maximum number of files to merge (must be >= 10000 for the last stage) \
+ \n# $4 = xml made via: find <OutputDir> *Stage<n-1>/*root_archive.zip\n");
+ sjdl2.Prepend(Form("Jobtag = {\n \"comment:%s_FinalMerging\"\n};\n", jobTag.Data()));
+ sjdl2.Prepend("# Generated merging jdl \
+ \n# $1 = full alien path to output directory to be merged \
+ \n# $2 = merging stage \
+ \n# $3 = maximum number of files to merge (must be >= 10000 for the last stage) \
+ \n# $4 = xml made via: find <OutputDir> *Stage<n-1>/*root_archive.zip\n");
index = sjdl1.Index("JDLVariables");
if (index >= 0) sjdl1.Insert(index, "\n# JDL variables\n");
+ index = sjdl2.Index("JDLVariables");
+ if (index >= 0) sjdl2.Insert(index, "\n# JDL variables\n");
sjdl1 += "Workdirectorysize = {\"5000MB\"};";
+ sjdl2 += "Workdirectorysize = {\"5000MB\"};";
+ index = sjdl2.Index("Split =");
+ if (index>=0) {
+ index1 = sjdl2.Index("\n", index);
+ sjdl2.Remove(index, index1-index+1);
+ }
+ index = sjdl2.Index("SplitMaxInputFileNumber");
+ if (index>=0) {
+ index1 = sjdl2.Index("\n", index);
+ sjdl2.Remove(index, index1-index+1);
+ }
+ index = sjdl2.Index("InputDataCollection");
+ if (index>=0) {
+ index1 = sjdl2.Index(";", index);
+ sjdl2.Remove(index, index1-index+1);
+ }
+ index = sjdl2.Index("InputDataListFormat");
+ if (index>=0) {
+ index1 = sjdl2.Index("\n", index);
+ sjdl2.Remove(index, index1-index+1);
+ }
+ index = sjdl2.Index("InputDataList");
+ if (index>=0) {
+ index1 = sjdl2.Index("\n", index);
+ sjdl2.Remove(index, index1-index+1);
+ }
+ sjdl2.ReplaceAll("wn.xml", "$4");
// Write jdl to file
ofstream out;
out.open(fJDLName.Data(), ios::out);
return kFALSE;
}
out << sjdl << endl;
+ out.close();
TString mergeJDLName = fExecutable;
mergeJDLName.ReplaceAll(".sh", "_merge.jdl");
if (fMergeViaJDL) {
ofstream out1;
out1.open(mergeJDLName.Data(), ios::out);
- if (out.bad()) {
+ if (out1.bad()) {
Error("WriteJDL", "Bad file name: %s", mergeJDLName.Data());
return kFALSE;
}
out1 << sjdl1 << endl;
+ out1.close();
+ ofstream out2;
+ TString finalJDL = mergeJDLName;
+ finalJDL.ReplaceAll(".jdl", "_final.jdl");
+ out2.open(finalJDL.Data(), ios::out);
+ if (out2.bad()) {
+ Error("WriteJDL", "Bad file name: %s", finalJDL.Data());
+ return kFALSE;
+ }
+ out2 << sjdl2 << endl;
+ out2.close();
}
// Copy jdl to grid workspace
} else {
TString locjdl = Form("%s/%s", fGridOutputDir.Data(),fJDLName.Data());
TString locjdl1 = Form("%s/%s", fGridOutputDir.Data(),mergeJDLName.Data());
+ TString finalJDL = mergeJDLName;
+ finalJDL.ReplaceAll(".jdl", "_final.jdl");
+ TString locjdl2 = Form("%s/%s", fGridOutputDir.Data(),finalJDL.Data());
if (fProductionMode) {
locjdl = Form("%s/%s", workdir.Data(),fJDLName.Data());
locjdl1 = Form("%s/%s", workdir.Data(),mergeJDLName.Data());
+ locjdl2 = Form("%s/%s", workdir.Data(),finalJDL.Data());
}
if (FileExists(locjdl)) gGrid->Rm(locjdl);
if (FileExists(locjdl1)) gGrid->Rm(locjdl1);
+ if (FileExists(locjdl2)) gGrid->Rm(locjdl2);
Info("WriteJDL", "\n##### Copying JDL file <%s> to your AliEn output directory", fJDLName.Data());
TFile::Cp(Form("file:%s",fJDLName.Data()), Form("alien://%s", locjdl.Data()));
if (fMergeViaJDL) {
- Info("WriteJDL", "\n##### Copying merging JDL file <%s> to your AliEn output directory", mergeJDLName.Data());
+ Info("WriteJDL", "\n##### Copying merging JDL files <%s> to your AliEn output directory", mergeJDLName.Data());
TFile::Cp(Form("file:%s",mergeJDLName.Data()), Form("alien://%s", locjdl1.Data()));
+ TFile::Cp(Form("file:%s",finalJDL.Data()), Form("alien://%s", locjdl2.Data()));
}
}
return kTRUE;
}
//______________________________________________________________________________
-Bool_t AliAnalysisAlien::CheckMergedFiles(const char *filename, const char *aliendir, Int_t nperchunk, Bool_t submit, const char *jdl)
+Bool_t AliAnalysisAlien::CheckMergedFiles(const char *filename, const char *aliendir, Int_t nperchunk, const char *jdl)
{
-// Static method that checks the status of merging. This can submit merging jobs that did not produced the expected
-// output. If <submit> is false (checking) returns true only when the final merged file was found. If submit is true returns
-// true if the jobs were successfully submitted.
- Int_t countOrig = 0;
- Int_t countStage = 0;
- Int_t stage = 0;
- Int_t i;
- Bool_t doneFinal = kFALSE;
- TBits chunksDone;
- TString saliendir(aliendir);
- TString sfilename, stmp;
- saliendir.ReplaceAll("//","/");
- saliendir = saliendir.Strip(TString::kTrailing, '/');
- if (!gGrid) {
- ::Error("GetNregisteredFiles", "You need to be connected to AliEn.");
+// Checks current merge stage, makes xml for the next stage, counts number of files, submits next stage.
+ // First check if the result is already in the output directory.
+ if (FileExists(Form("%s/%s",aliendir,filename))) {
+ printf("Final merged results found. Not merging again.\n");
return kFALSE;
}
- sfilename = filename;
- sfilename.ReplaceAll(".root", "*.root");
- printf("Checking directory <%s> for merged files <%s> ...\n", aliendir, sfilename.Data());
- TString command = Form("find %s/ *%s", saliendir.Data(), sfilename.Data());
- TGridResult *res = gGrid->Command(command);
- if (!res) {
- ::Error("GetNregisteredFiles","Error: No result for the find command\n");
+ // Now check the last stage done.
+ Int_t stage = 0;
+ while (1) {
+ if (!FileExists(Form("%s/Stage_%d.xml",aliendir, stage+1))) break;
+ stage++;
+ }
+ // Next stage of merging
+ stage++;
+ TString pattern = "*root_archive.zip";
+ if (stage>1) pattern = Form("Stage_%d/*root_archive.zip", stage-1);
+ TGridResult *res = gGrid->Command(Form("find -x Stage_%d %s %s", stage, aliendir, pattern.Data()));
+ if (res) delete res;
+ // Write standard output to file
+ gROOT->ProcessLine(Form("gGrid->Stdout(); > %s", Form("Stage_%d.xml",stage)));
+ // Count the number of files inside
+ ifstream ifile;
+ ifile.open(Form("Stage_%d.xml",stage));
+ if (!ifile.good()) {
+ ::Error("CheckMergedFiles", "Could not redirect result of the find command to file %s", Form("Stage_%d.xml",stage));
return kFALSE;
- }
- TIter nextmap(res);
- TMap *map = 0;
- while ((map=(TMap*)nextmap())) {
- TString turl = map->GetValue("turl")->GetName();
- if (!turl.Length()) {
- // Nothing found
- delete res;
- return kFALSE;
- }
- turl.ReplaceAll("alien://", "");
- turl.ReplaceAll(saliendir, "");
- sfilename = gSystem->BaseName(turl);
- turl = turl.Strip(TString::kLeading, '/');
- // Now check to what the file corresponds to:
- // original output - aliendir/%03d/filename
- // merged file (which stage) - aliendir/filename-Stage%02d_%04d
- // final merged file - aliendir/filename
- if (sfilename == turl) {
- if (sfilename == filename) {
- doneFinal = kTRUE;
- } else {
- // check stage
- Int_t index = sfilename.Index("Stage");
- if (index<0) continue;
- stmp = sfilename(index+5,2);
- Int_t istage = atoi(stmp);
- stmp = sfilename(index+8,4);
- Int_t ijob = atoi(stmp);
- if (istage<stage) continue; // Ignore lower stages
- if (istage>stage) {
- countStage = 0;
- chunksDone.ResetAllBits();
- stage = istage;
- }
- countStage++;
- chunksDone.SetBitNumber(ijob);
- }
- } else {
- countOrig++;
- }
- if (doneFinal) {
- delete res;
- printf("=> Removing files from previous stages...\n");
- gGrid->Rm(Form("%s/*Stage*.root", aliendir));
- for (i=1; i<stage; i++)
- gGrid->Rm(Form("%s/*Stage%d*.zip", aliendir, i));
- return kTRUE;
- }
+ }
+ TString line;
+ Int_t nfiles = 0;
+ while (!ifile.eof()) {
+ ifile >> line;
+ if (line.Contains("/event")) nfiles++;
}
- delete res;
- // Compute number of jobs that were submitted for the current stage
- Int_t ntotstage = countOrig;
- for (i=1; i<=stage; i++) {
- if (ntotstage%nperchunk) ntotstage = (ntotstage/nperchunk)+1;
- else ntotstage = (ntotstage/nperchunk);
+ ifile.close();
+ if (!nfiles) {
+ ::Error("CheckMergedFiles", "Cannot start Stage_%d merging since Stage_%d has not yet produced output", stage, stage-1);
+ return kFALSE;
+ } else {
+ printf("=== Stage_%d produced %d files\n", stage-1, nfiles);
}
- // Now compare with the number of set bits in the chunksDone array
- Int_t nmissing = (stage>0)?(ntotstage - countStage):0;
- // Print the info
- printf("*** Found %d original files\n", countOrig);
- if (stage==0) printf("*** No merging completed so far.\n");
- else printf("*** Found %d out of %d files merged for stage %d\n", countStage, ntotstage, stage);
- if (nmissing) printf("*** Number of merged files missing for this stage: %d -> check merging job completion\n", nmissing);
- if (!submit) return doneFinal;
- // Sumbit merging jobs for all missing chunks for the current stage.
- TString query = Form("submit %s %s", jdl, aliendir);
- Int_t ichunk = -1;
- chunksDone.SetBitNumber(ntotstage); // expand the array to the maximum number of chunks
- if (nmissing) {
- for (i=0; i<nmissing; i++) {
- ichunk = chunksDone.FirstNullBit(ichunk+1);
- Int_t jobId = SubmitSingleJob(Form("%s %d %d", query.Data(), stage, ichunk));
- if (!jobId) return kFALSE;
- }
- return kTRUE;
+ // Copy the file in the output directory
+ printf("===> Copying collection %s in the output directory %s\n", Form("Stage_%d.xml",stage), aliendir);
+ TFile::Cp(Form("Stage_%d.xml",stage), Form("alien://%s/Stage_%d.xml",aliendir,stage));
+ // Check if this is the last stage to be done.
+ Bool_t laststage = (nfiles<nperchunk);
+ if (fMaxMergeStages && stage>=fMaxMergeStages) laststage = kTRUE;
+ if (laststage) {
+ printf("### Submitting final merging stage %d\n", stage);
+ TString finalJDL = jdl;
+ finalJDL.ReplaceAll(".jdl", "_final.jdl");
+ TString query = Form("submit %s %s %d 10000 Stage_%d.xml", finalJDL.Data(), aliendir, stage, stage);
+ Int_t jobId = SubmitSingleJob(query);
+ if (!jobId) return kFALSE;
+ } else {
+ printf("### Submitting merging stage %d\n", stage);
+ TString query = Form("submit %s %s %d %d wn.xml", jdl, aliendir, stage, nperchunk);
+ Int_t jobId = SubmitSingleJob(query);
+ if (!jobId) return kFALSE;
}
- // Submit next stage of merging
- if (stage==0) countStage = countOrig;
- Int_t nchunks = (countStage/nperchunk);
- if (countStage%nperchunk) nchunks += 1;
- for (i=0; i<nchunks; i++) {
- Int_t jobId = SubmitSingleJob(Form("%s %d %d", query.Data(), stage+1, i));
- if (!jobId) return kFALSE;
- }
- return kTRUE;
-}
+ return kTRUE;
+}
//______________________________________________________________________________
Int_t AliAnalysisAlien::SubmitSingleJob(const char *query)
}
//______________________________________________________________________________
-Bool_t AliAnalysisAlien::MergeOutput(const char *output, const char *basedir, Int_t nmaxmerge, Int_t stage, Int_t ichunk)
+Bool_t AliAnalysisAlien::MergeOutput(const char *output, const char *basedir, Int_t nmaxmerge, Int_t stage)
{
-// Merge given output files from basedir. The file merger will merge nmaxmerge
-// files in a group. Merging can be done in stages:
-// stage=0 : will merge all existing files in a single stage
-// stage=1 : does a find command for all files that do NOT contain the string "Stage".
-// If their number is bigger that nmaxmerge, only the files from
-// ichunk*nmaxmerge to ichunk*(nmaxmerge+1)-1 will get merged as output_stage_<ichunk>
-// stage=n : does a find command for files named <output>Stage<stage-1>_*. If their number is bigger than
-// nmaxmerge, merge just the chunk ichunk, otherwise write the merged output to the file
-// named <output>.
+// Merge given output files from basedir. Basedir can be an alien output directory
+// but also an xml file with root_archive.zip locations. The file merger will merge nmaxmerge
+// files in a group (ignored for xml input). Merging can be done in stages:
+// stage=0 : will merge all existing files in a single stage, supporting resume if run locally
+// stage=1 : works with an xml of all root_archive.zip in the output directory
+// stage>1 : works with an xml of all root_archive.zip in the Stage_<n-1> directory
TString outputFile = output;
TString command;
TString outputChunk;
TString previousChunk = "";
+ TObjArray *listoffiles = new TObjArray();
+// listoffiles->SetOwner();
Int_t countChunk = 0;
Int_t countZero = nmaxmerge;
Bool_t merged = kTRUE;
Int_t index = outputFile.Index("@");
if (index > 0) outputFile.Remove(index);
TString inputFile = outputFile;
- if (stage>1) inputFile.ReplaceAll(".root", Form("-Stage%02d_*.root", stage-1));
- command = Form("find %s/ *%s", basedir, inputFile.Data());
- printf("command: %s\n", command.Data());
- TGridResult *res = gGrid->Command(command);
- if (!res) {
+ TString sbasedir = basedir;
+ if (sbasedir.Contains(".xml")) {
+ // Merge files pointed by the xml - ignore nmaxmerge and set ichunk to 0
+ nmaxmerge = 9999999;
+ TGridCollection *coll = (TGridCollection*)gROOT->ProcessLine(Form("TAlienCollection::Open(\"%s\");", basedir));
+ if (!coll) {
+ ::Error("MergeOutput", "Input XML collection empty.");
+ return kFALSE;
+ }
+ // Iterate grid collection
+ while (coll->Next()) {
+ TString fname = gSystem->DirName(coll->GetTURL());
+ fname += "/";
+ fname += inputFile;
+ listoffiles->Add(new TNamed(fname.Data(),""));
+ }
+ } else {
+ command = Form("find %s/ *%s", basedir, inputFile.Data());
+ printf("command: %s\n", command.Data());
+ TGridResult *res = gGrid->Command(command);
+ if (!res) {
+ ::Error("MergeOutput","No result for the find command\n");
+ delete listoffiles;
+ return kFALSE;
+ }
+ TIter nextmap(res);
+ TMap *map = 0;
+ while ((map=(TMap*)nextmap())) {
+ TObjString *objs = dynamic_cast<TObjString*>(map->GetValue("turl"));
+ if (!objs || !objs->GetString().Length()) {
+ // Nothing found - skip this output
+ delete res;
+ delete listoffiles;
+ return kFALSE;
+ }
+ listoffiles->Add(new TNamed(objs->GetName(),""));
+ }
+ delete res;
+ }
+ if (!listoffiles->GetEntries()) {
::Error("MergeOutput","No result for the find command\n");
+ delete listoffiles;
return kFALSE;
}
TFileMerger *fm = 0;
- TIter nextmap(res);
- TMap *map = 0;
+ TIter next0(listoffiles);
+ TObjArray *listoffilestmp = new TObjArray();
+ listoffilestmp->SetOwner();
+ TObject *nextfile;
+ TString snextfile;
+ // Keep only the files at upper level
+ Int_t countChar = 0;
+ while ((nextfile=next0())) {
+ snextfile = nextfile->GetName();
+ Int_t crtCount = snextfile.CountChar('/');
+ if (nextfile == listoffiles->First()) countChar = crtCount;
+ if (crtCount < countChar) countChar = crtCount;
+ }
+ next0.Reset();
+ while ((nextfile=next0())) {
+ snextfile = nextfile->GetName();
+ Int_t crtCount = snextfile.CountChar('/');
+ if (crtCount > countChar) {
+ delete nextfile;
+ continue;
+ }
+ listoffilestmp->Add(nextfile);
+ }
+ delete listoffiles;
+ listoffiles = listoffilestmp; // Now contains 'good' files
+ listoffiles->Print();
+ TIter next(listoffiles);
// Check if there is a merge operation to resume. Works only for stage 0 or 1.
outputChunk = outputFile;
outputChunk.ReplaceAll(".root", "_*.root");
// Check overwrite mode and remove previous partial results if needed
// Preserve old merging functionality for stage 0.
if (stage==0) {
- Int_t countChar = 0;
if (!gSystem->Exec(Form("ls %s 2>/dev/null", outputChunk.Data()))) {
while (1) {
// Skip as many input files as in a chunk
for (Int_t counter=0; counter<nmaxmerge; counter++) {
- map = (TMap*)nextmap();
- if (!map) {
+ nextfile = next();
+ if (!nextfile) {
::Error("MergeOutput", "Mismatch found. Please remove partial merged files from local dir.");
- delete res;
+ delete listoffiles;
return kFALSE;
}
- TObjString *objs = dynamic_cast<TObjString*>(map->GetValue("turl"));
- // Count the '/' characters in the path to the current file.
- Int_t crtCount = objs->GetString().CountChar('/');
- if (!countChar) {
- countChar = crtCount;
- // Make sure we check if the same file in the parent dir exists
- if (FileExists(Form("%s/../%s", basedir, output))) countChar--;
- }
- if (crtCount > countChar) counter--;
- }
- if (!map) {
- ::Error("MergeOutput", "Cannot resume merging for <%s>, nentries=%d", outputFile.Data(), res->GetSize());
- delete res;
- return kFALSE;
+ snextfile = nextfile->GetName();
}
outputChunk = outputFile;
outputChunk.ReplaceAll(".root", Form("_%04d.root", countChunk));
}
countZero = nmaxmerge;
- while ((map=(TMap*)nextmap())) {
- TObjString *objs = dynamic_cast<TObjString*>(map->GetValue("turl"));
- if (!objs || !objs->GetString().Length()) {
- // Nothing found - skip this output
- delete res;
- delete fm;
- return kFALSE;
- }
- // Make sure this is a good file and not one from a subjob directory in case we merge runs
- // Count the '/' characters in the path to the current file.
- Int_t crtCount = objs->GetString().CountChar('/');
- if (!countChar) {
- countChar = crtCount;
- // Make sure we check if the same file in the parent dir exists
- if (FileExists(Form("%s/../%s", basedir, output))) countChar--;
- }
- if (crtCount > countChar) continue;
+ while ((nextfile=next())) {
+ snextfile = nextfile->GetName();
// Loop 'find' results and get next LFN
if (countZero == nmaxmerge) {
// First file in chunk - create file merger and add previous chunk if any.
outputChunk.ReplaceAll(".root", Form("_%04d.root", countChunk));
}
// If last file found, put merged results in the output file
- if (map == res->Last()) outputChunk = outputFile;
+ if (nextfile == listoffiles->Last()) outputChunk = outputFile;
// Add file to be merged and decrement chunk counter.
- fm->AddFile(objs->GetString());
+ fm->AddFile(snextfile);
countZero--;
- if (countZero==0 || map == res->Last()) {
+ if (countZero==0 || nextfile == listoffiles->Last()) {
if (!fm->GetMergeList() || !fm->GetMergeList()->GetSize()) {
// Nothing found - skip this output
::Warning("MergeOutput", "No <%s> files found.", inputFile.Data());
- delete res;
- delete fm;
- return kFALSE;
+ merged = kFALSE;
+ break;
}
fm->OutputFile(outputChunk);
// Merge the outputs, then go to next chunk
if (!fm->Merge()) {
::Error("MergeOutput", "Could not merge all <%s> files", outputFile.Data());
- delete res;
- delete fm;
- return kFALSE;
+ merged = kFALSE;
+ break;
} else {
::Info("MergeOutputs", "\n##### Merged %d output files to <%s>", fm->GetMergeList()->GetSize(), outputChunk.Data());
gSystem->Unlink(previousChunk);
}
- if (map == res->Last()) {
- delete res;
- delete fm;
- break;
- }
+ if (nextfile == listoffiles->Last()) break;
countChunk++;
countZero = nmaxmerge;
previousChunk = outputChunk;
}
}
+ delete listoffiles;
+ delete fm;
return merged;
}
// Merging stage different than 0.
// Move to the begining of the requested chunk.
- outputChunk = outputFile;
- if (nmaxmerge < res->GetSize()) {
- if (ichunk*nmaxmerge >= res->GetSize()) {
- ::Error("MergeOutput", "Cannot merge merge chunk %d grouping %d files from %d total.", ichunk, nmaxmerge, res->GetSize());
- delete res;
- return kFALSE;
- }
- for (Int_t counter=0; counter<ichunk*nmaxmerge; counter++) nextmap();
- outputChunk.ReplaceAll(".root", Form("-Stage%02d_%04d.root", stage, ichunk));
- }
- countZero = nmaxmerge;
fm = new TFileMerger(kFALSE);
fm->SetFastMethod(kTRUE);
- while ((map=(TMap*)nextmap())) {
- // Loop 'find' results and get next LFN
- TObjString *objs = dynamic_cast<TObjString*>(map->GetValue("turl"));
- if (!objs || !objs->GetString().Length()) {
- // Nothing found - skip this output
- delete res;
- delete fm;
- return kFALSE;
- }
- // Add file to be merged and decrement chunk counter.
- fm->AddFile(objs->GetString());
- countZero--;
- if (countZero==0) break;
- }
- delete res;
+ while ((nextfile=next())) fm->AddFile(nextfile->GetName());
+ delete listoffiles;
if (!fm->GetMergeList() || !fm->GetMergeList()->GetSize()) {
// Nothing found - skip this output
::Warning("MergeOutput", "No <%s> files found.", inputFile.Data());
delete fm;
return kFALSE;
}
- fm->OutputFile(outputChunk);
+ fm->OutputFile(outputFile);
// Merge the outputs
if (!fm->Merge()) {
::Error("MergeOutput", "Could not merge all <%s> files", outputFile.Data());
delete fm;
return kFALSE;
} else {
- ::Info("MergeOutput", "\n##### Merged %d output files to <%s>", fm->GetMergeList()->GetSize(), outputChunk.Data());
+ ::Info("MergeOutput", "\n##### Merged %d output files to <%s>", fm->GetMergeList()->GetSize(), outputFile.Data());
}
delete fm;
return kTRUE;
if (fFastReadOption) {
Warning("MergeOutputs", "You requested FastRead option. Using xrootd flags to reduce timeouts. This may skip some files that could be accessed ! \
\n+++ NOTE: To disable this option, use: plugin->SetFastReadOption(kFALSE)");
- gEnv->SetValue("XNet.ConnectTimeout",10);
- gEnv->SetValue("XNet.RequestTimeout",10);
+ gEnv->SetValue("XNet.ConnectTimeout",50);
+ gEnv->SetValue("XNet.RequestTimeout",50);
gEnv->SetValue("XNet.MaxRedirectCount",2);
- gEnv->SetValue("XNet.ReconnectTimeout",10);
+ gEnv->SetValue("XNet.ReconnectTimeout",50);
gEnv->SetValue("XNet.FirstConnectMaxCnt",1);
}
// Make sure we change the temporary directory
if (!fMergeExcludes.Contains(outputFile)) break;
}
delete list;
- Bool_t done = CheckMergedFiles(outputFile, runOutDir, fMaxMergeFiles, kTRUE, mergeJDLName);
+ Bool_t done = CheckMergedFiles(outputFile, runOutDir, fMaxMergeFiles, mergeJDLName);
if (!done) return kFALSE;
}
if (!ntosubmit) return kTRUE;
// Change temp directory to current one
out << "// Set temporary merging directory to current one" << endl;
out << " gSystem->Setenv(\"TMPDIR\", gSystem->pwd());" << endl << endl;
+ // Reset existing include path
+ out << "// Reset existing include path and add current directory first in the search" << endl;
+ out << " gSystem->SetIncludePath(\"-I.\");" << endl;
if (!fExecutableCommand.Contains("aliroot")) {
out << "// load base root libraries" << endl;
out << " gSystem->Load(\"libTree\");" << endl;
}
if (list) delete list;
}
- out << "// include path" << endl;
- if (fIncludePath.Length()) out << " gSystem->AddIncludePath(\"" << fIncludePath.Data() << "\");" << endl;
- out << " gSystem->AddIncludePath(\"-I$ALICE_ROOT/include\");" << endl << endl;
out << "// Load analysis framework libraries" << endl;
TString setupPar = "AliAnalysisAlien::SetupPar";
if (!fPackages) {
out << " if (!" << setupPar << "(\"" << obj->GetName() << "\")) return;" << endl;
}
}
+ out << "// include path" << endl;
+ // Get the include path from the interpreter and remove entries pointing to AliRoot
+ out << " TString intPath = gInterpreter->GetIncludePath();" << endl;
+ out << " TObjArray *listpaths = intPath.Tokenize(\" \");" << endl;
+ out << " TIter nextpath(listpaths);" << endl;
+ out << " TObjString *pname;" << endl;
+ out << " while ((pname=(TObjString*)nextpath())) {" << endl;
+ out << " TString current = pname->GetName();" << endl;
+ out << " if (current.Contains(\"AliRoot\") || current.Contains(\"ALICE_ROOT\")) continue;" << endl;
+ out << " gSystem->AddIncludePath(current);" << endl;
+ out << " }" << endl;
+ out << " if (listpaths) delete listpaths;" << endl;
+ if (fIncludePath.Length()) out << " gSystem->AddIncludePath(\"" << fIncludePath.Data() << "\");" << endl;
+ out << " gROOT->ProcessLine(\".include $ALICE_ROOT/include\");" << endl;
+ out << " printf(\"Include path: %s\\n\", gSystem->GetIncludePath());" << endl << endl;
if (fAdditionalLibs.Length()) {
out << "// Add aditional AliRoot libraries" << endl;
TObjArray *list = fAdditionalLibs.Tokenize(" ");
if (list) delete list;
}
out << endl;
+// out << " printf(\"Currently load libraries:\\n\");" << endl;
+// out << " printf(\"%s\\n\", gSystem->GetLibraries());" << endl;
if (fFastReadOption) {
Warning("WriteAnalysisMacro", "!!! You requested FastRead option. Using xrootd flags to reduce timeouts in the grid jobs. This may skip some files that could be accessed !!! \
\n+++ NOTE: To disable this option, use: plugin->SetFastReadOption(kFALSE)");
out << "// fast xrootd reading enabled" << endl;
out << " printf(\"!!! You requested FastRead option. Using xrootd flags to reduce timeouts. Note that this may skip some files that could be accessed !!!\");" << endl;
- out << " gEnv->SetValue(\"XNet.ConnectTimeout\",10);" << endl;
- out << " gEnv->SetValue(\"XNet.RequestTimeout\",10);" << endl;
+ out << " gEnv->SetValue(\"XNet.ConnectTimeout\",50);" << endl;
+ out << " gEnv->SetValue(\"XNet.RequestTimeout\",50);" << endl;
out << " gEnv->SetValue(\"XNet.MaxRedirectCount\",2);" << endl;
- out << " gEnv->SetValue(\"XNet.ReconnectTimeout\",10);" << endl;
+ out << " gEnv->SetValue(\"XNet.ReconnectTimeout\",50);" << endl;
out << " gEnv->SetValue(\"XNet.FirstConnectMaxCnt\",1);" << endl << endl;
}
out << "// connect to AliEn and make the chain" << endl;
TString func = mergingMacro;
TString comment;
func.ReplaceAll(".C", "");
- out << "void " << func.Data() << "(const char *dir, Int_t stage=0, Int_t ichunk=0)" << endl;
+ out << "void " << func.Data() << "(const char *dir, Int_t stage=0, Int_t laststage=0)" << endl;
out << "{" << endl;
out << "// Automatically generated merging macro executed in grid subjobs" << endl << endl;
out << " TStopwatch timer;" << endl;
out << " timer.Start();" << endl << endl;
+ // Reset existing include path
+ out << "// Reset existing include path and add current directory first in the search" << endl;
+ out << " gSystem->SetIncludePath(\"-I.\");" << endl;
if (!fExecutableCommand.Contains("aliroot")) {
out << "// load base root libraries" << endl;
out << " gSystem->Load(\"libTree\");" << endl;
}
if (list) delete list;
}
- out << "// include path" << endl;
- if (fIncludePath.Length()) out << " gSystem->AddIncludePath(\"" << fIncludePath.Data() << "\");" << endl;
- out << " gSystem->AddIncludePath(\"-I$ALICE_ROOT/include\");" << endl << endl;
out << "// Load analysis framework libraries" << endl;
if (!fPackages) {
if (!fExecutableCommand.Contains("aliroot")) {
out << " if (!" << setupPar << "(\"" << obj->GetName() << "\")) return;" << endl;
}
}
+ out << "// include path" << endl;
+ // Get the include path from the interpreter and remove entries pointing to AliRoot
+ out << " TString intPath = gInterpreter->GetIncludePath();" << endl;
+ out << " TObjArray *listpaths = intPath.Tokenize(\" \");" << endl;
+ out << " TIter nextpath(listpaths);" << endl;
+ out << " TObjString *pname;" << endl;
+ out << " while ((pname=(TObjString*)nextpath())) {" << endl;
+ out << " TString current = pname->GetName();" << endl;
+ out << " if (current.Contains(\"AliRoot\") || current.Contains(\"ALICE_ROOT\")) continue;" << endl;
+ out << " gSystem->AddIncludePath(current);" << endl;
+ out << " }" << endl;
+ out << " if (listpaths) delete listpaths;" << endl;
+ if (fIncludePath.Length()) out << " gSystem->AddIncludePath(\"" << fIncludePath.Data() << "\");" << endl;
+ out << " gROOT->ProcessLine(\".include $ALICE_ROOT/include\");" << endl;
+ out << " printf(\"Include path: %s\\n\", gSystem->GetIncludePath());" << endl << endl;
if (fAdditionalLibs.Length()) {
out << "// Add aditional AliRoot libraries" << endl;
TObjArray *list = fAdditionalLibs.Tokenize(" ");
Warning("WriteMergingMacro", "!!! You requested FastRead option. Using xrootd flags to reduce timeouts in the grid merging jobs. Note that this may skip some files that could be accessed !!!");
out << "// fast xrootd reading enabled" << endl;
out << " printf(\"!!! You requested FastRead option. Using xrootd flags to reduce timeouts. Note that this may skip some files that could be accessed !!!\");" << endl;
- out << " gEnv->SetValue(\"XNet.ConnectTimeout\",10);" << endl;
- out << " gEnv->SetValue(\"XNet.RequestTimeout\",10);" << endl;
+ out << " gEnv->SetValue(\"XNet.ConnectTimeout\",50);" << endl;
+ out << " gEnv->SetValue(\"XNet.RequestTimeout\",50);" << endl;
out << " gEnv->SetValue(\"XNet.MaxRedirectCount\",2);" << endl;
- out << " gEnv->SetValue(\"XNet.ReconnectTimeout\",10);" << endl;
+ out << " gEnv->SetValue(\"XNet.ReconnectTimeout\",50);" << endl;
out << " gEnv->SetValue(\"XNet.FirstConnectMaxCnt\",1);" << endl << endl;
}
// Change temp directory to current one
out << " gSystem->Setenv(\"TMPDIR\", gSystem->pwd());" << endl << endl;
out << "// Connect to AliEn" << endl;
out << " if (!TGrid::Connect(\"alien://\")) return;" << endl;
- out << " Bool_t laststage = kFALSE;" << endl;
out << " TString outputDir = dir;" << endl;
out << " TString outputFiles = \"" << GetListOfFiles("out") << "\";" << endl;
out << " TString mergeExcludes = \"" << fMergeExcludes << "\";" << endl;
out << " continue;" << endl;
out << " }" << endl;
out << " if (mergeExcludes.Contains(outputFile.Data())) continue;" << endl;
- out << " merged = AliAnalysisAlien::MergeOutput(outputFile, outputDir, " << fMaxMergeFiles << ", stage, ichunk);" << endl;
+ out << " merged = AliAnalysisAlien::MergeOutput(outputFile, outputDir, " << fMaxMergeFiles << ", stage);" << endl;
out << " if (!merged) {" << endl;
out << " printf(\"ERROR: Cannot merge %s\\n\", outputFile.Data());" << endl;
out << " return;" << endl;
out << " }" << endl;
- out << " // Check if this was the last stage. If yes, run terminate for the tasks." << endl;
- out << " if (!gSystem->AccessPathName(outputFile)) laststage = kTRUE;" << endl;
out << " }" << endl;
out << " // all outputs merged, validate" << endl;
out << " ofstream out;" << endl;
out << " // read the analysis manager from file" << endl;
TString analysisFile = fExecutable;
analysisFile.ReplaceAll(".sh", ".root");
- out << " if (!laststage) return;" << endl;
+ out << " if (laststage<10000) return;" << endl;
out << " TFile *file = TFile::Open(\"" << analysisFile << "\");" << endl;
out << " if (!file) return;" << endl;
out << " TIter nextkey(file->GetListOfKeys());" << endl;
--- /dev/null
+/**************************************************************************
+ * Copyright(c) 1998-1999, ALICE Experiment at CERN, All rights reserved. *
+ * *
+ * Author: The ALICE Off-line Project. *
+ * Contributors are mentioned in the code where appropriate. *
+ * *
+ * Permission to use, copy, modify and distribute this software and its *
+ * documentation strictly for non-commercial purposes is hereby granted *
+ * without fee, provided that the above copyright notice appears in all *
+ * copies and that both the copyright notice and this permission notice *
+ * appear in the supporting documentation. The authors make no claims *
+ * about the suitability of this software for any purpose. It is *
+ * provided "as is" without express or implied warranty. *
+ **************************************************************************/
+
+/* $Id$ */
+
+/* AliAnalysisTaskEx01.cxx
+ *
+ * Template task producing a P_t spectrum and pseudorapidity distribution.
+ * Includes explanations of physics and primary track selections
+ *
+ * Instructions for adding histograms can be found below, starting with NEW HISTO
+ *
+ * Based on tutorial example from offline pages
+ * Edited by Arvinder Palaha
+ */
+#include "AliAnalysisTaskEx01.h"
+
+#include "Riostream.h"
+#include "TChain.h"
+#include "TTree.h"
+#include "TH1F.h"
+#include "TH2F.h"
+#include "TCanvas.h"
+#include "TList.h"
+
+#include "AliAnalysisTaskSE.h"
+#include "AliAnalysisManager.h"
+#include "AliStack.h"
+#include "AliESDtrackCuts.h"
+#include "AliESDEvent.h"
+#include "AliESDInputHandler.h"
+#include "AliAODEvent.h"
+#include "AliMCEvent.h"
+
+ClassImp(AliAnalysisTaskEx01)
+
+//________________________________________________________________________
+AliAnalysisTaskEx01::AliAnalysisTaskEx01() // All data members should be initialised here
+ :AliAnalysisTaskSE(),
+ fOutput(0),
+ fTrackCuts(0),
+ fHistPt(0),
+ fHistEta(0) // The last in the above list should not have a comma after it
+{
+ // Dummy constructor ALWAYS needed for I/O.
+ // ROOT streaming re-creates the object through this default constructor,
+ // so it must only zero the members: no allocation and no DefineOutput here
+ // (slots are declared only in the named constructor below).
+}
+
+//________________________________________________________________________
+AliAnalysisTaskEx01::AliAnalysisTaskEx01(const char *name) // All data members should be initialised here
+ :AliAnalysisTaskSE(name),
+ fOutput(0),
+ fTrackCuts(0),
+ fHistPt(0),
+ fHistEta(0) // The last in the above list should not have a comma after it
+{
+ // Constructor used by the analysis macro; 'name' identifies the task in the manager.
+ // Define input and output slots here (never in the dummy constructor)
+ // Input slot #0 works with a TChain - it is connected to the default input container
+ // Output slot #1 writes into a TH1 container
+ DefineOutput(1, TList::Class()); // for output list
+}
+
+//________________________________________________________________________
+AliAnalysisTaskEx01::~AliAnalysisTaskEx01()
+{
+ // Destructor. Clean-up the output list, but not the histograms that are put inside
+ // (the list is owner and will clean-up these histograms). Protect in PROOF case,
+ // where the framework owns the output objects.
+ if (fOutput && !AliAnalysisManager::GetAnalysisManager()->IsProofMode()) {
+ delete fOutput;
+ }
+ // 'delete' on a null pointer is a well-defined no-op, so no null check is needed.
+ delete fTrackCuts;
+}
+
+//________________________________________________________________________
+void AliAnalysisTaskEx01::UserCreateOutputObjects()
+{
+ // Create histograms
+ // Called once (on the worker node)
+
+ fOutput = new TList();
+ fOutput->SetOwner(); // IMPORTANT! The list deletes its histograms in the destructor.
+
+ // kTRUE selects the primary-track variant of the standard 2010 ITS+TPC cuts.
+ fTrackCuts = AliESDtrackCuts::GetStandardITSTPCTrackCuts2010(kTRUE);
+ // === Primary Track Selection ===
+ //
+ // The definition of a primary track is taken from the ALICE Twiki
+ // page https://twiki.cern.ch/twiki/bin/view/ALICE/SelectionOfPrimaryTracksForPpDataAnalysis
+ // using the following parameters for a standard dN/dPt analysis:
+ // track quality cuts:
+ // esdTrackCuts->SetMinNClustersTPC(70);
+ // esdTrackCuts->SetMaxChi2PerClusterTPC(4);
+ // esdTrackCuts->SetAcceptKinkDaughters(kFALSE);
+ // esdTrackCuts->SetRequireTPCRefit(kTRUE);
+ // esdTrackCuts->SetRequireITSRefit(kTRUE);
+ // esdTrackCuts->SetClusterRequirementITS(AliESDtrackCuts::kSPD,
+ // AliESDtrackCuts::kAny);
+ // dca:
+ // if(selPrimaries) {
+ // // 7*(0.0026+0.0050/pt^1.01)
+ // esdTrackCuts->SetMaxDCAToVertexXYPtDep("0.0182+0.0350/pt^1.01");
+ // }
+ // esdTrackCuts->SetMaxDCAToVertexZ(2);
+ // esdTrackCuts->SetDCAToVertex2D(kFALSE);
+ // esdTrackCuts->SetRequireSigmaToVertex(kFALSE);
+ //
+ // The Primary Track Selection is implemented here by creating an
+ // AliESDtrackCuts object, with kTRUE argument to choose primary tracks.
+ //
+ // By default, it is set to the above conditions which are suitable for
+ // a standard inclusive dN/dPt analysis. For others, such as identified
+ // dN/dPt or strangeness as well as others, follow the above link for
+ // the specific changes to include in the selection.
+
+ // To change cuts after selecting some default set, one can use
+ // esdtrackcuts->SetMinNClustersTPC(70) for example
+
+ // Create histograms
+ Int_t ptbins = 15;
+ Float_t ptlow = 0.1, ptup = 3.1;
+ fHistPt = new TH1F("fHistPt", "P_{T} distribution for reconstructed", ptbins, ptlow, ptup);
+ fHistPt->GetXaxis()->SetTitle("P_{T} (GeV/c)");
+ fHistPt->GetYaxis()->SetTitle("dN/dP_{T} (c/GeV)");
+ fHistPt->SetMarkerStyle(kFullCircle);
+
+ Int_t etabins = 40;
+ Float_t etalow = -2.0, etaup = 2.0;
+ fHistEta = new TH1F("fHistEta","#eta distribution for reconstructed",etabins, etalow, etaup);
+ fHistEta->GetXaxis()->SetTitle("#eta");
+ fHistEta->GetYaxis()->SetTitle("counts");
+
+ // NEW HISTO should be defined here, with a sensible name,
+
+ // Histograms are owned by fOutput (SetOwner above), not deleted manually.
+ fOutput->Add(fHistPt);
+ fOutput->Add(fHistEta);
+ // NEW HISTO added to fOutput here
+ PostData(1, fOutput); // Post data for ALL output slots >0 here, to get at least an empty histogram
+}
+
+//________________________________________________________________________
+void AliAnalysisTaskEx01::UserExec(Option_t *)
+{
+ // Main loop
+ // Called for each event. Fills fHistPt/fHistEta with accepted primary tracks.
+
+ // Create pointer to reconstructed event
+ AliVEvent *event = InputEvent();
+ if (!event) { Printf("ERROR: Could not retrieve event"); return; }
+
+ // If the task accesses MC info, this can be done as in the commented block below:
+ /*
+ // Create pointer to reconstructed event
+ AliMCEvent *mcEvent = MCEvent();
+ if (!mcEvent) { Printf("ERROR: Could not retrieve MC event"); return; }
+ Printf("MC particles: %d", mcEvent->GetNumberOfTracks());
+
+ // set up a stack for use in check for primary/stable particles
+ AliStack* stack = mcEvent->Stack();
+ if( !stack ) { Printf( "Stack not available"); return; }
+ */
+
+ // create pointer to event
+ AliESDEvent* esd = dynamic_cast<AliESDEvent*>(event);
+ if (!esd) {
+ AliError("Cannot get the ESD event");
+ return;
+ }
+// AliESDHeader* esdheader = (AliESDHeader*)esd->GetHeader();
+
+ // === Physics Selection Task ===
+ //
+ // To perform a physics selection here, a bitwise operation is used against
+ // the UInt_t mask which is extracted in the following way:
+ //
+ // UInt_t mask = ((AliInputEventHandler*)(AliAnalysisManager::GetAnalysisManager()->GetInputEventHandler()))->IsEventSelected();
+ //
+ // This can be tested to produce the following
+ //
+ // Bool_t bMinBias = (mask == AliVEvent::kMB) ? 1 : 0; // check if minimum bias trigger class fired
+ // Bool_t bHighMult = (mask == AliVEvent::kHighMult) ? 1 : 0; // check if high multiplicity trigger class fired
+ //
+ // For more complicated trigger selections, one can directly test both
+ // trigger classes and fired trigger inputs for a particular event, for e.g.
+ //
+ // Bool_t bCSH1 = (esd->IsTriggerClassFired("CSH1-B-NOPF-ALLNOTRD")) ? 1 : 0;
+ // Bool_t b0SH1 = (esdheader->IsTriggerInputFired("0SH1")) ? 1 : 0;
+ //
+ // These booleans can then be used to fill different histograms for specific
+ // conditions, or summed to make one cut for all events that fill histos.
+
+ // === Event-level vertex quality cuts ===
+ // These depend only on the event, not on the individual track, so they are
+ // evaluated once per event here instead of being re-checked on every
+ // iteration of the track loop (which repeated the same calls ntracks times).
+ // check for good reconstructed vertex
+ Bool_t goodVertex = esd->GetPrimaryVertex()->GetStatus();
+ // if vertex is from spd vertexZ, require more stringent cut
+ if (goodVertex && esd->GetPrimaryVertex()->IsFromVertexerZ()) {
+ if (esd->GetPrimaryVertex()->GetDispersion()>0.02 || esd->GetPrimaryVertex()->GetZRes()>0.25 ) goodVertex = kFALSE; // bad vertex from VertexerZ
+ }
+
+ // Track loop for reconstructed event. Skipped entirely for a bad vertex,
+ // which is equivalent to the per-track 'continue' of the original code.
+ if (goodVertex) {
+ Int_t ntracks = esd->GetNumberOfTracks();
+ for(Int_t i = 0; i < ntracks; i++) {
+ AliESDtrack* esdtrack = esd->GetTrack(i); // pointer to reconstructed to track
+ if(!esdtrack) {
+ AliError(Form("ERROR: Could not retrieve esdtrack %d",i));
+ continue;
+ }
+
+ // Some MC checks, if MC is used
+ //if(esdtrack->GetLabel() < 0) continue; // get rid of "ghost" tracks
+
+ // ... and the thorough checking of ESD cuts after.
+ // if this is not a primary track, skip to the next one
+ if(!fTrackCuts->AcceptTrack(esdtrack)) continue;
+
+ fHistPt->Fill(esdtrack->Pt());
+ fHistEta->Fill(esdtrack->Eta());
+ }
+ }
+ // NEW HISTO should be filled before this point, as PostData puts the
+ // information for this iteration of the UserExec in the container
+ PostData(1, fOutput);
+}
+
+
+//________________________________________________________________________
+void AliAnalysisTaskEx01::Terminate(Option_t *)
+{
+ // Draw result to screen, or perform fitting, normalizations
+ // Called once at the end of the query.
+ // In PROOF/grid mode the member pointers are stale here, so everything is
+ // re-fetched from the output container before use.
+
+ fOutput = dynamic_cast<TList*> (GetOutputData(1));
+ if(!fOutput) { Printf("ERROR: could not retrieve TList fOutput"); return; }
+
+ fHistPt = dynamic_cast<TH1F*> (fOutput->FindObject("fHistPt"));
+ if (!fHistPt) { Printf("ERROR: could not retrieve fHistPt"); return;}
+ fHistEta = dynamic_cast<TH1F*> (fOutput->FindObject("fHistEta"));
+ if (!fHistEta) { Printf("ERROR: could not retrieve fHistEta"); return;}
+
+ // Get the physics selection histograms with the selection statistics
+ //AliAnalysisManager *mgr = AliAnalysisManager::GetAnalysisManager();
+ //AliESDInputHandler *inputH = dynamic_cast<AliESDInputHandler*>(mgr->GetInputEventHandler());
+ //TH2F *histStat = (TH2F*)inputH->GetStatistics();
+
+
+ // NEW HISTO should be retrieved from the TList container in the above way,
+ // so it is available to draw on a canvas such as below
+
+ // DrawCopy (not Draw) so the canvas owns its own copies of the histograms.
+ TCanvas *c = new TCanvas("AliAnalysisTaskEx01","P_{T} & #eta",10,10,1020,510);
+ c->Divide(2,1);
+ c->cd(1)->SetLogy();
+ fHistPt->DrawCopy("E");
+ c->cd(2);
+ fHistEta->DrawCopy("E");
+}
--- /dev/null
+// run.C
+//
+// Template run macro for AliBasicTask.cxx/.h with example layout of
+// physics selections and options, in macro and task.
+//
+// Author: Arvinder Palaha
+//
+class AliAnalysisGrid;
+
+//______________________________________________________________________________
+void run(
+ const char* runtype = "proof", // local, proof or grid
+ const char *gridmode = "full", // Set the run mode (can be "full", "test", "offline", "submit" or "terminate"). Full & Test work for proof
+ const bool bMCtruth = 0, // 1 = MCEvent handler is on (MC truth), 0 = MCEvent handler is off (MC reconstructed/real data)
+ const bool bMCphyssel = 0, // 1 = looking at MC truth or reconstructed, 0 = looking at real data
+ const Long64_t nentries = 2000, // for local and proof mode, ignored in grid mode. Set to 1234567890 for all events.
+ const Long64_t firstentry = 0, // for local and proof mode, ignored in grid mode
+ const char *proofdataset = "/alice/data/LHC10c_000120821_p1", // path to dataset on proof cluster, for proof analysis
+ const char *proofcluster = "alice-caf.cern.ch", // which proof cluster to use in proof mode
+ const char *taskname = "example_task" // sets name of grid generated macros
+ )
+{
+ // check run type
+ // NOTE: runtype is a const char*, so 'runtype != "local"' would compare
+ // pointer addresses rather than string contents. Wrap it in a TString so
+ // the comparison tests the actual characters.
+ const TString rtype(runtype);
+ if(rtype != "local" && rtype != "proof" && rtype != "grid"){
+ Printf("\n\tIncorrect run option, check first argument of run macro");
+ Printf("\tint runtype = local, proof or grid\n");
+ return;
+ }
+ Printf("%s analysis chosen",runtype);
+
+ // load libraries
+ gSystem->Load("libCore.so");
+ gSystem->Load("libGeom.so");
+ gSystem->Load("libVMC.so");
+ gSystem->Load("libPhysics.so");
+ gSystem->Load("libTree.so");
+ gSystem->Load("libSTEERBase.so");
+ gSystem->Load("libESD.so");
+ gSystem->Load("libAOD.so");
+ gSystem->Load("libANALYSIS.so");
+ gSystem->Load("libANALYSISalice.so");
+
+ // add aliroot include path
+ gROOT->ProcessLine(Form(".include %s/include",gSystem->ExpandPathName("$ALICE_ROOT")));
+ gROOT->SetStyle("Plain");
+
+ // analysis manager
+ AliAnalysisManager* mgr = new AliAnalysisManager(taskname);
+
+ // create the alien handler and attach it to the manager
+ AliAnalysisGrid *plugin = CreateAlienHandler(taskname, gridmode, proofcluster, proofdataset);
+ mgr->SetGridHandler(plugin);
+
+ AliVEventHandler* esdH = new AliESDInputHandler();
+ mgr->SetInputEventHandler(esdH);
+
+ // mc event handler
+ if(bMCtruth) {
+ AliMCEventHandler* mchandler = new AliMCEventHandler();
+ // Not reading track references
+ mchandler->SetReadTR(kFALSE);
+ mgr->SetMCtruthEventHandler(mchandler);
+ }
+
+ // === Physics Selection Task ===
+ //
+ // In SelectCollisionCandidate(), default is kMB, so the task UserExec()
+ // function is only called for these events.
+ // Options are:
+ // kMB Minimum Bias trigger
+ // kMBNoTRD Minimum bias trigger where the TRD is not read out
+ // kMUON Muon trigger
+ // kHighMult High-Multiplicity Trigger
+ // kUserDefined For manually defined trigger selection
+ //
+ // Multiple options possible with the standard AND/OR operators && and ||
+ // These all have the usual offline SPD or V0 selections performed.
+ //
+ // With a pointer to the physics selection object using physSelTask->GetPhysicsSelection(),
+ // one can manually set the selected and background classes using:
+ // AddCollisionTriggerClass("+CINT1B-ABCE-NOPF-ALL")
+ // AddBGTriggerClass("+CINT1A-ABCE-NOPF-ALL");
+ //
+ // One can also specify multiple classes at once, or require a class to NOT
+ // trigger, for e.g.
+ // AddBGTriggerClass("+CSMBA-ABCE-NOPF-ALL -CSMBB-ABCE-NOPF-ALL");
+ //
+ // NOTE that manually setting the physics selection overrides the standard
+ // selection, so it must be done in completeness.
+ //
+ // ALTERNATIVELY, one can make the physics selection inside the task
+ // UserExec().
+ // For this case, comment out the task->SelectCol.... line,
+ // and see AliBasicTask.cxx UserExec() function for details on this.
+
+ gROOT->LoadMacro("$ALICE_ROOT/ANALYSIS/macros/AddTaskPhysicsSelection.C");
+ AliPhysicsSelectionTask *physSelTask = AddTaskPhysicsSelection(bMCphyssel);
+ if(!physSelTask) { Printf("no physSelTask"); return; }
+ //AliPhysicsSelection *physSel = physSelTask->GetPhysicsSelection();
+ //physSel->AddCollisionTriggerClass("+CINT1B-ABCE-NOPF-ALL");// #3119 #769");
+
+ // create task
+ gROOT->LoadMacro("AliAnalysisTaskEx01.cxx++g");
+ AliAnalysisTaskSE* task = new AliAnalysisTaskEx01(taskname);
+ task->SelectCollisionCandidates(AliVEvent::kMB); // if physics selection performed in UserExec(), this line should be commented
+ mgr->AddTask(task);
+
+ // set output root file name for different analysis
+ TString outfilename = Form("list.%s.root",runtype);
+
+ // create containers for input/output
+ AliAnalysisDataContainer *cinput = mgr->GetCommonInputContainer();
+ AliAnalysisDataContainer *coutput1 = mgr->CreateContainer("coutput1", TList::Class(), AliAnalysisManager::kOutputContainer, outfilename);
+
+ // connect input/output
+ mgr->ConnectInput(task, 0, cinput);
+ mgr->ConnectOutput(task, 1, coutput1);
+
+ // enable debug printouts
+ mgr->SetDebugLevel(2);
+ if (!mgr->InitAnalysis()) return;
+ mgr->PrintStatus();
+
+ // start analysis
+ Printf("Starting Analysis....");
+ mgr->StartAnalysis(runtype,nentries,firstentry);
+}
+
+//______________________________________________________________________________
+AliAnalysisGrid* CreateAlienHandler(const char *taskname, const char *gridmode, const char *proofcluster, const char *proofdataset)
+{
+ // Configure and return the AliEn grid/PROOF plugin for this analysis.
+ // taskname names the generated macros/JDL/executable; gridmode selects the
+ // plugin run mode; proofcluster/proofdataset are used only in proof mode.
+ // The caller (run macro) takes ownership of the returned plugin.
+ AliAnalysisAlien *plugin = new AliAnalysisAlien();
+ // Set the run mode (can be "full", "test", "offline", "submit" or "terminate")
+ plugin->SetRunMode(gridmode);
+
+ // Set versions of used packages
+ plugin->SetAPIVersion("V1.1x");
+ plugin->SetROOTVersion("v5-27-06b");
+ plugin->SetAliROOTVersion("v4-21-08-AN");
+
+ // Declare input data to be processed.
+
+ // Method 1: Create automatically XML collections using alien 'find' command.
+ // Define production directory LFN
+ plugin->SetGridDataDir("/alice/data/2010/LHC10b");
+ // On real reconstructed data:
+ // plugin->SetGridDataDir("/alice/data/2009/LHC09d");
+ // Set data search pattern
+ //plugin->SetDataPattern("*ESDs.root"); // THIS CHOOSES ALL PASSES
+ // Data pattern for reconstructed data
+ plugin->SetDataPattern("*ESDs/pass2/*ESDs.root"); // CHECK LATEST PASS OF DATA SET IN ALIENSH
+ plugin->SetRunPrefix("000"); // real data
+ // ...then add run numbers to be considered
+ plugin->AddRunNumber(115514);
+ //plugin->SetRunRange(114917,115322);
+ plugin->SetNrunsPerMaster(1);
+ plugin->SetOutputToRunNo();
+ // comment out the next line when using the "terminate" option, unless
+ // you want separate merged files for each run
+ plugin->SetMergeViaJDL();
+
+ // Method 2: Declare existing data files (raw collections, xml collections, root file)
+ // If no path mentioned data is supposed to be in the work directory (see SetGridWorkingDir())
+ // XML collections added via this method can be combined with the first method if
+ // the content is compatible (using or not tags)
+ // plugin->AddDataFile("tag.xml");
+ // plugin->AddDataFile("/alice/data/2008/LHC08c/000057657/raw/Run57657.Merged.RAW.tag.root");
+
+ // Define alien work directory where all files will be copied. Relative to alien $HOME.
+ plugin->SetGridWorkingDir(taskname);
+
+ // Declare alien output directory. Relative to working directory.
+ plugin->SetGridOutputDir("out"); // In this case will be $HOME/taskname/out
+
+ // Declare the analysis source files names separated by blanks. To be compiled runtime
+ // using ACLiC on the worker nodes.
+ plugin->SetAnalysisSource("AliAnalysisTaskEx01.cxx");
+
+ // Declare all libraries (other than the default ones for the framework. These will be
+ // loaded by the generated analysis macro. Add all extra files (task .cxx/.h) here.
+ plugin->SetAdditionalLibs("AliAnalysisTaskEx01.h AliAnalysisTaskEx01.cxx");
+
+ // Declare the output file names separated by blanks.
+ // (can be like: file.root or file.root@ALICE::Niham::File)
+ // To only save certain files, use SetDefaultOutputs(kFALSE), and then
+ // SetOutputFiles("list.root other.filename") to choose which files to save
+ plugin->SetDefaultOutputs();
+ //plugin->SetOutputFiles("list.root");
+
+ // Optionally set a name for the generated analysis macro (default MyAnalysis.C)
+ plugin->SetAnalysisMacro(Form("%s.C",taskname));
+
+ // Optionally set maximum number of input files/subjob (default 100, put 0 to ignore)
+ plugin->SetSplitMaxInputFileNumber(100);
+
+ // Optionally modify the executable name (default analysis.sh)
+ plugin->SetExecutable(Form("%s.sh",taskname));
+
+ // set number of test files to use in "test" mode
+ plugin->SetNtestFiles(10);
+
+ // Optionally resubmit threshold.
+ plugin->SetMasterResubmitThreshold(90);
+
+ // Optionally set time to live (default 30000 sec)
+ plugin->SetTTL(30000);
+
+ // Optionally set input format (default xml-single)
+ plugin->SetInputFormat("xml-single");
+
+ // Optionally modify the name of the generated JDL (default analysis.jdl)
+ plugin->SetJDLName(Form("%s.jdl",taskname));
+
+ // Optionally modify job price (default 1)
+ plugin->SetPrice(1);
+
+ // Optionally modify split mode (default 'se')
+ plugin->SetSplitMode("se");
+
+ //----------------------------------------------------------
+ //--- PROOF MODE SPECIFIC SETTINGS ------------
+ //----------------------------------------------------------
+ // Proof cluster
+ plugin->SetProofCluster(proofcluster);
+ // Dataset to be used
+ plugin->SetProofDataSet(proofdataset);
+ // May need to reset proof. Supported modes: 0-no reset, 1-soft, 2-hard
+ plugin->SetProofReset(0);
+ // May limit number of workers
+ plugin->SetNproofWorkers(0);
+ // May limit the number of workers per slave
+ plugin->SetNproofWorkersPerSlave(1);
+ // May use a specific version of root installed in proof
+ plugin->SetRootVersionForProof("current");
+ // May set the aliroot mode. Check http://aaf.cern.ch/node/83
+ plugin->SetAliRootMode("default"); // Loads AF libs by default
+ // May request ClearPackages (individual ClearPackage not supported)
+ plugin->SetClearPackages(kFALSE);
+ // Plugin test mode works only providing a file containing test file locations, used in "local" mode also
+ plugin->SetFileForTestMode("files.txt"); // file should contain path name to a local directory containing *ESDs.root etc
+ // Request connection to alien upon connection to grid
+ plugin->SetProofConnectGrid(kFALSE);
+
+ return plugin;
+}
+