fNtestFiles(0),
fNrunsPerMaster(0),
fMaxMergeFiles(0),
+ fMaxMergeStages(0),
fNsubmitted(0),
fProductionMode(0),
fOutputToRunNo(0),
fFileForTestMode(),
fRootVersionForProof(),
fAliRootMode(),
+ fMergeDirName(),
fInputFiles(0),
fPackages(0)
{
fNtestFiles(0),
fNrunsPerMaster(0),
fMaxMergeFiles(0),
+ fMaxMergeStages(0),
fNsubmitted(0),
fProductionMode(0),
fOutputToRunNo(0),
fFileForTestMode(),
fRootVersionForProof(),
fAliRootMode(),
+ fMergeDirName(),
fInputFiles(0),
fPackages(0)
{
fNtestFiles(other.fNtestFiles),
fNrunsPerMaster(other.fNrunsPerMaster),
fMaxMergeFiles(other.fMaxMergeFiles),
+ fMaxMergeStages(other.fMaxMergeStages),
fNsubmitted(other.fNsubmitted),
fProductionMode(other.fProductionMode),
fOutputToRunNo(other.fOutputToRunNo),
fFileForTestMode(other.fFileForTestMode),
fRootVersionForProof(other.fRootVersionForProof),
fAliRootMode(other.fAliRootMode),
+ fMergeDirName(other.fMergeDirName),
fInputFiles(0),
fPackages(0)
{
fNtestFiles = other.fNtestFiles;
fNrunsPerMaster = other.fNrunsPerMaster;
fMaxMergeFiles = other.fMaxMergeFiles;
+ fMaxMergeStages = other.fMaxMergeStages;
fNsubmitted = other.fNsubmitted;
fProductionMode = other.fProductionMode;
fOutputToRunNo = other.fOutputToRunNo;
fFileForTestMode = other.fFileForTestMode;
fRootVersionForProof = other.fRootVersionForProof;
fAliRootMode = other.fAliRootMode;
+ fMergeDirName = other.fMergeDirName;
if (other.fInputFiles) {
fInputFiles = new TObjArray();
TIter next(other.fInputFiles);
return *this;
}
+//______________________________________________________________________________
+void AliAnalysisAlien::SetRunPrefix(const char *prefix)
+{
+// Set the run number format. Can be a prefix or a format like "%09d"
+// If the string contains no '%' conversion, it is treated as a plain prefix
+// and "%d" is appended so fRunPrefix can always be used as a printf-style
+// format taking the run number as its single integer argument.
+ fRunPrefix = prefix;
+ if (!fRunPrefix.Contains("%")) fRunPrefix += "%d";
+}
+
//______________________________________________________________________________
void AliAnalysisAlien::AddIncludePath(const char *path)
{
{
// Add a run number to the list of runs to be processed.
if (fRunNumbers.Length()) fRunNumbers += " ";
- fRunNumbers += Form("%s%d", fRunPrefix.Data(), run);
+ fRunNumbers += Form(fRunPrefix.Data(), run);
}
+//______________________________________________________________________________
+void AliAnalysisAlien::AddRunList(const char* runList)
+{
+// Add several runs into the list of runs; they are expected to be separated by a blank character.
+// Each blank-separated token is converted to an integer (TString::Atoi) and
+// forwarded to AddRunNumber(Int_t).
+ TString sList = runList;
+ TObjArray *list = sList.Tokenize(" ");
+ Int_t n = list->GetEntries();
+ for (Int_t i = 0; i < n; i++) {
+ TObjString *os = (TObjString*)list->At(i);
+ AddRunNumber(os->GetString().Atoi());
+ }
+ // Tokenize() returns an owning TObjArray: deleting it also frees the TObjStrings.
+ delete list;
+}
+
//______________________________________________________________________________
void AliAnalysisAlien::AddRunNumber(const char* run)
{
Error("CkeckInputData", "AliEn path to base data directory must be set.\n = Use: SetGridDataDir()");
return kFALSE;
}
+ if (fMergeViaJDL) {
+ Error("CheckInputData", "Merging via jdl works only with run numbers, run range or provided xml");
+ return kFALSE;
+ }
Info("CheckInputData", "Analysis will make a single xml for base data directory %s",fGridDataDir.Data());
if (fDataPattern.Contains("tag") && TestBit(AliAnalysisGrid::kTest))
TObject::SetBit(AliAnalysisGrid::kUseTags, kTRUE); // ADDED (fix problem in determining the tag usage in test mode)
// Check validity of run number(s)
TObjArray *arr;
TObjString *os;
+ TString format;
Int_t nruns = 0;
TString schunk, schunk2;
TString path;
} else {
Info("CheckDataType", "Using run range [%d, %d]", fRunRange[0], fRunRange[1]);
for (Int_t irun=fRunRange[0]; irun<=fRunRange[1]; irun++) {
- path = Form("%s/%s%d ", fGridDataDir.Data(), fRunPrefix.Data(), irun);
+ format = Form("%%s/%s ", fRunPrefix.Data());
+ path = Form(format.Data(), fGridDataDir.Data(), irun);
if (!DirectoryExists(path)) {
-// Warning("CheckInputData", "Run number %d not found in path: <%s>", irun, path.Data());
continue;
}
- path = Form("%s/%s%d.xml", workdir.Data(),fRunPrefix.Data(),irun);
+ format = Form("%%s/%s.xml", fRunPrefix.Data());
+ path = Form(format.Data(), workdir.Data(),irun);
TString msg = "\n##### file: ";
msg += path;
msg += " type: xml_collection;";
else msg += " using_tags: No";
Info("CheckDataType", "%s", msg.Data());
if (fNrunsPerMaster<2) {
- AddDataFile(Form("%s%d.xml",fRunPrefix.Data(),irun));
+ format = Form("%s.xml", fRunPrefix.Data());
+ AddDataFile(Form(format.Data(),irun));
} else {
nruns++;
if (((nruns-1)%fNrunsPerMaster) == 0) {
- schunk = Form("%s%d", fRunPrefix.Data(),irun);
+ schunk = Form(fRunPrefix.Data(),irun);
}
- schunk2 = Form("_%s%d.xml", fRunPrefix.Data(), irun);
+ format = Form("_%s.xml", fRunPrefix.Data());
+ schunk2 = Form(format.Data(), irun);
if ((nruns%fNrunsPerMaster)!=0 && irun != fRunRange[1]) continue;
schunk += schunk2;
AddDataFile(schunk);
workdir += fGridWorkingDir;
// Compose the 'find' command arguments
+ TString format;
TString command;
TString options = "-x collection ";
if (TestBit(AliAnalysisGrid::kTest)) options += Form("-l %d ", fNtestFiles);
} else {
// Process a full run range.
for (Int_t irun=fRunRange[0]; irun<=fRunRange[1]; irun++) {
- path = Form("%s/%s%d ", fGridDataDir.Data(), fRunPrefix.Data(), irun);
+ format = Form("%%s/%s ", fRunPrefix.Data());
+ path = Form(format.Data(), fGridDataDir.Data(), irun);
if (!DirectoryExists(path)) continue;
// CdWork();
+ format = Form("%s.xml", fRunPrefix.Data());
if (TestBit(AliAnalysisGrid::kTest)) file = "wn.xml";
- else file = Form("%s%d.xml", fRunPrefix.Data(), irun);
+ else file = Form(format.Data(), irun);
if (FileExists(file) && fNrunsPerMaster<2 && !TestBit(AliAnalysisGrid::kTest)) {
if (fOverwriteMode) gGrid->Rm(file);
else {
}
printf(" Merging collection <%s> into %d runs chunk...\n",file.Data(),fNrunsPerMaster);
if (((nruns-1)%fNrunsPerMaster) == 0) {
- schunk = Form("%s%d", fRunPrefix.Data(), irun);
+ schunk = Form(fRunPrefix.Data(), irun);
cbase = (TGridCollection*)gROOT->ProcessLine(Form("new TAlienCollection(\"%s\", 1000000);",file.Data()));
} else {
cadd = (TGridCollection*)gROOT->ProcessLine(Form("new TAlienCollection(\"%s\", 1000000);",file.Data()));
cbase->Add(cadd);
delete cadd;
}
- schunk2 = Form("%s_%s%d.xml", schunk.Data(), fRunPrefix.Data(), irun);
+ format = Form("%%s_%s.xml", fRunPrefix.Data());
+ schunk2 = Form(format.Data(), schunk.Data(), irun);
if ((nruns%fNrunsPerMaster)!=0 && irun!=fRunRange[1] && schunk2 != fInputFiles->Last()->GetName()) {
continue;
}
Error("CreateJDL", "Could not create alien output directory %s", fGridOutputDir.Data());
// error = kTRUE;
}
+ } else {
+ Warning("CreateJDL", "#### Output directory %s exists! If this contains old data, jobs will fail with ERROR_SV !!! ###", fGridOutputDir.Data());
}
gGrid->Cd(workdir);
}
if (!fArguments.IsNull())
fGridJDL->SetArguments(fArguments, "Arguments for the executable command");
if (IsOneStageMerging()) fMergingJDL->SetArguments(fGridOutputDir);
- else fMergingJDL->SetArguments("$1 $2 $3");
+ else {
+ if (fProductionMode) fMergingJDL->SetArguments("wn.xml $4"); // xml, stage
+ else fMergingJDL->SetArguments("wn.xml $2"); // xml, stage
+ }
+
fGridJDL->SetValue("TTL", Form("\"%d\"",fTTL));
fGridJDL->SetDescription("TTL", Form("Time after which the job is killed (%d min.)", fTTL/60));
fMergingJDL->SetValue("TTL", Form("\"%d\"",fTTL));
if (fSplitMaxInputFileNumber > 0) {
fGridJDL->SetValue("SplitMaxInputFileNumber", Form("\"%d\"", fSplitMaxInputFileNumber));
fGridJDL->SetDescription("SplitMaxInputFileNumber", "Maximum number of input files to be processed per subjob");
+ }
+ if (!IsOneStageMerging()) {
+ fMergingJDL->SetValue("SplitMaxInputFileNumber", Form("\"%d\"",fMaxMergeFiles));
+ fMergingJDL->SetDescription("SplitMaxInputFileNumber", "Maximum number of input files to be merged in one go");
}
if (fSplitMode.Length()) {
fGridJDL->SetValue("Split", Form("\"%s\"", fSplitMode.Data()));
fGridJDL->SetDescription("Split", "We split per SE or file");
- }
+ }
+ fMergingJDL->SetValue("Split", "\"se\"");
+ fMergingJDL->SetDescription("Split", "We split per SE for merging in stages");
if (!fAliROOTVersion.IsNull()) {
fGridJDL->AddToPackages("AliRoot", fAliROOTVersion,"VO_ALICE", "List of requested packages");
fMergingJDL->AddToPackages("AliRoot", fAliROOTVersion, "VO_ALICE", "List of requested packages");
}
fGridJDL->SetInputDataListFormat(fInputFormat, "Format of input data");
fGridJDL->SetInputDataList("wn.xml", "Collection name to be processed on each worker node");
+ fMergingJDL->SetInputDataListFormat(fInputFormat, "Format of input data");
+ fMergingJDL->SetInputDataList("wn.xml", "Collection name to be processed on each worker node");
fGridJDL->AddToInputSandbox(Form("LF:%s/%s", workdir.Data(), fAnalysisMacro.Data()), "List of input files to be uploaded to workers");
TString analysisFile = fExecutable;
analysisFile.ReplaceAll(".sh", ".root");
delete arr;
}
files.ReplaceAll(".root", "*.root");
- outputArchive += Form("root_archive.zip:%s@disk=%d",files.Data(),fNreplicas);
+ outputArchive += Form("root_archive.zip:%s,*.stat@disk=%d",files.Data(),fNreplicas);
} else {
- outputArchive = fOutputArchive;
+ TString files = fOutputArchive;
+ files.ReplaceAll(".root", "*.root"); // nreplicas etc should be already atttached by use
+ outputArchive = files;
}
arr = outputArchive.Tokenize(" ");
TIter next2(arr);
while ((os=(TObjString*)next2())) {
if (!first) comment = NULL;
TString currentfile = os->GetString();
- if (!IsOneStageMerging()) currentfile.ReplaceAll(".zip", "-Stage$2_$3.zip");
if (!currentfile.Contains("@") && fCloseSE.Length())
fMergingJDL->AddToOutputArchive(Form("%s@%s",currentfile.Data(), fCloseSE.Data()), comment);
else
fGridJDL->AddToOutputSandbox(Form("%s@%s",os->GetString().Data(), fCloseSE.Data()), comment);
else
fGridJDL->AddToOutputSandbox(os->GetString(), comment);
- first = kFALSE;
+ first = kFALSE;
if (fMergeExcludes.Contains(sout)) continue;
if (!os->GetString().Contains("@") && fCloseSE.Length())
fMergingJDL->AddToOutputSandbox(Form("%s@%s",os->GetString().Data(), fCloseSE.Data()), comment);
TString workdir;
if (!fProductionMode && !fGridWorkingDir.BeginsWith("/alice")) workdir = gGrid->GetHomeDirectory();
workdir += fGridWorkingDir;
-
+ TString stageName = "$2";
+ if (fProductionMode) stageName = "$4";
+ if (!fMergeDirName.IsNull()) {
+ fMergingJDL->AddToInputDataCollection(Form("LF:$1/%s/Stage_%s.xml,nodownload",fMergeDirName.Data(),stageName.Data()), "Collection of files to be merged for current stage");
+ fMergingJDL->SetOutputDirectory(Form("$1/%s/Stage_%s/#alien_counter_03i#",fMergeDirName.Data(),stageName.Data()), "Output directory");
+ } else {
+ fMergingJDL->AddToInputDataCollection(Form("LF:$1/Stage_%s.xml,nodownload",stageName.Data()), "Collection of files to be merged for current stage");
+ fMergingJDL->SetOutputDirectory(Form("$1/Stage_%s/#alien_counter_03i#",stageName.Data()), "Output directory");
+ }
if (fProductionMode) {
TIter next(fInputFiles);
- while ((os=next()))
+ while ((os=next())) {
fGridJDL->AddToInputDataCollection(Form("LF:%s,nodownload", os->GetName()), "Input xml collections");
+ }
fGridJDL->SetOutputDirectory(Form("%s/#alien_counter_04i#", fGridOutputDir.Data()));
- fMergingJDL->SetOutputDirectory(fGridOutputDir);
} else {
if (!fRunNumbers.Length() && !fRunRange[0]) {
// One jdl with no parameters in case input data is specified by name.
else fGridJDL->SetOutputDirectory(Form("%s/$2",fGridOutputDir.Data()), "Output directory");
} else {
fGridJDL->SetOutputDirectory(Form("%s/$2/#alien_counter_03i#", fGridOutputDir.Data()), "Output directory");
- fMergingJDL->SetOutputDirectory("$1", "Output directory");
}
}
}
// Generate the JDL as a string
TString sjdl = fGridJDL->Generate();
TString sjdl1 = fMergingJDL->Generate();
- Int_t index;
+ // Final merge jdl
+ if (!fMergeDirName.IsNull()) {
+ fMergingJDL->SetOutputDirectory(Form("$1/%s",fMergeDirName.Data()), "Output directory");
+ fMergingJDL->AddToInputSandbox(Form("LF:$1/%s/Stage_%s.xml",fMergeDirName.Data(),stageName.Data()));
+ } else {
+ fMergingJDL->SetOutputDirectory("$1", "Output directory");
+ fMergingJDL->AddToInputSandbox(Form("LF:$1/Stage_%s.xml",stageName.Data()));
+ }
+ TString sjdl2 = fMergingJDL->Generate();
+ Int_t index, index1;
sjdl.ReplaceAll("\"LF:", "\n \"LF:");
sjdl.ReplaceAll("(member", "\n (member");
sjdl.ReplaceAll("\",\"VO_", "\",\n \"VO_");
sjdl1.ReplaceAll("{\n \n", "{\n");
sjdl1.ReplaceAll("\n\n", "\n");
sjdl1.ReplaceAll("OutputDirectory", "OutputDir");
+ sjdl2.ReplaceAll("\"LF:", "\n \"LF:");
+ sjdl2.ReplaceAll("(member", "\n (member");
+ sjdl2.ReplaceAll("\",\"VO_", "\",\n \"VO_");
+ sjdl2.ReplaceAll("{", "{\n ");
+ sjdl2.ReplaceAll("};", "\n};");
+ sjdl2.ReplaceAll("{\n \n", "{\n");
+ sjdl2.ReplaceAll("\n\n", "\n");
+ sjdl2.ReplaceAll("OutputDirectory", "OutputDir");
sjdl += "JDLVariables = \n{\n \"Packages\",\n \"OutputDir\"\n};\n";
sjdl.Prepend(Form("Jobtag = {\n \"comment:%s\"\n};\n", fJobTag.Data()));
index = sjdl.Index("JDLVariables");
if (index >= 0) sjdl.Insert(index, "\n# JDL variables\n");
sjdl += "Workdirectorysize = {\"5000MB\"};";
+ sjdl1 += "Workdirectorysize = {\"5000MB\"};";
sjdl1 += "JDLVariables = \n{\n \"Packages\",\n \"OutputDir\"\n};\n";
index = fJobTag.Index(":");
if (index < 0) index = fJobTag.Length();
TString jobTag = fJobTag;
- jobTag.Insert(index, "_Merging");
+ if (fProductionMode) jobTag.Insert(index,"_Stage$4");
sjdl1.Prepend(Form("Jobtag = {\n \"comment:%s_Merging\"\n};\n", jobTag.Data()));
- sjdl1.Prepend("# Generated merging jdl\n# $1 = full alien path to output directory to be merged\n# $2 = merging stage\n# $3 = merged chunk\n");
+ if (fProductionMode) {
+ sjdl1.Prepend("# Generated merging jdl (production mode) \
+ \n# $1 = full alien path to output directory to be merged \
+ \n# $2 = train number \
+ \n# $3 = production (like LHC10b) \
+ \n# $4 = merging stage \
+ \n# Stage_<n>.xml made via: find <OutputDir> *Stage<n-1>/*root_archive.zip\n");
+ sjdl2.Prepend(Form("Jobtag = {\n \"comment:%s_FinalMerging\"\n};\n", jobTag.Data()));
+ sjdl2.Prepend("# Generated merging jdl \
+ \n# $1 = full alien path to output directory to be merged \
+ \n# $2 = train number \
+ \n# $3 = production (like LHC10b) \
+ \n# $4 = merging stage \
+ \n# Stage_<n>.xml made via: find <OutputDir> *Stage<n-1>/*root_archive.zip\n");
+ } else {
+ sjdl1.Prepend("# Generated merging jdl \
+ \n# $1 = full alien path to output directory to be merged \
+ \n# $2 = merging stage \
+ \n# xml made via: find <OutputDir> *Stage<n-1>/*root_archive.zip\n");
+ sjdl2.Prepend(Form("Jobtag = {\n \"comment:%s_FinalMerging\"\n};\n", jobTag.Data()));
+ sjdl2.Prepend("# Generated merging jdl \
+ \n# $1 = full alien path to output directory to be merged \
+ \n# $2 = merging stage \
+ \n# xml made via: find <OutputDir> *Stage<n-1>/*root_archive.zip\n");
+ }
index = sjdl1.Index("JDLVariables");
if (index >= 0) sjdl1.Insert(index, "\n# JDL variables\n");
+ index = sjdl2.Index("JDLVariables");
+ if (index >= 0) sjdl2.Insert(index, "\n# JDL variables\n");
sjdl1 += "Workdirectorysize = {\"5000MB\"};";
+ sjdl2 += "Workdirectorysize = {\"5000MB\"};";
+ index = sjdl2.Index("Split =");
+ if (index>=0) {
+ index1 = sjdl2.Index("\n", index);
+ sjdl2.Remove(index, index1-index+1);
+ }
+ index = sjdl2.Index("SplitMaxInputFileNumber");
+ if (index>=0) {
+ index1 = sjdl2.Index("\n", index);
+ sjdl2.Remove(index, index1-index+1);
+ }
+ index = sjdl2.Index("InputDataCollection");
+ if (index>=0) {
+ index1 = sjdl2.Index(";", index);
+ sjdl2.Remove(index, index1-index+1);
+ }
+ index = sjdl2.Index("InputDataListFormat");
+ if (index>=0) {
+ index1 = sjdl2.Index("\n", index);
+ sjdl2.Remove(index, index1-index+1);
+ }
+ index = sjdl2.Index("InputDataList");
+ if (index>=0) {
+ index1 = sjdl2.Index("\n", index);
+ sjdl2.Remove(index, index1-index+1);
+ }
+ sjdl2.ReplaceAll("wn.xml", Form("Stage_%s.xml",stageName.Data()));
// Write jdl to file
ofstream out;
out.open(fJDLName.Data(), ios::out);
return kFALSE;
}
out << sjdl << endl;
+ out.close();
TString mergeJDLName = fExecutable;
mergeJDLName.ReplaceAll(".sh", "_merge.jdl");
if (fMergeViaJDL) {
ofstream out1;
out1.open(mergeJDLName.Data(), ios::out);
- if (out.bad()) {
+ if (out1.bad()) {
Error("WriteJDL", "Bad file name: %s", mergeJDLName.Data());
return kFALSE;
}
out1 << sjdl1 << endl;
+ out1.close();
+ ofstream out2;
+ TString finalJDL = mergeJDLName;
+ finalJDL.ReplaceAll(".jdl", "_final.jdl");
+ out2.open(finalJDL.Data(), ios::out);
+ if (out2.bad()) {
+ Error("WriteJDL", "Bad file name: %s", finalJDL.Data());
+ return kFALSE;
+ }
+ out2 << sjdl2 << endl;
+ out2.close();
}
// Copy jdl to grid workspace
} else {
TString locjdl = Form("%s/%s", fGridOutputDir.Data(),fJDLName.Data());
TString locjdl1 = Form("%s/%s", fGridOutputDir.Data(),mergeJDLName.Data());
+ TString finalJDL = mergeJDLName;
+ finalJDL.ReplaceAll(".jdl", "_final.jdl");
+ TString locjdl2 = Form("%s/%s", fGridOutputDir.Data(),finalJDL.Data());
if (fProductionMode) {
locjdl = Form("%s/%s", workdir.Data(),fJDLName.Data());
locjdl1 = Form("%s/%s", workdir.Data(),mergeJDLName.Data());
+ locjdl2 = Form("%s/%s", workdir.Data(),finalJDL.Data());
}
if (FileExists(locjdl)) gGrid->Rm(locjdl);
if (FileExists(locjdl1)) gGrid->Rm(locjdl1);
+ if (FileExists(locjdl2)) gGrid->Rm(locjdl2);
Info("WriteJDL", "\n##### Copying JDL file <%s> to your AliEn output directory", fJDLName.Data());
TFile::Cp(Form("file:%s",fJDLName.Data()), Form("alien://%s", locjdl.Data()));
if (fMergeViaJDL) {
- Info("WriteJDL", "\n##### Copying merging JDL file <%s> to your AliEn output directory", mergeJDLName.Data());
+ Info("WriteJDL", "\n##### Copying merging JDL files <%s> to your AliEn output directory", mergeJDLName.Data());
TFile::Cp(Form("file:%s",mergeJDLName.Data()), Form("alien://%s", locjdl1.Data()));
+ TFile::Cp(Form("file:%s",finalJDL.Data()), Form("alien://%s", locjdl2.Data()));
}
}
return kTRUE;
{
// Returns true if file exists.
if (!gGrid) return kFALSE;
- TGridResult *res = gGrid->Ls(lfn);
+ TString slfn = lfn;
+ slfn.ReplaceAll("alien://","");
+ TGridResult *res = gGrid->Ls(slfn);
if (!res) return kFALSE;
TMap *map = dynamic_cast<TMap*>(res->At(0));
if (!map) {
Int_t pid;
for (Int_t ijob=0; ijob<nentries; ijob++) {
status = (TGridJobStatus *)list->At(ijob);
- pid = gROOT->ProcessLine(Form("atoi(((TAlienJobStatus*)0x%lx)->GetKey(\"queueId\"));", (ULong_t)status));
+ pid = gROOT->ProcessLine(Form("atoi(((TAlienJobStatus*)%p)->GetKey(\"queueId\"));", status));
if (pid<jobidstart) continue;
if (pid == lastid) {
- gROOT->ProcessLine(Form("sprintf((char*)0x%lx,((TAlienJobStatus*)0x%lx)->GetKey(\"status\"));",(ULong_t)mstatus, (ULong_t)status));
+ gROOT->ProcessLine(Form("sprintf((char*)%p,((TAlienJobStatus*)%p)->GetKey(\"status\"));",mstatus, status));
}
switch (status->GetStatus()) {
case TGridJobStatus::kWAITING:
if (fRunNumbers.Length())
printf("= Run numbers to be processed: _________________ %s\n", fRunNumbers.Data());
if (fRunRange[0])
- printf("= Run range to be processed: ___________________ %s%d-%s%d\n", fRunPrefix.Data(), fRunRange[0], fRunPrefix.Data(), fRunRange[1]);
+ printf("= Run range to be processed: ___________________ %d-%d\n", fRunRange[0], fRunRange[1]);
if (!fRunRange[0] && !fRunNumbers.Length()) {
TIter next(fInputFiles);
TObject *obj;
fNreplicas = 2;
fRunRange[0] = 0;
fRunRange[1] = 0;
+ fRunPrefix = "%d";
fNrunsPerMaster = 1;
fMaxMergeFiles = 100;
fRunNumbers = "";
}
//______________________________________________________________________________
-Bool_t AliAnalysisAlien::CheckMergedFiles(const char *filename, const char *aliendir, Int_t nperchunk, Bool_t submit, const char *jdl)
+Bool_t AliAnalysisAlien::CheckMergedFiles(const char *filename, const char *aliendir, Int_t nperchunk, const char *jdl)
{
-// Static method that checks the status of merging. This can submit merging jobs that did not produced the expected
-// output. If <submit> is false (checking) returns true only when the final merged file was found. If submit is true returns
-// true if the jobs were successfully submitted.
- Int_t countOrig = 0;
- Int_t countStage = 0;
- Int_t stage = 0;
- Int_t i;
- Bool_t doneFinal = kFALSE;
- TBits chunksDone;
- TString saliendir(aliendir);
- TString sfilename, stmp;
- saliendir.ReplaceAll("//","/");
- saliendir = saliendir.Strip(TString::kTrailing, '/');
- if (!gGrid) {
- ::Error("GetNregisteredFiles", "You need to be connected to AliEn.");
+// Checks current merge stage, makes xml for the next stage, counts number of files, submits next stage.
+ // First check if the result is already in the output directory.
+ if (FileExists(Form("%s/%s",aliendir,filename))) {
+ printf("Final merged results found. Not merging again.\n");
return kFALSE;
}
- sfilename = filename;
- sfilename.ReplaceAll(".root", "*.root");
- printf("Checking directory <%s> for merged files <%s> ...\n", aliendir, sfilename.Data());
- TString command = Form("find %s/ *%s", saliendir.Data(), sfilename.Data());
- TGridResult *res = gGrid->Command(command);
- if (!res) {
- ::Error("GetNregisteredFiles","Error: No result for the find command\n");
+ // Now check the last stage done.
+ Int_t stage = 0;
+ while (1) {
+ if (!FileExists(Form("%s/Stage_%d.xml",aliendir, stage+1))) break;
+ stage++;
+ }
+ // Next stage of merging
+ stage++;
+ TString pattern = "*root_archive.zip";
+ if (stage>1) pattern = Form("Stage_%d/*root_archive.zip", stage-1);
+ TGridResult *res = gGrid->Command(Form("find -x Stage_%d %s %s", stage, aliendir, pattern.Data()));
+ if (res) delete res;
+ // Write standard output to file
+ gROOT->ProcessLine(Form("gGrid->Stdout(); > %s", Form("Stage_%d.xml",stage)));
+ // Count the number of files inside
+ ifstream ifile;
+ ifile.open(Form("Stage_%d.xml",stage));
+ if (!ifile.good()) {
+ ::Error("CheckMergedFiles", "Could not redirect result of the find command to file %s", Form("Stage_%d.xml",stage));
return kFALSE;
- }
- TIter nextmap(res);
- TMap *map = 0;
- while ((map=(TMap*)nextmap())) {
- TString turl = map->GetValue("turl")->GetName();
- if (!turl.Length()) {
- // Nothing found
- delete res;
- return kFALSE;
- }
- turl.ReplaceAll("alien://", "");
- turl.ReplaceAll(saliendir, "");
- sfilename = gSystem->BaseName(turl);
- turl = turl.Strip(TString::kLeading, '/');
- // Now check to what the file corresponds to:
- // original output - aliendir/%03d/filename
- // merged file (which stage) - aliendir/filename-Stage%02d_%04d
- // final merged file - aliendir/filename
- if (sfilename == turl) {
- if (sfilename == filename) {
- doneFinal = kTRUE;
- } else {
- // check stage
- Int_t index = sfilename.Index("Stage");
- if (index<0) continue;
- stmp = sfilename(index+5,2);
- Int_t istage = atoi(stmp);
- stmp = sfilename(index+8,4);
- Int_t ijob = atoi(stmp);
- if (istage<stage) continue; // Ignore lower stages
- if (istage>stage) {
- countStage = 0;
- chunksDone.ResetAllBits();
- stage = istage;
- }
- countStage++;
- chunksDone.SetBitNumber(ijob);
- }
- } else {
- countOrig++;
- }
- if (doneFinal) {
- delete res;
- printf("=> Removing files from previous stages...\n");
- gGrid->Rm(Form("%s/*Stage*.root", aliendir));
- for (i=1; i<stage; i++)
- gGrid->Rm(Form("%s/*Stage%d*.zip", aliendir, i));
- return kTRUE;
- }
+ }
+ TString line;
+ Int_t nfiles = 0;
+ while (!ifile.eof()) {
+ ifile >> line;
+ if (line.Contains("/event")) nfiles++;
}
- delete res;
- // Compute number of jobs that were submitted for the current stage
- Int_t ntotstage = countOrig;
- for (i=1; i<=stage; i++) {
- if (ntotstage%nperchunk) ntotstage = (ntotstage/nperchunk)+1;
- else ntotstage = (ntotstage/nperchunk);
+ ifile.close();
+ if (!nfiles) {
+ ::Error("CheckMergedFiles", "Cannot start Stage_%d merging since Stage_%d did not produced yet output", stage, stage-1);
+ return kFALSE;
+ } else {
+ printf("=== Stage_%d produced %d files\n", stage-1, nfiles);
}
- // Now compare with the number of set bits in the chunksDone array
- Int_t nmissing = (stage>0)?(ntotstage - countStage):0;
- // Print the info
- printf("*** Found %d original files\n", countOrig);
- if (stage==0) printf("*** No merging completed so far.\n");
- else printf("*** Found %d out of %d files merged for stage %d\n", countStage, ntotstage, stage);
- if (nmissing) printf("*** Number of merged files missing for this stage: %d -> check merging job completion\n", nmissing);
- if (!submit) return doneFinal;
- // Sumbit merging jobs for all missing chunks for the current stage.
- TString query = Form("submit %s %s", jdl, aliendir);
- Int_t ichunk = -1;
- if (nmissing) {
- for (i=0; i<nmissing; i++) {
- ichunk = chunksDone.FirstNullBit(ichunk+1);
- Int_t jobId = SubmitSingleJob(Form("%s %d %d", query.Data(), stage, ichunk));
- if (!jobId) return kFALSE;
- }
- return kTRUE;
+ // Copy the file in the output directory
+ printf("===> Copying collection %s in the output directory %s\n", Form("Stage_%d.xml",stage), aliendir);
+ TFile::Cp(Form("Stage_%d.xml",stage), Form("alien://%s/Stage_%d.xml",aliendir,stage));
+ // Check if this is the last stage to be done.
+ Bool_t laststage = (nfiles<nperchunk);
+ if (fMaxMergeStages && stage>=fMaxMergeStages) laststage = kTRUE;
+ if (laststage) {
+ printf("### Submiting final merging stage %d\n", stage);
+ TString finalJDL = jdl;
+ finalJDL.ReplaceAll(".jdl", "_final.jdl");
+ TString query = Form("submit %s %s %d", finalJDL.Data(), aliendir, stage);
+ Int_t jobId = SubmitSingleJob(query);
+ if (!jobId) return kFALSE;
+ } else {
+ printf("### Submiting merging stage %d\n", stage);
+ TString query = Form("submit %s %s %d", jdl, aliendir, stage);
+ Int_t jobId = SubmitSingleJob(query);
+ if (!jobId) return kFALSE;
}
- // Submit next stage of merging
- if (stage==0) countStage = countOrig;
- Int_t nchunks = (countStage/nperchunk);
- if (countStage%nperchunk) nchunks += 1;
- for (i=0; i<nchunks; i++) {
- Int_t jobId = SubmitSingleJob(Form("%s %d %d", query.Data(), stage+1, i));
- if (!jobId) return kFALSE;
- }
- return kTRUE;
-}
+ return kTRUE;
+}
//______________________________________________________________________________
Int_t AliAnalysisAlien::SubmitSingleJob(const char *query)
}
//______________________________________________________________________________
-Bool_t AliAnalysisAlien::MergeOutput(const char *output, const char *basedir, Int_t nmaxmerge, Int_t stage, Int_t ichunk)
+Bool_t AliAnalysisAlien::MergeOutput(const char *output, const char *basedir, Int_t nmaxmerge, Int_t stage)
{
-// Merge given output files from basedir. The file merger will merge nmaxmerge
-// files in a group. Merging can be done in stages:
-// stage=0 : will merge all existing files in a single stage
-// stage=1 : does a find command for all files that do NOT contain the string "Stage".
-// If their number is bigger that nmaxmerge, only the files from
-// ichunk*nmaxmerge to ichunk*(nmaxmerge+1)-1 will get merged as output_stage_<ichunk>
-// stage=n : does a find command for files named <output>Stage<stage-1>_*. If their number is bigger than
-// nmaxmerge, merge just the chunk ichunk, otherwise write the merged output to the file
-// named <output>.
+// Merge given output files from basedir. Basedir can be an alien output directory
+// but also an xml file with root_archive.zip locations. The file merger will merge nmaxmerge
+// files in a group (ignored for xml input). Merging can be done in stages:
+// stage=0 : will merge all existing files in a single stage, supporting resume if run locally
+// stage=1 : works with an xml of all root_archive.zip in the output directory
+// stage>1 : works with an xml of all root_archive.zip in the Stage_<n-1> directory
TString outputFile = output;
TString command;
TString outputChunk;
TString previousChunk = "";
+ TObjArray *listoffiles = new TObjArray();
+// listoffiles->SetOwner();
Int_t countChunk = 0;
Int_t countZero = nmaxmerge;
Bool_t merged = kTRUE;
Int_t index = outputFile.Index("@");
if (index > 0) outputFile.Remove(index);
TString inputFile = outputFile;
- if (stage>1) inputFile.ReplaceAll(".root", Form("-Stage%02d_*.root", stage-1));
- command = Form("find %s/ *%s", basedir, inputFile.Data());
- printf("command: %s\n", command.Data());
- TGridResult *res = gGrid->Command(command);
- if (!res) {
+ TString sbasedir = basedir;
+ if (sbasedir.Contains(".xml")) {
+ // Merge files pointed by the xml - ignore nmaxmerge and set ichunk to 0
+ nmaxmerge = 9999999;
+ TGridCollection *coll = (TGridCollection*)gROOT->ProcessLine(Form("TAlienCollection::Open(\"%s\");", basedir));
+ if (!coll) {
+ ::Error("MergeOutput", "Input XML collection empty.");
+ return kFALSE;
+ }
+ // Iterate grid collection
+ while (coll->Next()) {
+ TString fname = gSystem->DirName(coll->GetTURL());
+ fname += "/";
+ fname += inputFile;
+ listoffiles->Add(new TNamed(fname.Data(),""));
+ }
+ } else {
+ command = Form("find %s/ *%s", basedir, inputFile.Data());
+ printf("command: %s\n", command.Data());
+ TGridResult *res = gGrid->Command(command);
+ if (!res) {
+ ::Error("MergeOutput","No result for the find command\n");
+ delete listoffiles;
+ return kFALSE;
+ }
+ TIter nextmap(res);
+ TMap *map = 0;
+ while ((map=(TMap*)nextmap())) {
+ TObjString *objs = dynamic_cast<TObjString*>(map->GetValue("turl"));
+ if (!objs || !objs->GetString().Length()) {
+ // Nothing found - skip this output
+ delete res;
+ delete listoffiles;
+ return kFALSE;
+ }
+ listoffiles->Add(new TNamed(objs->GetName(),""));
+ }
+ delete res;
+ }
+ if (!listoffiles->GetEntries()) {
::Error("MergeOutput","No result for the find command\n");
+ delete listoffiles;
return kFALSE;
}
TFileMerger *fm = 0;
- TIter nextmap(res);
- TMap *map = 0;
+ TIter next0(listoffiles);
+ TObjArray *listoffilestmp = new TObjArray();
+ listoffilestmp->SetOwner();
+ TObject *nextfile;
+ TString snextfile;
+ // Keep only the files at upper level
+ Int_t countChar = 0;
+ while ((nextfile=next0())) {
+ snextfile = nextfile->GetName();
+ Int_t crtCount = snextfile.CountChar('/');
+ if (nextfile == listoffiles->First()) countChar = crtCount;
+ if (crtCount < countChar) countChar = crtCount;
+ }
+ next0.Reset();
+ while ((nextfile=next0())) {
+ snextfile = nextfile->GetName();
+ Int_t crtCount = snextfile.CountChar('/');
+ if (crtCount > countChar) {
+ delete nextfile;
+ continue;
+ }
+ listoffilestmp->Add(nextfile);
+ }
+ delete listoffiles;
+ listoffiles = listoffilestmp; // Now contains 'good' files
+ listoffiles->Print();
+ TIter next(listoffiles);
// Check if there is a merge operation to resume. Works only for stage 0 or 1.
outputChunk = outputFile;
outputChunk.ReplaceAll(".root", "_*.root");
if (!gSystem->Exec(Form("ls %s 2>/dev/null", outputChunk.Data()))) {
while (1) {
// Skip as many input files as in a chunk
- for (Int_t counter=0; counter<nmaxmerge; counter++) map = (TMap*)nextmap();
- if (!map) {
- ::Error("MergeOutput", "Cannot resume merging for <%s>, nentries=%d", outputFile.Data(), res->GetSize());
- delete res;
- return kFALSE;
+ for (Int_t counter=0; counter<nmaxmerge; counter++) {
+ nextfile = next();
+ if (!nextfile) {
+ ::Error("MergeOutput", "Mismatch found. Please remove partial merged files from local dir.");
+ delete listoffiles;
+ return kFALSE;
+ }
+ snextfile = nextfile->GetName();
}
outputChunk = outputFile;
outputChunk.ReplaceAll(".root", Form("_%04d.root", countChunk));
}
countZero = nmaxmerge;
- while ((map=(TMap*)nextmap())) {
- // Loop 'find' results and get next LFN
+ while ((nextfile=next())) {
+ snextfile = nextfile->GetName();
+ // Loop 'find' results and get next LFN
if (countZero == nmaxmerge) {
// First file in chunk - create file merger and add previous chunk if any.
fm = new TFileMerger(kFALSE);
outputChunk.ReplaceAll(".root", Form("_%04d.root", countChunk));
}
// If last file found, put merged results in the output file
- if (map == res->Last()) outputChunk = outputFile;
- TObjString *objs = dynamic_cast<TObjString*>(map->GetValue("turl"));
- if (!objs || !objs->GetString().Length()) {
- // Nothing found - skip this output
- delete res;
- delete fm;
- return kFALSE;
- }
+ if (nextfile == listoffiles->Last()) outputChunk = outputFile;
// Add file to be merged and decrement chunk counter.
- fm->AddFile(objs->GetString());
+ fm->AddFile(snextfile);
countZero--;
- if (countZero==0 || map == res->Last()) {
+ if (countZero==0 || nextfile == listoffiles->Last()) {
if (!fm->GetMergeList() || !fm->GetMergeList()->GetSize()) {
// Nothing found - skip this output
::Warning("MergeOutput", "No <%s> files found.", inputFile.Data());
- delete res;
- delete fm;
- return kFALSE;
+ merged = kFALSE;
+ break;
}
fm->OutputFile(outputChunk);
// Merge the outputs, then go to next chunk
if (!fm->Merge()) {
::Error("MergeOutput", "Could not merge all <%s> files", outputFile.Data());
- delete res;
- delete fm;
- return kFALSE;
+ merged = kFALSE;
+ break;
} else {
::Info("MergeOutputs", "\n##### Merged %d output files to <%s>", fm->GetMergeList()->GetSize(), outputChunk.Data());
gSystem->Unlink(previousChunk);
}
- if (map == res->Last()) {
- delete res;
- delete fm;
- break;
- }
+ if (nextfile == listoffiles->Last()) break;
countChunk++;
countZero = nmaxmerge;
previousChunk = outputChunk;
}
}
+ delete listoffiles;
+ delete fm;
return merged;
}
// Merging stage different than 0.
// Move to the begining of the requested chunk.
- outputChunk = outputFile;
- if (nmaxmerge < res->GetSize()) {
- if (ichunk*nmaxmerge >= res->GetSize()) {
- ::Error("MergeOutput", "Cannot merge merge chunk %d grouping %d files from %d total.", ichunk, nmaxmerge, res->GetSize());
- delete res;
- return kFALSE;
- }
- for (Int_t counter=0; counter<ichunk*nmaxmerge; counter++) map = (TMap*)nextmap();
- outputChunk.ReplaceAll(".root", Form("-Stage%02d_%04d.root", stage, ichunk));
- }
- countZero = nmaxmerge;
fm = new TFileMerger(kFALSE);
fm->SetFastMethod(kTRUE);
- while ((map=(TMap*)nextmap())) {
- // Loop 'find' results and get next LFN
- TObjString *objs = dynamic_cast<TObjString*>(map->GetValue("turl"));
- if (!objs || !objs->GetString().Length()) {
- // Nothing found - skip this output
- delete res;
- delete fm;
- return kFALSE;
- }
- // Add file to be merged and decrement chunk counter.
- fm->AddFile(objs->GetString());
- countZero--;
- if (countZero==0) break;
- }
- delete res;
+ while ((nextfile=next())) fm->AddFile(nextfile->GetName());
+ delete listoffiles;
if (!fm->GetMergeList() || !fm->GetMergeList()->GetSize()) {
// Nothing found - skip this output
::Warning("MergeOutput", "No <%s> files found.", inputFile.Data());
delete fm;
return kFALSE;
}
- fm->OutputFile(outputChunk);
+ fm->OutputFile(outputFile);
// Merge the outputs
if (!fm->Merge()) {
::Error("MergeOutput", "Could not merge all <%s> files", outputFile.Data());
delete fm;
return kFALSE;
} else {
- ::Info("MergeOutput", "\n##### Merged %d output files to <%s>", fm->GetMergeList()->GetSize(), outputChunk.Data());
+ ::Info("MergeOutput", "\n##### Merged %d output files to <%s>", fm->GetMergeList()->GetSize(), outputFile.Data());
}
delete fm;
return kTRUE;
if (fFastReadOption) {
Warning("MergeOutputs", "You requested FastRead option. Using xrootd flags to reduce timeouts. This may skip some files that could be accessed ! \
\n+++ NOTE: To disable this option, use: plugin->SetFastReadOption(kFALSE)");
- gEnv->SetValue("XNet.ConnectTimeout",10);
- gEnv->SetValue("XNet.RequestTimeout",10);
+ gEnv->SetValue("XNet.ConnectTimeout",50);
+ gEnv->SetValue("XNet.RequestTimeout",50);
gEnv->SetValue("XNet.MaxRedirectCount",2);
- gEnv->SetValue("XNet.ReconnectTimeout",10);
+ gEnv->SetValue("XNet.ReconnectTimeout",50);
gEnv->SetValue("XNet.FirstConnectMaxCnt",1);
}
// Make sure we change the temporary directory
}
// Is a given aliroot mode requested ?
TList optionsList;
+ TString parLibs;
if (!fAliRootMode.IsNull()) {
TString alirootMode = fAliRootMode;
if (alirootMode == "default") alirootMode = "";
optionsList.Add(new TNamed("ALIROOT_MODE", alirootMode.Data()));
// Check the additional libs to be loaded
TString extraLibs;
+ Bool_t parMode = kFALSE;
if (!alirootMode.IsNull()) extraLibs = "ANALYSIS:ANALYSISalice";
// Parse the extra libs for .so
if (fAdditionalLibs.Length()) {
TObjArray *list = fAdditionalLibs.Tokenize(" ");
TIter next(list);
TObjString *str;
- while((str=(TObjString*)next()) && str->GetString().Contains(".so")) {
- TString stmp = str->GetName();
- if (stmp.BeginsWith("lib")) stmp.Remove(0,3);
- stmp.ReplaceAll(".so","");
- if (!extraLibs.IsNull()) extraLibs += ":";
- extraLibs += stmp;
+ while((str=(TObjString*)next())) {
+ if (str->GetString().Contains(".so")) {
+ if (parMode) {
+ Warning("StartAnalysis", "Plugin does not support loading libs after par files in PROOF mode. Library %s and following will not load on workers", str->GetName());
+ break;
+ }
+ TString stmp = str->GetName();
+ if (stmp.BeginsWith("lib")) stmp.Remove(0,3);
+ stmp.ReplaceAll(".so","");
+ if (!extraLibs.IsNull()) extraLibs += ":";
+ extraLibs += stmp;
+ continue;
+ }
+ if (str->GetString().Contains(".par")) {
+ // The first par file found in the list will not allow any further .so
+ parMode = kTRUE;
+ if (!parLibs.IsNull()) parLibs += ":";
+ parLibs += str->GetName();
+ continue;
+ }
}
if (list) delete list;
}
if (!fIncludePath.IsNull()) {
TString includePath = fIncludePath;
includePath.ReplaceAll(" ",":");
+ includePath.ReplaceAll("$ALICE_ROOT","");
+ includePath.ReplaceAll("${ALICE_ROOT}","");
+ includePath.ReplaceAll("-I","");
includePath.Strip(TString::kTrailing, ':');
Info("StartAnalysis", "Adding extra includes: %s",includePath.Data());
optionsList.Add(new TNamed("ALIROOT_EXTRA_INCLUDES",includePath.Data()));
printf("%s %s\n", obj->GetName(), obj->GetTitle());
}
if (!gROOT->ProcessLine(Form("gProof->UploadPackage(\"%s\");",alirootLite.Data()))
- && !gROOT->ProcessLine(Form("gProof->EnablePackage(\"%s\", (TList*)0x%lx);",alirootLite.Data(),(ULong_t)&optionsList))) {
+ && !gROOT->ProcessLine(Form("gProof->EnablePackage(\"%s\", (TList*)%p);",alirootLite.Data(),&optionsList))) {
Info("StartAnalysis", "AliRootProofLite enabled");
} else {
Error("StartAnalysis", "There was an error trying to enable package AliRootProofLite.par");
return kFALSE;
}
} else {
- if (gROOT->ProcessLine(Form("gProof->EnablePackage(\"VO_ALICE@AliRoot::%s\", (TList*)0x%lx);",
- fAliROOTVersion.Data(), (ULong_t)&optionsList))) {
+ if (gROOT->ProcessLine(Form("gProof->EnablePackage(\"VO_ALICE@AliRoot::%s\", (TList*)%p, kTRUE);",
+ fAliROOTVersion.Data(), &optionsList))) {
Error("StartAnalysis", "There was an error trying to enable package VO_ALICE@AliRoot::%s", fAliROOTVersion.Data());
return kFALSE;
}
}
+ // First, enable the par files collected from fAdditionalLibs
+ if (!parLibs.IsNull()) {
+ TObjArray *list = parLibs.Tokenize(":");
+ TIter next(list);
+ TObjString *package;
+ while((package=(TObjString*)next())) {
+ TString spkg = package->GetName();
+ spkg.ReplaceAll(".par", "");
+ gSystem->Exec(TString::Format("rm -rf %s", spkg.Data()));
+ if (!gROOT->ProcessLine(Form("gProof->UploadPackage(\"%s\");", package->GetName()))) {
+ TString enablePackage = (testMode)?Form("gProof->EnablePackage(\"%s\",kFALSE);", package->GetName()):Form("gProof->EnablePackage(\"%s\",kTRUE);", package->GetName());
+ if (gROOT->ProcessLine(enablePackage)) {
+ Error("StartAnalysis", "There was an error trying to enable package %s", package->GetName());
+ return kFALSE;
+ }
+ } else {
+ Error("StartAnalysis", "There was an error trying to upload package %s", package->GetName());
+ return kFALSE;
+ }
+ }
+ if (list) delete list;
+ }
} else {
if (fAdditionalLibs.Contains(".so") && !testMode) {
Error("StartAnalysis", "You request additional libs to be loaded but did not enabled any AliRoot mode. Please refer to: \
TIter next(fPackages);
TObject *package;
while ((package=next())) {
+ // Skip packages already enabled
+ if (parLibs.Contains(package->GetName())) continue;
+ TString spkg = package->GetName();
+ spkg.ReplaceAll(".par", "");
+ gSystem->Exec(TString::Format("rm -rf %s", spkg.Data()));
if (gROOT->ProcessLine(Form("gProof->UploadPackage(\"%s\");", package->GetName()))) {
if (gROOT->ProcessLine(Form("gProof->EnablePackage(\"%s\",kTRUE);", package->GetName()))) {
Error("StartAnalysis", "There was an error trying to enable package %s", package->GetName());
}
TFileCollection *coll = new TFileCollection();
coll->AddFromFile(fFileForTestMode);
- gROOT->ProcessLine(Form("gProof->RegisterDataSet(\"test_collection\", (TFileCollection*)0x%lx, \"OV\");", (ULong_t)coll));
+ gROOT->ProcessLine(Form("gProof->RegisterDataSet(\"test_collection\", (TFileCollection*)%p, \"OV\");", coll));
gROOT->ProcessLine("gProof->ShowDataSets()");
}
return kTRUE;
}
// Compose the output archive.
fOutputArchive = "log_archive.zip:std*@disk=1 ";
- fOutputArchive += Form("root_archive.zip:%s@disk=%d",fOutputFiles.Data(),fNreplicas);
+ fOutputArchive += Form("root_archive.zip:%s,*.stat@disk=%d",fOutputFiles.Data(),fNreplicas);
}
// if (!fCloseSE.Length()) fCloseSE = gSystem->Getenv("alien_CLOSE_SE");
if (TestBit(AliAnalysisGrid::kOffline)) {
}
delete list;
gSystem->Exec(Form("bash %s 2>stderr", fExecutable.Data()));
- TString validationScript = fExecutable;
- validationScript.ReplaceAll(".sh", "_validation.sh");
- gSystem->Exec(Form("bash %s",validationScript.Data()));
+ gSystem->Exec(Form("bash %s",fValidationScript.Data()));
// gSystem->Exec("cat stdout");
return kFALSE;
}
if (!fTerminateFiles.IsNull()) {
fTerminateFiles.Strip();
fTerminateFiles.ReplaceAll(" ",",");
- TObjArray *fextra = extra.Tokenize(",");
+ TObjArray *fextra = fTerminateFiles.Tokenize(",");
TIter nextx(fextra);
TObject *obj;
while ((obj=nextx())) {
gGrid->Cd(fGridOutputDir);
TString mergeJDLName = fExecutable;
mergeJDLName.ReplaceAll(".sh", "_merge.jdl");
+ if (!fInputFiles) {
+ Error("SubmitMerging", "You have to use explicit run numbers or run range to merge via JDL!");
+ return kFALSE;
+ }
Int_t ntosubmit = fInputFiles->GetEntries();
for (Int_t i=0; i<ntosubmit; i++) {
TString runOutDir = gSystem->BaseName(fInputFiles->At(i)->GetName());
printf("### Submitting merging job for run <%s>\n", runOutDir.Data());
runOutDir = Form("%s/%s", fGridOutputDir.Data(), runOutDir.Data());
} else {
- // The output directory is the master number in 3 digits format
- printf("### Submitting merging job for master <%03d>\n", i);
- runOutDir = Form("%s/%03d",fGridOutputDir.Data(), i);
+ if (!fRunNumbers.Length() && !fRunRange[0]) {
+ // The output directory is the grid outdir
+ printf("### Submitting merging job for the full output directory %s.\n", fGridOutputDir.Data());
+ runOutDir = fGridOutputDir;
+ } else {
+ // The output directory is the master number in 3 digits format
+ printf("### Submitting merging job for master <%03d>\n", i);
+ runOutDir = Form("%s/%03d",fGridOutputDir.Data(), i);
+ }
}
// Check now the number of merging stages.
TObjArray *list = fOutputFiles.Tokenize(",");
if (!fMergeExcludes.Contains(outputFile)) break;
}
delete list;
- Bool_t done = CheckMergedFiles(outputFile, runOutDir, fMaxMergeFiles, kTRUE, mergeJDLName);
- if (!done) return kFALSE;
+ Bool_t done = CheckMergedFiles(outputFile, runOutDir, fMaxMergeFiles, mergeJDLName);
+ if (!done && (i==ntosubmit-1)) return kFALSE;
+ if (!fRunNumbers.Length() && !fRunRange[0]) break;
}
if (!ntosubmit) return kTRUE;
- Info("StartAnalysis", "\n#### STARTING AN ALIEN SHELL FOR YOU. EXIT WHEN YOUR MERGING JOBS HAVE FINISHED. #### \
- \n You may exit at any time and terminate the job later using the option <terminate> but disabling SetMergeViaJDL\
- \n ##################################################################################");
+ Info("StartAnalysis", "\n #### STARTING AN ALIEN SHELL FOR YOU. You can exit any time or inspect your jobs in a different shell.##########\
+ \n Make sure your jobs are in a final state (you can resubmit failed ones via 'masterjob <id> resubmit ERROR_ALL')\
+ \n Rerun in 'terminate' mode to submit all merging stages, each AFTER the previous one completed. The final merged \
+ \n output will be written to your alien output directory, while separate stages in <Stage_n>. \
+ \n ################################################################################################################");
gSystem->Exec("aliensh");
return kTRUE;
}
TString func = fAnalysisMacro;
TString type = "ESD";
TString comment = "// Analysis using ";
- if (TObject::TestBit(AliAnalysisGrid::kUseESD)) comment += "ESD";
- if (TObject::TestBit(AliAnalysisGrid::kUseAOD)) {
- type = "AOD";
- comment += "AOD";
- }
+ if (IsUseMCchain()) {
+ type = "MC";
+ comment += "MC";
+ } else {
+ if (TObject::TestBit(AliAnalysisGrid::kUseESD)) comment += "ESD";
+ if (TObject::TestBit(AliAnalysisGrid::kUseAOD)) {
+ type = "AOD";
+ comment += "AOD";
+ }
+ }
if (type!="AOD" && fFriendChainName!="") {
Error("WriteAnalysisMacro", "Friend chain can be attached only to AOD");
return;
// Change temp directory to current one
out << "// Set temporary merging directory to current one" << endl;
out << " gSystem->Setenv(\"TMPDIR\", gSystem->pwd());" << endl << endl;
+ // Reset existing include path
+ out << "// Reset existing include path and add current directory first in the search" << endl;
+ out << " gSystem->SetIncludePath(\"-I.\");" << endl;
if (!fExecutableCommand.Contains("aliroot")) {
out << "// load base root libraries" << endl;
out << " gSystem->Load(\"libTree\");" << endl;
}
if (list) delete list;
}
- out << "// include path" << endl;
- if (fIncludePath.Length()) out << " gSystem->AddIncludePath(\"" << fIncludePath.Data() << "\");" << endl;
- out << " gSystem->AddIncludePath(\"-I$ALICE_ROOT/include\");" << endl << endl;
out << "// Load analysis framework libraries" << endl;
TString setupPar = "AliAnalysisAlien::SetupPar";
if (!fPackages) {
out << " if (!" << setupPar << "(\"" << obj->GetName() << "\")) return;" << endl;
}
}
+ out << "// include path" << endl;
+ // Get the include path from the interpreter and remove entries pointing to AliRoot
+ out << " TString intPath = gInterpreter->GetIncludePath();" << endl;
+ out << " TObjArray *listpaths = intPath.Tokenize(\" \");" << endl;
+ out << " TIter nextpath(listpaths);" << endl;
+ out << " TObjString *pname;" << endl;
+ out << " while ((pname=(TObjString*)nextpath())) {" << endl;
+ out << " TString current = pname->GetName();" << endl;
+ out << " if (current.Contains(\"AliRoot\") || current.Contains(\"ALICE_ROOT\")) continue;" << endl;
+ out << " gSystem->AddIncludePath(current);" << endl;
+ out << " }" << endl;
+ out << " if (listpaths) delete listpaths;" << endl;
+ if (fIncludePath.Length()) out << " gSystem->AddIncludePath(\"" << fIncludePath.Data() << "\");" << endl;
+ out << " gROOT->ProcessLine(\".include $ALICE_ROOT/include\");" << endl;
+ out << " printf(\"Include path: %s\\n\", gSystem->GetIncludePath());" << endl << endl;
if (fAdditionalLibs.Length()) {
out << "// Add aditional AliRoot libraries" << endl;
TObjArray *list = fAdditionalLibs.Tokenize(" ");
if (list) delete list;
}
out << endl;
+// out << " printf(\"Currently load libraries:\\n\");" << endl;
+// out << " printf(\"%s\\n\", gSystem->GetLibraries());" << endl;
if (fFastReadOption) {
Warning("WriteAnalysisMacro", "!!! You requested FastRead option. Using xrootd flags to reduce timeouts in the grid jobs. This may skip some files that could be accessed !!! \
\n+++ NOTE: To disable this option, use: plugin->SetFastReadOption(kFALSE)");
out << "// fast xrootd reading enabled" << endl;
out << " printf(\"!!! You requested FastRead option. Using xrootd flags to reduce timeouts. Note that this may skip some files that could be accessed !!!\");" << endl;
- out << " gEnv->SetValue(\"XNet.ConnectTimeout\",10);" << endl;
- out << " gEnv->SetValue(\"XNet.RequestTimeout\",10);" << endl;
+ out << " gEnv->SetValue(\"XNet.ConnectTimeout\",50);" << endl;
+ out << " gEnv->SetValue(\"XNet.RequestTimeout\",50);" << endl;
out << " gEnv->SetValue(\"XNet.MaxRedirectCount\",2);" << endl;
- out << " gEnv->SetValue(\"XNet.ReconnectTimeout\",10);" << endl;
+ out << " gEnv->SetValue(\"XNet.ReconnectTimeout\",50);" << endl;
out << " gEnv->SetValue(\"XNet.FirstConnectMaxCnt\",1);" << endl << endl;
}
out << "// connect to AliEn and make the chain" << endl;
out << "// Create a chain using url's from xml file" << endl;
out << " TString filename;" << endl;
out << " Int_t run = 0;" << endl;
- out << " TString treename = type;" << endl;
- out << " treename.ToLower();" << endl;
- out << " treename += \"Tree\";" << endl;
+ if (IsUseMCchain()) {
+ out << " TString treename = \"TE\";" << endl;
+ } else {
+ out << " TString treename = type;" << endl;
+ out << " treename.ToLower();" << endl;
+ out << " treename += \"Tree\";" << endl;
+ }
out << " printf(\"***************************************\\n\");" << endl;
out << " printf(\" Getting chain of trees %s\\n\", treename.Data());" << endl;
out << " printf(\"***************************************\\n\");" << endl;
out << "// Compile the package and set it up." << endl;
out << " TString pkgdir = package;" << endl;
out << " pkgdir.ReplaceAll(\".par\",\"\");" << endl;
- out << " gSystem->Exec(Form(\"tar xvzf %s.par\", pkgdir.Data()));" << endl;
+ out << " gSystem->Exec(TString::Format(\"tar xvzf %s.par\", pkgdir.Data()));" << endl;
out << " TString cdir = gSystem->WorkingDirectory();" << endl;
out << " gSystem->ChangeDirectory(pkgdir);" << endl;
out << " // Check for BUILD.sh and execute" << endl;
TString func = mergingMacro;
TString comment;
func.ReplaceAll(".C", "");
- out << "void " << func.Data() << "(const char *dir, Int_t stage=0, Int_t ichunk=0)" << endl;
+ out << "void " << func.Data() << "(const char *dir, Int_t stage=0)" << endl;
out << "{" << endl;
out << "// Automatically generated merging macro executed in grid subjobs" << endl << endl;
out << " TStopwatch timer;" << endl;
out << " timer.Start();" << endl << endl;
+ // Reset existing include path
+ out << "// Reset existing include path and add current directory first in the search" << endl;
+ out << " gSystem->SetIncludePath(\"-I.\");" << endl;
if (!fExecutableCommand.Contains("aliroot")) {
out << "// load base root libraries" << endl;
out << " gSystem->Load(\"libTree\");" << endl;
}
if (list) delete list;
}
- out << "// include path" << endl;
- if (fIncludePath.Length()) out << " gSystem->AddIncludePath(\"" << fIncludePath.Data() << "\");" << endl;
- out << " gSystem->AddIncludePath(\"-I$ALICE_ROOT/include\");" << endl << endl;
out << "// Load analysis framework libraries" << endl;
if (!fPackages) {
if (!fExecutableCommand.Contains("aliroot")) {
out << " if (!" << setupPar << "(\"" << obj->GetName() << "\")) return;" << endl;
}
}
+ out << "// include path" << endl;
+ // Get the include path from the interpreter and remove entries pointing to AliRoot
+ out << " TString intPath = gInterpreter->GetIncludePath();" << endl;
+ out << " TObjArray *listpaths = intPath.Tokenize(\" \");" << endl;
+ out << " TIter nextpath(listpaths);" << endl;
+ out << " TObjString *pname;" << endl;
+ out << " while ((pname=(TObjString*)nextpath())) {" << endl;
+ out << " TString current = pname->GetName();" << endl;
+ out << " if (current.Contains(\"AliRoot\") || current.Contains(\"ALICE_ROOT\")) continue;" << endl;
+ out << " gSystem->AddIncludePath(current);" << endl;
+ out << " }" << endl;
+ out << " if (listpaths) delete listpaths;" << endl;
+ if (fIncludePath.Length()) out << " gSystem->AddIncludePath(\"" << fIncludePath.Data() << "\");" << endl;
+ out << " gROOT->ProcessLine(\".include $ALICE_ROOT/include\");" << endl;
+ out << " printf(\"Include path: %s\\n\", gSystem->GetIncludePath());" << endl << endl;
if (fAdditionalLibs.Length()) {
out << "// Add aditional AliRoot libraries" << endl;
TObjArray *list = fAdditionalLibs.Tokenize(" ");
Warning("WriteMergingMacro", "!!! You requested FastRead option. Using xrootd flags to reduce timeouts in the grid merging jobs. Note that this may skip some files that could be accessed !!!");
out << "// fast xrootd reading enabled" << endl;
out << " printf(\"!!! You requested FastRead option. Using xrootd flags to reduce timeouts. Note that this may skip some files that could be accessed !!!\");" << endl;
- out << " gEnv->SetValue(\"XNet.ConnectTimeout\",10);" << endl;
- out << " gEnv->SetValue(\"XNet.RequestTimeout\",10);" << endl;
+ out << " gEnv->SetValue(\"XNet.ConnectTimeout\",50);" << endl;
+ out << " gEnv->SetValue(\"XNet.RequestTimeout\",50);" << endl;
out << " gEnv->SetValue(\"XNet.MaxRedirectCount\",2);" << endl;
- out << " gEnv->SetValue(\"XNet.ReconnectTimeout\",10);" << endl;
+ out << " gEnv->SetValue(\"XNet.ReconnectTimeout\",50);" << endl;
out << " gEnv->SetValue(\"XNet.FirstConnectMaxCnt\",1);" << endl << endl;
}
// Change temp directory to current one
out << " gSystem->Setenv(\"TMPDIR\", gSystem->pwd());" << endl << endl;
out << "// Connect to AliEn" << endl;
out << " if (!TGrid::Connect(\"alien://\")) return;" << endl;
- out << " Bool_t laststage = kFALSE;" << endl;
out << " TString outputDir = dir;" << endl;
out << " TString outputFiles = \"" << GetListOfFiles("out") << "\";" << endl;
out << " TString mergeExcludes = \"" << fMergeExcludes << "\";" << endl;
out << " continue;" << endl;
out << " }" << endl;
out << " if (mergeExcludes.Contains(outputFile.Data())) continue;" << endl;
- out << " merged = AliAnalysisAlien::MergeOutput(outputFile, outputDir, " << fMaxMergeFiles << ", stage, ichunk);" << endl;
+ out << " merged = AliAnalysisAlien::MergeOutput(outputFile, outputDir, " << fMaxMergeFiles << ", stage);" << endl;
out << " if (!merged) {" << endl;
out << " printf(\"ERROR: Cannot merge %s\\n\", outputFile.Data());" << endl;
out << " return;" << endl;
out << " }" << endl;
- out << " // Check if this was the last stage. If yes, run terminate for the tasks." << endl;
- out << " if (!gSystem->AccessPathName(outputFile)) laststage = kTRUE;" << endl;
out << " }" << endl;
out << " // all outputs merged, validate" << endl;
out << " ofstream out;" << endl;
out << " // read the analysis manager from file" << endl;
TString analysisFile = fExecutable;
analysisFile.ReplaceAll(".sh", ".root");
- out << " if (!laststage) return;" << endl;
+ out << " if (!outputDir.Contains(\"Stage\")) return;" << endl;
out << " TFile *file = TFile::Open(\"" << analysisFile << "\");" << endl;
out << " if (!file) return;" << endl;
out << " TIter nextkey(file->GetListOfKeys());" << endl;
out << " ::Error(\"" << func.Data() << "\", \"No analysis manager found in file" << analysisFile <<"\");" << endl;
out << " return;" << endl;
out << " }" << endl << endl;
+ out << " mgr->SetRunFromPath(mgr->GetRunFromAlienPath(dir));" << endl;
out << " mgr->SetSkipTerminate(kFALSE);" << endl;
out << " mgr->PrintStatus();" << endl;
if (AliAnalysisManager::GetAnalysisManager()) {
out << "// Compile the package and set it up." << endl;
out << " TString pkgdir = package;" << endl;
out << " pkgdir.ReplaceAll(\".par\",\"\");" << endl;
- out << " gSystem->Exec(Form(\"tar xvzf %s.par\", pkgdir.Data()));" << endl;
+ out << " gSystem->Exec(TString::Format(\"tar xvzf %s.par\", pkgdir.Data()));" << endl;
out << " TString cdir = gSystem->WorkingDirectory();" << endl;
out << " gSystem->ChangeDirectory(pkgdir);" << endl;
out << " // Check for BUILD.sh and execute" << endl;
// LD_LIBRARY_PATH
TString pkgdir = package;
pkgdir.ReplaceAll(".par","");
- gSystem->Exec(Form("tar xvzf %s.par", pkgdir.Data()));
+ gSystem->Exec(TString::Format("tar xzf %s.par", pkgdir.Data()));
TString cdir = gSystem->WorkingDirectory();
gSystem->ChangeDirectory(pkgdir);
// Check for BUILD.sh and execute
return;
}
out << "#!/bin/bash" << endl;
+ // Make sure we can properly compile par files
+ out << "export LD_LIBRARY_PATH=.:$LD_LIBRARY_PATH" << endl;
out << "echo \"=========================================\"" << endl;
out << "echo \"############## PATH : ##############\"" << endl;
out << "echo $PATH" << endl;
out << "echo \"############## memory : ##############\"" << endl;
out << "free -m" << endl;
out << "echo \"=========================================\"" << endl << endl;
- // Make sure we can properly compile par files
- if (TObject::TestBit(AliAnalysisGrid::kUsePars)) out << "export LD_LIBRARY_PATH=.:$LD_LIBRARY_PATH" << endl;
out << fExecutableCommand << " ";
out << fAnalysisMacro.Data() << " " << fExecutableArgs.Data() << endl << endl;
out << "echo \"======== " << fAnalysisMacro.Data() << " finished with exit code: $? ========\"" << endl;
return;
}
out << "#!/bin/bash" << endl;
+ // Make sure we can properly compile par files
+ out << "export LD_LIBRARY_PATH=.:$LD_LIBRARY_PATH" << endl;
out << "echo \"=========================================\"" << endl;
out << "echo \"############## PATH : ##############\"" << endl;
out << "echo $PATH" << endl;
out << "echo \"############## memory : ##############\"" << endl;
out << "free -m" << endl;
out << "echo \"=========================================\"" << endl << endl;
- // Make sure we can properly compile par files
- if (TObject::TestBit(AliAnalysisGrid::kUsePars)) out << "export LD_LIBRARY_PATH=.:$LD_LIBRARY_PATH" << endl;
TString mergeMacro = fExecutable;
mergeMacro.ReplaceAll(".sh", "_merge.C");
if (IsOneStageMerging())
out << "export ARG=\"" << mergeMacro << "(\\\"$1\\\")\"" << endl;
else
- out << "export ARG=\"" << mergeMacro << "(\\\"$1\\\",$2,$3)\"" << endl;
+ out << "export ARG=\"" << mergeMacro << "(\\\"$1\\\",$2)\"" << endl;
out << fExecutableCommand << " " << "$ARG" << endl;
out << "echo \"======== " << mergeMacro.Data() << " finished with exit code: $? ========\"" << endl;
out << "echo \"############## memory after: ##############\"" << endl;
TObjArray *arr = outputFiles.Tokenize(",");
TIter next1(arr);
TString outputFile;
- while ((os=(TObjString*)next1())) {
+ while (!merge && (os=(TObjString*)next1())) {
+ // No need to validate outputs produced by merging since the merging macro does this
outputFile = os->GetString();
Int_t index = outputFile.Index("@");
if (index > 0) outputFile.Remove(index);
- if (!merge && fTerminateFiles.Contains(outputFile)) continue;
- if (merge && fMergeExcludes.Contains(outputFile)) continue;
+ if (fTerminateFiles.Contains(outputFile)) continue;
if (outputFile.Contains("*")) continue;
out << "if ! [ -f " << outputFile.Data() << " ] ; then" << endl;
out << " error=1" << endl;