#include "TGridResult.h"
#include "TGridCollection.h"
#include "TGridJDL.h"
+#include "TGridJobStatusList.h"
+#include "TGridJobStatus.h"
#include "TFileMerger.h"
#include "AliAnalysisManager.h"
#include "AliVEventHandler.h"
fMaxInitFailed(0),
fMasterResubmitThreshold(0),
fNtestFiles(0),
+ fNrunsPerMaster(0),
+ fMaxMergeFiles(0),
+ fNsubmitted(0),
fRunNumbers(),
fExecutable(),
fArguments(),
fMergeExcludes(),
fIncludePath(),
fCloseSE(),
+ fFriendChainName(),
fInputFiles(0),
fPackages(0)
{
fMaxInitFailed(0),
fMasterResubmitThreshold(0),
fNtestFiles(0),
+ fNrunsPerMaster(0),
+ fMaxMergeFiles(0),
+ fNsubmitted(0),
fRunNumbers(),
fExecutable(),
fArguments(),
fMergeExcludes(),
fIncludePath(),
fCloseSE(),
+ fFriendChainName(),
fInputFiles(0),
fPackages(0)
{
fMaxInitFailed(other.fMaxInitFailed),
fMasterResubmitThreshold(other.fMasterResubmitThreshold),
fNtestFiles(other.fNtestFiles),
+ fNrunsPerMaster(other.fNrunsPerMaster),
+ fMaxMergeFiles(other.fMaxMergeFiles),
+ fNsubmitted(other.fNsubmitted),
fRunNumbers(other.fRunNumbers),
fExecutable(other.fExecutable),
fArguments(other.fArguments),
fMergeExcludes(other.fMergeExcludes),
fIncludePath(other.fIncludePath),
fCloseSE(other.fCloseSE),
+ fFriendChainName(other.fFriendChainName),
fInputFiles(0),
fPackages(0)
{
// Copy ctor.
fGridJDL = (TGridJDL*)gROOT->ProcessLine("new TAlienJDL()");
+ fRunRange[0] = other.fRunRange[0];
+ fRunRange[1] = other.fRunRange[1];
if (other.fInputFiles) {
fInputFiles = new TObjArray();
TIter next(other.fInputFiles);
fMergeExcludes = other.fMergeExcludes;
fIncludePath = other.fIncludePath;
fCloseSE = other.fCloseSE;
+ fFriendChainName = other.fFriendChainName;
if (other.fInputFiles) {
fInputFiles = new TObjArray();
TIter next(other.fInputFiles);
Bool_t AliAnalysisAlien::CheckInputData()
{
// Check validity of input data. If necessary, create xml files.
- if (!fInputFiles && !fRunNumbers.Length()) {
- Error("CheckInputData", "You have to specify either a set of run numbers or some existing grid files. Use AddRunNumber()/AddDataFile().");
- return kFALSE;
+ if (!fInputFiles && !fRunNumbers.Length() && !fRunRange[0]) {
+ if (!fGridDataDir.Length()) {
+ Error("CkeckInputData", "AliEn path to base data directory must be set.\n = Use: SetGridDataDir()");
+ return kFALSE;
+ }
+ Info("CheckInputData", "Analysis will make a single xml for base data directory %s",fGridDataDir.Data());
+ return kTRUE;
}
// Process declared files
Bool_t is_collection = kFALSE;
}
}
// Process requested run numbers
- if (!fRunNumbers.Length()) return kTRUE;
+ if (!fRunNumbers.Length() && !fRunRange[0]) return kTRUE;
// Check validity of alien data directory
if (!fGridDataDir.Length()) {
Error("CkeckInputData", "AliEn path to base data directory must be set.\n = Use: SetGridDataDir()");
// Check validity of run number(s)
TObjArray *arr;
TObjString *os;
+ Int_t nruns = 0;
+ TString schunk;
TString path;
if (!checked) {
checked = kTRUE;
return kFALSE;
}
if (fRunNumbers.Length()) {
+ Info("CheckDataType", "Using supplied run numbers (run ranges are ignored)");
arr = fRunNumbers.Tokenize(" ");
TIter next(arr);
while ((os=(TObjString*)next())) {
path = Form("%s/%s ", fGridDataDir.Data(), os->GetString().Data());
if (!gGrid->Cd(path)) {
- Error("CheckInputData", "Run number %s not found in path: %s", os->GetString().Data(), path.Data());
- return kFALSE;
+ Warning("CheckInputData", "Run number %s not found in path: %s", os->GetString().Data(), path.Data());
+ continue;
}
path = Form("%s/%s.xml", workdir.Data(),os->GetString().Data());
TString msg = "\n##### file: ";
if (use_tags) msg += " using_tags: Yes";
else msg += " using_tags: No";
Info("CheckDataType", msg.Data());
- AddDataFile(path);
+ if (fNrunsPerMaster<2) {
+ AddDataFile(Form("%s.xml", os->GetString().Data()));
+ } else {
+ nruns++;
+ if (((nruns-1)%fNrunsPerMaster) == 0) {
+ schunk = os->GetString();
+ }
+ if ((nruns%fNrunsPerMaster)!=0 && os!=arr->Last()) continue;
+ schunk += Form("_%s.xml", os->GetString().Data());
+ AddDataFile(schunk);
+ }
}
delete arr;
+ } else {
+ Info("CheckDataType", "Using run range [%d, %d]", fRunRange[0], fRunRange[1]);
+ for (Int_t irun=fRunRange[0]; irun<=fRunRange[1]; irun++) {
+ path = Form("%s/%d ", fGridDataDir.Data(), irun);
+ if (!gGrid->Cd(path)) {
+ Warning("CheckInputData", "Run number %d not found in path: %s", irun, path.Data());
+ continue;
+ }
+ path = Form("%s/%d.xml", workdir.Data(),irun);
+ TString msg = "\n##### file: ";
+ msg += path;
+ msg += " type: xml_collection;";
+ if (use_tags) msg += " using_tags: Yes";
+ else msg += " using_tags: No";
+ Info("CheckDataType", msg.Data());
+ if (fNrunsPerMaster<2) {
+ AddDataFile(Form("%d.xml",irun));
+ } else {
+ nruns++;
+ if (((nruns-1)%fNrunsPerMaster) == 0) {
+ schunk = Form("%d", irun);
+ }
+ if ((nruns%fNrunsPerMaster)!=0 && irun != fRunRange[1]) continue;
+ schunk += Form("_%d.xml", irun);
+ AddDataFile(schunk);
+ }
+ }
}
return kTRUE;
}
TString file;
TString path;
- if (!fRunNumbers.Length()) return kTRUE;
- // Several runs
- TObjArray *arr = fRunNumbers.Tokenize(" ");
- TObjString *os;
- TIter next(arr);
- while ((os=(TObjString*)next())) {
- path = Form("%s/%s ", fGridDataDir.Data(), os->GetString().Data());
- if (TestBit(AliAnalysisGrid::kTest)) file = "wn.xml";
- else file = Form("%s.xml", os->GetString().Data());
- if (FileExists(file) && !TestBit(AliAnalysisGrid::kTest)) {
- Info("CreateDataset", "\n##### Removing previous dataset %s", file.Data());
- gGrid->Rm(file);
- }
- command = "find ";
- command += options;
- command += path;
- command += pattern;
-// conditions = Form(" > %s", file.Data());
- command += conditions;
- TGridResult *res = gGrid->Command(command);
- if (res) delete res;
- // Write standard output to file
- gROOT->ProcessLine(Form("gGrid->Stdout(); > %s", file.Data()));
- if (TestBit(AliAnalysisGrid::kTest)) break;
- // Copy xml file to alien space
- TFile::Cp(Form("file:%s",file.Data()), Form("alien://%s/%s",workdir.Data(), file.Data()));
- if (!FileExists(file)) {
- Error("CreateDataset", "Command %s did NOT succeed", command.Data());
- delete arr;
+ Int_t nruns = 0;
+ TString schunk;
+ TGridCollection *cbase=0, *cadd=0;
+ if (!fRunNumbers.Length() && !fRunRange[0]) {
+ if (fInputFiles && fInputFiles->GetEntries()) return kTRUE;
+ // Make a single data collection from data directory.
+ path = fGridDataDir;
+ if (!gGrid->Cd(path)) {
+ Error("CreateDataset", "Path to data directory %s not valid",fGridDataDir.Data());
return kFALSE;
+ }
+ CdWork();
+ if (TestBit(AliAnalysisGrid::kTest)) file = "wn.xml";
+ else file = Form("%s.xml", gSystem->BaseName(path));
+ if (gSystem->AccessPathName(file)) {
+ command = "find ";
+ command += options;
+ command += path;
+ command += " ";
+ command += pattern;
+ command += conditions;
+ printf("command: %s\n", command.Data());
+ TGridResult *res = gGrid->Command(command);
+ if (res) delete res;
+ // Write standard output to file
+ gROOT->ProcessLine(Form("gGrid->Stdout(); > %s", file.Data()));
+ }
+ if (!TestBit(AliAnalysisGrid::kTest) && !FileExists(file)) {
+ // Copy xml file to alien space
+ TFile::Cp(Form("file:%s",file.Data()), Form("alien://%s/%s",workdir.Data(), file.Data()));
+ if (!FileExists(file)) {
+ Error("CreateDataset", "Command %s did NOT succeed", command.Data());
+ return kFALSE;
+ }
+ // Update list of files to be processed.
}
+ AddDataFile(Form("%s/%s", workdir.Data(), file.Data()));
+ return kTRUE;
}
- delete arr;
+ // Several runs
+ if (fRunNumbers.Length()) {
+ TObjArray *arr = fRunNumbers.Tokenize(" ");
+ TObjString *os;
+ TIter next(arr);
+ while ((os=(TObjString*)next())) {
+ path = Form("%s/%s ", fGridDataDir.Data(), os->GetString().Data());
+ if (!gGrid->Cd(path)) continue;
+ CdWork();
+ if (TestBit(AliAnalysisGrid::kTest)) file = "wn.xml";
+ else file = Form("%s.xml", os->GetString().Data());
+ // If local collection file does not exist, create it via 'find' command.
+ if (gSystem->AccessPathName(file)) {
+ command = "find ";
+ command += options;
+ command += path;
+ command += pattern;
+ command += conditions;
+ TGridResult *res = gGrid->Command(command);
+ if (res) delete res;
+ // Write standard output to file
+ gROOT->ProcessLine(Form("gGrid->Stdout(); > %s", file.Data()));
+ }
+ if (TestBit(AliAnalysisGrid::kTest)) break;
+ // Check if there is one run per master job.
+ if (fNrunsPerMaster<2) {
+ if (FileExists(file)) {
+ Info("CreateDataset", "\n##### Dataset %s exist. Skipping creation...", file.Data());
+ continue;
+ }
+ // Copy xml file to alien space
+ TFile::Cp(Form("file:%s",file.Data()), Form("alien://%s/%s",workdir.Data(), file.Data()));
+ if (!FileExists(file)) {
+ Error("CreateDataset", "Command %s did NOT succeed", command.Data());
+ delete arr;
+ return kFALSE;
+ }
+ } else {
+ nruns++;
+ if (((nruns-1)%fNrunsPerMaster) == 0) {
+ schunk = os->GetString();
+ cbase = (TGridCollection*)gROOT->ProcessLine(Form("new TAlienCollection(\"%s\", 1000000);",file.Data()));
+ } else {
+ cadd = (TGridCollection*)gROOT->ProcessLine(Form("new TAlienCollection(\"%s\", 1000000);",file.Data()));
+ printf(" Merging collection <%s> into masterjob input...\n", file.Data());
+ cbase->Add(cadd);
+ delete cadd;
+ }
+ if ((nruns%fNrunsPerMaster)!=0 && os!=arr->Last()) {
+ continue;
+ }
+ schunk += Form("_%s.xml", os->GetString().Data());
+ if (FileExists(schunk)) {
+ Info("CreateDataset", "\n##### Dataset %s exist. Skipping creation...", schunk.Data());
+ continue;
+ }
+ printf("Exporting merged collection <%s> and copying to AliEn.\n", schunk.Data());
+ cbase->ExportXML(Form("file://%s", schunk.Data()),kFALSE,kFALSE, schunk, "Merged runs");
+ TFile::Cp(Form("file:%s",file.Data()), Form("alien://%s/%s",workdir.Data(), file.Data()));
+ if (!FileExists(schunk)) {
+ Error("CreateDataset", "Copy command did NOT succeed for %s", schunk.Data());
+ delete arr;
+ return kFALSE;
+ }
+ }
+ }
+ delete arr;
+ } else {
+ // Process a full run range.
+ for (Int_t irun=fRunRange[0]; irun<=fRunRange[1]; irun++) {
+ path = Form("%s/%d ", fGridDataDir.Data(), irun);
+ if (!gGrid->Cd(path)) continue;
+ CdWork();
+ if (TestBit(AliAnalysisGrid::kTest)) file = "wn.xml";
+ else file = Form("%d.xml", irun);
+ if (FileExists(file) && fNrunsPerMaster<2 && !TestBit(AliAnalysisGrid::kTest)) {
+ Info("CreateDataset", "\n##### Dataset %s exist. Skipping creation...", file.Data());
+// gGrid->Rm(file);
+ continue;
+ }
+ // If local collection file does not exist, create it via 'find' command.
+ if (gSystem->AccessPathName(file)) {
+ command = "find ";
+ command += options;
+ command += path;
+ command += pattern;
+ command += conditions;
+ TGridResult *res = gGrid->Command(command);
+ if (res) delete res;
+ // Write standard output to file
+ gROOT->ProcessLine(Form("gGrid->Stdout(); > %s", file.Data()));
+ }
+ if (TestBit(AliAnalysisGrid::kTest)) break;
+ // Check if there is one run per master job.
+ if (fNrunsPerMaster<2) {
+ if (FileExists(file)) {
+ Info("CreateDataset", "\n##### Dataset %s exist. Skipping creation...", file.Data());
+ continue;
+ }
+ // Copy xml file to alien space
+ TFile::Cp(Form("file:%s",file.Data()), Form("alien://%s/%s",workdir.Data(), file.Data()));
+ if (!FileExists(file)) {
+ Error("CreateDataset", "Command %s did NOT succeed", command.Data());
+ return kFALSE;
+ }
+ } else {
+ nruns++;
+ // Check if the collection for the chunk exist locally.
+ Int_t nchunk = (nruns-1)/fNrunsPerMaster;
+ if (FileExists(fInputFiles->At(nchunk)->GetName())) continue;
+ printf(" Merging collection <%s> into %d runs chunk...\n",file.Data(),fNrunsPerMaster);
+ if (((nruns-1)%fNrunsPerMaster) == 0) {
+ schunk = Form("%d", irun);
+ cbase = (TGridCollection*)gROOT->ProcessLine(Form("new TAlienCollection(\"%s\", 1000000);",file.Data()));
+ } else {
+ cadd = (TGridCollection*)gROOT->ProcessLine(Form("new TAlienCollection(\"%s\", 1000000);",file.Data()));
+ cbase->Add(cadd);
+ delete cadd;
+ }
+ if ((nruns%fNrunsPerMaster)!=0 && irun!=fRunRange[1]) {
+ continue;
+ }
+ schunk += Form("_%d.xml", irun);
+ if (FileExists(schunk)) {
+ Info("CreateDataset", "\n##### Dataset %s exist. Skipping creation...", schunk.Data());
+ continue;
+ }
+ printf("Exporting merged collection <%s> and copying to AliEn.\n", schunk.Data());
+ cbase->ExportXML(Form("file://%s", schunk.Data()),kFALSE,kFALSE, schunk, "Merged runs");
+ if (FileExists(schunk)) {
+ Info("CreateDataset", "\n##### Dataset %s exist. Skipping copy...", schunk.Data());
+ continue;
+ }
+ TFile::Cp(Form("file:%s",schunk.Data()), Form("alien://%s/%s",workdir.Data(), schunk.Data()));
+ if (!FileExists(schunk)) {
+ Error("CreateDataset", "Copy command did NOT succeed for %s", schunk.Data());
+ return kFALSE;
+ }
+ }
+ }
+ }
return kTRUE;
}
Error("CreateJDL", "You must define AliEn output directory");
error = kTRUE;
} else {
+ if (!fGridOutputDir.Contains("/")) fGridOutputDir = Form("%s/%s", workdir.Data(), fGridOutputDir.Data());
if (!gGrid->Cd(fGridOutputDir)) {
if (gGrid->Mkdir(fGridOutputDir)) {
Info("CreateJDL", "\n##### Created alien output directory %s", fGridOutputDir.Data());
fGridJDL->AddToPackages("APISCONFIG", fAPIVersion);
fGridJDL->SetInputDataListFormat(fInputFormat);
fGridJDL->SetInputDataList("wn.xml");
- if (fInputFiles) {
- TIter next(fInputFiles);
- while ((os=(TObjString*)next()))
- fGridJDL->AddToInputDataCollection(Form("LF:%s,nodownload", os->GetString().Data()));
- }
fGridJDL->AddToInputSandbox(Form("LF:%s/%s", workdir.Data(), fAnalysisMacro.Data()));
fGridJDL->AddToInputSandbox(Form("LF:%s/analysis.root", workdir.Data()));
if (IsUsingTags() && !gSystem->AccessPathName("ConfigureCuts.C"))
fGridJDL->AddToOutputArchive(os->GetString());
delete arr;
}
- fGridJDL->SetOutputDirectory(Form("%s/%s/#alien_counter_03i#", workdir.Data(), fGridOutputDir.Data()));
arr = fOutputFiles.Tokenize(" ");
TIter next(arr);
while ((os=(TObjString*)next())) {
fGridJDL->SetValue("Price", Form("\"%d\"", fPrice));
fGridJDL->SetValidationCommand(Form("%s/validate.sh", workdir.Data()));
if (fMasterResubmitThreshold) fGridJDL->SetValue("MasterResubmitThreshold", Form("\"%d%%\"", fMasterResubmitThreshold));
- // Generate the JDL as a string
- TString sjdl = fGridJDL->Generate();
- Int_t index;
- index = sjdl.Index("Executable");
- if (index >= 0) sjdl.Insert(index, "\n# This is the startup script\n");
- index = sjdl.Index("Split ");
- if (index >= 0) sjdl.Insert(index, "\n# We split per storage element\n");
- index = sjdl.Index("SplitMaxInputFileNumber");
- if (index >= 0) sjdl.Insert(index, "\n# We want each subjob to get maximum this number of input files\n");
- index = sjdl.Index("InputDataCollection");
- if (index >= 0) sjdl.Insert(index, "# Input xml collections\n");
- index = sjdl.Index("InputFile");
- if (index >= 0) sjdl.Insert(index, "\n# List of input files to be uploaded to wn's\n");
- index = sjdl.Index("InputDataList ");
- if (index >= 0) sjdl.Insert(index, "\n# Collection to be processed on wn\n");
- index = sjdl.Index("InputDataListFormat");
- if (index >= 0) sjdl.Insert(index, "\n# Format of input data\n");
- index = sjdl.Index("Price");
- if (index >= 0) sjdl.Insert(index, "\n# AliEn price for this job\n");
- index = sjdl.Index("Requirements");
- if (index >= 0) sjdl.Insert(index, "\n# Additional requirements for the computing element\n");
- index = sjdl.Index("Packages");
- if (index >= 0) sjdl.Insert(index, "\n# Packages to be used\n");
- index = sjdl.Index("User");
- if (index >= 0) sjdl.Insert(index, "\n# AliEn user\n");
- index = sjdl.Index("TTL");
- if (index >= 0) sjdl.Insert(index, "\n# Time to live for the job\n");
- index = sjdl.Index("OutputFile");
- if (index >= 0) sjdl.Insert(index, "\n# List of output files to be registered\n");
- index = sjdl.Index("OutputDir");
- if (index >= 0) sjdl.Insert(index, "\n# Output directory\n");
- index = sjdl.Index("OutputArchive");
- if (index >= 0) sjdl.Insert(index, "\n# Files to be archived\n");
- index = sjdl.Index("MaxInitFailed");
- if (index >= 0) sjdl.Insert(index, "\n# Maximum number of first failing jobs to abort the master job\n");
- index = sjdl.Index("MasterResubmitThreshold");
- if (index >= 0) sjdl.Insert(index, "\n# Resubmit failed jobs until DONE rate reaches this percentage\n");
- sjdl.ReplaceAll("ValidationCommand", "Validationcommand");
- index = sjdl.Index("Validationcommand");
- if (index >= 0) sjdl.Insert(index, "\n# Validation script to be run for each subjob\n");
- sjdl.ReplaceAll("\"LF:", "\n \"LF:");
- sjdl.ReplaceAll("(member", "\n (member");
- sjdl.ReplaceAll("\",\"VO_", "\",\n \"VO_");
- sjdl.ReplaceAll("{", "{\n ");
- sjdl.ReplaceAll("};", "\n};");
- sjdl.ReplaceAll("{\n \n", "{\n");
- sjdl.ReplaceAll("\n\n", "\n");
- sjdl.ReplaceAll("OutputDirectory", "OutputDir");
- sjdl += "JDLVariables = \n{\n \"Packages\",\n \"OutputDir\"\n};\n";
- sjdl.Prepend("JobTag = \"Automatically generated analysis JDL\";\n");
- index = sjdl.Index("JDLVariables");
- if (index >= 0) sjdl.Insert(index, "\n# JDL variables\n");
- // Write jdl to file
- ofstream out;
- out.open(fJDLName.Data(), ios::out);
- if (out.bad()) {
- Error("CreateJDL", "Bad file name: %s", fJDLName.Data());
- return kFALSE;
- }
- out << sjdl << endl;
+ // Write a jdl with 2 input parameters: collection name and output dir name.
+ WriteJDL(copy);
}
// Copy jdl to grid workspace
- if (!copy) {
- Info("CreateJDL", "\n##### You may want to review jdl:%s and analysis macro:%s before running in <submit> mode", fJDLName.Data(), fAnalysisMacro.Data());
- } else {
- Info("CreateJDL", "\n##### Copying JDL file <%s> to your AliEn working space", fJDLName.Data());
- if (FileExists(fJDLName)) gGrid->Rm(fJDLName);
- TFile::Cp(Form("file:%s",fJDLName.Data()), Form("alien://%s/%s", workdir.Data(), fJDLName.Data()));
+ if (copy) {
if (fAdditionalLibs.Length()) {
arr = fAdditionalLibs.Tokenize(" ");
TObjString *os;
return kTRUE;
}
+//______________________________________________________________________________
+Bool_t AliAnalysisAlien::WriteJDL(Bool_t copy)
+{
+// Writes one or more JDL's corresponding to findex. If findex is negative,
+// all run numbers are considered in one go (jdl). For non-negative indices
+// they correspond to the indices in the array fInputFiles.
+ if (!fInputFiles) return kFALSE;
+ TObjString *os;
+ TString workdir = gGrid->GetHomeDirectory();
+ workdir += fGridWorkingDir;
+
+ if (!fRunNumbers.Length() && !fRunRange[0]) {
+ // One jdl with no parameters in case input data is specified by name.
+ TIter next(fInputFiles);
+ while ((os=(TObjString*)next()))
+ fGridJDL->AddToInputDataCollection(Form("LF:%s,nodownload", os->GetString().Data()));
+ fGridJDL->SetOutputDirectory(Form("%s/#alien_counter_03i#", fGridOutputDir.Data()));
+ } else {
+ // One jdl to be submitted with 2 input parameters: data collection name and output dir prefix
+ fGridJDL->AddToInputDataCollection(Form("LF:%s/$1,nodownload", workdir.Data()));
+ fGridJDL->SetOutputDirectory(Form("%s/$2#alien_counter_03i#", fGridOutputDir.Data()));
+ }
+
+
+ // Generate the JDL as a string
+ TString sjdl = fGridJDL->Generate();
+ Int_t index;
+ index = sjdl.Index("Executable");
+ if (index >= 0) sjdl.Insert(index, "\n# This is the startup script\n");
+ index = sjdl.Index("Split ");
+ if (index >= 0) sjdl.Insert(index, "\n# We split per storage element\n");
+ index = sjdl.Index("SplitMaxInputFileNumber");
+ if (index >= 0) sjdl.Insert(index, "\n# We want each subjob to get maximum this number of input files\n");
+ index = sjdl.Index("InputDataCollection");
+ if (index >= 0) sjdl.Insert(index, "# Input xml collections\n");
+ index = sjdl.Index("InputFile");
+ if (index >= 0) sjdl.Insert(index, "\n# List of input files to be uploaded to wn's\n");
+ index = sjdl.Index("InputDataList ");
+ if (index >= 0) sjdl.Insert(index, "\n# Collection to be processed on wn\n");
+ index = sjdl.Index("InputDataListFormat");
+ if (index >= 0) sjdl.Insert(index, "\n# Format of input data\n");
+ index = sjdl.Index("Price");
+ if (index >= 0) sjdl.Insert(index, "\n# AliEn price for this job\n");
+ index = sjdl.Index("Requirements");
+ if (index >= 0) sjdl.Insert(index, "\n# Additional requirements for the computing element\n");
+ index = sjdl.Index("Packages");
+ if (index >= 0) sjdl.Insert(index, "\n# Packages to be used\n");
+ index = sjdl.Index("User =");
+ if (index >= 0) sjdl.Insert(index, "\n# AliEn user\n");
+ index = sjdl.Index("TTL");
+ if (index >= 0) sjdl.Insert(index, "\n# Time to live for the job\n");
+ index = sjdl.Index("OutputFile");
+ if (index >= 0) sjdl.Insert(index, "\n# List of output files to be registered\n");
+ index = sjdl.Index("OutputDir");
+ if (index >= 0) sjdl.Insert(index, "\n# Output directory\n");
+ index = sjdl.Index("OutputArchive");
+ if (index >= 0) sjdl.Insert(index, "\n# Files to be archived\n");
+ index = sjdl.Index("MaxInitFailed");
+ if (index >= 0) sjdl.Insert(index, "\n# Maximum number of first failing jobs to abort the master job\n");
+ index = sjdl.Index("MasterResubmitThreshold");
+ if (index >= 0) sjdl.Insert(index, "\n# Resubmit failed jobs until DONE rate reaches this percentage\n");
+ sjdl.ReplaceAll("ValidationCommand", "Validationcommand");
+ index = sjdl.Index("Validationcommand");
+ if (index >= 0) sjdl.Insert(index, "\n# Validation script to be run for each subjob\n");
+ sjdl.ReplaceAll("\"LF:", "\n \"LF:");
+ sjdl.ReplaceAll("(member", "\n (member");
+ sjdl.ReplaceAll("\",\"VO_", "\",\n \"VO_");
+ sjdl.ReplaceAll("{", "{\n ");
+ sjdl.ReplaceAll("};", "\n};");
+ sjdl.ReplaceAll("{\n \n", "{\n");
+ sjdl.ReplaceAll("\n\n", "\n");
+ sjdl.ReplaceAll("OutputDirectory", "OutputDir");
+ sjdl += "JDLVariables = \n{\n \"Packages\",\n \"OutputDir\"\n};\n";
+ sjdl.Prepend("JobTag = \"Automatically generated analysis JDL\";\n");
+ index = sjdl.Index("JDLVariables");
+ if (index >= 0) sjdl.Insert(index, "\n# JDL variables\n");
+ // Write jdl to file
+ ofstream out;
+ out.open(fJDLName.Data(), ios::out);
+ if (out.bad()) {
+ Error("CreateJDL", "Bad file name: %s", fJDLName.Data());
+ return kFALSE;
+ }
+ out << sjdl << endl;
+
+ // Copy jdl to grid workspace
+ if (!copy) {
+ Info("CreateJDL", "\n##### You may want to review jdl:%s and analysis macro:%s before running in <submit> mode", fJDLName.Data(), fAnalysisMacro.Data());
+ } else {
+ Info("CreateJDL", "\n##### Copying JDL file <%s> to your AliEn output directory", fJDLName.Data());
+ TString locjdl = Form("%s/%s", fGridOutputDir.Data(),fJDLName.Data());
+ if (FileExists(locjdl)) gGrid->Rm(locjdl);
+ TFile::Cp(Form("file:%s",fJDLName.Data()), Form("alien://%s/%s", fGridOutputDir.Data(), fJDLName.Data()));
+ }
+ return kTRUE;
+}
+
//______________________________________________________________________________
Bool_t AliAnalysisAlien::FileExists(const char *lfn) const
{
fPackages->Add(new TObjString(pkg));
}
+//______________________________________________________________________________
+const char *AliAnalysisAlien::GetJobStatus(Int_t jobidstart, Int_t lastid, Int_t &nrunning, Int_t &nwaiting, Int_t &nerror, Int_t &ndone)
+{
+// Get job status for all jobs with jobid>jobidstart.
+ static char mstatus[20];
+ mstatus[0] = '\0';
+ nrunning = 0;
+ nwaiting = 0;
+ nerror = 0;
+ ndone = 0;
+ TGridJobStatusList *list = gGrid->Ps("");
+ if (!list) return mstatus;
+ Int_t nentries = list->GetSize();
+ TGridJobStatus *status;
+ Int_t pid;
+ for (Int_t ijob=0; ijob<nentries; ijob++) {
+ status = (TGridJobStatus *)list->At(ijob);
+ pid = gROOT->ProcessLine(Form("atoi(((TAlienJobStatus*)0x%lx)->GetKey(\"queueId\"));", (ULong_t)status));
+ if (pid<jobidstart) continue;
+ if (pid == lastid) {
+ gROOT->ProcessLine(Form("sprintf((char*)0x%lx,((TAlienJobStatus*)0x%lx)->GetKey(\"status\"));",(ULong_t)mstatus, (ULong_t)status));
+ }
+ switch (status->GetStatus()) {
+ case TGridJobStatus::kWAITING:
+ nwaiting++; break;
+ case TGridJobStatus::kRUNNING:
+ nrunning++; break;
+ case TGridJobStatus::kABORTED:
+ case TGridJobStatus::kFAIL:
+ case TGridJobStatus::kUNKNOWN:
+ nerror++; break;
+ case TGridJobStatus::kDONE:
+ ndone++;
+ }
+ }
+ list->Delete();
+ delete list;
+ return mstatus;
+}
+
//______________________________________________________________________________
Bool_t AliAnalysisAlien::IsCollection(const char *lfn) const
{
return kFALSE;
}
+//______________________________________________________________________________
+void AliAnalysisAlien::Print(Option_t *) const
+{
+// Print current plugin settings.
+ printf("### AliEn analysis plugin current settings ###\n");
+ printf("= Version of API requested: ____________________ %s\n", fAPIVersion.Data());
+ printf("= Version of ROOT requested: ___________________ %s\n", fROOTVersion.Data());
+ printf("= Version of AliRoot requested: ________________ %s\n", fAliROOTVersion.Data());
+ if (fUser.Length())
+ printf("= User running the plugin: _____________________ %s\n", fUser.Data());
+ printf("= Grid workdir relative to user $HOME: _________ %s\n", fGridWorkingDir.Data());
+ printf("= Grid output directory relative to workdir: ___ %s\n", fGridOutputDir.Data());
+ printf("= Data base directory path requested: __________ %s\n", fGridDataDir.Data());
+ printf("= Data search pattern: _________________________ %s\n", fDataPattern.Data());
+ printf("= Input data format: ___________________________ %s\n", fInputFormat.Data());
+ if (fRunNumbers.Length())
+ printf("= Run numbers to be processed: _________________ %s\n", fRunNumbers.Data());
+ if (fRunRange[0])
+ printf("= Run range to be processed: ___________________ %d-%d\n", fRunRange[0], fRunRange[1]);
+ if (!fRunRange[0] && !fRunNumbers.Length()) {
+ TIter next(fInputFiles);
+ TObject *obj;
+ TString list;
+ while ((obj=next())) list += obj->GetName();
+ printf("= Input files to be processed: _________________ %s\n", list.Data());
+ }
+ if (TestBit(AliAnalysisGrid::kTest))
+ printf("= Number of input files used in test mode: _____ %d\n", fNtestFiles);
+ printf("= List of output files to be registered: _______ %s\n", fOutputFiles.Data());
+ printf("= List of outputs going to be archived: ________ %s\n", fOutputArchive.Data());
+ printf("= List of outputs that should not be merged: ___ %s\n", fMergeExcludes.Data());
+ printf("=====================================================================\n");
+ printf("= Job price: ___________________________________ %d\n", fPrice);
+ printf("= Time to live (TTL): __________________________ %d\n", fTTL);
+ printf("= Max files per subjob: ________________________ %d\n", fSplitMaxInputFileNumber);
+ if (fMaxInitFailed>0)
+ printf("= Max number of subjob fails to kill: __________ %d\n", fMaxInitFailed);
+ if (fMasterResubmitThreshold>0)
+ printf("= Resubmit master job if failed subjobs >_______ %d\n", fMasterResubmitThreshold);
+ if (fNrunsPerMaster>0)
+ printf("= Number of runs per master job: _______________ %d\n", fNrunsPerMaster);
+ printf("= Number of files in one chunk to be merged: ___ %d\n", fMaxMergeFiles);
+ printf("= Name of the generated execution script: ______ %s\n",fExecutable.Data());
+ if (fArguments.Length())
+ printf("= Arguments for the execution script: __________ %s\n",fArguments.Data());
+ printf("= Name of the generated analysis macro: ________ %s\n",fAnalysisMacro.Data());
+ printf("= User analysis files to be deployed: __________ %s\n",fAnalysisSource.Data());
+ printf("= Additional libs to be loaded or souces to be compiled runtime: <%s>\n",fAdditionalLibs.Data());
+ printf("= Master jobs split mode: ______________________ %s\n",fSplitMode.Data());
+ if (fDatasetName)
+ printf("= Custom name for the dataset to be created: ___ %s\n", fDatasetName.Data());
+ printf("= Name of the generated JDL: ___________________ %s\n", fJDLName.Data());
+ if (fIncludePath.Data())
+ printf("= Include path for runtime task compilation: ___ %s\n", fIncludePath.Data());
+ if (fCloseSE.Length())
+ printf("= Force job outputs to storage element: ________ %s\n", fCloseSE.Data());
+ if (fFriendChainName.Length())
+ printf("= Open friend chain file on worker: ____________ %s\n", fFriendChainName.Data());
+ if (fPackages) {
+ TIter next(fPackages);
+ TObject *obj;
+ TString list;
+ while ((obj=next())) list += obj->GetName();
+ printf("= Par files to be used: ________________________ %s\n", list.Data());
+ }
+}
+
//______________________________________________________________________________
void AliAnalysisAlien::SetDefaults()
{
fMaxInitFailed = 0;
fMasterResubmitThreshold = 0;
fNtestFiles = 10;
+ fRunRange[0] = 0;
+ fRunRange[1] = 0;
+ fNrunsPerMaster = 1;
+ fMaxMergeFiles = 100;
fRunNumbers = "";
fExecutable = "analysis.sh";
fArguments = "";
fGridWorkingDir = "";
fGridDataDir = ""; // Can be like: /alice/sim/PDC_08a/LHC08c9/
fDataPattern = "*AliESDs.root"; // Can be like: *AliESDs.root, */pass1/*AliESDs.root, ...
+ fFriendChainName = "";
fGridOutputDir = "output";
fOutputArchive = "log_archive.zip:stdout,stderr root_archive.zip:*.root";
fOutputFiles = ""; // Like "AliAODs.root histos.root"
return kFALSE;
}
// Get the output path
- TString output = Form("/%s/%s/%s", gGrid->GetHomeDirectory(), fGridWorkingDir.Data(), fGridOutputDir.Data());
- if (!gGrid->Cd(output)) output = Form("/%s/%s", gGrid->GetHomeDirectory(), fGridOutputDir.Data());
- if (!gGrid->Cd(output)) {
+ if (!fGridOutputDir.Contains("/")) fGridOutputDir = Form("/%s/%s/%s", gGrid->GetHomeDirectory(), fGridWorkingDir.Data(), fGridOutputDir.Data());
+ if (!gGrid->Cd(fGridOutputDir)) {
Error("MergeOutputs", "Grid output directory %s not found. Terminate() will NOT be executed", fGridOutputDir.Data());
return kFALSE;
}
TObjString *str;
TString command;
TString output_file;
+ TString output_chunk;
+ TString previous_chunk;
+ Int_t count_chunk = 0;
+ Int_t count_zero = fMaxMergeFiles;
Bool_t merged = kTRUE;
while((str=(TObjString*)next())) {
output_file = str->GetString();
Int_t index = output_file.Index("@");
if (index > 0) output_file.Remove(index);
+ // Skip already merged outputs
+ if (!gSystem->AccessPathName(output_file)) {
+ Info("MergeOutputs", "Output file <%s> found. Not merging again.", output_file.Data());
+ continue;
+ }
if (fMergeExcludes.Length() &&
fMergeExcludes.Contains(output_file.Data())) continue;
- command = Form("find %s/ *%s", output.Data(), output_file.Data());
+ // Perform a 'find' command in the output directory, looking for registered outputs
+ command = Form("find %s/ *%s", fGridOutputDir.Data(), output_file.Data());
printf("command: %s\n", command.Data());
TGridResult *res = gGrid->Command(command);
if (!res) continue;
TFileMerger *fm = 0;
TIter nextmap(res);
- TMap *map;
+ TMap *map = 0;
+ previous_chunk = "";
+ count_chunk = 0;
+ // Check if there is a merge operation to resume
+ output_chunk = output_file;
+ output_chunk.ReplaceAll(".root", "_*.root");
+ if (!gSystem->Exec(Form("ls %s", output_chunk.Data()))) {
+ while (1) {
+ for (Int_t counter=0; counter<fMaxMergeFiles; counter++) map = (TMap*)nextmap();
+ if (!map) {
+ Error("MergeOutputs", "Cannot resume merging for <%s>, nentries=%d", output_file.Data(), res->GetSize());
+ delete res;
+ return kFALSE;
+ }
+ output_chunk = output_file;
+ output_chunk.ReplaceAll(".root", Form("_%04d.root", count_chunk));
+ printf("%s\n", output_chunk.Data());
+ count_chunk++;
+ if (gSystem->AccessPathName(output_chunk)) continue;
+ // Merged file with chunks up to <count_chunk> found
+ printf("Resume merging of <%s> from <%s>\n", output_file.Data(), output_chunk.Data());
+ previous_chunk = output_chunk;
+ break;
+ }
+ }
+ count_zero = fMaxMergeFiles;
while ((map=(TMap*)nextmap())) {
+ // Loop 'find' results and get next LFN
+ if (count_zero == fMaxMergeFiles) {
+ // First file in chunk - create file merger and add previous chunk if any.
+ fm = new TFileMerger(kFALSE);
+ fm->SetFastMethod(kTRUE);
+ if (previous_chunk.Length()) fm->AddFile(previous_chunk.Data());
+ output_chunk = output_file;
+ output_chunk.ReplaceAll(".root", Form("_%04d.root", count_chunk));
+ }
+ // If last file found, put merged results in the output file
+ if (map == res->Last()) output_chunk = output_file;
TObjString *objs = dynamic_cast<TObjString*>(map->GetValue("turl"));
if (!objs || !objs->GetString().Length()) {
+ // Nothing found - skip this output
delete res;
- continue;
- }
- if (!fm) {
- fm = new TFileMerger(kFALSE);
- fm->SetFastMethod(kTRUE);
- fm->OutputFile(output_file);
+ delete fm;
+ break;
+ }
+ // Add file to be merged and decrement chunk counter.
+ fm->AddFile(objs->GetString());
+ count_zero--;
+ if (count_zero==0 || map == res->Last()) {
+ fm->OutputFile(output_chunk);
+ if (!fm->GetMergeList() || !fm->GetMergeList()->GetSize()) {
+ // Nothing found - skip this output
+ Warning("MergeOutputs", "No <%s> files found.", output_file.Data());
+ delete res;
+ delete fm;
+ break;
+ }
+ // Merge the outputs, then go to next chunk
+ if (!fm->Merge()) {
+ Error("MergeOutputs", "Could not merge all <%s> files", output_file.Data());
+ delete res;
+ delete fm;
+ merged = kFALSE;
+ break;
+ } else {
+ Info("MergeOutputs", "\n##### Merged %d output files to <%s>", fm->GetMergeList()->GetSize(), output_chunk.Data());
+ gSystem->Unlink(previous_chunk);
+ }
+ if (map == res->Last()) {
+ delete res;
+ delete fm;
+ break;
+ }
+ count_chunk++;
+ count_zero = fMaxMergeFiles;
+ previous_chunk = output_chunk;
}
- fm->AddFile(objs->GetString());
- }
- if (!fm || !fm->GetMergeList() || !fm->GetMergeList()->GetSize()) {
- Warning("MergeOutputs", "No <%s> files found.", output_file.Data());
- merged = kFALSE;
- delete res;
- continue;
}
- if (!fm->Merge()) {
- Error("MergeOutputs", "Could not merge all <%s> files", output_file.Data());
- merged = kFALSE;
- } else {
- Info("MergeOutputs", "\n##### Merged %d output files <%s>", fm->GetMergeList()->GetSize(), output_file.Data());
- }
- delete fm;
- delete res;
}
if (!merged) {
Error("MergeOutputs", "Terminate() will NOT be executed");
if (fOutputFiles.Length()) fOutputFiles += " ";
fOutputFiles += filename;
}
+ // Add extra files registered to the analysis manager
+ if (mgr->GetExtraFiles().Length()) {
+ if (fOutputFiles.Length()) fOutputFiles += " ";
+ fOutputFiles += mgr->GetExtraFiles();
+ }
}
- if (!fCloseSE.Length()) fCloseSE = gSystem->Getenv("alien_CLOSE_SE");
+// if (!fCloseSE.Length()) fCloseSE = gSystem->Getenv("alien_CLOSE_SE");
if (TestBit(AliAnalysisGrid::kOffline)) {
Info("StartAnalysis","\n##### OFFLINE MODE ##### Files to be used in GRID are produced but not copied \
\n there nor any job run. You can revise the JDL and analysis \
if (!Connect()) {
Error("StartAnalysis", "Cannot start grid analysis without grid connection");
return;
- }
+ }
+ Print();
if (!CheckInputData()) {
Error("StartAnalysis", "There was an error in preprocessing your requested input data");
return;
// gSystem->Exec("cat stdout");
return;
}
- // Submit AliEn job
- CdWork();
- TGridResult *res = gGrid->Command(Form("submit %s", fJDLName.Data()));
+ // Submit AliEn job(s)
+ gGrid->Cd(fGridOutputDir);
+ TGridResult *res;
TString jobID = "";
- if (res) {
- const char *cjobId = res->GetKey(0,"jobId");
- if (!cjobId) {
- Error("StartAnalysis", "Your JDL %s could not be submitted", fJDLName.Data());
- return;
- } else {
- Info("StartAnalysis", "\n_______________________________________________________________________ \
- \n##### Your JDL %s was successfully submitted. \nTHE JOB ID IS: %s \
- \n_______________________________________________________________________",
- fJDLName.Data(), cjobId);
- jobID = cjobId;
- }
- delete res;
+ if (!fRunNumbers.Length() && !fRunRange[0]) {
+ // Submit a given xml or a set of runs
+ res = gGrid->Command(Form("submit %s", fJDLName.Data()));
+ printf("*************************** %s\n",Form("submit %s", fJDLName.Data()));
+ if (res) {
+ const char *cjobId = res->GetKey(0,"jobId");
+ if (!cjobId) {
+ Error("StartAnalysis", "Your JDL %s could not be submitted", fJDLName.Data());
+ return;
+ } else {
+ Info("StartAnalysis", "\n_______________________________________________________________________ \
+ \n##### Your JDL %s was successfully submitted. \nTHE JOB ID IS: %s \
+ \n_______________________________________________________________________",
+ fJDLName.Data(), cjobId);
+ jobID = cjobId;
+ }
+ delete res;
+ }
+ } else {
+ // Submit for a range of enumeration of runs.
+ Submit();
}
+
Info("StartAnalysis", "\n#### STARTING AN ALIEN SHELL FOR YOU. EXIT WHEN YOUR JOB %s HAS FINISHED. #### \
\n You may exit at any time and terminate the job later using the option <terminate> \
\n ##################################################################################", jobID.Data());
- //gGrid->Shell();
gSystem->Exec("aliensh");
}
+//______________________________________________________________________________
+void AliAnalysisAlien::Submit()
+{
+// Submit all master jobs, pacing submissions so the AliEn queue is not flooded.
+// Polls roughly every 30 seconds and delegates the actual submission of each
+// bunch to SubmitNext() until all masters have been submitted.
+   if (!fInputFiles) return;   // nothing to submit - guard against null list
+   Int_t nmasterjobs = fInputFiles->GetEntries();
+   Long_t tshoot = gSystem->Now();
+   // Kick off the first bunch immediately.
+   if (!fNsubmitted) SubmitNext();
+   while (fNsubmitted < nmasterjobs) {
+      // Sleep 1s per iteration: avoids burning 100% CPU in a tight busy-wait
+      // between the 30-second polling intervals.
+      gSystem->Sleep(1000);
+      Long_t now = gSystem->Now();
+      if ((now-tshoot)>30000) {
+         tshoot = now;
+         SubmitNext();
+      }
+   }
+}
+
+//______________________________________________________________________________
+void AliAnalysisAlien::SubmitNext()
+{
+// Submit the next bunch of master jobs if the AliEn queue is free enough.
+// Throttles so that no more than ~100 subjobs are waiting at any time.
+   static Bool_t iscalled = kFALSE;   // re-entrancy guard
+   static Int_t firstmaster = 0;      // job id of the first submitted master
+   static Int_t lastmaster = 0;       // job id of the last submitted master
+   static Int_t npermaster = 0;       // average number of subjobs per master
+   if (iscalled) return;
+   iscalled = kTRUE;
+   Int_t nrunning=0, nwaiting=0, nerror=0, ndone=0;
+   Int_t ntosubmit = 0;
+   TGridResult *res;
+   if (!fNsubmitted) ntosubmit = 1;
+   else {
+      TString status = GetJobStatus(firstmaster, lastmaster, nrunning, nwaiting, nerror, ndone);
+      printf("=== master %d: %s\n", lastmaster, status.Data());
+      // If last master not split, just return
+      if (status != "SPLIT") {iscalled = kFALSE; return;}
+      // No more than 100 waiting jobs
+      if (nwaiting>100) {iscalled = kFALSE; return;}
+      npermaster = (nrunning+nwaiting+nerror+ndone)/fNsubmitted;
+      if (npermaster) ntosubmit = (100-nwaiting)/npermaster;
+      printf("=== WAITING(%d) RUNNING(%d) DONE(%d) OTHER(%d) NperMaster=%d => to submit %d jobs\n",
+             nwaiting, nrunning, ndone, nerror, npermaster, ntosubmit);
+   }
+   Int_t nmasterjobs = fInputFiles->GetEntries();
+   for (Int_t i=0; i<ntosubmit; i++) {
+      // Submit for a range of enumeration of runs.
+      if (fNsubmitted>=nmasterjobs) {iscalled = kFALSE; return;}
+      TString query = Form("submit %s %s %03d", fJDLName.Data(), fInputFiles->At(fNsubmitted)->GetName(), fNsubmitted);
+      printf("********* %s\n",query.Data());
+      res = gGrid->Command(query);
+      if (res) {
+         const char *cjobId1 = res->GetKey(0,"jobId");
+         if (!cjobId1) {
+            Error("SubmitNext", "Your JDL %s could not be submitted", fJDLName.Data());
+            delete res;   // fix: do not leak the query result on the error path
+            iscalled = kFALSE;
+            return;
+         } else {
+            Info("SubmitNext", "\n_______________________________________________________________________ \
+            \n##### Your JDL %s submitted (%d to go). \nTHE JOB ID IS: %s \
+            \n_______________________________________________________________________",
+                 fJDLName.Data(), nmasterjobs-fNsubmitted-1, cjobId1);
+            lastmaster = atoi(cjobId1);
+            if (!firstmaster) firstmaster = lastmaster;
+            fNsubmitted++;
+         }
+         delete res;
+      }
+   }
+   iscalled = kFALSE;
+}
+
//______________________________________________________________________________
void AliAnalysisAlien::WriteAnalysisFile()
{
type = "AOD";
comment += "AOD";
}
+ if (type!="AOD" && fFriendChainName!="") {
+ Error("WriteAnalysisMacro", "Friend chain can be attached only to AOD");
+ return;
+ }
if (TObject::TestBit(AliAnalysisGrid::kUseMC)) comment += "/MC";
else comment += " data";
out << "const char *anatype = \"" << type.Data() << "\";" << endl << endl;
out << "{" << endl;
out << comment.Data() << endl;
out << "// Automatically generated analysis steering macro executed in grid subjobs" << endl << endl;
+ out << " TStopwatch timer;" << endl;
+ out << " timer.Start();" << endl << endl;
out << "// load base root libraries" << endl;
out << " gSystem->Load(\"libTree\");" << endl;
out << " gSystem->Load(\"libGeom\");" << endl;
out << " gSystem->Load(\"libVMC\");" << endl;
out << " gSystem->Load(\"libPhysics\");" << endl << endl;
+ out << "// Load analysis framework libraries" << endl;
if (!fPackages) {
- out << "// Load analysis framework libraries" << endl;
out << " gSystem->Load(\"libSTEERBase\");" << endl;
out << " gSystem->Load(\"libESD\");" << endl;
out << " gSystem->Load(\"libAOD\");" << endl;
out << " gSystem->Load(\"libANALYSIS\");" << endl;
- out << " gSystem->Load(\"libANALYSISalice\");" << endl << endl;
+ out << " gSystem->Load(\"libANALYSISalice\");" << endl;
+ out << " gSystem->Load(\"libCORRFW\");" << endl << endl;
} else {
- out << "// Compile all par packages" << endl;
TIter next(fPackages);
TObject *obj;
- while ((obj=next()))
+ TString pkgname;
+ Bool_t hasSTEERBase = kFALSE;
+ Bool_t hasESD = kFALSE;
+ Bool_t hasAOD = kFALSE;
+ Bool_t hasANALYSIS = kFALSE;
+ Bool_t hasANALYSISalice = kFALSE;
+ Bool_t hasCORRFW = kFALSE;
+ while ((obj=next())) {
+ pkgname = obj->GetName();
+ if (pkgname == "STEERBase" ||
+ pkgname == "STEERBase.par") hasSTEERBase = kTRUE;
+ if (pkgname == "ESD" ||
+ pkgname == "ESD.par") hasESD = kTRUE;
+ if (pkgname == "AOD" ||
+ pkgname == "AOD.par") hasAOD = kTRUE;
+ if (pkgname == "ANALYSIS" ||
+ pkgname == "ANALYSIS.par") hasANALYSIS = kTRUE;
+ if (pkgname == "ANALYSISalice" ||
+ pkgname == "ANALYSISalice.par") hasANALYSISalice = kTRUE;
+ if (pkgname == "CORRFW" ||
+ pkgname == "CORRFW.par") hasCORRFW = kTRUE;
+ }
+ if (!hasSTEERBase) out << " gSystem->Load(\"libSTEERBase\");" << endl;
+ else out << " if (!SetupPar(\"STEERBase\")) return;" << endl;
+ if (!hasESD) out << " gSystem->Load(\"libESD\");" << endl;
+ else out << " if (!SetupPar(\"ESD\")) return;" << endl;
+ if (!hasAOD) out << " gSystem->Load(\"libAOD\");" << endl;
+ else out << " if (!SetupPar(\"AOD\")) return;" << endl;
+ if (!hasANALYSIS) out << " gSystem->Load(\"libANALYSIS\");" << endl;
+ else out << " if (!SetupPar(\"ANALYSIS\")) return;" << endl;
+ if (!hasANALYSISalice) out << " gSystem->Load(\"libANALYSISalice\");" << endl;
+ else out << " if (!SetupPar(\"ANALYSISalice\")) return;" << endl;
+ if (!hasCORRFW) out << " gSystem->Load(\"libCORRFW\");" << endl << endl;
+ else out << " if (!SetupPar(\"CORRFW\")) return;" << endl << endl;
+ out << "// Compile other par packages" << endl;
+ next.Reset();
+ while ((obj=next())) {
+ pkgname = obj->GetName();
+ if (pkgname == "STEERBase" ||
+ pkgname == "STEERBase.par" ||
+ pkgname == "ESD" ||
+ pkgname == "ESD.par" ||
+ pkgname == "AOD" ||
+ pkgname == "AOD.par" ||
+ pkgname == "ANALYSIS" ||
+ pkgname == "ANALYSIS.par" ||
+ pkgname == "ANALYSISalice" ||
+ pkgname == "ANALYSISalice.par" ||
+ pkgname == "CORRFW" ||
+ pkgname == "CORRFW.par") continue;
out << " if (!SetupPar(\"" << obj->GetName() << "\")) return;" << endl;
+ }
}
out << "// include path" << endl;
if (fIncludePath.Length()) out << " gSystem->AddIncludePath(\"" << fIncludePath.Data() << "\");" << endl;
out << " }" << endl << endl;
out << " mgr->PrintStatus();" << endl;
out << " mgr->StartAnalysis(\"localfile\", chain);" << endl;
+ out << " timer.Stop();" << endl;
+ out << " timer.Print();" << endl;
out << "}" << endl << endl;
if (IsUsingTags()) {
out << "TChain* CreateChainFromTags(const char *xmlfile, const char *type=\"ESD\")" << endl;
out << " gROOT->LoadMacro(\"ConfigureCuts.C\");" << endl;
out << " ConfigureCuts(runCuts, lhcCuts, detCuts, evCuts);" << endl;
out << " }" << endl;
- out << " TChain *chain = tagAna->QueryTags(runCuts, lhcCuts, detCuts, evCuts);" << endl;
+ if (fFriendChainName=="") {
+ out << " TChain *chain = tagAna->QueryTags(runCuts, lhcCuts, detCuts, evCuts);" << endl;
+ } else {
+ out << " TString tmpColl=\"tmpCollection.xml\";" << endl;
+ out << " tagAna->CreateXMLCollection(tmpColl.Data(),runCuts, lhcCuts, detCuts, evCuts);" << endl;
+ out << " TChain *chain = CreateChain(tmpColl.Data(),type);" << endl;
+ }
out << " if (!chain || !chain->GetNtrees()) return NULL;" << endl;
out << " chain->ls();" << endl;
out << " return chain;" << endl;
- out << "}" << endl;
+ out << "}" << endl << endl;
if (gSystem->AccessPathName("ConfigureCuts.C")) {
TString msg = "\n##### You may want to provide a macro ConfigureCuts.C with a method:\n";
msg += " void ConfigureCuts(AliRunTagCuts *runCuts,\n";
msg += " AliEventTagCuts *evCuts)";
Info("WriteAnalysisMacro", msg.Data());
}
- } else {
+ }
+ if (!IsUsingTags() || fFriendChainName!="") {
+ out <<"//________________________________________________________________________________" << endl;
out << "TChain* CreateChain(const char *xmlfile, const char *type=\"ESD\")" << endl;
out << "{" << endl;
out << "// Create a chain using url's from xml file" << endl;
out << " return NULL;" << endl;
out << " }" << endl;
out << " TChain *chain = new TChain(treename);" << endl;
+ if(fFriendChainName!="") {
+ out << " TChain *chainFriend = new TChain(treename);" << endl;
+ }
out << " coll->Reset();" << endl;
- out << " while (coll->Next()) chain->Add(coll->GetTURL(\"\"));" << endl;
+ out << " while (coll->Next()) {" << endl;
+ out << " chain->Add(coll->GetTURL(\"\"));" << endl;
+ if(fFriendChainName!="") {
+ out << " TString fileFriend=coll->GetTURL(\"\");" << endl;
+ out << " fileFriend.ReplaceAll(\"AliAOD.root\",\""<<fFriendChainName.Data()<<"\");" << endl;
+ out << " fileFriend.ReplaceAll(\"AliAODs.root\",\""<<fFriendChainName.Data()<<"\");" << endl;
+ out << " chainFriend->Add(fileFriend.Data());" << endl;
+ }
+ out << " }" << endl;
out << " if (!chain->GetNtrees()) {" << endl;
out << " ::Error(\"CreateChain\", \"No tree found from collection %s\", xmlfile);" << endl;
out << " return NULL;" << endl;
out << " }" << endl;
+ if(fFriendChainName!="") {
+ out << " chain->AddFriend(chainFriend);" << endl;
+ }
out << " return chain;" << endl;
- out << "}" << endl;
+ out << "}" << endl << endl;
}
if (fPackages) {
+ out <<"//________________________________________________________________________________" << endl;
out << "Bool_t SetupPar(const char *package) {" << endl;
out << "// Compile the package and set it up." << endl;
out << " TString pkgdir = package;" << endl;
out << " pkgdir.ReplaceAll(\".par\",\"\");" << endl;
- out << " gSystem->Exec(Form(\"tar xvzf %s\", package));" << endl;
+ out << " gSystem->Exec(Form(\"tar xvzf %s.par\", pkgdir.Data()));" << endl;
out << " TString cdir = gSystem->WorkingDirectory();" << endl;
out << " gSystem->ChangeDirectory(pkgdir);" << endl;
out << " // Check for BUILD.sh and execute" << endl;
out << " printf(\"*** Building PAR archive ***\\n\");" << endl;
out << " printf(\"*******************************\\n\");" << endl;
out << " if (gSystem->Exec(\"PROOF-INF/BUILD.sh\")) {" << endl;
- out << " ::Error(\"SetupPar\", \"Cannot build par archive %s\", package);" << endl;
+ out << " ::Error(\"SetupPar\", \"Cannot build par archive %s\", pkgdir.Data());" << endl;
out << " gSystem->ChangeDirectory(cdir);" << endl;
out << " return kFALSE;" << endl;
out << " }" << endl;
out << " } else {" << endl;
- out << " ::Error(\"SetupPar\",\"Cannot access PROOF-INF/BUILD.sh for package %s\", package);" << endl;
+ out << " ::Error(\"SetupPar\",\"Cannot access PROOF-INF/BUILD.sh for package %s\", pkgdir.Data());" << endl;
out << " gSystem->ChangeDirectory(cdir);" << endl;
out << " return kFALSE;" << endl;
out << " }" << endl;
out << " printf(\"*******************************\\n\");" << endl;
out << " gROOT->Macro(\"PROOF-INF/SETUP.C\");" << endl;
out << " } else {" << endl;
- out << " ::Error(\"SetupPar\",\"Cannot access PROOF-INF/SETUP.C for package %s\", package);" << endl;
+ out << " ::Error(\"SetupPar\",\"Cannot access PROOF-INF/SETUP.C for package %s\", pkgdir.Data());" << endl;
out << " gSystem->ChangeDirectory(cdir);" << endl;
out << " return kFALSE;" << endl;
out << " }" << endl;
out << "ls -la ./" << out_stream << endl;
out << "echo \"* ----------------------------------------------------*\"" << out_stream << endl << endl;
out << "##################################################" << endl;
+
+ out << "" << endl;
+ out << "parArch=`grep -Ei \"Cannot Build the PAR Archive\" stderr`" << endl;
+ out << "segViol=`grep -Ei \"Segmentation violation\" stderr`" << endl;
+ out << "segFault=`grep -Ei \"Segmentation fault\" stderr`" << endl;
+ out << "" << endl;
+
+ out << "if [ ! -f stderr ] ; then" << endl;
+ out << " error=1" << endl;
+ out << " echo \"* ########## Job not validated - no stderr ###\" " << out_stream << endl;
+ out << " echo \"Error = $error\" " << out_stream << endl;
+ out << "fi" << endl;
+
+ out << "if [ \"$parArch\" != \"\" ] ; then" << endl;
+ out << " error=1" << endl;
+ out << " echo \"* ########## Job not validated - PAR archive not built ###\" " << out_stream << endl;
+ out << " echo \"$parArch\" " << out_stream << endl;
+ out << " echo \"Error = $error\" " << out_stream << endl;
+ out << "fi" << endl;
+
+ out << "if [ \"$segViol\" != \"\" ] ; then" << endl;
+ out << " error=1" << endl;
+ out << " echo \"* ########## Job not validated - Segment. violation ###\" " << out_stream << endl;
+ out << " echo \"$segViol\" " << out_stream << endl;
+ out << " echo \"Error = $error\" " << out_stream << endl;
+ out << "fi" << endl;
+
+ out << "if [ \"$segFault\" != \"\" ] ; then" << endl;
+ out << " error=1" << endl;
+ out << " echo \"* ########## Job not validated - Segment. fault ###\" " << out_stream << endl;
+ out << " echo \"$segFault\" " << out_stream << endl;
+ out << " echo \"Error = $error\" " << out_stream << endl;
+ out << "fi" << endl;
+
+ // Part dedicated to the specific analyses running into the train
+
TObjArray *arr = fOutputFiles.Tokenize(" ");
TIter next1(arr);
TString output_file;