//==============================================================================
#include "Riostream.h"
+#include "TEnv.h"
+#include "TBits.h"
+#include "TError.h"
#include "TROOT.h"
#include "TSystem.h"
#include "TFile.h"
AliAnalysisAlien::AliAnalysisAlien()
:AliAnalysisGrid(),
fGridJDL(NULL),
+ fMergingJDL(NULL),
fPrice(0),
fTTL(0),
fSplitMaxInputFileNumber(0),
fMaxMergeFiles(0),
fNsubmitted(0),
fProductionMode(0),
+ fOutputToRunNo(0),
+ fMergeViaJDL(0),
+ fFastReadOption(0),
+ fOverwriteMode(1),
+ fNreplicas(2),
fRunNumbers(),
fExecutable(),
fExecutableCommand(),
fExecutableArgs(),
fAnalysisMacro(),
fAnalysisSource(),
+ fAdditionalRootLibs(),
fAdditionalLibs(),
fSplitMode(),
fAPIVersion(),
AliAnalysisAlien::AliAnalysisAlien(const char *name)
:AliAnalysisGrid(name),
fGridJDL(NULL),
+ fMergingJDL(NULL),
fPrice(0),
fTTL(0),
fSplitMaxInputFileNumber(0),
fMaxMergeFiles(0),
fNsubmitted(0),
fProductionMode(0),
+ fOutputToRunNo(0),
+ fMergeViaJDL(0),
+ fFastReadOption(0),
+ fOverwriteMode(1),
+ fNreplicas(2),
fRunNumbers(),
fExecutable(),
fExecutableCommand(),
fExecutableArgs(),
fAnalysisMacro(),
fAnalysisSource(),
+ fAdditionalRootLibs(),
fAdditionalLibs(),
fSplitMode(),
fAPIVersion(),
AliAnalysisAlien::AliAnalysisAlien(const AliAnalysisAlien& other)
:AliAnalysisGrid(other),
fGridJDL(NULL),
+ fMergingJDL(NULL),
fPrice(other.fPrice),
fTTL(other.fTTL),
fSplitMaxInputFileNumber(other.fSplitMaxInputFileNumber),
fMaxMergeFiles(other.fMaxMergeFiles),
fNsubmitted(other.fNsubmitted),
fProductionMode(other.fProductionMode),
+ fOutputToRunNo(other.fOutputToRunNo),
+ fMergeViaJDL(other.fMergeViaJDL),
+ fFastReadOption(other.fFastReadOption),
+ fOverwriteMode(other.fOverwriteMode),
+ fNreplicas(other.fNreplicas),
fRunNumbers(other.fRunNumbers),
fExecutable(other.fExecutable),
fExecutableCommand(other.fExecutableCommand),
fExecutableArgs(other.fExecutableArgs),
fAnalysisMacro(other.fAnalysisMacro),
fAnalysisSource(other.fAnalysisSource),
+ fAdditionalRootLibs(other.fAdditionalRootLibs),
fAdditionalLibs(other.fAdditionalLibs),
fSplitMode(other.fSplitMode),
fAPIVersion(other.fAPIVersion),
{
// Copy ctor.
fGridJDL = (TGridJDL*)gROOT->ProcessLine("new TAlienJDL()");
+ fMergingJDL = (TGridJDL*)gROOT->ProcessLine("new TAlienJDL()");
fRunRange[0] = other.fRunRange[0];
fRunRange[1] = other.fRunRange[1];
if (other.fInputFiles) {
{
// Destructor.
if (fGridJDL) delete fGridJDL;
+ if (fMergingJDL) delete fMergingJDL;
if (fInputFiles) delete fInputFiles;
if (fPackages) delete fPackages;
}
if (this != &other) {
AliAnalysisGrid::operator=(other);
fGridJDL = (TGridJDL*)gROOT->ProcessLine("new TAlienJDL()");
+ fMergingJDL = (TGridJDL*)gROOT->ProcessLine("new TAlienJDL()");
fPrice = other.fPrice;
fTTL = other.fTTL;
fSplitMaxInputFileNumber = other.fSplitMaxInputFileNumber;
fMaxMergeFiles = other.fMaxMergeFiles;
fNsubmitted = other.fNsubmitted;
fProductionMode = other.fProductionMode;
+ fOutputToRunNo = other.fOutputToRunNo;
+ fMergeViaJDL = other.fMergeViaJDL;
+ fFastReadOption = other.fFastReadOption;
+ fOverwriteMode = other.fOverwriteMode;
+ fNreplicas = other.fNreplicas;
fRunNumbers = other.fRunNumbers;
fExecutable = other.fExecutable;
fExecutableCommand = other.fExecutableCommand;
fExecutableArgs = other.fExecutableArgs;
fAnalysisMacro = other.fAnalysisMacro;
fAnalysisSource = other.fAnalysisSource;
+ fAdditionalRootLibs = other.fAdditionalRootLibs;
fAdditionalLibs = other.fAdditionalLibs;
fSplitMode = other.fSplitMode;
fAPIVersion = other.fAPIVersion;
{
// Try to connect to AliEn. User needs a valid token and /tmp/gclient_env_$UID sourced.
if (gGrid && gGrid->IsConnected()) return kTRUE;
- if (!gSystem->Getenv("alien_API_USER")) {
- Error("Connect", "Make sure you:\n 1. Have called: alien-token-init <username> today\n 2. Have sourced /tmp/gclient_env_%s",
- gSystem->Getenv("UID"));
- return kFALSE;
- }
if (!gGrid) {
Info("Connect", "Trying to connect to AliEn ...");
TGrid::Connect("alien://");
}
// Work directory not existing - create it
gGrid->Cd(homedir);
- if (gGrid->Mkdir(workdir)) {
+ if (gGrid->Mkdir(workdir, "-p")) {
gGrid->Cd(fGridWorkingDir);
Info("CreateJDL", "\n##### Created alien working directory %s", fGridWorkingDir.Data());
} else {
}
}
+//______________________________________________________________________________
+Bool_t AliAnalysisAlien::CheckFileCopy(const char *alienpath)
+{
+// Check that files can be copied from the current local directory to the
+// given AliEn directory <alienpath>. Requires an active grid connection.
+// Creates a small local test file, copies it to the grid, then removes both
+// the local and the remote copy. Returns kTRUE on success.
+   if (!Connect()) {
+      Error("CheckFileCopy", "Not connected to AliEn. File copying cannot be tested.");
+      return kFALSE;
+   }
+   Info("CheckFileCopy", "Checking possibility to copy files to your AliEn home directory... \
+   \n +++ NOTE: You can disable this via: plugin->SetCheckCopy(kFALSE);");
+   // Check if alien_CLOSE_SE is defined
+   TString closeSE = gSystem->Getenv("alien_CLOSE_SE");
+   if (!closeSE.IsNull()) {
+      Info("CheckFileCopy", "Your current close storage is pointing to: \
+      \n      alien_CLOSE_SE = \"%s\"", closeSE.Data());
+   } else {
+      Warning("CheckFileCopy", "Your current close storage is empty ! Depending on your location, file copying may fail.");
+   }
+   // Check if grid directory exists.
+   if (!DirectoryExists(alienpath)) {
+      Error("CheckFileCopy", "Alien path %s does not seem to exist", alienpath);
+      return kFALSE;
+   }
+   TFile f("plugin_test_copy", "RECREATE");
+   // User may not have write permissions to current directory
+   if (f.IsZombie()) {
+      Error("CheckFileCopy", "Cannot create local test file. Do you have write access to current directory: <%s> ?",
+            gSystem->WorkingDirectory());
+      return kFALSE;
+   }
+   f.Close();
+   // Remove any stale remote copy left over from a previous (failed) check.
+   if (FileExists(Form("alien://%s/%s",alienpath, f.GetName()))) gGrid->Rm(Form("alien://%s/%s",alienpath, f.GetName()));
+   if (!TFile::Cp(f.GetName(), Form("alien://%s/%s",alienpath, f.GetName()))) {
+      Error("CheckFileCopy", "Cannot copy files to Alien destination: <%s> This may be temporary, or: \
+      \n# 1. Make sure you have write permissions there. If this is the case: \
+      \n# 2. Check the storage availability at: http://alimonitor.cern.ch/stats?page=SE/table \
+      \n#    Do:    export alien_CLOSE_SE=\"working_disk_SE\" \
+      \n#    To make this permanent put in in your .bashrc (in .alienshrc is not enough) \
+      \n#    Redo token: rm /tmp/x509up_u$UID then: alien-token-init <username>", alienpath);
+      gSystem->Unlink(f.GetName());
+      return kFALSE;
+   }
+   // Clean up: remove both the local test file and its grid copy.
+   // BUGFIX: the path separator was missing ("%s%s"), so the Rm addressed a
+   // nonexistent concatenated path and the remote test file was left behind.
+   gSystem->Unlink(f.GetName());
+   gGrid->Rm(Form("%s/%s",alienpath,f.GetName()));
+   Info("CheckFileCopy", "### ...SUCCESS ###");
+   return kTRUE;
+}
+
//______________________________________________________________________________
Bool_t AliAnalysisAlien::CheckInputData()
{
return kTRUE;
}
// Process declared files
- Bool_t is_collection = kFALSE;
- Bool_t is_xml = kFALSE;
- Bool_t use_tags = kFALSE;
+ Bool_t isCollection = kFALSE;
+ Bool_t isXml = kFALSE;
+ Bool_t useTags = kFALSE;
Bool_t checked = kFALSE;
CdWork();
TString file;
CheckDataType(file, iscoll, isxml, usetags);
if (!checked) {
checked = kTRUE;
- is_collection = iscoll;
- is_xml = isxml;
- use_tags = usetags;
- TObject::SetBit(AliAnalysisGrid::kUseTags, use_tags);
+ isCollection = iscoll;
+ isXml = isxml;
+ useTags = usetags;
+ TObject::SetBit(AliAnalysisGrid::kUseTags, useTags);
} else {
- if ((iscoll != is_collection) || (isxml != is_xml) || (usetags != use_tags)) {
+ if ((iscoll != isCollection) || (isxml != isXml) || (usetags != useTags)) {
Error("CheckInputData", "Some conflict was found in the types of inputs");
return kFALSE;
}
Error("CheckInputData", "Data directory %s not existing.", fGridDataDir.Data());
return kFALSE;
}
- if (is_collection) {
+ if (isCollection) {
Error("CheckInputData", "You are using raw AliEn collections as input. Cannot process run numbers.");
return kFALSE;
}
- if (checked && !is_xml) {
+ if (checked && !isXml) {
Error("CheckInputData", "Cannot mix processing of full runs with non-xml files");
return kFALSE;
}
TString path;
if (!checked) {
checked = kTRUE;
- use_tags = fDataPattern.Contains("tag");
- TObject::SetBit(AliAnalysisGrid::kUseTags, use_tags);
+ useTags = fDataPattern.Contains("tag");
+ TObject::SetBit(AliAnalysisGrid::kUseTags, useTags);
}
- if (use_tags != fDataPattern.Contains("tag")) {
+ if (useTags != fDataPattern.Contains("tag")) {
Error("CheckInputData", "Cannot mix input files using/not using tags");
return kFALSE;
}
TString msg = "\n##### file: ";
msg += path;
msg += " type: xml_collection;";
- if (use_tags) msg += " using_tags: Yes";
+ if (useTags) msg += " using_tags: Yes";
else msg += " using_tags: No";
Info("CheckDataType", msg.Data());
if (fNrunsPerMaster<2) {
TString msg = "\n##### file: ";
msg += path;
msg += " type: xml_collection;";
- if (use_tags) msg += " using_tags: Yes";
+ if (useTags) msg += " using_tags: Yes";
else msg += " using_tags: No";
Info("CheckDataType", msg.Data());
if (fNrunsPerMaster<2) {
Bool_t AliAnalysisAlien::CreateDataset(const char *pattern)
{
// Create dataset for the grid data directory + run number.
- if (TestBit(AliAnalysisGrid::kOffline)) return kFALSE;
+ if (TestBit(AliAnalysisGrid::kOffline)) return kTRUE;
if (!Connect()) {
Error("CreateDataset", "Cannot create dataset with no grid connection");
return kFALSE;
// CdWork();
if (TestBit(AliAnalysisGrid::kTest)) file = "wn.xml";
else file = Form("%s.xml", gSystem->BaseName(path));
- if (gSystem->AccessPathName(file) || TestBit(AliAnalysisGrid::kTest)) {
+ if (gSystem->AccessPathName(file) || TestBit(AliAnalysisGrid::kTest) || fOverwriteMode) {
command = "find ";
command += options;
command += path;
if (res) delete res;
// Write standard output to file
gROOT->ProcessLine(Form("gGrid->Stdout(); > %s", file.Data()));
- }
- if (!TestBit(AliAnalysisGrid::kTest) && !FileExists(file)) {
+ Bool_t hasGrep = (gSystem->Exec("grep --version 2>/dev/null > /dev/null")==0)?kTRUE:kFALSE;
+ Bool_t nullFile = kFALSE;
+ if (!hasGrep) {
+ Warning("CreateDataset", "'grep' command not available on this system - cannot validate the result of the grid 'find' command");
+ } else {
+ nullFile = (gSystem->Exec(Form("grep /event %s 2>/dev/null > /dev/null",file.Data()))==0)?kFALSE:kTRUE;
+ if (nullFile) {
+ Error("CreateDataset","Dataset %s produced by the previous find command is empty !", file.Data());
+ return kFALSE;
+ }
+ }
+ }
+ Bool_t fileExists = FileExists(file);
+ if (!TestBit(AliAnalysisGrid::kTest) && (!fileExists || fOverwriteMode)) {
// Copy xml file to alien space
+ if (fileExists) gGrid->Rm(file);
TFile::Cp(Form("file:%s",file.Data()), Form("alien://%s/%s",workdir.Data(), file.Data()));
if (!FileExists(file)) {
Error("CreateDataset", "Command %s did NOT succeed", command.Data());
return kTRUE;
}
// Several runs
+ Bool_t nullResult = kTRUE;
if (fRunNumbers.Length()) {
TObjArray *arr = fRunNumbers.Tokenize(" ");
TObjString *os;
if (TestBit(AliAnalysisGrid::kTest)) file = "wn.xml";
else file = Form("%s.xml", os->GetString().Data());
// If local collection file does not exist, create it via 'find' command.
- if (gSystem->AccessPathName(file)) {
+ if (gSystem->AccessPathName(file) || TestBit(AliAnalysisGrid::kTest) || fOverwriteMode) {
command = "find ";
command += options;
command += path;
if (res) delete res;
// Write standard output to file
gROOT->ProcessLine(Form("gGrid->Stdout(); > %s", file.Data()));
- }
+ Bool_t hasGrep = (gSystem->Exec("grep --version 2>/dev/null > /dev/null")==0)?kTRUE:kFALSE;
+ Bool_t nullFile = kFALSE;
+ if (!hasGrep) {
+ Warning("CreateDataset", "'grep' command not available on this system - cannot validate the result of the grid 'find' command");
+ } else {
+ nullFile = (gSystem->Exec(Form("grep /event %s 2>/dev/null > /dev/null",file.Data()))==0)?kFALSE:kTRUE;
+ if (nullFile) {
+ Warning("CreateDataset","Dataset %s produced by: <%s> is empty !", file.Data(), command.Data());
+ fRunNumbers.ReplaceAll(os->GetString().Data(), "");
+ continue;
+ }
+ }
+ nullResult = kFALSE;
+ }
if (TestBit(AliAnalysisGrid::kTest)) break;
// Check if there is one run per master job.
if (fNrunsPerMaster<2) {
if (FileExists(file)) {
- Info("CreateDataset", "\n##### Dataset %s exist. Skipping creation...", file.Data());
- continue;
+ if (fOverwriteMode) gGrid->Rm(file);
+ else {
+ Info("CreateDataset", "\n##### Dataset %s exist. Skipping creation...", file.Data());
+ continue;
+ }
}
// Copy xml file to alien space
TFile::Cp(Form("file:%s",file.Data()), Form("alien://%s/%s",workdir.Data(), file.Data()));
continue;
}
schunk += Form("_%s.xml", os->GetString().Data());
- if (FileExists(schunk)) {
- Info("CreateDataset", "\n##### Dataset %s exist. Skipping creation...", schunk.Data());
- continue;
+ if (FileExists(schunk)) {
+ if (fOverwriteMode) gGrid->Rm(file);
+ else {
+ Info("CreateDataset", "\n##### Dataset %s exist. Skipping creation...", schunk.Data());
+ continue;
+ }
}
printf("Exporting merged collection <%s> and copying to AliEn\n", schunk.Data());
cbase->ExportXML(Form("file://%s", schunk.Data()),kFALSE,kFALSE, schunk, "Merged runs");
delete arr;
return kFALSE;
}
- }
+ }
}
delete arr;
+ if (nullResult) {
+ Error("CreateDataset", "No valid dataset corresponding to the query!");
+ return kFALSE;
+ }
} else {
// Process a full run range.
for (Int_t irun=fRunRange[0]; irun<=fRunRange[1]; irun++) {
// CdWork();
if (TestBit(AliAnalysisGrid::kTest)) file = "wn.xml";
else file = Form("%s%d.xml", fRunPrefix.Data(), irun);
- if (FileExists(file) && fNrunsPerMaster<2 && !TestBit(AliAnalysisGrid::kTest)) {
- Info("CreateDataset", "\n##### Dataset %s exist. Skipping creation...", file.Data());
-// gGrid->Rm(file);
- continue;
+ if (FileExists(file) && fNrunsPerMaster<2 && !TestBit(AliAnalysisGrid::kTest)) {
+ if (fOverwriteMode) gGrid->Rm(file);
+ else {
+ Info("CreateDataset", "\n##### Dataset %s exist. Skipping creation...", file.Data());
+ continue;
+ }
}
// If local collection file does not exist, create it via 'find' command.
- if (gSystem->AccessPathName(file)) {
+ if (gSystem->AccessPathName(file) || TestBit(AliAnalysisGrid::kTest) || fOverwriteMode) {
command = "find ";
command += options;
command += path;
if (res) delete res;
// Write standard output to file
gROOT->ProcessLine(Form("gGrid->Stdout(); > %s", file.Data()));
+ Bool_t hasGrep = (gSystem->Exec("grep --version 2>/dev/null > /dev/null")==0)?kTRUE:kFALSE;
+ Bool_t nullFile = kFALSE;
+ if (!hasGrep) {
+ Warning("CreateDataset", "'grep' command not available on this system - cannot validate the result of the grid 'find' command");
+ } else {
+ nullFile = (gSystem->Exec(Form("grep /event %s 2>/dev/null > /dev/null",file.Data()))==0)?kFALSE:kTRUE;
+ if (nullFile) {
+ Warning("CreateDataset","Dataset %s produced by: <%s> is empty !", file.Data(), command.Data());
+ continue;
+ }
+ }
+ nullResult = kFALSE;
}
if (TestBit(AliAnalysisGrid::kTest)) break;
// Check if there is one run per master job.
if (fNrunsPerMaster<2) {
if (FileExists(file)) {
- Info("CreateDataset", "\n##### Dataset %s exist. Skipping creation...", file.Data());
- continue;
+ if (fOverwriteMode) gGrid->Rm(file);
+ else {
+ Info("CreateDataset", "\n##### Dataset %s exist. Skipping creation...", file.Data());
+ continue;
+ }
}
// Copy xml file to alien space
TFile::Cp(Form("file:%s",file.Data()), Form("alien://%s/%s",workdir.Data(), file.Data()));
nruns++;
// Check if the collection for the chunk exist locally.
Int_t nchunk = (nruns-1)/fNrunsPerMaster;
- if (FileExists(fInputFiles->At(nchunk)->GetName())) continue;
+ if (FileExists(fInputFiles->At(nchunk)->GetName())) {
+ if (fOverwriteMode) gGrid->Rm(fInputFiles->At(nchunk)->GetName());
+ else continue;
+ }
printf(" Merging collection <%s> into %d runs chunk...\n",file.Data(),fNrunsPerMaster);
if (((nruns-1)%fNrunsPerMaster) == 0) {
schunk = Form("%s%d", fRunPrefix.Data(), irun);
}
schunk = schunk2;
if (FileExists(schunk)) {
- Info("CreateDataset", "\n##### Dataset %s exist. Skipping creation...", schunk.Data());
- continue;
+ if (fOverwriteMode) gGrid->Rm(schunk);
+ else {
+ Info("CreateDataset", "\n##### Dataset %s exist. Skipping creation...", schunk.Data());
+ continue;
+ }
}
printf("Exporting merged collection <%s> and copying to AliEn.\n", schunk.Data());
cbase->ExportXML(Form("file://%s", schunk.Data()),kFALSE,kFALSE, schunk, "Merged runs");
if (FileExists(schunk)) {
- Info("CreateDataset", "\n##### Dataset %s exist. Skipping copy...", schunk.Data());
- continue;
+ if (fOverwriteMode) gGrid->Rm(schunk);
+ else {
+ Info("CreateDataset", "\n##### Dataset %s exist. Skipping copy...", schunk.Data());
+ continue;
+ }
}
TFile::Cp(Form("file:%s",schunk.Data()), Form("alien://%s/%s",workdir.Data(), schunk.Data()));
if (!FileExists(schunk)) {
}
}
}
+ if (nullResult) {
+ Error("CreateDataset", "No valid dataset corresponding to the query!");
+ return kFALSE;
+ }
}
return kTRUE;
}
} else {
if (!fGridOutputDir.Contains("/")) fGridOutputDir = Form("%s/%s", workdir.Data(), fGridOutputDir.Data());
if (!DirectoryExists(fGridOutputDir)) {
- if (gGrid->Mkdir(fGridOutputDir)) {
+ if (gGrid->Mkdir(fGridOutputDir,"-p")) {
Info("CreateJDL", "\n##### Created alien output directory %s", fGridOutputDir.Data());
} else {
Error("CreateJDL", "Could not create alien output directory %s", fGridOutputDir.Data());
// Exit if any error up to now
if (error) return kFALSE;
// Set JDL fields
- fGridJDL->SetValue("User", Form("\"%s\"", fUser.Data()));
- fGridJDL->SetExecutable(fExecutable);
+ if (!fUser.IsNull()) {
+ fGridJDL->SetValue("User", Form("\"%s\"", fUser.Data()));
+ fMergingJDL->SetValue("User", Form("\"%s\"", fUser.Data()));
+ }
+ fGridJDL->SetExecutable(fExecutable, "This is the startup script");
+ TString mergeExec = fExecutable;
+ mergeExec.ReplaceAll(".sh", "_merge.sh");
+ fMergingJDL->SetExecutable(mergeExec, "This is the startup script");
+ mergeExec.ReplaceAll(".sh", ".C");
+ fMergingJDL->AddToInputSandbox(Form("LF:%s/%s", workdir.Data(),mergeExec.Data()), "List of input files to be uploaded to workers");
if (!fArguments.IsNull())
fGridJDL->SetArguments(fArguments, "Arguments for the executable command");
-// fGridJDL->SetTTL((UInt_t)fTTL);
- fGridJDL->SetValue("TTL", Form("\"%d\"", fTTL));
- if (fMaxInitFailed > 0)
+ fMergingJDL->SetArguments("$1 $2 $3");
+ fGridJDL->SetValue("TTL", Form("\"%d\"",fTTL));
+ fGridJDL->SetDescription("TTL", Form("Time after which the job is killed (%d min.)", fTTL/60));
+ fMergingJDL->SetValue("TTL", Form("\"%d\"",fTTL));
+ fMergingJDL->SetDescription("TTL", Form("Time after which the job is killed (%d min.)", fTTL/60));
+
+ if (fMaxInitFailed > 0) {
fGridJDL->SetValue("MaxInitFailed", Form("\"%d\"",fMaxInitFailed));
- if (fSplitMaxInputFileNumber > 0)
+ fGridJDL->SetDescription("MaxInitFailed", "Maximum number of first failing jobs to abort the master job");
+ }
+ if (fSplitMaxInputFileNumber > 0) {
fGridJDL->SetValue("SplitMaxInputFileNumber", Form("\"%d\"", fSplitMaxInputFileNumber));
- if (fSplitMode.Length())
+ fGridJDL->SetDescription("SplitMaxInputFileNumber", "Maximum number of input files to be processed per subjob");
+ }
+ if (fSplitMode.Length()) {
fGridJDL->SetValue("Split", Form("\"%s\"", fSplitMode.Data()));
-// fGridJDL->SetSplitMode(fSplitMode, (UInt_t)fSplitMaxInputFileNumber);
- if (fAliROOTVersion.Length())
- fGridJDL->AddToPackages("AliRoot", fAliROOTVersion);
- if (fROOTVersion.Length())
+ fGridJDL->SetDescription("Split", "We split per SE or file");
+ }
+ if (!fAliROOTVersion.IsNull()) {
+ fGridJDL->AddToPackages("AliRoot", fAliROOTVersion,"VO_ALICE", "List of requested packages");
+ fMergingJDL->AddToPackages("AliRoot", fAliROOTVersion, "VO_ALICE", "List of requested packages");
+ }
+ if (!fROOTVersion.IsNull()) {
fGridJDL->AddToPackages("ROOT", fROOTVersion);
- if (fAPIVersion.Length())
+ fMergingJDL->AddToPackages("ROOT", fROOTVersion);
+ }
+ if (!fAPIVersion.IsNull()) {
fGridJDL->AddToPackages("APISCONFIG", fAPIVersion);
+ fMergingJDL->AddToPackages("APISCONFIG", fAPIVersion);
+ }
if (!fExternalPackages.IsNull()) {
arr = fExternalPackages.Tokenize(" ");
TIter next(arr);
TString pkgversion = pkgname(index+2, pkgname.Length());
pkgname.Remove(index);
fGridJDL->AddToPackages(pkgname, pkgversion);
+ fMergingJDL->AddToPackages(pkgname, pkgversion);
}
delete arr;
}
- fGridJDL->SetInputDataListFormat(fInputFormat);
- fGridJDL->SetInputDataList("wn.xml");
- fGridJDL->AddToInputSandbox(Form("LF:%s/%s", workdir.Data(), fAnalysisMacro.Data()));
+ fGridJDL->SetInputDataListFormat(fInputFormat, "Format of input data");
+ fGridJDL->SetInputDataList("wn.xml", "Collection name to be processed on each worker node");
+ fGridJDL->AddToInputSandbox(Form("LF:%s/%s", workdir.Data(), fAnalysisMacro.Data()), "List of input files to be uploaded to workers");
TString analysisFile = fExecutable;
analysisFile.ReplaceAll(".sh", ".root");
fGridJDL->AddToInputSandbox(Form("LF:%s/%s", workdir.Data(),analysisFile.Data()));
+ fMergingJDL->AddToInputSandbox(Form("LF:%s/%s", workdir.Data(),analysisFile.Data()));
if (IsUsingTags() && !gSystem->AccessPathName("ConfigureCuts.C"))
fGridJDL->AddToInputSandbox(Form("LF:%s/ConfigureCuts.C", workdir.Data()));
if (fAdditionalLibs.Length()) {
while ((os=(TObjString*)next())) {
if (os->GetString().Contains(".so")) continue;
fGridJDL->AddToInputSandbox(Form("LF:%s/%s", workdir.Data(), os->GetString().Data()));
+ fMergingJDL->AddToInputSandbox(Form("LF:%s/%s", workdir.Data(), os->GetString().Data()));
}
delete arr;
}
if (fPackages) {
TIter next(fPackages);
TObject *obj;
- while ((obj=next()))
+ while ((obj=next())) {
fGridJDL->AddToInputSandbox(Form("LF:%s/%s", workdir.Data(), obj->GetName()));
+ fMergingJDL->AddToInputSandbox(Form("LF:%s/%s", workdir.Data(), obj->GetName()));
+ }
}
if (fOutputArchive.Length()) {
arr = fOutputArchive.Tokenize(" ");
TIter next(arr);
- while ((os=(TObjString*)next()))
- if (!os->GetString().Contains("@") && fCloseSE.Length())
- fGridJDL->AddToOutputArchive(Form("%s@%s",os->GetString().Data(), fCloseSE.Data()));
- else
- fGridJDL->AddToOutputArchive(os->GetString());
+ Bool_t first = kTRUE;
+ const char *comment = "Files to be archived";
+ const char *comment1 = comment;
+ while ((os=(TObjString*)next())) {
+ if (!first) comment = NULL;
+ if (!os->GetString().Contains("@") && fCloseSE.Length())
+ fGridJDL->AddToOutputArchive(Form("%s@%s",os->GetString().Data(), fCloseSE.Data()), comment);
+ else
+ fGridJDL->AddToOutputArchive(os->GetString(), comment);
+ first = kFALSE;
+ }
delete arr;
+ TString outputArchive = fOutputArchive;
+ if (!fMergeExcludes.IsNull()) {
+ arr = fMergeExcludes.Tokenize(" ");
+ TIter next1(arr);
+ while ((os=(TObjString*)next1())) {
+ outputArchive.ReplaceAll(Form("%s,",os->GetString().Data()),"");
+ outputArchive.ReplaceAll(os->GetString(),"");
+ }
+ delete arr;
+ }
+ arr = outputArchive.Tokenize(" ");
+ TIter next2(arr);
+ comment = comment1;
+ first = kTRUE;
+ while ((os=(TObjString*)next2())) {
+ if (!first) comment = NULL;
+ TString currentfile = os->GetString();
+ currentfile.ReplaceAll(".root", "*.root");
+ currentfile.ReplaceAll(".zip", "-Stage$2_$3.zip");
+ if (!currentfile.Contains("@") && fCloseSE.Length())
+ fMergingJDL->AddToOutputArchive(Form("%s@%s",currentfile.Data(), fCloseSE.Data()), comment);
+ else
+ fMergingJDL->AddToOutputArchive(currentfile, comment);
+ first = kFALSE;
+ }
+ delete arr;
}
- arr = fOutputFiles.Tokenize(" ");
+ arr = fOutputFiles.Tokenize(",");
TIter next(arr);
+ Bool_t first = kTRUE;
+ const char *comment = "Files to be archived";
+ const char *comment1 = comment;
while ((os=(TObjString*)next())) {
// Ignore outputs in jdl that are also in outputarchive
TString sout = os->GetString();
if (sout.Index("@")>0) sout.Remove(sout.Index("@"));
if (fOutputArchive.Contains(sout)) continue;
+ if (!first) comment = NULL;
if (!os->GetString().Contains("@") && fCloseSE.Length())
- fGridJDL->AddToOutputSandbox(Form("%s@%s",os->GetString().Data(), fCloseSE.Data()));
+ fGridJDL->AddToOutputSandbox(Form("%s@%s",os->GetString().Data(), fCloseSE.Data()), comment);
else
- fGridJDL->AddToOutputSandbox(os->GetString());
+ fGridJDL->AddToOutputSandbox(os->GetString(), comment);
+ first = kFALSE;
}
delete arr;
-// fGridJDL->SetPrice((UInt_t)fPrice);
- fGridJDL->SetValue("Price", Form("\"%d\"", fPrice));
+ if (fOutputFiles.Length()) {
+ TString outputFiles = fOutputFiles;
+ if (!fMergeExcludes.IsNull()) {
+ arr = fMergeExcludes.Tokenize(" ");
+ TIter next1(arr);
+ while ((os=(TObjString*)next1())) {
+ outputFiles.ReplaceAll(Form("%s,",os->GetString().Data()),"");
+ outputFiles.ReplaceAll(os->GetString(),"");
+ }
+ delete arr;
+ }
+ arr = outputFiles.Tokenize(" ");
+ TIter next2(arr);
+ comment = comment1;
+ first = kTRUE;
+ while ((os=(TObjString*)next2())) {
+ // Ignore outputs in jdl that are also in outputarchive
+ TString sout = os->GetString();
+ if (sout.Index("@")>0) sout.Remove(sout.Index("@"));
+ if (fOutputArchive.Contains(sout)) continue;
+ if (!first) comment = NULL;
+ if (!os->GetString().Contains("@") && fCloseSE.Length())
+ fMergingJDL->AddToOutputSandbox(Form("%s@%s",os->GetString().Data(), fCloseSE.Data()), comment);
+ else
+ fMergingJDL->AddToOutputSandbox(os->GetString(), comment);
+ }
+ delete arr;
+ }
+ fGridJDL->SetPrice((UInt_t)fPrice, "AliEn price for this job");
+ fMergingJDL->SetPrice((UInt_t)fPrice, "AliEn price for this job");
TString validationScript = fExecutable;
validationScript.ReplaceAll(".sh", "_validation.sh");
- fGridJDL->SetValidationCommand(Form("%s/%s", workdir.Data(),validationScript.Data()));
- if (fMasterResubmitThreshold) fGridJDL->SetValue("MasterResubmitThreshold", Form("\"%d%%\"", fMasterResubmitThreshold));
+ fGridJDL->SetValidationCommand(Form("%s/%s", workdir.Data(),validationScript.Data()), "Validation script to be run for each subjob");
+ validationScript = fExecutable;
+ validationScript.ReplaceAll(".sh", "_mergevalidation.sh");
+ fMergingJDL->SetValidationCommand(Form("%s/%s", workdir.Data(),validationScript.Data()), "Validation script to be run for each subjob");
+ if (fMasterResubmitThreshold) {
+ fGridJDL->SetValue("MasterResubmitThreshold", Form("\"%d%%\"", fMasterResubmitThreshold));
+ fGridJDL->SetDescription("MasterResubmitThreshold", "Resubmit failed jobs until DONE rate reaches this percentage");
+ }
// Write a jdl with 2 input parameters: collection name and output dir name.
WriteJDL(copy);
}
return kFALSE;
} else {
if (!fGridOutputDir.Contains("/")) fGridOutputDir = Form("%s/%s", workdir.Data(), fGridOutputDir.Data());
- if (!DirectoryExists(fGridOutputDir)) {
- if (gGrid->Mkdir(fGridOutputDir)) {
+ if (!fProductionMode && !DirectoryExists(fGridOutputDir)) {
+ if (gGrid->Mkdir(fGridOutputDir,"-p")) {
Info("CreateJDL", "\n##### Created alien output directory %s", fGridOutputDir.Data());
} else {
Error("CreateJDL", "Could not create alien output directory %s", fGridOutputDir.Data());
gGrid->Cd(workdir);
}
if (TestBit(AliAnalysisGrid::kSubmit)) {
- Info("CreateJDL", "\n##### Copying JDL file <%s> to your AliEn output directory", fJDLName.Data());
+ TString mergeJDLName = fExecutable;
+ mergeJDLName.ReplaceAll(".sh", "_merge.jdl");
TString locjdl = Form("%s/%s", fGridOutputDir.Data(),fJDLName.Data());
- if (fProductionMode)
+ TString locjdl1 = Form("%s/%s", fGridOutputDir.Data(),mergeJDLName.Data());
+ if (fProductionMode) {
locjdl = Form("%s/%s", workdir.Data(),fJDLName.Data());
+ locjdl1 = Form("%s/%s", workdir.Data(),mergeJDLName.Data());
+ }
if (FileExists(locjdl)) gGrid->Rm(locjdl);
+ if (FileExists(locjdl1)) gGrid->Rm(locjdl1);
+ Info("CreateJDL", "\n##### Copying JDL file <%s> to your AliEn output directory", fJDLName.Data());
TFile::Cp(Form("file:%s",fJDLName.Data()), Form("alien://%s", locjdl.Data()));
+ if (fMergeViaJDL) {
+ Info("CreateJDL", "\n##### Copying merging JDL file <%s> to your AliEn output directory", mergeJDLName.Data());
+ TFile::Cp(Form("file:%s",mergeJDLName.Data()), Form("alien://%s", locjdl1.Data()));
+ }
}
if (fAdditionalLibs.Length()) {
arr = fAdditionalLibs.Tokenize(" ");
TIter next(fPackages);
TObject *obj;
while ((obj=next())) {
+ if (FileExists(obj->GetName())) gGrid->Rm(obj->GetName());
Info("CreateJDL", "\n##### Copying dependency: <%s> to your alien workspace", obj->GetName());
TFile::Cp(Form("file:%s",obj->GetName()), Form("alien://%s/%s", workdir.Data(), obj->GetName()));
}
// One jdl with no parameters in case input data is specified by name.
TIter next(fInputFiles);
while ((os=(TObjString*)next()))
- fGridJDL->AddToInputDataCollection(Form("LF:%s,nodownload", os->GetString().Data()));
+ fGridJDL->AddToInputDataCollection(Form("LF:%s,nodownload", os->GetString().Data()), "Input xml collections");
if (!fOutputSingle.IsNull())
- fGridJDL->SetOutputDirectory(Form("#alienfulldir#/%s",fOutputSingle.Data()));
- else
- fGridJDL->SetOutputDirectory(Form("%s/#alien_counter_03i#", fGridOutputDir.Data()));
+ fGridJDL->SetOutputDirectory(Form("#alienfulldir#/../%s",fOutputSingle.Data()), "Output directory");
+ else {
+ fGridJDL->SetOutputDirectory(Form("%s/#alien_counter_03i#", fGridOutputDir.Data()), "Output directory");
+ fMergingJDL->SetOutputDirectory(fGridOutputDir);
+ }
} else {
// One jdl to be submitted with 2 input parameters: data collection name and output dir prefix
- fGridJDL->AddToInputDataCollection(Form("LF:%s/$1,nodownload", workdir.Data()));
- if (!fOutputSingle.IsNull())
- fGridJDL->SetOutputDirectory(Form("#alienfulldir#/%s",fOutputSingle.Data()));
- fGridJDL->SetOutputDirectory(Form("%s/$2/#alien_counter_03i#", fGridOutputDir.Data()));
+ fGridJDL->AddToInputDataCollection(Form("LF:%s/$1,nodownload", workdir.Data()), "Input xml collections");
+ if (!fOutputSingle.IsNull()) {
+ if (!fOutputToRunNo) fGridJDL->SetOutputDirectory(Form("#alienfulldir#/%s",fOutputSingle.Data()), "Output directory");
+ else fGridJDL->SetOutputDirectory(Form("%s/$2",fGridOutputDir.Data()), "Output directory");
+ } else {
+ fGridJDL->SetOutputDirectory(Form("%s/$2/#alien_counter_03i#", fGridOutputDir.Data()), "Output directory");
+ fMergingJDL->SetOutputDirectory(Form("$1", fGridOutputDir.Data()), "Output directory");
+ }
}
// Generate the JDL as a string
TString sjdl = fGridJDL->Generate();
+ TString sjdl1 = fMergingJDL->Generate();
Int_t index;
- index = sjdl.Index("Executable");
- if (index >= 0) sjdl.Insert(index, "\n# This is the startup script\n");
- index = sjdl.Index("Split ");
- if (index >= 0) sjdl.Insert(index, "\n# We split per SE or file\n");
- index = sjdl.Index("SplitMaxInputFileNumber");
- if (index >= 0) sjdl.Insert(index, "\n# We want each subjob to get maximum this number of input files\n");
- index = sjdl.Index("InputDataCollection");
- if (index >= 0) sjdl.Insert(index, "# Input xml collections\n");
- index = sjdl.Index("InputFile");
- if (index >= 0) sjdl.Insert(index, "\n# List of input files to be uploaded to wn's\n");
- index = sjdl.Index("InputDataList ");
- if (index >= 0) sjdl.Insert(index, "\n# Collection to be processed on wn\n");
- index = sjdl.Index("InputDataListFormat");
- if (index >= 0) sjdl.Insert(index, "\n# Format of input data\n");
- index = sjdl.Index("Price");
- if (index >= 0) sjdl.Insert(index, "\n# AliEn price for this job\n");
- index = sjdl.Index("Requirements");
- if (index >= 0) sjdl.Insert(index, "\n# Additional requirements for the computing element\n");
- index = sjdl.Index("Packages");
- if (index >= 0) sjdl.Insert(index, "\n# Packages to be used\n");
- index = sjdl.Index("User =");
- if (index >= 0) sjdl.Insert(index, "\n# AliEn user\n");
- index = sjdl.Index("TTL");
- if (index >= 0) sjdl.Insert(index, "\n# Time to live for the job\n");
- index = sjdl.Index("OutputFile");
- if (index >= 0) sjdl.Insert(index, "\n# List of output files to be registered\n");
- index = sjdl.Index("OutputDir");
- if (index >= 0) sjdl.Insert(index, "\n# Output directory\n");
- index = sjdl.Index("OutputArchive");
- if (index >= 0) sjdl.Insert(index, "\n# Files to be archived\n");
- index = sjdl.Index("MaxInitFailed");
- if (index >= 0) sjdl.Insert(index, "\n# Maximum number of first failing jobs to abort the master job\n");
- index = sjdl.Index("MasterResubmitThreshold");
- if (index >= 0) sjdl.Insert(index, "\n# Resubmit failed jobs until DONE rate reaches this percentage\n");
- sjdl.ReplaceAll("ValidationCommand", "Validationcommand");
- index = sjdl.Index("Validationcommand");
- if (index >= 0) sjdl.Insert(index, "\n# Validation script to be run for each subjob\n");
sjdl.ReplaceAll("\"LF:", "\n \"LF:");
sjdl.ReplaceAll("(member", "\n (member");
sjdl.ReplaceAll("\",\"VO_", "\",\n \"VO_");
sjdl.ReplaceAll("{\n \n", "{\n");
sjdl.ReplaceAll("\n\n", "\n");
sjdl.ReplaceAll("OutputDirectory", "OutputDir");
+ sjdl1.ReplaceAll("\"LF:", "\n \"LF:");
+ sjdl1.ReplaceAll("(member", "\n (member");
+ sjdl1.ReplaceAll("\",\"VO_", "\",\n \"VO_");
+ sjdl1.ReplaceAll("{", "{\n ");
+ sjdl1.ReplaceAll("};", "\n};");
+ sjdl1.ReplaceAll("{\n \n", "{\n");
+ sjdl1.ReplaceAll("\n\n", "\n");
+ sjdl1.ReplaceAll("OutputDirectory", "OutputDir");
sjdl += "JDLVariables = \n{\n \"Packages\",\n \"OutputDir\"\n};\n";
sjdl.Prepend(Form("Jobtag = {\n \"comment:%s\"\n};\n", fJobTag.Data()));
index = sjdl.Index("JDLVariables");
if (index >= 0) sjdl.Insert(index, "\n# JDL variables\n");
+ sjdl += "Workdirectorysize = {\"5000MB\"};";
+ sjdl1 += "JDLVariables = \n{\n \"Packages\",\n \"OutputDir\"\n};\n";
+ sjdl1.Prepend(Form("Jobtag = {\n \"comment:%s_Merging\"\n};\n", fJobTag.Data()));
+ sjdl1.Prepend("# Generated merging jdl\n# $1 = full alien path to output directory to be merged\n# $2 = merging stage\n# $3 = merged chunk\n");
+ index = sjdl1.Index("JDLVariables");
+ if (index >= 0) sjdl1.Insert(index, "\n# JDL variables\n");
+ sjdl1 += "Workdirectorysize = {\"5000MB\"};";
// Write jdl to file
ofstream out;
out.open(fJDLName.Data(), ios::out);
return kFALSE;
}
out << sjdl << endl;
+ TString mergeJDLName = fExecutable;
+ mergeJDLName.ReplaceAll(".sh", "_merge.jdl");
+ if (fMergeViaJDL) {
+ ofstream out1;
+ out1.open(mergeJDLName.Data(), ios::out);
+ if (out.bad()) {
+ Error("CreateJDL", "Bad file name: %s", mergeJDLName.Data());
+ return kFALSE;
+ }
+ out1 << sjdl1 << endl;
+ }
// Copy jdl to grid workspace
if (!copy) {
Info("CreateJDL", "\n##### You may want to review jdl:%s and analysis macro:%s before running in <submit> mode", fJDLName.Data(), fAnalysisMacro.Data());
} else {
- Info("CreateJDL", "\n##### Copying JDL file <%s> to your AliEn output directory", fJDLName.Data());
TString locjdl = Form("%s/%s", fGridOutputDir.Data(),fJDLName.Data());
- if (fProductionMode)
+ TString locjdl1 = Form("%s/%s", fGridOutputDir.Data(),mergeJDLName.Data());
+ if (fProductionMode) {
locjdl = Form("%s/%s", workdir.Data(),fJDLName.Data());
+ locjdl1 = Form("%s/%s", workdir.Data(),mergeJDLName.Data());
+ }
if (FileExists(locjdl)) gGrid->Rm(locjdl);
+ if (FileExists(locjdl1)) gGrid->Rm(locjdl1);
+ Info("CreateJDL", "\n##### Copying JDL file <%s> to your AliEn output directory", fJDLName.Data());
TFile::Cp(Form("file:%s",fJDLName.Data()), Form("alien://%s", locjdl.Data()));
+ if (fMergeViaJDL) {
+ Info("CreateJDL", "\n##### Copying merging JDL file <%s> to your AliEn output directory", mergeJDLName.Data());
+ TFile::Cp(Form("file:%s",mergeJDLName.Data()), Form("alien://%s", locjdl1.Data()));
+ }
}
return kTRUE;
}
}
//______________________________________________________________________________
-void AliAnalysisAlien::CheckDataType(const char *lfn, Bool_t &is_collection, Bool_t &is_xml, Bool_t &use_tags)
+void AliAnalysisAlien::CheckDataType(const char *lfn, Bool_t &isCollection, Bool_t &isXml, Bool_t &useTags)
{
// Check input data type. Classifies <lfn> and fills the three output flags:
// whether it is a raw collection, an XML collection, and whether tag files
// are involved. All flags are reset to kFALSE before classification.
- is_collection = kFALSE;
- is_xml = kFALSE;
- use_tags = kFALSE;
+ isCollection = kFALSE;
+ isXml = kFALSE;
+ useTags = kFALSE;
if (!gGrid) {
Error("CheckDataType", "No connection to grid");
return;
}
- is_collection = IsCollection(lfn);
+ isCollection = IsCollection(lfn);
TString msg = "\n##### file: ";
msg += lfn;
- if (is_collection) {
+ if (isCollection) {
msg += " type: raw_collection;";
// special treatment for collections
- is_xml = kFALSE;
+ isXml = kFALSE;
// check for tag files in the collection
TGridResult *res = gGrid->Command(Form("listFilesFromCollection -z -v %s",lfn), kFALSE);
if (!res) {
return;
}
TString file = typeStr;
- use_tags = file.Contains(".tag");
- if (use_tags) msg += " using_tags: Yes";
+ useTags = file.Contains(".tag");
+ if (useTags) msg += " using_tags: Yes";
else msg += " using_tags: No";
Info("CheckDataType", msg.Data());
return;
}
TString slfn(lfn);
slfn.ToLower();
- is_xml = slfn.Contains(".xml");
- if (is_xml) {
+ isXml = slfn.Contains(".xml");
+ if (isXml) {
// Open xml collection and check if there are tag files inside
msg += " type: xml_collection;";
TGridCollection *coll = (TGridCollection*)gROOT->ProcessLine(Form("TAlienCollection::Open(\"alien://%s\",1);",lfn));
map = (TMap*)map->GetValue("");
TString file;
if (map && map->GetValue("name")) file = map->GetValue("name")->GetName();
- use_tags = file.Contains(".tag");
+ useTags = file.Contains(".tag");
delete coll;
- if (use_tags) msg += " using_tags: Yes";
+ if (useTags) msg += " using_tags: Yes";
else msg += " using_tags: No";
Info("CheckDataType", msg.Data());
return;
}
- use_tags = slfn.Contains(".tag");
+ useTags = slfn.Contains(".tag");
if (slfn.Contains(".root")) msg += " type: root file;";
- else msg += " type: unhnown file;";
- if (use_tags) msg += " using_tags: Yes";
+ else msg += " type: unknown file;";
+ if (useTags) msg += " using_tags: Yes";
else msg += " using_tags: No";
Info("CheckDataType", msg.Data());
}
pkg.ReplaceAll(".par", "");
pkg += ".par";
if (gSystem->AccessPathName(pkg)) {
- Error("EnablePackage", "Package %s not found", pkg.Data());
+ Fatal("EnablePackage", "Package %s not found", pkg.Data());
return;
}
if (!TObject::TestBit(AliAnalysisGrid::kUsePars))
return kFALSE;
}
+//______________________________________________________________________________
+Bool_t AliAnalysisAlien::IsSingleOutput() const
+{
+// Check if the single-output option is on (true when fOutputSingle holds a
+// file name).
+ return (!fOutputSingle.IsNull());
+}
+
//______________________________________________________________________________
void AliAnalysisAlien::Print(Option_t *) const
{
// Print current plugin settings.
printf("### AliEn analysis plugin current settings ###\n");
+ printf("= OverwriteMode:________________________________ %d\n", fOverwriteMode);
+ if (fOverwriteMode) {
+ printf("***** NOTE: Overwrite mode will overwrite the input generated datasets and partial results from previous analysis. \
+ \n***** To disable, use: plugin->SetOverwriteMode(kFALSE);\n");
+ }
+ printf("= Copy files to grid: __________________________ %s\n", (IsUseCopy())?"YES":"NO");
+ printf("= Check if files can be copied to grid: ________ %s\n", (IsCheckCopy())?"YES":"NO");
printf("= Production mode:______________________________ %d\n", fProductionMode);
printf("= Version of API requested: ____________________ %s\n", fAPIVersion.Data());
printf("= Version of ROOT requested: ___________________ %s\n", fROOTVersion.Data());
printf("= Max number of subjob fails to kill: __________ %d\n", fMaxInitFailed);
if (fMasterResubmitThreshold>0)
printf("= Resubmit master job if failed subjobs >_______ %d\n", fMasterResubmitThreshold);
+ printf("= Number of replicas for the output files_______ %d\n", fNreplicas);
if (fNrunsPerMaster>0)
printf("= Number of runs per master job: _______________ %d\n", fNrunsPerMaster);
printf("= Number of files in one chunk to be merged: ___ %d\n", fMaxMergeFiles);
// Set default values for everything. What cannot be filled will be left empty.
if (fGridJDL) delete fGridJDL;
fGridJDL = (TGridJDL*)gROOT->ProcessLine("new TAlienJDL()");
+ fMergingJDL = (TGridJDL*)gROOT->ProcessLine("new TAlienJDL()");
fPrice = 1;
fTTL = 30000;
fSplitMaxInputFileNumber = 100;
fMaxInitFailed = 0;
fMasterResubmitThreshold = 0;
fNtestFiles = 10;
+ fNreplicas = 2;
fRunRange[0] = 0;
fRunRange[1] = 0;
fNrunsPerMaster = 1;
fDataPattern = "*AliESDs.root"; // Can be like: *AliESDs.root, */pass1/*AliESDs.root, ...
fFriendChainName = "";
fGridOutputDir = "output";
- fOutputArchive = "log_archive.zip:stdout,stderr root_archive.zip:*.root";
+ fOutputArchive = "log_archive.zip:std*@disk=1 root_archive.zip:*.root@disk=2";
fOutputFiles = ""; // Like "AliAODs.root histos.root"
fInputFormat = "xml-single";
fJDLName = "analysis.jdl";
fJobTag = "Automatically generated analysis JDL";
fMergeExcludes = "";
+ fMergeViaJDL = 0;
+ SetUseCopy(kTRUE);
+ SetCheckCopy(kTRUE);
+ SetDefaultOutputs(kTRUE);
+ fOverwriteMode = 1;
}
//______________________________________________________________________________
-Bool_t AliAnalysisAlien::MergeOutputs()
+Bool_t AliAnalysisAlien::CheckMergedFiles(const char *filename, const char *aliendir, Int_t nperchunk, Bool_t submit, const char *jdl)
{
-// Merge analysis outputs existing in the AliEn space.
- if (TestBit(AliAnalysisGrid::kTest)) return kTRUE;
- if (TestBit(AliAnalysisGrid::kOffline)) return kFALSE;
- if (!Connect()) {
- Error("MergeOutputs", "Cannot merge outputs without grid connection. Terminate will NOT be executed");
- return kFALSE;
- }
- // Get the output path
- if (!fGridOutputDir.Contains("/")) fGridOutputDir = Form("/%s/%s/%s", gGrid->GetHomeDirectory(), fGridWorkingDir.Data(), fGridOutputDir.Data());
- if (!DirectoryExists(fGridOutputDir)) {
- Error("MergeOutputs", "Grid output directory %s not found. Terminate() will NOT be executed", fGridOutputDir.Data());
+// Static method that checks the status of merging. This can submit merging jobs that did not produce the expected
+// output. If <submit> is false (checking) returns true only when the final merged file was found. If submit is true returns
+// true if the jobs were successfully submitted.
+// NOTE(review): the ::Error tags below still say "GetNregisteredFiles" - looks like a copy/paste leftover; confirm before renaming.
+ Int_t countOrig = 0;
+ Int_t countStage = 0;
+ Int_t stage = 0;
+ Int_t i;
+ Bool_t doneFinal = kFALSE;
+ TBits chunksDone;
+ TString saliendir(aliendir);
+ TString sfilename, stmp;
+ saliendir.ReplaceAll("//","/");
+ saliendir = saliendir.Strip(TString::kTrailing, '/');
+ if (!gGrid) {
+ ::Error("GetNregisteredFiles", "You need to be connected to AliEn.");
return kFALSE;
}
- if (!fOutputFiles.Length()) {
- Error("MergeOutputs", "No output file names defined. Are you running the right AliAnalysisAlien configuration ?");
+ sfilename = filename;
+ sfilename.ReplaceAll(".root", "*.root");
+ printf("Checking directory <%s> for merged files <%s> ...\n", aliendir, sfilename.Data());
+ TString command = Form("find %s/ *%s", saliendir.Data(), sfilename.Data());
+ TGridResult *res = gGrid->Command(command);
+ if (!res) {
+ ::Error("GetNregisteredFiles","Error: No result for the find command\n");
return kFALSE;
+ }
+ TIter nextmap(res);
+ TMap *map = 0;
+ while ((map=(TMap*)nextmap())) {
+ TString turl = map->GetValue("turl")->GetName();
+ if (!turl.Length()) {
+ // Nothing found
+ delete res;
+ return kFALSE;
+ }
+ turl.ReplaceAll("alien://", "");
+ turl.ReplaceAll(saliendir, "");
+ sfilename = gSystem->BaseName(turl);
+ turl = turl.Strip(TString::kLeading, '/');
+ // Now check what the file corresponds to:
+ // original output - aliendir/%03d/filename
+ // merged file (which stage) - aliendir/filename-Stage%02d_%04d
+ // final merged file - aliendir/filename
+ if (sfilename == turl) {
+ if (sfilename == filename) {
+ doneFinal = kTRUE;
+ } else {
+ // check stage
+ Int_t index = sfilename.Index("Stage");
+ if (index<0) continue;
+ stmp = sfilename(index+5,2);
+ Int_t istage = atoi(stmp);
+ stmp = sfilename(index+8,4);
+ Int_t ijob = atoi(stmp);
+ if (istage<stage) continue; // Ignore lower stages
+ if (istage>stage) {
+ countStage = 0;
+ chunksDone.ResetAllBits();
+ stage = istage;
+ }
+ countStage++;
+ chunksDone.SetBitNumber(ijob);
+ }
+ } else {
+ countOrig++;
+ }
+ if (doneFinal) {
+ delete res;
+ printf("=> Removing files from previous stages...\n");
+ gGrid->Rm(Form("%s/*Stage*.root", aliendir));
+ return kTRUE;
+ }
+ }
+ delete res;
+ // Compute number of jobs that were submitted for the current stage
+ Int_t ntotstage = countOrig;
+ for (i=1; i<=stage; i++) {
+ if (ntotstage%nperchunk) ntotstage = (ntotstage/nperchunk)+1;
+ else ntotstage = (ntotstage/nperchunk);
}
- TObjArray *list = fOutputFiles.Tokenize(" ");
- TIter next(list);
- TObjString *str;
+ // Now compare with the number of set bits in the chunksDone array
+ Int_t nmissing = (stage>0)?(ntotstage - countStage):0;
+ // Print the info
+ printf("*** Found %d original files\n", countOrig);
+ if (stage==0) printf("*** No merging completed so far.\n");
+ else printf("*** Found %d out of %d files merged for stage %d\n", countStage, ntotstage, stage);
+ if (nmissing) printf("*** Number of merged files missing for this stage: %d -> check merging job completion\n", nmissing);
+ if (!submit) return doneFinal;
+ // Submit merging jobs for all missing chunks for the current stage.
+ TString query = Form("submit %s %s", jdl, aliendir);
+ Int_t ichunk = -1;
+ if (nmissing) {
+ for (i=0; i<nmissing; i++) {
+ ichunk = chunksDone.FirstNullBit(ichunk+1);
+ Int_t jobId = SubmitSingleJob(Form("%s %d %d", query.Data(), stage, ichunk));
+ if (!jobId) return kFALSE;
+ }
+ return kTRUE;
+ }
+ // Submit next stage of merging
+ if (stage==0) countStage = countOrig;
+ Int_t nchunks = (countStage/nperchunk);
+ if (countStage%nperchunk) nchunks += 1;
+ for (i=0; i<nchunks; i++) {
+ Int_t jobId = SubmitSingleJob(Form("%s %d %d", query.Data(), stage+1, i));
+ if (!jobId) return kFALSE;
+ }
+ return kTRUE;
+}
+
+//______________________________________________________________________________
+Int_t AliAnalysisAlien::SubmitSingleJob(const char *query)
+{
+// Submits a single job corresponding to the query and returns the job id
+// (0 if the submission failed).
+ if (!gGrid) return 0;
+ printf("=> %s ------> ",query);
+ TGridResult *res = gGrid->Command(query);
+ if (!res) return 0;
+ TString jobId = res->GetKey(0,"jobId");
+ delete res;
+ if (jobId.IsNull()) {
+ printf("submission failed. Reason:\n");
+ gGrid->Stdout();
+ gGrid->Stderr();
+ ::Error("SubmitSingleJob", "Your query %s could not be submitted", query);
+ return 0;
+ }
+ printf(" Job id: %s\n", jobId.Data());
+ return atoi(jobId);
+}
+
+//______________________________________________________________________________
+Bool_t AliAnalysisAlien::MergeOutput(const char *output, const char *basedir, Int_t nmaxmerge, Int_t stage, Int_t ichunk)
+{
+// Merge given output files from basedir. The file merger will merge nmaxmerge
+// files in a group. Merging can be done in stages:
+// stage=0 : will merge all existing files in a single stage
+// stage=1 : does a find command for all files that do NOT contain the string "Stage".
+// If their number is bigger than nmaxmerge, only the files from
+// ichunk*nmaxmerge to (ichunk+1)*nmaxmerge-1 will get merged as output_stage_<ichunk>
+// stage=n : does a find command for files named <output>Stage<stage-1>_*. If their number is bigger than
+// nmaxmerge, merge just the chunk ichunk, otherwise write the merged output to the file
+// named <output>.
+ TString outputFile = output;
TString command;
- TString output_file;
- TString output_chunk;
- TString previous_chunk;
- Int_t count_chunk = 0;
- Int_t count_zero = fMaxMergeFiles;
+ TString outputChunk;
+ TString previousChunk = "";
+ Int_t countChunk = 0;
+ Int_t countZero = nmaxmerge;
Bool_t merged = kTRUE;
- while((str=(TObjString*)next())) {
- output_file = str->GetString();
- Int_t index = output_file.Index("@");
- if (index > 0) output_file.Remove(index);
- // Skip already merged outputs
- if (!gSystem->AccessPathName(output_file)) {
- Info("MergeOutputs", "Output file <%s> found. Not merging again.", output_file.Data());
- continue;
- }
- if (fMergeExcludes.Length() &&
- fMergeExcludes.Contains(output_file.Data())) continue;
- // Perform a 'find' command in the output directory, looking for registered outputs
- command = Form("find %s/ *%s", fGridOutputDir.Data(), output_file.Data());
- printf("command: %s\n", command.Data());
- TGridResult *res = gGrid->Command(command);
- if (!res) continue;
- TFileMerger *fm = 0;
- TIter nextmap(res);
- TMap *map = 0;
- previous_chunk = "";
- count_chunk = 0;
- // Check if there is a merge operation to resume
- output_chunk = output_file;
- output_chunk.ReplaceAll(".root", "_*.root");
- if (!gSystem->Exec(Form("ls %s", output_chunk.Data()))) {
+ Int_t index = outputFile.Index("@");
+ if (index > 0) outputFile.Remove(index);
+ TString inputFile = outputFile;
+ if (stage>1) inputFile.ReplaceAll(".root", Form("-Stage%02d_*.root", stage-1));
+ command = Form("find %s/ *%s", basedir, inputFile.Data());
+ printf("command: %s\n", command.Data());
+ TGridResult *res = gGrid->Command(command);
+ if (!res) {
+ ::Error("MergeOutput","No result for the find command\n");
+ return kFALSE;
+ }
+
+ TFileMerger *fm = 0;
+ TIter nextmap(res);
+ TMap *map = 0;
+ // Check if there is a merge operation to resume. Works only for stage 0 or 1.
+ outputChunk = outputFile;
+ outputChunk.ReplaceAll(".root", "_*.root");
+ // Check for existent temporary merge files
+ // Check overwrite mode and remove previous partial results if needed
+ // Preserve old merging functionality for stage 0.
+ if (stage==0) {
+ if (!gSystem->Exec(Form("ls %s 2>/dev/null", outputChunk.Data()))) {
while (1) {
- for (Int_t counter=0; counter<fMaxMergeFiles; counter++) map = (TMap*)nextmap();
+ // Skip as many input files as in a chunk
+ for (Int_t counter=0; counter<nmaxmerge; counter++) map = (TMap*)nextmap();
if (!map) {
- Error("MergeOutputs", "Cannot resume merging for <%s>, nentries=%d", output_file.Data(), res->GetSize());
+ ::Error("MergeOutput", "Cannot resume merging for <%s>, nentries=%d", outputFile.Data(), res->GetSize());
delete res;
return kFALSE;
}
- output_chunk = output_file;
- output_chunk.ReplaceAll(".root", Form("_%04d.root", count_chunk));
- printf("%s\n", output_chunk.Data());
- count_chunk++;
- if (gSystem->AccessPathName(output_chunk)) continue;
- // Merged file with chunks up to <count_chunk> found
- printf("Resume merging of <%s> from <%s>\n", output_file.Data(), output_chunk.Data());
- previous_chunk = output_chunk;
+ outputChunk = outputFile;
+ outputChunk.ReplaceAll(".root", Form("_%04d.root", countChunk));
+ countChunk++;
+ if (gSystem->AccessPathName(outputChunk)) continue;
+ // Merged file with chunks up to <countChunk> found
+ ::Info("MergeOutput", "Resume merging of <%s> from <%s>\n", outputFile.Data(), outputChunk.Data());
+ previousChunk = outputChunk;
break;
}
- }
- count_zero = fMaxMergeFiles;
+ }
+ countZero = nmaxmerge;
+
while ((map=(TMap*)nextmap())) {
// Loop 'find' results and get next LFN
- if (count_zero == fMaxMergeFiles) {
+ if (countZero == nmaxmerge) {
// First file in chunk - create file merger and add previous chunk if any.
fm = new TFileMerger(kFALSE);
fm->SetFastMethod(kTRUE);
- if (previous_chunk.Length()) fm->AddFile(previous_chunk.Data());
- output_chunk = output_file;
- output_chunk.ReplaceAll(".root", Form("_%04d.root", count_chunk));
+ if (previousChunk.Length()) fm->AddFile(previousChunk.Data());
+ outputChunk = outputFile;
+ outputChunk.ReplaceAll(".root", Form("_%04d.root", countChunk));
}
// If last file found, put merged results in the output file
- if (map == res->Last()) output_chunk = output_file;
+ if (map == res->Last()) outputChunk = outputFile;
TObjString *objs = dynamic_cast<TObjString*>(map->GetValue("turl"));
if (!objs || !objs->GetString().Length()) {
// Nothing found - skip this output
delete res;
delete fm;
- break;
+ return kFALSE;
}
// Add file to be merged and decrement chunk counter.
fm->AddFile(objs->GetString());
- count_zero--;
- if (count_zero==0 || map == res->Last()) {
- fm->OutputFile(output_chunk);
+ countZero--;
+ if (countZero==0 || map == res->Last()) {
if (!fm->GetMergeList() || !fm->GetMergeList()->GetSize()) {
// Nothing found - skip this output
- Warning("MergeOutputs", "No <%s> files found.", output_file.Data());
+ ::Warning("MergeOutput", "No <%s> files found.", inputFile.Data());
delete res;
delete fm;
- break;
+ return kFALSE;
}
+ fm->OutputFile(outputChunk);
// Merge the outputs, then go to next chunk
if (!fm->Merge()) {
- Error("MergeOutputs", "Could not merge all <%s> files", output_file.Data());
+ ::Error("MergeOutput", "Could not merge all <%s> files", outputFile.Data());
delete res;
delete fm;
- merged = kFALSE;
- break;
+ return kFALSE;
} else {
- Info("MergeOutputs", "\n##### Merged %d output files to <%s>", fm->GetMergeList()->GetSize(), output_chunk.Data());
- gSystem->Unlink(previous_chunk);
+ ::Info("MergeOutputs", "\n##### Merged %d output files to <%s>", fm->GetMergeList()->GetSize(), outputChunk.Data());
+ gSystem->Unlink(previousChunk);
}
if (map == res->Last()) {
delete res;
delete fm;
break;
}
- count_chunk++;
- count_zero = fMaxMergeFiles;
- previous_chunk = output_chunk;
+ countChunk++;
+ countZero = nmaxmerge;
+ previousChunk = outputChunk;
}
}
+ return merged;
+ }
+ // Merging stage different than 0.
+ // Move to the beginning of the requested chunk.
+ outputChunk = outputFile;
+ if (nmaxmerge < res->GetSize()) {
+ if (ichunk*nmaxmerge >= res->GetSize()) {
+ ::Error("MergeOutput", "Cannot merge merge chunk %d grouping %d files from %d total.", ichunk, nmaxmerge, res->GetSize());
+ delete res;
+ return kFALSE;
+ }
+ for (Int_t counter=0; counter<ichunk*nmaxmerge; counter++) map = (TMap*)nextmap();
+ outputChunk.ReplaceAll(".root", Form("-Stage%02d_%04d.root", stage, ichunk));
+ }
+ countZero = nmaxmerge;
+ fm = new TFileMerger(kFALSE);
+ fm->SetFastMethod(kTRUE);
+ while ((map=(TMap*)nextmap())) {
+ // Loop 'find' results and get next LFN
+ TObjString *objs = dynamic_cast<TObjString*>(map->GetValue("turl"));
+ if (!objs || !objs->GetString().Length()) {
+ // Nothing found - skip this output
+ delete res;
+ delete fm;
+ return kFALSE;
+ }
+ // Add file to be merged and decrement chunk counter.
+ fm->AddFile(objs->GetString());
+ countZero--;
+ if (countZero==0) break;
+ }
+ delete res;
+ if (!fm->GetMergeList() || !fm->GetMergeList()->GetSize()) {
+ // Nothing found - skip this output
+ ::Warning("MergeOutput", "No <%s> files found.", inputFile.Data());
+ delete fm;
+ return kFALSE;
+ }
+ fm->OutputFile(outputChunk);
+ // Merge the outputs
+ if (!fm->Merge()) {
+ ::Error("MergeOutput", "Could not merge all <%s> files", outputFile.Data());
+ delete fm;
+ return kFALSE;
+ } else {
+ ::Info("MergeOutput", "\n##### Merged %d output files to <%s>", fm->GetMergeList()->GetSize(), outputChunk.Data());
+ }
+ delete fm;
+ return kTRUE;
+}
+
+//______________________________________________________________________________
+Bool_t AliAnalysisAlien::MergeOutputs()
+{
+// Merge analysis outputs existing in the AliEn space.
+// When fMergeViaJDL is set, the actual merging is delegated to grid jobs
+// (see SubmitMerging) and this method returns kFALSE without merging locally.
+ if (TestBit(AliAnalysisGrid::kTest)) return kTRUE;
+ if (TestBit(AliAnalysisGrid::kOffline)) return kFALSE;
+ if (!Connect()) {
+ Error("MergeOutputs", "Cannot merge outputs without grid connection. Terminate will NOT be executed");
+ return kFALSE;
+ }
+ if (fMergeViaJDL) {
+ if (!TestBit(AliAnalysisGrid::kMerge)) {
+ Info("MergeOutputs", "### Re-run with <MergeViaJDL> option in terminate mode of the plugin to submit merging jobs ###");
+ return kFALSE;
+ }
+ if (fProductionMode) {
+ Info("MergeOutputs", "### Merging will be submitted by LPM manager... ###");
+ return kFALSE;
+ }
+ Info("MergeOutputs", "Submitting merging JDL");
+ if (!SubmitMerging()) return kFALSE;
+ Info("MergeOutputs", "### Re-run with <MergeViaJDL> off to collect results after merging jobs are done ###");
+ Info("MergeOutputs", "### The Terminate() method is executed by the merging jobs");
+ return kFALSE;
+ }
+ // Get the output path
+ if (!fGridOutputDir.Contains("/")) fGridOutputDir = Form("/%s/%s/%s", gGrid->GetHomeDirectory(), fGridWorkingDir.Data(), fGridOutputDir.Data());
+ if (!DirectoryExists(fGridOutputDir)) {
+ Error("MergeOutputs", "Grid output directory %s not found. Terminate() will NOT be executed", fGridOutputDir.Data());
+ return kFALSE;
+ }
+ if (!fOutputFiles.Length()) {
+ Error("MergeOutputs", "No output file names defined. Are you running the right AliAnalysisAlien configuration ?");
+ return kFALSE;
+ }
+ // Check if fast read option was requested
+ Info("MergeOutputs", "Started local merging of output files from: alien://%s \
+ \n======= overwrite mode = %d", fGridOutputDir.Data(), (Int_t)fOverwriteMode);
+ if (fFastReadOption) {
+ Warning("MergeOutputs", "You requested FastRead option. Using xrootd flags to reduce timeouts. This may skip some files that could be accessed ! \
+ \n+++ NOTE: To disable this option, use: plugin->SetFastReadOption(kFALSE)");
+ gEnv->SetValue("XNet.ConnectTimeout",10);
+ gEnv->SetValue("XNet.RequestTimeout",10);
+ gEnv->SetValue("XNet.MaxRedirectCount",2);
+ gEnv->SetValue("XNet.ReconnectTimeout",10);
+ gEnv->SetValue("XNet.FirstConnectMaxCnt",1);
+ }
+ // Make sure we change the temporary directory
+ gSystem->Setenv("TMPDIR", gSystem->pwd());
+ TObjArray *list = fOutputFiles.Tokenize(",");
+ TIter next(list);
+ TObjString *str;
+ TString outputFile;
+ Bool_t merged = kTRUE;
+ while((str=(TObjString*)next())) {
+ outputFile = str->GetString();
+ Int_t index = outputFile.Index("@");
+ if (index > 0) outputFile.Remove(index);
+ TString outputChunk = outputFile;
+ outputChunk.ReplaceAll(".root", "_*.root");
+ // Skip already merged outputs
+ if (!gSystem->AccessPathName(outputFile)) {
+ if (fOverwriteMode) {
+ Info("MergeOutputs", "Overwrite mode. Existing file %s was deleted.", outputFile.Data());
+ gSystem->Unlink(outputFile);
+ if (!gSystem->Exec(Form("ls %s 2>/dev/null", outputChunk.Data()))) {
+ Info("MergeOutput", "Overwrite mode: partial merged files %s will removed",
+ outputChunk.Data());
+ gSystem->Exec(Form("rm -f %s", outputChunk.Data()));
+ }
+ } else {
+ Info("MergeOutputs", "Output file <%s> found. Not merging again.", outputFile.Data());
+ continue;
+ }
+ } else {
+ if (!gSystem->Exec(Form("ls %s 2>/dev/null", outputChunk.Data()))) {
+ Info("MergeOutput", "Overwrite mode: partial merged files %s will removed",
+ outputChunk.Data());
+ gSystem->Exec(Form("rm -f %s", outputChunk.Data()));
+ }
+ }
+ if (fMergeExcludes.Length() &&
+ fMergeExcludes.Contains(outputFile.Data())) continue;
+ // Perform a 'find' command in the output directory, looking for registered outputs
+ merged = MergeOutput(outputFile, fGridOutputDir, fMaxMergeFiles);
+ if (!merged) {
+ Error("MergeOutputs", "Terminate() will NOT be executed");
+ return kFALSE;
+ }
+ TFile *fileOpened = (TFile*)gROOT->GetListOfFiles()->FindObject(outputFile);
+ if (fileOpened) fileOpened->Close();
}
- if (!merged) {
- Error("MergeOutputs", "Terminate() will NOT be executed");
- }
- return merged;
+ return kTRUE;
}
//______________________________________________________________________________
// Use the output files connected to output containers from the analysis manager
// rather than the files defined by SetOutputFiles
if (flag && !TObject::TestBit(AliAnalysisGrid::kDefaultOutputs))
- Info("SetDefaultOutputs", "Plugin will use the output files taken from \
- analysis manager");
+ Info("SetDefaultOutputs", "Plugin will use the output files taken from analysis manager");
TObject::SetBit(AliAnalysisGrid::kDefaultOutputs, flag);
}
+//______________________________________________________________________________
+void AliAnalysisAlien::SetOutputFiles(const char *list)
+{
+// Manually set the output files list.
+// Removes duplicates. Not allowed if default outputs are not disabled.
+// Any "@SE" suffix in an entry is stripped - replicas are steered via
+// SetNumberOfReplicas() instead.
+ if (TObject::TestBit(AliAnalysisGrid::kDefaultOutputs)) {
+ Fatal("SetOutputFiles", "You have to explicitly call SetDefaultOutputs(kFALSE) to manually set output files.");
+ return;
+ }
+ Info("SetOutputFiles", "Output file list is set manually - you are on your own.");
+ fOutputFiles = "";
+ TString slist = list;
+ if (slist.Contains("@")) Warning("SetOutputFiles","The plugin does not allow explicit SE's. Please use: SetNumberOfReplicas() instead.");
+ TObjArray *arr = slist.Tokenize(" ");
+ TObjString *os;
+ TIter next(arr);
+ TString sout;
+ while ((os=(TObjString*)next())) {
+ sout = os->GetString();
+ if (sout.Index("@")>0) sout.Remove(sout.Index("@"));
+ if (fOutputFiles.Contains(sout)) continue;
+ if (!fOutputFiles.IsNull()) fOutputFiles += ",";
+ fOutputFiles += sout;
+ }
+ delete arr;
+}
+
+//______________________________________________________________________________
+void AliAnalysisAlien::SetOutputArchive(const char *list)
+{
+// Manually set the output archive list. Free text - you are on your own...
+// Not allowed if default outputs are not disabled.
+ if (TObject::TestBit(AliAnalysisGrid::kDefaultOutputs)) {
+ Fatal("SetOutputArchive", "You have to explicitly call SetDefaultOutputs(kFALSE) to manually set the output archives.");
+ return;
+ }
+ Info("SetOutputArchive", "Output archive is set manually - you are on your own.");
+ fOutputArchive = list;
+}
+
+//______________________________________________________________________________
+void AliAnalysisAlien::SetPreferedSE(const char */*se*/)
+{
+// Setting a preferred output SE is not allowed anymore (kept only for
+// backward compatibility of the interface).
+ Warning("SetPreferedSE", "Setting a preferential SE is not allowed anymore via the plugin. Use SetNumberOfReplicas() and SetDefaultOutputs()");
+}
+
//______________________________________________________________________________
Bool_t AliAnalysisAlien::StartAnalysis(Long64_t /*nentries*/, Long64_t /*firstEntry*/)
{
filename = mgr->GetOutputEventHandler()->GetOutputFileName();
}
if (fOutputFiles.Contains(filename)) continue;
- if (fOutputFiles.Length()) fOutputFiles += " ";
+ if (fOutputFiles.Length()) fOutputFiles += ",";
fOutputFiles += filename;
}
// Add extra files registered to the analysis manager
if (mgr->GetExtraFiles().Length()) {
- if (fOutputFiles.Length()) fOutputFiles += " ";
- fOutputFiles += mgr->GetExtraFiles();
+ if (fOutputFiles.Length()) fOutputFiles += ",";
+ TString extra = mgr->GetExtraFiles();
+ extra.ReplaceAll(" ", ",");
+ // Protection in case extra files do not exist (will it work?)
+ extra.ReplaceAll(".root", "*.root");
+ fOutputFiles += extra;
}
+ // Compose the output archive.
+ fOutputArchive = "log_archive.zip:std*@disk=1 ";
+ fOutputArchive += Form("root_archive.zip:%s@disk=%d",fOutputFiles.Data(),fNreplicas);
}
// if (!fCloseSE.Length()) fCloseSE = gSystem->Getenv("alien_CLOSE_SE");
if (TestBit(AliAnalysisGrid::kOffline)) {
\n space and job submitted.");
} else if (TestBit(AliAnalysisGrid::kMerge)) {
Info("StartAnalysis","\n##### MERGE MODE ##### The registered outputs of the analysis will be merged");
+ if (fMergeViaJDL) CheckInputData();
return kTRUE;
} else {
Info("StartAnalysis","\n##### FULL ANALYSIS MODE ##### Producing needed files and submitting your analysis job...");
}
+ Print();
if (!Connect()) {
Error("StartAnalysis", "Cannot start grid analysis without grid connection");
return kFALSE;
}
- Print();
+ if (IsCheckCopy()) CheckFileCopy(gGrid->GetHomeDirectory());
if (!CheckInputData()) {
Error("StartAnalysis", "There was an error in preprocessing your requested input data");
return kFALSE;
}
- CreateDataset(fDataPattern);
+ if (!CreateDataset(fDataPattern)) {
+ TString serror;
+ if (!fRunNumbers.Length() && !fRunRange[0]) serror = Form("path to data directory: <%s>", fGridDataDir.Data());
+ if (fRunNumbers.Length()) serror = "run numbers";
+ if (fRunRange[0]) serror = Form("run range [%d, %d]", fRunRange[0], fRunRange[1]);
+ serror += Form("\n or data pattern <%s>", fDataPattern.Data());
+ Error("StartAnalysis", "No data to process. Please fix %s in your plugin configuration.", serror.Data());
+ return kFALSE;
+ }
WriteAnalysisFile();
WriteAnalysisMacro();
WriteExecutable();
WriteValidationScript();
+ if (fMergeViaJDL) {
+ WriteMergingMacro();
+ WriteMergeExecutable();
+ WriteValidationScript(kTRUE);
+ }
if (!CreateJDL()) return kFALSE;
if (TestBit(AliAnalysisGrid::kOffline)) return kFALSE;
if (TestBit(AliAnalysisGrid::kTest)) {
Info("StartAnalysis", "\n_______________________________________________________________________ \
\n Running analysis script in a daughter shell as on a worker node \
\n_______________________________________________________________________");
- TObjArray *list = fOutputFiles.Tokenize(" ");
+ TObjArray *list = fOutputFiles.Tokenize(",");
TIter next(list);
TObjString *str;
- TString output_file;
+ TString outputFile;
while((str=(TObjString*)next())) {
- output_file = str->GetString();
- Int_t index = output_file.Index("@");
- if (index > 0) output_file.Remove(index);
- if (!gSystem->AccessPathName(output_file)) gSystem->Exec(Form("rm %s", output_file.Data()));
+ outputFile = str->GetString();
+ Int_t index = outputFile.Index("@");
+ if (index > 0) outputFile.Remove(index);
+ if (!gSystem->AccessPathName(outputFile)) gSystem->Exec(Form("rm %s", outputFile.Data()));
}
delete list;
gSystem->Exec(Form("bash %s 2>stderr", fExecutable.Data()));
if (res) {
const char *cjobId = res->GetKey(0,"jobId");
if (!cjobId) {
+ gGrid->Stdout();
+ gGrid->Stderr();
Error("StartAnalysis", "Your JDL %s could not be submitted", fJDLName.Data());
return kFALSE;
} else {
jobID = cjobId;
}
delete res;
+ } else {
+ Error("StartAnalysis", "No grid result after submission !!! Bailing out...");
+ return kFALSE;
}
} else {
// Submit for a range of enumeration of runs.
- Submit();
+ if (!Submit()) return kFALSE;
}
Info("StartAnalysis", "\n#### STARTING AN ALIEN SHELL FOR YOU. EXIT WHEN YOUR JOB %s HAS FINISHED. #### \
}
//______________________________________________________________________________
-void AliAnalysisAlien::Submit()
+Bool_t AliAnalysisAlien::Submit()
{
// Submit all master jobs. Returns kFALSE as soon as a submission fails;
// pending masters are retried with a 30000-tick throttle on gSystem->Now().
Int_t nmasterjobs = fInputFiles->GetEntries();
Long_t tshoot = gSystem->Now();
- if (!fNsubmitted) SubmitNext();
+ if (!fNsubmitted && !SubmitNext()) return kFALSE;
while (fNsubmitted < nmasterjobs) {
Long_t now = gSystem->Now();
if ((now-tshoot)>30000) {
tshoot = now;
- SubmitNext();
+ if (!SubmitNext()) return kFALSE;
}
}
+ return kTRUE;
}
//______________________________________________________________________________
-void AliAnalysisAlien::SubmitNext()
+Bool_t AliAnalysisAlien::SubmitMerging()
+{
+// Submit all merging jobs.
+// For each master job (or run, when fOutputToRunNo is set) pick the first
+// output file not listed in fMergeExcludes and trigger its staged merge via
+// the merging JDL. Returns kFALSE if any merge check/submission fails.
+ if (!fGridOutputDir.Contains("/")) fGridOutputDir = Form("/%s/%s/%s", gGrid->GetHomeDirectory(), fGridWorkingDir.Data(), fGridOutputDir.Data());
+ gGrid->Cd(fGridOutputDir);
+ TString mergeJDLName = fExecutable;
+ mergeJDLName.ReplaceAll(".sh", "_merge.jdl");
+ Int_t ntosubmit = fInputFiles->GetEntries();
+ for (Int_t i=0; i<ntosubmit; i++) {
+ TString runOutDir = gSystem->BaseName(fInputFiles->At(i)->GetName());
+ runOutDir.ReplaceAll(".xml", "");
+ if (fOutputToRunNo) {
+ // The output directory is the run number
+ printf("### Submitting merging job for run <%s>\n", runOutDir.Data());
+ runOutDir = Form("%s/%s", fGridOutputDir.Data(), runOutDir.Data());
+ } else {
+ // The output directory is the master number in 3 digits format
+ printf("### Submitting merging job for master <%03d>\n", i);
+ runOutDir = Form("%s/%03d",fGridOutputDir.Data(), i);
+ }
+ // Check now the number of merging stages.
+ TObjArray *list = fOutputFiles.Tokenize(",");
+ TIter next(list);
+ TObjString *str;
+ TString outputFile;
+ while((str=(TObjString*)next())) {
+ outputFile = str->GetString();
+ // Strip a possible "@SE" storage-element suffix.
+ Int_t index = outputFile.Index("@");
+ if (index > 0) outputFile.Remove(index);
+ // Stop at the first file that actually takes part in the merge.
+ if (!fMergeExcludes.Contains(outputFile)) break;
+ }
+ delete list;
+ Bool_t done = CheckMergedFiles(outputFile, runOutDir, fMaxMergeFiles, kTRUE, mergeJDLName);
+ if (!done) return kFALSE;
+ }
+ if (!ntosubmit) return kTRUE;
+ Info("StartAnalysis", "\n#### STARTING AN ALIEN SHELL FOR YOU. EXIT WHEN YOUR MERGING JOBS HAVE FINISHED. #### \
+ \n You may exit at any time and terminate the job later using the option <terminate> but disabling SetMergeViaJDL\
+ \n ##################################################################################");
+ gSystem->Exec("aliensh");
+ return kTRUE;
+}
+
+//______________________________________________________________________________
+Bool_t AliAnalysisAlien::SubmitNext()
{
// Submit next bunch of master jobs if the queue is free.
static Bool_t iscalled = kFALSE;
static Int_t firstmaster = 0;
static Int_t lastmaster = 0;
static Int_t npermaster = 0;
+ // Reentrancy guard: a nested call just reports success and does nothing.
- if (iscalled) return;
+ if (iscalled) return kTRUE;
iscalled = kTRUE;
Int_t nrunning=0, nwaiting=0, nerror=0, ndone=0;
Int_t ntosubmit = 0;
TString status = GetJobStatus(firstmaster, lastmaster, nrunning, nwaiting, nerror, ndone);
printf("=== master %d: %s\n", lastmaster, status.Data());
// If last master not split, just return
- if (status != "SPLIT") {iscalled = kFALSE; return;}
+ if (status != "SPLIT") {iscalled = kFALSE; return kTRUE;}
// No more than 100 waiting jobs
- if (nwaiting>100) {iscalled = kFALSE; return;}
+ if (nwaiting>100) {iscalled = kFALSE; return kTRUE;}
npermaster = (nrunning+nwaiting+nerror+ndone)/fNsubmitted;
if (npermaster) ntosubmit = (100-nwaiting)/npermaster;
+ // Always try to submit at least one master job.
+ if (!ntosubmit) ntosubmit = 1;
printf("=== WAITING(%d) RUNNING(%d) DONE(%d) OTHER(%d) NperMaster=%d => to submit %d jobs\n",
nwaiting, nrunning, ndone, nerror, npermaster, ntosubmit);
}
Int_t nmasterjobs = fInputFiles->GetEntries();
for (Int_t i=0; i<ntosubmit; i++) {
// Submit for a range of enumeration of runs.
- if (fNsubmitted>=nmasterjobs) {iscalled = kFALSE; return;}
+ if (fNsubmitted>=nmasterjobs) {iscalled = kFALSE; return kTRUE;}
TString query;
- query = Form("submit %s %s %03d", fJDLName.Data(), fInputFiles->At(fNsubmitted)->GetName(), fNsubmitted);
+ TString runOutDir = gSystem->BaseName(fInputFiles->At(fNsubmitted)->GetName());
+ runOutDir.ReplaceAll(".xml", "");
+ // Third submit argument: run-number directory or 3-digit master index.
+ if (fOutputToRunNo)
+ query = Form("submit %s %s %s", fJDLName.Data(), fInputFiles->At(fNsubmitted)->GetName(), runOutDir.Data());
+ else
+ query = Form("submit %s %s %03d", fJDLName.Data(), fInputFiles->At(fNsubmitted)->GetName(), fNsubmitted);
printf("********* %s\n",query.Data());
res = gGrid->Command(query);
if (res) {
TString cjobId1 = res->GetKey(0,"jobId");
if (!cjobId1.Length()) {
- Error("StartAnalysis", "Your JDL %s could not be submitted", fJDLName.Data());
iscalled = kFALSE;
- return;
+ // Dump the grid session stdout/stderr to show the failure reason.
+ gGrid->Stdout();
+ gGrid->Stderr();
+ Error("StartAnalysis", "Your JDL %s could not be submitted. The message was:", fJDLName.Data());
+ return kFALSE;
} else {
Info("StartAnalysis", "\n_______________________________________________________________________ \
\n##### Your JDL %s submitted (%d to go). \nTHE JOB ID IS: %s \
fNsubmitted++;
}
delete res;
+ } else {
+ Error("StartAnalysis", "No grid result after submission !!! Bailing out...");
+ // NOTE(review): returns without resetting iscalled, so every later call
+ // will no-op through the guard above — confirm this is intended.
+ return kFALSE;
}
}
iscalled = kFALSE;
+ return kTRUE;
}
//______________________________________________________________________________
TDirectory *cdir = gDirectory;
TFile *file = TFile::Open(analysisFile, "RECREATE");
if (file) {
+ // Skip task Terminate calls for the grid job (but not in test mode, where we want to check also the terminate mode)
+ if (!TestBit(AliAnalysisGrid::kTest)) mgr->SetSkipTerminate(kTRUE);
+ // Unless merging makes no sense
+ if (IsSingleOutput()) mgr->SetSkipTerminate(kFALSE);
mgr->Write();
delete file;
+ // Enable termination for local jobs
+ mgr->SetSkipTerminate(kFALSE);
}
+ // Restore the directory that was current before opening the file.
if (cdir) cdir->cd();
Info("WriteAnalysisFile", "\n##### Analysis manager: %s wrote to file <%s>\n", mgr->GetName(),analysisFile.Data());
Error("WriteAnalysisMacro", "could not open file %s for writing", fAnalysisMacro.Data());
return;
}
+ // Package flags hoisted to function scope so the trailing SetupPar()
+ // emission (end of this function) can see them.
+ Bool_t hasSTEERBase = kFALSE;
+ Bool_t hasESD = kFALSE;
+ Bool_t hasAOD = kFALSE;
+ Bool_t hasANALYSIS = kFALSE;
+ Bool_t hasANALYSISalice = kFALSE;
+ Bool_t hasCORRFW = kFALSE;
TString func = fAnalysisMacro;
TString type = "ESD";
TString comment = "// Analysis using ";
out << " gSystem->Load(\"libVMC\");" << endl;
out << " gSystem->Load(\"libPhysics\");" << endl << endl;
out << " gSystem->Load(\"libMinuit\");" << endl << endl;
+ if (fAdditionalRootLibs.Length()) {
+ // In principle libTree, libGeom, libVMC etc. can go into this list, too.
+ out << "// Add aditional libraries" << endl;
+ TObjArray *list = fAdditionalRootLibs.Tokenize(" ");
+ TIter next(list);
+ TObjString *str;
+ while((str=(TObjString*)next())) {
+ if (str->GetString().Contains(".so"))
+ out << " gSystem->Load(\"" << str->GetString().Data() << "\");" << endl;
+ }
+ if (list) delete list;
+ }
+ // Include path must be set before compiling any par package below.
+ out << "// include path" << endl;
+ if (fIncludePath.Length()) out << " gSystem->AddIncludePath(\"" << fIncludePath.Data() << "\");" << endl;
+ out << " gSystem->AddIncludePath(\"-I$ALICE_ROOT/include\");" << endl << endl;
out << "// Load analysis framework libraries" << endl;
+ // Use the compiled helper unless ANALYSISalice is itself a par package,
+ // in which case the macro-local SetupPar (emitted below) must be used.
+ TString setupPar = "AliAnalysisAlien::SetupPar";
if (!fPackages) {
out << " gSystem->Load(\"libSTEERBase\");" << endl;
out << " gSystem->Load(\"libESD\");" << endl;
TIter next(fPackages);
TObject *obj;
TString pkgname;
- Bool_t hasSTEERBase = kFALSE;
- Bool_t hasESD = kFALSE;
- Bool_t hasAOD = kFALSE;
- Bool_t hasANALYSIS = kFALSE;
- Bool_t hasANALYSISalice = kFALSE;
- Bool_t hasCORRFW = kFALSE;
while ((obj=next())) {
pkgname = obj->GetName();
if (pkgname == "STEERBase" ||
pkgname == "ANALYSISalice.par") hasANALYSISalice = kTRUE;
if (pkgname == "CORRFW" ||
pkgname == "CORRFW.par") hasCORRFW = kTRUE;
- }
+ }
+ if (hasANALYSISalice) setupPar = "SetupPar";
if (!hasSTEERBase) out << " gSystem->Load(\"libSTEERBase\");" << endl;
- else out << " if (!SetupPar(\"STEERBase\")) return;" << endl;
+ else out << " if (!" << setupPar << "(\"STEERBase\")) return;" << endl;
if (!hasESD) out << " gSystem->Load(\"libESD\");" << endl;
- else out << " if (!SetupPar(\"ESD\")) return;" << endl;
+ else out << " if (!" << setupPar << "(\"ESD\")) return;" << endl;
if (!hasAOD) out << " gSystem->Load(\"libAOD\");" << endl;
- else out << " if (!SetupPar(\"AOD\")) return;" << endl;
+ else out << " if (!" << setupPar << "(\"AOD\")) return;" << endl;
if (!hasANALYSIS) out << " gSystem->Load(\"libANALYSIS\");" << endl;
- else out << " if (!SetupPar(\"ANALYSIS\")) return;" << endl;
+ else out << " if (!" << setupPar << "(\"ANALYSIS\")) return;" << endl;
if (!hasANALYSISalice) out << " gSystem->Load(\"libANALYSISalice\");" << endl;
- else out << " if (!SetupPar(\"ANALYSISalice\")) return;" << endl;
+ else out << " if (!" << setupPar << "(\"ANALYSISalice\")) return;" << endl;
if (!hasCORRFW) out << " gSystem->Load(\"libCORRFW\");" << endl << endl;
- else out << " if (!SetupPar(\"CORRFW\")) return;" << endl << endl;
+ else out << " if (!" << setupPar << "(\"CORRFW\")) return;" << endl << endl;
out << "// Compile other par packages" << endl;
next.Reset();
while ((obj=next())) {
pkgname == "ANALYSISalice.par" ||
pkgname == "CORRFW" ||
pkgname == "CORRFW.par") continue;
- out << " if (!SetupPar(\"" << obj->GetName() << "\")) return;" << endl;
+ out << " if (!" << setupPar << "(\"" << obj->GetName() << "\")) return;" << endl;
}
}
- out << "// include path" << endl;
- if (fIncludePath.Length()) out << " gSystem->AddIncludePath(\"" << fIncludePath.Data() << "\");" << endl;
- out << " gSystem->AddIncludePath(\"-I$ALICE_ROOT/include\");" << endl << endl;
if (fAdditionalLibs.Length()) {
out << "// Add aditional AliRoot libraries" << endl;
TObjArray *list = fAdditionalLibs.Tokenize(" ");
while((str=(TObjString*)next())) {
if (str->GetString().Contains(".so"))
out << " gSystem->Load(\"" << str->GetString().Data() << "\");" << endl;
+ // Additional libs may also be par packages now.
+ if (str->GetString().Contains(".par"))
+ out << " if (!" << setupPar << "(\"" << str->GetString() << "\")) return;" << endl;
}
if (list) delete list;
}
if (list) delete list;
}
out << endl;
+ if (fFastReadOption) {
+ Warning("WriteAnalysisMacro", "!!! You requested FastRead option. Using xrootd flags to reduce timeouts in the grid jobs. This may skip some files that could be accessed !!! \
+ \n+++ NOTE: To disable this option, use: plugin->SetFastReadOption(kFALSE)");
+ out << "// fast xrootd reading enabled" << endl;
+ out << " printf(\"!!! You requested FastRead option. Using xrootd flags to reduce timeouts. Note that this may skip some files that could be accessed !!!\");" << endl;
+ out << " gEnv->SetValue(\"XNet.ConnectTimeout\",10);" << endl;
+ out << " gEnv->SetValue(\"XNet.RequestTimeout\",10);" << endl;
+ out << " gEnv->SetValue(\"XNet.MaxRedirectCount\",2);" << endl;
+ out << " gEnv->SetValue(\"XNet.ReconnectTimeout\",10);" << endl;
+ out << " gEnv->SetValue(\"XNet.FirstConnectMaxCnt\",1);" << endl << endl;
+ }
+ // Change temp directory to current one
+ out << "// Set temporary merging directory to current one" << endl;
+ out << " gSystem->Setenv(\"TMPDIR\", gSystem->pwd());" << endl << endl;
out << "// connect to AliEn and make the chain" << endl;
out << " if (!TGrid::Connect(\"alien://\")) return;" << endl;
if (IsUsingTags()) {
out << " TChain *chain = CreateChainFromTags(\"wn.xml\", anatype);" << endl << endl;
} else {
- if(fFriendChainName!="AliAOD.VertexingHF.root") {
- out << " TChain *chain = CreateChain(\"wn.xml\", anatype);" << endl << endl;
- } else {
- out << " // Check if the macro to create the chain was provided" << endl;
- out << " if (gSystem->AccessPathName(\"MakeAODInputChain.C\")) {" << endl;
- out << " ::Error(\"" << func.Data() << "\", \"File MakeAODInputChain.C not provided. Aborting.\");" << endl;
- out << " return;" << endl;
- out << " }" << endl;
- out << " gROOT->LoadMacro(\"MakeAODInputChain.C\");" << endl;
- out << " TChain *chain = MakeAODInputChain(\"wn.xml\",\"none\");" << endl << endl;
- }
+ // The VertexingHF special case was dropped: always use CreateChain.
+ out << " TChain *chain = CreateChain(\"wn.xml\", anatype);" << endl << endl;
}
out << "// read the analysis manager from file" << endl;
TString analysisFile = fExecutable;
out << " mgr = (AliAnalysisManager*)file->Get(key->GetName());" << endl;
out << " };" << endl;
out << " if (!mgr) {" << endl;
- out << " ::Error(\"" << func.Data() << "\", \"No analysis manager found in file" << analysisFile <<"\");" << endl;
+ // Fixed missing space before the file name in the generated message.
+ out << " ::Error(\"" << func.Data() << "\", \"No analysis manager found in file " << analysisFile <<"\");" << endl;
out << " return;" << endl;
out << " }" << endl << endl;
out << " mgr->PrintStatus();" << endl;
+ // Mirror the local manager's debug level into the generated macro.
+ if (AliAnalysisManager::GetAnalysisManager()) {
+ if (AliAnalysisManager::GetAnalysisManager()->GetDebugLevel()>3) {
+ out << " gEnv->SetValue(\"XNet.Debug\", \"1\");" << endl;
+ } else {
+ out << " AliLog::SetGlobalLogLevel(AliLog::kError);" << endl;
+ }
+ }
out << " mgr->StartAnalysis(\"localfile\", chain);" << endl;
out << " timer.Stop();" << endl;
out << " timer.Print();" << endl;
out << " return chain;" << endl;
out << "}" << endl << endl;
}
+ // Emit the macro-local SetupPar only when it is actually referenced.
- if (fPackages) {
+ if (hasANALYSISalice) {
out <<"//________________________________________________________________________________" << endl;
out << "Bool_t SetupPar(const char *package) {" << endl;
out << "// Compile the package and set it up." << endl;
}
}
+//______________________________________________________________________________
+void AliAnalysisAlien::WriteMergingMacro()
+{
+// Write a macro to merge the outputs per master job.
+// The generated macro loads the same libraries/par packages as the analysis
+// macro, merges each non-excluded output via AliAnalysisAlien::MergeOutput,
+// and on the last merging stage runs the manager's Terminate.
+ if (!fMergeViaJDL) return;
+ if (!fOutputFiles.Length()) {
+ Error("WriteMergingMacro", "No output file names defined. Are you running the right AliAnalysisAlien configuration ?");
+ return;
+ }
+ TString mergingMacro = fExecutable;
+ mergingMacro.ReplaceAll(".sh","_merge.C");
+ if (!fGridOutputDir.Contains("/")) fGridOutputDir = Form("/%s/%s/%s", gGrid->GetHomeDirectory(), fGridWorkingDir.Data(), fGridOutputDir.Data());
+ if (!TestBit(AliAnalysisGrid::kSubmit)) {
+ ofstream out;
+ out.open(mergingMacro.Data(), ios::out);
+ if (!out.good()) {
+ // NOTE(review): message reports fAnalysisMacro but the file being
+ // opened is mergingMacro — confirm and fix upstream.
+ Error("WriteMergingMacro", "could not open file %s for writing", fAnalysisMacro.Data());
+ return;
+ }
+ Bool_t hasSTEERBase = kFALSE;
+ Bool_t hasESD = kFALSE;
+ Bool_t hasAOD = kFALSE;
+ Bool_t hasANALYSIS = kFALSE;
+ Bool_t hasANALYSISalice = kFALSE;
+ Bool_t hasCORRFW = kFALSE;
+ TString func = mergingMacro;
+ TString comment;
+ func.ReplaceAll(".C", "");
+ out << "void " << func.Data() << "(const char *dir, Int_t stage=0, Int_t ichunk=0)" << endl;
+ out << "{" << endl;
+ out << "// Automatically generated merging macro executed in grid subjobs" << endl << endl;
+ out << " TStopwatch timer;" << endl;
+ out << " timer.Start();" << endl << endl;
+ // Base ROOT libraries are only needed when not running through aliroot.
+ if (!fExecutableCommand.Contains("aliroot")) {
+ out << "// load base root libraries" << endl;
+ out << " gSystem->Load(\"libTree\");" << endl;
+ out << " gSystem->Load(\"libGeom\");" << endl;
+ out << " gSystem->Load(\"libVMC\");" << endl;
+ out << " gSystem->Load(\"libPhysics\");" << endl << endl;
+ out << " gSystem->Load(\"libMinuit\");" << endl << endl;
+ }
+ if (fAdditionalRootLibs.Length()) {
+ // In principle libTree, libGeom, libVMC etc. can go into this list, too.
+ out << "// Add aditional libraries" << endl;
+ TObjArray *list = fAdditionalRootLibs.Tokenize(" ");
+ TIter next(list);
+ TObjString *str;
+ while((str=(TObjString*)next())) {
+ if (str->GetString().Contains(".so"))
+ out << " gSystem->Load(\"" << str->GetString().Data() << "\");" << endl;
+ }
+ if (list) delete list;
+ }
+ out << "// include path" << endl;
+ if (fIncludePath.Length()) out << " gSystem->AddIncludePath(\"" << fIncludePath.Data() << "\");" << endl;
+ out << " gSystem->AddIncludePath(\"-I$ALICE_ROOT/include\");" << endl << endl;
+ out << "// Load analysis framework libraries" << endl;
+ if (!fPackages) {
+ if (!fExecutableCommand.Contains("aliroot")) {
+ out << " gSystem->Load(\"libSTEERBase\");" << endl;
+ out << " gSystem->Load(\"libESD\");" << endl;
+ out << " gSystem->Load(\"libAOD\");" << endl;
+ }
+ out << " gSystem->Load(\"libANALYSIS\");" << endl;
+ out << " gSystem->Load(\"libANALYSISalice\");" << endl;
+ out << " gSystem->Load(\"libCORRFW\");" << endl << endl;
+ } else {
+ TIter next(fPackages);
+ TObject *obj;
+ TString pkgname;
+ // Use the macro-local SetupPar only if ANALYSISalice is a par package.
+ TString setupPar = "AliAnalysisAlien::SetupPar";
+ while ((obj=next())) {
+ pkgname = obj->GetName();
+ if (pkgname == "STEERBase" ||
+ pkgname == "STEERBase.par") hasSTEERBase = kTRUE;
+ if (pkgname == "ESD" ||
+ pkgname == "ESD.par") hasESD = kTRUE;
+ if (pkgname == "AOD" ||
+ pkgname == "AOD.par") hasAOD = kTRUE;
+ if (pkgname == "ANALYSIS" ||
+ pkgname == "ANALYSIS.par") hasANALYSIS = kTRUE;
+ if (pkgname == "ANALYSISalice" ||
+ pkgname == "ANALYSISalice.par") hasANALYSISalice = kTRUE;
+ if (pkgname == "CORRFW" ||
+ pkgname == "CORRFW.par") hasCORRFW = kTRUE;
+ }
+ if (hasANALYSISalice) setupPar = "SetupPar";
+ if (!hasSTEERBase) out << " gSystem->Load(\"libSTEERBase\");" << endl;
+ else out << " if (!" << setupPar << "(\"STEERBase\")) return;" << endl;
+ if (!hasESD) out << " gSystem->Load(\"libESD\");" << endl;
+ else out << " if (!" << setupPar << "(\"ESD\")) return;" << endl;
+ if (!hasAOD) out << " gSystem->Load(\"libAOD\");" << endl;
+ else out << " if (!" << setupPar << "(\"AOD\")) return;" << endl;
+ if (!hasANALYSIS) out << " gSystem->Load(\"libANALYSIS\");" << endl;
+ else out << " if (!" << setupPar << "(\"ANALYSIS\")) return;" << endl;
+ if (!hasANALYSISalice) out << " gSystem->Load(\"libANALYSISalice\");" << endl;
+ else out << " if (!" << setupPar << "(\"ANALYSISalice\")) return;" << endl;
+ if (!hasCORRFW) out << " gSystem->Load(\"libCORRFW\");" << endl << endl;
+ else out << " if (!" << setupPar << "(\"CORRFW\")) return;" << endl << endl;
+ out << "// Compile other par packages" << endl;
+ next.Reset();
+ while ((obj=next())) {
+ pkgname = obj->GetName();
+ if (pkgname == "STEERBase" ||
+ pkgname == "STEERBase.par" ||
+ pkgname == "ESD" ||
+ pkgname == "ESD.par" ||
+ pkgname == "AOD" ||
+ pkgname == "AOD.par" ||
+ pkgname == "ANALYSIS" ||
+ pkgname == "ANALYSIS.par" ||
+ pkgname == "ANALYSISalice" ||
+ pkgname == "ANALYSISalice.par" ||
+ pkgname == "CORRFW" ||
+ pkgname == "CORRFW.par") continue;
+ out << " if (!" << setupPar << "(\"" << obj->GetName() << "\")) return;" << endl;
+ }
+ }
+ if (fAdditionalLibs.Length()) {
+ out << "// Add aditional AliRoot libraries" << endl;
+ TObjArray *list = fAdditionalLibs.Tokenize(" ");
+ TIter next(list);
+ TObjString *str;
+ while((str=(TObjString*)next())) {
+ if (str->GetString().Contains(".so"))
+ out << " gSystem->Load(\"" << str->GetString().Data() << "\");" << endl;
+ }
+ if (list) delete list;
+ }
+ out << endl;
+ out << "// Analysis source to be compiled at runtime (if any)" << endl;
+ if (fAnalysisSource.Length()) {
+ TObjArray *list = fAnalysisSource.Tokenize(" ");
+ TIter next(list);
+ TObjString *str;
+ while((str=(TObjString*)next())) {
+ out << " gROOT->ProcessLine(\".L " << str->GetString().Data() << "+g\");" << endl;
+ }
+ if (list) delete list;
+ }
+ out << endl;
+
+ if (fFastReadOption) {
+ Warning("WriteMergingMacro", "!!! You requested FastRead option. Using xrootd flags to reduce timeouts in the grid merging jobs. Note that this may skip some files that could be accessed !!!");
+ out << "// fast xrootd reading enabled" << endl;
+ out << " printf(\"!!! You requested FastRead option. Using xrootd flags to reduce timeouts. Note that this may skip some files that could be accessed !!!\");" << endl;
+ out << " gEnv->SetValue(\"XNet.ConnectTimeout\",10);" << endl;
+ out << " gEnv->SetValue(\"XNet.RequestTimeout\",10);" << endl;
+ out << " gEnv->SetValue(\"XNet.MaxRedirectCount\",2);" << endl;
+ out << " gEnv->SetValue(\"XNet.ReconnectTimeout\",10);" << endl;
+ out << " gEnv->SetValue(\"XNet.FirstConnectMaxCnt\",1);" << endl << endl;
+ }
+ // Change temp directory to current one
+ out << "// Set temporary merging directory to current one" << endl;
+ out << " gSystem->Setenv(\"TMPDIR\", gSystem->pwd());" << endl << endl;
+ out << "// Connect to AliEn" << endl;
+ out << " if (!TGrid::Connect(\"alien://\")) return;" << endl;
+ out << " Bool_t laststage = kFALSE;" << endl;
+ out << " TString outputDir = dir;" << endl;
+ out << " TString outputFiles = \"" << fOutputFiles << "\";" << endl;
+ out << " TString mergeExcludes = \"" << fMergeExcludes << "\";" << endl;
+ out << " mergeExcludes += \"" << AliAnalysisManager::GetAnalysisManager()->GetExtraFiles() << "\";" << endl;
+ out << " TObjArray *list = outputFiles.Tokenize(\",\");" << endl;
+ out << " TIter *iter = new TIter(list);" << endl;
+ out << " TObjString *str;" << endl;
+ out << " TString outputFile;" << endl;
+ out << " Bool_t merged = kTRUE;" << endl;
+ out << " while((str=(TObjString*)iter->Next())) {" << endl;
+ out << " outputFile = str->GetString();" << endl;
+ out << " if (outputFile.Contains(\"*\")) continue;" << endl;
+ out << " Int_t index = outputFile.Index(\"@\");" << endl;
+ out << " if (index > 0) outputFile.Remove(index);" << endl;
+ out << " // Skip already merged outputs" << endl;
+ out << " if (!gSystem->AccessPathName(outputFile)) {" << endl;
+ out << " printf(\"Output file <%s> found. Not merging again.\",outputFile.Data());" << endl;
+ out << " continue;" << endl;
+ out << " }" << endl;
+ out << " if (mergeExcludes.Contains(outputFile.Data())) continue;" << endl;
+ out << " merged = AliAnalysisAlien::MergeOutput(outputFile, outputDir, " << fMaxMergeFiles << ", stage, ichunk);" << endl;
+ out << " if (!merged) {" << endl;
+ out << " printf(\"ERROR: Cannot merge %s\\n\", outputFile.Data());" << endl;
+ out << " return;" << endl;
+ out << " }" << endl;
+ out << " // Check if this was the last stage. If yes, run terminate for the tasks." << endl;
+ out << " if (!gSystem->AccessPathName(outputFile)) laststage = kTRUE;" << endl;
+ out << " }" << endl;
+ out << " // all outputs merged, validate" << endl;
+ out << " ofstream out;" << endl;
+ out << " out.open(\"outputs_valid\", ios::out);" << endl;
+ out << " out.close();" << endl;
+ out << " // read the analysis manager from file" << endl;
+ TString analysisFile = fExecutable;
+ analysisFile.ReplaceAll(".sh", ".root");
+ out << " if (!laststage) return;" << endl;
+ out << " TFile *file = TFile::Open(\"" << analysisFile << "\");" << endl;
+ out << " if (!file) return;" << endl;
+ out << " TIter nextkey(file->GetListOfKeys());" << endl;
+ out << " AliAnalysisManager *mgr = 0;" << endl;
+ out << " TKey *key;" << endl;
+ out << " while ((key=(TKey*)nextkey())) {" << endl;
+ out << " if (!strcmp(key->GetClassName(), \"AliAnalysisManager\"))" << endl;
+ out << " mgr = (AliAnalysisManager*)file->Get(key->GetName());" << endl;
+ out << " };" << endl;
+ out << " if (!mgr) {" << endl;
+ // NOTE(review): generated message lacks a space before the file name
+ // (same defect fixed in WriteAnalysisMacro) — confirm and align.
+ out << " ::Error(\"" << func.Data() << "\", \"No analysis manager found in file" << analysisFile <<"\");" << endl;
+ out << " return;" << endl;
+ out << " }" << endl << endl;
+ out << " mgr->SetSkipTerminate(kFALSE);" << endl;
+ out << " mgr->PrintStatus();" << endl;
+ // Mirror the local manager's debug level into the generated macro.
+ if (AliAnalysisManager::GetAnalysisManager()) {
+ if (AliAnalysisManager::GetAnalysisManager()->GetDebugLevel()>3) {
+ out << " gEnv->SetValue(\"XNet.Debug\", \"1\");" << endl;
+ } else {
+ out << " AliLog::SetGlobalLogLevel(AliLog::kError);" << endl;
+ }
+ }
+ out << " mgr->StartAnalysis(\"gridterminate\");" << endl;
+ out << "}" << endl << endl;
+ // Emit a macro-local SetupPar only when it is referenced above.
+ if (hasANALYSISalice) {
+ out <<"//________________________________________________________________________________" << endl;
+ out << "Bool_t SetupPar(const char *package) {" << endl;
+ out << "// Compile the package and set it up." << endl;
+ out << " TString pkgdir = package;" << endl;
+ out << " pkgdir.ReplaceAll(\".par\",\"\");" << endl;
+ out << " gSystem->Exec(Form(\"tar xvzf %s.par\", pkgdir.Data()));" << endl;
+ out << " TString cdir = gSystem->WorkingDirectory();" << endl;
+ out << " gSystem->ChangeDirectory(pkgdir);" << endl;
+ out << " // Check for BUILD.sh and execute" << endl;
+ out << " if (!gSystem->AccessPathName(\"PROOF-INF/BUILD.sh\")) {" << endl;
+ out << " printf(\"*******************************\\n\");" << endl;
+ out << " printf(\"*** Building PAR archive ***\\n\");" << endl;
+ out << " printf(\"*******************************\\n\");" << endl;
+ out << " if (gSystem->Exec(\"PROOF-INF/BUILD.sh\")) {" << endl;
+ out << " ::Error(\"SetupPar\", \"Cannot build par archive %s\", pkgdir.Data());" << endl;
+ out << " gSystem->ChangeDirectory(cdir);" << endl;
+ out << " return kFALSE;" << endl;
+ out << " }" << endl;
+ out << " } else {" << endl;
+ out << " ::Error(\"SetupPar\",\"Cannot access PROOF-INF/BUILD.sh for package %s\", pkgdir.Data());" << endl;
+ out << " gSystem->ChangeDirectory(cdir);" << endl;
+ out << " return kFALSE;" << endl;
+ out << " }" << endl;
+ out << " // Check for SETUP.C and execute" << endl;
+ out << " if (!gSystem->AccessPathName(\"PROOF-INF/SETUP.C\")) {" << endl;
+ out << " printf(\"*******************************\\n\");" << endl;
+ out << " printf(\"*** Setup PAR archive ***\\n\");" << endl;
+ out << " printf(\"*******************************\\n\");" << endl;
+ out << " gROOT->Macro(\"PROOF-INF/SETUP.C\");" << endl;
+ out << " } else {" << endl;
+ out << " ::Error(\"SetupPar\",\"Cannot access PROOF-INF/SETUP.C for package %s\", pkgdir.Data());" << endl;
+ out << " gSystem->ChangeDirectory(cdir);" << endl;
+ out << " return kFALSE;" << endl;
+ out << " }" << endl;
+ out << " // Restore original workdir" << endl;
+ out << " gSystem->ChangeDirectory(cdir);" << endl;
+ out << " return kTRUE;" << endl;
+ out << "}" << endl;
+ }
+ }
+ // Copy the macro to the grid work directory unless offline/test mode.
+ Bool_t copy = kTRUE;
+ if (TestBit(AliAnalysisGrid::kOffline) || TestBit(AliAnalysisGrid::kTest)) copy = kFALSE;
+ if (copy) {
+ CdWork();
+ TString workdir = gGrid->GetHomeDirectory();
+ workdir += fGridWorkingDir;
+ if (FileExists(mergingMacro)) gGrid->Rm(mergingMacro);
+ Info("WriteMergingMacro", "\n##### Copying merging macro: <%s> to your alien workspace", mergingMacro.Data());
+ TFile::Cp(Form("file:%s",mergingMacro.Data()), Form("alien://%s/%s", workdir.Data(), mergingMacro.Data()));
+ }
+}
+
+//______________________________________________________________________________
+Bool_t AliAnalysisAlien::SetupPar(const char *package)
+{
+// Compile the par file archive pointed by <package>. This must be present in the current directory.
+// Note that, for loading the compiled library, the current directory should have precedence in
+// LD_LIBRARY_PATH. Returns kTRUE on success, kFALSE if BUILD.sh or SETUP.C is
+// missing or the build fails; the original working directory is always restored.
+ TString pkgdir = package;
+ pkgdir.ReplaceAll(".par","");
+ gSystem->Exec(Form("tar xvzf %s.par", pkgdir.Data()));
+ TString cdir = gSystem->WorkingDirectory();
+ gSystem->ChangeDirectory(pkgdir);
+ // Check for BUILD.sh and execute
+ if (!gSystem->AccessPathName("PROOF-INF/BUILD.sh")) {
+ printf("**************************************************\n");
+ printf("*** Building PAR archive %s\n", package);
+ printf("**************************************************\n");
+ if (gSystem->Exec("PROOF-INF/BUILD.sh")) {
+ ::Error("SetupPar", "Cannot build par archive %s", pkgdir.Data());
+ gSystem->ChangeDirectory(cdir);
+ return kFALSE;
+ }
+ } else {
+ ::Error("SetupPar","Cannot access PROOF-INF/BUILD.sh for package %s", pkgdir.Data());
+ gSystem->ChangeDirectory(cdir);
+ return kFALSE;
+ }
+ // Check for SETUP.C and execute
+ if (!gSystem->AccessPathName("PROOF-INF/SETUP.C")) {
+ printf("**************************************************\n");
+ printf("*** Setup PAR archive %s\n", package);
+ printf("**************************************************\n");
+ gROOT->Macro("PROOF-INF/SETUP.C");
+ printf("*** Loaded library: %s\n", gSystem->GetLibraries(pkgdir,"",kFALSE));
+ } else {
+ ::Error("SetupPar","Cannot access PROOF-INF/SETUP.C for package %s", pkgdir.Data());
+ gSystem->ChangeDirectory(cdir);
+ return kFALSE;
+ }
+ // Restore original workdir
+ gSystem->ChangeDirectory(cdir);
+ return kTRUE;
+}
+
//______________________________________________________________________________
void AliAnalysisAlien::WriteExecutable()
{
out << "echo \"======== " << fAnalysisMacro.Data() << " finished with exit code: $? ========\"" << endl;
out << "echo \"############## memory after: ##############\"" << endl;
out << "free -m" << endl;
+ // The dmesg dump was removed from the generated job script.
- out << "echo \"############## Last 10 lines from dmesg : ##############\"" << endl;
- out << "dmesg | tail -n 10" << endl;
}
Bool_t copy = kTRUE;
if (TestBit(AliAnalysisGrid::kOffline) || TestBit(AliAnalysisGrid::kTest)) copy = kFALSE;
CdWork();
TString workdir = gGrid->GetHomeDirectory();
TString bindir = Form("%s/bin", workdir.Data());
+ // "-p": create missing parent directories as well.
- if (!DirectoryExists(bindir)) gGrid->Mkdir(bindir);
+ if (!DirectoryExists(bindir)) gGrid->Mkdir(bindir,"-p");
workdir += fGridWorkingDir;
TString executable = Form("%s/bin/%s", gGrid->GetHomeDirectory(), fExecutable.Data());
if (FileExists(executable)) gGrid->Rm(executable);
}
}
+//______________________________________________________________________________
+void AliAnalysisAlien::WriteMergeExecutable()
+{
+// Generate the alien executable script for the merging job.
+// The script prints environment diagnostics, then runs the generated
+// "<executable>_merge.C" macro with (dir, stage, ichunk) arguments.
+ if (!fMergeViaJDL) return;
+ TString mergeExec = fExecutable;
+ mergeExec.ReplaceAll(".sh", "_merge.sh");
+ if (!TestBit(AliAnalysisGrid::kSubmit)) {
+ ofstream out;
+ out.open(mergeExec.Data(), ios::out);
+ if (out.bad()) {
+ Error("WriteMergingExecutable", "Bad file name for executable: %s", mergeExec.Data());
+ return;
+ }
+ out << "#!/bin/bash" << endl;
+ out << "echo \"=========================================\"" << endl;
+ out << "echo \"############## PATH : ##############\"" << endl;
+ out << "echo $PATH" << endl;
+ out << "echo \"############## LD_LIBRARY_PATH : ##############\"" << endl;
+ out << "echo $LD_LIBRARY_PATH" << endl;
+ out << "echo \"############## ROOTSYS : ##############\"" << endl;
+ out << "echo $ROOTSYS" << endl;
+ out << "echo \"############## which root : ##############\"" << endl;
+ out << "which root" << endl;
+ out << "echo \"############## ALICE_ROOT : ##############\"" << endl;
+ out << "echo $ALICE_ROOT" << endl;
+ out << "echo \"############## which aliroot : ##############\"" << endl;
+ out << "which aliroot" << endl;
+ out << "echo \"############## system limits : ##############\"" << endl;
+ out << "ulimit -a" << endl;
+ out << "echo \"############## memory : ##############\"" << endl;
+ out << "free -m" << endl;
+ out << "echo \"=========================================\"" << endl << endl;
+ // Make sure we can properly compile par files
+ if (TObject::TestBit(AliAnalysisGrid::kUsePars)) out << "export LD_LIBRARY_PATH=.:$LD_LIBRARY_PATH" << endl;
+ TString mergeMacro = fExecutable;
+ mergeMacro.ReplaceAll(".sh", "_merge.C");
+ // $1=output dir, $2=merge stage, $3=chunk index (forwarded to the macro).
+ out << "export ARG=\"" << mergeMacro << "(\\\"$1\\\",$2,$3)\"" << endl;
+ out << fExecutableCommand << " " << "$ARG" << endl;
+ out << "echo \"======== " << mergeMacro.Data() << " finished with exit code: $? ========\"" << endl;
+ out << "echo \"############## memory after: ##############\"" << endl;
+ out << "free -m" << endl;
+ }
+ // Copy the script to the grid bin directory unless offline/test mode.
+ Bool_t copy = kTRUE;
+ if (TestBit(AliAnalysisGrid::kOffline) || TestBit(AliAnalysisGrid::kTest)) copy = kFALSE;
+ if (copy) {
+ CdWork();
+ TString workdir = gGrid->GetHomeDirectory();
+ TString bindir = Form("%s/bin", workdir.Data());
+ if (!DirectoryExists(bindir)) gGrid->Mkdir(bindir,"-p");
+ workdir += fGridWorkingDir;
+ TString executable = Form("%s/bin/%s", gGrid->GetHomeDirectory(), mergeExec.Data());
+ if (FileExists(executable)) gGrid->Rm(executable);
+ Info("CreateJDL", "\n##### Copying executable file <%s> to your AliEn bin directory", mergeExec.Data());
+ TFile::Cp(Form("file:%s",mergeExec.Data()), Form("alien://%s", executable.Data()));
+ }
+}
+
//______________________________________________________________________________
void AliAnalysisAlien::WriteProductionFile(const char *filename) const
{
out << locjdl << " " << njobspermaster << endl;
Int_t nmasterjobs = fInputFiles->GetEntries();
for (Int_t i=0; i<nmasterjobs; i++) {
- out << Form("%s", fInputFiles->At(i)->GetName()) << " " << Form("%03d", i) << endl;
+ TString runOutDir = gSystem->BaseName(fInputFiles->At(i)->GetName());
+ runOutDir.ReplaceAll(".xml", "");
+ // Second column: run number (fOutputToRunNo) or 3-digit master index.
+ if (fOutputToRunNo)
+ out << Form("%s", fInputFiles->At(i)->GetName()) << " " << runOutDir << endl;
+ else
+ out << Form("%s", fInputFiles->At(i)->GetName()) << " " << Form("%03d", i) << endl;
}
Info("WriteProductionFile", "\n##### Copying production file <%s> to your work directory", filename);
+ // Overwrite any existing copy in the grid work directory.
+ if (FileExists(filename)) gGrid->Rm(filename);
TFile::Cp(Form("file:%s",filename), Form("alien://%s/%s", workdir.Data(),filename));
}
//______________________________________________________________________________
-void AliAnalysisAlien::WriteValidationScript()
+void AliAnalysisAlien::WriteValidationScript(Bool_t merge)
{
// Generate the alien validation script.
// Generate the validation script
TObjString *os;
TString validationScript = fExecutable;
- validationScript.ReplaceAll(".sh", "_validation.sh");
+ if (merge) validationScript.ReplaceAll(".sh", "_mergevalidation.sh");
+ else validationScript.ReplaceAll(".sh", "_validation.sh");
if (!Connect()) {
Error("WriteValidationScript", "Alien connection required");
return;
}
- TString out_stream = "";
- if (!TestBit(AliAnalysisGrid::kTest)) out_stream = " >> stdout";
+ TString outStream = "";
+ if (!TestBit(AliAnalysisGrid::kTest)) outStream = " >> stdout";
if (!TestBit(AliAnalysisGrid::kSubmit)) {
ofstream out;
out.open(validationScript, ios::out);
out << "fi" << endl << endl;
out << "cd $validateout;" << endl;
out << "validateworkdir=`pwd`;" << endl << endl;
- out << "echo \"*******************************************************\"" << out_stream << endl;
- out << "echo \"* Automatically generated validation script *\"" << out_stream << endl;
+ out << "echo \"*******************************************************\"" << outStream << endl;
+ out << "echo \"* Automatically generated validation script *\"" << outStream << endl;
out << "" << endl;
- out << "echo \"* Time: $validatetime \"" << out_stream << endl;
- out << "echo \"* Dir: $validateout\"" << out_stream << endl;
- out << "echo \"* Workdir: $validateworkdir\"" << out_stream << endl;
- out << "echo \"* ----------------------------------------------------*\"" << out_stream << endl;
- out << "ls -la ./" << out_stream << endl;
- out << "echo \"* ----------------------------------------------------*\"" << out_stream << endl << endl;
+ out << "echo \"* Time: $validatetime \"" << outStream << endl;
+ out << "echo \"* Dir: $validateout\"" << outStream << endl;
+ out << "echo \"* Workdir: $validateworkdir\"" << outStream << endl;
+ out << "echo \"* ----------------------------------------------------*\"" << outStream << endl;
+ out << "ls -la ./" << outStream << endl;
+ out << "echo \"* ----------------------------------------------------*\"" << outStream << endl << endl;
out << "##################################################" << endl;
-
- out << "" << endl;
- out << "parArch=`grep -Ei \"Cannot Build the PAR Archive\" stderr`" << endl;
- out << "segViol=`grep -Ei \"Segmentation violation\" stderr`" << endl;
- out << "segFault=`grep -Ei \"Segmentation fault\" stderr`" << endl;
out << "" << endl;
out << "if [ ! -f stderr ] ; then" << endl;
out << " error=1" << endl;
- out << " echo \"* ########## Job not validated - no stderr ###\" " << out_stream << endl;
- out << " echo \"Error = $error\" " << out_stream << endl;
+ out << " echo \"* ########## Job not validated - no stderr ###\" " << outStream << endl;
+ out << " echo \"Error = $error\" " << outStream << endl;
out << "fi" << endl;
+ out << "parArch=`grep -Ei \"Cannot Build the PAR Archive\" stderr`" << endl;
+ out << "segViol=`grep -Ei \"Segmentation violation\" stderr`" << endl;
+ out << "segFault=`grep -Ei \"Segmentation fault\" stderr`" << endl;
+ out << "glibcErr=`grep -Ei \"*** glibc detected ***\" stderr`" << endl;
+ out << "" << endl;
+
out << "if [ \"$parArch\" != \"\" ] ; then" << endl;
out << " error=1" << endl;
- out << " echo \"* ########## Job not validated - PAR archive not built ###\" " << out_stream << endl;
- out << " echo \"$parArch\" " << out_stream << endl;
- out << " echo \"Error = $error\" " << out_stream << endl;
+ out << " echo \"* ########## Job not validated - PAR archive not built ###\" " << outStream << endl;
+ out << " echo \"$parArch\" " << outStream << endl;
+ out << " echo \"Error = $error\" " << outStream << endl;
out << "fi" << endl;
out << "if [ \"$segViol\" != \"\" ] ; then" << endl;
out << " error=1" << endl;
- out << " echo \"* ########## Job not validated - Segment. violation ###\" " << out_stream << endl;
- out << " echo \"$segViol\" " << out_stream << endl;
- out << " echo \"Error = $error\" " << out_stream << endl;
+ out << " echo \"* ########## Job not validated - Segment. violation ###\" " << outStream << endl;
+ out << " echo \"$segViol\" " << outStream << endl;
+ out << " echo \"Error = $error\" " << outStream << endl;
out << "fi" << endl;
out << "if [ \"$segFault\" != \"\" ] ; then" << endl;
out << " error=1" << endl;
- out << " echo \"* ########## Job not validated - Segment. fault ###\" " << out_stream << endl;
- out << " echo \"$segFault\" " << out_stream << endl;
- out << " echo \"Error = $error\" " << out_stream << endl;
+ out << " echo \"* ########## Job not validated - Segment. fault ###\" " << outStream << endl;
+ out << " echo \"$segFault\" " << outStream << endl;
+ out << " echo \"Error = $error\" " << outStream << endl;
+ out << "fi" << endl;
+
+ out << "if [ \"$glibcErr\" != \"\" ] ; then" << endl;
+ out << " error=1" << endl;
+ out << " echo \"* ########## Job not validated - *** glibc detected *** ###\" " << outStream << endl;
+ out << " echo \"$glibcErr\" " << outStream << endl;
+ out << " echo \"Error = $error\" " << outStream << endl;
out << "fi" << endl;
// Part dedicated to the specific analyses running into the train
- TObjArray *arr = fOutputFiles.Tokenize(" ");
+ TObjArray *arr = fOutputFiles.Tokenize(",");
TIter next1(arr);
- TString output_file;
+ TString outputFile;
+ AliAnalysisManager *mgr = AliAnalysisManager::GetAnalysisManager();
+ TString extra = mgr->GetExtraFiles();
while ((os=(TObjString*)next1())) {
- output_file = os->GetString();
- Int_t index = output_file.Index("@");
- if (index > 0) output_file.Remove(index);
- out << "if ! [ -f " << output_file.Data() << " ] ; then" << endl;
+ if (merge) break;
+ outputFile = os->GetString();
+ Int_t index = outputFile.Index("@");
+ if (index > 0) outputFile.Remove(index);
+ if (merge && fMergeExcludes.Contains(outputFile)) continue;
+ if (extra.Contains(outputFile)) continue;
+ if (outputFile.Contains("*")) continue;
+ out << "if ! [ -f " << outputFile.Data() << " ] ; then" << endl;
out << " error=1" << endl;
- out << " echo \"Output file(s) not found. Job FAILED !\"" << out_stream << endl;
- out << " echo \"Output file(s) not found. Job FAILED !\" >> stderr" << endl;
+ out << " echo \"Output file " << outputFile << " not found. Job FAILED !\"" << outStream << endl;
+ out << " echo \"Output file " << outputFile << " not found. Job FAILED !\" >> stderr" << endl;
out << "fi" << endl;
}
delete arr;
out << "fi" << endl;
out << "if [ $error = 0 ] ; then" << endl;
- out << " echo \"* ---------------- Job Validated ------------------*\"" << out_stream << endl;
+ out << " echo \"* ---------------- Job Validated ------------------*\"" << outStream << endl;
+ if (!IsKeepLogs()) {
+ out << " echo \"* === Logs std* will be deleted === \"" << endl;
+ outStream = "";
+ out << " rm -f std*" << endl;
+ }
out << "fi" << endl;
- out << "echo \"* ----------------------------------------------------*\"" << out_stream << endl;
- out << "echo \"*******************************************************\"" << out_stream << endl;
+ out << "echo \"* ----------------------------------------------------*\"" << outStream << endl;
+ out << "echo \"*******************************************************\"" << outStream << endl;
out << "cd -" << endl;
out << "exit $error" << endl;
}