ClassImp(AliTrackletTaskMulti)
// centrality percentile (inverted: 100% - most central)
-const Float_t AliTrackletTaskMulti::fgkCentPerc[] = {0,10,20,30};
+const Float_t AliTrackletTaskMulti::fgkCentPerc[] = {0,100};//{0,5,10,20,30};
+//const Float_t AliTrackletTaskMulti::fgkCentPerc[] = {0,5,10,20,30,40};
//{0,10,20,30,40,50,60,70,80,90,95,101};
const char* AliTrackletTaskMulti::fgCentSelName[] = {"V0M","FMD","TRK","TKL","CL0","CL1","V0MvsFMD","TKLvsV0M","ZENvsZDC"};
fOutput->SetOwner();
//
//
- AliCDBManager *man = AliCDBManager::Instance();
- if (fUseMC) {
- Bool_t newGeom = kTRUE;
- man->SetDefaultStorage("alien://Folder=/alice/simulation/2008/v4-15-Release/Residual");
- if (newGeom) {
- // new geom
- AliCDBEntry* obj = man->Get("GRP/Geometry/Data",130844,8);
- AliGeomManager::SetGeometry((TGeoManager*) obj->GetObject());
- if (!AliGeomManager::ApplyAlignObjsToGeom("ITS",130844,6,-1)) AliFatal("Failed to misalign geometry");
+ Bool_t needGeom = GetDoNormalReco() || GetDoInjection() || GetDoRotation() || GetDoMixing();
+ if (needGeom) {
+ AliCDBManager *man = AliCDBManager::Instance();
+ if (fUseMC) {
+ Bool_t newGeom = kTRUE;
+ man->SetDefaultStorage("alien://Folder=/alice/simulation/2008/v4-15-Release/Residual");
+ if (newGeom) {
+ // new geom
+ AliCDBEntry* obj = man->Get("GRP/Geometry/Data",130844,8);
+ AliGeomManager::SetGeometry((TGeoManager*) obj->GetObject());
+ if (!AliGeomManager::ApplyAlignObjsToGeom("ITS",130844,6,-1)) AliFatal("Failed to misalign geometry");
+ }
+ else {
+ // old geom
+ AliCDBEntry* obj = man->Get("GRP/Geometry/Data",130845,7);
+ AliGeomManager::SetGeometry((TGeoManager*) obj->GetObject());
+ if (!AliGeomManager::ApplyAlignObjsToGeom("ITS",130845,5,-1)) AliFatal("Failed to misalign geometry");
+ }
}
else {
- // old geom
- AliCDBEntry* obj = man->Get("GRP/Geometry/Data",130845,7);
+ man->SetDefaultStorage("alien://Folder=/alice/data/2010/OCDB"); //man->SetRun(137366);
+ AliCDBEntry* obj = man->Get("GRP/Geometry/Data",137366);
AliGeomManager::SetGeometry((TGeoManager*) obj->GetObject());
- if (!AliGeomManager::ApplyAlignObjsToGeom("ITS",130845,5,-1)) AliFatal("Failed to misalign geometry");
+ if (!AliGeomManager::ApplyAlignObjsToGeom("ITS",137366,-1,-1)) AliFatal("Failed to misalign geometry");
}
}
- else {
- man->SetDefaultStorage("alien://Folder=/alice/data/2010/OCDB"); //man->SetRun(137366);
- AliCDBEntry* obj = man->Get("GRP/Geometry/Data",137366);
- AliGeomManager::SetGeometry((TGeoManager*) obj->GetObject());
- if (!AliGeomManager::ApplyAlignObjsToGeom("ITS",137366,-1,-1)) AliFatal("Failed to misalign geometry");
- }
- //
+ //
// Create histograms
fNCentBins = sizeof(fgkCentPerc)/sizeof(Float_t)-1;
//---------------------------------------------Standard histos per tracklet type--->>
UInt_t hPattern = 0xffffffff;
fHistosTrData = BookHistosSet("TrData",hPattern);
+ hPattern &= ~(BIT(kHEtaZvSPD1)); // fill single clusters for "data" only
if (GetDoInjection()) fHistosTrInj = BookHistosSet("TrInj",hPattern);
if (GetDoRotation()) fHistosTrRot = BookHistosSet("TrRot",hPattern);
if (GetDoMixing()) fHistosTrMix = BookHistosSet("TrMix",hPattern);
{
// Main loop
//
+ Bool_t needRecPoints = GetDoNormalReco() || GetDoInjection() || GetDoRotation() || GetDoMixing();
+ //
AliAnalysisManager* anMan = AliAnalysisManager::GetAnalysisManager();
fRPTree = fRPTreeMix = 0;
- AliESDInputHandlerRP *handRP = (AliESDInputHandlerRP*)anMan->GetInputEventHandler();
- if (!handRP) { printf("No RP handler\n"); return; }
- AliESDEvent *esd = handRP->GetEvent();
+ AliESDInputHandler *handler = (AliESDInputHandler*)anMan->GetInputEventHandler();
+ AliESDInputHandlerRP *handRP = 0;
+ if (needRecPoints) {
+ handRP = (AliESDInputHandlerRP*)handler;
+ if (!handRP) { printf("No RP handler\n"); return; }
+ }
+ AliESDEvent *esd = handler->GetEvent();
if (!esd) { printf("No AliESDEvent\n"); return; }
//
// do we need to initialize the field?
AliESDZDC *esdZDC = esd->GetESDZDC();
float zdcEnergy=0,zemEnergy=0;
if (esdZDC) {
- zdcEnergy = (esdZDC->GetZDCN1Energy() + esdZDC->GetZDCP1Energy() + esdZDC->GetZDCN2Energy()+ esdZDC->GetZDCP2Energy())/8;
+ zdcEnergy = (esdZDC->GetZDCN1Energy() + esdZDC->GetZDCP1Energy() + esdZDC->GetZDCN2Energy()+ esdZDC->GetZDCP2Energy());
zemEnergy = (esdZDC->GetZDCEMEnergy(0)+esdZDC->GetZDCEMEnergy(1))/8.;
}
((TH2*)fHistosCustom->UncheckedAt(kHZDCZEMNoSel))->Fill(zemEnergy,zdcEnergy);
//
Float_t centPercentile = centrality->GetCentralityPercentileUnchecked(fgCentSelName[fUseCentralityVar]);
+
+ // temporary >>>>>>>>>>>>>>>>>>>>>>>>
+ if (fUseCentralityVar==kCentZEMvsZDC) {
+ float zdcEn = zdcEnergy;
+ float zemEn = zemEnergy;
+ Float_t slope;
+ Float_t zdcPercentile;
+ if (zemEn > 295.) {
+ slope = (zdcEn + 15000.)/(zemEn - 295.);
+ slope += 2.23660e+02;
+ zdcPercentile = (TMath::ATan(slope) - 1.56664)/8.99571e-05;
+ if (zdcPercentile<0) zdcPercentile = 0;
+ }
+ else zdcPercentile = 100;
+ centPercentile = zdcPercentile;
+ }
+ // temporary >>>>>>>>>>>>>>>>>>>>>>>>
+
fCurrCentBin = GetCentralityBin(centPercentile);
//
// printf("CentPerc: %f : Bin %d\n",centPercentile, fCurrCentBin);
if (!fStack) { printf("Stack not available\n"); return; }
}
//
- fRPTree = handRP->GetTreeR("ITS");
- if (!fRPTree) { AliError(" Invalid ITS cluster tree !\n"); return; }
+ if (needRecPoints) {
+ fRPTree = handRP->GetTreeR("ITS");
+ if (!fRPTree) { AliError(" Invalid ITS cluster tree !\n"); return; }
+ }
//
// =============================================================================>>>
// MC Generator info
FillClusterInfo();
//
}
- if (!GetDoNormalReco()) FillHistos(kData,multESD); // fill data histos from ESD
+ if (!GetDoNormalReco()) {
+ FillHistos(kData,multESD); // fill data histos from ESD
+ FillClusterInfoFromMult(multESD, vtxf[2] );
+ }
//
// Injection: it must come right after the normal reco since needs its results
if (GetDoInjection()) {
}
// =============================================================================<<<
//
- delete fMultReco;
+ if (fMultReco) delete fMultReco;
fMultReco = 0;
//
}
//
// -------------------------------------------------
TH2F* hclinf=0;
- hclinf = new TH2F("cl0InfoUsed","#phi vs Z of used clusters, Lr0",60,-15,15, 80,0,2*TMath::Pi());
+ hclinf = new TH2F("cl0InfoUsed","#phi vs Z of used clusters, Lr0",64,-16,16, 80,0,2*TMath::Pi());
AddHisto(histos,hclinf,kHClUsedInfoL0);
- hclinf = new TH2F("cl1InfoUsed","#phi vs Z of used clusters, Lr1",60,-15,15, 2*80,0,2*TMath::Pi());
+ hclinf = new TH2F("cl1InfoUsed","#phi vs Z of used clusters, Lr1",64,-16,16, 2*80,0,2*TMath::Pi());
AddHisto(histos,hclinf,kHClUsedInfoL1);
- hclinf = new TH2F("cl0InfoAll","#phi vs Z of all clusters, Lr0",60,-15,15, 80,0,2*TMath::Pi());
+ hclinf = new TH2F("cl0InfoAll","#phi vs Z of all clusters, Lr0",64,-16,16, 80,0,2*TMath::Pi());
AddHisto(histos,hclinf,kHClAllInfoL0);
- hclinf = new TH2F("cl1InfoAll","#phi vs Z of all clusters, Lr1",60,-15,15, 2*80,0,2*TMath::Pi());
+ hclinf = new TH2F("cl1InfoAll","#phi vs Z of all clusters, Lr1",64,-16,16, 2*80,0,2*TMath::Pi());
AddHisto(histos,hclinf,kHClAllInfoL1);
//
// -------------------------------------------------
AddHisto(histos,h1,offs+kHWDist);
}
//
+ if (selHistos & (0x1<<kHEtaZvSPD1) ) {
+ sprintf(buffn,"b%d_%s_ZvEtaSPD1",ib,pref);
+ sprintf(bufft,"bin%d (%s) Zv vs Eta SPD1 clusters",ib,pref);
+ h2 = new TH2F(buffn,bufft,nEtaBins,fEtaMin,fEtaMax, nZVBins, fZVertexMin,fZVertexMax);
+ h2->GetXaxis()->SetTitle("#eta");
+ h2->GetYaxis()->SetTitle("Zv");
+ AddHisto(histos,h2,offs+kHEtaZvSPD1);
+ }
+ //
}
//
histos->SetOwner(kFALSE);
if (fCheckReconstructables) CheckReconstructables();
}
}
+ //
+ //-------------------------------------------------------------TMP RS - singles ------->>>
+ int offsH = fCurrCentBin*kNStandardH;
+ TH2* hSingles = (TH2*)histos->UncheckedAt(offsH+kHEtaZvSPD1);
+ if (hSingles) {
+ int nclS = mlt->GetNumberOfSingleClusters();
+ double *thtS = mlt->GetThetaSingle();
+ for (int ics=nclS;ics--;) {
+ double etaS = -TMath::Log(TMath::Tan(thtS[ics]/2));
+ if (etaS<fEtaMin || etaS>fEtaMax) continue;
+ hSingles->Fill(etaS,fESDVtx[2]);
+ }
+ }
+ //-------------------------------------------------------------TMP RS - singles -------<<<
//
}
//
}
+//_________________________________________________________________________
+void AliTrackletTaskMulti::FillClusterInfoFromMult(const AliMultiplicity* mlt, double zVertex)
+{
+  // fill info on clusters taking them from Multiplicity object
+  //
+  // Used in the "no new reco" mode: when tracklets are not re-reconstructed
+  // (no RecPoints tree is read), the cluster phi-vs-z occupancy histos are
+  // filled from the AliMultiplicity object stored in the input ESD.
+  // mlt     : multiplicity object of the current event (assumed non-null by the caller)
+  // zVertex : Z of the reconstructed primary vertex, used to convert theta to z at the SPD radius
+  const double kRSPD2 = 3.9; // nominal SPD radius [cm] for the theta->z conversion (presumably the inner layer -- TODO confirm)
+  TH2F *hclU = (TH2F*)fHistosCustom->UncheckedAt(kHClUsedInfoL0); // clusters of tracklets passing the cuts
+  TH2F *hclA = (TH2F*)fHistosCustom->UncheckedAt(kHClAllInfoL0);  // all clusters (used + singles)
+  int ntr = mlt->GetNumberOfTracklets();
+  for (int itr=ntr;itr--;) {
+    // a tracklet is "good" if it passes both the dPhi (shift-corrected) and weighted-distance cuts
+    Bool_t goodTracklet = kTRUE;
+    if (TMath::Abs( mlt->GetDeltaPhi(itr)-fDPhiShift)>fDPhiSCut) goodTracklet = kFALSE;
+    if (mlt->CalcDist(itr) > fNStdCut) goodTracklet = kFALSE;
+    double phi = mlt->GetPhi(itr);
+    double z = kRSPD2/TMath::Tan(mlt->GetTheta(itr)) + zVertex; // cluster z at radius kRSPD2
+    if (goodTracklet) hclU->Fill(z,phi);
+    hclA->Fill(z,phi);
+  }
+  //
+  // single (unassociated) clusters contribute to the "all clusters" histo only
+  int ncl = mlt->GetNumberOfSingleClusters();
+  for (int icl=ncl;icl--;) {
+    double phi = mlt->GetPhiSingle(icl);
+    double z = kRSPD2/TMath::Tan(mlt->GetThetaSingle(icl)) + zVertex;
+    hclA->Fill(z,phi);
+  }
+  //
+}
+
kHDPhiDTheta, // measured dTheta vs dPhi
kHDPhiSDThetaX, // dTheta (1/sin^2 scaled if needed) vs dPhi (bending subtracted)
kHWDist, // Weighted distance
+ kHEtaZvSPD1, // histo zv vs eta for SPD1 single clusters
kNStandardH // number of standard histos per centrality bin
};
enum { // define here id's of any custom histos to be added to fHistosCustom
void FillMCPrimaries();
void FillSpecies(Int_t primsec, Int_t id);
void FillClusterInfo();
+ void FillClusterInfoFromMult(const AliMultiplicity* mlt, double zVertex);
Int_t GetPdgBin(Int_t pdgCode);
void CheckReconstructables();
Int_t GetCentralityBin(Float_t percentile) const;
Float_t myMergeFactor = -1; // if the files were manually merged, scale everything except statistics by 1/myMergeFactor
Int_t nCentBins = -1;
TList *listDt=0, *listMC=0;
-TObjArray resArr;
+TObjArray resArr, resDnDeta;
char outStr[1000];
char outTitle[1000];
TString uniqueName="";
gPad->SetLeftMargin(0.15);
//
// corrected data
- TH1* hsigCorr = ((TH2F*)res->At(shift + kSigCorr))->ProjectionX(prefN+"DataCorrSignal");
+ TString nms = prefN;
+ nms += "DataCorrSignal";
+ nms += "_";
+ nms += uniqueName;
+ TH1* hsigCorr = ((TH2F*)res->At(shift + kSigCorr))->ProjectionX(nms.Data());
SetHStyle(hsigCorr,kRed,20,1.0);
hsigCorr->Scale(1./hsigCorr->GetBinWidth(1));
hsigCorr->Draw();
TLatex *txfit = new TLatex(-0.2,hsigCorr->GetMinimum()*0.9, ftres);
txfit->SetTextSize(0.04);
txfit->Draw();
+ resDnDeta.AddAtAndExpand( hsigCorr, bin );
+ hsigCorr->SetDirectory(0);
//
// raw data
TH1* hraw = ((TH2F*)res->At(shift+kRawDtCut))->ProjectionX(prefN+"DataRaw");
+Bool_t needRecPoints = kFALSE;
+
void MyAnalysisMacroTrackletMulti
(TString dataset="/alice/sim/LHC10f8c_130844",
TString outFName = "trbg.root",
//
if (cutSigDPhiS<0) cutSigDPhiS = TMath::Sqrt(cutSigNStd)*dphi;
//
+ needRecPoints = doRec || doInj || doRot || doMix;
+ //
printf("Start Analysis for %s, max %d Events skipping %d, Event Cuts: %.1f<eta<%.1f, %.2f<Zv<%.2f\n",
dataset.Data(),nEvents,nEventsSkip,etaMin,etaMax,zMin,zMax);
printf("Centrality variable: %d\n",useCentVar);
printf("Loading Centrality task\n");
gROOT->LoadMacro("$ALICE_ROOT/ANALYSIS/macros/AddTaskCentrality.C");
AliCentralitySelectionTask *taskCentrality = AddTaskCentrality();
- taskCentrality->SetDebugLevel(2);
+ // taskCentrality->SetDebugLevel(2);
if (useMC) taskCentrality->SetMCInput();
// taskCentrality->Dump();
//
if (!esdInputHandler)
{
Info("CustomAnalysisTaskInputSetup", "Creating esdInputHandler ...");
- esdInputHandler = new AliESDInputHandlerRP();
+ if (needRecPoints)
+ esdInputHandler = new AliESDInputHandlerRP();
+ else
+ esdInputHandler = new AliESDInputHandler();
+ //
mgr->SetInputEventHandler(esdInputHandler);
}
-
+ //
if (useKine)
{
AliMCEventHandler* mcInputHandler = dynamic_cast<AliMCEventHandler*>(AliAnalysisManager::GetAnalysisManager()->GetMCtruthEventHandler());
-dN/dEta analysis can be done in 2 ways:
+Questions to ruben.shahoyan@cern.ch
+
+------------------------------------------------------------
+PbPb dN/dEta analysis can be done in 2 ways:
1) For single centrality bin:
Using AliTrackletTaskUni.{h,cxx} steered by the
beforehand)
-2) Mor multiple centrality bins at once:
+2) For multiple centrality bins at once:
AliTrackletTaskMulti.{h,cxx} steered by the
runAAFMulti.C and MyAnalysisMacroTrackletMulti.C
-It produces for each centrality bin a set of histos, particularly 2D histos for
+One should set in AliTrackletTaskMulti the
+const Float_t AliTrackletTaskMulti::fgkCentPerc[] ... array with the definition of selected
+centrality bins, i.e. {0.,5.,10.,100} will create bin0 for 0-5% centrality, bin1 for 5-10% etc.
+
+The variable on which centrality is defined is selected via runAAFMulti useCentVar parameter,
+should correspond to one of the AliTrackletTaskMulti
+enum {kCentV0M,kCentFMD,kCentTRK,kCentTKL,kCentCL0,kCentCL1,kCentV0MvsFMD,kCentTKLvsV0,kCentZEMvsZDC,kNCentTypes};
+
+This produces for each centrality bin a set of histos, particularly 2D histos for
Zv vs eta with the signal cut on "distance" already applied (by defaults one for
"w.dist" another for "dphi-bend" + 1D histo of "distance" for selected Zv,eta
range (to be used for the bg matching the tails of data"
One has to runAAFMulti.C for over the data and MC datasets and then analyse them
using the CorrectSpectraMulti.C macro
+The typical use of runAAFMulti is (used for PbPb analysis in 2010)
+root -q 'runAAFMulti.C(
+"/alice/data/LHC10h_000137366_p2", // input
+"resMultiWide_LHC10h_000137366_p2_eta_m08_p08_zv_m7_p7_zdczem.root", // output
+-1, // N events to process (-1 : all events)
+-0.5,0.5, // eta selection
+-7,7, // Zv selection
+8, // centrality variable
+0.7520, // rescale MC V0 to match data
+ 1.5, // cut on weighed distance used to extract signal
+ -1, // cut on dPhi-phiBent used to extract signal (if negative -> dphi*sqrt(cutSigNStd), recommended!)
+kTRUE, // fill MC info (macro detects automatically that MC is analysed from ../sim/.. in the input dataset name)
+kTRUE, // redo tracklets reconstruction and use new AliMultiplicity to fill histos
+kTRUE // generate injected bg
+... the rest is better to not touch..
+)'
+
--------------------------------
Both methods can use 3 types of generated bg: injection, rotation and mixing.
Simultaneous eployment of all these methods is also possible, but may create
The corresponding CorrectSpectra... macros must be tuned for the bg.type used.
+-----------------------------------------------------
+Update: Wed Apr 20 16:23:55 CEST 2011
+Adapted AliTrackletTaskMulti for pp data analysis w/o bg generation. In this case
+the recpoints and the connection to OCDB (alien libs) are not needed.
+One should set the
+const Float_t AliTrackletTaskMulti::fgkCentPerc[] = {0,100};
+and choose a centrality variable available in pp, like V0.
+Note that in this mode the AliMultiplicity object from the input dataset is used.
+
+The typical call of runAAFMulti is:
+
+root -q 'runAAFMulti.C("/alice/data/LHC10e_000130844_p2","resppWide_LHC10e_130844_p2_eta_m26_p26_zv_m20_p20a.root",
+-1,
+-2.6,2.6,
+-20,20,0,
+0.7520,
+2., // put it to large value to not affect the tracklet selection from existing AliMultiplicity object
+-1,
+kTRUE,
+kFALSE, // DO NOT do new reco of tracklets
+kFALSE, // DO NOT do bg. generation by injection
+kFALSE, // DO NOT do bg.generation by rotation
+kFALSE, // DO NOT do bg.generation by mixing
+3.14159e+00, // irrelevant
+1., // irrelevant
+kFALSE, // NO scaling of dtheta by sin^2(theta) (that's how pp data was reconstructed so far...)
+2, // irrelevant
+0.08, // dphi tolerance
+0.025 // dtheta tolerance
+)'
+
+The sample macro ppcor.C shows how to extract dNdEta from the outputs of data and MC.
+
+------------------------------------------------------------
--- /dev/null
+#if !defined(__CINT__) || defined(__MAKECINT__)
+#include "TList.h"
+#include "TFile.h"
+#include "TStyle.h"
+#include "TH1F.h"
+#include "TH2F.h"
+#include "THnSparse.h"
+#include "TLegend.h"
+#include "TSystem.h"
+#include "TMath.h"
+#include "TCanvas.h"
+#include "TLegend.h"
+#include "TLatex.h"
+#include "TF1.h"
+#include "TLine.h"
+#include "TPaletteAxis.h"
+#include "TArrayD.h"
+#include "TGraphErrors.h"
+//
+//
+#endif
+
+
+// input histograms, fetched from the task output lists by ppcor():
+TH2 *hMCAccTr,*hMCAccClsS,*hMCAccCls,*hMCGen,*hzvMC2; // MC: tracklets, single clusters, their sum, generated primaries, Zv distribution
+TH2 *hDtAccTr,*hDtAccClsS,*hDtAccCls,*hzvDt2;         // data: same reconstructed-level histos
+TH1 *hzvMC,*hzvDt,*hMCTrue;                           // Zv projections and true generated dN/dEta (MC)
+//
+// outputs of CorrectMult, for the tracklet-only ("Tr") and tracklet+cluster ("Cl") estimators:
+TH2 *hTrAcceptance,*hTrDtAccNrm,*hTrDtCorr2D;
+TH2 *hClAcceptance,*hClDtAccNrm,*hClDtCorr2D;
+TH1 *hTrDtCorr1D,*hClDtCorr1D;
+//
+// for test of MC on MC only
+TH2 *hMCGenData = 0;
+TH1 *hMCTrueData = 0;
+
+TH1* GetHisto(TList* lst, const char* name, const char* pref="");
+void CorrectMult(TH2* hMCAcc, TH2* hMCGn,
+                 TH2* hDtAcc, TH1* hzDt,
+                 const char* pref,
+                 // output
+                 TH2 *&hDtAccNrm,   // raw data normalized per Z bin
+                 TH2 *&hAcceptance, // 2D acceptance
+                 TH2 *&hDtCorr2D,   // 2D corrected dN/dEta per Z bin (norm up to bin width)
+                 TH1 *&hDtCorr1D    // 1D corrected dN/dEta
+                 );
+
+
+const double minZStat = 50; // minimal statistics (events) required in a Zv bin for it to enter the normalization
+
+void ppcor(const char* flMC, const char* flDt)
+{
+  // Build the corrected dN/dEta from two AliTrackletTaskMulti outputs:
+  //   flMC : MC output file   (supplies the acceptance correction: reconstructed/generated)
+  //   flDt : data output file (supplies the raw measurement)
+  // Two estimators are produced: tracklets only ("trc") and tracklets + SPD1 single
+  // clusters ("cls"); the results are left in the hTr*/hCl* globals declared above.
+  // NOTE(review): no protection against missing files/lists/histograms -- a wrong
+  // input name will crash on a null pointer.
+  TFile *fileMC = TFile::Open(flMC);
+  TList* clistMC = (TList*)fileMC->Get("clist");
+  TFile *fileDt = TFile::Open(flDt);
+  TList* clistDt = (TList*)fileDt->Get("clist");
+  //
+  hMCAccTr = (TH2*)GetHisto(clistMC,"b0_TrData_ZvEtaCutT","_mc");
+  hMCAccClsS = (TH2*)GetHisto(clistMC,"b0_TrData_ZvEtaSPD1","_mc");
+  // combined "clusters" estimator: single clusters + tracklets
+  hMCAccCls = (TH2*)hMCAccClsS->Clone( Form("MCClusSPD_%s","_mc") );
+  hMCAccCls->Add(hMCAccTr);
+  hMCGen = (TH2*)GetHisto(clistMC,"b0_zvEtaPrimMC","_mc");
+  hzvMC2 = (TH2*)GetHisto(clistMC,"zv","_mc");
+  hzvMC = hzvMC2->ProjectionX("zvgenMC");
+  //
+  // true generated dN/dEta: generated primaries per event, per unit eta
+  hMCTrue = hMCGen->ProjectionX("MCTrue");
+  if (hzvMC2->Integral()>0) hMCTrue->Scale(1./hzvMC2->Integral());
+  hMCTrue->Scale(1./hMCTrue->GetBinWidth(1));
+  //
+  hDtAccTr = (TH2*)GetHisto(clistDt,"b0_TrData_ZvEtaCutT","_dt");
+  hDtAccClsS = (TH2*)GetHisto(clistDt,"b0_TrData_ZvEtaSPD1","_dt");
+  hDtAccCls = (TH2*)hDtAccClsS->Clone( Form("DtClusSPD_%s","_dt") );
+  hDtAccCls->Add(hDtAccTr);
+  hzvDt2 = (TH2*)GetHisto(clistDt,"zv","_dt");
+  hzvDt = hzvDt2->ProjectionX("zvDt");
+  //
+  // present only when the "data" file is itself MC (closure test, see globals above)
+  hMCGenData = (TH2*)GetHisto(clistDt,"b0_zvEtaPrimMC","_data");
+  if (hMCGenData) {
+    hMCTrueData = hMCGenData->ProjectionX("MCTrueData");
+    if (hzvDt2->Integral()>0) hMCTrueData->Scale(1./hzvDt2->Integral());
+    hMCTrueData->Scale(1./hMCTrueData->GetBinWidth(1));
+  }
+  //
+  CorrectMult(hMCAccTr,hMCGen,
+              hDtAccTr,hzvDt, "trc",
+              hTrDtAccNrm,hTrAcceptance,hTrDtCorr2D,hTrDtCorr1D);
+  //
+  CorrectMult(hMCAccCls,hMCGen,
+              hDtAccCls,hzvDt, "cls",
+              hClDtAccNrm,hClAcceptance,hClDtCorr2D,hClDtCorr1D);
+  //
+  //
+  hTrDtCorr1D->Draw(); // dndeta from tracklets
+  hClDtCorr1D->Draw(); // dndeta from clusters
+  // NOTE(review): second Draw() has no "same" option -- it supersedes the first
+  // on the current pad; confirm this is intended.
+}
+
+
+TH1* GetHisto(TList* lst, const char* name, const char* pref)
+{
+  // Fetch histogram "name" from the output list and return an independent clone,
+  // renamed so that the MC and data copies of the same histo can coexist in memory.
+  // Returns 0 if the histogram is not found in the list.
+  // NOTE(review): the clone name is <name><pref>s -- the trailing 's' comes from the
+  // "%s%ss" format; presumably intentional to avoid a name clash, TODO confirm.
+  // NOTE(review): the clone is not detached via SetDirectory(0) -- it stays attached
+  // to the open input file; confirm the TFiles are kept open for the macro lifetime.
+  TH1* hst = (TH1*)lst->FindObject(name);
+  if (!hst) return 0;
+  TH1* hcl = (TH1*) hst->Clone( Form("%s%ss",hst->GetName(),pref) );
+  return hcl;
+}
+
+void CorrectMult(TH2* hMCAcc, TH2* hMCGn,
+                 TH2* hDtAcc, TH1* hzDt,
+                 const char* pref,
+                 // output
+                 TH2 *&hDtAccNrm,   // raw data normalized per Z bin
+                 TH2 *&hAcceptance, // 2D acceptance
+                 TH2 *&hDtCorr2D,   // 2D corrected dN/dEta per Z bin (norm up to bin width)
+                 TH1 *&hDtCorr1D    // 1D corrected dN/dEta
+                 )
+{
+  //
+  // Correction chain (output histos are allocated here and returned by reference):
+  //  1) normalize the raw data (eta,Zv) histo by the number of events in each Zv bin,
+  //     skipping Zv bins with fewer than minZStat events;
+  //  2) acceptance = MC reconstructed / MC generated (2D, eta vs Zv);
+  //  3) divide the normalized data by the acceptance;
+  //  4) collapse to 1D via an inverse-variance weighted average over the Zv bins,
+  //     dividing by the eta bin width at the end.
+  int neta = hDtAcc->GetXaxis()->GetNbins();
+  int nz = hDtAcc->GetYaxis()->GetNbins();
+  //
+  hDtAccNrm = (TH2*) hDtAcc->Clone( Form("rawDataNormZ_%s",pref) );
+  hDtAccNrm->Reset();
+  for (int iz=1;iz<=nz;iz++) { // normalize data histo per Z bin
+    double zv = hDtAcc->GetYaxis()->GetBinCenter(iz);
+    int iz0 = hzDt->FindBin(zv);
+    double zst = hzDt->GetBinContent(iz0);
+    double zstE = hzDt->GetBinError(iz0);
+    if (zst<minZStat) continue; // too few events at this Zv -> leave bin empty
+    for (int ix=1;ix<=neta;ix++) {
+      double vl = hDtAcc->GetBinContent(ix,iz);
+      double er = hDtAcc->GetBinError(ix,iz);
+      if (vl<1e-9 || er<1e-6) continue; // skip empty bins
+      double rat = vl/zst;
+      // relative errors of numerator and denominator added in quadrature
+      double ratE = rat*TMath::Sqrt( er*er/(vl*vl) + (zstE*zstE)/(zst*zst) );
+      //printf("%2d %2d %+e(%+e) -> %+e(%+e)\n",ix,iz,vl,er,rat,ratE);
+      hDtAccNrm->SetBinContent(ix,iz,rat);
+      hDtAccNrm->SetBinError(ix,iz,ratE);
+    }
+  }
+  //
+  hAcceptance = (TH2*)hMCAcc->Clone( Form("Acceptance_%s",pref) );
+  hAcceptance->Divide(hMCGn);
+  // NOTE(review): plain Divide() -- errors are treated as uncorrelated, not binomial
+  // (no "B" option); confirm this is acceptable for the acceptance ratio.
+  //
+  hDtCorr2D = (TH2*)hDtAccNrm->Clone( Form("dNdEtaCor2D_%s",pref) );
+  hDtCorr2D->Divide(hAcceptance);
+  //
+  hDtCorr1D = hDtCorr2D->ProjectionX( Form("dNdEtaCor1D_%s",pref) );
+  hDtCorr1D->Reset();
+  //
+  for (int ix=1;ix<=neta;ix++) { // get weighted average
+    double sm=0,sme=0;
+    double bw = hDtCorr1D->GetBinWidth(ix);
+    for (int iz=1;iz<=nz;iz++) {
+      double vl = hDtCorr2D->GetBinContent(ix,iz);
+      double er = hDtCorr2D->GetBinError(ix,iz);
+      if (vl<1e-6 || er<1e-12) continue; // skip empty/weightless bins
+      sm += vl/(er*er);   // inverse-variance weighted sum
+      sme += 1./(er*er);  // sum of weights
+    }
+    if (sme<1e-6) continue; // no Zv bin contributed to this eta bin
+    sm /= sme;
+    sme = 1./TMath::Sqrt(sme);
+    hDtCorr1D->SetBinContent(ix,sm/bw);
+    hDtCorr1D->SetBinError(ix,sme/bw);
+  }
+  //
+}
Bool_t checkReconstructables = kFALSE,//kTRUE, // fill histos for reconstructable (needs useMC and doRec)
//
//TString alirootVer = "VO_ALICE@AliRoot::v4-21-17-AN",
- TString alirootVer = "VO_ALICE@AliRoot::v4-21-17b-AN",
+ TString alirootVer = "VO_ALICE@AliRoot::v4-21-20-AN",
TString rootVer = "VO_ALICE@ROOT::v5-28-00a",
//
//TString proofCluster="shahoian@skaf.saske.sk"
//
Bool_t runLocal = kFALSE;//kTRUE; // true only for local test mode
if (runLocal) {
- dataset = "/default/shahoian/test";//"/default/shahoian/test";
- //dataset = "/default/shahoian/test";
+ // dataset = "/default/shahoian/test_pp";//"/default/shahoian/test";
+ dataset = "default/shahoian/test_pp130850";
proofCluster = "";
alirootVer = "AliRootProofLite";
nEvents = 500;
list->Add(new TNamed("ALIROOT_MODE" , alirootMode.Data()));
list->Add(new TNamed("ALIROOT_EXTRA_LIBS", extraLibs.Data()));
list->Add(new TNamed("ALIROOT_EXTRA_INCLUDES", "ITS:include"));
- list->Add(new TNamed("ALIROOT_ENABLE_ALIEN","1"));
+ if (doRec || doInj || doRot || doMix) list->Add(new TNamed("ALIROOT_ENABLE_ALIEN","1"));
//
//REM: same version of AliRoot on client!!!!! Otherwise error!!
TProof::Mgr(proofCluster.Data())->SetROOTVersion(rootVer.Data());
}
gProof->Exec("TObject *o = gEnv->GetTable()->FindObject(\"Proof.UseMergers\");"
"gEnv->GetTable()->Remove(o);", kTRUE);
- gProof->SetParameter("PROOF_UseMergers", -1);
+ gProof->SetParameter("PROOF_UseMergers", 0);
// Lets enable aliroot + extra libs on proof cluster
if (runLocal) gProof->UploadPackage(alirootVer.Data());
gProof->EnablePackage(alirootVer.Data(), list);