#include "TTree.h"
#include "TH2D.h"
#include "TH1D.h"
#include "TH3D.h"
#include "TH3F.h"
#include "THnSparse.h"
#include "TProfile.h"
#include "TProfile2D.h"
#include "TF1.h"
#include "TFitResultPtr.h"
#include "TFitResult.h"
#include "TCanvas.h"
#include "TStyle.h"
#include "TVirtualFitter.h"
#include "TLinearFitter.h"
#include "TList.h"
#include "TString.h"
#include "TLegend.h"
#include "TLegendEntry.h"
#include "TFile.h"
#include "TGraphErrors.h"
#include "TGraph.h"
#include "TMath.h"
#include "TDatime.h"
#include "TSystem.h"
#include "TSpline.h"

#include "AliPID.h"

#include <iostream>
#include <iomanip>

#include "THnSparseDefinitions.h"
#include "ProgressBar.h"

enum mapType {
  kNoNormalisation = 1,
  kSmallEtaNormalisation = 2,
  kLargeEtaNormalisation = 3
};
enum type {
  kTypeDelta = 1,
  kTypeDeltaPrime = 2,
  kTypeSigmaDeltaPrime = 3
};

const Double_t binContentThreshold = 1e-4;
const TString suffixMapType[kLargeEtaNormalisation + 1] = {"", "NoNormalisation", "SmallEtaNormalisation", "LargeEtaNormalisation"};

//__________________________________________________________________________________________
Int_t getBinX(TH2D* h, Double_t tanTheta)
{
  Int_t binX = h->GetXaxis()->FindBin(tanTheta);

  if (binX <= 0)
    binX = 1;
  if (binX > h->GetXaxis()->GetNbins())
    binX = h->GetXaxis()->GetNbins();

  return binX;
}


//_________________________________________________________________________________________
Int_t getBinY(TH2D* h, Double_t dEdxInv)
{
  Int_t binY = h->GetYaxis()->FindBin(dEdxInv);

  if (binY <= 0)
    binY = 1;
  if (binY > h->GetYaxis()->GetNbins())
    binY = h->GetYaxis()->GetNbins();

  return binY;
}


//__________________________________________________________________________________________
void SetHistAxis(TH2D* h, Int_t type)
{
  h->GetXaxis()->SetTitle("tan(#Theta)");
  h->GetYaxis()->SetTitle("1/(dE/dx) (arb. unit)");
  if (type == kTypeDelta)
    h->GetZaxis()->SetTitle("#Delta (arb. unit)");
  else if (type == kTypeDeltaPrime)
    h->GetZaxis()->SetTitle("#Delta' (arb. unit)");
  else if (type == kTypeSigmaDeltaPrime)
    h->GetZaxis()->SetTitle("Par1(#sigma_{rel}(#Delta'))");
  else
    printf("SetHistAxis: Unknown type %d!\n", type);
  h->GetXaxis()->SetTitleSize(0.04);
  h->GetXaxis()->SetTitleOffset(2.2);
  h->GetXaxis()->SetLabelSize(0.03);
  h->GetYaxis()->SetTitleSize(0.04);
  h->GetYaxis()->SetTitleOffset(2.6);
  h->GetYaxis()->SetLabelSize(0.03);
  h->GetZaxis()->SetTitleSize(0.04);
  h->GetZaxis()->SetTitleOffset(1.3);
}

//__________________________________________________________________________________________
Double_t getMedianOfNonZeros(Double_t* input, const Int_t dim)
{
  Double_t values[dim];
  for (Int_t i = 0; i < dim; i++)
    values[i] = 0.0;

  Int_t numNonZero = 0;

  for (Int_t i = 0; i < dim; i++) {
    if (input[i] > 0) {
      values[numNonZero] = input[i];
      numNonZero++;
    }
  }

  return ((numNonZero > 0) ? TMath::Median(numNonZero, values) : 0);
}


//__________________________________________________________________________________________
void normaliseHisto(TH2D* h, Double_t lowerLimit, Double_t upperLimit, Int_t type)
{
  if (lowerLimit >= upperLimit) {
    printf("!!!Error normaliseHisto: lowerLimit >= upperLimit!\n");
    return;
  }

  if (lowerLimit < 0 || upperLimit < 0) {
    printf("!!!Error normaliseHisto: lowerLimit, upperLimit >= 0 required!\n");
    return;
  }

  Int_t binLow = h->GetXaxis()->FindBin(lowerLimit);
  if (binLow < 1)
    binLow = 1;

  Int_t binHigh = h->GetXaxis()->FindBin(upperLimit);
  if (binHigh > h->GetXaxis()->GetNbins())
    binHigh = h->GetXaxis()->GetNbins();

  Int_t binLow2 = h->GetXaxis()->FindBin(-upperLimit);
  if (binLow2 < 1)
    binLow2 = 1;

  Int_t binHigh2 = h->GetXaxis()->FindBin(-lowerLimit);
  if (binHigh2 > h->GetXaxis()->GetNbins())
    binHigh2 = h->GetXaxis()->GetNbins();

  // If 0 is included in the range, the two ranges might overlap -> Remove one of the double-counted bins
  if (binHigh2 >= binLow) {
    binHigh2 = binLow - 1;
    if (binHigh2 < 1) {
      printf("!!!Error: binHigh2 = binLow - 1 is < 1\n");
      return;
    }
  }

  if (binLow2 > binHigh2)
    binLow2 = binHigh2;

  // Normalise with respect to some given tan(theta)
  // To be robust against fluctuations: Take median of a few tan(theta) bins around the desired value for scaling
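  // Worked example (hypothetical numbers, not taken from data): if the reference bins of a given
  // 1/(dE/dx) row contain {1.02, 0.98, 1.01}, their median is 1.01. For kTypeDeltaPrime the whole
  // row is then multiplied by 1/1.01, so the map becomes 1 at the reference tan(theta); for
  // kTypeDelta the median would be subtracted from each bin instead.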
  const Int_t nThetaBins = (binHigh - binLow + 1) + (binHigh2 - binLow2 + 1);

  if (nThetaBins <= 0) {
    printf("Cannot renormalise histo due to bad limits for reference bins: %f -> %f\n", lowerLimit, upperLimit);
    return;
  }

  Double_t* values;
  values = new Double_t[nThetaBins];

  for (Int_t i = 1; i <= h->GetYaxis()->GetNbins(); i++) {
    // Reset values
    Int_t binIndex = 0;
    for (Int_t thetaBin = 1; thetaBin <= nThetaBins; thetaBin++)
      values[thetaBin - 1] = 0;

    for (Int_t thetaBin = binLow; thetaBin <= binHigh; thetaBin++) {
      Double_t temp = h->GetBinContent(thetaBin, i);
      values[binIndex] = (temp > 0 || type == kTypeDelta) ? temp : 0;
      binIndex++;
    }
    for (Int_t thetaBin = binLow2; thetaBin <= binHigh2; thetaBin++) {
      Double_t temp = h->GetBinContent(thetaBin, i);
      values[binIndex] = (temp > 0 || type == kTypeDelta) ? temp : 0;
      binIndex++;
    }

    Double_t temp = 0;
    Double_t scale = 0;

    if (type == kTypeDeltaPrime) {
      temp = getMedianOfNonZeros(values, nThetaBins);
      if (temp <= 0)
        continue;
      scale = 1. / temp;
    }
    else if (type == kTypeDelta) {
      scale = TMath::Median(nThetaBins, values);
    }
    else {
      printf("Error: Type %d not supported for normalisation!\n", type);
      return;
    }


    for (Int_t thetaBin = 1; thetaBin <= h->GetXaxis()->GetNbins(); thetaBin++) {
      if (type == kTypeDeltaPrime) {
        h->SetBinContent(thetaBin, i, h->GetBinContent(thetaBin, i) * scale);
        h->SetBinError(thetaBin, i, h->GetBinError(thetaBin, i) * scale);
      }
      else if (type == kTypeDelta) {
        h->SetBinContent(thetaBin, i, h->GetBinContent(thetaBin, i) - scale);
        h->SetBinError(thetaBin, i, h->GetBinError(thetaBin, i) - scale);
      }
    }
  }
  delete [] values;
}


//__________________________________________________________________________________________
void eliminateNonPositivePointsInHisto(TH2D* h)
{
  const Int_t nNeighbours = 24;
  Double_t* values;
  values = new Double_t[nNeighbours];

  // Search for bins with content <= 0 (bins with fit errors). Then take all surrounding points in +-2 rows and columns
  // and take their median (only those bins without fit errors!) as the new bin content of the considered bin.
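  // Sketch (hypothetical values): for a failed bin whose up to 24 neighbours contain
  // {1.01, 0, 0.99, 1.03, 0, 1.00, ...}, only the positive entries enter getMedianOfNonZeros,
  // so the bin is replaced by roughly 1.00 instead of being biased towards 0 by other failed fits.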

  for (Int_t binX = 1; binX <= h->GetXaxis()->GetNbins(); binX++) {
    Int_t firstBinLeft = TMath::Max(1, binX - 2);
    Int_t lastBinRight = TMath::Min(h->GetXaxis()->GetNbins(), binX + 2);

    for (Int_t binY = 1; binY <= h->GetYaxis()->GetNbins(); binY++) {
      if (h->GetBinContent(binX, binY) <= 0) {
        Int_t firstBinBelow = TMath::Max(1, binY - 2);
        Int_t lastBinAbove = TMath::Min(h->GetYaxis()->GetNbins(), binY + 2);

        // Reset values
        Int_t binIndex = 0;
        for (Int_t i = 0; i < nNeighbours; i++)
          values[i] = 0;

        for (Int_t binX2 = firstBinLeft; binX2 <= lastBinRight; binX2++) {
          for (Int_t binY2 = firstBinBelow; binY2 <= lastBinAbove; binY2++) {
            if (binX2 == binX && binY2 == binY)
              continue; // skip bin that is currently under consideration

            values[binIndex] = h->GetBinContent(binX2, binY2);
            binIndex++;
          }
        }

        Double_t temp = getMedianOfNonZeros(values, nNeighbours);
        if (temp <= 0) {
          printf("Error: Could not eliminate values <= 0 for bin at (%f, %f)!\n",
                 h->GetXaxis()->GetBinCenter(binX), h->GetYaxis()->GetBinCenter(binY));
          temp = -1;
        }

        h->SetBinContent(binX, binY, temp);
      }
    }
  }

  delete [] values;
}


//__________________________________________________________________________________________
void addPointToHyperplane(TH2D* h, TLinearFitter* linExtrapolation, Int_t binX, Int_t binY)
{
  if (h->GetBinContent(binX, binY) <= binContentThreshold)
    return; // Reject bins without content (within some numerical precision) or with strange content

  Double_t coord[2] = {0, 0};
  coord[0] = h->GetXaxis()->GetBinCenter(binX);
  coord[1] = h->GetYaxis()->GetBinCenter(binY);
  Double_t binError = h->GetBinError(binX, binY);
  if (binError <= 0) {
    binError = 1000; // Should not happen because bins without content are rejected
    printf("ERROR: This should never happen: Trying to add bin in addPointToHyperplane with error not set....\n");
  }
  // Pass the bin content as the value and the bin error as the point weight
  linExtrapolation->AddPoint(coord, h->GetBinContent(binX, binY), binError);
}

//__________________________________________________________________________________________
TH2D* refineHistoViaLinearExtrapolation(TH2D* h, Double_t refineFactorX, Double_t refineFactorY, Int_t mapType,
                                        TFile* fSave, Bool_t skipBinsAtBoundaries = kFALSE)
{
  if (!h)
    return 0x0;

  Int_t nBinsX = h->GetXaxis()->GetNbins();
  Int_t nBinsY = h->GetYaxis()->GetNbins();

  Int_t firstYbin = 1;

  if (skipBinsAtBoundaries) {
    // Remove the two first and the last bin on y-axis
    nBinsY -= 3;
    firstYbin = 3;
  }

  // Normalise map on demand

  // Use kNoNormalisation for final QA
  if (mapType == kSmallEtaNormalisation) {
    normaliseHisto(h, 0., 0.11, kTypeDeltaPrime);
  }
  else if (mapType == kLargeEtaNormalisation) {
    normaliseHisto(h, 0.81, 0.99, kTypeDeltaPrime);
  }

  // Interpolate to finer map
  TLinearFitter* linExtrapolation = new TLinearFitter(2, "hyp2", "");

  Int_t nBinsXrefined = nBinsX * refineFactorX;
  Int_t nBinsYrefined = nBinsY * refineFactorY;

  TH2D* hRefined = new TH2D(Form("%s_%s_refined", h->GetName(), suffixMapType[mapType].Data()), Form("%s (refined)", h->GetTitle()),
                            nBinsXrefined, h->GetXaxis()->GetBinLowEdge(1), h->GetXaxis()->GetBinUpEdge(nBinsX),
                            nBinsYrefined, h->GetYaxis()->GetBinLowEdge(firstYbin), h->GetYaxis()->GetBinUpEdge(nBinsY));

  for (Int_t binX = 1; binX <= nBinsXrefined; binX++) {
    for (Int_t binY = 1; binY <= nBinsYrefined; binY++) {

      linExtrapolation->ClearPoints();

      hRefined->SetBinContent(binX, binY, 1); // Default value is 1

      Double_t centerX = hRefined->GetXaxis()->GetBinCenter(binX);
      Double_t centerY = hRefined->GetYaxis()->GetBinCenter(binY);

      // For interpolation: Just take the corresponding bin from the old histo.
      // For extrapolation: take the last available bin from the old histo.
      // If the boundaries are to be skipped, also skip the corresponding bins
      Int_t oldBinX = h->GetXaxis()->FindBin(centerX);
      if (oldBinX < 1)
        oldBinX = 1;
      if (oldBinX > nBinsX)
        oldBinX = nBinsX;

      Int_t oldBinY = h->GetYaxis()->FindBin(centerY);
      if (oldBinY < firstYbin)
        oldBinY = firstYbin;
      if (oldBinY > nBinsY)
        oldBinY = nBinsY;

      // Neighbours left column
      if (oldBinX >= 2) {
        if (oldBinY >= 2) {
          addPointToHyperplane(h, linExtrapolation, oldBinX - 1, oldBinY - 1);
        }

        addPointToHyperplane(h, linExtrapolation, oldBinX - 1, oldBinY);

        if (oldBinY < nBinsY) {
          addPointToHyperplane(h, linExtrapolation, oldBinX - 1, oldBinY + 1);
        }
      }

      // Neighbours (and point itself) same column
      if (oldBinY >= 2) {
        addPointToHyperplane(h, linExtrapolation, oldBinX, oldBinY - 1);
      }

      addPointToHyperplane(h, linExtrapolation, oldBinX, oldBinY);

      if (oldBinY < nBinsY) {
        addPointToHyperplane(h, linExtrapolation, oldBinX, oldBinY + 1);
      }

      // Neighbours right column
      if (oldBinX < nBinsX) {
        if (oldBinY >= 2) {
          addPointToHyperplane(h, linExtrapolation, oldBinX + 1, oldBinY - 1);
        }

        addPointToHyperplane(h, linExtrapolation, oldBinX + 1, oldBinY);

        if (oldBinY < nBinsY) {
          addPointToHyperplane(h, linExtrapolation, oldBinX + 1, oldBinY + 1);
        }
      }


      // Fit 2D-hyperplane
      if (linExtrapolation->GetNpoints() <= 0)
        continue;

      if (linExtrapolation->Eval() != 0) // EvalRobust -> Takes much, much, [...], much more time (~hours instead of seconds)
        continue;

      // Fill the bin of the refined histogram with the extrapolated value
      Double_t extrapolatedValue = linExtrapolation->GetParameter(0) + linExtrapolation->GetParameter(1) * centerX
                                   + linExtrapolation->GetParameter(2) * centerY;

      hRefined->SetBinContent(binX, binY, extrapolatedValue);
    }
  }

  delete linExtrapolation;

  return hRefined;
}
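
// Usage sketch for refineHistoViaLinearExtrapolation (hMapCoarse and fOut are hypothetical
// placeholders; refinement factors of 6 match the values used further below, and the fSave
// argument is currently not used inside the function):
//
//   TH2D* hFine = refineHistoViaLinearExtrapolation(hMapCoarse, 6., 6., kNoNormalisation, fOut);
//   if (hFine)
//     hFine->Write();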


//__________________________________________________________________________________________
TFitResult* doubleGaussFit(TH1D* h, Double_t currentMeanMomentum, TSpline3* splPr, TSpline3* splPi, TString fitOption = "QNS")
{
  TFitResultPtr result = h->Fit("gaus", fitOption.Data());

  Double_t contaminationPeakMean = splPi->Eval(currentMeanMomentum / AliPID::ParticleMass(AliPID::kPion))
                                   / splPr->Eval(currentMeanMomentum / AliPID::ParticleMass(AliPID::kProton));

  if (contaminationPeakMean < h->GetXaxis()->GetBinLowEdge(1) ||
      contaminationPeakMean > h->GetXaxis()->GetBinUpEdge(h->GetNbinsX())) {
    return ((Int_t)result == 0) ? result.Get() : 0x0;
  }

  // Estimate parameters for doubleGauss fit
  Double_t estimatedMean = 0;
  Double_t estimatedSigma = 0;
  Double_t estimatedYield = 0;

  if ((Int_t) result == 0) {
    estimatedMean = result->GetParams()[1];
    estimatedSigma = result->GetParams()[2];
    estimatedYield = result->GetParams()[0];
  }
  else {
    estimatedMean = h->GetMean();
    estimatedSigma = h->GetRMS();
    estimatedYield = (estimatedSigma > 0) ? (h->Integral("width") / estimatedSigma) : h->GetEntries();
  }

  TF1* doubleGaus = new TF1("doubleGaus", "[0]*TMath::Gaus(x,[1],[2],0)+[3]*TMath::Gaus(x,[4],[2],0)", 0.6, 1.6);

  Double_t newPars[5] = { estimatedYield, estimatedMean, estimatedSigma, estimatedYield / 10., contaminationPeakMean };
  doubleGaus->SetParameters(newPars);
  doubleGaus->SetParLimits(0, 0., 100. * estimatedYield);
  doubleGaus->SetParLimits(1, 0.6, 1.6);
  doubleGaus->SetParLimits(2, 0, 100. * estimatedSigma);
  doubleGaus->SetParLimits(3, 0, 0.8 * estimatedYield);
  doubleGaus->SetParLimits(4, contaminationPeakMean - 0.1, contaminationPeakMean + 0.1); //TODO doubleGaus->SetParLimits(4, 0.6, 1.6);
  TFitResultPtr result2 = h->Fit(doubleGaus, fitOption.Data());

  // If fit failed, return results of standard fit instead
  if ((Int_t)result2 == 0) {
    return result2.Get();
  }

  return ((Int_t)result == 0) ? result.Get() : 0x0;
}
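
// Minimal usage sketch for doubleGaussFit (the function is not called in this macro by default;
// hSlice, the mean momentum of 0.45 GeV/c and valid splPr/splPi splines are hypothetical
// placeholders for this illustration - in the macro below splPr and splPi are only declared):
//
//   TFitResult* res = doubleGaussFit(hSlice, 0.45, splPr, splPi, "QNS");
//   if (res) {
//     Double_t pionPeakMean  = res->GetParams()[1];
//     Double_t pionPeakSigma = res->GetParams()[2];
//   }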


//__________________________________________________________________________________________
// NOTE: Use smoothing only for the creation of the sigma map. For the normal eta map this pulls down the edges too much <-> trade-off between
// (normally very small) jumps in the map and a good description at the edges.
// For the sigma map the edges are not that sharp, but the fluctuations are by far stronger. There it makes sense to use the smoothing.
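//
// Example invocation from the ROOT prompt (a sketch; the directory, file suffix and map/spline
// file names below are placeholders, not the actual production settings):
//
//   root -l -b -q 'checkShapeEtaTreePions.C+(0x0, 1, 1, kFALSE, "myDataDir", "_test", kFALSE,
//                  "myProtonThetaMap.root", "NoNormalisation", "mySplines.root",
//                  "mySplineName", kNoNormalisation, "fTreePions")'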
TH2* checkShapeEtaTreePions(TTree* tree = 0x0, Double_t lowResolutionFactorX = 1/*2 or 2.1*/, Double_t lowResolutionFactorY = 1/*2 or 2.1*/,
                            Bool_t doSmoothing = kFALSE, TString path = ".", TString suffixForFileName = "", Bool_t useDoubleGaussFit = kFALSE,
                            TString pathNameThetaMap = "finalCuts/uncorrected/0.6GeVcut/pp/7TeV/LHC10d.pass2/outputCheckShapeEtaTree_2012_07_18__14_04.root",
                            TString mapSuffix = "NoNormalisation",
                            TString pathNameSplinesFile = "DefaultSplines.root", TString prSplinesName = "TSPLINE3_DATA_PION_LHC10D_PASS2_PP_MEAN",
                            Int_t returnMapType = kNoNormalisation, TString treeName = "fTreePions")
{
  TString fileName = Form("bhess_PIDetaTreePions%s.root", suffixForFileName.Data());

  if (lowResolutionFactorX <= 0 || lowResolutionFactorY <= 0) {
    printf("Factor for lower resolution <= 0 is not allowed!\n");
    return 0x0;
  }

  TFile* f = 0x0;

  Bool_t saveResults = kFALSE;

  if (!tree) {
    f = TFile::Open(Form("%s/%s", path.Data(), fileName.Data()));
    if (!f) {
      std::cout << "Failed to open file \"" << Form("%s/%s", path.Data(), fileName.Data()) << "\"!" << std::endl;

      return 0x0;
    }

    // Extract the data Tree
    tree = dynamic_cast<TTree*>(f->Get(treeName.Data()));
    if (!tree) {
      std::cout << "Failed to load data tree!" << std::endl;
      return 0x0;
    }

    saveResults = kTRUE;
  }

  TSpline3* splPr = 0x0;
  TSpline3* splPi = 0x0;

  // Extract the correction maps
  TFile* fPrMap = TFile::Open(pathNameThetaMap.Data());
  if (!fPrMap) {
    std::cout << "Failed to open proton map file \"" << pathNameThetaMap.Data() << "\"!" << std::endl;
    return 0x0;
  }

  TH2D* hPrMap = 0x0;

  if (fPrMap) {
    hPrMap = dynamic_cast<TH2D*>(fPrMap->Get(Form("hRefined%s", mapSuffix.Data())));
    if (!hPrMap) {
      std::cout << "Failed to load proton theta map!" << std::endl;
      return 0x0;
    }
  }


  //TString pathNameMomentumMap = "pionTree_11b2/outputCheckShapeEtaTreePions_2012_10_16__11_38.root";
  //TString pathNameMomentumMap = "pionTree_10d1/outputCheckShapeEtaTreePions_2012_10_17__07_38.root";
  TString pathNameMomentumMap = "finalCuts/uncorrected/0.6GeVcut/pp/7TeV/LHC10d.pass2/correctedV0splines_newV0tree/outputCheckShapeEtaTreePions_2012_10_23__16_02_correctedWith_10_38.root";
  TFile* fPiMap = TFile::Open(pathNameMomentumMap.Data());
  if (!fPiMap) {
    std::cout << "Failed to open pion momentum map file \"" << pathNameMomentumMap.Data() << "\"!" << std::endl;
    return 0x0;
  }

  TH2D* hPiMap = 0x0;

  if (fPiMap) {
    hPiMap = dynamic_cast<TH2D*>(fPiMap->Get("hRefinedSmallEtaNormalisation")); //TODO NOW NOW NOW Form("hRefined%s", mapSuffix.Data())
    if (!hPiMap) {
      std::cout << "Failed to load pion momentum map!" << std::endl;
      return 0x0;
    }
  }

  // Output file

  TFile* fSave = 0x0;
  TString saveFileName = "";
  if (saveResults) {
    TDatime daTime;

    TString saveSuffix = "";
    if (suffixForFileName.Length() > 0)
      saveSuffix = Form("_%s", suffixForFileName.Data());

    saveFileName = Form("outputCheckShapeEtaTreePions_%04d_%02d_%02d__%02d_%02d%s.root", daTime.GetYear(), daTime.GetMonth(),
                        daTime.GetDay(), daTime.GetHour(), daTime.GetMinute(), saveSuffix.Data());

    fSave = TFile::Open(Form("%s/%s", path.Data(), saveFileName.Data()), "recreate");
    if (!fSave) {
      std::cout << "Failed to open save file \"" << Form("%s/%s", path.Data(), saveFileName.Data()) << "\"!" << std::endl;
      return 0x0;
    }
  }

  if (f)
    printf("\nProcessing file %s\n", f->GetName());


  // Only activate the branches of interest to save processing time
  tree->SetBranchStatus("*", 0); // Disable all branches
  tree->SetBranchStatus("pTPC", 1);
  //tree->SetBranchStatus("pT", 1);
  tree->SetBranchStatus("dEdx", 1);
  tree->SetBranchStatus("dEdxExpected", 1);
  tree->SetBranchStatus("tanTheta", 1);
  //if (isPbPb)
  //  tree->SetBranchStatus("fMultiplicity", 1);
  tree->SetBranchStatus("tpcSignalN", 1);

  TString multiplicitySelectionString = "";
//  if (isPbPb) // Usually 20 bins from 0 to 4000 => bin width ~ 200
//    multiplicitySelectionString = "&& fMultiplicity > 3000 && fMultiplicity <= 3200";


  Int_t binsX = 30 / lowResolutionFactorX;
  Int_t binsY = 40 / lowResolutionFactorY;
  const Double_t pBoundLow = 0.15; // 1. / 0.6;
  const Double_t pBoundUp = 0.6;   // 1. / 0.15;
  const Double_t lowerTanTheta = -1.0;
  const Double_t upperTanTheta = 1.0;

  progressbar(0.);

  TH2D* hMap = new TH2D("hMap", "#Delta' map for pTPC vs. tan(#theta) via Gauss fit;tan(#theta);p_{TPC} (GeV/c);#Delta'",
                        binsX, lowerTanTheta, upperTanTheta, binsY, pBoundLow, pBoundUp);

  const Int_t nDim = 3;

  Int_t thnBins[nDim] = { binsX, binsY, 200 };
  Double_t xmin[nDim] = { lowerTanTheta, pBoundLow, 0.6 };
  Double_t xmax[nDim] = { upperTanTheta, pBoundUp, 1.6 };
  THnSparseD* hDummy = new THnSparseD("hDummy", "", nDim, thnBins, xmin, xmax);
  TH3D* hPreMap = hDummy->Projection(0, 1, 2);
  hPreMap->SetName("hPreMap");
  hPreMap->SetTitle("#Delta' map for p_{TPC_Inner} vs. tan(#theta)");
  hPreMap->GetXaxis()->SetTitle("tan(#theta)");
  hPreMap->GetYaxis()->SetTitle("p_{TPC_inner} (GeV/c)");
  hPreMap->GetZaxis()->SetTitle("#Delta'");

  delete hDummy;

  Long64_t nTreeEntries = tree->GetEntriesFast();

  Double_t dEdx = 0.;         // Measured dE/dx
  Double_t dEdxExpected = 0.; // Expected dE/dx according to parametrisation
  Double_t tanTheta = 0.;     // Tangens of (local) theta at TPC inner wall
  Double_t pTPC = 0.;         // Momentum at TPC inner wall
  Double_t pT = 0;
  UShort_t tpcSignalN = 0;    // Number of clusters used for dEdx
  //Int_t fMultiplicity = 0;


  tree->SetBranchAddress("dEdx", &dEdx);
  tree->SetBranchAddress("dEdxExpected", &dEdxExpected);
  tree->SetBranchAddress("tanTheta", &tanTheta);
  tree->SetBranchAddress("tpcSignalN", &tpcSignalN);
  tree->SetBranchAddress("pTPC", &pTPC);
  //tree->SetBranchAddress("pT", &pT);
  //if (isPbPb)
  //  tree->SetBranchAddress("fMultiplicity", &fMultiplicity);

  Double_t correctionFactor = 1.0;
  Double_t dEdxOrig = 0;

  for (Long64_t i = 0; i < nTreeEntries; i++) {
    tree->GetEntry(i);

    if (dEdx <= 0 || dEdxExpected <= 0 || tpcSignalN <= 60)
      continue;

    dEdxOrig = dEdx;

    // Correct for pure momentum dependence (azimuthal dependence comes into play)
    correctionFactor = hPiMap->GetBinContent(getBinX(hPiMap, tanTheta), getBinY(hPiMap, pTPC));
    correctionFactor -= 1.;
    correctionFactor *= 0.5 * (TMath::Erf((0.5 - pTPC) / 0.05) + 1.); // Only correct up to 0.5 GeV/c and switch off within about 2*0.05 GeV/c
    correctionFactor += 1.;
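    // Equivalently: with the weight w = 0.5*(TMath::Erf((0.5 - pTPC)/0.05) + 1), which is ~1 well
    // below 0.5 GeV/c and ~0 well above, the factor becomes w*mapValue + (1 - w)*1, i.e. the
    // momentum correction would be smoothly switched off around 0.5 GeV/c (note that its
    // application to dEdx is currently commented out below).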
    //TODO NOW dEdx /= correctionFactor;


    // Correct eta dependence
    correctionFactor = hPrMap->GetBinContent(getBinX(hPrMap, tanTheta), getBinY(hPrMap, 1. / dEdxOrig));
    dEdx /= correctionFactor;

    hPreMap->Fill(tanTheta, pTPC, dEdx / dEdxExpected);

    if (i % 1000 == 0)
      progressbar(100. * (((Double_t)i) / nTreeEntries));
  }

  progressbar(100.);



  printf("\n\nStarted map creation....\n");


  const Int_t numTotalBins = hPreMap->GetXaxis()->GetNbins() * hPreMap->GetYaxis()->GetNbins();

  for (Int_t binY = 1; binY <= hPreMap->GetYaxis()->GetNbins(); binY++) {
    for (Int_t binX = 1; binX <= hPreMap->GetXaxis()->GetNbins(); binX++) {
      TH1D* hTempProjectionZ = 0x0;

      hTempProjectionZ = hPreMap->ProjectionZ("hTempProjectionZ", binX, binX, binY, binY);

      if (hTempProjectionZ->GetEntries() < 10) {
        printf("\nWarning: Too few entries for (%f, %f - %f): skipped\n",
               hPreMap->GetXaxis()->GetBinCenter(binX), hPreMap->GetYaxis()->GetBinLowEdge(binY),
               hPreMap->GetYaxis()->GetBinUpEdge(binY));
        printedSomething = kTRUE;

        delete hTempProjectionZ;
        continue;
      }

      Double_t meanWithoutFit = hTempProjectionZ->GetMean();

      // Do not overwrite with data from lower statistics (larger momentum) -> Most of the cases should be caught by the merging!
      TFitResult* fitRes = 0x0;
      TFitResultPtr fitResPtr = hTempProjectionZ->Fit("gaus", "QSN", ""); //TODO removed option L
      fitRes = ((Int_t)fitResPtr == 0) ? fitResPtr.Get() : 0x0;

      Double_t mean = 0;
      Double_t meanError = 0;

      // If the fit failed, use the mean of the histogram as an approximation instead
      if (!fitRes) {
        mean = meanWithoutFit;
        meanError = 1000;

        printf("\nWarning: Fit failed for (%f, %f), entries: %.0f -> Using mean instead (%f +- %f)\n",
               hPreMap->GetXaxis()->GetBinCenter(binX), hPreMap->GetYaxis()->GetBinCenter(binY),
               hTempProjectionZ->GetEntries(), mean, meanError);
        printedSomething = kTRUE;
      }
      else {
        mean = fitRes->GetParams()[1];
        meanError = fitRes->GetErrors()[1];
      }

      hMap->SetBinContent(binX, binY, mean);
      hMap->SetBinError(binX, binY, meanError);

      delete hTempProjectionZ;

      progressbar(100. * (((Double_t)(((binY - 1.) * hPreMap->GetXaxis()->GetNbins()) + binX)) / numTotalBins));
    }
  }

  progressbar(100);



  TH2D* hPureResults = (TH2D*)hMap->Clone("hPureResults");
  TH2D* hRes3DprimeDiffSmooth = 0x0;

  if (doSmoothing) {
    // --------------------------------------------------
    // Determine difference between smoothed and pure map (gauss fit)
    hRes3DprimeDiffSmooth = (TH2D*)hMap->Clone("hRes3DprimeDiffSmooth");
    hRes3DprimeDiffSmooth->SetTitle("tan(#theta) and pTPC dependence of #Delta': Smooth(Gauss) - Gauss");

    // Smooth the histos

    // If the smoothing is to be applied, the bins with fit errors (content <= 0) must be eliminated in order not to bias the smoothing.
    // In case of no smoothing, the refinement will take care of such bins....
    eliminateNonPositivePointsInHisto(hMap);


    hMap->Smooth();


    hRes3DprimeDiffSmooth->Add(hMap, -1);
    //hRes3DprimeDiffSmooth->GetZaxis()->SetRangeUser(-0.05, 0.05);
    SetHistAxis(hRes3DprimeDiffSmooth, kTypeDeltaPrime);
  }

  //hMap->GetZaxis()->SetRangeUser(0.95, 1.15);
  SetHistAxis(hMap, kTypeDeltaPrime);


  if (saveResults) {
    fSave->cd();
    hPreMap->Write();
  }

  delete hPreMap;


  // --------------------------------------------------
  // Refine the map
  printf("\n\nMap created. Started to refine....\n\n");

  Double_t factorX = 6;
  Double_t factorY = 6;

  if (lowResolutionFactorX != 1)
    factorX *= lowResolutionFactorX;
  if (lowResolutionFactorY != 1)
    factorY *= lowResolutionFactorY;


  Bool_t skipBinsAtBoundaries = kFALSE; // TODO NOW Maybe do not remove these bins after all: the rise in sigma at very small momenta is actually correct, since the splines deviate there and thus cause a large spread. And at large momenta the V0s should currently remedy erroneous deviations. The same holds for the mean.
  TH2D* hRefinedNoNorm = refineHistoViaLinearExtrapolation(hMap, factorX, factorY, kNoNormalisation, fSave, skipBinsAtBoundaries);
  hRefinedNoNorm->SetName(Form("hRefined%s", suffixMapType[kNoNormalisation].Data()));
  SetHistAxis(hRefinedNoNorm, kTypeDeltaPrime);

  TH2D* hRefinedSmallEtaNorm = refineHistoViaLinearExtrapolation(hMap, factorX, factorY, kSmallEtaNormalisation, fSave, skipBinsAtBoundaries);
  hRefinedSmallEtaNorm->SetName(Form("hRefined%s", suffixMapType[kSmallEtaNormalisation].Data()));
  SetHistAxis(hRefinedSmallEtaNorm, kTypeDeltaPrime);

  TH2D* hRefinedLargeEtaNorm = refineHistoViaLinearExtrapolation(hMap, factorX, factorY, kLargeEtaNormalisation, fSave, skipBinsAtBoundaries);
  hRefinedLargeEtaNorm->SetName(Form("hRefined%s", suffixMapType[kLargeEtaNormalisation].Data()));
  SetHistAxis(hRefinedLargeEtaNorm, kTypeDeltaPrime);

  printf("\nDone!\n\n");

  if (lowResolutionFactorX != 1 || lowResolutionFactorY != 1) {
    printf("\n***WARNING***: Low resolution used for map creation! Resolution scaled down by factor (%f, %f)\n\n",
           lowResolutionFactorX, lowResolutionFactorY);
  }



  TH2D* hPureResultsSmallEtaNorm = (TH2D*)hPureResults->Clone("hPureResultsSmallEtaNorm");
  normaliseHisto(hPureResultsSmallEtaNorm, 0., 0.11, kTypeDeltaPrime);

  if (saveResults) {
    fSave->cd();
    hPureResults->Write();
    hPureResultsSmallEtaNorm->Write();
    hMap->Write();
    if (doSmoothing)
      hRes3DprimeDiffSmooth->Write();

    hRefinedNoNorm->Write();
    hRefinedSmallEtaNorm->Write();
    hRefinedLargeEtaNorm->Write();

    TNamed* settings = new TNamed(
      Form("Settings for map: lowResolutionFactor (%f, %f), doSmoothing %d", lowResolutionFactorX, lowResolutionFactorY, doSmoothing), "");
    settings->Write();

    f->Close();
    fSave->Close();

    gSystem->Exec(Form("echo \"%s\" >> %s/settingsForMapCreation.txt",
                       TString(settings->GetName()).ReplaceAll("Settings for map",
                       Form("Settings for map %s", saveFileName.Data())).Data(), path.Data()));
    return 0x0;
  }

  if (returnMapType == kSmallEtaNormalisation)
    return hRefinedSmallEtaNorm;
  else if (returnMapType == kLargeEtaNormalisation)
    return hRefinedLargeEtaNorm;
  else if (returnMapType == kNoNormalisation)
    return hRefinedNoNorm;

  printf("Map type %d is not supported. Returning 0x0...\n", returnMapType);
  return 0x0;
}