temporary task for debugging and consistency checks
author rbertens <rbertens@cern.ch>
Tue, 25 Nov 2014 16:42:13 +0000 (17:42 +0100)
committer rbertens <rbertens@cern.ch>
Tue, 25 Nov 2014 16:43:06 +0000 (17:43 +0100)
PWG/FLOW/Base/AliFlowAnalysisWithScalarProductDev.cxx [new file with mode: 0644]
PWG/FLOW/Base/AliFlowAnalysisWithScalarProductDev.h [new file with mode: 0644]
PWG/FLOW/Tasks/AliAnalysisTaskScalarProductDev.cxx [new file with mode: 0644]
PWG/FLOW/Tasks/AliAnalysisTaskScalarProductDev.h [new file with mode: 0644]

diff --git a/PWG/FLOW/Base/AliFlowAnalysisWithScalarProductDev.cxx b/PWG/FLOW/Base/AliFlowAnalysisWithScalarProductDev.cxx
new file mode 100644 (file)
index 0000000..ea18fd0
--- /dev/null
@@ -0,0 +1,733 @@
+/*************************************************************************
+* Copyright(c) 1998-2008, ALICE Experiment at CERN, All rights reserved. *
+*                                                                        *
+* Author: The ALICE Off-line Project.                                    *
+* Contributors are mentioned in the code where appropriate.              *
+*                                                                        *
+* Permission to use, copy, modify and distribute this software and its   *
+* documentation strictly for non-commercial purposes is hereby granted   *
+* without fee, provided that the above copyright notice appears in all   *
+* copies and that both the copyright notice and this permission notice   *
+* appear in the supporting documentation. The authors make no claims     *
+* about the suitability of this software for any purpose. It is          *
+* provided "as is" without express or implied warranty.                  * 
+**************************************************************************/
+
+#define ALIFLOWANALYSISWITHSCALARPRODUCTDEV_CXX
+#include "TFile.h"      
+#include "TList.h"
+#include "TMath.h"
+#include "TString.h"
+#include "TProfile.h"
+#include "TVector2.h"
+#include "TH1D.h"
+#include "TH1F.h"
+#include "TH2D.h"
+
+#include "AliFlowCommonConstants.h"
+#include "AliFlowEventSimple.h"
+#include "AliFlowVector.h"
+#include "AliFlowTrackSimple.h"
+#include "AliFlowCommonHist.h"
+#include "AliFlowCommonHistResults.h"
+#include "AliFlowAnalysisWithScalarProductDev.h"
+
+//////////////////////////////////////////////////////////////////////////////
+// AliFlowAnalysisWithScalarProductDev:
+// Description: Maker to analyze Flow from the Scalar Product method.
+// authors: Naomi van der Kolk (kolk@nikhef.nl)
+//          Ante Bilandzic (anteb@nikhef.nl)
+// mods:    Carlos Perez (cperez@nikhef.nl)
+//////////////////////////////////////////////////////////////////////////////
+
+ClassImp(AliFlowAnalysisWithScalarProductDev)
+
+//-----------------------------------------------------------------------
+AliFlowAnalysisWithScalarProductDev::AliFlowAnalysisWithScalarProductDev():
+fDebug(0),
+fMinimalBook(kFALSE),
+fUsePhiWeights(0),
+fApplyCorrectionForNUA(0),
+fHarmonic(2),
+fNormalizationType(1),
+fTotalQvector(3),
+fWeightsList(NULL),
+fHistList(NULL),
+fHistProConfig(NULL),
+fHistProQaQbNorm(NULL),
+fHistSumOfWeights(NULL),
+fHistProNUAq(NULL),
+fHistProQNorm(NULL),
+fHistProQaQb(NULL),
+fHistProQaQbM(NULL),
+fHistMaMb(NULL),
+fHistQNormQaQbNorm(NULL),
+fHistQaNormMa(NULL),
+fHistQbNormMb(NULL),
+fResolution(NULL),
+fHistQaQb(NULL),
+fHistQaQbCos(NULL),
+fHistNumberOfSubtractedDaughters(NULL),
+fCommonHists(NULL),
+fCommonHistsuQ(NULL),
+fCommonHistsRes(NULL)
+{
+  //ctor
+  for(int i=0; i!=2; ++i) {
+    fPhiWeightsSub[i] = NULL;
+    for(int j=0; j!=2; ++j) {
+      fHistProUQ[i][j] = NULL;
+      fHistProUQQaQb[i][j] = NULL;
+      fHistProNUAu[i][j][0] = NULL;
+      fHistProNUAu[i][j][1] = NULL;
+      for(int k=0; k!=3; ++k)
+        fHistSumOfWeightsu[i][j][k] = NULL;
+    }
+  }
+}
+//-----------------------------------------------------------------------
+AliFlowAnalysisWithScalarProductDev::~AliFlowAnalysisWithScalarProductDev() 
+{
+  //destructor
+  delete fWeightsList;
+  delete fHistList;
+}
+//-----------------------------------------------------------------------
+void AliFlowAnalysisWithScalarProductDev::Init() {
+  //Define all histograms
+  //printf("---Analysis with the Scalar Product Method--- Init\n");
+  //printf("--- fNormalizationType %d ---\n", fNormalizationType);
+  //save old value and prevent histograms from being added to directory
+  //to avoid name clashes in case multiple analaysis objects are used
+  //in an analysis
+  Bool_t oldHistAddStatus = TH1::AddDirectoryStatus();
+  TH1::AddDirectory(kFALSE);
+  fHistList = new TList();
+  fHistList->SetName("cobjSP");
+  fHistList->SetOwner();
+
+  TList *uQRelated = new TList();
+  uQRelated->SetName("uQ");
+  uQRelated->SetOwner();
+
+  TList *nuaRelated = new TList();
+  nuaRelated->SetName("NUA");
+  nuaRelated->SetOwner();
+
+  TList *errorRelated = new TList();
+  errorRelated->SetName("error");
+  errorRelated->SetOwner();
+
+  TList *tQARelated = new TList();
+  tQARelated->SetName("QA");
+  tQARelated->SetOwner();
+
+  fCommonHists = new AliFlowCommonHist("AliFlowCommonHist_SP","AliFlowCommonHist",fMinimalBook);
+  (fCommonHists->GetHarmonic())->Fill(0.5,fHarmonic); // store harmonic 
+  fHistList->Add(fCommonHists);
+
+  //  fCommonHistsuQ = new AliFlowCommonHist("AliFlowCommonHist_uQ");
+  //  (fCommonHistsuQ->GetHarmonic())->Fill(0.5,fHarmonic); // store harmonic 
+  //  fHistList->Add(fCommonHistsuQ);
+
+  fCommonHistsRes = new AliFlowCommonHistResults("AliFlowCommonHistResults_SP","",fHarmonic);
+  fHistList->Add(fCommonHistsRes);
+
+  fHistProConfig = new TProfile("FlowPro_Flags_SP","Flow_Flags_SP",4,0.5,4.5,"s");
+  fHistProConfig->GetXaxis()->SetBinLabel(1,"fApplyCorrectionForNUA");
+  fHistProConfig->GetXaxis()->SetBinLabel(2,"fNormalizationType");
+  fHistProConfig->GetXaxis()->SetBinLabel(3,"fUsePhiWeights");
+  fHistProConfig->GetXaxis()->SetBinLabel(4,"fHarmonic");
+  fHistProConfig->Fill(1,fApplyCorrectionForNUA);
+  fHistProConfig->Fill(2,fNormalizationType);
+  fHistProConfig->Fill(3,fUsePhiWeights);
+  fHistProConfig->Fill(4,fHarmonic);
+  fHistList->Add(fHistProConfig);
+
+  fHistProQaQbNorm = new TProfile("FlowPro_QaQbNorm_SP","FlowPro_QaQbNorm_SP", 1, 0.5, 1.5,"s");
+  fHistProQaQbNorm->SetYTitle("<QaQb/Na/Nb>");
+  errorRelated->Add(fHistProQaQbNorm);
+
+  fHistProNUAq = new TProfile("FlowPro_NUAq_SP","FlowPro_NUAq_SP", 6, 0.5, 6.5,"s");
+  fHistProNUAq->GetXaxis()->SetBinLabel( 1,"<<sin(#Phi_{a})>>");
+  fHistProNUAq->GetXaxis()->SetBinLabel( 2,"<<cos(#Phi_{a})>>");
+  fHistProNUAq->GetXaxis()->SetBinLabel( 3,"<<sin(#Phi_{b})>>");
+  fHistProNUAq->GetXaxis()->SetBinLabel( 4,"<<cos(#Phi_{b})>>");
+  fHistProNUAq->GetXaxis()->SetBinLabel( 5,"<<sin(#Phi_{t})>>");
+  fHistProNUAq->GetXaxis()->SetBinLabel( 6,"<<cos(#Phi_{t})>>");
+  nuaRelated->Add(fHistProNUAq);
+
+  fHistSumOfWeights = new TH1D("Flow_SumOfWeights_SP","Flow_SumOfWeights_SP",2,0.5, 2.5);
+  fHistSumOfWeights->GetXaxis()->SetBinLabel( 1,"#Sigma Na*Nb");
+  fHistSumOfWeights->GetXaxis()->SetBinLabel( 2,"#Sigma (Na*Nb)^2");
+  errorRelated->Add(fHistSumOfWeights);
+
+  TString sPOI[2] = {"RP","POI"}; // backward compatibility
+  TString sEta[2] = {"Pt","eta"}; // backward compatibility
+  TString sTitle[2] = {"p_{T} [GeV]","#eta"};
+  TString sWeights[3] = {"uQ","uQuQ","uQQaQb"};
+  Int_t iNbins[2];
+  Double_t dMin[2], dMax[2];
+  iNbins[0] = AliFlowCommonConstants::GetMaster()->GetNbinsPt();
+  iNbins[1] = AliFlowCommonConstants::GetMaster()->GetNbinsEta();
+  dMin[0]   = AliFlowCommonConstants::GetMaster()->GetPtMin();
+  dMin[1]   = AliFlowCommonConstants::GetMaster()->GetEtaMin();
+  dMax[0]   = AliFlowCommonConstants::GetMaster()->GetPtMax();
+  dMax[1]   = AliFlowCommonConstants::GetMaster()->GetEtaMax();
+  for(Int_t iPOI=0; iPOI!=2; ++iPOI) 
+    for(Int_t iSpace=0; iSpace!=2; ++iSpace) {
+    // uQ
+    fHistProUQ[iPOI][iSpace] = new TProfile( Form( "FlowPro_UQ_%s%s_SP", sEta[iSpace].Data(), sPOI[iPOI].Data() ),
+                                       Form( "FlowPro_UQ%s%s_SP", sEta[iSpace].Data(), sPOI[iPOI].Data() ),
+                                       iNbins[iSpace], dMin[iSpace], dMax[iSpace], "s");
+    fHistProUQ[iPOI][iSpace]->SetXTitle(sTitle[iSpace].Data());
+    fHistProUQ[iPOI][iSpace]->SetYTitle("<uQ>");
+    uQRelated->Add(fHistProUQ[iPOI][iSpace]);
+
+    // NUAu
+    fHistProNUAu[iPOI][iSpace][0] = new TProfile( Form("FlowPro_NUAu_%s%s_IM_SP", sEta[iSpace].Data(), sPOI[iPOI].Data() ),
+                                           Form("FlowPro_NUAu_%s%s_IM_SP", sEta[iSpace].Data(), sPOI[iPOI].Data() ),
+                                           iNbins[iSpace], dMin[iSpace], dMax[iSpace]);
+    fHistProNUAu[iPOI][iSpace][0]->SetXTitle(sTitle[iSpace].Data());
+    nuaRelated->Add(fHistProNUAu[iPOI][iSpace][0]);
+    fHistProNUAu[iPOI][iSpace][1] = new TProfile( Form("FlowPro_NUAu_%s%s_RE_SP", sEta[iSpace].Data(), sPOI[iPOI].Data() ),
+                                           Form("FlowPro_NUAu_%s%s_RE_SP", sEta[iSpace].Data(), sPOI[iPOI].Data() ),
+                                           iNbins[iSpace], dMin[iSpace], dMax[iSpace]);
+    fHistProNUAu[iPOI][iSpace][1]->SetXTitle(sTitle[iSpace].Data());
+    nuaRelated->Add(fHistProNUAu[iPOI][iSpace][1]);
+
+    // uQ QaQb
+    fHistProUQQaQb[iPOI][iSpace] = new TProfile( Form("FlowPro_UQQaQb_%s%s_SP", sEta[iSpace].Data(), sPOI[iPOI].Data() ),
+                                           Form("FlowPro_UQQaQb_%s%s_SP", sEta[iSpace].Data(), sPOI[iPOI].Data() ),
+                                           iNbins[iSpace], dMin[iSpace], dMax[iSpace]);
+    fHistProUQQaQb[iPOI][iSpace]->SetXTitle(sTitle[iSpace].Data());
+    fHistProUQQaQb[iPOI][iSpace]-> SetYTitle("<Qu QaQb>");
+    errorRelated->Add(fHistProUQQaQb[iPOI][iSpace]);
+
+    // uWeights
+    for(Int_t i=0; i!=3; ++i) {
+      fHistSumOfWeightsu[iPOI][iSpace][i] = new TH1D( Form("Flow_SumOfWeights_%s%s_%s_SP",sWeights[i].Data(),sPOI[iPOI].Data(),sEta[iSpace].Data()),
+                                                      Form("Flow_SumOfWeights_%s%s_%s_SP",sWeights[i].Data(),sPOI[iPOI].Data(),sEta[iSpace].Data()),
+                                                      iNbins[iSpace], dMin[iSpace], dMax[iSpace]);
+      fHistSumOfWeightsu[iPOI][iSpace][i]->SetYTitle(sWeights[i].Data());
+      fHistSumOfWeightsu[iPOI][iSpace][i]->SetXTitle(sTitle[iSpace].Data());
+      errorRelated->Add(fHistSumOfWeightsu[iPOI][iSpace][i]);
+    }
+  }
+  //weights
+  if(fUsePhiWeights) {
+    if(!fWeightsList) {
+      printf( "WARNING: fWeightsList is NULL in the Scalar Product method.\n" );
+      exit(0);
+    }
+    fPhiWeightsSub[0] = dynamic_cast<TH1F*>
+                        (fWeightsList->FindObject("phi_weights_sub0"));
+    if(!fPhiWeightsSub[0]) {
+      printf( "WARNING: phi_weights_sub0 not found in the Scalar Product method.\n" );
+      exit(0);  
+    }
+    nuaRelated->Add( fPhiWeightsSub[0] );
+    fPhiWeightsSub[1] = dynamic_cast<TH1F*>
+                      (fWeightsList->FindObject("phi_weights_sub1"));
+    if(!fPhiWeightsSub[1]) {
+      printf( "WARNING: phi_weights_sub1 not found in the Scalar Product method.\n" );
+      exit(0);  
+    }
+    nuaRelated->Add( fPhiWeightsSub[1] );
+  }
+
+  if(!fMinimalBook) {
+    fHistProQNorm = new TProfile("FlowPro_QNorm_SP","FlowPro_QNorm_SP",       1,0.5,1.5,"s");
+    fHistProQNorm->SetYTitle("<|Qa+Qb|>");
+    tQARelated->Add(fHistProQNorm);
+
+    fHistProQaQb  = new TProfile("FlowPro_QaQb_SP","FlowPro_QaQb_SP",         1,0.5,1.5,"s");
+    fHistProQaQb->SetYTitle("<QaQb>");
+    tQARelated->Add(fHistProQaQb);
+
+    fHistProQaQbM = new TProfile("FlowPro_QaQbvsM_SP","FlowPro_QaQbvsM_SP",1000,0.0,10000);
+    fHistProQaQbM->SetYTitle("<QaQb>");
+    fHistProQaQbM->SetXTitle("M");
+    fHistProQaQbM->Sumw2();
+    tQARelated->Add(fHistProQaQbM);
+
+    fHistMaMb = new TH2D("Flow_MavsMb_SP","Flow_MavsMb_SP",100,0.,100.,100,0.,100.);
+    fHistMaMb->SetYTitle("Ma");
+    fHistMaMb->SetXTitle("Mb");
+    tQARelated->Add(fHistMaMb);
+
+    fHistQNormQaQbNorm = new TH2D("Flow_QNormvsQaQbNorm_SP","Flow_QNormvsQaQbNorm_SP",88,-1.1,1.1,22,0.,1.1);
+    fHistQNormQaQbNorm->SetYTitle("|Q/Mq|");
+    fHistQNormQaQbNorm->SetXTitle("QaQb/MaMb");
+    tQARelated->Add(fHistQNormQaQbNorm);
+
+    fHistQaNormMa = new TH2D("Flow_QaNormvsMa_SP","Flow_QaNormvsMa_SP",100,0.,100.,22,0.,1.1);
+    fHistQaNormMa->SetYTitle("|Qa/Ma|");
+    fHistQaNormMa->SetXTitle("Ma");
+    tQARelated->Add(fHistQaNormMa);
+
+    fHistQbNormMb = new TH2D("Flow_QbNormvsMb_SP","Flow_QbNormvsMb_SP",100,0.,100.,22,0.,1.1);
+    fHistQbNormMb->SetYTitle("|Qb/Mb|");
+    fHistQbNormMb->SetXTitle("Mb");
+    tQARelated->Add(fHistQbNormMb);
+
+    fResolution = new TH1D("Flow_resolution_SP","Flow_resolution_SP",100,-1.0,1.0);
+    fResolution->SetYTitle("dN/d(Cos2(#phi_a - #phi_b))");
+    fResolution->SetXTitle("Cos2(#phi_a - #phi_b)");
+    tQARelated->Add(fResolution);
+
+    fHistQaQb = new TH1D("Flow_QaQb_SP","Flow_QaQb_SP",200,-100.,100.);
+    fHistQaQb->SetYTitle("dN/dQaQb");
+    fHistQaQb->SetXTitle("dQaQb");
+    tQARelated->Add(fHistQaQb);
+
+    fHistQaQbCos = new TH1D("Flow_QaQbCos_SP","Flow_QaQbCos_SP",63,0.,TMath::Pi());
+    fHistQaQbCos->SetYTitle("dN/d#phi");
+    fHistQaQbCos->SetXTitle("#phi");
+    tQARelated->Add(fHistQaQbCos);
+    
+    fHistNumberOfSubtractedDaughters = new TH1I("NumberOfSubtractedDaughtersInQ",";daughters;counts;Number of daughters subtracted from Q",5,0.,5.);
+    tQARelated->Add(fHistNumberOfSubtractedDaughters);
+  }
+
+  fHistList->Add(uQRelated);
+  fHistList->Add(nuaRelated);
+  fHistList->Add(errorRelated);
+  fHistList->Add(tQARelated);
+
+  TH1::AddDirectory(oldHistAddStatus);
+}
+
+//-----------------------------------------------------------------------
+void AliFlowAnalysisWithScalarProductDev::Make(AliFlowEventSimple* anEvent) {
+  // Scalar Product method
+  if (!anEvent) return; // for coverity
+
+  // Get Q vectors for the subevents
+  AliFlowVector* vQarray = new AliFlowVector[2];
+  if (fUsePhiWeights)
+    anEvent->Get2Qsub(vQarray,fHarmonic,fWeightsList,kTRUE);
+  else
+    anEvent->Get2Qsub(vQarray,fHarmonic);
+  // Subevent a
+  AliFlowVector vQa = vQarray[0];
+  // Subevent b
+  AliFlowVector vQb = vQarray[1];
+  delete [] vQarray;
+
+  Double_t dMa = vQa.GetMult();
+  if( dMa < 2 ) return;
+  Double_t dMb = vQb.GetMult();
+  if( dMb < 2 ) return;
+
+  //Normalizing: weight the Q vectors for the subevents
+  Double_t dNa = fNormalizationType ? dMa: vQa.Mod(); // SP corresponds to true
+  Double_t dNb = fNormalizationType ? dMb: vQb.Mod(); // SP corresponds to true
+  Double_t dWa = fNormalizationType ? dMa: 1; // SP corresponds to true
+  Double_t dWb = fNormalizationType ? dMb: 1; // SP corresponds to true
+
+  //Scalar product of the two subevents vectors
+  Double_t dQaQb = (vQa*vQb);
+
+  //printf("==============\n");
+  //printf("vQa: { %f, %f }\n",vQa.X(),vQa.Y());
+  //printf("QaQb/|Qa||Qb|: %f\n",dQaQb/vQa.Mod()/vQb.Mod());
+  //printf("QaQb/|Ma||Mb|: %f\n",dQaQb/dMa/dMb);
+  
+  //      01    10     11   <===   fTotalQVector
+  // Q ?= Qa or Qb or QaQb
+  AliFlowVector vQm;
+  vQm.Set(0.0,0.0);
+  Double_t dNq=0;
+  if( (fTotalQvector%2)>0 ) { // 01 or 11
+    vQm += vQa;
+    dNq += dMa;
+  }
+  if( fTotalQvector>1 ) { // 10 or 11
+    vQm += vQb;
+    dNq += dMb;
+  }
+  Double_t dWq = fNormalizationType ? dNq: 1; // SP corresponds to true
+  dNq = fNormalizationType ? dNq: vQm.Mod(); // SP corresponds to true
+
+  //fill some EP control histograms
+//  if( dNa < 0 || dNb < 0 || dNq < 0) return; THIS CAN NEVER HAPPEN
+  Double_t HistProQaQbNorm1[] = {dQaQb/dNa/dNb,dWa*dWb};  //Fill (QaQb/NaNb) with weight (WaWb).
+  //needed for the error calculation:
+  Double_t HistSumOfWeights1(dWa*dWb);
+  Double_t HistSumOfWeights2(pow(dWa*dWb,2.));
+  //needed for correcting non-uniform acceptance: 
+  Double_t HistProNUAq1[] = {vQa.Y()/dNa,dWa}; // to get <<sin(phi_a)>>
+  Double_t HistProNUAq2[] = {vQa.X()/dNa,dWa}; // to get <<cos(phi_a)>>
+  Double_t HistProNUAq3[] = {vQb.Y()/dNb,dWb}; // to get <<sin(phi_b)>>
+  Double_t HistProNUAq4[] = {vQb.X()/dNb,dWb}; // to get <<cos(phi_b)>>
+  Double_t HistProNUAq5[] = {vQm.Y()/dNq,dWq};
+  Double_t HistProNUAq6[] = {vQm.X()/dNq,dWq};
+
+ //loop over the tracks of the event
+  AliFlowTrackSimple*   pTrack = NULL; 
+  Int_t iNumberOfTracks = anEvent->NumberOfTracks(); 
+  for (Int_t i=0;i<iNumberOfTracks;i++) {
+    pTrack = anEvent->GetTrack(i) ; 
+    if (!pTrack) continue;
+    Double_t dPhi = pTrack->Phi();
+    Double_t dPt  = pTrack->Pt();
+    Double_t dEta = pTrack->Eta();
+
+    //calculate vU
+    TVector2 vU;
+    Double_t dUX = TMath::Cos(fHarmonic*dPhi);
+    Double_t dUY = TMath::Sin(fHarmonic*dPhi);
+    vU.Set(dUX,dUY);
+
+    //      01    10     11   <===   fTotalQVector
+    // Q ?= Qa or Qb or QaQb
+    vQm.Set(0.0,0.0); //start the loop fresh
+    Double_t dMq=0;
+    if( (fTotalQvector%2)>0 ) { // 01 or 11
+      vQm += vQa;
+      dMq += dMa;
+    }
+    if( fTotalQvector>1 ) { // 10 or 11
+      vQm += vQb;
+      dMq += dMb;
+    }
+
+    //remove track if in subevent
+    for(Int_t inSubEvent=0; inSubEvent<2; ++inSubEvent) {
+      if( !pTrack->InSubevent( inSubEvent ) )
+        continue;
+      if(inSubEvent==0)
+        if( (fTotalQvector%2)!=1 )
+          continue;
+      if(inSubEvent==1)
+        if( fTotalQvector<2 )
+          continue;
+      Double_t dW=1;
+      //Double_t dPhiCenter = dPhi;
+      //if phi weights are used
+      if(fUsePhiWeights && fPhiWeightsSub[inSubEvent]) {
+        Int_t iNBinsPhiSub = fPhiWeightsSub[inSubEvent]->GetNbinsX();
+        Int_t phiBin = 1+(Int_t)(TMath::Floor(dPhi*iNBinsPhiSub/TMath::TwoPi()));
+        dW = fPhiWeightsSub[inSubEvent]->GetBinContent(phiBin);
+        //dPhiCenter = fPhiWeightsSub[inSubEvent]->GetBinCenter(phiBin);
+      }
+      //Double_t dQmX = vQm.X() - dW*(pTrack->Weight())* TMath::Cos(fHarmonic*dPhiCenter);
+      //Double_t dQmY = vQm.Y() - dW*(pTrack->Weight())* TMath::Sin(fHarmonic*dPhiCenter);
+      //vQm.Set(dQmX,dQmY);
+      
+      //subtrack the track from the Q vector, but only if it was used to construct this
+      //Q vector: i.e. check wether it has the same tags and is in the same subevent
+      //this is especially important for the daughters (as for the mother it is already checked)
+      Int_t numberOfsubtractedDaughters=vQm.SubtractTrackWithDaughters(pTrack,dW);
+      
+      if(!fMinimalBook) {
+        fHistNumberOfSubtractedDaughters->Fill(numberOfsubtractedDaughters);
+      }
+
+      dMq = dMq-dW*pTrack->Weight();
+    }
+    dNq = fNormalizationType ? dMq : vQm.Mod();
+//    if(dNq < 0) return;
+    dWq = fNormalizationType ? dMq : 1;
+
+    //Filling QA (for compatibility with previous version)
+    if(!fMinimalBook) {
+//     if(dMa < 0 || dMb < 0 || vQa.Mod() < 0 || vQb.Mod() < 0 || dMq < 0) return;
+      fHistProQaQb->Fill(1,vQa*vQb,1);
+      fHistProQaQbM->Fill(anEvent->GetNumberOfRPs()+0.5,(vQa*vQb)/dMa/dMb,dMa*dMb);
+      fHistQaQbCos->Fill(TMath::ACos((vQa*vQb)/vQa.Mod()/vQb.Mod()));
+      fResolution->Fill( TMath::Cos( vQa.Phi()-vQb.Phi() ) );
+      fHistQaQb->Fill(vQa*vQb);
+      fHistMaMb->Fill(dMb,dMa);
+      fHistProQNorm->Fill(1,vQm.Mod()/dMq,dMq);
+      fHistQNormQaQbNorm->Fill((vQa*vQb)/dMa/dMb,vQm.Mod()/dMq);
+      fHistQaNormMa->Fill(dMa,vQa.Mod()/dMa);
+      fHistQbNormMb->Fill(dMb,vQb.Mod()/dMb);
+    }
+
+  //fill some EP control histograms
+  fHistProQaQbNorm->Fill(1.,HistProQaQbNorm1[0], HistProQaQbNorm1[1]);  //Fill (QaQb/NaNb) with weight (WaWb).
+  //needed for the error calculation:
+  fHistSumOfWeights -> Fill(1.,HistSumOfWeights1);
+  fHistSumOfWeights -> Fill(2.,HistSumOfWeights2);
+  //needed for correcting non-uniform acceptance: 
+  fHistProNUAq->Fill(1.,HistProNUAq1[0],HistProNUAq1[1]); // to get <<sin(phi_a)>>
+  fHistProNUAq->Fill(2.,HistProNUAq2[0],HistProNUAq2[1]); // to get <<cos(phi_a)>>
+  fHistProNUAq->Fill(3.,HistProNUAq3[0],HistProNUAq3[1]); // to get <<sin(phi_b)>>
+  fHistProNUAq->Fill(4.,HistProNUAq4[0],HistProNUAq4[1]); // to get <<cos(phi_b)>>
+  fHistProNUAq->Fill(5.,HistProNUAq5[0],HistProNUAq5[1]);
+  fHistProNUAq->Fill(6.,HistProNUAq6[0],HistProNUAq6[1]);
+
+    Double_t dUQ = vU*vQm;
+
+    //fill the profile histograms
+    for(Int_t iPOI=0; iPOI!=2; ++iPOI) {
+      if( (iPOI==0)&&(!pTrack->InRPSelection()) )
+        continue;
+      if( (iPOI==1)&&(!pTrack->InPOISelection(fPOItype)) )
+        continue;
+      fHistProUQ[iPOI][0]->Fill(dPt ,dUQ/dNq,dWq); //Fill (uQ/Nq') with weight (Nq')
+      fHistProUQ[iPOI][1]->Fill(dEta,dUQ/dNq,dWq); //Fill (uQ/Nq') with weight (Nq')
+      //needed for the error calculation:
+      fHistProUQQaQb[iPOI][0]-> Fill(dPt ,dUQ/dNq*dQaQb/dNa/dNb,dWq*dWa*dWb); //Fill [Qu/Nq']*[QaQb/NaNb] with weight (Nq')NaNb
+      fHistProUQQaQb[iPOI][1]-> Fill(dEta,dUQ/dNq*dQaQb/dNa/dNb,dWq*dWa*dWb); //Fill [Qu/Nq']*[QaQb/NaNb] with weight (Nq')NaNb
+      fHistSumOfWeightsu[iPOI][0][0]->Fill(dPt ,dWq);        // sum of Nq'     
+      fHistSumOfWeightsu[iPOI][0][1]->Fill(dPt ,pow(dWq,2.));// sum of Nq'^2     
+      fHistSumOfWeightsu[iPOI][0][2]->Fill(dPt ,dWq*dWa*dWb);// sum of Nq'*Na*Nb     
+      fHistSumOfWeightsu[iPOI][1][0]->Fill(dEta,dWq);        // sum of Nq'     
+      fHistSumOfWeightsu[iPOI][1][1]->Fill(dEta,pow(dWq,2.));// sum of Nq'^2     
+      fHistSumOfWeightsu[iPOI][1][2]->Fill(dEta,dNq*dWa*dWb);// sum of Nq'*Na*Nb
+      //NUA:
+      fHistProNUAu[iPOI][0][0]->Fill(dPt,dUY,1.); //sin u
+      fHistProNUAu[iPOI][0][1]->Fill(dPt,dUX,1.); //cos u
+      fHistProNUAu[iPOI][1][0]->Fill(dEta,dUY,1.); //sin u
+      fHistProNUAu[iPOI][1][1]->Fill(dEta,dUX,1.); //cos u
+    }
+  }//loop over tracks
+
+  //fill control histograms
+  if (fUsePhiWeights) {
+    fCommonHists->FillControlHistograms(anEvent,fWeightsList,kTRUE);
+  } else {
+    fCommonHists->FillControlHistograms(anEvent);
+  }
+
+}
+
+//--------------------------------------------------------------------  
+void AliFlowAnalysisWithScalarProductDev::GetOutputHistograms(TList *outputListHistos){
+  //get pointers to all output histograms (called before Finish())
+  fHistList = outputListHistos;
+
+  fCommonHists = (AliFlowCommonHist*) fHistList->FindObject("AliFlowCommonHist_SP");
+  //  fCommonHistsuQ = (AliFlowCommonHist*) fHistList->FindObject("AliFlowCommonHist_uQ");
+  fCommonHistsRes = (AliFlowCommonHistResults*) fHistList->FindObject("AliFlowCommonHistResults_SP");
+  fHistProConfig = (TProfile*) fHistList->FindObject("FlowPro_Flags_SP");
+  if(!fHistProConfig) printf("Error loading fHistProConfig\n");
+  TList *uQ = (TList*) fHistList->FindObject("uQ");
+  TList *nua = (TList*) fHistList->FindObject("NUA");
+  TList *error = (TList*) fHistList->FindObject("error");
+
+  fHistProQaQbNorm = (TProfile*) error->FindObject("FlowPro_QaQbNorm_SP");
+  if(!fHistProQaQbNorm) printf("Error loading fHistProQaQbNorm\n");
+  fHistProNUAq = (TProfile*) nua->FindObject("FlowPro_NUAq_SP");
+  if(!fHistProNUAq) printf("Error loading fHistProNUAq\n");
+  fHistSumOfWeights = (TH1D*) error->FindObject("Flow_SumOfWeights_SP");
+  if(!fHistSumOfWeights) printf("Error loading fHistSumOfWeights\n");
+
+  TString sPOI[2] = {"RP","POI"};
+  TString sEta[2] = {"Pt","eta"};
+  TString sWeights[3] = {"uQ","uQuQ","uQQaQb"};
+  for(Int_t iPOI=0; iPOI!=2; ++iPOI) for(Int_t iSpace=0; iSpace!=2; ++iSpace) {
+    fHistProUQ[iPOI][iSpace] = (TProfile*) uQ->FindObject( Form( "FlowPro_UQ_%s%s_SP", sEta[iSpace].Data(), sPOI[iPOI].Data() ) );
+    if(!fHistProUQ[iPOI][iSpace]) printf("Error loading fHistProUQ[%d][%d]\n",iPOI,iSpace);
+    fHistProNUAu[iPOI][iSpace][0] = (TProfile*) nua->FindObject( Form("FlowPro_NUAu_%s%s_IM_SP", sEta[iSpace].Data(), sPOI[iPOI].Data() ) );
+    if(!fHistProNUAu[iPOI][iSpace][0]) printf("Error loading fHistProNUAu[%d][%d][0]\n",iPOI,iSpace);
+    fHistProNUAu[iPOI][iSpace][1] = (TProfile*) nua->FindObject( Form("FlowPro_NUAu_%s%s_RE_SP", sEta[iSpace].Data(), sPOI[iPOI].Data() ) );
+    if(!fHistProNUAu[iPOI][iSpace][1]) printf("Error loading fHistProNUAu[%d][%d][1]\n",iPOI,iSpace);
+    fHistProUQQaQb[iPOI][iSpace] = (TProfile*) error->FindObject( Form("FlowPro_UQQaQb_%s%s_SP", sEta[iSpace].Data(), sPOI[iPOI].Data() ) );
+    for(Int_t i=0; i!=3; ++i){
+      fHistSumOfWeightsu[iPOI][iSpace][i] = (TH1D*) error->FindObject( Form("Flow_SumOfWeights_%s%s_%s_SP",sWeights[i].Data(),sPOI[iPOI].Data(),sEta[iSpace].Data()) );
+      if(!fHistSumOfWeightsu[iPOI][iSpace][i]) printf("Error loading fHistSumOfWeightsu[%d][%d][%d]\n",iPOI,iSpace,i);
+    }
+  }
+  if(fHistProConfig) {
+    fApplyCorrectionForNUA = (Int_t) fHistProConfig->GetBinContent(1);
+    fNormalizationType  = (Int_t) fHistProConfig->GetBinContent(2);
+    fUsePhiWeights = (Int_t) fHistProConfig->GetBinContent(3);
+    fHarmonic = (Int_t) fHistProConfig->GetBinContent(4);
+  }
+}            
+
+//--------------------------------------------------------------------            
+void AliFlowAnalysisWithScalarProductDev::Finish() {
+  //calculate flow and fill the AliFlowCommonHistResults
+  printf("AliFlowAnalysisWithScalarProductDev::Finish()\n");
+  
+  // access harmonic:
+  fApplyCorrectionForNUA = (Int_t)(fHistProConfig->GetBinContent(1));
+  fNormalizationType = (Int_t)(fHistProConfig->GetBinContent(2));
+  fHarmonic = (Int_t)(fHistProConfig->GetBinContent(4));
+  
+  printf("*************************************\n");
+  printf("*************************************\n");
+  printf("      Integrated flow from           \n");
+  printf("         Scalar Product              \n\n");
+  if(!fNormalizationType)
+    printf("          (BehaveAsEP)               \n\n");
+  
+  //Calculate reference flow
+  //----------------------------------
+  //weighted average over (QaQb/NaNb) with weight (NaNb)
+  Double_t dEntriesQaQb = fHistProQaQbNorm->GetEntries();
+  if( dEntriesQaQb < 1 )
+    return;
+  Double_t dQaQb  = fHistProQaQbNorm->GetBinContent(1);
+  if( dQaQb < 0 )
+    return;
+  Double_t dSpreadQaQb = fHistProQaQbNorm->GetBinError(1);
+
+  //NUA qq:
+  Double_t dImQa = fHistProNUAq->GetBinContent(1);
+  Double_t dReQa = fHistProNUAq->GetBinContent(2);
+  Double_t dImQb = fHistProNUAq->GetBinContent(3);
+  Double_t dReQb = fHistProNUAq->GetBinContent(4);
+  if(fApplyCorrectionForNUA) 
+    dQaQb = dQaQb - dImQa*dImQb - dReQa*dReQb; 
+  printf("QaQb = %f +- %f\n", dQaQb, (dSpreadQaQb/TMath::Sqrt(dEntriesQaQb)) );
+  Double_t dV = TMath::Sqrt(dQaQb);
+
+  printf("ResSub = %f\n", dV );
+  printf("fTotalQvector %d \n",fTotalQvector);
+  if(!fNormalizationType) {
+    if(fTotalQvector>2) {
+      dV = ComputeResolution( TMath::Sqrt2()*FindXi(dV,1e-6) );
+      printf("An estimate of the event plane resolution is: %f\n", dV );
+    }
+  }
+  printf("ResTot = %f\n", dV );
+  //statistical error of dQaQb: 
+  //  statistical error = term1 * spread * term2:
+  //  term1 = sqrt{sum_{i=1}^{N} w^2}/(sum_{i=1}^{N} w)
+  //  term2 = 1/sqrt(1-term1^2) 
+  Double_t dSumOfLinearWeights = fHistSumOfWeights->GetBinContent(1);
+  Double_t dSumOfQuadraticWeights = fHistSumOfWeights->GetBinContent(2);
+  Double_t dTerm1 = 0.;
+  Double_t dTerm2 = 0.;
+  if(dSumOfLinearWeights)
+    dTerm1 = pow(dSumOfQuadraticWeights,0.5)/dSumOfLinearWeights;
+  if(1.-pow(dTerm1,2.) > 0.)
+    dTerm2 = 1./pow(1-pow(dTerm1,2.),0.5);
+  Double_t dStatErrorQaQb = dTerm1 * dSpreadQaQb * dTerm2;
+  Double_t dVerr = 0.;
+  if(dQaQb > 0.)
+    dVerr = (1./(2.*pow(dQaQb,0.5)))*dStatErrorQaQb;
+  fCommonHistsRes->FillIntegratedFlow(dV,dVerr);
+  printf("v%d(subevents) = %f +- %f\n",fHarmonic,dV,dVerr);
+
+  Int_t iNbins[2];
+  iNbins[0] = AliFlowCommonConstants::GetMaster()->GetNbinsPt();
+  iNbins[1] = AliFlowCommonConstants::GetMaster()->GetNbinsEta();
+   
+  //Calculate differential flow and integrated flow (RP, POI)
+  //---------------------------------------------------------
+  //v as a function of eta for RP selection
+  for(Int_t iRFPorPOI=0; iRFPorPOI != 2; ++iRFPorPOI)
+  for(Int_t iPTorETA=0; iPTorETA != 2; ++iPTorETA)
+  for(Int_t b=1; b != iNbins[iPTorETA]+1; ++b) {
+    Double_t duQpro = fHistProUQ[iRFPorPOI][iPTorETA]->GetBinContent(b);
+    if(fApplyCorrectionForNUA)
+      duQpro = duQpro 
+       - fHistProNUAu[iRFPorPOI][iPTorETA][1]->GetBinContent(b)*fHistProNUAq->GetBinContent(6)
+       - fHistProNUAu[iRFPorPOI][iPTorETA][0]->GetBinContent(b)*fHistProNUAq->GetBinContent(5);
+    Double_t dv2pro = -999.;
+    if( TMath::Abs(dV!=0.) ) { dv2pro = duQpro/dV; }
+    //calculate the statistical error
+    Double_t dv2ProErr = CalculateStatisticalError(iRFPorPOI, iPTorETA, b, dStatErrorQaQb);
+    if( (iRFPorPOI==0)&&(iPTorETA==0) )
+      fCommonHistsRes->FillDifferentialFlowPtRP(  b, dv2pro, dv2ProErr);   
+    if( (iRFPorPOI==0)&&(iPTorETA==1) )
+      fCommonHistsRes->FillDifferentialFlowEtaRP( b, dv2pro, dv2ProErr);   
+    if( (iRFPorPOI==1)&&(iPTorETA==0) )
+      fCommonHistsRes->FillDifferentialFlowPtPOI( b, dv2pro, dv2ProErr);   
+    if( (iRFPorPOI==1)&&(iPTorETA==1) )
+      fCommonHistsRes->FillDifferentialFlowEtaPOI(b, dv2pro, dv2ProErr);
+    //printf("POI %d | PT %d >>> %f +- %f\n",iRFPorPOI,iPTorETA,dv2pro,dv2ProErr);
+  }
+  
+  printf("\n");
+  printf("*************************************\n");
+  printf("*************************************\n");
+}
+
+//-----------------------------------------------------------------------
+void AliFlowAnalysisWithScalarProductDev::WriteHistograms(TDirectoryFile *outputFileName) const {
+ //store the final results in output .root file
+ outputFileName->Add(fHistList);
+ outputFileName->Write(outputFileName->GetName(), TObject::kSingleKey);
+}
+
+//--------------------------------------------------------------------            
+Double_t AliFlowAnalysisWithScalarProductDev::CalculateStatisticalError(Int_t iRFPorPOI, Int_t iPTorETA, Int_t b, Double_t aStatErrorQaQb) const {
+  //calculate the statistical error for differential flow for bin b
+  Double_t duQproSpread = fHistProUQ[iRFPorPOI][iPTorETA]->GetBinError(b);
+  Double_t sumOfMq = fHistSumOfWeightsu[iRFPorPOI][iPTorETA][0]->GetBinContent(b);
+  Double_t sumOfMqSquared = fHistSumOfWeightsu[iRFPorPOI][iPTorETA][1]->GetBinContent(b);
+  Double_t dQaQb = fHistProQaQbNorm->GetBinContent(1);
+
+  //non-isotropic terms:  
+  if(fApplyCorrectionForNUA) {
+    Double_t dImQa = fHistProNUAq->GetBinContent(1);  // <<sin(phi_a)>>
+    Double_t dReQa = fHistProNUAq->GetBinContent(2);  // <<cos(phi_a)>>
+    Double_t dImQb = fHistProNUAq->GetBinContent(3);  // <<sin(phi_b)>>
+    Double_t dReQb = fHistProNUAq->GetBinContent(4);  // <<cos(phi_b)>>
+    dQaQb = dQaQb - dImQa*dImQb - dReQa*dReQb; 
+  }
+
+  Double_t dTerm1 = 0.;
+  Double_t dTerm2 = 0.;
+  if(sumOfMq) {
+    dTerm1 = (pow(sumOfMqSquared,0.5)/sumOfMq);
+  } 
+  if(1.-pow(dTerm1,2.)>0.) {
+    dTerm2 = 1./pow(1.-pow(dTerm1,2.),0.5); 
+  }
+  Double_t duQproErr = dTerm1*duQproSpread*dTerm2;
+  // covariances:
+  Double_t dTerm1Cov = fHistSumOfWeightsu[iRFPorPOI][iPTorETA][2]->GetBinContent(b);
+  Double_t dTerm2Cov = fHistSumOfWeights->GetBinContent(1);
+  Double_t dTerm3Cov = sumOfMq;
+  Double_t dWeightedCovariance = 0.;
+  if(dTerm2Cov*dTerm3Cov>0.) {
+    Double_t dDenominator = 1.-dTerm1Cov/(dTerm2Cov*dTerm3Cov);
+    Double_t dPrefactor = dTerm1Cov/(dTerm2Cov*dTerm3Cov);
+    if(dDenominator!=0) {
+      Double_t dCovariance = ( fHistProUQQaQb[iRFPorPOI][iPTorETA]->GetBinContent(b)-dQaQb*fHistProUQ[iRFPorPOI][iPTorETA]->GetBinContent(b))/dDenominator;
+      dWeightedCovariance = dCovariance*dPrefactor; 
+    }
+  }
+  Double_t dv2ProErr = 0.; // final statitical error: 
+  if(dQaQb>0.) {
+    Double_t dv2ProErrorSquared = (1./4.)*pow(dQaQb,-3.)*
+      (pow(fHistProUQ[iRFPorPOI][iPTorETA]->GetBinContent(b),2.)*pow(aStatErrorQaQb,2.)
+       + 4.*pow(dQaQb,2.)*pow(duQproErr,2.)
+       - 4.*dQaQb*fHistProUQ[iRFPorPOI][iPTorETA]->GetBinContent(b)*dWeightedCovariance);
+    if(dv2ProErrorSquared>0.) dv2ProErr = pow(dv2ProErrorSquared,0.5);
+  }
+  return dv2ProErr;
+}
+
+Double_t AliFlowAnalysisWithScalarProductDev::ComputeResolution( Double_t x ) const {
+  // Computes resolution for Event Plane method
+  if(x > 51.3) {
+    printf("Warning: Estimation of total resolution might be WRONG. Please check!");
+    return 0.99981;
+  }
+  Double_t a = x*x/4;
+  Double_t b = TMath::Exp(-a)*TMath::BesselI0(a)+TMath::Exp(-a)*TMath::BesselI1(a);
+  return TMath::Sqrt(TMath::PiOver2())/2*x*b;
+}
+
+Double_t AliFlowAnalysisWithScalarProductDev::FindXi( Double_t res, Double_t prec ) const {
+  // Computes x(res) for Event Plane method
+  if(res > 0.99981) {
+    printf("Warning: Resolution for subEvent is high. You reached the precision limit.");
+    return 51.3;
+  }
+  int nSteps =0;
+  Double_t xtmp=0, xmin=0, xmax=51.3, rtmp=0, delta=2*prec;
+  while( delta > prec ) {
+    xtmp = 0.5*(xmin+xmax);
+    rtmp = ComputeResolution(xtmp);
+    delta = TMath::Abs( res-rtmp );
+    if(rtmp>res) xmax = xtmp;
+    if(rtmp<res) xmin = xtmp;
+    nSteps++;
+  }
+  return xtmp;
+}
+
diff --git a/PWG/FLOW/Base/AliFlowAnalysisWithScalarProductDev.h b/PWG/FLOW/Base/AliFlowAnalysisWithScalarProductDev.h
new file mode 100644 (file)
index 0000000..f403c71
--- /dev/null
@@ -0,0 +1,117 @@
+/* Copyright(c) 1998-1999, ALICE Experiment at CERN, All rights reserved. *
+ * See cxx source for full Copyright notice                               */
+
+/* $Id$ */
+
+
+#ifndef ALIFLOWANALYSISWITHSCALARPRODUCTDEV_H
+#define ALIFLOWANALYSISWITHSCALARPRODUCTDEV_H
+
+class AliFlowTrackSimple;
+class AliFlowEventSimple;
+class AliFlowCommonHist;
+class AliFlowCommonHistResults;
+
+#include  "TList.h"
+#include "AliFlowAnalysis.h"
+
+class TH1D;
+class TH1F;
+class TH2D;
+class TProfile;
+class TDirectoryFile;
+
+/////////////////////////////////////////////////////////////////////////////
+// Description: Maker to analyze Flow from the Event Plane method.
+//              Adaptation based on Scalar Product
+// authors: Naomi van der Kolk (kolk@nikhef.nl)
+//          Ante Bilandzic (anteb@nikhef.nl)
+// mods:    Carlos Perez (cperez@nikhef.nl)
+/////////////////////////////////////////////////////////////////////////////
+class AliFlowAnalysisWithScalarProductDev : public AliFlowAnalysis {
+ public:
+   AliFlowAnalysisWithScalarProductDev();            //default constructor
+   virtual  ~AliFlowAnalysisWithScalarProductDev();  //destructor
+   void Init();                                       //Define output objects
+   void Make(AliFlowEventSimple* anEvent);            //Main routine, called per event
+   void GetOutputHistograms(TList *outputListHistos); //Copy output objects from TList
+   void Finish();                                     //Fill results
+   void WriteHistograms(TDirectoryFile *outputFileName) const; //writes histograms locally (for OnTheFly)
+
+
+   // Setters forwarding the task configuration to this analysis object
+   void SetHarmonic(Int_t iHarmonic)          { fHarmonic = iHarmonic; }            // flow harmonic n
+   void SetApplyCorrectionForNUA(Bool_t iVal) { fApplyCorrectionForNUA = iVal?1:0; } // non-uniform acceptance correction on/off
+   void SetNormalizationType(Int_t iVal)      { fNormalizationType = iVal; }        // 0: EP mode, 1: SP mode
+   void SetDebug(Bool_t bVal)                 { fDebug = bVal; }                    // extra print statements
+   void SetBookOnlyBasicCCH(Bool_t bVal)           { fMinimalBook = bVal; }         // skip QA booking
+   void SetTotalQvector(Int_t iVal)           { fTotalQvector = iVal; }             // 1:Qa 2:Qb 3:Qa+Qb
+
+   void SetUsePhiWeights(Bool_t bVal)        { fUsePhiWeights = bVal; }
+   // NOTE: clones the input list, so this object owns its own copy of the weights
+   void SetWeightsList(TList* const aWeightsList)  { fWeightsList = (TList*)aWeightsList->Clone(); }
+  
+   // Getters for the output/control histograms; indices are
+   // iRFPorPOI (0: RFP, 1: POI) and iPTorETA (0: pt, 1: eta)
+   TList*    GetHistList()      const { return fHistList; }
+   TProfile* GetHistProConfig() const { return fHistProConfig; }
+   TProfile* GetHistProUQ(Int_t iRFPorPOI, Int_t iPTorETA) const { return fHistProUQ[iRFPorPOI][iPTorETA]; }
+   TProfile* GetHistProQaQbNorm() const  { return fHistProQaQbNorm; }
+   TProfile* GetHistProNUAq()     const  { return fHistProNUAq; }
+   TProfile* GetHistProNUAu(Int_t iRFPorPOI, Int_t iPTorETA, Int_t iIMorRE) const { return fHistProNUAu[iRFPorPOI][iPTorETA][iIMorRE]; }
+   TH1D*     GetHistSumOfWeights() const { return fHistSumOfWeights; }
+   TProfile* GetHistProUQQaQb( Int_t iRFPorPOI, Int_t iPTorETA ) const { return fHistProUQQaQb[iRFPorPOI][iPTorETA]; }
+   TH1D*     GetHistSumOfWeightsu(Int_t iRFPorPOI, Int_t iPTorETA, Int_t iWeight) const { return fHistSumOfWeightsu[iRFPorPOI][iPTorETA][iWeight]; }
+   AliFlowCommonHist*        GetCommonHists()    const { return fCommonHists; }
+   AliFlowCommonHistResults* GetCommonHistsRes() const { return fCommonHistsRes; }
+   
+ private:
+   AliFlowAnalysisWithScalarProductDev(const AliFlowAnalysisWithScalarProductDev& anAnalysis);            //copy constructor (not implemented)
+   AliFlowAnalysisWithScalarProductDev& operator=(const AliFlowAnalysisWithScalarProductDev& anAnalysis); //assignment operator (not implemented)
+   Double_t CalculateStatisticalError( Int_t RFPorPOI, Int_t PTorETA, Int_t bin, Double_t errV ) const; //statistical error on v_n per bin
+   Double_t ComputeResolution( Double_t x ) const; //event plane resolution R(chi)
+   Double_t FindXi( Double_t res, Double_t prec ) const; //inverse of ComputeResolution via bisection
+
+      
+   Int_t fDebug ;                // flag for analysis: more print statements
+   Bool_t fMinimalBook;          // flag to turn off QA and minimize FlowCommonHist
+   Int_t fUsePhiWeights;         // use phi weights
+   Int_t fApplyCorrectionForNUA; // apply correction for non-uniform acceptance
+   Int_t fHarmonic;              // harmonic 
+   Int_t fNormalizationType;     // 0: EP mode || 1: SP mode
+   Int_t fTotalQvector;          // 1:Qa 2:Qb 3:QaQb
+
+   TList*     fWeightsList;      // list holding input histograms with phi weights (owned clone, see SetWeightsList)
+   TList*     fHistList;         // list to hold all output histograms  
+   TProfile*  fHistProConfig;    // configuration values
+   TProfile*  fHistProQaQbNorm;  // average of QaQb
+   TH1D*      fHistSumOfWeights; // holds sum of Na*Nb and (Na*Nb)^2
+   TProfile*  fHistProNUAq;      // NUA related qq
+
+   //QAHists (only booked when fMinimalBook is off)
+   TProfile* fHistProQNorm; // QNorm
+   TProfile* fHistProQaQb; // QaQb
+   TProfile* fHistProQaQbM; // QaQb/MaMb
+   TH2D* fHistMaMb;         // MaMb
+   TH2D* fHistQNormQaQbNorm; // QNorm vs QaQbNorm
+   TH2D* fHistQaNormMa; // QaNorm Ma
+   TH2D* fHistQbNormMb; // QbNorm Mb
+   TH1D* fResolution; // Resolution
+   TH1D* fHistQaQb; // QaQb
+   TH1D* fHistQaQbCos; // QaQbCos
+   TH1I* fHistNumberOfSubtractedDaughters; //how many daughters were subtracted during Q calculation?
+
+   AliFlowCommonHist*        fCommonHists;    // control histograms
+   AliFlowCommonHist*        fCommonHistsuQ;  // control histograms
+   AliFlowCommonHistResults* fCommonHistsRes; // results histograms
+
+   // arrays indexed [RFP|POI][pt|eta] (and a third index where noted)
+   TH1F*      fPhiWeightsSub[2];           // histogram holding phi weights for subevents
+   TProfile*  fHistProUQ[2][2];            // uQ for RP|POI PT|ETA
+   TProfile*  fHistProUQQaQb[2][2];        // holds weighted average of <QuQaQb> for RP|POI PT|ETA
+   TH1D*      fHistSumOfWeightsu[2][2][3]; // holds sums of 0: Nq', 1: Nq'^2, 2: Nq'*Na*Nb
+   TProfile*  fHistProNUAu[2][2][2];          // NUA related qq for RP|POI PT|ETA, 0:Im|1:Re
+
+   ClassDef(AliFlowAnalysisWithScalarProductDev,1)  // class version
+};
+
+#endif
diff --git a/PWG/FLOW/Tasks/AliAnalysisTaskScalarProductDev.cxx b/PWG/FLOW/Tasks/AliAnalysisTaskScalarProductDev.cxx
new file mode 100644 (file)
index 0000000..e77a8b5
--- /dev/null
@@ -0,0 +1,195 @@
+/*************************************************************************
+* Copyright(c) 1998-2008, ALICE Experiment at CERN, All rights reserved. *
+*                                                                        *
+* Author: The ALICE Off-line Project.                                    *
+* Contributors are mentioned in the code where appropriate.              *
+*                                                                        *
+* Permission to use, copy, modify and distribute this software and its   *
+* documentation strictly for non-commercial purposes is hereby granted   *
+* without fee, provided that the above copyright notice appears in all   *
+* copies and that both the copyright notice and this permission notice   *
+* appear in the supporting documentation. The authors make no claims     *
+* about the suitability of this software for any purpose. It is          *
+* provided "as is" without express or implied warranty.                  * 
+**************************************************************************/
+
+///////////////////////////////////////////////
+// AliAnalysisTaskScalarProductDev:
+//
+// analysis task for Scalar Product Method
+//
+// Author: Naomi van der Kolk (kolk@nikhef.nl)
+///////////////////////////////////////////////
+
+
+#include "Riostream.h" //needed as include
+
+class TFile;
+class TList;
+class AliAnalysisTaskSE;
+
+#include "TProfile.h"  //needed as include
+#include "AliAnalysisManager.h"
+#include "AliFlowEventSimple.h"
+
+#include "AliAnalysisTaskScalarProductDev.h"
+#include "AliFlowAnalysisWithScalarProductDev.h"
+#include "AliFlowCommonHist.h"
+#include "AliFlowCommonHistResults.h"
+
+#include "AliLog.h"
+
+using std::endl;
+using std::cout;
+ClassImp(AliAnalysisTaskScalarProductDev)
+
+//________________________________________________________________________
+AliAnalysisTaskScalarProductDev::AliAnalysisTaskScalarProductDev(const char *name, Bool_t usePhiWeights) : 
+  AliAnalysisTaskSE(name), 
+  fEvent(NULL),
+  fSP(NULL),
+  fListHistos(NULL),
+  fMinimalBook(kFALSE),
+  fUsePhiWeights(usePhiWeights),
+  fListWeights(NULL),
+  fRelDiffMsub(1.0),
+  fApplyCorrectionForNUA(kFALSE),
+  fHarmonic(2),
+  fNormalizationType(1),
+  fTotalQvector(NULL) 
+{
+  // Named constructor: configures the analysis-train input/output slots.
+  // usePhiWeights adds a second input slot for the phi-weights list.
+  AliDebug(2,"AliAnalysisTaskScalarProductDev::AliAnalysisTaskScalarProductDev(const char *name)");
+
+  // Define input and output slots here
+  // Input slot #0 works with an AliFlowEventSimple
+  DefineInput(0, AliFlowEventSimple::Class());
+  // Input slot #1 is needed for the weights input file
+  if(usePhiWeights) {
+    DefineInput(1, TList::Class()); }
+  // Output slot #0 writes into a TList container
+  DefineOutput(1, TList::Class());
+
+  // Total Q-vector is: "QaQb" (means Qa+Qb), "Qa"  or "Qb"
+  // NOTE: heap-allocated and owned by this task (released in the destructor)
+  fTotalQvector = new TString("QaQb");
+}
+
+//________________________________________________________________________
+AliAnalysisTaskScalarProductDev::AliAnalysisTaskScalarProductDev() : 
+  AliAnalysisTaskSE(), 
+  fEvent(NULL),
+  fSP(NULL),
+  fListHistos(NULL),
+  fMinimalBook(kFALSE),
+  fUsePhiWeights(kFALSE),
+  fListWeights(NULL),
+  fRelDiffMsub(1.0),
+  fApplyCorrectionForNUA(kFALSE),
+  fHarmonic(0),
+  fNormalizationType(1),
+  fTotalQvector(NULL) 
+  {
+  // Default constructor, required by ROOT I/O only: defines no slots and
+  // leaves fTotalQvector NULL (the named constructor allocates it).
+    AliDebug(2,"AliAnalysisTaskScalarProductDev::AliAnalysisTaskScalarProductDev()");
+}
+
+//________________________________________________________________________
+AliAnalysisTaskScalarProductDev::~AliAnalysisTaskScalarProductDev()
+{
+  //
+  // Destructor
+  //
+
+  // histograms are in the output list and deleted when the output
+  // list is deleted by the TSelector dtor
+
+  //  if (ListHistos) {
+  //    delete fListHistos;
+  //    fListHistos = NULL;
+  //  }
+
+  // fTotalQvector is allocated with new in the named constructor and owned
+  // by this task; the original code leaked it.
+  if (fTotalQvector) {
+    delete fTotalQvector;
+    fTotalQvector = NULL;
+  }
+}
+
+//________________________________________________________________________
+void AliAnalysisTaskScalarProductDev::UserCreateOutputObjects() 
+{
+  // Called at every worker node to initialize
+  AliDebug(2,"AliAnalysisTaskScalarProductDev::CreateOutputObjects()");
+  
+  //Analyser
+  fSP = new AliFlowAnalysisWithScalarProductDev();
+  fSP->SetBookOnlyBasicCCH(fMinimalBook);
+
+  //set the allowed relative difference in the subevent multiplicities
+  //fSP->SetRelDiffMsub(fRelDiffMsub); 
+    
+  //apply automatic correction for non-uniform acceptance:
+  if (fApplyCorrectionForNUA) {
+    AliDebug(2,"Corrections for non-uniform acceptance applied in the Scalar Product method");
+  }
+  fSP->SetApplyCorrectionForNUA(fApplyCorrectionForNUA);
+  // harmonic: 
+  fSP->SetHarmonic(fHarmonic);
+  fSP->SetNormalizationType( fNormalizationType );
+  // total Q-vector:
+  Int_t totalQ = 0;
+  if( fTotalQvector->Contains("Qa") ) totalQ += 1;
+  if( fTotalQvector->Contains("Qb") ) totalQ += 2;
+  fSP->SetTotalQvector( totalQ );
+  //for using phi weights:
+  if(fUsePhiWeights) {
+    //pass the flag to the analysis class:
+    fSP->SetUsePhiWeights(fUsePhiWeights);
+    //get data from input slot #1 which is used for weights:
+    if(GetNinputs()==2) {                   
+      fListWeights = (TList*)GetInputData(1); 
+    }
+    //pass the list with weights to the analysis class:
+    if(fListWeights) fSP->SetWeightsList(fListWeights);
+  }
+  
+  fSP-> Init();
+
+  if (fSP->GetHistList()) {
+    fListHistos = fSP->GetHistList();
+  }
+  else {Printf("ERROR: Could not retrieve histogram list"); }
+
+  PostData(1,fListHistos);
+
+}
+
+//________________________________________________________________________
+void AliAnalysisTaskScalarProductDev::UserExec(Option_t *) 
+{
+  // Main loop, called once per event: fetch the flow event from input
+  // slot #0 and hand it to the analysis object.
+  fEvent = dynamic_cast<AliFlowEventSimple*>(GetInputData(0));
+  if (!fEvent) {
+    cout << "Warning no input data for Scalar Product task!!!" << endl;
+  } else {
+    fSP->ProcessEvent(fEvent);
+  }
+
+  // publish the (cumulative) output list after every event
+  PostData(1,fListHistos);
+}
+
+//________________________________________________________________________
+void AliAnalysisTaskScalarProductDev::Terminate(Option_t *) 
+{
+  // Called once at the end of the query: retrieve the merged output list
+  // and finalize the results with a temporary analysis object.
+  fListHistos = (TList*)GetOutputData(1);
+  if (fListHistos) {
+      // allocate the helper only when it is actually needed
+      AliFlowAnalysisWithScalarProductDev* fSPTerm = new AliFlowAnalysisWithScalarProductDev() ;
+      fSPTerm -> GetOutputHistograms(fListHistos);
+      fSPTerm -> Finish();
+      PostData(1,fListHistos);
+      delete fSPTerm; // was leaked in the original implementation
+    }
+  else { cout << "histogram list pointer is empty in Scalar Product" << endl; }
+
+}
diff --git a/PWG/FLOW/Tasks/AliAnalysisTaskScalarProductDev.h b/PWG/FLOW/Tasks/AliAnalysisTaskScalarProductDev.h
new file mode 100644 (file)
index 0000000..dbab1b6
--- /dev/null
@@ -0,0 +1,78 @@
+/* Copyright(c) 1998-1999, ALICE Experiment at CERN, All rights reserved. *
+* See cxx source for full Copyright notice */
+/* $Id: $ */
+
+#ifndef AliAnalysisTaskScalarProductDev_H
+#define AliAnalysisTaskScalarProductDev_H
+
+/////////////////////////////////////////////////
+// AliAnalysisTaskScalarProductDev:
+// analysis task for Scalar Product method
+// Author: Naomi van der Kolk (kolk@nikhef.nl)
+/////////////////////////////////////////////////
+
+class AliFlowEventSimple;
+class AliFlowAnalysisWithScalarProductDev;
+class TList;
+
+#include "TString.h"
+#include "AliAnalysisTaskSE.h"
+
+//===============================================================
+
+class AliAnalysisTaskScalarProductDev : public AliAnalysisTaskSE {
+ public:
+  AliAnalysisTaskScalarProductDev();                                    // default ctor (ROOT I/O only)
+  AliAnalysisTaskScalarProductDev(const char *name, Bool_t usePhiWeights); // named ctor, defines the I/O slots
+  virtual ~AliAnalysisTaskScalarProductDev();
+  
+  virtual void   UserCreateOutputObjects(); // book the analysis object and output list
+  virtual void   UserExec(Option_t *option); // per-event processing
+  virtual void   Terminate(Option_t *);      // finalize results at end of query
+
+  void   SetUsePhiWeights(Bool_t const aPhiW) {this->fUsePhiWeights = aPhiW;}
+  Bool_t GetUsePhiWeights() const             {return this->fUsePhiWeights;}
+
+  void     SetRelDiffMsub(Double_t diff) { this->fRelDiffMsub = diff; }
+  Double_t GetRelDiffMsub() const        { return this->fRelDiffMsub; }
+  
+  void SetApplyCorrectionForNUA(Bool_t const applyCorrectionForNUA) {this->fApplyCorrectionForNUA = applyCorrectionForNUA;};
+  Bool_t GetApplyCorrectionForNUA() const {return this->fApplyCorrectionForNUA;};
+  
+  void SetHarmonic(Int_t const harmonic) {this->fHarmonic = harmonic;};
+  Int_t GetHarmonic() const {return this->fHarmonic;};   
+
+  // switch normalization from Scalar Product (default) to Event Plane mode
+  void SetBehaveAsEP() { fNormalizationType = 0; }
+  
+  // NOTE(review): assumes fTotalQvector was allocated (named ctor); the
+  // default ctor leaves it NULL -- confirm callers use the named ctor
+  void SetTotalQvector(const char *tqv) {*this->fTotalQvector = tqv;}; 
+
+  void SetBookOnlyBasicCCH(Bool_t const aMB) {this->fMinimalBook = aMB;}
+
+ private:
+
+  AliAnalysisTaskScalarProductDev(const AliAnalysisTaskScalarProductDev& aAnalysisTask);            // copy ctor (not implemented)
+  AliAnalysisTaskScalarProductDev& operator=(const AliAnalysisTaskScalarProductDev& aAnalysisTask); // assignment (not implemented)
+
+  AliFlowEventSimple*               fEvent;         //input event
+  AliFlowAnalysisWithScalarProductDev* fSP;            // analysis object
+  TList*                            fListHistos;    // collection of output
+
+  Bool_t    fMinimalBook;   // flag to turn off QA and minimize FlowCommonHist
+  Bool_t    fUsePhiWeights; // use phi weights
+  TList*    fListWeights;   // list with weights
+
+  Double_t  fRelDiffMsub;   // the relative difference the two subevent multiplicities can have
+  
+  Bool_t fApplyCorrectionForNUA; // apply automatic correction for non-uniform acceptance 
+  
+  Int_t fHarmonic;               // harmonic
+  Int_t fNormalizationType;      // 0: EP mode || 1: SP mode (default)
+
+  TString   *fTotalQvector;      // total Q-vector is: "QaQb" (means Qa+Qb), "Qa"  or "Qb"; owned by this task
+  
+  ClassDef(AliAnalysisTaskScalarProductDev, 2); // example of analysis
+};
+
+//==================================================================
+
+#endif