\documentclass[12pt,a4paper,twoside]{article}
2\usepackage[margin=2cm]{geometry}
3\usepackage{graphicx}
4\usepackage{varioref}
5\usepackage{xspace}
6\usepackage{listings}
7\usepackage{ifpdf}
8\usepackage{hyperref}
9\newcommand{\class}[1]{\texttt{\textbf{#1}}\xspace}
10\newcommand{\method}[1]{\texttt{#1}\xspace}
11\renewcommand{\rmdefault}{ptm}
12
13%
14% ---------------------------------------------------------------
15% define new commands/symbols
16% ---------------------------------------------------------------
17%
18% General stuff
19%
20\hyphenation{ALICE}
21\hyphenation{between}
22\hyphenation{basis}
23\hyphenation{below}
24\hyphenation{because}
25\hyphenation{da-ta-ba-ses}
26
27\newcommand{\pt}{\ensuremath{p_{\mathrm{t}}}}
28\newcommand{\et}{\ensuremath{E_{\mathrm{T}}}}
29\newcommand {\pT} {\mbox{$p_{\rm t}$}}
30\newcommand{\mt}{\ensuremath{m_{\mathrm{t}}}}
31\newcommand {\grid} {Grid\@\xspace}
32\newcommand {\MC} {Monte~Carlo\@\xspace}
33\newcommand {\alien} {AliEn\@\xspace}
34\newcommand {\pp} {\mbox{p--p}\@\xspace}
35\newcommand {\pA} {\mbox{p--A}\@\xspace}
36\newcommand {\PbPb} {\mbox{Pb--Pb}\@\xspace}
37\newcommand {\aliroot} {AliRoot\@\xspace}
38\newcommand {\ROOT} {ROOT\@\xspace}
39\newcommand {\OO} {Object-Oriented\@\xspace}
40
41\newcommand{\mrm}{\mathrm}
42\newcommand{\dd}{\mrm{d}}
43\newcommand{\elm}{e.m.\@\xspace}
44\newcommand{\eg}{{e.g.~\@\xspace}}
45\newcommand{\ie}{i.e.\@\xspace}
46\newcommand{\Jpsi} {\mbox{J\kern-0.05em /\kern-0.05em$\psi$}\xspace}
47\newcommand{\psip} {\mbox{$\psi^\prime$}\xspace}
48\newcommand{\Ups} {\mbox{$\Upsilon$}\xspace}
49\newcommand{\Upsp} {\mbox{$\Upsilon^\prime$}\xspace}
50\newcommand{\Upspp} {\mbox{$\Upsilon^{\prime\prime}$}\xspace}
51\newcommand{\qqbar} {\mbox{$q\bar{q}$}\xspace}
52
53\newcommand {\grad} {\mbox{$^{\circ}$}}
54
55\newcommand {\rap} {\mbox{$\left | y \right | $}}
56\newcommand {\mass} {\mbox{\rm GeV$\kern-0.15em /\kern-0.12em c^2$}}
57\newcommand {\tev} {\mbox{${\rm TeV}$}}
58\newcommand {\gev} {\mbox{${\rm GeV}$}}
59\newcommand {\mev} {\mbox{${\rm MeV}$}}
60\newcommand {\kev} {\mbox{${\rm keV}$}}
61\newcommand {\mom} {\mbox{\rm GeV$\kern-0.15em /\kern-0.12em c$}}
62\newcommand {\mum} {\mbox{$\mu {\rm m}$}}
63\newcommand {\gmom} {\mbox{\rm GeV$\kern-0.15em /\kern-0.12em c$}}
64\newcommand {\mmass} {\mbox{\rm MeV$\kern-0.15em /\kern-0.12em c^2$}}
65\newcommand {\mmom} {\mbox{\rm MeV$\kern-0.15em /\kern-0.12em c$}}
66\newcommand {\nb} {\mbox{\rm nb}}
67\newcommand {\musec} {\mbox{$\mu {\rm s}$}}
68\newcommand {\cmq} {\mbox{${\rm cm}^{2}$}}
69\newcommand {\cm} {\mbox{${\rm cm}$}}
70\newcommand {\mm} {\mbox{${\rm mm}$}}
71\newcommand {\dens} {\mbox{${\rm g}\,{\rm cm}^{-3}$}}
72
73\lstset{ % general command to set parameter(s)
74% basicstyle=\small, % print whole listing small
75 basicstyle=\ttfamily, % print whole listing monospace
76 keywordstyle=\bfseries, % bold black keywords
77 identifierstyle=, % identifiers in italic
78 commentstyle=\itshape, % white comments in italic
79 stringstyle=\ttfamily, % typewriter type for strings
80 showstringspaces=false, % no special string spaces
81 columns=fullflexible, % Flexible columns
82 xleftmargin=2em, % Extra margin, left
83 xrightmargin=2em, % Extra margin, right
84 numbers=left, % Line numbers on the left
85 numberfirstline=true, % First line numbered
86 firstnumber=1, % Always start at 1
87 stepnumber=5, % Every fifth line
88 numberstyle=\footnotesize\itshape, % Style of line numbers
89 frame=lines} % Lines above and below listings
90
91%
92% ---------------------------------------------------------
93% - End of Definitions
94% ---------------------------------------------------------
95
96\begin{document}
97
98\title{AliRoot Primer}
99\author{Editor P.Hristov}
100\date{Version v4-05-06 \\
10122/11/2006
102}
103
104\maketitle
105\tableofcontents
106%\lstlistoflistings
107
108%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
109\cleardoublepage
110\section{Introduction}\label{Introduction}
111
112% -----------------------------------------------------------------------------
113
114
115\subsection{About this primer}
116
The aim of this primer is to give basic information about the
ALICE offline framework (AliRoot) from the user's perspective. We explain
in detail the installation procedure, and give examples of some
typical use cases: detector description, event generation, particle
transport, generation of ``summable digits'', event merging,
reconstruction, particle identification, and generation of event
summary data. The primer also includes some examples of analysis and
a short description of the existing analysis classes in AliRoot. An
updated version of the document can be downloaded from
\url{http://aliceinfo.cern.ch/Offline/AliRoot/primer.html}.
127
For readers interested in the AliRoot architecture and in the
performance studies done so far, a good starting point is Chapter 4 of
the ALICE Physics Performance Report\cite{PPR}. Another important
document is the ALICE Computing Technical Design Report\cite{CompTDR}.
Some information contained there has been included in the present
document, but most of the details have been omitted.
134
AliRoot uses the ROOT\cite{ROOT} system as a foundation on which the
framework for simulation, reconstruction and analysis is built. The
transport of the particles through the detector is carried out by the
Geant3\cite{Geant3} or FLUKA\cite{FLUKA} packages. Support for the
Geant4\cite{Geant4} transport package is coming soon.
140
Apart from large existing libraries, such as Pythia6\cite{MC:PYTH} and
HIJING\cite{MC:HIJING}, and some remaining legacy code, this framework
is based on the Object-Oriented programming paradigm and is
written in C++.
145
146The following packages are needed to install the fully operational
147software distribution:
148\begin{itemize}
149\item ROOT, available from \url{http://root.cern.ch}
150or using the ROOT CVS repository
151\begin{verbatim}
152:pserver:cvs@root.cern.ch:/user/cvs
153\end{verbatim}
154\item AliRoot from the ALICE offline CVS repository
155\begin{verbatim}
156:pserver:cvs@alisoft.cern.ch:/soft/cvsroot
157\end{verbatim}
158\item transport packages:
159\begin{itemize}
160\item GEANT~3 is available from the ROOT CVS repository
161\item FLUKA library can
162be obtained after registration from \url{http://www.fluka.org}
163\item GEANT~4 distribution from \url{http://cern.ch/geant4}.
164\end{itemize}
165\end{itemize}
166
167The access to the GRID resources and data is provided by the
168AliEn\cite{AliEn} system.
169
170The installation details are explained in Section \ref{Installation}.
171
172\subsection{AliRoot framework}\label{AliRootFramework}
173
174In HEP, a framework is a set of software tools that enables data
175processing. For example the old CERN Program Library was a toolkit to
176build a framework. PAW was the first example of integration of tools
177into a coherent ensemble specifically dedicated to data analysis. The
178role of the framework is shown in Fig.~\ref{MC:Parab}.
179
180\begin{figure}[ht]
181 \centering
182 \includegraphics[width=10cm]{picts/Parab}
183 \caption{Data processing framework.} \label{MC:Parab}
184\end{figure}
185
The primary interactions are simulated via event generators, and the
resulting kinematic tree is then used in the transport package. An
event generator produces a set of ``particles'' with their momenta. The
set of particles, where one maintains the production history (in the
form of mother--daughter relationships and production vertices), forms the
kinematic tree. More details can be found in the ROOT documentation of
the class \class{TParticle}. The transport package transports the
particles through the set of detectors and produces \textbf{hits},
which in ALICE terminology means energy deposition at a given
point. The hits also contain information (``track labels'') about the
particles that have generated them. In the case of the calorimeters (PHOS and
EMCAL) the hit is the energy deposition in the whole active volume of
a detecting element. In some detectors the energy of the hit is used
only for comparison with a given threshold, for example in TOF and the ITS
pixel layers.
201
At the next step the detector response is taken into account, and the
hits are transformed into \textbf{digits}. As explained above,
the hits are closely related to the tracks which generated them. The
transition from hits/tracks to digits/detectors is marked in the
figure as ``disintegrated response'': the tracks are
``disintegrated'', and only the labels carry the \MC information.
There are two types of digits: \textbf{summable digits}, where
low thresholds are used and the result is additive, and \textbf{digits},
where the real thresholds are used and the result is similar to what one
would get in real data taking. In some sense the \textbf{summable
digits} are precursors of the \textbf{digits}. The noise simulation is
activated when \textbf{digits} are produced. There are two differences
between the \textbf{digits} and the \textbf{raw} data format produced
by the detector: firstly, the information about the \MC particle
generating the digit is kept as a data member of the class
\class{AliDigit}, and secondly, the raw data are stored in binary
format as ``payload'' in a ROOT structure, while the digits are stored
in ROOT classes. Two conversion chains are provided in AliRoot:
\textbf{hits} $\to$ \textbf{summable digits} $\to$ \textbf{digits},
and \textbf{hits} $\to$ \textbf{digits}. The summable digits are used
for the so-called ``event merging'', where a signal event is embedded
in a signal-free underlying event. This technique is widely used in
heavy-ion physics and allows the underlying events to be reused, with
a substantial saving of computing resources. Optionally it is possible
to perform the conversion \textbf{digits} $\to$ \textbf{raw data},
which is used to estimate the expected data size, to evaluate the
high-level trigger algorithms, and to carry out the so-called computing data
challenges. The reconstruction and the HLT algorithms can work
with either \textbf{digits} or \textbf{raw data}. There is also the
possibility to convert the \textbf{raw data} between the following
formats: the format coming from the front-end electronics (FEE)
through the detector data link (DDL), the format used in the data
acquisition system (DAQ), and the ``rootified'' format. More details
are given in Section~\ref{Simulation}.
236
After the creation of digits, the reconstruction and analysis chain
can be activated to evaluate the software and the detector
performance, and to study some particular signatures. The
reconstruction takes as input digits or raw data, real or simulated.
The user can intervene in the cycle provided by the framework to
replace any part of it with his own code or to implement his own analysis
of the data. I/O and user interfaces are part of the framework, as are
data visualization and analysis tools and all procedures that are
considered of general enough interest to be introduced into the
framework. The scope of the framework evolves with time as the needs
and understanding of the physics community evolve.
248
249The basic principles that have guided the design of the AliRoot
250framework are re-usability and modularity. There are almost as many
251definitions of these concepts as there are programmers. However, for
252our purpose, we adopt an operative heuristic definition that expresses
253our objective to minimize the amount of unused or rewritten code and
254maximize the participation of the physicists in the development of the
255code.
256
257\textbf{Modularity} allows replacement of parts of our system with
258minimal or no impact on the rest. Not every part of our system is
259expected to be replaced. Therefore we are aiming at modularity
260targeted to those elements that we expect to change. For example, we
261require the ability to change the event generator or the transport \MC
262without affecting the user code. There are elements that we do not
263plan to interchange, but rather to evolve in collaboration with their
264authors such as the ROOT I/O subsystem or the ROOT User Interface
265(UI), and therefore no effort is made to make our framework modular
266with respect to these. Whenever an element has to be modular in the
267sense above, we define an abstract interface to it. The codes from the
268different detectors are independent so that different detector groups
269can work concurrently on the system while minimizing the
270interference. We understand and accept the risk that at some point the
271need may arise to make modular a component that was not designed to
272be. For these cases, we have elaborated a development strategy that
273can handle design changes in production code.
274
275\textbf{Re-usability} is the protection of the investment made by the
276programming physicists of ALICE. The code embodies a large scientific
277knowledge and experience and is thus a precious resource. We preserve
278this investment by designing a modular system in the sense above and
279by making sure that we maintain the maximum amount of backward
280compatibility while evolving our system. This naturally generates
281requirements on the underlying framework prompting developments such
282as the introduction of automatic schema evolution in ROOT.
283
The \textbf{support} of the AliRoot framework is a collaborative effort
within the ALICE experiment. Questions, suggestions, topics for
discussion and messages are exchanged on the mailing list
\url{alice-off@cern.ch}. Bug reports and tasks are submitted on the
Savannah page \url{http://savannah.cern.ch/projects/aliroot/}.
289
290%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
291
292\newpage
293%\cleardoublepage
294\section{Installation and development tools}\label{Installation}
295
296% -----------------------------------------------------------------------------
297
298\subsection{Platforms and compilers}
299
The main development and production platform is Linux on Intel 32-bit
processors. The official Linux\cite{Linux} distribution at CERN is
Scientific Linux SLC\cite{SLC}. The code also works on
RedHat\cite{RedHat} versions 7.3, 8.0 and 9.0, Fedora Core\cite{Fedora} 1
-- 5, and on many other Linux distributions. The main compiler on
Linux is gcc\cite{gcc}: the recommended versions are gcc 3.2.3 --
3.4.6. The older releases (2.91.66, 2.95.2, 2.96) have problems in the
FORTRAN optimization, which has to be switched off for all the FORTRAN
packages. AliRoot can be used with gcc 4.0.X, where the FORTRAN
compiler g77 is replaced by g95. The latest release series of gcc (4.1)
works with gfortran as well. As an option you can use the Intel
icc\cite{icc} compiler, which is supported as well. You can download
it from \url{http://www.intel.com} and use it free of charge for
non-commercial projects. Intel also provides free of charge the
VTune\cite{VTune} profiling tool, which is one of the best
available so far.
316
AliRoot is supported on Intel 64-bit processors
(Itanium\cite{Itanium}) running Linux. Both the gcc and Intel icc
compilers can be used.
320
On 64-bit AMD\cite{AMD} processors, such as the Opteron, AliRoot runs
successfully with the gcc compiler.
323
324The software is also regularly compiled and run on other Unix
325platforms. On Sun (SunOS 5.8) we recommend the CC compiler Sun
326WorkShop 6 update 1 C++ 5.2. The WorkShop integrates nice debugging
327and profiling facilities which are very useful for code development.
328
On the Compaq alpha server (Digital Unix V4.0) the default compiler is cxx
(Compaq C++ V6.2-024 for Digital UNIX V4.0F). Alpha also provides its
profiling tool pixie, which works well with shared libraries. AliRoot
also works on alpha servers running Linux, where the compiler is gcc.
333
Recently AliRoot was ported to MacOS (Darwin). This OS is very
sensitive to circular dependencies among the shared libraries, which
makes it very useful as a test platform.
337
338% -----------------------------------------------------------------------------
339
340\subsection{Essential CVS information}
341
CVS\cite{CVS} stands for Concurrent Versions System. It permits a
group of people to work simultaneously on groups of files (for
instance program sources). It also records the history of the files, which
allows backtracking and file versioning. The official CVS Web page is
\url{http://www.cvshome.org/}. CVS has a host of features, among which
the most important are:
\begin{itemize}
\item CVS facilitates parallel and concurrent code development;
\item it provides easy support and simple access;
\item it offers the possibility to establish group permissions (for example,
  only detector experts and CVS administrators can commit code to a
  given detector module).
\end{itemize}
CVS has a rich set of commands, the most important of which are described below.
There exist several tools for visualization, logging and control which
work with CVS. More information is available in the CVS documentation
and manual\cite{CVSManual}.
359
Usually the development process with CVS has the following features:
\begin{itemize}
\item all developers work on their \underline{own} copy of the project
  (in one of their directories);
\item they often have to \underline{synchronize} with a global
  repository, both to pick up modifications made by other people and
  to commit their own changes.
\end{itemize}
368
Below we give an example of a typical CVS session:
370
371\begin{lstlisting}[language=sh]
372 # Login to the repository. The password is stored in ~/.cvspass
373 # If no cvs logout is done, the password remains there and
374 # one can access the repository without new login
375 % cvs -d :pserver:hristov@alisoft.cern.ch:/soft/cvsroot login
376 (Logging in to hristov@alisoft.cern.ch)
377 CVS password:
378 xxxxxxxx
379
380 # Check-Out a local version of the TPC module
381 % cvs -d :pserver:hristov@alisoft.cern.ch:/soft/cvsroot checkout TPC
382 cvs server: Updating TPC
383 U TPC/.rootrc
384 U TPC/AliTPC.cxx
385 U TPC/AliTPC.h
386 ...
387
388 # edit file AliTPC.h
389 # compile and test modifications
390
391 # Commit your changes to the repository with an appropriate comment
392 % cvs commit -m "add include file xxx.h" AliTPC.h
393 Checking in AliTPC.h;
394 /soft/cvsroot/AliRoot/TPC/AliTPC.h,v <-- AliTPC.h
395 new revision: 1.9; previous revision:1.8
396 done
397
398\end{lstlisting}
399
Instead of specifying the repository and user name with the -d option, one
can export the environment variable CVSROOT, for example:
402
403\begin{lstlisting}[language=sh]
404 % export CVSROOT=:pserver:hristov@alisoft.cern.ch:/soft/cvsroot
405\end{lstlisting}
406
Once the local version has been checked out, CVSROOT is not needed
anymore inside the directory tree. The name of the actual repository
can be found in the CVS/Root file. This name can be redefined again using
the -d option.
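If in doubt about which repository a checked-out tree points to, one can
simply inspect that file; the short check below assumes the TPC module
checked out in the session above:

\begin{lstlisting}[language=sh]
 # Show the repository recorded at check-out time
 % cat TPC/CVS/Root
 :pserver:hristov@alisoft.cern.ch:/soft/cvsroot
\end{lstlisting}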
411
In case somebody else has committed changes to the AliTPC.h file in the
meantime, the developer has to update the local version, merging in his
own changes, before committing them:
415
416\begin{lstlisting}[language=sh]
417 % cvs commit -m "add include file xxx.h" AliTPC.h
418 cvs server: Up-to-date check failed for `AliTPC.h'
419 cvs [server aborted]: correct above errors first!
420
421 % cvs update
422 cvs server: Updating .
423 RCS file: /soft/cvsroot/AliRoot/TPC/AliTPC.h,v
424 retrieving revision 1.9
425 retrieving revision 1.10
426 Merging differences between 1.9 and 1.10 into AliTPC.h
427
428 M AliTPC.h
429 # edit, compile and test modifications
430
431 % cvs commit -m "add include file xxx.h" AliTPC.h
432 Checking in AliTPC.h;
433 /soft/cvsroot/AliRoot/TPC/AliTPC.h,v <-- AliTPC.h
434 new revision: 1.11; previous revision: 1.10
435 done
436
437\end{lstlisting}
\textbf{Important note:} CVS performs a purely mechanical merging, and
it is the developer's responsibility to verify the result of this
operation. This is especially true in case of conflicts, when the CVS
tool is not able to merge the local and remote modifications consistently.
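When a conflict does occur, CVS marks the overlapping region directly in
the file with standard conflict markers, which have to be edited by hand
before the file can be committed. Schematically (the revision number
shown is illustrative):

\begin{lstlisting}[language=C++]
 <<<<<<< AliTPC.h
 // local, uncommitted modification
 =======
 // modification already committed by somebody else
 >>>>>>> 1.10
\end{lstlisting}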
442
443
444\subsection{Main CVS commands}
445
In the following examples we suppose that the CVSROOT environment
variable is set, as shown above. In case a local version has
already been checked out, the CVS repository is defined automatically
inside the directory tree.
450
451\begin{itemize}
\item\textbf{login} stores the password in .cvspass. It is enough to log in
  to the repository once.
454
455\item\textbf{checkout} retrieves the source files of AliRoot version v4-04-Rev-08
456 \begin{lstlisting}[language=sh]
457 % cvs co -r v4-04-Rev-08 AliRoot
458 \end{lstlisting}
459
\item\textbf{update} retrieves modifications from the repository and
  merges them with the local ones. The -q option reduces the verbose
  output, and -z9 sets the compression level during the data
  transfer. The option -A removes all the ``sticky'' tags, -d retrieves
  the directories which are missing from the local distribution, and -P
  removes (prunes) the directories which have become empty. In this way
  the local distribution will be updated to the latest code from the
  main development branch.
468 \begin{lstlisting}[language=sh]
469 % cvs -qz9 update -AdP STEER
470 \end{lstlisting}
471
472\item\textbf{diff} shows differences between the local and repository
473 versions of the whole module STEER
474 \begin{lstlisting}[language=sh]
475 % cvs -qz9 diff STEER
476 \end{lstlisting}
477
478\item \textbf{add} adds files or directories to the repository. The
479 actual transfer is done when the commit command is invoked.
480 \begin{lstlisting}[language=sh]
481 % cvs -qz9 add AliTPCseed.*
482 \end{lstlisting}
483
484\item\textbf{remove} removes old files or directories from the
485 repository. The -f option forces the removal of the local files. In
486 the example below the whole module CASTOR will be scheduled for
487 removal.
488 \begin{lstlisting}[language=sh]
489 % cvs remove -f CASTOR
490 \end{lstlisting}
491
\item\textbf{commit} checks in the local modifications to the
  repository and increments the versions of the files. In the example
  below all the changes made in the different files of the module
  STEER will be committed to the repository. The -m option is
  followed by the log message. In case you don't provide it, you will
  be prompted by an editor window. No commit is possible without the
  log message, which explains what was done.
499 \begin{lstlisting}[language=sh]
 % cvs -qz9 commit -m "Coding convention" STEER
501 \end{lstlisting}
502
503\item\textbf{tag} creates new tags and/or branches (with -b option).
504 \begin{lstlisting}[language=sh]
505 % cvs tag -b v4-05-Release .
506 \end{lstlisting}
507\item\textbf{status} returns the actual status of a file: revision,
508 sticky tag, dates, options, and local modifications.
509 \begin{lstlisting}[language=sh]
510 % cvs status Makefile
511 \end{lstlisting}
512
\item\textbf{logout} removes the password which is stored in
  \$HOME/.cvspass. It is not necessary unless the user really
  wants to remove the password from that account.
516\end{itemize}
517
518
519% -----------------------------------------------------------------------------
520
521\subsection{Environment variables}
522
Before the installation of AliRoot the user has to set some
environment variables. In the following examples the user is working
on Linux and the default shell is bash. It is enough to add a few
lines to the .bash\_profile file, as shown below:
527
528\begin{lstlisting}[language=sh]
529 # ROOT
530 export ROOTSYS=/home/mydir/root
531 export PATH=$PATH\:$ROOTSYS/bin
532 export LD_LIBRARY_PATH=$LD_LIBRARY_PATH\:$ROOTSYS/lib
533
534 # AliRoot
535 export ALICE=/home/mydir/alice
536 export ALICE_ROOT=$ALICE/AliRoot
537 export ALICE_TARGET=`root-config --arch`
538 export PATH=$PATH\:$ALICE_ROOT/bin/tgt_${ALICE_TARGET}
539 export LD_LIBRARY_PATH=$LD_LIBRARY_PATH\:$ALICE_ROOT/lib/tgt_${ALICE_TARGET}
540
541 # Geant3
542 export PLATFORM=`root-config --arch` # Optional, defined otherwise in Geant3 Makefile
543 export
544 LD_LIBRARY_PATH=$LD_LIBRARY_PATH\:$ALICE/geant3/lib/tgt_${ALICE_TARGET}
545
546 # FLUKA
547 export FLUPRO=$ALICE/fluka # $FLUPRO is used in TFluka
548 export PATH=$PATH\:$FLUPRO/flutil
549
550 # Geant4: see the details later
551\end{lstlisting}
552
553where ``/home/mydir'' has to be replaced with the actual directory
554path. The meaning of the environment variables is the following:
555
556\texttt{ROOTSYS} -- the place where the ROOT package is located;
557
558\texttt{ALICE} -- top directory for all the software packages used in ALICE;
559
\texttt{ALICE\_ROOT} -- the place where the AliRoot package is located, usually
as a subdirectory of ALICE;
562
\texttt{ALICE\_TARGET} -- specific platform name. Up to release
v4-01-Release this variable was set to the result of the ``uname''
command. Starting from AliRoot v4-02-05 the ROOT naming schema was
adopted, and the user has to use the ``root-config --arch'' command.
567
\texttt{PLATFORM} -- the same as ALICE\_TARGET, but for the GEANT~3
package. Until GEANT~3 v1-0 the user had to use `uname` to specify the
platform. From version v1-0 on, the ROOT platform is used instead
(``root-config --arch''). This environment variable is set by default
in the Geant3 Makefile.
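A quick way to verify that the variables are consistent is to compare
them with the output of root-config; the values printed below are just
an illustration for a 32-bit Linux machine:

\begin{lstlisting}[language=sh]
 % root-config --arch    # platform name as used by ROOT
 linux
 % echo $ALICE_TARGET    # should print the same value
 linux
\end{lstlisting}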
573
574
575% -----------------------------------------------------------------------------
576
577\subsection{Software packages}
578
579\subsubsection{AliEn}
580
The installation of AliEn is the first one to be done if you plan to
access the GRID or need a GRID-enabled ROOT. You can download the AliEn
installer and use it in the following way:
584 \begin{lstlisting}[language=sh, title={AliEn installation}]
585 % wget http://alien.cern.ch/alien-installer
586 % chmod +x alien-installer
587 % ./alien-installer
588 \end{lstlisting}
589The alien-installer runs a dialog which prompts for the default
590selection and options. The default installation place for AliEn is
591/opt/alien, and the typical packages one has to install are ``client''
592and ``gshell''.
593
594\subsubsection{ROOT}
595
596All ALICE offline software is based on ROOT\cite{ROOT}. The ROOT
597framework offers a number of important elements which are exploited in
598AliRoot:
599
600\begin{itemize}
601\item a complete data analysis framework including all the PAW
602 features;
603\item an advanced Graphic User Interface (GUI) toolkit;
604\item a large set of utility functions, including several commonly
605 used mathematical functions, random number generators,
606 multi-parametric fit and minimization procedures;
607\item a complete set of object containers;
608\item integrated I/O with class schema evolution;
609\item C++ as a scripting language;
610\item documentation tools.
611\end{itemize}
There is a nice ROOT user's guide which incorporates important and
detailed information. For those who are not familiar with ROOT a good
starting point is the ROOT Web page at \url{http://root.cern.ch}. Here
experienced users may easily find the latest version of the class
descriptions and search for useful information.
617
618\noindent
The recommended way to install ROOT is from the CVS sources, as
shown below:
621
622\begin{enumerate}
623\item Login to the ROOT CVS repository if you haven't done it yet.
624 \begin{lstlisting}[language=sh]
625 % cvs -d :pserver:cvs@root.cern.ch:/user/cvs login
626 % CVS password: cvs
627 \end{lstlisting}
628
629\item Download (check out) the needed ROOT version (v5-13-04 in the example)
630 \begin{lstlisting}[language=sh]
631 % cvs -d :pserver:cvs@root.cern.ch:/user/cvs co -r v5-13-04 root
632 \end{lstlisting}
633 The appropriate combinations of Root, Geant3 and AliRoot versions
634 can be found at
635 \url{http://aliceinfo.cern.ch/Offline/AliRoot/Releases.html}
636
\item The code is stored in the directory ``root''. You have to go
  there, set the ROOTSYS environment variable (if this has not been done in
  advance), and configure ROOT. ROOTSYS contains the full path to
  the ROOT directory.
641
642 \lstinputlisting[language=sh, title={Root configuration}]{scripts/confroot}
643
644\item Now you can compile and test ROOT
645 \lstinputlisting[language=sh,title={Compiling and testing
646 ROOT}]{scripts/makeroot}
647
648\end{enumerate}
649
At this point the user should have a working ROOT version on Linux
(32-bit Pentium processor with the gcc compiler). The list of supported
platforms can be obtained with the ``./configure --help'' command.
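For orientation, the essential steps performed by the configuration and
build scripts referenced above (confroot and makeroot, which are kept as
external files and not reproduced here) amount to the following sketch;
the platform argument ``linux'' is an assumption for a 32-bit Linux
machine:

\begin{lstlisting}[language=sh]
 % cd root
 % export ROOTSYS=`pwd`
 % ./configure linux    # or the platform reported by ./configure --help
 % make
\end{lstlisting}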
653
654\subsubsection{GEANT~3}
655
The installation of GEANT~3 is needed since, for the moment, this is
the default particle transport package. A GEANT~3 description is
available at
659\url{http://wwwasdoc.web.cern.ch/wwwasdoc/geant_html3/geantall.html}.
660You can download the GEANT~3 distribution from the ROOT CVS repository
661and compile it in the following way:
662
663\lstinputlisting[language=sh,title={Make GEANT3}]{scripts/makeg3}
664
Please note that GEANT~3 is downloaded into the \$ALICE directory. Another
important feature is the PLATFORM environment variable. If it is not
set, the Geant3 Makefile sets it to the result of `root-config
--arch`.
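Schematically, the procedure amounts to checking the geant3 module out
of the ROOT CVS repository into \$ALICE and building it there; the module
name ``geant3'' is assumed from the \$ALICE/geant3 path used in the
environment settings above:

\begin{lstlisting}[language=sh]
 % cd $ALICE
 % cvs -d :pserver:cvs@root.cern.ch:/user/cvs co geant3
 % cd geant3
 % make
\end{lstlisting}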
669
670\subsubsection{GEANT~4}
To use GEANT~4\cite{Geant4}, some additional software has to
be installed. GEANT~4 needs the CLHEP\cite{CLHEP} package; the user can
get the tar file (from here on, ``tarball'') from
\url{http://proj-clhep.web.cern.ch/proj-clhep/}.
Then the installation can be done in the following way:
676
677\lstinputlisting[language=sh, title={Make CLHEP}]{scripts/makeclhep}
678
679
680Another possibility is to use the CLHEP CVS repository:
681
682\lstinputlisting[language=sh, title={Make CLHEP from
683 CVS}]{scripts/makeclhepcvs}
684
Now the following line should be added to the .bash\_profile:
686
687\begin{lstlisting}[language=sh]
688 % export CLHEP_BASE_DIR=$ALICE/CLHEP
689\end{lstlisting}
690
691The next step is to install GEANT~4. The GEANT~4 distribution is available from
692\url{http://geant4.web.cern.ch/geant4/}. Typically the following files
693will be downloaded (the current versions may differ from the ones below):
694\begin{itemize}
695\item geant4.8.1.p02.tar.gz: source tarball
696\item G4NDL.3.9.tar.gz: G4NDL version 3.9 neutron data files with thermal cross sections
697\item G4EMLOW4.0.tar.gz: data files for low energy electromagnetic processes - version 4.0
698\item PhotonEvaporation.2.0.tar.gz: data files for photon evaporation - version 2.0
699\item RadiativeDecay.3.0.tar.gz: data files for radioactive decay hadronic processes - version 3.0
700\item G4ELASTIC.1.1.tar.gz: data files for high energy elastic scattering processes - version 1.1
701\end{itemize}
702
703Then the following steps have to be executed:
704
705\lstinputlisting[language=sh, title={Make GEANT4}]{scripts/makeg4}
706
The env.sh script can be sourced from the
\texttt{\~{}/.bash\_profile} so that the GEANT~4 environment variables
are initialized automatically.
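For example, a line like the one below can be added to the
\texttt{\~{}/.bash\_profile}; the location of env.sh depends on where
GEANT~4 was installed, so the path used here is only an assumption:

\begin{lstlisting}[language=sh]
 # Source the GEANT4 environment (installation path assumed)
 . $ALICE/geant4/env.sh
\end{lstlisting}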
710
711\subsubsection{FLUKA}
712
713The installation of FLUKA\cite{FLUKA} consists of the following steps:
714
715\begin{enumerate}
716
\item register as a FLUKA user at \url{http://www.fluka.org} if you
  haven't yet done so. You will receive your ``fuid'' number and will set
  your password;
720
721\item download the latest FLUKA version from
722 \url{http://www.fluka.org}. Use your ``fuid'' registration and
723 password when prompted. You will obtain a tarball containing the
724 FLUKA libraries, for example fluka2006.3-linuxAA.tar.gz
725
726\item install the libraries;
727
728 \lstinputlisting[language=sh, title={install FLUKA}]{scripts/makefluka}
729
730\item compile TFluka;
731
732 \begin{lstlisting}[language=sh]
733 % cd $ALICE_ROOT
734 % make all-TFluka
735 \end{lstlisting}
736
737\item run AliRoot using FLUKA;
738 \begin{lstlisting}[language=sh]
739 % cd $ALICE_ROOT/TFluka/scripts
740 % ./runflukageo.sh
741 \end{lstlisting}
742
  This script creates the directory tmp with all the necessary
  links for data and configuration files inside it, and starts aliroot. For
  the next run it is not necessary to run the script again. The tmp
  directory can be kept or renamed. The user should run aliroot from
  inside this directory.
748
749\item from the AliRoot prompt start the simulation;
750 \begin{lstlisting}[language=C++]
751 root [0] AliSimulation sim;
752 root [1] sim.Run();
753 \end{lstlisting}
754
755 You will get the results of the simulation in the tmp directory.
756
757\item reconstruct the simulated event;
758 \begin{lstlisting}[language=sh]
759 % cd tmp
760 % aliroot
761 \end{lstlisting}
762
763 and from the AliRoot prompt
764 \begin{lstlisting}[language=C++]
765 root [0] AliReconstruction rec;
766 root [1] rec.Run();
767 \end{lstlisting}
768
769\item report any problem you encounter to the offline list \url{alice-off@cern.ch}.
770
771\end{enumerate}
772
773
774\subsubsection{AliRoot}
775
The AliRoot distribution is taken from the CVS repository and then
built as follows:
\begin{lstlisting}[language=sh]
778 % cd $ALICE
779 % cvs -qz2 -d :pserver:cvs@alisoft.cern.ch:/soft/cvsroot co AliRoot
780 % cd $ALICE_ROOT
781 % make
782\end{lstlisting}
783
The AliRoot code (the above example retrieves the HEAD version from CVS) is
contained in the ALICE\_ROOT directory. The ALICE\_TARGET is defined
automatically in the \texttt{.bash\_profile} via the call to `root-config --arch`.
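After the build finishes, a quick sanity check is to verify that the
libraries and the aliroot executable have appeared in the
platform-specific directories which were added to the paths in
.bash\_profile:

\begin{lstlisting}[language=sh]
 % ls $ALICE_ROOT/lib/tgt_${ALICE_TARGET}   # shared libraries
 % ls $ALICE_ROOT/bin/tgt_${ALICE_TARGET}   # executables, including aliroot
 % which aliroot
\end{lstlisting}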
787
788
789
790\subsection{Debugging}
791
792While developing code or running some ALICE program, the user may be
793confronted with the following execution errors:
794
795\begin{itemize}
\item floating-point exceptions: division by zero, square root of a negative
  argument, assignment of NaN, etc.;
\item segmentation violations/faults: attempt to access a memory
  location that the program is not allowed to access, or in a way which is
  not allowed;
\item bus errors: attempt to access memory that the computer cannot
  address.
803\end{itemize}
804
In this case, the user will have to debug the program to determine the
source of the problem and fix it. There are several debugging
techniques, which are briefly listed below:
808
809\begin{itemize}
810\item using \texttt{printf(...)}, \texttt{std::cout}, \texttt{assert(...)}, and
811 \texttt{AliDebug}.
812 \begin{itemize}
813 \item often this is the only easy way to find the origin of the
814 problem;
  \item \texttt{assert(...)} aborts the program execution if the
    argument is false. It is a macro from \texttt{cassert}; it can be
    deactivated by compiling with -DNDEBUG (see the short example after this list).
818 \end{itemize}
819\item using gdb
820 \begin{itemize}
  \item gdb needs compilation with the -g option. Sometimes -O2 -g
    prevents exact tracing, so it is safer to compile with
    -O0 -g for debugging purposes;
  \item one can use it directly (gdb aliroot) or attach it to a running
    process (gdb aliroot 12345, where 12345 is the process id).
826 \end{itemize}
827\end{itemize}
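The following minimal C++ fragment (not taken from AliRoot) illustrates
the behavior of \texttt{assert}: compiled normally it aborts on the
negative argument, while compiling with -DNDEBUG removes the check:

\begin{lstlisting}[language=C++]
 #include <cassert>
 #include <cmath>
 #include <cstdio>

 double SafeSqrt(double x)
 {
   // Aborts here for x < 0, unless compiled with -DNDEBUG
   assert(x >= 0 && "SafeSqrt: negative argument");
   return std::sqrt(x);
 }

 int main()
 {
   std::printf("%f\n", SafeSqrt(-1.));
   return 0;
 }
\end{lstlisting}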
828
829Below we report the main gdb commands and their descriptions:
830
831\begin{itemize}
832\item \textbf{run} starts the execution of the program;
833\item \textbf{Control-C} stops the execution and switches to the gdb shell;
834\item \textbf{where <n>} prints the program stack. Sometimes the program
835 stack is very long. The user can get the last n frames by specifying
836 n as a parameter to where;
837\item \textbf{print} prints the value of a variable or expression;
838
839 \begin{lstlisting}[language=sh]
840 (gdb) print *this
841 \end{lstlisting}
842\item \textbf{up} and \textbf{down} are used to navigate in the program stack;
843\item \textbf{quit} exits the gdb session;
\item \textbf{break} sets a breakpoint;
845
846 \begin{lstlisting}[language=C++]
847 (gdb) break AliLoader.cxx:100
848 (gdb) break 'AliLoader::AliLoader()'
849 \end{lstlisting}
850
  The automatic completion of the class methods via the tab key is available
  when an opening quote (`) is put in front of the class name.
853
854\item \textbf{cont} continues the run;
\item \textbf{watch} sets a watchpoint (very slow execution). The example below
  shows how to check each change of fData;
857
858 \begin{lstlisting}[language=C++]
859 (gdb) watch *fData
860 \end{lstlisting}
861\item \textbf{list} shows the source code;
862\item \textbf{help} shows the description of commands.
863\end{itemize}
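Putting a few of these commands together, a typical debugging session
could look like the sketch below; the breakpoint location and the sim.C
macro are taken from the examples in this document:

\begin{lstlisting}[language=sh]
 % gdb aliroot
 (gdb) break 'AliLoader::AliLoader()'
 (gdb) run -b -q sim.C   # start aliroot with the given arguments
 (gdb) where 10          # print the last 10 frames of the stack
 (gdb) print *this
 (gdb) cont
 (gdb) quit
\end{lstlisting}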
864
865
866\subsection{Profiling}
867
Profiling is used to discover where the program spends most of the
time, and to optimize the algorithms. There are several profiling
tools available on different platforms:
\begin{itemize}
\item Linux tools:\\
  gprof: needs compilation with the -pg option and static libraries;\\
  oprofile: uses a kernel module;\\
  VTune: instruments shared libraries.
\item Sun: Sun WorkShop (Forte agent). It needs compilation with the
  profiling option (-pg).
\item Compaq Alpha: pixie profiler. It instruments shared libraries for profiling.
\end{itemize}
880
881On Linux AliRoot can be built with static libraries using the special
882target ``profile''
883
884\begin{lstlisting}[language=sh]
885 % make profile
886 # change LD_LIBRARY_PATH to replace lib/tgt_linux with lib/tgt_linuxPROF
887 # change PATH to replace bin/tgt_linux with bin/tgt_linuxPROF
888 % aliroot
889 root [0] gAlice->Run()
890 root [1] .q
891\end{lstlisting}
892
After the end of the aliroot session a file called gmon.out will be created. It
contains the profiling information, which can be investigated using
gprof.
896
897\begin{lstlisting}[language=sh]
898 % gprof `which aliroot` | tee gprof.txt
899 % more gprof.txt
900\end{lstlisting}
901
902
903\noindent
904\textbf{VTune profiling tool}
905
VTune is available from the Intel Web site
\url{http://www.intel.com/software/products/index.htm}. It is free for
non-commercial use on Linux. It provides call-graph
and sampling profiling. VTune instruments shared libraries and needs
only the -g option during the compilation. Here is an example of
call-graph profiling:
912
913\begin{lstlisting}[language=sh]
914 # Register an activity
915 % vtl activity sim -c callgraph -app aliroot,'' -b -q sim.C'' -moi aliroot
916 % vtl run sim
917 % vtl show
918 % vtl view sim::r1 -gui
919\end{lstlisting}
920
921\subsection{Detection of run time errors}
922
The Valgrind tool can be used for the detection of run-time errors on
Linux. It is available from \url{http://www.valgrind.org}. Valgrind
is equipped with the following set of tools:
\begin{itemize}
\item memcheck: detects memory-management problems;
\item addrcheck: lightweight memory checker;
\item cachegrind: cache profiler;
\item massif: heap profiler;
\item helgrind: thread debugger;
\item callgrind: extended version of cachegrind.
\end{itemize}
934
935The most important tool is memcheck. It can detect:
936\begin{itemize}
937\item use of non-initialized memory;
938\item reading/writing memory after it has been free'd;
939\item reading/writing off the end of malloc'd blocks;
940\item reading/writing inappropriate areas on the stack;
941\item memory leaks -- where pointers to malloc'd blocks are lost forever;
942\item mismatched use of malloc/new/new [] vs free/delete/delete [];
943\item overlapping source and destination pointers in memcpy() and
944 related functions;
945\item some misuses of the POSIX pthreads API;
946\end{itemize}
947
948Here is an example of Valgrind usage:
949
950\begin{lstlisting}[language=sh]
951 % valgrind --tool=addrcheck --error-limit=no aliroot -b -q sim.C
952\end{lstlisting}
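Since memcheck is the most complete of the tools listed above, a
leak-checking run may also be useful; the --leak-check option below is a
standard Valgrind flag, not something specific to AliRoot:

\begin{lstlisting}[language=sh]
 % valgrind --tool=memcheck --leak-check=full --error-limit=no \
   aliroot -b -q rec.C
\end{lstlisting}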
953
954%\noindent
955%\textbf{ROOT memory checker}
956%
957% The ROOT memory checker provides tests of memory leaks and other
958% problems related to new/delete. It is fast and easy to use. Here is
959% the recipe:
960% \begin{itemize}
961% \item link aliroot with -lNew. The user has to add `\-\-new' before
962% `\-\-glibs' in the ROOTCLIBS variable of the Makefile;
963% \item add Root.MemCheck: 1 in .rootrc
964% \item run the program: aliroot -b -q sim.C
965% \item run memprobe -e aliroot
966% \item Inspect the files with .info extension that have been generated.
967% \end{itemize}
968
\subsection{Useful information about LSF and CASTOR}
970
971\textbf{The information in this section is included for completeness: the
972 users are strongly advised to rely on the GRID tools for massive
973 productions and data access}
974
LSF is the batch system at CERN. Every user is allowed to submit jobs
to the different queues. Usually the user has to copy some input files
(macros, data, executables, libraries) from a local computer or from
the mass-storage system to the worker node on lxbatch, then to execute
the program, and finally to store the results on the local computer or in the
mass-storage system. The methods explained in this section are suitable
if the user doesn't have direct access to a shared directory, for
example on AFS. The main steps and commands are described below.
983
In order to have access to the local desktop and to be able to use scp
without a password, the user has to create a pair of SSH keys. Currently
lxplus/lxbatch uses RSA1 cryptography. After logging in to lxplus the
following has to be done:
988
989\begin{lstlisting}[language=sh]
990 % ssh-keygen -t rsa1
991 # Use empty password
992 % cp .ssh/identity.pub public/authorized_keys
993 % ln -s ../public/authorized_keys .ssh/authorized_keys
994\end{lstlisting}
995
A list of useful LSF commands is given below:
997\begin{itemize}
998\item \textbf{bqueues} shows the available queues and their status;
\item \textbf{bsub -q 8nm job.sh} submits the shell script job.sh to
  the queue 8nm, where the name of the queue indicates the
  ``normalized CPU time'' (maximal job duration of 8 min of normalized CPU time);
1002\item \textbf{bjobs} lists all unfinished jobs of the user;
\item \textbf{lsrun -m lxbXXXX xterm} returns an xterm running on the
  batch node lxbXXXX. This permits the user to inspect the job output and to
  debug a batch job.
1006\end{itemize}
1007
Each batch job stores its output in the directory LSFJOB\_XXXXXX, where
XXXXXX is the job id. Since the home directory is on AFS, the user has
to redirect the verbose output, otherwise the AFS quota might be
exceeded and the jobs will fail.
1012
The CERN mass-storage system is CASTOR2\cite{CASTOR2}. Every user has
his/her own CASTOR2 space, for example /castor/cern.ch/user/p/phristov.
The CASTOR2 commands start with the prefix ``ns'' or ``rf''. Here is a
very short list of useful commands:
1017
1018\begin{itemize}
1019\item \textbf{nsls /castor/cern.ch/user/p/phristov} lists the CASTOR
1020 space of user phristov;
1021\item \textbf{rfdir /castor/cern.ch/user/p/phristov} the same as
1022 above, but the output is in long format;
1023\item \textbf{nsmkdir test} creates a new directory (test) in the
1024 CASTOR space of the user;
1025\item \textbf{rfcp /castor/cern.ch/user/p/phristov/test/galice.root .}
1026 copies the file from CASTOR to the local directory. If the file is
1027 on tape, this will trigger the stage-in procedure, which might take
1028 some time.
1029\item \textbf{rfcp AliESDs.root /castor/cern.ch/p/phristov/test}
1030 copies the local file AliESDs.root to CASTOR in the subdirectory
1031 test and schedules it for migration to tape.
1032\end{itemize}
1033
The user also has to be aware that the behavior of CASTOR depends on
the environment variables RFIO\_USE\_CASTOR\_V2 (=YES),
STAGE\_HOST (=castoralice) and STAGE\_SVCCLASS (=default). They are set
by default to the values for the group (z2 in the case of ALICE).
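If they need to be set by hand, for example in a batch job, this amounts
to exporting the values quoted above:

\begin{lstlisting}[language=sh]
 export RFIO_USE_CASTOR_V2=YES
 export STAGE_HOST=castoralice
 export STAGE_SVCCLASS=default
\end{lstlisting}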
1038
Below the user can find an example of a job where the simulation and
reconstruction are run using the corresponding macros sim.C and rec.C.
Examples of such macros will be given later.
1042
1043\lstinputlisting[language=sh,title={LSF example job}]{scripts/lsfjob}
1044
1045%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
1046
1047\newpage
1048\section{Simulation} \label{Simulation}
1049
1050% -----------------------------------------------------------------------------
1051
1052\subsection{Introduction}
Heavy-ion collisions produce a very large number of particles in the
final state. This is a challenge for the reconstruction and analysis
algorithms. The detector design and the development of these algorithms
require a predictive and precise simulation of the detector response.
Model predictions, discussed in the first volume of the Physics
Performance Report, for the charged multiplicity at LHC in \mbox{Pb--Pb}
collisions vary from 1400 to 8000 particles in the central unit of
rapidity. The experiment was designed when the highest nucleon--nucleon
center-of-mass energy available in heavy-ion interactions was
$20 \, {\rm GeV}$ per nucleon--nucleon pair at the CERN SPS, i.e. a
factor of about 300 less than the energy at the LHC. Meanwhile, the RHIC
collider has come online. Its top energy of $200\, {\rm GeV}$ per
nucleon--nucleon pair is still 30 times less than the LHC energy. The
RHIC data seem to suggest that the LHC multiplicity will be on the lower
side of the interval. However, the extrapolation is so large that both
the hardware and software of ALICE have to be designed for the highest
multiplicity. Moreover, as the predictions of different generators of
heavy-ion collisions differ substantially at LHC energies, we have to
use several of them and compare the results.
1072
1073The simulation of the processes involved in the transport through the
1074detector of the particles emerging from the interaction is confronted
1075with several problems:
1076
1077\begin {itemize}
1078\item existing event generators give different answers on parameters
1079 such as expected multiplicities, $p_T$-dependence and rapidity
1080 dependence at LHC energies.
1081
1082\item most of the physics signals, like hyperon production, high-$p_T$
1083 phenomena, open charm and beauty, quarkonia etc., are not exactly
1084 reproduced by the existing event generators.
1085
\item simulation of small cross-sections would demand prohibitively
  high computing resources to simulate a number of events commensurate with
  the expected number of detected events in the experiment.
1089
1090\item the existing generators do not provide for event topologies like
1091 momentum correlations, azimuthal flow etc.
1092\end {itemize}
1093
Nevertheless, to allow efficient simulations, we have adopted a
framework that provides a number of options:
1096
1097
1098\begin{itemize}
1099\item{} the simulation framework provides an interface to external
1100 generators, like HIJING~\cite{MC:HIJING} and
1101 DPMJET~\cite{MC:DPMJET}.
1102
1103\item{} a parameterized, signal-free, underlying event where the
1104 produced multiplicity can be specified as an input parameter is
1105 provided.
1106
1107\item{} rare signals can be generated using the interface to external
1108 generators like PYTHIA or simple parameterizations of transverse
1109 momentum and rapidity spectra defined in function libraries.
1110
1111\item{} the framework provides a tool to assemble events from
1112 different signal generators (event cocktails).
1113
1114\item{} the framework provides tools to combine underlying events and
1115 signal events at the primary particle level (cocktail) and at the
1116 summable digit level (merging).
1117
\item{} ``afterburners'' are used to introduce particle correlations in a
  controlled way. An afterburner is a program which changes the
  momenta of the particles produced by another generator, and thus
  modifies the multi-particle momentum distributions as desired.
1122\end{itemize}
1123
1124The implementation of this strategy is described below. The results of
1125different \MC generators for heavy-ion collisions are
1126described in section~\ref{MC:Generators}.
1127
1128\subsection{Simulation framework}
1129
The simulation framework covers the simulation of primary collisions
and the generation of the emerging particles, the transport of particles
through the detector, the simulation of energy depositions (hits) in
the detector components, their response in the form of so-called summable
digits, the generation of digits from summable digits with the
optional merging of underlying events, and the creation of raw data.
The \class{AliSimulation} class provides a simple user interface to
the simulation framework. This section focuses on the simulation
framework from the (detector) software developer's point of view.
1139
1140\begin{figure}[ht]
1141 \centering
1142 \includegraphics[width=10cm]{picts/SimulationFramework}
1143 \caption{Simulation framework.} \label{MC:Simulation}
1144\end{figure}
1145
1146
1147\noindent
1148\textbf{Generation of Particles}
1149
1150Different generators can be used to produce particles emerging from
1151the collision. The class \class{AliGenerator} is the base class
1152defining the virtual interface to the generator programs. The
1153generators are described in more detail in the ALICE PPR Volume 1 and
1154in the next chapter.
1155
1156\noindent
1157\textbf{Virtual Monte Carlo}
1158
The simulation of particles traversing the detector components is
performed by a class derived from \class{TVirtualMC}. The Virtual
Monte Carlo also provides an interface to construct the geometry of
the detectors. The task of the geometry description is done by the
geometrical modeler \class{TGeo}. The concrete implementation of the
virtual Monte Carlo application \class{TVirtualMCApplication} is
\class{AliMC}. The Monte Carlo programs used in ALICE are GEANT~3.21,
GEANT~4 and FLUKA. More information can be found on the VMC Web page:
\url{http://root.cern.ch/root/vmc}
1168
1169As explained above, our strategy was to develop a virtual interface to
1170the detector simulation code. We call the interface to the transport
1171code virtual Monte Carlo. It is implemented via C++ virtual classes
1172and is schematically shown in Fig.~\ref{MC:vmc}. The codes that
1173implement the abstract classes are real C++ programs or wrapper
1174classes that interface to FORTRAN programs.
1175
1176\begin{figure}[ht]
1177 \centering
1178 \includegraphics[width=10cm]{picts/vmc}
1179 \caption{Virtual \MC} \label{MC:vmc}
1180\end{figure}
1181
1182Thanks to the virtual Monte Carlo we have converted all FORTRAN user
1183code developed for GEANT~3 into C++, including the geometry definition
1184and the user scoring routines, \texttt{StepManager}. These have been
1185integrated in the detector classes of the AliRoot framework. The
1186output of the simulation is saved directly with ROOT I/O, simplifying
1187the development of the digitization and reconstruction code in C++.
1188
1189\noindent
1190\textbf{Modules and Detectors}
1191
1192Each module of the ALICE detector is described by a class derived from
1193\class{AliModule}. Classes for active modules (= detectors) are not
1194derived directly from \class{AliModule} but from its subclass
1195\class{AliDetector}. These base classes define the interface to the
1196simulation framework via a set of virtual methods.
1197
1198\noindent
1199\textbf{Configuration File (Config.C)}
1200
1201The configuration file is a C++ macro that is processed before the
1202simulation starts. It creates and configures the Monte Carlo object,
1203the generator object, the magnetic field map and the detector modules.
1204A detailed description is given below.
1205
1206\noindent
1207\textbf{Detector Geometry}
1208
1209The virtual Monte Carlo application creates and initializes the
1210geometry of the detector modules by calling the virtual functions
1211\method{CreateMaterials}, \method{CreateGeometry}, \method{Init} and
1212\method{BuildGeometry}.
1213
1214\noindent
1215\textbf{Vertexes and Particles}
1216
1217In case the simulated event is intended to be merged with an
1218underlying event, the primary vertex is taken from the file containing
1219the underlying event by using the vertex generator
1220\class{AliVertexGenFile}. Otherwise the primary vertex is generated
1221according to the generator settings. Then the particles emerging from
1222the collision are generated and put on the stack (an instance of
1223\class{AliStack}). The transport of particles through the detector is
1224performed by the Monte Carlo object. The decay of particles is usually
1225handled by the external decayer \class{AliDecayerPythia}.
1226
1227\noindent
1228\textbf{Hits and Track References}
1229
The Monte Carlo simulates the transport of a particle step by step.
After each step the virtual method \method{StepManager} of the module
in which the particle is currently located is called. In this step
manager method, the hits in the detector are created by calling
\method{AddHit}. Optionally also track references (location and
momentum of simulated particles at selected places) can be created by
calling \method{AddTrackReference}. \method{AddHit} has to be
implemented by each detector, whereas \method{AddTrackReference} is
already implemented in \class{AliModule}. The container and the branch for the
hits -- and for the (summable) digits -- are managed by the detector
class via a set of so-called loaders. The relevant data members and
methods are fHits, fDigits, \method{ResetHits}, \method{ResetSDigits},
\method{ResetDigits}, \method{MakeBranch} and \method{SetTreeAddress}.
1243
1244For each detector methods like \method{PreTrack}, \method{PostTrack},
1245\method{FinishPrimary}, \method{FinishEvent} and \method{FinishRun}
1246are called during the simulation when the conditions indicated by the
1247method names are fulfilled.
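The fragment below is a purely schematic illustration of these hooks and
does not reproduce any real AliRoot detector: the hit array and the
\method{AddHit} signature are hypothetical; only the \method{StepManager}
hook and the \class{TVirtualMC} calls via the global gMC pointer follow
the description above.

\begin{lstlisting}[language=C++, title={Schematic StepManager of a hypothetical detector}]
 void AliMyDetector::StepManager()
 {
   // Called by the Virtual Monte Carlo after each transport step
   // taken inside a sensitive volume of this (hypothetical) detector
   if (gMC->Edep() <= 0) return;   // keep only steps that deposit energy

   TLorentzVector pos;
   gMC->TrackPosition(pos);        // current position of the particle

   // Track label of the particle producing the deposition (assumed accessor)
   Int_t track = gAlice->GetMCApp()->GetCurrentTrackNumber();

   Double_t hit[4] = {pos.X(), pos.Y(), pos.Z(), gMC->Edep()};
   AddHit(track, fVolumeId, hit);  // hypothetical AddHit signature
 }
\end{lstlisting}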
1248
1249\noindent
1250\textbf{Summable Digits}
1251
Summable digits are created by calling the virtual method \method{Hits2SDigits}
of a detector. This method loops over all events, creates the summable
digits from the hits and stores them in the sdigits file(s).
1255
1256\noindent
\textbf{Digitization and Merging}
1258
1259Dedicated classes derived from \class{AliDigitizer} are used for the
1260conversion of summable digits into digits. Since \class{AliDigitizer}
1261is a \class{TTask}, this conversion is done for
1262the current event by the \method{Exec} method. Inside this method the summable
1263digits of all input streams have to be added, combined with noise,
1264converted to digital values taking into account possible thresholds
1265and stored in the digits container.
1266
The input streams (more than one in the case of merging) as well as the
output stream are managed by an object of type \class{AliRunDigitizer}. The
methods \method{GetNinputs}, \method{GetInputFolderName} and
\method{GetOutputFolderName} return the relevant information. The run
digitizer is accessible inside the digitizer via the protected data member
fManager. If the flag fRegionOfInterest is set, only the detector parts
where summable digits from the signal event are present should be
digitized. When \MC labels are assigned to digits, the stream-dependent
offset given by the method \method{GetMask} is added to the label of the
summable digit.
1276
The detector-specific digitizer object is created in the virtual
method \method{CreateDigitizer} of the concrete detector class. The run
digitizer object is used to construct the detector
digitizer. The \method{Init} method of each digitizer is called before the loop
over the events starts.
1282
1283
A direct conversion from hits to digits can be implemented in
the method \method{Hits2Digits} of a detector. The loop over the events is
inside the method. Of course, merging is not supported in this case.
1287
An example of a simulation script that can be used for the simulation of
proton--proton collisions is given below:
1290
1291\begin{lstlisting}[language=C++, title={Simulation run}]
1292 void sim(Int_t nev=100) {
1293 AliSimulation simulator;
1294 // Measure the total time spent in the simulation
1295 TStopwatch timer;
1296 timer.Start();
1297 // List of detectors, where both summable digits and digits are provided
    simulator.SetMakeSDigits("TRD TOF PHOS EMCAL HMPID MUON ZDC PMD FMD T0 VZERO");
    // Direct conversion of hits to digits for faster processing (ITS TPC)
1300 simulator.SetMakeDigitsFromHits("ITS TPC");
1301 simulator.Run(nev);
1302 timer.Stop();
1303 timer.Print();
1304 }
1305\end{lstlisting}
1306
The following example shows how one can do event merging:
1308
1309\begin{lstlisting}[language=C++, title={Event merging}]
1310 void sim(Int_t nev=6) {
1311 AliSimulation simulator;
1312 // The underlying events are stored in a separate directory.
1313 // Three signal events will be merged in turn with each
1314 // underlying event
1315 simulator.MergeWith("../backgr/galice.root",3);
1316 simulator.Run(nev);
1317 }
1318\end{lstlisting}
1319
1320\noindent
1321\textbf{Raw Data}
1322
1323The digits stored in ROOT containers can be converted into the DATE\cite{DATE}
1324format that will be the `payload' of the ROOT classes containing the
1325raw data. This is done for the current event in the method
1326\method{Digits2Raw} of the detector.
1327
1328The simulation of raw data is managed by the class \class{AliSimulation}. To
1329create raw data DDL files it loops over all events. For each event it
1330creates a directory, changes to this directory and calls the method
1331\method{Digits2Raw} of each selected detector. In the Digits2Raw method the DDL
1332files of a detector are created from the digits for the current
1333event.
1334
1335For the conversion of the DDL files to a DATE file the
1336\class{AliSimulation} class uses the tool dateStream. To create a raw
1337data file in ROOT format with the DATE output as payload the program alimdc is
1338utilized.
1339
The only part that has to be implemented by each detector is
its \method{Digits2Raw} method. In this method one file per
DDL has to be created, obeying the conventions for file names and DDL
IDs. Each file is a binary file with a DDL data header at the
beginning. The DDL data header is implemented in the structure
1345\class{AliRawDataHeader}. The data member fSize should be set to the total
1346size of the DDL raw data including the size of the header. The
1347attribute bit 0 should be set by calling the method \method{SetAttribute(0)} to
1348indicate that the data in this file is valid. The attribute bit 1 can
1349be set to indicate compressed raw data.
1350
1351The detector-specific raw data are stored in the DDL files after the
1352DDL data header. The format of this raw data should be as close as
1353possible to the one that will be delivered by the detector. This
1354includes the order in which the channels will be read out.
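
The following sketch shows the typical structure of such a
\method{Digits2Raw} method for a hypothetical detector MYDET. The file
name (which would encode the DDL ID) and the helper
\method{WriteEncodedDigits} are purely illustrative; only
\class{AliRawDataHeader}, its data member fSize and the method
\method{SetAttribute} mentioned above are assumed.

\begin{lstlisting}[language=C++, title={Sketch of a Digits2Raw method (hypothetical detector MYDET)}]
void AliMYDET::Digits2Raw()
{
  // One binary file per DDL; name and DDL ID must follow the conventions
  // (the file name used here is only illustrative)
  FILE* output = fopen("MYDET_3072.ddl", "wb");

  // Reserve space for the DDL data header; it is rewritten at the end
  AliRawDataHeader header;
  fwrite(&header, sizeof(header), 1, output);

  // Detector-specific: encode the digits of the current event in the
  // same channel order as the real read-out (hypothetical helper)
  Int_t payloadSize = WriteEncodedDigits(output);

  // Total size including the header itself; attribute bit 0 marks the
  // data in this file as valid
  header.fSize = sizeof(header) + payloadSize;
  header.SetAttribute(0);
  fseek(output, 0, SEEK_SET);
  fwrite(&header, sizeof(header), 1, output);
  fclose(output);
}
\end{lstlisting}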
1355
Below we show an example of raw-data creation for all the detectors:
1357
1358\begin{lstlisting}[language=C++]
1359 void sim(Int_t nev=1) {
1360 AliSimulation simulator;
1361 // Create raw data for ALL detectors, rootify it and store in the
  // file raw.root. Do not delete the intermediate files
1363 simulator.SetWriteRawData("ALL","raw.root",kFALSE);
1364 simulator.Run(nev);
1365 }
1366\end{lstlisting}
1367
1368
1369\subsection{Configuration: example of Config.C}
1370
1371The example below contains as comments the most important information:
1372
1373\lstinputlisting[language=C++] {scripts/Config.C}
1374
1375% -----------------------------------------------------------------------------
1376
1377\subsection{Event generation}
1378\label{MC:Generators}
1379\begin{figure}[ht]
1380 \centering
1381 \includegraphics[width=10cm]{picts/aligen}
1382 \caption{\texttt{AliGenerator} is the base class, which has the
1383 responsibility to generate the primary particles of an event. Some
1384 realizations of this class do not generate the particles themselves
1385 but delegate the task to an external generator like PYTHIA through the
1386 \texttt{TGenerator} interface. }
1387 \label{MC:aligen}
1388\end{figure}
1389
1390\subsubsection{Parameterized generation}
1391
The event generation based on parameterization can be used to produce
signal-free final states. It avoids the dependence on a
specific model, and is efficient and flexible. It can be used to
study the track-reconstruction efficiency
as a function of the initial multiplicity and occupancy.
1397
\class{AliGenHIJINGparam}~\cite{MC:HIJINGparam} is an example of an internal
AliRoot generator based on parameterized
pseudorapidity density and transverse-momentum distributions of
charged and neutral pions and kaons. The pseudorapidity
distribution was obtained from a HIJING simulation of central
Pb--Pb collisions and scaled to a charged-particle multiplicity of
8000 in the pseudorapidity interval $|\eta | < 0.5$. Note that
this is about 10\% higher than the corresponding value for a
rapidity density with an average ${\rm d}N/{\rm d}y$ of 8000 in
the interval $|y | < 0.5$.
The transverse-momentum distribution is parameterized from the
measured CDF pion $p_T$-distribution at $\sqrt{s} = 1.8$~\tev.
The corresponding kaon $p_T$-distribution was obtained from the
pion distribution by $m_T$-scaling. See Ref.~\cite{MC:HIJINGparam}
for the details of these parameterizations.
1413
1414In many cases, the expected transverse momentum and rapidity
1415distributions of particles are known. In other cases the effect of
1416variations in these distributions must be investigated. In both
1417situations it is appropriate to use generators that produce
1418primary particles and their decays sampling from parameterized
1419spectra. To meet the different physics requirements in a modular
1420way, the parameterizations are stored in independent function
1421libraries wrapped into classes that can be plugged into the
1422generator. This is schematically illustrated in
1423Fig.~\ref{MC:evglib} where four different generator libraries can
1424be loaded via the abstract generator interface.
1425
1426It is customary in heavy-ion event generation to superimpose
1427different signals on an event to tune the reconstruction
1428algorithms. This is possible in AliRoot via the so-called cocktail
1429generator (Fig.~\ref{MC:cocktail}). This creates events from
1430user-defined particle cocktails by choosing as ingredients a list
1431of particle generators.
1432
1433\begin{figure}[ht]
1434 \centering
1435 \includegraphics[width=10cm]{picts/evglib}
1436 \caption{\texttt{AliGenParam} is a realization of \texttt{AliGenerator}
1437 that generates particles using parameterized $\pt$ and
1438 pseudo-rapidity distributions. Instead of coding a fixed number of
1439 parameterizations directly into the class implementations, user
1440 defined parameterization libraries (AliGenLib) can be connected at
1441 run time allowing for maximum flexibility.} \label{MC:evglib}
1442\end{figure}
1443
1444An example of \class{AliGenParam} usage is presented below:
1445
\begin{lstlisting}[language=C++]
  // Example of Upsilon production from parameterization
  // using the default library (AliMUONlib)
  AliGenParam *gener = new AliGenParam(ntracks, AliGenMUONlib::kUpsilon);
  gener->SetMomentumRange(0,999);  // Wide cut on the Upsilon momentum
  gener->SetPtRange(0,999);        // Wide cut on Pt
  gener->SetPhiRange(0., 360.);    // Full azimuthal range
  gener->SetYRange(2.5,4);         // In the acceptance of the MUON arm
  gener->SetCutOnChild(1);         // Enable cuts on Upsilon decay products
  gener->SetChildThetaRange(2,9);  // Theta range for the decay products
  gener->SetOrigin(0,0,0);         // Vertex position
  gener->SetSigma(0,0,5.3);        // Sigma in (X,Y,Z) (cm) on IP position
  gener->SetForceDecay(kDiMuon);   // Upsilon -> mu+ mu- decay
  gener->SetTrackingFlag(0);       // No particle transport
  gener->Init();
\end{lstlisting}
1462
1463To facilitate the usage of different generators we have developed
1464an abstract generator interface called \texttt{AliGenerator}, see
1465Fig.~\ref{MC:aligen}. The objective is to provide the user with
1466an easy and coherent way to study a variety of physics signals as
well as a full set of tools for testing and background studies. This
1468interface allows the study of full events, signal processes, and
1469a mixture of both, i.e. cocktail events (see an example later).
1470
1471Several event generators are available via the abstract ROOT class
1472that implements the generic generator interface, \texttt{TGenerator}.
1473Through implementations of this abstract base class we wrap
1474FORTRAN \MC codes like PYTHIA, HERWIG, and HIJING that are
thus accessible from the AliRoot classes. In particular, the
interface to PYTHIA includes the use of nuclear structure
functions from LHAPDF.
1478
1479
1480\subsubsection{Pythia6}
1481
Pythia is used for the simulation of proton--proton interactions and for
the generation of jets in the case of event merging. An example of
minimum-bias Pythia event generation is presented below:
1485
1486\begin{lstlisting}[language=C++]
1487 AliGenPythia *gener = new AliGenPythia(-1);
1488 gener->SetMomentumRange(0,999999);
1489 gener->SetThetaRange(0., 180.);
1490 gener->SetYRange(-12,12);
1491 gener->SetPtRange(0,1000);
1492 gener->SetProcess(kPyMb); // Min. bias events
1493 gener->SetEnergyCMS(14000.); // LHC energy
1494 gener->SetOrigin(0, 0, 0); // Vertex position
1495 gener->SetSigma(0, 0, 5.3); // Sigma in (X,Y,Z) (cm) on IP position
1496 gener->SetCutVertexZ(1.); // Truncate at 1 sigma
1497 gener->SetVertexSmear(kPerEvent);// Smear per event
1498 gener->SetTrackingFlag(1); // Particle transport
  gener->Init();
1500\end{lstlisting}
1501
1502
1503\subsubsection{HIJING}
1504HIJING (Heavy-Ion Jet Interaction Generator) combines a
1505QCD-inspired model of jet production~\cite{MC:HIJING} with the
1506Lund model~\cite{MC:LUND} for jet fragmentation. Hard or
1507semi-hard parton scatterings with transverse momenta of a few GeV
1508are expected to dominate high-energy heavy-ion collisions. The
1509HIJING model has been developed with special emphasis on the role
1510of mini jets in pp, pA and A--A reactions at collider energies.
1511
1512Detailed systematic comparisons of HIJING results with a wide
range of data demonstrate a qualitative understanding of the
1514interplay between soft string dynamics and hard QCD interactions.
1515In particular, HIJING reproduces many inclusive spectra,
1516two-particle correlations, and the observed flavor and
1517multiplicity dependence of the average transverse momentum.
1518
1519The Lund FRITIOF~\cite{MC:FRITIOF} model and the Dual Parton
1520Model~\cite{MC:DPM} (DPM) have guided the formulation of HIJING
1521for soft nucleus--nucleus reactions at intermediate energies,
$\sqrt{s_{\rm NN}}\approx 20$~\gev. The hadronic-collision
1523model has been inspired by the successful implementation of
1524perturbative QCD processes in PYTHIA~\cite{MC:PYTH}. Binary
1525scattering with Glauber geometry for multiple interactions are
1526used to extrapolate to pA and A--A collisions.
1527
1528Two important features of HIJING are jet quenching and nuclear
1529shadowing. Jet quenching is the energy loss by partons in nuclear
1530matter. It is responsible for an increase of the particle
1531multiplicity at central rapidities. Jet quenching is modeled by an
1532assumed energy loss by partons traversing dense matter. A simple
1533color configuration is assumed for the multi-jet system and the Lund
1534fragmentation model is used for the hadronisation. HIJING does not
1535simulate secondary interactions.
1536
1537Shadowing describes the modification of the free nucleon parton
1538density in the nucleus. At the low-momentum fractions, $x$,
probed in collisions at the LHC, shadowing results in a decrease
1540of the multiplicity. Parton shadowing is taken into account using
1541a parameterization of the modification.
1542
1543Here is an example of event generation with HIJING:
1544
1545\begin{lstlisting}[language=C++]
1546 AliGenHijing *gener = new AliGenHijing(-1);
1547 gener->SetEnergyCMS(5500.); // center of mass energy
1548 gener->SetReferenceFrame("CMS"); // reference frame
1549 gener->SetProjectile("A", 208, 82); // projectile
  gener->SetTarget ("A", 208, 82); // target
1551 gener->KeepFullEvent(); // HIJING will keep the full parent child chain
1552 gener->SetJetQuenching(1); // enable jet quenching
1553 gener->SetShadowing(1); // enable shadowing
1554 gener->SetDecaysOff(1); // neutral pion and heavy particle decays switched off
1555 gener->SetSpectators(0); // Don't track spectators
1556 gener->SetSelectAll(0); // kinematic selection
1557 gener->SetImpactParameterRange(0., 5.); // Impact parameter range (fm)
  gener->Init();
1559\end{lstlisting}
1560
1561\subsubsection{Additional universal generators}
1562
1563The following universal generators are available in AliRoot:
1564
1565\begin{itemize}
1566\item DPMJET: this is an implementation of the dual parton
1567 model\cite{MC:DPMJET};
\item ISAJET: a \MC event generator for pp, $\bar{p}p$, and $e^+e^-$
 reactions\cite{MC:ISAJET};
1570\item HERWIG: \MC package for simulating Hadron Emission
1571 Reactions With Interfering Gluons\cite{MC:HERWIG}.
1572\end{itemize}
1573
1574An example of HERWIG configuration in the Config.C is shown below:
1575\begin{lstlisting}[language=C++]
1576AliGenHerwig *gener = new AliGenHerwig(-1);
1577// final state kinematic cuts
1578gener->SetMomentumRange(0,7000);
1579gener->SetPhiRange(0. ,360.);
1580gener->SetThetaRange(0., 180.);
1581gener->SetYRange(-10,10);
1582gener->SetPtRange(0,7000);
1583// vertex position and smearing
1584gener->SetOrigin(0,0,0); // vertex position
1585gener->SetVertexSmear(kPerEvent);
1586gener->SetSigma(0,0,5.6); // Sigma in (X,Y,Z) (cm) on IP position
1587// Beam momenta
1588gener->SetBeamMomenta(7000,7000);
1589// Beams
1590gener->SetProjectile("P");
1591gener->SetTarget("P");
1592// Structure function
1593gener->SetStrucFunc(kGRVHO);
// Hard scattering
1595gener->SetPtHardMin(200);
1596gener->SetPtRMS(20);
1597// Min bias
1598gener->SetProcess(8000);
1599\end{lstlisting}
1600
1601\subsubsection{Generators for specific studies}
1602
1603\textbf{MEVSIM}
1604
1605MEVSIM~\cite{MC:MEVSIM} was developed for the STAR experiment to
1606quickly produce a large number of A--A collisions for some
1607specific needs -- initially for HBT studies and for testing of
1608reconstruction and analysis software. However, since the user is
1609able to generate specific signals, it was extended to flow and
1610event-by-event fluctuation analysis. A detailed description of
1611MEVSIM can be found in Ref.~\cite{MC:MEVSIM}.
1612
1613MEVSIM generates particle spectra according to a momentum model
1614chosen by the user. The main input parameters are: types and
1615numbers of generated particles, momentum-distribution model,
1616reaction-plane and azimuthal-anisotropy coefficients, multiplicity
1617fluctuation, number of generated events, etc. The momentum models
1618include factorized $p_T$ and rapidity distributions, non-expanding
1619and expanding thermal sources, arbitrary distributions in $y$ and
1620$p_T$ and others. The reaction plane and azimuthal anisotropy is
1621defined by the Fourier coefficients (maximum of six) including
1622directed and elliptical flow. Resonance production can also be
1623introduced.
1624
1625MEVSIM was originally written in FORTRAN. It was later integrated into
1626AliRoot. A complete description of the AliRoot implementation of MEVSIM can
1627be found on the web page (\url{http://home.cern.ch/~radomski}).
1628
1629\textbf{GeVSim}
1630
1631GeVSim \cite{MC:GEVSIM} is a fast and easy-to-use \MC
1632event generator implemented in AliRoot. It can provide events of
1633similar type configurable by the user according to the specific
1634needs of a simulation project, in particular, that of flow and
1635event-by-event fluctuation studies. It was developed to facilitate
1636detector performance studies and for the test of algorithms.
1637GeVSim can also be used to generate signal-free events to be
processed by afterburners, for example the HBT processor.
1639
1640GeVSim is based on the MevSim \cite{MC:MEVSIM} event generator
1641developed for the STAR experiment.
1642
1643GeVSim generates a list of particles by randomly sampling a
1644distribution function. The parameters of single-particle spectra
1645and their event-by-event fluctuations are explicitly defined by
1646the user. Single-particle transverse-momentum and rapidity spectra
1647can be either selected from a menu of four predefined
distributions, the same as in MevSim, or provided by the user.
1649
1650Flow can be easily introduced into simulated events. The parameters of
1651the flow are defined separately for each particle type and can be
1652either set to a constant value or parameterized as a function of
1653transverse momentum and rapidity. Two parameterizations of elliptic
1654flow based on results obtained by RHIC experiments are provided.
1655
GeVSim also has extended possibilities for simulating
1657event-by-event fluctuations. The model allows fluctuations
1658following an arbitrary analytically defined distribution in
1659addition to the Gaussian distribution provided by MevSim. It is
1660also possible to systematically alter a given parameter to scan
1661the parameter space in one run. This feature is useful when
1662analyzing performance with respect to, for example, multiplicity
1663or event-plane angle.
1664
1665The current status and further development of GeVSim code and documentation
1666can be found in Ref.~\cite{MC:Radomski}.
1667
1668\textbf{HBT processor}
1669
1670Correlation functions constructed with the data produced by MEVSIM
1671or any other event generator are normally flat in the region of
1672small relative momenta. The HBT-processor afterburner introduces
1673two particle correlations into the set of generated particles. It
1674shifts the momentum of each particle so that the correlation
1675function of a selected model is reproduced. The imposed
1676correlation effects due to Quantum Statistics (QS) and Coulomb
1677Final State Interactions (FSI) do not affect the single-particle
1678distributions and multiplicities. The event structures before and
1679after passing through the HBT processor are identical. Thus, the
1680event reconstruction procedure with and without correlations is
1681also identical. However, the track reconstruction efficiency, momentum
resolution and particle identification need not be, since
1683correlated particles have a special topology at small relative
1684velocities. We can thus verify the influence of various
1685experimental factors on the correlation functions.
1686
The method, proposed by L.~Ray and G.W.~Hoffmann~\cite{MC:HBTproc},
is based on random shifts of the particle three-momentum within a
confined range. After each shift, a comparison is made with the
correlation functions resulting from the assumed model of the
space--time distribution and with the single-particle spectra,
which should remain unchanged. The shift is kept if the
$\chi^2$-test shows better agreement. The process is iterated
until satisfactory agreement is achieved. In order to construct
the correlation function, a reference sample is made by mixing
particles from several consecutive events. An important practical
consequence of this method is that at least two events must be
processed simultaneously.
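
The iteration described above can be summarized by the following
pseudo-implementation. All helpers (\texttt{GenerateShift},
\texttt{BuildCorrelation}, \texttt{Chi2ToModel}) are hypothetical and
stand for the model-dependent ingredients; the real procedure also
monitors the single-particle spectra, which is omitted here for brevity.

\begin{lstlisting}[language=C++, title={Iterative momentum-shift procedure (illustrative sketch)}]
void HBTAfterburnerSketch(std::vector<TLorentzVector>& particles, Int_t nIterations)
{
  Double_t chi2 = Chi2ToModel(BuildCorrelation(particles));
  for (Int_t iter = 0; iter < nIterations; iter++) {
    for (size_t i = 0; i < particles.size(); i++) {
      TLorentzVector saved = particles[i];
      GenerateShift(particles[i]);          // random shift within a confined range
      Double_t newChi2 = Chi2ToModel(BuildCorrelation(particles));
      if (newChi2 < chi2) chi2 = newChi2;   // better agreement: keep the shift
      else particles[i] = saved;            // otherwise restore the old momentum
    }
  }
}
\end{lstlisting}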
1699
1700Some specific features of this approach are important for practical
1701use:
1702\begin{itemize}
1703\item{} the HBT processor can simultaneously generate correlations of up
1704 to two particle types (e.g. positive and negative pions).
1705 Correlations of other particles can be added subsequently.
1706\item{} the form of the correlation function has to be parameterized
1707 analytically. One and three dimensional parameterizations are
1708 possible.
1709\item{} a static source is usually assumed. Dynamical effects,
1710 related to
1711 expansion or flow, can be simulated in a stepwise form by repeating
1712 simulations for different values of the space--time parameters
1713 associated with different kinematic intervals.
1714\item{} Coulomb effects may be introduced by one of three
1715 approaches: Gamow
1716 factor, experimentally modified Gamow correction and integrated
1717 Coulomb wave functions for discrete values of the source radii.
1718\item{} Strong interactions are not implemented.
1719\end{itemize}
1720
1721The detailed description of the HBT processor can be found
1722elsewhere~\cite{MC:PiotrSk}.
1723
1724\textbf{Flow afterburner}
1725
1726Azimuthal anisotropies, especially elliptic flow, carry unique
1727information about collective phenomena and consequently are
1728important for the study of heavy-ion collisions. Additional
1729information can be obtained studying different heavy-ion
1730observables, especially jets, relative to the event plane.
1731Therefore it is necessary to evaluate the capability of ALICE to
1732reconstruct the event plane and study elliptic flow.
1733
Since there is no well-understood microscopic description of
the flow effect, it cannot be correctly simulated by microscopic
1736event generators. Therefore, to generate events with flow the user has
1737to use event generators based on macroscopic models, like GeVSim
1738\cite{MC:GEVSIM} or an afterburner which can generate flow on top
1739of events generated by event generators based on the microscopic
1740description of the interaction. In the AliRoot framework such a
1741flow afterburner is implemented.
1742
The algorithm to apply the azimuthal correlation consists in shifting the
azimuthal coordinates of the particles. The transformation is given
by \cite{MC:POSCANCER}:
\[
\varphi \rightarrow \varphi' = \varphi + \Delta\varphi, \qquad
\Delta\varphi = \sum_{n}\frac{-2}{n}\, v_{n}\left(p_{t},y\right)
\sin\left[ n\left( \varphi - \psi \right) \right],
\]
where \( v_{n}(p_{t},y) \) is the flow coefficient to be obtained, \( n \)
is the harmonic number and \( \psi \) is the event-plane angle.
Note that the algorithm is deterministic and does not involve any
random number generation.
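
This transformation translates directly into code. The helper below is
an illustrative stand-alone snippet (not the actual AliRoot afterburner
implementation) that evaluates $\Delta\varphi$ for a given set of flow
coefficients:

\begin{lstlisting}[language=C++, title={Evaluation of the azimuthal shift (illustrative snippet)}]
// vn[n-1] holds the flow coefficient v_n(pt,y) for harmonic n = 1..nMax;
// phi is the particle azimuth, psi the event-plane angle (radians)
Double_t DeltaPhi(const Double_t* vn, Int_t nMax, Double_t phi, Double_t psi)
{
  Double_t dphi = 0.;
  for (Int_t n = 1; n <= nMax; n++)
    dphi += (-2.0 / n) * vn[n - 1] * TMath::Sin(n * (phi - psi));
  return dphi;
}
\end{lstlisting}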
1757
1758The value of the flow coefficient can be either constant or parameterized as a
1759function of transverse momentum and rapidity. Two parameterizations
1760of elliptic flow are provided as in GeVSim.
1761
1762\begin{lstlisting}[language=C++]
1763 AliGenGeVSim* gener = new AliGenGeVSim(0);
1764
1765 mult = 2000; // Mult is the number of charged particles in |eta| < 0.5
1766 vn = 0.01; // Vn
1767
1768 Float_t sigma_eta = 2.75; // Sigma of the Gaussian dN/dEta
1769 Float_t etamax = 7.00; // Maximum eta
1770
1771 // Scale from multiplicity in |eta| < 0.5 to |eta| < |etamax|
1772 Float_t mm = mult * (TMath::Erf(etamax/sigma_eta/sqrt(2.)) /
1773 TMath::Erf(0.5/sigma_eta/sqrt(2.)));
1774
1775 // Scale from charged to total multiplicity
1776 mm *= 1.587;
1777
1778 // Define particles
1779
  // 78% Pions (26% pi+, 26% pi-, 26% pi0)  T = 250 MeV
1781 AliGeVSimParticle *pp =
1782 new AliGeVSimParticle(kPiPlus, 1, 0.26 * mm, 0.25, sigma_eta) ;
1783 AliGeVSimParticle *pm =
1784 new AliGeVSimParticle(kPiMinus, 1, 0.26 * mm, 0.25, sigma_eta) ;
1785 AliGeVSimParticle *p0 =
1786 new AliGeVSimParticle(kPi0, 1, 0.26 * mm, 0.25, sigma_eta) ;
1787
1788 // 12% Kaons (3% K0short, 3% K0long, 3% K+, 3% K-) T = 300 MeV
1789 AliGeVSimParticle *ks =
1790 new AliGeVSimParticle(kK0Short, 1, 0.03 * mm, 0.30, sigma_eta) ;
1791 AliGeVSimParticle *kl =
1792 new AliGeVSimParticle(kK0Long, 1, 0.03 * mm, 0.30, sigma_eta) ;
1793 AliGeVSimParticle *kp =
1794 new AliGeVSimParticle(kKPlus, 1, 0.03 * mm, 0.30, sigma_eta) ;
1795 AliGeVSimParticle *km =
1796 new AliGeVSimParticle(kKMinus, 1, 0.03 * mm, 0.30, sigma_eta) ;
1797
1798 // 10% Protons / Neutrons (5% Protons, 5% Neutrons) T = 250 MeV
1799 AliGeVSimParticle *pr =
1800 new AliGeVSimParticle(kProton, 1, 0.05 * mm, 0.25, sigma_eta) ;
1801 AliGeVSimParticle *ne =
1802 new AliGeVSimParticle(kNeutron, 1, 0.05 * mm, 0.25, sigma_eta) ;
1803
1804 // Set Elliptic Flow properties
1805
1806 Float_t pTsaturation = 2. ;
1807
1808 pp->SetEllipticParam(vn,pTsaturation,0.) ;
1809 pm->SetEllipticParam(vn,pTsaturation,0.) ;
1810 p0->SetEllipticParam(vn,pTsaturation,0.) ;
1811 pr->SetEllipticParam(vn,pTsaturation,0.) ;
1812 ne->SetEllipticParam(vn,pTsaturation,0.) ;
1813 ks->SetEllipticParam(vn,pTsaturation,0.) ;
1814 kl->SetEllipticParam(vn,pTsaturation,0.) ;
1815 kp->SetEllipticParam(vn,pTsaturation,0.) ;
1816 km->SetEllipticParam(vn,pTsaturation,0.) ;
1817
1818 // Set Direct Flow properties
1819
1820 pp->SetDirectedParam(vn,1.0,0.) ;
1821 pm->SetDirectedParam(vn,1.0,0.) ;
1822 p0->SetDirectedParam(vn,1.0,0.) ;
1823 pr->SetDirectedParam(vn,1.0,0.) ;
1824 ne->SetDirectedParam(vn,1.0,0.) ;
1825 ks->SetDirectedParam(vn,1.0,0.) ;
1826 kl->SetDirectedParam(vn,1.0,0.) ;
1827 kp->SetDirectedParam(vn,1.0,0.) ;
1828 km->SetDirectedParam(vn,1.0,0.) ;
1829
1830 // Add particles to the list
1831
1832 gener->AddParticleType(pp) ;
1833 gener->AddParticleType(pm) ;
1834 gener->AddParticleType(p0) ;
1835 gener->AddParticleType(pr) ;
1836 gener->AddParticleType(ne) ;
1837 gener->AddParticleType(ks) ;
1838 gener->AddParticleType(kl) ;
1839 gener->AddParticleType(kp) ;
1840 gener->AddParticleType(km) ;
1841
1842 // Random Ev.Plane
1843
1844 TF1 *rpa = new TF1("gevsimPsiRndm","1", 0, 360);
1845
1846 gener->SetPtRange(0., 9.) ; // Used for bin size in numerical integration
1847 gener->SetPhiRange(0, 360);
1848
1849 gener->SetOrigin(0, 0, 0); // vertex position
1850 gener->SetSigma(0, 0, 5.3); // Sigma in (X,Y,Z) (cm) on IP position
1851 gener->SetCutVertexZ(1.); // Truncate at 1 sigma
1852 gener->SetVertexSmear(kPerEvent);
1853 gener->SetTrackingFlag(1);
  gener->Init();
1855\end{lstlisting}
1856
1857\textbf{Generator for e$^+$e$^-$ pairs in Pb--Pb collisions}
1858
1859In addition to strong interactions of heavy ions in central and
1860peripheral collisions, ultra-peripheral collisions of ions give
1861rise to coherent, mainly electromagnetic, interactions among which
1862the dominant process is is the (multiple) e$^+$e$^-$-pair
1863production \cite{MC:AlscherHT97}
1864\begin{equation}
1865 AA \to AA + n({\rm e}^+{\rm e}^-), \label{nee}
1866\end{equation}
1867where $n$ is the pair multiplicity. Most electron--positron pairs
1868are produced into the very forward direction escaping the
1869experiment. However, for Pb--Pb collisions at the LHC the
cross-section of this process, about $230~{\rm kb}$, is
1871enormous. A sizable fraction of pairs produced with large-momentum
1872transfer can contribute to the hit rate in the forward detectors
1873increasing the occupancy or trigger rate. In order to study this
1874effect an event generator for e$^+$e$^-$-pair production has
1875been implemented in the AliRoot framework \cite{MC:Sadovsky}. The
class \texttt{TEpEmGen} is a realization of the \texttt{TGenerator}
1877interface for external generators and wraps the FORTRAN code used
1878to calculate the differential cross-section. \texttt{AliGenEpEmv1}
1879derives from \texttt{AliGenerator} and uses the external generator to
1880put the pairs on the AliRoot particle stack.
1881
1882
1883\subsubsection{Combination of generators: AliGenCocktail}
1884
1885\begin{figure}[ht]
1886 \centering
1887 \includegraphics[width=10cm]{picts/cocktail}
1888 \caption{The \texttt{AliGenCocktail} generator is a realization of {\tt
1889 AliGenerator} which does not generate particles itself but
1890 delegates this task to a list of objects of type {\tt
1891 AliGenerator} that can be connected as entries ({\tt
1892 AliGenCocktailEntry}) at run time. In this way different physics
1893 channels can be combined in one event.} \label{MC:cocktail}
1894\end{figure}
1895
1896Here is an example of cocktail, used for studies in the TRD detector:
1897
1898\begin{lstlisting}[language=C++]
1899 // The cocktail generator
1900 AliGenCocktail *gener = new AliGenCocktail();
1901
1902 // Phi meson (10 particles)
1903 AliGenParam *phi =
1904 new AliGenParam(10,new AliGenMUONlib(),AliGenMUONlib::kPhi,"Vogt PbPb");
1905 phi->SetPtRange(0, 100);
1906 phi->SetYRange(-1., +1.);
1907 phi->SetForceDecay(kDiElectron);
1908
1909 // Omega meson (10 particles)
1910 AliGenParam *omega =
1911 new AliGenParam(10,new AliGenMUONlib(),AliGenMUONlib::kOmega,"Vogt PbPb");
1912 omega->SetPtRange(0, 100);
1913 omega->SetYRange(-1., +1.);
1914 omega->SetForceDecay(kDiElectron);
1915
1916 // J/psi
1917 AliGenParam *jpsi = new AliGenParam(10,new AliGenMUONlib(),
1918 AliGenMUONlib::kJpsiFamily,"Vogt PbPb");
1919 jpsi->SetPtRange(0, 100);
1920 jpsi->SetYRange(-1., +1.);
1921 jpsi->SetForceDecay(kDiElectron);
1922
1923 // Upsilon family
1924 AliGenParam *ups = new AliGenParam(10,new AliGenMUONlib(),
1925 AliGenMUONlib::kUpsilonFamily,"Vogt PbPb");
1926 ups->SetPtRange(0, 100);
1927 ups->SetYRange(-1., +1.);
1928 ups->SetForceDecay(kDiElectron);
1929
1930 // Open charm particles
1931 AliGenParam *charm = new AliGenParam(10,new AliGenMUONlib(),
1932 AliGenMUONlib::kCharm,"central");
1933 charm->SetPtRange(0, 100);
1934 charm->SetYRange(-1.5, +1.5);
1935 charm->SetForceDecay(kSemiElectronic);
1936
1937 // Beauty particles: semi-electronic decays
1938 AliGenParam *beauty = new AliGenParam(10,new AliGenMUONlib(),
1939 AliGenMUONlib::kBeauty,"central");
1940 beauty->SetPtRange(0, 100);
1941 beauty->SetYRange(-1.5, +1.5);
1942 beauty->SetForceDecay(kSemiElectronic);
1943
1944 // Beauty particles to J/psi ee
1945 AliGenParam *beautyJ = new AliGenParam(10, new AliGenMUONlib(),
1946 AliGenMUONlib::kBeauty,"central");
1947 beautyJ->SetPtRange(0, 100);
1948 beautyJ->SetYRange(-1.5, +1.5);
1949 beautyJ->SetForceDecay(kBJpsiDiElectron);
1950
1951 // Adding all the components of the cocktail
1952 gener->AddGenerator(phi,"Phi",1);
1953 gener->AddGenerator(omega,"Omega",1);
1954 gener->AddGenerator(jpsi,"J/psi",1);
1955 gener->AddGenerator(ups,"Upsilon",1);
1956 gener->AddGenerator(charm,"Charm",1);
1957 gener->AddGenerator(beauty,"Beauty",1);
1958 gener->AddGenerator(beautyJ,"J/Psi from Beauty",1);
1959
1960 // Settings, common for all components
1961 gener->SetOrigin(0, 0, 0); // vertex position
1962 gener->SetSigma(0, 0, 5.3); // Sigma in (X,Y,Z) (cm) on IP position
1963 gener->SetCutVertexZ(1.); // Truncate at 1 sigma
1964 gener->SetVertexSmear(kPerEvent);
1965 gener->SetTrackingFlag(1);
1966 gener->Init();
1967\end{lstlisting}
1968
1969
1970\subsection{Particle transport}
1971
1972\subsubsection{TGeo essential information}
1973
1974A detailed description of the Root geometry package is available in
1975the Root User's Guide\cite{RootUsersGuide}. Several examples can be
1976found in \$ROOTSYS/tutorials, among them assembly.C, csgdemo.C,
1977geodemo.C, nucleus.C, rootgeom.C, etc. Here we show a simple usage for
export/import of the ALICE geometry and for checking for overlaps and
1979extrusions:
1980
1981\begin{lstlisting}[language=C++]
1982 aliroot
1983 root [0] gAlice->Init()
1984 root [1] gGeoManager->Export("geometry.root")
1985 root [2] .q
1986 aliroot
1987 root [0] TGeoManager::Import("geometry.root")
1988 root [1] gGeoManager->CheckOverlaps()
1989 root [2] gGeoManager->PrintOverlaps()
1990 root [3] new TBrowser
1991 # Now you can navigate in Geometry->Illegal overlaps
1992 # and draw each overlap (double click on it)
1993\end{lstlisting}
1994
1995\subsubsection{Visualization}
1996
1997Below we show an example of VZERO visualization using the Root
1998geometry package:
1999
2000\begin{lstlisting}[language=C++]
2001 aliroot
2002 root [0] gAlice->Init()
2003 root [1] TGeoVolume *top = gGeoManager->GetMasterVolume()
2004 root [2] Int_t nd = top->GetNdaughters()
2005 root [3] for (Int_t i=0; i<nd; i++) \
2006 top->GetNode(i)->GetVolume()->InvisibleAll()
2007 root [4] TGeoVolume *v0ri = gGeoManager->GetVolume("V0RI")
2008 root [5] TGeoVolume *v0le = gGeoManager->GetVolume("V0LE")
2009 root [6] v0ri->SetVisibility(kTRUE);
2010 root [7] v0ri->VisibleDaughters(kTRUE);
2011 root [8] v0le->SetVisibility(kTRUE);
2012 root [9] v0le->VisibleDaughters(kTRUE);
2013 root [10] top->Draw();
2014
2015\end{lstlisting}
2016
2017\subsubsection{Particle decays}
2018
We use Pythia to carry out particle decays during the transport. The
2020default decay channels can be seen in the following way:
2021
2022\begin{lstlisting}[language=C++]
2023 aliroot
2024 root [0] AliPythia * py = AliPythia::Instance()
2025 root [1] py->Pylist(12); >> decay.list
2026\end{lstlisting}
2027
The file decay.list will contain the list of particle decays
2029available in Pythia. Now if we want to force the decay $\Lambda^0 \to
2030p \pi^-$, the following lines should be included in the Config.C
2031before we register the decayer:
2032
2033\begin{lstlisting}[language=C++]
2034 AliPythia * py = AliPythia::Instance();
2035 py->SetMDME(1059,1,0);
2036 py->SetMDME(1060,1,0);
2037 py->SetMDME(1061,1,0);
2038\end{lstlisting}
2039
where 1059, 1060 and 1061 are the indices of the decay channels (from
decay.list above) that we want to switch off.
2042
2043\subsubsection{Examples}
2044
2045\noindent
2046\textbf{Fast simulation}
2047
2048This example is taken from the macro
2049\$ALICE\_ROOT/FASTSIM/fastGen.C. It shows how one can create a
2050Kinematics tree which later can be used as input for the particle
2051transport. A simple selection of events with high multiplicity is
2052implemented.
2053
2054\lstinputlisting[language=C++] {scripts/fastGen.C}
2055\noindent
2056\textbf{Reading of kinematics tree as input for the particle transport}
2057
We suppose that the macro fastGen.C above has been used to generate
the corresponding set of files, galice.root and Kinematics.root, and
that they are stored in a separate subdirectory, for example kine. Then
the following code in Config.C will read this set of files and put the
particles on the stack for transport:
2063
2064\begin{lstlisting}[language=C++]
2065 AliGenExtFile *gener = new AliGenExtFile(-1);
2066
2067 gener->SetMomentumRange(0,14000);
2068 gener->SetPhiRange(0.,360.);
2069 gener->SetThetaRange(45,135);
2070 gener->SetYRange(-10,10);
2071 gener->SetOrigin(0, 0, 0); //vertex position
2072 gener->SetSigma(0, 0, 5.3); //Sigma in (X,Y,Z) (cm) on IP position
2073
2074 AliGenReaderTreeK * reader = new AliGenReaderTreeK();
2075 reader->SetFileName("../galice.root");
2076
2077 gener->SetReader(reader);
2078 gener->SetTrackingFlag(1);
2079
2080 gener->Init();
2081\end{lstlisting}
2082
2083\noindent
2084\textbf{Usage of different generators}
2085
Many examples are available in
\$ALICE\_ROOT/macros/Config\_gener.C. The corresponding part can be
extracted and placed in the relevant Config.C file.
2089
2090
2091%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
2092
2093
2094\newpage
2095%\cleardoublepage
2096\section{Reconstruction}
2097
2098% -----------------------------------------------------------------------------
2099
2100\subsection{Reconstruction Framework}
2101
This chapter
focuses on the reconstruction framework from the (detector) software
developer's point of view.
2105
2106Wherever it is not specified explicitly as different, we refer
2107to the `global ALICE coordinate system'\cite{CoordinateSystem}. It is a right-handed coordinate
2108system with
2109the $z$ axis coinciding with the beam-pipe axis and going in the direction
2110opposite to the muon arm, the $y$ axis going up, and the origin of
2111coordinates defined by the intersection point of the $z$ axis
2112and the central-membrane plane of TPC.
2113
2114Here is a reminder of the following terms which are used in the
2115description of the reconstruction framework (see also section \ref{AliRootFramework}):
2116\begin{itemize}
2117\item {\it Digit}: This is a digitized signal (ADC count) obtained by
2118 a sensitive pad of a detector at a certain time.
2119\item {\it Cluster}: This is a set of adjacent (in space and/or in time)
2120 digits that were presumably generated by the same particle crossing the
2121 sensitive element of a detector.
2122\item Reconstructed {\it space point}: This is the estimation of the
2123 position where a particle crossed the sensitive element of a detector
2124 (often, this is done by calculating the center of gravity of the
2125 `cluster').
2126\item Reconstructed {\it track}: This is a set of five parameters (such as the
2127 curvature and the angles with respect to the coordinate axes) of the particle's
2128 trajectory together with the corresponding covariance matrix estimated at a given
2129 point in space.
2130
2131\end{itemize}
2132
The input to the reconstruction framework is digits in ROOT tree
format or in raw-data format. First a local reconstruction of clusters is
performed in each detector. Then vertexes and tracks are reconstructed
and the particle identification is carried out. The output of the reconstruction
is the Event Summary Data (ESD). The \class{AliReconstruction} class provides
a simple user interface to the reconstruction framework, which is
explained in the source code.
2140
2141\begin{figure}[ht]
2142 \centering
2143 \includegraphics[width=10cm]{picts/ReconstructionFramework}
2144 \caption{Reconstruction framework.} \label{MC:Reconstruction}
2145\end{figure}
2146
2147\textbf{Requirements and Guidelines}
2148
2149The development of the reconstruction framework has been carried on
2150according to the following requirements and guidelines:
2151\begin{itemize}
2152\item the prime goal of the reconstruction is to provide the data that
2153 is needed for a physics analysis;
2154\item the reconstruction should be aimed for high efficiency, purity and resolution.
2155\item the user should have an easy to use interface to extract the
2156 required information from the ESD;
2157\item the reconstruction code should be efficient but also maintainable;
2158\item the reconstruction should be as flexible as possible.
2159 It should be possible to do the reconstruction in one detector even in
2160 the case that other detectors are not operational.
2161 To achieve such a flexibility each detector module should be able to
2162 \begin{itemize}
2163 \item find tracks starting from seeds provided by another detector
2164 (external seeding),
2165 \item find tracks without using information from other detectors
2166 (internal seeding),
2167 \item find tracks from external seeds and add tracks from internal seeds
2168 \item and propagate tracks through the detector using the already
2169 assigned clusters in inward and outward direction.
2170 \end{itemize}
2171\item where it is appropriate, common (base) classes should be used in
2172 the different reconstruction modules;
2173\item the interdependencies between the reconstruction modules should
2174 be minimized.
2175 If possible the exchange of information between detectors should be
2176 done via a common track class.
2177\item the chain of reconstruction program(s) should be callable and
2178 steerable in an easy way;
2179\item there should be no assumptions on the structure or names of files
2180 or on the number or order of events;
2181\item each class, data member and method should have a correct,
2182 precise and helpful html documentation.
2183\end{itemize}
2184
2185
2186\noindent
2187\textbf{AliReconstructor}
2188
2189The interface from the steering class \class{AliReconstruction} to the
2190detector specific reconstruction code is defined by the base class
2191\class{AliReconstructor}. For each detector there is a derived reconstructor
class. The user can set options for each reconstructor in the form of a
string parameter, which is accessible inside the reconstructor via the
method \method{GetOption}.
2195
2196The detector specific reconstructors are created via
2197plugins. Therefore they must have a default constructor. If no plugin
2198handler is defined by the user (in .rootrc), it is assumed that the
2199name of the reconstructor for detector DET is AliDETReconstructor and
2200that it is located in the library libDETrec.so (or libDET.so).
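
From the user's side, a minimal steering macro could look like the
sketch below. The option string passed for the TPC is only a
placeholder, and the exact set of available setters may depend on the
AliRoot version:

\begin{lstlisting}[language=C++, title={Minimal reconstruction macro (sketch)}]
void rec() {
  AliReconstruction rec;
  // Optional detector-specific option string, retrieved inside the
  // corresponding reconstructor via GetOption (placeholder value)
  rec.SetOption("TPC", "SomeOption");
  TStopwatch timer;
  timer.Start();
  rec.Run();
  timer.Stop();
  timer.Print();
}
\end{lstlisting}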
2201
2202\noindent
2203\textbf{Input Data}
2204
If the input data is provided in the form of ROOT trees, either the
loaders or directly the trees are used to access the digits. In the case
of raw-data input the digits are accessed via a raw reader.
2208
2209If a galice.root file exists, the run loader will be retrieved from
2210it. Otherwise the run loader and the headers will be created from the
raw data. The reconstruction cannot work if there is no galice.root file
2212and no raw data input.
2213
2214\noindent
2215\textbf{Output Data}
2216
2217The clusters (rec. points) are considered as intermediate output and
2218are stored in root trees handled by the loaders. The final output of
2219the reconstruction is a tree with objects of type \class{AliESD} stored in the
2220file AliESDs.root. This Event Summary Data (ESD) contains lists of
2221reconstructed tracks/particles and global event properties. The detailed
2222description of the ESD can be found in section \ref{ESD}.
2223
2224\noindent
2225\textbf{Local Reconstruction (Clusterization)}
2226
2227The first step of the reconstruction is the so called ``local
2228reconstruction''. It is executed for each detector separately and
2229without exchanging information with other detectors. Usually the
2230clusterization is done in this step.
2231
2232The local reconstruction is invoked via the method \method{Reconstruct} of the
2233reconstructor object. Each detector reconstructor runs the local
2234reconstruction for all events. The local reconstruction method is
only called if the method \method{HasLocalReconstruction} of the reconstructor
2236returns kTRUE.
2237
2238Instead of running the local reconstruction directly on raw data, it
2239is possible to first convert the raw data digits into a digits tree
2240and then to call the \method{Reconstruct} method with a tree as input
parameter. This conversion is done by the method \method{ConvertDigits}. The
2242reconstructor has to announce that it can convert the raw data digits
2243by returning kTRUE in the method \method{HasDigitConversion}.
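
Putting these hooks together, a detector reconstructor skeleton could
look as follows. The detector name MYDET is hypothetical and the method
signatures are indicative only, since they may differ between AliRoot
versions; the sketch merely lists the hooks discussed in this section.

\begin{lstlisting}[language=C++, title={Skeleton of a detector reconstructor (sketch)}]
class AliMYDETReconstructor : public AliReconstructor {
public:
  AliMYDETReconstructor() : AliReconstructor() {}  // default constructor, required by the plugin mechanism

  // Local reconstruction (clusterization) from a digits tree
  virtual void   Reconstruct(TTree* digitsTree, TTree* clustersTree) const;
  virtual Bool_t HasLocalReconstruction() const { return kTRUE; }

  // The detector can convert raw data into a digits tree itself
  virtual Bool_t HasDigitConversion() const { return kTRUE; }
  virtual void   ConvertDigits(AliRawReader* rawReader, TTree* digitsTree) const;

  // Objects used in the subsequent reconstruction steps
  virtual AliVertexer* CreateVertexer() const { return 0; }  // only the ITS provides one
  virtual AliTracker*  CreateTracker() const;
};
\end{lstlisting}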
2244
2245\noindent
2246\textbf{Vertexing}
2247
2248The current reconstruction of the primary-vertex
2249position in ALICE is done using the information provided by the
2250silicon pixel detectors, which constitute the two innermost layers of the
2251ITS.
2252
2253The algorithm starts with looking at the
2254distribution of the $z$ coordinates of the reconstructed space points
2255in the first pixel layers.
2256At a vertex $z$ coordinate $z_{\rm true} = 0$ the distribution is
2257symmetric and
2258its centroid ($z_{\rm cen}$) is very close to the nominal
2259vertex position. When the primary vertex is moved along the $z$ axis, an
2260increasing fraction
2261of hits will be lost and the centroid of the distribution no longer gives
2262the primary
2263vertex position. However, for primary vertex locations not too far from
2264$z_{\rm true} = 0$
2265(up to about 12~cm), the centroid of the distribution is still correlated to
2266the true vertex position.
2267The saturation effect at large $z_{\rm true}$ values of the vertex position
2268($z_{\rm true} = $12--15~cm)
2269is, however, not critical, since this procedure is only meant to find a rough
2270vertex position, in order to introduce some cut along $z$.
2271
2272To find the final vertex position,
2273the correlation between the points $z_1$, $z_2$ in the two layers
2274was considered. More details and performance studies are available in
2275\cite{PPRVII}.
2276
2277The primary vertex is reconstructed by a vertexer object derived from
2278\class{AliVertexer}. After the local reconstruction was done for all detectors
2279the vertexer method \method{FindVertexForCurrentEvent} is called for each
2280event. It returns a pointer to a vertex object of type \class{AliESDVertex}.
2281
2282The vertexer object is created by the method \method{CreateVertexer} of the
2283reconstructor. So far only the ITS is used to determine the primary
2284vertex (\class{AliITSVertexerZ} class).
2285
2286The precision of the primary vertex reconstruction in the bending plane
2287required for the reconstruction of D and B mesons in pp events
2288can be achieved only after the tracking is done. The method is
2289implemented in \class{AliITSVertexerTracks}. It is called as a second
2290estimation of the primary vertex. The details of the algorithm can be
2291found in Appendix \ref{VertexerTracks}.
2292
2293\noindent
\textbf{Combined Track Reconstruction}

2295The combined track reconstruction tries to accumulate the information from
2296different detectors in order to optimize the track reconstruction performance.
2297The result of this is stored in the combined track objects.
2298The \class{AliESDTrack} class also
2299provides the possibility to exchange information between detectors
2300without introducing dependencies between the reconstruction modules.
2301This is achieved by using just integer indexes pointing to the
2302specific track objects, which on the other hand makes it possible to
2303retrieve the full information if needed.
2304The list of combined tracks can be kept in memory and passed from one
2305reconstruction module to another.
2306The storage of the combined tracks should be done in the standard way.
2307
2308The classes responsible for the reconstruction of tracks are derived
2309from \class{AliTracker}. They are created by the method
2310\method{CreateTracker} of the
2311reconstructors. The reconstructed position of the primary vertex is
2312made available to them via the method \method{SetVertex}. Before the track
2313reconstruction in a detector starts the clusters are loaded from the
2314clusters tree by the method \method{LoadClusters}. After the track reconstruction the
2315clusters are unloaded by the method \method{UnloadClusters}.
2316
2317The track reconstruction (in the barrel part) is done in three passes. The first
2318pass consists of a track finding and fitting in inward direction in
2319TPC and then in ITS. The virtual method \method{Clusters2Tracks} (of
2320class \class{AliTracker}) is the
2321interface to this pass. The method for the next pass is
2322\method{PropagateBack}. It does the track reconstruction in outward direction and is
2323invoked for all detectors starting with the ITS. The last pass is the
2324track refit in inward direction in order to get the track parameters
2325at the vertex. The corresponding method \method{RefitInward} is called for TRD,
2326TPC and ITS. All three track reconstruction methods have an AliESD object as
2327argument which is used to exchange track information between detectors
2328without introducing dependences between the code of the detector
2329trackers.
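
In terms of code, a detector tracker therefore has to provide the
following interface (sketch for a hypothetical detector MYDET; the
signatures are indicative and simply follow the method names introduced
above):

\begin{lstlisting}[language=C++, title={Interface of a detector tracker (sketch)}]
class AliMYDETtracker : public AliTracker {
public:
  // First pass: track finding and fitting in inward direction
  virtual Int_t Clusters2Tracks(AliESD* event);
  // Second pass: propagation in outward direction
  virtual Int_t PropagateBack(AliESD* event);
  // Third pass: refit in inward direction to get the parameters at the vertex
  virtual Int_t RefitInward(AliESD* event);
  // Cluster handling before and after the track reconstruction in this detector
  virtual Int_t LoadClusters(TTree* clustersTree);
  virtual void  UnloadClusters();
};
\end{lstlisting}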
2330
2331Depending on the way the information is used, the tracking methods can be
2332divided into two large groups: global methods and local methods. Each
2333group has advantages and disadvantages.
2334
2335With the global methods, all the track measurements are treated
2336simultaneously and the decision to include or exclude a measurement is
2337taken when all the information about the track is known.
2338Typical algorithms belonging to this class are combinatorial methods,
2339Hough transform, templates, conformal mappings. The advantages are
2340the stability with respect to noise and mismeasurements and the possibility
2341to operate directly on the raw data. On the other hand, these methods
2342require a precise global track model. Such a track model can sometimes be
2343unknown or does not even exist because of stochastic processes (energy
2344losses, multiple scattering), non-uniformity of the magnetic field etc.
2345In ALICE, global tracking methods are being extensively used in the
2346High-Level Trigger (HLT) software. There, we
2347are mostly interested in the reconstruction of the high-momentum tracks
2348only, the required precision is not crucial, but the speed of the
2349calculations is of great importance.
2350
2351
2352Local methods do not need the knowledge of the global track model.
2353The track parameters are always estimated `locally' at a given point
2354in space. The decision to accept or to reject a measurement is made using
2355either the local information or the information coming from the previous
2356`history' of this track. With these methods, all the local track
2357peculiarities (stochastic physics processes, magnetic fields, detector
2358geometry) can be naturally accounted for. Unfortunately, the local methods
2359rely on sophisticated space point reconstruction algorithms (including
2360unfolding of overlapped clusters). They are sensitive to noise, wrong or
2361displaced measurements and the precision of space point error parameterization.
2362The most advanced kind of local track-finding methods is Kalman
2363filtering which was introduced by P. Billoir in 1983~\cite{MC:billoir}.
2364
2365
2366
2367When applied to the track reconstruction problem, the Kalman-filter
2368approach shows many attractive properties:
2369\begin{itemize}
2370
2371\item It is a method for simultaneous track recognition and
2372 fitting.
2373
2374\item There is a possibility to reject incorrect space points `on
2375 the fly', during a single tracking pass. These incorrect points can
2376 appear as a consequence of the imperfection of the cluster finder or
2377 they may be due to noise or they may be points from other tracks
2378 accidentally captured in the list of points to be associated with
2379 the track under consideration. In the other tracking methods one
2380 usually needs an additional fitting pass to get rid of incorrectly
2381 assigned points.
2382
2383\item In the case of substantial multiple scattering, track
2384 measurements are correlated and therefore large matrices (of the
2385 size of the number of measured points) need to be inverted during
2386 a global fit. In the Kalman-filter procedure we only have to
2387 manipulate up to $5 \times 5$ matrices (although as many times as
2388 we have measured space points), which is much faster.
2389
2390\item One can handle multiple scattering and
2391 energy losses in a simpler way than in the case of global
2392 methods. At each step the material budget can be calculated and the
2393 mean correction calculated accordingly.
2394
2395\item It is a natural way to find the extrapolation
2396 of a track from one detector to another (for example from the TPC
2397 to the ITS or to the TRD).
2398\end{itemize}
2399
2400
2401In ALICE we require good track-finding efficiency and reconstruction
precision for tracks down to \mbox{$\pt = 100$~\mmom}. Some of the ALICE
tracking detectors (ITS, TRD) have a significant material budget.
Under such conditions one cannot neglect the energy losses or the multiple
2405scattering in the reconstruction. There are also rather
2406big dead zones between the tracking detectors which complicates finding
2407the continuation of the same track. For all these reasons,
2408it is the Kalman-filtering approach that has been our choice for the
2409offline reconstruction since 1994.
2410
2411% \subsubsection{General tracking strategy}
2412
2413The reconstruction software for the ALICE central tracking detectors (the
2414ITS, TPC and the TRD) shares a common convention on the coordinate
2415system used. All the clusters and tracks are always expressed in some local
2416coordinate system related to a given sub-detector (TPC sector, ITS module
2417etc). This local coordinate system is defined as the following:
2418\begin{itemize}
2419\item It is a right handed-Cartesian coordinate system;
2420\item its origin and the $z$ axis coincide with those of the global
2421 ALICE coordinate system;
2422\item the $x$ axis is perpendicular to the sub-detector's `sensitive plane'
2423 (TPC pad row, ITS ladder etc).
2424\end{itemize}
2425Such a choice reflects the symmetry of the ALICE set-up
2426and therefore simplifies the reconstruction equations.
2427It also enables the fastest possible transformations from
2428a local coordinate system to the global one and back again,
2429since these transformations become simple single rotations around the
2430$z$-axis.
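
Concretely, for a local frame rotated by an angle $\alpha$ around the
$z$ axis the local-to-global transformation is a single rotation. The
snippet below spells it out; it is an illustrative stand-alone helper,
not the actual AliRoot implementation:

\begin{lstlisting}[language=C++, title={Local-to-global transformation (illustrative snippet)}]
// alpha: rotation angle of the local frame (e.g. of a TPC sector)
// around the global z axis; the z coordinate is left unchanged
void LocalToGlobal(Double_t alpha, Double_t xLoc, Double_t yLoc,
                   Double_t& xGlob, Double_t& yGlob)
{
  Double_t ca = TMath::Cos(alpha), sa = TMath::Sin(alpha);
  xGlob = xLoc * ca - yLoc * sa;
  yGlob = xLoc * sa + yLoc * ca;
}
\end{lstlisting}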
2431
2432
2433The reconstruction begins with cluster finding in all of the ALICE central
2434detectors (ITS, TPC, TRD, TOF, HMPID and PHOS). Using the clusters
2435reconstructed at the two pixel layers of the ITS, the position of the
2436primary vertex is estimated and the track finding starts. As
2437described later, cluster-finding as well as the track-finding procedures
2438performed in the detectors have some different detector-specific features.
Moreover, within a given detector, on account of the high occupancy and the large
number of overlapping clusters, the cluster finding and the track finding are
2441not completely independent: the number and positions of the clusters are
2442completely determined only at the track-finding step.
2443
2444The general tracking strategy is the following. We start from our
2445best tracker device, i.e. the TPC, and from the outer radius where the
2446track density is minimal. First, the track candidates (`seeds') are
2447found. Because of the small number of clusters assigned to a seed, the
2448precision of its parameters is not enough to safely extrapolate it outwards
2449to the other detectors. Instead, the tracking stays within the TPC and
2450proceeds towards the smaller TPC radii. Whenever
2451possible, new clusters are associated with a track candidate
2452at each step of the Kalman filter if they are within a given distance
2453from the track prolongation and the track parameters are more and
more refined. When all of the seeds are extrapolated to the inner limit of
the TPC, the tracking proceeds into the ITS. The ITS tracker tries to prolong
2456the TPC tracks as close as possible to the primary vertex.
2457On the way to the primary vertex, the tracks are assigned additional,
2458precisely reconstructed ITS clusters, which also improves
2459the estimation of the track parameters.
2460
2461After all the track candidates from the TPC are assigned their clusters
2462in the ITS, a special ITS stand-alone tracking procedure is applied to
2463the rest of the ITS clusters. This procedure tries to recover the
2464tracks that were not found in the TPC because of the \pt cut-off, dead zones
2465between the TPC sectors, or decays.
2466
2467At this point the tracking is restarted from the vertex back to the
2468outer layer of the ITS and then continued towards the outer wall of the
TPC. For the tracks that were labeled by the ITS tracker as potentially
primary, several particle-mass-dependent time-of-flight hypotheses
2471are calculated. These hypotheses are then used for the particle
2472identification (PID) with the TOF detector. Once the outer
2473radius of the TPC is reached, the precision of the estimated track
2474parameters is
2475sufficient to extrapolate the tracks to the TRD, TOF, HMPID and PHOS
2476detectors. Tracking in the TRD is done in a similar way to that
2477in the TPC. Tracks are followed till the outer wall of the TRD and the
2478assigned clusters improve the momentum resolution further.
2479% Next, after the
2480% matching with the TOF, HMPID and PHOS is done, and the tracks aquire
2481% additional PID information.
2482Next, the tracks are extrapolated to the TOF, HMPID and PHOS, where they
2483acquire the PID information.
2484Finally, all the tracks are refitted with the Kalman filter backwards to
2485the primary vertex (or to the innermost possible radius, in the case of
2486the secondary tracks). This gives the most precise information about
2487the track parameters at the point where the track appeared.
2488
2489The tracks that passed the final refit towards the primary vertex are used
2490for the secondary vertex (V$^0$, cascade, kink) reconstruction. There is also
2491an option to reconstruct the secondary vertexes `on the fly' during the
2492tracking itself. The potential advantage of such a possibility is that
2493the tracks coming from a secondary vertex candidate are not extrapolated
2494beyond the vertex, thus minimizing the risk of picking up a wrong track
2495prolongation. This option is currently under investigation.
2496
2497The reconstructed tracks (together with the PID information), kink, V$^0$
2498and cascade particle decays are then stored in the Event Summary Data (ESD).
2499
2500More details about the reconstruction algorithms can be found in
2501Chapter 5 of the ALICE Physics Performance Report\cite{PPRVII}.
2502
2503\noindent
2504\textbf{Filling of ESD}
2505
After the tracks have been reconstructed and stored in the \class{AliESD}
object, further information is added to the ESD. For each detector the method
\method{FillESD} of the reconstructor is called. Inside this method, for
example, V0s are reconstructed or particles are identified (PID). For the PID
a Bayesian approach is used (see Appendix \ref{BayesianPID}). The constants
and some functions that are used for the PID are defined in the class
\class{AliPID}.
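
As a minimal sketch of such a Bayesian combination (the number of species,
the response weights \texttt{w[]} and the prior concentrations \texttt{c[]}
below are illustrative placeholders, not values taken from \class{AliPID}),
the conditional probability of a species is the normalized product of the
detector response and the prior concentration:

\begin{lstlisting}[language=C++]
  // Minimal sketch of the Bayesian PID combination; the species list,
  // the response weights w[] and the priors c[] are illustrative only.
  const Int_t kNSpecies = 5;                    // e, mu, pi, K, p
  Double_t Posterior(Int_t i, const Double_t w[], const Double_t c[])
  {
    Double_t sum = 0.;
    for (Int_t k = 0; k < kNSpecies; k++) sum += w[k]*c[k];
    return (sum > 0.) ? w[i]*c[i]/sum : 0.;     // P(species i | signal)
  }
\end{lstlisting}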
2513
2514
2515\textbf{Monitoring of Performance}
2516
For monitoring the track-reconstruction performance, \class{AliTrackReference}
objects are used: they store the parameters of the simulated particles at
selected locations of interest. During the reconstruction, corresponding
objects are created at the same locations, so that the reconstructed tracks
can be easily compared with the simulated particles.
This makes it possible to study and monitor the performance of the track
reconstruction in detail.
2525The creation of the objects used for the comparison should not
2526interfere with the reconstruction algorithm and can be switched on or
2527off.
2528
Several ``comparison'' macros permit monitoring of the efficiency and the
2530resolution of the tracking. Here is a typical usage (the simulation
2531and the reconstruction have been done in advance):
2532
2533\begin{lstlisting}[language=C++]
2534 aliroot
2535 root [0] gSystem->SetIncludePath("-I$ROOTSYS/include \
2536 -I$ALICE_ROOT/include \
2537 -I$ALICE_ROOT/TPC \
2538 -I$ALICE_ROOT/ITS \
2539 -I$ALICE_ROOT/TOF")
2540 root [1] .L $ALICE_ROOT/TPC/AliTPCComparison.C++
2541 root [2] .L $ALICE_ROOT/ITS/AliITSComparisonV2.C++
2542 root [3] .L $ALICE_ROOT/TOF/AliTOFComparison.C++
2543 root [4] AliTPCComparison()
2544 root [5] AliITSComparisonV2()
2545 root [6] AliTOFComparison()
2546\end{lstlisting}
2547
2548Another macro can be used to provide a preliminary estimate of the
2549combined acceptance: \texttt{STEER/CheckESD.C}.
2550
2551\textbf{Classes}
2552
2553The following classes are used in the reconstruction:
2554\begin{itemize}
2555\item \class{AliTrackReference}:
2556 This class is used to store the position and the momentum of a
2557 simulated particle at given locations of interest (e.g. when the
 particle enters or exits a detector, or when it decays). It is used
 mainly for debugging and tuning of the tracking.
2560
2561\item \class{AliExternalTrackParams}:
 This class describes the status of a track at a given point.
 It knows the track parameters and their covariance matrix.
 This parameterization is used to exchange tracks between the detectors.
 A set of functions returning the position and the momentum of tracks
 in the global coordinate system, as well as the track impact parameters,
 is implemented. The track can be propagated to a given radius with the
 methods \method{PropagateTo} and \method{Propagate}.
2569
2570\item \class{AliKalmanTrack} and derived classes:
2571 These classes are used to find and fit tracks with the Kalman approach.
2572 The \class{AliKalmanTrack} defines the interfaces and implements some
2573 common functionality. The derived classes know about the clusters
2574 assigned to the track. They also update the information in an
2575 \class{AliESDtrack}.
 The current status of the track during the track reconstruction can be
 represented by an \class{AliExternalTrackParams} object.
 The history of the track during the track reconstruction can be stored
 in a list of \class{AliExternalTrackParams} objects.
2580 The \class{AliKalmanTrack} defines the methods:
2581 \begin{itemize}
2582 \item \method{Double\_t GetDCA(...)} Returns the distance
2583 of closest approach between this track and the track passed as the
2584 argument.
 \item \method{Double\_t MeanMaterialBudget(...)} Calculates the mean
2586 material budget and material properties between two points.
2587 \end{itemize}
2588
2589\item \class{AliTracker} and subclasses:
2590 The \class{AliTracker} is the base class for all the trackers in the
2591 different detectors. It fixes the interface needed to find and
2592 propagate tracks. The actual implementation is done in the derived classes.
2593
\item \class{AliESDtrack}:
2595 This class combines the information about a track from different detectors.
2596 It knows the current status of the track
 (\class{AliExternalTrackParams}) and it has (non-persistent) pointers
2598 to the individual \class{AliKalmanTrack} objects from each detector
2599 which contributed to the track.
 It knows about some detector-specific quantities like the number or
 bit pattern of assigned clusters, dEdx, $\chi^2$, etc.,
 and it can calculate a conditional probability for a given mixture of
 particle species following the Bayesian approach.
2604 It defines a track label pointing to the corresponding simulated
2605 particle in case of \MC.
2606 The combined track objects are the basis for a physics analysis.
2607
2608\end{itemize}
2609
2610\noindent
2611\textbf{Example}
2612
The example below shows reconstruction with a non-uniform magnetic field
(the simulation is also done with a non-uniform magnetic field by adding
the following line in Config.C: field$\to$SetL3ConstField(1)). Only
the barrel detectors are reconstructed, a specific TOF reconstruction
has been requested, and the raw data are used as input:
2618
2619\begin{lstlisting}[language=C++]
2620 void rec() {
2621 AliReconstruction reco;
2622
2623 reco.SetRunReconstruction("ITS TPC TRD TOF");
2624 reco.SetNonuniformFieldTracking();
2625 reco.SetInput("raw.root");
2626
2627 reco.Run();
2628 }
2629\end{lstlisting}
2630
2631% -----------------------------------------------------------------------------
2632
2633\subsection{Event summary data}\label{ESD}
2634
2635The classes which are needed to process and analyze the ESD are packed
2636together in a standalone library (libESD.so) which can be used
2637separately from the \aliroot framework. Inside each
ESD object the data are stored in polymorphic containers filled with
2639reconstructed tracks, neutral particles, etc. The main class is
2640\class{AliESD}, which contains all the information needed during the
2641physics analysis:
2642
2643\begin{itemize}
2644\item fields to identify the event such as event number, run number,
2645 time stamp, type of event, trigger type (mask), trigger cluster (mask),
2646 version of reconstruction, etc.;
\item reconstructed ZDC energies and number of participants;
ababa197 2648\item primary vertex information: vertex z position estimated by the T0,
c4593ee3 2649 primary vertex estimated by the SPD, primary vertex estimated using
2650 ESD tracks;
2651\item SPD tracklet multiplicity;
ababa197 2652\item interaction time estimated by the T0 together with additional
2653 time and amplitude information from T0;
c4593ee3 2654\item array of ESD tracks;
2655\item arrays of HLT tracks both from the conformal mapping and from
2656 the Hough transform reconstruction;
2657\item array of MUON tracks;
2658\item array of PMD tracks;
2659\item array of TRD ESD tracks (triggered);
2660\item arrays of reconstructed $V^0$ vertexes, cascade decays and
2661 kinks;
2662\item array of calorimeter clusters for PHOS/EMCAL;
2663\item indexes of the information from PHOS and EMCAL detectors in the
2664 array above.
2665\end{itemize}
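
As an illustration, the sketch below opens an ESD file and loops over the
reconstructed tracks of the first event; the tree and branch names
(\texttt{esdTree}, \texttt{ESD}) and the accessors follow common \aliroot
usage of this period and may differ in other versions:

\begin{lstlisting}[language=C++]
  // Minimal sketch: read AliESDs.root and print the momentum of each
  // track of the first event (libESD must be available in the session).
  void ReadESD(const char* fname = "AliESDs.root")
  {
    TFile* file = TFile::Open(fname);
    TTree* tree = (TTree*)file->Get("esdTree");
    AliESD* esd = 0;
    tree->SetBranchAddress("ESD", &esd);
    tree->GetEntry(0);                              // first event
    for (Int_t i = 0; i < esd->GetNumberOfTracks(); i++) {
      AliESDtrack* track = esd->GetTrack(i);
      printf("track %d: p = %f GeV/c\n", i, track->GetP());
    }
  }
\end{lstlisting}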
2666
2667%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
2668
2669\newpage
2670%\cleardoublepage
2671\section{Analysis}
2672
2673% -----------------------------------------------------------------------------
2674
2675\subsection{Introduction}
2676The analysis of experimental data is the final stage of event
2677processing and it is usually repeated many times. Analysis is a very diverse
2678activity, where the goals of each
2679particular analysis pass may differ significantly.
2680
2681The ALICE detector \cite{PPR} is optimized for the
2682reconstruction and analysis of heavy-ion collisions.
2683In addition, ALICE has a broad physics programme devoted to
2684\pp and \pA interactions.
2685
2686
2687The data analysis is coordinated by the Physics Board via the Physics
Working Groups (PWGs). At present the following PWGs have started
2689their activity:
2690
2691\begin{itemize}
2692\item PWG0 \textbf{first physics};
2693\item PWG1 \textbf{detector performance};
2694\item PWG2 \textbf{global event characteristics:} particle multiplicity,
2695 centrality, energy density, nuclear stopping; \textbf{soft physics:} chemical composition (particle and resonance
2696 production, particle ratios and spectra, strangeness enhancement),
2697 reaction dynamics (transverse and elliptic flow, HBT correlations,
2698 event-by-event dynamical fluctuations);
\item PWG3 \textbf{heavy flavors:} quarkonia, open charm and beauty production;
\item PWG4 \textbf{hard probes:} jets, direct photons.
2701\end{itemize}
2702
Each PWG has a corresponding module in AliRoot (PWG0 -- PWG4). The code
2704is managed by CVS administrators.
2705
2706The \pp and \pA programme will provide, on the one hand, reference points
2707for comparison with heavy ions. On the other hand, ALICE will also
2708pursue genuine and detailed \pp studies. Some
2709quantities, in particular the global characteristics of interactions, will
2710be measured during the first days of running exploiting the low-momentum
2711measurement and particle identification capabilities of ALICE.
2712
The ALICE computing framework is described in detail in the Computing
Technical Design Report \cite{CompTDR}. This section is based on
2715Chapter 6 of the document.
2716
2717\noindent
2718\paragraph{The analysis activity.}
2719\noindent
2720We distinguish two main types of analysis: scheduled analysis and
2721chaotic analysis. They differ in their data access pattern, in the
2722storage and registration of the results, and in the frequency of
changes in the analysis code (more details are available below).
2724
2725In the ALICE Computing Model the analysis starts from the Event Summary
2726Data (ESD). These are produced during the reconstruction step and contain
2727all the information for the analysis. The size of the ESD is
about one order of magnitude smaller than that of the corresponding raw
data. The analysis tasks produce Analysis
2730Object Data (AOD) specific to a given set of physics objectives.
2731Further passes for the specific analysis activity can be performed on
the AODs, until the selection parameters or algorithms are changed.
2733
2734A typical data analysis task usually requires processing of
2735selected sets of events. The selection is based on the event
2736topology and characteristics, and is done by querying the tag
2737database. The tags represent physics quantities which characterize
2738each run and event, and permit fast selection. They are created
after the reconstruction and also contain the unique
2740identifier of the ESD file. A typical query, when translated into
2741natural language, could look like ``Give me
2742all the events with impact parameter in $<$range$>$
2743containing jet candidates with energy larger than $<$threshold$>$''.
2744This results in a list of events and file identifiers to be used in the
2745consecutive event loop.
2746
2747
2748The next step of a typical analysis consists of a loop over all the events
2749in the list and calculation of the physics quantities of
2750interest. Usually, for each event, there is a set of embedded loops on the
2751reconstructed entities such as tracks, ${\rm V^0}$ candidates, neutral
2752clusters, etc., the main goal of which is to select the signal
2753candidates. Inside each loop a number of criteria (cuts) are applied to
2754reject the background combinations and to select the signal ones. The
2755cuts can be based on geometrical quantities such as impact parameters
2756of the tracks with
2757respect to the primary vertex, distance between the cluster and the
2758closest track, distance of closest approach between the tracks,
2759angle between the momentum vector of the particle combination
2760and the line connecting the production and decay vertexes. They can
2761also be based on
2762kinematics quantities such as momentum ratios, minimal and maximal
2763transverse momentum,
2764angles in the rest frame of the particle combination.
2765Particle identification criteria are also among the most common
2766selection criteria.
2767
2768The optimization of the selection criteria is one of the most
2769important parts of the analysis. The goal is to maximize the
2770signal-to-background ratio in case of search tasks, or another
2771ratio (typically ${\rm Signal/\sqrt{Signal+Background}}$) in
2772case of measurement of a given property. Usually, this optimization is
2773performed using simulated events where the information from the
2774particle generator is available.
2775
2776After the optimization of the selection criteria, one has to take into
2777account the combined acceptance of the detector. This is a complex,
2778analysis-specific quantity which depends on the geometrical acceptance,
2779the trigger efficiency, the decays of particles, the reconstruction
2780efficiency, the efficiency of the particle identification and of the
2781selection cuts. The components of the combined acceptance are usually
2782parameterized and their product is used to unfold the experimental
2783distributions or during the simulation of some model parameters.
2784
2785The last part of the analysis usually involves quite complex
2786mathematical treatments, and sophisticated statistical tools. Here one
2787may include the correction for systematic effects, the estimation of
2788statistical and systematic errors, etc.
2789
2790\noindent
2791\paragraph{Scheduled analysis.}
2792\noindent
2793The scheduled analysis typically uses all
2794the available data from a given period, and stores and registers the results
2795using \grid middleware. The tag database is updated accordingly. The
2796AOD files, generated during the scheduled
2797analysis, can be used by several subsequent analyses, or by a class of
2798related physics tasks.
2799The procedure of scheduled analysis is centralized and can be
2800considered as data filtering. The requirements come from the PWGs and
2801are prioritized by the Physics Board taking into
2802account the available computing and storage resources. The analysis
2803code is tested in advance and released before the beginning of the
2804data processing.
2805
2806Each PWG will require some sets of
2807AOD per event, which are specific for one or
2808a few analysis tasks. The creation of the AOD sets is managed centrally.
2809The event list of each AOD set
2810will be registered and the access to the AOD files will be granted to
2811all ALICE collaborators. AOD files will be generated
2812at different computing centers and will be stored on
2813the corresponding storage
2814elements. The processing of each file set will thus be done in a
2815distributed way on the \grid. Some of the AOD sets may be quite small
2816and would fit on a single storage element or even on one computer; in
2817this case the corresponding tools for file replication, available
2818in the ALICE \grid infrastructure, will be used.
2819
2820\noindent
2821\paragraph{Chaotic analysis.}
2822\noindent
2823The chaotic analysis is focused on a single physics task and
2824typically is based on the filtered data from the scheduled
analysis. Each physicist may also
directly access large parts of the ESD in order to search for rare
2827events or processes.
2828Usually the user develops the code using a small subsample
2829of data, and changes the algorithms and criteria frequently. The
2830analysis macros and software are tested many times on relatively
2831small data volumes, both experimental and \MC.
2832The output is often only a set of histograms.
2833Such a tuning of the analysis code can be done on a local
2834data set or on distributed data using \grid tools. The final version
2835of the analysis
2836will eventually be submitted to the \grid and will access large
2837portions or even
2838the totality of the ESDs. The results may be registered in the \grid file
2839catalog and used at later stages of the analysis.
2840This activity may or may not be coordinated inside
2841the PWGs, via the definition of priorities. The
chaotic analysis is carried out within the computing resources of the
2843physics groups.
2844
2845
2846% -----------------------------------------------------------------------------
2847
2848\subsection{Infrastructure tools for distributed analysis}
2849
2850\subsubsection{gShell}
2851
2852The main infrastructure tools for distributed analysis have been
2853described in Chapter 3 of the Computing TDR\cite{CompTDR}. The actual
2854middleware is hidden by an interface to the \grid,
2855gShell\cite{CH6Ref:gShell}, which provides a
2856single working shell.
2857The gShell package contains all the commands a user may need for file
2858catalog queries, creation of sub-directories in the user space,
2859registration and removal of files, job submission and process
2860monitoring. The actual \grid middleware is completely transparent to
2861the user.
2862
2863The gShell overcomes the scalability problem of direct client
2864connections to databases. All clients connect to the
2865gLite\cite{CH6Ref:gLite} API
2866services. This service is implemented as a pool of preforked server
2867daemons, which serve single-client requests. The client-server
2868protocol implements a client state which is represented by a current
working directory, a client session ID and a time-dependent symmetric
2870cipher on both ends to guarantee client privacy and security. The
2871server daemons execute client calls with the identity of the connected
2872client.
2873
2874\subsubsection{PROOF -- the Parallel ROOT Facility}
2875
2876The Parallel ROOT Facility, PROOF\cite{CH6Ref:PROOF} has been specially
2877designed and developed
2878to allow the analysis and mining of very large data sets, minimizing
2879response time. It makes use of the inherent parallelism in event data
2880and implements an architecture that optimizes I/O and CPU utilization
2881in heterogeneous clusters with distributed storage. The system
2882provides transparent and interactive access to terabyte-scale data
2883sets. Being part of the ROOT framework, PROOF inherits the benefits of
an efficient object storage system and a wealth of statistical and
2885visualization tools.
2886The most important design features of PROOF are:
2887
2888\begin{itemize}
2889\item transparency -- no difference between a local ROOT and
2890 a remote parallel PROOF session;
\item scalability -- no implicit limitations on the number of computers
2892 used in parallel;
2893\item adaptability -- the system is able to adapt to variations in the
2894 remote environment.
2895\end{itemize}
2896
2897PROOF is based on a multi-tier architecture: the ROOT client session,
2898the PROOF master server, optionally a number of PROOF sub-master
2899servers, and the PROOF worker servers. The user connects from the ROOT
2900session to a master server on a remote cluster and the master server
2901creates sub-masters and worker servers on all the nodes in the
2902cluster. All workers process queries in parallel and the results are
2903presented to the user as coming from a single server.
2904
2905PROOF can be run either in a purely interactive way, with the user
2906remaining connected to the master and worker servers and the analysis
2907results being returned to the user's ROOT session for further
2908analysis, or in an `interactive batch' way where the user disconnects
2909from the master and workers (see Fig.~\vref{CH3Fig:alienfig7}). By
2910reconnecting later to the master server the user can retrieve the
2911analysis results for that particular
2912query. This last mode is useful for relatively long running queries
2913(several hours) or for submitting many queries at the same time. Both
2914modes will be important for the analysis of ALICE data.
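
A minimal interactive PROOF session could look like the following sketch;
the master host name, the input file and the selector are placeholders, and
the exact connection syntax depends on the ROOT version in use:

\begin{lstlisting}[language=C++]
  // Sketch of an interactive PROOF session (host, file and selector
  // names are placeholders).
  TProof::Open("proof-master.example.org");
  TChain chain("esdTree");
  chain.Add("AliESDs.root");
  chain.SetProof();                    // route the processing to PROOF
  chain.Process("MySelector.C+");      // user-provided TSelector
\end{lstlisting}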
2915
2916\begin{figure}[t]
2917 \centering
2918 \includegraphics[width=11.5cm]{picts/alienfig7}
2919 \caption{Setup and interaction with the \grid middleware of a user
2920 PROOF session distributed over many computing centers.}
2921 \label{CH3Fig:alienfig7}
2922\end{figure}
2923
2924% -----------------------------------------------------------------------------
2925
2926\subsection{Analysis tools}
2927
2928This section is devoted to the existing analysis tools in \ROOT and
\aliroot. As discussed in the introduction, some very broad
analysis tasks include the search for rare events (in this case the
physicist tries to maximize the signal-to-background ratio), or
2932measurements where it is important to maximize the signal
2933significance. The tools that provide possibilities to apply certain
2934selection criteria and to find the interesting combinations within
2935a given event are described below. Some of them are very general and are
2936used in many different places, for example the statistical
2937tools. Others are specific to a given analysis.
2938
2939\subsubsection{Statistical tools}
2940
2941Several commonly used statistical tools are available in
2942\ROOT\cite{ROOT}. \ROOT provides
2943classes for efficient data storage and access, such as trees
2944and ntuples. The
2945ESD information is organized in a tree, where each event is a separate
entry. This allows a chain of ESD files to be made and the
elaborate selector mechanism to be used in order to exploit the PROOF
2948services. The tree classes
2949permit easy navigation, selection, browsing, and visualization of the
2950data in the branches.
2951
2952\ROOT also provides histogramming and fitting classes, which are used
2953for the representation of all the one- and multi-dimensional
2954distributions, and for extraction of their fitted parameters. \ROOT provides
2955an interface to powerful and robust minimization packages, which can be
2956used directly during some special parts of the analysis. A special
2957fitting class allows one to decompose an experimental histogram as a
2958superposition of source histograms.
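
The listing below sketches such a decomposition, for example with ROOT's
\texttt{TFractionFitter}, assuming that \texttt{hData}, \texttt{hSignalMC}
and \texttt{hBackgroundMC} are previously filled histograms:

\begin{lstlisting}[language=C++]
  // Sketch: decompose an experimental histogram as a superposition of
  // two source (template) histograms; the histograms are assumed to
  // exist already.
  TObjArray sources(2);
  sources.Add(hSignalMC);
  sources.Add(hBackgroundMC);
  TFractionFitter fitter(hData, &sources);
  if (fitter.Fit() == 0) {                 // 0 means the fit converged
    Double_t fraction, error;
    fitter.GetResult(0, fraction, error);  // fitted signal fraction
    printf("signal fraction: %f +- %f\n", fraction, error);
  }
\end{lstlisting}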
2959
2960\ROOT also has a set of sophisticated statistical analysis tools such as
2961principal component analysis, robust estimator, and neural networks.
2962The calculation of confidence levels is provided as well.
2963
2964Additional statistical functions are included in \texttt{TMath}.
2965
2966\subsubsection{Calculations of kinematics variables}
2967
2968The main \ROOT physics classes include 3-vectors and Lorentz
2969vectors, and operations
2970such as translation, rotation, and boost. The calculations of
2971kinematics variables
2972such as transverse and longitudinal momentum, rapidity,
2973pseudorapidity, effective mass, and many others are provided as well.
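
For example, a two-particle combination can be built with the
\texttt{TLorentzVector} class; the momentum components \texttt{px1}, ...,
\texttt{pz2} and the mass assignments below are placeholders:

\begin{lstlisting}[language=C++]
  // Sketch: kinematics of a two-particle combination.
  TLorentzVector p1, p2;
  p1.SetXYZM(px1, py1, pz1, 0.13957);   // assume the pion mass (GeV/c^2)
  p2.SetXYZM(px2, py2, pz2, 0.49368);   // assume the kaon mass (GeV/c^2)
  TLorentzVector pair = p1 + p2;
  Double_t pt   = pair.Pt();            // transverse momentum
  Double_t y    = pair.Rapidity();      // rapidity
  Double_t mass = pair.M();             // effective (invariant) mass
\end{lstlisting}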
2974
2975
2976\subsubsection{Geometrical calculations}
2977
2978There are several classes which can be used for
2979measurement of the primary vertex: \texttt{AliITSVertexerZ},
2980\texttt{AliITSVertexerIons}, \texttt{AliITSVertexerTracks}, etc. A fast estimation of the {\it z}-position can be
2981done by \texttt{AliITSVertexerZ}, which works for both lead--lead
and proton--proton collisions. A universal tool is provided by
2983\texttt{AliITSVertexerTracks}, which calculates the position and
2984covariance matrix of the primary vertex based on a set of tracks, and
2985also estimates the $\chi^2$ contribution of each track. An iterative
2986procedure can be used to remove the secondary tracks and improve the
2987precision.
2988
Track propagation to the primary vertex (inward) is provided by
\texttt{AliESDtrack}.
2991
2992The secondary vertex reconstruction in case of ${\rm V^0}$ is provided by
2993\texttt{AliV0vertexer}, and in case of cascade hyperons by
\texttt{AliCascadeVertexer}. A universal tool is
2995\texttt{AliITSVertexerTracks}, which can be used also to find secondary
2996vertexes close to the primary one, for example decays of open charm
2997like ${\rm D^0 \to K^- \pi^+}$ or ${\rm D^+ \to K^- \pi^+ \pi^+}$. All
2998the vertex
2999reconstruction classes also calculate distance of closest approach (DCA)
3000between the track and the vertex.
3001
3002The calculation of impact parameters with respect to the primary vertex
3003is done during the reconstruction and the information is available in
3004\texttt{AliESDtrack}. It is then possible to recalculate the
3005impact parameter during the ESD analysis, after an improved determination
3006of the primary vertex position using reconstructed ESD tracks.
3007
3008\subsubsection{Global event characteristics}
3009
3010The impact parameter of the interaction and the number of participants
3011are estimated from the energy measurements in the ZDC. In addition,
3012the information from the FMD, PMD, and T0 detectors is available. It
3013gives a valuable estimate of the event multiplicity at high rapidities
3014and permits global event characterization. Together with the ZDC
3015information it improves the determination of the impact parameter,
3016number of participants, and number of binary collisions.
3017
3018The event plane orientation is calculated by the \texttt{AliFlowAnalysis} class.
3019
3020\subsubsection{Comparison between reconstructed and simulated parameters}
3021
3022The comparison between the reconstructed and simulated parameters is
3023an important part of the analysis. It is the only way to estimate the
3024precision of the reconstruction. Several example macros exist in
3025\aliroot and can be used for this purpose: \texttt{AliTPCComparison.C},
3026\texttt{AliITSComparisonV2.C}, etc. As a first step in each of these
3027macros the list of so-called `good tracks' is built. The definition of
3028a good track is explained in detail in the ITS\cite{CH6Ref:ITS_TDR} and
3029TPC\cite{CH6Ref:TPC_TDR} Technical Design
3030Reports. The essential point is that the track
3031goes through the detector and can be reconstructed. Using the `good
3032tracks' one then estimates the efficiency of the reconstruction and
3033the resolution.
3034
3035Another example is specific to the MUON arm: the \texttt{MUONRecoCheck.C}
3036macro compares the reconstructed muon tracks with the simulated ones.
3037
There is also the possibility to calculate the resolutions directly, without
additional requirements on the initial track. One can use the
3040so-called track label and retrieve the corresponding simulated
3041particle directly from the particle stack (\texttt{AliStack}).
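
A minimal sketch of this label-based access is given below; it assumes that
an \texttt{AliRunLoader} (\texttt{runLoader}) and an ESD event (\texttt{esd})
are already available in the session:

\begin{lstlisting}[language=C++]
  // Sketch: retrieve the simulated particle corresponding to a
  // reconstructed track via the track label (a negative label flags
  // tracks with wrongly associated clusters).
  runLoader->LoadKinematics();
  runLoader->LoadHeader();
  AliStack* stack = runLoader->Stack();
  AliESDtrack* track = esd->GetTrack(0);          // first track
  Int_t label = TMath::Abs(track->GetLabel());
  TParticle* particle = stack->Particle(label);
  printf("true PDG code: %d\n", particle->GetPdgCode());
\end{lstlisting}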
3042
3043\subsubsection{Event mixing}
3044
3045One particular analysis approach in heavy-ion physics is the
3046estimation of the combinatorial background using event mixing. Part of the
3047information (for example the positive tracks) is taken from one
3048event, another part (for example the negative tracks) is taken from
3049a different, but
3050`similar' event. The event `similarity' is very important, because
3051only in this case the combinations produced from different events
3052represent the combinatorial background. Typically `similar' in
3053the example above means with the same multiplicity of negative
3054tracks. One may require in addition similar impact parameters of the
3055interactions, rotation of the tracks of the second event to adjust the
3056event plane, etc. The possibility for event mixing is provided in
3057\aliroot by the fact that the ESD is stored in trees and one can chain
3058and access simultaneously many ESD objects. Then the first pass would
3059be to order the events according to the desired criterion of
3060`similarity' and to use the obtained index for accessing the `similar'
3061events in the embedded analysis loops. An example of event mixing is
3062shown in Fig.~\ref{CH6Fig:phipp}. The background distribution has been
3063obtained using `mixed events'. The signal distribution has been taken
3064directly from the \MC simulation. The `experimental distribution' has
3065been produced by the analysis macro and decomposed as a
3066superposition of the signal and background histograms.
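
The mixing loop itself can be as simple as the schematic sketch below, where
\texttt{GetPositives}, \texttt{GetNegatives} and \texttt{FillMass} are
hypothetical helpers and the events are assumed to be already ordered
according to the chosen `similarity' criterion:

\begin{lstlisting}[language=C++]
  // Schematic event-mixing loop: positive tracks from one event are
  // combined with negative tracks from the next ('similar') event.
  for (Int_t i = 0; i < nEvents - 1; i++) {
    TObjArray* pos = GetPositives(events[i]);
    TObjArray* neg = GetNegatives(events[i + 1]);
    for (Int_t ip = 0; ip < pos->GetEntries(); ip++)
      for (Int_t in = 0; in < neg->GetEntries(); in++)
        FillMass(hBackground, pos->At(ip), neg->At(in));
  }
\end{lstlisting}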
3067
3068\begin{figure}[htb]
3069 \centering
3070 \includegraphics*[width=120mm]{picts/phipp}
3071 \caption{Mass spectrum of the ${\rm \phi}$ meson candidates produced
3072 inclusively in the proton--proton interactions.}
3073 \label{CH6Fig:phipp}
3074\end{figure}
3075
3076
3077\subsubsection{Analysis of the High-Level Trigger (HLT) data}
3078
3079This is a specific analysis which is needed in order to adjust the cuts
3080in the HLT code, or to estimate the HLT
3081efficiency and resolution. \aliroot provides a transparent way of doing
3082such an analysis, since the HLT information is stored in the form of ESD
3083objects in a parallel tree. This also helps in the monitoring and
3084visualization of the results of the HLT algorithms.
3085
3086
3087%\vspace{-0.1cm}
3088\subsubsection{EVE -- Event Visualization Environment}
3089
3090EVE is composed of:
3091\begin{enumerate}
3092\item small application kernel;
3093\item graphics classes with editors and OpenGL renderers;
3094\item CINT scripts that extract data, fill graphics classes and register
3095 them to the application.
3096\end{enumerate}
3097
The framework is still evolving; some things might not work as expected.
3099
3100\underline{Usage:}
3101
3102\begin{enumerate}
3103\item Initialize ALICE environment.
\item Spawn the 'alieve' executable and invoke the alieve\_init.C macro,
3105 for example:
3106
To load the first event from the current directory:
\begin{lstlisting}[language=sh]
 # alieve alieve_init.C
3110\end{lstlisting}
To load the 5th event from the directory /data/my-pp-run:
\begin{lstlisting}[language=sh]
 # alieve 'alieve_init.C("/data/my-pp-run", 5)'
3114\end{lstlisting}
3115Interactively:
3116\begin{lstlisting}[language=sh]
3117 # alieve
 root[0] .L alieve_init.C
 root[1] alieve_init("/somedir")
3120\end{lstlisting}
3121
3122\item Use GUI or CINT command-line to invoke further visualization macros.
3123\item To navigate the events use macros 'event\_next.C' and 'event\_prev.C'.
3124 These are equivalent to the command-line invocations:
3125\begin{lstlisting}[language=sh]
3126 root[x] Alieve::gEvent->NextEvent()
3127\end{lstlisting}
3128or
3129\begin{lstlisting}[language=sh]
3130 root[x] Alieve::gEvent->PrevEvent()
3131\end{lstlisting}
3132The general form to go to event via its number is:
3133\begin{lstlisting}[language=sh]
3134 root[x] Alieve::gEvent->GotoEvent(<event-number>)
3135\end{lstlisting}
3136\end{enumerate}
3137
3138See files in EVE/alice-macros/. For specific uses these should be
3139edited to suit your needs.
3140
3141\underline{Directory structure}
3142
EVE is split into two modules: REVE (the ROOT part, not dependent on
AliRoot) and ALIEVE (the ALICE-specific part). For the time being both
modules are kept in the AliRoot CVS repository.
3146
3147Alieve/ and Reve/ -- sources
3148
macros/ -- macros for bootstrapping and internal steering\\
alice-macros/ -- macros for ALICE visualization\\
alice-data/ -- data files used by the ALICE macros\\
test-macros/ -- macros for tests of specific features; usually one needs
 to copy and edit them\\
bin/, Makefile and make\_base.inc are used for the stand-alone build of the
packages.
3156
3157\underline{Notes}
3158
3159\begin{enumerate}
3160\item Problems with macro-execution
3161
3162A failed macro-execution can leave CINT in a poorly defined state that
3163prevents further execution of macros. For example:
3164
3165\begin{lstlisting}[language=sh]
3166 Exception Reve::Exc_t: Event::Open failed opening ALICE ESDfriend from
3167 '/alice-data/coctail_10k/AliESDfriends.root'.
3168
3169 root [1] Error: Function MUON_geom() is not defined in current scope :0:
3170 *** Interpreter error recovered ***
3171 Error: G__unloadfile() File "/tmp/MUON_geom.C" not loaded :0:
3172\end{lstlisting}
3173
'gROOT$\to$Reset()' helps in most cases.
3175\end{enumerate}
3176
3177% ------------------------------------------------------------------------------
3178
3179\vspace{-0.2cm}
3180\subsection{Existing analysis examples in \aliroot}
3181
3182There are several dedicated analysis tools available in \aliroot. Their results
3183were used in the Physics Performance Report and described in
3184ALICE internal notes. There are two main classes of analysis: the
first one based directly on the ESD, and the second one first extracting
AOD and then analyzing it.
3187
3188\begin{itemize}
3189\item\textbf{ESD analysis }
3190
3191 \begin{itemize}
3192 \item[ ] \textbf{${\rm V^0}$ and cascade reconstruction/analysis}
3193
3194 The ${\rm V^0}$ candidates
3195 are reconstructed during the combined barrel tracking and stored in
3196 the ESD object. The following criteria are used for the selection:
3197 minimal-allowed impact parameter (in the transverse plane) for each
3198 track; maximal-allowed DCA between the two tracks; maximal-allowed
3199 cosine of the
3200 ${\rm V^0}$ pointing angle
3201 (angle between the momentum vector of the particle combination
3202 and the line connecting the production and decay vertexes); minimal
3203 and maximal radius of the fiducial volume; maximal-allowed ${\rm
3204 \chi^2}$. The
3205 last criterion requires the covariance matrix of track parameters,
3206 which is available only in \texttt{AliESDtrack}. The reconstruction
3207 is performed by \texttt{AliV0vertexer}. This class can be used also
3208 in the analysis. An example of reconstructed kaons taken directly
3209 from the ESDs is shown in Fig.\ref{CH6Fig:kaon}.
3210
3211 \begin{figure}[th]
3212 \centering
3213 \includegraphics*[width=120mm]{picts/kaon}
3214 \caption{Mass spectrum of the ${\rm K_S^0}$ meson candidates produced
3215 inclusively in the \mbox{Pb--Pb} collisions.}
3216 \label{CH6Fig:kaon}
3217 \end{figure}
3218
3219 The cascade hyperons are reconstructed using the ${\rm V^0}$ candidate and
3220 `bachelor' track selected according to the cuts above. In addition,
3221 one requires that the reconstructed ${\rm V^0}$ effective mass belongs to
 a certain interval centered on the true value. The reconstruction
3223 is performed by \texttt{AliCascadeVertexer}, and this class can be
3224 used in the analysis.
3225
3226 \item[ ] \textbf{Open charm}
3227
3228 This is the second elaborated example of ESD
3229 analysis. There are two classes, \texttt{AliD0toKpi} and
3230 \texttt{AliD0toKpiAnalysis}, which contain the corresponding analysis
3231 code. The decay under investigation is ${\rm D^0 \to K^- \pi^+}$ and its
3232 charge conjugate. Each ${\rm D^0}$ candidate is formed by a positive and
3233 a negative track, selected to fulfill the following requirements:
3234 minimal-allowed track transverse momentum, minimal-allowed track
3235 impact parameter in the transverse plane with respect to the primary
3236 vertex. The selection criteria for each combination include
3237 maximal-allowed distance of closest approach between the two tracks,
3238 decay angle of the kaon in the ${\rm D^0}$ rest frame in a given region,
3239 product of the impact parameters of the two tracks larger than a given value,
3240 pointing angle between the ${\rm D^0}$ momentum and flight-line smaller than
3241 a given value. The particle
3242 identification probabilities are used to reject the wrong
3243 combinations, namely ${\rm (K,K)}$ and ${\rm (\pi,\pi)}$, and to enhance the
3244 signal-to-background ratio at low momentum by requiring the kaon
3245 identification. All proton-tagged tracks are excluded before the
3246 analysis loop on track pairs. More details can be found in
3247 Ref.\cite{CH6Ref:Dainese}.
3248
3249 \item[ ] \textbf{Quarkonia analysis}
3250
3251 Muon tracks stored in the ESD can be analyzed for example by the macro
3252 \texttt{MUONmassPlot\_ESD.C}.
3253 This macro performs an invariant-mass analysis of muon unlike-sign pairs
3254 and calculates the combinatorial background.
 Quarkonia \pt and rapidity distributions are built for \Jpsi and \Ups.
3256 This macro also performs a fast single-muon analysis: \pt,
3257 rapidity, and
3258 ${\rm \theta}$ vs ${\rm \varphi}$ acceptance distributions for positive
3259 and negative muon
3260 tracks with a maximal-allowed ${\rm \chi^2}$.
3261
3262 \end{itemize}
3263
3264 % \newpage
3265\item\textbf{AOD analysis}
3266
3267 Often only a small subset of information contained in the ESD
3268 is needed to perform an analysis. This information
3269 can be extracted and stored in the AOD format in order to reduce
3270 the computing resources needed for the analysis.
3271
3272 The AOD analysis framework implements a set of tools like data readers,
3273 converters, cuts, and other utility classes.
3274 The design is based on two main requirements: flexibility and common
3275 AOD particle interface. This guarantees that several analyses can be
3276 done in sequence within the same computing session.
3277
3278 In order to fulfill the first requirement, the analysis is driven by the
3279 `analysis manager' class and particular analyses are added to it.
 It performs the loop over events, which are delivered by a
3281 user-specified reader. This design allows the analyses to be ordered
3282 appropriately if some of them depend on the results of the others.
3283
3284 The cuts are designed to provide high flexibility
3285 and performance. A two-level architecture has been adopted
3286 for all the cuts (particle, pair and event). A class representing a cut
3287 has a list of `base cuts'. Each base cut implements a cut on a
3288 single property or performs a logical operation (and, or) on the result of
3289 other base cuts.
3290
3291 A class representing a pair of particles buffers all the results,
3292 so they can be re-used if required.
3293
3294 \vspace{-0.2cm}
3295 \begin{itemize}
3296 \item[ ] \textbf{Particle momentum correlations (HBT) -- HBTAN module}
3297
3298 Particle momentum correlation analysis is based on the event-mixing technique.
3299 It allows one to extract the signal by dividing the appropriate
3300 particle spectra coming from the original events by those from the
3301 mixed events.
3302
3303 Two analysis objects are currently implemented to perform the mixing:
3304 the standard one and the one implementing the Stavinsky
3305 algorithm\cite{CH6Ref:Stavinsky}. Others can easily be added if needed.
3306
3307 An extensive hierarchy of the function base classes has been implemented
3308 facilitating the creation of new functions.
3309 A wide set of the correlation, distribution and monitoring
3310 functions is already available in the module. See Ref.\cite{CH6Ref:HBTAN}
3311 for the details.
3312
3313 The package contains two implementations of weighting algorithms, used
3314 for correlation simulations (the first developed by Lednicky
3315 \cite{CH6Ref:Weights} and the second due to CRAB \cite{CH6Ref:CRAB}), both
 based on a uniform interface.
3317
3318 \item[ ] \textbf{Jet analysis}
3319
3320 The jet analysis\cite{CH6Ref:Loizides} is available in the module JETAN. It has a set of
3321 readers of the form \texttt{AliJetParticlesReader<XXX>}, where \texttt{XXX}
3322 = \texttt{ESD},
3323 \texttt{HLT}, \texttt{KineGoodTPC}, \texttt{Kine}, derived from the base class
3324 \texttt{AliJetParticlesReader}. These
 provide a uniform interface to
3326 the information from the
3327 kinematics tree, from HLT, and from the ESD. The first step in the
3328 analysis is the creation of an AOD object: a tree containing objects of
3329 type \texttt{AliJetEventParticles}. The particles are selected using a
3330 cut on the minimal-allowed transverse momentum. The second analysis
3331 step consists of jet finding. Several algorithms are available in the
3332 classes of the type \texttt{Ali<XXX>JetFinder}.
3333 An example of AOD creation is provided in
3334 the \texttt{createEvents.C} macro. The usage of jet finders is illustrated in
 the \texttt{findJets.C} macro.
3336
3337
3338 \item[ ] \textbf{${\rm V^0}$ AODs}
3339
3340 The AODs for ${\rm V^0}$ analysis contain several additional parameters,
3341 calculated and stored for fast access. The methods of the class {\tt
3342 AliAODv0} provide access to all the geometrical and kinematics
3343 parameters of a ${\rm V^0}$ candidate, and to the ESD information used
3344 for the calculations.
3345
3346 \vspace{-0.1cm}
3347 \item[ ] \textbf{MUON}
3348
3349 There is also a prototype MUON analysis provided in
3350 \texttt{AliMuonAnalysis}. It simply fills several histograms, namely
3351 the transverse momentum and rapidity for positive and negative muons,
3352 the invariant mass of the muon pair, etc.
3353 \end{itemize}
3354
3355\end{itemize}
3356
3357%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
3358
3359\newpage
3360%\cleardoublepage
3361\section{Analysis Foundation Library}
3362
3363The result of the reconstruction chain is the Event Summary Data (ESD)
3364object. It contains all the information that may
3365be useful in {\it any} analysis. In most cases only a small subset
3366of this information is needed for a given analysis.
Hence, it is essential to provide a framework for analyses where the
user can extract only the required information and store it in
the Analysis Object Data (AOD) format, to be used in all further
analyses. Proper data preselection speeds up
the computation significantly. Moreover, the interface of the ESD classes is
designed to fulfill the requirements of the reconstruction
code and is inconvenient for most analysis algorithms,
in contrast to the AOD interface. In addition, the latter can be customized
to the needs of a particular analysis if required.
3376
3377We have developed the analysis foundation library that
3378provides a skeleton framework for analyses, defines AOD data format
3379and implements a wide set of basic utility classes which facilitate
3380the creation of individual analyses.
3381It contains classes that define the following entities:
3382
3383\begin{itemize}
3384\item AOD event format
3385\item Event buffer
3386\item Particle(s)
3387\item Pair
3388\item Analysis manager class
3389\item Base class for analyses
3390\item Readers
3391\item AOD writer
3392\item Particle cuts
3393\item Pair cuts
3394\item Event cuts
3395\item Other utility classes
3396\end{itemize}
3397
3398It is designed to fulfill two main requirements:
3399%
3400\begin{enumerate}
\item \textbf{Allow for flexibility in designing individual analyses.}
 Each analysis has its own best-performing solutions. The most trivial example is
 the internal representation of a particle momentum: in some cases the Cartesian
 coordinate system is preferable, in others the cylindrical one.
\item \textbf{All analyses use the same AOD particle interface to access the data.}
 This guarantees that analyses can be chained. It is important when
 one analysis depends on the result of another, so the latter can
 process exactly the same data without the need for any conversion.
 It also makes it possible to carry out many analyses in the same job; consequently,
 the computation time connected with
 data reading, job submission, etc. can be significantly reduced.
3411\end{enumerate}
3412% ..
3413The design of the framework is described in detail below.
3414
3415
3416% -----------------------------------------------------------------------------
3417
3418\subsection{AOD}
3419
3420The \texttt{AliAOD} class contains only the information required
for an analysis. It is not only the format in which the data are
stored in files, but it is also used internally throughout the package
as a particle container.
3424Currently it contains a \texttt{TClonesArray} of particles and
3425data members describing the global event properties.
3426This class is expected to evolve further as new analyses continue to be
3427developed and their requirements are implemented.
3428
3429% -----------------------------------------------------------------------------
3430
3431\subsection{Particle}
3432
3433\texttt{AliVAODParticle} is a pure virtual class that defines a particle
3434interface.
3435Each analysis is allowed to create its own particle class
3436if none of the already existing ones meet its requirements.
3437Of course, it must derive from \texttt{AliVAODParticle}.
3438However, all analyses are obliged to
3439use the interface defined in \texttt{AliVAODParticle} exclusively.
3440If additional functionality is required, an appropriate
3441method is also added to the virtual interface (as a pure virtual or an empty one).
Hence, all other analyses can be run on any AOD, although the processing time
3443might be longer in some cases (if the internal representation is not
3444the optimal one).
3445
3446We have implemented the standard concrete particle class
3447called \texttt{AliAODParticle}. The momentum is stored in the
3448Cartesian coordinates and it also has the data members
3449describing the production vertex. All the PID information
3450is stored in two dynamic arrays. The first array contains
3451probabilities sorted in descending order,
and the second one the corresponding PDG (Particle Data Group) codes.
3453The PID of a particle is defined by the data member which is
3454the index in the arrays. This solution allows for faster information
3455access during analysis and minimizes memory and disk space consumption.
3456
3457
3458% -----------------------------------------------------------------------------
3459
3460\subsection{Pair}
3461
3462The pair object points to two particles and implements
3463a set of methods for the calculation of the pair properties.
3464It buffers calculated values and intermediate
3465results for performance reasons. This solution applies to
3466quantities whose computation is time consuming and
3467also to quantities with a high reuse probability. A
3468Boolean flag is used to mark the variables already calculated.
3469To ensure that this mechanism works properly,
3470the pair always uses its own methods internally,
3471instead of accessing its variables directly.
3472
The pair object has a pointer to another pair with the swapped
3474particles. The existence of this feature is connected to
3475the implementation of the mixing algorithm in the correlation
3476analysis package: if particle A is combined with B,
3477the pair with the swapped particles is not mixed.
3478In non-identical particle analysis their order is important, and
3479a pair cut may reject a pair while a reversed one would be
3480accepted. Hence, in the analysis the swapped pair is also tried
3481if a regular one is rejected. In this way the buffering feature is
3482automatically used also for the swapped pair.
3483
3484% -----------------------------------------------------------------------------
3485
3486\subsection{Analysis manager class and base class}
3487
The {\it analysis manager} class (\texttt{AliRunAnalysis}) drives the whole
process. A particular analysis, which must inherit from the
\texttt{AliAnalysis} class, is added to it.
The user triggers the analysis by calling the \texttt{Process} method.
The manager performs a loop over events, which are delivered by
a reader (a derivative of the \texttt{AliReader} class, see Section
\ref{cap:soft:secReaders}).
This design makes it possible to chain the analyses in the proper order if any
of them depends on the results of another (a schematic usage example is given
below).
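
The following sketch illustrates this flow; the \texttt{SetReader} and
\texttt{Add} method names, as well as the \texttt{MyAnalysis} class, are
assumptions used only for the illustration, while \texttt{Process} is the
documented entry point:

\begin{lstlisting}[language=C++]
  // Schematic use of the analysis manager (method names other than
  // Process are assumptions; MyAnalysis derives from AliAnalysis).
  AliReaderESD* reader   = new AliReaderESD();  // delivers the events
  MyAnalysis*   analysis = new MyAnalysis();
  AliRunAnalysis manager;
  manager.SetReader(reader);
  manager.Add(analysis);
  manager.Process();                            // loop over all events
\end{lstlisting}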
3497
3498The user can set an event cut in the manager class.
3499If an event is not rejected, the \texttt{ProcessEvent}
3500method is executed for each analysis object.
3501This method requires two parameters, namely pointers to
3502a reconstructed and a simulated event.
3503
3504The events have a parallel structure, i.e. the corresponding
reconstructed particles and simulated particles always have the same index.
3506This allows for easy implementation of an analysis where both
3507are required, e.g. when constructing residual distributions.
3508It is also very important in correlation simulations
3509that use the weight algorithm\cite{CH6Ref:Weights}.
3510By default, the pointer to the simulated event is null,
i.e. as it is in the processing of experimental data.
3512
3513An event cut and a pair cut can be set in \texttt{AliAnalysis}.
The latter points to two particle cuts, so
3515an additional particle cut data member is redundant
3516because the user can set it in this pair cut.
3517
The \texttt{AliAnalysis} class allows one to choose
which data the cuts check:
3520\begin{enumerate}
3521\item the reconstructed (default)
3522\item the simulated
3523\item both.
3524\end{enumerate}
3525%
It has four pointer-to-method data members:
3527\begin{enumerate}
3528\item \texttt{fkPass1} -- checks a particle, the cut is defined by the
3529 cut on the first particle in the pair cut data member
3530\item \texttt{fkPass2} -- as above, but the cut on the second particle is used
3531\item \texttt{fkPass} -- checks a pair
\item \texttt{fkPassPairProp} -- checks a pair, but only the pair (two-particle)
 properties are considered
3534\end{enumerate}
3535Each of them has two parameters, namely pointers to
3536reconstructed and simulated particles or pairs.
3537The user switches the behavior with the
3538method that sets the above pointers to the appropriate methods.
3539We have decided to implement
3540this solution because it performs faster than the simpler one that uses
boolean flags and ``if'' statements. These cuts are used mostly inside
3542multiply nested loops, and even a small performance gain transforms
3543into a noticeable reduction of the overall computation time.
3544In the case of an event cut, the simpler solution was applied.
3545The \texttt{Rejected} method is always used to check events.
3546A developer of the analysis code must always use this method and
3547the pointers to methods itemized above to benefit from this feature.
3548
3549% -----------------------------------------------------------------------------
3550
3551\subsection{Readers}
3552\label{cap:soft:secReaders}
3553
A reader is an object that provides the data for an analysis.
3555\texttt{AliReader} is the base class that defines a pure virtual
3556interface.
3557
3558A reader may stream the reconstructed and/or the
3559simulated data. Each of them is stored in a separate AOD.
If it reads both, the corresponding reconstructed and
simulated particles always have the same index.
3562
The most important methods for the user are the following (a usage sketch
is given after the list):
3564\begin{itemize}
\item \texttt{Next} -- It triggers reading of the next event. It returns
3566 0 in case of success and 1 if no more events
3567 are available.
\item \texttt{Rewind} -- It rewinds reading to the beginning.
3569\item \texttt{GetEventRec} and \texttt{GetEventSim} -- They return
3570 pointers to the reconstructed and the simulated events respectively.
3571\end{itemize}
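
A direct event loop with a reader could look like the sketch below
(\texttt{AliReaderESD} is taken as an example of a concrete reader, and the
\texttt{AliAOD} return type follows the description in the previous
sections):

\begin{lstlisting}[language=C++]
  // Sketch of a direct event loop with a reader.
  AliReaderESD reader;
  while (reader.Next() == 0) {               // 0 = one more event read
    AliAOD* recEvent = reader.GetEventRec(); // reconstructed particles
    AliAOD* simEvent = reader.GetEventSim(); // may be 0 for real data
    // ... analyse the event here ...
  }
  reader.Rewind();                           // start over if needed
\end{lstlisting}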
3572
The base reader class implements functionality for
particle filtering at the reading level. A user can set any
number of particle cuts in a reader, and a particle is
read if it fulfills the criteria defined by any of them.
In particular, a particle type is never certain, and the readers
are constructed in such a way that all the PID hypotheses (with non-zero
probability) are verified.
In principle, a track can be read with more than one mass
assumption.
For example, consider a track
which is a pion with 55\% probability and a kaon with 40\% probability,
and a user who wants to read
all the pions and kaons with PID probabilities higher than
50\% and 30\%, respectively. In such a case two particles
with different PIDs are added to the AOD.
However, both particles have the same Unique Identification
number (UID), so it can easily be checked that they are in fact
the same track.
3590
3591% Multiple File Sources
\texttt{AliReader} implements a feature that allows one to specify and manipulate
3593multiple data sources, which are read sequentially.
3594The user can provide a list of directory names where the data are searched.
The \texttt{ReadEventsFromTo} method allows one to limit the range of events that are read
3596(e.g. when only one event of hundreds stored in an AOD is of interest).
3597% Event Buffering
\texttt{AliReader} has a switch that enables event buffering,
3599so an event is not deleted and can be quickly accessed if requested again.
3600
3601% Blending
3602Particles within an event are frequently sorted in some way, e.g.
3603the particle trajectory reconstruction provides tracks sorted according
3604to their transverse momentum. This leads to asymmetric
3605distributions where they are not expected. The user can request the
reader to randomize the particle order with the \texttt{SetBlend} method.
3607
3608% Writing AOD
3609The AOD objects can be written to disk with the \texttt{AliReaderAOD}
using the static method \texttt{WriteAOD}. As the first
parameter the user must pass a pointer to another reader that
provides AOD objects. Typically it is an \texttt{AliReaderESD},
but it can also be another one, e.g. another \texttt{AliReaderAOD}
(to filter out the desired particles from already existing AODs).
3615
3616Inside the file, the AODs are stored in a \texttt{TTree}.
Since the AOD stores particles in a clones array, and many particle
formats are allowed, the reading and writing are not straightforward.
The user must specify the particle format to be stored on disk,
because in the general case the input reader can stream AODs with inconsistent
particle formats. Hence, a careful check must be done, because storing
an object of a type different from the one specified in the tree leads
to an inevitable crash. If the input AOD has a particle type different from
the expected one, it is automatically converted. Hence, this method can also
be used for AOD type conversion.
3626
3627% -----------------------------------------------------------------------------
3628
3629\subsection{AODs buffer}
3630
3631Normally the readers do not buffer the events.
Frequently an event needs to be kept for further analysis,
e.g. when the uncorrelated combinatorial background is computed.
We have implemented a FIFO (First In First Out) type buffer called
\texttt{AliEventBuffer} that caches a defined number of events.
3636
3637% -----------------------------------------------------------------------------
3638
3639\subsection{Cuts}
3640
3641The cuts are designed to guarantee the highest flexibility
and performance. We have implemented the same two-level architecture
for all the cuts (particle, pair and event).
A cut object defines the ranges of many properties that a particle, a pair or
an event may possess, and it also defines a method which performs the
necessary check. However, usually a user wants to limit the
3647ranges of only a few properties. For speed and robustness reasons,
3648the design presented in Fig.\ref{cap:soft:partcut} was developed.
3649
3650The cut object has an array of pointers to
3651base cuts. The number of entries in the array depends
3652on the number of the properties the user wants to limit.
3653The base cut implements checks on a single property.
3654It implements maximum and minimum values and a virtual method \texttt{Rejected}
that performs a range check of the value returned by the pure
virtual method \texttt{GetValue}. The implementation of a concrete
3657base cut is very easy in most cases: it is enough to
3658implement \texttt{GetValue} method. The ANALYSIS package
3659already implements a wide range of base cuts,
3660and the cut classes have a comfortable interface for
3661setting all of them. For example it is enough to invoke
3662the \texttt{SetPtRange(min,max)} method and behind the scenes
3663a proper base cut is created and configured.
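
For illustration, a particle cut limiting the transverse momentum could be
configured as sketched below; the concrete class name
\texttt{AliAODParticleCut} and the \texttt{AddParticleCut} method of the
reader are assumptions, while \texttt{SetPtRange} is described above:

\begin{lstlisting}[language=C++]
  // Sketch: configure a particle cut and register it in a reader
  // (class and method names other than SetPtRange are assumptions).
  AliAODParticleCut* cut = new AliAODParticleCut();
  cut->SetPtRange(0.1, 2.0);     // creates and configures a pt base cut
  reader->AddParticleCut(cut);   // the reader reads matching particles
\end{lstlisting}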
3664
Base cuts performing a logical operation (and, or) on the results of two
other base cuts are also implemented. In this way the user can configure basically any
cut in a macro. Supplementary user-defined base cuts can be added in
user-provided libraries.
In case the user prefers to implement a complicated cut in a single method (class),
he can create his own base cut performing all the operations.
3671
The pair cut, in addition to an array of pointers to the base pair
cuts, has two pointers to particle cuts, one for each particle in
the pair.
3675
3676\begin{figure}
3677 \begin{center}
3678 \includegraphics[width=0.4\columnwidth, origin=c]{picts/partcuts}
3679 \end{center}
3680 \caption
3681 {Cut classes diagram on the example of the particle cut.
3682 \label{cap:soft:partcut}}
3683\end{figure}
3684
3685
3686\subsection{Other classes}
3687
We have developed a few classes that are used in correlation analyses,
but they can also be useful in other ones. The first is the TPC cluster map,
a bitmap vector describing at which pad-rows a track has a cluster.
It is used by the anti-splitting algorithm in the particle correlation
analysis.
3693
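The idea of such a cluster map can be sketched as follows (a simplified
illustration, not the actual AliRoot class; the number of TPC pad-rows is
hard-coded here only for the example):

\begin{lstlisting}[language=C++]
// Simplified illustration of a per-track cluster map; the actual
// AliRoot implementation differs. The number of pad-rows (159 for the
// ALICE TPC) is used here only for the sake of the example.
#include <bitset>

class ClusterMapSketch {
public:
  static const unsigned int kNPadRows = 159;

  void SetClusterAt(unsigned int padRow)       { fMap.set(padRow); }
  bool HasClusterAt(unsigned int padRow) const { return fMap.test(padRow); }

  // Fraction of pad-rows where both tracks have a cluster; quantities
  // like this can be used to decide whether two tracks are really distinct.
  double SharedFraction(const ClusterMapSketch& other) const {
    const std::bitset<kNPadRows> shared = fMap & other.fMap;
    return static_cast<double>(shared.count()) / kNPadRows;
  }

private:
  std::bitset<kNPadRows> fMap; // one bit per pad-row
};
\end{lstlisting}
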
Another example is the \class{AliTrackPoints} class, which stores
track space coordinates at requested distances from the center of
the detector. It is used by the anti-merging cut
in the particle correlation analysis.
The coordinates are calculated assuming a helix shape
of the track; different options defining the way they are computed
are available.
3701
3702
3703
3704%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
3705
3706\newpage
3707%\cleardoublepage
3708\section{Data input, output and exchange subsystem of AliRoot}
3709
This section is taken from Ref.~\cite{PiotrPhD}.

A few tens of different data types are present within AliRoot, because
hits, summable digits, digits and clusters are characteristic for each
sub-detector. Writing all of the event data to a single file was
causing a number of limitations.
Moreover, the reconstruction chain introduces rather complicated dependencies
between different components of the framework, which is highly
undesirable from the point of view of software design.
In order to solve both problems, we have designed a set of classes that
manage data manipulation, i.e. storage, retrieval and exchange within
the framework.
3722
It was decided to use the ``white board'' concept, i.e. a single
exchange object where all data are stored and made publicly accessible.
For that purpose the \textbf{TFolder} facility of ROOT was employed.
This solution solves the problem of inter-module dependencies.
3727
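As an illustration of the ``white board'' idea with plain ROOT classes (folder
and object names below are arbitrary and do not reproduce the actual AliRoot
layout):

\begin{lstlisting}[language=C++]
// Sketch of the "white board" idea with plain ROOT TFolders; folder and
// object names are arbitrary and do not reproduce the AliRoot layout.
#include "TROOT.h"
#include "TFolder.h"
#include "TNamed.h"

void whiteboard_sketch()
{
  // create a sub-folder under the ROOT top folder
  TFolder* top   = gROOT->GetRootFolder();
  TFolder* event = top->AddFolder("MyEvent", "example event folder");

  // a producer posts its output to the folder ...
  event->Add(new TNamed("MyData", "some published object"));

  // ... and any consumer can later retrieve it by name,
  // without knowing anything about the producer
  TObject* data = event->FindObject("MyData");
  if (data) data->Print();
}
\end{lstlisting}
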
The two most frequently occurring use-cases concerning the way a user deals with the data within the framework are:
\begin{enumerate}
\item data production -- produce - \textbf{write} - \textbf{unload} (clean)
\item data processing -- \textbf{load} (retrieve) - process - \textbf{unload}
\end{enumerate}
%
\textbf{Loaders} are the utility classes that encapsulate and
automate the tasks written in bold font.
They limit the user's interaction with the I/O routines to the
necessary minimum, providing a friendly and very simple interface
which, for the use-cases considered above, consists of only three methods:
3739\begin{itemize}
3740\item \texttt{Load} -- retrieves the requested data to the appropriate place in the
3741 white board (folder)
3742\item \texttt{Unload} -- cleans the data
3743\item \texttt{Write} -- writes the data
3744\end{itemize}
3745
Such an insulation layer has a number of advantages:
\begin{itemize}
\item it makes the data access easier for the user;
\item it avoids code duplication in the framework;
\item it minimizes the risk of bugs resulting from improper I/O management.
 The ROOT object-oriented data storage greatly simplifies the user interface;
 however, there are a few pitfalls that are frequently unknown to an
 inexperienced user.
\end{itemize}
3755
To make the description clearer, we need to briefly introduce the
basic concepts and the way the AliRoot program operates.
The basic entity is an event, i.e. all the data recorded by the
detector in a certain time interval plus all the information reconstructed
from these data. Ideally the data are produced by a single collision
selected by a trigger for recording. However, it may happen that data
from preceding or following events are also present, because the bunch
crossing rate is higher than the maximum detector readout frequency (pile-up),
or simply because more than one collision occurred within one bunch crossing.
3765
Information describing the event and the detector state is also
stored: bunch crossing number, magnetic field, configuration, alignment, etc.
In the case of Monte~Carlo simulated data, information concerning the
generator and the simulation parameters is also kept. Altogether these data
are called the \textbf{header}.
3771
For collisions that produce only a few tracks (the best example
being pp collisions) it may happen that the total overhead
(the size of the header and of the ROOT structures supporting object-oriented
data storage) is non-negligible in comparison with the data themselves.
To avoid such situations, the possibility of storing an arbitrary number
of events together within a \textbf{run} is required. Hence, the common data are
written only once per run and several events can be written to a single file.
3779
It was decided that the data related to different detectors
and different processing phases should be stored in different files.
In this way, only the required data need to be downloaded for an analysis.
It also allows the files to be replaced easily if required,
for example when a new version of the reconstruction or simulation needs
to be run for a given detector: only the new files are updated
and all the rest can stay untouched. This is especially important because
it is difficult to erase files in mass storage systems.
It also makes it easy to compare data produced with
competing algorithms.
3790
3791All the header data, configuration and management objects
3792are stored in a separate file, which is usually named galice.root
3793(for simplicity we will further refer to it as galice).
3794
3795% -----------------------------------------------------------------------------
3796
3797\subsection{The ``White Board''}
3798
The folder structure is presented in Fig.~\ref{cap:soft:folderstruct}.
It is subdivided into two parts:
\begin{itemize}
\item \textbf{event data} that have the scope of a single event;
\item \textbf{static data} that do not change from event to event,
 i.e. geometry and alignment, calibration, etc.
3805\end{itemize}
3806
3807During startup of AliRoot the skeleton structure of the ALICE white
3808board is created. The \texttt{AliConfig} class (singleton) provides all the
3809functionality that is needed to construct the folder structures.
3810
The event data are stored under a single sub-folder (event folder) named as
specified by the user when opening a session (run). Many sessions can be
opened at the same time, provided that each of them has a unique event
folder name, so they can be distinguished by this name.
This functionality is crucial for superimposing events
at the level of the summable digits, i.e. the analogue detector response without the noise
contribution (event merging). It is also useful when two events,
or the same event simulated or reconstructed with competing algorithms,
need to be compared.
3820
3821\begin{figure}
3822 \begin{center}
3823 \includegraphics[width=0.8\columnwidth, origin=c]{picts/folderstruct}
3824 \end{center}
3825 \caption
 {The folder structure. An example event is mounted under the ``Event'' folder.
3827 \label{cap:soft:folderstruct}}
3828\end{figure}
3829
3830% -----------------------------------------------------------------------------
3831
3832\subsection {Loaders}
3833
Loaders can be represented as a four-layer, tree-like structure
(see Fig.~\ref{cap:soft:loaderdiagram}) that reflects the logical structure of
the detector and the data association.
3837%
3838\begin{figure}
3839 \begin{center}
3840 \includegraphics[width=1.0\columnwidth, origin=c]{picts/loaderdiagram}
3841 \end{center}
3842 \caption
 {Loaders diagram. Dashed lines separate the layers serviced by the different types of
 loaders (from top): AliRunLoader, AliLoader, AliDataLoader, AliBaseLoader.
3845 \label{cap:soft:loaderdiagram}}
3846\end{figure}
3847
3848
3849\begin{enumerate}
3850
\item \texttt{AliBaseLoader} -- A base loader is responsible for posting
 (finding in a file and publishing in a folder) and writing
 (finding in a folder and putting in a file) a single object.
 \texttt{AliBaseLoader} is a pure virtual class, because writing and
 posting depend on the type of the object. The following concrete classes are currently implemented:
3856 \begin{itemize}
3857 \item \texttt{AliObjectLoader} -- It handles \texttt{TObject}, i.e. basically any object
3858 within ROOT and AliRoot since an object must inherit from
3859 this class to be posted to the white board
3860 (added to \texttt{TFolder}).
3861
 \item \texttt{AliTreeLoader} -- It is the base loader for \texttt{TTree}s,
 which require special
 handling because they must always be properly
 associated with a file.

 \item \texttt{AliTaskLoader} -- It handles \texttt{TTask}s, which need to be posted to the
 appropriate parent \texttt{TTask} instead of a \texttt{TFolder}.
3869 \end{itemize}
3870 \texttt{AliBaseLoader} stores the name of the object it manages in
3871 its base class \class{TNamed} to be able
3872 to find it in a file or folder. The user normally does not need to use
3873 these classes directly and they are rather utility classes employed by
3874 \texttt{AliDataLoader}.
3875
3876\item \texttt{AliDataLoader} -- It manages a single data type, for example digits for
3877 a detector or kinematics tree.
 Since a few objects are normally associated with a given
 data type (the data themselves, quality assurance (QA) data,
 a task that produces the data, a QA task, etc.),
 \texttt{AliDataLoader} has an array of \texttt{AliBaseLoader}s,
 each of them responsible for a single object.
3883 Hence, \texttt{AliDataLoader} can be configured individually to
3884 meet specific requirements of a certain data type.
3885
 A single file contains the data corresponding to a single processing
 phase and to a single detector.
 By default the file is named according to the schema
 {\it Detector Name + Data Name + .root}, but this can be
 changed at run-time if needed, so that the data can be stored in, or retrieved
 from, an alternative source. When needed,
 the user can limit the number of events stored in a single file.
 If the maximum number is exceeded, the file is closed
 and a new one is opened, with a consecutive number added
 to its name before the {\it .root} suffix. Of course,
 during the reading process files are also interchanged automatically
 behind the scenes, invisibly to the user.
3898
 The \texttt{AliDataLoader} class performs all the tasks related
 to file management, e.g. opening and closing files,
 management of ROOT directories, etc.
 Hence, the average file size can be tuned for each data type.
 This is important because it is undesirable to store small
 files on mass storage systems while, on the other hand, all file
 systems have a maximum allowed file size.
3906
3907
3908\item \texttt{AliLoader} -- It manages all the data associated with a
3909 single detector (hits, digits, summable digits, reconstructed points, etc.).
3910 It has an array of \texttt{AliDataLoaders} and each of them manages
3911 a single data type.
3912
3913 The \texttt{AliLoader} object is created by a class representing
3914 a detector (inheriting from \texttt{AliDetector}).
3915 Its functionality can be extended and customized to the needs of a
3916 particular detector by creating a specialized class that derives
3917 from \texttt{AliLoader}, as it was done, for instance, for ITS or PHOS.
3918 The default configuration can be
3919 easily modified either in \texttt{AliDetector::MakeLoader}
3920 or by overriding the method \texttt{AliLoader::InitDefaults}.
3921
3922
\item \texttt{AliRunLoader} -- It is the main handle for data access and manipulation in
 AliRoot. There is only one such object per run.
 It is always named {\it RunLoader} and stored
 in the top (ROOT) directory of the galice file.

 It keeps an array of \texttt{AliLoader}s, one for each detector.
 It also manages the event data that are not associated with any detector,
 i.e. Kinematics and Header, and it utilizes \texttt{AliDataLoader}s
 for this purpose.
3932
 The user opens a session using the static method \texttt{AliRunLoader::Open}.
 This method has three parameters: the file name, the event folder name and the mode.
 If the mode is ``new'', a file and a run loader are created from scratch.
 Otherwise, the file is opened and a run loader is searched for in it.
 If this succeeds, the event folder with the provided name
 is created (if it does not exist yet) and the structure
 presented in Fig.~\ref{cap:soft:folderstruct} is created within it.
 The run loader is
 put into the event folder, so the user can always find it there
 and use it for data management.
3943
 \texttt{AliRunLoader} provides the simple method \texttt{GetEvent(n)}
 for looping over events within a run. Calling it cleans all
 currently loaded data and automatically posts the data of
 the newly requested event.
3948
 In order to facilitate the way the user interacts with the loaders,
 \texttt{AliRunLoader} provides a wide set of shortcut methods.
 For example, if digits need to be loaded, the user can call
 \texttt{AliRunLoader::LoadDigits("ITS TPC")} instead of finding the appropriate
 \texttt{AliDataLoader}s responsible for the ITS and TPC digits
 and then requesting each of them to load the data. An illustrative usage
 sketch is given below.
3955
3956
3957\end{enumerate}
3958
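The typical data-processing use-case can then be sketched as follows. This is
only a minimal illustration assuming an existing galice file; the calls for the
number of events and for unloading are assumed to follow the naming pattern
described above, and error handling is omitted.

\begin{lstlisting}[language=C++]
// Minimal sketch of the "load - process - unload" use-case; assumes an
// existing galice.root file (in a compiled program one would also
// include "AliRunLoader.h").
void loader_sketch()
{
  // open a session: file name, event folder name, mode ("READ" assumed here)
  AliRunLoader* rl = AliRunLoader::Open("galice.root", "MyEventFolder", "READ");
  if (!rl) return;

  rl->LoadDigits("ITS TPC");              // shortcut: digits of ITS and TPC

  for (Int_t i = 0; i < rl->GetNumberOfEvents(); i++) {  // event count (assumed helper)
    rl->GetEvent(i);  // cleans previously loaded data, posts event i
    // ... process the digits found in the white board ...
  }

  rl->UnloadDigits("ITS TPC");            // clean the data (assumed counterpart of LoadDigits)
  delete rl;
}
\end{lstlisting}
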
3959\newpage
3960%\cleardoublepage
3961\section{Calibration and alignment}
3962
3963
3964\subsection{Calibration framework}
3965
3966
3967The calibration framework is based on the following principles:
3968
3969\begin{itemize}
3970
3971\item the calibration and alignment database contains ROOT TObjects stored
3972 into ROOT files;
3973
3974\item calibration and alignment objects are RUN DEPENDENT objects;
3975
3976\item the database is READ-ONLY (automatic versioning of the stored
3977 objects)
3978
\item three different data storage structures are (currently) available:
3980 \begin{itemize}
3981 \item a GRID folder containing Root files, each one containing one
3982 single Root object. The Root files are created inside a directory tree
3983 defined by the object's name and run validity range;
3984
3985 \item a LOCAL folder containing Root files, each one containing one
3986 single Root object, with a structure similar to the Grid one;
3987
3988 \item a LOCAL Root file containing one or more objects (so-called ``dump''). The
3989 objects are stored into Root TDirectories defined by the
3990 object's name and run range.
3991 \end{itemize}
3992
\item object storing and retrieval techniques are transparent to the user:
 he/she should only specify the kind of storage to be used ("grid",
 "local", "dump"). Objects are stored and retrieved using the AliCDBStorage
 public methods:

 \begin{lstlisting}[language=C++]
 Bool_t AliCDBStorage::Put(...)
 AliCDBEntry* AliCDBStorage::Get(...)
 \end{lstlisting}
4003
 In addition, multiple objects can be retrieved using

 \begin{lstlisting}[language=C++]
 TList* AliCDBStorage::GetAll(...)
 \end{lstlisting}
 which returns a list of AliCDBEntry objects.
4009
4010\item During object retrieval, the user has the possibility to retrieve the
4011 highest version of the object or to specify a particular version by means
4012 of one or more selection criteria.
4013\end{itemize}
4014
4015\noindent
4016\textbf{Features of the CDB storage classes}
4017
4018% see the talk here \url{http://indico.cern.ch/conferenceDisplay.py?confId=a055286}
4019
4020\begin{itemize}
\item MANAGER class AliCDBManager. It is a singleton which handles
 the instantiation, usage and destruction of all the storage classes. It
 allows the instantiation of more than one storage type at a time, keeping
 track of the list of active storages. The instantiation of a storage
 element is done by means of the AliCDBManager public method GetStorage. A
4026 storage element is identified by its "URI" (a string) or by its
4027 "parameters". The set of parameters defining each storage is contained in
4028 its specific \class{AliCDBParam} class (\class{AliCDBGridParam}, \class{AliCDBLocalParam},
4029 \class{AliCDBDumpParam}).
4030
4031\item Versioning schema. In order to avoid version clashes when objects
4032 are transferred from grid to local and vice versa, we have introduced a
4033 new versioning schema. Basically the objects are defined by TWO version
4034 numbers: a "Grid" version and a "Local" version (subVersion). In local
4035 storage only the local version is increased, while in Grid storage only
4036 the Grid version is increased. When the object is transferred from local
4037 to Grid the Grid version is increased by one; when the object is
4038 transferred from Grid to Local the Grid version is kept and the subVersion
4039 is reset to zero. %You can find a plot of this schema on my talk (page 11).
4040
\item The container class of the object and its metadata
 (AliCDBEntry). The metadata of the object has been divided into two
 classes: one which contains the data used to store and retrieve the object
 (the "identity" of the object, AliCDBId) and the other containing the metadata
 which is not used during storage and retrieval (AliCDBMetaData).
4046
4047 The AliCDBId object in turn contains:
4048 \begin{itemize}
4049 \item an object describing the name (path) of the object (AliCDBPath). The
4050 path name must have a fixed, three-level directory structure:
4051 "level1/level2/level3"
4052 \item an object describing the run validity range of the object
4053 (AliCDBRunRange)
4054 \item the version and subversion numbers (automatically set during storage)
4055 \item a string (fLastStorage) specifying from which storage the object was
4056 retrieved ("new", "grid", "local", "dump")
4057 \end{itemize}
4058
4059 The AliCDBId object has two functions:
4060 \begin{itemize}
4061 \item during storage it is used to specify the path and run range of the
4062 object;
4063 \item during retrieval it is used as a "query": it contains the
4064 path of the object, the required run and (if needed) the
4065 version and subversion to be retrieved (if version and/or
4066 subversion are not specified the highest ones are looked for).
4067 \end{itemize}
4068\end{itemize}
4069
4070\noindent
4071\textbf{Some usage examples}
4072
4073The following use cases are illustrated:
4074
4075\begin{itemize}
4076\item A pointer to the single instance of the AliCDBManager class is obtained
4077 with:
4078
4079 \begin{lstlisting}[language=C++]
4080 AliCDBManager::Instance()
4081 \end{lstlisting}
4082
\item A storage is activated and a pointer to it is returned using the
 \method{AliCDBManager::GetStorage(const char* URI)} method. Here are
 some examples of how to activate a storage via a URI string. The
 URIs must have a well-defined syntax, for example (local cases):
4087
 \begin{itemize}
 \item "local://DBFolder" points to a local storage with base directory "DBFolder",
 created in the working directory if it does not exist

 \item "local://\$ALICE\_ROOT/DBFolder" points to a local storage with base directory
 "\$ALICE\_ROOT/DBFolder" (full path name)

 \item "dump://DBFile.root" points to a Dump storage. The file DBFile.root is looked
 for or created in the working directory if the full path is not specified

 \item "dump://DBFile.root;ReadOnly" points to a Dump storage. DBFile.root is
 opened in read-only mode.
 \end{itemize}
4101
4102\item Concrete examples (local case):
4103
4104 \begin{lstlisting}[language=C++]
 AliCDBStorage *sto =
 AliCDBManager::Instance()->GetStorage("local://DBFolder");

 AliCDBStorage *dump =
 AliCDBManager::Instance()->GetStorage("dump:///data/DBFile.root;ReadOnly");
4110 \end{lstlisting}
4111
4112\item Creation and storage of an object. Example of how an
4113 object can be created and stored in a local database
4114
4115 \begin{itemize}
4116 \item Let's suppose our object is an AliZDCCalibData object (container of
4117 arrays of pedestals constants), whose name is
4118 "ZDC/Calib/Pedestals" and is valid for run 1 to 10.
4119
4120 \begin{lstlisting}[language=C++]
4121 AliZDCCalibData *calibda = new AliZDCCalibData();
4122 // ... filling calib data...
4123
4124 // creation of the AliCDBId object (identifier of the object)
4125 AliCDBId id("ZDC/Calib/Pedestals",1,10);
4126
4127 // creation and filling of the AliCDBMetaData
4128 AliCDBMetaData *md = new AliCDBMetaData();
4129 md->Set... // fill meta data object, see list of setters...
4130
4131 // Activation of local storage
4132 AliCDBStorage *sto =
4133 AliCDBManager::Instance()->GetStorage("local://$HOME/DBFolder");
4134
4135 // put object into database
4136 sto->Put(calibda, id, md);
4137 \end{lstlisting}
4138 The object is stored into local file:
4139 \$HOME/DBFolder/ZDC/Calib/Pedestals/Run1\_10\_v0\_s0.root
4140
4141 \item Examples of how to retrieve an object
4142
4143 \begin{lstlisting}[language=C++]
4144 // Activation of local storage
4145 AliCDBStorage *sto =
4146 AliCDBManager::Instance()->GetStorage("local://$HOME/DBFolder");
4147
 // Get the AliCDBEntry which contains the object "ZDC/Calib/Pedestals",
 // valid for run 5, highest version
 AliCDBEntry* entry = sto->Get("ZDC/Calib/Pedestals",5);
 // alternatively, create an AliCDBId query and use sto->Get(query) ...

 // specifying the version: I want version 2
 entry = sto->Get("ZDC/Calib/Pedestals",5,2);

 // specifying version and subversion: I want version 2 and subVersion 1
 entry = sto->Get("ZDC/Calib/Pedestals",5,2,1);
4158 \end{lstlisting}
4159
4160 \item Selection criteria can be also specified using
4161 \method{AliCDBStorage::AddSelection(...)} methods:
4162
4163 \begin{lstlisting}[language=C++]
 // I want version 2_1 for all "ZDC/Calib/*" objects for runs 1-100
 sto->AddSelection("ZDC/Calib/*",1,100,2,1);
 // and I want version 1_0 for "ZDC/Calib/Pedestals" objects for runs 5-10
 sto->AddSelection("ZDC/Calib/Pedestals",5,10,1,0);

 AliCDBEntry* entry = sto->Get("ZDC/Calib/Pedestals",5);
4170 \end{lstlisting}
4171
4172 See also: \method{AliCDBStorage::RemoveSelection(...),
4173 RemoveAllSelections(), PrintSelectionList()}
4174
4175 \item Retrieval of multiple objects with \method{AliCDBStorage::GetAll()}
4176
4177 \begin{lstlisting}[language=C++]
 TList *list = sto->GetAll("ZDC/*",5);
4179 \end{lstlisting}
4180 \end{itemize}
4181
4182\item Use of Default storage and Drain storages
4183
 AliCDBManager allows one to set pointers to a "default storage" and to a
 "drain storage". In particular, if the drain storage is set, all the
 retrieved objects are automatically stored into it.
4187
4188 The default storage is automatically set as the first active storage. To
4189 set the default storage to another storage:
4190
4191 \begin{lstlisting}[language=C++]
4192 AliCDBManager::Instance()->SetDefaultStorage("uri")
4193 \end{lstlisting}
4194
 The default storage can then be used as follows:
4196 \begin{lstlisting}[language=C++]
4197 AliCDBEntry *entry =
4198 AliCDBManager::Instance()->GetDefaultStorage()->Get(...)
4199 \end{lstlisting}
4200
4201 The drain storage can be set in a similar way:
4202
4203 \begin{lstlisting}[language=C++]
4204 AliCDBManager::Instance()->SetDrain("uri")
4205 \end{lstlisting}
4206
 There are some AliCDBManager public methods to handle the default and
 drain storages:
4209
4210 \begin{lstlisting}[language=C++]
4211 Bool_t IsDefaultStorageSet()
4212 void RemoveDefaultStorage()
4213 Bool_t IsDrainSet()
4214 void RemoveDrain()
4215 \end{lstlisting}
4216
4217\item Example of how to use default and drain storage:
4218
4219 \begin{lstlisting}[language=C++]
4220 AliCDBManager::Instance()->SetDefaultStorage("local://$HOME/DBFolder");
4221 AliCDBManager::Instance()->SetDrain("dump://$HOME/DBDrain.root");
4222
 AliCDBEntry *entry =
 AliCDBManager::Instance()->GetDefaultStorage()->Get("ZDC/Calib/Pedestals",5);
4225 // Retrieved entry is automatically stored into DBDrain.root !
4226 \end{lstlisting}
4227
4228\item To destroy the AliCDBManager instance and all the active storages:
4229
4230 \begin{lstlisting}[language=C++]
4231 AliCDBManager::Instance()->Destroy()
4232 \end{lstlisting}
4233\end{itemize}
4234%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
4235
4236\appendix
4237
4238\section{Kalman filter}
4239Kalman filtering is quite a general and powerful method for statistical
4240estimations and predictions. The conditions for its
4241applicability are the following. A certain `system' is
4242determined at any moment in time $t_k$ by a state vector $x_k$. The state
4243vector varies with time according to an evolution
4244equation
4245\[ x_k = f_k(x_{k-1}) + \epsilon_k . \]
4246It is supposed that $f_k$ is
4247a known deterministic function and $\epsilon_k$ is a random vector of intrinsic
4248`process noise' which has a zero mean value ($<\epsilon_k> = 0$) and a known
4249covariance matrix (${\rm cov}(\epsilon_k) = Q_k$). Generally, only some function
4250$h_k$ of the state vector can be observed, and the result of the
4251observation $m_k$ is
4252corrupted by a `measurement noise' $\delta_k$:
4253\[ m_k = h_k(x_k) + \delta_k. \]
4254The measurement noise is supposed to be unbiased ($<\delta_k> = 0$) and have a
4255definite covariance matrix (${\rm cov}(\delta_k) = V_k$). In many cases, the
4256measurement function $h_k$ can be represented by a
4257certain matrix $H_k$:
4258\[ m_k = H_kx_k + \delta_k .\]
4259
4260If, at a certain time $t_{k-1}$, we are given
4261some estimates of the state vector $\tilde{x}_{k-1}$ and of
4262its covariance matrix $\tilde{C}_{k-1} = {\rm cov}(\tilde{x}_{k-1}-x_{k-1})$,
4263we can extrapolate
4264these estimates to the next time slot $t_k$ by means of formulas
4265(this is called `prediction'):
4266\begin{eqnarray}
4267 \tilde{x}_k^{k-1} &=& f_k(\tilde{x}_{k-1}) \nonumber \\
4268 \tilde{C}_k^{k-1} &=& F_k\tilde{C}_{k-1}F_k^T + Q_k\mbox{,\ \ \ \ }
4269 F_k=\frac{\displaystyle\partial f_k}{\displaystyle\partial x_{k-1}} .
4270 \nonumber
4271 % \label{pre}
4272\end{eqnarray}
The value of the predicted $\chi^2$ increment can also be calculated:
4274\begin{equation}
4275 (\chi^2)_k^{k-1} = (r_k^{k-1})^T(R_k^{k-1})^{-1}r_k^{k-1}\mbox{,\ \ \ \ }
4276 r_k^{k-1} = m_k - H_k\tilde{x}_k^{k-1}\mbox{,\ \ \ \ }
4277 R_k^{k-1} = V_k + H_k\tilde{C}_k^{k-1}H_k^T .
4278 \nonumber
4279 % \label{chi}
4280\end{equation}
4281The number of degrees of freedom is equal to the dimension of the vector $m_k$.
4282
4283If at the moment $t_k$, together with the results of prediction, we also
4284have the results of the state vector measurement,
4285this additional information can be combined with the prediction results
4286(this is called `filtering'). As a consequence, the estimation of the state
4287vector improves with respect to the previous step:
4288\begin{eqnarray}
4289 \tilde{x}_k &=& \tilde{x}_k^{k-1} + K_k(m_k - H_k\tilde{x}_k^{k-1})\nonumber\\
4290 \tilde{C}_k &=& \tilde{C}_k^{k-1} - K_kH_k\tilde{C}_k^{k-1},
4291 \nonumber
4292 % \label{fil}
4293\end{eqnarray}
4294where $K_k$ is the Kalman gain matrix
4295$
4296K_k = \tilde{C}_k^{k-1}H_k^T(V_k + H_k\tilde{C}_k^{k-1}H_k^T)^{-1}.
4297$
4298
4299Finally, the next formula gives us the value of the filtered $\chi^2$ increment:
4300\[
4301\chi^2_k = (r_k)^T(R_k)^{-1}r_k\mbox{,\ \ \ \ }
4302r_k = m_k - H_k\tilde{x}_k\mbox{,\ \ \ \ }
4303R_k = V_k - H_k\tilde{C}_kH_k^T .
4304\]
4305It can be shown that the predicted $\chi^2$ value is equal to the filtered
4306one:
4307\begin{equation}
4308 (\chi^2)_k^{k-1} = \chi^2_k \label{chi=chi} .
4309\end{equation}
4310
4311The `prediction' and `filtering' steps are repeated as many times as we have
4312measurements of the state vector.
4313
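The prediction and filtering formulas above can be illustrated, for the simplest
case of a one-dimensional state and measurement with $f_k$ and $H_k$ equal to
unity, by the following sketch (plain C++, not the AliRoot tracking code):

\begin{lstlisting}[language=C++]
// One-dimensional illustration of the prediction and filtering steps;
// just the formulas of this appendix written out for scalar x, m
// with F = H = 1.
#include <cstdio>

struct State {
  double x; // estimated state
  double C; // its variance
};

// prediction: x_k^{k-1} = x_{k-1},  C_k^{k-1} = C_{k-1} + Q
State Predict(const State& s, double Q) {
  State p = { s.x, s.C + Q };
  return p;
}

// filtering: combine the prediction with a measurement m of variance V
State Filter(const State& p, double m, double V, double* chi2) {
  const double R = V + p.C;      // covariance of the residual
  const double K = p.C / R;      // Kalman gain (H = 1)
  const double r = m - p.x;      // residual
  if (chi2) *chi2 += r * r / R;  // predicted chi2 increment
  State f = { p.x + K * r, p.C - K * p.C };
  return f;
}

int main() {
  State  s      = { 0.0, 1.0 };        // initial estimate and variance
  double meas[] = { 0.9, 1.1, 1.0 };   // toy measurements
  double chi2   = 0.0;
  for (int k = 0; k < 3; k++) {
    s = Filter(Predict(s, 0.01), meas[k], 0.25, &chi2);
  }
  std::printf("x = %.3f  C = %.3f  chi2 = %.3f\n", s.x, s.C, chi2);
  return 0;
}
\end{lstlisting}
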
4314\section {Bayesian approach for combined particle identification}\label{BayesianPID}
4315
4316Particle identification over a large momentum range and
4317for many particle species is often one of the main design
4318requirements of high energy physics experiments.
4319The ALICE detectors are able to
4320identify particles with momenta from 0.1 GeV/$c$ up to
432110 GeV/$c$. This can be achieved by combining
4322several detecting systems that are efficient in some narrower and
4323complementary momentum sub-ranges. The situation is complicated by
4324the amount of data to be processed (about $10^7$ events with
4325about $10^4$ tracks in each). Thus, the particle identification
4326procedure should satisfy the following
4327requirements:
4328\begin{enumerate}
\item It should be as automatic as possible.
4330\item It should be able to combine PID signals of different nature
4331({\it e.g.} $dE/dx$ and time-of-flight measurements).
4332\item When several detectors contribute to the PID, the procedure must profit
4333 from this situation by providing an improved PID.
4334\item When only some detectors identify a particle, the signals from the other
4335 detectors must not affect the combined PID.
4336\item It should take into account the fact that, due to
4337different event and track selection, the PID depends on the kind of analysis.
4338\end{enumerate}
4339
4340In this report we will demonstrate that combining PID signals in a Bayesian way
4341satisfies all these requirements.
4342
4343\subsection{Bayesian PID with a single detector}
Let $r(s|i)$ be the conditional probability density function to observe in some
detector a PID signal $s$ if a particle of $i$-type
($i=e, \mu, \pi, K, p, ...$)
is detected. The probability to be a particle of $i$-type if the signal
$s$ is observed, $w(i|s)$, depends not only on $r(s|i)$, but also
on how often this type of particle is registered in the considered experiment
(the {\it a priori} probability $C_i$ to find this
kind of particle in the detector). The corresponding relation is
given by Bayes' formula:
4353
4354\begin{equation}\label{eq:bayes}
4355 w(i|s)={r(s|i) C_i \over \sum_{k=e, \mu, \pi, ...}{r(s|k) C_k}}
4356\end{equation}
4357
4358Under some reasonable conditions, $C_i$ and $r(s|i)$ are not correlated
4359so that one can rely on the following approximation:
4360\begin{itemize}
4361\item The functions $r(s|i)$ reflect only properties of the detector
4362(``detector response functions'') and do not depend on
4363other external conditions like event and track selections.
\item On the contrary, the quantities $C_i$ (``relative concentrations'' of
particles of $i$-type) do not depend on the detector
properties, but do reflect the external conditions, selections, {\it etc}.
4367\end{itemize}
4368
4369The PID procedure is done in the following way. First,
4370 the detector response
4371function is obtained. Second, a value $r(s|i)$ is assigned to
4372each track.
4373Third, the relative concentrations $C_i$ of particle species are
4374 estimated for a subset of events and tracks selected in a specific
4375physics analysis.
4376Finally, an array of probabilities $w(i|s)$ is calculated (see Eq.~\ref{eq:bayes}) for each track within the selected
4377subset.
4378
4379The probabilities $w(i|s)$ are often called PID weights.
4380
The conditional probability density function $r(s|i)$
(detector response function) can always be parameterized with sufficient
precision using the available experimental data.

In the simplest approach, the {\it a priori} probabilities
$C_i$ (relative concentrations of particles of $i$-type)
can be assumed to be equal.
4388
4389However, in many cases one can do better. Thus, for example in ALICE,
4390when doing
4391PID in the TPC for the tracks that are registered both in the TPC and
4392in the Time-Of-Flight detector (TOF), these probabilities
4393can be estimated using the measured time-of-flight. One simply fills a
4394histogram of the following quantity:
4395\begin{equation}
4396m={p\over {\beta\gamma}}=p\sqrt{{{c^2t^2}\over{l^2}} - 1},
4397\end{equation}
4398where $p$ and $l$ are the reconstructed track momentum and length and $t$
4399is the measured time-of-flight. Such a histogram peaks near the values
4400$m$ that correspond to the masses of particles.
4401
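As an illustration, the quantity above can be computed per track from the
reconstructed momentum, the track length and the measured time-of-flight; the
units assumed in this sketch (GeV/$c$, cm and ns) are a choice made only for
the example:

\begin{lstlisting}[language=C++]
// Mass estimate from time-of-flight, as in the formula above.
// Units assumed here: p in GeV/c, length l in cm, time t in ns.
#include <cmath>

double MassFromTOF(double p, double l, double t)
{
  const double c = 29.9792458;        // speed of light in cm/ns
  const double a = (c * t) / l;       // 1/beta
  if (a <= 1.0) return 0.0;           // unphysical (beta >= 1), protect the sqrt
  return p * std::sqrt(a * a - 1.0);  // m = p * sqrt(c^2 t^2 / l^2 - 1)
}
\end{lstlisting}
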
Forcing some of the $C_i$ to be exactly zero excludes the
corresponding particle types from the PID analysis, and such particles will
be redistributed over the other particle classes (see Eq.~\ref{eq:bayes}).
This can be useful for the kinds of analysis in which, for particles
of a certain type, one is not concerned about
the contamination but, at the same time, the efficiency of the PID is
of particular importance.
4409
4410
4411\subsection{PID combined over several detectors}
This method can easily be applied to combining PID measurements
from several detectors. Considering the whole system of $N$ contributing
detectors as a single ``super-detector'', one can write the combined
PID weights $W(i|\bar{s})$ in a form similar to that given by
Eq.~\ref{eq:bayes}:
4417
4418\begin{equation}\label{eq:bayes1}
4419 W(i|\bar{s})={R(\bar{s}|i) C_i \over \sum_{k=e, \mu, \pi,
4420 ...}{R(\bar{s}|k) C_k}} ,
4421\end{equation}
where $\bar{s}=\{s_1, s_2, ..., s_N\}$ is the vector of PID signals registered in
the first, second and other contributing detectors,
$C_i$ are the {\it a priori} probabilities to be a particle of $i$-type
(the same as in Eq.~\ref{eq:bayes}) and
$R(\bar{s}|i)$ is the combined response function of the whole system
of detectors.
4428
If the single detector PID measurements $s_j$ are uncorrelated (which is
approximately true in the case of the ALICE experiment), the
combined response function is the product of the single response functions
$r(s_j|i)$ (the ones in Eq.~\ref{eq:bayes}):
4433
4434\begin{equation}\label{eq:resp}
4435 R(\bar{s}|i)=\prod_{j=1}^{N}r(s_j|i).
4436\end{equation}
4437
4438One obtains the following expression for the PID weights combined over the
4439whole system of detectors:
4440
4441\begin{equation}\label{eq:bayes2}
4442 W(i|s_1, s_2, ..., s_N)={\displaystyle C_i \prod_{j=1}^{N}r(s_j|i) \over
4443\displaystyle \sum_{k=e, \mu, \pi, ...}C_k \prod_{j=1}^{N}r(s_j|k)}
4444\end{equation}
4445
4446
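A direct transcription of Eq.~\ref{eq:bayes2} could look as follows; this is
only a sketch with plain arrays, and the function name and interface are
invented for the example:

\begin{lstlisting}[language=C++]
// Sketch of the combined PID weights: combine per-detector response
// values r[j][i] with the a priori concentrations C[i] into weights W[i].
#include <cstddef>

// nSpecies particle types, nDet detectors;
// r[j][i] = response of detector j for species i, C[i] = a priori probability.
void CombinePID(std::size_t nSpecies, std::size_t nDet,
                const double* const* r, const double* C, double* W)
{
  double sum = 0.0;
  for (std::size_t i = 0; i < nSpecies; i++) {
    double prod = C[i];
    for (std::size_t j = 0; j < nDet; j++) prod *= r[j][i]; // C_i * prod_j r(s_j|i)
    W[i] = prod;
    sum += prod;
  }
  // normalise so that the weights sum up to one
  for (std::size_t i = 0; i < nSpecies; i++) W[i] = (sum > 0.0) ? W[i] / sum : 0.0;
}
\end{lstlisting}
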
4447In the program code, the combined response functions $R(\bar{s}|i)$
4448do not necessarily have to be treated as analytical. They can be ``procedures''
4449(C++ functions, for example). Also, some additional effects like
4450probabilities to obtain a mis-measurement (mis-matching) in one or several
4451contributing detectors can be accounted for.
4452
4453The formula Eq.~\ref{eq:bayes2} has the following useful features:
4454\begin{itemize}
4455\item If for a certain particle momentum one (or several) of the detectors
4456 is not able to identify the particle type ({\it i.e.} $r(s|i)$ are equal
4457 for all $i=e, \mu, ...$), the contribution of such a detector cancels out
4458 from the formula.
4459\item When several detectors are capable of separating the particle types,
4460their contributions are accumulated with proper weights, thus providing
4461an improved combined particle identification.
4462\item Since the single detector response functions $r(s|i)$ can be obtained
4463 in advance at the calibration step and the combined response
4464 can be approximated by Eq.~\ref{eq:resp}, a part of PID (calculation of
4465 the $R(\bar{s}|i)$ ) can be done track-by-track
4466 ``once and forever'' by the reconstruction software and the results
4467 can be stored in the Event Summary Data. The final PID decision,
4468 being dependent via the {\it a priory} probabilities $C_i$ on the event
 being dependent via the {\it a priori} probabilities $C_i$ on the event
 and track selections, is then postponed until the physics analysis of the
4471\end{itemize}
4472
\subsection{Stability with respect to variations of the {\it a priori} probabilities}
Since the results of this PID procedure explicitly depend on the choice
of the {\it a priori} probabilities $C_i$ (and, in fact, this kind of
dependence is unavoidable in any case), the question of the stability of the
results with respect to the almost arbitrary choice of $C_i$ becomes important.
4478
4479Fortunately, there is always some momentum region where the single detector
4480response functions for different particle types of at least one of the
4481detectors do not significantly overlap, and so the stability
is guaranteed. The more detectors enter the combined PID procedure, the wider
this momentum region becomes and the more stable the results are.
4484
Detailed simulations using the AliRoot framework show that the results of the
PID combined over all the ALICE central
detectors are, within a few per cent, stable with respect to
variations of $C_i$ up to at least 3~GeV/$c$.
4489
4490
4491\subsection{Features of the Bayesian PID}
Particle identification in the ALICE experiment at the LHC can be done in a Bayesian
way. The procedure consists of three parts:
4494\begin{itemize}
4495\item First, the single detector PID response functions
4496$r(s|i)$ are obtained. This is done by the calibration software.
4497\item Second, for each reconstructed track the combined PID response
4498 $R(\bar{s}|i)$
4499 is calculated and effects of possible mis-measurements of the PID signals
4500 can be accounted for. The results are written to the Event Summary Data and,
4501 later, are used in all kinds of physics analysis of the data.
4502 This is a part of the reconstruction software.
4503\item And finally, for each kind of physics analysis, after the corresponding
 event and track selection is done, the {\it a~priori} probabilities $C_i$ to
4505be a particle of a certain $i$-type within the selected subset are estimated
4506and the PID weights $W(i|\bar{s})$ are calculated by means of formula
4507Eq.~\ref{eq:bayes2}. This part of the PID procedure belongs to the
4508analysis software.
4509\end{itemize}
4510
The advantages of the particle identification procedure described here are:
\begin{itemize}
\item The fact that, due to different event and track selection, the PID depends
on the particular kind of physics analysis performed is naturally taken into
account.
\item The capability to combine, in a common way, signals from detectors of
quite different nature and with different shapes of the PID response functions (silicon, gas,
time-of-flight, transition radiation and Cherenkov detectors).
4519\item No interactive multidimensional graphical cuts are involved.
4520 The procedure is fully automatic.
4521
4522\end{itemize}
4523
4524\section{Vertex estimation using tracks}\label{VertexerTracks}
4525
4526Each track, reconstructed in the TPC and in the ITS,
4527is approximated with a straight line at the
4528position of the closest approach to the nominal primary vertex position
4529(the nominal vertex position is supposed to be known with a precision
4530of 100--200 $\mu$m).
4531Then, all possible
4532track pairs $(i,j)$ are considered and for each pair, the center
4533$C(i,j)\equiv(x_{ij},y_{ij},z_{ij})$ of the segment of minimum approach
4534between the two lines is found. The coordinates of the primary vertex
4535are determined as:
4536\[
4537% x_{\rm v}={1\over N_{\rm pairs}}\sum_{i,j}x_{ij}\:; \:\:\:\:\:\:
4538% y_{\rm v}={1\over N_{\rm pairs}}\sum_{i,j}y_{ij}\:; \:\:\:\:\:\:
4539% z_{\rm v}={1\over N_{\rm pairs}}\sum_{i,j}z_{ij} \:\:\:\:\:\:
4540x_{\rm v}=\frac{1}{N_{\rm pairs}}\sum_{i,j}x_{ij}\:; \:\:\:\:\:\:
4541y_{\rm v}=\frac{1}{N_{\rm pairs}}\sum_{i,j}y_{ij}\:; \:\:\:\:\:\:
4542z_{\rm v}=\frac{1}{N_{\rm pairs}}\sum_{i,j}z_{ij} \:\:\:\:\:\:
4543\]
4544where $N_{\rm pairs}$ is the number of track pairs.
4545This gives an improved estimate of the vertex position.
4546
4547Finally, the position $\textbf{r}_{\rm v}=(x_{\rm v},y_{\rm v},z_{\rm v})$ of the
4548vertex is reconstructed minimizing the
4549$\chi^2$ function (see~Ref.~\cite{VERTEX:cmsvtxnote}):
4550\begin{equation}
4551 \label{eq:chi2}
4552 \chi^2(\textbf{r}_{\rm v})=\sum_i (\textbf{r}_{\rm v}-\textbf{r}_i)^T\,{\bf
4553 V}_i^{-1}(\textbf{r}_{\rm v}-\textbf{r}_i),
4554\end{equation}
4555where $\textbf{r}_i$ is the global position of the
4556track $i$ (i.e. the position assigned at the step above)
4557and $\textbf{V}_i$ is the covariance matrix of the vector $\textbf{r}_i$.
4558
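For fixed covariance matrices the minimum of this quadratic form can also be
written in closed form as a covariance-weighted mean of the track positions;
this is shown here only to make explicit what the fit computes, while the
actual implementation may proceed differently:
\[
\textbf{r}_{\rm v}=\Big(\sum_i {\bf V}_i^{-1}\Big)^{-1}\sum_i {\bf V}_i^{-1}\,\textbf{r}_i .
\]
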
4559In order not to spoil the vertex resolution by including in the fit tracks that
4560do not originate from the primary vertex (e.g. strange particle
4561decay tracks), the tracks giving a
4562contribution larger than some value $\chi^2_{\rm max}$ to the global $\chi^2$
4563are removed one-by-one from the sample, until no such tracks are left. The
4564parameter $\chi^2_{\rm max}$ was tuned,
4565as a function of the event multiplicity, so as to obtain the best vertex
4566resolution.
4567
\section{Glossary}
4569
4570\begin{description}
4571
4572\item[ADC]Analogue to Digital Conversion/Converter
4573\item[AFS]Andrew File System\\{\footnotesize \url{http://en.wikipedia.org/wiki/Andrew_file_system}}
4574\item[ALICE]A Large Ion Collider Experiment\\{\footnotesize \url{http://aliceinfo.cern.ch/}}
4575\item[AOD]Analysis Object Data
4576\item[API]Application Program Interface
4577\item[ARDA]Architectural Roadmap towards Distributed Analysis\\{\footnotesize \url{http://lcg.web.cern.ch/LCG/activities/arda/arda.html}}
4578\item[AliRoot]ALIce offline framework\\{\footnotesize \url{http://aliceinfo.cern.ch/offline}}
4579\item[CA]Certification Authority
4580\item[CASTOR]CERN Advanced STORage\\{\footnotesize \url{http://castor.web.cern.ch/castor/}}
4581\item[CDC]Computing Data Challenge
4582\item[CDF]Collider Detector at Fermilab
4583\item[CE]Computing Element\\{\footnotesize \url{http://aliceinfo.cern.ch/static/AliEn/AliEn_Instalation/ch06s07.html}}
4584\item[CERN]European Organization for Nuclear Research\\{\footnotesize \url{http://www.cern.ch}}
4585\item[CINT]C/C++ INTerpreter that is embedded in ROOT\\{\footnotesize \url{http://root.cern.ch/root/Cint.html}}
\item[CRT]Cosmic Ray Trigger, the official name is ACORDE
4587\item[CVS]Concurrent Versioning System\\{\footnotesize \url{http://www.nongnu.org/cvs/}}
4588\item[DAQ]Data AcQuisition system\\{\footnotesize \url{http://cern.ch/alice-daq}}
4589\item[DATE]Data Acquisition and Test Environment\\{\footnotesize \url{http://cern.ch/alice-daq}}
4590\item[DCA]Distance of Closest Approach
4591\item[DCS]Detector Control System\\{\footnotesize \url{http://alicedcs.web.cern.ch/alicedcs/}}
\item[DPMJET]Dual Parton Model Monte Carlo event generator\\{\footnotesize \url{http://sroesler.web.cern.ch/sroesler/dpmjet3.html}}
4593\item[EGEE]Enabling Grid for E-sciencE project\\{\footnotesize \url{http://public.eu-egee.org/}}
4594\item[EMCal]Electromagnetic Calorimeter
4595\item[ESD]Event Summary Data
4596\item[FLUKA]A fully integrated particle physics MonteCarlo simulation package\\{\footnotesize \url{http://www.fluka.org/}}
4597\item[FMD]Forward Multiplicity Detector\\{\footnotesize \url{http://fmd.nbi.dk/}}
4598\item[FSI]Final State Interactions
4599\item[GAG]Grid Application Group\\{\footnotesize \url{http://project-lcg-gag.web.cern.ch/project-lcg-gag/}}
4600\item[GUI]Graphical User Interface
\item[GeVSim]Fast Monte Carlo event generator, based on MEVSIM
4602\item[Geant4]A toolkit for simulation of the passage of particles through matter\\{\footnotesize \url{http://geant4.web.cern.ch/geant4/}}
4603\item[HBT]Hanbury Brown and Twiss
4604\item[HEP]High Energy Physics
4605\item[HEPCAL]HEP Common Application Area
\item[HERWIG]Monte Carlo package for simulating Hadron Emission Reactions With Interfering Gluons\\{\footnotesize \url{http://cernlib.web.cern.ch/cernlib/mc/herwig.html}}
4607\item[HIJING]Heavy Ion Jet Interaction Generator
4608\item[HLT]High Level Trigger\\{\footnotesize \url{http://wiki.kip.uni-heidelberg.de/ti/HLT/index.php/Main_Page}}
4609\item[HMPID]High Momentum Particle IDentification\\{\footnotesize \url{http://alice-hmpid.web.cern.ch/alice-hmpid/}}
4610\item[ICARUS]Imaging Cosmic And Rare Underground Signals\\{\footnotesize \url{http://pcnometh4.cern.ch/}}
4611\item[IP]Internet Protocol
4612\item[ITS]Inner Tracking System; collective name for SSD, SPD and SDD
4613\item[JETAN]JET ANalysis module
4614\item[LCG]LHC Computing Grid\\{\footnotesize \url{http://lcg.web.cern.ch/LCG/}}
4615\item[LDAP]Lightweight Directory Access Protocol
4616\item[LHC]Large Hadron Collider\\{\footnotesize \url{http://lhc.web.cern.ch/lhc/}}
4617\item[LSF]Load Sharing Facility\\{\footnotesize \url{http://wwwpdp.web.cern.ch/wwwpdp/bis/services/lsf/}}
4618\item[MC]Monte Carlo
4619\item[MoU]Memorandum of Understanding
4620\item[OCDB]Offline Calibration DataBase\\{\footnotesize \url{http://aliceinfo.cern.ch/Offline/Activities/ConditionDB.html}}
4621\item[OO]Object Oriented
4622\item[OS]Operating System
4623\item[PAW]Physics Analysis Workstation\\{\footnotesize \url{http://paw.web.cern.ch/paw/}}
4624\item[PDC]Physics Data Challenge
4625\item[PDF]Particle Distribution Function
4626\item[PEB]Project Execution Board
4627\item[PHOS]PHOton Spectrometer
4628\item[PID]Particle IDentity/IDentification
4629\item[PMD]Photon Multiplicity Detector\\{\footnotesize \url{http://www.veccal.ernet.in/~pmd/ALICE/alice.html}}
\item[PPR]Physics Performance Report\\{\footnotesize \url{http://alice.web.cern.ch/Alice/ppr/}}
4631\item[PROOF]Parallel ROOT Facility\\{\footnotesize \url{http://root.cern.ch/root/doc/RootDoc.html}}
4632\item[PWG]Physics Working Group\\{\footnotesize \url{http://aliceinfo.cern.ch/Collaboration/PhysicsWorkingGroups/}}
4633\item[PYTHIA]event generator
4634\item[QA]Quality Assurance
4635\item[QCD]Quantum ChromoDynamics
4636\item[QS]Quantum Statistics
4637\item[RICH]Ring Imaging CHerenkov\\{\footnotesize \url{http://alice-hmpid.web.cern.ch/alice-hmpid/}}
4638\item[ROOT]A class library for data analysis\\{\footnotesize \url{http://root.cern.ch}}
4639\item[RTAG]Requirements and Technical Assessment Group
4640\item[SDD]Silicon Drift Detector
4641\item[SDTY]Standard Data Taking Year
4642\item[SE]Storage Element
4643\item[SI2k]SpecInt2000 CPU benchmark\\{\footnotesize \url{http://cerncourier.com/articles/cnl/1/11/9/1}}
4644\item[SLC]Scientific Linux CERN\\{\footnotesize \url{http://linuxsoft.cern.ch/}}
4645\item[SOA]Second Order Acronym
4646\item[SPD]Silicon Pixel Detector\\{\footnotesize \url{http://www.pd.infn.it/spd/}}
4647\item[SSD]Silicon Strip Detector
4648\item[TDR]Technical Design Report\\{\footnotesize \url{http://alice.web.cern.ch/Alice/TDR/}}
4649\item[TOF]Time Of Flight Detector\\{\footnotesize \url{http://alice.web.cern.ch/Alice/Projects/TOF/}}
4650\item[TPC]Time Projection Chamber\\{\footnotesize \url{http://alice.web.cern.ch/Alice/Projects/TPC/}}
4651\item[TRD]Transition Radiation Detector\\{\footnotesize \url{http://www-alice.gsi.de/trd/index.html}}
4652\item[UI]User Interface
4653\item[UID]Unique IDentification number
4654\item[URL]Universal Resource Locator
4655\item[VMC]Virtual Monte Carlo
4656\item[VO]Virtual Organization
4657\item[VOMS]Virtual Organization Membership Service
4658\item[WAN]Wide Area Network
4659\item[XML]Extensible Markup Language\\{\footnotesize \url{http://www.w3.org/XML/}}
4660\item[ZDC]Zero Degree Calorimeter
4661\end{description}
4662
4664%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
4665
4666\begin{thebibliography}{99}
4667
4668\bibitem{PPR} CERN/LHCC 2003-049, ALICE Physics Performance Report,
4669 Volume 1 (7 November 2003); \\
4670 ALICE Collaboration: F. Carminati {\it et al.}, J. Phys. G: Nucl.
4671 Part. Phys. \textbf{30} (2004) 1517--1763.
4672
4673\bibitem{CompTDR} CERN-LHCC-2005-018, ALICE Technical Design Report:
4674 Computing, ALICE TDR 012 (15 June 2005).
4675
4676\bibitem{ROOT} \url{http://root.cern.ch}
4677
4678\bibitem{Geant3}
4679\url{http://wwwasdoc.web.cern.ch/wwwasdoc/geant_html3/geantall.html}
4680
4681\bibitem{FLUKA} \url{http://www.fluka.org}
4682
\bibitem{Geant4} \url{http://cern.ch/geant4}
4684
4685\bibitem{MC:PYTH} H.-U.~Bengtsson and T.~Sjostrand, Comput. Phys.
4686 Commun. \textbf{46} (1987) 43; \newline the code can be found in
4687 \url{http://nimis.thep.lu.se/~torbjorn/Pythia.html} \newline
4688 T.~Sjostrand, Comput. Phys. Commun. \textbf{82} (1994) 74; \newline
4689 the code can be found in
4690 \url{http://www.thep.lu.se/~torbjorn/Pythia.html}
4691
4692\bibitem{MC:HIJING} X.~N.~Wang and M.~Gyulassy, Phys. Rev.
4693 \textbf{D44} (1991) 3501. \newline M.~Gyulassy and X.~N.~Wang,
4694 Comput. Phys. Commun. \textbf{83} (1994) 307-331. \newline The code
4695 can be found in \url{http://www-nsdth.lbl.gov/~xnwang/hijing/}
4696
4697\bibitem{AliEn}
4698 \url{http://alien.cern.ch}
4699
4700\bibitem{SLC} \url{http://linux.web.cern.ch/linux}
4701
4702\bibitem{RedHat} \url{http://www.redhat.com}
4703
4704\bibitem{Fedora} \url{http://fedora.redhat.com}
4705
4706\bibitem{Linux} \url{http://www.linux.org}
4707
4708\bibitem{gcc} \url{http://gcc.gnu.org}
4709
4710\bibitem{icc}
4711 \url{http://www.intel.com/cd/software/products/asmo-na/eng/compilers/index.htm}
4712
4713\bibitem{VTune}
4714 \url{http://www.intel.com/cd/software/products/asmo-na/eng/vtune/index.htm}
4715
4716\bibitem{Itanium}
4717 \url{http://www.intel.com/products/processor/itanium2/index.htm}
4718
4719\bibitem{AMD} \url{http://www.amd.com}
4720
4721\bibitem{CVS} \url{http://www.cvshome.org}
4722
4723\bibitem{CVSManual} \url{http://ximbiot.com/cvs/manual}
4724
4725\bibitem{CLHEP} \url{http://cern.ch/clhep}
4726
4727\bibitem{CASTOR2} \url{http://cern.ch/castor}
4728
4729\bibitem{MC:DPMJET} J.~Ranft, Phys. Rev. \textbf{D 51} (1995) 64.
4730
4731\bibitem{MC:ISAJET}
4732\url{http://arxiv.org/abs/hep-ph/0312045}
4733
4734\bibitem{MC:HERWIG} HERWIG 6.5, G. Corcella, I.G. Knowles, G. Marchesini, S. Moretti,
4735K. Odagiri, P. Richardson, M.H. Seymour and B.R. Webber, JHEP 0101
4736(2001) 010 [hep-ph/0011363]; hep-ph/0210213
4737
4738\bibitem{DATE}
4739 ALICE-INT-2003-036
4740
4741\bibitem{MC:CDF} F.~Abe {\it et al.}, (CDF Collaboration), Phys. Rev.
4742 Lett.\textbf{61} (1988) 1819.
4743
4744\bibitem{MC:LUND} B.~Andersson, {\it et al.,} Phys. Rep. \textbf{97}
4745 (1983) 31.
4746
4747\bibitem{MC:FRITIOF} B.~Andersson, {\it et al.,} Nucl. Phys.
4748 \textbf{B281} (1987) 289; \newline B.~Nilsson-Almqvist and
4749 E.~Stenlund, Comput. Phys. Commun. \textbf{43} (1987) 387.
4750
4751\bibitem{MC:DPM} A.~Capella, {\it et al.,} Phys. Rep. \textbf{236}
4752 (1994) 227.
4753
4754\bibitem{MC:HIJINGparam} A.~Morsch,
4755 \url{http://home.cern.ch/~morsch/AliGenerator/AliGenerator.html} and
4756 \url{http://home.cern.ch/~morsch/generator.html}
4757
4758\bibitem{MC:NA35FIN} NA35 Collaboration, T.~Alber et al., \newblock Z.
4759 Phys. \textbf{C 64} (1994) 195.
4760
4761\bibitem{MC:Alber98} NA35 Collaboration, T.~Alber et al., \newblock
4762 Eur. Z. Phys. \textbf{C2} (1998) 643.
4763
4764\bibitem{MC:Kharzeev96} D.~Kharzeev: \newblock Phys. Lett. \textbf{B
4765 378} (1996) 238.
4766
4767\bibitem{MC:Capella96} A.~Capella and B.~Kopeliovich, \newblock Phys.
4768 Lett. \textbf{B381} (1996) 325.
4769
4770\bibitem{MC:Barrett77} R.~V. Barrett and D.~F. Jackson, \newblock {\em
4771 Nuclear sizes and structure,} \newblock Clarendon Press, Oxford,
4772 1977.
4773
4774\bibitem{MC:Roesler96b} S.~Roesler, R.~Engel and J.~Ranft, \newblock
4775 Phys. Rev. \textbf{D57} (1998) 2889.
4776
4777\bibitem{MC:Roesler99} S.~Roesler, \newblock {personal communication},
4778 1999.
4779
4780\bibitem{MC:Gluck95a} M.~Gl\"uck, E.~Reya and A.~Vogt: \newblock Z.\
4781 Phys.\ \textbf{C67} (1995) 433.
4782
4783\bibitem{MC:Gluck98a} M.~Gl\"uck, E.~Reya and A.~Vogt, \newblock Eur.\
4784 Phys.\ J. \textbf{C5} (1998) 461.
4785
4786\bibitem{MC:MEVSIM} L. Ray and R.S. Longacre, STAR Note 419.
4787
4788\bibitem{MC:GEVSIM} S. Radomski and Y. Foka, ALICE Internal Note 2002-31.
4789
4790\bibitem{MC:TMEVSIM}
4791 \url{http://radomski.home.cern.ch/~radomski/AliMevSim.html}
4792
4793\bibitem{MC:Radomski} \url{http://home.cern.ch/~radomski}
4794
4795\bibitem{MC:HBTproc} L. Ray and G.W. Hoffmann. Phys. Rev. \textbf{C
4796 54}, (1996) 2582, Phys. Rev. \textbf{C60}, (1999) 014906.
4797
4798\bibitem{MC:PiotrSk} P.~K.~Skowro\'nski, ALICE HBT Web Page,
4799 \url{http://aliweb.cern.ch/people/skowron}
4800
4801\bibitem{MC:POSCANCER} A.M.~Poskanzer and S.A.~Voloshin, Phys. Rev.
4802 \textbf{C 58}, (1998) 1671.
4803
4804\bibitem{MC:AlscherHT97} A.~Alscher, K.~Hencken, D.~Trautmann, and
4805 G.~Baur. \newblock Phys. Rev.~A \textbf{55}, (1997) 396.
4806
4807\bibitem{MC:Sadovsky} K.~Hencken, Y.~Kharlov, and S.~Sadovsky, ALICE
4808 Internal Note 2002-27.
4809
4810\bibitem{RootUsersGuide}
4811 \url{http://root.cern.ch/root/doc/RootDoc.html}
4812
4813\bibitem{CoordinateSystem} L.Betev, ALICE-PR-2003-279
4814
4815\bibitem{MC:billoir} P.~Billoir; NIM \textbf{A225} (1984) 352,
4816 P.~Billoir {\it et al.};
4817 NIM \textbf{A241} (1985) 115, \\
4818 R.Fruhwirth; NIM \textbf{A262} (1987) 444, P.Billoir; \textbf{CPC}
4819 (1989) 390.
4820
4821\bibitem{PPRVII} CERN/LHCC 2005-049, ALICE Physics Performance Report,
4822 Volume 2 (5 December 2005);
4823
4824\bibitem{VERTEX:cmsvtxnote} V.~Karim\"aki, CMS Note 1997/051 (1997).
4825
4826\bibitem{CH6Ref:gShell}
4827 \url{http://alien.cern.ch/download/current/gClient/gShell\_Documentation.html}
4828
4829\bibitem{CH6Ref:gLite} \url{http://glite.web.cern.ch/glite}
4830
4831\bibitem{CH6Ref:PROOF} \url{http://root.cern.ch/root/PROOF.html}
4832
4833\bibitem{CH6Ref:ITS_TDR} CERN/LHCC 99-12.
4834
4835\bibitem{CH6Ref:TPC_TDR} CERN/LHCC 2000-001.
4836
4837\bibitem{CH6Ref:Dainese} A.~Dainese, PhD Thesis, University of Padova,
4838 2003, [arXiv:nucl-ex/0311004].
4839
4840\bibitem{CH6Ref:Stavinsky} A.~Stavinsky {\it et al}, NUKLEONIKA
4841 \textbf{49} (Supplement 2) (2004) 23--25;
4842 \url{http://www.ichtj.waw.pl/ichtj/nukleon/back/full/vol49_2004/v49s2p023f.pdf}
4843\bibitem{CH6Ref:HBTAN} P.K.~Skowro\'nski for ALICE Collaboration,
4844 [arXiv:physics/0306111].
4845
4846\bibitem{CH6Ref:Weights} R.~Lednick\'y and V.L.~Lyuboshitz, Sov. J.
4847 Nucl. Phys. \textbf{35} (1982) 770.
4848
4849\bibitem{CH6Ref:CRAB}
4850 \url{http://www.nscl.msu.edu/~pratt/freecodes/crab/home.html}
4851
4852\bibitem{CH6Ref:Loizides}
4853 C.~Loizides, PhD Thesis, University of Frankfurt, 2005,
4854 [arXiv:nucl-ex/0501017].
4855
4856\bibitem{PiotrPhD}
 P.~K.~Skowro\'nski, PhD Thesis.
4858
4859\end{thebibliography}
4860
4861\end{document}