1 \documentclass[12pt,a4paper,twoside]{article}
2 \usepackage[margin=2cm]{geometry}
9 \newcommand{\class}[1]{\texttt{\textbf{#1}}\xspace}
10 \newcommand{\method}[1]{\texttt{#1}\xspace}
11 \renewcommand{\rmdefault}{ptm}
14 % ---------------------------------------------------------------
15 % define new commands/symbols
16 % ---------------------------------------------------------------
25 \hyphenation{da-ta-ba-ses}
27 \newcommand{\pt}{\ensuremath{p_{\mathrm{t}}}}
28 \newcommand{\et}{\ensuremath{E_{\mathrm{T}}}}
29 \newcommand {\pT} {\mbox{$p_{\rm t}$}}
30 \newcommand{\mt}{\ensuremath{m_{\mathrm{t}}}}
31 \newcommand {\grid} {Grid\@\xspace}
32 \newcommand {\MC} {Monte~Carlo\@\xspace}
33 \newcommand {\alien} {AliEn\@\xspace}
34 \newcommand {\pp} {\mbox{p--p}\@\xspace}
35 \newcommand {\pA} {\mbox{p--A}\@\xspace}
36 \newcommand {\PbPb} {\mbox{Pb--Pb}\@\xspace}
37 \newcommand {\aliroot} {AliRoot\@\xspace}
38 \newcommand {\ROOT} {ROOT\@\xspace}
39 \newcommand {\OO} {Object-Oriented\@\xspace}
41 \newcommand{\mrm}{\mathrm}
42 \newcommand{\dd}{\mrm{d}}
43 \newcommand{\elm}{e.m.\@\xspace}
44 \newcommand{\eg}{e.g.\@\xspace}
45 \newcommand{\ie}{i.e.\@\xspace}
46 \newcommand{\Jpsi} {\mbox{J\kern-0.05em /\kern-0.05em$\psi$}\xspace}
47 \newcommand{\psip} {\mbox{$\psi^\prime$}\xspace}
48 \newcommand{\Ups} {\mbox{$\Upsilon$}\xspace}
49 \newcommand{\Upsp} {\mbox{$\Upsilon^\prime$}\xspace}
50 \newcommand{\Upspp} {\mbox{$\Upsilon^{\prime\prime}$}\xspace}
51 \newcommand{\qqbar} {\mbox{$q\bar{q}$}\xspace}
53 \newcommand {\grad} {\mbox{$^{\circ}$}}
55 \newcommand {\rap} {\mbox{$\left | y \right | $}}
56 \newcommand {\mass} {\mbox{\rm GeV$\kern-0.15em /\kern-0.12em c^2$}}
57 \newcommand {\tev} {\mbox{${\rm TeV}$}}
58 \newcommand {\gev} {\mbox{${\rm GeV}$}}
59 \newcommand {\mev} {\mbox{${\rm MeV}$}}
60 \newcommand {\kev} {\mbox{${\rm keV}$}}
61 \newcommand {\mom} {\mbox{\rm GeV$\kern-0.15em /\kern-0.12em c$}}
62 \newcommand {\mum} {\mbox{$\mu {\rm m}$}}
63 \newcommand {\gmom} {\mbox{\rm GeV$\kern-0.15em /\kern-0.12em c$}}
64 \newcommand {\mmass} {\mbox{\rm MeV$\kern-0.15em /\kern-0.12em c^2$}}
65 \newcommand {\mmom} {\mbox{\rm MeV$\kern-0.15em /\kern-0.12em c$}}
66 \newcommand {\nb} {\mbox{\rm nb}}
67 \newcommand {\musec} {\mbox{$\mu {\rm s}$}}
68 \newcommand {\cmq} {\mbox{${\rm cm}^{2}$}}
69 \newcommand {\cm} {\mbox{${\rm cm}$}}
70 \newcommand {\mm} {\mbox{${\rm mm}$}}
71 \newcommand {\dens} {\mbox{${\rm g}\,{\rm cm}^{-3}$}}
73 \newcommand{\FR}{ALICE alignment framework}
74 \newcommand{\tgeo}{\lstinline!TGeo!}
76 \lstset{ % general command to set parameter(s)
77 % basicstyle=\small, % print whole listing small
78 basicstyle=\ttfamily, % print whole listing monospace
79 keywordstyle=\bfseries, % bold black keywords
80 identifierstyle=, % identifiers in italic
81 commentstyle=\itshape, % white comments in italic
82 stringstyle=\ttfamily, % typewriter type for strings
83 showstringspaces=false, % no special string spaces
84 columns=fullflexible, % Flexible columns
85 xleftmargin=2em, % Extra margin, left
86 xrightmargin=2em, % Extra margin, right
87 numbers=left, % Line numbers on the left
88 numberfirstline=true, % First line numbered
89 firstnumber=1, % Always start at 1
90 stepnumber=5, % Every fifth line
91 numberstyle=\footnotesize\itshape, % Style of line numbers
92 frame=lines} % Lines above and below listings
95 % ---------------------------------------------------------
96 % - End of Definitions
97 % ---------------------------------------------------------
101 \title{AliRoot Primer}
102 \author{Editor P.Hristov}
103 \date{Version v4-05-06 \\
111 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
113 \section{Introduction}\label{Introduction}
115 % -----------------------------------------------------------------------------
118 \subsection{About this primer}
120 The aim of this primer is to give some basic information about the
121 ALICE offline framework (AliRoot) from the user's perspective. We explain
122 in detail the installation procedure, and give examples of some
123 typical use cases: detector description, event generation, particle
124 transport, generation of ``summable digits'', event merging,
125 reconstruction, particle identification, and generation of event
126 summary data. The primer also includes some examples of analysis, and
127 a short description of the existing analysis classes in AliRoot. An
128 updated version of the document can be downloaded from
129 \url{http://aliceinfo.cern.ch/Offline/AliRoot/primer.html}.
131 For the reader interested by the AliRoot architecture and by the
132 performance studies done so far, a good starting point is Chapter 4 of
133 the ALICE Physics Performance Report\cite{PPR}. Another important
134 document is the ALICE Computing Technical Design Report\cite{CompTDR}.
135 Some information contained there has been included in the present
136 document, but most of the details have been omitted.
138 AliRoot uses the ROOT\cite{ROOT} system as a foundation on which the
139 framework for simulation, reconstruction and analysis is built. The
140 transport of the particles through the detector is carried on by the
141 Geant3\cite{Geant3} or FLUKA\cite{FLUKA} packages. Support for
142 the Geant4\cite{Geant4} transport package is coming soon.
144 Except for large existing libraries, such as Pythia6\cite{MC:PYTH} and
145 HIJING\cite{MC:HIJING}, and some remaining legacy code, this framework
146 is based on the Object Oriented programming paradigm, and it is
149 The following packages are needed to install the fully operational
150 software distribution:
152 \item ROOT, available from \url{http://root.cern.ch}
153 or using the ROOT CVS repository
155 :pserver:cvs@root.cern.ch:/user/cvs
157 \item AliRoot from the ALICE offline CVS repository
159 :pserver:cvs@alisoft.cern.ch:/soft/cvsroot
161 \item transport packages:
163 \item GEANT~3 is available from the ROOT CVS repository
164 \item FLUKA library can
165 be obtained after registration from \url{http://www.fluka.org}
166 \item GEANT~4 distribution from \url{http://cern.ch/geant4}.
170 The access to the GRID resources and data is provided by the
171 AliEn\cite{AliEn} system.
173 The installation details are explained in Section \ref{Installation}.
175 \subsection{AliRoot framework}\label{AliRootFramework}
177 In HEP, a framework is a set of software tools that enables data
178 processing. For example the old CERN Program Library was a toolkit to
179 build a framework. PAW was the first example of integration of tools
180 into a coherent ensemble specifically dedicated to data analysis. The
181 role of the framework is shown in Fig.~\ref{MC:Parab}.
185 \includegraphics[width=10cm]{picts/Parab}
186 \caption{Data processing framework.} \label{MC:Parab}
189 The primary interactions are simulated via event generators, and the
190 resulting kinematic tree is then used in the transport package. An
191 event generator produces a set of ``particles'' with their momenta. The
192 set of particles, where one maintains the production history (in the
193 form of a mother-daughter relationship and production vertex) forms the
194 kinematic tree. More details can be found in the ROOT documentation of
195 class \class{TParticle}. The transport package transports the
196 particles through the set of detectors, and produces \textbf{hits},
197 which in ALICE terminology means energy deposition at a given
198 point. The hits contain also information (``track labels'') about the
199 particles that have generated them. In case of calorimeters (PHOS and
200 EMCAL) the hit is the energy deposition in the whole active volume of
201 a detecting element. In some detectors the energy of the hit is used
202 only for comparison with a given threshold, for example in TOF and ITS
205 At the next step the detector response is taken into account, and the
206 hits are transformed into \textbf{digits}. As it was explained above,
207 the hits are closely related to the tracks which generated them. The
208 transition from hits/tracks to digits/detectors is marked on the
209 picture as ``disintegrated response'', the tracks are
210 ``disintegrated'' and only the labels carry the \MC information.
211 There are two types of digits: \textbf{summable digits}, where one
212 uses low thresholds and the result is additive, and {\bf digits},
213 where the real thresholds are used, and result is similar to what one
214 would get in the real data taking. In some sense the {\bf summable
215 digits} are precursors of the \textbf{digits}. The noise simulation is
216 activated when \textbf{digits} are produced. There are two differences
217 between the \textbf{digits} and the \textbf{raw} data format produced
218 by the detector: firstly, the information about the \MC particle
219 generating the digit is kept as data member of the class
220 \class{AliDigit}, and secondly, the raw data are stored in binary
221 format as ``payload'' in a ROOT structure, while the digits are stored
222 in ROOT classes. Two conversion chains are provided in AliRoot:
223 \textbf{hits} $\to$ \textbf{summable digits} $\to$ \textbf{digits},
224 and \textbf{hits} $\to$ \textbf{digits}. The summable digits are used
225 for the so called ``event merging'', where a signal event is embedded
226 in a signal-free underlying event. This technique is widely used in
227 heavy-ion physics and makes it possible to reuse the underlying events with
228 substantial economy of computing resources. Optionally it is possible
229 to perform the conversion \textbf{digits} $\to$ \textbf{raw data},
230 which is used to estimate the expected data size, to evaluate the high
231 level trigger algorithms, and to carry on the so called computing data
232 challenges. The reconstruction and the HLT algorithms can work both
233 with \textbf{digits} or with \textbf{raw data}. There is also the
234 possibility to convert the \textbf{raw data} between the following
235 formats: the format coming from the front-end electronics (FEE)
236 through the detector data link (DDL), the format used in the data
237 acquisition system (DAQ), and the ``rootified'' format. More details
238 are given in section \ref{Simulation}.
240 After the creation of digits, the reconstruction and analysis chain
241 can be activated to evaluate the software and the detector
242 performance, and to study some particular signatures. The
243 reconstruction takes as input digits or raw data, real or simulated.
244 The user can intervene into the cycle provided by the framework to
245 replace any part of it with his own code or implement his own analysis
246 of the data. I/O and user interfaces are part of the framework, as are
247 data visualization and analysis tools and all procedures that are
248 considered of general enough interest to be introduced into the
249 framework. The scope of the framework evolves with time as the needs
250 and understanding of the physics community evolve.
252 The basic principles that have guided the design of the AliRoot
253 framework are re-usability and modularity. There are almost as many
254 definitions of these concepts as there are programmers. However, for
255 our purpose, we adopt an operative heuristic definition that expresses
256 our objective to minimize the amount of unused or rewritten code and
257 maximize the participation of the physicists in the development of the
260 \textbf{Modularity} allows replacement of parts of our system with
261 minimal or no impact on the rest. Not every part of our system is
262 expected to be replaced. Therefore we are aiming at modularity
263 targeted to those elements that we expect to change. For example, we
264 require the ability to change the event generator or the transport \MC
265 without affecting the user code. There are elements that we do not
266 plan to interchange, but rather to evolve in collaboration with their
267 authors such as the ROOT I/O subsystem or the ROOT User Interface
268 (UI), and therefore no effort is made to make our framework modular
269 with respect to these. Whenever an element has to be modular in the
270 sense above, we define an abstract interface to it. The codes from the
271 different detectors are independent so that different detector groups
272 can work concurrently on the system while minimizing the
273 interference. We understand and accept the risk that at some point the
274 need may arise to make modular a component that was not designed to
275 be. For these cases, we have elaborated a development strategy that
276 can handle design changes in production code.
278 \textbf{Re-usability} is the protection of the investment made by the
279 programming physicists of ALICE. The code embodies a large scientific
280 knowledge and experience and is thus a precious resource. We preserve
281 this investment by designing a modular system in the sense above and
282 by making sure that we maintain the maximum amount of backward
283 compatibility while evolving our system. This naturally generates
284 requirements on the underlying framework prompting developments such
285 as the introduction of automatic schema evolution in ROOT.
287 The \textbf{support} of the AliRoot framework is a collaborative effort
288 within the ALICE experiment. Questions, suggestions, topics for
289 discussion and messages are exchanged in the mailing list
290 \url{alice-off@cern.ch}. Bug reports and tasks are submitted on the
291 Savannah page \url{http://savannah.cern.ch/projects/aliroot/}.
293 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
297 \section{Installation and development tools}\label{Installation}
299 % -----------------------------------------------------------------------------
301 \subsection{Platforms and compilers}
303 The main development and production platform is Linux on Intel 32 bits
304 processors. The official Linux\cite{Linux} distribution at CERN is
305 Scientific Linux SLC\cite{SLC}. The code works also on
306 RedHat\cite{RedHat} version 7.3, 8.0, 9.0, Fedora Core\cite{Fedora} 1
307 -- 5, and on many other Linux distributions. The main compiler on
308 Linux is gcc\cite{gcc}: the recommended version is gcc 3.2.3 --
309 3.4.6. The older releases (2.91.66, 2.95.2, 2.96) have problems in the
310 FORTRAN optimization which has to be switched off for all the FORTRAN
311 packages. AliRoot can be used with gcc 4.0.X where the FORTRAN
312 compiler g77 is replaced by g95. The last release series of gcc (4.1)
313 works with gfortran as well. As an option you can use the Intel
314 icc\cite{icc} compiler, which is supported as well. You can download
315 it from \url{http://www.intel.com} and use it free of charge for
316 non-commercial projects. Intel also provides free of charge the
317 VTune\cite{VTune} profiling tool which is really one of the best
320 AliRoot is supported on Intel 64 bit processors
321 (Itanium\cite{Itanium}) running Linux. Both the gcc and Intel icc
322 compilers can be used.
324 On 64 bit AMD\cite{AMD} processors such as Opteron AliRoot runs
325 successfully with the gcc compiler.
327 The software is also regularly compiled and run on other Unix
328 platforms. On Sun (SunOS 5.8) we recommend the CC compiler Sun
329 WorkShop 6 update 1 C++ 5.2. The WorkShop integrates nice debugging
330 and profiling facilities which are very useful for code development.
332 On Compaq alpha server (Digital Unix V4.0) the default compiler is cxx
333 ( Compaq C++ V6.2-024 for Digital UNIX V4.0F). Alpha provides also its
334 profiling tool pixie, which works well with shared libraries. AliRoot
335 works also on alpha server running Linux, where the compiler is gcc.
337 Recently AliRoot was ported to MacOS (Darwin). This OS is very
338 sensitive to the circular dependences in the shared libraries, which
339 makes it very useful as a test platform.
341 % -----------------------------------------------------------------------------
343 \subsection{Essential CVS information}
345 CVS\cite{CVS} stands for Concurrent Versions System. It permits a
346 group of people to work simultaneously on groups of files (for
347 instance program sources). It also records the history of files, which
348 allows back tracking and file versioning. The official CVS Web page is
349 \url{http://www.cvshome.org/}. CVS has a host of features, among them
350 the most important are:
352 \item CVS facilitates parallel and concurrent code development;
353 \item it provides easy support and simple access;
354 \item it has the possibility to establish group permissions (for example
355 only detector experts and CVS administrators can commit code to
356 given detector module).
358 CVS has a rich set of commands; the most important are described below.
359 There exist several tools for visualization, logging and control which
360 work with CVS. More information is available in the CVS documentation
361 and manual\cite{CVSManual}.
363 Usually the development process with CVS has the following features:
365 \item all developers work on their \underline{own} copy of the project
366 (in one of their directories)
367 \item they often have to \underline{synchronize} with a global
368 repository both to update with modifications from other people and
369 to commit their own changes.
372 Here below we give an example of a typical CVS session
374 \begin{lstlisting}[language=sh]
375 # Login to the repository. The password is stored in ~/.cvspass
376 # If no cvs logout is done, the password remains there and
377 # one can access the repository without new login
378 % cvs -d :pserver:hristov@alisoft.cern.ch:/soft/cvsroot login
379 (Logging in to hristov@alisoft.cern.ch)
383 # Check-Out a local version of the TPC module
384 % cvs -d :pserver:hristov@alisoft.cern.ch:/soft/cvsroot checkout TPC
385 cvs server: Updating TPC
392 # compile and test modifications
394 # Commit your changes to the repository with an appropriate comment
395 % cvs commit -m "add include file xxx.h" AliTPC.h
396 Checking in AliTPC.h;
397 /soft/cvsroot/AliRoot/TPC/AliTPC.h,v <-- AliTPC.h
398 new revision: 1.9; previous revision:1.8
403 Instead of specifying the repository and user name by the -d option, one
404 can export the environment variable CVSROOT, for example
406 \begin{lstlisting}[language=sh]
407 % export CVSROOT=:pserver:hristov@alisoft.cern.ch:/soft/cvsroot
410 Once the local version has been checked out, inside the directory tree
411 the CVSROOT is not needed anymore. The name of the actual repository
412 can be found in CVS/Root file. This name can be redefined again using
415 In case somebody else has committed some changes in AliTPC.h file, the
416 developer has to update the local version, merging his own changes
417 before committing them:
419 \begin{lstlisting}[language=sh]
420 % cvs commit -m "add include file xxx.h" AliTPC.h
421 cvs server: Up-to-date check failed for `AliTPC.h'
422 cvs [server aborted]: correct above errors first!
425 cvs server: Updating .
426 RCS file: /soft/cvsroot/AliRoot/TPC/AliTPC.h,v
427 retrieving revision 1.9
428 retrieving revision 1.10
429 Merging differences between 1.9 and 1.10 into AliTPC.h
432 # edit, compile and test modifications
434 % cvs commit -m "add include file xxx.h" AliTPC.h
435 Checking in AliTPC.h;
436 /soft/cvsroot/AliRoot/TPC/AliTPC.h,v <-- AliTPC.h
437 new revision: 1.11; previous revision: 1.10
441 \textbf{Important note:} CVS performs a purely mechanical merging, and
442 it is the developer's responsibility to verify the result of this operation. It is
443 especially true in case of conflicts, when the CVS tool is not able to
444 merge the local and remote modifications consistently.
447 \subsection{Main CVS commands}
449 In the following examples we suppose that the CVSROOT environment
450 variable is set, as it was shown above. In case a local version has
451 been already checked out, the CVS repository is defined automatically
452 inside the directory tree.
455 \item\textbf{login} stores password in .cvspass. It is enough to login
456 once to the repository.
458 \item\textbf{checkout} retrieves the source files of AliRoot version v4-04-Rev-08
459 \begin{lstlisting}[language=sh]
460 % cvs co -r v4-04-Rev-08 AliRoot
463 \item\textbf{update} retrieves modifications from the repository and
464 merges them with the local ones. The -q option reduces the verbose
465 output, and the -z9 sets the compression level during the data
466 transfer. The option -A removes all the ``sticky'' tags, -d removes
467 the obsolete files from the local distribution, and -P retrieves the
468 new files which are missing from the local distribution. In this way
469 the local distribution will be updated to the latest code from the
470 main development branch.
471 \begin{lstlisting}[language=sh]
472 % cvs -qz9 update -AdP STEER
475 \item\textbf{diff} shows differences between the local and repository
476 versions of the whole module STEER
477 \begin{lstlisting}[language=sh]
478 % cvs -qz9 diff STEER
481 \item \textbf{add} adds files or directories to the repository. The
482 actual transfer is done when the commit command is invoked.
483 \begin{lstlisting}[language=sh]
484 % cvs -qz9 add AliTPCseed.*
487 \item\textbf{remove} removes old files or directories from the
488 repository. The -f option forces the removal of the local files. In
489 the example below the whole module CASTOR will be scheduled for
491 \begin{lstlisting}[language=sh]
492 % cvs remove -f CASTOR
495 \item\textbf{commit} checks in the local modifications to the
496 repository and increments the versions of the files. In the example
497 below all the changes made in the different files of the module
498 STEER will be committed to the repository. The -m option is
499 followed by the log message. In case you don't provide it you will
500 be prompted by an editor window. No commit is possible without the
501 log message which explains what was done.
502 \begin{lstlisting}[language=sh]
503 % cvs -qz9 commit -m ``Coding convention'' STEER
506 \item\textbf{tag} creates new tags and/or branches (with -b option).
507 \begin{lstlisting}[language=sh]
508 % cvs tag -b v4-05-Release .
510 \item\textbf{status} returns the actual status of a file: revision,
511 sticky tag, dates, options, and local modifications.
512 \begin{lstlisting}[language=sh]
513 % cvs status Makefile
516 \item\textbf{logout} removes the password which is stored in
517 \$HOME/.cvspass. It is not really necessary unless the user really
518 wants to remove the password from that account.
522 % -----------------------------------------------------------------------------
524 \subsection{Environment variables}
526 Before the installation of AliRoot the user has to set some
527 environment variables. In the following examples the user is working
528 on Linux and the default shell is bash. It is enough to add to the
529 .bash\_profile file few lines as shown below:
531 \begin{lstlisting}[language=sh]
533 export ROOTSYS=/home/mydir/root
534 export PATH=$PATH\:$ROOTSYS/bin
535 export LD_LIBRARY_PATH=$LD_LIBRARY_PATH\:$ROOTSYS/lib
538 export ALICE=/home/mydir/alice
539 export ALICE_ROOT=$ALICE/AliRoot
540 export ALICE_TARGET=`root-config --arch`
541 export PATH=$PATH\:$ALICE_ROOT/bin/tgt_${ALICE_TARGET}
542 export LD_LIBRARY_PATH=$LD_LIBRARY_PATH\:$ALICE_ROOT/lib/tgt_${ALICE_TARGET}
545 export PLATFORM=`root-config --arch` # Optional, defined otherwise in Geant3 Makefile
547 LD_LIBRARY_PATH=$LD_LIBRARY_PATH\:$ALICE/geant3/lib/tgt_${ALICE_TARGET}
550 export FLUPRO=$ALICE/fluka # $FLUPRO is used in TFluka
551 export PATH=$PATH\:$FLUPRO/flutil
553 # Geant4: see the details later
556 where ``/home/mydir'' has to be replaced with the actual directory
557 path. The meaning of the environment variables is the following:
559 \texttt{ROOTSYS} -- the place where the ROOT package is located;
561 \texttt{ALICE} -- top directory for all the software packages used in ALICE;
563 \texttt{ALICE\_ROOT} -- the place where the AliRoot package is located, usually
564 as subdirectory of ALICE;
566 \texttt{ALICE\_TARGET} -- specific platform name. Up to release
567 v4-01-Release this variable was set to the result of ``uname''
568 command. Starting from AliRoot v4-02-05 the ROOT naming schema was
569 adopted, and the user has to use ``root-config --arch'' command.
571 \texttt{PLATFORM} -- the same as ALICE\_TARGET for the GEANT~3
572 package. Until GEANT~3 v1-0 the user had to use `uname` to specify the
573 platform. From version v1-0 on the ROOT platform is used instead
574 (``root-config --arch''). This environment variable is set by default
575 in the Geant3 Makefile.
578 % -----------------------------------------------------------------------------
580 \subsection{Software packages}
582 \subsubsection{AliEn}
584 The installation of AliEn is the first one to be done if you plan to
585 access the GRID or need GRID-enabled Root. You can download the AliEn
586 installer and use it in the following way:
587 \begin{lstlisting}[language=sh, title={AliEn installation}]
588 % wget http://alien.cern.ch/alien-installer
589 % chmod +x alien-installer
592 The alien-installer runs a dialog which prompts for the default
593 selection and options. The default installation place for AliEn is
594 /opt/alien, and the typical packages one has to install are ``client''
599 All ALICE offline software is based on ROOT\cite{ROOT}. The ROOT
600 framework offers a number of important elements which are exploited in
604 \item a complete data analysis framework including all the PAW
606 \item an advanced Graphic User Interface (GUI) toolkit;
607 \item a large set of utility functions, including several commonly
608 used mathematical functions, random number generators,
609 multi-parametric fit and minimization procedures;
610 \item a complete set of object containers;
611 \item integrated I/O with class schema evolution;
612 \item C++ as a scripting language;
613 \item documentation tools.
615 There is a nice ROOT user's guide which incorporates important and
616 detailed information. For those who are not familiar with ROOT a good
617 starting point is the ROOT Web page at \url{http://root.cern.ch}. Here
618 the experienced users may find easily the latest version of the class
619 descriptions and search for useful information.
622 The recommended way to install ROOT is from the CVS sources, as it is
626 \item Login to the ROOT CVS repository if you haven't done it yet.
627 \begin{lstlisting}[language=sh]
628 % cvs -d :pserver:cvs@root.cern.ch:/user/cvs login
632 \item Download (check out) the needed ROOT version (v5-13-04 in the example)
633 \begin{lstlisting}[language=sh]
634 % cvs -d :pserver:cvs@root.cern.ch:/user/cvs co -r v5-13-04 root
636 The appropriate combinations of Root, Geant3 and AliRoot versions
638 \url{http://aliceinfo.cern.ch/Offline/AliRoot/Releases.html}
640 \item The code is stored in the directory ``root''. You have to go
641 there, set the ROOTSYS environment variable (if this is not done in
642 advance), and configure ROOT. The ROOTSYS contains the full path to
645 \lstinputlisting[language=sh, title={Root configuration}]{scripts/confroot}
647 \item Now you can compile and test ROOT
648 \lstinputlisting[language=sh,title={Compiling and testing
649 ROOT}]{scripts/makeroot}
653 At this point the user should have a working ROOT version on a Linux
654 (32 bit Pentium processor with gcc compiler). The list of supported
655 platforms can be obtained by ``./configure --help'' command.
657 \subsubsection{GEANT~3}
659 The installation of GEANT~3 is needed since for the moment this is
660 the default particle transport package. A GEANT~3 description is
662 \url{http://wwwasdoc.web.cern.ch/wwwasdoc/geant_html3/geantall.html}.
663 You can download the GEANT~3 distribution from the ROOT CVS repository
664 and compile it in the following way:
666 \lstinputlisting[language=sh,title={Make GEANT3}]{scripts/makeg3}
668 Please note that GEANT~3 is downloaded in \$ALICE directory. Another
669 important feature is the PLATFORM environment variable. If it is not
670 set, the Geant3 Makefile sets it to the result of `root-config
673 \subsubsection{GEANT~4}
674 To use GEANT~4\cite{Geant4}, some additional software has to
675 be installed. GEANT~4 needs CLHEP\cite{CLHEP} package, the user can
676 get the tar file (from here on, ``tarball'') from
677 \url{http://proj-clhep.web.cern.ch/proj-clhep/}.
678 Then the installation can be done in the following way:
680 \lstinputlisting[language=sh, title={Make CLHEP}]{scripts/makeclhep}
683 Another possibility is to use the CLHEP CVS repository:
685 \lstinputlisting[language=sh, title={Make CLHEP from
686 CVS}]{scripts/makeclhepcvs}
688 Now the following lines should be added to the .bash\_profile
690 \begin{lstlisting}[language=sh]
691 % export CLHEP_BASE_DIR=$ALICE/CLHEP
694 The next step is to install GEANT~4. The GEANT~4 distribution is available from
695 \url{http://geant4.web.cern.ch/geant4/}. Typically the following files
696 will be downloaded (the current versions may differ from the ones below):
698 \item geant4.8.1.p02.tar.gz: source tarball
699 \item G4NDL.3.9.tar.gz: G4NDL version 3.9 neutron data files with thermal cross sections
700 \item G4EMLOW4.0.tar.gz: data files for low energy electromagnetic processes - version 4.0
701 \item PhotonEvaporation.2.0.tar.gz: data files for photon evaporation - version 2.0
702 \item RadiativeDecay.3.0.tar.gz: data files for radioactive decay hadronic processes - version 3.0
703 \item G4ELASTIC.1.1.tar.gz: data files for high energy elastic scattering processes - version 1.1
706 Then the following steps have to be executed:
708 \lstinputlisting[language=sh, title={Make GEANT4}]{scripts/makeg4}
710 The execution of the env.sh script can be made from the
711 \texttt{\~{}/.bash\_profile} to have the GEANT~4 environment variables
712 initialized automatically.
714 \subsubsection{FLUKA}
716 The installation of FLUKA\cite{FLUKA} consists of the following steps:
720 \item register as FLUKA user at \url{http://www.fluka.org} if you
721 haven't yet done so. You will receive your ``fuid'' number and will set
724 \item download the latest FLUKA version from
725 \url{http://www.fluka.org}. Use your ``fuid'' registration and
726 password when prompted. You will obtain a tarball containing the
727 FLUKA libraries, for example fluka2006.3-linuxAA.tar.gz
729 \item install the libraries;
731 \lstinputlisting[language=sh, title={install FLUKA}]{scripts/makefluka}
733 \item compile TFluka;
735 \begin{lstlisting}[language=sh]
740 \item run AliRoot using FLUKA;
741 \begin{lstlisting}[language=sh]
742 % cd $ALICE_ROOT/TFluka/scripts
746 This script creates the directory tmp and inside all the necessary
747 links for data and configuration files and starts aliroot. For the
748 next run it is not necessary to run the script again. The tmp
749 directory can be kept or renamed. The user should run aliroot from
750 inside this directory.
752 \item from the AliRoot prompt start the simulation;
753 \begin{lstlisting}[language=C++]
754 root [0] AliSimulation sim;
758 You will get the results of the simulation in the tmp directory.
760 \item reconstruct the simulated event;
761 \begin{lstlisting}[language=sh]
766 and from the AliRoot prompt
767 \begin{lstlisting}[language=C++]
768 root [0] AliReconstruction rec;
772 \item report any problem you encounter to the offline list \url{alice-off@cern.ch}.
777 \subsubsection{AliRoot}
779 The AliRoot distribution is taken from the CVS repository and then
780 \begin{lstlisting}[language=C++]
782 % cvs -qz2 -d :pserver:cvs@alisoft.cern.ch:/soft/cvsroot co AliRoot
787 The AliRoot code (the above example retrieves the HEAD version from CVS) is contained in
788 ALICE\_ROOT directory. The ALICE\_TARGET is defined automatically in
789 the \texttt{.bash\_profile} via the call to `root-config --arch`.
793 \subsection{Debugging}
795 While developing code or running some ALICE program, the user may be
796 confronted with the following execution errors:
799 \item floating-point exceptions: division by zero, square root of a negative
800 argument, assignment of NaN, etc.
801 \item segmentation violations/faults: attempt to access a memory
802 location that the program is not allowed to access, or in a way that is not
804 \item bus error: attempt to access memory that the computer cannot
808 In this case, the user will have to debug the program to determine the
809 source of the problem and fix it. There are several debugging
810 techniques, which are briefly listed below:
813 \item using \texttt{printf(...)}, \texttt{std::cout}, \texttt{assert(...)}, and
816 \item often this is the only easy way to find the origin of the
818 \item \texttt{assert(...)} aborts the program execution if the
819 argument is false. It is a macro from \texttt{cassert}, and it can be
820 deactivated by compiling with -DNDEBUG.
824 \item gdb needs compilation with the -g option. Sometimes -O2 -g
825 prevents exact tracing, so it is safe to use compilation with
826 -O0 -g for debugging purposes;
827 \item One can use it directly (gdb aliroot) or attach it to a
828 process (gdb aliroot 12345 where 12345 is the process id).
832 Below we report the main gdb commands and their descriptions:
835 \item \textbf{run} starts the execution of the program;
836 \item \textbf{Control-C} stops the execution and switches to the gdb shell;
837 \item \textbf{where <n>} prints the program stack. Sometimes the program
838 stack is very long. The user can get the last n frames by specifying
839 n as a parameter to where;
840 \item \textbf{print} prints the value of a variable or expression;
842 \begin{lstlisting}[language=sh]
845 \item \textbf{up} and \textbf{down} are used to navigate in the program stack;
846 \item \textbf{quit} exits the gdb session;
847 \item \textbf{break} sets break point;
849 \begin{lstlisting}[language=C++]
850 (gdb) break AliLoader.cxx:100
851 (gdb) break 'AliLoader::AliLoader()'
854 The automatic completion of the class methods via tab is available
855 in case an opening quote (`) is put in front of the class name.
857 \item \textbf{cont} continues the run;
858 \item \textbf{watch} sets watchpoint (very slow execution). The example below
859 shows how to check each change of fData;
861 \begin{lstlisting}[language=C++]
864 \item \textbf{list} shows the source code;
865 \item \textbf{help} shows the description of commands.
869 \subsection{Profiling}
871 Profiling is used to discover where the program spends most of the
872 time, and to optimize the algorithms. There are several profiling
873 tools available on different platforms:
876 gprof: compilation with -pg option, static libraries\\
877 oprofile: uses kernel module\\
878 VTune: instruments shared libraries.
879 \item Sun: Sun workshop (Forte agent). It needs compilation with
880 profiling option (-pg)
881 \item Compaq Alpha: pixie profiler. Instruments shared libraries for profiling.
884 On Linux AliRoot can be built with static libraries using the special
887 \begin{lstlisting}[language=sh]
889 # change LD_LIBRARY_PATH to replace lib/tgt_linux with lib/tgt_linuxPROF
890 # change PATH to replace bin/tgt_linux with bin/tgt_linuxPROF
892 root [0] gAlice->Run()
896 After the end of aliroot session a file called gmon.out will be created. It
897 contains the profiling information which can be investigated using
900 \begin{lstlisting}[language=sh]
901 % gprof `which aliroot` | tee gprof.txt
907 \textbf{VTune profiling tool}
909 VTune is available from the Intel Web site
910 \url{http://www.intel.com/software/products/index.htm}. It is free for
911 non-commercial use on Linux. It provides the possibility of call-graph
912 and sampling profiling. VTune instruments shared libraries, and needs
913 only -g option during the compilation. Here is an example of
914 call-graph profiling:
916 \begin{lstlisting}[language=sh]
917 # Register an activity
918 % vtl activity sim -c callgraph -app aliroot," -b -q sim.C" -moi aliroot
921 % vtl view sim::r1 -gui
924 \subsection{Detection of run time errors}
926 The Valgrind tool can be used for detection of run time errors on
927 linux. It is available from \url{http://www.valgrind.org}. Valgrind
928 is equipped with the following set of tools:
930 \item memcheck for memory management problems;
931 \item addrcheck: lightweight memory checker;
932 \item cachegrind: cache profiler;
933 \item massif: heap profiler;
934 \item helgrind: thread debugger;
935 \item callgrind: extended version of cachegrind.
938 The most important tool is memcheck. It can detect:
940 \item use of non-initialized memory;
941 \item reading/writing memory after it has been free'd;
942 \item reading/writing off the end of malloc'd blocks;
943 \item reading/writing inappropriate areas on the stack;
944 \item memory leaks -- where pointers to malloc'd blocks are lost forever;
945 \item mismatched use of malloc/new/new [] vs free/delete/delete [];
946 \item overlapping source and destination pointers in memcpy() and
948 \item some misuses of the POSIX pthreads API;
951 Here is an example of Valgrind usage:
953 \begin{lstlisting}[language=sh]
954 % valgrind --tool=addrcheck --error-limit=no aliroot -b -q sim.C
958 %\textbf{ROOT memory checker}
960 % The ROOT memory checker provides tests of memory leaks and other
961 % problems related to new/delete. It is fast and easy to use. Here is
964 % \item link aliroot with -lNew. The user has to add `\-\-new' before
965 % `\-\-glibs' in the ROOTCLIBS variable of the Makefile;
966 % \item add Root.MemCheck: 1 in .rootrc
967 % \item run the program: aliroot -b -q sim.C
968 % \item run memprobe -e aliroot
969 % \item Inspect the files with .info extension that have been generated.
972 \subsection{Useful information LSF and CASTOR}
974 \textbf{The information in this section is included for completeness: the
975 users are strongly advised to rely on the GRID tools for massive
976 productions and data access}
978 LSF is the batch system at CERN. Every user is allowed to submit jobs
979 to the different queues. Usually the user has to copy some input files
980 (macros, data, executables, libraries) from a local computer or from
981 the mass-storage system to the worker node on lxbatch, then to execute
982 the program, and to store the results on the local computer or in the
983 mass-storage system. The methods explained in this section are suitable
984 if the user doesn't have direct access to a shared directory, for
985 example on AFS. The main steps and commands are described below.
987 In order to have access to the local desktop and to be able to use scp
988 without a password, the user has to create a pair of SSH keys. Currently
989 lxplus/lxbatch uses RSA1 cryptography. After login into lxplus the
990 following has to be done:
992 \begin{lstlisting}[language=sh]
995 % cp .ssh/identity.pub public/authorized_keys
996 % ln -s ../public/authorized_keys .ssh/authorized_keys
999 A list of useful LSF commands is given below:
1001 \item \textbf{bqueues} shows the available queues and their status;
1002 \item \textbf{ bsub -q 8nm job.sh} submits the shell script job.sh to
1003 the queue 8nm, where the name of the queue indicates the
1004 ``normalized CPU time'' (maximal job duration 8 min of normalized CPU time);
1005 \item \textbf{bjobs} lists all unfinished jobs of the user;
1006 \item \textbf{lsrun -m lxbXXXX xterm} returns an xterm running on the
1007 batch node lxbXXXX. This makes it possible to inspect the job output and to
1011 Each batch job stores the output in directory LSFJOB\_XXXXXX, where
1012 XXXXXX is the job id. Since the home directory is on AFS, the user has
1013 to redirect the verbose output, otherwise the AFS quota might be
1014 exceeded and the jobs will fail.
1016 The CERN mass storage system is CASTOR2\cite{CASTOR2}. Every user has
1017 his/her own CASTOR2 space, for example /castor/cern.ch/user/p/phristov.
1018 The commands of CASTOR2 start with the prefix ``ns'' or ``rf''. Here is a
1019 very short list of useful commands:
1022 \item \textbf{nsls /castor/cern.ch/user/p/phristov} lists the CASTOR
1023 space of user phristov;
1024 \item \textbf{rfdir /castor/cern.ch/user/p/phristov} the same as
1025 above, but the output is in long format;
1026 \item \textbf{nsmkdir test} creates a new directory (test) in the
1027 CASTOR space of the user;
1028 \item \textbf{rfcp /castor/cern.ch/user/p/phristov/test/galice.root .}
1029 copies the file from CASTOR to the local directory. If the file is
1030 on tape, this will trigger the stage-in procedure, which might take
1032 \item \textbf{rfcp AliESDs.root /castor/cern.ch/p/phristov/test}
1033 copies the local file AliESDs.root to CASTOR in the subdirectory
1034 test and schedules it for migration to tape.
1037 The user also has to be aware that the behavior of CASTOR depends on
1038 the environment variables RFIO\_USE\_CASTOR\_V2(=YES),
1039 STAGE\_HOST(=castoralice) and STAGE\_SVCCLASS(=default). They are set
1040 by default to the values for the group (z2 in case of ALICE).
1042 Below the user can find an example of a job, where the simulation and
1043 reconstruction are run using the corresponding macros sim.C and rec.C.
1044 An example of such macros will be given later.
1046 \lstinputlisting[language=sh,title={LSF example job}]{scripts/lsfjob}
1048 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
1051 \section{Simulation} \label{Simulation}
1053 % -----------------------------------------------------------------------------
1055 \subsection{Introduction}
1056 Heavy-ion collisions produce a very large number of particles in the
1057 final state. This is a challenge for the reconstruction and analysis
1058 algorithms. The detector design and the development of these algorithms requires a predictive
1059 and precise simulation of the detector response. Model predictions
1060 discussed in the first volume of the Physics Performance Report for the
1061 charged multiplicity at LHC in \mbox{Pb--Pb} collisions vary from 1400
1062 to 8000 particles in the central unit of rapidity. The experiment was
1063 designed when the highest nucleon--nucleon center-of-mass energy
1064 available for heavy-ion interactions was $20 \, {\rm GeV}$ per nucleon--nucleon
1065 pair at the CERN SPS, \ie a factor of about 300 less than the energy at
1066 LHC. Recently, the RHIC collider came online. Its top energy of
1067 $200\, {\rm GeV}$ per nucleon--nucleon pair is still 30 times less
1068 than the LHC energy. The RHIC data seem to suggest that the LHC
1069 multiplicity will be on the lower side of the interval. However, the
1070 extrapolation is so large that both the hardware and software of ALICE
1071 have to be designed for the highest multiplicity. Moreover, as the
1072 predictions of different generators of heavy-ion collisions differ
1073 substantially at LHC energies, we have to use several of them and
1074 compare the results.
1076 The simulation of the processes involved in the transport through the
1077 detector of the particles emerging from the interaction is confronted
1078 with several problems:
1081 \item existing event generators give different answers on parameters
1082 such as expected multiplicities, $p_T$-dependence and rapidity
1083 dependence at LHC energies.
1085 \item most of the physics signals, like hyperon production, high-$p_T$
1086 phenomena, open charm and beauty, quarkonia etc., are not exactly
1087 reproduced by the existing event generators.
1089 \item simulation of small cross-sections would demand prohibitively
1090 high computing resources to simulate a number of events that is commensurable with
1091 the expected number of detected events in the experiment.
1093 \item the existing generators do not provide for event topologies like
1094 momentum correlations, azimuthal flow etc.
1097 To allow nevertheless efficient simulations we have adopted a
1098 framework that allows for a number of options:
1102 \item{} the simulation framework provides an interface to external
1103 generators, like HIJING~\cite{MC:HIJING} and
1104 DPMJET~\cite{MC:DPMJET}.
1106 \item{} a parameterized, signal-free, underlying event where the
1107 produced multiplicity can be specified as an input parameter is
1110 \item{} rare signals can be generated using the interface to external
1111 generators like PYTHIA or simple parameterizations of transverse
1112 momentum and rapidity spectra defined in function libraries.
1114 \item{} the framework provides a tool to assemble events from
1115 different signal generators (event cocktails).
1117 \item{} the framework provides tools to combine underlying events and
1118 signal events at the primary particle level (cocktail) and at the
1119 summable digit level (merging).
1121 \item{} ``afterburners'' are used to introduce particle correlations in a
1122 controlled way. An afterburner is a program which changes the
1123 momenta of the particles produced by another generator, and thus
1124 modifies as desired the multi-particle momentum distributions.
1127 The implementation of this strategy is described below. The results of
1128 different \MC generators for heavy-ion collisions are
1129 described in section~\ref{MC:Generators}.
1131 \subsection{Simulation framework}
1133 The simulation framework covers the simulation of primary collisions
1134 and generation of the emerging particles, the transport of particles
1135 through the detector, the simulation of energy depositions (hits) in
1136 the detector components, their response in form of so called summable
1137 digits, the generation of digits from summable digits with the
1138 optional merging of underlying events and the creation of raw data.
1139 The \class{AliSimulation} class provides a simple user interface to
1140 the simulation framework. This section focuses on the simulation
1141 framework from the (detector) software developer's point of view.
1145 \includegraphics[width=10cm]{picts/SimulationFramework}
1146 \caption{Simulation framework.} \label{MC:Simulation}
1151 \textbf{Generation of Particles}
1153 Different generators can be used to produce particles emerging from
1154 the collision. The class \class{AliGenerator} is the base class
1155 defining the virtual interface to the generator programs. The
1156 generators are described in more detail in the ALICE PPR Volume 1 and
1157 in the next chapter.
1160 \textbf{Virtual Monte Carlo}
1162 The simulation of particles traversing the detector components is
1163 performed by a class derived from \class{TVirtualMC}. The Virtual
1164 Monte Carlo also provides an interface to construct the geometry of
1165 detectors. The task of the geometry description is done by the
1166 geometrical modeler \class{TGeo}. The concrete implementation of the
1167 virtual Monte Carlo application \class{TVirtualMCApplication} is \class{AliMC}. The
1168 Monte Carlos used in ALICE are GEANT~3.21, GEANT~4 and FLUKA. More
1169 information can be found on the VMC Web page:
1170 \url{http://root.cern.ch/root/vmc}
1172 As explained above, our strategy was to develop a virtual interface to
1173 the detector simulation code. We call the interface to the transport
1174 code virtual Monte Carlo. It is implemented via C++ virtual classes
1175 and is schematically shown in Fig.~\ref{MC:vmc}. The codes that
1176 implement the abstract classes are real C++ programs or wrapper
1177 classes that interface to FORTRAN programs.
1181 \includegraphics[width=10cm]{picts/vmc}
1182 \caption{Virtual \MC} \label{MC:vmc}
1185 Thanks to the virtual Monte Carlo we have converted all FORTRAN user
1186 code developed for GEANT~3 into C++, including the geometry definition
1187 and the user scoring routines, \texttt{StepManager}. These have been
1188 integrated in the detector classes of the AliRoot framework. The
1189 output of the simulation is saved directly with ROOT I/O, simplifying
1190 the development of the digitization and reconstruction code in C++.
1193 \textbf{Modules and Detectors}
1195 Each module of the ALICE detector is described by a class derived from
1196 \class{AliModule}. Classes for active modules (= detectors) are not
1197 derived directly from \class{AliModule} but from its subclass
1198 \class{AliDetector}. These base classes define the interface to the
1199 simulation framework via a set of virtual methods.
1202 \textbf{Configuration File (Config.C)}
1204 The configuration file is a C++ macro that is processed before the
1205 simulation starts. It creates and configures the Monte Carlo object,
1206 the generator object, the magnetic field map and the detector modules.
1207 A detailed description is given below.
1210 \textbf{Detector Geometry}
1212 The virtual Monte Carlo application creates and initializes the
1213 geometry of the detector modules by calling the virtual functions
1214 \method{CreateMaterials}, \method{CreateGeometry}, \method{Init} and
1215 \method{BuildGeometry}.
1218 \textbf{Vertexes and Particles}
1220 In case the simulated event is intended to be merged with an
1221 underlying event, the primary vertex is taken from the file containing
1222 the underlying event by using the vertex generator
1223 \class{AliVertexGenFile}. Otherwise the primary vertex is generated
1224 according to the generator settings. Then the particles emerging from
1225 the collision are generated and put on the stack (an instance of
1226 \class{AliStack}). The transport of particles through the detector is
1227 performed by the Monte Carlo object. The decay of particles is usually
1228 handled by the external decayer \class{AliDecayerPythia}.
1231 \textbf{Hits and Track References}
1233 The Monte Carlo simulates the transport of a particle step by step.
1234 After each step the virtual method \method{StepManager} of the module
1235 in which the particle currently is located is called. In this step
1236 manager method, the hits in the detector are created by calling
1237 \method{AddHit}. Optionally also track references (location and
1238 momentum of simulated particles at selected places) can be created by
1239 calling \method{AddTrackReference}. \method{AddHit} has to be
1240 implemented by each detector whereas \method{AddTrackReference} is
1241 already implemented in AliModule. The container and the branch for the
1242 hits -- and for the (summable) digits -- are managed by the detector
1243 class via a set of so-called loaders. The relevant data members and
1244 methods are fHits, fDigits, \method{ResetHits}, \method{ResetSDigits},
1245 \method{ResetDigits},\method{MakeBranch} and \method{SetTreeAddress}.
1247 For each detector methods like \method{PreTrack}, \method{PostTrack},
1248 \method{FinishPrimary}, \method{FinishEvent} and \method{FinishRun}
1249 are called during the simulation when the conditions indicated by the
1250 method names are fulfilled.
1253 \textbf{Summable Digits}
1255 Summable digits are created by calling the virtual method Hits2SDigits
1256 of a detector. This method loops over all events, creates the summable
1257 digits from hits and stores them in the sdigits file(s).
1260 \textbf{ Digitization and Merging}
1262 Dedicated classes derived from \class{AliDigitizer} are used for the
1263 conversion of summable digits into digits. Since \class{AliDigitizer}
1264 is a \class{TTask}, this conversion is done for
1265 the current event by the \method{Exec} method. Inside this method the summable
1266 digits of all input streams have to be added, combined with noise,
1267 converted to digital values taking into account possible thresholds
1268 and stored in the digits container.
1270 The input streams (more than one in case of merging) as well as the
1271 output stream are managed by an object of type \method{AliRunDigitizer}. The
1272 methods GetNinputs, GetInputFolderName and GetOutputFolderName return
1273 the relevant information. The run digitizer is accessible inside the
1274 digitizer via the protected data member fManager. If the flag
1275 fRegionOfInterest is set, only detector parts where summable digits
1276 from the signal event are present should be digitized. When \MC labels
1277 are assigned to digits, the stream-dependent offset given by the
1278 method \method{GetMask} is added to the label of the summable digit.
1280 The detector specific digitizer object is created in the virtual
1281 method CreateDigitizer of the concrete detector class. The run
1282 digitizer object is used to construct the detector
1283 digitizer. The \method{Init} method of each digitizer is called before the loop
1284 over the events starts.
1287 A direct conversion from hits directly to digits can be implemented in
1288 the method \method{Hits2Digits} of a detector. The loop over the events is
1289 inside the method. Of course merging is not supported in this case.
1291 An example of simulation script that can be used for simulation of
1292 proton-proton collisions is provided below:
1294 \begin{lstlisting}[language=C++, title={Simulation run}]
1295 void sim(Int_t nev=100) {
1296 AliSimulation simulator;
1297 // Measure the total time spent in the simulation
1300 // List of detectors, where both summable digits and digits are provided
1301 simulator.SetMakeSDigits("TRD TOF PHOS EMCAL HMPID MUON ZDC PMD FMD T0 VZERO");
1302 // Direct conversion of hits to digits for faster processing (ITS TPC)
1303 simulator.SetMakeDigitsFromHits("ITS TPC");
1310 The following example shows how one can do event merging
1312 \begin{lstlisting}[language=C++, title={Event merging}]
1313 void sim(Int_t nev=6) {
1314 AliSimulation simulator;
1315 // The underlying events are stored in a separate directory.
1316 // Three signal events will be merged in turn with each
1318 simulator.MergeWith("../backgr/galice.root",3);
1326 The digits stored in ROOT containers can be converted into the DATE\cite{DATE}
1327 format that will be the `payload' of the ROOT classes containing the
1328 raw data. This is done for the current event in the method
1329 \method{Digits2Raw} of the detector.
1331 The simulation of raw data is managed by the class \class{AliSimulation}. To
1332 create raw data DDL files it loops over all events. For each event it
1333 creates a directory, changes to this directory and calls the method
1334 \method{Digits2Raw} of each selected detector. In the Digits2Raw method the DDL
1335 files of a detector are created from the digits for the current
1338 For the conversion of the DDL files to a DATE file the
1339 \class{AliSimulation} class uses the tool dateStream. To create a raw
1340 data file in ROOT format with the DATE output as payload the program alimdc is
1343 The only part that has to be implemented in each detector is
1344 the \method{Digits2Raw} method of the detectors. In this method one file per
1345 DDL has to be created obeying the conventions for file names and DDL
1346 IDs. Each file is a binary file with a DDL data header in the
1347 beginning. The DDL data header is implemented in the structure
1348 \class{AliRawDataHeader}. The data member fSize should be set to the total
1349 size of the DDL raw data including the size of the header. The
1350 attribute bit 0 should be set by calling the method \method{SetAttribute(0)} to
1351 indicate that the data in this file is valid. The attribute bit 1 can
1352 be set to indicate compressed raw data.
1354 The detector-specific raw data are stored in the DDL files after the
1355 DDL data header. The format of this raw data should be as close as
1356 possible to the one that will be delivered by the detector. This
1357 includes the order in which the channels will be read out.
1359 Below we show an example of raw data creation for all the detectors
1361 \begin{lstlisting}[language=C++]
1362 void sim(Int_t nev=1) {
1363 AliSimulation simulator;
1364 // Create raw data for ALL detectors, rootify it and store in the
1365 // file raw.root. Do not delete the intermediate files
1366 simulator.SetWriteRawData("ALL","raw.root",kFALSE);
1372 \subsection{Configuration: example of Config.C}
1374 The example below contains as comments the most important information:
1376 \lstinputlisting[language=C++] {scripts/Config.C}
1378 % -----------------------------------------------------------------------------
1380 \subsection{Event generation}
1381 \label{MC:Generators}
1384 \includegraphics[width=10cm]{picts/aligen}
1385 \caption{\texttt{AliGenerator} is the base class, which has the
1386 responsibility to generate the primary particles of an event. Some
1387 realizations of this class do not generate the particles themselves
1388 but delegate the task to an external generator like PYTHIA through the
1389 \texttt{TGenerator} interface. }
1393 \subsubsection{Parameterized generation}
1395 The event generation based on parameterization can be used to produce
1396 signal-free final states. It avoids the dependence on a
1397 specific model, and is efficient and flexible. It can be used to
1398 study the track reconstruction efficiency
1399 as a function of the initial multiplicity and occupancy.
1401 \class{AliGenHIJINGparam}~\cite{MC:HIJINGparam} is an example of internal
1402 AliRoot generator based on parameterized
1403 pseudorapidity density and transverse momentum distributions of
1404 charged and neutral pions and kaons. The pseudorapidity
1405 distribution was obtained from a HIJING simulation of central
1406 Pb--Pb collisions and scaled to a charged-particle multiplicity of
1407 8000 in the pseudorapidity interval $|\eta | < 0.5$. Note that
1408 this is about 10\% higher than the corresponding value for a
1409 rapidity density with an average ${\rm d}N/{\rm d}y$ of 8000 in
1410 the interval $|y | < 0.5$.
1411 The transverse-momentum distribution is parameterized from the
1412 measured CDF pion $p_T$-distribution at $\sqrt{s} = 1.8 \, {\rm TeV}$.
1413 The corresponding kaon $p_T$-distribution was obtained from the
1414 pion distribution by $m_T$-scaling. See Ref.~\cite{MC:HIJINGparam}
1415 for the details of these parameterizations.
1417 In many cases, the expected transverse momentum and rapidity
1418 distributions of particles are known. In other cases the effect of
1419 variations in these distributions must be investigated. In both
1420 situations it is appropriate to use generators that produce
1421 primary particles and their decays sampling from parameterized
1422 spectra. To meet the different physics requirements in a modular
1423 way, the parameterizations are stored in independent function
1424 libraries wrapped into classes that can be plugged into the
1425 generator. This is schematically illustrated in
1426 Fig.~\ref{MC:evglib} where four different generator libraries can
1427 be loaded via the abstract generator interface.
1429 It is customary in heavy-ion event generation to superimpose
1430 different signals on an event to tune the reconstruction
1431 algorithms. This is possible in AliRoot via the so-called cocktail
1432 generator (Fig.~\ref{MC:cocktail}). This creates events from
1433 user-defined particle cocktails by choosing as ingredients a list
1434 of particle generators.
1438 \includegraphics[width=10cm]{picts/evglib}
1439 \caption{\texttt{AliGenParam} is a realization of \texttt{AliGenerator}
1440 that generates particles using parameterized $\pt$ and
1441 pseudo-rapidity distributions. Instead of coding a fixed number of
1442 parameterizations directly into the class implementations, user
1443 defined parameterization libraries (AliGenLib) can be connected at
1444 run time allowing for maximum flexibility.} \label{MC:evglib}
1447 An example of \class{AliGenParam} usage is presented below:
1449 \begin{lstlisting}[language=C++]
1450 // Example for J/psi Production from Parameterization
1451 // using default library (AliMUONlib)
1452 AliGenParam *gener = new AliGenParam(ntracks, AliGenMUONlib::kUpsilon);
1453 gener->SetMomentumRange(0,999); // Wide cut on the Upsilon momentum
1454 gener->SetPtRange(0,999); // Wide cut on Pt
1455 gener->SetPhiRange(0. , 360.); // Full azimuthal range
1456 gener->SetYRange(2.5,4); // In the acceptance of the MUON arm
1457 gener->SetCutOnChild(1); // Enable cuts on Upsilon decay products
1458 gener->SetChildThetaRange(2,9); // Theta range for the decay products
1459 gener->SetOrigin(0,0,0); // Vertex position
1460 gener->SetSigma(0,0,5.3); // Sigma in (X,Y,Z) (cm) on IP position
1461 gener->SetForceDecay(kDiMuon); // Upsilon->mu+ mu- decay
1462 gener->SetTrackingFlag(0); // No particle transport
1466 To facilitate the usage of different generators we have developed
1467 an abstract generator interface called \texttt{AliGenerator}, see
1468 Fig.~\ref{MC:aligen}. The objective is to provide the user with
1469 an easy and coherent way to study a variety of physics signals as
1470 well as full set of tools for testing and background studies. This
1471 interface allows the study of full events, signal processes, and
1472 a mixture of both, i.e. cocktail events (see an example later).
1474 Several event generators are available via the abstract ROOT class
1475 that implements the generic generator interface, \texttt{TGenerator}.
1476 Through implementations of this abstract base class we wrap
1477 FORTRAN \MC codes like PYTHIA, HERWIG, and HIJING that are
1478 thus accessible from the AliRoot classes. In particular the
1479 interface to PYTHIA includes the use of nuclear structure
1480 functions of LHAPDF.
1483 \subsubsection{Pythia6}
1485 Pythia is used for simulation of proton-proton interactions and for
1486 generation of jets in case of event merging. An example of minimum
1487 bias Pythia events is presented below:
1489 \begin{lstlisting}[language=C++]
1490 AliGenPythia *gener = new AliGenPythia(-1);
1491 gener->SetMomentumRange(0,999999);
1492 gener->SetThetaRange(0., 180.);
1493 gener->SetYRange(-12,12);
1494 gener->SetPtRange(0,1000);
1495 gener->SetProcess(kPyMb); // Min. bias events
1496 gener->SetEnergyCMS(14000.); // LHC energy
1497 gener->SetOrigin(0, 0, 0); // Vertex position
1498 gener->SetSigma(0, 0, 5.3); // Sigma in (X,Y,Z) (cm) on IP position
1499 gener->SetCutVertexZ(1.); // Truncate at 1 sigma
1500 gener->SetVertexSmear(kPerEvent);// Smear per event
1501 gener->SetTrackingFlag(1); // Particle transport
1506 \subsubsection{HIJING}
1507 HIJING (Heavy-Ion Jet Interaction Generator) combines a
1508 QCD-inspired model of jet production~\cite{MC:HIJING} with the
1509 Lund model~\cite{MC:LUND} for jet fragmentation. Hard or
1510 semi-hard parton scatterings with transverse momenta of a few GeV
1511 are expected to dominate high-energy heavy-ion collisions. The
1512 HIJING model has been developed with special emphasis on the role
1513 of mini jets in pp, pA and A--A reactions at collider energies.
1515 Detailed systematic comparisons of HIJING results with a wide
1516 range of data demonstrates a qualitative understanding of the
1517 interplay between soft string dynamics and hard QCD interactions.
1518 In particular, HIJING reproduces many inclusive spectra,
1519 two-particle correlations, and the observed flavor and
1520 multiplicity dependence of the average transverse momentum.
1522 The Lund FRITIOF~\cite{MC:FRITIOF} model and the Dual Parton
1523 Model~\cite{MC:DPM} (DPM) have guided the formulation of HIJING
1524 for soft nucleus--nucleus reactions at intermediate energies,
1525 $\sqrt{s_{\rm NN}}\approx 20\, {\rm GeV}$. The hadronic-collision
1526 model has been inspired by the successful implementation of
1527 perturbative QCD processes in PYTHIA~\cite{MC:PYTH}. Binary
1528 scatterings with Glauber geometry for multiple interactions are
1529 used to extrapolate to pA and A--A collisions.
1531 Two important features of HIJING are jet quenching and nuclear
1532 shadowing. Jet quenching is the energy loss by partons in nuclear
1533 matter. It is responsible for an increase of the particle
1534 multiplicity at central rapidities. Jet quenching is modeled by an
1535 assumed energy loss by partons traversing dense matter. A simple
1536 color configuration is assumed for the multi-jet system and the Lund
1537 fragmentation model is used for the hadronization. HIJING does not
1538 simulate secondary interactions.
1540 Shadowing describes the modification of the free nucleon parton
1541 density in the nucleus. At the low momentum fractions, $x$,
1542 probed in collisions at the LHC, shadowing results in a decrease
1543 of the multiplicity. Parton shadowing is taken into account using
1544 a parameterization of the modification.
1546 Here is an example of event generation with HIJING:
1548 \begin{lstlisting}[language=C++]
1549 AliGenHijing *gener = new AliGenHijing(-1);
1550 gener->SetEnergyCMS(5500.); // center of mass energy
1551 gener->SetReferenceFrame("CMS"); // reference frame
1552 gener->SetProjectile("A", 208, 82); // projectile
1553 gener->SetTarget ("A", 208, 82); // target
1554 gener->KeepFullEvent(); // HIJING will keep the full parent child chain
1555 gener->SetJetQuenching(1); // enable jet quenching
1556 gener->SetShadowing(1); // enable shadowing
1557 gener->SetDecaysOff(1); // neutral pion and heavy particle decays switched off
1558 gener->SetSpectators(0); // Don't track spectators
1559 gener->SetSelectAll(0); // kinematic selection
1560 gener->SetImpactParameterRange(0., 5.); // Impact parameter range (fm)
1564 \subsubsection{Additional universal generators}
1566 The following universal generators are available in AliRoot:
1569 \item DPMJET: this is an implementation of the dual parton
1570 model~\cite{MC:DPMJET};
1571 \item ISAJET: a \MC event generator for pp, $\bar pp$, and $e^+e^-$
1572 reactions~\cite{MC:ISAJET};
1573 \item HERWIG: \MC package for simulating Hadron Emission
1574 Reactions With Interfering Gluons~\cite{MC:HERWIG}.
1577 An example of HERWIG configuration in the Config.C is shown below:
1578 \begin{lstlisting}[language=C++]
1579 AliGenHerwig *gener = new AliGenHerwig(-1);
1580 // final state kinematic cuts
1581 gener->SetMomentumRange(0,7000);
1582 gener->SetPhiRange(0. ,360.);
1583 gener->SetThetaRange(0., 180.);
1584 gener->SetYRange(-10,10);
1585 gener->SetPtRange(0,7000);
1586 // vertex position and smearing
1587 gener->SetOrigin(0,0,0); // vertex position
1588 gener->SetVertexSmear(kPerEvent);
1589 gener->SetSigma(0,0,5.6); // Sigma in (X,Y,Z) (cm) on IP position
1591 gener->SetBeamMomenta(7000,7000);
1593 gener->SetProjectile("P");
1594 gener->SetTarget("P");
1595 // Structure function
1596 gener->SetStrucFunc(kGRVHO);
1598 gener->SetPtHardMin(200);
1599 gener->SetPtRMS(20);
1601 gener->SetProcess(8000);
1604 \subsubsection{Generators for specific studies}
1608 MEVSIM~\cite{MC:MEVSIM} was developed for the STAR experiment to
1609 quickly produce a large number of A--A collisions for some
1610 specific needs -- initially for HBT studies and for testing of
1611 reconstruction and analysis software. However, since the user is
1612 able to generate specific signals, it was extended to flow and
1613 event-by-event fluctuation analysis. A detailed description of
1614 MEVSIM can be found in Ref.~\cite{MC:MEVSIM}.
1616 MEVSIM generates particle spectra according to a momentum model
1617 chosen by the user. The main input parameters are: types and
1618 numbers of generated particles, momentum-distribution model,
1619 reaction-plane and azimuthal-anisotropy coefficients, multiplicity
1620 fluctuation, number of generated events, etc. The momentum models
1621 include factorized $p_T$ and rapidity distributions, non-expanding
1622 and expanding thermal sources, arbitrary distributions in $y$ and
1623 $p_T$ and others. The reaction plane and azimuthal anisotropy is
1624 defined by the Fourier coefficients (maximum of six) including
1625 directed and elliptical flow. Resonance production can also be
1628 MEVSIM was originally written in FORTRAN. It was later integrated into
1629 AliRoot. A complete description of the AliRoot implementation of MEVSIM can
1630 be found on the web page (\url{http://home.cern.ch/~radomski}).
1634 GeVSim \cite{MC:GEVSIM} is a fast and easy-to-use \MC
1635 event generator implemented in AliRoot. It can provide events of
1636 similar type configurable by the user according to the specific
1637 needs of a simulation project, in particular, that of flow and
1638 event-by-event fluctuation studies. It was developed to facilitate
1639 detector performance studies and for the test of algorithms.
1640 GeVSim can also be used to generate signal-free events to be
1641 processed by afterburners, for example HBT processor.
1643 GeVSim is based on the MevSim \cite{MC:MEVSIM} event generator
1644 developed for the STAR experiment.
1646 GeVSim generates a list of particles by randomly sampling a
1647 distribution function. The parameters of single-particle spectra
1648 and their event-by-event fluctuations are explicitly defined by
1649 the user. Single-particle transverse-momentum and rapidity spectra
1650 can be either selected from a menu of four predefined
1651 distributions, the same as in MevSim, or provided by the user.
1653 Flow can be easily introduced into simulated events. The parameters of
1654 the flow are defined separately for each particle type and can be
1655 either set to a constant value or parameterized as a function of
1656 transverse momentum and rapidity. Two parameterizations of elliptic
1657 flow based on results obtained by RHIC experiments are provided.
1659 GeVSim also has extended possibilities for simulating
1660 event-by-event fluctuations. The model allows fluctuations
1661 following an arbitrary analytically defined distribution in
1662 addition to the Gaussian distribution provided by MevSim. It is
1663 also possible to systematically alter a given parameter to scan
1664 the parameter space in one run. This feature is useful when
1665 analyzing performance with respect to, for example, multiplicity
1666 or event-plane angle.
1668 The current status and further development of GeVSim code and documentation
1669 can be found in Ref.~\cite{MC:Radomski}.
1671 \textbf{HBT processor}
1673 Correlation functions constructed with the data produced by MEVSIM
1674 or any other event generator are normally flat in the region of
1675 small relative momenta. The HBT-processor afterburner introduces
1676 two particle correlations into the set of generated particles. It
1677 shifts the momentum of each particle so that the correlation
1678 function of a selected model is reproduced. The imposed
1679 correlation effects due to Quantum Statistics (QS) and Coulomb
1680 Final State Interactions (FSI) do not affect the single-particle
1681 distributions and multiplicities. The event structures before and
1682 after passing through the HBT processor are identical. Thus, the
1683 event reconstruction procedure with and without correlations is
1684 also identical. However, the track reconstruction efficiency, momentum
1685 resolution and particle identification need not be, since
1686 correlated particles have a special topology at small relative
1687 velocities. We can thus verify the influence of various
1688 experimental factors on the correlation functions.
1690 The method, proposed by L.~Ray and G.W.~Hoffmann \cite{MC:HBTproc}
1691 is based on random shifts of the particle three-momentum within a
1692 confined range. After each shift, a comparison is made with
1693 correlation functions resulting from the assumed model of the
1694 space--time distribution and with the single-particle spectra
1695 which should remain unchanged. The shift is kept if the
1696 $\chi^2$-test shows better agreement. The process is iterated
1697 until satisfactory agreement is achieved. In order to construct
1698 the correlation function, a reference sample is made by mixing
1699 particles from some consecutive events. Such a method has an
1700 important impact on the simulations when at least two events must
1701 be processed simultaneously.
1703 Some specific features of this approach are important for practical
1706 \item{} the HBT processor can simultaneously generate correlations of up
1707 to two particle types (e.g. positive and negative pions).
1708 Correlations of other particles can be added subsequently.
1709 \item{} the form of the correlation function has to be parameterized
1710 analytically. One- and three-dimensional parameterizations are
1712 \item{} a static source is usually assumed. Dynamical effects,
1714 expansion or flow, can be simulated in a stepwise form by repeating
1715 simulations for different values of the space--time parameters
1716 associated with different kinematic intervals.
1717 \item{} Coulomb effects may be introduced by one of three
1719 factor, experimentally modified Gamow correction and integrated
1720 Coulomb wave functions for discrete values of the source radii.
1721 \item{} Strong interactions are not implemented.
1724 The detailed description of the HBT processor can be found
1725 elsewhere~\cite{MC:PiotrSk}.
1727 \textbf{Flow afterburner}
1729 Azimuthal anisotropies, especially elliptic flow, carry unique
1730 information about collective phenomena and consequently are
1731 important for the study of heavy-ion collisions. Additional
1732 information can be obtained studying different heavy-ion
1733 observables, especially jets, relative to the event plane.
1734 Therefore it is necessary to evaluate the capability of ALICE to
1735 reconstruct the event plane and study elliptic flow.
1737 Since there is no well-understood microscopic description of
1738 the flow effect it cannot be correctly simulated by microscopic
1739 event generators. Therefore, to generate events with flow the user has
1740 to use event generators based on macroscopic models, like GeVSim
1741 \cite{MC:GEVSIM} or an afterburner which can generate flow on top
1742 of events generated by event generators based on the microscopic
1743 description of the interaction. In the AliRoot framework such a
1744 flow afterburner is implemented.
1746 The algorithm to apply azimuthal correlation consists in shifting the
1747 azimuthal coordinates of the particles. The transformation is given
1748 by \cite{MC:POSCANCER}:
1752 \varphi \rightarrow \varphi '=\varphi +\Delta \varphi \]
1754 \Delta \varphi =\sum _{n}\frac{-2}{n}v_{n}\left( p_{t},y\right)
1755 \sin\left[ n\left( \varphi -\psi \right)\right] \] where \(
1756 v_{n}(p_{t},y) \) is the flow coefficient to be obtained, \( n \)
1757 is the harmonic number and \( \psi \) is the event-plane angle.
1758 Note that the algorithm is deterministic and does not contain any
1759 random-number generation.
1761 The value of the flow coefficient can be either constant or parameterized as a
1762 function of transverse momentum and rapidity. Two parameterizations
1763 of elliptic flow are provided as in GeVSim.
1765 \begin{lstlisting}[language=C++]
1766 AliGenGeVSim* gener = new AliGenGeVSim(0);
1768 mult = 2000; // Mult is the number of charged particles in |eta| < 0.5
1771 Float_t sigma_eta = 2.75; // Sigma of the Gaussian dN/dEta
1772 Float_t etamax = 7.00; // Maximum eta
1774 // Scale from multiplicity in |eta| < 0.5 to |eta| < |etamax|
1775 Float_t mm = mult * (TMath::Erf(etamax/sigma_eta/sqrt(2.)) /
1776 TMath::Erf(0.5/sigma_eta/sqrt(2.)));
1778 // Scale from charged to total multiplicity
1783 // 78% Pions (26% pi+, 26% pi-, 26% pi0) T = 250 MeV
1784 AliGeVSimParticle *pp =
1785 new AliGeVSimParticle(kPiPlus, 1, 0.26 * mm, 0.25, sigma_eta) ;
1786 AliGeVSimParticle *pm =
1787 new AliGeVSimParticle(kPiMinus, 1, 0.26 * mm, 0.25, sigma_eta) ;
1788 AliGeVSimParticle *p0 =
1789 new AliGeVSimParticle(kPi0, 1, 0.26 * mm, 0.25, sigma_eta) ;
1791 // 12% Kaons (3% K0short, 3% K0long, 3% K+, 3% K-) T = 300 MeV
1792 AliGeVSimParticle *ks =
1793 new AliGeVSimParticle(kK0Short, 1, 0.03 * mm, 0.30, sigma_eta) ;
1794 AliGeVSimParticle *kl =
1795 new AliGeVSimParticle(kK0Long, 1, 0.03 * mm, 0.30, sigma_eta) ;
1796 AliGeVSimParticle *kp =
1797 new AliGeVSimParticle(kKPlus, 1, 0.03 * mm, 0.30, sigma_eta) ;
1798 AliGeVSimParticle *km =
1799 new AliGeVSimParticle(kKMinus, 1, 0.03 * mm, 0.30, sigma_eta) ;
1801 // 10% Protons / Neutrons (5% Protons, 5% Neutrons) T = 250 MeV
1802 AliGeVSimParticle *pr =
1803 new AliGeVSimParticle(kProton, 1, 0.05 * mm, 0.25, sigma_eta) ;
1804 AliGeVSimParticle *ne =
1805 new AliGeVSimParticle(kNeutron, 1, 0.05 * mm, 0.25, sigma_eta) ;
1807 // Set Elliptic Flow properties
1809 Float_t pTsaturation = 2. ;
1811 pp->SetEllipticParam(vn,pTsaturation,0.) ;
1812 pm->SetEllipticParam(vn,pTsaturation,0.) ;
1813 p0->SetEllipticParam(vn,pTsaturation,0.) ;
1814 pr->SetEllipticParam(vn,pTsaturation,0.) ;
1815 ne->SetEllipticParam(vn,pTsaturation,0.) ;
1816 ks->SetEllipticParam(vn,pTsaturation,0.) ;
1817 kl->SetEllipticParam(vn,pTsaturation,0.) ;
1818 kp->SetEllipticParam(vn,pTsaturation,0.) ;
1819 km->SetEllipticParam(vn,pTsaturation,0.) ;
1821 // Set Direct Flow properties
1823 pp->SetDirectedParam(vn,1.0,0.) ;
1824 pm->SetDirectedParam(vn,1.0,0.) ;
1825 p0->SetDirectedParam(vn,1.0,0.) ;
1826 pr->SetDirectedParam(vn,1.0,0.) ;
1827 ne->SetDirectedParam(vn,1.0,0.) ;
1828 ks->SetDirectedParam(vn,1.0,0.) ;
1829 kl->SetDirectedParam(vn,1.0,0.) ;
1830 kp->SetDirectedParam(vn,1.0,0.) ;
1831 km->SetDirectedParam(vn,1.0,0.) ;
1833 // Add particles to the list
1835 gener->AddParticleType(pp) ;
1836 gener->AddParticleType(pm) ;
1837 gener->AddParticleType(p0) ;
1838 gener->AddParticleType(pr) ;
1839 gener->AddParticleType(ne) ;
1840 gener->AddParticleType(ks) ;
1841 gener->AddParticleType(kl) ;
1842 gener->AddParticleType(kp) ;
1843 gener->AddParticleType(km) ;
1847 TF1 *rpa = new TF1("gevsimPsiRndm","1", 0, 360);
1849 gener->SetPtRange(0., 9.) ; // Used for bin size in numerical integration
1850 gener->SetPhiRange(0, 360);
1852 gener->SetOrigin(0, 0, 0); // vertex position
1853 gener->SetSigma(0, 0, 5.3); // Sigma in (X,Y,Z) (cm) on IP position
1854 gener->SetCutVertexZ(1.); // Truncate at 1 sigma
1855 gener->SetVertexSmear(kPerEvent);
1856 gener->SetTrackingFlag(1);
1860 \textbf{Generator for e$^+$e$^-$ pairs in Pb--Pb collisions}
1862 In addition to strong interactions of heavy ions in central and
1863 peripheral collisions, ultra-peripheral collisions of ions give
1864 rise to coherent, mainly electromagnetic, interactions among which
1865 the dominant process is the (multiple) e$^+$e$^-$-pair
1866 production \cite{MC:AlscherHT97}
1868 AA \to AA + n({\rm e}^+{\rm e}^-), \label{nee}
1870 where $n$ is the pair multiplicity. Most electron--positron pairs
1871 are produced in the very forward direction, escaping the
1872 experiment. However, for Pb--Pb collisions at the LHC the
1873 cross-section of this process, about $230\,{\rm kb}$, is
1874 enormous. A sizable fraction of pairs produced with large-momentum
1875 transfer can contribute to the hit rate in the forward detectors
1876 increasing the occupancy or trigger rate. In order to study this
1877 effect an event generator for e$^+$e$^-$-pair production has
1878 been implemented in the AliRoot framework \cite{MC:Sadovsky}. The
1879 class \texttt{TEpEmGen} is a realisation of the \texttt{TGenerator}
1880 interface for external generators and wraps the FORTRAN code used
1881 to calculate the differential cross-section. \texttt{AliGenEpEmv1}
1882 derives from \texttt{AliGenerator} and uses the external generator to
1883 put the pairs on the AliRoot particle stack.
1886 \subsubsection{Combination of generators: AliGenCocktail}
1890 \includegraphics[width=10cm]{picts/cocktail}
1891 \caption{The \texttt{AliGenCocktail} generator is a realization of {\tt
1892 AliGenerator} which does not generate particles itself but
1893 delegates this task to a list of objects of type {\tt
1894 AliGenerator} that can be connected as entries ({\tt
1895 AliGenCocktailEntry}) at run time. In this way different physics
1896 channels can be combined in one event.} \label{MC:cocktail}
1899 Here is an example of cocktail, used for studies in the TRD detector:
1901 \begin{lstlisting}[language=C++]
1902 // The cocktail generator
1903 AliGenCocktail *gener = new AliGenCocktail();
1905 // Phi meson (10 particles)
1907 new AliGenParam(10,new AliGenMUONlib(),AliGenMUONlib::kPhi,"Vogt PbPb");
1908 phi->SetPtRange(0, 100);
1909 phi->SetYRange(-1., +1.);
1910 phi->SetForceDecay(kDiElectron);
1912 // Omega meson (10 particles)
1913 AliGenParam *omega =
1914 new AliGenParam(10,new AliGenMUONlib(),AliGenMUONlib::kOmega,"Vogt PbPb");
1915 omega->SetPtRange(0, 100);
1916 omega->SetYRange(-1., +1.);
1917 omega->SetForceDecay(kDiElectron);
1920 AliGenParam *jpsi = new AliGenParam(10,new AliGenMUONlib(),
1921 AliGenMUONlib::kJpsiFamily,"Vogt PbPb");
1922 jpsi->SetPtRange(0, 100);
1923 jpsi->SetYRange(-1., +1.);
1924 jpsi->SetForceDecay(kDiElectron);
1927 AliGenParam *ups = new AliGenParam(10,new AliGenMUONlib(),
1928 AliGenMUONlib::kUpsilonFamily,"Vogt PbPb");
1929 ups->SetPtRange(0, 100);
1930 ups->SetYRange(-1., +1.);
1931 ups->SetForceDecay(kDiElectron);
1933 // Open charm particles
1934 AliGenParam *charm = new AliGenParam(10,new AliGenMUONlib(),
1935 AliGenMUONlib::kCharm,"central");
1936 charm->SetPtRange(0, 100);
1937 charm->SetYRange(-1.5, +1.5);
1938 charm->SetForceDecay(kSemiElectronic);
1940 // Beauty particles: semi-electronic decays
1941 AliGenParam *beauty = new AliGenParam(10,new AliGenMUONlib(),
1942 AliGenMUONlib::kBeauty,"central");
1943 beauty->SetPtRange(0, 100);
1944 beauty->SetYRange(-1.5, +1.5);
1945 beauty->SetForceDecay(kSemiElectronic);
1947 // Beauty particles to J/psi ee
1948 AliGenParam *beautyJ = new AliGenParam(10, new AliGenMUONlib(),
1949 AliGenMUONlib::kBeauty,"central");
1950 beautyJ->SetPtRange(0, 100);
1951 beautyJ->SetYRange(-1.5, +1.5);
1952 beautyJ->SetForceDecay(kBJpsiDiElectron);
1954 // Adding all the components of the cocktail
1955 gener->AddGenerator(phi,"Phi",1);
1956 gener->AddGenerator(omega,"Omega",1);
1957 gener->AddGenerator(jpsi,"J/psi",1);
1958 gener->AddGenerator(ups,"Upsilon",1);
1959 gener->AddGenerator(charm,"Charm",1);
1960 gener->AddGenerator(beauty,"Beauty",1);
1961 gener->AddGenerator(beautyJ,"J/Psi from Beauty",1);
1963 // Settings, common for all components
1964 gener->SetOrigin(0, 0, 0); // vertex position
1965 gener->SetSigma(0, 0, 5.3); // Sigma in (X,Y,Z) (cm) on IP position
1966 gener->SetCutVertexZ(1.); // Truncate at 1 sigma
1967 gener->SetVertexSmear(kPerEvent);
1968 gener->SetTrackingFlag(1);
1973 \subsection{Particle transport}
1975 \subsubsection{TGeo essential information}
1977 A detailed description of the Root geometry package is available in
1978 the Root User's Guide\cite{RootUsersGuide}. Several examples can be
1979 found in \$ROOTSYS/tutorials, among them assembly.C, csgdemo.C,
1980 geodemo.C, nucleus.C, rootgeom.C, etc. Here we show a simple usage for
1981 export/import of the ALICE geometry and for check for overlaps and
1984 \begin{lstlisting}[language=C++]
1986 root [0] gAlice->Init()
1987 root [1] gGeoManager->Export("geometry.root")
1990 root [0] TGeoManager::Import("geometry.root")
1991 root [1] gGeoManager->CheckOverlaps()
1992 root [2] gGeoManager->PrintOverlaps()
1993 root [3] new TBrowser
1994 # Now you can navigate in Geometry->Illegal overlaps
1995 # and draw each overlap (double click on it)
1998 \subsubsection{Visualization}
2000 Below we show an example of VZERO visualization using the Root
2003 \begin{lstlisting}[language=C++]
2005 root [0] gAlice->Init()
2006 root [1] TGeoVolume *top = gGeoManager->GetMasterVolume()
2007 root [2] Int_t nd = top->GetNdaughters()
2008 root [3] for (Int_t i=0; i<nd; i++) \
2009 top->GetNode(i)->GetVolume()->InvisibleAll()
2010 root [4] TGeoVolume *v0ri = gGeoManager->GetVolume("V0RI")
2011 root [5] TGeoVolume *v0le = gGeoManager->GetVolume("V0LE")
2012 root [6] v0ri->SetVisibility(kTRUE);
2013 root [7] v0ri->VisibleDaughters(kTRUE);
2014 root [8] v0le->SetVisibility(kTRUE);
2015 root [9] v0le->VisibleDaughters(kTRUE);
2016 root [10] top->Draw();
2020 \subsubsection{Particle decays}
2022 We use Pythia to carry out particle decays during the transport. The
2023 default decay channels can be seen in the following way:
2025 \begin{lstlisting}[language=C++]
2027 root [0] AliPythia * py = AliPythia::Instance()
2028 root [1] py->Pylist(12); >> decay.list
2031 The file decay.list will contain the list of particle decays
2032 available in Pythia. Now if we want to force the decay $\Lambda^0 \to
2033 p \pi^-$, the following lines should be included in the Config.C
2034 before we register the decayer:
2036 \begin{lstlisting}[language=C++]
2037 AliPythia * py = AliPythia::Instance();
2038 py->SetMDME(1059,1,0);
2039 py->SetMDME(1060,1,0);
2040 py->SetMDME(1061,1,0);
2043 where 1059, 1060 and 1061 are the indices of the decay channels (from
2044 decay.list above) we want to switch off.
2046 \subsubsection{Examples}
2049 \textbf{Fast simulation}
2051 This example is taken from the macro
2052 \$ALICE\_ROOT/FASTSIM/fastGen.C. It shows how one can create a
2053 Kinematics tree which later can be used as input for the particle
2054 transport. A simple selection of events with high multiplicity is
2057 \lstinputlisting[language=C++] {scripts/fastGen.C}
2059 \textbf{Reading of kinematics tree as input for the particle transport}
2061 We suppose that the macro fastGen.C above has been used to generate
2062 the corresponding set of files: galice.root and Kinematics.root, and
2063 that they are stored in a separate subdirectory, for example kine. Then
2064 the following code in Config.C will read the set of files and put them
2065 in the stack for transport:
2067 \begin{lstlisting}[language=C++]
2068 AliGenExtFile *gener = new AliGenExtFile(-1);
2070 gener->SetMomentumRange(0,14000);
2071 gener->SetPhiRange(0.,360.);
2072 gener->SetThetaRange(45,135);
2073 gener->SetYRange(-10,10);
2074 gener->SetOrigin(0, 0, 0); //vertex position
2075 gener->SetSigma(0, 0, 5.3); //Sigma in (X,Y,Z) (cm) on IP position
2077 AliGenReaderTreeK * reader = new AliGenReaderTreeK();
2078 reader->SetFileName("../galice.root");
2080 gener->SetReader(reader);
2081 gener->SetTrackingFlag(1);
2087 \textbf{Usage of different generators}
2089 A lot of examples are available in
2090 \$ALICE\_ROOT/macros/Config\_gener.C. The corresponding part can be
2091 extracted and placed in the relevant Config.C file.
2094 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
2099 \section{Reconstruction}
2101 % -----------------------------------------------------------------------------
2103 \subsection{Reconstruction Framework}
2106 focuses on the reconstruction framework from the (detector) software
2107 developers' point of view.
2109 Wherever it is not specified explicitly as different, we refer
2110 to the `global ALICE coordinate system'\cite{CoordinateSystem}. It is a right-handed coordinate
2112 the $z$ axis coinciding with the beam-pipe axis and going in the direction
2113 opposite to the muon arm, the $y$ axis going up, and the origin of
2114 coordinates defined by the intersection point of the $z$ axis
2115 and the central-membrane plane of TPC.
2117 Here is a reminder of the following terms which are used in the
2118 description of the reconstruction framework (see also section \ref{AliRootFramework}):
2120 \item {\it Digit}: This is a digitized signal (ADC count) obtained by
2121 a sensitive pad of a detector at a certain time.
2122 \item {\it Cluster}: This is a set of adjacent (in space and/or in time)
2123 digits that were presumably generated by the same particle crossing the
2124 sensitive element of a detector.
2125 \item Reconstructed {\it space point}: This is the estimation of the
2126 position where a particle crossed the sensitive element of a detector
2127 (often, this is done by calculating the center of gravity of the
2129 \item Reconstructed {\it track}: This is a set of five parameters (such as the
2130 curvature and the angles with respect to the coordinate axes) of the particle's
2131 trajectory together with the corresponding covariance matrix estimated at a given
2136 The input to the reconstruction framework are digits in root tree
2137 format or raw data format. First a local reconstruction of clusters is
2138 performed in each detector. Then vertices and tracks are reconstructed
2139 and the particle identification is carried out. The output of the reconstruction
2140 is the Event Summary Data (ESD). The \class{AliReconstruction} class provides
2141 a simple user interface to the reconstruction framework which is
2142 explained in the source code.
2146 \includegraphics[width=10cm]{picts/ReconstructionFramework}
2147 \caption{Reconstruction framework.} \label{MC:Reconstruction}
2150 \textbf{Requirements and Guidelines}
2152 The development of the reconstruction framework has been carried out
2153 according to the following requirements and guidelines:
2155 \item the prime goal of the reconstruction is to provide the data that
2156 is needed for a physics analysis;
2157 \item the reconstruction should be aimed for high efficiency, purity and resolution.
2158 \item the user should have an easy to use interface to extract the
2159 required information from the ESD;
2160 \item the reconstruction code should be efficient but also maintainable;
2161 \item the reconstruction should be as flexible as possible.
2162 It should be possible to do the reconstruction in one detector even in
2163 the case that other detectors are not operational.
2164 To achieve such a flexibility each detector module should be able to
2166 \item find tracks starting from seeds provided by another detector
2168 \item find tracks without using information from other detectors
2170 \item find tracks from external seeds and add tracks from internal seeds
2171 \item and propagate tracks through the detector using the already
2172 assigned clusters in inward and outward direction.
2174 \item where it is appropriate, common (base) classes should be used in
2175 the different reconstruction modules;
2176 \item the interdependencies between the reconstruction modules should
2178 If possible the exchange of information between detectors should be
2179 done via a common track class.
2180 \item the chain of reconstruction program(s) should be callable and
2181 steerable in an easy way;
2182 \item there should be no assumptions on the structure or names of files
2183 or on the number or order of events;
2184 \item each class, data member and method should have a correct,
2185 precise and helpful html documentation.
2190 \textbf{AliReconstructor}
2192 The interface from the steering class \class{AliReconstruction} to the
2193 detector specific reconstruction code is defined by the base class
2194 \class{AliReconstructor}. For each detector there is a derived reconstructor
2195 class. The user can set options for each reconstructor in format of a
2196 string parameter which is accessible inside the reconstructor via the
2199 The detector specific reconstructors are created via
2200 plugins. Therefore they must have a default constructor. If no plugin
2201 handler is defined by the user (in .rootrc), it is assumed that the
2202 name of the reconstructor for detector DET is AliDETReconstructor and
2203 that it is located in the library libDETrec.so (or libDET.so).
2208 If the input data is provided in format of root trees, either the
2209 loaders or directly the trees are used to access the digits. In case
2210 of raw data input the digits are accessed via a raw reader.
2212 If a galice.root file exists, the run loader will be retrieved from
2213 it. Otherwise the run loader and the headers will be created from the
2214 raw data. The reconstruction cannot work if there is no galice.root file
2215 and no raw data input.
2218 \textbf{Output Data}
2220 The clusters (rec. points) are considered as intermediate output and
2221 are stored in root trees handled by the loaders. The final output of
2222 the reconstruction is a tree with objects of type \class{AliESD} stored in the
2223 file AliESDs.root. This Event Summary Data (ESD) contains lists of
2224 reconstructed tracks/particles and global event properties. The detailed
2225 description of the ESD can be found in section \ref{ESD}.
2228 \textbf{Local Reconstruction (Clusterization)}
2230 The first step of the reconstruction is the so called ``local
2231 reconstruction''. It is executed for each detector separately and
2232 without exchanging information with other detectors. Usually the
2233 clusterization is done in this step.
2235 The local reconstruction is invoked via the method \method{Reconstruct} of the
2236 reconstructor object. Each detector reconstructor runs the local
2237 reconstruction for all events. The local reconstruction method is
2238 only called if the method HasLocalReconstruction of the reconstructor
2241 Instead of running the local reconstruction directly on raw data, it
2242 is possible to first convert the raw data digits into a digits tree
2243 and then to call the \method{Reconstruct} method with a tree as input
2244 parameter. This conversion is done by the method ConvertDigits. The
2245 reconstructor has to announce that it can convert the raw data digits
2246 by returning kTRUE in the method \method{HasDigitConversion}.
2251 The current reconstruction of the primary-vertex
2252 position in ALICE is done using the information provided by the
2253 silicon pixel detectors, which constitute the two innermost layers of the
2256 The algorithm starts with looking at the
2257 distribution of the $z$ coordinates of the reconstructed space points
2258 in the first pixel layers.
2259 At a vertex $z$ coordinate $z_{\rm true} = 0$ the distribution is
2261 its centroid ($z_{\rm cen}$) is very close to the nominal
2262 vertex position. When the primary vertex is moved along the $z$ axis, an
2264 of hits will be lost and the centroid of the distribution no longer gives
2266 vertex position. However, for primary vertex locations not too far from
2268 (up to about 12~cm), the centroid of the distribution is still correlated to
2269 the true vertex position.
2270 The saturation effect at large $z_{\rm true}$ values of the vertex position
2271 ($z_{\rm true} = $12--15~cm)
2272 is, however, not critical, since this procedure is only meant to find a rough
2273 vertex position, in order to introduce some cut along $z$.
2275 To find the final vertex position,
2276 the correlation between the points $z_1$, $z_2$ in the two layers
2277 was considered. More details and performance studies are available in
2280 The primary vertex is reconstructed by a vertexer object derived from
2281 \class{AliVertexer}. After the local reconstruction was done for all detectors
2282 the vertexer method \method{FindVertexForCurrentEvent} is called for each
2283 event. It returns a pointer to a vertex object of type \class{AliESDVertex}.
2285 The vertexer object is created by the method \method{CreateVertexer} of the
2286 reconstructor. So far only the ITS is used to determine the primary
2287 vertex (\class{AliITSVertexerZ} class).
2289 The precision of the primary vertex reconstruction in the bending plane
2290 required for the reconstruction of D and B mesons in pp events
2291 can be achieved only after the tracking is done. The method is
2292 implemented in \class{AliITSVertexerTracks}. It is called as a second
2293 estimation of the primary vertex. The details of the algorithm can be
2294 found in Appendix \ref{VertexerTracks}.
2297 \textbf{Combined Track Reconstruction}
2298 The combined track reconstruction tries to accumulate the information from
2299 different detectors in order to optimize the track reconstruction performance.
2300 The result of this is stored in the combined track objects.
2301 The \class{AliESDTrack} class also
2302 provides the possibility to exchange information between detectors
2303 without introducing dependencies between the reconstruction modules.
2304 This is achieved by using just integer indexes pointing to the
2305 specific track objects, which on the other hand makes it possible to
2306 retrieve the full information if needed.
2307 The list of combined tracks can be kept in memory and passed from one
2308 reconstruction module to another.
2309 The storage of the combined tracks should be done in the standard way.
2311 The classes responsible for the reconstruction of tracks are derived
2312 from \class{AliTracker}. They are created by the method
2313 \method{CreateTracker} of the
2314 reconstructors. The reconstructed position of the primary vertex is
2315 made available to them via the method \method{SetVertex}. Before the track
2316 reconstruction in a detector starts the clusters are loaded from the
2317 clusters tree by the method \method{LoadClusters}. After the track reconstruction the
2318 clusters are unloaded by the method \method{UnloadClusters}.
2320 The track reconstruction (in the barrel part) is done in three passes. The first
2321 pass consists of a track finding and fitting in inward direction in
2322 TPC and then in ITS. The virtual method \method{Clusters2Tracks} (of
2323 class \class{AliTracker}) is the
2324 interface to this pass. The method for the next pass is
2325 \method{PropagateBack}. It does the track reconstruction in outward direction and is
2326 invoked for all detectors starting with the ITS. The last pass is the
2327 track refit in inward direction in order to get the track parameters
2328 at the vertex. The corresponding method \method{RefitInward} is called for TRD,
2329 TPC and ITS. All three track reconstruction methods have an AliESD object as
2330 argument which is used to exchange track information between detectors
2331 without introducing dependences between the code of the detector
2334 Depending on the way the information is used, the tracking methods can be
2335 divided into two large groups: global methods and local methods. Each
2336 group has advantages and disadvantages.
2338 With the global methods, all the track measurements are treated
2339 simultaneously and the decision to include or exclude a measurement is
2340 taken when all the information about the track is known.
2341 Typical algorithms belonging to this class are combinatorial methods,
2342 Hough transform, templates, conformal mappings. The advantages are
2343 the stability with respect to noise and mismeasurements and the possibility
2344 to operate directly on the raw data. On the other hand, these methods
2345 require a precise global track model. Such a track model can sometimes be
2346 unknown or does not even exist because of stochastic processes (energy
2347 losses, multiple scattering), non-uniformity of the magnetic field etc.
2348 In ALICE, global tracking methods are being extensively used in the
2349 High-Level Trigger (HLT) software. There, we
2350 are mostly interested in the reconstruction of the high-momentum tracks
2351 only, the required precision is not crucial, but the speed of the
2352 calculations is of great importance.
2355 Local methods do not need the knowledge of the global track model.
2356 The track parameters are always estimated `locally' at a given point
2357 in space. The decision to accept or to reject a measurement is made using
2358 either the local information or the information coming from the previous
2359 `history' of this track. With these methods, all the local track
2360 peculiarities (stochastic physics processes, magnetic fields, detector
2361 geometry) can be naturally accounted for. Unfortunately, the local methods
2362 rely on sophisticated space point reconstruction algorithms (including
2363 unfolding of overlapped clusters). They are sensitive to noise, wrong or
2364 displaced measurements and the precision of space point error parameterization.
2365 The most advanced kind of local track-finding methods is Kalman
2366 filtering which was introduced by P.~Billoir in 1983~\cite{MC:billoir}.
2370 When applied to the track reconstruction problem, the Kalman-filter
2371 approach shows many attractive properties:
2374 \item It is a method for simultaneous track recognition and
2377 \item There is a possibility to reject incorrect space points `on
2378 the fly', during a single tracking pass. These incorrect points can
2379 appear as a consequence of the imperfection of the cluster finder or
2380 they may be due to noise or they may be points from other tracks
2381 accidentally captured in the list of points to be associated with
2382 the track under consideration. In the other tracking methods one
2383 usually needs an additional fitting pass to get rid of incorrectly
2386 \item In the case of substantial multiple scattering, track
2387 measurements are correlated and therefore large matrices (of the
2388 size of the number of measured points) need to be inverted during
2389 a global fit. In the Kalman-filter procedure we only have to
2390 manipulate up to $5 \times 5$ matrices (although as many times as
2391 we have measured space points), which is much faster.
2393 \item One can handle multiple scattering and
2394 energy losses in a simpler way than in the case of global
2395 methods. At each step the material budget can be calculated and the
2396 mean correction calculated accordingly.
2398 \item It is a natural way to find the extrapolation
2399 of a track from one detector to another (for example from the TPC
2400 to the ITS or to the TRD).
2404 In ALICE we require good track-finding efficiency and reconstruction
2405 precision for tracks down to \mbox{\pt = 100 MeV/$c$.} Some of the ALICE
2406 tracking detectors (ITS, TRD) have a significant material budget.
2407 Under such conditions one cannot neglect the energy losses or the multiple
2408 scattering in the reconstruction. There are also rather
2409 big dead zones between the tracking detectors which complicate finding
2410 the continuation of the same track. For all these reasons,
2411 it is the Kalman-filtering approach that has been our choice for the
2412 offline reconstruction since 1994.
2414 % \subsubsection{General tracking strategy}
2416 The reconstruction software for the ALICE central tracking detectors (the
2417 ITS, TPC and the TRD) shares a common convention on the coordinate
2418 system used. All the clusters and tracks are always expressed in some local
2419 coordinate system related to a given sub-detector (TPC sector, ITS module
2420 etc). This local coordinate system is defined as the following:
2422 \item It is a right-handed Cartesian coordinate system;
2423 \item its origin and the $z$ axis coincide with those of the global
2424 ALICE coordinate system;
2425 \item the $x$ axis is perpendicular to the sub-detector's `sensitive plane'
2426 (TPC pad row, ITS ladder etc).
2428 Such a choice reflects the symmetry of the ALICE set-up
2429 and therefore simplifies the reconstruction equations.
2430 It also enables the fastest possible transformations from
2431 a local coordinate system to the global one and back again,
2432 since these transformations become simple single rotations around the
2436 The reconstruction begins with cluster finding in all of the ALICE central
2437 detectors (ITS, TPC, TRD, TOF, HMPID and PHOS). Using the clusters
2438 reconstructed at the two pixel layers of the ITS, the position of the
2439 primary vertex is estimated and the track finding starts. As
2440 described later, cluster-finding as well as the track-finding procedures
2441 performed in the detectors have some different detector-specific features.
2442 Moreover, within a given detector, on account of high occupancy and a big
2443 number of overlapped clusters, the cluster finding and the track finding are
2444 not completely independent: the number and positions of the clusters are
2445 completely determined only at the track-finding step.
2447 The general tracking strategy is the following. We start from our
2448 best tracker device, \ie the TPC, and from the outer radius where the
2449 track density is minimal. First, the track candidates (`seeds') are
2450 found. Because of the small number of clusters assigned to a seed, the
2451 precision of its parameters is not enough to safely extrapolate it outwards
2452 to the other detectors. Instead, the tracking stays within the TPC and
2453 proceeds towards the smaller TPC radii. Whenever
2454 possible, new clusters are associated with a track candidate
2455 at each step of the Kalman filter if they are within a given distance
2456 from the track prolongation and the track parameters are more and
2457 more refined. When all of the seeds are extrapolated to the inner limit of
2458 the TPC, the tracking proceeds into the ITS. The ITS tracker tries to prolong
2459 the TPC tracks as close as possible to the primary vertex.
2460 On the way to the primary vertex, the tracks are assigned additional,
2461 precisely reconstructed ITS clusters, which also improves
2462 the estimation of the track parameters.
2464 After all the track candidates from the TPC are assigned their clusters
2465 in the ITS, a special ITS stand-alone tracking procedure is applied to
2466 the rest of the ITS clusters. This procedure tries to recover the
2467 tracks that were not found in the TPC because of the \pt cut-off, dead zones
2468 between the TPC sectors, or decays.
2470 At this point the tracking is restarted from the vertex back to the
2471 outer layer of the ITS and then continued towards the outer wall of the
2472 TPC. For the track that was labeled by the ITS tracker as potentially
2473 primary, several particle-mass-dependent, time-of-flight hypotheses
2474 are calculated. These hypotheses are then used for the particle
2475 identification (PID) with the TOF detector. Once the outer
2476 radius of the TPC is reached, the precision of the estimated track
2478 sufficient to extrapolate the tracks to the TRD, TOF, HMPID and PHOS
2479 detectors. Tracking in the TRD is done in a similar way to that
2480 in the TPC. Tracks are followed till the outer wall of the TRD and the
2481 assigned clusters improve the momentum resolution further.
2483 % matching with the TOF, HMPID and PHOS is done, and the tracks aquire
2484 % additional PID information.
2485 Next, the tracks are extrapolated to the TOF, HMPID and PHOS, where they
2486 acquire the PID information.
2487 Finally, all the tracks are refitted with the Kalman filter backwards to
2488 the primary vertex (or to the innermost possible radius, in the case of
2489 the secondary tracks). This gives the most precise information about
2490 the track parameters at the point where the track appeared.
2492 The tracks that passed the final refit towards the primary vertex are used
2493 for the secondary vertex (V$^0$, cascade, kink) reconstruction. There is also
2494 an option to reconstruct the secondary vertexes `on the fly' during the
2495 tracking itself. The potential advantage of such a possibility is that
2496 the tracks coming from a secondary vertex candidate are not extrapolated
2497 beyond the vertex, thus minimizing the risk of picking up a wrong track
2498 prolongation. This option is currently under investigation.
2500 The reconstructed tracks (together with the PID information), kink, V$^0$
2501 and cascade particle decays are then stored in the Event Summary Data (ESD).
2503 More details about the reconstruction algorithms can be found in
2504 Chapter 5 of the ALICE Physics Performance Report~\cite{PPRVII}.
2507 \textbf{Filling of ESD}
2509 After the tracks were reconstructed and stored in the \class{AliESD} object,
2510 further information is added to the ESD. For each detector the method
2511 \method{FillESD} of the reconstructor is called. Inside this method \eg V0s
2512 are reconstructed or particles are identified (PID). For the PID a
2513 Bayesian approach is used (see Appendix~\ref{BayesianPID}). The constants
2514 and some functions that are used for the PID are defined in the class
2518 \textbf{Monitoring of Performance}
2520 For the monitoring of the track reconstruction performance the classes
2521 \class{AliTrackReference} are used.
2522 Objects of the second type of class are created during the
2523 reconstruction at the same locations as the \class{AliTrackReference}
2525 So the reconstructed tracks can be easily compared with the simulated
2527 This makes it possible to study and monitor the performance of the track reconstruction in detail.
2528 The creation of the objects used for the comparison should not
2529 interfere with the reconstruction algorithm and can be switched on or
2532 Several ``comparison'' macros make it possible to monitor the efficiency and the
2533 resolution of the tracking. Here is a typical usage (the simulation
2534 and the reconstruction have been done in advance):
2536 \begin{lstlisting}[language=C++]
2538 root [0] gSystem->SetIncludePath("-I$ROOTSYS/include \
2539 -I$ALICE_ROOT/include \
2543 root [1] .L $ALICE_ROOT/TPC/AliTPCComparison.C++
2544 root [2] .L $ALICE_ROOT/ITS/AliITSComparisonV2.C++
2545 root [3] .L $ALICE_ROOT/TOF/AliTOFComparison.C++
2546 root [4] AliTPCComparison()
2547 root [5] AliITSComparisonV2()
2548 root [6] AliTOFComparison()
2551 Another macro can be used to provide a preliminary estimate of the
2552 combined acceptance: \texttt{STEER/CheckESD.C}.
2556 The following classes are used in the reconstruction:
2558 \item \class{AliTrackReference}:
2559 This class is used to store the position and the momentum of a
2560 simulated particle at given locations of interest (e.g. when the
2561 particle enters or exits a detector or it decays). It is used
2562 mainly for debugging and tuning of the tracking.
2564 \item \class{AliExternalTrackParams}:
2565 This class describes the status of a track in a given point.
2566 It knows the track parameters and its covariance matrix.
2567 This parameterization is used to exchange tracks between the detectors.
2568 A set of functions returning the position and the momentum of tracks
2569 in the global coordinate system as well as the track impact parameters
2570 are implemented. There is a possibility to propagate the track to a
2571 given radius (\method{PropagateTo} and \method{Propagate}).
2573 \item \class{AliKalmanTrack} and derived classes:
2574 These classes are used to find and fit tracks with the Kalman approach.
2575 The \class{AliKalmanTrack} defines the interfaces and implements some
2576 common functionality. The derived classes know about the clusters
2577 assigned to the track. They also update the information in an
2578 \class{AliESDtrack}.
2579 The current status of the track during the track reconstruction can be
2580 represented by an \class{AliExternalTrackParameters}.
2581 The history of the track during the track reconstruction can be stored
2582 in a list of \class{AliExternalTrackParameters} objects.
2583 The \class{AliKalmanTrack} defines the methods:
2585 \item \method{Double\_t GetDCA(...)} Returns the distance
2586 of closest approach between this track and the track passed as the
2588 \item \method{Double\_t MeanMaterialBudget(...)} Calculate the mean
2589 material budget and material properties between two points.
2592 \item \class{AliTracker} and subclasses:
2593 The \class{AliTracker} is the base class for all the trackers in the
2594 different detectors. It fixes the interface needed to find and
2595 propagate tracks. The actual implementation is done in the derived classes.
2597 \item \class{AliESDTrack}:
2598 This class combines the information about a track from different detectors.
2599 It knows the current status of the track
2600 (\class{AliExternalTrackParameters}) and it has (non-persistent) pointers
2601 to the individual \class{AliKalmanTrack} objects from each detector
2602 which contributed to the track.
2603 It knows about some detector specific quantities like the number or
2604 bit pattern of assigned clusters, dEdx, $\chi^2$, etc.
2605 It can also calculate a conditional probability for a given mixture of
2606 particle species following the Bayesian approach.
2607 It defines a track label pointing to the corresponding simulated
2608 particle in case of \MC.
2609 The combined track objects are the basis for a physics analysis.
2616 The example below shows reconstruction with non-uniform magnetic field
2617 (the simulation is also done with non-uniform magnetic field by adding
2618 the following line in the \texttt{Config.C}: \texttt{field->SetL3ConstField(1)}). Only
2619 the barrel detectors are reconstructed, a specific TOF reconstruction
2620 has been requested, and the RAW data have been used:
2622 \begin{lstlisting}[language=C++]
2624 AliReconstruction reco;
2626 reco.SetRunReconstruction("ITS TPC TRD TOF");
2627 reco.SetNonuniformFieldTracking();
2628 reco.SetInput("raw.root");
2634 % -----------------------------------------------------------------------------
2636 \subsection{Event summary data}\label{ESD}
2638 The classes which are needed to process and analyze the ESD are packed
2639 together in a standalone library (libESD.so) which can be used
2640 separately from the \aliroot framework. Inside each
2641 ESD object the data is stored in polymorphic containers filled with
2642 reconstructed tracks, neutral particles, etc. The main class is
2643 \class{AliESD}, which contains all the information needed during the
2647 \item fields to identify the event such as event number, run number,
2648 time stamp, type of event, trigger type (mask), trigger cluster (mask),
2649 version of reconstruction, etc.;
2650 \item reconstructed ZDC energies and number of participants;
2651 \item primary vertex information: vertex z position estimated by the T0,
2652 primary vertex estimated by the SPD, primary vertex estimated using
2654 \item SPD tracklet multiplicity;
2655 \item interaction time estimated by the T0 together with additional
2656 time and amplitude information from T0;
2657 \item array of ESD tracks;
2658 \item arrays of HLT tracks both from the conformal mapping and from
2659 the Hough transform reconstruction;
2660 \item array of MUON tracks;
2661 \item array of PMD tracks;
2662 \item array of TRD ESD tracks (triggered);
2663 \item arrays of reconstructed $V^0$ vertexes, cascade decays and
2665 \item array of calorimeter clusters for PHOS/EMCAL;
2666 \item indexes of the information from PHOS and EMCAL detectors in the
2670 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
2676 % -----------------------------------------------------------------------------
2678 \subsection{Introduction}
2679 The analysis of experimental data is the final stage of event
2680 processing and it is usually repeated many times. Analysis is a very diverse
2681 activity, where the goals of each
2682 particular analysis pass may differ significantly.
2684 The ALICE detector~\cite{PPR} is optimized for the
2685 reconstruction and analysis of heavy-ion collisions.
2686 In addition, ALICE has a broad physics programme devoted to
2687 \pp and \pA interactions.
2690 The data analysis is coordinated by the Physics Board via the Physics
2691 Working Groups (PWGs). At present the following PWGs have started
2695 \item PWG0 \textbf{first physics};
2696 \item PWG1 \textbf{detector performance};
2697 \item PWG2 \textbf{global event characteristics:} particle multiplicity,
2698 centrality, energy density, nuclear stopping; \textbf{soft physics:} chemical composition (particle and resonance
2699 production, particle ratios and spectra, strangeness enhancement),
2700 reaction dynamics (transverse and elliptic flow, HBT correlations,
2701 event-by-event dynamical fluctuations);
2702 \item PWG3 \textbf{heavy flavors:} quarkonia, open charm and beauty production;
2703 \item PWG4 \textbf{hard probes:} jets, direct photons;
2706 Each PWG has a corresponding module in AliRoot (PWG0 -- PWG4). The code
2707 is managed by CVS administrators.
2709 The \pp and \pA programme will provide, on the one hand, reference points
2710 for comparison with heavy ions. On the other hand, ALICE will also
2711 pursue genuine and detailed \pp studies. Some
2712 quantities, in particular the global characteristics of interactions, will
2713 be measured during the first days of running exploiting the low-momentum
2714 measurement and particle identification capabilities of ALICE.
2716 The ALICE computing framework is described in detail in the Computing
2717 Technical Design Report~\cite{CompTDR}. This article is based on
2718 Chapter 6 of the document.
2721 \paragraph{The analysis activity.}
2723 We distinguish two main types of analysis: scheduled analysis and
2724 chaotic analysis. They differ in their data access pattern, in the
2725 storage and registration of the results, and in the frequency of
2726 changes in the analysis code (more details are available below).
2728 In the ALICE Computing Model the analysis starts from the Event Summary
2729 Data (ESD). These are produced during the reconstruction step and contain
2730 all the information for the analysis. The size of the ESD is
2731 about one order of magnitude lower than the corresponding raw
2732 data. The analysis tasks produce Analysis
2733 Object Data (AOD) specific to a given set of physics objectives.
2734 Further passes for the specific analysis activity can be performed on
2735 the AODs, until the selection parameters or algorithms are changed.
2737 A typical data analysis task usually requires processing of
2738 selected sets of events. The selection is based on the event
2739 topology and characteristics, and is done by querying the tag
2740 database. The tags represent physics quantities which characterize
2741 each run and event, and permit fast selection. They are created
2742 after the reconstruction and contain also the unique
2743 identifier of the ESD file. A typical query, when translated into
2744 natural language, could look like ``Give me
2745 all the events with impact parameter in $<$range$>$
2746 containing jet candidates with energy larger than $<$threshold$>$''.
2747 This results in a list of events and file identifiers to be used in the
2748 consecutive event loop.
2751 The next step of a typical analysis consists of a loop over all the events
2752 in the list and calculation of the physics quantities of
2753 interest. Usually, for each event, there is a set of embedded loops on the
2754 reconstructed entities such as tracks, ${\rm V^0}$ candidates, neutral
2755 clusters, etc., the main goal of which is to select the signal
2756 candidates. Inside each loop a number of criteria (cuts) are applied to
2757 reject the background combinations and to select the signal ones. The
2758 cuts can be based on geometrical quantities such as impact parameters
2760 respect to the primary vertex, distance between the cluster and the
2761 closest track, distance of closest approach between the tracks,
2762 angle between the momentum vector of the particle combination
2763 and the line connecting the production and decay vertexes. They can
2765 kinematics quantities such as momentum ratios, minimal and maximal
2766 transverse momentum,
2767 angles in the rest frame of the particle combination.
2768 Particle identification criteria are also among the most common
2771 The optimization of the selection criteria is one of the most
2772 important parts of the analysis. The goal is to maximize the
2773 signal-to-background ratio in case of search tasks, or another
2774 ratio (typically ${\rm Signal/\sqrt{Signal+Background}}$) in
2775 case of measurement of a given property. Usually, this optimization is
2776 performed using simulated events where the information from the
2777 particle generator is available.
2779 After the optimization of the selection criteria, one has to take into
2780 account the combined acceptance of the detector. This is a complex,
2781 analysis-specific quantity which depends on the geometrical acceptance,
2782 the trigger efficiency, the decays of particles, the reconstruction
2783 efficiency, the efficiency of the particle identification and of the
2784 selection cuts. The components of the combined acceptance are usually
2785 parameterized and their product is used to unfold the experimental
2786 distributions or during the simulation of some model parameters.
2788 The last part of the analysis usually involves quite complex
2789 mathematical treatments, and sophisticated statistical tools. Here one
2790 may include the correction for systematic effects, the estimation of
2791 statistical and systematic errors, etc.
2794 \paragraph{Scheduled analysis.}
2796 The scheduled analysis typically uses all
2797 the available data from a given period, and stores and registers the results
2798 using \grid middleware. The tag database is updated accordingly. The
2799 AOD files, generated during the scheduled
2800 analysis, can be used by several subsequent analyses, or by a class of
2801 related physics tasks.
2802 The procedure of scheduled analysis is centralized and can be
2803 considered as data filtering. The requirements come from the PWGs and
2804 are prioritized by the Physics Board taking into
2805 account the available computing and storage resources. The analysis
2806 code is tested in advance and released before the beginning of the
2809 Each PWG will require some sets of
2810 AOD per event, which are specific for one or
2811 a few analysis tasks. The creation of the AOD sets is managed centrally.
2812 The event list of each AOD set
2813 will be registered and the access to the AOD files will be granted to
2814 all ALICE collaborators. AOD files will be generated
2815 at different computing centers and will be stored on
2816 the corresponding storage
2817 elements. The processing of each file set will thus be done in a
2818 distributed way on the \grid. Some of the AOD sets may be quite small
2819 and would fit on a single storage element or even on one computer; in
2820 this case the corresponding tools for file replication, available
2821 in the ALICE \grid infrastructure, will be used.
2824 \paragraph{Chaotic analysis.}
2826 The chaotic analysis is focused on a single physics task and
2827 typically is based on the filtered data from the scheduled
2828 analysis. Each physicist also
2829 may access directly large parts of the ESD in order to search for rare
2830 events or processes.
2831 Usually the user develops the code using a small subsample
2832 of data, and changes the algorithms and criteria frequently. The
2833 analysis macros and software are tested many times on relatively
2834 small data volumes, both experimental and \MC.
2835 The output is often only a set of histograms.
2836 Such a tuning of the analysis code can be done on a local
2837 data set or on distributed data using \grid tools. The final version
2839 will eventually be submitted to the \grid and will access large
2841 the totality of the ESDs. The results may be registered in the \grid file
2842 catalog and used at later stages of the analysis.
2843 This activity may or may not be coordinated inside
2844 the PWGs, via the definition of priorities. The
2845 chaotic analysis is carried out within the computing resources of the
2849 % -----------------------------------------------------------------------------
2851 \subsection{Infrastructure tools for distributed analysis}
2853 \subsubsection{gShell}
2855 The main infrastructure tools for distributed analysis have been
2856 described in Chapter 3 of the Computing TDR~\cite{CompTDR}. The actual
2857 middleware is hidden by an interface to the \grid,
2858 gShell\cite{CH6Ref:gShell}, which provides a
2859 single working shell.
2860 The gShell package contains all the commands a user may need for file
2861 catalog queries, creation of sub-directories in the user space,
2862 registration and removal of files, job submission and process
2863 monitoring. The actual \grid middleware is completely transparent to
2866 The gShell overcomes the scalability problem of direct client
2867 connections to databases. All clients connect to the
2868 gLite\cite{CH6Ref:gLite} API
2869 services. This service is implemented as a pool of preforked server
2870 daemons, which serve single-client requests. The client-server
2871 protocol implements a client state which is represented by a current
2872 working directory, a client session ID and time-dependent symmetric
2873 cipher on both ends to guarantee client privacy and security. The
2874 server daemons execute client calls with the identity of the connected
2877 \subsubsection{PROOF -- the Parallel ROOT Facility}
2879 The Parallel ROOT Facility, PROOF~\cite{CH6Ref:PROOF}, has been specially
2880 designed and developed
2881 to allow the analysis and mining of very large data sets, minimizing
2882 response time. It makes use of the inherent parallelism in event data
2883 and implements an architecture that optimizes I/O and CPU utilization
2884 in heterogeneous clusters with distributed storage. The system
2885 provides transparent and interactive access to terabyte-scale data
2886 sets. Being part of the ROOT framework, PROOF inherits the benefits of
2887 a performing object storage system and a wealth of statistical and
2888 visualization tools.
2889 The most important design features of PROOF are:
2892 \item transparency -- no difference between a local ROOT and
2893 a remote parallel PROOF session;
2894 \item scalability -- no implicit limitations on number of computers
2896 \item adaptability -- the system is able to adapt to variations in the
2900 PROOF is based on a multi-tier architecture: the ROOT client session,
2901 the PROOF master server, optionally a number of PROOF sub-master
2902 servers, and the PROOF worker servers. The user connects from the ROOT
2903 session to a master server on a remote cluster and the master server
2904 creates sub-masters and worker servers on all the nodes in the
2905 cluster. All workers process queries in parallel and the results are
2906 presented to the user as coming from a single server.
2908 PROOF can be run either in a purely interactive way, with the user
2909 remaining connected to the master and worker servers and the analysis
2910 results being returned to the user's ROOT session for further
2911 analysis, or in an `interactive batch' way where the user disconnects
2912 from the master and workers (see Fig.~\vref{CH3Fig:alienfig7}). By
2913 reconnecting later to the master server the user can retrieve the
2914 analysis results for that particular
2915 query. This last mode is useful for relatively long running queries
2916 (several hours) or for submitting many queries at the same time. Both
2917 modes will be important for the analysis of ALICE data.
2921 \includegraphics[width=11.5cm]{picts/alienfig7}
2922 \caption{Setup and interaction with the \grid middleware of a user
2923 PROOF session distributed over many computing centers.}
2924 \label{CH3Fig:alienfig7}
2927 % -----------------------------------------------------------------------------
2929 \subsection{Analysis tools}
2931 This section is devoted to the existing analysis tools in \ROOT and
2932 \aliroot. As discussed in the introduction, some very broad
2933 analysis tasks include the search for some rare events (in this case the
2934 physicist tries to maximize the signal-over-background ratio), or
2935 measurements where it is important to maximize the signal
2936 significance. The tools that provide possibilities to apply certain
2937 selection criteria and to find the interesting combinations within
2938 a given event are described below. Some of them are very general and are
2939 used in many different places, for example the statistical
2940 tools. Others are specific to a given analysis.
2942 \subsubsection{Statistical tools}
2944 Several commonly used statistical tools are available in
2945 \ROOT~\cite{ROOT}. \ROOT provides
2946 classes for efficient data storage and access, such as trees
2948 ESD information is organized in a tree, where each event is a separate
2949 entry. This allows a chain of the ESD files to be made and the
2950 elaborated selector mechanisms to be used in order to exploit the PROOF
2951 services. The tree classes
2952 permit easy navigation, selection, browsing, and visualization of the
2953 data in the branches.
2955 \ROOT also provides histogramming and fitting classes, which are used
2956 for the representation of all the one- and multi-dimensional
2957 distributions, and for extraction of their fitted parameters. \ROOT provides
2958 an interface to powerful and robust minimization packages, which can be
2959 used directly during some special parts of the analysis. A special
2960 fitting class allows one to decompose an experimental histogram as a
2961 superposition of source histograms.
2963 \ROOT also has a set of sophisticated statistical analysis tools such as
2964 principal component analysis, robust estimator, and neural networks.
2965 The calculation of confidence levels is provided as well.
2967 Additional statistical functions are included in \texttt{TMath}.
2969 \subsubsection{Calculations of kinematics variables}
2971 The main \ROOT physics classes include 3-vectors and Lorentz
2972 vectors, and operations
2973 such as translation, rotation, and boost. The calculations of
2974 kinematics variables
2975 such as transverse and longitudinal momentum, rapidity,
2976 pseudorapidity, effective mass, and many others are provided as well.
2979 \subsubsection{Geometrical calculations}
2981 There are several classes which can be used for
2982 measurement of the primary vertex: \texttt{AliITSVertexerZ},
2983 \texttt{AliITSVertexerIons}, \texttt{AliITSVertexerTracks}, etc. A fast estimation of the {\it z}-position can be
2984 done by \texttt{AliITSVertexerZ}, which works for both lead--lead
2985 and proton--proton collisions. A universal tool is provided by
2986 \texttt{AliITSVertexerTracks}, which calculates the position and
2987 covariance matrix of the primary vertex based on a set of tracks, and
2988 also estimates the $\chi^2$ contribution of each track. An iterative
2989 procedure can be used to remove the secondary tracks and improve the
2992 Track propagation to the primary vertex (inward) is provided in
2995 The secondary vertex reconstruction in case of ${\rm V^0}$ is provided by
2996 \texttt{AliV0vertexer}, and in case of cascade hyperons by
2997 \texttt{AliCascadeVertexer}.
2998 \texttt{AliITSVertexerTracks} can be used to find secondary
2999 vertexes close to the primary one, for example decays of open charm
3000 like ${\rm D^0 \to K^- \pi^+}$ or ${\rm D^+ \to K^- \pi^+ \pi^+}$. All
3002 reconstruction classes also calculate distance of closest approach (DCA)
3003 between the track and the vertex.
3005 The calculation of impact parameters with respect to the primary vertex
3006 is done during the reconstruction and the information is available in
3007 \texttt{AliESDtrack}. It is then possible to recalculate the
3008 impact parameter during the ESD analysis, after an improved determination
3009 of the primary vertex position using reconstructed ESD tracks.
3011 \subsubsection{Global event characteristics}
3013 The impact parameter of the interaction and the number of participants
3014 are estimated from the energy measurements in the ZDC. In addition,
3015 the information from the FMD, PMD, and T0 detectors is available. It
3016 gives a valuable estimate of the event multiplicity at high rapidities
3017 and permits global event characterization. Together with the ZDC
3018 information it improves the determination of the impact parameter,
3019 number of participants, and number of binary collisions.
3021 The event plane orientation is calculated by the \texttt{AliFlowAnalysis} class.
3023 \subsubsection{Comparison between reconstructed and simulated parameters}
3025 The comparison between the reconstructed and simulated parameters is
3026 an important part of the analysis. It is the only way to estimate the
3027 precision of the reconstruction. Several example macros exist in
3028 \aliroot and can be used for this purpose: \texttt{AliTPCComparison.C},
3029 \texttt{AliITSComparisonV2.C}, etc. As a first step in each of these
3030 macros the list of so-called `good tracks' is built. The definition of
3031 a good track is explained in detail in the ITS\cite{CH6Ref:ITS_TDR} and
3032 TPC\cite{CH6Ref:TPC_TDR} Technical Design
3033 Reports. The essential point is that the track
3034 goes through the detector and can be reconstructed. Using the `good
3035 tracks' one then estimates the efficiency of the reconstruction and
3038 Another example is specific to the MUON arm: the \texttt{MUONRecoCheck.C}
3039 macro compares the reconstructed muon tracks with the simulated ones.
3041 There is also the possibility to calculate directly the resolutions without
3042 additional requirements on the initial track. One can use the
3043 so-called track label and retrieve the corresponding simulated
3044 particle directly from the particle stack (\texttt{AliStack}).
3046 \subsubsection{Event mixing}
3048 One particular analysis approach in heavy-ion physics is the
3049 estimation of the combinatorial background using event mixing. Part of the
3050 information (for example the positive tracks) is taken from one
3051 event, another part (for example the negative tracks) is taken from
3053 a `similar' event. The event `similarity' is very important, because
3054 only in this case the combinations produced from different events
3055 represent the combinatorial background. Typically `similar' in
3056 the example above means with the same multiplicity of negative
3057 tracks. One may require in addition similar impact parameters of the
3058 interactions, rotation of the tracks of the second event to adjust the
3059 event plane, etc. The possibility for event mixing is provided in
3060 \aliroot by the fact that the ESD is stored in trees and one can chain
3061 and access simultaneously many ESD objects. Then the first pass would
3062 be to order the events according to the desired criterion of
3063 `similarity' and to use the obtained index for accessing the `similar'
3064 events in the embedded analysis loops. An example of event mixing is
3065 shown in Fig.~\ref{CH6Fig:phipp}. The background distribution has been
3066 obtained using `mixed events'. The signal distribution has been taken
3067 directly from the \MC simulation. The `experimental distribution' has
3068 been produced by the analysis macro and decomposed as a
3069 superposition of the signal and background histograms.
3073 \includegraphics*[width=120mm]{picts/phipp}
3074 \caption{Mass spectrum of the ${\rm \phi}$ meson candidates produced
3075 inclusively in the proton--proton interactions.}
3076 \label{CH6Fig:phipp}
3080 \subsubsection{Analysis of the High-Level Trigger (HLT) data}
3082 This is a specific analysis which is needed in order to adjust the cuts
3083 in the HLT code, or to estimate the HLT
3084 efficiency and resolution. \aliroot provides a transparent way of doing
3085 such an analysis, since the HLT information is stored in the form of ESD
3086 objects in a parallel tree. This also helps in the monitoring and
3087 visualization of the results of the HLT algorithms.
3091 \subsubsection{EVE -- Event Visualization Environment}
3095 \item small application kernel;
3096 \item graphics classes with editors and OpenGL renderers;
3097 \item CINT scripts that extract data, fill graphics classes and register
3098 them to the application.
3101 The framework is still evolving ... some things might not work as expected.
3106 \item Initialize ALICE environment.
3107 \item Spawn 'alieve' executable and invoke the alieve\_init.C macro,
3110 To load first event from current directory:
3111 \begin{lstlisting}[language=sh]
3112 # alieve alieve\_init.C
3114 To load 5th event from directory /data/my-pp-run:
3115 \begin{lstlisting}[language=sh]
3116 # alieve 'alieve\_init.C("/data/my-pp-run", 5)'
3119 \begin{lstlisting}[language=sh]
3121 root[0] .L alieve\_init.C
3122 root[1] alieve\_init("/somedir")
3125 \item Use GUI or CINT command-line to invoke further visualization macros.
3126 \item To navigate the events use macros 'event\_next.C' and 'event\_prev.C'.
3127 These are equivalent to the command-line invocations:
3128 \begin{lstlisting}[language=sh]
3129 root[x] Alieve::gEvent->NextEvent()
3132 \begin{lstlisting}[language=sh]
3133 root[x] Alieve::gEvent->PrevEvent()
3135 The general form to go to event via its number is:
3136 \begin{lstlisting}[language=sh]
3137 root[x] Alieve::gEvent->GotoEvent(<event-number>)
3141 See files in EVE/alice-macros/. For specific uses these should be
3142 edited to suit your needs.
3144 \underline{Directory structure}
3146 EVE is split into two modules: REVE (ROOT part, not dependent on
3147 AliROOT) and ALIEVE (ALICE specific part). For the time being both
3148 modules are kept in AliROOT CVS.
3150 Alieve/ and Reve/ -- sources
3152 macros/ -- macros for bootstrapping and internal steering\\
3153 alice-macros/ -- macros for ALICE visualization\\
3154 alice-data/ -- data files used by ALICE macros\\
3155 test-macros/ -- macros for tests of specific features; usually one needs
3156 to copy and edit them\\
3157 bin/, Makefile and make\_base.inc are used for stand-alone build of the
3163 \item Problems with macro-execution
3165 A failed macro-execution can leave CINT in a poorly defined state that
3166 prevents further execution of macros. For example:
3168 \begin{lstlisting}[language=sh]
3169 Exception Reve::Exc_t: Event::Open failed opening ALICE ESDfriend from
3170 '/alice-data/coctail_10k/AliESDfriends.root'.
3172 root [1] Error: Function MUON_geom() is not defined in current scope :0:
3173 *** Interpreter error recovered ***
3174 Error: G__unloadfile() File "/tmp/MUON_geom.C" not loaded :0:
3177 'gROOT$\to$Reset()' helps in most of the cases.
3180 % ------------------------------------------------------------------------------
3183 \subsection{Existing analysis examples in \aliroot}
3185 There are several dedicated analysis tools available in \aliroot. Their results
3186 were used in the Physics Performance Report and described in
3187 ALICE internal notes. There are two main classes of analysis: the
3188 first one based directly on ESD, and the second one extracting first
3189 AOD, and then analyzing it.
3192 \item\textbf{ESD analysis }
3195 \item[ ] \textbf{${\rm V^0}$ and cascade reconstruction/analysis}
3197 The ${\rm V^0}$ candidates
3198 are reconstructed during the combined barrel tracking and stored in
3199 the ESD object. The following criteria are used for the selection:
3200 minimal-allowed impact parameter (in the transverse plane) for each
3201 track; maximal-allowed DCA between the two tracks; maximal-allowed
3203 ${\rm V^0}$ pointing angle
3204 (angle between the momentum vector of the particle combination
3205 and the line connecting the production and decay vertexes); minimal
3206 and maximal radius of the fiducial volume; maximal-allowed ${\rm
3208 last criterion requires the covariance matrix of track parameters,
3209 which is available only in \texttt{AliESDtrack}. The reconstruction
3210 is performed by \texttt{AliV0vertexer}. This class can be used also
3211 in the analysis. An example of reconstructed kaons taken directly
3212 from the ESDs is shown in Fig.\ref{CH6Fig:kaon}.
3216 \includegraphics*[width=120mm]{picts/kaon}
3217 \caption{Mass spectrum of the ${\rm K_S^0}$ meson candidates produced
3218 inclusively in the \mbox{Pb--Pb} collisions.}
3222 The cascade hyperons are reconstructed using the ${\rm V^0}$ candidate and
3223 `bachelor' track selected according to the cuts above. In addition,
3224 one requires that the reconstructed ${\rm V^0}$ effective mass belongs to
3225 a certain interval centered in the true value. The reconstruction
3226 is performed by \texttt{AliCascadeVertexer}, and this class can be
3227 used in the analysis.
3229 \item[ ] \textbf{Open charm}
3231 This is the second elaborated example of ESD
3232 analysis. There are two classes, \texttt{AliD0toKpi} and
3233 \texttt{AliD0toKpiAnalysis}, which contain the corresponding analysis
3234 code. The decay under investigation is ${\rm D^0 \to K^- \pi^+}$ and its
3235 charge conjugate. Each ${\rm D^0}$ candidate is formed by a positive and
3236 a negative track, selected to fulfill the following requirements:
3237 minimal-allowed track transverse momentum, minimal-allowed track
3238 impact parameter in the transverse plane with respect to the primary
3239 vertex. The selection criteria for each combination include
3240 maximal-allowed distance of closest approach between the two tracks,
3241 decay angle of the kaon in the ${\rm D^0}$ rest frame in a given region,
3242 product of the impact parameters of the two tracks larger than a given value,
3243 pointing angle between the ${\rm D^0}$ momentum and flight-line smaller than
3244 a given value. The particle
3245 identification probabilities are used to reject the wrong
3246 combinations, namely ${\rm (K,K)}$ and ${\rm (\pi,\pi)}$, and to enhance the
3247 signal-to-background ratio at low momentum by requiring the kaon
3248 identification. All proton-tagged tracks are excluded before the
3249 analysis loop on track pairs. More details can be found in
3250 Ref.\cite{CH6Ref:Dainese}.
3252 \item[ ] \textbf{Quarkonia analysis}
3254 Muon tracks stored in the ESD can be analyzed for example by the macro
3255 \texttt{MUONmassPlot\_ESD.C}.
3256 This macro performs an invariant-mass analysis of muon unlike-sign pairs
3257 and calculates the combinatorial background.
3258 Quarkonia \pt and rapidity distributions are built for \Jpsi and \Ups.
3259 This macro also performs a fast single-muon analysis: \pt,
3261 ${\rm \theta}$ vs ${\rm \varphi}$ acceptance distributions for positive
3263 tracks with a maximal-allowed ${\rm \chi^2}$.
3268 \item\textbf{AOD analysis}
3272 Often only a small subset of information contained in the ESD
3273 is needed to perform an analysis. This information
3274 can be extracted and stored in the AOD format in order to reduce
3275 the computing resources needed for the analysis.
3277 The AOD analysis framework implements a set of tools like data readers,
3278 converters, cuts, and other utility classes.
3279 The design is based on two main requirements: flexibility and common
3280 AOD particle interface. This guarantees that several analyses can be
3281 done in sequence within the same computing session.
3283 In order to fulfill the first requirement, the analysis is driven by the
3284 `analysis manager' class and particular analyses are added to it.
3285 It performs the loop over events, which are delivered by an
3286 user-specified reader. This design allows the analyses to be ordered
3287 appropriately if some of them depend on the results of the others.
3289 The cuts are designed to provide high flexibility
3290 and performance. A two-level architecture has been adopted
3291 for all the cuts (particle, pair and event). A class representing a cut
3292 has a list of `base cuts'. Each base cut implements a cut on a
3293 single property or performs a logical operation (and, or) on the result of
3296 A class representing a pair of particles buffers all the results,
3297 so they can be re-used if required.
3301 \item[ ] \textbf{Particle momentum correlations (HBT) -- HBTAN module}
3303 Particle momentum correlation analysis is based on the event-mixing technique.
3304 It allows one to extract the signal by dividing the appropriate
3305 particle spectra coming from the original events by those from the
3308 Two analysis objects are currently implemented to perform the mixing:
3309 the standard one and the one implementing the Stavinsky
3310 algorithm\cite{CH6Ref:Stavinsky}. Others can easily be added if needed.
3312 An extensive hierarchy of the function base classes has been implemented
3313 facilitating the creation of new functions.
3314 A wide set of the correlation, distribution and monitoring
3315 functions is already available in the module. See Ref.\cite{CH6Ref:HBTAN}
3318 The package contains two implementations of weighting algorithms, used
3319 for correlation simulations (the first developed by Lednicky
3320 \cite{CH6Ref:Weights} and the second due to CRAB \cite{CH6Ref:CRAB}), both
3321 based on a uniform interface.
3323 \item[ ] \textbf{Jet analysis}
3325 The jet analysis\cite{CH6Ref:Loizides} is available in the module JETAN. It has a set of
3326 readers of the form \texttt{AliJetParticlesReader<XXX>}, where \texttt{XXX}
3328 \texttt{HLT}, \texttt{KineGoodTPC}, \texttt{Kine}, derived from the base class
3329 \texttt{AliJetParticlesReader}. These
3330 provide a uniform interface to
3331 the information from the
3332 kinematics tree, from HLT, and from the ESD. The first step in the
3333 analysis is the creation of an AOD object: a tree containing objects of
3334 type \texttt{AliJetEventParticles}. The particles are selected using a
3335 cut on the minimal-allowed transverse momentum. The second analysis
3336 step consists of jet finding. Several algorithms are available in the
3337 classes of the type \texttt{Ali<XXX>JetFinder}.
3338 An example of AOD creation is provided in
3339 the \texttt{createEvents.C} macro. The usage of jet finders is illustrated in
3340 \texttt{findJets.C} macro.
3343 \item[ ] \textbf{${\rm V^0}$ AODs}
3345 The AODs for ${\rm V^0}$ analysis contain several additional parameters,
3346 calculated and stored for fast access. The methods of the class {\tt
3347 AliAODv0} provide access to all the geometrical and kinematics
3348 parameters of a ${\rm V^0}$ candidate, and to the ESD information used
3349 for the calculations.
3352 \item[ ] \textbf{MUON}
3354 There is also a prototype MUON analysis provided in
3355 \texttt{AliMuonAnalysis}. It simply fills several histograms, namely
3356 the transverse momentum and rapidity for positive and negative muons,
3357 the invariant mass of the muon pair, etc.
3362 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
3366 \section{Analysis Foundation Library}
3370 The result of the reconstruction chain is the Event Summary Data (ESD)
3371 object. It contains all the information that may
3372 be useful in {\it any} analysis. In most cases only a small subset
3373 of this information is needed for a given analysis.
3374 Hence, it is essential to provide a framework for analyses, where
3375 the user can extract only the information required and store it in
3376 the Analysis Object Data (AOD) format. This is to be used in all their
3377 further analyses. Proper data preselection allows one to speed up
3378 the computation time significantly. Moreover, the interface of the ESD classes is
3379 designed to fulfill the requirements of the reconstruction
3380 code. It is inconvenient for most of analysis algorithms,
3381 in contrast to the AOD one. Additionally, the latter can be customized
3382 to the needs of particular analysis, if it is only required.
3384 We have developed the analysis foundation library that
3385 provides a skeleton framework for analyses, defines AOD data format
3386 and implements a wide set of basic utility classes which facilitate
3387 the creation of individual analyses.
3388 It contains classes that define the following entities:
3391 \item AOD event format
3395 \item Analysis manager class
3396 \item Base class for analyses
3402 \item Other utility classes
3405 It is designed to fulfill two main requirements:
3408 \item \textbf{Allows for flexibility in designing individual analyses}
3409 Each analysis has its most performing solutions. The most trivial example is
3410 the internal representation of a particle momentum: in some cases the Cartesian coordinate system is preferable and in other cases - the cylindrical one.
3411 \item \textbf{All analyses use the same AOD particle interface to access the data }
3412 This guarantees that analyses can be chained. It is important when
3413 one analysis depends on the result of the other one, so the latter one can
3414 process exactly the same data without the necessity of any conversion.
3415 It also allows one to carry out many analyses in the same job and consequently, the
3416 computation time connected with
3417 the data reading, job submission, etc. can be significantly reduced.
3420 The design of the framework is described in detail below.
3423 % -----------------------------------------------------------------------------
3427 The \texttt{AliAOD} class contains only the information required
3428 for an analysis. It is not only the data format as they are
3429 stored in files, but it is also used internally throughout the package
3430 as a particle container.
3431 Currently it contains a \texttt{TClonesArray} of particles and
3432 data members describing the global event properties.
3433 This class is expected to evolve further as new analyses continue to be
3434 developed and their requirements are implemented.
3436 % -----------------------------------------------------------------------------
3438 \subsection{Particle}
3440 \texttt{AliVAODParticle} is a pure virtual class that defines a particle
3442 Each analysis is allowed to create its own particle class
3443 if none of the already existing ones meet its requirements.
3444 Of course, it must derive from \texttt{AliVAODParticle}.
3445 However, all analyses are obliged to
3446 use the interface defined in \texttt{AliVAODParticle} exclusively.
3447 If additional functionality is required, an appropriate
3448 method is also added to the virtual interface (as a pure virtual or an empty one).
3449 Hence, all other analyses can be run on any AOD, although the processing time
3450 might be longer in some cases (if the internal representation is not
3453 We have implemented the standard concrete particle class
3454 called \texttt{AliAODParticle}. The momentum is stored in the
3455 Cartesian coordinates and it also has the data members
3456 describing the production vertex. All the PID information
3457 is stored in two dynamic arrays. The first array contains
3458 probabilities sorted in descending order,
3459 and the second one - corresponding PDG codes (Particle Data Group).
3460 The PID of a particle is defined by the data member which is
3461 the index in the arrays. This solution allows for faster information
3462 access during analysis and minimizes memory and disk space consumption.
3465 % -----------------------------------------------------------------------------
3469 The pair object points to two particles and implements
3470 a set of methods for the calculation of the pair properties.
3471 It buffers calculated values and intermediate
3472 results for performance reasons. This solution applies to
3473 quantities whose computation is time consuming and
3474 also to quantities with a high reuse probability. A
3475 Boolean flag is used to mark the variables already calculated.
3476 To ensure that this mechanism works properly,
3477 the pair always uses its own methods internally,
3478 instead of accessing its variables directly.
3480 The pair object has a pointer to another pair with the swapped
3481 particles. The existence of this feature is connected to
3482 the implementation of the mixing algorithm in the correlation
3483 analysis package: if particle A is combined with B,
3484 the pair with the swapped particles is not mixed.
3485 In non-identical particle analysis their order is important, and
3486 a pair cut may reject a pair while a reversed one would be
3487 accepted. Hence, in the analysis the swapped pair is also tried
3488 if a regular one is rejected. In this way the buffering feature is
3489 automatically used also for the swapped pair.
3491 % -----------------------------------------------------------------------------
3493 \subsection{Analysis manager class and base class}
3495 The {\it analysis manager} class (\texttt{AliRunAnalysis}) drives all
3496 the process. A particular analysis, which must inherit from
3497 \texttt{AliAnalysis} class, is added to it.
3498 The user triggers analysis by calling the \texttt{Process} method.
3499 The manager performs a loop over events, which are delivered by
3500 a reader (derivative of the \texttt{AliReader} class, see section
3501 \ref{cap:soft:secReaders}).
3502 This design allows one to chain the analyses in the proper order if any
3503 depends on the results of the other one.
3505 The user can set an event cut in the manager class.
3506 If an event is not rejected, the \texttt{ProcessEvent}
3507 method is executed for each analysis object.
3508 This method requires two parameters, namely pointers to
3509 a reconstructed and a simulated event.
3511 The events have a parallel structure, i.e. the corresponding
3512 reconstructed particles and simulated particles have always the same index.
3513 This allows for easy implementation of an analysis where both
3514 are required, e.g. when constructing residual distributions.
3515 It is also very important in correlation simulations
3516 that use the weight algorithm\cite{CH6Ref:Weights}.
3517 By default, the pointer to the simulated event is null,
3518 i.e. like it is in the experimental data processing.
3520 An event cut and a pair cut can be set in \texttt{AliAnalysis}.
3521 The latter one points to two particle cuts, so
3522 an additional particle cut data member is redundant
3523 because the user can set it in this pair cut.
3525 The \texttt{AliAnalysis} class has a feature that allows one to choose
3526 which data the cuts check:
3528 \item the reconstructed (default)
3533 It has four pointers to the method (data members):
3535 \item \texttt{fkPass1} -- checks a particle, the cut is defined by the
3536 cut on the first particle in the pair cut data member
3537 \item \texttt{fkPass2} -- as above, but the cut on the second particle is used
3538 \item \texttt{fkPass} -- checks a pair
3539 \item \texttt{fkPassPairProp} -- checks a pair, but only two particle properties
3542 Each of them has two parameters, namely pointers to
3543 reconstructed and simulated particles or pairs.
3544 The user switches the behavior with the
3545 method that sets the above pointers to the appropriate methods.
3546 We have decided to implement
3547 this solution because it performs faster than the simpler one that uses
3548 Boolean flags and ``if'' statements. These cuts are used mostly inside
3549 multiply nested loops, and even a small performance gain transforms
3550 into a noticeable reduction of the overall computation time.
3551 In the case of an event cut, the simpler solution was applied.
3552 The \texttt{Rejected} method is always used to check events.
3553 A developer of the analysis code must always use this method and
3554 the pointers to methods itemized above to benefit from this feature.
3556 % -----------------------------------------------------------------------------
3558 \subsection{Readers}
3559 \label{cap:soft:secReaders}
3561 A Reader is the object that provides data for an analysis.
3562 \texttt{AliReader} is the base class that defines a pure virtual
3565 A reader may stream the reconstructed and/or the
3566 simulated data. Each of them is stored in a separate AOD.
3567 If it reads both, a corresponding reconstructed and
3568 simulated particle have always the same index.
3570 Most important methods for the user are the following:
3572 \item \texttt{Next} -- It triggers reading of a next event. It returns
3573 0 in case of success and 1 if no more events
3575 \item \texttt{Rewind} -- Rewinds reading to the beginning
3576 \item \texttt{GetEventRec} and \texttt{GetEventSim} -- They return
3577 pointers to the reconstructed and the simulated events respectively.
3580 The base reader class implements functionality for
3581 particle filtering at a reading level. A user can set any
3582 number of particle cuts in a reader and the particle is
3583 read if it fulfills the criteria defined by any of them.
3584 Particularly, a particle type is never certain and the readers
3585 are constructed in the way that all the PID hypotheses (with non-zero
3586 probability) are verified.
3587 In principle, a track can be read with more than one mass
3589 For example, consider a track
3590 which in 55\% is a pion and in 40\% a kaon, and a user wants to read
3591 all the pions and kaons with the PID probabilities higher than
3592 50\% and 30\%, respectively. In such cases two particles
3593 with different PIDs are added to AOD.
3594 However, both particles have the same Unique Identification
3595 number (UID) so it can be easily checked that in fact they are
3598 % Multiple File Sources
3599 \texttt{AliReader} implements a feature that allows one to specify and manipulate
3600 multiple data sources, which are read sequentially.
3601 The user can provide a list of directory names where the data are searched.
3602 The \texttt{ReadEventsFromTo} method allows one to limit the range of events that are read
3603 (e.g. when only one event of hundreds stored in an AOD is of interest).
3605 \texttt{AliReader} has the switch that enables event buffering,
3606 so an event is not deleted and can be quickly accessed if requested again.
3609 Particles within an event are frequently sorted in some way, e.g.
3610 the particle trajectory reconstruction provides tracks sorted according
3611 to their transverse momentum. This leads to asymmetric
3612 distributions where they are not expected. The user can request the
3613 reader to randomize the particle order with \texttt{SetBlend} method.
3616 The AOD objects can be written to disk with the \texttt{AliReaderAOD}
3617 using the static method \texttt{WriteAOD}. As the first
3618 parameter the user must pass the pointer to another reader that
3619 provides AOD objects. Typically it is \texttt{AliReaderESD},
3620 but it can also be another one, e.g. another \texttt{AliReaderAOD}
3621 (to filter out the desired particles from the already existing AODs).
3623 Inside the file, the AODs are stored in a \texttt{TTree}.
3624 Since the AOD stores particles in the clones array, and many particles
3625 formats are allowed, the reading and writing is not straightforward.
3626 The user must specify the particle format to be stored on disk,
3627 because in the general case the input reader can stream AODs with inconsistent
3628 particle formats. Hence, a careful check must be done, because storing
3629 an object of a different type than was specified in the tree leads
3630 to an inevitable crash. If the input AOD has a different particle type than
3631 expected, it is automatically converted. Hence, this method can also be used
3632 for the AOD type conversion.
3634 % -----------------------------------------------------------------------------
3636 \subsection{AODs buffer}
3638 Normally the readers do not buffer the events.
3639 Frequently an event is needed to be kept for further analysis,
3640 e.g. if an uncorrelated combinatorial background is computed.
3641 We have implemented the FIFO (First In First Out) type buffer called
3642 \texttt{AliEventBuffer} that caches the defined number of events.
3644 % -----------------------------------------------------------------------------
3648 The cuts are designed to guarantee the highest flexibility
3649 and performance. We have implemented the same two level architecture
3650 for all the cuts (particle, pair and event).
3651 Cut object defines the ranges of many properties that a particle, a pair or
3652 an event may possess and it also defines a method, which performs the
3653 necessary check. However, usually a user wants to limit
3654 ranges of only a few properties. For speed and robustness reasons,
3655 the design presented in Fig.\ref{cap:soft:partcut} was developed.
3657 The cut object has an array of pointers to
3658 base cuts. The number of entries in the array depends
3659 on the number of the properties the user wants to limit.
3660 The base cut implements checks on a single property.
3661 It implements maximum and minimum values and a virtual method \texttt{Rejected}
3662 that performs a range check of the value returned by pure
3663 virtual method \texttt{GetValue}. Implementation of a concrete
3664 base cut is very easy in most cases: it is enough to
3665 implement \texttt{GetValue} method. The ANALYSIS package
3666 already implements a wide range of base cuts,
3667 and the cut classes have a comfortable interface for
3668 setting all of them. For example it is enough to invoke
3669 the \texttt{SetPtRange(min,max)} method and behind the scenes
3670 a proper base cut is created and configured.
3672 The base cuts performing a logical operation (and,or) on the result of two
3673 other base cuts are also implemented. This way the user can configure basically any
3674 cut in a macro. Supplementary user defined base cuts can be added in the user
3676 In case the user prefers to implement a complicated cut in a single method (class),
3677 he/she can create a dedicated base cut performing all the operations.
3679 The pair cut, in addition to an array of pointers to the base pair
3680 cuts, has two pointers to particle cuts, one for each particle in
3685 \includegraphics[width=0.4\columnwidth, origin=c]{picts/partcuts}
3688 {Cut classes diagram on the example of the particle cut.
3689 \label{cap:soft:partcut}}
3693 \subsection{Other classes}
3695 We have developed a few classes that are used in correlation analyses,
3696 but they can be also useful in the others. The first is the TPC cluster map,
3697 which is the bitmap vector describing at which pad-rows a track has a cluster.
3698 It is used by the anti-splitting algorithm in the particle correlation
3701 Another example is the \class{AliTrackPoints} class, that stores
3702 track space coordinates at requested distances from the center of
3703 the detector. It is used in the particle correlation analysis
3704 by the anti-merging cut.
3705 The coordinates are calculated assuming the helix shape
3706 of a track. Different options that define the way they are computed
3711 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
3715 \section{Data input, output and exchange subsystem of AliRoot}
3717 This section is taken from\cite{PiotrPhD}.
3719 A few tens of different data types is present within AliRoot because
3720 hits, summable digits, digits and clusters are characteristic for each
3721 sub-detector. Writing all of the event data to a single file was
3722 causing a number of limitations.
3723 Moreover, the reconstruction chain introduces rather complicated dependencies
3724 between different components of the framework, which is highly
3725 undesirable from the point of view of software design.
3726 In order to solve both problems, we have designed a set of classes that
3727 manage data manipulation i.e. storage, retrieval and exchange within
3730 It was decided to use the ``white board'' concept, which is a single
3731 exchange object where all data are stored and publicly accessible.
3732 For that purpose I have employed \textbf{TFolder} facility of ROOT.
3733 This solution solves the problem of inter-module dependencies.
3735 There are two most frequently occurring use-cases concerning the way a user deals with the data within the framework:
3737 \item data production -- produce - \textbf{write} - \textbf{unload} (clean)
3738 \item data processing -- \textbf{load} (retrieve) - process - \textbf{unload}
3741 \textbf{Loader}s are the utility classes that encapsulate and
3742 automate the tasks written in bold font.
3743 They limit the user's interaction with the I/O routines to the
3744 necessary minimum, providing friendly and very easy interface,
3745 which for the use-cases considered above, consists of only 3 methods:
3747 \item \texttt{Load} -- retrieves the requested data to the appropriate place in the
3748 white board (folder)
3749 \item \texttt{Unload} -- cleans the data
3750 \item \texttt{Write} -- writes the data
3753 Such an insulation layer has a number of advantages:
3755 \item makes the data access easier for the user.
3756 \item avoids the code duplication in the framework.
3757 \item minimizes the risk of a bug occurrence resulting from the improper I/O management.
3758 The ROOT object oriented data storage extremely simplifies the user interface,
3759 however, there are a few pitfalls that are frequently unknown to an
3763 To make the description more clear we need to introduce briefly
3764 basic concepts and the way the AliRoot program operates.
3765 The basic entity is an event, i.e. all the data recorded by the
3766 detector in a certain time interval plus all the reconstructed information
3767 from these data. Ideally the data are produced by the single collision
3768 selected by a trigger for recording. However, it may happen that the data
3769 from the previous or following events are present because the bunch
3770 crossing rate is higher than the maximum detector frequency (pile-up),
3771 or simply more than one collision occurred within one bunch crossing.
3773 Information describing the event and the detector state is also
3774 stored, like bunch crossing number, magnetic field, configuration, alignment, etc.
3775 In the case of a Monte-Carlo simulated data, information concerning the
3776 generator, simulation parameters is also kept. Altogether this data
3777 is called the \textbf{header}.
3779 For the collisions that produce only a few tracks (best example
3780 are the pp collisions) it may happen that total overhead
3781 (the size of the header and the ROOT structures supporting object oriented
3782 data storage) is non-negligible in comparison with the data itself.
3783 To avoid such situations, the possibility of storing an arbitrary number
3784 of events together within a \textbf{run} is required. Hence, the common data can be
3785 written only once per run and several events can be written to a single file.
3787 It was decided that the data related to different detectors
3788 and different processing phases should be stored in different files.
3789 In such a case only the required data need to be downloaded for an analysis.
3790 It also makes it possible to alter the files easily if required,
3791 for example when a new version of the reconstruction or simulation is needed
3792 to be run for a given detector. Hence, only new files are updated
3793 and all the rest may stay untouched. It is especially important because
3794 it is difficult to erase files in mass storage systems.
3795 This also gives the possibility for an easy comparison of the data produced with
3796 competing algorithms.
3798 All the header data, configuration and management objects
3799 are stored in a separate file, which is usually named galice.root
3800 (for simplicity we will further refer to it as galice).
3802 % -----------------------------------------------------------------------------
3804 \subsection{The ``White Board''}
3806 The folder structure is presented in Fig.~\ref{cap:soft:folderstruct}.
3807 It is subdivided into two parts:
3809 \item \textbf{event data} that have the scope of single event
3810 \item \textbf{static data} that do not change from event to event,
3811 i.e. geometry and alignment, calibration, etc.
3814 During startup of AliRoot the skeleton structure of the ALICE white
3815 board is created. The \texttt{AliConfig} class (singleton) provides all the
3816 functionality that is needed to construct the folder structures.
3818 The event data are stored under a single sub-folder (event folder) named as
3819 specified by the user when opening a session (run). Many sessions can be
3820 opened at the same time, providing that each of them has a unique event
3821 folder name, so they can be distinguished by this name.
3822 This functionality is crucial for superimposing events
3823 on the level of the summable digits, i.e. analog detector response without the noise
3824 contribution (the event merging). It is also useful when two events
3825 or the same event simulated or reconstructed with a competing algorithm,
3826 need to be compared.
3830 \includegraphics[width=0.8\columnwidth, origin=c]{picts/folderstruct}
3833 {The folders structure. An example event is mounted under ``Event'' folder.
3834 \label{cap:soft:folderstruct}}
3837 % -----------------------------------------------------------------------------
3839 \subsection {Loaders}
3841 Loaders can be represented as a four layer, tree like structure
3842 (see Fig.~\ref{cap:soft:loaderdiagram}). It represents the logical structure of
3843 the detector and the data association.
3847 \includegraphics[width=1.0\columnwidth, origin=c]{picts/loaderdiagram}
3850 {Loaders diagram. Dashed lines separate layers serviced by the different types of
3851 the loaders (from top): AliRunLoader, AliLoader, AliDataLoader, AliBaseLoader.
3852 \label{cap:soft:loaderdiagram}}
3858 \item \texttt{AliBaseLoader} -- One base loader is responsible for posting
3859 (finding in a file and publishing in a folder) and writing
3860 (finding in a folder and putting in a file) of a single object.
3861 AliBaseLoader is a pure virtual class because writing and
3862 posting depend on the type of an object. The following concrete classes are currently implemented:
3864 \item \texttt{AliObjectLoader} -- It handles \texttt{TObject}, i.e. basically any object
3865 within ROOT and AliRoot since an object must inherit from
3866 this class to be posted to the white board
3867 (added to \texttt{TFolder}).
3869 \item \texttt{AliTreeLoader} -- It is the base loader for \texttt{TTrees},
3870 which requires special
3871 handling, because they must be always properly
3872 associated with a file.
3874 \item \texttt{AliTaskLoader} -- It handles \texttt{TTask}, which need to be posted to the
3875 appropriate parental \texttt{TTask} instead of \texttt{TFolder}.
3877 \texttt{AliBaseLoader} stores the name of the object it manages in
3878 its base class \class{TNamed} to be able
3879 to find it in a file or folder. The user normally does not need to use
3880 these classes directly and they are rather utility classes employed by
3881 \texttt{AliDataLoader}.
3883 \item \texttt{AliDataLoader} -- It manages a single data type, for example digits for
3884 a detector or kinematics tree.
3885 Since a few objects are normally associated with a given
3886 data type (data itself, quality assurance data (QA),
3887 a task that produces the data, QA task, etc.)
3888 \texttt{AliDataLoader} has an array of \texttt{AliBaseLoaders},
3889 so each of them is responsible for each object.
3890 Hence, \texttt{AliDataLoader} can be configured individually to
3891 meet specific requirements of a certain data type.
3893 A single file contains the data corresponding to a single processing
3894 phase and solely of one detector.
3895 By default the file is named according to the schema
3896 {\it Detector Name + Data Name + .root} but it can be
3897 changed in run-time if needed so the data can be stored in or retrieved
3898 from an alternative source. When needed,
3899 the user can limit the number of events stored in a single file.
3900 If the maximum number is exceeded, a file is closed
3901 and a new one is opened with the consecutive number added
3902 to its name before {\it .root} suffix. Of course,
3903 during the reading process, files are also automatically
3904 interchanged behind the scenes and it is invisible to the user.
3906 The \texttt{AliDataLoader} class performs all the tasks related
3907 to file management e.g. opening, closing,
3908 ROOT directories management, etc.
3909 Hence, for each data type the average file size can be
3910 tuned. It is important because it is undesirable to store small
3911 files on the mass storage systems and on the other hand, all file
3912 systems have a maximum file size allowed.
3915 \item \texttt{AliLoader} -- It manages all the data associated with a
3916 single detector (hits, digits, summable digits, reconstructed points, etc.).
3917 It has an array of \texttt{AliDataLoaders} and each of them manages
3920 The \texttt{AliLoader} object is created by a class representing
3921 a detector (inheriting from \texttt{AliDetector}).
3922 Its functionality can be extended and customized to the needs of a
3923 particular detector by creating a specialized class that derives
3924 from \texttt{AliLoader}, as it was done, for instance, for ITS or PHOS.
3925 The default configuration can be
3926 easily modified either in \texttt{AliDetector::MakeLoader}
3927 or by overriding the method \texttt{AliLoader::InitDefaults}.
3930 \item \texttt{AliRunLoader} -- It is a main handle for data access and manipulation in
3931 AliRoot. There is only one such object in each run.
3932 It is always named {\it RunLoader} and stored
3933 on the top (ROOT) directory of a galice file.
3935 It keeps an array of \texttt{AliLoader}'s, one for each detector.
3936 It also manages the event data that are not associated with any detector
3937 i.e. Kinematics and Header and it utilizes \texttt{AliDataLoader}'s
3940 The user opens a session using a static method \texttt{AliRunLoader::Open}.
3941 This method has three parameters: the file name, event folder name and mode.
3942 The mode can be ``new'' and in this case a file and a run loader are created from scratch.
3943 Otherwise, a file is opened and a run loader is searched for in it.
3944 If successful, the event folder with a provided name
3945 (if such does not exist yet) is created and the structure
3946 presented in Fig.~\ref{cap:soft:folderstruct} is created within the folder.
3948 put in the event folder, so the user can always find it there
3949 and use it for data management.
3951 \texttt{AliRunLoader} provides a simple method \texttt{GetEvent(n)}
3952 to loop over events within a run. Calling it causes all
3953 currently loaded data to be cleaned and the data for
3954 the newly requested event to be automatically posted.
3956 In order to facilitate the way the user interacts with the loaders,
3957 \texttt{AliRunLoader} provides the wide set of shortcut methods.
3958 For example, if digits are required to be loaded, the user can call
3959 \texttt{AliRunLoader::LoadDigits("ITS TPC")}, instead of finding the appropriate
3960 \texttt{AliDataLoader}'s responsible for digits for ITS and TPC,
3961 and then request to load the data for each of them.
3968 \section{Calibration and alignment}
3971 \subsection{Calibration framework}
3974 The calibration framework is based on the following principles:
3978 \item the calibration and alignment database contains ROOT TObjects stored
3981 \item calibration and alignment objects are RUN DEPENDENT objects;
3983 \item the database is READ-ONLY (automatic versioning of the stored
3986 \item three different data stores structures are (currently) available:
3988 \item a GRID folder containing Root files, each one containing one
3989 single Root object. The Root files are created inside a directory tree
3990 defined by the object's name and run validity range;
3992 \item a LOCAL folder containing Root files, each one containing one
3993 single Root object, with a structure similar to the Grid one;
3995 \item a LOCAL Root file containing one or more objects (so-called ``dump''). The
3996 objects are stored into Root TDirectories defined by the
3997 object's name and run range.
4000 \item object storing and retrieval techniques are transparent to the user:
4001 he/she should only specify the kind of storage he/she wants to use (``grid'',
4002 ``local'', ``dump''). Objects are stored and retrieved using the AliCDBStorage
4005 \begin{lstlisting}[language=C++]
4006 Bool_t AliCDBStorage::Put(...)
4008 AliCDBEntry* AliCDBStorage::Get(...)
4011 In addition, multiple objects can be retrieved using:
4013 \begin{lstlisting}[language=C++]
4014 TList* AliCDBStorage::GetAll(...) (returns list of AliCDBEntry objects).
4017 \item During object retrieval, the user has the possibility to retrieve the
4018 highest version of the object or to specify a particular version by means
4019 of one or more selection criteria.
4023 \textbf{Features of the CDB storage classes}
4025 % see the talk here \url{http://indico.cern.ch/conferenceDisplay.py?confId=a055286}
4028 \item MANAGER class AliCDBManager. It is a singleton which handles
4029 the instantiation, usage and destruction of all the storage classes. It
4030 allows the instantiation of more than one storage type at a time, keeping
4031 tracks of the list of active storages. The instantiation of a storage
4032 element is done by means of AliCDBManager public method GetStorage. A
4033 storage element is identified by its "URI" (a string) or by its
4034 "parameters". The set of parameters defining each storage is contained in
4035 its specific \class{AliCDBParam} class (\class{AliCDBGridParam}, \class{AliCDBLocalParam},
4036 \class{AliCDBDumpParam}).
4038 \item Versioning schema. In order to avoid version clashes when objects
4039 are transferred from grid to local and vice versa, we have introduced a
4040 new versioning schema. Basically the objects are defined by TWO version
4041 numbers: a "Grid" version and a "Local" version (subVersion). In local
4042 storage only the local version is increased, while in Grid storage only
4043 the Grid version is increased. When the object is transferred from local
4044 to Grid the Grid version is increased by one; when the object is
4045 transferred from Grid to Local the Grid version is kept and the subVersion
4046 is reset to zero. %You can find a plot of this schema on my talk (page 11).
4048 \item The container class of the object and its metadata
4049 (AliCDBEntry. The metadata of the object has been divided into two
4050 classes: one which contains the data used to store and retrieve the object
4051 ("identity" of the object, AliCDBId) and the other containing the metadata
4052 which is not used during storage and retrieval (AliCDBMetaData).
4054 The AliCDBId object in turn contains:
4056 \item an object describing the name (path) of the object (AliCDBPath). The
4057 path name must have a fixed, three-level directory structure:
4058 "level1/level2/level3"
4059 \item an object describing the run validity range of the object
4061 \item the version and subversion numbers (automatically set during storage)
4062 \item a string (fLastStorage) specifying from which storage the object was
4063 retrieved ("new", "grid", "local", "dump")
4066 The AliCDBId object has two functions:
4068 \item during storage it is used to specify the path and run range of the
4070 \item during retrieval it is used as a "query": it contains the
4071 path of the object, the required run and (if needed) the
4072 version and subversion to be retrieved (if version and/or
4073 subversion are not specified the highest ones are looked for).
4078 \textbf{Some usage examples}
4080 The following use cases are illustrated:
4083 \item A pointer to the single instance of the AliCDBManager class is obtained
4086 \begin{lstlisting}[language=C++]
4087 AliCDBManager::Instance()
4090 \item A storage is activated and a pointer to it is returned using the
4091 \method{AliCDBManager::GetStorage(const char* URI)} method. Here are
4092 some examples of how to activate a storage via an URI string. The
4093 URI's must have a well defined syntax, for example (local cases):
4096 \item "local://DBFolder" to local storage with base directory "DBFolder"
4097 created (if not existing) in the working directory
4099 \item "local://\$ALICE\_ROOT/DBFolder" to local storage with base directory
4100 "\$ALICE\_ROOT/DBFolder" (full path name)
4102 \item"dump://DBFile.root" to Dump storage. The file DBFile.root is looked
4103 for or created in the working directory if the full path is not specified
4105 \item "dump://DBFile.root;ReadOnly" to Dump storage. DBFile.root is
4106 opened in read only mode.
4109 \item Concrete examples (local case):
4111 \begin{lstlisting}[language=C++]
4112 AliCDBStorage *sto =
4113 AliCDBManager::Instance()->GetStorage("local://DBFolder");
4115 AliCDBStorage *dump =
4116 AliCDBManager::Instance()->GetStorage("dump:///data/DBFile.root;ReadOnly");
4119 \item Creation and storage of an object. Example of how an
4120 object can be created and stored in a local database
4123 \item Let's suppose our object is an AliZDCCalibData object (container of
4124 arrays of pedestals constants), whose name is
4125 "ZDC/Calib/Pedestals" and is valid for run 1 to 10.
4127 \begin{lstlisting}[language=C++]
4128 AliZDCCalibData *calibda = new AliZDCCalibData();
4129 // ... filling calib data...
4131 // creation of the AliCDBId object (identifier of the object)
4132 AliCDBId id("ZDC/Calib/Pedestals",1,10);
4134 // creation and filling of the AliCDBMetaData
4135 AliCDBMetaData *md = new AliCDBMetaData();
4136 md->Set... // fill meta data object, see list of setters...
4138 // Activation of local storage
4139 AliCDBStorage *sto =
4140 AliCDBManager::Instance()->GetStorage("local://$HOME/DBFolder");
4142 // put object into database
4143 sto->Put(calibda, id, md);
4145 The object is stored into local file:
4146 \$HOME/DBFolder/ZDC/Calib/Pedestals/Run1\_10\_v0\_s0.root
4148 \item Examples of how to retrieve an object
4150 \begin{lstlisting}[language=C++]
4151 // Activation of local storage
4152 AliCDBStorage *sto =
4153 AliCDBManager::Instance()->GetStorage("local://$HOME/DBFolder");
4155 // Get the AliCDBEntry which contains the object "ZDC/Calib/Pedestals",
4156 // valid for run 5, highest version
4157 AliCDBEntry* entry = sto->Get("ZDC/Calib/Pedestals",5)
4158 // alternatively, create an AliCDBId query and use sto->Get(query) ...
4160 // specifying the version: I want version 2
4161 AliCDBEntry* entry = sto->Get("ZDC/Calib/Pedestals",5,2)
4163 // specifying version and subversion: I want version 2 and subVersion 1
4164 AliCDBEntry* entry = sto->Get("ZDC/Calib/Pedestals",5,2,1)
4167 \item Selection criteria can be also specified using
4168 \method{AliCDBStorage::AddSelection(...)} methods:
4170 \begin{lstlisting}[language=C++]
4171 // I want version 2\_1 for all "ZDC/Calib/*" objects for runs 1-100
4172 sto->AddSelection("ZDC/Calib/*",1,100,2,1);
4173 // and I want version 1\_0 for "ZDC/Calib/Pedestals" objects for runs 5-10
4174 sto->AddSelection("ZDC/Calib/Pedestals",5,10,1,0)
4176 AliCDBEntry* entry = sto->Get("ZDC/Calib/Pedestals",5)
4179 See also: \method{AliCDBStorage::RemoveSelection(...),
4180 RemoveAllSelections(), PrintSelectionList()}
4182 \item Retrieval of multiple objects with \method{AliCDBStorage::GetAll()}
4184 \begin{lstlisting}[language=C++]
4185 TList *list = sto->GetAll("ZDC/*",5)
4189 \item Use of Default storage and Drain storages
4191 AliCDBManager allows one to set pointers to a "default storage" and to a
4192 "drain storage". In particular, if the drain storage is set, all the
4193 retrieved objects are automatically stored into it.
4195 The default storage is automatically set as the first active storage. To
4196 set the default storage to another storage:
4198 \begin{lstlisting}[language=C++]
4199 AliCDBManager::Instance()->SetDefaultStorage("uri")
4202 The default storage can be then used by:
4203 \begin{lstlisting}[language=C++]
4204 AliCDBEntry *entry =
4205 AliCDBManager::Instance()->GetDefaultStorage()->Get(...)
4208 The drain storage can be set in a similar way:
4210 \begin{lstlisting}[language=C++]
4211 AliCDBManager::Instance()->SetDrain("uri")
4214 There are some AliCDBManager public methods to handle the default and
4217 \begin{lstlisting}[language=C++]
4218 Bool_t IsDefaultStorageSet()
4219 void RemoveDefaultStorage()
4224 \item Example of how to use default and drain storage:
4226 \begin{lstlisting}[language=C++]
4227 AliCDBManager::Instance()->SetDefaultStorage("local://$HOME/DBFolder");
4228 AliCDBManager::Instance()->SetDrain("dump://$HOME/DBDrain.root");
4230 AliCDBEntry *entry =
4231 AliCDBManager::Instance()->GetDefaultStorage()->Get("ZDC/Calib/Pedestals",5)
4232 // Retrieved entry is automatically stored into DBDrain.root !
4235 \item To destroy the AliCDBManager instance and all the active storages:
4237 \begin{lstlisting}[language=C++]
4238 AliCDBManager::Instance()->Destroy()
4241 \item Create a local copy of all the alignment objects
4243 \begin{lstlisting}[language=C++]
4244 AliCDBManager* man = AliCDBManager::Instance();
4245 man->SetDefaultStorage(
4246 "alien://folder=/alice/simulation/2006/PDC06/Residual/CDB/");
4248 man->SetDrain("local://$ALICE_ROOT/CDB");
4249 AliCDBStorage* sto = man->GetDefaultStorage();
4252 // All the objects are stored in $ALICE_ROOT/CDB !
4257 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
4259 \section{The Event Tag System}
4261 The event tag system \cite{EventTag} is designed to provide fast
4263 events with the desired characteristics. This task will be performed
4264 first of all by imposing event selection criteria within the analysis
4265 code and then by interacting with software that is designed to provide
4266 a file-transparent event access for analysis. The latter is an
4267 evolution of the procedure that has already been implemented by the
4268 STAR \cite{STAR} collaboration.
4270 In the next sections we will first describe the analysis scheme using
4271 the event tag system. Then we will continue by presenting in detail
4272 the existing event tag prototype. Furthermore, a separate section is
4273 dedicated to the description of the two ways to create the tag files
4274 and their integration in the whole framework \cite{CompTDR}.
4276 \subsection{The Analysis Scheme}
4278 The ALICE collaboration intends to use a system that will reduce the time
4279 and computing resources needed to perform an analysis by providing to
4280 the analysis code just the events of interest as they are defined by
4281 the users' selection criteria. Fig. \ref{analysis} gives a schematic
4282 view of the whole analysis architecture.
4286 \includegraphics[width=10cm]{picts/tagana}
4287 \caption{The selected analysis scheme using the event tag system. }
4291 Before describing the architecture let us first define a few terms
4292 that are listed in this figure:
4296 \item User/Administrator: A typical ALICE user or even the
4297 administrator of the system who wants to create tag files for all or
4298 a few ESDs \cite{CompTDR} of a run.
4301 \item Index Builder: A code with Grid Collector \cite{GC1,GC2}
4302 functionality that allows the creation of compressed bitmap indices
4303 from the attributes listed in the tag files. This functionality will
4304 provide an even faster preselection.
4307 \item Selector: The user's analysis code that derives from the
4308 TSelector class of ROOT \cite{RootSelector}.
4313 The whole procedure can be summarized as follows:
4315 The offline framework will create the tag files, which will hold
4316 information about each Event Summary Data (ESD) file (top left box of
4317 Fig.~\ref{analysis}), as a final step of the whole reconstruction
4318 chain. The creation of the tag files is also foreseen to be performed
4319 by each user in a post process that will be described in the following
4320 sections. These tag files, as will be mentioned in this note, are root
4321 files containing trees of tag objects. Then, following the procedure
4322 flow as it is shown in Fig. \ref{analysis}, the indexing algorithm of
4323 the Grid Collector \cite{GC1,GC2}, the \textit{Index Builder}, will
4324 take the produced tag files and create the compressed bitmap
4325 indices. In parallel, the user will submit a job with some selection
4326 criteria relevant to the corresponding analysis he/she is
4327 performing. These selection criteria will be used in order to query
4328 the produced compressed indices (or as it is done at the moment the
4329 query will be on the tags themselves) and the output of the whole
4330 procedure will be a list of \textit{TEventList} objects grouped by
4331 \textit{GUID}, which is the file's unique identifier in the file
4332 catalog, as it is shown in the middle box of Fig.~\ref{analysis}. This
4333 output will be forwarded to the servers that will interact with the
4334 file catalog in order to retrieve the physical file for each
4335 \textit{GUID} (left part of Fig. \ref{analysis}). The final result
4336 will be passed to a selector \cite{RootSelector} that will process the
4337 list of the events that fulfill the imposed selection criteria and
4338 merge the output into a single object, whether this is a histogram or
4339 a tree or any root object.
4341 The whole implementation implies the existence of an event tag system
4342 that will allow the user to create the tags for each file. This event
4343 tag system is active and has been used inside AliRoot's framework
4344 \cite{aliroot} since June 2005. In the next section we will describe
4345 this system in detail.
4348 \subsection{The Event Tag System}
4350 The event tag system that has been built intends to provide a summary
4351 of the most useful physics information that describes each ESD to the
4352 user. It consists of four levels of information \cite{EventTagWeb}:
4356 \item Run Level: Fields that describe the run conditions and
4357 configurations and are retrieved from Detector Control System (DCS),
4358 Data Acquisition system (DAQ) and offline (Fig. \ref{sources}).
4360 \item LHC Level: Fields that describe the LHC condition per ALICE run
4361 which are retrieved from the DCS (Fig. \ref{sources}).
4363 \item Detector Level: Fields that describe the detector configuration
4364 per ALICE run which are retrieved from the Experimental Control
4365 system (ECS) (Fig. \ref{sources}).
4367 \item Event Level: Fields that describe each event - mainly physics
4368 related information and are retrieved by both offline and the grid
4369 file catalog (Fig. \ref{sources}).
4375 \includegraphics[width=15cm]{picts/tagsources}
4376 \caption{The sources of information for the different levels of the event tag system.}
4380 The corresponding classes that form this system have already been
4381 included in AliRoot's framework under the
4382 \textbf{STEER} module. The output tag files will be root files having
4383 a tree structure \cite{EventTagWeb}.
4385 \underline{Run tags:}
4387 The class that deals with the run tag fields is called
4388 \class{AliRunTag}. One \class{AliRunTag} object is associated to
4391 \underline{LHC tags:}
4393 The class that deals with the LHC tag fields is called
4394 \class{AliLHCTag}. One \class{AliLHCTag} object is associated to
4397 \underline{Detector tags:}
4399 The class that deals with the detector tag fields is called
4400 \class{AliDetectorTag}. The information concerning the detector
4401 configuration per ALICE run will be described in the ECS database
4402 (Fig. \ref{sources}). One \class{AliDetectorTag} object is associated
4405 \underline{Event tags:}
4407 The class that handles the event tag fields is called
4408 \class{AliEventTag}. The values of these fields, as mentioned before,
4409 will be mainly retrieved from the ESDs although there are some fields
4410 the values of which will come from the grid file catalog. The number
4411 of \class{AliEventTag} objects which are associated to each file is
4412 equal to the number of events that are stored inside the initial ESD
4415 \subsection{The Creation of the Tag Files}
4417 As it was mentioned in a previous section, the creation of the tag
4418 files will be the first step of the whole procedure. Two different
4419 scenarios were decided:
4423 \item \textbf{On the fly creation}: The creation of the tag file comes
4424 as a last step of the reconstruction procedure.
4426 \item \textbf{Post creation}: After the ESDs have been transferred to
4427 the ALICE file catalog \cite{AliEn}, every user has the
4428 possibility to run this post process and create his/her own tag
4433 \subsubsection{The on the fly creation scenario}
4435 As mentioned before, the on the fly creation of the tag files is
4436 implemented in such a way that the tags are filled as a last step of
4437 the reconstruction chain. This process is treated inside the
4438 \class{AliReconstruction} class. Thus, exactly after the creation of
4439 the ESD, the file is passed as an argument to the
4440 \method{AliReconstruction::CreateTags(TFile *file)} method. Inside
4441 this method empty \class{AliRunTag} and \class{AliEventTag} objects
4442 are created. The next step is to loop over the events listed in the
4443 ESD file and finally fill the run and event level information. The
4444 naming convention followed for the output tag file is:
4445 \textbf{Run}\textbf{\textit{RunId}}.\textbf{Event}\textbf{\textit{FirstEventId}}\_\textbf{\textit{LastEventId}}.\textbf{ESD.tag.root}
4448 \subsubsection{The post creation scenario}
4450 The post creation procedure provides the possibility to every user to
4451 create and store the tag files at any time \cite{EventTagWeb}. The
4452 post creation of the tag files implies the following steps:
4456 \item The reconstruction code finishes and several ESD files are created.
4458 \item These files are stored then in ALICE's file catalog \cite{AliEn}.
4460 \item Then the administrator or even every user for the purpose of his
4461 private analysis, can loop over the produced ESDs and create the
4462 corresponding tag files.
4464 \item These files can either be stored locally or can be stored in the
4465 file catalog \cite{AliEn}.
4467 \item As a final step, a user can choose to create a single merged tag
4468 file from all the previous ones.
4472 What a user has to do in order to create the tag files using this
4473 procedure depends on the location of the input AliESDs.root
4474 files. Detailed instructions on how to create tag files for each
4475 separate case will be given in the following sections. In general a
4476 user has to perform the following steps:
4480 \item Generate the input that provides information about the location
4481 of the AliESDs.root files: this can be the result of a query to the
4482 file catalog (\emph{TGridResult} \cite{RootTGridResult} - grid
4483 stored ESDS, an upper level local directory - locally stored ESDs or
4484 even a text file - CERN Analysis Facility (CAF) stored ESDs
4487 \item Loop over the entries of the given input (\emph{TGridResult},
4488 \emph{local path}, \emph{text file}) and create the tag file for
4491 \item Either store the files locally or in the grid's file catalog.
4493 \item Merge the tag files into one and store it accordingly (locally
4494 or in the file catalog) \cite{RootApi}.
4498 Fig.~\ref{posttag} has a schematic view of these functionalities.
4503 \includegraphics[width=15cm]{picts/tagpost}
4504 \caption{A schematic view of the architecture of the post creation of the tag files.}
4508 The class that addresses this procedure is the \class{AliTagCreator}. The main methods of the class and their corresponding functionalities are described in the following lines:
4512 \item \method{AliTagCreator()}: The default constructor of the
4513 class. It is used to initialize the private members.
4514 \item \method{void SetStorage(Int\_t storage)}: Allows the user to
4515 define the place where the tag files will be stored. In general
4516 there are two possibilities: the tags can either be stored locally
4517 (storage = 0) or in the file catalog (storage = 1). If the user
4518 defines some other value then an error message appears and the
4521 \item \method{void SetSE(const char *se)}: This method can be used in
4522 the case where the files will be stored in the grid. It allows the
4523 user to define the desired storage element. If not selected the
4524 default storage element will be used.
4526 \item \method{void SetGridPath(const char *gridpath)}: This method may
4527 be used in the case where the files will be stored in the grid. It
4528 allows the user to define the grid path under which the files will
4529 be stored. If not selected the tag files will be stored in the home
4530 directory of the user in the file catalog.
4532 \item \method{Bool\_t ReadGridCollection(TGridResult *result)}: This
4533 method is used when creating tag files from ESDs that are stored in
4534 the file catalog. It takes as an input the result of the query to
4535 the file catalog \textit{TGridResult} and loops over the
4536 corresponding entries. For each one the
4537 \method{AliTagCreator::CreateTags(TFile *f, const char* guid, const char* md5, const char*turl, Long64\_t size, Int\_t Counter)}
4538 protected method will be called to create the tag files that will be
4541 \item \method{Bool\_t ReadCAFCollection(const char* filename)}: This
4542 method is used when creating tag files from ESDs that are stored in
4543 the CERN Analysis Facility (CAF)\cite{CAF}. It takes as an input a
4544 text file that has all the information about the location of the
4545 files within the storage element of the CAF. For each one, the
4546 \method{AliTagCreator::CreateTags(TFile *f, const char* filepath,
4547 Int\_t Counter)} protected method will be called to create the tag
4548 files that will be stored accordingly.
4550 \item \method{Bool\_t ReadLocalCollection(const char* localpath)}:
4551 This method is used when creating tag files from ESDs that are
4552 stored locally. It takes as an input the upper directory where the
4553 ESD files are stored. The system assumes that one level down there
4554 are several subdirectories where the AliESDs.root are stored. The
4555 method searches the file system and when it finds an ESD file the
4556 \method{AliTagCreator::CreateTags(TFile *f, const char* filepath, Int\_t Counter)}
4557 protected method will be called to create the tag
4558 files that will be stored accordingly.
4560 \item \method{void CreateTag(TFile* file, const char *guid, const char *md5, const char *turl, Long64\_t size, Int\_t Counter)}:
4561 Protected method that is called inside the
4562 \method{AliTagCreator::ReadGridCollection(TGridResult *result)}
4563 method. Creates the tag file and stores it locally if \method{AliTagCreator::SetStorage(0)}
4564 or in AliEn if \method{AliTagCreator::SetStorage(1)}.
4566 \item \method{void CreateTag(TFile* file, const char *filepath, Int\_t Counter)}:
4567 Protected method that is called inside the
4568 \method{AliTagCreator:: ReadCAFCollection(const char* filename)} or
4569 the \method{AliTagCreator:: ReadLocalCollection(const char* filepath)}
4570 method. Creates the tag file and stores it locally if
4571 \method{AliTagCreator::SetStorage(0)} or in AliEn if
4572 \method{AliTagCreator::SetStorage(1)}.
4574 \item \method{Bool\_t MergeTags()}: Chains all the tags regardless of
4575 the location (locally stored or in the grid) and merges them by
4576 creating a single tag file having a name:
4577 \textbf{Run}\textbf{\textit{RunId}}.\textbf{Merged}.\textbf{ESD.tag.root}.
4578 This file is then stored either locally or in the grid according to
4579 the value set in the \method{SetStorage} method.
4585 \subsubsection{Usage of AliRoot classes}
4587 The following lines intend to give an example on how to use the
4588 \textbf{AliTagCreator} class in order to create tags. Additional
4589 information can be found in \cite{EventTagWeb}. There are three
4590 different cases depending on the location where the AliESDs.root files
4596 \item Locally stored AliESDs.root files.
4598 \item CAF stored AliESDs.root files.
4600 \item Grid stored AliESDs.root files.
4604 We will address the three different cases separately.
4606 \underline{Locally stored AliESDs.root}
4608 We assume that for debugging or source code validation reasons, a user
4609 has a few AliESDs.root files stored locally. The files are stored
4610 under \texttt{\$HOME/PDC06/pp}. One level down, the directory structure can
4616 \item xxx/AliESDs.root
4617 \item yyy/AliESDs.root
4618 \item zzz/AliESDs.root
4622 \noindent where xxx is the directory name which can be something like
4623 \emph{Run1, Run2} etc or even the run number. In order to create the
4624 tag files for this case we need to create an empty
4625 \class{AliTagCreator} object. The next step is to define whether the
4626 produced tags will be stored locally or in the grid. If the second
4627 option is chosen, then the user should define the SE and the
4628 corresponding grid path where the tag files will be stored. If the
4629 first option is chosen, then the files will be stored locally in
4630 his/her working directory. Finally the call of the
4631 \method{AliTagCreator::ReadLocalCollection(const char* filepath)}
4632 allows the user to query the local file system and create the tag
4638 \begin{lstlisting}[language=C++]
4639 //create an AliTagCreator object
4640 AliTagCreator *t = new AliTagCreator();
4641 //Store the tag files locally
4643 //Query the file system, create the tags and store them
4644 t->ReadLocalCollection("/home/<username>/PDC06/pp");
4645 //Merge the tags and store the merged file
4649 \underline{CAF stored AliESDs.root}
4651 In the case where the ESD files are stored in the CAF, then we take as
4652 an input the text file that has the information about the location of
4653 the files in the storage element of the system \cite{EventTagWeb,
4654 CAF}. The next lines, where we assume that this input file is called
4655 \emph{ESD.txt} and is located in the working directory, indicate the
4656 steps that one has to follow:
4659 \begin{lstlisting}[language=C++]
4660 //create an AliTagCreator object
4661 AliTagCreator *t = new AliTagCreator();
4662 //Store the tag files in AliEn's file catalog
4664 //Read the entries of the file, create the tags and store them
4665 t->ReadCAFCollection("ESD.txt");
4666 //Merge the tags and store the merged file
4670 \underline{GRID stored AliESDs.root}
4672 In the case where the ESD files are stored in the file catalog, then
4673 the first thing a user needs to have is a ROOT version compiled with
4674 AliEn support. Detailed information on how to do this, can be found in
4675 \cite{RootApi}. Then we need to invoke the AliEn's API services
4676 \cite{RootApi} and use as an input a query to the file catalog
4677 (\class{TGridResult}). The following lines give an example of the
4681 \begin{lstlisting}[language=C++]
4682 //connect to AliEn's API services
4683 TGrid::Connect("alien://pcapiserv01.cern.ch:10000","<username>");
4684 //create an AliTagCreator object
4685 AliTagCreator *t = new AliTagCreator();
4686 //Query the file catalog and get a TGridResult
4687 TGridResult* result =
4688 gGrid->Query("/alice/cern.ch/user/p/pchrista/PDC06/pp/*",
4689 "AliESDs.root","","");
4690 //Store the tag files in AliEn's file catalog
4692 //Define the SE where the tag files will be stored
4693 t->SetSE("ALICE::CERN::se01");
4694 //Define the grid's path where the tag files will be stored
4695 t->SetGridPath("PDC06/Tags");
4696 //Read the TGridResult, create the tags and store them
4697 t->ReadGridCollection(result);
4698 //Merge the tags and store the merged file
4704 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
4709 \section{Kalman filter}
4710 Kalman filtering is quite a general and powerful method for statistical
4711 estimations and predictions. The conditions for its
4712 applicability are the following. A certain `system' is
4713 determined at any moment in time $t_k$ by a state vector $x_k$. The state
4714 vector varies with time according to an evolution
4716 \[ x_k = f_k(x_{k-1}) + \epsilon_k . \]
4717 It is supposed that $f_k$ is
4718 a known deterministic function and $\epsilon_k$ is a random vector of intrinsic
4719 `process noise' which has a zero mean value ($\langle\epsilon_k\rangle = 0$) and a known
4720 covariance matrix (${\rm cov}(\epsilon_k) = Q_k$). Generally, only some function
4721 $h_k$ of the state vector can be observed, and the result of the
4722 observation $m_k$ is
4723 corrupted by a `measurement noise' $\delta_k$:
4724 \[ m_k = h_k(x_k) + \delta_k. \]
4725 The measurement noise is supposed to be unbiased ($\langle\delta_k\rangle = 0$) and have a
4726 definite covariance matrix (${\rm cov}(\delta_k) = V_k$). In many cases, the
4727 measurement function $h_k$ can be represented by a
4728 certain matrix $H_k$:
4729 \[ m_k = H_kx_k + \delta_k .\]
4731 If, at a certain time $t_{k-1}$, we are given
4732 some estimates of the state vector $\tilde{x}_{k-1}$ and of
4733 its covariance matrix $\tilde{C}_{k-1} = {\rm cov}(\tilde{x}_{k-1}-x_{k-1})$,
4735 these estimates to the next time slot $t_k$ by means of formulas
4736 (this is called `prediction'):
4738 \tilde{x}_k^{k-1} &=& f_k(\tilde{x}_{k-1}) \nonumber \\
4739 \tilde{C}_k^{k-1} &=& F_k\tilde{C}_{k-1}F_k^T + Q_k\mbox{,\ \ \ \ }
4740 F_k=\frac{\displaystyle\partial f_k}{\displaystyle\partial x_{k-1}} .
4744 The value of the predicted $\chi^2$ increment can be also calculated:
4746 (\chi^2)_k^{k-1} = (r_k^{k-1})^T(R_k^{k-1})^{-1}r_k^{k-1}\mbox{,\ \ \ \ }
4747 r_k^{k-1} = m_k - H_k\tilde{x}_k^{k-1}\mbox{,\ \ \ \ }
4748 R_k^{k-1} = V_k + H_k\tilde{C}_k^{k-1}H_k^T .
4752 The number of degrees of freedom is equal to the dimension of the vector $m_k$.
4754 If at the moment $t_k$, together with the results of prediction, we also
4755 have the results of the state vector measurement,
4756 this additional information can be combined with the prediction results
4757 (this is called `filtering'). As a consequence, the estimation of the state
4758 vector improves with respect to the previous step:
4760 \tilde{x}_k &=& \tilde{x}_k^{k-1} + K_k(m_k - H_k\tilde{x}_k^{k-1})\nonumber\\
4761 \tilde{C}_k &=& \tilde{C}_k^{k-1} - K_kH_k\tilde{C}_k^{k-1},
4765 where $K_k$ is the Kalman gain matrix
4767 K_k = \tilde{C}_k^{k-1}H_k^T(V_k + H_k\tilde{C}_k^{k-1}H_k^T)^{-1}.
4770 Finally, the next formula gives us the value of the filtered $\chi^2$ increment:
4772 \chi^2_k = (r_k)^T(R_k)^{-1}r_k\mbox{,\ \ \ \ }
4773 r_k = m_k - H_k\tilde{x}_k\mbox{,\ \ \ \ }
4774 R_k = V_k - H_k\tilde{C}_kH_k^T .
4776 It can be shown that the predicted $\chi^2$ value is equal to the filtered
4779 (\chi^2)_k^{k-1} = \chi^2_k \label{chi=chi} .
4782 The `prediction' and `filtering' steps are repeated as many times as we have
4783 measurements of the state vector.
4785 \section {Bayesian approach for combined particle identification}\label{BayesianPID}
4787 Particle identification over a large momentum range and
4788 for many particle species is often one of the main design
4789 requirements of high energy physics experiments.
4790 The ALICE detectors are able to
4791 identify particles with momenta from 0.1 GeV/$c$ up to
4792 10 GeV/$c$. This can be achieved by combining
4793 several detecting systems that are efficient in some narrower and
4794 complementary momentum sub-ranges. The situation is complicated by
4795 the amount of data to be processed (about $10^7$ events with
4796 about $10^4$ tracks in each). Thus, the particle identification
4797 procedure should satisfy the following
4800 \item It should be as automatic as possible.
4801 \item It should be able to combine PID signals of different nature
4802 ({\it e.g.} $dE/dx$ and time-of-flight measurements).
4803 \item When several detectors contribute to the PID, the procedure must profit
4804 from this situation by providing an improved PID.
4805 \item When only some detectors identify a particle, the signals from the other
4806 detectors must not affect the combined PID.
4807 \item It should take into account the fact that, due to
4808 different event and track selection, the PID depends on the kind of analysis.
4811 In this report we will demonstrate that combining PID signals in a Bayesian way
4812 satisfies all these requirements.
4814 \subsection{Bayesian PID with a single detector}
4815 Let $r(s|i)$ be a conditional probability density function to observe in some
4816 detector a PID signal $s$ if a particle of $i-$type
4817 ($i=e, \mu, \pi, K, p, ...$)
4818 is detected. The probability to be a particle of $i-$type if the signal
4819 $s$ is observed, $w(i|s)$, depends not only on $r(s|i)$, but also
4820 on how often this type of particles is registered in the considered experiment
4821 (\emph{a priori} probability $C_i$ to find this
4822 kind of particles in the detector). The corresponding relation is
4823 given by the Bayes' formula:
4825 \begin{equation}\label{eq:bayes}
4826 w(i|s)={r(s|i) C_i \over \sum_{k=e, \mu, \pi, ...}{r(s|k) C_k}}
4829 Under some reasonable conditions, $C_i$ and $r(s|i)$ are not correlated
4830 so that one can rely on the following approximation:
4832 \item The functions $r(s|i)$ reflect only properties of the detector
4833 (``detector response functions'') and do not depend on
4834 other external conditions like event and track selections.
4835 \item On the contrary, the quantities $C_i$ (``relative concentrations'' of
4836 particles of $i$-type) do not depend on the detector
4837 properties, but do reflect the external conditions, selections {\it etc}.
4840 The PID procedure is done in the following way. First,
4841 the detector response
4842 function is obtained. Second, a value $r(s|i)$ is assigned to
4844 Third, the relative concentrations $C_i$ of particle species are
4845 estimated for a subset of events and tracks selected in a specific
4847 Finally, an array of probabilities $w(i|s)$ is calculated (see Eq.~\ref{eq:bayes}) for each track within the selected
4850 The probabilities $w(i|s)$ are often called PID weights.
4852 The conditional probability density function $r(s|i)$
4853 (detector response function) can be always parameterized with sufficient
4854 precision using available experimental data.
4856 In the simplest approach, the \emph{a priori} probabilities
4857 $C_i$ (relative concentrations of particles of $i$-type) to observe a
4858 particle of $i$-type can be assumed to be equal.
4860 However, in many cases one can do better. Thus, for example in ALICE,
4862 PID in the TPC for the tracks that are registered both in the TPC and
4863 in the Time-Of-Flight detector (TOF), these probabilities
4864 can be estimated using the measured time-of-flight. One simply fills a
4865 histogram of the following quantity:
4867 m={p\over {\beta\gamma}}=p\sqrt{{{c^2t^2}\over{l^2}} - 1},
4869 where $p$ and $l$ are the reconstructed track momentum and length and $t$
4870 is the measured time-of-flight. Such a histogram peaks near the values
4871 $m$ that correspond to the masses of particles.
4873 Forcing some of the $C_i$ to be exactly zeros excludes the
4874 corresponding particle type from the PID analysis and such particles will
4875 be redistributed over other particle classes (see Eq.~\ref{eq:bayes}).
4876 This can be useful for the kinds of analysis when, for the particles
4877 of a certain type, one is not concerned by
4878 the contamination but, at the same time, the efficiency of PID is
4879 of particular importance.
4882 \subsection{PID combined over several detectors}
4883 This method can be easily applied for combining PID measurements
4884 from several detectors. Considering the whole system of $N$ contributing
4885 detectors as a single ``super-detector'' one can write the combined
4886 PID weights $W(i|\bar{s})$ in the form similar to that given by
4887 Eq.~\ref{eq:bayes} :
4889 \begin{equation}\label{eq:bayes1}
4890 W(i|\bar{s})={R(\bar{s}|i) C_i \over \sum_{k=e, \mu, \pi,
4891 ...}{R(\bar{s}|k) C_k}} ,
4893 where $\bar{s}={s_1, s_2, ..., s_N}$ is a vector of PID signals registered in
4894 the first, second and other contributing detectors,
4895 $C_i$ are the \emph{a priori} probabilities to be a particle of the $i$-type
4896 (the same as in Eq.~\ref{eq:bayes}) and
4897 $R(\bar{s}|i)$ is the combined response function of the whole system
4900 If the single detector PID measurements $s_j$ are uncorrelated (which is
4901 approximately true in the case of the ALICE experiment), the
4902 combined response function is the product of the single response functions
4903 $r(s_j|i)$ (the ones in Eq.~\ref{eq:bayes}) :
4905 \begin{equation}\label{eq:resp}
4906 R(\bar{s}|i)=\prod_{j=1}^{N}r(s_j|i).
4909 One obtains the following expression for the PID weights combined over the
4910 whole system of detectors:
4912 \begin{equation}\label{eq:bayes2}
4913 W(i|s_1, s_2, ..., s_N)={\displaystyle C_i \prod_{j=1}^{N}r(s_j|i) \over
4914 \displaystyle \sum_{k=e, \mu, \pi, ...}C_k \prod_{j=1}^{N}r(s_j|k)}
4918 In the program code, the combined response functions $R(\bar{s}|i)$
4919 do not necessarily have to be treated as analytical. They can be ``procedures''
4920 (C++ functions, for example). Also, some additional effects like
4921 probabilities to obtain a mis-measurement (mis-matching) in one or several
4922 contributing detectors can be accounted for.
4924 The formula Eq.~\ref{eq:bayes2} has the following useful features:
4926 \item If for a certain particle momentum one (or several) of the detectors
4927 is not able to identify the particle type ({\it i.e.} $r(s|i)$ are equal
4928 for all $i=e, \mu, ...$), the contribution of such a detector cancels out
4930 \item When several detectors are capable of separating the particle types,
4931 their contributions are accumulated with proper weights, thus providing
4932 an improved combined particle identification.
4933 \item Since the single detector response functions $r(s|i)$ can be obtained
4934 in advance at the calibration step and the combined response
4935 can be approximated by Eq.~\ref{eq:resp}, a part of PID (calculation of
4936 the $R(\bar{s}|i)$ ) can be done track-by-track
4937 ``once and forever'' by the reconstruction software and the results
4938 can be stored in the Event Summary Data. The final PID decision,
4939 being dependent via the \emph{a priori} probabilities $C_i$ on the event
4940 and track selections, is then postponed until the physics analysis of the
4944 \subsection{Stability with respect to variations of the \emph{a priori} probabilities}
4945 Since the results of this PID procedure explicitly depend on the choice
4946 of the \emph{a priori} probabilities $C_i$ (and, in fact, this kind of
4947 dependence is unavoidable in any case), the question of stability of the
4948 results with respect to the almost arbitrary choice of $C_i$ becomes important.
4950 Fortunately, there is always some momentum region where the single detector
4951 response functions for different particle types of at least one of the
4952 detectors do not significantly overlap, and so the stability
4953 is guaranteed. The more detectors enter the combined PID procedure, the wider
4954 this momentum region becomes and the results are more stable.
4956 Detailed simulations using the ALIROOT framework show that results of the
4957 PID combined over all the ALICE central
4958 detectors are, within a few per cent, stable with respect to
4959 variations of $C_i$ up to at least 3~GeV/$c$.
4962 \subsection{Features of the Bayesian PID}
4963 Particle Identification in the ALICE experiment at the LHC can be done in a Bayesian
4964 way. The procedure consists of three parts:
4966 \item First, the single detector PID response functions
4967 $r(s|i)$ are obtained. This is done by the calibration software.
4968 \item Second, for each reconstructed track the combined PID response
4970 is calculated and effects of possible mis-measurements of the PID signals
4971 can be accounted for. The results are written to the Event Summary Data and,
4972 later, are used in all kinds of physics analysis of the data.
4973 This is a part of the reconstruction software.
4974 \item And finally, for each kind of physics analysis, after the corresponding
4975 event and track selection is done, the \emph{a~priori} probabilities $C_i$ to
4976 be a particle of a certain $i$-type within the selected subset are estimated
4977 and the PID weights $W(i|\bar{s})$ are calculated by means of formula
4978 Eq.~\ref{eq:bayes2}. This part of the PID procedure belongs to the
4982 The advantages of the particle identification procedure described here are
4984 \item The fact that, due to different event and track selection, the PID depends
4985 on a particular kind of performed physics analysis is naturally taken into
4987 \item Capability to combine, in a common way, signals from detectors having
4988 quite different nature and shape of the PID response functions (silicon, gas,
4989 time-of-flight, transition radiation and Cherenkov detectors).
4990 \item No interactive multidimensional graphical cuts are involved.
4991 The procedure is fully automatic.
4995 \section{Vertex estimation using tracks}\label{VertexerTracks}
4997 Each track, reconstructed in the TPC and in the ITS,
4998 is approximated with a straight line at the
4999 position of the closest approach to the nominal primary vertex position
5000 (the nominal vertex position is supposed to be known with a precision
5001 of 100--200 $\mu$m).
5003 track pairs $(i,j)$ are considered and for each pair, the center
5004 $C(i,j)\equiv(x_{ij},y_{ij},z_{ij})$ of the segment of minimum approach
5005 between the two lines is found. The coordinates of the primary vertex
5008 % x_{\rm v}={1\over N_{\rm pairs}}\sum_{i,j}x_{ij}\:; \:\:\:\:\:\:
5009 % y_{\rm v}={1\over N_{\rm pairs}}\sum_{i,j}y_{ij}\:; \:\:\:\:\:\:
5010 % z_{\rm v}={1\over N_{\rm pairs}}\sum_{i,j}z_{ij} \:\:\:\:\:\:
5011 x_{\rm v}=\frac{1}{N_{\rm pairs}}\sum_{i,j}x_{ij}\:; \:\:\:\:\:\:
5012 y_{\rm v}=\frac{1}{N_{\rm pairs}}\sum_{i,j}y_{ij}\:; \:\:\:\:\:\:
5013 z_{\rm v}=\frac{1}{N_{\rm pairs}}\sum_{i,j}z_{ij} \:\:\:\:\:\:
5015 where $N_{\rm pairs}$ is the number of track pairs.
5016 This gives an improved estimate of the vertex position.
5018 Finally, the position $\textbf{r}_{\rm v}=(x_{\rm v},y_{\rm v},z_{\rm v})$ of the
5019 vertex is reconstructed minimizing the
5020 $\chi^2$ function (see~Ref.~\cite{VERTEX:cmsvtxnote}):
5023 \chi^2(\textbf{r}_{\rm v})=\sum_i (\textbf{r}_{\rm v}-\textbf{r}_i)^T\,{\bf
5024 V}_i^{-1}(\textbf{r}_{\rm v}-\textbf{r}_i),
5026 where $\textbf{r}_i$ is the global position of the
5027 track $i$ (i.e. the position assigned at the step above)
5028 and $\textbf{V}_i$ is the covariance matrix of the vector $\textbf{r}_i$.
5030 In order not to spoil the vertex resolution by including in the fit tracks that
5031 do not originate from the primary vertex (e.g. strange particle
5032 decay tracks), the tracks giving a
5033 contribution larger than some value $\chi^2_{\rm max}$ to the global $\chi^2$
5034 are removed one-by-one from the sample, until no such tracks are left. The
5035 parameter $\chi^2_{\rm max}$ was tuned,
5036 as a function of the event multiplicity, so as to obtain the best vertex
5038 %%%%%%%%%%%%%%%%%%%%%%
5040 \section{Alignment framework}
5041 \subsection{Basic objects and alignment constants}
5042 The purpose of the \FR is to offer all the
5043 functionality related to storing alignment information, retrieving it from
5044 the Offline Conditions Data Base (OCDB) and consistently applying it to the
5045 ALICE geometry in order to improve the knowledge of the real geometry
5046 by means of the additional information obtained by
5047 survey and alignment procedures, without needing to change the
5048 hard-coded implementation of the detector's geometry.
5049 The \FR\ is based on the \lstinline{AliAlignObj}
5050 base class and its derived classes; each instance of this class is an
5051 \emph{alignment object} storing the so called \emph{alignment
5052 constants} for a single alignable volume, that is the information to
5053 uniquely identify the physical volume (specific instance of the volume
5054 in the geometry tree) to be displaced and to unambiguously describe the
5055 delta-transformation to be applied to that volume.
5056 In the \FR\ an alignment object holds the
5057 following information:
5059 \item a unique volume identifier
5060 \item a unique global index
5061 \item a delta-transformation
5063 In the following we describe the meaning of these variables, how they
5064 are stored and set and the functionality related to them.
5066 \subsubsection{The unique volume identifier}
5067 The unique volume identifier is the character string which allows the user to
5068 access a specific physical volume inside the geometry tree. For the
5069 ALICE geometry (which is a \ROOT geometry) this is the \emph{volume
5070 path}, that is the string containing the names of all physical volumes
5071 in the current branch in the directory tree fashion. For example
5072 \lstinline!/A_1/B_i/.../M_j/Vol_k! identifies the physical volume ``kth
5073 copy of the volume \lstinline!Vol!'' by listing its container volumes;
5074 going from right to left in the path corresponds to going from the
5075 innermost to the outermost containers and from the lowest to the upper
5076 level in the geometry tree, starting from the mother volume
5077 \lstinline!M_j! of the current volume \lstinline!Vol_k! up to the
5078 physical top volume \lstinline!A_1!, the root of the geometry tree.
5080 The unique volume identifier stored by
5081 the alignment object is not the volume path but a ``\emph{symbolic volume
5082 name}'', a string dynamically associated to the corresponding volume
5083 path by a hash table built at the finalization stage of the geometry
5084 (the physical tree needs to be already closed) and stored as part of it.
5085 The choice of the symbolic volume names is constrained only by the
5086 following two rules:
5088 \item each name has to contain a leading sub-string indicating its
5089 pertaining sub-detector; in this way the uniqueness of the name inside
5090 the sub-detector scope guarantees also its uniqueness in the global
5091 scope of the whole geometry.
5092 \item each name has to contain the intermediate alignable levels,
5093 separated by a slash ('\texttt{/}'), in case some other physical
5094 volume on the same geometry branch is in turn alignable.
5096 There are two considerable advantages deriving from the choice to
5097 introduce the symbolic volume names as unique volume identifiers
5098 stored in the alignment object in place of the volume path:
5100 \item the unique volume identifier has no direct dependency on the
5101 geometry; in fact changes in the volume paths reflect in changes in
5102 the hash table associating the symbolic names to them, which is
5103 built and stored together with the geometry. As a consequence the
5104 validity of the alignment objects is not affected by changes in the
5105 geometry and hence is in principle unlimited in time.
5106 \item The unique volume identifier can be freely chosen, according
5107 to the two simple rules mentioned above, thus allowing one to assign
5108 meaningful names to the alignable volumes, as opposed to the volume
5109 paths which inevitably are long strings of often obscure names.
5111 The geometry then provides the user with some methods to query the hash table
5112 linking the symbolic volume names to the
5113 corresponding volume paths; in particular the user can
5115 \item get the number of entries in the table
5116 \item retrieve a specific entry (symbolic volume name,
5117 volume path) either by index or by symbolic name.
5121 \subsubsection{The unique global index}
5122 Among the alignment constants we store a numerical index uniquely
5123 identifying the volume to which those constants refer; being a
5124 \lstinline!short!, this numerical index has 16 bits available which
5125 are filled from the index of the ``layer'' or sub-detector to which
5126 the volume belongs (5 bits) and from the ``local index'', i.e. the
5127 index of the volume itself inside the sub-detector (the remaining 11
5128 bits). Limiting the range of sub-detectors to $2^5=32$ and of
5129 alignable volumes inside each sub-detector to $2^{11}=2048$, this
5132 The aim of indexing the alignable volumes is fast iterative access
5133 during alignment procedures. The framework allows one to easily
5134 browse the look-up table mapping indexes to symbolic volume names by
5135 means of methods which return the symbolic volume name for the present
5136 object given either its global index or both its layer and local
5137 indexes. For these methods to work the only condition is that at least
5138 one instance of an alignment object has been created, so that the
5139 static method building the look-up table has been called.
5142 \subsubsection{The delta-transformation}
5144 The delta-transformation is the transformation which defines the
5145 displacement to be applied to the given physical volume.
5146 During the alignment process we want to correct the hard-coded, ideal
5147 position of some volume, initially fixed according to the
5148 engineers' drawings, by including the survey and alignment information
5149 related to those volumes; we say that we want to align the ideal
5150 geometry. With this aim, we need here to describe how the
5151 delta-transformations are defined and thus how they have to be produced and
5152 applied to the ideal geometry in order to correct the global and local
5153 ideal transformations into global and local aligned transformations.
5155 For the representation of the delta-transformation there are several
5156 possible conventions and choices, in particular:
5158 \item to use the local-to-global or the global-to-local convention and
5159 ``active-'' or ``passive-transformations'' convention;
5160 \item to use the local or global delta-transformation to be stored in the
5161 alignment object and to be passed when setting the object itself;
5162 \item the convention used for the Euler angles representing the
5163 delta-transformation;
5164 \item the use of a matrix or of a minimal set of parameters (three
5165 orthogonal shifts plus three Euler angles) to be stored in the
5166 alignment object and to be passed when setting the object itself.
5168 The choices adopted by the framework are explained in the remainder of
5171 \underline{Use of the global and local transformations}
5173 Being based on the \ROOT geometry package, the framework keeps the
5174 ``local-to-global'' convention; this means that the \emph{global
5175 transformation} for a given volume is the matrix $\mathcal{G}$ which, as in
5176 \tgeo, transforms the local vector $\vec{l}$ (giving the position in the
5177 local reference system, i.e. the reference system associated to that volume)
5178 into the global vector $\vec{g}$, giving the position in the
5179 global (or master) reference system (``MARS''), according to:
5182 \vec{g} = \mathcal{G}\vec{l}
5184 Similarly, the \emph{local transformation} matrix is the
5185 matrix $\mathcal{L}$ which transforms a local vector $\vec{l}$ into the
5186 corresponding vector in the mother volume RS, $\vec{m}$, according to:
5189 \vec{m} = \mathcal{L}\vec{l}
5191 If furthermore $\mathcal{M}$ is the global transformation for the
5192 mother volume, then we can write:
5193 $$ \vec{g} = \mathcal{G}\vec{l} = \mathcal{M}\vec{m} = \mathcal{M}\mathcal{L}\vec{l} $$
5194 Recursively repeating this argument to all the parent volumes, that is
5195 to all the volumes in the branch of the geometry tree which
5196 contains the given volume, we can write:
5197 $$ \vec{g} = \mathcal{G}\vec{l} = \mathcal{M}_0...\mathcal{M}_n\mathcal{L}\vec{l} $$
5198 which shows that the global matrix is given by the product of the
5199 matrices of the parent volumes on the geometry branch, from the
5200 uppermost to the lowest level.
5202 Let's now denote by $\mathcal{G}$ and $ \mathcal{L}$ the ideal global
5203 and local transformations of a specific physical volume (those
5204 relative to the reference geometry) and let's put
5205 the superscript '$^a$' to the corresponding matrices in the aligned
5206 geometry, so that $\mathcal{G}^a$ and $ \mathcal{L}^a$ are the aligned
5207 global and aligned local transformations which relate the position of
5208 a point in the local RS to its position in the global RS and in the
5209 mother's RS respectively, after the volume has been aligned, according
5212 \vec{g} &=& \mathcal{G}^a\vec{l} \label{eq:l2ga}\\
5213 \vec{m} &=& \mathcal{L}^a\vec{l} \label{eq:l2ma}
5215 Eqs.~(\ref{eq:l2ga})--(\ref{eq:l2ma}) are the equivalent of
5216 Eqs.~(\ref{eq:l2g})--(\ref{eq:l2m}) after the volume has
5219 There are two possible choices for expressing the
5220 delta-transformation; either we use:
5222 \item the \emph{global delta-transformation} $\Delta^g$, that is the
5223 transformation to be applied to the ideal global transformation
5224 $\mathcal{G}$ in order to get the aligned global transformation:
5225 \begin{equation}\label{eq:gadeltag}
5226 \mathcal{G}^a=\Delta^g\mathcal{G}=\Delta^g\mathcal{M}\mathcal{L}
5229 \item the \emph{local delta-transformation} $\Delta^l$, that is the
5230 transformation to be applied to the ideal local transformation
5231 $\mathcal{L}$ to get the aligned local transformation:
5232 \begin{equation}\label{eq:laldelta}
5233 \mathcal{L}^a=\mathcal{L}\Delta^l
5237 Eqs.~(\ref{eq:gadeltag})--(\ref{eq:laldelta}) allow us to rewrite:
5238 \begin{equation} %\label{eq:}
5239 \mathcal{G}^a=\mathcal{M}\mathcal{L}^a
5244 \Delta^g\mathcal{M}\mathcal{L}=\mathcal{M}\mathcal{L}\Delta^l
5248 \Delta^g &=& \mathcal{G}\Delta^l\mathcal{G}^{-1} \label{eq:dltodg}\\
5249 \Delta^l &=& \mathcal{G}^{-1}\Delta^g\mathcal{G} \label{eq:dgtodl}
5251 to relate global and local alignment.
5253 The alignment object stores as delta-transformation the global
5254 delta-transformation; nevertheless both global and local
5255 delta-transformations can be used to construct the alignment object or
5256 to set it. The reason for this flexibility in the user interface
5257 is that the local RS is sometimes the most natural one for expressing the
5258 misalignment, as e.g. in the case of a volume rotated around its
5259 centre; however the use of the local delta-transformation is sometimes
5260 error-prone; in fact the user has to be aware that he is referring to
5261 the same local RS which is defined in the hard-coded geometry when
5262 positioning the given volume, while the local RS used by simulation or
5263 reconstruction code can in general be different. In case the
5264 alignment object is constructed or its delta-transformation is set by
5265 means of the local delta-transformation, the framework will then use
5266 Equation~(\ref{eq:dltodg}) to perform the conversion into global
5267 alignment constants.
5269 As for the choice of storing a symbolic volume name instead of the
5270 volume path as volume identifier, we would like to also make the
5271 delta-transformation stored in the alignment objects
5272 independent from the geometry, keeping thus their validity
5273 unconstrained. This is possible if we store in the geometry itself a
5274 matrix for the ideal global transformation related to that volume
5275 (this possibility is offered by the class storing the link between
5276 symbolic volume names and volume paths, see Section~\ref{sec:ROOT}).
5279 \underline{Matrix or parameters for the delta-transformation}
5281 The global delta-transformation can be saved both
5283 \item as a \lstinline!TGeoMatrix! and
5284 \item as a set of six parameters, out of which three define the
5285 translation, by means of the shifts in
5286 the three orthogonal directions, and three define the rotation
5287 by means of three Euler angles.
5289 These two cases correspond to choosing one of the following two
5290 \lstinline{AliAlignObj}-derived classes:
5292 \item \lstinline!AliAlignObjMatrix!: stores a \lstinline!TGeoHMatrix!
5293 \item \lstinline!AliAlignObjParams!: stores six double precision floating
5296 While they store the alignment constants in different forms, the two classes
5297 share the same user interface, which allows the user to set the
5298 delta-transformation both via the matrix and via the six parameters
5302 \underline{Choice for the Euler angles}
5304 A general rotation in three-dimensional Euclidean space can be
5305 decomposed into and represented by three successive rotations about
5306 the three orthogonal axes. The three angles characterizing the three
5307 rotations are called Euler angles; however there are several
5308 conventions for the Euler angles, depending on the axes
5309 about which the rotations are carried out, right/left-handed systems,
5310 (counter)clockwise direction of rotation, order of the three rotations.
5312 The convention chosen in the \FR\ for the Euler angles is the
5313 ``\emph{xyz convention}'' (see Ref.~\cite{mathworld}), also known as
5314 \emph{pitch-roll-yaw} or \emph{Tait-Bryan angles}, or \emph{Cardano
5315 angles} convention. Following this convention the general rotation is
5316 represented as a composition of a rotation around the $z$-axis (yaw)
5317 with a rotation around the $y$-axis (pitch) with a rotation around the
5318 $x$-axis (roll). There is an additional choice to fully specify the
5319 convention used, since the angles have opposite sign depending on whether we
5320 consider them bringing the original RS in coincidence with the aligned
5321 RS (``active-transformation'' convention) or the other way round
5322 (``passive-transformation'' convention). In order to maintain our
5323 representation fully consistent with the \lstinline!TGeoRotation!
5324 methods we choose the ``active-transformation'' convention, that is
5325 the opposite of the convention chosen by the already referenced
5326 description of the pitch-roll-yaw angles (Ref.~\cite{mathworld}).
5328 To summarise, the three angles --- $\psi$, $\theta$, $\phi$
5329 --- used by the framework to represent the rotation part of the
5330 delta-transformation, unambiguously represent a rotation $\mathcal{A}$
5331 as the composition of the following three rotations:
5333 \item a rotation $\mathcal{D}$ by an angle $\phi$ (yaw) around the $z$-axis
5334 $$ \mathcal{D} = \left( \begin{array}{ccc}
5335 \cos\phi & -\sin\phi & 0 \\
5336 \sin\phi & \cos\phi & 0 \\
5338 \end{array} \right)$$
5339 \item a rotation $\mathcal{C}$ by an angle $\theta$ (pitch) around the $y$-axis
5340 $$ \mathcal{C} = \left( \begin{array}{ccc}
5341 \cos\theta & 0 & \sin\theta \\
5343 -\sin\theta & 0 & \cos\theta
5344 \end{array} \right)$$
5345 \item a rotation $\mathcal{B}$ by an angle $\psi$ (roll) around the $x$-axis
5346 $$ \mathcal{B} = \left( \begin{array}{ccc}
5348 0 & \cos\psi & -\sin\psi \\
5349 0 & \sin\psi & \cos\psi
5350 \end{array} \right) $$
5355 $$ \mathcal{A} = \mathcal{B} \mathcal{C} \mathcal{D} =
5356 \left( \begin{array}{ccc}
5358 \cos\theta \cos\phi & -\cos\theta \sin\phi & \sin\theta \\
5359 \sin\psi \sin\theta \cos\phi + \cos\psi \sin\phi & -\sin\psi
5360 \sin\theta \sin\phi + \cos\psi \cos\phi & -\cos\theta \sin\psi \\
5361 -\cos\psi \sin\theta \cos\phi + \sin\psi \sin\phi & \cos\psi
5362 \sin\theta \sin\phi + \sin\psi \cos\phi & \cos\theta \cos\psi
5364 \end{array} \right) $$
5366 \subsection{Use of \ROOT geometry functionality}
5368 The ALICE geometry is implemented via the \ROOT geometrical modeller
5369 (often referred to as \tgeo), a framework for building, browsing,
5370 navigating and visualising a detector's geometry, which is independent
5371 from the Monte Carlo transport (see Ref.~\cite{tgeo} and the dedicated
5372 chapter in Ref.~\cite{rootUG}). This choice allows the \FR\ to take
5373 advantage of using \ROOT features such as its I/O,
5374 histogramming, browsing, GUI, \ldots. However, the main advantage of
5375 this choice is that the \FR\ can provide its specific functionality as
5376 a rather thin layer on
5377 top of already existing features which allow one to consistently and
5378 efficiently manage the complexity related to modifying a tree of some
5379 millions of physical nodes.\\ The \FR\ takes in particular advantage of
5382 \item to save the geometry to a file and upload it from a file
5383 \item to check the geometry for overlaps and extrusions exceeding a
5385 \item to query the geometry for the global and local matrix of a given
5387 \item to make a physical node out of a specific physical volume and
5388 change the local and global transformation associated to it, while
5389 keeping track of the original transformations
5390 \item to store a hash table of links between symbolic volume
5391 names and volume paths which can be queried in an efficient way
5393 Concerning this last issue, the class representing the objects linking
5394 the symbolic volume names and the volume paths provides in addition
5395 the possibility to store a transformation. This feature turns out to be very
5396 useful if it is used to store the matrix relating the RS stored in the
5397 geometry (global transformation matrix for that volume) with the RS
5398 used in simulation and reconstruction (the two things in general differ).
5401 \subsection{Application of the alignment objects to the geometry}
5403 The base class provides a method to apply the single alignment object
5404 to the geometry present in memory, loaded from file or constructed;
5405 the method accesses the geometry to change the position of the volume
5406 referred by the unique volume identifier according to
5407 Equation~(\ref{eq:gadeltag}). However this method alone cannot
5408 guarantee that the single object is applied correctly; the most common
5409 case is indeed the application of a set of alignment objects. In this
5410 case the framework has to check that the application of each object in
5411 the set does not invalidate the application of the others; when
5412 applying a set of alignment objects during a simulation or
5413 reconstruction run the framework transparently performs the following
5416 \item in case of alignment objects referring to physical volumes on
5417 the same branch, they have to be applied starting from the one which
5418 refers to a volume at the uppermost level in the physical tree
5419 (container volume) down to the one at the lowest level (contained
5420 volume). On the contrary, if the contained volume is displaced first
5421 the subsequent displacement of the container volume would change its
5422 temporarily correct position;
5423 \item in no case two alignment objects should be applied to the same
5424 physical volume separately.
5426 The reason for the first limitation is in short that the position of
5427 the contained volumes depends on the position of the container volumes.
5428 The reason for the second limitation is that the delta-transformations
5429 are relative to the ideal global position of the given volume (see
5430 Eq.~(\ref{eq:gadeltag})), which must therefore not have been previously
5431 modified by the application of another alignment object referring to
5433 The tools used by the framework for checking that the two previous
5434 conditions are fulfilled are respectively:
5436 \item sorting the alignment objects based on a method which compares
5437 the depth of the physical
5438 volume to which the given alignment object refers.
5439 \item combining more alignment objects referring to the same volume
5440 before applying them to the geometry.
5442 During a simulation or reconstruction run the user
5443 can consistently apply the objects to the geometry, having the two
5444 checks described above transparently performed.
5446 An additional check is performed during a simulation or reconstruction
5447 run to verify that the
5448 application of the alignment objects did not introduce big overlaps
5449 or extrusions which would invalidate the geometry (hiding some
5450 sensitive parts or changing the material budget during tracking). This
5451 check is done by means of the overlap checker provided by the
5452 \ROOT geometry package; a default threshold below which overlaps and
5453 extrusions are accepted is fixed; the \tgeo\ overlap checker favours speed
5454 (checks the whole ALICE geometry in few seconds) at the expense of
5455 completeness, thus some rare overlap topologies can occasionally escape
5458 \subsection{Access to the Conditions Data Base}
5459 \label{sec:CDBaccess}
5460 An important task of the \FR\ is to intermediate between the
5461 simulation and reconstruction jobs and the objects residing on the
5462 Offline Conditions Data Base (OCDB), both for defining a default
5463 behaviour and for managing specific use cases. The OCDB is filled
5464 with conditions (calibration and alignment) objects; the alignment
5465 objects in the OCDB are presently created by macros to reproduce two
5466 possible misalignment scenarios: the initial misalignment, according
5467 to expected deviations from the ideal geometry just after the
5468 sub-detectors are positioned and the residual misalignment, trying to
5469 reproduce the deviations which can not be resolved by the alignment
5470 procedures. The next step is to fill the OCDB with the alignment
5471 objects produced from the survey procedures, as soon as survey data
5472 are available to the offline. Finally these objects and those produced
5473 by alignment procedures will fill the OCDB to be used by the
5474 reconstruction of the real data in its different passes.
5476 The OCDB stores the conditions making use of the database capabilities
5477 of a file system three-level directory structure; the run and the
5478 version are stored in the file name.
5479 If not otherwise specified, the OCDB returns the last version of the
5480 required object and in case of an object being uploaded it is
5481 automatically saved with increased version number.
5483 The \FR\ defines a specific default storage from which to load the
5484 alignment objects for all the sub-detectors; the user can set a
5485 different storage, either residing locally or on the grid if he has
5486 the permissions to access it. The definition of a non-default storage
5487 for the OCDB, as well as its deactivation can also be given for
5488 specific sub-detectors only. The user can also just switch off the
5489 loading of alignment objects from an OCDB storage or as a side-effect
5490 of passing to the simulation or reconstruction run an array of
5491 alignment objects available in memory.
5493 \subsection{Summary}
5496 The \FR, based on the \ROOT geometry package (see
5497 Refs.~\cite{rootUG,tgeo}), aims at allowing a consistent and flexible
5498 management of the alignment information, while leaving the related
5499 complexity as much as possible hidden to the user. The framework
5502 \item save and retrieve the alignment constants relative
5503 to a specific alignable volume (automatic retrieval from a
5504 Conditions Data Base is handled);
5505 \item apply the alignment objects to the
5506 current (ideal) geometry;
5507 \item get from the current geometry the
5508 alignment object for a specified alignable volume;
5509 \item transform positions in the ideal global
5510 RS into positions in the aligned global RS;
5511 \item set the objects by means of both global and local
5512 delta-transformations.
5514 These functionalities are built on the \lstinline!AliAlignObj! base
5515 class and its two derived classes, which store the
5516 delta-transformation by means of the transformation matrix
5517 (\lstinline!AliAlignObjMatrix!) or by means of the six transformation
5518 parameters (\lstinline!AliAlignObjParams!). The user interface is the
5519 same in both cases; it fixes the representation of the
5520 delta-transformation while leaving several choices to the user which
5521 have been explained in this note together with their implementation.
5523 The \FR\ fixes the following conventions:
5525 \item the transformations are interpreted according to the
5526 local-to-global convention;
5527 \item the delta-transformation stored is the global delta-transformation;
5528 \item the three parameters to specify the rotation are the
5529 roll-pitch-yaw Euler angles, with the ``active-transformations'' convention.
5531 The framework fixes also the following default behaviours in
5532 simulation and reconstruction runs:
5534 \item objects are loaded from a default Conditions Data Base
5535 storage, on a sub-detector basis;
5536 \item the set of loaded objects is sorted for assuring the consistency
5537 of its application to the geometry;
5538 \item the ideal and aligned geometries are saved.
5540 Several choices related to the delta-transformation are left to the user, who:
5542 \item can choose to set the alignment object either by passing a
5543 \lstinline!TGeoMatrix! or by giving the six parameters which uniquely
5544 identify the global delta-transformation;
5545 \item can choose if he wants the object to store either the
5546 \lstinline!TGeoMatrix!, using an \lstinline!AliAlignObjMatrix! or the six
5547 parameters, using an \lstinline!AliAlignObjParams!;
5548 \item can choose if the transformation he is passing is the global
5549 delta-transformation or the local delta-transformation; in this
5550 latter case the framework converts it to the global one to set the
5551 internal data members.
5554 %%%%%%%%%%%%%%%%%%%%%%
5560 \item[ADC]Analogue to Digital Conversion/Converter
5561 \item[AFS]Andrew File System\\{\footnotesize \url{http://en.wikipedia.org/wiki/Andrew_file_system}}
5562 \item[ALICE]A Large Ion Collider Experiment\\{\footnotesize \url{http://aliceinfo.cern.ch/}}
5563 \item[AOD]Analysis Object Data
5564 \item[API]Application Program Interface
5565 \item[ARDA]Architectural Roadmap towards Distributed Analysis\\{\footnotesize \url{http://lcg.web.cern.ch/LCG/activities/arda/arda.html}}
5566 \item[AliRoot]ALIce offline framework\\{\footnotesize \url{http://aliceinfo.cern.ch/offline}}
5567 \item[CA]Certification Authority
5568 \item[CASTOR]CERN Advanced STORage\\{\footnotesize \url{http://castor.web.cern.ch/castor/}}
5569 \item[CDC]Computing Data Challenge
5570 \item[CDF]Collider Detector at Fermilab
5571 \item[CE]Computing Element\\{\footnotesize \url{http://aliceinfo.cern.ch/static/AliEn/AliEn_Instalation/ch06s07.html}}
5572 \item[CERN]European Organization for Nuclear Research\\{\footnotesize \url{http://www.cern.ch}}
5573 \item[CINT]C/C++ INTerpreter that is embedded in ROOT\\{\footnotesize \url{http://root.cern.ch/root/Cint.html}}
5574 \item[CRT]Cosmic Ray Trigger, the official name is ACORDE\\{\footnotesize \url{}}
5575 \item[CVS]Concurrent Versioning System\\{\footnotesize \url{http://www.nongnu.org/cvs/}}
5576 \item[DAQ]Data AcQuisition system\\{\footnotesize \url{http://cern.ch/alice-daq}}
5577 \item[DATE]Data Acquisition and Test Environment\\{\footnotesize \url{http://cern.ch/alice-daq}}
5578 \item[DCA]Distance of Closest Approach
5579 \item[DCS]Detector Control System\\{\footnotesize \url{http://alicedcs.web.cern.ch/alicedcs/}}
5580 \item[DPMJET]Dual Parton Model monte carlo event generator\\{\footnotesize \url{http://sroesler.web.cern.ch/sroesler/dpmjet3.html}}
5581 \item[EGEE]Enabling Grid for E-sciencE project\\{\footnotesize \url{http://public.eu-egee.org/}}
5582 \item[EMCal]Electromagnetic Calorimeter
5583 \item[ESD]Event Summary Data
5584 \item[FLUKA]A fully integrated particle physics MonteCarlo simulation package\\{\footnotesize \url{http://www.fluka.org/}}
5585 \item[FMD]Forward Multiplicity Detector\\{\footnotesize \url{http://fmd.nbi.dk/}}
5586 \item[FSI]Final State Interactions
5587 \item[GAG]Grid Application Group\\{\footnotesize \url{http://project-lcg-gag.web.cern.ch/project-lcg-gag/}}
5588 \item[GUI]Graphical User Interface
5589 \item[GeVSim]fast Monte Carlo event generator, based on MEVSIM
5590 \item[Geant4]A toolkit for simulation of the passage of particles through matter\\{\footnotesize \url{http://geant4.web.cern.ch/geant4/}}
5591 \item[HBT]Hanbury Brown and Twiss
5592 \item[HEP]High Energy Physics
5593 \item[HEPCAL]HEP Common Application Area
5594 \item[HERWIG]monte carlo package for simulating Hadron Emission Reactions With Interfering Gluons\\{\footnotesize \url{http://cernlib.web.cern.ch/cernlib/mc/herwig.html}}
5595 \item[HIJING]Heavy Ion Jet Interaction Generator
5596 \item[HLT]High Level Trigger\\{\footnotesize \url{http://wiki.kip.uni-heidelberg.de/ti/HLT/index.php/Main_Page}}
5597 \item[HMPID]High Momentum Particle IDentification\\{\footnotesize \url{http://alice-hmpid.web.cern.ch/alice-hmpid/}}
5598 \item[ICARUS]Imaging Cosmic And Rare Underground Signals\\{\footnotesize \url{http://pcnometh4.cern.ch/}}
5599 \item[IP]Internet Protocol
5600 \item[ITS]Inner Tracking System; collective name for SSD, SPD and SDD
5601 \item[JETAN]JET ANalysis module
5602 \item[LCG]LHC Computing Grid\\{\footnotesize \url{http://lcg.web.cern.ch/LCG/}}
5603 \item[LDAP]Lightweight Directory Access Protocol
5604 \item[LHC]Large Hadron Collider\\{\footnotesize \url{http://lhc.web.cern.ch/lhc/}}
5605 \item[LSF]Load Sharing Facility\\{\footnotesize \url{http://wwwpdp.web.cern.ch/wwwpdp/bis/services/lsf/}}
5606 \item[MC]Monte Carlo
5607 \item[MoU]Memorandum of Understanding
5608 \item[OCDB]Offline Calibration DataBase\\{\footnotesize \url{http://aliceinfo.cern.ch/Offline/Activities/ConditionDB.html}}
5609 \item[OO]Object Oriented
5610 \item[OS]Operating System
5611 \item[PAW]Physics Analysis Workstation\\{\footnotesize \url{http://paw.web.cern.ch/paw/}}
5612 \item[PDC]Physics Data Challenge
5613 \item[PDF]Particle Distribution Function
5614 \item[PEB]Project Execution Board
5615 \item[PHOS]PHOton Spectrometer
5616 \item[PID]Particle IDentity/IDentification
5617 \item[PMD]Photon Multiplicity Detector\\{\footnotesize \url{http://www.veccal.ernet.in/~pmd/ALICE/alice.html}}
5618 \item[PPR]Physics Performance Report\\{\footnotesize \url{http://alice.web.cern.ch/Alice/ppr/}}
5619 \item[PROOF]Parallel ROOT Facility\\{\footnotesize \url{http://root.cern.ch/root/doc/RootDoc.html}}
5620 \item[PWG]Physics Working Group\\{\footnotesize \url{http://aliceinfo.cern.ch/Collaboration/PhysicsWorkingGroups/}}
5621 \item[PYTHIA]event generator
5622 \item[QA]Quality Assurance
5623 \item[QCD]Quantum ChromoDynamics
5624 \item[QS]Quantum Statistics
5625 \item[RICH]Ring Imaging CHerenkov\\{\footnotesize \url{http://alice-hmpid.web.cern.ch/alice-hmpid/}}
5626 \item[ROOT]A class library for data analysis\\{\footnotesize \url{http://root.cern.ch}}
5627 \item[RTAG]Requirements and Technical Assessment Group
5628 \item[SDD]Silicon Drift Detector
5629 \item[SDTY]Standard Data Taking Year
5630 \item[SE]Storage Element
5631 \item[SI2k]SpecInt2000 CPU benchmark\\{\footnotesize \url{http://cerncourier.com/articles/cnl/1/11/9/1}}
5632 \item[SLC]Scientific Linux CERN\\{\footnotesize \url{http://linuxsoft.cern.ch/}}
5633 \item[SOA]Second Order Acronym
5634 \item[SPD]Silicon Pixel Detector\\{\footnotesize \url{http://www.pd.infn.it/spd/}}
5635 \item[SSD]Silicon Strip Detector
5636 \item[TDR]Technical Design Report\\{\footnotesize \url{http://alice.web.cern.ch/Alice/TDR/}}
5637 \item[TOF]Time Of Flight Detector\\{\footnotesize \url{http://alice.web.cern.ch/Alice/Projects/TOF/}}
5638 \item[TPC]Time Projection Chamber\\{\footnotesize \url{http://alice.web.cern.ch/Alice/Projects/TPC/}}
5639 \item[TRD]Transition Radiation Detector\\{\footnotesize \url{http://www-alice.gsi.de/trd/index.html}}
5640 \item[UI]User Interface
5641 \item[UID]Unique IDentification number
5642 \item[URL]Universal Resource Locator
5643 \item[VMC]Virtual Monte Carlo
5644 \item[VO]Virtual Organization
5645 \item[VOMS]Virtual Organization Membership Service
5646 \item[WAN]Wide Area Network
5647 \item[XML]Extensible Markup Language\\{\footnotesize \url{http://www.w3.org/XML/}}
5648 \item[ZDC]Zero Degree Calorimeter
5652 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
5654 \begin{thebibliography}{99}
5656 \bibitem{PPR} CERN/LHCC 2003-049, ALICE Physics Performance Report,
5657 Volume 1 (7 November 2003); \\
5658 ALICE Collaboration: F. Carminati {\it et al.}, J. Phys. G: Nucl.
5659 Part. Phys. \textbf{30} (2004) 1517--1763.
5661 \bibitem{CompTDR} CERN-LHCC-2005-018, ALICE Technical Design Report:
5662 Computing, ALICE TDR 012 (15 June 2005).
5664 \bibitem{ROOT} \url{http://root.cern.ch}
5667 \url{http://wwwasdoc.web.cern.ch/wwwasdoc/geant_html3/geantall.html}
5669 \bibitem{FLUKA} \url{http://www.fluka.org}
5671 \bibitem{Geant4} \url{http://cern.ch/geant4}
5673 \bibitem{MC:PYTH} H.-U.~Bengtsson and T.~Sjostrand, Comput. Phys.
5674 Commun. \textbf{46} (1987) 43; \newline the code can be found in
5675 \url{http://nimis.thep.lu.se/~torbjorn/Pythia.html} \newline
5676 T.~Sjostrand, Comput. Phys. Commun. \textbf{82} (1994) 74; \newline
5677 the code can be found in
5678 \url{http://www.thep.lu.se/~torbjorn/Pythia.html}
5680 \bibitem{MC:HIJING} X.~N.~Wang and M.~Gyulassy, Phys. Rev.
5681 \textbf{D44} (1991) 3501. \newline M.~Gyulassy and X.~N.~Wang,
5682 Comput. Phys. Commun. \textbf{83} (1994) 307-331. \newline The code
5683 can be found in \url{http://www-nsdth.lbl.gov/~xnwang/hijing/}
5686 \url{http://alien.cern.ch}
5688 \bibitem{SLC} \url{http://linux.web.cern.ch/linux}
5690 \bibitem{RedHat} \url{http://www.redhat.com}
5692 \bibitem{Fedora} \url{http://fedora.redhat.com}
5694 \bibitem{Linux} \url{http://www.linux.org}
5696 \bibitem{gcc} \url{http://gcc.gnu.org}
5699 \url{http://www.intel.com/cd/software/products/asmo-na/eng/compilers/index.htm}
5702 \url{http://www.intel.com/cd/software/products/asmo-na/eng/vtune/index.htm}
5705 \url{http://www.intel.com/products/processor/itanium2/index.htm}
5707 \bibitem{AMD} \url{http://www.amd.com}
5709 \bibitem{CVS} \url{http://www.cvshome.org}
5711 \bibitem{CVSManual} \url{http://ximbiot.com/cvs/manual}
5713 \bibitem{CLHEP} \url{http://cern.ch/clhep}
5715 \bibitem{CASTOR2} \url{http://cern.ch/castor}
5717 \bibitem{MC:DPMJET} J.~Ranft, Phys. Rev. \textbf{D 51} (1995) 64.
5720 \url{http://arxiv.org/abs/hep-ph/0312045}
5722 \bibitem{MC:HERWIG} HERWIG 6.5, G. Corcella, I.G. Knowles, G. Marchesini, S. Moretti,
5723 K. Odagiri, P. Richardson, M.H. Seymour and B.R. Webber, JHEP 0101
5724 (2001) 010 [hep-ph/0011363]; hep-ph/0210213
5729 \bibitem{MC:CDF} F.~Abe {\it et al.}, (CDF Collaboration), Phys. Rev.
5730 Lett.\textbf{61} (1988) 1819.
5732 \bibitem{MC:LUND} B.~Andersson, {\it et al.,} Phys. Rep. \textbf{97}
5735 \bibitem{MC:FRITIOF} B.~Andersson, {\it et al.,} Nucl. Phys.
5736 \textbf{B281} (1987) 289; \newline B.~Nilsson-Almqvist and
5737 E.~Stenlund, Comput. Phys. Commun. \textbf{43} (1987) 387.
5739 \bibitem{MC:DPM} A.~Capella, {\it et al.,} Phys. Rep. \textbf{236}
5742 \bibitem{MC:HIJINGparam} A.~Morsch,
5743 \url{http://home.cern.ch/~morsch/AliGenerator/AliGenerator.html} and
5744 \url{http://home.cern.ch/~morsch/generator.html}
5746 \bibitem{MC:NA35FIN} NA35 Collaboration, T.~Alber et al., \newblock Z.
5747 Phys. \textbf{C 64} (1994) 195.
5749 \bibitem{MC:Alber98} NA35 Collaboration, T.~Alber et al., \newblock
5750 Eur. Z. Phys. \textbf{C2} (1998) 643.
5752 \bibitem{MC:Kharzeev96} D.~Kharzeev: \newblock Phys. Lett. \textbf{B
5755 \bibitem{MC:Capella96} A.~Capella and B.~Kopeliovich, \newblock Phys.
5756 Lett. \textbf{B381} (1996) 325.
5758 \bibitem{MC:Barrett77} R.~V. Barrett and D.~F. Jackson, \newblock {\em
5759 Nuclear sizes and structure,} \newblock Clarendon Press, Oxford,
5762 \bibitem{MC:Roesler96b} S.~Roesler, R.~Engel and J.~Ranft, \newblock
5763 Phys. Rev. \textbf{D57} (1998) 2889.
5765 \bibitem{MC:Roesler99} S.~Roesler, \newblock {personal communication},
5768 \bibitem{MC:Gluck95a} M.~Gl\"uck, E.~Reya and A.~Vogt: \newblock Z.\
5769 Phys.\ \textbf{C67} (1995) 433.
5771 \bibitem{MC:Gluck98a} M.~Gl\"uck, E.~Reya and A.~Vogt, \newblock Eur.\
5772 Phys.\ J. \textbf{C5} (1998) 461.
5774 \bibitem{MC:MEVSIM} L. Ray and R.S. Longacre, STAR Note 419.
5776 \bibitem{MC:GEVSIM} S. Radomski and Y. Foka, ALICE Internal Note 2002-31.
5778 \bibitem{MC:TMEVSIM}
5779 \url{http://radomski.home.cern.ch/~radomski/AliMevSim.html}
5781 \bibitem{MC:Radomski} \url{http://home.cern.ch/~radomski}
5783 \bibitem{MC:HBTproc} L. Ray and G.W. Hoffmann. Phys. Rev. \textbf{C
5784 54}, (1996) 2582, Phys. Rev. \textbf{C60}, (1999) 014906.
5786 \bibitem{MC:PiotrSk} P.~K.~Skowro\'nski, ALICE HBT Web Page,
5787 \url{http://aliweb.cern.ch/people/skowron}
5789 \bibitem{MC:POSCANCER} A.M.~Poskanzer and S.A.~Voloshin, Phys. Rev.
5790 \textbf{C 58}, (1998) 1671.
5792 \bibitem{MC:AlscherHT97} A.~Alscher, K.~Hencken, D.~Trautmann, and
5793 G.~Baur. \newblock Phys. Rev.~A \textbf{55}, (1997) 396.
5795 \bibitem{MC:Sadovsky} K.~Hencken, Y.~Kharlov, and S.~Sadovsky, ALICE
5796 Internal Note 2002-27.
5798 \bibitem{RootUsersGuide}
5799 \url{http://root.cern.ch/root/doc/RootDoc.html}
5801 \bibitem{CoordinateSystem} L.Betev, ALICE-PR-2003-279
5803 \bibitem{MC:billoir} P.~Billoir; NIM \textbf{A225} (1984) 352,
5804 P.~Billoir {\it et al.};
5805 NIM \textbf{A241} (1985) 115, \\
5806 R.Fruhwirth; NIM \textbf{A262} (1987) 444, P.Billoir; \textbf{CPC}
5809 \bibitem{PPRVII} CERN/LHCC 2005-049, ALICE Physics Performance Report,
5810 Volume 2 (5 December 2005);
5812 \bibitem{VERTEX:cmsvtxnote} V.~Karim\"aki, CMS Note 1997/051 (1997).
5814 \bibitem{CH6Ref:gShell}
5815 \url{http://alien.cern.ch/download/current/gClient/gShell\_Documentation.html}
5817 \bibitem{CH6Ref:gLite} \url{http://glite.web.cern.ch/glite}
5819 \bibitem{CH6Ref:PROOF} \url{http://root.cern.ch/root/PROOF.html}
5821 \bibitem{CH6Ref:ITS_TDR} CERN/LHCC 99-12.
5823 \bibitem{CH6Ref:TPC_TDR} CERN/LHCC 2000-001.
5825 \bibitem{CH6Ref:Dainese} A.~Dainese, PhD Thesis, University of Padova,
5826 2003, [arXiv:nucl-ex/0311004].
5828 \bibitem{CH6Ref:Stavinsky} A.~Stavinsky \textit{et~al.}, NUKLEONIKA
5829 \textbf{49} (Supplement 2) (2004) 23--25;
5830 \url{http://www.ichtj.waw.pl/ichtj/nukleon/back/full/vol49_2004/v49s2p023f.pdf}
5831 \bibitem{CH6Ref:HBTAN} P.K.~Skowro\'nski for ALICE Collaboration,
5832 [arXiv:physics/0306111].
5834 \bibitem{CH6Ref:Weights} R.~Lednick\'y and V.L.~Lyuboshitz, Sov. J.
5835 Nucl. Phys. \textbf{35} (1982) 770.
5837 \bibitem{CH6Ref:CRAB}
5838 \url{http://www.nscl.msu.edu/~pratt/freecodes/crab/home.html}
5840 \bibitem{CH6Ref:Loizides}
5841 C.~Loizides, PhD Thesis, University of Frankfurt, 2005,
5842 [arXiv:nucl-ex/0501017].
% NOTE(review): entry below has no \bibitem key --- confirm intended key
5845 P.~Skowro\'nski, PhD Thesis.
5847 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
5848 \bibitem{EventTag} P.~Christakoglou, P.~Hristov, ALICE-INT-2006-023
5850 \bibitem{STAR} \url{http://www.star.bnl.gov/}
5852 \bibitem{GC1} A.~Shoshani, A.~Sim, and J.~Wu, ``Storage resource
5853 managers: Middleware components for grid storage'', in Proceedings of
5854 the Nineteenth IEEE Symposium on Mass Storage Systems, 2002 (MSS 2002).
5856 \bibitem{GC2} K.~Wu \textit{et~al.}, ``Grid collector: An event catalog with
5857 automated file management''.
5859 \bibitem{RootSelector}
5860 \url{http://agenda.cern.ch/fullAgenda.php?ida=a055638}
5862 \bibitem{EventTagWeb} \url{http://pcaliweb02.cern.ch/Offline/Analysis/RunEventTagSystem/}
5865 \url{http://agenda.cern.ch/askArchive.php?base=agenda&categ=a045061&id=a045061s0t5/transparencies} \\
5866 \url{http://project-arda-dev.web.cern.ch/project-arda-dev/alice/apiservice/AA-UserGuide-0.0m.pdf}
5868 \bibitem{RootTGridResult}
5869 \url{http://root.cern.ch/root/htmldoc//TGridResult.html}
5871 \bibitem{CAF} \url{http://pcaliweb02.cern.ch/Offline/Analysis/CAF/}
5873 \bibitem{mathworld} \url{http://mathworld.wolfram.com/EulerAngles.html}
% NOTE(review): entry below has no \bibitem key --- confirm intended key
5876 \ROOT User's Guide, \url{http://root.cern.ch/root/doc/RootDoc.html}
5878 \bibitem{tgeo} R.~Brun, A.~Gheata and M.~Gheata, The \ROOT geometry package, NIM \textbf{A502} (2003) 676--680.
5880 \end{thebibliography}