// AliRoot (u/mrichter/AliRoot.git) — EVE/Reve/Plex.h
// Chunked container of fixed-size atoms: VoidCPlex and the typed CPlex<T>.
// $Header$
2
3#ifndef REVE_PLEX_H
4#define REVE_PLEX_H
5
6#include <Reve/Reve.h>
7
8#include <TObject.h>
9#include <TArrayC.h>
10
11#include <vector>
12
13namespace Reve {
14
15/**************************************************************************/
16// VoidCPlex
17/**************************************************************************/
18
19class VoidCPlex
20{
21private:
22 VoidCPlex(const VoidCPlex&); // Not implemented
23 VoidCPlex& operator=(const VoidCPlex&); // Not implemented
24
25protected:
26 Int_t fS; // Size of atom
27 Int_t fN; // Number of atoms in a chunk
28
29 Int_t fSize; // Size of container, number of atoms
30 Int_t fVecSize; // Number of allocated chunks
31 Int_t fCapacity; // Available capacity within the chunks
32
33 std::vector<TArrayC*> fChunks; // Memory blocks
34
35 void ReleaseChunks();
36
37public:
38 VoidCPlex();
39 VoidCPlex(Int_t atom_size, Int_t chunk_size);
40 virtual ~VoidCPlex();
41
42 void Reset(Int_t atom_size, Int_t chunk_size);
43 void Refit();
44
45 Int_t S() const { return fS; }
46 Int_t N() const { return fN; }
47
48 Int_t Size() const { return fSize; }
49 Int_t VecSize() const { return fVecSize; }
50 Int_t Capacity() const { return fCapacity; }
51
df6ec2c7 52 Char_t* Atom(Int_t idx) const { return fChunks[idx/fN]->fArray + idx%fN*fS; }
53 Char_t* Chunk(Int_t chk) const { return fChunks[chk]->fArray; }
54 Int_t NAtoms(Int_t chk) const { return (chk < fVecSize-1) ? fN : (fSize-1)%fN + 1; }
b28acb03 55
56 Char_t* NewAtom();
57 Char_t* NewChunk();
58
df6ec2c7 59
60 // Iterators
61
62 struct iterator
63 {
64 VoidCPlex *fPlex;
65 Char_t *fCurrent;
66 Int_t fAtomIndex;
67 Int_t fNextChunk;
68 Int_t fAtomsToGo;
69
70 iterator(VoidCPlex* p) :
71 fPlex(p), fCurrent(0), fAtomIndex(-1), fNextChunk(0), fAtomsToGo(0) {}
72 iterator(VoidCPlex& p) :
73 fPlex(&p), fCurrent(0), fAtomIndex(-1), fNextChunk(0), fAtomsToGo(0) {}
74
75 Bool_t next();
76 void reset() { fCurrent = 0; fNextChunk = fAtomsToGo = 0; }
77
78 Char_t* operator()() { return fCurrent; }
79 Char_t* operator*() { return fCurrent; }
80 Int_t index() { return fAtomIndex; }
81 };
82
83
b28acb03 84 ClassDef(VoidCPlex, 1)
85};
86
87inline Char_t* VoidCPlex::NewAtom()
88{
89 Char_t *a = (fSize >= fCapacity) ? NewChunk() : Atom(fSize);
90 ++fSize;
91 return a;
92}
93
df6ec2c7 94inline Bool_t VoidCPlex::iterator::next()
95{
96 if (fAtomsToGo <= 0) {
97 if (fNextChunk < fPlex->fVecSize) {
98 fCurrent = fPlex->Chunk(fNextChunk);
99 fAtomsToGo = fPlex->NAtoms(fNextChunk);
100 ++fNextChunk;
101 } else {
102 return kFALSE;
103 }
104 } else {
105 fCurrent += fPlex->fS;
106 }
107 ++fAtomIndex;
108 --fAtomsToGo;
109 return kTRUE;
110}
111
b28acb03 112
113/**************************************************************************/
114// Some-class CPlex
115/**************************************************************************/
116
117template<class T>
118class CPlex : public VoidCPlex
119{
120private:
121 CPlex(const CPlex&); // Not implemented
122 CPlex& operator=(const CPlex&); // Not implemented
123
124public:
125 CPlex() : VoidCPlex() {}
126 CPlex(Int_t chunk_size) : VoidCPlex(sizeof(T), chunk_size) {}
127 virtual ~CPlex() {}
128
129 void Reset(Int_t chunk_size) { Reset(sizeof(T), chunk_size); }
130
1fc17c6f 131 T* At(Int_t idx) { return reinterpret_cast<T*>(Atom(idx)); }
b28acb03 132 T& Ref(Int_t idx) { return *At(idx); }
133
134 ClassDef(CPlex, 1)
135}; // endclass CPlex
136
137}
138
139#endif