// $Id$

//**************************************************************************
//* This file is property of and copyright by the ALICE HLT Project        *
//* ALICE Experiment at CERN, All rights reserved.                         *
//*                                                                        *
//* Primary Authors: Timm Steinbeck <timm@kip.uni-heidelberg.de>           *
//*                  for The ALICE HLT Project.                            *
//*                                                                        *
//* Permission to use, copy, modify and distribute this software and its   *
//* documentation strictly for non-commercial purposes is hereby granted   *
//* without fee, provided that the above copyright notice appears in all   *
//* copies and that both the copyright notice and this permission notice   *
//* appear in the supporting documentation. The authors make no claims     *
//* about the suitability of this software for any purpose. It is          *
//* provided "as is" without express or implied warranty.                  *
//**************************************************************************

/** @file   AliHLTTPCCompModelDeconverterComponent.cxx
    @author Timm Steinbeck
    @date
    @brief  A deconverter processing component for the HLT that expands
            Vestbo-model compressed TPC data back into standard clusters
            and tracks. */

#if __GNUC__ >= 3
using namespace std;
#endif

#include "AliHLTTPCCompModelDeconverterComponent.h"
#include "AliHLTTPCDefinitions.h"
#include <stdlib.h>
#include <errno.h>

/**
 * An implementation of a deconverter component that
 * converts the tracks and clusters from the Vestbo model
 * back into the standard HLT cluster and track format,
 * so that the information loss caused by the Vestbo
 * compression can be evaluated.
 */
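
/*
 * A possible chain-configuration sketch: the component registers itself under
 * the ID "TPCCompModelDeconverter" (see GetComponentID below) and would
 * typically sit downstream of the model converter. The configuration id
 * "TPC-model-deconverter" and the parent name "TPC-model-converter" are
 * placeholder names chosen for this example; "notracks" is the single option
 * understood by DoInit.
 *
 *   AliHLTConfiguration deconverter( "TPC-model-deconverter",   // arbitrary configuration id
 *                                    "TPCCompModelDeconverter", // this component's ID
 *                                    "TPC-model-converter",     // placeholder parent configuration
 *                                    "notracks" );              // optional: suppress track output
 */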

/** ROOT macro for the implementation of ROOT specific class methods */
ClassImp(AliHLTTPCCompModelDeconverterComponent)

AliHLTTPCCompModelDeconverterComponent::AliHLTTPCCompModelDeconverterComponent():
  fDeconverter(),
  fOutputTracks(kTRUE)
{
  // see header file for class documentation
}

AliHLTTPCCompModelDeconverterComponent::~AliHLTTPCCompModelDeconverterComponent()
{
  // see header file for class documentation
}

const char* AliHLTTPCCompModelDeconverterComponent::GetComponentID()
{
  // see header file for class documentation
  return "TPCCompModelDeconverter"; // The ID of this component
}

void AliHLTTPCCompModelDeconverterComponent::GetInputDataTypes( vector<AliHLTComponent_DataType>& list )
{
  // see header file for class documentation
  list.clear(); // Accept exactly the two Vestbo-model input data types.
  list.push_back( AliHLTTPCDefinitions::fgkClusterTracksModelDataType );
  list.push_back( AliHLTTPCDefinitions::fgkRemainingClustersModelDataType );
}

AliHLTComponent_DataType AliHLTTPCCompModelDeconverterComponent::GetOutputDataType()
{
  // see header file for class documentation
  return AliHLTTPCDefinitions::fgkClustersDataType;
}

void AliHLTTPCCompModelDeconverterComponent::GetOutputDataSize( unsigned long& constBase, double& inputMultiplier )
{
  // see header file for class documentation
  constBase = 8+216*4; // track count plus one cluster-count word per patch (presumably 36 slices x 6 patches = 216)
  inputMultiplier = 4.;
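  // Illustrative sizing only (input size assumed for the example): for ~50 kB
  // of compressed model input the framework would reserve 8 + 216*4 = 872 bytes
  // of constant overhead plus 4 * 50000 = 200000 bytes, i.e. roughly 200 kB.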
}

// Spawn function, return new instance of this class
AliHLTComponent* AliHLTTPCCompModelDeconverterComponent::Spawn()
{
  // see header file for class documentation
  return new AliHLTTPCCompModelDeconverterComponent;
}

int AliHLTTPCCompModelDeconverterComponent::DoInit( int argc, const char** argv )
{
  // see header file for class documentation
  Int_t i = 0;
  //Char_t* cpErr;

  while ( i < argc )
  {
    if ( !strcmp( argv[i], "notracks" ) )
    {
      fOutputTracks = kFALSE;
      ++i;
      continue;
    }
    Logging( kHLTLogError, "HLT::TPCCompModelDeconverter::DoInit", "Unknown Option", "Unknown option '%s'", argv[i] );
    return EINVAL;
  }
  return 0;
}

int AliHLTTPCCompModelDeconverterComponent::DoDeinit()
{
  // see header file for class documentation
  return 0;
}

int AliHLTTPCCompModelDeconverterComponent::DoEvent( const AliHLTComponent_EventData& evtData, const AliHLTComponent_BlockData* blocks,
                                                     AliHLTComponent_TriggerData& /*trigData*/, AliHLTUInt8_t* outputPtr,
                                                     AliHLTUInt32_t& size, vector<AliHLTComponent_BlockData>& outputBlocks )
{
  // see header file for class documentation
  fDeconverter.Init();
  // Process an event
  // Loop over all input blocks in the event
  AliHLTUInt8_t minSlice=0xFF, maxSlice=0xFF, minPatch=0xFF, maxPatch=0xFF;
  for ( unsigned long n = 0; n < evtData.fBlockCnt; n++ )
  {
    AliHLTUInt8_t slice, patch;
    if ( blocks[n].fDataType == AliHLTTPCDefinitions::fgkRemainingClustersModelDataType ||
         blocks[n].fDataType == AliHLTTPCDefinitions::fgkClusterTracksModelDataType )
    {
      slice = AliHLTTPCDefinitions::GetMinSliceNr( blocks[n].fSpecification );
      patch = AliHLTTPCDefinitions::GetMinPatchNr( blocks[n].fSpecification );
      if ( minSlice==0xFF || slice<minSlice )
        minSlice = slice;
      if ( maxSlice==0xFF || slice>maxSlice )
        maxSlice = slice;
      if ( minPatch==0xFF || patch<minPatch )
        minPatch = patch;
      if ( maxPatch==0xFF || patch>maxPatch )
        maxPatch = patch;
      HLTDebug( "Slice: %u - Patch: %u", (unsigned)slice, (unsigned)patch );
      slice = AliHLTTPCDefinitions::GetMaxSliceNr( blocks[n].fSpecification );
      patch = AliHLTTPCDefinitions::GetMaxPatchNr( blocks[n].fSpecification );
      if ( minSlice==0xFF || slice<minSlice )
        minSlice = slice;
      if ( maxSlice==0xFF || slice>maxSlice )
        maxSlice = slice;
      if ( minPatch==0xFF || patch<minPatch )
        minPatch = patch;
      if ( maxPatch==0xFF || patch>maxPatch )
        maxPatch = patch;
      HLTDebug( "Slice: %u - Patch: %u", (unsigned)slice, (unsigned)patch );
    }
    if ( blocks[n].fDataType == AliHLTTPCDefinitions::fgkClusterTracksModelDataType )
    {
      HLTDebug( "Tracks" );
      fDeconverter.SetTrackClusterModelInputData( (AliHLTUInt8_t*)blocks[n].fPtr, blocks[n].fSize );
    }
    if ( blocks[n].fDataType == AliHLTTPCDefinitions::fgkRemainingClustersModelDataType )
    {
      HLTDebug( "Clusters" );
      fDeconverter.SetRemainingClustersModelInputData( (AliHLTUInt8_t*)blocks[n].fPtr, blocks[n].fSize );
    }
  }

  HLTDebug( "min slice: %u - max slice: %u - min patch: %u - max patch: %u",
            (unsigned)minSlice, (unsigned)maxSlice, (unsigned)minPatch, (unsigned)maxPatch );

  UInt_t blockSize = size;
  UInt_t outputSize = 0;
  Int_t ret;
  if ( fOutputTracks )
  {
    ret = fDeconverter.DeconvertTracks( outputPtr, blockSize );
    if ( !ret )
    {
      if ( outputSize+blockSize > size )
      {
        HLTError( "Output data too large. (%lu used instead of %lu available)",
                  (unsigned long)blockSize, (unsigned long)size );
        return ENOBUFS;
      }

      AliHLTComponent_BlockData ob;
      // Let the structure be filled with the default values.
      // This takes care of setting the shared memory and data type values to default values,
      // so that they can be filled in by the calling code.
      FillBlockData( ob );
      // This block's start (offset) is after all other blocks written so far
      ob.fOffset = outputSize;
      // the size of this block's data.
      ob.fSize = blockSize;
      // The specification encodes the full slice/patch range seen in the input blocks.
      ob.fSpecification = AliHLTTPCDefinitions::EncodeDataSpecification( minSlice, maxSlice, minPatch, maxPatch );
      // The output data type is the standard HLT TPC track format.
      ob.fDataType = AliHLTTPCDefinitions::fgkTracksDataType;
      // Place this block into the list of output blocks
      outputBlocks.push_back( ob );
      outputSize += blockSize;
    }
    else
      HLTError( "Error deconverting tracks: %s (%d)", strerror(ret), (int)ret );
  }

  for ( UInt_t slice=minSlice; slice<=maxSlice; slice++ )
  {
    for ( UInt_t patch=minPatch; patch<=maxPatch; patch++ )
    {
      blockSize = size-outputSize;
      ret = fDeconverter.DeconvertClusters( slice, patch, outputPtr+outputSize, blockSize );
      if ( !ret )
      {
        if ( outputSize+blockSize > size )
        {
          HLTError( "Output data too large. (%lu used instead of %lu available)",
                    (unsigned long)blockSize, (unsigned long)size );
          return ENOBUFS;
        }

        AliHLTComponent_BlockData ob;
        // Let the structure be filled with the default values.
        // This takes care of setting the shared memory and data type values to default values,
        // so that they can be filled in by the calling code.
        FillBlockData( ob );
        // This block's start (offset) is after all other blocks written so far
        ob.fOffset = outputSize;
        // the size of this block's data.
        ob.fSize = blockSize;
        // The specification encodes the single slice/patch this cluster block covers.
        ob.fSpecification = AliHLTTPCDefinitions::EncodeDataSpecification( slice, slice, patch, patch );
        // The output data type is the standard HLT TPC cluster format.
        ob.fDataType = AliHLTTPCDefinitions::fgkClustersDataType;
        // Place this block into the list of output blocks
        outputBlocks.push_back( ob );
        outputSize += blockSize;
      }
      else
        HLTError( "Error deconverting clusters for slice %u, patch %u: %s (%d)",
                  (unsigned)slice, (unsigned)patch, strerror(ret), (int)ret );
    }
  }

  // Finally we set the total size of output memory we consumed.
  size = outputSize;
  return 0;
}