]>
Commit | Line | Data |
---|---|---|
686d5523 | 1 | // $Id$ |
2 | //************************************************************************** | |
f652dd4a | 3 | //* This file is property of and copyright by the * |
686d5523 | 4 | //* ALICE Experiment at CERN, All rights reserved. * |
5 | //* * | |
6 | //* Primary Authors: Matthias Richter <Matthias.Richter@ift.uib.no> * | |
686d5523 | 7 | //* * |
8 | //* Permission to use, copy, modify and distribute this software and its * | |
9 | //* documentation strictly for non-commercial purposes is hereby granted * | |
10 | //* without fee, provided that the above copyright notice appears in all * | |
11 | //* copies and that both the copyright notice and this permission notice * | |
12 | //* appear in the supporting documentation. The authors make no claims * | |
13 | //* about the suitability of this software for any purpose. It is * | |
14 | //* provided "as is" without express or implied warranty. * | |
15 | //************************************************************************** | |
16 | ||
d60d120c | 17 | /// @file AliHLTTPCDataPublisherComponent.cxx |
686d5523 | 18 | /// @author Matthias Richter |
19 | /// @date 2011-08-08 | |
20 | /// @brief | |
21 | /// | |
22 | ||
d60d120c | 23 | #include "AliHLTTPCDataPublisherComponent.h" |
686d5523 | 24 | #include "AliHLTTPCDefinitions.h" |
f652dd4a | 25 | #include "AliHLTTPCTransform.h" |
26 | #include "AliHLTTPCClusterMCData.h" | |
27 | #include "AliHLTTPCDataCompressionDecoder.h" | |
686d5523 | 28 | #include "AliHLTPluginBase.h" |
29 | #include "AliHLTSystem.h" | |
30 | #include "AliHLTOUT.h" | |
f652dd4a | 31 | #include "AliHLTDAQ.h" |
32 | #include "AliHLTTemplates.h" | |
686d5523 | 33 | #include "AliLog.h" |
34 | #include <vector> | |
f652dd4a | 35 | #include <memory> |
36 | #include <algorithm> | |
686d5523 | 37 | |
d60d120c | 38 | ClassImp(AliHLTTPCDataPublisherComponent) |
686d5523 | 39 | |
AliHLTTPCDataPublisherComponent::AliHLTTPCDataPublisherComponent()
  : AliHLTRawReaderPublisherComponent()
  , fMode(kPublisherModeDefault)
  , fArraySelected(NULL)
  , fClusters(NULL)
  , fpDecoder(NULL)
{
  /// constructor
  /// all members are initialized in the list; fClusters and fpDecoder are
  /// allocated lazily in DoInit/ReadClusterFromHLTOUT respectively
}
49 | ||
d60d120c | 50 | AliHLTTPCDataPublisherComponent::~AliHLTTPCDataPublisherComponent() |
686d5523 | 51 | { |
52 | /// destructor | |
f652dd4a | 53 | if (fpDecoder) delete fpDecoder; |
54 | fpDecoder=NULL; | |
686d5523 | 55 | } |
56 | ||
57 | ||
const char* AliHLTTPCDataPublisherComponent::GetComponentID()
{
  /// inherited from AliHLTComponent: id of the component
  /// the string is used by the component handler to create instances
  return "TPCDataPublisher";
}
63 | ||
AliHLTComponent* AliHLTTPCDataPublisherComponent::Spawn()
{
  /// inherited from AliHLTComponent: spawn function.
  /// creates a new instance of the component; ownership is taken by the caller
  return new AliHLTTPCDataPublisherComponent;
}
69 | ||
int AliHLTTPCDataPublisherComponent::GetEvent(const AliHLTComponentEventData& evtData,
                                              AliHLTComponentTriggerData& trigData,
                                              AliHLTUInt8_t* outputPtr,
                                              AliHLTUInt32_t& size,
                                              AliHLTComponentBlockDataList& outputBlocks)
{
  /// inherited from AliHLTProcessor: data processing
  /// Decodes compressed TPC clusters from HLTOUT into the beginning of the
  /// output buffer (depending on mode) and lets the base class publish raw
  /// DDL data into the remainder of the buffer. On exit 'size' holds the
  /// total filled size and 'outputBlocks' the descriptors of all blocks.
  if (!IsDataEvent()) return 0;

  int iResult=0;

  AliHLTComponentBlockDataList clusterBlocks;
  // 'offset' accumulates the space consumed by cluster blocks at the start
  // of the buffer; the raw data publishing of the base class is shifted by it
  AliHLTUInt32_t offset=0;
  AliHLTUInt32_t capacity=size;
  size=0;
  if (fClusters) {
    fClusters->Clear();
    if (CheckMode(kPublishClustersAll)) {
      // set the target buffer only if the clusters should be published
      fClusters->SetTargetBuffer(outputPtr+offset, capacity-offset);
    } else if (CheckMode(kRegisterClusterBlocks)) {
      // data blocks are registered in the container, track model cluster blocks
      // are unpacked but not stored in order to find the included partitions
      //fClusters->
    }
    if (CheckMode(kPublishClustersAll) ||
        CheckMode(kRegisterClusterBlocks)) {
      if ((iResult=ReadClusterFromHLTOUT(fClusters))>=0) {
        if ((iResult=fClusters->GetState())>=0) {
          // advance 'offset' past the end of the last cluster block
          if (fClusters->CopyBlockDescriptors(clusterBlocks)>0) {
            for (AliHLTComponentBlockDataList::const_iterator bd=clusterBlocks.begin();
                 bd!=clusterBlocks.end(); bd++) {
              if (offset<bd->fOffset+bd->fSize)
                offset=bd->fOffset+bd->fSize;
            }
          }
        } else if (iResult==-ENOSPC) {
          // buffer too small: estimate the required size from the block and
          // cluster counts so that fMaxSize can be updated below
          offset=fClusters->GetBlockCount()*sizeof(AliHLTTPCRawClusterData)+
            fClusters->GetClusterCount()*sizeof(AliHLTTPCRawCluster);
          iResult=0; // keep going to also accumulate the size for raw data blocks
        }
      }
      if (iResult==-ENODATA) {
        // return indicates absence of compressed clusters in HLTOUT
        // but is not treated as an error further downstream
        iResult=0;
      }
    }
  }

  if (offset<=capacity) {
    // hand the remainder of the buffer to the base class
    size=capacity-offset;
    outputPtr+=offset;
  } else {
    // there is clearly not enough space, keep the full buffer to
    // publish the raw data blocks and determine the size of those
    // data will be overwritten
    size=capacity;
  }
  if (iResult>=0) {
    // remember how many blocks existed before the base class added its own,
    // only the newly added ones must be shifted by 'offset'
    unsigned firstBlock=outputBlocks.size();
    iResult=AliHLTRawReaderPublisherComponent::GetEvent(evtData, trigData, outputPtr, size, outputBlocks);
    if (iResult==-ENOSPC) {
      // not enough space in the buffer, fMaxSize has been updated by base class
      fMaxSize+=offset;
    } else if (iResult>=0) {
      if (outputBlocks.size()>firstBlock && CheckMode(kPublishRawFiltered)) {
        AliInfo(Form("publishing %lu DDL(s) for emulation of compressed TPC clusters", outputBlocks.size()-firstBlock));
      }
      // correct for the shifted buffer which was provided to the
      // GetEvent method
      for (AliHLTComponentBlockDataList::iterator bd=outputBlocks.begin();
           bd!=outputBlocks.end(); bd++) {
        if (firstBlock>0) {firstBlock--; continue;}
        bd->fOffset+=offset;
      }
      offset+=size;
    }
  }

  if (iResult>=0 && capacity<offset && fMaxSize<(int)offset) {
    // update the size requirement
    fMaxSize=offset;
    outputBlocks.clear();
    iResult=-ENOSPC;
  }

  if (iResult>=0) {
    size=offset;
    // prepend the cluster block descriptors unless they were only registered
    // for the raw data filtering
    if (clusterBlocks.size()>0 && !CheckMode(kRegisterClusterBlocks)) {
      outputBlocks.insert(outputBlocks.begin(), clusterBlocks.begin(), clusterBlocks.end());
    }
  }

  return iResult;
}
166 | ||
d60d120c | 167 | int AliHLTTPCDataPublisherComponent::ReadClusterFromHLTOUT(AliHLTTPCDataPublisherComponent::AliRawClusterContainer* pContainer) |
686d5523 | 168 | { |
169 | // check the HLTOUT for availability of compressed data blocks | |
f652dd4a | 170 | int iResult=0; |
686d5523 | 171 | AliHLTSystem* pSystem=AliHLTPluginBase::GetInstance(); |
172 | if (!pSystem) { | |
173 | // global system not initialized | |
174 | return -ENODEV; | |
175 | } | |
176 | AliHLTOUT* pHLTOUT=pSystem->RequestHLTOUT(); | |
177 | if (!pHLTOUT) { | |
178 | // not HLTOUT, hence not clusters | |
179 | return 0; | |
180 | } | |
181 | ||
f652dd4a | 182 | if (!fpDecoder) { |
183 | fpDecoder=new AliHLTTPCDataCompressionDecoder; | |
184 | } | |
185 | ||
186 | if (!fpDecoder) { | |
187 | AliError("failed to create decoder instance"); | |
188 | return -ENODEV; | |
189 | } | |
190 | ||
191 | AliHLTTPCDataCompressionDecoder& decoder=*fpDecoder; | |
192 | decoder.Clear(); | |
193 | decoder.SetVerbosity(GetVerbosity()); | |
194 | decoder.EnableClusterMerger(); | |
195 | ||
196 | bool bNextBlock=false; | |
197 | // add cluster id and mc information data blocks | |
198 | for (bNextBlock=(pHLTOUT->SelectFirstDataBlock()>=0); | |
686d5523 | 199 | bNextBlock; bNextBlock=(pHLTOUT->SelectNextDataBlock()>=0)) { |
f652dd4a | 200 | AliHLTComponentBlockData desc; |
201 | if ((iResult=pHLTOUT->GetDataBuffer(desc))<0) { | |
686d5523 | 202 | continue; |
f652dd4a | 203 | } |
204 | if (desc.fDataType==AliHLTTPCDefinitions::AliHLTDataTypeClusterMCInfo()) { | |
205 | // add mc information | |
206 | if ((iResult=decoder.AddClusterMCData(&desc))<0) { | |
207 | return iResult; | |
208 | } | |
209 | } | |
210 | if (desc.fDataType==AliHLTTPCDefinitions::RemainingClusterIdsDataType() || | |
211 | desc.fDataType==AliHLTTPCDefinitions::ClusterIdTracksDataType()) { | |
212 | // add cluster ids | |
213 | if ((iResult=decoder.AddClusterIds(&desc))<0) { | |
214 | return iResult; | |
215 | } | |
216 | } | |
686d5523 | 217 | } |
218 | ||
f652dd4a | 219 | bool bHavePartitionRawData=false; |
220 | bool bHavePartitionCompressedData=false; | |
221 | vector<bool> bHavePartitionData(216, false); | |
222 | ||
223 | // read data | |
224 | iResult=-ENODATA; | |
225 | int nExtractedClusters=0; | |
226 | for (bNextBlock=(pHLTOUT->SelectFirstDataBlock()>=0); | |
686d5523 | 227 | bNextBlock; bNextBlock=(pHLTOUT->SelectNextDataBlock()>=0)) { |
f652dd4a | 228 | decoder.SetPadShift(0.0); |
229 | AliHLTComponentBlockData desc; | |
230 | if ((iResult=pHLTOUT->GetDataBuffer(desc))<0) { | |
231 | continue; | |
232 | } | |
233 | if (desc.fDataType==AliHLTTPCDefinitions::RawClustersDataType()) { | |
234 | // This is a special handling of data blocks produced with v5-01-Release | |
235 | // The pad shift by 0.5 was not included in the data but was applied in the | |
236 | // unpacking in this class. Changed in r51306, the next tag containing this | |
237 | // change in the online system is v5-01-Rev-07. There are only very few runs | |
238 | // of Sep 2011 with recorded clusters not containing the 0.5 shift | |
239 | // There was also a chenge in the data type of the compressed partition | |
240 | // cluster blocks which helps to identify the blocks which need the pad shift | |
241 | // here | |
242 | if (desc.fSize<sizeof(AliHLTTPCRawClusterData)) continue; | |
243 | const AliHLTTPCRawClusterData* clusterData = reinterpret_cast<const AliHLTTPCRawClusterData*>(desc.fPtr); | |
244 | if (!clusterData) continue; | |
245 | if (clusterData->fVersion==1) { | |
246 | // compressed clusters without the pad shift | |
247 | // no raw clusters (version==0) have ever been recorded | |
248 | decoder.SetPadShift(0.5); | |
249 | } | |
250 | AliHLTUInt8_t slice = AliHLTTPCDefinitions::GetMinSliceNr(desc.fSpecification); | |
251 | AliHLTUInt8_t partition = AliHLTTPCDefinitions::GetMinPatchNr(desc.fSpecification); | |
252 | if (slice!=AliHLTTPCDefinitions::GetMaxSliceNr(desc.fSpecification) || | |
253 | partition!=AliHLTTPCDefinitions::GetMaxPatchNr(desc.fSpecification)) { | |
254 | AliFatal(Form("inconsistent cluster data: can not handle blocks containing multiple partitions, " | |
255 | "block specification 0x%08x", desc.fSpecification)); | |
256 | } | |
257 | iResult=decoder.ReadClustersPartition(pContainer->BeginRemainingClusterBlock(0, desc.fSpecification), | |
258 | reinterpret_cast<AliHLTUInt8_t*>(desc.fPtr), | |
259 | desc.fSize, | |
260 | desc.fSpecification); | |
261 | if (iResult>=0) nExtractedClusters+=iResult; | |
262 | else { | |
263 | AliFatal(Form("processing of cluster block 0x%08x failed with error code %d", desc.fSpecification, iResult)); | |
264 | } | |
265 | unsigned index=slice*AliHLTTPCTransform::GetNumberOfPatches()+partition; | |
266 | if (index>=bHavePartitionData.size()) bHavePartitionData.resize(index, false); | |
267 | if (bHavePartitionData[index]) { | |
268 | AliFatal(Form("inconsistent cluster data: multiple data blocks of identical specification indicate a failure " | |
269 | "in the production of the data. Probably an HLT emulation chain is executed in the reconstruction " | |
270 | "and produces data in addition to HLTOUT. Option 'ignore-hltout' is required in that case; " | |
271 | "block specification 0x%08x", desc.fSpecification)); | |
272 | } | |
273 | bHavePartitionData[index]=true; | |
274 | if (bHavePartitionCompressedData) { | |
275 | AliFatal(Form("inconsistent cluster data: both compressed and raw cluster blocks present in HLTOUT, indicates a failure " | |
276 | "in the production of the data. Probably an HLT emulation chain is executed in the reconstruction " | |
277 | "and produces data in addition to HLTOUT. Option 'ignore-hltout' is required in that case; " | |
278 | "block specification 0x%08x", desc.fSpecification)); | |
279 | } | |
280 | bHavePartitionRawData=true; | |
281 | continue; | |
282 | } else if (desc.fDataType==AliHLTTPCDefinitions::RemainingClustersCompressedDataType()) { | |
283 | AliHLTUInt8_t slice = AliHLTTPCDefinitions::GetMinSliceNr(desc.fSpecification); | |
284 | AliHLTUInt8_t partition = AliHLTTPCDefinitions::GetMinPatchNr(desc.fSpecification); | |
285 | if (slice!=AliHLTTPCDefinitions::GetMaxSliceNr(desc.fSpecification) || | |
286 | partition!=AliHLTTPCDefinitions::GetMaxPatchNr(desc.fSpecification)) { | |
287 | AliFatal(Form("inconsistent cluster data: can not handle blocks containing multiple partitions, " | |
288 | "block specification 0x%08x", desc.fSpecification)); | |
289 | } | |
290 | iResult=decoder.ReadClustersPartition(pContainer->BeginRemainingClusterBlock(0, desc.fSpecification), | |
291 | reinterpret_cast<AliHLTUInt8_t*>(desc.fPtr), | |
292 | desc.fSize, | |
293 | desc.fSpecification); | |
294 | if (iResult>0) nExtractedClusters+=iResult; | |
295 | unsigned index=slice*AliHLTTPCTransform::GetNumberOfPatches()+partition; | |
296 | if (index>=bHavePartitionData.size()) bHavePartitionData.resize(index, false); | |
297 | if (bHavePartitionData[index]) { | |
298 | AliFatal(Form("inconsistent cluster data: multiple data blocks of identical specification indicate a failure " | |
299 | "in the production of the data. Probably an HLT emulation chain is executed in the reconstruction " | |
300 | "and produces data in addition to HLTOUT. Option 'ignore-hltout' is required in that case; " | |
301 | "block specification 0x%08x", desc.fSpecification)); | |
302 | } | |
303 | bHavePartitionData[index]=true; | |
304 | bHavePartitionData[index]=true; | |
305 | if (bHavePartitionRawData) { | |
306 | AliFatal(Form("inconsistent cluster data: both compressed and raw cluster blocks present in HLTOUT, indicates a failure " | |
307 | "in the production of the data. Probably an HLT emulation chain is executed in the reconstruction " | |
308 | "and produces data in addition to HLTOUT. Option 'ignore-hltout' is required in that case; " | |
309 | "block specification 0x%08x", desc.fSpecification)); | |
310 | } | |
311 | bHavePartitionCompressedData=true; | |
312 | continue; | |
313 | } else if (desc.fDataType==AliHLTTPCDefinitions::ClusterTracksCompressedDataType()) { | |
314 | iResult=decoder.ReadTrackModelClustersCompressed(pContainer->BeginTrackModelClusterBlock(0), | |
315 | reinterpret_cast<AliHLTUInt8_t*>(desc.fPtr), | |
316 | desc.fSize, | |
317 | desc.fSpecification); | |
318 | continue; | |
319 | } | |
686d5523 | 320 | } |
f652dd4a | 321 | |
322 | pSystem->ReleaseHLTOUT(pHLTOUT); | |
686d5523 | 323 | |
f652dd4a | 324 | if (iResult<0) return iResult; |
325 | return nExtractedClusters; | |
686d5523 | 326 | } |
327 | ||
int AliHLTTPCDataPublisherComponent::DoInit( int argc, const char** argv )
{
  /// inherited from AliHLTComponent: component initialisation and argument scan.
  /// configuration happens in three stages: built-in defaults, OCDB (disabled),
  /// and finally the runtime command line arguments
  int iResult=0;

  // component configuration
  //Stage 1: default initialization.
  // publish TPC raw DDL data, skipping DDLs with empty payload
  const char* defaultArguments="-detector TPC -datatype 'DDL_RAW ' 'TPC ' -skipempty";
  if ((iResult = ConfigureFromArgumentString(1, &defaultArguments)) < 0)
    return iResult;

  //Stage 2: OCDB. - disabled
  //TString cdbPath("HLT/ConfigTPC/");
  //cdbPath += GetComponentID();
  //
  //iResult = ConfigureFromCDBTObjString(cdbPath);
  //if (iResult < 0)
  //  return iResult;

  //Stage 3: command line arguments.
  // runtime arguments override the defaults of stage 1
  if (argc && (iResult = ConfigureFromArgumentString(argc, argv)) < 0)
    return iResult;
  // base class initialization without arguments, they have been processed above
  if ((iResult=AliHLTRawReaderPublisherComponent::DoInit(0, NULL))<0)
    return iResult;

  // NOTE(review): auto_ptr is deprecated in modern C++; kept to match the
  // standard level of this codebase
  auto_ptr<AliRawClusterContainer> container(new AliRawClusterContainer);
  if (!container.get()) return -ENOMEM;

  // ownership of the container is transferred to the member pointer
  fClusters=container.release();

  return iResult;
}
360 | ||
d60d120c | 361 | int AliHLTTPCDataPublisherComponent::DoDeinit() |
686d5523 | 362 | { |
363 | /// inherited from AliHLTComponent: component cleanup | |
364 | int iResult=0; | |
365 | ||
f652dd4a | 366 | if (fpDecoder) delete fpDecoder; |
367 | fpDecoder=NULL; | |
368 | ||
686d5523 | 369 | return iResult; |
370 | } | |
371 | ||
int AliHLTTPCDataPublisherComponent::ScanConfigurationArgument(int argc, const char** argv)
{
  /// inherited from AliHLTComponent: argument scan
  /// @return number of consumed arguments (2 for argument+parameter),
  ///         neg. error code on invalid parameters, otherwise the result of
  ///         the base class scan
  if (argc<1) return 0;
  int bMissingParam=0;
  int i=0;
  TString argument=argv[i];

  do {
    // -publish-raw
    // controls publishing of raw DDL blocks: all, filtered by available
    // cluster blocks, or off
    if (argument.CompareTo("-publish-raw")==0) {
      if ((bMissingParam=(++i>=argc))) break;
      TString parameter=argv[i];
      if (parameter.CompareTo("all")==0) {
        fMode|=kPublishRawAll;
        return 2;
      } else if (parameter.CompareTo("filtered")==0) {
        // filtered publishing needs the cluster blocks registered to decide
        // which partitions are already covered
        fMode|=kPublishRawFiltered;
        fMode|=kRegisterClusterBlocks;
        fMode&=~kPublishRawAll;
        return 2;
      } else if (parameter.CompareTo("off")==0) {
        fMode&=~(kPublishRawAll|kPublishRawFiltered);
        return 2;
      } else {
        HLTError("invalid parameter for argument %s, expecting either 'all', 'filtered', or 'off' instead of %s", argument.Data(), parameter.Data());
        return -EPROTO;
      }
    }
    // -publish-clusters
    // controls publishing of the decoded cluster blocks
    if (argument.CompareTo("-publish-clusters")==0) {
      if ((bMissingParam=(++i>=argc))) break;
      TString parameter=argv[i];
      if (parameter.CompareTo("all")==0) {
        fMode|=kPublishClustersAll;
        return 2;
      } else if (parameter.CompareTo("off")==0) {
        fMode&=~(kPublishClustersAll);
        return 2;
      } else {
        HLTError("invalid parameter for argument %s, expecting either 'all', or 'off' instead of %s", argument.Data(), parameter.Data());
        return -EPROTO;
      }
    }

  } while (0); // using do-while only to have break available

  // NOTE(review): a missing parameter (bMissingParam set) is not reported
  // here but silently forwarded to the base class scan - confirm intended
  return AliHLTRawReaderPublisherComponent::ScanConfigurationArgument(argc, argv);
}
421 | ||
d60d120c | 422 | int AliHLTTPCDataPublisherComponent::GetSpecificationFromEquipmentId(int id, AliHLTUInt32_t &specification) const |
686d5523 | 423 | { |
424 | /// inherited from AliHLTRawReaderPublisherComponent: get specification | |
425 | ||
426 | // FIXME: add common functionality to AliHLTDAQ | |
427 | int partition; | |
428 | int slice; | |
429 | if (id < 840) { | |
430 | partition = id % 2; | |
431 | slice = (id - 768) / 2; | |
432 | } else { | |
433 | partition = (id % 4) + 2; | |
434 | slice = (id - 840) / 4; | |
435 | } | |
436 | specification=(slice<<24)|(slice<<16)|(partition<<8)|partition; | |
437 | ||
438 | return 0; | |
439 | } | |
440 | ||
d60d120c | 441 | bool AliHLTTPCDataPublisherComponent::IsSelected(int equipmentId) const |
686d5523 | 442 | { |
443 | /// inherited from AliHLTRawReaderPublisherComponent: check if a block is selected or not | |
f652dd4a | 444 | /// check if a raw data block needs to be published. This is the case if |
445 | /// there is no corresponding compressed data, i.e. function returns | |
446 | /// only false if the block can be found in the cluster container | |
749f85ed | 447 | if (CheckMode(kPublishRawAll)) |
448 | return true; | |
449 | if (!CheckMode(kPublishRawFiltered)) | |
450 | return false; | |
451 | ||
f652dd4a | 452 | if (!fClusters) |
453 | return true; | |
454 | ||
455 | int offset=AliHLTDAQ::DdlIDOffset(3); | |
456 | int count=AliHLTDAQ::NumberOfDdls(3); | |
457 | if (offset<0 || count<0) | |
458 | return true; | |
459 | if (equipmentId<offset) | |
460 | return true; | |
461 | equipmentId-=offset; | |
462 | if (equipmentId>=count) | |
463 | return true; | |
464 | int slice=equipmentId<72?equipmentId/2:(equipmentId-72)/4; | |
749f85ed | 465 | int partition=equipmentId<72?equipmentId%2:((equipmentId-72)%4)+2; |
f652dd4a | 466 | AliHLTUInt32_t specification=AliHLTTPCDefinitions::EncodeDataSpecification(slice, slice, partition, partition); |
467 | for (AliHLTComponentBlockDataList::const_iterator i=fClusters->GetBlockDescriptors().begin(); | |
468 | i!=fClusters->GetBlockDescriptors().end(); i++) { | |
469 | if (i->fSpecification==specification) | |
470 | return false; | |
471 | } | |
472 | return true; | |
473 | } | |
474 | ||
AliHLTTPCDataPublisherComponent::AliRawClusterContainer::AliRawClusterContainer()
  : AliHLTLogging()
  , fBlockCount(0)
  , fTotalClusterCount(0)
  , fBlockClusterCount(0)
  , fpBuffer(NULL)
  , fBufferSize(0)
  , fDescriptors()
  , fCurrentBlock(NULL)
  , fTrackModelClusters(NULL)
  , fTrackModelClusterMap()
  , fIterator()
  , fState(0)
{
  // constructor
  // the external target buffer (fpBuffer/fBufferSize) is set later via
  // SetTargetBuffer
}
491 | ||
AliHLTTPCDataPublisherComponent::AliRawClusterContainer::~AliRawClusterContainer()
{
  // destructor
  // the container does not own the target buffer, nothing to release
}
496 | ||
d60d120c | 497 | int AliHLTTPCDataPublisherComponent::AliRawClusterContainer::SetTargetBuffer(AliHLTUInt8_t* pBuffer, int size) |
f652dd4a | 498 | { |
499 | // set/reset the external target buffer | |
500 | Clear(); | |
501 | fpBuffer=pBuffer; | |
502 | fBufferSize=pBuffer?size:0; | |
503 | return 0; | |
504 | } | |
505 | ||
int AliHLTTPCDataPublisherComponent::AliRawClusterContainer::Sort()
{
  // merge track model clusters into partition cluster blocks
  // currently not implemented, always returns -ENOSYS

  // TODO: implement merging
  // decoding of track model clusters needs to be done after all
  // partition blocks have been decoded. The track model clusters are
  // then at the end of the target buffer and have to be sorted into the
  // other blocks
  // 1) move track model cluster block by its own size back in buffer
  //    if not enough space, allocate temporary buffer and increase the
  //    size estimator for the next event
  // 2) fill the index grid
  // 3) make appropriate gaps between the partition cluster blocks
  // 4) copy clusters into the partitions and update descriptors
  return -ENOSYS;
}
523 | ||
d60d120c | 524 | int AliHLTTPCDataPublisherComponent::AliRawClusterContainer::CopyBlockDescriptors(AliHLTComponentBlockDataList& target) const |
f652dd4a | 525 | { |
526 | // fill block descriptors of extracted partition cluster blocks to target list | |
527 | target.insert(target.begin(), fDescriptors.begin(), fDescriptors.end()); | |
528 | return fDescriptors.size(); | |
529 | } | |
686d5523 | 530 | |
AliHLTTPCDataPublisherComponent::AliRawClusterContainer::iterator& AliHLTTPCDataPublisherComponent::AliRawClusterContainer::BeginPartitionClusterBlock(int count, AliHLTUInt32_t specification)
{
  /// start a new partition cluster block of the given specification and
  /// return the fill iterator for it
  /// NOTE(review): callers in this file use the name BeginRemainingClusterBlock;
  /// confirm both are declared in the header or the naming is unified
  return ClusterIterator(count, AliHLTTPCDefinitions::RemainingClustersCompressedDataType(), specification, fCurrentBlock);
}
536 | ||
AliHLTTPCDataPublisherComponent::AliRawClusterContainer::iterator& AliHLTTPCDataPublisherComponent::AliRawClusterContainer::BeginTrackModelClusterBlock(int count)
{
  /// start the track model cluster block and return the fill iterator
  /// 0x23000500 is the fixed specification used for the track model block
  /// - presumably encoding the full slice/partition range, TODO confirm
  return ClusterIterator(count, AliHLTTPCDefinitions::ClusterTracksCompressedDataType(), 0x23000500, fTrackModelClusters);
}
542 | ||
AliHLTTPCDataPublisherComponent::AliRawClusterContainer::iterator& AliHLTTPCDataPublisherComponent::AliRawClusterContainer::ClusterIterator(int /*count*/, AliHLTComponentDataType dt, AliHLTUInt32_t specification, AliHLTTPCRawClusterData* &pData)
{
  /// common implementation for starting a cluster block: appends a new block
  /// descriptor, initializes the block header in the target buffer and
  /// re-initializes the member iterator in place
  /// @param dt             data type of the new block
  /// @param specification  data specification of the new block
  /// @param pData          [out] receives the pointer to the block header
  fBlockCount++;
  // the member iterator is recycled via explicit destructor call and
  // placement new below
  fIterator.~iterator();
  fCurrentBlock=NULL;
  fTrackModelClusters=NULL;
  fTrackModelClusterMap.clear();
  fBlockClusterCount=0;
  // 'filled' accumulates the space used by the already existing blocks,
  // i.e. the offset at which the new block starts
  AliHLTUInt32_t filled=0;
  for (AliHLTComponentBlockDataList::const_iterator desc=fDescriptors.begin();
       desc!=fDescriptors.end(); desc++) {
    filled+=desc->fSize;
    if (desc->fSpecification==specification &&
        desc->fDataType==dt) {
      HLTFatal("partition cluster block with data type %s and specification 0x%08x has been already processed",
               AliHLTComponent::DataType2Text(dt).c_str(), specification);
      // force the buffer-full path below so an invalid iterator is returned
      filled=fBufferSize;
    }
  }

  // insert an empty data block which is than updated later
  AliHLTComponentBlockData bd;
  AliHLTComponent::FillBlockData(bd);
  bd.fPtr=NULL;
  bd.fSize=0;
  bd.fOffset=filled;
  bd.fDataType=dt;
  bd.fSpecification=specification;
  fDescriptors.push_back(bd);

  // initialize only the header, during filling the cluster count of the header
  // and the block size will be incremented
  AliHLTUInt32_t blocksize=sizeof(AliHLTTPCRawClusterData);
  if (filled+blocksize>(unsigned)fBufferSize || fpBuffer==NULL) {
    // no target buffer or not enough space: return an iterator without
    // container, NextCluster will not be reachable through it
    new (&fIterator) iterator;
    return fIterator;
  }
  pData=reinterpret_cast<AliHLTTPCRawClusterData*>(fpBuffer+filled);
  pData->fVersion=0;
  pData->fCount=0;
  fDescriptors.back().fSize=blocksize;
  new (&fIterator) iterator(this);
  return fIterator;
}
588 | ||
AliHLTTPCRawCluster* AliHLTTPCDataPublisherComponent::AliRawClusterContainer::NextCluster(int slice, int partition)
{
  /// provide the next cluster slot in the current block
  /// @return pointer to the new cluster in the target buffer, NULL if no
  ///         block is active or the buffer is exhausted (fState set -ENOSPC)
  // NOTE: the counters are incremented before the validity checks below -
  // they count attempts, not successfully placed clusters
  fTotalClusterCount++;
  fBlockClusterCount++;
  if (!fCurrentBlock && !fTrackModelClusters)
    return NULL;
  if (fDescriptors.size()==0)
    return NULL;
  AliHLTTPCRawClusterData* data=fCurrentBlock?fCurrentBlock:fTrackModelClusters;
  // check that one more cluster still fits into the target buffer
  if (int(fDescriptors.back().fOffset+fDescriptors.back().fSize+sizeof(AliHLTTPCRawCluster))>=fBufferSize) {
    fState=-ENOSPC;
    return NULL;
  }
  data->fCount++;
  fDescriptors.back().fSize+=sizeof(AliHLTTPCRawCluster);
  // for track model clusters remember the slice/partition origin of each
  // cluster; presumably the already incremented fBlockClusterCount is the
  // intended index - TODO confirm against AliHLTTPCSpacePointData::GetID users
  if (fTrackModelClusters)
    fTrackModelClusterMap.push_back(AliHLTTPCSpacePointData::GetID(slice, partition, fBlockClusterCount));
  return data->fClusters+(data->fCount-1);
}
609 | ||
d60d120c | 610 | void AliHLTTPCDataPublisherComponent::AliRawClusterContainer::Clear(Option_t * /*option*/) |
f652dd4a | 611 | { |
612 | /// internal cleanup | |
613 | fBlockCount=0; | |
614 | fTotalClusterCount=0; | |
615 | fBlockClusterCount=0; | |
616 | fpBuffer=NULL; | |
617 | fBufferSize=0; | |
618 | fCurrentBlock=NULL; | |
619 | fTrackModelClusters=NULL; | |
620 | fTrackModelClusterMap.clear(); | |
621 | fDescriptors.clear(); | |
622 | fState=0; | |
623 | } | |
624 | ||
void AliHLTTPCDataPublisherComponent::AliRawClusterContainer::Print(Option_t */*option*/) const
{
  /// print info
  /// currently a no-op placeholder required by the interface
}
629 | ||
d60d120c | 630 | AliHLTTPCDataPublisherComponent::AliRawClusterContainer::iterator& AliHLTTPCDataPublisherComponent::AliRawClusterContainer::iterator::Next(int slice, int partition) |
f652dd4a | 631 | { |
632 | // increment iterator | |
633 | if (fContainer) { | |
634 | fCluster=fContainer->NextCluster(slice, partition); | |
635 | if (fCluster) memset(fCluster, 0, sizeof(AliHLTTPCRawCluster)); | |
636 | } else { | |
637 | fCluster=NULL; | |
638 | } | |
639 | return *this; | |
686d5523 | 640 | } |