]> git.uio.no Git - u/mrichter/AliRoot.git/blame - HLT/TPCLib/AliHLTTPCDataPublisherComponent.cxx
Merge branch 'workdir'
[u/mrichter/AliRoot.git] / HLT / TPCLib / AliHLTTPCDataPublisherComponent.cxx
CommitLineData
686d5523 1// $Id$
2//**************************************************************************
f652dd4a 3//* This file is property of and copyright by the *
686d5523 4//* ALICE Experiment at CERN, All rights reserved. *
5//* *
6//* Primary Authors: Matthias Richter <Matthias.Richter@ift.uib.no> *
686d5523 7//* *
8//* Permission to use, copy, modify and distribute this software and its *
9//* documentation strictly for non-commercial purposes is hereby granted *
10//* without fee, provided that the above copyright notice appears in all *
11//* copies and that both the copyright notice and this permission notice *
12//* appear in the supporting documentation. The authors make no claims *
13//* about the suitability of this software for any purpose. It is *
14//* provided "as is" without express or implied warranty. *
15//**************************************************************************
16
d60d120c 17/// @file AliHLTTPCDataPublisherComponent.cxx
686d5523 18/// @author Matthias Richter
19/// @date 2011-08-08
20/// @brief
21///
22
d60d120c 23#include "AliHLTTPCDataPublisherComponent.h"
686d5523 24#include "AliHLTTPCDefinitions.h"
f652dd4a 25#include "AliHLTTPCTransform.h"
26#include "AliHLTTPCClusterMCData.h"
27#include "AliHLTTPCDataCompressionDecoder.h"
686d5523 28#include "AliHLTPluginBase.h"
29#include "AliHLTSystem.h"
30#include "AliHLTOUT.h"
f652dd4a 31#include "AliHLTDAQ.h"
32#include "AliHLTTemplates.h"
686d5523 33#include "AliLog.h"
34#include <vector>
f652dd4a 35#include <memory>
36#include <algorithm>
686d5523 37
d60d120c 38ClassImp(AliHLTTPCDataPublisherComponent)
686d5523 39
AliHLTTPCDataPublisherComponent::AliHLTTPCDataPublisherComponent()
  : AliHLTRawReaderPublisherComponent()
  , fMode(kPublisherModeDefault)  // bit flags steering raw/cluster publishing, see ScanConfigurationArgument
  , fArraySelected(NULL)          // NOTE(review): not touched in this file — presumably a selection list, confirm in header
  , fClusters(NULL)               // cluster container, created in DoInit
  , fpDecoder(NULL)               // compression decoder, created lazily in ReadClusterFromHLTOUT
{
  /// constructor
}
49
d60d120c 50AliHLTTPCDataPublisherComponent::~AliHLTTPCDataPublisherComponent()
686d5523 51{
52 /// destructor
f652dd4a 53 if (fpDecoder) delete fpDecoder;
54 fpDecoder=NULL;
686d5523 55}
56
57
const char* AliHLTTPCDataPublisherComponent::GetComponentID()
{
  /// inherited from AliHLTComponent: id of the component
  /// the string is the name under which the component is registered
  /// in the HLT framework and used in chain configurations
  return "TPCDataPublisher";
}
63
AliHLTComponent* AliHLTTPCDataPublisherComponent::Spawn()
{
  /// inherited from AliHLTComponent: spawn function.
  /// creates a fresh instance; ownership is taken by the framework
  return new AliHLTTPCDataPublisherComponent;
}
69
int AliHLTTPCDataPublisherComponent::GetEvent(const AliHLTComponentEventData& evtData,
                                              AliHLTComponentTriggerData& trigData,
                                              AliHLTUInt8_t* outputPtr,
                                              AliHLTUInt32_t& size,
                                              AliHLTComponentBlockDataList& outputBlocks)
{
  /// inherited from AliHLTProcessor: data processing
  /// Two-stage publishing into one output buffer:
  ///   1) optionally extract compressed clusters from HLTOUT into the
  ///      front of the buffer (via fClusters)
  ///   2) let the base class publish the (filtered) raw DDL payloads
  ///      into the remainder of the buffer
  /// 'size' is the buffer capacity on input and the used size on output.
  if (!IsDataEvent()) return 0;

  int iResult=0;

  AliHLTComponentBlockDataList clusterBlocks;
  AliHLTUInt32_t offset=0;        // running end of the cluster section in the buffer
  AliHLTUInt32_t capacity=size;   // total buffer capacity as handed in
  size=0;
  if (fClusters) {
    fClusters->Clear();
    if (CheckMode(kPublishClustersAll)) {
      // set the target buffer only if the clusters should be published
      fClusters->SetTargetBuffer(outputPtr+offset, capacity-offset);
    } else if (CheckMode(kRegisterClusterBlocks)) {
      // data blocks are registered in the container, track model cluster blocks
      // are unpacked but not stored in order to find the included partitions
      //fClusters->
    }
    if (CheckMode(kPublishClustersAll) ||
        CheckMode(kRegisterClusterBlocks)) {
      if ((iResult=ReadClusterFromHLTOUT(fClusters))>=0) {
        if ((iResult=fClusters->GetState())>=0) {
          // advance 'offset' to the end of the highest cluster block
          if (fClusters->CopyBlockDescriptors(clusterBlocks)>0) {
            for (AliHLTComponentBlockDataList::const_iterator bd=clusterBlocks.begin();
                 bd!=clusterBlocks.end(); bd++) {
              if (offset<bd->fOffset+bd->fSize)
                offset=bd->fOffset+bd->fSize;
            }
          }
        } else if (iResult==-ENOSPC) {
          // buffer too small: estimate the space the clusters would have
          // needed so the final size requirement can be reported below
          offset=fClusters->GetBlockCount()*sizeof(AliHLTTPCRawClusterData)+
            fClusters->GetClusterCount()*sizeof(AliHLTTPCRawCluster);
          iResult=0; // keep going to also accumulate the size for raw data blocks
        }
      }
      if (iResult==-ENODATA) {
        // return indicates absence of compressed clusters in HLTOUT
        // but is not treated as an error further downstream
        iResult=0;
      }
    }
  }

  // hand the remainder of the buffer (shifted by 'offset') to the base class
  if (offset<=capacity) {
    size=capacity-offset;
    outputPtr+=offset;
  } else {
    // there is clearly not enough space, keep the full buffer to
    // publish the raw data blocks and determine the size of those
    // data will be overwritten
    size=capacity;
  }
  if (iResult>=0) {
    unsigned firstBlock=outputBlocks.size();
    iResult=AliHLTRawReaderPublisherComponent::GetEvent(evtData, trigData, outputPtr, size, outputBlocks);
    if (iResult==-ENOSPC) {
      // not enough space in the buffer, fMaxSize has been updated by base class
      fMaxSize+=offset;
    } else if (iResult>=0) {
      if (outputBlocks.size()>firstBlock && CheckMode(kPublishRawFiltered)) {
        AliInfo(Form("publishing %lu DDL(s) for emulation of compressed TPC clusters", outputBlocks.size()-firstBlock));
      }
      // correct for the shifted buffer which was provided to the
      // GetEvent method; only the blocks added by the base class are
      // shifted, blocks that were already in the list are skipped
      for (AliHLTComponentBlockDataList::iterator bd=outputBlocks.begin();
           bd!=outputBlocks.end(); bd++) {
        if (firstBlock>0) {firstBlock--; continue;}
        bd->fOffset+=offset;
      }
      offset+=size;
    }
  }

  if (iResult>=0 && capacity<offset && fMaxSize<(int)offset) {
    // update the size requirement and signal the framework to retry
    // with a bigger buffer
    fMaxSize=offset;
    outputBlocks.clear();
    iResult=-ENOSPC;
  }

  if (iResult>=0) {
    size=offset;
    // prepend the cluster block descriptors unless they were only
    // registered for filtering purposes
    if (clusterBlocks.size()>0 && !CheckMode(kRegisterClusterBlocks)) {
      outputBlocks.insert(outputBlocks.begin(), clusterBlocks.begin(), clusterBlocks.end());
    }
  }

  return iResult;
}
166
d60d120c 167int AliHLTTPCDataPublisherComponent::ReadClusterFromHLTOUT(AliHLTTPCDataPublisherComponent::AliRawClusterContainer* pContainer)
686d5523 168{
169 // check the HLTOUT for availability of compressed data blocks
f652dd4a 170 int iResult=0;
686d5523 171 AliHLTSystem* pSystem=AliHLTPluginBase::GetInstance();
172 if (!pSystem) {
173 // global system not initialized
174 return -ENODEV;
175 }
176 AliHLTOUT* pHLTOUT=pSystem->RequestHLTOUT();
177 if (!pHLTOUT) {
178 // not HLTOUT, hence not clusters
179 return 0;
180 }
181
f652dd4a 182 if (!fpDecoder) {
183 fpDecoder=new AliHLTTPCDataCompressionDecoder;
184 }
185
186 if (!fpDecoder) {
187 AliError("failed to create decoder instance");
188 return -ENODEV;
189 }
190
191 AliHLTTPCDataCompressionDecoder& decoder=*fpDecoder;
192 decoder.Clear();
124b5fc8 193 decoder.SetVerbosity(GetVerbosity());
f652dd4a 194
3ba734d3 195 bool bHavePartitionRawData=false;
196 bool bHavePartitionCompressedData=false;
197
f652dd4a 198 bool bNextBlock=false;
199 // add cluster id and mc information data blocks
200 for (bNextBlock=(pHLTOUT->SelectFirstDataBlock()>=0);
686d5523 201 bNextBlock; bNextBlock=(pHLTOUT->SelectNextDataBlock()>=0)) {
f652dd4a 202 AliHLTComponentBlockData desc;
203 if ((iResult=pHLTOUT->GetDataBuffer(desc))<0) {
686d5523 204 continue;
f652dd4a 205 }
124b5fc8 206 if (desc.fDataType==AliHLTTPCDefinitions::DataCompressionDescriptorDataType()) {
3ba734d3 207 // compression header
124b5fc8 208 if ((iResult=decoder.AddCompressionDescriptor(&desc))<0) {
209 return iResult;
210 }
3ba734d3 211 bHavePartitionCompressedData = true;
212 }
213 if (desc.fDataType==AliHLTTPCDefinitions::RawClustersDescriptorDataType()) {
214 // raw clusters header
215 if ((iResult=decoder.AddRawClustersDescriptor(&desc))<0) {
216 return iResult;
217 }
218 bHavePartitionRawData = true;
124b5fc8 219 }
f652dd4a 220 if (desc.fDataType==AliHLTTPCDefinitions::AliHLTDataTypeClusterMCInfo()) {
221 // add mc information
222 if ((iResult=decoder.AddClusterMCData(&desc))<0) {
223 return iResult;
224 }
225 }
226 if (desc.fDataType==AliHLTTPCDefinitions::RemainingClusterIdsDataType() ||
227 desc.fDataType==AliHLTTPCDefinitions::ClusterIdTracksDataType()) {
228 // add cluster ids
229 if ((iResult=decoder.AddClusterIds(&desc))<0) {
230 return iResult;
231 }
232 }
686d5523 233 }
234
f652dd4a 235 vector<bool> bHavePartitionData(216, false);
236
237 // read data
238 iResult=-ENODATA;
239 int nExtractedClusters=0;
240 for (bNextBlock=(pHLTOUT->SelectFirstDataBlock()>=0);
686d5523 241 bNextBlock; bNextBlock=(pHLTOUT->SelectNextDataBlock()>=0)) {
f652dd4a 242 decoder.SetPadShift(0.0);
243 AliHLTComponentBlockData desc;
244 if ((iResult=pHLTOUT->GetDataBuffer(desc))<0) {
245 continue;
246 }
247 if (desc.fDataType==AliHLTTPCDefinitions::RawClustersDataType()) {
248 // This is a special handling of data blocks produced with v5-01-Release
249 // The pad shift by 0.5 was not included in the data but was applied in the
250 // unpacking in this class. Changed in r51306, the next tag containing this
251 // change in the online system is v5-01-Rev-07. There are only very few runs
252 // of Sep 2011 with recorded clusters not containing the 0.5 shift
253 // There was also a chenge in the data type of the compressed partition
254 // cluster blocks which helps to identify the blocks which need the pad shift
255 // here
256 if (desc.fSize<sizeof(AliHLTTPCRawClusterData)) continue;
257 const AliHLTTPCRawClusterData* clusterData = reinterpret_cast<const AliHLTTPCRawClusterData*>(desc.fPtr);
258 if (!clusterData) continue;
259 if (clusterData->fVersion==1) {
260 // compressed clusters without the pad shift
261 // no raw clusters (version==0) have ever been recorded
262 decoder.SetPadShift(0.5);
263 }
264 AliHLTUInt8_t slice = AliHLTTPCDefinitions::GetMinSliceNr(desc.fSpecification);
265 AliHLTUInt8_t partition = AliHLTTPCDefinitions::GetMinPatchNr(desc.fSpecification);
266 if (slice!=AliHLTTPCDefinitions::GetMaxSliceNr(desc.fSpecification) ||
267 partition!=AliHLTTPCDefinitions::GetMaxPatchNr(desc.fSpecification)) {
268 AliFatal(Form("inconsistent cluster data: can not handle blocks containing multiple partitions, "
269 "block specification 0x%08x", desc.fSpecification));
270 }
271 iResult=decoder.ReadClustersPartition(pContainer->BeginRemainingClusterBlock(0, desc.fSpecification),
272 reinterpret_cast<AliHLTUInt8_t*>(desc.fPtr),
273 desc.fSize,
274 desc.fSpecification);
275 if (iResult>=0) nExtractedClusters+=iResult;
276 else {
277 AliFatal(Form("processing of cluster block 0x%08x failed with error code %d", desc.fSpecification, iResult));
278 }
279 unsigned index=slice*AliHLTTPCTransform::GetNumberOfPatches()+partition;
280 if (index>=bHavePartitionData.size()) bHavePartitionData.resize(index, false);
281 if (bHavePartitionData[index]) {
282 AliFatal(Form("inconsistent cluster data: multiple data blocks of identical specification indicate a failure "
283 "in the production of the data. Probably an HLT emulation chain is executed in the reconstruction "
284 "and produces data in addition to HLTOUT. Option 'ignore-hltout' is required in that case; "
285 "block specification 0x%08x", desc.fSpecification));
286 }
287 bHavePartitionData[index]=true;
288 if (bHavePartitionCompressedData) {
289 AliFatal(Form("inconsistent cluster data: both compressed and raw cluster blocks present in HLTOUT, indicates a failure "
290 "in the production of the data. Probably an HLT emulation chain is executed in the reconstruction "
291 "and produces data in addition to HLTOUT. Option 'ignore-hltout' is required in that case; "
292 "block specification 0x%08x", desc.fSpecification));
293 }
294 bHavePartitionRawData=true;
295 continue;
296 } else if (desc.fDataType==AliHLTTPCDefinitions::RemainingClustersCompressedDataType()) {
297 AliHLTUInt8_t slice = AliHLTTPCDefinitions::GetMinSliceNr(desc.fSpecification);
298 AliHLTUInt8_t partition = AliHLTTPCDefinitions::GetMinPatchNr(desc.fSpecification);
299 if (slice!=AliHLTTPCDefinitions::GetMaxSliceNr(desc.fSpecification) ||
300 partition!=AliHLTTPCDefinitions::GetMaxPatchNr(desc.fSpecification)) {
301 AliFatal(Form("inconsistent cluster data: can not handle blocks containing multiple partitions, "
302 "block specification 0x%08x", desc.fSpecification));
303 }
304 iResult=decoder.ReadClustersPartition(pContainer->BeginRemainingClusterBlock(0, desc.fSpecification),
305 reinterpret_cast<AliHLTUInt8_t*>(desc.fPtr),
306 desc.fSize,
307 desc.fSpecification);
308 if (iResult>0) nExtractedClusters+=iResult;
309 unsigned index=slice*AliHLTTPCTransform::GetNumberOfPatches()+partition;
310 if (index>=bHavePartitionData.size()) bHavePartitionData.resize(index, false);
311 if (bHavePartitionData[index]) {
312 AliFatal(Form("inconsistent cluster data: multiple data blocks of identical specification indicate a failure "
313 "in the production of the data. Probably an HLT emulation chain is executed in the reconstruction "
314 "and produces data in addition to HLTOUT. Option 'ignore-hltout' is required in that case; "
315 "block specification 0x%08x", desc.fSpecification));
316 }
317 bHavePartitionData[index]=true;
318 bHavePartitionData[index]=true;
319 if (bHavePartitionRawData) {
320 AliFatal(Form("inconsistent cluster data: both compressed and raw cluster blocks present in HLTOUT, indicates a failure "
321 "in the production of the data. Probably an HLT emulation chain is executed in the reconstruction "
322 "and produces data in addition to HLTOUT. Option 'ignore-hltout' is required in that case; "
323 "block specification 0x%08x", desc.fSpecification));
324 }
325 bHavePartitionCompressedData=true;
326 continue;
327 } else if (desc.fDataType==AliHLTTPCDefinitions::ClusterTracksCompressedDataType()) {
328 iResult=decoder.ReadTrackModelClustersCompressed(pContainer->BeginTrackModelClusterBlock(0),
329 reinterpret_cast<AliHLTUInt8_t*>(desc.fPtr),
330 desc.fSize,
331 desc.fSpecification);
332 continue;
333 }
686d5523 334 }
f652dd4a 335
336 pSystem->ReleaseHLTOUT(pHLTOUT);
686d5523 337
f652dd4a 338 if (iResult<0) return iResult;
339 return nExtractedClusters;
686d5523 340}
341
d60d120c 342int AliHLTTPCDataPublisherComponent::DoInit( int argc, const char** argv )
686d5523 343{
344 /// inherited from AliHLTComponent: component initialisation and argument scan.
345 int iResult=0;
346
347 // component configuration
348 //Stage 1: default initialization.
749f85ed 349 const char* defaultArguments="-detector TPC -datatype 'DDL_RAW ' 'TPC ' -skipempty";
f652dd4a 350 if ((iResult = ConfigureFromArgumentString(1, &defaultArguments)) < 0)
351 return iResult;
686d5523 352
353 //Stage 2: OCDB. - disabled
354 //TString cdbPath("HLT/ConfigTPC/");
355 //cdbPath += GetComponentID();
356 //
357 //iResult = ConfigureFromCDBTObjString(cdbPath);
358 //if (iResult < 0)
359 // return iResult;
360
361 //Stage 3: command line arguments.
362 if (argc && (iResult = ConfigureFromArgumentString(argc, argv)) < 0)
363 return iResult;
f652dd4a 364 if ((iResult=AliHLTRawReaderPublisherComponent::DoInit(0, NULL))<0)
365 return iResult;
366
367 auto_ptr<AliRawClusterContainer> container(new AliRawClusterContainer);
368 if (!container.get()) return -ENOMEM;
369
370 fClusters=container.release();
686d5523 371
372 return iResult;
373}
374
d60d120c 375int AliHLTTPCDataPublisherComponent::DoDeinit()
686d5523 376{
377 /// inherited from AliHLTComponent: component cleanup
378 int iResult=0;
379
f652dd4a 380 if (fpDecoder) delete fpDecoder;
381 fpDecoder=NULL;
382
686d5523 383 return iResult;
384}
385
d60d120c 386int AliHLTTPCDataPublisherComponent::ScanConfigurationArgument(int argc, const char** argv)
686d5523 387{
388 /// inherited from AliHLTComponent: argument scan
686d5523 389 if (argc<1) return 0;
749f85ed 390 int bMissingParam=0;
391 int i=0;
392 TString argument=argv[i];
686d5523 393
394 do {
749f85ed 395 // -publish-raw
396 if (argument.CompareTo("-publish-raw")==0) {
397 if ((bMissingParam=(++i>=argc))) break;
398 TString parameter=argv[i];
399 if (parameter.CompareTo("all")==0) {
400 fMode|=kPublishRawAll;
401 return 2;
402 } else if (parameter.CompareTo("filtered")==0) {
403 fMode|=kPublishRawFiltered;
404 fMode|=kRegisterClusterBlocks;
405 fMode&=~kPublishRawAll;
406 return 2;
407 } else if (parameter.CompareTo("off")==0) {
408 fMode&=~(kPublishRawAll|kPublishRawFiltered);
409 return 2;
410 } else {
411 HLTError("invalid parameter for argument %s, expecting either 'all', 'filtered', or 'off' instead of %s", argument.Data(), parameter.Data());
412 return -EPROTO;
413 }
414 }
415 // -publish-clusters
416 if (argument.CompareTo("-publish-clusters")==0) {
417 if ((bMissingParam=(++i>=argc))) break;
418 TString parameter=argv[i];
419 if (parameter.CompareTo("all")==0) {
420 fMode|=kPublishClustersAll;
421 return 2;
422 } else if (parameter.CompareTo("off")==0) {
423 fMode&=~(kPublishClustersAll);
424 return 2;
425 } else {
426 HLTError("invalid parameter for argument %s, expecting either 'all', or 'off' instead of %s", argument.Data(), parameter.Data());
427 return -EPROTO;
428 }
429 }
686d5523 430
431 } while (0); // using do-while only to have break available
432
f652dd4a 433 return AliHLTRawReaderPublisherComponent::ScanConfigurationArgument(argc, argv);
686d5523 434}
435
d60d120c 436int AliHLTTPCDataPublisherComponent::GetSpecificationFromEquipmentId(int id, AliHLTUInt32_t &specification) const
686d5523 437{
438 /// inherited from AliHLTRawReaderPublisherComponent: get specification
439
440 // FIXME: add common functionality to AliHLTDAQ
441 int partition;
442 int slice;
443 if (id < 840) {
444 partition = id % 2;
445 slice = (id - 768) / 2;
446 } else {
447 partition = (id % 4) + 2;
448 slice = (id - 840) / 4;
449 }
450 specification=(slice<<24)|(slice<<16)|(partition<<8)|partition;
451
452 return 0;
453}
454
d60d120c 455bool AliHLTTPCDataPublisherComponent::IsSelected(int equipmentId) const
686d5523 456{
457 /// inherited from AliHLTRawReaderPublisherComponent: check if a block is selected or not
f652dd4a 458 /// check if a raw data block needs to be published. This is the case if
459 /// there is no corresponding compressed data, i.e. function returns
460 /// only false if the block can be found in the cluster container
749f85ed 461 if (CheckMode(kPublishRawAll))
462 return true;
463 if (!CheckMode(kPublishRawFiltered))
464 return false;
465
f652dd4a 466 if (!fClusters)
467 return true;
468
469 int offset=AliHLTDAQ::DdlIDOffset(3);
470 int count=AliHLTDAQ::NumberOfDdls(3);
471 if (offset<0 || count<0)
472 return true;
473 if (equipmentId<offset)
474 return true;
475 equipmentId-=offset;
476 if (equipmentId>=count)
477 return true;
478 int slice=equipmentId<72?equipmentId/2:(equipmentId-72)/4;
749f85ed 479 int partition=equipmentId<72?equipmentId%2:((equipmentId-72)%4)+2;
f652dd4a 480 AliHLTUInt32_t specification=AliHLTTPCDefinitions::EncodeDataSpecification(slice, slice, partition, partition);
481 for (AliHLTComponentBlockDataList::const_iterator i=fClusters->GetBlockDescriptors().begin();
482 i!=fClusters->GetBlockDescriptors().end(); i++) {
483 if (i->fSpecification==specification)
484 return false;
485 }
486 return true;
487}
488
AliHLTTPCDataPublisherComponent::AliRawClusterContainer::AliRawClusterContainer()
  : AliHLTLogging()
  , fBlockCount(0)            // number of cluster blocks started so far
  , fTotalClusterCount(0)     // clusters over all blocks
  , fBlockClusterCount(0)     // clusters in the currently filled block
  , fpBuffer(NULL)            // external target buffer, not owned
  , fBufferSize(0)            // capacity of the target buffer
  , fDescriptors()            // block descriptors of the filled cluster blocks
  , fCurrentBlock(NULL)       // header of the partition block being filled
  , fTrackModelClusters(NULL) // header of the track model cluster block
  , fTrackModelClusterMap()   // cluster ids of the track model clusters
  , fIterator()               // reusable fill iterator
  , fState(0)                 // 0 ok, -ENOSPC when the buffer overflowed
{
  // constructor
}
505
AliHLTTPCDataPublisherComponent::AliRawClusterContainer::~AliRawClusterContainer()
{
  // destructor
  // nothing owned: the target buffer is external, everything else is value members
}
510
d60d120c 511int AliHLTTPCDataPublisherComponent::AliRawClusterContainer::SetTargetBuffer(AliHLTUInt8_t* pBuffer, int size)
f652dd4a 512{
513 // set/reset the external target buffer
514 Clear();
515 fpBuffer=pBuffer;
516 fBufferSize=pBuffer?size:0;
517 return 0;
518}
519
int AliHLTTPCDataPublisherComponent::AliRawClusterContainer::Sort()
{
  // merge track model clusters into partition cluster blocks
  // not implemented yet, always returns -ENOSYS

  // TODO: implement merging
  // decoding of track model clusters needs to be done after all
  // partition blocks have been decoded. The track model clusters are
  // then at the end of the target buffer and have to be sorted into the
  // other blocks
  // 1) move track model cluster block by its own size back in buffer
  //    if not enough space, allocate temporary buffer and increase the
  //    size estimator for the next event
  // 2) fill the index grid
  // 3) make appropriate gaps between the partition cluster blocks
  // 4) copy clusters into the partitions and update descriptors
  return -ENOSYS;
}
537
int AliHLTTPCDataPublisherComponent::AliRawClusterContainer::CopyBlockDescriptors(AliHLTComponentBlockDataList& target) const
{
  // fill block descriptors of extracted partition cluster blocks to target list
  // the descriptors are prepended to any existing entries of the list
  // @return number of copied descriptors
  target.insert(target.begin(), fDescriptors.begin(), fDescriptors.end());
  return fDescriptors.size();
}
686d5523 544
AliHLTTPCDataPublisherComponent::AliRawClusterContainer::iterator& AliHLTTPCDataPublisherComponent::AliRawClusterContainer::BeginPartitionClusterBlock(int count, AliHLTUInt32_t specification)
{
  /// iterator of partition clusters block of specification
  /// @param count          expected number of clusters (currently unused downstream)
  /// @param specification  slice/partition encoded data specification of the block
  return ClusterIterator(count, AliHLTTPCDefinitions::RemainingClustersCompressedDataType(), specification, fCurrentBlock);
}
550
AliHLTTPCDataPublisherComponent::AliRawClusterContainer::iterator& AliHLTTPCDataPublisherComponent::AliRawClusterContainer::BeginTrackModelClusterBlock(int count)
{
  /// iterator of track model clusters
  // NOTE(review): 0x23000500 is a magic specification — presumably a
  // reserved slice/partition encoding for track model clusters; confirm
  return ClusterIterator(count, AliHLTTPCDefinitions::ClusterTracksCompressedDataType(), 0x23000500, fTrackModelClusters);
}
556
AliHLTTPCDataPublisherComponent::AliRawClusterContainer::iterator& AliHLTTPCDataPublisherComponent::AliRawClusterContainer::ClusterIterator(int /*count*/, AliHLTComponentDataType dt, AliHLTUInt32_t specification, AliHLTTPCRawClusterData* &pData)
{
  /// start a new cluster block of the given data type and specification
  /// and return the fill iterator; pData receives the pointer to the
  /// block header inside the target buffer (NULL if the buffer is full)
  fBlockCount++;
  // reset the reusable member iterator in place: explicit destructor
  // call followed by placement new below
  fIterator.~iterator();
  fCurrentBlock=NULL;
  fTrackModelClusters=NULL;
  fTrackModelClusterMap.clear();
  fBlockClusterCount=0;
  // compute the fill level of the buffer and check that this block
  // has not been started before
  AliHLTUInt32_t filled=0;
  for (AliHLTComponentBlockDataList::const_iterator desc=fDescriptors.begin();
       desc!=fDescriptors.end(); desc++) {
    filled+=desc->fSize;
    if (desc->fSpecification==specification &&
        desc->fDataType==dt) {
      HLTFatal("partition cluster block with data type %s and specification 0x%08x has been already processed",
               AliHLTComponent::DataType2Text(dt).c_str(), specification);
      // force the capacity check below to fail so an invalid iterator is returned
      filled=fBufferSize;
    }
  }

  // insert an empty data block which is then updated later
  AliHLTComponentBlockData bd;
  AliHLTComponent::FillBlockData(bd);
  bd.fPtr=NULL;
  bd.fSize=0;
  bd.fOffset=filled;
  bd.fDataType=dt;
  bd.fSpecification=specification;
  fDescriptors.push_back(bd);

  // initialize only the header, during filling the cluster count of the header
  // and the block size will be incremented
  AliHLTUInt32_t blocksize=sizeof(AliHLTTPCRawClusterData);
  if (filled+blocksize>(unsigned)fBufferSize || fpBuffer==NULL) {
    // no buffer or not even room for the header: return an iterator
    // without container, NextCluster will never be called through it
    new (&fIterator) iterator;
    return fIterator;
  }
  pData=reinterpret_cast<AliHLTTPCRawClusterData*>(fpBuffer+filled);
  pData->fVersion=0;
  pData->fCount=0;
  fDescriptors.back().fSize=blocksize;
  new (&fIterator) iterator(this);
  return fIterator;
}
602
AliHLTTPCRawCluster* AliHLTTPCDataPublisherComponent::AliRawClusterContainer::NextCluster(int slice, int partition)
{
  /// return the pointer to the next free cluster slot in the current block,
  /// or NULL if no block is active or the target buffer is exhausted
  // NOTE(review): the counters are incremented even when NULL is returned;
  // GetClusterCount() is used as a size estimator on -ENOSPC, so the
  // overcount looks intentional — confirm before changing
  fTotalClusterCount++;
  fBlockClusterCount++;
  if (!fCurrentBlock && !fTrackModelClusters)
    return NULL;
  if (fDescriptors.size()==0)
    return NULL;
  AliHLTTPCRawClusterData* data=fCurrentBlock?fCurrentBlock:fTrackModelClusters;
  // check that one more cluster still fits into the target buffer
  if (int(fDescriptors.back().fOffset+fDescriptors.back().fSize+sizeof(AliHLTTPCRawCluster))>=fBufferSize) {
    fState=-ENOSPC;
    return NULL;
  }
  // grow the block header count and the descriptor size in lockstep
  data->fCount++;
  fDescriptors.back().fSize+=sizeof(AliHLTTPCRawCluster);
  if (fTrackModelClusters)
    fTrackModelClusterMap.push_back(AliHLTTPCSpacePointData::GetID(slice, partition, fBlockClusterCount));
  return data->fClusters+(data->fCount-1);
}
623
void AliHLTTPCDataPublisherComponent::AliRawClusterContainer::Clear(Option_t * /*option*/)
{
  /// internal cleanup: reset all counters, detach from the external
  /// buffer and drop all block descriptors
  fBlockCount=0;
  fTotalClusterCount=0;
  fBlockClusterCount=0;
  fpBuffer=NULL;     // buffer is external, only the reference is dropped
  fBufferSize=0;
  fCurrentBlock=NULL;
  fTrackModelClusters=NULL;
  fTrackModelClusterMap.clear();
  fDescriptors.clear();
  fState=0;
}
638
void AliHLTTPCDataPublisherComponent::AliRawClusterContainer::Print(Option_t */*option*/) const
{
  /// print info — intentionally empty, not implemented yet
}
643
d60d120c 644AliHLTTPCDataPublisherComponent::AliRawClusterContainer::iterator& AliHLTTPCDataPublisherComponent::AliRawClusterContainer::iterator::Next(int slice, int partition)
f652dd4a 645{
646 // increment iterator
647 if (fContainer) {
648 fCluster=fContainer->NextCluster(slice, partition);
649 if (fCluster) memset(fCluster, 0, sizeof(AliHLTTPCRawCluster));
650 } else {
651 fCluster=NULL;
652 }
653 return *this;
686d5523 654}