2 //**************************************************************************
3 //* This file is property of and copyright by the *
4 //* ALICE Experiment at CERN, All rights reserved. *
6 //* Primary Authors: Matthias Richter <Matthias.Richter@ift.uib.no> *
8 //* Permission to use, copy, modify and distribute this software and its *
9 //* documentation strictly for non-commercial purposes is hereby granted *
10 //* without fee, provided that the above copyright notice appears in all *
11 //* copies and that both the copyright notice and this permission notice *
12 //* appear in the supporting documentation. The authors make no claims *
13 //* about the suitability of this software for any purpose. It is *
14 //* provided "as is" without express or implied warranty. *
15 //**************************************************************************
17 /// @file AliHLTTPCDataPublisherComponent.cxx
18 /// @author Matthias Richter
23 #include "AliHLTTPCDataPublisherComponent.h"
24 #include "AliHLTTPCDefinitions.h"
25 #include "AliHLTTPCTransform.h"
26 #include "AliHLTTPCClusterMCData.h"
27 #include "AliHLTTPCDataCompressionDecoder.h"
28 #include "AliHLTPluginBase.h"
29 #include "AliHLTSystem.h"
30 #include "AliHLTOUT.h"
31 #include "AliHLTDAQ.h"
32 #include "AliHLTTemplates.h"
38 ClassImp(AliHLTTPCDataPublisherComponent)
/// Default constructor.
/// Initializes the base raw-reader publisher and sets the default publishing
/// mode; fArraySelected (partition selection array) starts out unset.
/// NOTE(review): the initializer list appears truncated in this view —
/// members such as the cluster container and the decoder pointer are
/// presumably initialized on lines not visible here; confirm against the
/// full file.
AliHLTTPCDataPublisherComponent::AliHLTTPCDataPublisherComponent()
  : AliHLTRawReaderPublisherComponent()
  , fMode(kPublisherModeDefault)
  , fArraySelected(NULL)
50 AliHLTTPCDataPublisherComponent::~AliHLTTPCDataPublisherComponent()
53 if (fpDecoder) delete fpDecoder;
58 const char* AliHLTTPCDataPublisherComponent::GetComponentID()
60 /// inherited from AliHLTComponent: id of the component
61 return "TPCDataPublisher";
64 AliHLTComponent* AliHLTTPCDataPublisherComponent::Spawn()
66 /// inherited from AliHLTComponent: spawn function.
67 return new AliHLTTPCDataPublisherComponent;
/// inherited from AliHLTProcessor: data processing.
/// Decodes compressed TPC clusters from the HLTOUT into the output buffer
/// and/or publishes raw DDL blocks via the base class, depending on the
/// configured mode bits (kPublishClustersAll, kRegisterClusterBlocks,
/// kPublishRaw*).
/// NOTE(review): lines are elided in this view — e.g. the 'AliHLTUInt32_t&
/// size' parameter implied by the uses of 'size' below and the declaration
/// of 'iResult' are not visible; confirm against the full file.
int AliHLTTPCDataPublisherComponent::GetEvent(const AliHLTComponentEventData& evtData,
                                              AliHLTComponentTriggerData& trigData,
                                              AliHLTUInt8_t* outputPtr,
                                              AliHLTComponentBlockDataList& outputBlocks)
  /// inherited from AliHLTProcessor: data processing
  if (!IsDataEvent()) return 0;  // only real data events are processed
  AliHLTComponentBlockDataList clusterBlocks;  // descriptors of decoded cluster blocks
  AliHLTUInt32_t offset=0;                     // running fill position in the output buffer
  AliHLTUInt32_t capacity=size;                // total available output buffer size
  if (CheckMode(kPublishClustersAll)) {
    // set the target buffer only if the clusters should be published
    fClusters->SetTargetBuffer(outputPtr+offset, capacity-offset);
  } else if (CheckMode(kRegisterClusterBlocks)) {
    // data blocks are registered in the container, track model cluster blocks
    // are unpacked but not stored in order to find the included partitions
  if (CheckMode(kPublishClustersAll) ||
      CheckMode(kRegisterClusterBlocks)) {
    if ((iResult=ReadClusterFromHLTOUT(fClusters))>=0) {
      if ((iResult=fClusters->GetState())>=0) {
        if (fClusters->CopyBlockDescriptors(clusterBlocks)>0) {
          // advance 'offset' beyond the last byte used by any cluster block
          for (AliHLTComponentBlockDataList::const_iterator bd=clusterBlocks.begin();
               bd!=clusterBlocks.end(); bd++) {
            if (offset<bd->fOffset+bd->fSize)
              offset=bd->fOffset+bd->fSize;
      } else if (iResult==-ENOSPC) {
        // buffer too small: estimate the required size from block/cluster counts
        offset=fClusters->GetBlockCount()*sizeof(AliHLTTPCRawClusterData)+
          fClusters->GetClusterCount()*sizeof(AliHLTTPCRawCluster);
        iResult=0; // keep going to also accumulate the size for raw data blocks
    if (iResult==-ENODATA) {
      // return indicates absence of compressed clusters in HLTOUT
      // but is not treated as an error further downstream
  if (offset<=capacity) {
    size=capacity-offset;  // remaining space for the raw data blocks
    // there is clearly not enough space, keep the full buffer to
    // publish the raw data blocks and determine the size of those
    // data will be overwritten
  unsigned firstBlock=outputBlocks.size();  // remember where raw blocks start
  iResult=AliHLTRawReaderPublisherComponent::GetEvent(evtData, trigData, outputPtr, size, outputBlocks);
  if (iResult==-ENOSPC) {
    // not enough space in the buffer, fMaxSize has been updated by base class
  } else if (iResult>=0) {
    if (outputBlocks.size()>firstBlock && CheckMode(kPublishRawFiltered)) {
      AliInfo(Form("publishing %lu DDL(s) for emulation of compressed TPC clusters", outputBlocks.size()-firstBlock));
    // correct for the shifted buffer which was provided to the
    for (AliHLTComponentBlockDataList::iterator bd=outputBlocks.begin();
         bd!=outputBlocks.end(); bd++) {
      if (firstBlock>0) {firstBlock--; continue;}  // skip pre-existing blocks
  if (iResult>=0 && capacity<offset && fMaxSize<(int)offset) {
    // update the size requirement
    outputBlocks.clear();
  if (clusterBlocks.size()>0 && !CheckMode(kRegisterClusterBlocks)) {
    // prepend the decoded cluster blocks to the published raw blocks
    outputBlocks.insert(outputBlocks.begin(), clusterBlocks.begin(), clusterBlocks.end());
167 int AliHLTTPCDataPublisherComponent::ReadClusterFromHLTOUT(AliHLTTPCDataPublisherComponent::AliRawClusterContainer* pContainer)
169 // check the HLTOUT for availability of compressed data blocks
171 AliHLTSystem* pSystem=AliHLTPluginBase::GetInstance();
173 // global system not initialized
176 AliHLTOUT* pHLTOUT=pSystem->RequestHLTOUT();
178 // not HLTOUT, hence not clusters
183 fpDecoder=new AliHLTTPCDataCompressionDecoder;
187 AliError("failed to create decoder instance");
191 AliHLTTPCDataCompressionDecoder& decoder=*fpDecoder;
193 decoder.SetVerbosity(GetVerbosity());
195 bool bNextBlock=false;
196 // add cluster id and mc information data blocks
197 for (bNextBlock=(pHLTOUT->SelectFirstDataBlock()>=0);
198 bNextBlock; bNextBlock=(pHLTOUT->SelectNextDataBlock()>=0)) {
199 AliHLTComponentBlockData desc;
200 if ((iResult=pHLTOUT->GetDataBuffer(desc))<0) {
203 if (desc.fDataType==AliHLTTPCDefinitions::DataCompressionDescriptorDataType()) {
205 if ((iResult=decoder.AddCompressionDescriptor(&desc))<0) {
209 if (desc.fDataType==AliHLTTPCDefinitions::AliHLTDataTypeClusterMCInfo()) {
210 // add mc information
211 if ((iResult=decoder.AddClusterMCData(&desc))<0) {
215 if (desc.fDataType==AliHLTTPCDefinitions::RemainingClusterIdsDataType() ||
216 desc.fDataType==AliHLTTPCDefinitions::ClusterIdTracksDataType()) {
218 if ((iResult=decoder.AddClusterIds(&desc))<0) {
224 bool bHavePartitionRawData=false;
225 bool bHavePartitionCompressedData=false;
226 vector<bool> bHavePartitionData(216, false);
230 int nExtractedClusters=0;
231 for (bNextBlock=(pHLTOUT->SelectFirstDataBlock()>=0);
232 bNextBlock; bNextBlock=(pHLTOUT->SelectNextDataBlock()>=0)) {
233 decoder.SetPadShift(0.0);
234 AliHLTComponentBlockData desc;
235 if ((iResult=pHLTOUT->GetDataBuffer(desc))<0) {
238 if (desc.fDataType==AliHLTTPCDefinitions::RawClustersDataType()) {
239 // This is a special handling of data blocks produced with v5-01-Release
240 // The pad shift by 0.5 was not included in the data but was applied in the
241 // unpacking in this class. Changed in r51306, the next tag containing this
242 // change in the online system is v5-01-Rev-07. There are only very few runs
243 // of Sep 2011 with recorded clusters not containing the 0.5 shift
244 // There was also a chenge in the data type of the compressed partition
245 // cluster blocks which helps to identify the blocks which need the pad shift
247 if (desc.fSize<sizeof(AliHLTTPCRawClusterData)) continue;
248 const AliHLTTPCRawClusterData* clusterData = reinterpret_cast<const AliHLTTPCRawClusterData*>(desc.fPtr);
249 if (!clusterData) continue;
250 if (clusterData->fVersion==1) {
251 // compressed clusters without the pad shift
252 // no raw clusters (version==0) have ever been recorded
253 decoder.SetPadShift(0.5);
255 AliHLTUInt8_t slice = AliHLTTPCDefinitions::GetMinSliceNr(desc.fSpecification);
256 AliHLTUInt8_t partition = AliHLTTPCDefinitions::GetMinPatchNr(desc.fSpecification);
257 if (slice!=AliHLTTPCDefinitions::GetMaxSliceNr(desc.fSpecification) ||
258 partition!=AliHLTTPCDefinitions::GetMaxPatchNr(desc.fSpecification)) {
259 AliFatal(Form("inconsistent cluster data: can not handle blocks containing multiple partitions, "
260 "block specification 0x%08x", desc.fSpecification));
262 iResult=decoder.ReadClustersPartition(pContainer->BeginRemainingClusterBlock(0, desc.fSpecification),
263 reinterpret_cast<AliHLTUInt8_t*>(desc.fPtr),
265 desc.fSpecification);
266 if (iResult>=0) nExtractedClusters+=iResult;
268 AliFatal(Form("processing of cluster block 0x%08x failed with error code %d", desc.fSpecification, iResult));
270 unsigned index=slice*AliHLTTPCTransform::GetNumberOfPatches()+partition;
271 if (index>=bHavePartitionData.size()) bHavePartitionData.resize(index, false);
272 if (bHavePartitionData[index]) {
273 AliFatal(Form("inconsistent cluster data: multiple data blocks of identical specification indicate a failure "
274 "in the production of the data. Probably an HLT emulation chain is executed in the reconstruction "
275 "and produces data in addition to HLTOUT. Option 'ignore-hltout' is required in that case; "
276 "block specification 0x%08x", desc.fSpecification));
278 bHavePartitionData[index]=true;
279 if (bHavePartitionCompressedData) {
280 AliFatal(Form("inconsistent cluster data: both compressed and raw cluster blocks present in HLTOUT, indicates a failure "
281 "in the production of the data. Probably an HLT emulation chain is executed in the reconstruction "
282 "and produces data in addition to HLTOUT. Option 'ignore-hltout' is required in that case; "
283 "block specification 0x%08x", desc.fSpecification));
285 bHavePartitionRawData=true;
287 } else if (desc.fDataType==AliHLTTPCDefinitions::RemainingClustersCompressedDataType()) {
288 AliHLTUInt8_t slice = AliHLTTPCDefinitions::GetMinSliceNr(desc.fSpecification);
289 AliHLTUInt8_t partition = AliHLTTPCDefinitions::GetMinPatchNr(desc.fSpecification);
290 if (slice!=AliHLTTPCDefinitions::GetMaxSliceNr(desc.fSpecification) ||
291 partition!=AliHLTTPCDefinitions::GetMaxPatchNr(desc.fSpecification)) {
292 AliFatal(Form("inconsistent cluster data: can not handle blocks containing multiple partitions, "
293 "block specification 0x%08x", desc.fSpecification));
295 iResult=decoder.ReadClustersPartition(pContainer->BeginRemainingClusterBlock(0, desc.fSpecification),
296 reinterpret_cast<AliHLTUInt8_t*>(desc.fPtr),
298 desc.fSpecification);
299 if (iResult>0) nExtractedClusters+=iResult;
300 unsigned index=slice*AliHLTTPCTransform::GetNumberOfPatches()+partition;
301 if (index>=bHavePartitionData.size()) bHavePartitionData.resize(index, false);
302 if (bHavePartitionData[index]) {
303 AliFatal(Form("inconsistent cluster data: multiple data blocks of identical specification indicate a failure "
304 "in the production of the data. Probably an HLT emulation chain is executed in the reconstruction "
305 "and produces data in addition to HLTOUT. Option 'ignore-hltout' is required in that case; "
306 "block specification 0x%08x", desc.fSpecification));
308 bHavePartitionData[index]=true;
309 bHavePartitionData[index]=true;
310 if (bHavePartitionRawData) {
311 AliFatal(Form("inconsistent cluster data: both compressed and raw cluster blocks present in HLTOUT, indicates a failure "
312 "in the production of the data. Probably an HLT emulation chain is executed in the reconstruction "
313 "and produces data in addition to HLTOUT. Option 'ignore-hltout' is required in that case; "
314 "block specification 0x%08x", desc.fSpecification));
316 bHavePartitionCompressedData=true;
318 } else if (desc.fDataType==AliHLTTPCDefinitions::ClusterTracksCompressedDataType()) {
319 iResult=decoder.ReadTrackModelClustersCompressed(pContainer->BeginTrackModelClusterBlock(0),
320 reinterpret_cast<AliHLTUInt8_t*>(desc.fPtr),
322 desc.fSpecification);
327 pSystem->ReleaseHLTOUT(pHLTOUT);
329 if (iResult<0) return iResult;
330 return nExtractedClusters;
/// inherited from AliHLTComponent: component initialisation and argument scan.
/// Three-stage configuration: built-in defaults, OCDB (disabled), then the
/// command line; finally the base class is initialized and the cluster
/// container allocated.
/// NOTE(review): the error-return statements after the 'if' checks are not
/// visible in this view — confirm against the full file.
int AliHLTTPCDataPublisherComponent::DoInit( int argc, const char** argv )
  /// inherited from AliHLTComponent: component initialisation and argument scan.
  // component configuration
  //Stage 1: default initialization.
  // publish TPC raw DDL data, skip empty blocks
  const char* defaultArguments="-detector TPC -datatype 'DDL_RAW ' 'TPC ' -skipempty";
  if ((iResult = ConfigureFromArgumentString(1, &defaultArguments)) < 0)
  //Stage 2: OCDB. - disabled
  //TString cdbPath("HLT/ConfigTPC/");
  //cdbPath += GetComponentID();
  //iResult = ConfigureFromCDBTObjString(cdbPath);
  //Stage 3: command line arguments.
  if (argc && (iResult = ConfigureFromArgumentString(argc, argv)) < 0)
  if ((iResult=AliHLTRawReaderPublisherComponent::DoInit(0, NULL))<0)
  // NOTE(review): auto_ptr is deprecated since C++11; unique_ptr would be the
  // modern equivalent if the project's C++ standard allows it
  auto_ptr<AliRawClusterContainer> container(new AliRawClusterContainer);
  if (!container.get()) return -ENOMEM;
  fClusters=container.release();  // ownership transferred to the component
/// inherited from AliHLTComponent: component cleanup.
/// Releases the decoder instance created lazily in ReadClusterFromHLTOUT.
int AliHLTTPCDataPublisherComponent::DoDeinit()
  /// inherited from AliHLTComponent: component cleanup
  if (fpDecoder) delete fpDecoder;
  // NOTE(review): fpDecoder is not reset to NULL in the visible lines; the
  // destructor deletes it again, which would be a double delete unless an
  // elided line nulls it — confirm against the full file.
/// inherited from AliHLTComponent: argument scan.
/// Supported options:
///   -publish-raw all|filtered|off   control publishing of raw DDL blocks
///   -publish-clusters all|off       control publishing of decoded clusters
/// Unhandled arguments fall through to the base class scanner.
int AliHLTTPCDataPublisherComponent::ScanConfigurationArgument(int argc, const char** argv)
  /// inherited from AliHLTComponent: argument scan
  if (argc<1) return 0;
  // NOTE(review): the enclosing 'do {' and the declarations of 'i' and
  // 'bMissingParam' are not visible in this view — confirm against the
  // full file.
  TString argument=argv[i];
  if (argument.CompareTo("-publish-raw")==0) {
    if ((bMissingParam=(++i>=argc))) break;  // option requires a parameter
    TString parameter=argv[i];
    if (parameter.CompareTo("all")==0) {
      fMode|=kPublishRawAll;
    } else if (parameter.CompareTo("filtered")==0) {
      // 'filtered' implies registering cluster blocks so the included
      // partitions are known; mutually exclusive with 'all'
      fMode|=kPublishRawFiltered;
      fMode|=kRegisterClusterBlocks;
      fMode&=~kPublishRawAll;
    } else if (parameter.CompareTo("off")==0) {
      fMode&=~(kPublishRawAll|kPublishRawFiltered);
      HLTError("invalid parameter for argument %s, expecting either 'all', 'filtered', or 'off' instead of %s", argument.Data(), parameter.Data());
  if (argument.CompareTo("-publish-clusters")==0) {
    if ((bMissingParam=(++i>=argc))) break;  // option requires a parameter
    TString parameter=argv[i];
    if (parameter.CompareTo("all")==0) {
      fMode|=kPublishClustersAll;
    } else if (parameter.CompareTo("off")==0) {
      fMode&=~(kPublishClustersAll);
      HLTError("invalid parameter for argument %s, expecting either 'all', or 'off' instead of %s", argument.Data(), parameter.Data());
  } while (0); // using do-while only to have break available
  return AliHLTRawReaderPublisherComponent::ScanConfigurationArgument(argc, argv);
/// inherited from AliHLTRawReaderPublisherComponent: get specification.
/// Maps a TPC DDL equipment id to the HLT data specification encoding
/// (slice<<24)|(slice<<16)|(partition<<8)|partition.
/// NOTE(review): the branch structure selecting between the two slice
/// formulas and the declarations of 'slice'/'partition' are not visible in
/// this view — presumably ids below a threshold use the first pair of
/// formulas (inner readout) and the rest the second (outer); confirm
/// against the full file.
int AliHLTTPCDataPublisherComponent::GetSpecificationFromEquipmentId(int id, AliHLTUInt32_t &specification) const
  /// inherited from AliHLTRawReaderPublisherComponent: get specification
  // FIXME: add common functionality to AliHLTDAQ
  slice = (id - 768) / 2;        // inner readout: 2 DDLs per slice starting at id 768
  partition = (id % 4) + 2;      // outer readout: partitions 2..5
  slice = (id - 840) / 4;        // outer readout: 4 DDLs per slice starting at id 840
  specification=(slice<<24)|(slice<<16)|(partition<<8)|partition;
bool AliHLTTPCDataPublisherComponent::IsSelected(int equipmentId) const
  /// inherited from AliHLTRawReaderPublisherComponent: check if a block is selected or not
  /// check if a raw data block needs to be published. This is the case if
  /// there is no corresponding compressed data, i.e. function returns
  /// only false if the block can be found in the cluster container
  if (CheckMode(kPublishRawAll))      // publish everything unconditionally
  if (!CheckMode(kPublishRawFiltered)) // filtering disabled
  int offset=AliHLTDAQ::DdlIDOffset(3);  // first TPC DDL id (detector index 3)
  int count=AliHLTDAQ::NumberOfDdls(3);  // number of TPC DDLs
  if (offset<0 || count<0)
  if (equipmentId<offset)
  // NOTE(review): the comparison below against 'count' suggests an elided
  // 'equipmentId-=offset;' before this point — confirm against the full file.
  if (equipmentId>=count)
  // TPC-relative DDL id -> slice/partition: ids 0-71 are the 2 inner readout
  // partitions per slice, ids 72+ the 4 outer partitions (2..5) per slice
  int slice=equipmentId<72?equipmentId/2:(equipmentId-72)/4;
  int partition=equipmentId<72?equipmentId%2:((equipmentId-72)%4)+2;
  AliHLTUInt32_t specification=AliHLTTPCDefinitions::EncodeDataSpecification(slice, slice, partition, partition);
  // not selected (i.e. raw data suppressed) if a decoded cluster block of
  // this specification exists in the container
  for (AliHLTComponentBlockDataList::const_iterator i=fClusters->GetBlockDescriptors().begin();
       i!=fClusters->GetBlockDescriptors().end(); i++) {
    if (i->fSpecification==specification)
/// Default constructor of the container for decoded raw clusters.
/// NOTE(review): the initializer list appears truncated in this view —
/// e.g. the target buffer pointer/size and the descriptor list members are
/// not visible; confirm against the full file.
AliHLTTPCDataPublisherComponent::AliRawClusterContainer::AliRawClusterContainer()
  , fTotalClusterCount(0)      // clusters accumulated over all blocks
  , fBlockClusterCount(0)      // clusters in the block currently being filled
  , fCurrentBlock(NULL)        // partition cluster block being filled
  , fTrackModelClusters(NULL)  // track model cluster block being filled
  , fTrackModelClusterMap()    // cluster ids for later track-model merging
/// Destructor of the cluster container.
AliHLTTPCDataPublisherComponent::AliRawClusterContainer::~AliRawClusterContainer()
/// Set or reset the external target buffer that receives the decoded
/// cluster blocks.
/// @param pBuffer  target buffer, NULL to reset
/// @param size     capacity of the buffer in bytes
int AliHLTTPCDataPublisherComponent::AliRawClusterContainer::SetTargetBuffer(AliHLTUInt8_t* pBuffer, int size)
  // set/reset the external target buffer
  // a NULL buffer forces the usable size to 0 so nothing can be written
  fBufferSize=pBuffer?size:0;
/// Merge track model clusters into the partition cluster blocks.
/// Currently a placeholder — see the TODO plan below.
int AliHLTTPCDataPublisherComponent::AliRawClusterContainer::Sort()
  // merge track model clusters into partition cluster blocks
  // TODO: implement merging
  // decoding of track model clusters needs to be done after all
  // partition blocks have been decoded. The track model clusters are
  // then at the end of the target buffer and have to be sorted into the
  // 1) move track model cluster block by its own size back in buffer
  //    if not enough space, allocate temporary buffer and increase the
  //    size estimator for the next event
  // 2) fill the index grid
  // 3) make appropriate gaps between the partition cluster blocks
  // 4) copy clusters into the partitions and update descriptors
529 int AliHLTTPCDataPublisherComponent::AliRawClusterContainer::CopyBlockDescriptors(AliHLTComponentBlockDataList& target) const
531 // fill block descriptors of extracted partition cluster blocks to target list
532 target.insert(target.begin(), fDescriptors.begin(), fDescriptors.end());
533 return fDescriptors.size();
536 AliHLTTPCDataPublisherComponent::AliRawClusterContainer::iterator& AliHLTTPCDataPublisherComponent::AliRawClusterContainer::BeginPartitionClusterBlock(int count, AliHLTUInt32_t specification)
538 /// iterator of partition clusters block of specification
539 return ClusterIterator(count, AliHLTTPCDefinitions::RemainingClustersCompressedDataType(), specification, fCurrentBlock);
542 AliHLTTPCDataPublisherComponent::AliRawClusterContainer::iterator& AliHLTTPCDataPublisherComponent::AliRawClusterContainer::BeginTrackModelClusterBlock(int count)
544 /// iterator of track model clusters
545 return ClusterIterator(count, AliHLTTPCDefinitions::ClusterTracksCompressedDataType(), 0x23000500, fTrackModelClusters);
/// Common worker for the Begin*ClusterBlock methods: append a new data block
/// of the given type/specification in the target buffer and return a fill
/// iterator for it.
/// NOTE(review): lines (braces, fOffset bookkeeping, computation of
/// 'filled') appear elided in this view; comments annotate only the visible
/// statements.
AliHLTTPCDataPublisherComponent::AliRawClusterContainer::iterator& AliHLTTPCDataPublisherComponent::AliRawClusterContainer::ClusterIterator(int /*count*/, AliHLTComponentDataType dt, AliHLTUInt32_t specification, AliHLTTPCRawClusterData* &pData)
  /// iterator of partition clusters block of specification
  // destroy the previous iterator in place; re-created by placement new below
  fIterator.~iterator();
  fTrackModelClusters=NULL;
  fTrackModelClusterMap.clear();
  fBlockClusterCount=0;
  AliHLTUInt32_t filled=0;  // bytes already used in the target buffer
  // a block of this type and specification must not have been created yet
  for (AliHLTComponentBlockDataList::const_iterator desc=fDescriptors.begin();
       desc!=fDescriptors.end(); desc++) {
    if (desc->fSpecification==specification &&
        desc->fDataType==dt) {
      HLTFatal("partition cluster block with data type %s and specification 0x%08x has been already processed",
               AliHLTComponent::DataType2Text(dt).c_str(), specification);
  // insert an empty data block which is than updated later
  AliHLTComponentBlockData bd;
  AliHLTComponent::FillBlockData(bd);
  bd.fSpecification=specification;
  fDescriptors.push_back(bd);
  // initialize only the header, during filling the cluster count of the header
  // and the block size will be incremented
  AliHLTUInt32_t blocksize=sizeof(AliHLTTPCRawClusterData);
  if (filled+blocksize>(unsigned)fBufferSize || fpBuffer==NULL) {
    // no buffer space available: construct a default (invalid) iterator
    new (&fIterator) iterator;
  pData=reinterpret_cast<AliHLTTPCRawClusterData*>(fpBuffer+filled);
  fDescriptors.back().fSize=blocksize;
  new (&fIterator) iterator(this);  // valid iterator bound to this container
/// Advance to the next cluster: grow the current block by one cluster and
/// return a pointer to the freshly added (uninitialized) cluster slot.
/// NOTE(review): error branches and the increment of data->fCount are not
/// visible in this view; also the counters are incremented before the
/// validity checks, so they can overcount on failure — confirm intent
/// against the full file.
AliHLTTPCRawCluster* AliHLTTPCDataPublisherComponent::AliRawClusterContainer::NextCluster(int slice, int partition)
  /// increment to next cluster
  fTotalClusterCount++;
  fBlockClusterCount++;
  if (!fCurrentBlock && !fTrackModelClusters)   // no block is being filled
  if (fDescriptors.size()==0)                   // no descriptor to update
  AliHLTTPCRawClusterData* data=fCurrentBlock?fCurrentBlock:fTrackModelClusters;
  // ensure one more cluster fits into the target buffer
  if (int(fDescriptors.back().fOffset+fDescriptors.back().fSize+sizeof(AliHLTTPCRawCluster))>=fBufferSize) {
  fDescriptors.back().fSize+=sizeof(AliHLTTPCRawCluster);
  if (fTrackModelClusters)
    // remember the cluster id for the later merging of track model clusters
    fTrackModelClusterMap.push_back(AliHLTTPCSpacePointData::GetID(slice, partition, fBlockClusterCount));
  return data->fClusters+(data->fCount-1);  // pointer to the last cluster slot
/// Internal cleanup: reset counters, fill pointers and the descriptor list
/// so the container can be reused for the next event.
/// NOTE(review): buffer pointer/size resets appear elided in this view.
void AliHLTTPCDataPublisherComponent::AliRawClusterContainer::Clear(Option_t * /*option*/)
  fTotalClusterCount=0;
  fBlockClusterCount=0;
  fTrackModelClusters=NULL;
  fTrackModelClusterMap.clear();
  fDescriptors.clear();
/// Print the content of the container (body not visible in this view).
void AliHLTTPCDataPublisherComponent::AliRawClusterContainer::Print(Option_t */*option*/) const
635 AliHLTTPCDataPublisherComponent::AliRawClusterContainer::iterator& AliHLTTPCDataPublisherComponent::AliRawClusterContainer::iterator::Next(int slice, int partition)
637 // increment iterator
639 fCluster=fContainer->NextCluster(slice, partition);
640 if (fCluster) memset(fCluster, 0, sizeof(AliHLTTPCRawCluster));