///
#include "AliHLTTPCClusterAccessHLTOUT.h"
+#include "AliHLTTPCDataCompressionDecoder.h"
#include "AliHLTTPCDefinitions.h"
#include "AliHLTTPCClusterDataFormat.h"
#include "AliHLTTPCRawCluster.h"
+#include "AliHLTTPCTransform.h"
#include "AliHLTOUT.h"
#include "AliHLTComponent.h"
+#include "AliHLTErrorGuard.h"
+#include "AliHLTDataInflater.h"
+#include "AliHLTTPCDefinitions.h"
#include "AliLog.h"
#include "AliHLTSystem.h"
#include "AliHLTPluginBase.h"
#include <cstdlib>
#include <string>
#include <memory>
+#include <iostream>
+#include <iomanip>
/** ROOT macro for the implementation of ROOT specific class methods */
ClassImp(AliHLTTPCClusterAccessHLTOUT)
: TObject()
, fVerbosity(0)
, fClusters(NULL)
+ , fCurrentSector(-1)
+ , fpDecoder(NULL)
{
// see header file for class documentation
// or
delete fClusters;
fClusters=NULL;
}
+ if (fpDecoder) {
+ fpDecoder->Clear();
+ delete fpDecoder;
+ fpDecoder=NULL;
+ }
}
void AliHLTTPCClusterAccessHLTOUT::Execute(const char *method, const char *params, Int_t *error)
TObject* AliHLTTPCClusterAccessHLTOUT::FindObject(const char *name) const
{
/// inherited from TObject: return the cluster array if name id "clusterarray"
- if (strcmp(name, "clusterarray")==0) return fClusters;
+ if (strcmp(name, "clusterarray")==0) {
+ if (fCurrentSector<0) return NULL;
+ return fClusters->GetSectorArray(fCurrentSector);
+ }
return TObject::FindObject(name);
}
-void AliHLTTPCClusterAccessHLTOUT::Clear(Option_t * /*option*/)
+void AliHLTTPCClusterAccessHLTOUT::Clear(Option_t * option)
{
/// inherited from TObject: cleanup
- if (fClusters) fClusters->Clear();
+ if (strcmp(option, "event")==0) {
+ if (fClusters) fClusters->Clear();
+ fCurrentSector=-1;
+ }
}
-void AliHLTTPCClusterAccessHLTOUT::Print(Option_t */*option*/) const
+void AliHLTTPCClusterAccessHLTOUT::Print(Option_t *option) const
{
/// inherited from TObject
- if (!fClusters) return;
- for (int i=0; i<fClusters->GetEntriesFast(); i++) {
- if (!fClusters->At(i)) continue;
- AliTPCclusterMI* pCluster=dynamic_cast<AliTPCclusterMI*>(fClusters->At(i));
- if (!pCluster) break;
- cout << "AliTPCclusterMI:"
- << " row=" << pCluster->GetRow()
- << " pad=" << pCluster->GetPad()
- << " time=" << pCluster->GetTimeBin()
- << " charge=" << pCluster->GetQ()
- << " maxq=" << pCluster->GetMax()
- << endl;
- }
+ if (fClusters) fClusters->Print(option);
}
int AliHLTTPCClusterAccessHLTOUT::ProcessClusters(const char* params)
/// raw or compressed
int iResult=0;
TString strparams(params);
- int minSlice=0, maxSlice=35, minPart=0, maxPart=5;
+ int sector=-1;
std::auto_ptr<TObjArray> tokens(strparams.Tokenize(" "));
if (!tokens.get()) return -ENOMEM;
for (int i=0; i< tokens->GetEntriesFast(); i++) {
if (!tokens->At(i)) continue;
TString argument=tokens->At(i)->GetName();
+ // the offline code enumerates first the 36 inner (partitions 0+1) and then 36 outer
+ // sectors (partitions 2-5)
if (argument.BeginsWith("sector=")) {
argument.ReplaceAll("sector=", "");
- int sector=argument.Atoi();
- // the offline code enumerates first the 36 inner (partitions 0+1) and then 36 outer
- // sectors (partitions 2-5)
- if (fVerbosity>0) AliInfo(Form("processing HLT clusters for sector %d", sector));
- if (sector<36) { // inner sectors
- minSlice=maxSlice=sector;
- minPart=0; maxPart=1;
- } else { // outer sectors
- minSlice=maxSlice=sector-36;
- minPart=2; maxPart=5;
- }
+ sector=argument.Atoi();
}
}
+ if (sector<0) {
+ AliError("invalid argument, please specify \"sector=sectorno\"");
+ return -EINVAL;
+ }
+ if (sector>=76) {
+ AliError(Form("invalid sector number %d", sector));
+ return -EINVAL;
+ }
if (!fClusters) {
- fClusters=new TClonesArray("AliTPCclusterMI");
+ fClusters=new AliTPCclusterMIContainer;
}
if (!fClusters) return -ENOMEM;
+ if (fCurrentSector>=0) {
+ // cluster container already filled
+ fCurrentSector=sector;
+ TObjArray* pArray=fClusters->GetSectorArray(fCurrentSector);
+ if (!pArray) {
+ AliError(Form("can not get cluster array for sector %d", sector));
+ return -ENOBUFS;
+ }
+ if (fVerbosity>0) AliInfo(Form("converted %d cluster(s) for sector %d", pArray->GetEntriesFast() ,sector));
+ return pArray->GetEntriesFast();
+ }
+
+ // fill the cluster container
AliHLTSystem* pSystem=AliHLTPluginBase::GetInstance();
if (!pSystem) {
AliError("can not access HLT system");
AliHLTOUT* pHLTOUT=pSystem->RequestHLTOUT();
if (!pHLTOUT) {
AliError("can not access HLTOUT");
+ return -EACCES;
+ }
+
+ if (!fpDecoder) {
+ fpDecoder=new AliHLTTPCDataCompressionDecoder;
+ }
+
+ if (!fpDecoder) {
+ AliError("failed to create decoder instance");
return -ENODEV;
}
- for (int slice=minSlice; slice<=maxSlice; slice++) {
- for (int part=minPart; part<=maxPart; part++) {
- if (fVerbosity>0) AliInfo(Form("processing HLT clusters for slice %d partitions %d", slice, part));
- AliHLTUInt32_t spec=slice<<24 | slice<<16 | part<<8 | part;
- AliHLTTPCClusterMCDataList tpcClusterLabels;
- bool bHaveLabels=false;
- if (pHLTOUT->SelectFirstDataBlock(AliHLTTPCDefinitions::fgkAliHLTDataTypeClusterMCInfo, spec)>=0) {
- iResult=ReadAliHLTTPCClusterMCData(pHLTOUT, tpcClusterLabels);
- bHaveLabels=true;
+ AliHLTTPCDataCompressionDecoder& decoder=*fpDecoder;
+ decoder.Clear();
+ decoder.SetVerbosity(fVerbosity);
+ decoder.EnableClusterMerger();
+
+ bool bNextBlock=false;
+ // add cluster id and mc information data blocks
+ for (bNextBlock=(pHLTOUT->SelectFirstDataBlock()>=0);
+ bNextBlock; bNextBlock=(pHLTOUT->SelectNextDataBlock()>=0)) {
+ AliHLTComponentBlockData desc;
+ // FIXME: extend HLTOUT to get the full descriptor
+ const AliHLTUInt8_t* buffer=NULL;
+ if ((iResult=pHLTOUT->GetDataBuffer(buffer, desc.fSize))<0) {
+ continue;
+ }
+ desc.fPtr=(void*)buffer;
+ if (pHLTOUT->GetDataBlockDescription(desc.fDataType, desc.fSpecification)<0) {
+ continue;
+ }
+ if (desc.fDataType==AliHLTTPCDefinitions::AliHLTDataTypeClusterMCInfo()) {
+ // add mc information
+ if ((iResult=decoder.AddClusterMCData(&desc))<0) {
+ return iResult;
}
+ }
+ if (desc.fDataType==AliHLTTPCDefinitions::RemainingClusterIdsDataType() ||
+ desc.fDataType==AliHLTTPCDefinitions::ClusterIdTracksDataType()) {
+ // add cluster ids
+ if ((iResult=decoder.AddClusterIds(&desc))<0) {
+ return iResult;
+ }
+ }
+ }
+
+ bool bHavePartitionRawData=false;
+ bool bHavePartitionCompressedData=false;
+ vector<bool> bHavePartitionData(216, false);
- if (pHLTOUT->SelectFirstDataBlock(AliHLTTPCDefinitions::fgkRawClustersDataType, spec)>=0) {
- iResult=ReadAliHLTTPCRawClusterData(pHLTOUT, fClusters, bHaveLabels?&tpcClusterLabels:NULL);
- } else if (pHLTOUT->SelectFirstDataBlock(AliHLTTPCDefinitions::fgkClustersDataType, spec)>=0) {
- iResult=ReadAliHLTTPCClusterData(pHLTOUT, fClusters, bHaveLabels?&tpcClusterLabels:NULL);
+ // read data
+ iResult=-ENODATA;
+ int nExtractedClusters=0;
+ for (bNextBlock=(pHLTOUT->SelectFirstDataBlock()>=0);
+ bNextBlock; bNextBlock=(pHLTOUT->SelectNextDataBlock()>=0)) {
+ decoder.SetPadShift(0.0);
+ AliHLTComponentBlockData desc;
+ // FIXME: extend HLTOUT to get the full descriptor with one call
+ const AliHLTUInt8_t* buffer=NULL;
+ if ((iResult=pHLTOUT->GetDataBuffer(buffer, desc.fSize))<0) {
+ continue;
+ }
+ desc.fPtr=(void*)buffer;
+ if (pHLTOUT->GetDataBlockDescription(desc.fDataType, desc.fSpecification)<0) {
+ continue;
+ }
+ if (!TestBit(kSkipPartitionClusters) &&
+ (desc.fDataType==AliHLTTPCDefinitions::RawClustersDataType())) {
+ // This is a special handling of data blocks produced with v5-01-Release
+ // The pad shift by 0.5 was not included in the data but was applied in the
+ // unpacking in this class. Changed in r51306, the next tag containing this
+ // change in the online system is v5-01-Rev-07. There are only very few runs
+ // of Sep 2011 with recorded clusters not containing the 0.5 shift
+ // There was also a chenge in the data type of the compressed partition
+ // cluster blocks which helps to identify the blocks which need the pad shift
+ // here
+ if (desc.fSize<sizeof(AliHLTTPCRawClusterData)) continue;
+ const AliHLTTPCRawClusterData* clusterData = reinterpret_cast<const AliHLTTPCRawClusterData*>(buffer);
+ if (!clusterData) continue;
+ if (clusterData->fVersion==1) {
+ // compressed clusters without the pad shift
+ // no raw clusters (version==0) have ever been recorded
+ decoder.SetPadShift(0.5);
+ }
+ AliHLTUInt8_t slice = AliHLTTPCDefinitions::GetMinSliceNr(desc.fSpecification);
+ AliHLTUInt8_t partition = AliHLTTPCDefinitions::GetMinPatchNr(desc.fSpecification);
+ if (slice!=AliHLTTPCDefinitions::GetMaxSliceNr(desc.fSpecification) ||
+ partition!=AliHLTTPCDefinitions::GetMaxPatchNr(desc.fSpecification)) {
+ AliFatal(Form("inconsistent cluster data: can not handle blocks containing multiple partitions, "
+ "block specification 0x%08x", desc.fSpecification));
+ }
+ iResult=decoder.ReadClustersPartition(fClusters->BeginRemainingClusterBlock(0, desc.fSpecification),
+ reinterpret_cast<AliHLTUInt8_t*>(desc.fPtr),
+ desc.fSize,
+ desc.fSpecification);
+ if (iResult>=0) nExtractedClusters+=iResult;
+ else {
+ AliFatal(Form("processing of cluster block 0x%08x failed with error code %d", desc.fSpecification, iResult));
+ }
+ unsigned index=slice*AliHLTTPCTransform::GetNumberOfPatches()+partition;
+ if (index>=bHavePartitionData.size()) bHavePartitionData.resize(index, false);
+ if (bHavePartitionData[index]) {
+ AliFatal(Form("inconsistent cluster data: multiple data blocks of identical specification indicate a failure "
+ "in the production of the data. Probably an HLT emulation chain is executed in the reconstruction "
+ "and produces data in addition to HLTOUT. Option 'ignore-hltout' is required in that case; "
+ "block specification 0x%08x", desc.fSpecification));
+ }
+ bHavePartitionData[index]=true;
+ if (bHavePartitionCompressedData) {
+ AliFatal(Form("inconsistent cluster data: both compressed and raw cluster blocks present in HLTOUT, indicates a failure "
+ "in the production of the data. Probably an HLT emulation chain is executed in the reconstruction "
+ "and produces data in addition to HLTOUT. Option 'ignore-hltout' is required in that case; "
+ "block specification 0x%08x", desc.fSpecification));
+ }
+ bHavePartitionRawData=true;
+ continue;
+ } else if (!TestBit(kSkipPartitionClusters) &&
+ (desc.fDataType==AliHLTTPCDefinitions::RemainingClustersCompressedDataType())) {
+ AliHLTUInt8_t slice = AliHLTTPCDefinitions::GetMinSliceNr(desc.fSpecification);
+ AliHLTUInt8_t partition = AliHLTTPCDefinitions::GetMinPatchNr(desc.fSpecification);
+ if (slice!=AliHLTTPCDefinitions::GetMaxSliceNr(desc.fSpecification) ||
+ partition!=AliHLTTPCDefinitions::GetMaxPatchNr(desc.fSpecification)) {
+ AliFatal(Form("inconsistent cluster data: can not handle blocks containing multiple partitions, "
+ "block specification 0x%08x", desc.fSpecification));
+ }
+ iResult=decoder.ReadClustersPartition(fClusters->BeginRemainingClusterBlock(0, desc.fSpecification),
+ reinterpret_cast<AliHLTUInt8_t*>(desc.fPtr),
+ desc.fSize,
+ desc.fSpecification);
+ if (iResult>0) nExtractedClusters+=iResult;
+ unsigned index=slice*AliHLTTPCTransform::GetNumberOfPatches()+partition;
+ if (index>=bHavePartitionData.size()) bHavePartitionData.resize(index, false);
+ if (bHavePartitionData[index]) {
+ AliFatal(Form("inconsistent cluster data: multiple data blocks of identical specification indicate a failure "
+ "in the production of the data. Probably an HLT emulation chain is executed in the reconstruction "
+ "and produces data in addition to HLTOUT. Option 'ignore-hltout' is required in that case; "
+ "block specification 0x%08x", desc.fSpecification));
+ }
+ bHavePartitionData[index]=true;
+ bHavePartitionData[index]=true;
+ if (bHavePartitionRawData) {
+ AliFatal(Form("inconsistent cluster data: both compressed and raw cluster blocks present in HLTOUT, indicates a failure "
+ "in the production of the data. Probably an HLT emulation chain is executed in the reconstruction "
+ "and produces data in addition to HLTOUT. Option 'ignore-hltout' is required in that case; "
+ "block specification 0x%08x", desc.fSpecification));
}
+ bHavePartitionCompressedData=true;
+ continue;
+ } else if (!TestBit(kSkipTrackClusters) &&
+ desc.fDataType==AliHLTTPCDefinitions::ClusterTracksCompressedDataType()) {
+ iResult=decoder.ReadTrackModelClustersCompressed(fClusters->BeginTrackModelClusterBlock(0),
+ reinterpret_cast<AliHLTUInt8_t*>(desc.fPtr),
+ desc.fSize,
+ desc.fSpecification);
+ continue;
}
}
pSystem->ReleaseHLTOUT(pHLTOUT);
- return iResult;
+
+ if (iResult<0) return iResult;
+ if (fVerbosity>0) {
+ int nConvertedClusters=0;
+ for (int s=0; s<72; s++) {
+ TObjArray* pArray=fClusters->GetSectorArray(s);
+ if (!pArray) continue;
+ nConvertedClusters+=pArray->GetEntriesFast();
+ }
+ AliInfo(Form("extracted HLT clusters: %d, converted HLT clusters: %d", nExtractedClusters, nConvertedClusters));
+ }
+
+ fCurrentSector=sector;
+ TObjArray* pArray=fClusters->GetSectorArray(fCurrentSector);
+ if (!pArray) {
+ AliError(Form("can not get cluster array for sector %d", sector));
+ return -ENOBUFS;
+ }
+ if (fVerbosity>0) AliInfo(Form("converted %d cluster(s) for sector %d", pArray->GetEntriesFast() ,sector));
+ return pArray->GetEntriesFast();
}
int AliHLTTPCClusterAccessHLTOUT::ReadAliHLTTPCClusterMCData(AliHLTOUT* pHLTOUT, AliHLTTPCClusterMCDataList &tpcClusterLabels) const
}
const AliHLTTPCRawClusterData* clusterData = reinterpret_cast<const AliHLTTPCRawClusterData*>(pBuffer);
Int_t nCount = (Int_t) clusterData->fCount;
+ if (clusterData->fVersion!=0) {
+ // this is encoded data of different formats
+ switch (clusterData->fVersion) {
+ case 1:
+ iResult=ReadAliHLTTPCRawClusterDataDeflateSimple(reinterpret_cast<const AliHLTUInt8_t*>(clusterData->fClusters),
+ size-sizeof(AliHLTTPCRawClusterData), nCount, specification,
+ pClusters, tpcClusterLabels);
+ break;
+ default:
+ iResult=-EPROTO;
+ }
+ return iResult;
+ }
+
if (nCount*sizeof(AliHLTTPCRawCluster) + sizeof(AliHLTTPCRawClusterData) != size) {
AliError("inconsistent cluster data block size, skipping block");
continue;
} while (pHLTOUT->SelectNextDataBlock()>=0);
return iResult;
}
+
+int AliHLTTPCClusterAccessHLTOUT::ReadRemainingClustersCompressed(AliHLTOUT* pHLTOUT, TClonesArray* pClusters, const AliHLTTPCClusterMCDataList *tpcClusterLabels)
+{
+  /// Decode compressed remaining-cluster blocks from the currently selected
+  /// HLTOUT data block(s) and append the decoded clusters to pClusters.
+  /// @param pHLTOUT          HLTOUT instance with a data block selected
+  /// @param pClusters        target TClonesArray of AliTPCclusterMI
+  /// @param tpcClusterLabels optional MC labels, assigned if not NULL
+  /// @return >=0 on success, negative errno-style code on failure
+  int iResult=0;
+  if (!pHLTOUT || !pClusters) return -EINVAL;
+  do {
+    const AliHLTUInt8_t* pBuffer=NULL;
+    AliHLTUInt32_t size=0;
+    if ((iResult=pHLTOUT->GetDataBuffer(pBuffer, size))<0) {
+      continue;
+    }
+    // bug fix: require at least the full fixed-size header; with the former
+    // 'size<4' check a truncated block made size-sizeof(AliHLTTPCRawClusterData)
+    // below underflow in unsigned arithmetic
+    if (pBuffer==NULL || size<sizeof(AliHLTTPCRawClusterData)) {
+      AliError("invalid cluster data block");
+      continue;
+    }
+    AliHLTComponentDataType dt=kAliHLTVoidDataType;
+    AliHLTUInt32_t specification=kAliHLTVoidDataSpec;
+    if (pHLTOUT->GetDataBlockDescription(dt, specification)<0) {
+      AliError("failed to retrieve data block description, skipping mc cluster data block ...");
+      continue;
+    }
+    const AliHLTTPCRawClusterData* clusterData = reinterpret_cast<const AliHLTTPCRawClusterData*>(pBuffer);
+    Int_t nCount = (Int_t) clusterData->fCount;
+
+    // dispatch on the encoding version of the block payload
+    switch (clusterData->fVersion) {
+    case 1:
+      iResult=ReadAliHLTTPCRawClusterDataDeflateSimple(reinterpret_cast<const AliHLTUInt8_t*>(clusterData->fClusters),
+                                                       size-sizeof(AliHLTTPCRawClusterData), nCount, specification,
+                                                       pClusters, tpcClusterLabels);
+      break;
+    default:
+      AliError(Form("invalid cluster format version %d", clusterData->fVersion));
+      iResult=-EPROTO;
+    }
+
+    if (fVerbosity>0) AliInfo(Form("converted %d cluster(s) from block %s 0x%08x", nCount, AliHLTComponent::DataType2Text(dt).c_str(), specification));
+  } while (pHLTOUT->SelectNextDataBlock()>=0 && iResult>=0);
+
+  return iResult;
+}
+
+int AliHLTTPCClusterAccessHLTOUT::ReadAliHLTTPCRawClusterDataDeflateSimple(const AliHLTUInt8_t* pData, int dataSize,
+ int nofClusters, AliHLTUInt32_t specification,
+ TClonesArray* pClusters,
+ const AliHLTTPCClusterMCDataList *tpcClusterLabels)
+{
+ // read cluster data from AliHLTTPCClusterData
+ // Decodes a DeflaterSimple-encoded bit stream of nofClusters clusters and
+ // appends AliTPCclusterMI objects to pClusters; returns 0 on success or a
+ // negative errno-style code (-EINVAL/-ENOSPC/-EBADF/-EPROTO).
+
+ // FIXME: quick implementation to read the compressed cluster data from HLTOUT
+ // the data definition below is the same as in AliHLTTPCDataCompressionComponent
+ // but needs to be moved to a common class (AliHLTTPCDefinitions?)
+ // Think about a decoder class supporting iterator objects for various types
+ // of cluster data
+ int iResult=0;
+ if (!pData || !pClusters) return -EINVAL;
+ AliHLTDataInflater inflater;
+ if ((iResult=inflater.InitBitDataInput(pData, dataSize))<0) {
+ return iResult;
+ }
+
+ // new clusters are appended after the entries already in the array
+ int offset=pClusters->GetEntries();
+ pClusters->ExpandCreate(offset+nofClusters);
+ AliHLTUInt8_t slice = AliHLTTPCDefinitions::GetMinSliceNr(specification);
+ AliHLTUInt8_t partition = AliHLTTPCDefinitions::GetMinPatchNr(specification);
+ // the compressed format stores the difference of the local row number in
+ // the partition to the row of the last cluster
+ // add the first row in the partition to get global row number
+ // offline uses row number in physical sector, inner sector consists of
+ // partitions 0 and 1, outer sector of partition 2-5
+ int rowOffset=AliHLTTPCTransform::GetFirstRow(partition)-(partition<2?0:AliHLTTPCTransform::GetFirstRow(2));
+
+ // parameterId cycles through the cluster members (kPadRow ... kQMax) once
+ // per cluster; the decode loop reads one parameter value per iteration
+ int parameterId=0;
+ int outClusterCnt=0;
+ AliHLTUInt8_t switchBit=0;
+ AliHLTUInt64_t value=0;
+ AliTPCclusterMI* pCluster=NULL;
+ AliHLTUInt32_t lastPadRow=0;
+ while (outClusterCnt<nofClusters && inflater.InputBit(switchBit)) {
+ const AliHLTTPCDefinitions::AliClusterParameter& parameter
+ =AliHLTTPCDefinitions::fgkClusterParameterDefinitions[parameterId];
+ // in mode DeflaterSimple, the optional parameter of the cluster parameter definition
+ // corresponds to the number bits of the reduced format
+ // the leading switch bit selects between full and reduced bit length
+ if (!inflater.InputBits(value, switchBit?parameter.fBitLength:parameter.fOptional)) {
+ break;
+ }
+
+ if (!pCluster) {
+ if (!pClusters->At(offset+outClusterCnt)) {
+ // here we should not get anymore because of the condition outClusterCnt<nofClusters
+ return -ENOSPC;
+ }
+ pCluster=dynamic_cast<AliTPCclusterMI*>(pClusters->At(offset+outClusterCnt));
+ if (!pCluster) {
+ AliError("invalid object type, expecting AliTPCclusterMI");
+ iResult=-EBADF; // this is a problem of all objects
+ break;
+ }
+ }
+ switch (parameterId) {
+ case AliHLTTPCDefinitions::kPadRow:
+ // pad row is delta-encoded with respect to the previous cluster
+ {pCluster->SetRow(value+lastPadRow+rowOffset); lastPadRow+=value;break;}
+ case AliHLTTPCDefinitions::kPad:
+ {float pad=value; pad/=parameter.fScale; pCluster->SetPad(pad); break;}
+ case AliHLTTPCDefinitions::kTime:
+ {float time=value; time/=parameter.fScale; pCluster->SetTimeBin(time); break;}
+ case AliHLTTPCDefinitions::kSigmaY2:
+ {float sigmaY2=value; sigmaY2/=parameter.fScale; pCluster->SetSigmaY2(sigmaY2); break;}
+ case AliHLTTPCDefinitions::kSigmaZ2:
+ {float sigmaZ2=value; sigmaZ2/=parameter.fScale; pCluster->SetSigmaZ2(sigmaZ2); break;}
+ case AliHLTTPCDefinitions::kCharge:
+ {pCluster->SetQ(value); break;}
+ case AliHLTTPCDefinitions::kQMax:
+ {pCluster->SetMax(value); break;}
+ }
+ if (parameterId>=AliHLTTPCDefinitions::kLast) {
+ // switch to next cluster
+ if (tpcClusterLabels) {
+ UInt_t clusterID=AliHLTTPCSpacePointData::GetID(slice, partition, outClusterCnt);
+ if (tpcClusterLabels->find(clusterID)!=tpcClusterLabels->end()) {
+ const AliHLTTPCClusterMCWeight* mcWeights=tpcClusterLabels->find(clusterID)->second.fClusterID;
+ for (int k=0; k<3; k++) {
+ // TODO: sort the labels according to the weight in order to assign the most likely mc label
+ // to the first component
+ pCluster->SetLabel(mcWeights[k].fMCID, k);
+ }
+ } else {
+ AliError(Form("can not find mc label of cluster with id 0x%08x", clusterID));
+ }
+ }
+ outClusterCnt++;
+ pCluster=NULL;
+ parameterId=-1;
+ }
+ parameterId++;
+ }
+ // stream is padded to the next byte boundary; any further set bit indicates
+ // trailing data beyond the expected number of clusters
+ inflater.Pad8Bits();
+ if (inflater.InputBit(switchBit)) {
+ AliWarning("format error of compressed clusters, there is more data than expected");
+ }
+ inflater.CloseBitDataInput();
+ if (iResult>=0 && nofClusters!=outClusterCnt) {
+ // is this a Fatal?
+ AliError(Form("error reading compressed cluster format: expected %d, read only %d cluster(s)", nofClusters, outClusterCnt));
+ return -EPROTO;
+ }
+ return iResult;
+}
+
+AliHLTTPCClusterAccessHLTOUT::AliTPCclusterMIContainer::AliTPCclusterMIContainer()
+  : fClusterArrays()
+  , fRemainingClusterIds()
+  , fTrackModelClusterIds()
+  , fCurrentClusterIds(NULL)
+  , fClusterMCData()
+  , fIterator()
+{
+  /// constructor: create one AliTPCclusterMI array per TPC sector (72 in total)
+  const int knSectors=72;
+  fClusterArrays.reserve(knSectors);
+  for (int sector=0; sector<knSectors; sector++) {
+    fClusterArrays.push_back(new TClonesArray("AliTPCclusterMI"));
+  }
+}
+
+AliHLTTPCClusterAccessHLTOUT::AliTPCclusterMIContainer::~AliTPCclusterMIContainer()
+{
+  /// destructor: empty and release every per-sector cluster array
+  for (unsigned k=0; k<fClusterArrays.size(); k++) {
+    TClonesArray* pArray=fClusterArrays[k];
+    if (!pArray) continue;
+    pArray->Clear();
+    delete pArray;
+  }
+}
+
+AliHLTTPCClusterAccessHLTOUT::AliTPCclusterMIContainer::iterator& AliHLTTPCClusterAccessHLTOUT::AliTPCclusterMIContainer::BeginRemainingClusterBlock(int /*count*/, AliHLTUInt32_t specification)
+{
+  /// start iteration over the remaining-clusters block of the given specification;
+  /// selects the matching cluster id block (if any) and resets the iterator
+  AliHLTUInt8_t slice=AliHLTTPCDefinitions::GetMinSliceNr(specification);
+  AliHLTUInt8_t partition=AliHLTTPCDefinitions::GetMinPatchNr(specification);
+  unsigned index=slice*AliHLTTPCTransform::GetNumberOfPatches()+partition;
+  fCurrentClusterIds=(index<fRemainingClusterIds.size())?&fRemainingClusterIds[index]:NULL;
+  fIterator=iterator(this);
+  return fIterator;
+}
+
+AliHLTTPCClusterAccessHLTOUT::AliTPCclusterMIContainer::iterator& AliHLTTPCClusterAccessHLTOUT::AliTPCclusterMIContainer::BeginTrackModelClusterBlock(int /*count*/)
+{
+  /// start iteration over the track model clusters; the track model id block
+  /// is used only if it holds at least one id
+  bool haveIds=(fTrackModelClusterIds.fIds!=NULL) && (fTrackModelClusterIds.fSize>0);
+  fCurrentClusterIds=haveIds?&fTrackModelClusterIds:NULL;
+  fIterator=iterator(this);
+  return fIterator;
+}
+
+int AliHLTTPCClusterAccessHLTOUT::AliTPCclusterMIContainer::AddClusterMCData(const AliHLTComponentBlockData* pDesc)
+{
+ /// add cluster mc data block
+ /// Stores a non-owning pointer to the MC label block of one slice/partition,
+ /// indexed by slice*nPatches+partition. The block memory must stay valid for
+ /// the lifetime of this event (cleared in Clear()).
+ /// @return 0 on success, -EINVAL on malformed block, -ENODATA on wrong type
+ if (!pDesc) return -EINVAL;
+ if (pDesc->fDataType==AliHLTTPCDefinitions::AliHLTDataTypeClusterMCInfo()) {
+ AliHLTUInt8_t slice=AliHLTTPCDefinitions::GetMinSliceNr(pDesc->fSpecification);
+ AliHLTUInt8_t partition=AliHLTTPCDefinitions::GetMinPatchNr(pDesc->fSpecification);
+ unsigned index=slice*AliHLTTPCTransform::GetNumberOfPatches()+partition;
+ if (fClusterMCData.size()<=index) {
+ // grow to the full slice*partition grid in one step if possible,
+ // otherwise just enough to hold this index
+ if ((int)fClusterMCData.size()<AliHLTTPCTransform::GetNSlice()*AliHLTTPCTransform::GetNumberOfPatches()) {
+ fClusterMCData.resize(AliHLTTPCTransform::GetNSlice()*AliHLTTPCTransform::GetNumberOfPatches(), NULL);
+ } else {
+ fClusterMCData.resize(index+1, NULL);
+ }
+ }
+ if (pDesc->fSize<sizeof(AliHLTTPCClusterMCData)) return -EINVAL;
+ const AliHLTTPCClusterMCData* pData=reinterpret_cast<const AliHLTTPCClusterMCData*>(pDesc->fPtr);
+ unsigned nLabels = pData->fCount;
+ // consistency check: declared label count must match the block size exactly
+ if (nLabels*sizeof(AliHLTTPCClusterMCLabel) + sizeof(AliHLTTPCClusterMCData) != pDesc->fSize) {
+ return -EINVAL;
+ }
+ fClusterMCData[index]=pData;
+ return 0;
+ }
+ return -ENODATA;
+}
+
+int AliHLTTPCClusterAccessHLTOUT::AliTPCclusterMIContainer::AddClusterIds(const AliHLTComponentBlockData* pDesc)
+{
+ /// add cluster id block for remaining or track model clusters
+ /// Stores non-owning pointers into the block payload (an array of
+ /// AliHLTUInt32_t ids); the block memory must stay valid until Clear().
+ /// @return 0 on success, -EINVAL on NULL descriptor, -ENODATA on wrong type
+ if (!pDesc) return -EINVAL;
+ if (pDesc->fDataType==AliHLTTPCDefinitions::ClusterIdTracksDataType()) {
+ // single id block for all track model clusters
+ fTrackModelClusterIds.fIds=reinterpret_cast<AliHLTUInt32_t*>(pDesc->fPtr);
+ fTrackModelClusterIds.fSize=pDesc->fSize/sizeof(AliHLTUInt32_t);
+ return 0;
+ }
+ if (pDesc->fDataType==AliHLTTPCDefinitions::RemainingClusterIdsDataType()) {
+ // one id block per slice/partition, indexed by slice*nPatches+partition
+ AliHLTUInt8_t slice=AliHLTTPCDefinitions::GetMinSliceNr(pDesc->fSpecification);
+ AliHLTUInt8_t partition=AliHLTTPCDefinitions::GetMinPatchNr(pDesc->fSpecification);
+ unsigned index=slice*AliHLTTPCTransform::GetNumberOfPatches()+partition;
+ if (fRemainingClusterIds.size()<=index) {
+ // grow to the full slice*partition grid in one step if possible,
+ // otherwise just enough to hold this index
+ if ((int)fRemainingClusterIds.size()<AliHLTTPCTransform::GetNSlice()*AliHLTTPCTransform::GetNumberOfPatches()) {
+ fRemainingClusterIds.resize(AliHLTTPCTransform::GetNSlice()*AliHLTTPCTransform::GetNumberOfPatches());
+ } else {
+ fRemainingClusterIds.resize(index+1);
+ }
+ }
+ fRemainingClusterIds[index].fIds=reinterpret_cast<AliHLTUInt32_t*>(pDesc->fPtr);
+ fRemainingClusterIds[index].fSize=pDesc->fSize/sizeof(AliHLTUInt32_t);
+ return 0;
+ }
+ return -ENODATA;
+}
+
+AliHLTUInt32_t AliHLTTPCClusterAccessHLTOUT::AliTPCclusterMIContainer::GetClusterId(int clusterNo) const
+{
+  /// look up the id of cluster number clusterNo in the active id block;
+  /// returns kAliHLTVoidDataSpec when no block is active or the index is out of range
+  if (clusterNo<0) return kAliHLTVoidDataSpec;
+  if (!fCurrentClusterIds) return kAliHLTVoidDataSpec;
+  if ((int)fCurrentClusterIds->fSize<=clusterNo) return kAliHLTVoidDataSpec;
+  return fCurrentClusterIds->fIds[clusterNo];
+}
+
+AliTPCclusterMI* AliHLTTPCClusterAccessHLTOUT::AliTPCclusterMIContainer::NextCluster(int slice, int partition)
+{
+  /// create the next cluster in the array of the specific sector
+  /// offline sector numbering: inner sectors (partitions 0,1) are 0-35,
+  /// outer sectors (partitions 2-5) are 36-71
+  /// @return pointer to the newly placed AliTPCclusterMI, NULL on error
+  unsigned sector=partition<2?slice:slice+36;
+  if (fClusterArrays.size()<=sector ||
+      fClusterArrays[sector]==NULL) {
+    // bug fix: 'sector' is unsigned, use %u instead of %d to match the argument type
+    AliErrorClass(Form("no cluster array available for sector %u", sector));
+    return NULL;
+  }
+  // placement-new into the next free slot of the sector's TClonesArray
+  TClonesArray& array=*(fClusterArrays[sector]);
+  int count=array.GetEntriesFast();
+  return new (array[count]) AliTPCclusterMI;
+}
+
+int AliHLTTPCClusterAccessHLTOUT::AliTPCclusterMIContainer::SetMC(AliTPCclusterMI* pCluster, AliHLTUInt32_t clusterId)
+{
+ /// set MC data for the cluster
+ /// The cluster id encodes slice, partition and cluster number; the labels
+ /// are fetched from the MC block registered for that slice/partition.
+ /// Silently returns 0 when no MC data is available for the id.
+ /// @return 0 on success, -EINVAL on NULL cluster, -EDOM on invalid id
+ if (!pCluster) return -EINVAL;
+ if (clusterId==kAliHLTVoidDataSpec) return 0;
+
+ unsigned slice=AliHLTTPCSpacePointData::GetSlice(clusterId);
+ unsigned partition=AliHLTTPCSpacePointData::GetPatch(clusterId);
+ unsigned number=AliHLTTPCSpacePointData::GetNumber(clusterId);
+ if ((int)slice>=AliHLTTPCTransform::GetNSlice() ||
+ (int)partition>=AliHLTTPCTransform::GetNumberOfPatches()) return -EDOM;
+ unsigned index=slice*AliHLTTPCTransform::GetNumberOfPatches()+partition;
+ // no MC block registered or cluster number beyond the label count: not an error
+ if (fClusterMCData.size()<=index ||
+ fClusterMCData[index]==NULL ||
+ fClusterMCData[index]->fCount<=number) return 0;
+ const AliHLTTPCClusterMCWeight* mcWeights=fClusterMCData[index]->fLabels[number].fClusterID;
+ for (int k=0; k<3; k++) {
+ // TODO: sort the labels according to the weight in order to assign the most likely mc label
+ // to the first component
+ pCluster->SetLabel(mcWeights[k].fMCID, k);
+ }
+
+ return 0;
+}
+
+void AliHLTTPCClusterAccessHLTOUT::AliTPCclusterMIContainer::Clear(Option_t* /*option*/)
+{
+  /// internal cleanup: empty the per-sector cluster arrays and drop all
+  /// non-owning references to id and MC data blocks of the current event
+  for (unsigned k=0; k<fClusterArrays.size(); k++) {
+    if (fClusterArrays[k]) fClusterArrays[k]->Clear();
+  }
+  for (unsigned k=0; k<fRemainingClusterIds.size(); k++) {
+    fRemainingClusterIds[k].fIds=NULL;
+    fRemainingClusterIds[k].fSize=0;
+  }
+  fTrackModelClusterIds.fIds=NULL; fTrackModelClusterIds.fSize=0;
+  fCurrentClusterIds=NULL;
+  for (unsigned k=0; k<fClusterMCData.size(); k++) {
+    fClusterMCData[k]=NULL;
+  }
+}
+
+TObjArray* AliHLTTPCClusterAccessHLTOUT::AliTPCclusterMIContainer::GetSectorArray(unsigned sector) const
+{
+  /// access the cluster array of one sector, NULL if the sector is out of range
+  if (sector>=fClusterArrays.size()) return NULL;
+  return fClusterArrays[sector];
+}
+
+void AliHLTTPCClusterAccessHLTOUT::AliTPCclusterMIContainer::Print(Option_t *option) const
+{
+  /// inherited from TObject
+  /// option "short": print the number of clusters per sector
+  /// option "full":  additionally dump every cluster
+  cout << "AliHLTTPCClusterAccessHLTOUT::AliTPCclusterMIContainer" << endl;
+  ios::fmtflags coutflags=cout.flags(); // backup cout status flags
+  // bug fix: guard against a NULL option pointer before calling strcmp,
+  // TObject::Print may be invoked with a NULL default argument
+  if (!option) option="";
+  bool bAll=false;
+  if ((bAll=(strcmp(option, "full")==0)) ||
+      strcmp(option, "short")==0) {
+    for (unsigned iArray=0; iArray<fClusterArrays.size(); iArray++) {
+      if (fClusterArrays[iArray]) {
+	TClonesArray* pArray=fClusterArrays[iArray];
+	cout << " sector " << setfill(' ') << setw(2) << iArray << ": " << pArray->GetEntriesFast() << endl;
+	if (bAll) {
+	  for (int iCluster=0; iCluster<pArray->GetEntriesFast(); iCluster++) {
+	    if (!pArray->At(iCluster)) continue;
+	    AliTPCclusterMI* pCluster=dynamic_cast<AliTPCclusterMI*>(pArray->At(iCluster));
+	    if (!pCluster) break;
+	    cout << "  AliTPCclusterMI:"
+		 << "  row="    << pCluster->GetRow()
+		 << "  pad="    << pCluster->GetPad()
+		 << "  time="   << pCluster->GetTimeBin()
+		 << "  charge=" << pCluster->GetQ()
+		 << "  maxq="   << pCluster->GetMax()
+		 << endl;
+	  }
+	}
+      }
+    }
+  }
+  cout.flags(coutflags); // restore the original flags
+}
+
+AliHLTTPCClusterAccessHLTOUT::AliTPCclusterMIContainer::iterator& AliHLTTPCClusterAccessHLTOUT::AliTPCclusterMIContainer::iterator::Next(int slice, int partition)
+{
+ // switch to next cluster
+ // Creates the next cluster in the container for the given slice/partition,
+ // fetches its id from the active id block and applies MC labels if available.
+ // After the end is reached (fCluster==NULL with fClusterNo>=0) the iterator
+ // stays in the end state.
+ if (!fData) {
+ // no container attached: permanently void iterator
+ fCluster=NULL;
+ fClusterId=kAliHLTVoidDataSpec;
+ return *this;
+ }
+ if (fClusterNo>=0 && !fCluster) {
+ // end was reached before
+ return *this;
+ }
+ fCluster=fData->NextCluster(slice, partition);
+ fClusterId=fData->GetClusterId(++fClusterNo);
+ if (fCluster && fClusterId!=kAliHLTVoidDataSpec) {
+ fData->SetMC(fCluster, fClusterId);
+ }
+ // offline uses row number in physical sector, inner sector consists of
+ // partitions 0 and 1, outer sector of partition 2-5
+ fRowOffset=partition<2?0:AliHLTTPCTransform::GetFirstRow(2);
+ return *this;
+}