#include "AliHLTGlobalBarrelTrack.h"
#include "AliHLTComponentBenchmark.h"
#include "AliHLTDataDeflaterSimple.h"
+#include "AliHLTDataDeflaterHuffman.h"
#include "AliHLTTPCTransform.h"
#include "AliHLTTPCClusterMCData.h"
+#include "AliHLTTPCClusterTransformation.h"
#include "AliRawDataHeader.h"
+#include "AliCDBManager.h"
+#include "AliCDBPath.h"
+#include "AliCDBId.h"
+#include "AliCDBMetaData.h"
+#include "AliCDBEntry.h"
#include "TH1F.h"
#include "TFile.h"
#include <memory>
: AliHLTProcessor()
, fMode(0)
, fDeflaterMode(0)
+ , fVerificationMode(0)
, fMaxDeltaPad(AliHLTTPCDefinitions::GetMaxClusterDeltaPad())
, fMaxDeltaTime(AliHLTTPCDefinitions::GetMaxClusterDeltaTime())
, fRawInputClusters(NULL)
, fHistoClusterRatio(NULL)
, fHistoTrackClusterRatio(NULL)
, fHistogramFile()
+ , fTrainingTableOutput()
, fpBenchmark(NULL)
+ , fpWrittenAssociatedClusterIds(NULL)
+ , fDriftTimeFactorA(1.)
+ , fDriftTimeOffsetA(0.)
+ , fDriftTimeFactorC(1.)
+ , fDriftTimeOffsetC(0.)
, fVerbosity(0)
{
}
AliHLTTPCDataCompressionComponent::~AliHLTTPCDataCompressionComponent()
{
/// destructor
+ // release the optional buffer of cluster ids written with the track-model
+ // block; it is allocated lazily during event processing when MC info is
+ // present or verification mode is enabled, and owned by this component
+ if (fpWrittenAssociatedClusterIds) delete fpWrittenAssociatedClusterIds;
}
{
/// inherited from AliHLTComponent: multiple output data types of the component.
tgtList.clear();
- tgtList.push_back(AliHLTTPCDefinitions::fgkRawClustersDataType);
+ tgtList.push_back(AliHLTTPCDefinitions::RawClustersDataType());
+ tgtList.push_back(AliHLTTPCDefinitions::RemainingClustersCompressedDataType());
+ tgtList.push_back(AliHLTTPCDefinitions::RemainingClusterIdsDataType());
+ tgtList.push_back(AliHLTTPCDefinitions::ClusterTracksCompressedDataType());
+ tgtList.push_back(AliHLTTPCDefinitions::ClusterIdTracksDataType());
return tgtList.size();
}
{
/// inherited from AliHLTComponent: output data size estimator
constBase=0;
- inputMultiplier=1.3;
+ inputMultiplier=1.; // there should not be more data than input
+ inputMultiplier+=.3; // slightly more data when using the old HWCF data with 20 Byte and raw clusters 22 Byte
+ if (fpWrittenAssociatedClusterIds) inputMultiplier+=.3; // space for optional cluster id array
}
AliHLTComponent* AliHLTTPCDataCompressionComponent::Spawn()
// Process an event
// Loop over all input blocks in the event
bool bHaveMC=(GetFirstInputBlock(AliHLTTPCDefinitions::fgkAliHLTDataTypeClusterMCInfo | kAliHLTDataOriginTPC))!=NULL;
+ if ((bHaveMC || fVerificationMode>0) && fpWrittenAssociatedClusterIds==NULL) {
+ fpWrittenAssociatedClusterIds=new vector<AliHLTUInt32_t>;
+ }
const AliHLTComponentBlockData* pDesc=NULL;
}
}
- AliHLTTrackGeometry* trackpoints=new AliHLTTPCTrackGeometry;
+ AliHLTTPCTrackGeometry* trackpoints=new AliHLTTPCTrackGeometry;
if (!trackpoints) continue;
+ trackpoints->InitDriftTimeTransformation(fDriftTimeFactorA, fDriftTimeOffsetA, fDriftTimeFactorC, fDriftTimeOffsetC);
trackpoints->SetTrackId(trackID);
trackpoints->CalculateTrackPoints(*track);
trackpoints->RegisterTrackPoints(fTrackGrid);
// FIXME: decoder index instead of data specification to be used
// use an external access grid to reduce allocated memory
// set to NULL after writing the clusters
+ const char* writeoptions="";
+ if (fpWrittenAssociatedClusterIds) {
+ writeoptions="write-cluster-ids";
+ }
fRawInputClusters->SetSpacePointPropertyGrid(pDesc->fSpecification, fSpacePointGrid);
- iResult=fRawInputClusters->Write(outputPtr+size, capacity-size, outputBlocks, fpDataDeflater);
+ iResult=fRawInputClusters->Write(outputPtr+size, capacity-size, outputBlocks, fpDataDeflater, writeoptions);
fRawInputClusters->SetSpacePointPropertyGrid(pDesc->fSpecification, NULL);
if (iResult>=0) {
size+=iResult;
// output of track model clusters
if (iResult>=0) {
+ if (fpWrittenAssociatedClusterIds) fpWrittenAssociatedClusterIds->clear();
iResult=WriteTrackClusters(inputTrackArray, fRawInputClusters, fpDataDeflater, outputPtr+size, capacity-size);
if (iResult>=0) {
AliHLTComponent_BlockData bd;
size += bd.fSize;
outputDataSize+=bd.fSize;
HLTBenchmark("track data block of %d tracks: size %d", inputTrackArray.size(), bd.fSize);
+
+ if (fpWrittenAssociatedClusterIds && fpWrittenAssociatedClusterIds->size()>0) {
+ AliHLTComponent::FillBlockData(bd);
+ bd.fOffset = size;
+ bd.fSize = fpWrittenAssociatedClusterIds->size()*sizeof(vector<AliHLTUInt32_t>::value_type);
+ memcpy(outputPtr+bd.fOffset, &(*fpWrittenAssociatedClusterIds)[0], bd.fSize);
+ bd.fDataType = AliHLTTPCDefinitions::ClusterIdTracksDataType();
+ bd.fSpecification = AliHLTTPCDefinitions::EncodeDataSpecification(minSlice, maxSlice, minPatch, maxPatch);
+ outputBlocks.push_back(bd);
+ size += bd.fSize;
+
+ fpWrittenAssociatedClusterIds->clear();
+ }
}
}
if (GetBenchmarkInstance()) {
GetBenchmarkInstance()->Stop(0);
- HLTBenchmark("%s - compression factor %.2f", GetBenchmarkInstance()->GetStatistics(), compressionFactor);
+ if (fDeflaterMode!=3) {
+ HLTBenchmark("%s - compression factor %.2f", GetBenchmarkInstance()->GetStatistics(), compressionFactor);
+ } else {
+ HLTBenchmark("%s", GetBenchmarkInstance()->GetStatistics());
+ }
}
if (fInputClusters) {
return -EBADF;
}
- int result=pTrackPoints->Write(*track, pSpacePoints, pDeflater, outputPtr+size, capacity-size);
+ int result=pTrackPoints->Write(*track, pSpacePoints, pDeflater, outputPtr+size, capacity-size, fpWrittenAssociatedClusterIds);
if (result<0) return result;
size+=result;
fHistoTrackClusterRatio=histoTrackClusterRatio.release();
}
+ if (iResult>=0 && (iResult=InitDriftTimeTransformation())<0) return iResult;
+
return iResult;
}
int AliHLTTPCDataCompressionComponent::InitDeflater(int mode)
{
/// init the data deflater
+ int iResult=0;
+ if (mode==2 || mode==3) {
+ // huffman deflater
+ std::auto_ptr<AliHLTDataDeflaterHuffman> deflater(new AliHLTDataDeflaterHuffman(mode==3));
+ if (!deflater.get()) return -ENOMEM;
+
+ if (!deflater->IsTrainingMode()) {
+ TString cdbPath("HLT/ConfigTPC/");
+ cdbPath += GetComponentID();
+ cdbPath += "HuffmanTables";
+ TObject* pConf=LoadAndExtractOCDBObject(cdbPath);
+ if (!pConf) return -ENOENT;
+ if (dynamic_cast<TList*>(pConf)==NULL) {
+ HLTError("huffman table configuration object of inconsistent type");
+ return -EINVAL;
+ }
+ iResult=deflater->InitDecoders(dynamic_cast<TList*>(pConf));
+ if (iResult<0) return iResult;
+ }
+
+ unsigned nofParameters=AliHLTTPCDefinitions::GetNumberOfClusterParameterDefinitions();
+ unsigned p=0;
+ for (; p<nofParameters; p++) {
+ const AliHLTTPCDefinitions::AliClusterParameter& parameter=AliHLTTPCDefinitions::fgkClusterParameterDefinitions[p];
+ if (deflater->AddParameterDefinition(parameter.fName,
+ parameter.fBitLength)!=(int)parameter.fId) {
+ // for performance reason the parameter id is simply used as index in the array of
+ // definitions, the position must match the id
+ HLTFatal("mismatch between parameter id and position in array for parameter %s, rearrange definitions!", parameter.fName);
+ return -EFAULT;
+ }
+ }
+ fpDataDeflater=deflater.release();
+ return 0;
+ }
if (mode==1) {
std::auto_ptr<AliHLTDataDeflaterSimple> deflater(new AliHLTDataDeflaterSimple);
if (!deflater.get()) return -ENOMEM;
}
fpDataDeflater=deflater.release();
return 0;
- } else if (mode==2) {
- // huffman deflater
- HLTError("huffman deflater to be implemented");
- return -ENOSYS; // change to 0 if implemented
}
HLTError("invalid deflater mode %d, allowed 1=simple 2=huffman", mode);
return -EINVAL;
if (fHistoTrackClusterRatio) delete fHistoTrackClusterRatio;
fHistoTrackClusterRatio=NULL;
- if (fpDataDeflater) delete fpDataDeflater; fpDataDeflater=NULL;
+ if (fpDataDeflater) {
+ if (!fHistogramFile.IsNull()) {
+ TString filename=fHistogramFile;
+ filename.ReplaceAll(".root", "-deflater.root");
+ fpDataDeflater->SaveAs(filename);
+ }
+ if (fDeflaterMode==3) {
+ if (fTrainingTableOutput.IsNull()) {
+ fTrainingTableOutput=GetComponentID();
+ fTrainingTableOutput+="-huffman.root";
+ }
+ // TODO: currently, the code tables are also calculated in FindObject
+ // check if a different function is more appropriate
+ TObject* pConf=fpDataDeflater->FindObject("DeflaterConfiguration");
+ if (pConf) {
+ TString cdbEntryPath("HLT/ConfigTPC/");
+ cdbEntryPath += GetComponentID();
+ cdbEntryPath += "HuffmanTables";
+ AliCDBPath cdbPath(cdbEntryPath);
+ AliCDBId cdbId(cdbPath, AliCDBManager::Instance()->GetRun(), AliCDBRunRange::Infinity(), 0, 0);
+ AliCDBMetaData* cdbMetaData=new AliCDBMetaData;
+ cdbMetaData->SetResponsible("ALICE HLT Matthias.Richter@cern.ch");
+ cdbMetaData->SetComment("Huffman encoder configuration");
+ AliCDBEntry* entry=new AliCDBEntry(pConf, cdbId, cdbMetaData, kTRUE);
+
+ entry->SaveAs(fTrainingTableOutput);
+ // this is a small memory leak
+ // seg fault in ROOT object handling if the two objects are deleted
+ // investigate later
+ //delete entry;
+ //delete cdbMetaData;
+ }
+ }
+ delete fpDataDeflater;
+ }
+ fpDataDeflater=NULL;
+
+
if (fTrackGrid) delete fTrackGrid; fTrackGrid=NULL;
if (fSpacePointGrid) delete fSpacePointGrid; fSpacePointGrid=NULL;
fHistogramFile=argv[i++];
return 2;
}
+ // -save-histogram-table
+ if (argument.CompareTo("-save-huffman-table")==0) {
+ if ((bMissingParam=(++i>=argc))) break;
+ fTrainingTableOutput=argv[i++];
+ return 2;
+ }
} while (0); // using do-while only to have break available
if (bMissingParam) {
return bd.fSize;
}
+
+int AliHLTTPCDataCompressionComponent::InitDriftTimeTransformation()
+{
+ /// calculate correction factor and offset for a linear approximation of the
+ /// drift time transformation, separately for A and C side
+ /// Fills fDriftTimeFactorA/C (slope m) and fDriftTimeOffsetA/C (offset n).
+ int iResult=0;
+ // full transformation initialized from the current field and event time stamp
+ AliHLTTPCClusterTransformation transform;
+ if ((iResult=transform.Init( GetBz(), GetTimeStamp()))<0) {
+ HLTError("failed to init AliHLTTPCClusterTransformation: %d", iResult);
+ return iResult;
+ }
+
+ // probe padrow 0 on slice 0 for the A side and slice 18 for the C side
+ if ((iResult=CalculateDriftTimeTransformation(transform, 0, 0, fDriftTimeFactorA, fDriftTimeOffsetA))<0) return iResult;
+ if (fVerbosity>0) HLTInfo("drift time transformation A side: m=%f n=%f", fDriftTimeFactorA, fDriftTimeOffsetA);
+ if ((iResult=CalculateDriftTimeTransformation(transform, 18, 0, fDriftTimeFactorC, fDriftTimeOffsetC))<0) return iResult;
+ if (fVerbosity>0) HLTInfo("drift time transformation C side: m=%f n=%f", fDriftTimeFactorC, fDriftTimeOffsetC);
+
+ return 0;
+}
+
+int AliHLTTPCDataCompressionComponent::CalculateDriftTimeTransformation(AliHLTTPCClusterTransformation& transform,
+ int slice, int padrow,
+ float& m, float& n) const
+{
+ /// calculate correction factor and offset for a linear approximation of the
+ /// drift time transformation by just probing the range of timebins with
+ /// AliHLTTPCClusterTransformation
+ /// Performs an ordinary least-squares fit z = m*time + n over the sampled
+ /// timebins; always returns 0.
+ const int nofSteps=100;
+ vector<float> zvalues;
+
+ int nofTimebins=AliHLTTPCTransform::GetNTimeBins();
+ // NOTE(review): integer division — if GetNTimeBins() ever returns fewer than
+ // nofSteps (100) timebins, stepWidth becomes 0 and the sampling loop below
+ // never advances; confirm the timebin count is always >= 100 or clamp
+ // stepWidth to a minimum of 1
+ int stepWidth=nofTimebins/nofSteps;
+ int time=0;
+ int count=0;
+ float meanT=0.;
+ float meanZ=0.;
+ // first pass: sample z at pad 0 for evenly spaced timebins and accumulate
+ // the sums needed for the mean time and mean z
+ for (time=0; time<nofTimebins; time+=stepWidth, count++) {
+ Float_t xyz[3];
+ transform.Transform(slice, padrow, 0, time, xyz);
+ zvalues.push_back(xyz[2]);
+ meanT+=time;
+ meanZ+=xyz[2];
+ }
+ meanT/=count;
+ meanZ/=count;
+ // second pass: least-squares slope m = sum((t-tbar)*(z-zbar)) / sum((t-tbar)^2)
+ float sumTZ=.0;
+ float sumT2=.0;
+ time=0;
+ for (vector<float>::const_iterator z=zvalues.begin();
+ z!=zvalues.end(); z++, time+=stepWidth) {
+ sumTZ+=(time-meanT)*((*z)-meanZ);
+ sumT2+=(time-meanT)*(time-meanT);
+ }
+ m=sumTZ/sumT2;
+ // offset follows from the fitted slope and the sample means
+ n=meanZ-m*meanT;
+
+ return 0;
+}