]>
Commit | Line | Data |
---|---|---|
7e914051 | 1 | // $Id$ |
ff2f0f94 | 2 | |
892210c7 | 3 | //************************************************************************** |
4 | //* This file is property of and copyright by the ALICE HLT Project * | |
5 | //* ALICE Experiment at CERN, All rights reserved. * | |
6 | //* * | |
7 | //* Primary Authors: Timm Steinbeck <timm@kip.uni-heidelberg.de> * | |
8 | //* for The ALICE HLT Project. * | |
9 | //* * | |
10 | //* Permission to use, copy, modify and distribute this software and its * | |
11 | //* documentation strictly for non-commercial purposes is hereby granted * | |
12 | //* without fee, provided that the above copyright notice appears in all * | |
13 | //* copies and that both the copyright notice and this permission notice * | |
14 | //* appear in the supporting documentation. The authors make no claims * | |
15 | //* about the suitability of this software for any purpose. It is * | |
16 | //* provided "as is" without express or implied warranty. * | |
17 | //************************************************************************** | |
ff2f0f94 | 18 | |
19 | /** @file AliHLTTPCCompModelConverter.cxx | |
20 | @author Timm Steinbeck | |
21 | @author changed by J. Wagner | |
22 | @date 17-11-2007 | |
23 | @brief A copy processing component for the HLT. */ | |
24 | ||
25 | #if __GNUC__ >= 3 | |
26 | using namespace std; | |
27 | #endif | |
28 | ||
29 | #include "AliHLTTPCCompModelConverter.h" | |
30 | #include "AliHLTTPCTransform.h" | |
31 | #include "AliHLTTPCTrack.h" | |
32 | #include "AliHLTTPCModelTrack.h" | |
33 | #include "AliHLTTPCCompDataCompressorHelper.h" | |
34 | #include <cerrno> | |
35 | ||
36 | AliHLTTPCCompModelConverter::AliHLTTPCCompModelConverter(): | |
37 | fInputTrackArray(), | |
38 | fOutputTrackArray("AliHLTTPCModelTrack"), | |
892210c7 | 39 | fModelAnalysisInstance(NULL), |
40 | fMinHits(0) | |
ff2f0f94 | 41 | { |
42 | // see header file for class documentation | |
43 | for ( UInt_t slice=0; slice<36; slice++ ) | |
44 | for ( UInt_t patch=0; patch<6; patch++ ) | |
45 | { | |
46 | fClusterUsedSizes[slice][patch] = 0; | |
47 | fClusterUsed[slice][patch] = NULL; | |
48 | } | |
49 | Init(); | |
50 | fMinHits = 5; | |
51 | } | |
52 | ||
53 | AliHLTTPCCompModelConverter::AliHLTTPCCompModelConverter(AliHLTTPCCompModelAnalysis* modelanalysis): | |
54 | fInputTrackArray(), | |
55 | fOutputTrackArray("AliHLTTPCModelTrack"), | |
f0d05e66 | 56 | fModelAnalysisInstance(modelanalysis), |
57 | fMinHits(0) | |
ff2f0f94 | 58 | { |
59 | // see header file for class documentation | |
60 | for ( UInt_t slice=0; slice<36; slice++ ) | |
61 | for ( UInt_t patch=0; patch<6; patch++ ) | |
62 | { | |
63 | fClusterUsedSizes[slice][patch] = 0; | |
64 | fClusterUsed[slice][patch] = NULL; | |
65 | } | |
66 | Init(); | |
67 | fMinHits = 5; | |
68 | } | |
69 | ||
70 | AliHLTTPCCompModelConverter::~AliHLTTPCCompModelConverter() | |
71 | { | |
72 | // see header file for class documentation | |
73 | for ( UInt_t slice=0; slice<36; slice++ ) | |
74 | for ( UInt_t patch=0; patch<6; patch++ ) | |
75 | { | |
76 | if ( fClusterUsed[slice][patch] ) | |
77 | { | |
78 | delete [] fClusterUsed[slice][patch]; | |
79 | fClusterUsed[slice][patch] = NULL; | |
80 | } | |
81 | } | |
82 | } | |
83 | ||
84 | int AliHLTTPCCompModelConverter::Init() | |
85 | { | |
86 | // see header file for class documentation | |
87 | fInputTrackArray.Reset(); | |
88 | fOutputTrackArray.Reset(); | |
89 | for ( UInt_t slice=0; slice<36; slice++ ) | |
90 | for ( UInt_t patch=0; patch<6; patch++ ) | |
91 | fClusters[slice][patch] = NULL; | |
92 | ||
93 | return 0; | |
94 | } | |
95 | ||
96 | int AliHLTTPCCompModelConverter::SetInputTracks( AliHLTTPCTrackletData* tracklets ) | |
97 | { | |
98 | // see header file for class documentation | |
99 | HLTDebug( "Filling %u tracks", (unsigned)tracklets->fTrackletCnt ); | |
100 | fInputTrackArray.FillTracks( tracklets->fTrackletCnt, tracklets->fTracklets ); | |
101 | return 0; | |
102 | } | |
103 | ||
104 | int AliHLTTPCCompModelConverter::SetInputClusters( AliHLTTPCClusterData* clusters, UInt_t slice, UInt_t patch ) | |
105 | { | |
106 | // see header file for class documentation | |
107 | if ( slice>=36 || patch>=6 ) | |
108 | return EINVAL; | |
109 | if ( fClusters[slice][patch] ) | |
110 | return EBUSY; | |
111 | fClusters[slice][patch] = clusters; | |
112 | if ( fClusterUsedSizes[slice][patch]<clusters->fSpacePointCnt || | |
113 | fClusterUsedSizes[slice][patch]>clusters->fSpacePointCnt*8 ) | |
114 | { | |
115 | delete [] fClusterUsed[slice][patch]; | |
116 | fClusterUsed[slice][patch] = NULL; | |
117 | } | |
118 | if ( !fClusterUsed[slice][patch] ) | |
119 | { | |
120 | fClusterUsed[slice][patch] = new bool[clusters->fSpacePointCnt]; | |
121 | if ( !fClusterUsed[slice][patch] ) | |
122 | { | |
123 | HLTDebug( "Out of memory trying to allocate usage data for %u clusters", (unsigned)clusters->fSpacePointCnt ); | |
124 | return ENOMEM; | |
125 | } | |
126 | } | |
127 | for ( unsigned long nn=0; nn<clusters->fSpacePointCnt; nn++ ) | |
128 | fClusterUsed[slice][patch][nn]=false; | |
129 | HLTDebug( "Filling %u clusters", (unsigned)clusters->fSpacePointCnt ); | |
130 | return 0; | |
131 | } | |
132 | ||
void AliHLTTPCCompModelConverter::Convert()
{
  // Convert the input tracks into model tracks (fOutputTrackArray) with
  // per-padrow cluster information in raw pad/time coordinates, marking
  // every attached cluster as used. Finishes by calling ExpandTrackData()
  // to attach remaining unused clusters.
  fInputTrackArray.QSort();
  for(Int_t i=0; i<fInputTrackArray.GetNTracks(); i++)
    {
      AliHLTTPCTrack *intrack = fInputTrackArray.GetCheckedTrack(i);

      // NO WARNING IF intrack = NULL!
      if(!intrack) continue;

      // NOTE(review): 'break' (not 'continue') assumes QSort() ordered the
      // tracks so that all following tracks also fail the cut — confirm.
      if((unsigned)intrack->GetNHits()<fMinHits)
	{
	  HLTDebug("Track %d with %d clusters is below minimum of %d clusters",i,intrack->GetNHits(),fMinHits);
	  break;
	};

      // LOSS OF TRACKS due to following statement possible!
      if(intrack->GetPt()<0.1)
	{
	  HLTDebug("Discarding track with low pt.");
	  if(fModelAnalysisInstance)
	    {
	      if(fModelAnalysisInstance->GetfModelAnalysis()) // analysis of model
		{
		  // record the discarded track for the model analysis
		  fModelAnalysisInstance->MarkTrashTrack(intrack);
		}
	    }

	  continue;
	}

      intrack->CalculateHelix();

      // Copy the global track parameters onto a fresh model track.
      AliHLTTPCModelTrack *outtrack = (AliHLTTPCModelTrack*)fOutputTrackArray.NextTrack();
      outtrack->SetNHits(intrack->GetNHits());
      outtrack->SetRowRange(intrack->GetFirstRow(),intrack->GetLastRow());
      outtrack->SetFirstPoint(intrack->GetFirstPointX(),intrack->GetFirstPointY(),intrack->GetFirstPointZ());
      outtrack->SetLastPoint(intrack->GetLastPointX(),intrack->GetLastPointY(),intrack->GetLastPointZ());
      outtrack->SetPt(intrack->GetPt());
      outtrack->SetPsi(intrack->GetPsi());
      outtrack->SetTgl(intrack->GetTgl());
      outtrack->SetCharge(intrack->GetCharge());
      outtrack->CalculateHelix();
      Int_t nhits = intrack->GetNHits();
      UInt_t *hitids = intrack->GetHitNumbers();
      // Hit id layout: bits 25-31 slice, bits 22-24 patch, bits 0-21 index.
      Int_t origslice = (hitids[nhits-1]>>25)&0x7f;
      outtrack->Init(origslice,-1);

      // Walk the track's clusters from the last hit id to the first.
      for(Int_t j=nhits-1; j>=0; j--)
	{
	  UInt_t id=hitids[j];
	  Int_t slice = (id>>25)&0x7f;
	  Int_t patch = (id>>22)&0x7;
	  UInt_t pos = id&0x3fffff;

	  // Skip hits whose cluster block was never registered or whose
	  // index is out of range for the registered block.
	  if ( !fClusters[slice][patch] )
	    {
	      //HLTWarning( "No clusters for slice %d, patch %d", slice, patch );
	      continue;
	    }
	  if ( !fClusterUsed[slice][patch] )
	    {
	      HLTWarning( "No cluster used data for slice %d, patch %d", slice, patch );
	      continue;
	    }
	  if ( fClusters[slice][patch]->fSpacePointCnt<=pos )
	    {
	      HLTWarning( "Clusters position %d too large in slice %d, patch %d (%u max.)", pos,
			  slice, patch, fClusters[slice][patch]->fSpacePointCnt );
	      continue;
	    }

	  AliHLTTPCSpacePointData *points = fClusters[slice][patch]->fSpacePoints;
	  bool* clustersUsed = fClusterUsed[slice][patch];
	  Float_t xyz[3] = {points[pos].fX,points[pos].fY,points[pos].fZ};
	  Int_t padrow = points[pos].fPadRow;

	  //Calculate the crossing point between track and padrow
	  Float_t angle = 0; //Perpendicular to padrow in local coordinates
	  AliHLTTPCTransform::Local2GlobalAngle(&angle,slice);
	  if(!intrack->CalculateReferencePoint(angle,AliHLTTPCTransform::Row2X(padrow)))
	    {
	      // Abort the cluster loop of this track on a failed crossing
	      // point calculation; already attached clusters are kept.
	      HLTError( "AliHLTDataCompressor::FillData : Error in crossing point calc on slice %d, padrow %d", slice, padrow );
	      break;
	    }

	  Float_t xyzCross[3] = {intrack->GetPointX(),intrack->GetPointY(),intrack->GetPointZ()};

	  // Transform the crossing point and the cluster position into raw
	  // (pad/time) coordinates of this sector/row.
	  Int_t sector,row;
	  AliHLTTPCTransform::Slice2Sector(slice,padrow,sector,row);
	  AliHLTTPCTransform::Global2Raw(xyzCross,sector,row);
#if 1
	  AliHLTTPCTransform::Local2Raw(xyz,sector,row);
#else
	  AliHLTTPCTransform::Global2Raw(xyz,sector,row);
#endif

	  outtrack->SetPadHit(padrow,xyzCross[1]);
	  outtrack->SetTimeHit(padrow,xyzCross[2]);

	  outtrack->SetCrossingAngleLUT(padrow,intrack->GetCrossingAngle(padrow,slice));
	  outtrack->CalculateClusterWidths(padrow,kTRUE); // calculates parSigmas (with parametrisation) in raw coordinates
	  patch = AliHLTTPCTransform::GetPatch(padrow);
	  // sigmay in units of pads (quantisation!)
	  Float_t sigmaY2 = points[pos].fSigmaY2 / pow(AliHLTTPCTransform::GetPadPitchWidth(patch),2);
	  // sigmaz in units of time bins
	  Float_t sigmaZ2 = points[pos].fSigmaZ2 / pow(AliHLTTPCTransform::GetZWidth(),2);
	  outtrack->SetCluster(padrow,xyz[1],xyz[2],points[pos].fCharge,sigmaY2,sigmaZ2,3);

	  //IMPORTANT: Set the slice in which cluster is, you need it in AliHLTTPCModelTrack::FillTrack!
	  outtrack->GetClusterModel(padrow)->fSlice=slice;
#ifdef MODELDEBUG
	  outtrack->GetClusterModel(padrow)->fID=points[pos].fID;
	  HLTDebug( "Track %d cluster for padrow %d ID: %u (0x%08X) - fSlice: %u", i, padrow,
		    outtrack->GetClusterModel(padrow)->fID, outtrack->GetClusterModel(padrow)->fID,
		    (unsigned)outtrack->GetClusterModel(padrow)->fSlice );
#endif
	  clustersUsed[pos] = true;//Mark this cluster as used.
	} //end of clusters for each track

    } // end of track-loop
  // Try to attach remaining unused clusters to the converted tracks.
  ExpandTrackData();
}
285 | ||
void AliHLTTPCCompModelConverter::ExpandTrackData()
{
  // Loop over the converted model tracks and try to assign still-unused
  // clusters to padrows where the track has no cluster yet.
  // Only clusters whose pad/time residuals fit into the compressed
  // representation (GetNPadBits()/GetNTimeBits()) are candidates; among
  // those the closest one is taken.

  HLTDebug( "Expanding %lu tracks", (unsigned long)fOutputTrackArray.GetNTracks() );
  for(Int_t i=0; i<fOutputTrackArray.GetNTracks(); i++)
    {
      AliHLTTPCModelTrack *track = (AliHLTTPCModelTrack*)fOutputTrackArray.GetCheckedTrack(i);

      if(!track) continue;

      // tracks that hit every row already cannot be expanded in the current model!
      if(track->GetNHits() == AliHLTTPCTransform::GetNRows()) continue;

      Int_t nhits = track->GetNHits();

      // Walk the padrows from outside in, remembering the slice of the
      // last padrow on which the track does have a cluster.
      Int_t lastSlice=-1;
      for(Int_t padrow=AliHLTTPCTransform::GetNRows()-1; padrow>=0; padrow--)
	{
	  if(track->IsPresent(padrow))
	    {
	      lastSlice = track->GetClusterModel(padrow)->fSlice;
	      continue;
	    }

	  if(lastSlice < 0) //the outer cluster is missing, so skip it - it will be written anyhow.
	    continue;

	  //Check the slice of the next padrow:
	  Int_t nextPadrow = padrow-1;
	  Int_t nextSlice = -1;
	  while(nextPadrow >=0)
	    {
	      if(track->IsPresent(nextPadrow))
		{
		  nextSlice = track->GetClusterModel(nextPadrow)->fSlice;
		  break;
		}
	      nextPadrow--;
	    }
	  if(nextSlice>=0)
	    if(nextSlice != lastSlice)//The track crosses a slice boundary here
	      continue;

	  // NOTE(review): only patch 0 of the slice is searched below —
	  // presumably the cluster data arrive as one block per slice in
	  // patch 0; confirm against the publisher configuration.
	  if ( !fClusters[lastSlice][0] )
	    {
	      HLTWarning( "No clusters for slice %d, patch %d", lastSlice, 0 );
	      continue;
	    }
	  if ( !fClusterUsed[lastSlice][0] )
	    {
	      HLTWarning( "No cluster used data for slice %d, patch %d", lastSlice, 0 );
	      continue;
	    }
	  AliHLTTPCSpacePointData *points = fClusters[lastSlice][0]->fSpacePoints;
	  bool* clustersUsed = fClusterUsed[lastSlice][0];

	  // Crossing point of the track with this padrow, in local coordinates.
	  Float_t angle = 0;
	  AliHLTTPCTransform::Local2GlobalAngle(&angle,lastSlice);
	  if(!track->CalculateReferencePoint(angle,AliHLTTPCTransform::Row2X(padrow)))
	    continue;
	  Float_t xyzCross[3] = {track->GetPointX(),track->GetPointY(),track->GetPointZ()};
	  AliHLTTPCTransform::Global2LocHLT(xyzCross,lastSlice);
	  // Search the closest unused cluster on this padrow.
	  Float_t mindist = 123456789;
	  AliHLTTPCSpacePointData *closest=0;
	  UInt_t closestJ=0;
	  for(UInt_t j=0; j<fClusters[lastSlice][0]->fSpacePointCnt; j++)
	    {
	      if (clustersUsed[j]) continue; // Cluster already used
	      // NOTE(review): the continue/break pair assumes the space
	      // points are sorted by padrow within the block — confirm.
	      if(points[j].fPadRow < padrow) continue;
	      if(points[j].fPadRow > padrow) break;
	      Float_t xyz[3] = {points[j].fX,points[j].fY,points[j].fZ};
#if 1
#else
	      AliHLTTPCTransform::Global2LocHLT(xyz,lastSlice);
#endif

	      //Check for overflow:
	      // Reject clusters whose quantised residuals would not fit into
	      // the pad/time bit fields of the compressed format.
	      Int_t temp = (Int_t)rint((xyzCross[1]-xyz[1])/AliHLTTPCCompDataCompressorHelper::GetXYResidualStep(padrow));
	      if( abs(temp) > 1<<(AliHLTTPCCompDataCompressorHelper::GetNPadBits()-1))
		continue;

	      temp = (Int_t)rint((xyzCross[2]-xyz[2])/AliHLTTPCCompDataCompressorHelper::GetZResidualStep(padrow));
	      if( abs(temp) > 1<<(AliHLTTPCCompDataCompressorHelper::GetNTimeBits()-1))
		continue;

	      Float_t dist = sqrt( pow(xyzCross[1]-xyz[1],2) + pow(xyzCross[2]-xyz[2],2) );
	      if(dist < mindist)
		{
		  closest = &points[j];
		  closestJ = j;
		  mindist = dist;
		}
	    }
	  if(closest) //there was a cluster assigned
	    {
	      // Attach the cluster to the track in raw coordinates, exactly
	      // mirroring the cluster handling in Convert().
	      Int_t sector,row;
	      Float_t xyz[3] = {closest->fX,closest->fY,closest->fZ};
	      AliHLTTPCTransform::Slice2Sector(lastSlice,padrow,sector,row);
	      AliHLTTPCTransform::Local2Raw(xyzCross,sector,row);
#if 1
	      AliHLTTPCTransform::Local2Raw(xyz,sector,row);
#else
	      AliHLTTPCTransform::Global2Raw(xyz,sector,row);
#endif

	      track->SetPadHit(padrow,xyzCross[1]);
	      track->SetTimeHit(padrow,xyzCross[2]);

	      Float_t angle = track->GetCrossingAngle(padrow,lastSlice);
	      track->SetCrossingAngleLUT(padrow,angle);
	      track->CalculateClusterWidths(padrow,kTRUE);
	      Int_t patch = AliHLTTPCTransform::GetPatch(padrow);
	      Float_t sigmaY2 = closest->fSigmaY2 / pow(AliHLTTPCTransform::GetPadPitchWidth(patch),2);
	      Float_t sigmaZ2 = closest->fSigmaZ2 / pow(AliHLTTPCTransform::GetZWidth(),2);
	      track->SetCluster(padrow,xyz[1],xyz[2],closest->fCharge,sigmaY2,sigmaZ2,3);

	      nhits++;

	      //IMPORTANT: Set the slice in which cluster is, you need it in AliHLTTPCModelTrack::FillTrack!
	      track->GetClusterModel(padrow)->fSlice=lastSlice;
#ifdef MODELDEBUG
	      track->GetClusterModel(padrow)->fID=closest->fID;
	      HLTDebug( "Track %d cluster for padrow %d ID: %u (0x%08X) - fSlice: %u", i, padrow,
			track->GetClusterModel(padrow)->fID, track->GetClusterModel(padrow)->fID,
			track->GetClusterModel(padrow)->fSlice );
#endif
	      clustersUsed[closestJ] = true;//Mark this cluster as used.
	    }
	}
      // Expand the track's cluster array to cover all padrows.
      track->SetNClusters(AliHLTTPCTransform::GetNRows());
    }

}
434 | ||
435 | unsigned long AliHLTTPCCompModelConverter::GetOutputModelDataSize() | |
436 | { | |
437 | // see header file for class documentation | |
438 | unsigned long dataSize=0; | |
439 | Short_t ntracks = fOutputTrackArray.GetNTracks(); | |
440 | ||
441 | dataSize += sizeof(AliHLTUInt32_t); | |
442 | ||
443 | for(Int_t i=0; i<ntracks; i++) | |
444 | { | |
445 | AliHLTTPCModelTrack *track = (AliHLTTPCModelTrack*)fOutputTrackArray.GetCheckedTrack(i); | |
446 | if ( !track ) | |
447 | continue; | |
448 | ||
449 | dataSize += sizeof(AliHLTTPCTrackModel)+track->GetNClusters()*sizeof(AliHLTTPCClusterModel); | |
450 | } | |
451 | return dataSize; | |
452 | } | |
453 | ||
454 | int AliHLTTPCCompModelConverter::OutputModelData( AliHLTUInt8_t* data ) | |
455 | { | |
456 | // see header file for class documentation | |
457 | unsigned long dataOffset=0; | |
458 | Short_t ntracks = fOutputTrackArray.GetNTracks(); | |
459 | ||
460 | AliHLTTPCClusterModel *clusters=0; | |
461 | AliHLTTPCTrackModel *model=0; | |
462 | ||
463 | *(AliHLTUInt32_t*)data = 0; // Write format version number | |
464 | dataOffset += sizeof(AliHLTUInt32_t); | |
465 | ||
466 | for(Int_t i=0; i<ntracks; i++) | |
467 | { | |
468 | AliHLTTPCModelTrack *track = (AliHLTTPCModelTrack*)fOutputTrackArray.GetCheckedTrack(i); | |
469 | if ( !track ) | |
470 | continue; | |
471 | ||
472 | track->FillModel(); | |
473 | model = track->GetModel(); | |
474 | ||
475 | clusters = track->GetClusters(); | |
476 | ||
477 | // validation test | |
478 | //HLTInfo( "Track %d clusters: %u", i, (unsigned)track->GetNPresentClusters() ); | |
479 | ||
480 | for ( Int_t jj=0; jj<track->GetNClusters(); jj++ ) | |
481 | { | |
482 | //HLTDebug( " Cluster %d fPresent: %u", jj, (unsigned)clusters[jj].fPresent ); | |
483 | } | |
484 | ||
485 | memcpy( data+dataOffset, model, sizeof(AliHLTTPCTrackModel) ); | |
486 | dataOffset += sizeof(AliHLTTPCTrackModel); | |
487 | ||
488 | memcpy( data+dataOffset, clusters, track->GetNClusters()*sizeof(AliHLTTPCClusterModel) ); | |
489 | dataOffset += track->GetNClusters()*sizeof(AliHLTTPCClusterModel); | |
490 | } | |
491 | return 0; | |
492 | } | |
493 | ||
void AliHLTTPCCompModelConverter::SelectRemainingClusters()
{
  // see header file for class documentation
  //Select which remaining clusters to write in addition to the compressed data.
  //In particular one can here make sure that "important" clusters are not missed:
  //The offline track finder perform seed finding in the outer padrows;
  //the first seeding is using pair of points on outermost padrow and
  //0.125*nrows more rows towards the vertex. The second seeding uses pair
  //of points on the outermost padrow-0.5*0.125*nrows and 0.125*nrows + 0.5*0.125*nrows
  //more rows towards the vertex. In order to evaluate the seeds, the track offline
  //track finder checks whether a certain amount of possible clusters (padrows) is
  //attached to the track, and then the kalman filtering starts.
  //To ensure a minimal loss off efficiency, all clusters in this region should be
  //intact.....
  //
  // Clusters that are NOT kept are flagged as used in fClusterUsed so that
  // GetRemainingClusters() skips them.

  Int_t nrows = AliHLTTPCTransform::GetNRows();
  // Seeding region parameters derived from the row count (see note above).
  Int_t gap=(Int_t)(0.125*nrows), shift=(Int_t)(0.5*gap);

  for(Int_t slice=0; slice<36; slice++)
    {
      for(Int_t patch=0; patch<6; patch++)
	{
	  if ( !fClusters[slice][patch] )
	    continue;
	  AliHLTTPCSpacePointData *points = fClusters[slice][patch]->fSpacePoints;
	  bool* clustersUsed = fClusterUsed[slice][patch];
	  for(UInt_t i=0; i<fClusters[slice][patch]->fSpacePointCnt; i++)
	    {
	      if (clustersUsed[i]) continue; //Already removed
	      Int_t padrow = (Int_t)points[i].fPadRow;

	      //Check the widths (errors) of the cluster, and remove big bastards:
	      Float_t padw = sqrt(points[i].fSigmaY2) / AliHLTTPCTransform::GetPadPitchWidth(AliHLTTPCTransform::GetPatch(padrow));
	      Float_t timew = sqrt(points[i].fSigmaZ2) / AliHLTTPCTransform::GetZWidth();
	      if(padw >= 2.55 || timew >= 2.55)//Because we use 1 byte to store
		{
		  clustersUsed[i] = true;
		  continue;
		}

	      // NOTE(review): xyz/sector/row are computed but no longer used —
	      // apparently leftovers of the commented-out border check that
	      // followed here; candidates for removal.
	      Float_t xyz[3] = {points[i].fX,points[i].fY,points[i].fZ};
	      Int_t sector,row;
	      AliHLTTPCTransform::Slice2Sector(slice,padrow,sector,row);
	      AliHLTTPCTransform::Global2Raw(xyz,sector,row);

	      if(padrow >= nrows-1-gap-shift) continue;//save all the clusters in this region

	      //Save clusters on padrows used for offline seeding:
	      if(padrow == nrows - 1 || padrow == nrows - 1 - gap || //First seeding
		 padrow == nrows - 1 - shift || padrow == nrows - 1 - gap - shift) //Second seeding
		continue;

	      //Cluster did not meet any of the above criteria, so disregard it:
	      clustersUsed[i] = true;
	    }
	}
    }

}
562 | ||
563 | unsigned long AliHLTTPCCompModelConverter::GetRemainingClustersOutputDataSize() | |
564 | { | |
565 | // see header file for class documentation | |
566 | #if 0 | |
567 | for ( UInt_t slice=0; slice<36; slice++ ) | |
568 | for ( UInt_t patch=0; patch<6; patch++ ) | |
569 | { | |
570 | bool* clustersUsed = fClusterUsed[slice][patch]; | |
571 | if ( !clustersUsed || !fClusters[slice][patch] ) | |
572 | continue; | |
573 | for ( UInt_t pos=0; pos<fClusters[slice][patch]->fSpacePointCnt; pos++ ) | |
574 | { | |
575 | if ( !clustersUsed[pos] ) | |
576 | clusterCnt++; | |
577 | } | |
578 | } | |
579 | return clusterCnt*sizeof(AliHLTTPCClusterModel); | |
580 | #else | |
581 | const Int_t nrows = AliHLTTPCTransform::GetNRows(); | |
ce622827 | 582 | Int_t * npoints = new Int_t[nrows]; |
ff2f0f94 | 583 | unsigned long dataWritten = 0; |
584 | ||
585 | dataWritten += sizeof(AliHLTUInt32_t); | |
586 | ||
587 | for(Int_t slice=0; slice<35; slice++) | |
588 | { | |
589 | for(Int_t patch=0; patch < 6; patch++) | |
590 | { | |
591 | if ( !fClusters[slice][patch] ) | |
592 | { | |
593 | dataWritten++; | |
594 | continue; | |
595 | } | |
596 | AliHLTTPCSpacePointData *points = fClusters[slice][patch]->fSpacePoints; | |
597 | bool* clustersUsed = fClusterUsed[slice][patch]; | |
598 | if ( !clustersUsed ) | |
599 | continue; | |
600 | memset(npoints,0,nrows*sizeof(Int_t)); | |
601 | Int_t nonZeroRows=0; | |
602 | ||
603 | for(UInt_t j=0; j<fClusters[slice][patch]->fSpacePointCnt; j++) | |
604 | { | |
605 | //if(points[j].fCharge == 0) continue; //has been used | |
606 | if ( clustersUsed[j] ) continue; //has been used | |
607 | if ( !npoints[points[j].fPadRow] ) | |
608 | nonZeroRows++; | |
609 | npoints[points[j].fPadRow]++; | |
610 | } | |
611 | ||
612 | dataWritten++; | |
613 | ||
614 | Int_t size =0; | |
615 | Byte_t *data = 0; | |
616 | AliHLTTPCRemainingRow *tempPt=0; | |
617 | ||
618 | Int_t lastRow = -2; | |
619 | Int_t localcounter=0; | |
620 | ||
621 | for(UInt_t j=0; j<fClusters[slice][patch]->fSpacePointCnt; j++) | |
622 | { | |
623 | //if(points[j].fCharge == 0) continue; //has been used | |
624 | if ( clustersUsed[j] ) continue; //has been used | |
625 | ||
626 | Int_t padrow = points[j].fPadRow; | |
627 | if(padrow != lastRow) | |
628 | { | |
629 | if(lastRow != -2) | |
630 | { | |
631 | if(!tempPt) | |
632 | { | |
633 | HLTError( "Zero row pointer " ); | |
634 | return EINVAL; | |
635 | } | |
636 | if(localcounter != tempPt->fNClusters) | |
637 | { | |
638 | HLTError( "Mismatching clustercounter %lu - %d ", | |
639 | (unsigned long)localcounter, (Int_t)tempPt->fNClusters ); | |
640 | return EINVAL; | |
641 | } | |
642 | dataWritten += size; | |
643 | } | |
644 | if(data) | |
645 | delete [] data; | |
646 | size = sizeof(AliHLTTPCRemainingRow) + npoints[padrow]*sizeof(AliHLTTPCRemainingCluster); | |
647 | data = new Byte_t[size]; | |
648 | tempPt = (AliHLTTPCRemainingRow*)data; | |
649 | ||
650 | localcounter=0; | |
651 | tempPt->fPadRow = padrow; | |
652 | tempPt->fNClusters = npoints[padrow]; | |
653 | lastRow = padrow; | |
654 | } | |
655 | if(localcounter >= npoints[padrow]) | |
656 | { | |
657 | HLTError( "Cluster counter out of range: %lu - %lu", | |
658 | (unsigned long)localcounter, (unsigned long)npoints[padrow] ); | |
659 | return EINVAL; | |
660 | } | |
661 | ||
662 | localcounter++; | |
663 | } | |
664 | ||
665 | //Write the last row: | |
666 | if ( tempPt ) | |
667 | { | |
668 | dataWritten += size; | |
669 | if(data) | |
670 | delete [] data; | |
671 | } | |
672 | } | |
673 | } | |
ce622827 | 674 | delete [] npoints; |
ff2f0f94 | 675 | return dataWritten; |
676 | #endif | |
677 | } | |
678 | ||
679 | int AliHLTTPCCompModelConverter::GetRemainingClusters( AliHLTUInt8_t* const data, unsigned long& dataSize ) | |
680 | { | |
681 | // see header file for class documentation | |
682 | ||
683 | const Int_t nrows = AliHLTTPCTransform::GetNRows(); | |
ce622827 | 684 | Int_t * npoints = new Int_t[nrows]; |
ff2f0f94 | 685 | unsigned long dataWritten = 0; |
686 | AliHLTUInt8_t* writePtr = data; | |
687 | ||
688 | *(AliHLTUInt32_t*)writePtr = 0; // Write format version | |
689 | dataWritten += sizeof(AliHLTUInt32_t); | |
690 | writePtr += sizeof(AliHLTUInt32_t); | |
691 | ||
692 | for(Int_t slice=0; slice<=35; slice++) | |
693 | { | |
694 | for(Int_t patch=0; patch < 6; patch++) | |
695 | { | |
696 | if ( !fClusters[slice][patch] ) | |
697 | { | |
698 | *writePtr = (AliHLTUInt8_t)0; | |
699 | writePtr++; | |
700 | dataWritten++; | |
701 | continue; | |
702 | } | |
703 | AliHLTTPCSpacePointData *points = fClusters[slice][patch]->fSpacePoints; | |
704 | bool* clustersUsed = fClusterUsed[slice][patch]; | |
705 | if ( !clustersUsed ) | |
706 | continue; | |
707 | memset(npoints,0,nrows*sizeof(Int_t)); | |
708 | Int_t nonZeroRows=0; | |
709 | ||
710 | for(UInt_t j=0; j<fClusters[slice][patch]->fSpacePointCnt; j++) | |
711 | { | |
712 | //if(points[j].fCharge == 0) continue; //has been used | |
713 | if ( clustersUsed[j] ) continue; //has been used | |
714 | if ( !npoints[points[j].fPadRow] ) | |
715 | nonZeroRows++; | |
716 | npoints[points[j].fPadRow]++; | |
717 | } | |
718 | ||
719 | *writePtr = (AliHLTUInt8_t)nonZeroRows; | |
720 | writePtr++; | |
721 | dataWritten++; | |
722 | ||
723 | Int_t size =0; | |
724 | Byte_t *data = 0; | |
725 | AliHLTTPCRemainingRow *tempPt=0; | |
726 | ||
727 | Int_t lastRow = -2; | |
728 | Int_t localcounter=0; | |
729 | ||
730 | for(UInt_t j=0; j<fClusters[slice][patch]->fSpacePointCnt; j++) | |
731 | { | |
732 | //if(points[j].fCharge == 0) continue; //has been used | |
733 | if ( clustersUsed[j] ) continue; //has been used | |
734 | ||
735 | Int_t padrow = points[j].fPadRow; | |
736 | if(padrow != lastRow) | |
737 | { | |
738 | if(lastRow != -2) | |
739 | { | |
740 | if(!tempPt) | |
741 | { | |
742 | HLTError( "Zero row pointer " ); | |
743 | return EINVAL; | |
744 | } | |
745 | if(localcounter != tempPt->fNClusters) | |
746 | { | |
747 | HLTError( "Mismatching clustercounter %lu - %d ", | |
748 | (unsigned long)localcounter, (Int_t)tempPt->fNClusters ); | |
749 | return EINVAL; | |
750 | } | |
751 | //cout<<"Writing row "<<(int)tempPt->fPadRow<<" with "<<(int)tempPt->fNClusters<<" clusters"<<endl; | |
752 | //fwrite(tempPt,size,1,outfile); | |
753 | if ( dataWritten+size > dataSize ) | |
754 | { | |
755 | HLTWarning( "Cannot write remaining clusters to output. Data size too large (exceeding %lu bytes)", (unsigned long)dataSize ); | |
756 | return ENOBUFS; | |
757 | } | |
758 | memcpy( writePtr, tempPt, size ); | |
759 | dataWritten += size; | |
760 | writePtr += size; | |
761 | } | |
762 | if(data) | |
763 | delete [] data; | |
764 | size = sizeof(AliHLTTPCRemainingRow) + npoints[padrow]*sizeof(AliHLTTPCRemainingCluster); | |
765 | data = new Byte_t[size]; | |
766 | tempPt = (AliHLTTPCRemainingRow*)data; | |
767 | ||
768 | localcounter=0; | |
769 | tempPt->fPadRow = padrow; | |
770 | tempPt->fNClusters = npoints[padrow]; | |
771 | lastRow = padrow; | |
772 | } | |
773 | if(localcounter >= npoints[padrow]) | |
774 | { | |
775 | HLTError( "Cluster counter out of range: %lu - %lu", | |
776 | (unsigned long)localcounter, (unsigned long)npoints[padrow] ); | |
777 | return EINVAL; | |
778 | } | |
779 | ||
780 | Float_t xyz[3] = {points[j].fX,points[j].fY,points[j].fZ}; | |
781 | Int_t sector,row; | |
782 | AliHLTTPCTransform::Slice2Sector(slice,padrow,sector,row); | |
783 | #if 1 | |
784 | AliHLTTPCTransform::Local2Raw(xyz,sector,row); | |
785 | #else | |
786 | AliHLTTPCTransform::Global2Raw(xyz,sector,row); | |
787 | #endif | |
788 | ||
789 | Float_t padw = points[j].fSigmaY2 / pow(AliHLTTPCTransform::GetPadPitchWidth(AliHLTTPCTransform::GetPatch(padrow)),2); | |
790 | Float_t timew = points[j].fSigmaZ2 / pow(AliHLTTPCTransform::GetZWidth(),2); | |
791 | tempPt->fClusters[localcounter].fPad = xyz[1]; | |
792 | tempPt->fClusters[localcounter].fTime = xyz[2]; | |
793 | tempPt->fClusters[localcounter].fCharge = points[j].fCharge; | |
794 | tempPt->fClusters[localcounter].fSigmaY2 = padw; | |
795 | tempPt->fClusters[localcounter].fSigmaZ2 = timew; | |
796 | #ifdef MODELDEBUG | |
797 | tempPt->fClusters[localcounter].fID = points[j].fID; | |
798 | #endif | |
799 | localcounter++; | |
800 | if(fModelAnalysisInstance) | |
801 | { | |
802 | if(fModelAnalysisInstance->GetfModelAnalysis()) | |
803 | { | |
804 | fModelAnalysisInstance->MarkTrashCluster(fClusters[slice][patch], slice, patch); | |
805 | } | |
806 | } | |
807 | } | |
808 | ||
809 | ||
810 | //Write the last row: | |
811 | if ( dataWritten+size > dataSize ) | |
812 | { | |
813 | HLTWarning( "Cannot write remaining clusters to output. Data size too large (exceeding %lu bytes)", (unsigned long)dataSize ); | |
814 | return ENOBUFS; | |
815 | } | |
816 | if ( tempPt ) | |
817 | { | |
818 | memcpy( writePtr, tempPt, size ); | |
819 | dataWritten += size; | |
820 | writePtr += size; | |
821 | if(data) | |
822 | delete [] data; | |
823 | } | |
824 | } | |
825 | } | |
826 | dataSize = dataWritten; | |
ce622827 | 827 | |
828 | delete [] npoints; | |
ff2f0f94 | 829 | return 0; |
830 | } |