7e914051 1// $Id$
ff2f0f94 2
892210c7 3//**************************************************************************
4//* This file is property of and copyright by the ALICE HLT Project *
5//* ALICE Experiment at CERN, All rights reserved. *
6//* *
7//* Primary Authors: Timm Steinbeck <timm@kip.uni-heidelberg.de> *
8//* for The ALICE HLT Project. *
9//* *
10//* Permission to use, copy, modify and distribute this software and its *
11//* documentation strictly for non-commercial purposes is hereby granted *
12//* without fee, provided that the above copyright notice appears in all *
13//* copies and that both the copyright notice and this permission notice *
14//* appear in the supporting documentation. The authors make no claims *
15//* about the suitability of this software for any purpose. It is *
16//* provided "as is" without express or implied warranty. *
17//**************************************************************************
ff2f0f94 18
19/** @file AliHLTTPCCompModelConverter.cxx
20 @author Timm Steinbeck
21 @author changed by J. Wagner
22 @date 17-11-2007
 23 @brief Converter of HLT TPC tracks and clusters into the compression track model. */
24
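// Typical usage sequence (sketch only, based on the methods implemented below;
// the exact call order is defined by the calling component):
//   Init();                                   // reset track arrays and cluster pointers
//   SetInputTracks(tracklets);                // fill the input track array
//   SetInputClusters(clusters, slice, patch); // register cluster blocks per slice/patch
//   Convert();                                // build model tracks and attach clusters
//   OutputModelData(buffer);                  // write model data (size from GetOutputModelDataSize())
//   SelectRemainingClusters();                // flag clusters that will not be kept
//   GetRemainingClusters(buffer, size);       // write the clusters not attached to tracks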
25#if __GNUC__ >= 3
26using namespace std;
27#endif
28
29#include "AliHLTTPCCompModelConverter.h"
30#include "AliHLTTPCTransform.h"
31#include "AliHLTTPCTrack.h"
32#include "AliHLTTPCModelTrack.h"
33#include "AliHLTTPCCompDataCompressorHelper.h"
34#include <cerrno>
35
36AliHLTTPCCompModelConverter::AliHLTTPCCompModelConverter():
37 fInputTrackArray(),
38 fOutputTrackArray("AliHLTTPCModelTrack"),
892210c7 39 fModelAnalysisInstance(NULL),
40 fMinHits(0)
ff2f0f94 41 {
42 // see header file for class documentation
43 for ( UInt_t slice=0; slice<36; slice++ )
44 for ( UInt_t patch=0; patch<6; patch++ )
45 {
46 fClusterUsedSizes[slice][patch] = 0;
47 fClusterUsed[slice][patch] = NULL;
48 }
49 Init();
50 fMinHits = 5;
51 }
52
53AliHLTTPCCompModelConverter::AliHLTTPCCompModelConverter(AliHLTTPCCompModelAnalysis* modelanalysis):
54 fInputTrackArray(),
55 fOutputTrackArray("AliHLTTPCModelTrack"),
f0d05e66 56 fModelAnalysisInstance(modelanalysis),
57 fMinHits(0)
ff2f0f94 58 {
59 // see header file for class documentation
60 for ( UInt_t slice=0; slice<36; slice++ )
61 for ( UInt_t patch=0; patch<6; patch++ )
62 {
63 fClusterUsedSizes[slice][patch] = 0;
64 fClusterUsed[slice][patch] = NULL;
65 }
66 Init();
67 fMinHits = 5;
68 }
69
70AliHLTTPCCompModelConverter::~AliHLTTPCCompModelConverter()
71 {
72 // see header file for class documentation
73 for ( UInt_t slice=0; slice<36; slice++ )
74 for ( UInt_t patch=0; patch<6; patch++ )
75 {
76 if ( fClusterUsed[slice][patch] )
77 {
78 delete [] fClusterUsed[slice][patch];
79 fClusterUsed[slice][patch] = NULL;
80 }
81 }
82 }
83
84int AliHLTTPCCompModelConverter::Init()
85 {
86 // see header file for class documentation
87 fInputTrackArray.Reset();
88 fOutputTrackArray.Reset();
89 for ( UInt_t slice=0; slice<36; slice++ )
90 for ( UInt_t patch=0; patch<6; patch++ )
91 fClusters[slice][patch] = NULL;
92
93 return 0;
94 }
95
96int AliHLTTPCCompModelConverter::SetInputTracks( AliHLTTPCTrackletData* tracklets )
97 {
98 // see header file for class documentation
99 HLTDebug( "Filling %u tracks", (unsigned)tracklets->fTrackletCnt );
100 fInputTrackArray.FillTracks( tracklets->fTrackletCnt, tracklets->fTracklets );
101 return 0;
102 }
103
104int AliHLTTPCCompModelConverter::SetInputClusters( AliHLTTPCClusterData* clusters, UInt_t slice, UInt_t patch )
105 {
106 // see header file for class documentation
107 if ( slice>=36 || patch>=6 )
108 return EINVAL;
109 if ( fClusters[slice][patch] )
110 return EBUSY;
111 fClusters[slice][patch] = clusters;
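      // reuse the existing usage-flag buffer where possible; reallocate only if it is
      // too small or more than 8 times larger than needed for this cluster block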
112 if ( fClusterUsedSizes[slice][patch]<clusters->fSpacePointCnt ||
113 fClusterUsedSizes[slice][patch]>clusters->fSpacePointCnt*8 )
114 {
115 delete [] fClusterUsed[slice][patch];
116 fClusterUsed[slice][patch] = NULL;
117 }
118 if ( !fClusterUsed[slice][patch] )
119 {
120 fClusterUsed[slice][patch] = new bool[clusters->fSpacePointCnt];
121 if ( !fClusterUsed[slice][patch] )
122 {
123 HLTDebug( "Out of memory trying to allocate usage data for %u clusters", (unsigned)clusters->fSpacePointCnt );
124 return ENOMEM;
125 }
126 }
127 for ( unsigned long nn=0; nn<clusters->fSpacePointCnt; nn++ )
128 fClusterUsed[slice][patch][nn]=false;
129 HLTDebug( "Filling %u clusters", (unsigned)clusters->fSpacePointCnt );
130 return 0;
131 }
132
133void AliHLTTPCCompModelConverter::Convert()
134 {
135 // see header file for class documentation
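      // convert each input track into an AliHLTTPCModelTrack: tracks failing the
      // cluster-count or pt cuts are skipped; for the remaining tracks the crossing
      // point with every padrow carrying an associated cluster is calculated and the
      // cluster is stored in raw (pad, time) coordinates together with its widths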
136 fInputTrackArray.QSort();
137 for(Int_t i=0; i<fInputTrackArray.GetNTracks(); i++)
138 {
139 AliHLTTPCTrack *intrack = fInputTrackArray.GetCheckedTrack(i);
140
 141 // note: a NULL track pointer is skipped silently, no warning is issued
 142 if(!intrack) continue;
143
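      // the array was sorted by QSort() above, so it is assumed that all tracks after
      // the first one below the cluster cut also fail it; hence break rather than continue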
144 if((unsigned)intrack->GetNHits()<fMinHits)
145 {
146 HLTDebug("Track %d with %d clusters is below minimum of %d clusters",i,intrack->GetNHits(),fMinHits);
147 break;
 148 }
149
 150 // note: tracks can be lost here, the following cut discards tracks with pt < 0.1
151 if(intrack->GetPt()<0.1)
152 {
153 HLTDebug("Discarding track with low pt.");
154 if(fModelAnalysisInstance)
155 {
156 if(fModelAnalysisInstance->GetfModelAnalysis()) // analysis of model
157 {
158 fModelAnalysisInstance->MarkTrashTrack(intrack);
159 }
160 }
161
162 continue;
163 }
164
165 intrack->CalculateHelix();
166
167 AliHLTTPCModelTrack *outtrack = (AliHLTTPCModelTrack*)fOutputTrackArray.NextTrack();
168 outtrack->SetNHits(intrack->GetNHits());
169 outtrack->SetRowRange(intrack->GetFirstRow(),intrack->GetLastRow());
170 outtrack->SetFirstPoint(intrack->GetFirstPointX(),intrack->GetFirstPointY(),intrack->GetFirstPointZ());
171 outtrack->SetLastPoint(intrack->GetLastPointX(),intrack->GetLastPointY(),intrack->GetLastPointZ());
172 outtrack->SetPt(intrack->GetPt());
173 outtrack->SetPsi(intrack->GetPsi());
174 outtrack->SetTgl(intrack->GetTgl());
175 outtrack->SetCharge(intrack->GetCharge());
176 outtrack->CalculateHelix();
177 Int_t nhits = intrack->GetNHits();
178 UInt_t *hitids = intrack->GetHitNumbers();
a371a266 179 Int_t origslice = AliHLTTPCSpacePointData::GetSlice(hitids[nhits-1]);
ff2f0f94 180 outtrack->Init(origslice,-1);
181
182 for(Int_t j=nhits-1; j>=0; j--)
183 {
184 UInt_t id=hitids[j];
a371a266 185 Int_t slice = AliHLTTPCSpacePointData::GetSlice(id);
186 Int_t patch = AliHLTTPCSpacePointData::GetPatch(id);
187 UInt_t pos = AliHLTTPCSpacePointData::GetNumber(id);
188
ff2f0f94 189 //UInt_t size;
190 if ( !fClusters[slice][patch] )
191 {
192 //HLTWarning( "No clusters for slice %d, patch %d", slice, patch );
193 continue;
194 }
195 if ( !fClusterUsed[slice][patch] )
196 {
197 HLTWarning( "No cluster used data for slice %d, patch %d", slice, patch );
198 continue;
199 }
200 if ( fClusters[slice][patch]->fSpacePointCnt<=pos )
201 {
202 HLTWarning( "Clusters position %d too large in slice %d, patch %d (%u max.)", pos,
203 slice, patch, fClusters[slice][patch]->fSpacePointCnt );
204 continue;
205 }
206
207 AliHLTTPCSpacePointData *points = fClusters[slice][patch]->fSpacePoints;
208 bool* clustersUsed = fClusterUsed[slice][patch];
209 Float_t xyz[3] = {points[pos].fX,points[pos].fY,points[pos].fZ};
210 Int_t padrow = points[pos].fPadRow;
211
212 //Calculate the crossing point between track and padrow
213 Float_t angle = 0; //Perpendicular to padrow in local coordinates
214 AliHLTTPCTransform::Local2GlobalAngle(&angle,slice);
215 if(!intrack->CalculateReferencePoint(angle,AliHLTTPCTransform::Row2X(padrow)))
216 {
217 HLTError( "AliHLTDataCompressor::FillData : Error in crossing point calc on slice %d, padrow %d", slice, padrow );
218 break;
219 //outtrack->Print(kFALSE);
220 //exit(5);
221 }
222
223 Float_t xyzCross[3] = {intrack->GetPointX(),intrack->GetPointY(),intrack->GetPointZ()};
224
225 Int_t sector,row;
226 AliHLTTPCTransform::Slice2Sector(slice,padrow,sector,row);
227 AliHLTTPCTransform::Global2Raw(xyzCross,sector,row);
228#if 1
229 AliHLTTPCTransform::Local2Raw(xyz,sector,row);
230#else
231 AliHLTTPCTransform::Global2Raw(xyz,sector,row);
232#endif
233
234 outtrack->SetPadHit(padrow,xyzCross[1]);
235 outtrack->SetTimeHit(padrow,xyzCross[2]);
236
237 outtrack->SetCrossingAngleLUT(padrow,intrack->GetCrossingAngle(padrow,slice));
238 outtrack->CalculateClusterWidths(padrow,kTRUE); // calculates parSigmas (with parametrisation) in raw coordinates
239 //HLTInfo("angle %f", outtrack->GetCrossingAngleLUT(padrow));
240 //HLTInfo("parsigma %f",outtrack->GetParSigmaY2(padrow));
241 patch = AliHLTTPCTransform::GetPatch(padrow);
242 // sigmay in units of pads (quantisation!)
243 Float_t sigmaY2 = points[pos].fSigmaY2 / pow(AliHLTTPCTransform::GetPadPitchWidth(patch),2);
244 //HLTInfo("sigmaY conv.: %f", points[pos].fSigmaY2);
245
246 //HLTInfo("parSigmaY2 = %f",AliHLTTPCTransform::GetParSigmaY2(padrow, xyzCross[2],angle));
247 //Float_t testsigma = 0.0;
248 //outtrack->GetSigmaY2(padrow, testsigma);
249 //HLTInfo("DSigmaY2 = %f",testsigma);
250
251 //HLTInfo("sigmaY2 float: %f",sigmaY2);
252 Float_t sigmaZ2 = points[pos].fSigmaZ2 / pow(AliHLTTPCTransform::GetZWidth(),2);
253 outtrack->SetCluster(padrow,xyz[1],xyz[2],points[pos].fCharge,sigmaY2,sigmaZ2,3);
254 //AliHLTTPCClusterModel* test1 = outtrack->GetClusterModel(padrow);
255 //HLTInfo("Dsigma %f",test1->fDSigmaY);
256
 257 //IMPORTANT: set the slice the cluster belongs to, it is needed in AliHLTTPCModelTrack::FillTrack!
258 outtrack->GetClusterModel(padrow)->fSlice=slice;
259#ifdef MODELDEBUG
260 outtrack->GetClusterModel(padrow)->fID=points[pos].fID;
261 HLTDebug( "Track %d cluster for padrow %d ID: %u (0x%08X) - fSlice: %u", i, padrow,
262 outtrack->GetClusterModel(padrow)->fID, outtrack->GetClusterModel(padrow)->fID,
263 (unsigned)outtrack->GetClusterModel(padrow)->fSlice );
264#endif
265 //points[pos].fCharge = 0;//Mark this cluster as used.
266 clustersUsed[pos] = true;//Mark this cluster as used.
267 //fNusedClusters++;
268 } //end of clusters for each track
269
270 //outtrack->SetNClusters(AliHLTTPCTransform::GetNRows(-1)); // Equivalent call in ExpandTrackData
271 } // end of track-loop
272 ExpandTrackData();
273
274 // validation test for clusternumbers of tracks:
275 //for(unsigned long jj = 0; jj < (unsigned long) fOutputTrackArray.GetNTracks(); jj++)
276 // {
277 // AliHLTTPCModelTrack *track = (AliHLTTPCModelTrack*)fOutputTrackArray.GetCheckedTrack(jj);
278 // Int_t nhits = track->GetNHits();
279 // HLTInfo("Number of clusters for track %lu is %d",jj, nhits);
280 // }
281
282 //comp->WriteFile(fOutputTrackArray);
283
284 }
285
286void AliHLTTPCCompModelConverter::ExpandTrackData()
287 {
288 // see header file for class documentation
289 //Loop over tracks and try to assign unused clusters.
290 //Only clusters which are closer than the max. residual are taken.
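 // strategy: walk each track from the outermost padrow inwards; for padrows without an
 // assigned cluster (and no slice boundary crossing), search the unused clusters of the
 // last known slice on that padrow and attach the closest one within the residual limits
 // given by AliHLTTPCCompDataCompressorHelper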
291
292 HLTDebug( "Expanding %lu tracks", (unsigned long)fOutputTrackArray.GetNTracks() );
293 for(Int_t i=0; i<fOutputTrackArray.GetNTracks(); i++)
294 {
295 AliHLTTPCModelTrack *track = (AliHLTTPCModelTrack*)fOutputTrackArray.GetCheckedTrack(i);
296
297 if(!track) continue;
298
299 // tracks that hit every row already cannot be expanded in the current model!
300 if(track->GetNHits() == AliHLTTPCTransform::GetNRows()) continue;
301
302 Int_t nhits = track->GetNHits();
303
304 // validation test
305 //HLTInfo("Before expansion: track %u with number of clusters %d", i, nhits);
306
307 Int_t lastSlice=-1;
308 for(Int_t padrow=AliHLTTPCTransform::GetNRows()-1; padrow>=0; padrow--)
309 {
310 if(track->IsPresent(padrow))
311 {
312 lastSlice = track->GetClusterModel(padrow)->fSlice;
313 continue;
314 }
315
316 if(lastSlice < 0) //the outer cluster is missing, so skip it - it will be written anyhow.
317 continue;
318
319 //Check the slice of the next padrow:
320 Int_t nextPadrow = padrow-1;
321 Int_t nextSlice = -1;
322 while(nextPadrow >=0)
323 {
324 if(track->IsPresent(nextPadrow))
325 {
326 nextSlice = track->GetClusterModel(nextPadrow)->fSlice;
327 break;
328 }
329 nextPadrow--;
330 }
 331 if(nextSlice>=0 && nextSlice != lastSlice) //the track crosses a slice boundary here
 332 continue;
334
335 //UInt_t size;
336 if ( !fClusters[lastSlice][0] )
337 {
338 HLTWarning( "No clusters for slice %d, patch %d", lastSlice, 0 );
339 continue;
340 }
341 if ( !fClusterUsed[lastSlice][0] )
342 {
343 HLTWarning( "No cluster used data for slice %d, patch %d", lastSlice, 0 );
344 continue;
345 }
346 AliHLTTPCSpacePointData *points = fClusters[lastSlice][0]->fSpacePoints;//->GetDataPointer(size);
347 bool* clustersUsed = fClusterUsed[lastSlice][0];
348
1dffc959 349 Float_t globalangle = 0;
350 AliHLTTPCTransform::Local2GlobalAngle(&globalangle,lastSlice);
351 if(!track->CalculateReferencePoint(globalangle,AliHLTTPCTransform::Row2X(padrow)))
ff2f0f94 352 continue;
353 Float_t xyzCross[3] = {track->GetPointX(),track->GetPointY(),track->GetPointZ()};
354 AliHLTTPCTransform::Global2LocHLT(xyzCross,lastSlice);
355 Float_t mindist = 123456789;
356 AliHLTTPCSpacePointData *closest=0;
357 UInt_t closestJ=0;
358 for(UInt_t j=0; j<fClusters[lastSlice][0]->fSpacePointCnt; j++)
359 {
360 //if(points[j].fCharge == 0) continue;// || points[j].fPadRow != padrow) continue;
361 if (clustersUsed[j]) continue; // Cluster already used
362 if(points[j].fPadRow < padrow) continue;
363 if(points[j].fPadRow > padrow) break;
364 Float_t xyz[3] = {points[j].fX,points[j].fY,points[j].fZ};
365#if 1
366#else
367 AliHLTTPCTransform::Global2LocHLT(xyz,lastSlice);
368#endif
369
370 //Check for overflow:
371 Int_t temp = (Int_t)rint((xyzCross[1]-xyz[1])/AliHLTTPCCompDataCompressorHelper::GetXYResidualStep(padrow));
372 if( abs(temp) > 1<<(AliHLTTPCCompDataCompressorHelper::GetNPadBits()-1))
373 continue;
374
375 temp = (Int_t)rint((xyzCross[2]-xyz[2])/AliHLTTPCCompDataCompressorHelper::GetZResidualStep(padrow));
376 if( abs(temp) > 1<<(AliHLTTPCCompDataCompressorHelper::GetNTimeBits()-1))
377 continue;
378
379 Float_t dist = sqrt( pow(xyzCross[1]-xyz[1],2) + pow(xyzCross[2]-xyz[2],2) );
380 if(dist < mindist)
381 {
382 closest = &points[j];
383 closestJ = j;
384 mindist = dist;
385 }
386 }
387 if(closest) //there was a cluster assigned
388 {
389 Int_t sector,row;
390 Float_t xyz[3] = {closest->fX,closest->fY,closest->fZ};
391 AliHLTTPCTransform::Slice2Sector(lastSlice,padrow,sector,row);
392 AliHLTTPCTransform::Local2Raw(xyzCross,sector,row);
393#if 1
394 AliHLTTPCTransform::Local2Raw(xyz,sector,row);
395#else
396 AliHLTTPCTransform::Global2Raw(xyz,sector,row);
397#endif
398
399 track->SetPadHit(padrow,xyzCross[1]);
400 track->SetTimeHit(padrow,xyzCross[2]);
401
402 Float_t angle = track->GetCrossingAngle(padrow,lastSlice);
403 track->SetCrossingAngleLUT(padrow,angle);
404 track->CalculateClusterWidths(padrow,kTRUE);
405 Int_t patch = AliHLTTPCTransform::GetPatch(padrow);
406 Float_t sigmaY2 = closest->fSigmaY2 / pow(AliHLTTPCTransform::GetPadPitchWidth(patch),2);
407 Float_t sigmaZ2 = closest->fSigmaZ2 / pow(AliHLTTPCTransform::GetZWidth(),2);
408 track->SetCluster(padrow,xyz[1],xyz[2],closest->fCharge,sigmaY2,sigmaZ2,3);
409 //AliHLTTPCClusterModel* test1 = track->GetClusterModel(padrow);
410 //HLTInfo("Dsigma %f",test1->fDSigmaY);
411
412 nhits++;
413
 414 //IMPORTANT: set the slice the cluster belongs to, it is needed in AliHLTTPCModelTrack::FillTrack!
415 track->GetClusterModel(padrow)->fSlice=lastSlice;
416#ifdef MODELDEBUG
417 track->GetClusterModel(padrow)->fID=closest->fID;
418 HLTDebug( "Track %d cluster for padrow %d ID: %u (0x%08X) - fSlice: %u", i, padrow,
419 track->GetClusterModel(padrow)->fID, track->GetClusterModel(padrow)->fID,
420 track->GetClusterModel(padrow)->fSlice );
421#endif
422 //closest->fCharge = 0;//Mark this cluster as used.
423 clustersUsed[closestJ] = true;//Mark this cluster as used.
424 }
425 }
426 track->SetNClusters(AliHLTTPCTransform::GetNRows());
427 //cout<<"Track was assigned "<<nhits<<" clusters"<<endl;
428
429 // validation test
430 //HLTInfo( "After expansion: track %d with clusters %u", i, nhits);
431 }
432
433 }
434
435unsigned long AliHLTTPCCompModelConverter::GetOutputModelDataSize()
436 {
437 // see header file for class documentation
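 // total size = 32 bit format version word
 //            + per track: sizeof(AliHLTTPCTrackModel)
 //                         + GetNClusters()*sizeof(AliHLTTPCClusterModel)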
438 unsigned long dataSize=0;
439 Short_t ntracks = fOutputTrackArray.GetNTracks();
440
441 dataSize += sizeof(AliHLTUInt32_t);
442
443 for(Int_t i=0; i<ntracks; i++)
444 {
445 AliHLTTPCModelTrack *track = (AliHLTTPCModelTrack*)fOutputTrackArray.GetCheckedTrack(i);
446 if ( !track )
447 continue;
448
449 dataSize += sizeof(AliHLTTPCTrackModel)+track->GetNClusters()*sizeof(AliHLTTPCClusterModel);
450 }
451 return dataSize;
452 }
453
454int AliHLTTPCCompModelConverter::OutputModelData( AliHLTUInt8_t* data )
455 {
456 // see header file for class documentation
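 // output layout (must match GetOutputModelDataSize()): a 32 bit format version word,
 // then for each track one AliHLTTPCTrackModel block followed by
 // GetNClusters() AliHLTTPCClusterModel entries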
457 unsigned long dataOffset=0;
458 Short_t ntracks = fOutputTrackArray.GetNTracks();
459
460 AliHLTTPCClusterModel *clusters=0;
461 AliHLTTPCTrackModel *model=0;
462
463 *(AliHLTUInt32_t*)data = 0; // Write format version number
464 dataOffset += sizeof(AliHLTUInt32_t);
465
466 for(Int_t i=0; i<ntracks; i++)
467 {
468 AliHLTTPCModelTrack *track = (AliHLTTPCModelTrack*)fOutputTrackArray.GetCheckedTrack(i);
469 if ( !track )
470 continue;
471
472 track->FillModel();
473 model = track->GetModel();
474
475 clusters = track->GetClusters();
476
477 // validation test
478 //HLTInfo( "Track %d clusters: %u", i, (unsigned)track->GetNPresentClusters() );
479
480 for ( Int_t jj=0; jj<track->GetNClusters(); jj++ )
481 {
482 //HLTDebug( " Cluster %d fPresent: %u", jj, (unsigned)clusters[jj].fPresent );
483 }
484
485 memcpy( data+dataOffset, model, sizeof(AliHLTTPCTrackModel) );
486 dataOffset += sizeof(AliHLTTPCTrackModel);
487
488 memcpy( data+dataOffset, clusters, track->GetNClusters()*sizeof(AliHLTTPCClusterModel) );
489 dataOffset += track->GetNClusters()*sizeof(AliHLTTPCClusterModel);
490 }
491 return 0;
492 }
493
494void AliHLTTPCCompModelConverter::SelectRemainingClusters()
495 {
496 // see header file for class documentation
 497 //Select which remaining clusters to write in addition to the compressed data.
 498 //In particular, make sure here that "important" clusters are not missed:
 499 //the offline track finder performs seed finding in the outer padrows.
 500 //The first seeding uses pairs of points on the outermost padrow and on the row
 501 //0.125*nrows further towards the vertex. The second seeding uses pairs of points
 502 //on the padrow (outermost - 0.5*0.125*nrows) and on the row (0.125*nrows + 0.5*0.125*nrows)
 503 //further towards the vertex. In order to evaluate the seeds, the offline
 504 //track finder checks whether a certain number of possible clusters (padrows) is
 505 //attached to the track, and only then the Kalman filtering starts.
 506 //To keep the loss of efficiency minimal, all clusters in this region should be
 507 //kept intact.
508
509 Int_t nrows = AliHLTTPCTransform::GetNRows();
510 Int_t gap=(Int_t)(0.125*nrows), shift=(Int_t)(0.5*gap);
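 // example (assuming the standard 159 padrows): gap=19, shift=9, so all clusters
 // on padrows >= nrows-1-gap-shift = 130 are kept for the offline seeding region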
511
512 for(Int_t slice=0; slice<36; slice++)
513 {
514 for(Int_t patch=0; patch<6; patch++)
515 {
516 if ( !fClusters[slice][patch] )
517 continue;
518 AliHLTTPCSpacePointData *points = fClusters[slice][patch]->fSpacePoints;
519 bool* clustersUsed = fClusterUsed[slice][patch];
520 for(UInt_t i=0; i<fClusters[slice][patch]->fSpacePointCnt; i++)
521 {
522 //if(points[i].fCharge == 0) continue; //Already removed
523 if (clustersUsed[i]) continue; //Already removed
524 Int_t padrow = (Int_t)points[i].fPadRow;
525
526 //Check the widths (errors) of the cluster, and remove big bastards:
527 Float_t padw = sqrt(points[i].fSigmaY2) / AliHLTTPCTransform::GetPadPitchWidth(AliHLTTPCTransform::GetPatch(padrow));
528 Float_t timew = sqrt(points[i].fSigmaZ2) / AliHLTTPCTransform::GetZWidth();
 529 if(padw >= 2.55 || timew >= 2.55)//because only 1 byte is available to store the width
530 {
531 //points[i].fCharge = 0;
532 clustersUsed[i] = true;
533 continue;
534 }
535
536 Float_t xyz[3] = {points[i].fX,points[i].fY,points[i].fZ};
537 Int_t sector,row;
538 AliHLTTPCTransform::Slice2Sector(slice,padrow,sector,row);
539 AliHLTTPCTransform::Global2Raw(xyz,sector,row);
540
541 if(padrow >= nrows-1-gap-shift) continue;//save all the clusters in this region
542
543 //if(padrow >= nrows-1-shift) continue;
544
545 //Save the clusters at the borders:
546 //if(xyz[1] < 3 || xyz[1] >= AliHLTTPCTransform::GetNPads(padrow)-4)
547 // continue;
548
549 //Save clusters on padrows used for offline seeding:
550 if(padrow == nrows - 1 || padrow == nrows - 1 - gap || //First seeding
551 padrow == nrows - 1 - shift || padrow == nrows - 1 - gap - shift) //Second seeding
552 continue;
553
554 //Cluster did not meet any of the above criteria, so disregard it:
555 //points[i].fCharge = 0;
556 clustersUsed[i] = true;
557 }
558 }
559 }
560
561 }
562
563unsigned long AliHLTTPCCompModelConverter::GetRemainingClustersOutputDataSize()
564 {
565 // see header file for class documentation
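 // computes the buffer size needed by GetRemainingClusters() without writing anything:
 // a 32 bit version word, one byte per slice/patch for the row count, and for every
 // padrow with unused clusters an AliHLTTPCRemainingRow header plus one
 // AliHLTTPCRemainingCluster entry per cluster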
e61d1d2a 566 int iResult=0;
ff2f0f94 567#if 0
    unsigned long clusterCnt=0; // declaration was missing in this disabled counting variant
 568 for ( UInt_t slice=0; slice<36; slice++ )
569 for ( UInt_t patch=0; patch<6; patch++ )
570 {
571 bool* clustersUsed = fClusterUsed[slice][patch];
572 if ( !clustersUsed || !fClusters[slice][patch] )
573 continue;
574 for ( UInt_t pos=0; pos<fClusters[slice][patch]->fSpacePointCnt; pos++ )
575 {
576 if ( !clustersUsed[pos] )
577 clusterCnt++;
578 }
579 }
580 return clusterCnt*sizeof(AliHLTTPCClusterModel);
581#else
582 const Int_t nrows = AliHLTTPCTransform::GetNRows();
ce622827 583 Int_t * npoints = new Int_t[nrows];
ff2f0f94 584 unsigned long dataWritten = 0;
585
586 dataWritten += sizeof(AliHLTUInt32_t);
587
e61d1d2a 588 // FIXME: get rid of hardcoded numbers
 589 for(Int_t slice=0; slice<36 && iResult>=0; slice++) // all 36 slices, must match GetRemainingClusters
ff2f0f94 590 {
e61d1d2a 591 for(Int_t patch=0; patch < 6 && iResult>=0; patch++)
ff2f0f94 592 {
593 if ( !fClusters[slice][patch] )
594 {
595 dataWritten++;
596 continue;
597 }
598 AliHLTTPCSpacePointData *points = fClusters[slice][patch]->fSpacePoints;
599 bool* clustersUsed = fClusterUsed[slice][patch];
600 if ( !clustersUsed )
601 continue;
602 memset(npoints,0,nrows*sizeof(Int_t));
603 Int_t nonZeroRows=0;
604
605 for(UInt_t j=0; j<fClusters[slice][patch]->fSpacePointCnt; j++)
606 {
607 //if(points[j].fCharge == 0) continue; //has been used
608 if ( clustersUsed[j] ) continue; //has been used
609 if ( !npoints[points[j].fPadRow] )
610 nonZeroRows++;
611 npoints[points[j].fPadRow]++;
612 }
613
614 dataWritten++;
615
616 Int_t size =0;
617 Byte_t *data = 0;
618 AliHLTTPCRemainingRow *tempPt=0;
619
620 Int_t lastRow = -2;
621 Int_t localcounter=0;
622
623 for(UInt_t j=0; j<fClusters[slice][patch]->fSpacePointCnt; j++)
624 {
625 //if(points[j].fCharge == 0) continue; //has been used
626 if ( clustersUsed[j] ) continue; //has been used
627
628 Int_t padrow = points[j].fPadRow;
629 if(padrow != lastRow)
630 {
631 if(lastRow != -2)
632 {
633 if(!tempPt)
634 {
635 HLTError( "Zero row pointer " );
e61d1d2a 636 iResult=-EINVAL;
637 break;
ff2f0f94 638 }
639 if(localcounter != tempPt->fNClusters)
640 {
641 HLTError( "Mismatching clustercounter %lu - %d ",
642 (unsigned long)localcounter, (Int_t)tempPt->fNClusters );
e61d1d2a 643 iResult=-EINVAL; // negative error code, consistent with the other error paths
644 break;
ff2f0f94 645 }
646 dataWritten += size;
647 }
648 if(data)
649 delete [] data;
650 size = sizeof(AliHLTTPCRemainingRow) + npoints[padrow]*sizeof(AliHLTTPCRemainingCluster);
651 data = new Byte_t[size];
e61d1d2a 652 tempPt = reinterpret_cast<AliHLTTPCRemainingRow*>(data);
ff2f0f94 653
654 localcounter=0;
655 tempPt->fPadRow = padrow;
656 tempPt->fNClusters = npoints[padrow];
657 lastRow = padrow;
658 }
659 if(localcounter >= npoints[padrow])
660 {
661 HLTError( "Cluster counter out of range: %lu - %lu",
662 (unsigned long)localcounter, (unsigned long)npoints[padrow] );
e61d1d2a 663 iResult=-EINVAL;
664 break;
ff2f0f94 665 }
666
667 localcounter++;
668 }
669
670 //Write the last row:
671 if ( tempPt )
672 {
673 dataWritten += size;
ff2f0f94 674 }
e61d1d2a 675 if(data)
676 delete [] data;
ff2f0f94 677 }
678 }
ce622827 679 delete [] npoints;
e61d1d2a 680 // FIXME check the caller and propagate an error condition
681 if (iResult<0) return 0;
ff2f0f94 682 return dataWritten;
683#endif
684 }
685
1dffc959 686int AliHLTTPCCompModelConverter::GetRemainingClusters( AliHLTUInt8_t* const pTgt, unsigned long& dataSize )
ff2f0f94 687 {
688 // see header file for class documentation
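 // writes the clusters not attached to any model track, in the format sized by
 // GetRemainingClustersOutputDataSize(): version word, per slice/patch a one byte
 // row count, then per padrow an AliHLTTPCRemainingRow header followed by its
 // AliHLTTPCRemainingCluster entries (pad, time, charge and widths in raw coordinates)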
e61d1d2a 689 int iResult=0;
690
691 // FIXME: almost identical code to GetRemainingClustersOutputDataSize
692 // try to combine
ff2f0f94 693 const Int_t nrows = AliHLTTPCTransform::GetNRows();
ce622827 694 Int_t * npoints = new Int_t[nrows];
ff2f0f94 695 unsigned long dataWritten = 0;
1dffc959 696 AliHLTUInt8_t* writePtr = pTgt;
ff2f0f94 697
698 *(AliHLTUInt32_t*)writePtr = 0; // Write format version
699 dataWritten += sizeof(AliHLTUInt32_t);
700 writePtr += sizeof(AliHLTUInt32_t);
701
e61d1d2a 702 for(Int_t slice=0; slice<=35 && iResult>=0; slice++)
ff2f0f94 703 {
e61d1d2a 704 for(Int_t patch=0; patch < 6 && iResult>=0; patch++)
ff2f0f94 705 {
706 if ( !fClusters[slice][patch] )
707 {
708 *writePtr = (AliHLTUInt8_t)0;
709 writePtr++;
710 dataWritten++;
711 continue;
712 }
713 AliHLTTPCSpacePointData *points = fClusters[slice][patch]->fSpacePoints;
714 bool* clustersUsed = fClusterUsed[slice][patch];
715 if ( !clustersUsed )
716 continue;
717 memset(npoints,0,nrows*sizeof(Int_t));
718 Int_t nonZeroRows=0;
719
720 for(UInt_t j=0; j<fClusters[slice][patch]->fSpacePointCnt; j++)
721 {
722 //if(points[j].fCharge == 0) continue; //has been used
723 if ( clustersUsed[j] ) continue; //has been used
724 if ( !npoints[points[j].fPadRow] )
725 nonZeroRows++;
726 npoints[points[j].fPadRow]++;
727 }
728
729 *writePtr = (AliHLTUInt8_t)nonZeroRows;
730 writePtr++;
731 dataWritten++;
732
733 Int_t size =0;
734 Byte_t *data = 0;
735 AliHLTTPCRemainingRow *tempPt=0;
736
737 Int_t lastRow = -2;
738 Int_t localcounter=0;
739
740 for(UInt_t j=0; j<fClusters[slice][patch]->fSpacePointCnt; j++)
741 {
742 //if(points[j].fCharge == 0) continue; //has been used
743 if ( clustersUsed[j] ) continue; //has been used
744
745 Int_t padrow = points[j].fPadRow;
746 if(padrow != lastRow)
747 {
748 if(lastRow != -2)
749 {
750 if(!tempPt)
751 {
752 HLTError( "Zero row pointer " );
e61d1d2a 753 iResult=-EINVAL;
754 break;
ff2f0f94 755 }
756 if(localcounter != tempPt->fNClusters)
757 {
758 HLTError( "Mismatching clustercounter %lu - %d ",
759 (unsigned long)localcounter, (Int_t)tempPt->fNClusters );
e61d1d2a 760 iResult=-EINVAL;
761 break;
ff2f0f94 762 }
763 //cout<<"Writing row "<<(int)tempPt->fPadRow<<" with "<<(int)tempPt->fNClusters<<" clusters"<<endl;
764 //fwrite(tempPt,size,1,outfile);
765 if ( dataWritten+size > dataSize )
766 {
767 HLTWarning( "Cannot write remaining clusters to output. Data size too large (exceeding %lu bytes)", (unsigned long)dataSize );
e61d1d2a 768 iResult=-ENOBUFS;
769 break;
ff2f0f94 770 }
771 memcpy( writePtr, tempPt, size );
772 dataWritten += size;
773 writePtr += size;
774 }
775 if(data)
776 delete [] data;
777 size = sizeof(AliHLTTPCRemainingRow) + npoints[padrow]*sizeof(AliHLTTPCRemainingCluster);
778 data = new Byte_t[size];
779 tempPt = (AliHLTTPCRemainingRow*)data;
780
781 localcounter=0;
782 tempPt->fPadRow = padrow;
783 tempPt->fNClusters = npoints[padrow];
784 lastRow = padrow;
785 }
786 if(localcounter >= npoints[padrow])
787 {
788 HLTError( "Cluster counter out of range: %lu - %lu",
789 (unsigned long)localcounter, (unsigned long)npoints[padrow] );
e61d1d2a 790 iResult=-EINVAL; // negative error code, consistent with the other error paths
791 break;
ff2f0f94 792 }
793
794 Float_t xyz[3] = {points[j].fX,points[j].fY,points[j].fZ};
795 Int_t sector,row;
796 AliHLTTPCTransform::Slice2Sector(slice,padrow,sector,row);
797#if 1
798 AliHLTTPCTransform::Local2Raw(xyz,sector,row);
799#else
800 AliHLTTPCTransform::Global2Raw(xyz,sector,row);
801#endif
802
803 Float_t padw = points[j].fSigmaY2 / pow(AliHLTTPCTransform::GetPadPitchWidth(AliHLTTPCTransform::GetPatch(padrow)),2);
804 Float_t timew = points[j].fSigmaZ2 / pow(AliHLTTPCTransform::GetZWidth(),2);
805 tempPt->fClusters[localcounter].fPad = xyz[1];
806 tempPt->fClusters[localcounter].fTime = xyz[2];
807 tempPt->fClusters[localcounter].fCharge = points[j].fCharge;
808 tempPt->fClusters[localcounter].fSigmaY2 = padw;
809 tempPt->fClusters[localcounter].fSigmaZ2 = timew;
810#ifdef MODELDEBUG
811 tempPt->fClusters[localcounter].fID = points[j].fID;
812#endif
813 localcounter++;
814 if(fModelAnalysisInstance)
815 {
816 if(fModelAnalysisInstance->GetfModelAnalysis())
817 {
818 fModelAnalysisInstance->MarkTrashCluster(fClusters[slice][patch], slice, patch);
819 }
820 }
821 }
822
823
824 //Write the last row:
825 if ( dataWritten+size > dataSize )
826 {
827 HLTWarning( "Cannot write remaining clusters to output. Data size too large (exceeding %lu bytes)", (unsigned long)dataSize );
e61d1d2a 828 iResult=-ENOBUFS;
b4eb88d1 829 if(data)
830 delete [] data;
e61d1d2a 831 break;
ff2f0f94 832 }
833 if ( tempPt )
834 {
835 memcpy( writePtr, tempPt, size );
836 dataWritten += size;
837 writePtr += size;
ff2f0f94 838 }
e61d1d2a 839 if(data)
840 delete [] data;
ff2f0f94 841 }
842 }
843 dataSize = dataWritten;
ce622827 844
845 delete [] npoints;
e61d1d2a 846 return iResult;
ff2f0f94 847 }