]>
Commit | Line | Data |
---|---|---|
1 | // $Id$ | |
2 | ||
3 | //************************************************************************** | |
4 | //* This file is property of and copyright by the ALICE HLT Project * | |
5 | //* ALICE Experiment at CERN, All rights reserved. * | |
6 | //* * | |
7 | //* Primary Authors: Matthias Richter <Matthias.Richter@ift.uib.no> * | |
8 | //* for The ALICE HLT Project. * | |
9 | //* * | |
10 | //* Permission to use, copy, modify and distribute this software and its * | |
11 | //* documentation strictly for non-commercial purposes is hereby granted * | |
12 | //* without fee, provided that the above copyright notice appears in all * | |
13 | //* copies and that both the copyright notice and this permission notice * | |
14 | //* appear in the supporting documentation. The authors make no claims * | |
15 | //* about the suitability of this software for any purpose. It is * | |
16 | //* provided "as is" without express or implied warranty. * | |
17 | //************************************************************************** | |
18 | ||
19 | /** @file AliHLTOUTComponent.cxx | |
20 | @author Matthias Richter | |
21 | @date | |
22 | @brief The HLTOUT data sink component similar to HLTOUT nodes | |
23 | */ | |
24 | ||
25 | #if __GNUC__>= 3 | |
26 | using namespace std; | |
27 | #endif | |
28 | ||
29 | #include <cassert> | |
30 | //#include <iostream> | |
31 | #include "AliHLTOUTComponent.h" | |
32 | #include "AliHLTOUT.h" | |
33 | #include "AliHLTHOMERLibManager.h" | |
34 | #include "AliHLTHOMERWriter.h" | |
35 | #include "AliDAQ.h" // equipment Ids | |
36 | #include "AliRawDataHeader.h" // Common Data Header | |
37 | #include <TDatime.h> // seed for TRandom | |
38 | #include <TRandom.h> // random int generation for DDL no | |
39 | #include <TFile.h> | |
40 | #include <TTree.h> | |
41 | #include <TArrayC.h> | |
42 | ||
43 | /** ROOT macro for the implementation of ROOT specific class methods */ | |
44 | ClassImp(AliHLTOUTComponent) | |
45 | ||
46 | AliHLTOUTComponent::AliHLTOUTComponent() | |
47 | : | |
48 | AliHLTOfflineDataSink(), | |
49 | fWriters(), | |
50 | fNofDDLs(10), | |
51 | fIdFirstDDL(7680), // 0x1e<<8 | |
52 | fBuffer(), | |
53 | fpLibManager(NULL), | |
54 | fpDigitFile(NULL), | |
55 | fpDigitTree(NULL), | |
56 | fppDigitArrays(NULL), | |
57 | fReservedWriter(-1), | |
58 | fReservedData(0) | |
59 | { | |
60 | // see header file for class documentation | |
61 | // or | |
62 | // refer to README to build package | |
63 | // or | |
64 | // visit http://web.ift.uib.no/~kjeks/doc/alice-hlt | |
65 | ||
66 | // I guess DDL definitions should never change any more | |
67 | assert(fNofDDLs==AliDAQ::NumberOfDdls("HLT")); | |
68 | fNofDDLs=AliDAQ::NumberOfDdls("HLT"); | |
69 | ||
70 | /* AliDAQ::DdlIDOffset returns wrong offset for HLT links | |
71 | assert(fIdFirstDDL==AliDAQ::DdlIDOffset("HLT")); | |
72 | fIdFirstDDL=AliDAQ::DdlIDOffset("HLT"); | |
73 | */ | |
74 | } | |
75 | ||
// global output options shared by all instances: by default both the
// raw DDL files and the HLT digit tree are written
int AliHLTOUTComponent::fgOptions=kWriteRawFiles|kWriteDigits;
77 | ||
78 | AliHLTOUTComponent::~AliHLTOUTComponent() | |
79 | { | |
80 | // see header file for class documentation | |
81 | if (fpLibManager) delete fpLibManager; | |
82 | fpLibManager=NULL; | |
83 | } | |
84 | ||
85 | const char* AliHLTOUTComponent::GetComponentID() | |
86 | { | |
87 | // see header file for class documentation | |
88 | return "HLTOUT"; | |
89 | } | |
90 | ||
91 | void AliHLTOUTComponent::GetInputDataTypes( vector<AliHLTComponentDataType>& list) | |
92 | { | |
93 | // see header file for class documentation | |
94 | list.clear(); | |
95 | list.push_back(kAliHLTAnyDataType); | |
96 | } | |
97 | ||
98 | AliHLTComponent* AliHLTOUTComponent::Spawn() | |
99 | { | |
100 | // see header file for class documentation | |
101 | return new AliHLTOUTComponent; | |
102 | } | |
103 | ||
104 | int AliHLTOUTComponent::DoInit( int argc, const char** argv ) | |
105 | { | |
106 | // see header file for class documentation | |
107 | int iResult=0; | |
108 | TString argument=""; | |
109 | int bMissingParam=0; | |
110 | for (int i=0; i<argc && iResult>=0; i++) { | |
111 | argument=argv[i]; | |
112 | if (argument.IsNull()) continue; | |
113 | ||
114 | // -links | |
115 | if (argument.CompareTo("-links")==0) { | |
116 | if ((bMissingParam=(++i>=argc))) break; | |
117 | TString parameter(argv[i]); | |
118 | parameter.Remove(TString::kLeading, ' '); // remove all blanks | |
119 | if (parameter.IsDigit()) { | |
120 | fNofDDLs=parameter.Atoi(); | |
121 | } else { | |
122 | HLTError("wrong parameter for argument %s, number expected", argument.Data()); | |
123 | iResult=-EINVAL; | |
124 | } | |
125 | } else { | |
126 | HLTError("unknown argument %s", argument.Data()); | |
127 | iResult=-EINVAL; | |
128 | break; | |
129 | } | |
130 | } | |
131 | if (bMissingParam) { | |
132 | HLTError("missing parameter for argument %s", argument.Data()); | |
133 | iResult=-EINVAL; | |
134 | } | |
135 | if (iResult>=0) { | |
136 | } | |
137 | ||
138 | fpLibManager=new AliHLTHOMERLibManager; | |
139 | if (fpLibManager) { | |
140 | int writerNo=0; | |
141 | for (writerNo=0; writerNo<fNofDDLs; writerNo++) { | |
142 | AliHLTMonitoringWriter* pWriter=fpLibManager->OpenWriter(); | |
143 | if (pWriter) { | |
144 | HLTDebug("HOMER writer %p added", pWriter); | |
145 | fWriters.push_back(pWriter); | |
146 | } else { | |
147 | HLTError("can not open HOMER writer"); | |
148 | iResult=-ENODEV; | |
149 | break; | |
150 | } | |
151 | } | |
152 | } else { | |
153 | iResult=-ENOMEM; | |
154 | } | |
155 | ||
156 | return iResult; | |
157 | } | |
158 | ||
159 | int AliHLTOUTComponent::DoDeinit() | |
160 | { | |
161 | // see header file for class documentation | |
162 | int iResult=0; | |
163 | ||
164 | if (fpLibManager) { | |
165 | AliHLTMonitoringWriterPVector::iterator element=fWriters.begin(); | |
166 | while (element!= fWriters.end()) { | |
167 | assert(*element); | |
168 | // wanted to have a dynamic_cast<AliHLTHOMERWriter*> here, but this results into | |
169 | // undefined symbol when loading the library | |
170 | (*element)->Clear(); | |
171 | if (*element!=NULL) fpLibManager->DeleteWriter((AliHLTHOMERWriter*)(*element)); | |
172 | element=fWriters.erase(element); | |
173 | } | |
174 | } | |
175 | ||
176 | if (fpDigitTree) { | |
177 | delete fpDigitTree; | |
178 | fpDigitTree=NULL; | |
179 | } | |
180 | ||
181 | if (fpDigitFile) { | |
182 | fpDigitFile->Close(); | |
183 | delete fpDigitFile; | |
184 | fpDigitFile=NULL; | |
185 | } | |
186 | ||
187 | if (fppDigitArrays) { | |
188 | for (int i=0; i<fNofDDLs; i++) { | |
189 | if (fppDigitArrays[i]) delete fppDigitArrays[i]; | |
190 | } | |
191 | delete[] fppDigitArrays; | |
192 | fppDigitArrays=NULL; | |
193 | } | |
194 | ||
195 | return iResult; | |
196 | } | |
197 | ||
int AliHLTOUTComponent::DumpEvent( const AliHLTComponentEventData& evtData,
				   const AliHLTComponentBlockData* blocks,
				   AliHLTComponentTriggerData& /*trigData*/ )
{
  // data sink method: distribute the input blocks of one event over the
  // HOMER writers; for a non-data event the encoded buffer is kept in
  // fBuffer (fReservedWriter/fReservedData) to be written together with
  // the next event by FillESD
  int iResult=0;
  HLTInfo("write %d output block(s)", evtData.fBlockCnt);
  // writerNo stays 0 for non-data events because ShuffleWriters is only
  // called in the data-event branch below
  int writerNo=0;
  AliHLTUInt32_t eventType=gkAliEventTypeUnknown;
  bool bIsDataEvent=IsDataEvent(&eventType);
  if (iResult>=0) {
    homer_uint64 homerHeader[kCount_64b_Words];
    HOMERBlockDescriptor homerDescriptor(homerHeader);
    for (int n=0; n<(int)evtData.fBlockCnt; n++ ) {
      if (blocks[n].fDataType==kAliHLTDataTypeEvent ||
	  blocks[n].fDataType==kAliHLTDataTypeSOR ||
	  blocks[n].fDataType==kAliHLTDataTypeEOR ||
	  blocks[n].fDataType==kAliHLTDataTypeComConf ||
	  blocks[n].fDataType==kAliHLTDataTypeUpdtDCS)
      {
	// the special events have to be ignored.
	continue;
      }
      if (!bIsDataEvent &&
	  (blocks[n].fDataType!=kAliHLTDataTypeComponentTable))
      {
	// In simulation, there are no SOR and EOR events created. Thats
	// why all data blocks of those events are currently ignored.
	// Strictly speaking, components should not create output blocks
	// on the SOR/EOR event
	//
	// Exeptions: some blocks are added, the buffer must be prepared and
	// kept since the pointers will be invalid
	// - kAliHLTDataTypeComponentTable component table entries
	continue;
      }
      // reset the homer header before filling it for this block
      memset( homerHeader, 0, sizeof(homer_uint64)*kCount_64b_Words );
      homerDescriptor.Initialize();
      // for some traditional reason the TCPDumpSubscriber swaps the bytes
      // of the data type id and data type origin. Actually I do not understand
      // the corresponding code line
      // homerBlock.SetType( blocks[n].fDataType.fID );
      // this compiles in the PubSub framework and in addition does a byte swap
      homer_uint64 id=0;
      homer_uint64 origin=0;
      // fID is 8 bytes, fOrigin is 4 bytes placed in the upper half of the
      // 64 bit word before the swap
      memcpy(&id, blocks[n].fDataType.fID, sizeof(homer_uint64));
      memcpy(((AliHLTUInt8_t*)&origin)+sizeof(homer_uint32), blocks[n].fDataType.fOrigin, sizeof(homer_uint32));
      homerDescriptor.SetType(AliHLTOUT::ByteSwap64(id));
      homerDescriptor.SetSubType1(AliHLTOUT::ByteSwap64(origin));
      homerDescriptor.SetSubType2(blocks[n].fSpecification);
      homerDescriptor.SetBlockSize(blocks[n].fSize);
      if (bIsDataEvent) {
	// pick the target writer; non-data events always use writer 0
	writerNo=ShuffleWriters(fWriters, blocks[n].fSize);
      }
      assert(writerNo>=0 && writerNo<(int)fWriters.size());
      // I'm puzzled by the different headers, buffers etc. used in the
      // HOMER writer/data. In additional, there is no type check as there
      // are void pointers used and names mixed.
      // It seems that HOMERBlockDescriptor is just a tool to set the
      // different fields in the homer header, which is an array of 64 bit
      // words.
      fWriters[writerNo]->AddBlock(homerHeader, blocks[n].fPtr);
    }
  }

  if (iResult>=0 && !bIsDataEvent) {
    // data blocks from a special event are kept to be added to the
    // following event.
    if (fReservedWriter>=0) {
      HLTWarning("overriding previous buffer of non-data event data blocks");
    }
    const AliHLTUInt8_t* pBuffer=NULL;
    int bufferSize=0;
    // TODO: not yet clear whether it is smart to send the event id of
    // this special event or if it should be set from the id of the
    // following event where the data will be added
    if ((bufferSize=FillOutputBuffer(evtData.fEventID, fWriters[writerNo], pBuffer))>0) {
      fReservedWriter=writerNo;
      fReservedData=bufferSize;
    }
    // the writer content is now encoded in fBuffer, the writer can be reset
    fWriters[writerNo]->Clear();
  }

  return iResult;
}
283 | ||
int AliHLTOUTComponent::FillESD(int eventNo, AliRunLoader* runLoader, AliESDEvent* /*esd*/)
{
  // write the encoded HLTOUT data of this event: first flush the buffer
  // reserved by a preceding non-data event, then encode and write each
  // writer's payload as digits and/or raw DDL files
  int iResult=0;

  if (fWriters.size()==0) return 0;

  // a non-data event before this event has produced data to be added;
  // fBuffer still holds that encoded payload (fReservedData bytes)
  if (fReservedWriter>=0) {
    if (fgOptions&kWriteDigits) WriteDigitArray(fReservedWriter, &fBuffer[0], fReservedData);
    if (fgOptions&kWriteRawFiles) WriteRawFile(eventNo, runLoader, fReservedWriter, &fBuffer[0], fReservedData);
    fReservedData=0;
  }

  // search for the writer with the biggest data volume in order to allocate the
  // output buffer of sufficient size
  // NOTE(review): this only compares each writer against the current front
  // element, so the vector is not fully sorted; it appears intended to place
  // the largest writer first so fBuffer grows only once — confirm
  vector<int> sorted;
  for (size_t i=0; i<fWriters.size(); i++) {
    if ((int)i==fReservedWriter) continue;
    assert(fWriters[i]);
    if (fWriters[i]) {
      if (sorted.size()==0 || fWriters[i]->GetTotalMemorySize()<=fWriters[sorted[0]]->GetTotalMemorySize()) {
	sorted.push_back(i);
      } else {
	sorted.insert(sorted.begin(), i);
      }
    }
  }
  // the reserved buffer has been flushed above, release the reservation
  fReservedWriter=-1;

  vector<int>::iterator ddlno=sorted.begin();
  while (ddlno!=sorted.end()) {
    const AliHLTUInt8_t* pBuffer=NULL;
    int bufferSize=0;

    // encode CDH + HLT event header + HOMER payload into fBuffer
    if ((bufferSize=FillOutputBuffer(eventNo, fWriters[*ddlno], pBuffer))>0) {
      if (fgOptions&kWriteDigits) WriteDigitArray(*ddlno, pBuffer, bufferSize);
      if (fgOptions&kWriteRawFiles) WriteRawFile(eventNo, runLoader, *ddlno, pBuffer, bufferSize);
    }
    fWriters[*ddlno]->Clear();
    ddlno++;
  }
  // fill the digit tree once per event after all arrays have been set
  if (fgOptions&kWriteDigits) WriteDigits(eventNo, runLoader);
  return iResult;
}
328 | ||
329 | int AliHLTOUTComponent::ShuffleWriters(AliHLTMonitoringWriterPVector &list, AliHLTUInt32_t /*size*/) | |
330 | { | |
331 | // see header file for class documentation | |
332 | int iResult=-ENOENT; | |
333 | assert(list.size()>0); | |
334 | if (list.size()==0) return iResult; | |
335 | vector<int> writers; | |
336 | size_t i=0; | |
337 | for (i=0; i<list.size(); i++) { | |
338 | if ((int)i==fReservedWriter) continue; | |
339 | if (list[i]->GetTotalMemorySize()==0) | |
340 | writers.push_back(i); | |
341 | else if (iResult<0 || | |
342 | list[i]->GetTotalMemorySize()<list[iResult]->GetTotalMemorySize()) | |
343 | iResult=i; | |
344 | ||
345 | } | |
346 | if (writers.size()>0) { | |
347 | iResult=writers[0]; | |
348 | if (writers.size()>0) { | |
349 | // shuffle among the empty writers | |
350 | TDatime dt; | |
351 | TRandom rand; | |
352 | rand.SetSeed(dt.Get()*(iResult+1)); | |
353 | i=rand.Integer(writers.size()-1); | |
354 | assert(i>0 && i<writers.size()-1); | |
355 | iResult=writers[i]; | |
356 | } | |
357 | } else { | |
358 | // take the writer with the least data volume | |
359 | assert(iResult>=0); | |
360 | } | |
361 | return iResult; | |
362 | } | |
363 | ||
364 | int AliHLTOUTComponent::FillOutputBuffer(int eventNo, AliHLTMonitoringWriter* pWriter, const AliHLTUInt8_t* &pBuffer) | |
365 | { | |
366 | // see header file for class documentation | |
367 | int iResult=0; | |
368 | unsigned int bufferSize=0; | |
369 | ||
370 | // space for common data header | |
371 | bufferSize+=sizeof(AliRawDataHeader); | |
372 | assert(sizeof(AliRawDataHeader)==32); | |
373 | ||
374 | // space for HLT event header | |
375 | bufferSize+=sizeof(AliHLTOUT::AliHLTOUTEventHeader); | |
376 | ||
377 | // space for payload from the writer | |
378 | if (pWriter) bufferSize+=pWriter->GetTotalMemorySize(); | |
379 | ||
380 | if (bufferSize>fBuffer.size()) | |
381 | fBuffer.resize(bufferSize); | |
382 | ||
383 | if (bufferSize<=fBuffer.size()) { | |
384 | AliRawDataHeader* pCDH=reinterpret_cast<AliRawDataHeader*>(&fBuffer[0]); | |
385 | AliHLTOUT::AliHLTOUTEventHeader* pHLTH=reinterpret_cast<AliHLTOUT::AliHLTOUTEventHeader*>(&fBuffer[sizeof(AliRawDataHeader)]); | |
386 | *pCDH = AliRawDataHeader(); // Fill with default values. | |
387 | memset(pHLTH, 0, sizeof(AliHLTOUT::AliHLTOUTEventHeader)); | |
388 | ||
389 | if (pWriter) { | |
390 | // copy payload | |
391 | pWriter->Copy(&fBuffer[sizeof(AliRawDataHeader)+sizeof(AliHLTOUT::AliHLTOUTEventHeader)], 0, 0, 0, 0); | |
392 | pHLTH->fLength=pWriter->GetTotalMemorySize(); | |
393 | // set status bit to indicate HLT payload | |
394 | pCDH->fStatusMiniEventID|=0x1<<(AliHLTOUT::kCDHStatusFlagsOffset+AliHLTOUT::kCDHFlagsHLTPayload); | |
395 | } | |
396 | pHLTH->fLength+=sizeof(AliHLTOUT::AliHLTOUTEventHeader); | |
397 | // pHLTH->fEventIDLow is already set to zero in memset above. | |
398 | pHLTH->fEventIDLow = eventNo; | |
399 | // version does not really matter since we do not add decision data | |
400 | pHLTH->fVersion=AliHLTOUT::kVersion1; | |
401 | ||
402 | pCDH->fSize=sizeof(AliRawDataHeader)+pHLTH->fLength; | |
403 | pCDH->fStatusMiniEventID|=0x1<<(AliHLTOUT::kCDHStatusFlagsOffset + AliHLTOUT::kCDHFlagsHLTPayload); | |
404 | ||
405 | pBuffer=&fBuffer[0]; | |
406 | iResult=(int)bufferSize; | |
407 | } else { | |
408 | pBuffer=NULL; | |
409 | iResult=-ENOMEM; | |
410 | } | |
411 | ||
412 | return iResult; | |
413 | } | |
414 | ||
415 | int AliHLTOUTComponent::WriteDigitArray(int hltddl, const AliHLTUInt8_t* pBuffer, unsigned int bufferSize) | |
416 | { | |
417 | // see header file for class documentation | |
418 | int iResult=0; | |
419 | assert(hltddl<fNofDDLs); | |
420 | if (hltddl>=fNofDDLs) return -ERANGE; | |
421 | ||
422 | if (!fppDigitArrays) { | |
423 | fppDigitArrays=new TArrayC*[fNofDDLs]; | |
424 | if (fppDigitArrays) { | |
425 | for (int i=0; i<fNofDDLs; i++) { | |
426 | fppDigitArrays[i]=new TArrayC(0); | |
427 | } | |
428 | } | |
429 | } | |
430 | if (fppDigitArrays && fppDigitArrays[hltddl]) { | |
431 | fppDigitArrays[hltddl]->Set(bufferSize, reinterpret_cast<const Char_t*>(pBuffer)); | |
432 | } else { | |
433 | iResult=-ENOMEM; | |
434 | } | |
435 | return iResult; | |
436 | } | |
437 | ||
438 | int AliHLTOUTComponent::WriteDigits(int /*eventNo*/, AliRunLoader* /*runLoader*/) | |
439 | { | |
440 | // see header file for class documentation | |
441 | int iResult=0; | |
442 | const char* digitFileName="HLT.Digits.root"; | |
443 | if (!fpDigitFile) { | |
444 | fpDigitFile=new TFile(digitFileName, "RECREATE"); | |
445 | } | |
446 | if (fpDigitFile && !fpDigitFile->IsZombie()) { | |
447 | if (!fpDigitTree) { | |
448 | fpDigitTree=new TTree("rawhltout","HLTOUT raw data"); | |
449 | if (fpDigitTree && fppDigitArrays) { | |
450 | for (int i=0; i<fNofDDLs; i++) { | |
451 | const char* branchName=AliDAQ::DdlFileName("HLT", i); | |
452 | if (fppDigitArrays[i]) fpDigitTree->Branch(branchName, "TArrayC", &fppDigitArrays[i], 32000/*just as the default*/, 0); | |
453 | } | |
454 | } | |
455 | } | |
456 | if (fpDigitTree) { | |
457 | int res=fpDigitTree->Fill(); | |
458 | HLTDebug("writing digit tree: %d", res); | |
459 | fpDigitFile->cd(); | |
460 | res=fpDigitTree->Write("",TObject::kOverwrite); | |
461 | HLTDebug("writing digit tree: %d", res); | |
462 | if (fppDigitArrays) for (int i=0; i<fNofDDLs; i++) { | |
463 | if (fppDigitArrays[i]) fppDigitArrays[i]->Set(0); | |
464 | } | |
465 | } | |
466 | } else { | |
467 | const char* errorMsg=""; | |
468 | if (GetEventCount()==5) { | |
469 | errorMsg=" (suppressing further error messages)"; | |
470 | } | |
471 | if (GetEventCount()<5) { | |
472 | HLTError("can not open HLT digit file %s%s", digitFileName, errorMsg); | |
473 | } | |
474 | iResult=-EBADF; | |
475 | } | |
476 | return iResult; | |
477 | } | |
478 | ||
479 | int AliHLTOUTComponent::WriteRawFile(int eventNo, AliRunLoader* /*runLoader*/, int hltddl, const AliHLTUInt8_t* pBuffer, unsigned int bufferSize) | |
480 | { | |
481 | // see header file for class documentation | |
482 | int iResult=0; | |
483 | const char* fileName=AliDAQ::DdlFileName("HLT", hltddl); | |
484 | assert(fileName!=NULL); | |
485 | TString filePath; | |
486 | filePath.Form("raw%d/", eventNo); | |
487 | filePath+=fileName; | |
488 | if (fileName) { | |
489 | ios::openmode filemode=(ios::openmode)0; | |
490 | ofstream rawfile(filePath.Data(), filemode); | |
491 | if (rawfile.good()) { | |
492 | if (pBuffer && bufferSize>0) { | |
493 | rawfile.write(reinterpret_cast<const char*>(pBuffer), bufferSize); | |
494 | } else { | |
495 | HLTWarning("writing zero length raw data file %s"); | |
496 | } | |
497 | HLTDebug("wrote %d byte(s) to file %s", bufferSize, filePath.Data()); | |
498 | } else { | |
499 | HLTError("can not open file %s for writing", filePath.Data()); | |
500 | iResult=-EBADF; | |
501 | } | |
502 | rawfile.close(); | |
503 | } | |
504 | return iResult; | |
505 | } | |
506 | ||
507 | void AliHLTOUTComponent::SetGlobalOption(unsigned int options) | |
508 | { | |
509 | // see header file for class documentation | |
510 | fgOptions|=options; | |
511 | } | |
512 | ||
513 | void AliHLTOUTComponent::ClearGlobalOption(unsigned int options) | |
514 | { | |
515 | // see header file for class documentation | |
516 | fgOptions&=~options; | |
517 | } |