# this is the startup script for ROOT
Executable="ana.sh";
Jobtag={"comment:omega(782) -> pi0 pi+ pi- analysis"};

# we split per storage element
Split="se";
SplitArguments="$1";
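# (note: Split="se" makes AliEn create one sub-job per storage element holding part of
#  the input data, and SplitArguments is the command-line argument each sub-job passes
#  to ana.sh; here it is the same $1 that selects the collection below.)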

# we want each job to read at most 50 input files
SplitMaxInputFileNumber="50";
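# (combined with Split="se", each storage-element group is split further so that no
#  single sub-job reads more than 50 files.)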

# we need AliRoot and the API service configuration package.
Packages={"VO_ALICE@APISCONFIG::V2.4","VO_ALICE@AliRoot::v4-17-01"};
# maximum run time per sub-job: 18 hours, given in seconds
TTL = "64800";
#Price = 10;
User = "polishch";

# ROOT will read this collection file to know which files to analyze
InputDataList="$1.xml";

# ROOT requires the collection file in the xml-single format
InputDataListFormat="xml-single";

# this is our collection file containing the files to be analyzed
InputDataCollection="LF:/alice/cern.ch/user/p/polishch/xml/$1.xml,nodownload";
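# (note: the collection above drives the splitting; for every sub-job AliEn writes its
#  share of the entries into the file named by InputDataList, and "nodownload" keeps
#  the data files themselves from being copied to the worker node, so ROOT opens them
#  remotely over the grid.)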

InputFile={"LF:/alice/cern.ch/user/p/polishch/omega/AnaTaskOmega3pi.C"};

InputBox={"/alice/cern.ch/user/p/polishch/omega/AnaTaskOmega3pi.C"};
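# (both entries stage the analysis macro into the sub-job's working directory;
#  InputBox looks like the older form of InputFile, so listing the macro twice is
#  probably redundant.)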

# Output archive
OutputArchive={"log_archive.zip:stdout,stderr@ALICE::NIHAM::FILE",
               "root_archive.zip:*.root@ALICE::NIHAM::FILE"};

# Output directory
OutputDir="/alice/cern.ch/user/p/polishch/omega/output/$1/#alien_counter#";
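# (#alien_counter# is replaced by a sequential number for every sub-job, so each
#  sub-job writes into its own subdirectory of output/$1/.)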

# Output files
OutputFile={"histos.root"};

# Merge the output
Merge={"histos.root:/alice/cern.ch/user/p/polishch/omega/mergerootfile.jdl:histos_merged.root"};
MergeOutputDir={"/alice/cern.ch/user/p/polishch/omega/output/$1"};
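# (a Merge entry reads file_to_merge:merging_jdl:merged_file_name: once the sub-jobs
#  finish, mergerootfile.jdl is submitted to combine their histos.root files into
#  histos_merged.root inside MergeOutputDir.)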

# Validation
Validationcommand = "/alice/cern.ch/user/p/polishch/bin/validate.sh";
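# (the validation script runs on the worker node after ana.sh exits; if it returns a
#  non-zero status, the sub-job is flagged as having failed validation and its output
#  is not accepted.)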
47 | |