Jobtag = {
    "comment:Automatically generated analysis JDL"
};
# Input xml collections
InputDataCollection = {
    "LF:/alice/cern.ch/user/a/atoia/takuv2c123456_2012_TEST/$1,nodownload"
};
# Output directory
OutputDir = "/alice/cern.ch/user/a/atoia/takuv2c123456_2012_TEST/output/$2/#alien_counter_03i#";
# List of requested packages
Packages = {
    "VO_ALICE@AliRoot::v5-01-Rev-19",
    "VO_ALICE@ROOT::v5-30-03-1",
    "VO_ALICE@APISCONFIG::V1.1x"
};
# List of input files to be uploaded to workers
InputFile = {
    "LF:/alice/cern.ch/user/a/atoia/takuv2c123456_2012_TEST/takuv2c123456_2012_TEST.C",
    "LF:/alice/cern.ch/user/a/atoia/takuv2c123456_2012_TEST/takuv2c123456_2012_TEST.root",
    "LF:/alice/cern.ch/user/a/atoia/takuv2c123456_2012_TEST/AliDielectronHistosTaku.h",
    "LF:/alice/cern.ch/user/a/atoia/takuv2c123456_2012_TEST/AliDielectronHistosTaku.cxx",
    "LF:/alice/cern.ch/user/a/atoia/takuv2c123456_2012_TEST/AliDielectronDebugTreeTaku.h",
    "LF:/alice/cern.ch/user/a/atoia/takuv2c123456_2012_TEST/AliDielectronDebugTreeTaku.cxx",
    "LF:/alice/cern.ch/user/a/atoia/takuv2c123456_2012_TEST/AliDielectronTaku.h",
    "LF:/alice/cern.ch/user/a/atoia/takuv2c123456_2012_TEST/AliDielectronTaku.cxx",
    "LF:/alice/cern.ch/user/a/atoia/takuv2c123456_2012_TEST/AliAnalysisTaskMultiDielectronNewTaku.h",
    "LF:/alice/cern.ch/user/a/atoia/takuv2c123456_2012_TEST/AliAnalysisTaskMultiDielectronNewTaku.cxx"
};
# This is the startup script
Executable = "takuv2c123456_2012_TEST.sh";
# We split per SE or file
Split = "se";
# Time after which the job is killed (1500 min.)
TTL = "90000";
# Resubmit failed jobs until DONE rate reaches this percentage
MasterResubmitThreshold = "90%";
# Maximum number of input files to be processed per subjob
SplitMaxInputFileNumber = "50";
# Format of input data
InputDataListFormat = "xml-single";
# Collection name to be processed on each worker node
InputDataList = "wn.xml";
# Files to be archived
OutputArchive = {
    "log_archive.zip:std*@disk=1","root_archive.zip:EventStat_temp.root,Resultstakuv2c123456_2012_TESTAnalysisResults.root,*.stat@disk=2"
};
# Maximum number of first failing jobs to abort the master job
MaxInitFailed = "20";
# AliEn price for this job
Price = "1";
# Validation script to be run for each subjob
Validationcommand = "/alice/cern.ch/user/a/atoia/takuv2c123456_2012_TEST/takuv2c123456_2012_TEST_validation.sh";

# JDL variables
JDLVariables =
{
    "Packages",
    "OutputDir"
};
Workdirectorysize = {"5000MB"};