[CRAB]
jobtype = cmssw
#scheduler = glite
scheduler = caf
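## Note on the scheduler choice (background, not part of the original template):
## 'caf' submits the jobs through LSF to the CERN Analysis Facility, while
## 'glite' would send them to the Grid via the gLite WMS; the [CAF] section
## below only applies to the 'caf' case.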
### NOTE: just set the name of the server (pi, lnl, etc.) and
### CRAB will submit the jobs to that server...
#server_name = bari
#
[CMSSW]
### The data you want to access (to be found on DBS)
#dbs_url = http://cmsdbsprod.cern.ch/cms_dbs_ph_analysis_01/servlet/DBSServlet
#datasetpath=/Neutrino_Pt_2to20_gun/Summer12-EflowHpu_Pileup_START53_V16-v1/RECODEBUG
#datasetpath=/Neutrino_Pt_2to20_gun/Summer12-EflowHpu_Pileup_START53_V7C-v1/RECODEBUG
datasetpath=/MinBias_TuneZ2star_8TeV-pythia6/Summer12-EflowHpu_NoPileUp_START53_V7C-v1/RECODEBUG
#datasetpath=/Neutrino_Pt_2to20_gun/Summer12_DR53X-PU45_START53_V7C-v1/GEN-SIM-RECO
#datasetpath=/Neutrino_Pt_2to20_gun/Summer12_DR53X-PU45_noOOT_START53_V7C-v1/GEN-SIM-RECO
pycfg_params= globalTag=START53_V7C::All
pset=DPGAnalysis/SiStripTools/test/OccupancyPlotsTest_vtxpos_cfg.py
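## Note: pycfg_params above are passed as command-line arguments to the pset,
## which is expected to parse them itself (typically with VarParsing); here
## that mechanism presumably sets the global tag used by the job.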
#lumi_mask=/afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/Collisions11/7TeV/Prompt/Cert_160404-176309_7TeV_PromptReco_Collisions11_JSON.txt
#runselection = 176304
#total_number_of_lumis =-1
#lumis_per_job = 20
total_number_of_events=10000000
events_per_job = 10000
#total_number_of_events=1000
#events_per_job = 200
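## With the active settings CRAB splits the task into roughly
## total_number_of_events / events_per_job = 10000000 / 10000 = 1000 jobs
## (the exact count depends on how the events are spread over the input files).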
### The output files (comma-separated list)
#output_file =
[USER]
### OUTPUT file management
## return the output back to the UI
return_data = 1
### To use a specific name for the UI directory where CRAB will create the jobs to submit (with full path).
### The default directory will be "crab_0_<date>_<time>".
#ui_working_dir = /afs/cern.ch/cms/tracking/output/OccupancyPlotsTest_vtxpos_neutrino_summer12_eflowhpu_start53_v7c_v2
ui_working_dir = /afs/cern.ch/cms/tracking/output/OccupancyPlotsTest_vtxpos_minbias_z2star_summer12_eflowhpu_start53_v7c_v2
### To specify the UI directory where to store the CMS executable output.
### FULL path is mandatory; by default <ui_working_dir>/res is used.
#outputdir= /full/path/yourOutDir
### To specify the UI directory where to store the stderr, stdout and .BrokerInfo of the submitted jobs.
### FULL path is mandatory; by default <ui_working_dir>/res is used.
#logdir= /full/path/yourLogDir
### OUTPUT files INTO A SE
copy_data = 0
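## With copy_data = 0 and return_data = 1 (above) the output is retrieved
## back to <ui_working_dir>/res; the storage_element / storage_path settings
## below are then effectively unused and only matter with copy_data = 1.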
### if you want to copy the data to an "official CMS site"
### you have to specify the official CMS site name
#storage_element = T2_IT_Bari
### the user_remote_dir will be created under the SE mountpoint
### in the case of publication this directory is not considered
#user_remote_dir = name_directory_you_want
### if you want to copy your data to the CAF
#storage_element = T2_CH_CAF
### the user_remote_dir will be created under the SE mountpoint
### in the case of publication this directory is not considered
#user_remote_dir = express_2010_132421
### if you want to copy your data to your area in CASTOR at CERN
### or to a "non-official CMS site", you have to specify the complete name of the SE
storage_element=srm-cms.cern.ch
### this directory is the mountpoint of the SE
#storage_path=/srm/managerv2?SFN=/castor/cern.ch
storage_path=/castor/cern.ch
### directory or tree of directories under the mountpoint
#user_remote_dir = /user/v/venturia/skims/express_2010_132421_132422_3
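## With copy_data = 1 the output would be written on the SE under
## <storage_path>/<user_remote_dir>, i.e. in this configuration somewhere
## below /castor/cern.ch on srm-cms.cern.ch.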
### To publish the produced output in a local instance of DBS, set publish_data = 1
publish_data=0
### Specify the dataset name. The full path will be <primarydataset>/<publish_data_name>/USER
publish_data_name = name_you_prefer
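## For the dataset selected above the published path would then be
## /MinBias_TuneZ2star_8TeV-pythia6/name_you_prefer/USER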
### Specify the URL of the DBS instance where CRAB has to publish the output files
#dbs_url_for_publication = https://cmsdbsprod.cern.ch:8443/cms_dbs_caf_analysis_01_writer/servlet/DBSServlet
### To specify additional files to be put in the InputSandBox,
### write the full path if the files are not in the current directory
### (the wildcard * is allowed): comma-separated list
#additional_input_files = file1, file2, /full/path/file3
## if using the CRAB server:
#thresholdLevel = 100
#eMail = your@Email.address
[CAF]
queue = cmscaf1nd
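## cmscaf1nd is the CAF LSF queue with a one-day wall-clock limit
## ('1nd' = one normal day); shorter and longer queues such as cmscaf1nh
## and cmscaf1nw typically exist as well.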
[GRID]
#
## RB/WMS management:
rb = CERN
## Black and White Lists management:
## By Storage
se_black_list = T0,T1
#se_black_list = T0
#se_white_list =
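## 'T0,T1' matches every Tier-0 and Tier-1 storage element, so the jobs
## are only brokered to Tier-2/Tier-3 sites that host the data.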
## By ComputingElement
#ce_black_list =
#ce_white_list =
[CONDORG]
# Set this to condor to override the batchsystem defined in gridcat.
#batchsystem = condor
# Specify additional condor_g requirements.
# Use this requirement to run on CMS-dedicated hardware:
# globus_rsl = (condor_submit=(requirements 'ClusterName == \"CMS\" && (Arch == \"INTEL\" || Arch == \"X86_64\")'))
# use this requirement to run on the new hardware
#globus_rsl = (condor_submit=(requirements 'regexp(\"cms-*\",Machine)'))