[CRAB]

jobtype = cmssw
#scheduler = glite
scheduler = caf
### NOTE: just set the name of the server (pi, lnl, etc.) and
### crab will submit the jobs to that server...
#server_name = bari
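### typical command sequence for this config (a sketch, assuming a standard CRAB2 setup):
###   crab -create -cfg crab_mcverticessimpleanalyzer.cfg
###   crab -submit
###   crab -status
###   crab -getoutput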
#
[CMSSW]

### The data you want to access (to be found on DBS)

#dbs_url = http://cmsdbsprod.cern.ch/cms_dbs_ph_analysis_01/servlet/DBSServlet

#datasetpath=/RelValSingleMuPt100_UP15/CMSSW_6_2_0_patch1-PU_POSTLS162_V1_TriggerRef25ns-v2/GEN-SIM-RECO
#datasetpath=/RelValSingleMuPt100_UP15/CMSSW_6_2_0_patch1-PU_POSTLS162_V2_TriggerNew25ns-v2/GEN-SIM-RECO
#datasetpath=/RelValSingleMuPt1_UP15/CMSSW_6_2_0_patch1-POSTLS162_V1_30Aug2013-v2/GEN-SIM-RECO
datasetpath= /RelValTTbar/CMSSW_6_2_0-PU_PRE_ST62_V8-v2/GEN-SIM-RECO

# not sure this works with AOD because of how the position of the main MC-truth vertex is determined

pycfg_params= globalTag=POSTLS162_V1::All
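### pycfg_params entries are forwarded as space-separated key=value arguments to the
### python config named by pset below; which keys are understood depends on that config.
### A hypothetical example with an additional, illustrative-only option:
#pycfg_params= globalTag=POSTLS162_V1::All maxEvents=500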
pset=Validation/RecoVertex/test/mcverticessimpleanalyzer_cfg.py


total_number_of_events=1000
events_per_job = 500
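### splitting sketch: with these values CRAB creates total_number_of_events/events_per_job
### = 1000/500 = 2 jobs of 500 events each (assuming the dataset holds at least 1000 events)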

### The output files (comma separated list)
#output_file =
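### hypothetical example only; the file name(s) must match what the python config
### actually writes, e.g.:
#output_file = mcvertices.root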

[USER]

### OUTPUT files Management
## output back into UI
return_data = 1

### To use a specific name for the UI directory where CRAB will create the jobs to submit (with full path).
### the default directory will be "crab_0_<date>_<time>"
ui_working_dir = /afs/cern.ch/work/v/venturia/crab/mcverticessimpleanalyzer_ttbar_pu_pre_st62_v1

### To specify the UI directory where to store the CMS executable output
### FULL path is mandatory. Default is <ui_working_dir>/res.
#outputdir= /full/path/yourOutDir

### To specify the UI directory where to store the stderr, stdout and .BrokerInfo of submitted jobs
### FULL path is mandatory. Default is <ui_working_dir>/res.
#logdir= /full/path/yourLogDir

### OUTPUT files INTO A SE
copy_data = 0

### if you want to copy data to an "official CMS site"
### you have to specify its official site name
#storage_element = T2_IT_Bari
### the user_remote_dir will be created under the SE mountpoint
### in the case of publication this directory is not considered
#user_remote_dir = name_directory_you_want

### if you want to copy your data to the CAF
#storage_element = T2_CH_CAF
### the user_remote_dir will be created under the SE mountpoint
### in the case of publication this directory is not considered
#user_remote_dir = express_2010_132421

### if you want to copy your data to your area in castor at cern
### or to a "not official CMS site" you have to specify the complete name of the SE
storage_element=srm-cms.cern.ch
### this directory is the mountpoint of the SE
#storage_path=/srm/managerv2?SFN=/castor/cern.ch
storage_path=/castor/cern.ch
### directory or tree of directories under the mountpoint
#user_remote_dir = /user/v/venturia/skims/express_2010_132421_132422_3
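### rough sketch of how these settings combine when copy_data = 1: the output is
### copied to storage_element under approximately <storage_path>/<user_remote_dir>,
### e.g. with the commented value above:
### /castor/cern.ch/user/v/venturia/skims/express_2010_132421_132422_3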


### To publish produced output in a local instance of DBS set publish_data = 1
publish_data=0
### Specify the dataset name. The full path will be <primarydataset>/<publish_data_name>/USER
publish_data_name = name_you_prefer
### Specify the URL of the DBS instance where CRAB has to publish the output files
#dbs_url_for_publication = https://cmsdbsprod.cern.ch:8443/cms_dbs_caf_analysis_01_writer/servlet/DBSServlet

### To specify additional files to be put in the InputSandBox
### write the full path if the files are not in the current directory
### (wildcards * are allowed): comma separated list
#additional_input_files = file1, file2, /full/path/file3

# if using a server
#thresholdLevel = 100
#eMail = your@Email.address

[CAF]

queue=cmscaf1nd

[GRID]
#
## RB/WMS management:
rb = CERN

## Black and White Lists management:
## By Storage
se_black_list = T0,T1
#se_black_list = T0
#se_white_list =
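### hypothetical example of a white list restricting jobs to a single site:
#se_white_list = T2_CH_CERN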

## By ComputingElement
#ce_black_list =
#ce_white_list =

[CONDORG]

# Set this to condor to override the batchsystem defined in gridcat.
#batchsystem = condor

# Specify additional condor_g requirements
# use this requirement to run on CMS-dedicated hardware
# globus_rsl = (condor_submit=(requirements 'ClusterName == \"CMS\" && (Arch == \"INTEL\" || Arch == \"X86_64\")'))
# use this requirement to run on the new hardware
#globus_rsl = (condor_submit=(requirements 'regexp(\"cms-*\",Machine)'))
0123