# DPGAnalysis/SiStripTools/test/crab_OccupancyPlotsTest_vtxpos.cfg
[CRAB]

jobtype = cmssw
#scheduler = glite
scheduler = caf
### NOTE: just setting the name of the server (pi, lnl, etc.),
### crab will submit the jobs to the server...
#server_name = bari
#
[CMSSW]

### The data you want to access (to be found on DBS)

#dbs_url = http://cmsdbsprod.cern.ch/cms_dbs_ph_analysis_01/servlet/DBSServlet


#datasetpath=/Neutrino_Pt_2to20_gun/Summer12-EflowHpu_Pileup_START53_V16-v1/RECODEBUG
#datasetpath=/Neutrino_Pt_2to20_gun/Summer12-EflowHpu_Pileup_START53_V7C-v1/RECODEBUG
datasetpath=/MinBias_TuneZ2star_8TeV-pythia6/Summer12-EflowHpu_NoPileUp_START53_V7C-v1/RECODEBUG
#datasetpath=/Neutrino_Pt_2to20_gun/Summer12_DR53X-PU45_START53_V7C-v1/GEN-SIM-RECO
#datasetpath=/Neutrino_Pt_2to20_gun/Summer12_DR53X-PU45_noOOT_START53_V7C-v1/GEN-SIM-RECO

pycfg_params= globalTag=START53_V7C::All
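# Explanatory note (not part of the original file): CRAB passes the pycfg_params
# string above as command-line arguments to the pset listed below, which is
# assumed to pick them up with FWCore.ParameterSet.VarParsing, roughly:
#   options = VarParsing.VarParsing('analysis')
#   options.register('globalTag', '', VarParsing.VarParsing.multiplicity.singleton,
#                    VarParsing.VarParsing.varType.string, 'global tag')
#   options.parseArguments()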

pset=DPGAnalysis/SiStripTools/test/OccupancyPlotsTest_vtxpos_cfg.py

#lumi_mask=/afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/Collisions11/7TeV/Prompt/Cert_160404-176309_7TeV_PromptReco_Collisions11_JSON.txt
#runselection = 176304

#total_number_of_lumis =-1
#lumis_per_job = 20

total_number_of_events=10000000
events_per_job = 10000
#total_number_of_events=1000
#events_per_job = 200
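# Illustrative note (not in the original file): with total_number_of_events = 10000000
# and events_per_job = 10000, CRAB would split the task into roughly
# 10000000 / 10000 = 1000 jobs, fewer if the dataset contains fewer events.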

### The output files (comma separated list)
#output_file =

[USER]

### OUTPUT files Management
## output back into UI
return_data = 1

### To use a specific name for the UI directory where CRAB will create the jobs to submit (with full path).
### the default directory will be "crab_0_<date>_<time>"
#ui_working_dir = /afs/cern.ch/cms/tracking/output/OccupancyPlotsTest_vtxpos_neutrino_summer12_eflowhpu_start53_v7c_v2
ui_working_dir = /afs/cern.ch/cms/tracking/output/OccupancyPlotsTest_vtxpos_minbias_z2star_summer12_eflowhpu_start53_v7c_v2

### To specify the UI directory where to store the CMS executable output
### FULL path is mandatory. If not set, <ui_working_dir>/res will be used.
#outputdir= /full/path/yourOutDir

### To specify the UI directory where to store the stderr, stdout and .BrokerInfo of submitted jobs
### FULL path is mandatory. If not set, <ui_working_dir>/res will be used.
#logdir= /full/path/yourLogDir

### OUTPUT files INTO A SE
copy_data = 0

### if you want to copy data to an "official CMS site"
### you have to specify its official CMS site name
#storage_element = T2_IT_Bari
### the user_remote_dir will be created under the SE mountpoint
### in the case of publication this directory is not considered
#user_remote_dir = name_directory_you_want

### if you want to copy your data to the CAF
#storage_element = T2_CH_CAF
### the user_remote_dir will be created under the SE mountpoint
### in the case of publication this directory is not considered
#user_remote_dir = express_2010_132421

### if you want to copy your data to your area in CASTOR at CERN
### or to a "not official CMS site" you have to specify the complete name of the SE
storage_element=srm-cms.cern.ch
### this directory is the mountpoint of the SE
#storage_path=/srm/managerv2?SFN=/castor/cern.ch
storage_path=/castor/cern.ch
### directory or tree of directories under the mountpoint
#user_remote_dir = /user/v/venturia/skims/express_2010_132421_132422_3
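# Illustrative note (not in the original file): since copy_data = 0 these
# settings are inactive; if enabled, files would be copied under the SE
# mountpoint given above, e.g. /castor/cern.ch/<user_remote_dir>/ .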


### To publish produced output in a local instance of DBS set publish_data = 1
publish_data=0
### Specify the dataset name. The full path will be <primarydataset>/<publish_data_name>/USER
publish_data_name = name_you_prefer
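# Hypothetical example (name invented for illustration): with the MinBias dataset
# selected above and publish_data_name = occupancy_vtxpos_test, the published
# dataset would follow the pattern given above, i.e.
#   /MinBias_TuneZ2star_8TeV-pythia6/occupancy_vtxpos_test/USER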
### Specify the URL of the DBS instance where CRAB has to publish the output files
#dbs_url_for_publication = https://cmsdbsprod.cern.ch:8443/cms_dbs_caf_analysis_01_writer/servlet/DBSServlet

### To specify additional files to be put in InputSandBox
### write the full path if the files are not in the current directory
### (wildcard * are allowed): comma separated list
#additional_input_files = file1, file2, /full/path/file3

# if using the CRAB server
#thresholdLevel = 100
#eMail = your@Email.address

[CAF]

queue = cmscaf1nd
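# Assumed meaning (not stated in this file): cmscaf1nd is the CERN CAF batch
# queue with roughly a one-day wall-clock limit; shorter or longer CAF queues
# (e.g. cmscaf1nh, cmscaf1nw) could be chosen instead.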

[GRID]
#
## RB/WMS management:
rb = CERN

## Black and White Lists management:
## By Storage
se_black_list = T0,T1
#se_black_list = T0
#se_white_list =
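# Explanatory note (not part of the original template): se_black_list = T0,T1
# excludes all Tier-0 and Tier-1 storage elements, so grid jobs are brokered
# only to sites (Tier-2/Tier-3) hosting a copy of the dataset.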

## By ComputingElement
#ce_black_list =
#ce_white_list =

[CONDORG]

# Set this to condor to override the batchsystem defined in gridcat.
#batchsystem = condor

# Specify additional condor_g requirements
# use this requirement to run on CMS-dedicated hardware
# globus_rsl = (condor_submit=(requirements 'ClusterName == \"CMS\" && (Arch == \"INTEL\" || Arch == \"X86_64\")'))
# use this requirement to run on the new hardware
#globus_rsl = (condor_submit=(requirements 'regexp(\"cms-*\",Machine)'))
