Jobsub.ini

;; to see new variables by jobsub_tools release see new_jobsub_ini_vars_by_release
;; to see alphabetical listing of variables see new_jobsub_ini_vars_index

;; This page documents the behavior of entries in jobsub.ini,
;; the configuration file for jobsub_tools and jobsub-server

;; NOTE THAT BLUE DEFINITIONS such as 'sandbox_readable_by_group' are LINKS to definition pages

;;order of searching for location of this file :
;;first $JOBSUB_INI_FILE
;;next pwd
;;next $HOME
;;next $JOBSUB_TOOLS_DIR/bin
;;Once this file has been loaded, its location is pointed to by $JOBSUB_INI_FILE
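;;for example, a specific ini file (hypothetical path) can be forced before setup with:
;;  export JOBSUB_INI_FILE=/path/to/my/jobsub.ini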
;;
;;adding an experiment should require just changing this ini file, not code changes to jobsub
;;

;;sections loaded into jobsub in this order:
;;first [default]
;;then the [$GROUP] section, which can be forced with export GROUP='name of a group section'
;;then the [$SUBMIT_HOST] section, which can be forced with export SUBMIT_HOST=some_machine prior to setting up jobsub_tools
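;;for example (hypothetical session, assuming the usual UPS 'setup jobsub_tools' step), to force
;;the [nova] and [fifebatchitbgpvm01.fnal.gov] sections defined later in this file:
;;  export GROUP=nova
;;  export SUBMIT_HOST=fifebatchitbgpvm01.fnal.gov
;;  setup jobsub_tools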

;;=======================
;;default section
;;=======================
;;note that $GROUP must be set prior to loading jobsub_tools; the ini file reads bash environment
;;values and substitutes them in

[default]
ca_path = /etc/grid-security/certificates
condor_mail_notify = Error
enable_http_cache = False
http_cache_duration = 180
schedd_constraint = stringListMember(name,"fermicloud042.fnal.gov,default.fnal.gov")
vo_constraint = '%(schedd_constraint)s&&(supportedvolist=?=Null || stringlistimember("{0}",supportedvolist)=?=true)'
downtime_constraint = '(InDownTime =!= True)&&(InDownTime =!= "True") && %(schedd_constraint)s'
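;;the three constraints above are HTCondor ClassAd expressions; a hypothetical way to see which
;;schedds match schedd_constraint from the command line (assuming HTCondor tools are available):
;;  condor_status -schedd -constraint 'stringListMember(Name,"fermicloud042.fnal.gov,default.fnal.gov")'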
ferry_server = fermicloud033.fnal.gov
ferry_port = 8443
ferry_output = /var/lib/jobsub/ferry
ferry_expire = 30
ferry_uname_fqan_map = 'document me'
ferry_dn_user_roles_map = 'document me'
ferry_fqan_user_map = 'document me'
ferry_vo_role_fqan_map = 'document me'
ferry_getGridMapFile = 'document me'

jobsub_cert = /etc/grid-security/jobsub/jobsubcert.pem
jobsub_key = /etc/grid-security/jobsub/jobsubkey.pem
dropbox_location = "/pnfs/%%s/scratch/jobsub_stage"
dropbox_constraint = '(jobsub_group=?="%%s")&&(PNFS_INPUT_FILES=!=Null)'
dropbox_max_size = 1236258553
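;;in the two dropbox entries %%s is a per-group placeholder, e.g. for the nova group
;;dropbox_location would expand to /pnfs/nova/scratch/jobsub_stage (illustrative expansion only);
;;dropbox_max_size is presumably in bytes (1236258553 is roughly 1.2 GB)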

;;RecentDaemonCoreDutyCycle is a number between 0 and 1
;;if RecentDaemonCoreDutyCycle > submit_reject_threshold then submission
;;will fail with an error message to try again later
submit_reject_threshold = .95
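;;a hypothetical way to inspect the current duty cycle of a schedd (assuming HTCondor tools):
;;  condor_status -schedd -af Name RecentDaemonCoreDutyCycle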

;;global_superusers can hold/release/remove anyone's jobs and view their sandboxes
global_superusers = 'ahandres mfattoru boyd mazzacan kherner kretzke sbhat kreymer lisa vito kuropat illingwo stoughto natasha ivm mengel diesburg kirby tlevshin dbox'

;;becomes job_lease_duration in submitted jobs jdf
job_lease_duration = 3600
;;input to -valid flag in voms-proxy-init
voms_proxy_lifetime = 24:00
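;;for example, a proxy with this lifetime would be generated by something like
;;(hypothetical invocation; the voms value is taken from the [fermilab] section below):
;;  voms-proxy-init -voms fermilab:/fermilab -valid 24:00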
authentication_methods = myproxy
myproxy_server = myproxy.fnal.gov
motd_file = /do/not/use/motd_file
ifdh_base_uri = http://samweb.fnal.gov:8480/sam/${GROUP}/api
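;;${GROUP} is substituted from the environment, e.g. with GROUP=nova ifdh_base_uri becomes
;;  http://samweb.fnal.gov:8480/sam/nova/api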
desired_os = ' && ( DesiredOS =?= NULL || stringlistimember(Target.IFOS_installed,DesiredOS)) '
condor_setup_cmd = ''
transfer_wrapfile = True
transfer_executable = True
condor_installed_in_opt = False
command_path_root = /fife/local/scratch/uploads
dropbox_path_root = /fife/local/scratch/dropbox
wn_ifdh_location = '/cvmfs/fermilab.opensciencegrid.org/products/common/etc/setups /grid/fermiapp/products/common/etc/setups.sh /fnal/ups/etc/setups.sh '
jobsub_max_joblog_size = 5000000
jobsub_max_joblog_tail_size = 4000000
jobsub_max_joblog_head_size = 1000000
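;;presumably job logs larger than jobsub_max_joblog_size (bytes) are truncated to the first
;;jobsub_max_joblog_head_size plus the last jobsub_max_joblog_tail_size bytes
;;(1000000 + 4000000 = 5000000, matching the max)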
#jobsub_skip_sam_prestage = True
site_ignore_list = 'Fermigrid fcdfosgt2 fcdfosgt3'
default_voms_role = Analysis
krbrefresh_query_format = ' -af:, "strcat(jobsub_group,string(\".\"),owner)" x509userproxysubject x509userproxy -constraint "JobUniverse=?=5&&X509UserProxySubject=!=UNDEFINED" '
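;;presumably this format string is appended to a condor_q invocation by the krbrefresh process, roughly:
;;  condor_q -af:, "strcat(jobsub_group,string(\".\"),owner)" x509userproxysubject x509userproxy \
;;    -constraint "JobUniverse=?=5&&X509UserProxySubject=!=UNDEFINED"
;;listing group.owner, proxy subject DN, and proxy file for vanilla-universe jobs that carry an X509 proxy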

[annie]
sandbox_readable_by_group = True
[argoneut]
[captmnv]
[cdf]
voms = cdf:/cdf
transfer_krbcc_to_job = True
#number of times a cdf job will try to transfer data back before quitting
num_transfer_tries = 10
#randomly sleep between one and sleep_random seconds between data transfer tries
sleep_random = 1200
#default host that cdf jobs try to scp data back to
default_output_host = fcdflnxgpvm01.fnal.gov
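#an illustrative sketch of the retry behavior described above (not the actual job wrapper code;
#output.tar is a placeholder, and the three variables stand in for the settings above):
#  for try in $(seq 1 $num_transfer_tries); do
#    scp output.tar ${default_output_host}: && break
#    sleep $(( RANDOM % sleep_random + 1 ))
#  done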
[cdms]
[chips]
[coupp]
[darkside]
[des]
voms = des:/des
[dune]
voms = dune:/dune
group_superusers = 'trj anorman neha'
[dzero]
voms = dzero:/dzero/users
transfer_krbcc_to_job = True
sub_group_pattern = dzero/users
[fermilab]
voms = fermilab:/fermilab
[genie]
[gm2]
[lar1]
[lar1nd]
[lariat]
[lsst]
voms = lsst:/lsst
[marsaccel]
voms = fermilab:/fermilab/mars/accel
sub_group_pattern = mars/accel
[marsgm2]
voms = fermilab:/fermilab/mars/gm2
sub_group_pattern = mars/gm2
[marslbne]
voms = fermilab:/fermilab/mars/lbne
sub_group_pattern = mars/lbne
[marsmu2e]
voms = fermilab:/fermilab/mars/mu2e
sub_group_pattern = mars/mu2e
;set_up_ifdh = True
[minerva]
group_superusers = 'rodriges drut1186 drimal'
[miniboone]
[minos]
sandbox_readable_by_group = True
[mu2e]
sandbox_readable_by_group = True
;set_up_ifdh = True
[numix]
[nova]
sandbox_readable_by_group = True
;;Should be set to 'True' for groups that use Production role but do not use
;;managed proxies distributed by discompsupp
hash_nondefault_proxy = True
[patriot]
[sbnd]
[seaquest]
[test]
[uboone]
sandbox_readable_by_group = True

;;submit_host section
;;these will override the experiment settings above
;;
[fifebatchitbgpvm01.fnal.gov]
output_files_web_browsable_allowed_types = '.out .err .cmd .sh .log .dag .dot .metrics .sub'
;;max value for jobsub_submit -N
jobsub_max_cluster_procs = 10000
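;;e.g. (hypothetical submission; script path is a placeholder) the largest cluster this host accepts is
;;  jobsub_submit -G nova -N 10000 file:///path/to/myjob.sh
;;a request with -N 10001 would be rejected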

;;match to JOB_EXPECTED_MAX_LIFETIME
;;as of jobsub 1.1.9.1, a 'bare' number of seconds without a unit
;;generates a warning on submission, so append 's','m','h', or 'd'
;;to these numbers
;;3 hours
job_expected_max_lifetime_short = 3h
;;8 hours
job_expected_max_lifetime_medium = 8h
;;23 hours 40 minutes
;;same as 1420m or 85200s
job_expected_max_lifetime_long = 85200s
;;default - 8 hours (same as medium)
job_expected_max_lifetime_default = 8h
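;;these presumably back the short/medium/long choices of jobsub_submit's --expected-lifetime
;;option, e.g. (hypothetical submission; script path is a placeholder):
;;  jobsub_submit -G nova --expected-lifetime=short file:///path/to/myjob.sh
;;which would request a 3h maximum lifetime on this host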

condor_tmp = /fife/local/scratch/uploads/${GROUP}/${LOGNAME}/${WORKDIR_ID}
condor_exec = /fife/local/scratch/uploads/${GROUP}/${LOGNAME}/${WORKDIR_ID}
history_db = /fife/local/scratch/history/jobsub_history.db
x509_user_proxy = /fife/local/home/rexbatch/.security/${GROUP}/x509cc_${LOGNAME}
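;;${GROUP}, ${LOGNAME}, and ${WORKDIR_ID} are substituted per submission; e.g. for a hypothetical
;;user jdoe submitting as nova, condor_tmp would expand to something like
;;  /fife/local/scratch/uploads/nova/jdoe/<unique workdir id>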
storage_group = fife
supported_groups = 'annie argoneut captmnv cdf cdms chips coupp darkside des dune dzero fermilab genie gm2 lar1 lar1nd lariat lsst marsaccel marsgm2 marslbne marsmu2e minerva miniboone minos mu2e numix nova patriot sbnd seaquest test uboone'
supported_roles = 'Analysis Calibration DESGW Data Production Online'
transfer_wrapfile = True
always_run_on_grid = True
has_usage_model = FERMICLOUD_PRIV1,FERMICLOUD_PRIV,FERMICLOUD_PP_PRIV1,FERMICLOUD_PP_PRIV,FERMICLOUD_PP,FERMICLOUD,OFFSITE,PAID_CLOUD,DEDICATED,OPPORTUNISTIC,SLOTTEST,PAID_CLOUD_TEST,AWS_HEPCLOUD,SLEEPER
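;;usage models are requested at submission time, e.g. (hypothetical submission; script path is a placeholder):
;;  jobsub_submit -G nova --resource-provides=usage_model=OFFSITE file:///path/to/myjob.sh
;;values not listed in has_usage_model would presumably be rejected by this host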
default_grid_site = False