#!/bin/bash
#
# Project:
# glideinWMS
#
# File Version:
#

# default IFS, to protect against unusual environment, better than "unset IFS" because works with restoring old one
IFS=$' \t\n'

# Flattened copy of the original command line; echoed later into the XML <cmd> tag.
# Use "$*" rather than "$@": assigning "$@" to a scalar variable is irregular
# (ShellCheck SC2124); "$*" explicitly joins the arguments with single spaces,
# which is the flattened string this variable is used as.
global_args="$*"

# Force the POSIX/C locale so the output of external tools (grep, sort, date, ...)
# is predictable regardless of the worker node configuration.
export LANG=C
# Install one handler function for several signals, arranging for the
# signal name to be passed to the handler as its first argument.
#   $1  - name of the handler function
#   $2+ - signal names to trap
# Note: 'func' and 'sig' are intentionally left global, as in the rest of
# this script.
trap_with_arg() {
    func="$1"
    shift
    for sig in "$@"; do
        trap "$func $sig" "$sig"
    done
}
|
22
|
|
23
|
#function to handle passing signals to the child processes
|
24
|
# no need to re-raise sigint, caller does unconditional exit (https://www.cons.org/cracauer/sigint.html)
|
25
|
function on_die {
|
26
|
echo "Received kill signal... shutting down child processes (forwarding $1 signal)" 1>&2
|
27
|
ON_DIE=1
|
28
|
kill -s $1 %1
|
29
|
}
|
30
|
|
31
|
function ignore_signal {
|
32
|
echo "Ignoring SIGHUP signal... Use SIGTERM or SIGQUIT to kill processes" 1>&2
|
33
|
}
|
34
|
|
35
|
function warn {
|
36
|
echo `date` "$@" 1>&2
|
37
|
}
# Print the command-line help to stdout and exit with status 1.
usage() {
    cat << 'EOF'
Usage: glidein_startup.sh <options>
where <options> is:
 -factory <name> : name of this factory
 -name <name> : name of this glidein
 -entry <name> : name of this glidein entry
 -clientname <name> : name of the requesting client
 -clientgroup <name> : group name of the requesting client
 -web <baseURL> : base URL from where to fetch
 -proxy <proxyURL> : URL of the local proxy
 -dir <dirID> : directory ID (supports ., Condor, CONDOR, OSG, TMPDIR, AUTO)
 -sign <sign> : signature of the signature file
 -signtype <id> : type of signature (only sha1 supported for now)
 -signentry <sign> : signature of the entry signature file
 -cluster <ClusterID> : condorG ClusterId
 -subcluster <ProcID> : condorG ProcId
 -submitcredid <CredentialID>: Credential ID of this condorG job
 -schedd <name> : condorG Schedd Name
 -descript <fname> : description file name
 -descriptentry <fname> : description file name for entry
 -clientweb <baseURL> : base URL from where to fetch client files
 -clientwebgroup <baseURL> : base URL from where to fetch client group files
 -clientsign <sign> : signature of the client signature file
 -clientsigntype <id> : type of client signature (only sha1 supported for now)
 -clientsigngroup <sign> : signature of the client group signature file
 -clientdescript <fname> : client description file name
 -clientdescriptgroup <fname>: client description file name for group
 -slotslayout <type> : how Condor will set up slots (fixed, partitionable)
 -v <id> : operation mode (std, nodebug, fast, check supported)
 -param_* <arg> : user specified parameters
EOF
    exit 1
}
# params will contain the full list of parameters
# -param_XXX YYY will become "XXX YYY"
params=""

# Parse the command line two tokens at a time: every option takes exactly
# one value, hence the unconditional double 'shift' at the bottom.
# NOTE(review): an option given without a value will mis-shift and
# silently terminate the loop - callers are trusted to pass pairs.
while [ $# -gt 0 ]
do case "$1" in
    -factory) glidein_factory="$2";;
    -name) glidein_name="$2";;
    -entry) glidein_entry="$2";;
    -clientname) client_name="$2";;
    -clientgroup) client_group="$2";;
    -web) repository_url="$2";;
    -proxy) proxy_url="$2";;
    -dir) work_dir="$2";;
    -sign) sign_id="$2";;
    -signtype) sign_type="$2";;
    -signentry) sign_entry_id="$2";;
    -cluster) condorg_cluster="$2";;
    -subcluster) condorg_subcluster="$2";;
    -submitcredid) glidein_cred_id="$2";;
    -schedd) condorg_schedd="$2";;
    -descript) descript_file="$2";;
    -descriptentry) descript_entry_file="$2";;
    -clientweb) client_repository_url="$2";;
    -clientwebgroup) client_repository_group_url="$2";;
    -clientsign) client_sign_id="$2";;
    -clientsigntype) client_sign_type="$2";;
    -clientsigngroup) client_sign_group_id="$2";;
    -clientdescript) client_descript_file="$2";;
    -clientdescriptgroup) client_descript_group_file="$2";;
    -slotslayout) slots_layout="$2";;
    -v) operation_mode="$2";;
    # "-param_XXX value" -> append "XXX value" to params
    # (substr($0,8) strips the 7-character "-param_" prefix)
    -param_*) params="$params `echo $1 | awk '{print substr($0,8)}'` $2";;
    *) (warn "Unknown option $1"; usage) 1>&2; exit 1
esac
shift
shift
done
# make sure we have a valid slots_layout: any value containing "fixed"
# (case-insensitive) selects fixed slots; everything else (including an
# empty value) falls back to partitionable.
case "$slots_layout" in
    *[Ff][Ii][Xx][Ee][Dd]*) slots_layout="fixed";;
    *)                      slots_layout="partitionable";;
esac
118
|
|
119
|
function python_b64uuencode {
|
120
|
echo "begin-base64 644 -"
|
121
|
python -c 'import binascii,sys;fd=sys.stdin;buf=fd.read();size=len(buf);idx=0
|
122
|
while size>57:
|
123
|
print binascii.b2a_base64(buf[idx:idx+57]),;
|
124
|
idx+=57;
|
125
|
size-=57;
|
126
|
print binascii.b2a_base64(buf[idx:]),'
|
127
|
echo "===="
|
128
|
}
# base64-encode stdin in uuencode -m framing, delegating the actual
# encoding to the base64 utility.
base64_b64uuencode() {
    printf '%s\n' "begin-base64 644 -"
    base64 -
    printf '%s\n' "===="
}
# not all WNs have all the tools installed
# base64-encode stdin in uuencode -m framing using the first available tool:
# uuencode, then base64 (via base64_b64uuencode), then a Python fallback.
function b64uuencode {
    # 'command -v' is the POSIX-recommended replacement for the
    # deprecated/non-portable 'which'
    if command -v uuencode >/dev/null 2>&1; then
        uuencode -m -
    elif command -v base64 >/dev/null 2>&1; then
        base64_b64uuencode
    else
        python_b64uuencode
    fi
}
150
|
|
151
|
function construct_xml {
|
152
|
result="$1"
|
153
|
|
154
|
glidein_end_time=`date +%s`
|
155
|
|
156
|
echo "<?xml version=\"1.0\"?>
|
157
|
<OSGTestResult id=\"glidein_startup.sh\" version=\"4.3.1\">
|
158
|
<operatingenvironment>
|
159
|
<env name=\"cwd\">$start_dir</env>
|
160
|
</operatingenvironment>
|
161
|
<test>
|
162
|
<cmd>$0 ${global_args}</cmd>
|
163
|
<tStart>`date --date=@${startup_time} +%Y-%m-%dT%H:%M:%S%:z`</tStart>
|
164
|
<tEnd>`date --date=@${glidein_end_time} +%Y-%m-%dT%H:%M:%S%:z`</tEnd>
|
165
|
</test>
|
166
|
$result
|
167
|
</OSGTestResult>"
|
168
|
}
# Report the name of the last validation script that wrote otrx_output.xml.
#   $1 - exit code of that script
# Prints "SUCCESS" on exit code 0, the script id on failure, or "Unknown"
# when no (non-empty) otrx_output.xml exists in the current directory.
extract_parent_fname() {
    exitcode=$1

    if [ ! -s otrx_output.xml ]; then
        # no result file to inspect
        echo "Unknown"
        return
    fi

    # file exists and is not 0 size
    last_result=$(cat otrx_output.xml)

    if [ "$exitcode" -eq 0 ]; then
        echo "SUCCESS"
    else
        # pull the id="..." attribute out of the <OSGTestResult> tag
        last_script_name=$(echo "$last_result" | awk '/<OSGTestResult /{split($0,a,"id=\""); split(a[2],b,"\""); print b[1];}')
        echo ${last_script_name}
    fi
}
# Emit the <result>/<detail> XML fragment describing the outcome of the
# last validation script, based on otrx_output.xml in the current directory.
#   $1 - exit code of the last script
# Metrics from the child XML are propagated; on failure the child's id and
# <detail> text are folded into the fragment. Sets globals exitcode,
# glidein_end_time, last_result, last_script_name, last_script_reason,
# my_reason (as the original did).
function extract_parent_xml_detail {
    exitcode=$1
    glidein_end_time=`date +%s`

    if [ -s otrx_output.xml ]; then
        # file exists and is not 0 size
        last_result=`cat otrx_output.xml`

        if [ "$exitcode" -eq 0 ]; then
            echo " <result>"
            echo " <status>OK</status>"
            # propagate metrics as well
            echo "$last_result" | grep '<metric '
            echo " </result>"
        else
            # failing script id from the <OSGTestResult id="..."> tag
            last_script_name=`echo "$last_result" |awk '/<OSGTestResult /{split($0,a,"id=\""); split(a[2],b,"\""); print b[1];}'`

            # text between <detail> and </detail> of the child XML
            last_script_reason=`echo "$last_result" | awk 'BEGIN{fr=0;}/<[/]detail>/{fr=0;}{if (fr==1) print $0}/<detail>/{fr=1;}'`
            my_reason=" Validation failed in $last_script_name.

$last_script_reason"

            echo " <result>"
            echo " <status>ERROR</status>
<metric name=\"TestID\" ts=\"`date --date=@${glidein_end_time} +%Y-%m-%dT%H:%M:%S%:z`\" uri=\"local\">$last_script_name</metric>"
            # propagate metrics as well (will include the failure metric)
            echo "$last_result" | grep '<metric '
            echo " </result>"
            echo " <detail>
${my_reason}
</detail>"
        fi
    else
        # create a minimal XML file, else
        echo " <result>"
        if [ "$exitcode" -eq 0 ]; then
            echo " <status>OK</status>"
        else
            echo " <status>ERROR</status>"
            echo " <metric name=\"failure\" ts=\"`date --date=@${glidein_end_time} +%Y-%m-%dT%H:%M:%S%:z`\" uri=\"local\">Unknown</metric>"
        fi
        echo " </result>
<detail>
No detail. Could not find source XML file.
</detail>"
    fi
}
# Augment the base OSGTestResult XML with worker-node information
# (client name/group, user, arch, OS release, hostname), inserting the
# extra <env> tags right after <operatingenvironment>.
#   $1 - the base XML document
# Reads globals: client_name, client_group.
basexml2simplexml() {
    final_result="$1"

    # augment with node info: everything up to and including the
    # <operatingenvironment> line first
    echo "${final_result}" | awk 'BEGIN{fr=1;}{if (fr==1) print $0}/<operatingenvironment>/{fr=0;}'

    printf '%s\n' " <env name=\"client_name\">$client_name</env>"
    printf '%s\n' " <env name=\"client_group\">$client_group</env>"

    printf '%s\n' " <env name=\"user\">$(id -un)</env>"
    printf '%s\n' " <env name=\"arch\">$(uname -m)</env>"
    if [ -e '/etc/redhat-release' ]; then
        printf '%s\n' " <env name=\"os\">$(cat /etc/redhat-release)</env>"
    fi
    printf '%s\n' " <env name=\"hostname\">$(uname -n)</env>"

    # then the remainder of the document
    echo "${final_result}" | awk 'BEGIN{fr=0;}{if (fr==1) print $0}/<operatingenvironment>/{fr=1;}'
}
# Produce the "long" XML: the simple XML with the per-script subtest
# results and the glidein/condorG identification <env> tags spliced in.
#   $1 - simple XML document
#   $2 - concatenated subtest XML (may be empty)
# Reads globals: glidein_factory, glidein_name, glidein_entry,
# condorg_cluster, condorg_subcluster, glidein_cred_id, condorg_schedd.
simplexml2longxml() {
    final_result_simple="$1"
    global_result="$2"

    # header: up to and including the <OSGTestResult > line
    echo "${final_result_simple}" | awk 'BEGIN{fr=1;}{if (fr==1) print $0}/<OSGTestResult /{fr=0;}'

    if [ -n "${global_result}" ]; then
        # subtests first, so it is more readable, when tailing
        printf '%s\n' ' <subtestlist>'
        printf '%s\n' ' <OSGTestResults>'
        echo "${global_result}" | awk '{print " " $0}'
        printf '%s\n' ' </OSGTestResults>'
        printf '%s\n' ' </subtestlist>'
    fi

    # body between <OSGTestResult > and <operatingenvironment>
    echo "${final_result_simple}" | awk 'BEGIN{fr=0;}{if (fr==1) print $0}/<OSGTestResult /{fr=1;}/<operatingenvironment>/{fr=0;}'

    printf '%s\n' " <env name=\"glidein_factory\">$glidein_factory</env>"
    printf '%s\n' " <env name=\"glidein_name\">$glidein_name</env>"
    printf '%s\n' " <env name=\"glidein_entry\">$glidein_entry</env>"
    printf '%s\n' " <env name=\"condorg_cluster\">$condorg_cluster</env>"
    printf '%s\n' " <env name=\"condorg_subcluster\">$condorg_subcluster</env>"
    printf '%s\n' " <env name=\"glidein_credential_id\">$glidein_cred_id</env>"
    printf '%s\n' " <env name=\"condorg_schedd\">$condorg_schedd</env>"

    # remainder: after <operatingenvironment>
    echo "${final_result_simple}" | awk 'BEGIN{fr=0;}{if (fr==1) print $0}/<operatingenvironment>/{fr=1;}'
}
# Final log output: run summary and plain XML to stdout, gzip+base64
# encoded long XML to stderr.
#   $1 - glidein exit code
#   $2 - simple XML document
#   $3 - long XML document
# Reads global startup_time; sets globals exit_code, glidein_end_time,
# total_time (as the original did).
print_tail() {
    exit_code=$1
    final_result_simple="$2"
    final_result_long="$3"

    glidein_end_time=$(date +%s)
    total_time=$((glidein_end_time - startup_time))
    echo "=== Glidein ending $(date) ($glidein_end_time) with code ${exit_code} after $total_time ==="

    echo ""
    echo "=== XML description of glidein activity ==="
    echo "${final_result_simple}" | grep -v "<cmd>"
    echo "=== End XML description of glidein activity ==="

    echo "" 1>&2
    echo "=== Encoded XML description of glidein activity ===" 1>&2
    echo "${final_result_long}" | gzip --stdout - | b64uuencode 1>&2
    echo "=== End encoded XML description of glidein activity ===" 1>&2
}
####################################
# Cleanup, print out message and exit
# Flags recording whether this script created the work / local tmp
# directories; the cleanup paths (early_glidein_failure, glidein_exit)
# check them before attempting 'rm -fR' so a pre-existing directory is
# never removed.
work_dir_created=0
glide_local_tmp_dir_created=0
309
|
# use this for early failures, when we cannot assume we can write to disk at all
|
310
|
# too bad we end up with some repeated code, but difficult to do better
|
311
|
function early_glidein_failure {
|
312
|
error_msg="$1"
|
313
|
|
314
|
warn "${error_msg}"
|
315
|
|
316
|
sleep $sleep_time
|
317
|
# wait a bit in case of error, to reduce lost glideins
|
318
|
|
319
|
glidein_end_time=`date +%s`
|
320
|
result=" <metric name=\"failure\" ts=\"`date --date=@${glidein_end_time} +%Y-%m-%dT%H:%M:%S%:z`\" uri=\"local\">WN_RESOURCE</metric>
|
321
|
<status>ERROR</status>
|
322
|
<detail>
|
323
|
$error_msg
|
324
|
</detail>"
|
325
|
|
326
|
final_result=`construct_xml "$result"`
|
327
|
final_result_simple=`basexml2simplexml "${final_result}"`
|
328
|
# have no global section
|
329
|
final_result_long=`simplexml2longxml "${final_result_simple}" ""`
|
330
|
|
331
|
cd "$start_dir"
|
332
|
if [ "$work_dir_created" -eq "1" ]; then
|
333
|
rm -fR "$work_dir"
|
334
|
fi
|
335
|
if [ "$glide_local_tmp_dir_created" -eq "1" ]; then
|
336
|
rm -fR "$glide_local_tmp_dir"
|
337
|
fi
|
338
|
|
339
|
print_tail 1 "${final_result_simple}" "${final_result_long}"
|
340
|
|
341
|
exit 1
|
342
|
}
|
343
|
|
344
|
|
345
|
# use this one once the most basic ops have been done
|
346
|
function glidein_exit {
|
347
|
# lock file for whole machine
|
348
|
if [ "x$lock_file" != "x" ]; then
|
349
|
rm -f $lock_file
|
350
|
fi
|
351
|
|
352
|
global_result=""
|
353
|
if [ -f otr_outlist.list ]; then
|
354
|
global_result=`cat otr_outlist.list`
|
355
|
chmod u+w otr_outlist.list
|
356
|
fi
|
357
|
|
358
|
ge_last_script_name=`extract_parent_fname $1`
|
359
|
result=`extract_parent_xml_detail $1`
|
360
|
final_result=`construct_xml "$result"`
|
361
|
|
362
|
# augment with node info
|
363
|
final_result_simple=`basexml2simplexml "${final_result}"`
|
364
|
|
365
|
# Create a richer version, too
|
366
|
final_result_long=`simplexml2longxml "${final_result_simple}" "${global_result}"`
|
367
|
|
368
|
if [ $1 -ne 0 ]; then
|
369
|
report_failed=`grep -i "^GLIDEIN_Report_Failed " "$glidein_config" | cut -d ' ' -f 2-`
|
370
|
|
371
|
if [ -z "$report_failed" ]; then
|
372
|
report_failed="NEVER"
|
373
|
fi
|
374
|
|
375
|
factory_report_failed=`grep -i "^GLIDEIN_Factory_Report_Failed " "$glidein_config" | cut -d ' ' -f 2-`
|
376
|
|
377
|
if [ -z "$factory_report_failed" ]; then
|
378
|
factory_collector=`grep -i "^GLIDEIN_Factory_Collector " "$glidein_config" | cut -d ' ' -f 2-`
|
379
|
if [ -z "$factory_collector" ]; then
|
380
|
# no point in enabling it if there are no collectors
|
381
|
factory_report_failed="NEVER"
|
382
|
else
|
383
|
factory_report_failed="ALIVEONLY"
|
384
|
fi
|
385
|
fi
|
386
|
|
387
|
do_report=0
|
388
|
if [ "$report_failed" != "NEVER" ] || [ "$factory_report_failed" != "NEVER" ]; then
|
389
|
do_report=1
|
390
|
fi
|
391
|
|
392
|
|
393
|
# wait a bit in case of error, to reduce lost glideins
|
394
|
let "dl=`date +%s` + $sleep_time"
|
395
|
dlf=`date --date="@$dl"`
|
396
|
add_config_line "GLIDEIN_ADVERTISE_ONLY" "1"
|
397
|
add_config_line "GLIDEIN_Failed" "True"
|
398
|
add_config_line "GLIDEIN_EXIT_CODE" "$1"
|
399
|
add_config_line "GLIDEIN_ToDie" "$dl"
|
400
|
add_config_line "GLIDEIN_Expire" "$dl"
|
401
|
add_config_line "GLIDEIN_LAST_SCRIPT" "${ge_last_script_name}"
|
402
|
add_config_line "GLIDEIN_ADVERTISE_TYPE" "Retiring"
|
403
|
|
404
|
add_config_line "GLIDEIN_FAILURE_REASON" "Glidein failed while running ${ge_last_script_name}. Keeping node busy until $dl ($dlf)."
|
405
|
|
406
|
condor_vars_file="`grep -i "^CONDOR_VARS_FILE " "$glidein_config" | cut -d ' ' -f 2-`"
|
407
|
if [ -n "${condor_vars_file}" ]; then
|
408
|
# if we are to advertise, this should be available... else, it does not matter anyhow
|
409
|
add_condor_vars_line "GLIDEIN_ADVERTISE_ONLY" "C" "True" "+" "Y" "Y" "-"
|
410
|
add_condor_vars_line "GLIDEIN_Failed" "C" "True" "+" "Y" "Y" "-"
|
411
|
add_condor_vars_line "GLIDEIN_EXIT_CODE" "I" "-" "+" "Y" "Y" "-"
|
412
|
add_condor_vars_line "GLIDEIN_ToDie" "I" "-" "+" "Y" "Y" "-"
|
413
|
add_condor_vars_line "GLIDEIN_Expire" "I" "-" "+" "Y" "Y" "-"
|
414
|
add_condor_vars_line "GLIDEIN_LAST_SCRIPT" "S" "-" "+" "Y" "Y" "-"
|
415
|
add_condor_vars_line "GLIDEIN_FAILURE_REASON" "S" "-" "+" "Y" "Y" "-"
|
416
|
fi
|
417
|
main_work_dir=`get_work_dir main`
|
418
|
|
419
|
for ((t=`date +%s`; $t<$dl;t=`date +%s`))
|
420
|
do
|
421
|
if [ -e "${main_work_dir}/$last_script" ] && [ "$do_report" = "1" ] ; then
|
422
|
# if the file exists, we should be able to talk to the collectors
|
423
|
# notify that things went badly and we are waiting
|
424
|
if [ "$factory_report_failed" != "NEVER" ]; then
|
425
|
add_config_line "GLIDEIN_ADVERTISE_DESTINATION" "Factory"
|
426
|
warn "Notifying Factory of error"
|
427
|
"${main_work_dir}/$last_script" glidein_config
|
428
|
fi
|
429
|
if [ "$report_failed" != "NEVER" ]; then
|
430
|
add_config_line "GLIDEIN_ADVERTISE_DESTINATION" "VO"
|
431
|
warn "Notifying VO of error"
|
432
|
"${main_work_dir}/$last_script" glidein_config
|
433
|
fi
|
434
|
fi
|
435
|
|
436
|
# sleep for about 5 mins... but randomize a bit
|
437
|
let "ds=250+$RANDOM%100"
|
438
|
let "as=`date +%s` + $ds"
|
439
|
if [ $as -gt $dl ]; then
|
440
|
# too long, shorten to the deadline
|
441
|
let "ds=$dl - `date +%s`"
|
442
|
fi
|
443
|
warn "Sleeping $ds"
|
444
|
sleep $ds
|
445
|
done
|
446
|
|
447
|
if [ -e "${main_work_dir}/$last_script" ] && [ "$do_report" = "1" ]; then
|
448
|
# notify that things went badly and we are going away
|
449
|
if [ "$factory_report_failed" != "NEVER" ]; then
|
450
|
add_config_line "GLIDEIN_ADVERTISE_DESTINATION" "Factory"
|
451
|
if [ "$factory_report_failed" = "ALIVEONLY" ]; then
|
452
|
add_config_line "GLIDEIN_ADVERTISE_TYPE" "INVALIDATE"
|
453
|
else
|
454
|
add_config_line "GLIDEIN_ADVERTISE_TYPE" "Killing"
|
455
|
add_config_line "GLIDEIN_FAILURE_REASON" "Glidein failed while running ${ge_last_script_name}. Terminating now. ($dl) ($dlf)"
|
456
|
fi
|
457
|
"${main_work_dir}/$last_script" glidein_config
|
458
|
warn "Last notification sent to Factory"
|
459
|
fi
|
460
|
if [ "$report_failed" != "NEVER" ]; then
|
461
|
add_config_line "GLIDEIN_ADVERTISE_DESTINATION" "VO"
|
462
|
if [ "$report_failed" = "ALIVEONLY" ]; then
|
463
|
add_config_line "GLIDEIN_ADVERTISE_TYPE" "INVALIDATE"
|
464
|
else
|
465
|
add_config_line "GLIDEIN_ADVERTISE_TYPE" "Killing"
|
466
|
add_config_line "GLIDEIN_FAILURE_REASON" "Glidein failed while running ${ge_last_script_name}. Terminating now. ($dl) ($dlf)"
|
467
|
fi
|
468
|
"${main_work_dir}/$last_script" glidein_config
|
469
|
warn "Last notification sent to VO"
|
470
|
fi
|
471
|
fi
|
472
|
fi
|
473
|
|
474
|
cd "$start_dir"
|
475
|
if [ "$work_dir_created" -eq "1" ]; then
|
476
|
rm -fR "$work_dir"
|
477
|
fi
|
478
|
if [ "$glide_local_tmp_dir_created" -eq "1" ]; then
|
479
|
rm -fR "$glide_local_tmp_dir"
|
480
|
fi
|
481
|
|
482
|
print_tail $1 "${final_result_simple}" "${final_result_long}"
|
483
|
|
484
|
exit $1
|
485
|
}
####################################################
# automatically determine and setup work directories
# Try the candidate scratch locations in order; pick the first that exists,
# has at least ~1 GB free and is writable. On success sets the global
# work_dir and returns 0; returns 1 if no candidate qualifies.
function automatic_work_dir {
    # candidates are intentionally unquoted below: word-splitting over the
    # space-separated list is the iteration mechanism
    targets="$_CONDOR_SCRATCH_DIR $OSG_WN_TMP $TG_NODE_SCRATCH $TG_CLUSTER_SCRATCH $SCRATCH $TMPDIR $TMP $PWD"
    unset TMPDIR

    # kb
    disk_required=1000000

    for d in $targets; do

        echo "Checking $d for potential use as work space... " 1>&2

        # does the target exist?
        if [ ! -e "$d" ]; then
            echo " Workdir: $d does not exist" 1>&2
            continue
        fi

        # make sure there is enough available diskspace
        free=`df -kP "$d" | awk '{if (NR==2) print $4}'`
        # split the deprecated/ambiguous '[ ... -o ... ]' into two tests
        if [ "x$free" = "x" ] || [ "$free" -lt "$disk_required" ]; then
            echo " Workdir: not enough disk space available in $d" 1>&2
            continue
        fi

        # writability probe with a PID-unique temp name
        if touch "$d/.dirtest.$$" >/dev/null 2>&1; then
            echo " Workdir: $d selected" 1>&2
            rm -f "$d/.dirtest.$$" >/dev/null 2>&1
            work_dir="$d"
            return 0
        fi
        echo " Workdir: not allowed to write to $d" 1>&2
    done
    return 1
}
# Create a script that defines add_config_line
# and add_condor_vars_line
# This way other depending scripts can use it
# Scripts are executed one at the time (also in schedd_cron)
# If this changes, these functions would have to add a locking mechanism
#   $1 - path of the helper script to generate
# The heredoc delimiter is unquoted, so every '$' and backtick meant for the
# GENERATED script must stay escaped with a backslash.
function create_add_config_line {
    cat > "$1" << EOF

function warn {
 echo \`date\` \$@ 1>&2
}

###################################
# Add a line to the config file
# Arg: line to add, first element is the id
# Uses global variable glidein_config
function add_config_line {
    grep -q "^\${*}$" \$glidein_config
    if [ \$? -ne 0 ]; then
        rm -f \${glidein_config}.old #just in case one was there
        mv \$glidein_config \${glidein_config}.old
        if [ \$? -ne 0 ]; then
            warn "Error renaming \$glidein_config into \${glidein_config}.old"
            exit 1
        fi
        grep -v "^\$1 " \${glidein_config}.old > \$glidein_config
        # NOTE that parameters are flattened if not quoted, if there are blanks they are separated by single space
        echo "\$@" >> \$glidein_config
        rm -f \${glidein_config}.old
    fi
}

##################################################
# Add a line to the config file using a lock file
# Replace add_config_line in script_wrapper where multiple instances run in parallel
# Uses FD 200, fails after a timeout of 300 sec
function add_config_line_safe {
    grep -q "^\${*}$" \$glidein_config
    if [ \$? -ne 0 ]; then
        # when fd is closed the lock is released, no need to trap and remove the file
        (
        flock -w 300 -e 200 || (warn "Error acquiring lock for glidein_config"; exit 1)
        add_config_line "\$@"
        ) 200>\${glidein_config}.lock
    fi
}

####################################
# Add a line to the condor_vars file
# Arg: line to add, first element is the id
# Uses global variable condor_vars_file
function add_condor_vars_line {
    id=\$1

    rm -f \${condor_vars_file}.old #just in case one was there
    mv \$condor_vars_file \${condor_vars_file}.old
    if [ \$? -ne 0 ]; then
        warn "Error renaming \$condor_vars_file into \${condor_vars_file}.old"
        exit 1
    fi
    grep -v "^\$id\b" \${condor_vars_file}.old > \$condor_vars_file
    echo "\$@" >> \$condor_vars_file
    rm -f \${condor_vars_file}.old
}

EOF
}
594
|
|
595
|
# Create a script that defines various id based functions
|
596
|
# This way other depending scripts can use it
|
597
|
function create_get_id_selectors {
|
598
|
cat > "$1" << EOF
|
599
|
############################################
|
600
|
# Get entry/client/group work dir
|
601
|
# Arg: type (main/entry/client/client_group)
|
602
|
function get_work_dir {
|
603
|
if [ "\$1" = "main" ]; then
|
604
|
grep "^GLIDEIN_WORK_DIR " "\${glidein_config}" | cut -d ' ' -f 2-
|
605
|
return \$?
|
606
|
elif [ "\$1" = "entry" ]; then
|
607
|
grep "^GLIDEIN_ENTRY_WORK_DIR " "\${glidein_config}" | cut -d ' ' -f 2-
|
608
|
return \$?
|
609
|
elif [ "\$1" = "client" ]; then
|
610
|
grep "^GLIDECLIENT_WORK_DIR " "\${glidein_config}" | cut -d ' ' -f 2-
|
611
|
return \$?
|
612
|
elif [ "\$1" = "client_group" ]; then
|
613
|
grep "^GLIDECLIENT_GROUP_WORK_DIR " "\${glidein_config}" | cut -d ' ' -f 2-
|
614
|
return \$?
|
615
|
fi
|
616
|
echo "[get_work_dir] Invalid id: \$1" 1>&2
|
617
|
return 1
|
618
|
}
|
619
|
|
620
|
################################################
|
621
|
# Get entry/client/group description file name
|
622
|
# Arg: type (main/entry/client/client_group)
|
623
|
function get_descript_file {
|
624
|
if [ "\$1" = "main" ]; then
|
625
|
grep "^DESCRIPTION_FILE " "\${glidein_config}" | cut -d ' ' -f 2-
|
626
|
return \$?
|
627
|
elif [ "\$1" = "entry" ]; then
|
628
|
grep "^DESCRIPTION_ENTRY_FILE " "\${glidein_config}" | cut -d ' ' -f 2-
|
629
|
return \$?
|
630
|
elif [ "\$1" = "client" ]; then
|
631
|
grep "^GLIDECLIENT_DESCRIPTION_FILE " "\${glidein_config}" | cut -d ' ' -f 2-
|
632
|
return \$?
|
633
|
elif [ "\$1" = "client_group" ]; then
|
634
|
grep "^GLIDECLIENT_DESCRIPTION_GROUP_FILE " "\${glidein_config}" | cut -d ' ' -f 2-
|
635
|
return \$?
|
636
|
fi
|
637
|
echo "[get_descript_file] Invalid id: \$1" 1>&2
|
638
|
return 1
|
639
|
}
|
640
|
|
641
|
############################################
|
642
|
# Get entry/client/group signature
|
643
|
# Arg: type (main/entry/client/client_group)
|
644
|
function get_signature {
|
645
|
if [ "\$1" = "main" ]; then
|
646
|
grep "^GLIDEIN_Signature " "\${glidein_config}" | cut -d ' ' -f 2-
|
647
|
return \$?
|
648
|
elif [ "\$1" = "entry" ]; then
|
649
|
grep "^GLIDEIN_Entry_Signature " "\${glidein_config}" | cut -d ' ' -f 2-
|
650
|
return \$?
|
651
|
elif [ "\$1" = "client" ]; then
|
652
|
grep "^GLIDECLIENT_Signature " "\${glidein_config}" | cut -d ' ' -f 2-
|
653
|
return \$?
|
654
|
elif [ "\$1" = "client_group" ]; then
|
655
|
grep "^GLIDECLIENT_Group_Signature " "\${glidein_config}" | cut -d ' ' -f 2-
|
656
|
return \$?
|
657
|
fi
|
658
|
echo "[get_signature] Invalid id: \$1" 1>&2
|
659
|
return 1
|
660
|
}
|
661
|
|
662
|
############################################
|
663
|
# Get entry/client/group prefix
|
664
|
# Arg: type (main/entry/client/client_group)
|
665
|
function get_prefix {
|
666
|
if [ "\$1" = "main" ]; then
|
667
|
echo ""
|
668
|
elif [ "\$1" = "entry" ]; then
|
669
|
echo "ENTRY_"
|
670
|
elif [ "\$1" = "client" ]; then
|
671
|
echo "GLIDECLIENT_"
|
672
|
elif [ "\$1" = "client_group" ]; then
|
673
|
echo "GLIDECLIENT_GROUP_"
|
674
|
else
|
675
|
echo "[get_prefix] Invalid id: \$1" 1>&2
|
676
|
return 1
|
677
|
fi
|
678
|
}
|
679
|
|
680
|
EOF
|
681
|
}
|
682
|
|
683
|
###################################
|
684
|
# Put parameters into the config file
|
685
|
function params2file {
|
686
|
param_list=""
|
687
|
|
688
|
while [ $# -gt 0 ]
|
689
|
do
|
690
|
pfval=`echo "$2" | sed\
|
691
|
-e 's/\.nbsp,/ /g'\
|
692
|
-e 's/\.semicolon,/;/g'\
|
693
|
-e 's/\.colon,/:/g'\
|
694
|
-e 's/\.tilde,/~/g'\
|
695
|
-e 's/\.not,/!/g'\
|
696
|
-e 's/\.question,/?/g'\
|
697
|
-e 's/\.star,/*/g'\
|
698
|
-e 's/\.dollar,/$/g'\
|
699
|
-e 's/\.comment,/#/g'\
|
700
|
-e 's/\.sclose,/]/g'\
|
701
|
-e 's/\.sopen,/[/g'\
|
702
|
-e 's/\.gclose,/}/g'\
|
703
|
-e 's/\.gopen,/{/g'\
|
704
|
-e 's/\.close,/)/g'\
|
705
|
-e 's/\.open,/(/g'\
|
706
|
-e 's/\.gt,/>/g'\
|
707
|
-e 's/\.lt,/</g'\
|
708
|
-e 's/\.minus,/-/g'\
|
709
|
-e 's/\.plus,/+/g'\
|
710
|
-e 's/\.eq,/=/g'\
|
711
|
-e "s/\.singquot,/'/g"\
|
712
|
-e 's/\.quot,/"/g'\
|
713
|
-e 's/\.fork,/\`/g'\
|
714
|
-e 's/\.pipe,/|/g'\
|
715
|
-e 's/\.backslash,/\\\/g'\
|
716
|
-e 's/\.amp,/\&/g'\
|
717
|
-e 's/\.comma,/,/g'\
|
718
|
-e 's/\.dot,/./g'`
|
719
|
add_config_line "$1 $pfval"
|
720
|
if [ $? -ne 0 ]; then
|
721
|
glidein_exit 1
|
722
|
fi
|
723
|
if [ -z "$param_list" ]; then
|
724
|
param_list="$1"
|
725
|
else
|
726
|
param_list="${param_list},$1"
|
727
|
fi
|
728
|
shift;shift
|
729
|
done
|
730
|
echo "PARAM_LIST ${param_list}"
|
731
|
return 0
|
732
|
}
|
733
|
|
734
|
|
735
|
################
|
736
|
# Parse arguments
|
737
|
set_debug=1
|
738
|
sleep_time=1199
|
739
|
if [ "$operation_mode" = "nodebug" ]; then
|
740
|
set_debug=0
|
741
|
elif [ "$operation_mode" = "fast" ]; then
|
742
|
sleep_time=150
|
743
|
set_debug=1
|
744
|
elif [ "$operation_mode" = "check" ]; then
|
745
|
sleep_time=150
|
746
|
set_debug=2
|
747
|
fi
|
748
|
|
749
|
if [ -z "$descript_file" ]; then
|
750
|
warn "Missing descript fname." 1>&2
|
751
|
usage
|
752
|
fi
|
753
|
|
754
|
if [ -z "$descript_entry_file" ]; then
|
755
|
warn "Missing descript fname for entry." 1>&2
|
756
|
usage
|
757
|
fi
|
758
|
|
759
|
if [ -z "$glidein_name" ]; then
|
760
|
warn "Missing gliden name." 1>&2
|
761
|
usage
|
762
|
fi
|
763
|
|
764
|
if [ -z "$glidein_entry" ]; then
|
765
|
warn "Missing glidein entry name." 1>&2
|
766
|
usage
|
767
|
fi
|
768
|
|
769
|
|
770
|
if [ -z "$repository_url" ]; then
|
771
|
warn "Missing Web URL." 1>&2
|
772
|
usage
|
773
|
fi
|
774
|
|
775
|
repository_entry_url="${repository_url}/entry_${glidein_entry}"
|
776
|
|
777
|
if [ -z "$proxy_url" ]; then
|
778
|
proxy_url="None"
|
779
|
fi
|
780
|
|
781
|
if [ "$proxy_url" = "OSG" ]; then
|
782
|
if [ -z "$OSG_SQUID_LOCATION" ]; then
|
783
|
# if OSG does not define a Squid, then don't use any
|
784
|
proxy_url="None"
|
785
|
warn "OSG_SQUID_LOCATION undefined, not using any Squid URL" 1>&2
|
786
|
else
|
787
|
proxy_url=`echo $OSG_SQUID_LOCATION |awk -F ':' '{if ($2 =="") {print $1 ":3128"} else {print $0}}'`
|
788
|
fi
|
789
|
fi
|
790
|
|
791
|
if [ -z "$sign_id" ]; then
|
792
|
warn "Missing signature." 1>&2
|
793
|
usage
|
794
|
fi
|
795
|
|
796
|
if [ -z "$sign_entry_id" ]; then
|
797
|
warn "Missing entry signature." 1>&2
|
798
|
usage
|
799
|
fi
|
800
|
|
801
|
if [ -z "$sign_type" ]; then
|
802
|
sign_type="sha1"
|
803
|
fi
|
804
|
|
805
|
if [ "$sign_type" = "sha1" ]; then
|
806
|
sign_sha1="$sign_id"
|
807
|
sign_entry_sha1="$sign_entry_id"
|
808
|
else
|
809
|
warn "Unsupported signtype $sign_type found." 1>&2
|
810
|
usage
|
811
|
fi
|
812
|
|
813
|
if [ -n "$client_repository_url" ]; then
|
814
|
# client data is optional, user url as a switch
|
815
|
if [ -z "$client_sign_type" ]; then
|
816
|
client_sign_type="sha1"
|
817
|
fi
|
818
|
|
819
|
if [ "$client_sign_type" = "sha1" ]; then
|
820
|
client_sign_sha1="$client_sign_id"
|
821
|
else
|
822
|
warn "Unsupported clientsigntype $client_sign_type found." 1>&2
|
823
|
usage
|
824
|
fi
|
825
|
|
826
|
if [ -z "$client_descript_file" ]; then
|
827
|
warn "Missing client descript fname." 1>&2
|
828
|
usage
|
829
|
fi
|
830
|
|
831
|
if [ -n "$client_repository_group_url" ]; then
|
832
|
# client group data is optional, user url as a switch
|
833
|
if [ -z "$client_group" ]; then
|
834
|
warn "Missing client group name." 1>&2
|
835
|
usage
|
836
|
fi
|
837
|
|
838
|
if [ -z "$client_descript_group_file" ]; then
|
839
|
warn "Missing client descript fname for group." 1>&2
|
840
|
usage
|
841
|
fi
|
842
|
|
843
|
if [ "$client_sign_type" = "sha1" ]; then
|
844
|
client_sign_group_sha1="$client_sign_group_id"
|
845
|
else
|
846
|
warn "Unsupported clientsigntype $client_sign_type found." 1>&2
|
847
|
usage
|
848
|
fi
|
849
|
fi
|
850
|
fi
|
851
|
|
852
|
function md5wrapper {
    # Compute the MD5 digest of a file, portably:
    # uses md5sum (Linux/coreutils) if available, else md5 (BSD/macOS).
    # $1 - file name
    # $2 - option: "quiet" to print only the digest (no trailing file name)
    # stdout: the digest line ("???" on error)
    # return: 0 on success, 1 if no md5 tool exists or hashing fails
    #
    # Fixes vs. previous version:
    # - uses `command -v` instead of `which` (builtin, always available)
    # - no `eval` of a string-built command (quoting-safe for odd file names)
    # - the quiet path no longer masks md5sum failures behind `| cut`
    #   (previously $? was cut's status, so a missing file returned 0)
    local ONLY_SUM
    if [ "x$2" = "xquiet" ]; then
        ONLY_SUM=yes
    fi
    local res rc
    if command -v md5sum 1>/dev/null 2>&1; then
        res="$(md5sum "$1" 2>/dev/null)"
        rc=$?
        # md5sum prints "digest  filename"; keep only the digest when quiet
        [ -n "$ONLY_SUM" ] && res="${res%% *}"
    elif command -v md5 1>/dev/null 2>&1; then
        if [ -n "$ONLY_SUM" ]; then
            res="$(md5 -q "$1" 2>/dev/null)"
            rc=$?
        else
            res="$(md5 "$1" 2>/dev/null)"
            rc=$?
        fi
    else
        # neither tool available
        echo "???"
        return 1
    fi
    if [ $rc -ne 0 ]; then
        echo "???"
        return 1
    fi
    echo "$res"
}
|
881
|
|
882
|
|
883
|
# --- Startup banner: record configuration and host info in the job stdout ---
startup_time=`date +%s`
echo "Starting glidein_startup.sh at `date` ($startup_time)"
# Self-checksum of this script, for provenance/debugging of the log
echo "script_checksum = '`md5wrapper "$0"`'"
echo "debug_mode = '$operation_mode'"
echo "condorg_cluster = '$condorg_cluster'"
echo "condorg_subcluster= '$condorg_subcluster'"
echo "condorg_schedd = '$condorg_schedd'"
echo "glidein_credential_id = '$glidein_cred_id'"
echo "glidein_factory = '$glidein_factory'"
echo "glidein_name = '$glidein_name'"
echo "glidein_entry = '$glidein_entry'"
if [ -n "$client_name" ]; then
    # client name not required as it is not used for anything but debug info
    echo "client_name = '$client_name'"
fi
if [ -n "$client_group" ]; then
    echo "client_group = '$client_group'"
fi
echo "work_dir = '$work_dir'"
echo "web_dir = '$repository_url'"
echo "sign_type = '$sign_type'"
echo "proxy_url = '$proxy_url'"
echo "descript_fname = '$descript_file'"
echo "descript_entry_fname = '$descript_entry_file'"
echo "sign_id = '$sign_id'"
echo "sign_entry_id = '$sign_entry_id'"
if [ -n "$client_repository_url" ]; then
    echo "client_web_dir = '$client_repository_url'"
    echo "client_descript_fname = '$client_descript_file'"
    echo "client_sign_type = '$client_sign_type'"
    echo "client_sign_id = '$client_sign_id'"
    if [ -n "$client_repository_group_url" ]; then
        echo "client_web_group_dir = '$client_repository_group_url'"
        echo "client_descript_group_fname = '$client_descript_group_file'"
        echo "client_sign_group_id = '$client_sign_group_id'"
    fi
fi
echo
# Host identification, useful when triaging site-specific failures
echo "Running on `uname -n`"
echo "System: `uname -a`"
if [ -e '/etc/redhat-release' ]; then
    echo "Release: `cat /etc/redhat-release 2>&1`"
fi
echo "As: `id`"
echo "PID: $$"
echo

# In debug mode also dump the full inherited environment to stderr
if [ $set_debug -ne 0 ]; then
    echo "------- Initial environment ---------------" 1>&2
    env 1>&2
    echo "------- =================== ---------------" 1>&2
fi

########################################
# make sure nobody else can write my files
# In the Grid world I cannot trust anybody
umask 0022
if [ $? -ne 0 ]; then
    # early_glidein_failure is defined earlier in the file (outside this view)
    early_glidein_failure "Failed in umask 0022"
fi
|
943
|
|
944
|
########################################
# Setup OSG and/or Globus
# Source the site-provided OSG setup if readable; otherwise fall back to the
# gLite local customization hook, if present.
if [ -r "$OSG_GRID/setup.sh" ]; then
    . "$OSG_GRID/setup.sh"
else
    if [ -r "${GLITE_LOCAL_CUSTOMIZATION_DIR}/cp_1.sh" ]; then
        . "${GLITE_LOCAL_CUSTOMIZATION_DIR}/cp_1.sh"
    fi
fi

# If the Globus environment is not already set up (GLOBUS_PATH unset),
# locate and source globus-user-env.sh. Failures are warnings only —
# the glidein continues without Globus.
if [ -z "$GLOBUS_PATH" ]; then
    if [ -z "$GLOBUS_LOCATION" ]; then
        # if GLOBUS_LOCATION not defined, try to guess it
        if [ -r "/opt/globus/etc/globus-user-env.sh" ]; then
            GLOBUS_LOCATION=/opt/globus
        elif [ -r "/osgroot/osgcore/globus/etc/globus-user-env.sh" ]; then
            GLOBUS_LOCATION=/osgroot/osgcore/globus
        else
            warn "GLOBUS_LOCATION not defined and could not guess it." 1>&2
            warn "Looked in:" 1>&2
            warn ' /opt/globus/etc/globus-user-env.sh' 1>&2
            warn ' /osgroot/osgcore/globus/etc/globus-user-env.sh' 1>&2
            warn 'Continuing like nothing happened' 1>&2
        fi
    fi

    if [ -r "$GLOBUS_LOCATION/etc/globus-user-env.sh" ]; then
        . "$GLOBUS_LOCATION/etc/globus-user-env.sh"
    else
        warn "GLOBUS_PATH not defined and $GLOBUS_LOCATION/etc/globus-user-env.sh does not exist." 1>&2
        warn 'Continuing like nothing happened' 1>&2
    fi
fi
|
977
|
|
978
|
function set_proxy_fullpath {
    # Set the X509_USER_PROXY path to full path to the file.
    # Canonicalizing to an absolute path keeps the proxy reachable after the
    # script cd's into the work directory.
    # Globals: reads and exports X509_USER_PROXY; logs the outcome to stderr.
    # On readlink failure the original (possibly relative) value is kept.
    local fullpath
    # Fix: $X509_USER_PROXY is now quoted — the unquoted form word-split and
    # glob-expanded, so proxy paths containing spaces failed to canonicalize.
    if fullpath="$(readlink -f "$X509_USER_PROXY")"; then
        echo "Setting X509_USER_PROXY $X509_USER_PROXY to canonical path $fullpath" 1>&2
        export X509_USER_PROXY="$fullpath"
    else
        echo "Unable to get canonical path for X509_USER_PROXY, using $X509_USER_PROXY" 1>&2
    fi
}
|
988
|
|
989
|
|
990
|
[ -n "$X509_USER_PROXY" ] && set_proxy_fullpath
|
991
|
|
992
|
########################################
|
993
|
# prepare and move to the work directory
|
994
|
if [ "$work_dir" = "Condor" ]; then
|
995
|
work_dir="$_CONDOR_SCRATCH_DIR"
|
996
|
elif [ "$work_dir" = "CONDOR" ]; then
|
997
|
work_dir="$_CONDOR_SCRATCH_DIR"
|
998
|
elif [ "$work_dir" = "OSG" ]; then
|
999
|
work_dir="$OSG_WN_TMP"
|
1000
|
elif [ "$work_dir" = "TMPDIR" ]; then
|
1001
|
work_dir="$TMPDIR"
|
1002
|
elif [ "$work_dir" = "AUTO" ]; then
|
1003
|
automatic_work_dir
|
1004
|
elif [ "$work_dir" = "." ]; then
|
1005
|
work_dir=`pwd`
|
1006
|
elif [ -z "$work_dir" ]; then
|
1007
|
work_dir=`pwd`
|
1008
|
fi
|
1009
|
|
1010
|
if [ -z "$work_dir" ]; then
|
1011
|
early_glidein_failure "Unable to identify Startup dir for the glidein."
|
1012
|
fi
|
1013
|
|
1014
|
if [ -e "$work_dir" ]; then
|
1015
|
echo >/dev/null
|
1016
|
else
|
1017
|
early_glidein_failure "Startup dir $work_dir does not exist."
|
1018
|
fi
|
1019
|
|
1020
|
start_dir=`pwd`
|
1021
|
echo "Started in $start_dir"
|
1022
|
|
1023
|
def_work_dir="$work_dir/glide_XXXXXX"
|
1024
|
work_dir=`mktemp -d "$def_work_dir"`
|
1025
|
if [ $? -ne 0 ]; then
|
1026
|
early_glidein_failure "Cannot create temp '$def_work_dir'"
|
1027
|
else
|
1028
|
cd "$work_dir"
|
1029
|
if [ $? -ne 0 ]; then
|
1030
|
early_glidein_failure "Dir '$work_dir' was created but I cannot cd into it."
|
1031
|
else
|
1032
|
echo "Running in $work_dir"
|
1033
|
fi
|
1034
|
fi
|
1035
|
work_dir_created=1
|
1036
|
|
1037
|
# mktemp makes it user readable by definition (ignores umask)
|
1038
|
chmod a+rx "$work_dir"
|
1039
|
if [ $? -ne 0 ]; then
|
1040
|
early_glidein_failure "Failed chmod '$work_dir'"
|
1041
|
fi
|
1042
|
|
1043
|
def_glide_local_tmp_dir="/tmp/glide_`id -u -n`_XXXXXX"
|
1044
|
glide_local_tmp_dir=`mktemp -d "$def_glide_local_tmp_dir"`
|
1045
|
if [ $? -ne 0 ]; then
|
1046
|
early_glidein_failure "Cannot create temp '$def_glide_local_tmp_dir'"
|
1047
|
fi
|
1048
|
glide_local_tmp_dir_created=1
|
1049
|
|
1050
|
# the tmpdir should be world writable
|
1051
|
# This way it will work even if the user spawned by the glidein is different
|
1052
|
# than the glidein user
|
1053
|
chmod 1777 "$glide_local_tmp_dir"
|
1054
|
if [ $? -ne 0 ]; then
|
1055
|
early_glidein_failure "Failed chmod '$glide_local_tmp_dir'"
|
1056
|
fi
|
1057
|
|
1058
|
glide_tmp_dir="${work_dir}/tmp"
|
1059
|
mkdir "$glide_tmp_dir"
|
1060
|
if [ $? -ne 0 ]; then
|
1061
|
early_glidein_failure "Cannot create '$glide_tmp_dir'"
|
1062
|
fi
|
1063
|
# the tmpdir should be world writable
|
1064
|
# This way it will work even if the user spawned by the glidein is different
|
1065
|
# than the glidein user
|
1066
|
chmod 1777 "$glide_tmp_dir"
|
1067
|
if [ $? -ne 0 ]; then
|
1068
|
early_glidein_failure "Failed chmod '$glide_tmp_dir'"
|
1069
|
fi
|
1070
|
|
1071
|
short_main_dir=main
|
1072
|
main_dir="${work_dir}/${short_main_dir}"
|
1073
|
mkdir "$main_dir"
|
1074
|
if [ $? -ne 0 ]; then
|
1075
|
early_glidein_failure "Cannot create '$main_dir'"
|
1076
|
fi
|
1077
|
|
1078
|
short_entry_dir=entry_${glidein_entry}
|
1079
|
entry_dir="${work_dir}/${short_entry_dir}"
|
1080
|
mkdir "$entry_dir"
|
1081
|
if [ $? -ne 0 ]; then
|
1082
|
early_glidein_failure "Cannot create '$entry_dir'"
|
1083
|
fi
|
1084
|
|
1085
|
if [ -n "$client_repository_url" ]; then
|
1086
|
short_client_dir=client
|
1087
|
client_dir="${work_dir}/${short_client_dir}"
|
1088
|
mkdir "$client_dir"
|
1089
|
if [ $? -ne 0 ]; then
|
1090
|
early_glidein_failure "Cannot create '$client_dir'"
|
1091
|
fi
|
1092
|
|
1093
|
if [ -n "$client_repository_group_url" ]; then
|
1094
|
short_client_group_dir=client_group_${client_group}
|
1095
|
client_group_dir="${work_dir}/${short_client_group_dir}"
|
1096
|
mkdir "$client_group_dir"
|
1097
|
if [ $? -ne 0 ]; then
|
1098
|
early_glidein_failure "Cannot create '$client_group_dir'"
|
1099
|
fi
|
1100
|
fi
|
1101
|
fi
|
1102
|
|
1103
|
# Generate and source the helper snippets used by later scripts
# (create_* functions are defined earlier in the file, outside this view)
create_add_config_line add_config_line.source
source add_config_line.source

create_get_id_selectors get_id_selectors.source
source get_id_selectors.source

wrapper_list="$PWD/wrapper_list.lst"
touch "$wrapper_list"

# create glidein_config
# glidein_config is the key/value file ("KEY value" per line) that all
# subsequent glidein scripts read and append to.
glidein_config="$PWD/glidein_config"
echo > "$glidein_config"
if [ $? -ne 0 ]; then
    early_glidein_failure "Could not create '$glidein_config'"
fi
echo "# --- glidein_startup vals ---" >> glidein_config
echo "GLIDEIN_Factory $glidein_factory" >> glidein_config
echo "GLIDEIN_Name $glidein_name" >> glidein_config
echo "GLIDEIN_Entry_Name $glidein_entry" >> glidein_config
if [ -n "$client_name" ]; then
    # client name not required as it is not used for anything but debug info
    echo "GLIDECLIENT_Name $client_name" >> glidein_config
fi
if [ -n "$client_group" ]; then
    # client group not required as it is not used for anything but debug info
    echo "GLIDECLIENT_Group $client_group" >> glidein_config
fi
echo "GLIDEIN_CredentialIdentifier $glidein_cred_id" >> glidein_config
echo "CONDORG_CLUSTER $condorg_cluster" >> glidein_config
echo "CONDORG_SUBCLUSTER $condorg_subcluster" >> glidein_config
echo "CONDORG_SCHEDD $condorg_schedd" >> glidein_config
echo "DEBUG_MODE $set_debug" >> glidein_config
echo "GLIDEIN_STARTUP_PID $$" >> glidein_config
echo "GLIDEIN_WORK_DIR $main_dir" >> glidein_config
echo "GLIDEIN_ENTRY_WORK_DIR $entry_dir" >> glidein_config
echo "TMP_DIR $glide_tmp_dir" >> glidein_config
echo "GLIDEIN_LOCAL_TMP_DIR $glide_local_tmp_dir" >> glidein_config
echo "PROXY_URL $proxy_url" >> glidein_config
echo "DESCRIPTION_FILE $descript_file" >> glidein_config
echo "DESCRIPTION_ENTRY_FILE $descript_entry_file" >> glidein_config
echo "GLIDEIN_Signature $sign_id" >> glidein_config
echo "GLIDEIN_Entry_Signature $sign_entry_id" >> glidein_config
if [ -n "$client_repository_url" ]; then
    echo "GLIDECLIENT_WORK_DIR $client_dir" >> glidein_config
    echo "GLIDECLIENT_DESCRIPTION_FILE $client_descript_file" >> glidein_config
    echo "GLIDECLIENT_Signature $client_sign_id" >> glidein_config
    if [ -n "$client_repository_group_url" ]; then
        echo "GLIDECLIENT_GROUP_WORK_DIR $client_group_dir" >> glidein_config
        echo "GLIDECLIENT_DESCRIPTION_GROUP_FILE $client_descript_group_file" >> glidein_config
        echo "GLIDECLIENT_Group_Signature $client_sign_group_id" >> glidein_config
    fi
fi
echo "ADD_CONFIG_LINE_SOURCE $PWD/add_config_line.source" >> glidein_config
echo "GET_ID_SELECTORS_SOURCE $PWD/get_id_selectors.source" >> glidein_config
echo "WRAPPER_LIST $wrapper_list" >> glidein_config
echo "SLOTS_LAYOUT $slots_layout" >> glidein_config
# Add a line saying we are still initializing
echo "GLIDEIN_INITIALIZED 0" >> glidein_config
# but be optimist, and leave advertise_only for the actual error handling script
echo "GLIDEIN_ADVERTISE_ONLY 0" >> glidein_config
echo "# --- User Parameters ---" >> glidein_config
if [ $? -ne 0 ]; then
    # we should probably be testing all others as well, but this is better than nothing
    early_glidein_failure "Failed in updating '$glidein_config'"
fi
# params2file is defined earlier in the file (outside this view);
# $params is deliberately unquoted so each parameter is a separate argument
params2file $params
|
1169
|
|
1170
|
|
1171
|
############################################
|
1172
|
# get the proper descript file based on id
|
1173
|
# Arg: type (main/entry/client/client_group)
|
1174
|
function get_repository_url {
    # Map a download-scope id to its repository base URL.
    # $1 - scope id: main / entry / client / client_group
    # stdout: the matching *_repository_url global
    # return: 0 on a known id, 1 (with a stderr message) otherwise
    case "$1" in
        main)
            echo $repository_url
            ;;
        entry)
            echo $repository_entry_url
            ;;
        client)
            echo $client_repository_url
            ;;
        client_group)
            echo $client_repository_group_url
            ;;
        *)
            echo "[get_repository_url] Invalid id: $1" 1>&2
            return 1
            ;;
    esac
}
|
1188
|
|
1189
|
#####################
|
1190
|
# Check signature
|
1191
|
function check_file_signature {
    # Verify a downloaded file against the sha1 entry recorded in its scope's
    # signature.sha1 file.
    # $1 - scope id (main/entry/client/client_group)
    # $2 - file name to verify
    # return: 0 if the signature matches, is absent, or checking is disabled;
    #         1 if the file is corrupted (after reporting via error_*.sh)
    cfs_id="$1"
    cfs_fname="$2"

    cfs_work_dir=`get_work_dir $cfs_id`

    cfs_desc_fname="${cfs_work_dir}/$cfs_fname"
    cfs_signature="${cfs_work_dir}/signature.sha1"

    if [ $check_signature -gt 0 ]; then # check_signature is global for simplicity
        # Extract just this file's line into a unique temp file so sha1sum -c
        # checks only this file ($$ + timestamp + RANDOM avoid collisions)
        tmp_signname="${cfs_signature}_$$_`date +%s`_$RANDOM"
        grep " $cfs_fname$" "$cfs_signature" > $tmp_signname
        if [ $? -ne 0 ]; then
            # no signature line for this file: warn but do not fail
            rm -f $tmp_signname
            echo "No signature for $cfs_desc_fname." 1>&2
        else
            # run from the work dir because the signature line uses a
            # relative file name
            (cd "$cfs_work_dir" && sha1sum -c "$tmp_signname") 1>&2
            cfs_rc=$?
            if [ $cfs_rc -ne 0 ]; then
                # report the corruption through the error_*.sh tool chain
                # (init -> error -> process -> concat order matters)
                $main_dir/error_augment.sh -init
                $main_dir/error_gen.sh -error "check_file_signature" "Corruption" "File $cfs_desc_fname is corrupted." "file" "$cfs_desc_fname" "source_type" "$cfs_id"
                $main_dir/error_augment.sh -process $cfs_rc "check_file_signature" "$PWD" "sha1sum -c $tmp_signname" "`date +%s`" "`date +%s`"
                $main_dir/error_augment.sh -concat
                warn "File $cfs_desc_fname is corrupted." 1>&2
                rm -f $tmp_signname
                return 1
            fi
            rm -f $tmp_signname
            echo "Signature OK for ${cfs_id}:${cfs_fname}." 1>&2
        fi
    fi
    return 0
}
|
1224
|
|
1225
|
#####################
|
1226
|
# Untar support func
|
1227
|
|
1228
|
function get_untar_subdir {
    # Look up the directory into which a tarball should be untarred.
    # $1 - scope id (passed to get_prefix, defined outside this view)
    # $2 - tarball file name
    # stdout: the untar directory
    # Exits the glidein (glidein_exit 1) if the config entry or dir is missing.
    gus_id="$1"
    gus_fname="$2"

    gus_prefix=`get_prefix $gus_id`
    gus_config_cfg="${gus_prefix}UNTAR_CFG_FILE"

    # glidein_config maps <prefix>UNTAR_CFG_FILE to a per-scope untar
    # configuration file
    gus_config_file="`grep "^$gus_config_cfg " glidein_config | cut -d ' ' -f 2-`"
    if [ -z "$gus_config_file" ]; then
        warn "Error, cannot find '$gus_config_cfg' in glidein_config." 1>&2
        glidein_exit 1
    fi

    # the untar config file maps file name -> target dir (tab-separated;
    # cut -s drops lines without a tab)
    gus_dir="`grep -i "^$gus_fname " "$gus_config_file" | cut -s -f 2-`"
    if [ -z "$gus_dir" ]; then
        warn "Error, untar dir for '$gus_fname' cannot be empty." 1>&2
        glidein_exit 1
    fi

    echo "$gus_dir"
    return 0
}
|
1250
|
|
1251
|
#####################
|
1252
|
# Periodic execution support function and global variable
|
1253
|
# Counter of scheduled periodic scripts; also used to generate unique
# STARTD_CRON job names (GLIDEIN_PS_1, GLIDEIN_PS_2, ...)
add_startd_cron_counter=0
function add_periodic_script {
    # schedules a script for periodic execution using startd_cron
    # parameters: wrapper full path, period, cwd, executable path (from cwd),
    # config file path (from cwd), ID
    # global variable: add_startd_cron_counter
    #TODO: should it allow for variable number of parameters?
    local include_fname=condor_config_startd_cron_include
    local s_wrapper="$1"
    local s_period_sec="${2}s"
    local s_cwd="$3"
    local s_fname="$4"
    local s_config="$5"
    local s_ffb_id="$6"
    local s_cc_prefix="$7"
    if [ $add_startd_cron_counter -eq 0 ]; then
        # Make sure that no undesired file is there when called for first cron
        rm $include_fname
    fi
    let add_startd_cron_counter=add_startd_cron_counter+1
    local name_prefix=GLIDEIN_PS_
    local s_name="${name_prefix}$add_startd_cron_counter"

    # Append the following to the startd configuration
    # Instead of Periodic and Kill wait for completion:
    # STARTD_CRON_DATE_MODE = WaitForExit
    cat >> $include_fname << EOF
STARTD_CRON_JOBLIST = \$(STARTD_CRON_JOBLIST) $s_name
STARTD_CRON_${s_name}_MODE = Periodic
STARTD_CRON_${s_name}_KILL = True
STARTD_CRON_${s_name}_PERIOD = $s_period_sec
STARTD_CRON_${s_name}_EXECUTABLE = $s_wrapper
STARTD_CRON_${s_name}_ARGS = $s_config $s_ffb_id $s_name $s_fname $s_cc_prefix
STARTD_CRON_${s_name}_CWD = $s_cwd
STARTD_CRON_${s_name}_SLOTS = 1
STARTD_CRON_${s_name}_JOB_LOAD = 0.01
EOF
    # NOPREFIX is a keyword for not setting the prefix for all condor attributes
    [ "xNOPREFIX" != "x${s_cc_prefix}" ] && echo "STARTD_CRON_${s_name}_PREFIX = ${s_cc_prefix}" >> $include_fname
    # record the include file in glidein_config so the condor setup picks it up
    add_config_line "GLIDEIN_condor_config_startd_cron_include" "$include_fname"
    add_config_line "# --- Lines starting with $s_cc_prefix are from priodic scripts ---"
}
|
1295
|
|
1296
|
#####################
|
1297
|
# Fetch a single file
|
1298
|
#
|
1299
|
# Check cWDictFile/FileDictFile for the number and type of parameters (has to be consistent)
|
1300
|
function fetch_file_regular {
    # Convenience wrapper: fetch a plain (non-executable, non-periodic) file
    # whose target name equals its repository name.
    # $1 - scope id (main/entry/client/client_group)
    # $2 - file name
    fetch_file "$1" "$2" "$2" "regular" 0 "GLIDEIN_PS_" "TRUE" "FALSE"
}
|
1303
|
|
1304
|
function fetch_file {
    # Fetch a file, dispatching on argument count for backward compatibility
    # with older file-list formats (6 or 7 args) and forward compatibility
    # (more than 8 args: extras ignored). The canonical 8-arg form is:
    # $1 id, $2 target fname, $3 real fname, $4 file type, $5 period,
    # $6 condor-cron prefix, $7 config-check attr, $8 config-out attr.
    # Exits the glidein (glidein_exit 1) on any fetch failure.
    if [ $# -gt 8 ]; then
        # For compatibility w/ future versions (add new parameters at the end)
        echo "More then 8 arguments, considering the first 8 ($#/$ifs_str): $*" 1>&2
    elif [ $# -ne 8 ]; then
        if [ $# -eq 7 ]; then
            #TODO: remove in version 3.3
            # For compatibility with past versions (old file list formats)
            # 3.2.13 and older: prefix (par 6) added in #12705, 3.2.14?
            # 3.2.10 and older: period (par 5) added: fetch_file_try "$1" "$2" "$3" "$4" 0 "GLIDEIN_PS_" "$5" "$6"
            fetch_file_try "$1" "$2" "$3" "$4" "$5" "GLIDEIN_PS_" "$6" "$7"
            if [ $? -ne 0 ]; then
                glidein_exit 1
            fi
            return 0
        fi
        if [ $# -eq 6 ]; then
            # added to maintain compatibility with older (3.2.10) file list format
            #TODO: remove in version 3.3
            fetch_file_try "$1" "$2" "$3" "$4" 0 "GLIDEIN_PS_" "$5" "$6"
            if [ $? -ne 0 ]; then
                glidein_exit 1
            fi
            return 0
        fi
        # fewer than 6 arguments: fatal; include a quoted dump of IFS to help
        # debug word-splitting problems in the caller
        local ifs_str
        printf -v ifs_str '%q' "$IFS"
        warn "Not enough arguments in fetch_file, 8 expected ($#/$ifs_str): $*" 1>&2
        glidein_exit 1
    fi

    fetch_file_try "$1" "$2" "$3" "$4" "$5" "$6" "$7" "$8"
    if [ $? -ne 0 ]; then
        glidein_exit 1
    fi
    return 0
}
|
1341
|
|
1342
|
function fetch_file_try {
    # Fetch a file unless its conditional-download attribute disables it.
    # $1 id, $2 target fname, $3 real fname, $4 file type, $5 period,
    # $6 condor-cron prefix, $7 config-check attribute name (or "TRUE"),
    # $8 config-out attribute name.
    # return: fetch_file_base's status when the fetch runs, 0 when the file
    # is intentionally skipped.
    fft_id="$1"
    fft_target_fname="$2"
    fft_real_fname="$3"
    fft_file_type="$4"
    fft_period="$5"
    fft_cc_prefix="$6"
    fft_config_check="$7"
    fft_config_out="$8"

    if [ "$fft_config_check" = "TRUE" ]; then
        # TRUE is a special case
        fft_get_ss=1
    else
        # look up the attribute in glidein_config; download only if it is "1"
        fft_get_ss=`grep -i "^$fft_config_check " glidein_config | cut -d ' ' -f 2-`
    fi

    # Fix (was a TODO): fft_rc used to be unset when the attribute was not
    # "1", so `return $fft_rc` returned the status of the last command (the
    # failed test, i.e. 1). Skipping a file is not an error — default to 0.
    fft_rc=0
    if [ "$fft_get_ss" = "1" ]; then
        fetch_file_base "$fft_id" "$fft_target_fname" "$fft_real_fname" "$fft_file_type" "$fft_config_out" "$fft_period" "$fft_cc_prefix"
        fft_rc=$?
    fi

    return $fft_rc
}
|
1367
|
|
1368
|
function perform_wget {
    # Run wget with the given argument array; on failure, emit an
    # OSGTestResult XML report (otrx_output.xml / otr_outlist.list).
    # $@ - wget arguments; $1 must be the URL. "--output-document <f>" and
    # "--proxy <url>" (a pseudo-option consumed here, not passed to wget)
    # are recognized by scanning the array.
    # return: wget's exit code
    wget_args=("$@")
    arg_len=${#wget_args[@]}
    ffb_url="${wget_args[0]}"
    ffb_repository=$(dirname "$ffb_url")
    ffb_real_fname=$(basename "$ffb_url")
    proxy_url="None"
    for ((i=0; i<arg_len; i++));
    do
        if [ "${wget_args[$i]}" = "--output-document" ]; then
            ffb_tmp_outname=${wget_args[$i+1]}
        fi
        if [ "${wget_args[$i]}" = "--proxy" ]; then
            proxy_url=${wget_args[$i+1]}
        fi
    done
    START=$(date +%s)
    if [ "$proxy_url" != "None" ]; then
        # strip the trailing "--proxy <url>" pair (caller appends it last)
        # and pass the proxy via the http_proxy environment variable instead
        wget_args=(${wget_args[@]:0:$arg_len-2})
        # wget_cmd is only a printable copy of the command, for logging
        wget_cmd=$(echo "env http_proxy=${proxy_url} wget ${wget_args[@]}"| sed 's/"/\\\"/g')
        wget_resp=$(env http_proxy="$proxy_url" wget "${wget_args[@]}" 2>&1)
        wget_retval=$?
    else
        wget_cmd=$(echo "wget ${wget_args[@]}"| sed 's/"/\\\"/g')
        wget_resp=$(wget "${wget_args[@]}" 2>&1)
        wget_retval=$?
    fi

    if [ $wget_retval -ne 0 ]; then
        wget_version=$(wget --version 2>&1 | head -1)
        warn "$wget_cmd failed. version:$wget_version exit code $wget_retval stderr: $wget_resp "
        # cannot use error_*.sh helper functions
        # may not have been loaded yet, and wget fails often
        echo "<OSGTestResult id=\"perform_wget\" version=\"4.3.1\">
  <operatingenvironment>
    <env name=\"cwd\">$PWD</env>
    <env name=\"uname\">$(uname -a)</env>
    <env name=\"release\">$(cat /etc/system-release)</env>
    <env name=\"wget_version\">$wget_version</env>
  </operatingenvironment>
  <test>
    <cmd>$wget_cmd</cmd>
    <tStart>$(date --date=@${START} +%Y-%m-%dT%H:%M:%S%:z)</tStart>
    <tEnd>$(date +%Y-%m-%dT%H:%M:%S%:z)</tEnd>
  </test>
  <result>
    <status>ERROR</status>
    <metric name=\"failure\" ts=\"$(date --date=@${START} +%Y-%m-%dT%H:%M:%S%:z)\" uri=\"local\">Network</metric>
    <metric name=\"URL\" ts=\"$(date --date=@${START} +%Y-%m-%dT%H:%M:%S%:z)\" uri=\"local\">${ffb_url}</metric>
    <metric name=\"http_proxy\" ts=\"$(date --date=@${START} +%Y-%m-%dT%H:%M:%S%:z)\" uri=\"local\">$proxy_url</metric>
    <metric name=\"source_type\" ts=\"$(date --date=@${START} +%Y-%m-%dT%H:%M:%S%:z)\" uri=\"local\">${ffb_id}</metric>
  </result>
  <detail>
    Failed to load file '$ffb_real_fname' from '$ffb_repository' using proxy '$proxy_url'. $wget_resp
  </detail>
</OSGTestResult>" > otrb_output.xml
        warn "Failed to load file '$ffb_real_fname' from '$ffb_repository'."

        # append the report to the (write-protected between uses) result list
        if [ -f otr_outlist.list ]; then
            chmod u+w otr_outlist.list
        else
            touch otr_outlist.list
        fi
        cat otrb_output.xml >> otr_outlist.list
        echo "<?xml version=\"1.0\"?>" > otrx_output.xml
        cat otrb_output.xml >> otrx_output.xml
        rm -f otrb_output.xml
        chmod a-w otr_outlist.list
    fi
    return $wget_retval
}
|
1439
|
|
1440
|
function perform_curl {
    # Run curl with the given argument array; on failure, emit an
    # OSGTestResult XML report (otrx_output.xml / otr_outlist.list).
    # Mirrors perform_wget. $@ - curl arguments; $1 must be the URL.
    # "--output <f>" and "--proxy <url>" are recognized by scanning the array.
    # return: curl's exit code
    curl_args=("$@")
    arg_len=${#curl_args[@]}
    ffb_url="${curl_args[0]}"
    ffb_repository=$(dirname "$ffb_url")
    ffb_real_fname=$(basename "$ffb_url")
    for ((i=0; i<arg_len; i++));
    do
        if [ "${curl_args[$i]}" = "--output" ]; then
            ffb_tmp_outname=${curl_args[$i+1]}
        fi
        if [ "${curl_args[$i]}" = "--proxy" ]; then
            proxy_url=${curl_args[$i+1]}
        fi
    done

    START=$(date +%s)
    # curl_cmd is only a printable copy of the command, for logging
    curl_cmd=$(echo "curl ${curl_args[@]}" | sed 's/"/\\\"/g')
    curl_resp=$(curl "${curl_args[@]}" 2>&1)
    curl_retval=$?
    # curl does not create an empty file for an empty (but successful)
    # download the way wget does; normalize that here
    if [ $curl_retval -eq 0 ] && [ ! -e "${ffb_tmp_outname}" ] ; then
        touch "${ffb_tmp_outname}"
    fi



    if [ $curl_retval -ne 0 ]; then
        curl_version=$(curl --version 2>&1 | head -1)
        warn "$curl_cmd failed. version:$curl_version exit code $curl_retval stderr: $curl_resp "
        # cannot use error_*.sh helper functions
        # may not have been loaded yet, and wget fails often
        echo "<OSGTestResult id=\"perform_curl\" version=\"4.3.1\">
  <operatingenvironment>
    <env name=\"cwd\">$PWD</env>
    <env name=\"uname\">$(uname -a)</env>
    <env name=\"release\">$(cat /etc/system-release)</env>
    <env name=\"curl_version\">$curl_version</env>
  </operatingenvironment>
  <test>
    <cmd>${curl_cmd}</cmd>
    <tStart>$(date --date=@${START} +%Y-%m-%dT%H:%M:%S%:z)</tStart>
    <tEnd>$(date +%Y-%m-%dT%H:%M:%S%:z)</tEnd>
  </test>
  <result>
    <status>ERROR</status>
    <metric name=\"failure\" ts=\"$(date --date=@${START} +%Y-%m-%dT%H:%M:%S%:z)\" uri=\"local\">Network</metric>
    <metric name=\"URL\" ts=\"$(date --date=@${START} +%Y-%m-%dT%H:%M:%S%:z)\" uri=\"local\">${ffb_url}</metric>
    <metric name=\"http_proxy\" ts=\"$(date --date=@${START} +%Y-%m-%dT%H:%M:%S%:z)\" uri=\"local\">$proxy_url</metric>
    <metric name=\"source_type\" ts=\"$(date --date=@${START} +%Y-%m-%dT%H:%M:%S%:z)\" uri=\"local\">${ffb_id}</metric>
  </result>
  <detail>
    Failed to load file '$ffb_real_fname' from '$ffb_repository' using proxy '$proxy_url'. ${curl_resp}
  </detail>
</OSGTestResult>" > otrb_output.xml
        warn "Failed to load file '$ffb_real_fname' from '$ffb_repository'."

        if [ -f otr_outlist.list ]; then
            chmod u+w otr_outlist.list
        else
            touch otr_outlist.list
        fi
        cat otrb_output.xml >> otr_outlist.list
        echo "<?xml version=\"1.0\"?>" > otrx_output.xml
        cat otrb_output.xml >> otrx_output.xml
        rm -f otrb_output.xml
        chmod a-w otr_outlist.list
    fi
    return $curl_retval
}
|
1509
|
|
1510
|
function fetch_file_base {
|
1511
|
ffb_id="$1"
|
1512
|
ffb_target_fname="$2"
|
1513
|
ffb_real_fname="$3"
|
1514
|
ffb_file_type="$4"
|
1515
|
ffb_config_out="$5"
|
1516
|
ffb_period=$6
|
1517
|
# condor cron prefix, used only for periodic executables
|
1518
|
ffb_cc_prefix="$7"
|
1519
|
|
1520
|
ffb_work_dir=$(get_work_dir "$ffb_id")
|
1521
|
|
1522
|
ffb_repository=$(get_repository_url "$ffb_id")
|
1523
|
|
1524
|
ffb_tmp_outname="$ffb_work_dir/$ffb_real_fname"
|
1525
|
ffb_outname="$ffb_work_dir/$ffb_target_fname"
|
1526
|
#these don't appear to be used anywhere
|
1527
|
ffb_desc_fname="$ffb_work_dir/$fname"
|
1528
|
ffb_signature="$ffb_work_dir/signature.sha1"
|
1529
|
|
1530
|
|
1531
|
|
1532
|
# Create a dummy default in case something goes wrong
|
1533
|
# cannot use error_*.sh helper functions
|
1534
|
# may not have been loaded yet
|
1535
|
have_dummy_otrx=1
|
1536
|
echo "<?xml version=\"1.0\"?>
|
1537
|
<OSGTestResult id=\"fetch_file_base\" version=\"4.3.1\">
|
1538
|
<operatingenvironment>
|
1539
|
<env name=\"cwd\">$PWD</env>
|
1540
|
</operatingenvironment>
|
1541
|
<test>
|
1542
|
<cmd>Unknown</cmd>
|
1543
|
<tStart>$(date +%Y-%m-%dT%H:%M:%S%:z)</tStart>
|
1544
|
<tEnd>$(date +%Y-%m-%dT%H:%M:%S%:z)</tEnd>
|
1545
|
</test>
|
1546
|
<result>
|
1547
|
<status>ERROR</status>
|
1548
|
<metric name=\"failure\" ts=\"$(date +%Y-%m-%dT%H:%M:%S%:z)\" uri=\"local\">Unknown</metric>
|
1549
|
<metric name=\"source_type\" ts=\"$(date +%Y-%m-%dT%H:%M:%S%:z)\" uri=\"local\">$ffb_id</metric>
|
1550
|
</result>
|
1551
|
<detail>
|
1552
|
An unknown error occured.
|
1553
|
</detail>
|
1554
|
</OSGTestResult>" > otrx_output.xml
|
1555
|
user_agent="glidein/$glidein_entry/$condorg_schedd/$condorg_cluster.$condorg_subcluster/$client_name"
|
1556
|
ffb_url="$ffb_repository/$ffb_real_fname"
|
1557
|
curl_version=$(curl --version | head -1 )
|
1558
|
wget_version=$(wget --version | head -1 )
|
1559
|
#old wget command:
|
1560
|
#wget --user-agent="wget/glidein/$glidein_entry/$condorg_schedd/$condorg_cluster.$condorg_subcluster/$client_name" "$ffb_nocache_str" -q -O "$ffb_tmp_outname" "$ffb_repository/$ffb_real_fname"
|
1561
|
#equivalent to:
|
1562
|
#wget ${ffb_url} --user-agent=${user_agent} -q -O "${ffb_tmp_outname}" "${ffb_nocache_str}"
|
1563
|
#with env http_proxy=$proxy_url set if proxy_url != "None"
|
1564
|
#
|
1565
|
#construct curl equivalent so we can try either
|
1566
|
|
1567
|
wget_args=("${ffb_url}" "--user-agent" "wget/${user_agent}" "--quiet" "--output-document" "${ffb_tmp_outname}" )
|
1568
|
curl_args=("${ffb_url}" "--user-agent" "curl/${user_agent}" "--silent" "--show-error" "--output" "${ffb_tmp_outname}")
|
1569
|
|
1570
|
if [ "$ffb_file_type" = "nocache" ]; then
|
1571
|
if [ "$curl_version" != "" ]; then
|
1572
|
curl_args+=("--header")
|
1573
|
curl_args+=("'Cache-Control: no-cache'")
|
1574
|
fi
|
1575
|
if [ "$wget_version" != "" ]; then
|
1576
|
if wget --help | grep -q "\-\-no-cache "; then
|
1577
|
wget_args+=("--no-cache")
|
1578
|
elif wget --help |grep -q "\-\-cache="; then
|
1579
|
wget_args+=("--cache=off")
|
1580
|
else
|
1581
|
warn "wget $wget_version cannot disable caching"
|
1582
|
fi
|
1583
|
fi
|
1584
|
fi
|
1585
|
|
1586
|
if [ "$proxy_url" != "None" ];then
|
1587
|
if [ "$curl_version" != "" ]; then
|
1588
|
curl_args+=("--proxy")
|
1589
|
curl_args+=("$proxy_url")
|
1590
|
fi
|
1591
|
if [ "$wget_version" != "" ]; then
|
1592
|
#these two arguments have to be last as coded, put any future
|
1593
|
#wget args earlier in wget_args array
|
1594
|
wget_args+=("--proxy")
|
1595
|
wget_args+=("$proxy_url")
|
1596
|
fi
|
1597
|
fi
|
1598
|
|
1599
|
fetch_completed=1
|
1600
|
if [ $fetch_completed -ne 0 ] && [ "$wget_version" != "" ]; then
|
1601
|
perform_wget "${wget_args[@]}"
|
1602
|
fetch_completed=$?
|
1603
|
fi
|
1604
|
if [ $fetch_completed -ne 0 ] && [ "$curl_version" != "" ]; then
|
1605
|
perform_curl "${curl_args[@]}"
|
1606
|
fetch_completed=$?
|
1607
|
fi
|
1608
|
|
1609
|
if [ $fetch_completed -ne 0 ]; then
|
1610
|
return $fetch_completed
|
1611
|
fi
|
1612
|
# check signature
|
1613
|
check_file_signature "$ffb_id" "$ffb_real_fname"
|
1614
|
if [ $? -ne 0 ]; then
|
1615
|
# error already displayed inside the function
|
1616
|
return 1
|
1617
|
fi
|
1618
|
|
1619
|
# rename it to the correct final name, if needed
|
1620
|
if [ "$ffb_tmp_outname" != "$ffb_outname" ]; then
|
1621
|
mv "$ffb_tmp_outname" "$ffb_outname"
|
1622
|
if [ $? -ne 0 ]; then
|
1623
|
warn "Failed to rename $ffb_tmp_outname into $ffb_outname"
|
1624
|
return 1
|
1625
|
fi
|
1626
|
fi
|
1627
|
|
1628
|
# if executable, execute
|
1629
|
if [ "$ffb_file_type" = "exec" ]; then
|
1630
|
chmod u+x "$ffb_outname"
|
1631
|
if [ $? -ne 0 ]; then
|
1632
|
warn "Error making '$ffb_outname' executable" 1>&2
|
1633
|
return 1
|
1634
|
fi
|
1635
|
if [ "$ffb_id" = "main" -a "$ffb_target_fname" = "$last_script" ]; then # last_script global for simplicity
|
1636
|
echo "Skipping last script $last_script" 1>&2
|
1637
|
else
|
1638
|
echo "Executing $ffb_outname"
|
1639
|
# have to do it here, as this will be run before any other script
|
1640
|
chmod u+rx $main_dir/error_augment.sh
|
1641
|
|
1642
|
# the XML file will be overwritten now, and hopefully not an error situation
|
1643
|
have_dummy_otrx=0
|
1644
|
$main_dir/error_augment.sh -init
|
1645
|
START=$(date +%s)
|
1646
|
"$ffb_outname" glidein_config "$ffb_id"
|
1647
|
ret=$?
|
1648
|
END=$(date +%s)
|
1649
|
$main_dir/error_augment.sh -process $ret "$ffb_id/$ffb_target_fname" "$PWD" "$ffb_outname glidein_config" "$START" "$END" #generating test result document
|
1650
|
$main_dir/error_augment.sh -concat
|
1651
|
if [ $ret -ne 0 ]; then
|
1652
|
echo "=== Validation error in $ffb_outname ===" 1>&2
|
1653
|
warn "Error running '$ffb_outname'" 1>&2
|
1654
|
cat otrx_output.xml | awk 'BEGIN{fr=0;}/<[/]detail>/{fr=0;}{if (fr==1) print $0}/<detail>/{fr=1;}' 1>&2
|
1655
|
return 1
|
1656
|
else
|
1657
|
# If ran successfully and periodic, schedule to execute with schedd_cron
|
1658
|
echo "=== validation OK in $ffb_outname ($ffb_period) ===" 1>&2
|
1659
|
if [ $ffb_period -gt 0 ]; then
|
1660
|
add_periodic_script "$main_dir/script_wrapper.sh" $ffb_period "$work_dir" "$ffb_outname" glidein_config "$ffb_id" "$ffb_cc_prefix"
|
1661
|
fi
|
1662
|
fi
|
1663
|
fi
|
1664
|
elif [ "$ffb_file_type" = "wrapper" ]; then
|
1665
|
echo "$ffb_outname" >> "$wrapper_list"
|
1666
|
elif [ "$ffb_file_type" = "untar" ]; then
|
1667
|
ffb_short_untar_dir=`get_untar_subdir "$ffb_id" "$ffb_target_fname"`
|
1668
|
ffb_untar_dir="${ffb_work_dir}/${ffb_short_untar_dir}"
|
1669
|
START=`date +%s`
|
1670
|
(mkdir "$ffb_untar_dir" && cd "$ffb_untar_dir" && tar -xmzf "$ffb_outname") 1>&2
|
1671
|
ret=$?
|
1672
|
if [ $ret -ne 0 ]; then
|
1673
|
$main_dir/error_augment.sh -init
|
1674
|
$main_dir/error_gen.sh -error "tar" "Corruption" "Error untarring '$ffb_outname'" "file" "$ffb_outname" "source_type" "$cfs_id"
|
1675
|
$main_dir/error_augment.sh -process $cfs_rc "tar" "$PWD" "mkdir $ffb_untar_dir && cd $ffb_untar_dir && tar -xmzf $ffb_outname" "$START" "`date +%s`"
|
1676
|
$main_dir/error_augment.sh -concat
|
1677
|
warn "Error untarring '$ffb_outname'" 1>&2
|
1678
|
return 1
|
1679
|
fi
|
1680
|
fi
|
1681
|
|
1682
|
if [ "$ffb_config_out" != "FALSE" ]; then
|
1683
|
ffb_prefix=`get_prefix $ffb_id`
|
1684
|
if [ "$ffb_file_type" = "untar" ]; then
|
1685
|
# when untaring the original file is less interesting than the untar dir
|
1686
|
add_config_line "${ffb_prefix}${ffb_config_out}" "$ffb_untar_dir"
|
1687
|
if [ $? -ne 0 ]; then
|
1688
|
glidein_exit 1
|
1689
|
fi
|
1690
|
else
|
1691
|
add_config_line "${ffb_prefix}${ffb_config_out}" "$ffb_outname"
|
1692
|
if [ $? -ne 0 ]; then
|
1693
|
glidein_exit 1
|
1694
|
fi
|
1695
|
fi
|
1696
|
fi
|
1697
|
|
1698
|
if [ "$have_dummy_otrx" -eq 1 ]; then
|
1699
|
# noone should really look at this file, but just to avoid confusion
|
1700
|
echo "<?xml version=\"1.0\"?>
|
1701
|
<OSGTestResult id=\"fetch_file_base\" version=\"4.3.1\">
|
1702
|
<operatingenvironment>
|
1703
|
<env name=\"cwd\">$PWD</env>
|
1704
|
</operatingenvironment>
|
1705
|
<test>
|
1706
|
<cmd>Unknown</cmd>
|
1707
|
<tStart>`date +%Y-%m-%dT%H:%M:%S%:z`</tStart>
|
1708
|
<tEnd>`date +%Y-%m-%dT%H:%M:%S%:z`</tEnd>
|
1709
|
</test>
|
1710
|
<result>
|
1711
|
<status>OK</status>
|
1712
|
</result>
|
1713
|
</OSGTestResult>" > otrx_output.xml
|
1714
|
fi
|
1715
|
|
1716
|
return 0
|
1717
|
}
|
1718
|
|
1719
|
echo "Downloading files from Factory and Frontend"

#####################################
# Fetch descript and signature files

# Signature checking has to stay disabled until the signature files
# themselves have been downloaded and verified (check_signature is global)
check_signature=0

for gs_id in main entry client client_group; do
    # skip client entries when no client repository was configured
    if [ -z "$client_repository_url" ] && [ "$gs_id" = "client" ]; then
        continue
    fi
    # skip client group entries when no client group repository was configured
    if [ -z "$client_repository_group_url" ] && [ "$gs_id" = "client_group" ]; then
        continue
    fi

    gs_id_work_dir=$(get_work_dir $gs_id)

    # Fetch the description file and extract the signature file name from it
    gs_id_descript_file=$(get_descript_file $gs_id)
    fetch_file_regular "$gs_id" "$gs_id_descript_file"
    if ! signature_file_line=$(grep "^signature " "${gs_id_work_dir}/${gs_id_descript_file}"); then
        warn "No signature in description file ${gs_id_work_dir}/${gs_id_descript_file}." 1>&2
        glidein_exit 1
    fi
    signature_file=$(echo "$signature_file_line" | cut -s -f 2-)

    # Fetch the signature file and verify it against the expected sha1 digest
    gs_id_signature=$(get_signature $gs_id)
    fetch_file_regular "$gs_id" "$signature_file"
    echo "$gs_id_signature ${signature_file}">"${gs_id_work_dir}/signature.sha1.test"
    if ! (cd "${gs_id_work_dir}" && sha1sum -c signature.sha1.test) 1>&2; then
        warn "Corrupted signature file '${gs_id_work_dir}/${signature_file}'." 1>&2
        glidein_exit 1
    fi
    # for simplicity use a fixed name for the signature file
    mv "${gs_id_work_dir}/${signature_file}" "${gs_id_work_dir}/signature.sha1"
done
|
1767
|
|
1768
|
# re-enable signature checking for everything that follows
check_signature=1

# Now verify the description files were not tampered with.
# Doing it this late is fine: nobody could have forged the signature
# file itself, even if its name was faked inside the description file.
for gs_id in main entry client client_group; do
    # no client files when no client_repository was given
    if [ -z "$client_repository_url" ] && [ "$gs_id" = "client" ]; then
        continue
    fi
    # no client group files when no client_repository_group was given
    if [ -z "$client_repository_group_url" ] && [ "$gs_id" = "client_group" ]; then
        continue
    fi

    gs_id_descript_file=$(get_descript_file $gs_id)
    if ! check_file_signature "$gs_id" "$gs_id_descript_file"; then
        gs_id_work_dir=$(get_work_dir $gs_id)
        warn "Corrupted description file ${gs_id_work_dir}/${gs_id_descript_file}." 1>&2
        glidein_exit 1
    fi
done
|
1798
|
|
1799
|
###################################################
# get last_script, as it is used by the fetch_file logic below
gs_id_work_dir=$(get_work_dir main)
gs_id_descript_file=$(get_descript_file main)
# second (tab-separated) field of the "last_script" line of the description file
last_script=$(grep "^last_script " "${gs_id_work_dir}/$gs_id_descript_file" | cut -s -f 2-)
if [ -z "$last_script" ]; then
    warn "last_script not in description file ${gs_id_work_dir}/$gs_id_descript_file." 1>&2
    glidein_exit 1
fi
|
1808
|
|
1809
|
|
1810
|
##############################
# Fetch all the other files.
# Each entry pairs a repository id with the description-file attribute
# naming the list of files to download from that repository.
for gs_file_id in "main file_list" "client preentry_file_list" "client_group preentry_file_list" "client aftergroup_preentry_file_list" "entry file_list" "client file_list" "client_group file_list" "client aftergroup_file_list" "main after_file_list"
do
    # first token is the repository id, second the list attribute name
    # (parameter expansion instead of spawning echo|awk per iteration)
    gs_id=${gs_file_id%% *}

    if [ -z "$client_repository_url" ] && [ "$gs_id" = "client" ]; then
        # no client files when no client_repository
        continue
    fi
    if [ -z "$client_repository_group_url" ] && [ "$gs_id" = "client_group" ]; then
        # no client group files when no client_repository_group
        continue
    fi

    gs_file_list_id=${gs_file_id#* }

    gs_id_work_dir=$(get_work_dir $gs_id)
    gs_id_descript_file=$(get_descript_file $gs_id)

    # extract the name of the list file from the description file
    if ! gs_file_list_line=$(grep "^$gs_file_list_id " "${gs_id_work_dir}/$gs_id_descript_file"); then
        if [ -z "$client_repository_group_url" ] && [ "${gs_file_list_id:0:11}" = "aftergroup_" ]; then
            # aftergroup_... lists are optional when no client_repository_group
            continue
        fi
        warn "No '$gs_file_list_id' in description file ${gs_id_work_dir}/${gs_id_descript_file}." 1>&2
        glidein_exit 1
    fi
    # space+tab separated file with multiple elements (was: awk '{print $2}', not safe for spaces in file name)
    gs_file_list=$(echo "$gs_file_list_line" | cut -s -f 2 | sed -e 's/[[:space:]]*$//')

    # fetch the list file itself
    fetch_file_regular "$gs_id" "$gs_file_list"

    # Fetch every (non-comment) file named in the list.
    # -r: keep backslashes literal (plain `read` would mangle them).
    # $file is intentionally unquoted: each list line holds several
    # whitespace-separated fields that fetch_file takes as separate args.
    while read -r file
    do
        if [ "${file:0:1}" != "#" ]; then
            fetch_file "$gs_id" $file
        fi
    done < "${gs_id_work_dir}/${gs_file_list}"

done
|
1861
|
|
1862
|
###############################
# Start the glidein main script
add_config_line "GLIDEIN_INITIALIZED" "1"

echo "# --- Last Script values ---" >> glidein_config
last_startup_time=$(date +%s)
validation_time=$((last_startup_time - startup_time))
echo "=== Last script starting `date` ($last_startup_time) after validating for $validation_time ==="
echo
ON_DIE=0
trap 'ignore_signal' SIGHUP
trap_with_arg 'on_die' SIGTERM SIGINT SIGQUIT
gs_id_work_dir=$(get_work_dir main)
$main_dir/error_augment.sh -init
# run in the background and wait, so trapped signals can be forwarded to it
"${gs_id_work_dir}/$last_script" glidein_config &
wait $!
ret=$?
if [ $ON_DIE -eq 1 ]; then
    # a forwarded kill signal is a regular shutdown, not a failure
    ret=0
fi
last_startup_end_time=$(date +%s)
$main_dir/error_augment.sh -process $ret "$last_script" "$PWD" "${gs_id_work_dir}/$last_script glidein_config" "$last_startup_time" "$last_startup_end_time"
$main_dir/error_augment.sh -concat

last_script_time=$((last_startup_end_time - last_startup_time))
echo "=== Last script ended `date` ($last_startup_end_time) with code $ret after $last_script_time ==="
echo
if [ $ret -ne 0 ]; then
    warn "Error running '$last_script'" 1>&2
fi

# Things like periodic scripts might put messages here if they want
# them printed in the logfile
echo "=== Exit messages left by periodic scripts ===" 1>&2
cat exit_message 1>&2
echo 1>&2


#########################
# clean up after I finish
glidein_exit $ret
|