scripts: purge use of test '-a' and '-o' ops and clean up
[girocco.git] / jobd / update.sh
blob b3e8cfae85948f0f2d0c7ce45256d9a11e649759
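The subject line refers to replacing test(1)'s non-portable '-a'/'-o' operators with separate bracket tests joined by the shell's own '&&'/'||'; for example (the old form here is illustrative, not quoted from the diff):

    [ "$_err1" = 0 -a "$_err2" = 0 ]      # old '-a' form, now purged
    [ "$_err1" = 0 ] && [ "$_err2" = 0 ]  # form used below, e.g. in git_darcs_fetch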
#!/bin/sh

. @basedir@/shlib.sh

set -e

if [ $# -ne 1 ]; then
	echo "Usage: update.sh projname" >&2
	exit 1
fi

# date -R is linux-only, POSIX equivalent is '+%a, %d %b %Y %T %z'
datefmt='+%a, %d %b %Y %T %z'

# freshen_loose_objects full-sha ...
# if a given sha names a loose object, set its modification time to now,
# otherwise silently do nothing with no error.  To facilitate conversion
# of mirror projects to push projects we also add group write permission.
freshen_loose_objects() {
	_list=
	for _sha; do
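		# loose objects live at objects/<first two hex digits>/<rest of the sha>,
		# so split each sha into its fan-out directory and file name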
		_fn="${_sha#??}"
		_shard="${_sha%$_fn}"
		_list="$_list objects/$_shard/$_fn"
	done
	if [ -n "$_list" ]; then
		chmod ug+w $_list 2>/dev/null || :
		touch -c $_list 2>/dev/null || :
	fi
}

# darcs fast-export | git fast-import with error handling
git_darcs_fetch() (
	set_utf8_locale
	_err1=
	_err2=
	exec 3>&1
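	# Each side of the pipe below writes its exit status to fd 3, which is
	# redirected into the here-document command substitution, so the two status
	# lines are read back into _err1 and _err2 while the pipeline's normal
	# output still goes to the original stdout.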
	{ read -r _err1 || :; read -r _err2 || :; } <<-EOT
	$(
		exec 4>&3 3>&1 1>&4 4>&-
		{
			_e1=0
			"$cfg_basedir"/bin/darcs-fast-export \
				--export-marks="$(pwd)/dfe-marks" \
				--import-marks="$(pwd)/dfe-marks" "$1" 3>&- || _e1=$?
			echo $_e1 >&3
		} |
		{
			_e2=0
			git fast-import \
				--export-marks="$(pwd)/gfi-marks" \
				--export-pack-edges="$(pwd)/gfi-packs" \
				--import-marks="$(pwd)/gfi-marks" \
				--force 3>&- || _e2=$?
			echo $_e2 >&3
		}
	)
EOT
	exec 3>&-
	[ "$_err1" = 0 ] && [ "$_err2" = 0 ]
	return $?
)

# bzr fast-export | git fast-import with error handling
git_bzr_fetch() (
	set_utf8_locale
	BZR_LOG=/dev/null
	export BZR_LOG
	_err1=
	_err2=
	exec 3>&1
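	# Same fd-3 exit-status capture through the here-document as in git_darcs_fetch above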
	{ read -r _err1 || :; read -r _err2 || :; } <<-EOT
	$(
		exec 4>&3 3>&1 1>&4 4>&-
		{
			_e1=0
			bzr fast-export --plain \
				--export-marks="$(pwd)/bfe-marks" \
				--import-marks="$(pwd)/bfe-marks" "$1" 3>&- || _e1=$?
			echo $_e1 >&3
		} |
		{
			_e2=0
			git fast-import \
				--export-marks="$(pwd)/gfi-marks" \
				--export-pack-edges="$(pwd)/gfi-packs" \
				--import-marks="$(pwd)/gfi-marks" \
				--force 3>&- || _e2=$?
			echo $_e2 >&3
		}
	)
EOT
	exec 3>&-
	[ "$_err1" = 0 ] && [ "$_err2" = 0 ]
	return $?
)

[ -n "$cfg_mirror" ] || { echo "Mirroring is disabled" >&2; exit 0; }

umask 002
[ "$cfg_permission_control" != "Hooks" ] || umask 000
clean_git_env

proj="${1%.git}"
cd "$cfg_reporoot/$proj.git"

trap 'if [ $? != 0 ]; then echo "update failed dir: $PWD" >&2; fi; rm -f "$bang_log"' EXIT
trap 'exit 130' INT
trap 'exit 143' TERM

if check_interval lastrefresh $cfg_min_mirror_interval; then
	progress "= [$proj] update skip (last at $(config_get lastrefresh))"
	exit 0
fi
if [ -e .nofetch ]; then
	progress "x [$proj] update disabled (.nofetch exists)"
	exit 0
fi
progress "+ [$proj] update ($(date))"

# Any pre-existing FETCH_HEAD from a previous fetch, failed or not, is garbage
rm -f FETCH_HEAD

# A previous failed update attempt can leave a huge tmp_pack_XXXXXX file behind.
# Since no pushes are allowed to mirrors, we know that any such files that exist
# at this point in time are garbage and can be safely deleted; we do not even
# need to check how old they are.  A tmp_idx_XXXXXX file is also created during
# the later stages of the fetch process, so we kill any of those as well.
find objects/pack -maxdepth 1 -type f -name "tmp_pack_?*" -print0 | xargs -0 rm -f
find objects/pack -maxdepth 1 -type f -name "tmp_idx_?*" -print0 | xargs -0 rm -f

# Make sure we have a reflogs subdirectory and abort the update if not
# This should not count as a normal "bang" failure if unsuccessful
[ -d reflogs ] || mkdir -p reflogs >/dev/null 2>&1 || :
[ -d reflogs ]

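# bang_setup and the bang/bang_eval wrappers come from shlib.sh; they run the
# given commands while recording any failure so it can be reported later (see
# the is_banged / was_banged_message_sent handling at the end of this script).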
bang_setup
bang_action="update"
bang_trap() {
	if [ -n "$1" ]; then
		# Throttle retries
		# Since gitweb shows the .last_refresh date, it's safe to update
		# gitweb.lastrefresh to throttle the updates w/o corrupting the
		# last refresh date display on the gitweb summary page
		# It's therefore important that we do NOT touch .last_refresh here
		config_set lastrefresh "$(date "$datefmt")"
	fi
}

bang echo "Project: $proj"
bang echo ""
mail="$(config_get owner)" || :
url="$(config_get baseurl)" || :
case "$url" in *" "*|*"	"*|"")
	bang_eval 'echo "Bad mirror URL (\"$url\")"; ! :'
	exit 1
esac
bang echo "Mirroring from URL \"$url\""
bang echo ""
statusok="$(git config --bool gitweb.statusupdates 2>/dev/null || echo true)"
mailaddrs=
[ "$statusok" = "false" ] || [ -z "$mail" ] || mailaddrs="$mail"
[ -z "$cfg_admincc" ] || [ "$cfg_admincc" = "0" ] || [ -z "$cfg_admin" ] ||
if [ -z "$mailaddrs" ]; then mailaddrs="$cfg_admin"; else mailaddrs="$mailaddrs,$cfg_admin"; fi

bang_eval "git for-each-ref --format '%(refname) %(objectname)' >.refs-temp"
bang_eval "LC_ALL=C sort -b -k1,1 <.refs-temp >.refs-before"
! [ -e .delaygc ] || >.allowgc || :
svnpackcreated=

# Make sure we don't get any unwanted loose objects
# Starting with Git v2.10.0 fast-import can generate loose objects unless we
# tweak its configuration to prevent that
git_add_config 'fetch.unpackLimit=1'
# Note the git config documentation is wrong
# transfer.unpackLimit, if set, overrides fetch.unpackLimit
git_add_config 'transfer.unpackLimit=1'
# But not the Git v2.10.0 and later fastimport.unpackLimit which improperly uses <= instead of <
git_add_config 'fastimport.unpackLimit=0'

case "$url" in
svn://* | svn+http://* | svn+https://* | svn+file://* | svn+ssh://*)
	[ -n "$cfg_mirror_svn" ] || { echo "Mirroring svn is disabled" >&2; exit 0; }
	# Use an 'anonsvn' username as is commonly used for anonymous svn
	# Use an 'anonsvn' password as is commonly used for anonymous svn
	GIT_ASKPASS_PASSWORD=anonsvn
	export GIT_ASKPASS_PASSWORD
	# Update the git svn url to match baseurl but be cognizant of any
	# needed prefix changes.  See the comments in taskd/clone.sh about
	# why we need to put up with a prefix in the first place.
	case "$url" in svn+ssh://*) svnurl="$url";; *) svnurl="${url#svn+}";; esac
	svnurl="${svnurl%/}"
	svnurlold="$(config_get svnurl)" || :
	if [ "$svnurl" != "$svnurlold" ]; then
		# We better already have an svn-remote.svn.fetch setting
		bang test -n "$(git config --get-all svn-remote.svn.fetch)" || :
		# the only way to truly know what the proper prefix is
		# is to attempt a fresh git-svn init -s on the new url
		rm -rf svn-new-url || :
		# We require svn info to succeed on the URL otherwise it's
		# simply not a valid URL and without using -s on the init it
		# will not otherwise be tested until the fetch
		bang eval 'svn --non-interactive --username anonsvn --password anonsvn info "$svnurl" >/dev/null'
		bang mkdir svn-new-url
		GIT_DIR=svn-new-url bang git init --bare --quiet
		# We initially use -s for the init which will possibly shorten
		# the URL.  However, the shortening can fail if a password is
		# not required for the longer version but is for the shorter,
		# so try again without -s if the -s version fails.
		cmdstr='git svn init --username=anonsvn --prefix "" -s "$svnurl" </dev/null >/dev/null 2>&1 || '
		cmdstr="$cmdstr"'git svn init --username=anonsvn --prefix "" "$svnurl" </dev/null >/dev/null 2>&1'
		GIT_DIR=svn-new-url bang eval "$cmdstr"
		gitsvnurl="$(GIT_DIR=svn-new-url git config --get svn-remote.svn.url)" || :
		gitsvnfetch="$(GIT_DIR=svn-new-url git config --get svn-remote.svn.fetch)" || :
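		# The new fetch refspec looks like "<prefix><name>:refs/...", so take
		# the part before the ':' and strip its last path component to recover
		# just the prefix chosen by the fresh init.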
		gitsvnprefixnew="${gitsvnfetch%%:*}"
		gitsvnsuffixnew="${gitsvnprefixnew##*/}"
		gitsvnprefixnew="${gitsvnprefixnew%$gitsvnsuffixnew}"
		rm -rf svn-new-url || :
		# Using GIT_DIR= with bang leaves it set to svn-new-url, so reset it to .
		GIT_DIR=.
		if [ "$gitsvnurl" != "$(git config --get svn-remote.svn.url || :)" ]; then
			# The url has been changed.
			# We must update the url and replace the prefix on all config items
			gitsvnfetch="$(git config --get-all svn-remote.svn.fetch | head -1)" || :
			gitsvnprefixold="${gitsvnfetch%%:*}"
			gitsvnsuffixold="${gitsvnprefixold##*/}"
			gitsvnprefixold="${gitsvnprefixold%$gitsvnsuffixold}"
			git config --remove-section 'svn-remote.svnnew' 2>/dev/null || :
			git config 'svn-remote.svnnew.url' "$gitsvnurl"
			git config --get-regexp '^svn-remote\.svn\.' |
			while read -r sname sval; do
				case "$sname" in
				svn-remote.svn.fetch|svn-remote.svn.branches|svn-remote.svn.tags)
					sname="${sname#svn-remote.svn.}"
					sval="${sval#$gitsvnprefixold}"
					bang git config --add "svn-remote.svnnew.$sname" "${gitsvnprefixnew}$sval"
				esac
			done
			test $? -eq 0
			bang git config -f svn/.metadata svn-remote.svn.reposRoot "$gitsvnurl"
			bang git config --remove-section svn-remote.svn
			bang git config --rename-section svn-remote.svnnew svn-remote.svn
		fi
		bang config_set svnurl "$svnurl"
	fi
	# remove any stale *.lock files greater than 1 hour old in case
	# git-svn was killed on the last update because it took too long
	find svn -type f -name '*.lock' -mmin +60 -print0 2>/dev/null | xargs -0 rm -f
	# remember the starting time so we can easily combine fetched loose objects
	# we sleep for 1 second after creating .svnpack to make sure all objects are newer
	if ! [ -e .svnpack ]; then
		svnpackcreated=1
		rm -f .svnpack
		>.svnpack
		sleep 1
	fi
	GIT_DIR=. bang git svn fetch --log-window-size=$var_log_window_size --username=anonsvn --quiet </dev/null
	# git svn does not preserve group permissions in the svn subdirectory
	chmod -R ug+rw,o+r svn
	# git svn also leaves behind ref turds that end with @nnn
	# We get rid of them now
	git for-each-ref --format='%(refname)' |
	LC_ALL=C sed '/^..*@[1-9][0-9]*$/!d; s/^/delete /' |
	git_updateref_stdin
	unset GIT_ASKPASS_PASSWORD
	;;
darcs://*)
	[ -n "$cfg_mirror_darcs" ] || { echo "Mirroring darcs is disabled" >&2; exit 0; }
	httpurl="http://${url#darcs://}"
	# remove any stale lock files greater than 1 hour old in case
	# darcs_fast_export was killed on the last update because it took too long
	find *.darcs -maxdepth 2 -type f -name 'lock' -mmin +60 -print0 2>/dev/null | xargs -0 rm -f
	bang git_darcs_fetch "$httpurl"
	;;
bzr://*)
	[ -n "$cfg_mirror_bzr" ] || { echo "Mirroring bzr is disabled" >&2; exit 0; }
	bzrurl="${url#bzr://}"
	bang git_bzr_fetch "$bzrurl"
	;;
hg+http://* | hg+https://* | hg+file://* | hg+ssh://*)
	[ -n "$cfg_mirror_hg" ] || { echo "Mirroring hg is disabled" >&2; exit 0; }
	# We just remove hg+ here, so hg+http://... becomes http://...
	hgurl="${url#hg+}"
	# Fetch any new updates
	bang hg -R "$(pwd)/repo.hg" pull
	# Do the fast-export | fast-import
	bang git_hg_fetch
	;;
*)
	[ "$url" = "$(git config --get remote.origin.url || :)" ] || bang config_set_raw remote.origin.url "$url"
	pruneopt=--prune
	[ "$(git config --bool fetch.prune 2>/dev/null || :)" != "false" ] || pruneopt=
	if ! is_gfi_mirror_url "$url"; then
		lastwasclean=
		[ "$(git config --bool girocco.lastupdateclean 2>/dev/null || :)" != "true" ] || lastwasclean=1
		nextisclean=
		[ "$(git config --bool girocco.cleanmirror 2>/dev/null || :)" != "true" ] || nextisclean=1
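		# a "clean" mirror fetches only the heads, tags, notes and top-bases
		# namespaces; an "unclean" mirror fetches everything under refs/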
		if [ "$nextisclean" != "$lastwasclean" ]; then
			if [ -n "$nextisclean" ]; then
				git config --replace-all remote.origin.fetch "+refs/heads/*:refs/heads/*"
				git config --add remote.origin.fetch "+refs/tags/*:refs/tags/*"
				git config --add remote.origin.fetch "+refs/notes/*:refs/notes/*"
				git config --add remote.origin.fetch "+refs/top-bases/*:refs/top-bases/*"
			else
				git config --replace-all remote.origin.fetch "+refs/*:refs/*"
			fi
		fi
	fi
	# remember the starting time so we can easily detect new packs for fast-import mirrors
	# we sleep for 1 second after creating .gfipack to make sure all packs are newer
	if is_gfi_mirror_url "$url" && [ ! -e .gfipack ]; then
		rm -f .gfipack
		>.gfipack
		sleep 1
	fi
	GIT_SSL_NO_VERIFY=1 bang git remote update $pruneopt
	if ! is_gfi_mirror_url "$url" && [ "$nextisclean" != "$lastwasclean" ]; then
		if [ -n "$nextisclean" ]; then
			# We must manually purge the unclean refs now as even prune won't do it
			git for-each-ref --format='%(refname)' |
			LC_ALL=C sed \
				-e '/^refs\/heads\//d' \
				-e '/^refs\/tags\//d' \
				-e '/^refs\/notes\//d' \
				-e '/^refs\/top-bases\//d' \
				-e 's/^/delete /' |
			git_updateref_stdin
		fi
		git config --bool girocco.lastupdateclean ${nextisclean:-0}
	fi
	if [ -e .gfipack ] && is_gfi_mirror_url "$url"; then
		find objects/pack -type f -newer .gfipack -name "pack-$octet20.pack" -print >>gfi-packs
		rm -f .gfipack
	fi
	;;
esac

# The objects subdirectories permissions must be updated now.
# In the case of a dumb http clone, the permissions will not be correct
# (missing group write) despite the core.sharedrepository=1 setting!
# The objects themselves seem to have the correct permissions.
# This problem appears to have been fixed in the most recent git versions.
perms=g+w
[ "$cfg_permission_control" != "Hooks" ] || perms=go+w
chmod $perms $(find objects -maxdepth 1 -type d) 2>/dev/null || :

bang git update-server-info

# We maintain the last refresh date in two places deliberately
# so that it's available as part of the config data and also
# as a standalone file timestamp that can be accessed without git.
bang config_set lastrefresh "$(date "$datefmt")"
{ >.last_refresh; } 2>/dev/null || :

# Check to see if any refs changed
bang_eval "git for-each-ref --format '%(refname) %(objectname)' >.refs-temp"
bang_eval "LC_ALL=C sort -b -k1,1 <.refs-temp >.refs-after"
refschanged=
cmp -s .refs-before .refs-after || refschanged=1

# There's no way to get git svn to fetch packs, so we always need to run
# a mini-gc afterwards if svn actually fetched anything, but that's okay
# because it will be very quick
if [ -z "$refschanged" ] && [ -n "$svnpackcreated" ]; then
	# we created the .svnpack but didn't actually fetch anything
	# so remove it to avoid forcing a mini-gc if not necessary
	rm -f .svnpack
fi
if [ -e .svnpack ] && ! [ -e .needsgc ]; then
	>.needsgc
fi

# Force a mini-gc if $Girocco::Config::delay_gfi_redelta is false and there's
# at least one gfi pack present now
if [ -z "$cfg_delay_gfi_redelta" ] && ! [ -e .needsgc ] &&
   [ -f gfi-packs ] && [ -s gfi-packs ] && is_gfi_mirror_url "$url"; then
	>.needsgc
fi

# Activate a mini-gc if there are at least 20 packs present now
if ! [ -e .needsgc ]; then
	packs=
	{ packs="$(list_packs --quiet --count --exclude-no-idx objects/pack || :)" || :; } 2>/dev/null
	if [ -n "$packs" ] && [ "$packs" -ge 20 ]; then
		>.needsgc
	fi
fi

# Look at which refs changed and trigger ref-change for these
sockpath="$cfg_chroot/etc/taskd.socket"
if [ -n "$refschanged" ]; then
	bang config_set lastreceive "$(date '+%a, %d %b %Y %T %z')"
	# We always use UTC for the log timestamp so that chroot and non-chroot match up.
	# We don't have to worry about multiple log files since only one update runs
	lognamets="$(TZ=UTC date '+%Y%m%d_%H%M%S')"
	loghhmmss="${lognamets##*_}"
	logname="reflogs/${lognamets%%_*}"
	# We freshen the mod time to now on any old or new ref that is a loose object
	# For old refs we do it so we will be able to keep them around for 1 day
	# For new refs we do it in case we are about to run gc and the new ref
	# actually points to an oldish loose object that had been unreachable
	# We probably do not need to do it for new refs as Git tries to do that,
	# but since we're already doing it for old refs (which Git does not do),
	# it's almost no extra work for new refs, just in case.
	{
		echo "ref-changes %$proj% $proj"
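		# .refs-before and .refs-after hold "refname objectname" sorted by refname:
		#   join       -> refs present in both (sed then drops the unchanged ones)
		#   join -v 1  -> refs that were deleted
		#   join -v 2  -> refs that are new
		# Each loop logs the change to the reflog file on fd 3 and echoes it on
		# stdout for the ref-change report being collected into .refs-temp.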
		LC_ALL=C join .refs-before .refs-after |
		LC_ALL=C sed -e '/^[^ ][^ ]* \([^ ][^ ]*\) \1$/d' |
		while read ref old new; do
			echo "$loghhmmss $old $new $ref" >&3
			freshen_loose_objects "$old" "$new"
			echo "$old $new $ref"
		done
		LC_ALL=C join -v 1 .refs-before .refs-after |
		while read ref old; do
			echo "$loghhmmss $old 0000000000000000000000000000000000000000 $ref" >&3
			freshen_loose_objects "$old"
			echo "$old 0000000000000000000000000000000000000000 $ref"
		done
		LC_ALL=C join -v 2 .refs-before .refs-after |
		while read ref new; do
			echo "$loghhmmss 0000000000000000000000000000000000000000 $new $ref" >&3
			freshen_loose_objects "$new"
			echo "0000000000000000000000000000000000000000 $new $ref"
		done
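		# also emit every current branch tip with identical old/new values so
		# the report carries a full snapshot of refs/heads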
		git for-each-ref --format='%(objectname) %(objectname) %(refname)' refs/heads
		echo "done ref-changes %$proj% $proj"
	} >.refs-temp 3>>"$logname"
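	# hand the collected ref-change report to taskd via its listening socket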
	if [ -S "$sockpath" ]; then
		nc_openbsd -w 15 -U "$sockpath" <.refs-temp || :
	fi
	bang config_set lastchange "$(date '+%a, %d %b %Y %T %z')"
	bang_eval "git for-each-ref --sort=-committerdate --format='%(committerdate:iso8601)' \
		--count=1 refs/heads >info/lastactivity"
	! [ -d htmlcache ] || { >htmlcache/changed; } 2>/dev/null || :
	# While unlikely, it is conceivable that several ref updates have occurred that
	# did not actually create any packs.  In that case we could build up a large
	# number of log files so request gc if there are more than 50 of them now.
	# In the case of a mirror this is a practical impossibility but we check anyway.
	if ! [ -e .needsgc ]; then
		logfiles=
		{ logfiles="$(($(find reflogs -maxdepth 1 -type f -print | wc -l || :)+0))" || :; } 2>/dev/null
		if [ -n "$logfiles" ] && [ "$logfiles" -ge 50 ]; then
			>.needsgc
		fi
	fi
	rm -f .delaygc .allowgc
fi

# If the repository does not yet have a valid HEAD symref try to set one
# If an empty repository was cloned and then later becomes non-empty you just
# lose out on the fancy "symref=HEAD:" logic and get this version instead
check_and_set_head || :

rm -f .refs-before .refs-after .refs-temp FETCH_HEAD

if is_banged; then
	[ -z "$mailaddrs" ] || ! was_banged_message_sent ||
	{
		echo "$proj update succeeded - failure recovery"
		echo "this status message may be disabled on the project admin page"
	} | mailref "update@$cfg_gitweburl/$proj.git" -s "[$cfg_name] $proj update succeeded" "$mailaddrs" || :
	bang_reset
fi

progress "- [$proj] update ($(date))"