8 echo "Usage: update.sh projname" >&2
12 # date -R is linux-only, POSIX equivalent is '+%a, %d %b %Y %T %z'
13 datefmt
='+%a, %d %b %Y %T %z'
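# (this format produces timestamps like "Mon, 01 Jan 2024 12:00:00 +0000",
#  i.e. the same RFC 2822 style that "date -R" prints)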

git_fetch_q_progress() {
	PATH="$var_git_exec_path:$cfg_basedir/bin:$PATH" @basedir@/jobd/git-fetch-q-progress.sh "$@"

# freshen_loose_objects full-sha ...
# if "$n" is a loose object, set its modification time to now;
# otherwise silently do nothing with no error.  To facilitate conversion
# of mirror projects to push projects we also add group write permission.
freshen_loose_objects() {

		_list="$_list objects/$_shard/$_fn"

	if [ -n "$_list" ]; then
		chmod ug+w $_list 2>/dev/null || :
		touch -c $_list 2>/dev/null || :
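	# (illustration: a full sha such as "d1e2f3..." maps to the loose object
	#  path objects/d1/e2f3..., i.e. the first two hex digits form the shard
	#  directory and the remaining digits form the file name gathered in $_list)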

# darcs fast-export | git fast-import with error handling

	{ read -r _err1 || :; read -r _err2 || :; } <<-EOT

		exec 4>&3 3>&1 1>&4 4>&-

		"$cfg_basedir"/bin/darcs-fast-export \
			--export-marks="$(pwd)/dfe-marks" \
			--import-marks="$(pwd)/dfe-marks" "$1" 3>&- || _e1=$?

			--export-marks="$(pwd)/gfi-marks" \
			--export-pack-edges="$(pwd)/gfi-packs" \
			--import-marks="$(pwd)/gfi-marks" \
			--force 3>&- || _e2=$?

	[ "$_err1" = 0 ] && [ "$_err2" = 0 ]

# bzr fast-export | git fast-import with error handling

	{ read -r _err1 || :; read -r _err2 || :; } <<-EOT

		exec 4>&3 3>&1 1>&4 4>&-

		bzr fast-export --plain \
			--export-marks="$(pwd)/bfe-marks" \
			--import-marks="$(pwd)/bfe-marks" "$1" 3>&- || _e1=$?

			--export-marks="$(pwd)/gfi-marks" \
			--export-pack-edges="$(pwd)/gfi-packs" \
			--import-marks="$(pwd)/gfi-marks" \
			--force 3>&- || _e2=$?

	[ "$_err1" = 0 ] && [ "$_err2" = 0 ]
102 [ -n "$cfg_mirror" ] ||
{ echo "Mirroring is disabled" >&2; exit 0; }
105 [ "$cfg_permission_control" != "Hooks" ] ||
umask 000

cd "$cfg_reporoot/$proj.git"

# Activate a mini-gc if needed
# We do this here as well as after a successful fetch so that if we're stuck
# in a fetch loop where fetches are succeeding in fetching new packs but the
# ref update is failing for some reason (perhaps a non-commit under refs/heads)
# and a previous invocation therefore had a "bang" exit, then we will still
# get the .needsgc flag set in a timely fashion to avoid excess pack buildup.
check_and_set_needsgc

trap 'if [ $? != 0 ]; then echo "update failed dir: $PWD" >&2; fi; rm -f "$bang_log"' EXIT

if [ "${force_update:-0}" = "0" ] && check_interval lastrefresh $cfg_min_mirror_interval; then
	progress "= [$proj] update skip (last at $(config_get lastrefresh))"

if [ -e .nofetch ]; then
	progress "x [$proj] update disabled (.nofetch exists)"

progress "+ [$proj] update ($(date))"

# Any pre-existing FETCH_HEAD from a previous fetch, failed or not, is garbage

# A previous failed update attempt can leave a huge tmp_pack_XXXXXX file behind.
# Since no pushes are allowed to mirrors, we know that any such files that exist
# at this point in time are garbage and can be safely deleted; we do not even
# need to check how old they are.  A tmp_idx_XXXXXX file is also created during
# the later stages of the fetch process, so we kill any of those as well.
find -L objects/pack -maxdepth 1 -type f -name "tmp_pack_?*" -exec rm -f '{}' + || :
find -L objects/pack -maxdepth 1 -type f -name "tmp_idx_?*" -exec rm -f '{}' + || :

# Make sure we have a reflogs subdirectory and abort the update if not
# This should not count as a normal "bang" failure if unsuccessful
[ -d reflogs ] || mkdir -p reflogs >/dev/null 2>&1 || :

do_check_after_refs=1

# Since gitweb shows the .last_refresh date, it's safe to update
# gitweb.lastrefresh to throttle the updates w/o corrupting the
# last refresh date display on the gitweb summary page
# It's therefore important that we do NOT touch .last_refresh here
config_set lastrefresh "$(date "$datefmt")"

bang echo "Project: $proj"
bang echo " Date: $(TZ=UTC date '+%Y-%m-%d %T UTC')"

mail="$(config_get owner)" || :
url="$(config_get baseurl)" || :
case "$url" in *" "*|*"	"*|"")
	bang_eval 'echo "Bad mirror URL (\"$url\")"; ! :'

bang echo "Mirroring from URL \"$url\""

statusok="$(git config --bool gitweb.statusupdates 2>/dev/null || echo true)"

[ "$statusok" = "false" ] || [ -z "$mail" ] || mailaddrs="$mail"
[ -z "$cfg_admincc" ] || [ "$cfg_admincc" = "0" ] || [ -z "$cfg_admin" ] ||
	if [ -z "$mailaddrs" ]; then mailaddrs="$cfg_admin"; else mailaddrs="$mailaddrs,$cfg_admin"; fi

bang_eval "git for-each-ref --format '%(refname) %(objectname)' >.refs-temp"
bang_eval "LC_ALL=C sort -b -k1,1 <.refs-temp >.refs-before"

[ -n "$do_check_after_refs" ] || return 0
bang_eval "git for-each-ref --format '%(refname) %(objectname)' >.refs-temp"
bang_eval "LC_ALL=C sort -b -k1,1 <.refs-temp >.refs-after"

cmp -s .refs-before .refs-after || refschanged=1
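# (.refs-before and .refs-after each contain one "refname objectname" pair per
#  line, sorted by refname, so a plain cmp is enough to detect any ref change)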

! [ -e .delaygc ] || >.allowgc || :

# Make sure we don't get any unwanted loose objects
# Starting with Git v2.10.0 fast-import can generate loose objects unless we
# tweak its configuration to prevent that
git_add_config 'fetch.unpackLimit=1'
# Note the git config documentation is wrong
# transfer.unpackLimit, if set, overrides fetch.unpackLimit
git_add_config 'transfer.unpackLimit=1'
# But not the Git v2.10.0 and later fastimport.unpackLimit which improperly uses <= instead of <
git_add_config 'fastimport.unpackLimit=0'
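# (with an unpackLimit of 1, a fetch that receives at least one object always
#  keeps the received pack rather than exploding it into loose objects; since
#  fastimport.unpackLimit compares with <= instead of <, it must be 0 to get
#  the same keep-the-pack behavior)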

# remember the starting time so we can easily combine fetched loose objects
# we sleep for 1 second after creating .needspack to make sure all objects are newer
if ! [ -e .needspack ]; then

	svn://* | svn+http://* | svn+https://* | svn+file://* | svn+ssh://*)
		[ -n "$cfg_mirror_svn" ] || { echo "Mirroring svn is disabled" >&2; exit 0; }
		# Allow the username to be specified in the "svn-credential.svn.username"
		# property and the password in the "svn-credential.svn.password" property
		# Use an 'anonsvn' username by default as is commonly used for anonymous svn
		# Default the password to the same as the username
		# The password property will be ignored unless a username has been specified
		if svnuser="$(git config --get svn-credential.svn.username)" && [ -n "$svnuser" ]; then
			if ! svnpass="$(git config --get svn-credential.svn.password)"; then

			case "$url1" in ?*"@"?*)
				urlsch="${url%%://*}"
				url="$urlsch://${url#*@}"

		# As a fallback, check in the URL, just in case

		case "$url1" in ?*"@"?*)
			urlsch="${url%%://*}"
			url="$urlsch://${url#*@}"

			svnuser="${url1%%:*}"
			if [ -n "$svnuser" ]; then

				case "$url1" in *":"*)

		if [ -z "$svnuser" ]; then

		GIT_ASKPASS_PASSWORD="$svnpass"
		export GIT_ASKPASS_PASSWORD
		# Update the git svn url to match baseurl but be cognizant of any
		# needed prefix changes.  See the comments in taskd/clone.sh about
		# why we need to put up with a prefix in the first place.
		case "$url" in svn+ssh://*) svnurl="$url";; *) svnurl="${url#svn+}";; esac

		svnurlold="$(config_get svnurl)" || :
		if [ "$svnurl" != "$svnurlold" ]; then
			# We better already have an svn-remote.svn.fetch setting
			bang test -n "$(git config --get-all svn-remote.svn.fetch)" || :
			# the only way to truly know what the proper prefix is
			# is to attempt a fresh git-svn init -s on the new url
			rm -rf svn-new-url || :
			# We require svn info to succeed on the URL; otherwise it's
			# simply not a valid URL, and without using -s on the init it
			# will not otherwise be tested until the fetch
			bang eval 'svn --non-interactive --username "$svnuser" --password "$svnpass" info "$svnurl" >/dev/null'
			bang mkdir svn-new-url
			GIT_DIR=svn-new-url bang git init --bare --quiet
			# We initially use -s for the init which will possibly shorten
			# the URL.  However, the shortening can fail if a password is
			# not required for the longer version but is for the shorter,
			# so try again without -s if the -s version fails.
			cmdstr='git svn init --username="$svnuser" --prefix "" -s "$svnurl" </dev/null >/dev/null 2>&1 || '
			cmdstr="$cmdstr"'git svn init --username="$svnuser" --prefix "" "$svnurl" </dev/null >/dev/null 2>&1'
			GIT_DIR=svn-new-url bang eval "$cmdstr"
			gitsvnurl="$(GIT_DIR=svn-new-url git config --get svn-remote.svn.url)" || :
			gitsvnfetch="$(GIT_DIR=svn-new-url git config --get svn-remote.svn.fetch)" || :
			gitsvnprefixnew="${gitsvnfetch%%:*}"
			gitsvnsuffixnew="${gitsvnprefixnew##*/}"
			gitsvnprefixnew="${gitsvnprefixnew%$gitsvnsuffixnew}"
			rm -rf svn-new-url || :
			# Using GIT_DIR= with bang leaves it set to svn-new-url, so reset it to .

			if [ "$gitsvnurl" != "$(git config --get svn-remote.svn.url || :)" ]; then
				# The url has been changed.
				# We must update the url and replace the prefix on all config items
				gitsvnfetch="$(git config --get-all svn-remote.svn.fetch | head -1)" || :
				gitsvnprefixold="${gitsvnfetch%%:*}"
				gitsvnsuffixold="${gitsvnprefixold##*/}"
				gitsvnprefixold="${gitsvnprefixold%$gitsvnsuffixold}"
				git config --remove-section 'svn-remote.svnnew' 2>/dev/null || :
				git config 'svn-remote.svnnew.url' "$gitsvnurl"
				git config --get-regexp '^svn-remote\.svn\.' |
				while read -r sname sval; do

					svn-remote.svn.fetch|svn-remote.svn.branches|svn-remote.svn.tags)
						sname="${sname#svn-remote.svn.}"
						sval="${sval#$gitsvnprefixold}"
						bang git config --add "svn-remote.svnnew.$sname" "${gitsvnprefixnew}$sval"

				bang git config -f svn/.metadata svn-remote.svn.reposRoot "$gitsvnurl"
				bang git config --remove-section svn-remote.svn
				bang git config --rename-section svn-remote.svnnew svn-remote.svn

			bang config_set svnurl "$svnurl"

		# remove any stale *.lock files greater than 1 hour old in case
		# git-svn was killed on the last update because it took too long
		find -L svn -type f -name '*.lock' -mmin +60 -exec rm -f '{}' + 2>/dev/null || :
		GIT_DIR=. bang git svn fetch --log-window-size=$var_log_window_size --username="$svnuser" --quiet </dev/null
		# git svn does not preserve group permissions in the svn subdirectory
		chmod -R ug+rw,o+r svn
		# git svn also leaves behind ref turds that end with @nnn
		# We get rid of them now
		git for-each-ref --format='%(refname)' |
		LC_ALL=C sed '/^..*@[1-9][0-9]*$/!d; s/^/delete /' |
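		# (the sed keeps only refnames ending in @<number> and turns each into a
		#  "delete <refname>" line for the command that consumes this pipeline)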

		unset GIT_ASKPASS_PASSWORD

	darcs://* | darcs+http://* | darcs+https://*)
		[ -n "$cfg_mirror_darcs" ] || { echo "Mirroring darcs is disabled" >&2; exit 0; }

			darcs://*) darcsurl="http://${url#darcs://}";;
			*) darcsurl="${url#darcs+}";;

		# remove any stale lock files greater than 1 hour old in case
		# darcs_fast_export was killed on the last update because it took too long
		find -L *.darcs -maxdepth 2 -type f -name 'lock' -mmin +60 -exec rm -f '{}' + 2>/dev/null || :
		bang git_darcs_fetch "$darcsurl"

		[ -n "$cfg_mirror_bzr" ] || { echo "Mirroring bzr is disabled" >&2; exit 0; }
		bzrurl="${url#bzr://}"
		bang git_bzr_fetch "$bzrurl"

	hg+http://* | hg+https://* | hg+file://* | hg+ssh://*)
		[ -n "$cfg_mirror_hg" ] || { echo "Mirroring hg is disabled" >&2; exit 0; }
		# We just remove hg+ here, so hg+http://... becomes http://...

		# Fetch any new updates
		bang hg -R "$(pwd)/repo.hg" pull
		# Do the fast-export | fast-import

[ "$url" = "$(git config --get remote.origin.url || :)" ] || bang config_set_raw remote.origin.url "$url"

[ "$(git config --bool fetch.prune 2>/dev/null || :)" != "false" ] || pruneopt=
if ! is_gfi_mirror_url "$url"; then

	[ "$(git config --bool girocco.lastupdateclean 2>/dev/null || :)" != "true" ] || lastwasclean=1

	[ "$(git config --bool girocco.cleanmirror 2>/dev/null || :)" != "true" ] || nextisclean=1
	if [ "$nextisclean" != "$lastwasclean" ]; then
		if [ -n "$nextisclean" ]; then
			git config --replace-all remote.origin.fetch "+refs/heads/*:refs/heads/*"
			git config --add remote.origin.fetch "+refs/tags/*:refs/tags/*"
			git config --add remote.origin.fetch "+refs/notes/*:refs/notes/*"
			git config --add remote.origin.fetch "+refs/top-bases/*:refs/top-bases/*"

			git config --replace-all remote.origin.fetch "+refs/*:refs/*"

# remember the starting time so we can easily detect new packs for fast-import mirrors
# we sleep for 1 second after creating .gfipack to make sure all packs are newer
if is_gfi_mirror_url "$url" && [ ! -e .gfipack ]; then

git config remotes.default >/dev/null 2>&1 || fetcharg="--all"

[ "$show_progress" != "0" ] || fetchcmd="git fetch -q"
if [ -n "$var_have_git_171" ] && [ "${show_progress:-0}" != "0" ]; then
	# git fetch learned --progress in v1.7.1
	case "$show_progress" in

			# full volume progress with all the spammy noise
			fetchcmd="git fetch --progress"

			# a kinder, gentler progress that doesn't leave one
			# covered all over in exploded bits of spam afterwards
			fetchcmd="git_fetch_q_progress"

# It's possible for a fetch to actually do something while still returning
# a non-zero result (perhaps some of the refs were updated but some were
# not -- a malicious Git-impersonation trying to set refs/heads/... refs
# to non-commit objects for example).
GIT_SSL_NO_VERIFY=1 bang_catch eval "$fetchcmd" $pruneopt --multiple "$fetcharg"
# If we did fetch anything, don't treat it as an error, but do keep the log;
# otherwise invoke bang_failed as for a normal failure
if [ "${bang_errcode:-0}" != "0" ]; then
	save_bang_errcode="$bang_errcode"

	if [ -n "$refschanged" ]; then
		keep_bang_log="git fetch${pruneopt:+ $pruneopt} --multiple $fetcharg"

		bang_cmd="git fetch${pruneopt:+ $pruneopt} --multiple $fetcharg"
		bang_errcode="$save_bang_errcode"

if ! is_gfi_mirror_url "$url" && [ "$nextisclean" != "$lastwasclean" ]; then
	if [ -n "$nextisclean" ]; then
		# We must manually purge the unclean refs now as even prune won't do it
		git for-each-ref --format='%(refname)' |

			-e '/^refs\/heads\//d' \
			-e '/^refs\/tags\//d' \
			-e '/^refs\/notes\//d' \
			-e '/^refs\/top-bases\//d' \

	git config --bool girocco.lastupdateclean ${nextisclean:-0}

if [ -e .gfipack ] && is_gfi_mirror_url "$url"; then
	find -L objects/pack -type f -newer .gfipack -name "pack-$octet20*.pack" -print >>gfi-packs

# The objects subdirectories' permissions must be updated now.
# In the case of a dumb http clone, the permissions will not be correct
# (missing group write) despite the core.sharedrepository=1 setting!
# The objects themselves seem to have the correct permissions.
# This problem appears to have been fixed in the most recent git versions.

[ "$cfg_permission_control" != "Hooks" ] || perms=go+w
chmod $perms $(find -L objects -maxdepth 1 -type d) 2>/dev/null || :

bang git update-server-info

# We maintain the last refresh date in two places deliberately
# so that it's available as part of the config data and also
# as a standalone file timestamp that can be accessed without git.
bang config_set lastrefresh "$(date "$datefmt")"
{ >.last_refresh; } 2>/dev/null || :

# Check to see if any refs changed

# Force a mini-gc if $Girocco::Config::delay_gfi_redelta is false and there's
# at least one gfi pack present now
if [ -z "$cfg_delay_gfi_redelta" ] && ! [ -e .needsgc ] &&
	[ -f gfi-packs ] && [ -s gfi-packs ] && is_gfi_mirror_url "$url"; then

# Activate a mini-gc if needed
check_and_set_needsgc

# Look at which refs changed and trigger ref-change for these
sockpath="$cfg_chroot/etc/taskd.socket"
if [ -n "$refschanged" ]; then
	bang config_set lastreceive "$(date '+%a, %d %b %Y %T %z')"
	# We always use UTC for the log timestamp so that chroot and non-chroot match up.
	# We don't have to worry about multiple log files since only one update runs
	lognamets="$(TZ=UTC date '+%Y%m%d_%H%M%S')"
	loghhmmss="${lognamets##*_}"
	logname="reflogs/${lognamets%%_*}"
	# We freshen the mod time to now on any old or new ref that is a loose object
	# For old refs we do it so we will be able to keep them around for 1 day
	# For new refs we do it in case we are about to run gc and the new ref
	# actually points to an oldish loose object that had been unreachable
	# We probably do not need to do it for new refs as Git tries to do that,
	# but since we're already doing it for old refs (which Git does not do),
	# it's almost no extra work for new refs, just in case.

		echo "ref-changes %$proj% $proj"
		LC_ALL=C join .refs-before .refs-after |
		LC_ALL=C sed -e '/^[^ ][^ ]* \([^ ][^ ]*\) \1$/d' |
		while read ref old new; do
			echo "$loghhmmss $old $new $ref" >&3
			freshen_loose_objects "$old" "$new"
			echo "$old $new $ref"
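		# (the join above lists refs present both before and after; the sed then
		#  drops the ones whose old and new object names are identical, so this
		#  loop only sees refs that actually changed)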

		LC_ALL=C join -v 1 .refs-before .refs-after |
		while read ref old; do
			echo "$loghhmmss $old 0000000000000000000000000000000000000000 $ref" >&3
			freshen_loose_objects "$old"
			echo "$old 0000000000000000000000000000000000000000 $ref"

		LC_ALL=C join -v 2 .refs-before .refs-after |
		while read ref new; do
			echo "$loghhmmss 0000000000000000000000000000000000000000 $new $ref" >&3
			freshen_loose_objects "$new"
			echo "0000000000000000000000000000000000000000 $new $ref"

		git for-each-ref --format='%(objectname) %(objectname) %(refname)' refs/heads
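		# (this emits one "objectname objectname refname" line, i.e. old == new,
		#  for every current branch)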
		echo "done ref-changes %$proj% $proj"
	} >.refs-temp 3>>"$logname"
	if [ -S "$sockpath" ]; then

		nc_openbsd -w 15 -U "$sockpath" <.refs-temp || :

	bang config_set lastchange "$(date '+%a, %d %b %Y %T %z')"
	bang_eval "git for-each-ref --sort=-committerdate --format='%(committerdate:iso8601)' \
		--count=1 refs/heads >info/lastactivity"
	! [ -d htmlcache ] || { >htmlcache/changed; } 2>/dev/null || :
	rm -f .delaygc .allowgc

	[ "${cfg_autogchack:-0}" != "0" ] &&
	[ "$(git config --get --bool girocco.autogchack 2>/dev/null)" != "false" ]

	mv -f .refs-after .refs-last

# If the repository does not yet have a valid HEAD symref try to set one
# If an empty repository was cloned and then later becomes unempty you just
# lose out on the fancy "symref=HEAD:" logic and get this version instead
check_and_set_head || :

rm -f .refs-before .refs-after .refs-temp FETCH_HEAD

[ -z "$mailaddrs" ] || ! was_banged_message_sent ||

	echo "$proj update succeeded - failure recovery"
	echo "this status message may be disabled on the project admin page"
} | mailref "update@$cfg_gitweburl/$proj.git" -s "[$cfg_name] $proj update succeeded" "$mailaddrs" || :

if [ -n "$keep_bang_log" ] && [ -s "$bang_log" ]; then
	cat "$bang_log" >.banglog

	echo "$keep_bang_log failed with error code $save_bang_errcode" >>.banglog

progress "- [$proj] update ($(date))"