#!/bin/sh

# NOTE: additional options can be passed to git repack by specifying
# them after the project name, for example:
# gc.sh my-project -f

. @basedir@/shlib.sh

set -e

if [ $# -lt 1 ]; then
    echo "Usage: gc.sh projname [extra-repack-args]" >&2
    exit 1
fi

# packing options
packopts="--depth=50 --window=50 --window-memory=${var_window_memory:-1g}"
quiet=; [ -n "$show_progress" ] || quiet=-q

umask 002
[ "$cfg_permission_control" != "Hooks" ] || umask 000
clean_git_env

pidactive() {
    if _result="$(kill -0 "$1" 2>&1)"; then
        # process exists and we have permission to signal it
        return 0
    fi
    case "$_result" in *"not permitted"*)
        # we do not have permission to signal the process
        return 0
    esac
    # process does not exist
    return 1
}

createlock() {
    # A .lock file should only exist for much less than a second.
    # If we see a stale lock file (> 1h old), remove it and then,
    # just in case, wait 30 seconds for any process whose .lock
    # we might have just removed (it's racy) to finish doing what
    # should take much less than a second to do.
    _stalelock="$(find "$1.lock" -maxdepth 1 -mmin +60 -print 2>/dev/null)" || :
    if [ -n "$_stalelock" ]; then
        rm -f "$_stalelock"
        sleep 30
    fi
    for _try in p p n; do
        if (set -C; >"$1.lock") 2>/dev/null; then
            echo "$1.lock"
            return 0
        fi
        # delay and try again
        [ "$_try" != "p" ] || sleep 1
    done
    # cannot create lock file
    return 1
}
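
# Example usage (illustrative only; lock_gc below shows the real pattern,
# which writes holder info into the .lock file before renaming it into place):
#   lockfile="$(createlock gc.pid)" || exit 1   # creates and echoes gc.pid.lock
#   # ...critical section...
#   rm -f "$lockfile"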

# The pre-receive script creates one ref log file per push but we want them to
# be coalesced into one ref log file per day. We are guaranteed that any files
# we find to coalesce are NOT currently being written to since they are always
# written first as temporary files and then moved into place. We attempt to
# transfer the most recent modification time to the coalesced log file which
# would step on its mod time if it were being written to directly, but if we
# find per-process ref log files then it must be a push project and the only
# thing that would write directly to the main per-day log file would be a
# mirror project so there's actually no conflict.
# Also, if the clock is wonky (or was futzed with) we may have both YYYYMMDD
# and YYYYMMDD.gz present in which case combine them into YYYYMMDD
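#
# For example (hypothetical filenames): if both reflogs/20230704 and
# reflogs/20230704.gz exist, the non-gz contents are appended to the
# uncompressed .gz contents via a temporary reflogs/20230704_ file,
# leaving a single uncompressed reflogs/20230704 that keeps the newest
# modification time.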
coalesce_reflogs() {
    [ -d reflogs ] || return 0
    rm -f .gc_failed
    find reflogs -maxdepth 1 -type f -name "[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]" -print |
    while read -r rname; do
        if [ -e "$rname.gz" ]; then
            if [ -s "$rname" ]; then
                # Presumably the .gz file must have been created before the non-gz
                # file since it had to be uncompressed at some point therefore
                # we need to append the non-gz contents to it but keep the non-gz
                # contents timestamp so we rename to YYYYMMDD_ which will sort first
                # and be picked up in the next step if we are interrupted in the middle.
                # If a YYYYMMDD_ file already exists we append to it and transfer the
                # timestamp. Finally we transfer the YYYYMMDD_ timestamp to the result
                # and remove the YYYYMMDD_ temporary file leaving the result uncompressed.
                if [ -e "${rname}_" ]; then
                    cat "$rname" >>"${rname}_"
                    touch -r "$rname" "${rname}_"
                    rm -f "$rname"
                    ! [ -e "$rname" ]
                else
                    mv "$rname" "${rname}_"
                fi
                gzip -d "$rname.gz" </dev/null
                [ -e "$rname" ] && ! [ -e "$rname.gz" ]
                cat "${rname}_" >>"$rname"
                touch -r "${rname}_" "$rname"
                rm -f "${rname}_"
            else
                # Just remove the empty file to resolve the problem
                rm -f "$rname"
            fi
        fi
    done
    find reflogs -maxdepth 1 -type f -name "[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]_*" -print | LC_ALL=C sort |
    while read -r rname; do
        logname="${rname%%_*}"
        # If someone's been futzing with the date, the file we want to
        # append to could already have been compressed, so we just uncompress
        # it here. The previous block guarantees we do not have both a compressed
        # and uncompressed version present at the same time.
        if [ -e "$logname.gz" ]; then
            gzip -d "$logname.gz" </dev/null
            [ -e "$logname" ] && ! [ -e "$logname.gz" ]
        fi
        cat "$rname" >>"$logname"
        touch -r "$rname" "$logname"
        rm -f "$rname"
        if [ -e "$rname" ]; then
            >.gc_failed
            echo "! [$proj] failed to remove $rname" >&2
            exit 1 # will only exit subshell created by "|"
        fi
    done
    ! [ -e .gc_failed ]
}

# Remove any files in reflogs that are older than $cfg_reflogs_lifetime days
prune_reflogs() {
    [ -d reflogs ] || return 0
    exp="$(( ${cfg_reflogs_lifetime:-1} * 1440 ))"
    [ $exp -gt 0 ] || exp=1440
    [ $exp -le 43200 ] || exp=43200
    find reflogs -maxdepth 1 -type f -mmin "+$exp" -exec rm -f '{}' + || :
}

# Compact any reflogs that are not today's UTC date unless a .gz version exists
compact_reflogs() {
    [ -d reflogs ] || return 0
    _td="reflogs/$(TZ=UTC date '+%Y%m%d')"
    find reflogs -maxdepth 1 -type f -name "[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]" -print |
    while read -r rname; do
        [ "$rname" != "$_td" ] || continue
        ! [ -e "$rname.gz" ] || continue
        gzip -9 "$rname" </dev/null
    done
}

# return true if there's more than one objects/pack-<sha>.pack file or
# ANY loose sha-1 object files in objects
is_dirty() {
    _packs=$(find objects/pack -type f -name "pack-$octet20*.pack" -print | head -n 2 | LC_ALL=C wc -l)
    if [ $_packs != 1 ] && [ $_packs != 0 ]; then
        return 0
    fi
    _objs=$(find objects/$octet -type f -name "$octet19*" -print 2>/dev/null | head -n 1 | LC_ALL=C wc -l)
    [ $_objs -ne 0 ]
}

# make sure combine-packs uses the correct Git executable
run_combine_packs() {
    PATH="$var_git_exec_path:$cfg_basedir/bin:$PATH" @basedir@/jobd/combine-packs.sh "$@"
}

# combine the input pack(s) into a new pack (or possibly packs if packSizeLimit set)
# input pack names are read from standard input one per line delimited by the first
# ':', ' ' or '\n' character on the line (which allows gfi-packs to be read directly)
# all arguments, if any, are passed to pack-objects as additional options
# returns non-zero on failure AND creates .gc_failed in that case
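#
# Example input lines (hypothetical pack name), all of which name pack-0123abcd:
#   pack-0123abcd.pack
#   pack-0123abcd.pack: checkpoint
#   pack-0123abcd.pack 12345 objects
# (everything at and after the first ':' or ' ' on the line is ignored)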
combine_packs() {
    rm -f .gc_failed
    find objects/pack -maxdepth 1 -type f -name '*.zap*' -exec rm -f '{}' + || :
    run_combine_packs --replace "$@" $packopts --all-progress-implied $quiet --non-empty || {
        >.gc_failed
        return 1
    }
    return 0
}

# if the current directory is_gfi_mirror then repack all packs listed in gfi-packs
repack_gfi_packs() {
    [ -n "$gfi_mirror" ] || return 0
    [ -d objects/pack ] || { rm -f gfi-packs; return 0; }
    progress "~ [$proj] redeltifying poor quality git fast-import packs"
    combine_packs --ignore-missing --no-reuse-delta <gfi-packs
    rm -f gfi-packs
    return 0
}

# combine small packs into larger pack(s)
# we avoid any keep, bndl or bitmap packs
# if the optional argument is non-empty even a single small pack will be redelta'd
combine_small_packs() {
    _didprogress=
    _minsmallpacks=2
    if [ -n "$1" ] && [ -n "$noreusedeltaopt" ]; then
        _minsmallpacks=1
    fi
    _lpo="--exclude-no-idx --exclude-keep --exclude-bitmap --exclude-bndl"
    _lpo="$_lpo --exclude-sfx _u --exclude-sfx _o"
    _lpo="$_lpo --quiet --object-limit $var_redelta_threshold objects/pack"
    while
        _cnt="$(list_packs --count $_lpo)" || :
        test "${_cnt:-0}" -ge $_minsmallpacks
    do
        [ -n "$_didprogress" ] || {
            progress "~ [$proj] combining small packs into a single larger pack"
            _didprogress=1
        }
        _newp="$(list_packs $_lpo | combine_packs --names $noreusedeltaopt)"
        _newc="$(( $(echo "$_newp" | LC_ALL=C wc -w) ))"
        # be paranoid and exit the loop if we haven't reduced the number of packs
        [ $_newc -lt $_cnt ] || break
        _minsmallpacks=2
    done
    return 0
}

# Unfortunately git-svn lacks the ability to store newly fetched revisions as a pack.
# However, the fetch code conveniently sets .svnpack just before it runs git-svn fetch
# so that it's easy to find all the objects that have been fetched by git-svn and
# combine them into a pack. The --no-reuse-delta option is meaningless here since
# everything to be packed is a loose object and therefore not a delta so deltification
# will always take place.
make_svn_pack() {
    [ -f .svnpack ] && [ -n "$svn_mirror" ] || return 0
    rm -f .svnpackgc
    mv -f .svnpack .svnpackgc
    progress "~ [$proj] combining loose git-svn objects into a pack"
    _newp="$(find objects/$octet -maxdepth 1 -type f -newer .svnpackgc -name "$octet19*" -print 2>/dev/null |
        LC_ALL=C awk -F / '{print $2 $3}' |
        run_combine_packs --objects --names $packopts --incremental --all-progress-implied $quiet --non-empty)" || {
        mv -f .svnpackgc .svnpack
        >.gc_failed
        return 1
    }
    if [ -n "$_newp" ]; then
        # remove the now-redundant loose objects -- this is always safe
        # even during a concurrent push because a reprepare_packed_git
        # will be triggered if an object that should be there is not
        # found thereby finding it in the new pack instead
        git prune-packed $quiet
    fi
    rm -f .svnpackgc
}

# HEADSHA="$(pack_is_complete /full/path/to/some.pack /full/path/to/packed-refs "$(cat HEAD)")"
pack_is_complete() {
    # Must have a matching .idx file and a non-empty packed-refs file
    [ -s "${1%.pack}.idx" ] || return 1
    [ -s "$2" ] || return 1
    _headsha=
    case "$3" in
        $octet20*)
            _headsha="$3"
            ;;
        "ref: refs/"?*|"ref:refs/"?*|"refs/"?*)
            _headmatch="${3#ref:}"
            _headmatch="${_headmatch# }"
            _headmatchpat="$(echo "$_headmatch" | LC_ALL=C sed -e 's/\([.$]\)/\\\1/g')"
            _headsha="$(LC_ALL=C grep -e "^$octet20$hexdig* $_headmatchpat\$" <"$2" |
                LC_ALL=C cut -d ' ' -f 1)"
            case "$_headsha" in $octet20*) :;; *)
                return 1
            esac
            ;;
        *)
            # bad HEAD
            return 1
    esac
    rm -rf pack_is_complete_test
    mkdir pack_is_complete_test
    mkdir pack_is_complete_test/refs
    mkdir pack_is_complete_test/objects
    mkdir pack_is_complete_test/objects/pack
    echo "$_headsha" >pack_is_complete_test/HEAD
    ln -s "$1" pack_is_complete_test/objects/pack/
    ln -s "${1%.pack}.idx" pack_is_complete_test/objects/pack/
    ln -s "$2" pack_is_complete_test/packed-refs
    _count="$(git --git-dir=pack_is_complete_test rev-list --count --all 2>/dev/null)" || :
    rm -rf pack_is_complete_test
    [ -n "$_count" ] || return 1
    [ "$_count" -gt 0 ] 2>/dev/null || return 1
    echo "$_headsha"
}

# On return a "$lockf" will have been created that must be removed when gc is done
lock_gc() {
    # be compatible with gc.pid file from newer Git releases
    lockf=gc.pid
    hn="$(hostname)"
    active=
    if [ "$(createlock "$lockf")" ]; then
        # If $lockf is:
        # 1) less than 12 hours old
        # 2) contains two fields (pid hostname) NO trailing NL
        # 3) the hostname is different OR the pid is still alive
        # then we exit as another active process is holding the lock
        if [ "$(find "$lockf" -maxdepth 1 -mmin -720 -print 2>/dev/null)" ]; then
            apid=
            ahost=
            read -r apid ahost ajunk <"$lockf" || :
            if [ "$apid" ] && [ "$ahost" ]; then
                if [ "$ahost" != "$hn" ] || pidactive "$apid"; then
                    active=1
                fi
            fi
        fi
    else
        echo >&2 "[$proj] unable to create gc.pid.lock file"
        exit 1
    fi
    if [ -n "$active" ]; then
        rm -f "$lockf.lock"
        echo >&2 "[$proj] gc already running on machine '$ahost' pid '$apid'"
        exit 1
    fi
    printf "%s %s" "$$" "$hn" >"$lockf.lock"
    chmod 0664 "$lockf.lock"
    mv -f "$lockf.lock" "$lockf"
}
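
# Illustration: after lock_gc succeeds, gc.pid holds exactly two
# space-separated fields with no trailing newline, e.g. (hypothetical values):
#   12345 gitserver01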

# Create a repack subdirectory such that running repack in it will pack the
# same things that a pack in the normal directory would except that the pack
# is guaranteed to be generated in an optimized order by adding a suitable
# synthesized ref in the refs/tags namespace (yes, pack-objects.c really does
# behave differently depending on the contents of the refs/tags namespace).
# Before calling this, pack-refs --all MUST be performed or the wrong pack
# will end up being made.
# If a ref deletion is pushed after making the repack subdir but before
# the actual repack, the discarded objects will be packed -- no big deal,
# they'll get discarded the next time gc runs.
# If a fast-forward ref update is pushed after making the repack subdir but
# before the actual repack, it will be picked up and the new objects packed
# (subject to the normal git repack race about picking such updates up).
# If a non-fast-forward ref update is pushed after making the repack subdir but
# before the actual repack, it will be picked up like a fast-forward update but
# the discarded objects will be included like a ref deletion (until the next
# scheduled gc takes place).
# We retain a copy of the original packed-refs file as repack/packed-refs.orig
# If ref deletions come in while we're repacking, the original packed-refs
# file will be modified, but we'll still pack the deleted ref(s).
# If the packed-refs.orig file is used to create the bundle header we avoid
# a situation where the bundle contains a ref state that never actually
# existed in reality (for example a new branch is pushed and then an old
# branch deleted afterwards -- the deletion would show up in the bundle
# because it will cause the original packed-refs file to be re-written, but
# the new branch creation will not unless we do another pack-refs which might
# lead to an incomplete bundle). Therefore we want to keep a copy of
# the original packed-refs file around. We do the same thing for HEAD.
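#
# Illustration (hypothetical abbreviated sha1s) of the transform performed below:
#   repack/packed-refs.orig:  2cbd7271329b refs/heads/master
#   repack/packed-refs:       2cbd7271329b refs/!/heads/master
#                             2cbd7271329b refs/heads/!!   (synthesized HEAD entry)
#                             91058dfa8f64 refs/tags/!     (synthesized pack-order entry)
# Only the synthesized refs/tags/! entry lands in refs/tags, giving
# pack-objects the optimized ordering described above.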
make_repack_dir() {
    ! [ -d repack ] || rm -rf repack
    ! [ -d repack ] || { echo >&2 "[$proj] cannot remove repack subdirectory"; exit 1; }
    mkdir repack repack/refs repack/alt repack/alt/pack
    [ -d info ] || mkdir info
    ln -s ../config repack/config
    ln -s ../info repack/info
    ln -s ../objects repack/objects
    ln -s ../../refs repack/refs/refs
    _lines=$(( $(LC_ALL=C wc -l <packed-refs) ))
    cat HEAD >repack/HEAD.orig
    cat packed-refs >repack/packed-refs.orig
    if [ $(LC_ALL=C wc -l <repack/packed-refs.orig) -ne "$_lines" ]; then
        echo >&2 "[$proj] error: make_repack_dir failed original packed-refs line count sanity check"
        exit 1
    fi
    sed 's, refs/, refs/!/,' <repack/packed-refs.orig >repack/packed-refs
    headref="$(git rev-parse --verify --quiet HEAD)" || :
    if [ -n "$headref" ]; then
        echo "$headref refs/heads/!!" >>repack/packed-refs
        _lines=$(( $_lines + 1 ))
    fi
    optref="$(git rev-list -n 1 --all 2>/dev/null)" || :
    if [ -n "$optref" ]; then
        echo "$optref refs/tags/!" >>repack/packed-refs
        _lines=$(( $_lines + 1 ))
        echo "$optref" >repack/HEAD
    else
        cat HEAD >repack/HEAD
    fi
    if [ $(LC_ALL=C wc -l <repack/packed-refs) -ne "$_lines" ]; then
        echo >&2 "[$proj] error: make_repack_dir failed packed-refs line count sanity check"
        exit 1
    fi
}

# Remove any crud that's been left behind by interrupted operations
# that did not clean up after themselves
remove_crud() {
    # Remove any existing FETCH_HEAD
    # There can only be a FETCH_HEAD if we've been fetching, not if we've been
    # receiving pushes (those never create a FETCH_HEAD).
    # And if we're fetching because we're a mirror, we know we're not fetching right
    # now since jobd.pl never runs a project's fetch simultaneously with its gc.
    # Therefore any existing FETCH_HEAD is junk. And it may be many megabytes if
    # there were a lot of refs.
    rm -f FETCH_HEAD

    # remove any existing pack_is_complete_test or repack subdirectories
    # If either exists when this function is called it's crud
    rm -rf pack_is_complete_test repack

    # Remove any stale pack remnants that are more than an hour old.
    # Stale pack fragments are defined as any pack-<sha1>.ext where .ext is NOT
    # .pack AND the corresponding .pack DOES NOT exist. A bunch of stale
    # pack-<sha1>.idx files without their corresponding .pack files are worthless
    # and just waste space. Normally there shouldn't be any remnants but actually
    # this can happen when things are interrupted at just the wrong time.
    # Note that the objects/pack directory is created by git init and should
    # always exist.
    find objects/pack -maxdepth 1 -type f -mmin +60 -name "pack-$octet20*.?*" -print |
    LC_ALL=C sed -e 's/^objects\/pack\/pack-//; s/\..*$//' | LC_ALL=C sort -u |
    while read packsha; do
        ! [ -e "objects/pack/pack-$packsha.pack" ] || continue
        rm -f "objects/pack/pack-$packsha".?*
    done

    # Remove any stale tmp reflogs files that are more than one hour old.
    # Since they are created only while the pre-receive hook is running and
    # all it does is process a bunch of refs passed to it on standard input
    # it's inconceivable that it would ever take as much as an hour to run.
    if [ -d reflogs ]; then
        find reflogs -maxdepth 1 -type f -mmin +60 -name "tmp_*" -exec rm -f '{}' + || :
    fi

    # Remove any stale object tmp_obj_* files that are more than 3 hours old.
    # Really these files should only exist very briefly so there shouldn't be any
    # but things happen that can end up leaving them behind.
    find objects/$octet -maxdepth 1 -type f -mmin +180 -name "tmp_obj_?*" -exec rm -f '{}' + 2>/dev/null || :

    # Remove any stale pack .keep files that are more than 12 hours old.
    # We don't do anything to create any permanent pack .keep files, so they must
    # be remnants from some failed push or something. Removing the .keep will
    # allow the pack to be properly repacked.
    find objects/pack -maxdepth 1 -type f -mmin +720 -name "pack-$octet20*.keep" -exec rm -f '{}' + || :

    # Remove any stale tmp_pack_*, tmp_idx_*, tmp_bitmap_*, packtmp-* or .tmp-*-pack* files
    # that are more than 12 hours old.
    find objects/pack -maxdepth 1 -type f -mmin +720 \( \
        -name "tmp_pack_?*" -o -name "tmp_idx_?*" -o -name "tmp_bitmap_?*" -o \
        -name "packtmp-?*" -o -name ".tmp-?*-pack*" \
    \) -exec rm -f '{}' + || :

    # Remove any stale incoming-* object quarantine directories that are
    # more than 12 hours old. These are new with Git >= 2.11.0.
    find objects -maxdepth 1 -type d -name 'incoming-?*' -mmin +720 \
        -exec rm -rf '{}' + || :

    # Remove any stale shallow_* files that are more than 12 hours old.
    # These can be left behind by Git >= 1.8.4.2 and < 2.0.0 when a client
    # requests a shallow clone.
    find . -maxdepth 1 -type f -mmin +720 -name "shallow_?*" -exec rm -f '{}' + || :

    # Remove any stale *.temp files in the objects area that are more than 12 hours old.
    # This can be stale sha1.temp, or stale *.pack.temp so we kill all stale *.temp.
    find objects -type f -mmin +720 -name "*.temp" -exec rm -f '{}' + || :

    # Remove any stale *.lock files in the htmlcache area that might have been left
    # behind after an abnormal exit during an attempt to update a cached file and
    # are more than 1 hour old.
    ! [ -d htmlcache ] || find htmlcache -type f -mmin +60 -name "*.lock" -exec rm -f '{}' + || :

    # Remove any stale git-svn temp files that are more than 12 hours old.
    # The git-svn process creates temp files with random 10 character names
    # in the root of $GIT_DIR. Unfortunately they do not have a recognizable
    # prefix, so we just have to kill any files with a 10-character name. We
    # do this only for git-svn mirrors. All characters are chosen from
    # [A-Za-z0-9_] so we can at least check that and fortunately the only
    # collision is 'FETCH_HEAD' but that shouldn't matter.
    # There may also be temp files with a Git_ prefix as well.
    if [ -n "$svn_mirror" ]; then
        _randchar='[A-Za-z0-9_]'
        _randchar2="$_randchar$_randchar"
        _randchar4="$_randchar2$_randchar2"
        _randchar10="$_randchar4$_randchar4$_randchar2"
        find . -maxdepth 1 -type f -mmin +720 -name "$_randchar10" -exec rm -f '{}' + || :
        find . -maxdepth 1 -type f -mmin +720 -name "Git_*" -exec rm -f '{}' + || :
    fi

    # Remove any stale fast_import_crash_<pid> files that are more than 3 days old.
    if [ -n "$gfi_mirror" ]; then
        find . -maxdepth 1 -type f -mmin +4320 -name "fast_import_crash_?*" -exec rm -f '{}' + || :
    fi

    # Remove any stale core or *.core or core.* files that are more than 3 days old.
    find . -maxdepth 1 -type f -mmin +4320 \( -name "core" -o -name "*.core" -o -name "core.*" \) \
        -exec rm -f '{}' + || :
}


## Garbage Collection Types
##
## There are two kinds of possible garbage collection (gc) operations:
##
##   1. A normal, full gc
##   2. A "mini" gc
##
## If the full garbage collection interval has expired (or gc has never been
## run), then a normal, full gc will take place. Otherwise, a "mini" gc will
## take place if the file .needsgc exists.
##
## A "mini" gc is similar to "git gc --auto" in that it may not end up actually
## doing anything unless the right conditions are present so it's not a burden
## to run it often. If the file .needsgc exists, a "mini" gc will occur at
## the next opportunity.
##
## Note, however, that the .nogc file suppresses ALL gc activity (normal or mini).
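
# Illustration (administrative convenience, not part of this script's logic;
# the path is a placeholder):
#   touch /path/to/proj.git/.nogc      # suppress ALL gc for the project
#   touch /path/to/proj.git/.needsgc   # request a "mini" gc at the next run
#   touch /path/to/proj.git/.delaygc   # postpone full gc (crud removal still happens)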

proj="${1%.git}"
shift
cd "$cfg_reporoot/$proj.git"
[ -d objects/pack ] || { rm -f gfi-packs; mkdir -p objects/pack; }
mirror_url="$(get_mirror_url)"
svn_mirror=
! is_svn_mirror_url "$mirror_url" || svn_mirror=1
gfi_mirror=
if [ -f gfi-packs ] && [ -s gfi-packs ] && is_gfi_mirror_url "$mirror_url"; then
    gfi_mirror=1
fi

# If git config --bool --get girocco.redelta is explicitly false then automatic
# redelta when there are fewer than $var_redelta_threshold objects will be suppressed.
# On the other hand, if git config --get girocco.redelta is "always" then, on a full
# gc only, for the final repack, deltas will always be recomputed.
# This can be set on a per-project basis to avoid unusual pathological gc behavior.
# Setting this will hurt efficiency of the affected repository.
# Note that fast-import packs ALWAYS get new deltas regardless of this setting.
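#
# Example (run inside the affected project's git directory):
#   git config --bool girocco.redelta false   # suppress automatic redelta
#   git config girocco.redelta always         # always recompute deltas on full gc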
noreusedeltaopt="--no-reuse-delta"
[ "$(git config --bool --get girocco.redelta 2>/dev/null || :)" != "false" ] || noreusedeltaopt=
alwaysredelta=
[ "$(git config --get girocco.redelta 2>/dev/null || :)" != "always" ] || alwaysredelta=1

# Extract any -f or -F or --no-reuse-object or --no-reuse-delta options
# to be compatible with the old and new gc.sh versions and avoid ugly argument
# duplication in process lists at the same time
# Any options found will override the "girocco.redelta" setting
recompress=
idx=$#
while [ $idx -gt 0 ]; do
    idx=$(( $idx - 1 ))
    opt="$1"
    shift
    case "$opt" in
        -f|--no-reuse-delta)
            alwaysredelta=1
            continue
            ;;
        -F|--no-reuse-object)
            alwaysredelta=1
            recompress=1
            continue
            ;;
        -?*)
            ;;
        *)
            printf >&2 '%s\n' "bad non-option argument: $opt"
            echo >&2 "(Did you perhaps intend to use a --xxx=yyy form?)"
            exit 1
    esac
    [ -z "$opt" ] || set -- "$@" "$opt"
done
if [ -n "$alwaysredelta" ]; then
    noreusedeltaopt="--no-reuse-delta"
    [ -z "$recompress" ] || noreusedeltaopt="--no-reuse-object"
fi

trap 'e=$?; rm -f .gc_in_progress; if [ $e != 0 ]; then echo "gc failed dir: $PWD" >&2; fi' EXIT
trap 'exit 130' INT
trap 'exit 143' TERM

# date -R is linux-only, POSIX equivalent is '+%a, %d %b %Y %T %z'
datefmt='+%a, %d %b %Y %T %z'
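# e.g. "$(date "$datefmt")" yields something like: Mon, 04 Jul 2016 13:45:17 +0000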

isminigc=
if [ "${force_gc:-0}" = "0" ] && check_interval lastgc $cfg_min_gc_interval; then
    if [ -e .needsgc ]; then
        isminigc=1
    else
        progress "= [$proj] garbage check skip (last at $(config_get lastgc))"
        exit 0
    fi
fi
if [ -e .nogc ]; then
    progress "x [$proj] garbage check disabled"
    exit 0
fi

if [ -n "$isminigc" ]; then
    # Perform a "mini" gc
    # Note that .delaygc is ignored here as that's only intended for full gc
    lock_gc
    rm -f .allowgc .needsgc
    rm -f objects/pack/pack-*_r.keep
    remove_crud
    coalesce_reflogs
    prune_reflogs
    compact_reflogs
    miniactive=
    if [ -f .svnpack ] && [ -n "$svn_mirror" ]; then
        miniactive=1
        progress "+ [$proj] mini garbage check ($(date))"
        make_svn_pack
    fi
    if [ -z "$cfg_delay_gfi_redelta" ] && [ -n "$gfi_mirror" ]; then
        # $Girocco::Config::delay_gfi_redelta is false, force redeltification now
        if [ -z "$miniactive" ]; then
            miniactive=1
            progress "+ [$proj] mini garbage check ($(date))"
        fi
        repack_gfi_packs
    fi
    # If there aren't at least 10 non-keep, non-bitmap, non-bndl packs then
    # don't actually process them yet
    lpo="--exclude-no-idx --exclude-keep --exclude-bitmap --exclude-bndl --quiet"
    packcnt="$(list_packs --count $lpo objects/pack)" || :
    if [ "${packcnt:-0}" -ge 10 ]; then
        if [ -z "$miniactive" ]; then
            miniactive=1
            progress "+ [$proj] mini garbage check ($(date))"
        fi
        if [ -n "$gfi_mirror" ]; then
            repack_gfi_packs
            packcnt="$(list_packs --count $lpo objects/pack)" || :
        fi
        # if repack_gfi_packs dropped the pack count to < 10 don't combine
        if [ "${packcnt:-0}" -ge 10 ]; then
            combine_small_packs
            packcnt="$(list_packs --count $lpo objects/pack)" || :
        fi
        # if we still have at least 10 packs trigger a full gc
        if [ "${packcnt:-0}" -ge 10 ]; then
            # We shouldn't be in a .delaygc state at this point, but if
            # we are then nuke it because we really need a full gc now
            rm -f .delaygc
            git config --unset gitweb.lastgc
            rm -f "$lockf"
            progress "- [$proj] mini garbage check triggering full gc too many packs ($(date))"
            exit 0
        fi
    fi
    rm -f "$lockf"
    if [ -n "$miniactive" ]; then
        git update-server-info
        progress "- [$proj] mini garbage check ($(date))"
    else
        progress "= [$proj] mini garbage check nothing but crud removal to do ($(date))"
    fi
    exit 0
fi

# Avoid unnecessary garbage collections:
#   1. If lastreceive is set and is older than lastgc
#      -AND-
#   2. We are not a fork (! -s alternates) -OR- lastparentgc is older than lastgc
#
# If lastgc is NOT set or lastreceive is NOT set we MUST run gc
# If we are a fork and lastparentgc is NOT set we MUST run gc
#
# If the repo is dirty after removing any crud we MUST run gc

gcstart="$(date "$datefmt")"
skipgc=
isfork=
! [ -s objects/info/alternates ] || isfork=1
lastparentgcsecs=
[ -z "$isfork" ] || lastparentgcsecs="$(config_get_date_seconds lastparentgc)" || :
lastreceivesecs=
if lastreceivesecs="$(config_get_date_seconds lastreceive)" &&
    [ "${force_gc:-0}" = "0" ] &&
    lastgcsecs="$(config_get_date_seconds lastgc)" &&
    [ $lastreceivesecs -lt $lastgcsecs ]; then
    # We've run gc since we last received, so maybe we can skip,
    # check if not fork or fork and lastparentgc < lastgc
    if [ -n "$isfork" ]; then
        if [ -n "$lastparentgcsecs" ] &&
            [ $lastparentgcsecs -lt $lastgcsecs ]; then
            # We've run gc since our parent ran gc so we can skip
            skipgc=1
        fi
    else
        # We don't have any alternates (we're not a fork) so we can skip
        skipgc=1
    fi
fi

# Prevent any other simultaneous gc operations
lock_gc

# At this point, if .allowgc or .gc_failed exists, it's now crud to be removed
rm -f .allowgc .gc_failed

# Ideally we would do this in post-receive, but that would mean duplicating the
# logic so it's available in the chroot jail and that's highly undesirable
# Instead, since the first gc will be triggered immediately following the first
# push, we do the check here as it's quick and harmless if HEAD is already valid
check_and_set_head || :

# Always get rid of crud
remove_crud

# Always perform reflogs maintenance
coalesce_reflogs
prune_reflogs
compact_reflogs

# Run 'git svn gc' now for svn mirrors
if [ -n "$svn_mirror" ]; then
    git svn gc || :
fi

# Skip the actual gc if .delaygc is set
if [ -e .delaygc ]; then
    progress "x [$proj] garbage check delayed (except for crud removal)"
    rm -f "$lockf"
    exit 0
fi

# Do not skip gc if the repo is dirty
if [ -n "$skipgc" ] && ! is_dirty; then
    progress "= [$proj] garbage check nothing but crud removal to do ($(date))"
    config_set lastgc "$gcstart"
    rm -f "$lockf"
    exit 0
fi

bumptime=
if [ -n "$isfork" ] && [ -z "$lastparentgcsecs" ]; then
    # set lastparentgc and then update gcstart to be at least 1 second later
    config_set lastparentgc "$gcstart"
    bumptime=1
fi
if [ -z "$lastreceivesecs" ]; then
    # set lastreceive and then update gcstart to be at least 1 second later
    config_set lastreceive "$gcstart"
    bumptime=1
fi
if [ -n "$bumptime" ]; then
    sleep 1
    gcstart="$(date "$datefmt")"
fi

progress "+ [$proj] garbage check ($(date))"

newdeltas=
[ -z "$alwaysredelta" ] || newdeltas="$noreusedeltaopt"
if [ -z "$newdeltas" ] && [ -n "$gfi_mirror" ]; then
    if [ $(list_packs --exclude-no-idx --count objects/pack) -le \
        $(list_packs --exclude-no-idx --count --quiet --only gfi-packs) ]; then
        # Don't bother with repack_gfi_packs since everything's being repacked
        newdeltas="--no-reuse-delta"
    fi
fi
if [ -z "$newdeltas" ] && [ -n "$noreusedeltaopt" ] &&
    [ $(list_packs --exclude-no-idx --count-objects objects/pack) -le $var_redelta_threshold ]; then
    # There aren't enough objects to worry about so just redelta to get the best pack
    newdeltas="--no-reuse-delta"
fi
if [ -z "$newdeltas" ]; then
    # Since we're not going to recompute deltas overall, we need to do the
    # "mini" maintenance so that we can get more optimal deltas
    [ -z "$noreusedeltaopt" ] || make_svn_pack
    repack_gfi_packs
    force_single_pack_redelta=
    [ -n "$gfi_mirror" ] || [ -n "$svn_mirror" ] || force_single_pack_redelta=1
    [ -z "$noreusedeltaopt" ] || combine_small_packs $force_single_pack_redelta
fi

## Safe Pruning In Forks
##
## We are about to perform garbage collection. We do NOT use the "git gc" or
## the "git repack" commands directly as they do not provide enough control over
## the fine details. However, we DO maintain a "gc.pid" file during our garbage
## collection so that a simultaneous "git gc" by an administrator will be
## blocked (and similarly we refuse to start garbage collection if we cannot
## create the "gc.pid" file).
##
## When we say "gc" in the below description we are referring to our "gc.sh"
## script, NOT the "git gc" command.
##
## If the project we are running garbage collection (gc) on has any forks we
## must be careful not to remove any objects that while no longer referenced by
## this project (the parent) are still referenced by one or more forks (the
## children) otherwise the children will become corrupt and we can't abide
## corrupt children.
##
## One way to accomplish this is to simply hard-link all currently existing
## loose objects and packs in the parent into all the children that refer to the
## parent (via a line in their objects/info/alternates file) before beginning
## the gc operation and then relying on a subsequent gc in the child to clean up
## any excess objects/packs. We used to use this strategy but it's very
## inefficient because:
##
##   1. The disk space used by the old pack(s)/object(s) will not be reclaimed
##      until all children (and their children, if any) run gc by which time
##      it's quite possible the topmost parent will have run gc again and
##      hard-linked yet another old pack down to its children (not to mention
##      loose objects).
##
##   2. When using the "-A" option with "git repack", any new objects in the
##      parent that are not referenced by children will continually get
##      exploded out of the hard-linked pack in the children whenever the
##      children run gc.
##
##   3. To avoid suboptimal and/or unnecessarily many packs being hard-linked
##      into child forks, we must run the "mini" gc maintenance before we
##      perform the hard-linking into the children which provides yet another
##      source of inefficiency.
##
## While we were still using the "-A" option to "git repack" (that was not
## always the case) to guarantee we can access old ref values for long enough
## to send out a meaningful mail.sh notification, another, more efficient,
## option became available to prevent corruption of child forks that continue
## to refer to objects that are no longer reachable from any ref in the parent.
##
## The only things that need be copied (or hard-linked) into the child fork(s)
## are those objects that have become unreachable from any ref in the parent.
##
## When we were using the "git repack -A -d" + "git prune --expire=1.day.ago"
## technique, the only objects that could ever be removed were loose objects
## that "git prune" determined were expired. In that case, loose objects were
## all that need be hard-linked down to child forks in order to avoid
## corruption of any child fork(s).
##
## The "git repack -A -d" + "git prune --expire=1.day.ago" + hard-linking loose
## objects to child forks technique remains fundamentally sound from the
## perspective of supporting simultaneous gc and push and keeping newly
## unreachable objects around long enough to be sure we can send out meaningful
## ref change notifications and never corrupting any child forks and never
## persisting the lifetime of large old packs containing mostly duplicate or
## unreachable objects as gc percolates through a project's entire fork tree.
##
## However, that technique suffers from one potential prodigious pitfall.
##
## Unreachable objects come flying out of their packs to splatter all over the
## objects subdirectories possibly creating a huge, inefficient mess.
##
## Often this is not an issue. Even with a lot of rebasing going on, usually
## the only objects that will splatter are some commits, trees and the odd blob
## here and there. Not enough to be overly concerned about.
##
## However, for the repository that frequently experiences a lot of non-fast-
## forward updates and/or outright ref deletion, the number of objects suddenly
## popping out of their packs at "git repack -A -d" time can be overwhelming.
##
## To avoid this issue we now use a four phase pack creation strategy.
## This will result in creation of up to four packs (instead of at most one).
##
##   I.  A complete pack (with bitmaps if appropriate) gets created including
##       only "reachable" objects from all refs/... refs plus HEAD. This will
##       also serve as the virtual bundle for the repository.
##
##  II.  A pack of recently-became-unreachable objects and friends is created.
##       (The "friends" are ref logs, linked working tree HEADs and indices.)
##       Because both the pre-receive and update.sh script record all ref
##       changes we can easily choose the cut off point for "recently".
##       It is only the fact we maintain those logs in the reflogs subdirectory
##       that allows this step to be possible.
##
## III.  If the repository has any forks with a non-zero length alternates file,
##       yet another pack of "--keep-unreachable" objects is generated that will
##       not actually be kept in the parent, but hard-linked into all the forks.
##
##  IV.  Finally, after running "git prune-packed", any remaining loose objects
##       are migrated into a pack of their own.
##
## We then remove any non-.keep packs that existed before we started the
## process being careful to keep any same-pack pushes for the "Push Pack Redux"
## race condition (see README-GC).
##
## By using "git pack-objects" directly we are able to accomplish this with
## very little additional effort.
##
## The packs produced by (III) are treated almost like ".keep" packs by child
## forks in that the objects in them are never repacked into any other
## "--keep-unreachable" packs (but they can migrate into phase I or II packs)
## and those phase III packs are then hard-linked into any grandchild forks.
##
## This avoids the space explosion that could occur if each fork level ended
## up duplicating the "--keep-unreachable" pack space by repacking those
## objects (essentially breaking the hard-link to the single copy of those
## objects).
##
## While it is true that each level of forks could potentially add yet another
## phase III pack to be hard-linked down to its children, such packs will only
## include unreachable objects not already in any phase III packs that were
## received from the parent.
##
## The space for the phase III packs will not be reclaimed until the gc
## finishes percolating through the entire "fork tree" of a project.
##
## This is not much different than the "git repack -A -d" situation where
## all the loose objects are hard-linked down into child forks. In that
## case forks that actually need any of those objects could gradually reduce
## the number of objects hard-linked into deeper fork levels.
##
## The difference with a phase III "--keep-unreachable" pack is that there
## cannot be any gradual reduction like that since it would require repacking
## the pack and breaking the hard-link thereby increasing storage space. The
## storage will instead always be reclaimed all at once when all of the
## projects in the "fork tree" complete their gc.
##
## However, the belief is that the huge space win by having all the
## unreachable objects packed up together far eclipses (when many objects are
## involved, the single-pack version can end up using 1/20th or less of the
## disk space compared to having them all as loose objects) any brief minor
## space savings that might occur under the "git repack -A -d" loose object
## system prior to the gc collection completing for all the projects in the
## "fork tree".
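
# Quick reference for the pack name suffixes used below (sha1s are placeholders):
#   pack-<sha1>_r.pack   pre-existing pack renamed to dodge "Push Pack Redux"
#   pack-<sha1>_u.pack   phase III "--keep-unreachable" pack hard-linked into forks
#   pack-<sha1>_o.pack   phase IV pack of formerly loose objects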

## utility functions
##

# rename_pack oldnamepath newnamepath
# note that .keep files are left untouched and not moved at all!
rename_pack() {
    [ $# -eq 2 ] && [ "$1" != "$2" ] || {
        echo >&2 "[$proj] incorrect use of rename_pack function"
        exit 1
    }
    # Git assumes that if the destination of the rename already exists
    # that it is, in fact, a copy of the same bytes so silently succeeds
    # without doing anything. We duplicate that logic here.
    # Git checks for the .idx file first before even trying to use a pack
    # so it should be the last moved and the first removed.
    for ext in pack bitmap idx; do
        [ -f "$1.$ext" ] || continue
        ln "$1.$ext" "$2.$ext" >/dev/null 2>&1 ||
        [ -f "$2.$ext" ] || {
            echo >&2 "[$proj] unable to move $1.$ext to $2.$ext"
            exit 1
        }
    done
    for ext in idx pack bitmap; do
        rm -f "$1.$ext"
    done
    return 0
}

make_packs_ugw() {
    find "$1" -maxdepth 1 -type f ! -perm -ug+w \
        -name "pack-$octet20*.pack" -exec chmod ug+w '{}' + || :
} 2>/dev/null

# set the variable named by $1 to the count of the remaining arguments
vcnt() {
    eval "$1="'$(( $# - 1 ))'
}
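# Example (matches the actual use later on): vcnt packcnt $packs
# sets packcnt to the number of whitespace-separated words in $packs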

get_index_tree() {
    if [ -s "$1" ]; then
        GIT_INDEX_FILE="$1"
        export GIT_INDEX_FILE
        git write-tree 2>/dev/null || :
        unset GIT_INDEX_FILE
    fi
}

get_detached_head() {
    if [ -s "$1" ] && read -r _head <"$1" 2>/dev/null; then
        case "$_head" in $octet20*)
            echo "$_head"
        esac
    fi
}

# compute_extra_reachables
# create lines suitable for a packed-refs file mentioning all the
# other refs we might like to keep.
# the current directory MUST be set to the repository's --git-dir
# the following are included:
#   * refs mentioned in reflogs/... files
#   * tree(s) created from index file(s)
#   * detached linked working tree heads
# Resulting objects are tested for existence and uniqified then output
# one per line under a refs/z* namespace
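#
# Example output (sha1 values are placeholders):
#   <sha1> refs/z/1
#   <sha1> refs/z/9
#   <sha1> refs/zz/10
# (the number of 'z's grows with the number of digits in the counter)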
compute_extra_reachables() {
    {
        digits8='[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]'
        find reflogs -mindepth 1 -maxdepth 1 -type f -name "$digits8*" -exec gzip -c -d -f '{}' + |
            awk '{print $2; print $3}'
        ! [ -f index ] || get_index_tree index
        if [ -d worktrees ]; then
            find worktrees -mindepth 2 -maxdepth 2 -name HEAD -type f -print |
            while read -r lwth; do
                get_detached_head "$lwth"
                get_index_tree "${lwth%HEAD}index"
            done
        fi
    } | LC_ALL=C sort -u |
    git cat-file ${var_have_git_260:+--buffer} --batch-check"${var_have_git_185:+=%(objectname)}" |
    awk '!/missing/ {num++; print $1 " " "refs/" substr("zzzzzzzzzzzz", 1, length(num)) "/" num}'
}


## main gc logic
##

# Everything else is more efficient if we do this first
# The "--prune" option is the default since v1.5.0 but it serves as "documentation" here
git pack-refs --all --prune
[ -e packed-refs ] || >>packed-refs # should never happen...

# If we have a logs directory or a worktrees directory expire the ref logs now
# Note that Git itself does not use either --rewrite or --updateref, so neither do we
! [ -d logs ] && ! [ -d worktrees ] || eval git reflog expire --all "${quiet:+>/dev/null 2>&1}" || :

make_repack_dir
! [ -e .gc_failed ] || exit 1
rm -f .gc_in_progress # make sure
touch .gc_in_progress # it's truly fresh
rm -f bundles/* objects/pack/pack-*.bndl
# This is perhaps a bit aggressive in that if we're suffering from "Push Pack Redux"
# and somehow we get run again immediately after the run where "Push Pack Redux" happened
# and we have garbage collection forced, there's just the barest, almost negligible,
# possibility that the "Push Pack Redux" ref updates _still_ have not happened and we
# should not be removing _r .keep files. None of the normal Girocco processing can
# cause this. The second run of this script would have to use the force gc option
# for it to even be possible in the first place. What's much more likely is that
# the initial run of this script was somehow interrupted in the middle before it
# could get rid of the _r .keep file itself in which case it's better to get rid of
# it now to avoid keeping something around that would perturb our nice and neat gc
rm -f objects/pack/pack-*_r.keep
# We will add .keep files for _u packs if and when we run phase III
# Otherwise they need to not have any .keep files during phases I and II
rm -f objects/pack/pack-*_u.keep

# We need to make sure that any non-Girocco (barely tolerated) Git object creation
# activity will be able to "freshen" the pack containing a pre-existing object
# that's being written. This really should not be necessary as the pre-receive
# hook should make sure this takes place for any incoming pushes.
# However, do it here anyway just in case.
make_packs_ugw objects/pack

# This is only effective with Git v2.3.5 and later and it will only matter when
# we are using one of the "internal_rev_list" modes of pack-objects
# (the combine-packs.sh script never uses any of those modes)
# The "git repack" and "git prune" commands always set this internally themselves
# It makes no difference if there's no repository corruption
GIT_REF_PARANOIA=1 && export GIT_REF_PARANOIA

# All of the options we might want to use with pack-objects were supported
# at some point prior to Git version v1.6.6 which is the minimum version that
# Girocco now requires. Except for one (--use-bitmap-index). Several of them
# are "boilerplate" options we always want to use so we bundle them up here.
pkopt="--delta-base-offset --keep-true-parents --non-empty --all-progress-implied"
# We want to use --include-tag, but before Git v2.10.1 it would leave out
# "middle" tags (e.g. a tag of a tag of a commit would omit the tagged tag)
# See http://repo.or.cz/git.git/b773ddea2cd3b08c for details
# ("pack-objects: walk tag chains for --include-tag", 2016-09-07, v2.10.1)
# This is not a free check as it matches all refs against refs/tags/ then
# peels all the annotated tags and checks for inclusion. The situation in
# which it would add a tag that was not already included by a reachability
# trace that included tag starting points can only occur if a new tag gets
# pushed during gc pointing to something that would have been packed anyway.
# But, it could happen and, really, compared to gc as a whole it's not that
# expensive to perform (provided we do not get an unconnected pack).
[ -z "$var_have_git_2101" ] || pkopt="$pkopt --include-tag"
pkopt="$pkopt ${quiet:---progress} $packopts"

# The git pack-objects command only supports bitmaps if all objects are being
# packed (the "--all" option) and the "--stdout" option is NOT being used.
# Additionally, while packing, if any encountered reachable objects are
# determined to be "not wanted" then no bitmap index will be written anyway.
# While it is theoretically possible that a project with a non-empty alternates
# file ends up packing all objects (because it does not actually use any of the
# objects found in the alternates), it's very unlikely. And, in the unlikely
# event that did occur, clients would see a message about only using one bitmap
# because Git can only use one bitmap at a time and at least one of the
# alternates is bound to have a bitmap. Therefore if we see a non-empty
# alternates file, we disable writing bitmaps which avoids the warning and any
# possibility of a client warning as well. Also if we are running anything
# before Git v2.1.0 (the effective version for repack.writeBitmaps=true) then
# we also always disable bitmap writing.
wbmopt=
[ -z "$var_have_git_210" ] || wbmopt="--write-bitmap-index"
# More recent versions of pack-objects have optimizations when not using the
# --local option. If we do not have any alternates it's a pointless option.
# If we do have alternates we need to skip writing a bitmap and we cannot
# have a bundle since it must contain all objects.
if [ -n "$isfork" ]; then
    lclopt="--local"
    wbmopt=
    makebndl=
else
    lclopt=
    makebndl=1
fi


## Phase I
##

wbmstr=
[ -n "$wbmopt" ] || wbmstr=" (bitmaps disabled)"
progress "~ [$proj] running primary full gc pack-objects$wbmstr"

gotforks=
! has_forks_with_alternates "$proj" || gotforks=1

# To avoid "Push Pack Redux" (see README-GC), after collecting the initial
# preexisting non-keep pack list, we rename them so that an incoming push
# pack cannot possibly experience a pack name collision. Git does not require
# use of the "default" pack names, simply that the proper extensions are used.
# We rename to insert an "_r" just before the extension to avoid "Push Pack Redux"
# name collisions. Later on we may create an "unreachable" pack for hard-linking
# down into forks and it will have an "_u" inserted just before its extension.
packlist="$(list_packs -C objects/pack --exclude-no-idx --exclude-keep --quiet .)" || :
oldpacks=
for oldpack in $packlist; do
    oldpack="${oldpack#pack-}"
    oldpack="${oldpack%.pack}"
    [ -f "objects/pack/pack-$oldpack.pack" ] || {
        echo >&2 "[$proj] unable to list old pack files"
        exit 1
    }
    if [ "${oldpack#*[!0-9a-fA-F]}" != "$oldpack" ]; then
        # names not exclusively hexadecimal do not need renaming
        oldpacks="${oldpacks:+$oldpacks }$oldpack"
        continue
    fi
    rename_pack "objects/pack/pack-$oldpack" "objects/pack/pack-${oldpack}_r" || {
        echo >&2 "[$proj] unable to rename old pack files"
        exit 1
    }
    # If the oldpack has a .keep now it means a "Push Pack Redux" is actually
    # in progress at this moment and we need to .keep the renamed pack,
    # otherwise no "Push Pack Redux" has started yet or it has already finished.
    # In either case we're okay because if it's just finished then all ref
    # changes have already been made so we don't need a .keep and we will
    # see the ref changes and grab all the objects via a reachability trace.
    # If it hasn't started yet that's okay because we're done moving that
    # name so a complete pack will appear under the old name that we'll
    # leave alone.
    if [ -f "objects/pack/pack-$oldpack.keep" ]; then
        echo "Push Pack Redux" >"objects/pack/pack-${oldpack}_r.keep"
    else
        oldpacks="${oldpacks:+$oldpacks }${oldpack}_r"
    fi
done

# We wish to keep deltas from our last full pack so if we're not redeltaing
# then make sure the .pack associated with the .bitmap has a newer mod time
# (If there is no .bitmap then touch the pack with the most objects instead.)
if [ -z "$newdeltas" ]; then
    bmpack="$(list_packs --exclude-no-bitmap --exclude-no-idx --max-matches 1 objects/pack)"
    [ -n "$bmpack" ] || bmpack="$(list_packs --exclude-no-idx --max-matches 1 --object-limit -1 --include-boundary objects/pack)"
    if [ -n "$bmpack" ] && [ -f "$bmpack" ] && [ -s "$bmpack" ]; then
        sleep 1
        touch -c "$bmpack" 2>/dev/null || :
        # We must touch .gc_in_progress here to avoid $bmpack looking
        # like it's been "freshened" when redundant packs are removed
        # It's okay if they have the same mod time, but POSIX does not
        # guarantee an ordering for the "touching" that occurs which is
        # why this must be a separate command but needs no "sleep 1"
        touch .gc_in_progress
    fi
fi

# Now we need to make sure that any "freshening" that takes place will actually
# result in a "newer" modification time than the .gc_in_progress file now has
sleep 1

# We run git pack-objects from the repack subdirectory so we can force
# optimized packs to be generated even for repositories that do not have any
# tagged commits
packs="$(git --git-dir=repack pack-objects </dev/null \
    $pkopt --all $newdeltas $lclopt ${wbmopt:---honor-pack-keep} $@ repack/alt/pack/pack)"
vcnt packcnt $packs
[ $packcnt -eq 1 ] || makebndl=


## Phase II
##

progress "~ [$proj] running supplementary gc pack-objects"

# Add the "supplementary" refs
compute_extra_reachables >>repack/packed-refs

# Subtract the primary refs
GIT_ALTERNATE_OBJECT_DIRECTORIES="$PWD/repack/alt"
export GIT_ALTERNATE_OBJECT_DIRECTORIES

# For this one we MUST use --local and MUST NOT use --write-bitmap-index
# However, if there is a "logs" subdirectory we need to use --reflog
# We do add it, just in case, if the linked working trees dir is present
# We do not add --indexed-objects as that requires v2.2.0 and it's unclear
# if it properly includes linked working tree index files or not. The
# above compute_extra_reachables has already included all index trees (thereby
# providing proper --indexed-objects support for all Git versions) making the
# option completely unnecessary.
rflopt=
! [ -d logs ] && ! [ -d worktrees ] || rflopt=--reflog
spacks="$(git --git-dir=repack pack-objects </dev/null \
    $pkopt --honor-pack-keep --all $rflopt $newdeltas --local $@ repack/alt/pack/pack)"


## Phase III
##

# There's nothing to do for Phase III unless we have forks that refer to our
# project from their alternates file
hlpacks=
upacks=
if [ -n "$gotforks" ]; then

    progress "~ [$proj] running keep-unreachable gc pack-objects for forks"

    # If we are a fork, any pre-existing _u packs need to have a .keep
    # for this phase and be added to the hlpacks list otherwise (we are
    # not a fork) pre-existing _u packs are anomalies to be treated like
    # regular non-_u packs
    if [ -n "$isfork" ]; then
        for upack in $(find objects/pack -mindepth 1 -maxdepth 1 -name "pack-$octet20*_u.pack" -print); do
            upack="${upack%.pack}"
            [ -e "$upack.keep" ] || echo "unreachable" >"$upack.keep"
            hlpacks="${hlpacks:+$hlpacks }${upack#objects/pack/pack-}"
        done
    fi
    # Using either --no-reuse-delta or --no-reuse-object together with the
    # --keep-unreachable option is a very, very, very bad idea when good
    # packs are the desired outcome. If newdeltas are being generated
    # then we pack to a temp name, and use combine-packs.sh to get a better
    # pack as the result to avoid making a bad --keep-unreachable pack
    pfx=
    [ -z "$newdeltas" ] || pfx="ku"
    upacks="$(git --git-dir=repack pack-objects </dev/null \
        $pkopt --honor-pack-keep --all $rflopt --keep-unreachable --local $@ repack/alt/pack/${pfx}pack)"
    if [ -n "$upacks" ] && [ -n "$newdeltas" ]; then
        progress "~ [$proj] rebuilding keep-unreachable pack deltas"
        oldupacks="$upacks"
        upacks="$(
            printf "repack/alt/pack/${pfx}pack-%s.pack\n" $oldupacks |
            run_combine_packs --names --weak-naming --non-empty --all-progress-implied ${quiet:---progress} \
                $packopts $newdeltas $@ repack/alt/pack/pack)"
        eval rm -f "$(printf \""repack/alt/pack/${pfx}pack-%s.*"\"" " $oldupacks)"
    fi
    for upack in $upacks; do
        rename_pack "repack/alt/pack/pack-$upack" "repack/alt/pack/pack-${upack}_u"
    done
    rm -f objects/pack/pack-*_u.keep
    [ -z "$hlpacks" ] && [ -z "$upacks" ] ||
        progress "~ [$proj] hard-linking keep-unreachable pack(s) into immediate child forks"

    # We have to update the lastparentgc time in the child forks even if they do not get any
    # new "unreachable packs" because they need to run gc just in case the parent now has some
    # objects that used to only be in the child so they can be removed from the child.
    # For example, a "patch" might be developed first in a fork and then later accepted into
    # the parent in which case the objects making up the patch in the child fork are now
    # redundant (since they're now in the parent as well) and need to be removed from the
    # child fork which can only happen if the child fork runs gc.
    lastparentgc="$(date "$datefmt")"

    # It is enough to copy objects just one level down and get_repo_list
    # takes a regular expression (which is automatically prefixed with '^')
    # so we can easily match forks exactly one level down from this project
    forkdir="$proj"
    get_repo_list "$forkdir/[^/:][^/:]*:" |
    while read fork; do
        # Ignore forks that do not exist or are symbolic links
        ! [ -L "$cfg_reporoot/$fork.git" ] && [ -d "$cfg_reporoot/$fork.git" ] ||
            continue
        # Or do not have a non-zero length alternates file
        [ -s "$cfg_reporoot/$fork.git/objects/info/alternates" ] ||
            continue
        runupdate=
        # Match hlpacks in parent project if any
        if [ -n "$hlpacks" ]; then
            mkdir -p "$cfg_reporoot/$fork.git/objects/pack"
            eval ln -f "$(printf '"objects/pack/pack-%s.pack" ' $hlpacks)" \
                "$(printf '"objects/pack/pack-%s.idx" ' $hlpacks)" \
                '"$cfg_reporoot/$fork.git/objects/pack/"'
            runupdate=1
        fi
        # Match upacks in repack/alt area if any
        if [ -n "$upacks" ]; then
            mkdir -p "$cfg_reporoot/$fork.git/objects/pack"
            eval ln -f "$(printf '"repack/alt/pack/pack-%s_u.pack" ' $upacks)" \
                "$(printf '"repack/alt/pack/pack-%s_u.idx" ' $upacks)" \
                '"$cfg_reporoot/$fork.git/objects/pack/"'
            runupdate=1
        fi
        if ! [ -e "$cfg_reporoot/$fork.git/.needsgc" ]; then
            # Trigger a mini gc in the fork if it now has too many packs
            packs="$(list_packs --quiet --count --exclude-no-idx --exclude-keep "$cfg_reporoot/$fork.git/objects/pack")" || :
            if [ -n "$packs" ] && [ "$packs" -ge 20 ]; then
                >"$cfg_reporoot/$fork.git/.needsgc"
            fi
        fi
        [ -z "$runupdate" ] || git --git-dir="$cfg_reporoot/$fork.git" update-server-info
        # Update the fork's lastparentgc date (must be more recent than $gcstart)
        git --git-dir="$cfg_reporoot/$fork.git" config gitweb.lastparentgc "$lastparentgc"
    done
fi
1299 # Now move any primary/supplementary packs back into objects/pack
1300 # then drop any "unfreshened" redundant packs and clear repack/alt
1302 # First make sure the primary pack(s) have the most recent mod time
1303 [ -z "$packs" ] || printf 'repack/alt/pack/pack-%s.pack\n' $packs | xargs touch -c 2>/dev/null || :
1305 # Move the packs into place
1306 for pack in $packs $spacks; do
1307 rename_pack "repack/alt/pack/pack-$pack" "objects/pack/pack-$pack"
1308 done

# It's possible that one of the $oldpacks had a .bitmap, got renamed (along
# with its .bitmap) and then got "freshened" causing us to not remove it.
# However, if $wbmopt is set we most likely now have TWO .bitmap packs!
# That can produce ugly warnings we don't want and possibly cause the wrong
# bitmap to be used, since Git will only ever use one .bitmap file.
# If this has happened, the .bitmap we want to discard will always have an
# _r infix, so we can just zap any such now; doing so leaves the pack intact.
[ -z "$wbmopt" ] || rm -f objects/pack/pack-*_r.bitmap || :

# Remove the redundant packs that have not since been "freshened"
# This does not completely eliminate the race condition window (Girocco's own
# activities -- gc/fetch/receive are immune to the race) but it substantially
# shrinks it down to just the time after the find but before the following rm
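# (Illustrative: if repack/oldpacks lists pack-aaaa.pack and pack-bbbb.pack
# while repack/freshened lists only pack-bbbb.pack, then "join -v 1" below
# emits just pack-aaaa.pack -- the set difference -- so only the
# never-freshened pack-aaaa.* files get removed.)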
>repack/oldpacks
[ -z "$oldpacks" ] ||
	printf 'objects/pack/pack-%s.pack\n' $oldpacks |
	LC_ALL=C sort >repack/oldpacks
find objects/pack -maxdepth 1 -type f -name "pack-$octet20*.pack" -newer .gc_in_progress -print |
	LC_ALL=C sort >repack/freshened
deadpacks="$(LC_ALL=C join -v 1 repack/oldpacks repack/freshened | LC_ALL=C sed 's/\.pack$//')"
[ -z "$deadpacks" ] ||
	eval echo "$(printf '"%s".* ' $deadpacks)" | xargs rm -f || :

# No need for this anymore
rm -rf repack/alt
unset GIT_ALTERNATE_OBJECT_DIRECTORIES

## Phase IV

progress "~ [$proj] running gc prune-packed"

# We do not want the redundant packs or any new "--keep-unreachable" pack(s) to be
# present while running prune-packed. We try to guarantee that any loose object
# that's unreachable persists for at least one $Girocco::Config::min_gc_interval
# (notwithstanding administrator interference forcing an earlier gc to occur).
# If we were to include the redundant/keep-unreachable pack(s) when running
# prune-packed and a loose unreachable object happened to be duplicated in one
# of them, we would end up removing it too soon and voiding our guarantee.
git prune-packed $quiet

progress "~ [$proj] running loose objects gc pack-objects"

# Although Git v2.10.0 and later support a --pack-loose-unreachable option,
# we MUST NOT use it for these reasons:
# 1) We're not interested in expensive "unreachable" at this point, only "loose"
# 2) It produces simply horrid packs about 3.8x larger than they should be
# 3) We don't require anything more than Git v1.6.6
lpacks="$(run_combine_packs </dev/null --names --loose --weak-naming --non-empty --all-progress-implied ${quiet:---progress} $packopts $newdeltas "$@")"
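# (run_combine_packs is a Girocco helper; the assumption here, based on how
# $lpacks is used below, is that with --names it prints the hash name(s) of
# the new pack(s) it creates, which is what gets captured above.)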

if [ -n "$lpacks" ]; then
	# Make sure any primary pack(s) have a more recent mod time than "unreachable" objects packs
	if [ -n "$packs" ]; then
		sleep 1
		printf 'objects/pack/pack-%s.pack\n' $packs | xargs touch -c 2>/dev/null || :
	fi
	# We need to identify these packs later so we don't combine_packs them
	for objpack in $lpacks; do
		rename_pack "objects/pack/pack-$objpack" "objects/pack/pack-${objpack}_o" || :
	done
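	# (Illustrative: a pack named pack-aaaa.pack becomes pack-aaaa_o.pack;
	# the _o infix marks these loose-objects packs so they can be told
	# apart later and excluded from combine_packs, per the comment above.)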
	# Finally zap the corresponding loose objects
	progress "~ [$proj] running packed loose objects gc prune-packed"
	git prune-packed $quiet
fi

! [ -e .gc_failed ] || exit 1
# These, if they exist, are now meaningless and need to be removed
rm -f gfi-packs .needsgc .svnpack .svnpackgc

# Make sure this stays up to date
git update-server-info

# We must make loose objects group writable so that they
# can be freshened by other pushers. Technically we need only do this for
# push projects, but to enable mirror projects to be more easily converted to
# push projects, we go ahead and do it for all projects.
# By the time we get here we really shouldn't have any of these, but just in case.
{ find objects/$octet -type f -name "$octet19*" -exec chmod ug+w '{}' + || :; } 2>/dev/null
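# ($octet and $octet19 come from shlib.sh; presumably $octet matches one
# two-hexdigit fan-out directory such as objects/2a and $octet19* matches
# the remaining hex digits of a loose object's file name.)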

# darcs:// mirrors have an xxx.log file that will grow endlessly
# if this is a mirror and the file exists, shorten it to 10000 lines
# also take this opportunity to optimize the darcs repo
if ! [ -e .nofetch ] && [ -n "$cfg_mirror" ]; then
	url="$(config_get baseurl)" || :
	case "$url" in darcs://*)
		if [ -n "$cfg_mirror_darcs" ]; then
			url="${url%/}"
			basedarcs="$(basename "${url#darcs:/}")"
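			# (Illustrative: for url="darcs://example.com/some/repo",
			# "${url#darcs:/}" leaves "/example.com/some/repo" and
			# basename then yields basedarcs="repo".)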
			if [ -f "$basedarcs.log" ]; then
				tail -n 10000 "$basedarcs.log" >"$basedarcs.log.$$"
				mv -f "$basedarcs.log.$$" "$basedarcs.log"
			fi
			if [ -d "$basedarcs.darcs" ]; then
				(
					cd "$basedarcs.darcs"
					# without show_progress suppress non-error output
					# (the subshell keeps this exec redirect from
					# affecting the rest of the script)
					[ -n "$show_progress" ] || exec >/dev/null
					# Note that this does not optimize _darcs/inventories/ :(
					darcs optimize || :
				)
			fi
		fi
	esac
fi

# Create a matching .bndl header file for the all-in-one pack we just created
# but only if we're not a fork (otherwise the bundle would not be complete)
# and we are running at least Git version 1.7.2 (pack_is_complete always fails otherwise)
if [ -n "$makebndl" ] && [ -n "$var_have_git_172" ]; then
	# There should only be one pack in $packs but do some checking...
	# The one we just created will have a .idx and will NOT have a .keep
	progress "~ [$proj] creating downloadable bundle header"
	pkbase=
	pkhead=
	IFS= read -r curhead <repack/HEAD.orig || :
	if
		[ -s "objects/pack/pack-$packs.pack" ] &&
		[ -s "objects/pack/pack-$packs.idx" ] &&
		! [ -e "objects/pack/pack-$packs.keep" ] &&
		pkhead="$(pack_is_complete "$PWD/objects/pack/pack-$packs.pack" \
			"$PWD/repack/packed-refs.orig" "$curhead")"
	then
		pkbase="objects/pack/pack-$packs"
	fi
	if [ -n "$pkbase" ] && [ -n "$pkhead" ]; then
		{
			symref=
			case "$curhead" in "ref: refs/"?*|"ref:refs/"?*|"refs/"?*)
				symref="${curhead#ref:}"
				symref="${symref# }"
			esac
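			# (Illustrative: curhead="ref: refs/heads/master" yields
			# symref="refs/heads/master" -- strip "ref:" then one space.)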
			bndlurl=
			[ -z "$cfg_httpbundleurl" ] || bndlurl=" url=$cfg_httpbundleurl/$proj.git/clone.bundle"
			echo "# v2 git bundle"
			LC_ALL=C sed -ne "/^$octet20$hexdig* refs\/[^ $tab]*\$/ p" <repack/packed-refs.orig
			if [ -n "$symref" ]; then
				printf "$pkhead HEAD\0symref=HEAD:%s%s\n" "$symref" "$bndlurl"
			else
				if [ -n "$bndlurl" ]; then
					# drop the leading space included in $bndlurl
					printf "$pkhead HEAD\0%s\n" "${bndlurl# }"
				else
					echo "$pkhead HEAD"
				fi
			fi
			echo ""
		} >"$pkbase.bndl"
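		# (Illustrative: the resulting $pkbase.bndl then contains, e.g.:
		#   # v2 git bundle
		#   <40-hex sha> refs/heads/master
		#   <40-hex sha> HEAD\0symref=HEAD:refs/heads/master url=...
		#   <empty line>
		# where \0 stands for the NUL byte written by the printf above.)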
		bndletag="$("$cfg_basedir/bin/rangecgi" --etag -m 1 "$pkbase.bndl" "$pkbase.pack")" || :
		bndlsha="$(printf '%s' "$bndletag" | git hash-object --stdin)" || :
		if [ -n "$bndletag" ]; then
			case "$bndlsha" in $octet20*)
				bndlshatrailer="${bndlsha#????????}"
				bndlshaprefix="${bndlsha%$bndlshatrailer}"
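				# (Illustrative: for bndlsha="0123abcd...", the two
				# expansions above leave bndlshaprefix="0123abcd",
				# i.e. the first 8 hex digits of the sha.)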
				bndlname="$(TZ=UTC date +%Y%m%d_%H%M%S)-${bndlshaprefix:-0}"
				[ -d bundles ] || mkdir bundles
				echo "${pkbase#objects/pack/}.bndl" >"bundles/$bndlname"
				echo "${pkbase#objects/pack/}.pack" >>"bundles/$bndlname"
				ln -s -f -n "$bndlname" bundles/latest
			esac
		fi
	fi
fi

# Record the size of this repo as the sum of its clone packed-refs + *.pack sizes as 1024-byte blocks
eval "reposizek=$(( $(
	echo 0 $(du -k repack/packed-refs.orig $(printf 'objects/pack/pack-%s.pack ' $packs) 2>/dev/null |
		LC_ALL=C awk '{print $1}') |
	LC_ALL=C sed -e 's/ / + /g') ))"
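# (Illustrative: if du -k reports 4 for packed-refs.orig and 12345 for a
# single pack, the pipeline builds the string "0 + 4 + 12345", which the
# arithmetic expansion then evaluates, giving reposizek=12349.)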
config_set_raw girocco.reposizek "${reposizek:-0}"

# Now we're finally done with this
rm -rf repack

# We didn't use to do anything about rerere or worktrees but we're
# trying to make nice with linked working trees these days :)
# Maybe even non-bare repositories too, but *shush* about those ;)
if [ -n "$var_have_git_250" ] && [ -d worktrees ]; then
	# The value "3.months.ago" is hard-coded into gc.c rather than
	# having the default be in worktree.c, so we must provide it if
	# we get nothing out of the gc.worktreePruneExpire config item.
	# Prior to Git v2.6.0 the config item was gc.pruneworktreesexpire,
	# but we just always use the newer name no matter what Git version.
	expiry="$(git config --get gc.worktreePruneExpire 2>/dev/null)" || :
	eval git worktree prune --expire '"${expiry:-3.months.ago}"' "${quiet:+>/dev/null 2>&1}" || :
fi
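# (Illustrative: with $quiet set and no gc.worktreePruneExpire config value
# present, the eval above runs the equivalent of:
#   git worktree prune --expire "3.months.ago" >/dev/null 2>&1
# the eval is what allows the redirections stored in the string to apply.)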

# git rerere does it right and handles its own default/config'd expiration values
! [ -d rr-cache ] || eval git rerere gc "${quiet:+>/dev/null 2>&1}" || :

# We use $gcstart here to avoid a race where a push occurs during the gc itself
# and the next future gc could be incorrectly skipped if we used the current
# timestamp here instead
config_set lastgc "$gcstart"
rm -f "$lockf"

progress "- [$proj] garbage check ($(date))"