Fix #338: re.sub() flag argument at wrong position.
[docutils.git] / sandbox / infrastructure / attic / docutils-update.berlios
bloba1aa82fd41edf952602a8de931dacf4f67d1475d
#! /bin/bash
# $Id$

# ATTENTION
# ---------
# This script is obsolete::

# Bail out immediately: updating is now done locally.  (Typo fixed:
# the replacement script is docutils-update.local, not doctils-….)
echo "cron job is replaced by local updating (see docutils-update.local)"
exit
# Kept for reference:

# This script is installed as a cron job to automatically update the
# Docutils web site whenever the SVN files change.  Any .html document
# with a corresponding .txt file is regenerated whenever the .txt
# changes.

# Options:
#   -f    Do not give feedback.
#   -t    Run the script in trace mode ("set -o xtrace").
#   -u    Regenerate .html unconditionally.
#   -v    Run verbosely.

# Prerequisites:
#
# - Checked out trunk at $snapshotdir.
# - Checked out main tree at $lib.
# exit on error
set -e

# make all newly created files group writeable
umask 002

# --- Site layout (all paths on the BerliOS shell host). ---
basedir=/home/groups/docutils/htdocs
project=docutils
# $auxdir is non-public.
auxdir=$basedir/aux
# $htdocsdest is the destination for htdocs and will be moved to
# another server later; so we keep it non-public (under $auxdir).
htdocsdest=$auxdir/htdocs
# Where to create the snapshots (non-public).
snapshotdir=$auxdir/snapshots
# Where to publish the snapshots (public).
snapshotdest=/home/groups/ftp/pub/docutils
bindir=$auxdir/bin
# Tarball/patch files used to mirror htdocs to SF.net via bsdiff.
htdocs_patchfile=$auxdir/htdocs.patch
htdocs_tarball=$auxdir/htdocs.tar
htdocs_new_tarball=$auxdir/htdocs.new.tar
# htdocs directory on SF.net
remoteproject=/home/groups/d/do/docutils
remotehtdocs=$remoteproject/htdocs
pylib=$auxdir/lib/python
lib=$pylib/$project
# Lock directory.
lockdir=$auxdir/lock
# SSH stuff.  NOTE: the key is copied into $lockdir later so its
# permissions can be forced to 0600 (SSH refuses group-writable keys).
sshdir=$auxdir/.ssh
sshhost=docutilsupdate@shell.sourceforge.net
sshcommand="ssh -i $lockdir/id_dsa
    -o UserKnownHostsFile=$sshdir/known_hosts $sshhost"
# Project base URL (for sitemap) without trailing slash.
baseurl="http://docutils.sourceforge.net"

export PYTHONPATH=$pylib:$lib:$lib/extras
export PATH=$lib/tools:$PATH
# Option defaults (see the option list at the top of the file).
trace=0
unconditional=0
verbose=0
feedback=1

# Parse command-line flags.
while getopts ftuv opt
do
    case $opt in
        f) feedback=;;
        t) trace=1;;
        u) unconditional=1;;
        v) verbose=1;;
        \?) exit 2;;
    esac
done
# Discard the parsed options; arithmetic expansion replaces the
# deprecated external `expr`.
shift $((OPTIND - 1))
test $feedback && echo 'Starting docutils-update run...' || true

if [ $trace -eq 1 -o $verbose -eq 1 ] ; then
    set -o xtrace
fi

# Acquire lock.  mkdir is atomic, so it doubles as a mutex against
# concurrent cron runs; if it exists, another run is (or was) active.
if ! mkdir $lockdir; then
    echo
    echo Could not create lock directory at
    echo $lockdir
    echo
    echo Please ensure no other user is running this script
    echo and delete the directory.
    exit 1
fi

# Always clean up on exit.
trap "rm -rf $lockdir; trap - 0; exit 1" 0 1 2 3 15

# Make sure the lock directory is deletable (i.e. rwx) by other group
# members (in case this script crashes after copying files into the
# directory) and un-readable by world (because we'll be storing the
# key in it).
chmod 0770 $lockdir
# update library area
cd $lib
svn up --quiet

# -------------------- Snapshots: --------------------

# gather the materials
cd $snapshotdir
# Undo the previous run's local __version_details__ edit so "svn up"
# sees a pristine file.
svn -q revert $project/$project/__init__.py
# Non-empty iff the update pulled in any changes; trailing "true"
# keeps set -e happy when grep matches nothing.
haschanges="$(svn up docutils sandbox web | grep -v '^At revision '; true)"

# update __version_details__ string (e.g. "snapshot 2006-01-01, r4000")
version_details="snapshot $(date --utc --iso), r$(svn info docutils | grep ^Revision: | sed 's/^Revision: //')"
# Patch the version string in place with ed.
(echo ",s/^__version_details__ = .*\$/__version_details__ = '$version_details'/";
 echo wq) | ed $project/$project/__init__.py 2> /dev/null
# Ensure proper directory permissions are set so that the files can be
# modified by several users.  Changing permissions of files is
# probably not necessary because files can be deleted and re-created.
# Do not change permissions of aux directory to keep it non-public
# (but change permissions for all subdirectories).
#find $basedir -type f -print0 | xargs -0 chmod ug+rw 2> /dev/null || true
find $basedir -name aux -o -type d -print0 | xargs -0 chmod ug+rwxs 2> /dev/null || true

# create the snapshots
exclude='--exclude=.svn'
tar -cz $exclude -f $project-snapshot.tgz $project
tar -cz $exclude -f $project-sandbox-snapshot.tgz sandbox
tar -cz $exclude -f $project-web-snapshot.tgz web
( cd sandbox/gschwant ;
  tar -cz $exclude -f ../../docfactory-snapshot.tgz docfactory )

# plant the snapshots
mv -f *snapshot.tgz $snapshotdest

# revert and touch (to avoid updating the web site only because of the
# changed timestamp)
svn -q revert $project/$project/__init__.py
touch $project/$project/__init__.py --date \
    "$(svn info $project/$project/__init__.py | \
       grep 'Last Changed Date:' | sed 's/[^:]*: //')"
# -------------------- htdocs: --------------------

cd $snapshotdir

# Copy every regular file and symlink under the given paths into
# $htdocsdest, preserving relative paths (--parents) and skipping
# .svn directories; --update only overwrites older destination files.
function copy_to_htdocsdest() {
    find "$@" -type d -name .svn -prune -o \( -type f -o -type l \) -print0 | \
        xargs -0 cp --no-dereference --update --parents \
              --target-directory="$htdocsdest"
}

# update htdocs
copy_to_htdocsdest sandbox
(cd $project; copy_to_htdocsdest *)
# .[^.]* also picks up dotfiles (but not . and ..) in web/.
(cd web; copy_to_htdocsdest * .[^.]*)
# update HTML docs
cd $htdocsdest/tools

# Tracing the loops below is far too noisy; suppress unless -t.
if [ $trace -eq 0 ] ; then
    set +o xtrace
fi

# Run any project-provided update makefiles first.
for makefile in $(find .. -name Makefile.docutils-update) ; do
    dir=$(dirname $makefile)
    ( cd $dir ; make -f Makefile.docutils-update -s )
done

# Regenerate every .html file whose .txt source is newer (or all of
# them with -u).  PEP sources go through rstpep2html.py.
for htmlfile in $(find .. -name '*.html') ; do
    dir=$(dirname $htmlfile)
    base=$(basename $htmlfile .html)
    txtfile=$dir/$base.txt
    if [ -e $txtfile ] ; then
        if [ $unconditional -eq 1 -o $txtfile -nt $htmlfile ] ; then
            if [ "${base:0:4}" == "pep-" ] ; then
                test $feedback && echo "$txtfile (PEP)" || true
                python $lib/tools/rstpep2html.py --config=$dir/docutils.conf $txtfile $htmlfile
                haschanges=1
            else
                test $feedback && echo $txtfile || true
                python $lib/tools/rst2html.py --config=$dir/docutils.conf $txtfile $htmlfile
                haschanges=1
            fi
        fi
    fi
done

if [ $trace -eq 1 -o $verbose -eq 1 ] ; then
    set -o xtrace
fi
# -------------------- XML sitemap for search engines: --------------------

cd $htdocsdest

# Update the sitemap only if something has changed because it takes
# very much CPU time.
if test -n "$haschanges"; then
    (
        echo '<?xml version="1.0" encoding="UTF-8"?>'
        echo '<urlset xmlns="http://www.google.com/schemas/sitemap/0.84">'
        if [ $trace -eq 0 ] ; then
            set +o xtrace
        fi
        # Emit directories with a trailing slash, skip dot-entries,
        # and list all regular files and symlinks.
        find . -name '.[^.]*' -prune -o -type d -printf '%p/\n' \
                -o \( -type f -o -type l \) -print | \
            while read i; do
                # i is the file name.
                if test "$i" == ./; then
                    # Homepage.
                    i=index.html
                    url="$baseurl/"
                elif test "$i" == ./sitemap -o "${i: -1}" == / -a -f "${i}index.html"; then
                    # This is a directory and it has an index.html, so we
                    # don't need to include it.
                    continue
                else
                    url="$baseurl${i:1}"
                    # %-encode spaces for the URL.
                    url="${url// /%20}"
                fi
                lastmod="$(date --iso-8601=seconds -u -r "$i")"
                # Google wants a colon in front of the last two digits.
                lastmod="${lastmod::22}:00"
                if test "${i: -5}" == .html; then
                    # HTML files (including the home page) have highest priority.
                    priority=1.0
                elif test "${i: -4}" == .txt; then
                    # Text files have medium priority.
                    priority=0.5
                else
                    # Everything else (source files etc.) has low priority.
                    priority=0.2
                fi
                echo "<url><loc>$url</loc><lastmod>$lastmod</lastmod><priority>$priority</priority></url>"
            done
        if [ $trace -eq 1 -o $verbose -eq 1 ] ; then
            set -o xtrace
        fi
        echo '</urlset>'
    ) > sitemap
    # sitemap is compressed on the remote site for smaller patch sizes.
fi
# -------------------- Push changes to remote server. --------------------

# SSH doesn't want to read id_dsa files which don't have 0600
# permissions.  This is getting into our way here, but we work around
# this by copying id_dsa to $lockdir/id_dsa and setting the
# permissions of the resulting id_dsa file to 0600.

# Copy the key.
cp $sshdir/id_dsa $lockdir/id_dsa
# SSH wants this.
chmod 0600 $lockdir/id_dsa

rm -f $htdocs_patchfile
cd $htdocsdest

# try to transfer with rsync
rsync -e "$sshcommand" -r ./ web.sourceforge.net:$remotehtdocs

# Create new tarball.
tar cf $htdocs_new_tarball .

# If there is no old tarball, we have to transmit the whole tarball.
if test ! -f $htdocs_tarball; then
    test $feedback && echo Transmitting entire tarball. || true
    # The double-quoted command below is expanded locally ($remoteproject
    # etc.) and executed on the remote host, reading the gzipped tarball
    # from stdin.
    gzip -c $htdocs_new_tarball | $sshcommand "
set -e
umask 002
cd $remoteproject
gunzip -c > htdocs.tar
cd $remotehtdocs
tar xmf $remoteproject/htdocs.tar
gzip -f sitemap
"
# If the current and the new tarball differ, transmit patch file.
elif ! diff -q $htdocs_tarball $htdocs_new_tarball > /dev/null; then
    # Create patch.
    $bindir/bsdiff $htdocs_tarball $htdocs_new_tarball $htdocs_patchfile
    test $feedback && echo Patch size: $(du -h $htdocs_patchfile | sed 's/\t.*//') || true
    # Delete current tarball.  If something goes wrong with uploading
    # and applying the patch file, docutils-update will notice that
    # the tarball isn't present at the next run and transfer the whole
    # tarball, because we're left in an undefined state (the servers
    # are out of sync).
    rm -f $htdocs_tarball
    # Upload patch file (fed to the remote command on stdin).
    $sshcommand "
set -e
umask 002
cd $remoteproject
cat > htdocs.patch
~/bin/bspatch htdocs.tar htdocs.new.tar htdocs.patch
cd $remotehtdocs
tar xmf $remoteproject/htdocs.new.tar
gzip -f sitemap
cd $remoteproject
mv htdocs.new.tar htdocs.tar
rm -f htdocs.patch
" < $htdocs_patchfile
fi
# Keep the freshly built tarball as the new baseline for diffing.
mv $htdocs_new_tarball $htdocs_tarball
# Tidy up: remove the patch file, disarm the cleanup trap, and release
# the lock directory.
rm -f $htdocs_patchfile
trap - 0 1 2 3 15
rm -rf -- "$lockdir"
test $feedback && echo '...docutils-update done.' || true

# Local Variables:
# indent-tabs-mode: nil
# End: