# Scrape provenance (gitweb viewer residue, kept as comments):
# Commit: check for file "ON_SOURCEFORGE"; rsync locally if it exists.
# Repo path: docutils/kirr.git / sandbox / infrastructure / docutils-update.local
# Blob: f3b4b833e6c4ba058829c3c075055b8f6c9f334d
1 #! /bin/bash
2 # $Id$
4 # This script updates the Docutils web site.
6 # The web-root contains
8 # * files and directories from ``trunk/web``:
9 #
10 # * files and directories from ``trunk/docutils``:
11 # All files for easy referencing in mails.
13 # * and ``trunk/sandbox``.
15 # TODO
17 # * sourceforge does not offer cron, but shell or web instead:
18 # maybe use cgi-bin script to rebuild the website now and then,
19 # e.g. if accessed and more than one day old and svn is newer.
20 # * this file might become easier if cleanest cron task handling is
21 # cut out, as it is not run by cron.
23 # ATTENTION
25 # Any .html document with a corresponding .txt file is regenerated
26 # if the .txt has changed, but no new .html files will be generated.
28 # * Funny thing: sf hides README.txt files.
30 # ATTENTION
32 # Directories might contain Makefile.docutils-update files with
33 # special instructions. Only used in docs/user to call rst2s5.
34 # Maybe add special treatment and remove this general solution.
36 # Options:
37 # -f Do not give feedback.
38 # -t Run the script in trace mode ("set -o xtrace").
39 # -u Regenerate .html unconditionally.
40 # -v Run verbosely.
41 # -q Run quiet.
43 # Prerequisites:
# Exit on error.
set -e

# Make all newly created files group writeable.
umask 002

# URL for SVN project checkout:
svnurl=https://docutils.svn.sourceforge.net/svnroot/docutils/trunk

# Detect whether we are running on the SourceForge project host:
# a marker file "ON_SOURCEFORGE" in the current directory enables the
# local-rsync code path at the end of this script.
# (The closing "fi" was lost in the scraped copy; restored here.)
on_sourceforge=0
if [ -e "ON_SOURCEFORGE" ] ; then
    on_sourceforge=1
fi
# File listing extra .html files to regenerate (one path per line).
htmlfilelist=$(pwd)/htmlfiles.lst
# Working area for checkouts and generated output.
basedir=$(pwd)/update-dir
test -d "$basedir" || mkdir "$basedir"

project=docutils

# $auxdir is non-public.
auxdir=$basedir/aux
test -d "$auxdir" || mkdir "$auxdir"
# $htdocsdest is the destination for htdocs and will be moved to
# another server later; so we keep it non-public (under $auxdir).
htdocsdest=$auxdir/htdocs
test -d "$htdocsdest" || mkdir "$htdocsdest"
# Where to create the snapshots (non-public).
snapshotdir=$auxdir/snapshots
test -d "$snapshotdir" || mkdir "$snapshotdir"

# htdocs directory on SF.net
remoteproject=/home/project-web/docutils
remotehtdocs=$remoteproject/htdocs

# local checkout of the docutils library (provides the tools).
pylib=$auxdir/lib/python
lib=$pylib/$project
# Lock directory.
lockdir=$auxdir/lock

# Project base URL (for sitemap) without trailing slash.
baseurl="http://docutils.sourceforge.net"

export PYTHONPATH=$pylib:$lib:$lib/extras
export PATH=$lib/tools:$PATH
trace=0
unconditional=0
verbose=0
feedback=1

# Parse command-line options (see the header comment for meanings).
# BUG fix: "q" was documented and handled in the case statement but was
# missing from the getopts option string, so "-q" used to be rejected.
while getopts ftuvq opt ; do
    case $opt in
        f) feedback=;;
        t) trace=1;;
        u) unconditional=1;;
        v) verbose=1;;
        q) verbose=0;;
        \?) exit 2;;
    esac
done
shift $((OPTIND - 1))
# Print a progress message unless feedback was disabled with -f.
# $1 - message to print.  Always returns 0.
function print_feedback () {
    test "$feedback" && echo "$1" || true
}

print_feedback 'Starting docutils-update run...'

# In trace or verbose mode, echo every command from here on.
# (Replaced the deprecated "[ ... -o ... ]" form with two tests; this
# also behaves gracefully if the variables are unset.)
if [ "$trace" = 1 ] || [ "$verbose" = 1 ] ; then
    set -o xtrace
fi
# Acquire lock.  mkdir is atomic: it fails if the directory exists,
# so only one instance of this script can get past this point.
if ! mkdir "$lockdir"; then
    echo
    echo Could not create lock directory at
    echo "$lockdir"
    echo
    echo Please ensure no other user is running this script
    echo and delete the directory.
    exit 1
fi

# Always clean up on exit (normal exit 0 and signals 1 2 3 15).
trap "rm -rf $lockdir; trap - 0; exit 1" 0 1 2 3 15
# Make sure the lock directory is deletable (i.e. rwx) by other group
# members (in case this script crashes after copying files into the
# directory)
chmod 0770 "$lockdir"
# update library area: the local Docutils checkout that provides the
# conversion tools (rst2html.py etc. via $PATH / $PYTHONPATH above).
if [ -e "$lib" ] ; then
    cd "$lib"
    svn up --quiet
else
    test -d "$pylib" || mkdir -p "$pylib"
    cd "$pylib"
    svn checkout $svnurl/docutils
fi
# -------------------- Snapshots: --------------------

# gather the materials
cd "$snapshotdir"
for DIR in docutils sandbox web ; do
    test -d $DIR || svn checkout $svnurl/$DIR
done
# BUG if checked out for the first time, it has changes.
haschanges="`svn up docutils sandbox web | grep -v '^At revision '; true`"

# Ensure proper directory permissions are set so that the files can be
# modified by several users.  Changing permissions of files is
# probably not necessary because files can be deleted and re-created.
# Do not change permissions of aux directory to keep it non-public
# (but change permissions for all subdirectories: "-name aux" matches
# without -print0, so aux itself is excluded from the chmod).
find $basedir -name aux -o -type d -print0 | xargs -0 chmod ug+rwxs 2> /dev/null || true
# -------------------- htdocs: --------------------

cd "$snapshotdir"

# Copy the given trees into $htdocsdest, skipping .svn directories and
# preserving relative paths.
# TODO this does not work on macosx: cp --no-dereference, --update,
# --parents and --target-directory are GNU extensions.
function copy_to_htdocsdest() {
    find "$@" -type d -name .svn -prune -o \( -type f -o -type l \) -print0 | \
        xargs -0 cp --no-dereference --update --parents \
            --target-directory="$htdocsdest"
}

# update htdocs
copy_to_htdocsdest sandbox
(cd "$project"; copy_to_htdocsdest *)
(cd web; copy_to_htdocsdest * .[^.]*)
# update HTML docs
cd "$htdocsdest/tools"

# Suppress tracing for the noisy loops below unless -t was given.
if [ $trace -eq 0 ] ; then
    set +o xtrace
fi

# 1. local Makefiles: directories may contain Makefile.docutils-update
#    with special build instructions (see header comment).
for makefile in `find .. -name Makefile.docutils-update` ; do
    dir=`dirname $makefile`
    ( cd $dir ; make -f Makefile.docutils-update -s )
done
cd "$htdocsdest"

# 2. generate empty, backdated .html files so that step 3 below sees
#    them as older than their .txt source and (re)generates them.

# for any txt under docs
find docs -type f -and -name \*.txt -print | ( \
    while read -r txtfile ; do
        dir=`dirname $txtfile`
        base=`basename $txtfile .txt`
        htmlfile=$dir/$base.html
        if [ ! -e $htmlfile ] ; then
            print_feedback "touch $htmlfile"
            # Backdate to 2000-01-01 so the .txt is always newer.
            touch -t 200001010101 $htmlfile
        fi
    done )

# for any README.txt under sandbox
find sandbox -type f -and \( -name README.txt -o -name README \) -print | ( \
    while read -r txtfile ; do
        dir=`dirname $txtfile`
        base=`basename $txtfile .txt`
        htmlfile=$dir/$base.html
        if [ ! -e $htmlfile ] ; then
            print_feedback "touch $htmlfile"
            touch -t 200001010101 $htmlfile
        fi
    done )

# for any file in htmlfilelist
while read -r htmlfile ; do
    if [ ! -d `dirname $htmlfile` ] ; then
        # Directory vanished: the list entry is stale; report it.
        print_feedback "Old htmlfile entry: $htmlfile"
    elif [ ! -e $htmlfile ] ; then
        print_feedback "touch $htmlfile"
        touch -t 200001010101 $htmlfile
    fi
done < $htmlfilelist
# 3. re/generate html from txt: for every existing .html, look for a
#    source file (.txt, then .rst, then extensionless) and rebuild the
#    .html when the source is newer (or unconditionally with -u).
cd "$htdocsdest/tools"

for htmlfile in `find .. -name '*.html'` ; do
    dir=`dirname $htmlfile`
    base=`basename $htmlfile .html`
    if [ "$base" == "standalone_rst_html4strict" ] ; then
        # breaks web update
        print_feedback "skipped: $dir $base"
    else
        txtfile=$dir/$base.txt
        if [ ! -e $txtfile ] ; then
            txtfile=$dir/$base.rst
        fi
        if [ ! -e $txtfile ] ; then
            txtfile=$dir/$base
        fi
        if [ $unconditional -eq 1 -o $txtfile -nt $htmlfile ] ; then
            if [ "${base:0:4}" == "pep-" ] ; then
                # PEPs use the dedicated PEP writer.
                print_feedback "$txtfile (PEP)"
                python $lib/tools/rstpep2html.py --config=$dir/docutils.conf $txtfile $htmlfile
                haschanges=1
            else
                print_feedback "$txtfile"
                python $lib/tools/rst2html.py --config=$dir/docutils.conf $txtfile $htmlfile
                haschanges=1
            fi
        fi
    fi
done

# Restore tracing if requested (it was switched off for the loops).
if [ $trace -eq 1 -o $verbose -eq 1 ] ; then
    set -o xtrace
fi
# -------------------- XML sitemap for search engines: --------------------

cd "$htdocsdest"

# Update the sitemap only if something has changed because it takes
# very much CPU time.
# (The opening "(" of the subshell and several closers were lost in the
# scraped copy; restored here.)
if test -n "$haschanges"; then
    (
        echo '<?xml version="1.0" encoding="UTF-8"?>'
        echo '<urlset xmlns="http://www.google.com/schemas/sitemap/0.84">'
        if [ $trace -eq 0 ] ; then
            set +o xtrace
        fi
        # List directories (with trailing /) and files, skipping dotfiles.
        find . -name '.[^.]*' -prune -o -type d -printf '%p/\n' \
             -o \( -type f -o -type l \) -print | \
        while read i; do
            # i is the file name.
            if test "$i" == ./; then
                # Homepage.
                i=index.html
                url="$baseurl/"
            elif test "$i" == ./sitemap -o "${i: -1}" == / -a -f "${i}index.html"; then
                # This is a directory and it has an index.html, so we
                # don't need to include it.
                continue
            else
                url="$baseurl${i:1}"
                url="${url// /%20}"
            fi
            lastmod="`date --iso-8601=seconds -u -r "$i"`"
            # Google wants a colon in front of the last two digits.
            lastmod="${lastmod::22}:00"
            if test "${i: -5}" == .html; then
                # HTML files (including the home page) have highest priority.
                priority=1.0
            elif test "${i: -4}" == .txt; then
                # Text files have medium priority.
                priority=0.5
            else
                # Everything else (source files etc.) has low priority.
                priority=0.2
            fi
            echo "<url><loc>$url</loc><lastmod>$lastmod</lastmod><priority>$priority</priority></url>"
        done
        if [ $trace -eq 1 -o $verbose -eq 1 ] ; then
            set -o xtrace
        fi
        echo '</urlset>'
    ) > sitemap
    gzip -f sitemap
fi
# -------------------- Push changes to remote server. --------------------

# sourceforge no longer allows shell access, use rsync via ssh
# specify your user in your .ssh/config

cd "$htdocsdest"
# BUG fix: "[ $on_sourceforge ]" was always true, because the variable
# holds the non-empty string "0" or "1"; compare numerically so the
# ssh branch is actually taken when not on SourceForge.
if [ "$on_sourceforge" -eq 1 ] ; then
    print_feedback "rsync on sf"
    rsync -r -t ./ $remotehtdocs
else
    # do not use -a to avoid "failed to set permissions"
    # -t preserve modification times. But a new svn checkout has new modtime.
    rsync -e ssh -r -t ./ web.sourceforge.net:$remotehtdocs
fi

# Normal exit: disarm the cleanup trap, then remove the lock ourselves.
trap - 0 1 2 3 15
rm -rf "$lockdir"
print_feedback '...docutils-update done.'
339 # Local Variables:
340 # indent-tabs-mode: nil
341 # End: