# add new URLs to robots.txt to prevent crawling
# [debiancodesearch.git] / dcs-update-ranking.service
# blob af5cbc60717f732fabd2a89a58d8c3139d617749
# Debian Code Search: one-shot job that refreshes the popcon data in the
# "udd" PostgreSQL database and recomputes package ranking.
# NOTE(review): ".include" is deprecated in newer systemd versions; drop-in
# directories (dcs-update-ranking.service.d/) are the modern replacement —
# confirm the target systemd version before migrating.
.include /lib/systemd/system/dcs-common.service

[Unit]
Description=Debian Code Search: update ranking
# Needs a running PostgreSQL server for the psql invocations below.
After=postgresql.service

[Service]
Restart=no
# Type=oneshot allows multiple ExecStart= lines; they run sequentially and
# the unit fails as soon as one of them exits non-zero.
Type=oneshot
# 1) Download the current popcon dump.
# NOTE(review): fixed, predictable path in the shared /tmp — consider
# PrivateTmp=yes or a unit-owned directory to avoid symlink/clobber races.
ExecStart=/usr/bin/wget -q http://udd.debian.org/udd-popcon.sql.xz -O /tmp/popcon.sql.xz
# 2) Drop the old popcon tables so the dump can recreate them cleanly.
ExecStart=/bin/sh -c "echo 'DROP TABLE popcon; DROP TABLE popcon_src;' | psql udd"
# 3) Decompress and load the fresh dump into the udd database.
ExecStart=/bin/sh -c "xz -d -c /tmp/popcon.sql.xz | psql udd"
# 4) (Re-)apply the DCS schema.
ExecStart=/bin/sh -c "psql dcs < /usr/share/dcs/schema.sql"
# 5) Recompute the ranking from the refreshed data.
ExecStart=/usr/bin/dcs-compute-ranking \
        -mirror_url=http://http.debian.net/debian