[debiancodesearch.git] / dcs-index-backend.service
# Shared settings for all Debian Code Search services
# (pulled in via systemd's legacy .include directive).
.include /lib/systemd/system/dcs-common.service

[Unit]
Description=Debian Code Search: index backend

[Service]
# The index backend answers search queries from the index at -index_path.
ExecStart=/usr/bin/dcs-index-backend -index_path=/dcs-ssd/unpacked/full.idx

[Install]
WantedBy=multi-user.target
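
(Deployment note, not part of the unit file: a minimal sketch assuming this
unit and the dcs-common.service it includes are installed under
/etc/systemd/system/. The backend can then be enabled and inspected with
the standard systemd commands.)

    systemctl daemon-reload
    systemctl enable dcs-index-backend.service
    systemctl start dcs-index-backend.service
    journalctl -u dcs-index-backend.service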