[debiancodesearch.git] / dcs-index-backend@.service
blob 3f36efaff454556937e7b32cdd1a7620438d5adf
.include /lib/systemd/system/dcs-common.service

[Unit]
Description=Debian Code Search: index backend

[Service]
ExecStart=/usr/bin/index-backend -listen_address=:2908%i -index_path=/dcs/index.%i.idx

[Install]
WantedBy=multi-user.target
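
This is a systemd template unit (note the @ in the file name): when an instance is started, systemd substitutes the instance name for every %i, so several index backends can run from this one file, each listening on its own port and serving its own index shard. As a sketch, for a hypothetical instance 2 (dcs-index-backend@2.service) the [Service] line would expand to:

    ExecStart=/usr/bin/index-backend -listen_address=:29082 -index_path=/dcs/index.2.idx

An operator would start such an instance with "systemctl start dcs-index-backend@2.service"; the instance number 2 above is illustrative, not taken from the repository.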