dd/static/robots.txt

# Allow all crawlers to access everything (an empty Disallow matches no paths)
User-agent: *
Disallow:
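
For contrast (not part of this file), standard robots.txt semantics express the opposite policy, blocking every crawler from the whole site, with a single slash as the Disallow value:

User-agent: *
Disallow: /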