# See https://www.robotstxt.org/robotstxt.html for documentation on how to use the robots.txt file
# Default rules for every crawler: the public site is allowed, but
# private and authentication pages are kept out of crawl results
User-agent: *
Disallow: /dashboard/
Disallow: /login
Disallow: /signup

# Block specific AI-training crawlers from the entire site
User-agent: Google-Extended
Disallow: /

User-agent: GPTBot
Disallow: /

User-agent: CCBot
Disallow: /