robots.txt

User-agent: webproxy-mozz
Disallow: /

User-agent: serpstatbot
Disallow: /

User-agent: ltx71
Disallow: /

User-agent: oBot
Disallow: /

User-agent: DuckDuckBot
Disallow:

User-agent: ia_archiver
Disallow: /

User-agent: ltx71 - (http://ltx71.com/)
Disallow: /

User-agent: BLEXBot
Disallow: /

User-agent: Twingly Recon
Disallow: /

User-agent: Googlebot
Disallow: /identity/
Disallow: /webring.html
Disallow: /blog/

User-agent: Googlebot-Image
Disallow: /

User-agent: Google-Extended
Disallow: /

User-agent: PageThing
Disallow: /

User-agent: adsbot
Disallow: /

User-agent: SurdotlyBot
Disallow: /

User-agent: DataForSeoBot
Disallow: /

User-agent: SpiderLing
Disallow: /

# Fuck your AI and fuck your scraping
User-agent: CCBot
Disallow: /

User-agent: ChatGPT-User
Disallow: /

User-agent: GPTBot
Disallow: /
User-agent: anthropic-ai
Disallow: /

User-agent: Omgilibot
Disallow: /

User-agent: Omgili
Disallow: /

User-agent: FacebookBot
Disallow: /

User-agent: Bytespider
Disallow: /

User-agent: *
Disallow: /
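
For reference, a minimal sketch of how a standards-following crawler would evaluate these groups, using Python's standard-library urllib.robotparser. The https://example.org URL and the excerpted subset of rules are illustrative assumptions, not part of the file above.

# Minimal sketch: checking a subset of the rules above with urllib.robotparser.
# The site URL (https://example.org) is a placeholder assumption.
from urllib.robotparser import RobotFileParser

RULES = """\
User-agent: DuckDuckBot
Disallow:

User-agent: Googlebot
Disallow: /identity/
Disallow: /webring.html
Disallow: /blog/

User-agent: GPTBot
Disallow: /

User-agent: *
Disallow: /
"""

rp = RobotFileParser()
rp.parse(RULES.splitlines())

# An empty Disallow means DuckDuckBot may fetch anything.
print(rp.can_fetch("DuckDuckBot", "https://example.org/blog/post"))  # True
# Googlebot is blocked only from the listed paths.
print(rp.can_fetch("Googlebot", "https://example.org/blog/post"))    # False
print(rp.can_fetch("Googlebot", "https://example.org/about"))        # True
# "Disallow: /" blocks GPTBot from the whole site.
print(rp.can_fetch("GPTBot", "https://example.org/"))                # False
# Any crawler not named explicitly falls through to the "*" group.
print(rp.can_fetch("SomeOtherBot", "https://example.org/"))          # False

The final "User-agent: *" group makes the file deny-by-default: only crawlers with their own group and more permissive rules (here, DuckDuckBot and, partially, Googlebot) are allowed anything at all.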