# Disallow all crawlers access to certain pages
User-agent: *
Disallow: /main/find/order.html
Disallow: /main/find/order
Disallow: /main/find/ord/checkout.html
Disallow: /main/find/ord/checkout
Disallow: /main/find/ord/basket.html
Disallow: /main/find/ord/basket
Disallow: /main/find/process.html
Disallow: /main/find/process
Disallow: /main/find/scan/
Disallow: /main/find/goto.html/

# Disallow these crawlers
User-agent: Yeti
Disallow: /

User-agent: TurnitinBot
Disallow: /

User-agent: boitho.com-dc
Disallow: /

User-agent: VoilaBot
Disallow: /

User-agent: twiceler
Disallow: /

User-agent: seekbot
Disallow: /

User-agent: gigabot
Disallow: /

User-agent: psbot
Disallow: /

User-agent: discobot
Disallow: /

User-agent: MJ12bot
Disallow: /

User-agent: Yandex
Disallow: /

User-agent: Exabot
Disallow: /

User-agent: DotBot
Disallow: /

User-agent: Speedy
Disallow: /

User-agent: Charlotte
Disallow: /

User-agent: trovitBot
Disallow: /

User-agent: sogou spider
Disallow: /

User-agent: spbot
Disallow: /

User-agent: SemrushBot
Disallow: /

User-agent: istellabot
Disallow: /

User-agent: SeznamBot
Disallow: /

User-agent: Qwantify
Disallow: /

# Sitemap for compliant crawlers
Sitemap: http://books.bibliopolis.com/sitemap.xml