# See http://www.robotstxt.org/robotstxt.html for documentation on how to use the robots.txt file
#
# To ban all spiders from the entire site uncomment the next two lines:
# User-agent: *
# Disallow: /
Sitemap: http://svgr.jp/sitemap-index.xml
User-agent: *
Crawl-delay: 5
Allow: /
Disallow: /*?*_sid=
Disallow: /oauth/*
Disallow: /redirect/*
# NOTE(review): per RFC 9309, a crawler matching a specific group ignores the
# "*" group entirely, so bingbot and Amazonbot will NOT inherit the Disallow
# rules above (e.g. /*?*_sid=) — confirm this is intended.
User-agent: bingbot
Crawl-delay: 60
User-agent: Amazonbot
Crawl-delay: 60
# https://developers.facebook.com/docs/sharing/webmasters/web-crawlers?locale=ja_JP
User-agent: meta-externalagent
Disallow: /
# https://imagesift.com/about
User-agent: ImagesiftBot
Disallow: /
User-agent: Barkrowler
Disallow: /
User-agent: AhrefsBot
Disallow: /
User-agent: DataForSeoBot
Disallow: /
User-agent: YandexBot
Disallow: /
User-agent: proximic
Disallow: /