# See https://www.robotstxt.org/robotstxt.html for documentation on how to use the robots.txt file
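# Bots with a dedicated group below are either blocked outright (Disallow: /)
# or rate-limited (Crawl-delay). Note: a crawler obeys only its most specific
# matching group, so bots listed individually are not bound by the
# "User-agent: *" rules at the bottom of this file.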
# AhrefsBot
User-agent: AhrefsBot
Disallow: /
# Common Crawl (https://commoncrawl.org/faq/)
User-agent: CCBot
Crawl-delay: 30
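# Note: Crawl-delay is a nonstandard directive; bots that honor it generally
# treat the value as the number of seconds to wait between requests.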
# DotBot (https://opensiteexplorer.org/dotbot)
User-agent: dotbot
Disallow: /
# Grapeshot crawler (blocked for making malformed URL requests)
User-agent: grapeshot
Disallow: /
# Linguee (https://www.linguee.com/bot)
User-agent: Linguee
Disallow: /
# MJ12bot
User-agent: MJ12bot
Crawl-delay: 60
# MSNBot
User-agent: msnbot
Crawl-delay: 4
# PetalBot (http://aspiegel.com/petalbot)
User-agent: PetalBot
Disallow: /
# Seekport (https://bot.seekport.com/)
User-agent: SeekportBot
Crawl-delay: 2
# SEMrushBot (blocked for crawling stale links and ignoring crawl-rate limits)
User-agent: SemrushBot
Disallow: /
# Yahoo (Slurp)
User-agent: Slurp
Crawl-delay: 5
# Yahoo (alternate "Yahoo! Slurp" token used by some parsers)
User-agent: Yahoo! Slurp
Crawl-delay: 5
# Yandex
User-agent: Yandex
Crawl-delay: 4
# All other crawlers
User-agent: *
Disallow: /control_panel/
Disallow: /*/control_panel/
Disallow: /login/
Disallow: /*/login/
Disallow: /search/
Disallow: /*/search/
Disallow: /*/update_state_select
Disallow: /*/ads/
Disallow: /*/feedback
Disallow: /*/report_error
Disallow: /*/responsive
Disallow: /*/translation_suggestion
Disallow: /*/uploads/
Disallow: /*/utility/
Disallow: /*?no_crawl=true
Disallow: /*&no_crawl=true
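# The "*" wildcard in paths above is an extension honored by major crawlers.
# The two no_crawl rules cover the parameter whether it is the first
# ("?no_crawl=true") or a subsequent ("&no_crawl=true") query parameter.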
Sitemap: https://www.machinetools.com/xml_sitemaps/sitemap.xml.gz
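# The Sitemap directive is file-wide and applies to all crawlers regardless of
# user-agent grouping.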