Robots.txt
# Robots version 1.0 - 21/10/2024
User-agent: *
Crawl-delay: 1
# Allow JS and CSS
Allow: /*.js$
Allow: /*.css$
# Do not index PDFs
Disallow: /*.pdf$
# Blocking some bad bots
User-agent: Bloglines/3.1
Disallow: /
# Allowing Semrush
User-agent: SiteAuditBot
Allow: /
User-agent: SemrushBot-SI
Allow: /
User-agent: DOC
Disallow: /
User-agent: dotbot
Disallow: /
User-agent: Download Ninja
Disallow: /
User-agent: Exabot
Disallow: /
User-agent: Fetch
Disallow: /
User-agent: grub-client
Disallow: /
User-agent: HTTrack
Disallow: /
User-agent: HubSpot Crawler
Disallow: /
User-agent: HubSpot
Disallow: /
User-agent: Jyxobot/1
Disallow: /
User-agent: k2spider
Disallow: /
User-agent: larbin
Disallow: /
User-agent: libwww
Disallow: /
User-agent: linko
Disallow: /
User-agent: Microsoft.URL.Control
Disallow: /
User-agent: MSIECrawler
Disallow: /
User-agent: NPBot
Disallow: /
User-agent: Offline Explorer
Disallow: /
User-agent: psbot
Disallow: /
User-agent: sitecheck.internetseer.com
Disallow: /
User-agent: SiteSnagger
Disallow: /
User-agent: Speedy
Disallow: /
User-agent: Screaming Frog SEO Spider
Disallow: /
User-agent: Teleport
Disallow: /
User-agent: TeleportPro
Disallow: /
User-agent: UbiCrawler
Disallow: /
User-agent: WebCopier
Disallow: /
User-agent: WebReaper
Disallow: /
User-agent: WebStripper
Disallow: /
User-agent: WebZIP
Disallow: /
User-agent: wget
Disallow: /
User-agent: Xenu
Disallow: /
User-agent: Zao
Disallow: /
User-agent: Zealbot
Disallow: /
User-agent: ZyBORG
Disallow: /
Sitemap: https://www.naturgy.es/sitemap.xml
(Desirable)
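As a quick sanity check on the directives above, the following sketch feeds the file to Python's standard urllib.robotparser module and confirms that a blocked crawler is refused everywhere while a generic crawler keeps access and sees the declared crawl delay. The local file name and example URLs are assumptions for illustration only, and note that the standard-library parser implements the original robots.txt matching rules, so the wildcard patterns such as /*.js$ and /*.pdf$ (a Google-style extension) are not interpreted by it.

from urllib import robotparser

# Assumption: the directives above are saved locally as "robots.txt".
with open("robots.txt", encoding="utf-8") as f:
    lines = f.read().splitlines()

rp = robotparser.RobotFileParser()
rp.parse(lines)

# A blocked mirroring tool must be refused site-wide (Disallow: /).
assert not rp.can_fetch("HTTrack", "https://www.naturgy.es/")

# A generic crawler falls under "User-agent: *" and keeps access to regular pages.
assert rp.can_fetch("Googlebot", "https://www.naturgy.es/hogar")

# The crawl delay declared for all user agents (1 second).
print(rp.crawl_delay("*"))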