# See https://www.robotstxt.org/robotstxt.html for documentation on how to use the robots.txt file
User-agent: *
# Note: pagination URLs are intentionally NOT disallowed here, so crawlers
# can fetch paginated results and follow the links on them; those pages
# emit a "noindex,follow" robots meta tag instead, which avoids index bloat.
# Disallow URLs whose query string contains "search=" (low-value pages;
# saves crawl budget). NOTE: the wildcard also matches any parameter name
# ending in "search", e.g. "?advanced_search=" — confirm this is intended.
Disallow: /*?*search=
# Allow main content
Allow: /listing/
Allow: /products/
Allow: /discounts
Allow: /cuponeras
Sitemap: https://lifi.mx/sitemap/sitemap.xml.gz