# robots.txt file for vidow.net

# Allow all crawlers access to all content by default
User-agent: *
Disallow:

# Block internal or sensitive areas
Disallow: /account/
Disallow: /deactivated/
Disallow: /disabled/
Disallow: /google_authentication/
Disallow: /installed/
Disallow: /reset_password/
Disallow: /verify_email/
Disallow: /welcome_message/
Disallow: /payments/
Disallow: /upgrade/
Disallow: /common/
Disallow: /video_list/
Disallow: /newsletter/
Disallow: /video/download/

# Optional: specify a crawl delay for specific bots if your server faces heavy traffic (in seconds)
# Crawl-delay: 10

# Sitemap location to help search engines find all relevant URLs
Sitemap: https://vidow.net/sitemap.xml

# Block specific bots that may overload the server or are not useful
User-agent: BadBot
Disallow: /

User-agent: AhrefsBot
Disallow: /

User-agent: MJ12bot
Disallow: /

# Allow search engines to fetch CSS, JS, and image files to improve rendering/indexing
User-agent: *
Allow: /*.css$
Allow: /*.js$
Allow: /*.jpg$
Allow: /*.png$
Allow: /*.gif$

# Google-specific rules
# NOTE(review): a bot-specific group replaces the "*" group for that bot, so the
# Disallow list above will NOT apply to the bots named below — confirm this is intended.
User-agent: Googlebot
Allow: /$

# Yahoo-specific rules (Slurp)
User-agent: Slurp
Disallow:

# Bing-specific rules (bingbot)
User-agent: bingbot
Disallow:

# DuckDuckGo-specific rules (DuckDuckBot)
User-agent: DuckDuckBot
Disallow: