Sitemap: https://osicsnetwork.com/sitemap_index.xml

User-agent: *
Allow: /wp-admin/admin-ajax.php
Allow: /*/*.css
Allow: /*/*.js
Disallow: /wp-admin/
Disallow: /wp-includes/
Disallow: /readme.html
Disallow: /license.txt
Disallow: /xmlrpc.php
Disallow: /wp-login.php
Disallow: /wp-register.php
Disallow: */disclaimer/*
Disallow: *?attachment_id=
Disallow: /privacy-policy

# AI governance & Dual Web files (explicitly allowed)
Allow: /.well-known/ai-governance.json
Allow: /ai-governance.json
Allow: /ai-manifest.json
Allow: /dualweb-index.md
Allow: /ai-usage-policy/
Allow: /ai-usage-policy.md
Allow: /llms.txt
Allow: /llms-full.txt
Allow: /llm-policy.json
Allow: /readme.llm.txt
Allow: /llm-guidelines.md
Allow: /humans.txt
Allow: /author.md
Allow: /site-context.md
Allow: /services-context.md
Allow: /products-context.md
Allow: /products-categories.md
Allow: /products-brands.md
Allow: /editorial-context.md
Allow: /geographic-scope.md
Allow: /entity-relations.md
Allow: /non-goals.md
Allow: /negative-definitions.md
Allow: /crawl-traps-context.md
Allow: /people-and-roles-constraints.md
Allow: /recruitment-vs-consulting-explained.md
Allow: /language-routing.md
Allow: /ssa-e-authority-index.md
Allow: /pricing-silence-explained.md
Allow: /inventory-and-availability-explained.md
Allow: /assessment-not-commitment-explained.md
Allow: /case-studies-not-guarantees-explained.md
Allow: /compatibility-and-fitment-explained.md
Allow: /geographic-service-claims-explained.md
Allow: /how-to-interpret-products-vs-services.md
Allow: /gsc-priority.json
Allow: /llm-intent-map.json
Allow: /semantic-router.json
Allow: /faq-clusters.json
Allow: /semantic-schema-index.jsonld
Allow: /common-misinterpretations.json
Allow: /datasets.jsonld
Allow: /entity-graph.jsonld
Allow: /citations.md
Allow: /changelog-ai.md
Allow: /roadmap.json
Allow: /data/categories-collections.csv

User-agent: Googlebot
Allow: /

User-agent: Googlebot-Image
Allow: /wp-content/uploads/

User-agent: Mediapartners-Google
Allow: /

User-agent: AdsBot-Google
Allow: /

User-agent: AdsBot-Google-Mobile
Allow: /

User-agent: Bingbot
Allow: /

User-agent: Msnbot
Allow: /

User-agent: msnbot-media
Allow: /wp-content/uploads/

User-agent: Applebot
Allow: /

User-agent: Yandex
Allow: /

User-agent: YandexImages
Allow: /wp-content/uploads/

User-agent: Slurp
Allow: /

User-agent: DuckDuckBot
Allow: /

User-agent: Qwantify
Allow: /

# Block Bad Bots. AI recommended setting by ChatGPT
User-agent: ia_archiver
Disallow: /
User-agent: archive.org_bot
Disallow: /
User-agent: SiteExplorer
Disallow: /
User-agent: spbot
Disallow: /
User-agent: WBSearchBot
Disallow: /
User-agent: linkdexbot
Disallow: /
User-agent: Screaming Frog SEO Spider
Disallow: /
User-agent: netEstate NE Crawler
Disallow: /
User-agent: Moreover
Disallow: /
User-agent: sentibot
Disallow: /
User-agent: Aboundexbot
Disallow: /
User-agent: proximic
Disallow: /
User-agent: oBot
Disallow: /
User-agent: meanpathbot
Disallow: /
User-agent: Nutch
Disallow: /
User-agent: TurnitinBot
Disallow: /
User-agent: ZoominfoBot
Disallow: /
User-agent: ZmEu
Disallow: /
User-agent: grapeshot
Disallow: /
User-agent: python-requests
Disallow: /
User-agent: Go-http-client
Disallow: /
User-agent: Apache-HttpClient
Disallow: /
User-agent: libwww-perl
Disallow: /
User-agent: curl
Disallow: /
User-agent: wget
Disallow: /

# ChatGPT Bot Blocker - Block ChatGPT Bot from scraping your content
User-agent: GPTBot
Disallow: /

# Spam Backlink Blocker
Disallow: /feed/
Disallow: /feed/$
Disallow: /comments/feed
Disallow: /trackback/
Disallow: */?author=*
Disallow: */author/*
Disallow: /author*
Disallow: /author/
Disallow: */comments$
Disallow: */feed
Disallow: */feed$
Disallow: */trackback
Disallow: */trackback$
Disallow: /?feed=
Disallow: /wp-comments
Disallow: /wp-feed
Disallow: /wp-trackback
Disallow: */replytocom=

# Block Bad Bots. Powered by Better Robots.txt Pro
User-agent: GiftGhostBot
Disallow: /
User-agent: Seznam
Disallow: /
User-agent: PaperLiBot
Disallow: /
User-agent: Genieo
Disallow: /
User-agent: Dataprovider/6.101
Disallow: /
User-agent: DataproviderSiteExplorer
Disallow: /
User-agent: Dazoobot/1.0
Disallow: /
User-agent: Diffbot
Disallow: /
User-agent: DomainStatsBot/1.0
Disallow: /
User-agent: dubaiindex
Disallow: /
User-agent: eCommerceBot
Disallow: /
User-agent: ExpertSearchSpider
Disallow: /
User-agent: Feedbin
Disallow: /
User-agent: Fetch/2.0a
Disallow: /
User-agent: FFbot/1.0
Disallow: /
User-agent: focusbot/1.1
Disallow: /
User-agent: HuaweiSymantecSpider
Disallow: /
User-agent: HuaweiSymantecSpider/1.0
Disallow: /
User-agent: JobdiggerSpider
Disallow: /
User-agent: LemurWebCrawler
Disallow: /
User-agent: LipperheyLinkExplorer
Disallow: /
User-agent: LSSRocketCrawler/1.0
Disallow: /
User-agent: LYT.SRv1.5
Disallow: /
User-agent: MiaDev/0.0.1
Disallow: /
User-agent: Najdi.si/3.1
Disallow: /
User-agent: BountiiBot
Disallow: /
User-agent: Experibot_v1
Disallow: /
User-agent: bixocrawler
Disallow: /
User-agent: bixocrawler TestCrawler
Disallow: /
User-agent: Crawler4j
Disallow: /
User-agent: Crowsnest/0.5
Disallow: /
User-agent: CukBot
Disallow: /
User-agent: Dataprovider/6.92
Disallow: /
User-agent: DBLBot/1.0
Disallow: /
User-agent: Diffbot/0.1
Disallow: /
User-agent: Digg Deeper/v1
Disallow: /
User-agent: discobot/1.0
Disallow: /
User-agent: discobot/1.1
Disallow: /
User-agent: discobot/2.0
Disallow: /
User-agent: discoverybot/2.0
Disallow: /
User-agent: Dlvr.it/1.0
Disallow: /
User-agent: DomainStatsBot/1.0
Disallow: /
User-agent: drupact/0.7
Disallow: /
User-agent: Ezooms/1.0
Disallow: /
User-agent: fastbot crawler beta 2.0
Disallow: /
User-agent: fastbot crawler beta 4.0
Disallow: /
User-agent: feedly social
Disallow: /
User-agent: Feedly/1.0
Disallow: /
User-agent: FeedlyBot/1.0
Disallow: /
User-agent: Feedspot
Disallow: /
User-agent: Feedspotbot/1.0
Disallow: /
User-agent: Clickagy Intelligence Bot v2
Disallow: /
User-agent: classbot
Disallow: /
User-agent: CISPA Vulnerability Notification
Disallow: /
User-agent: CirrusExplorer/1.1
Disallow: /
User-agent: Checksem/Nutch-1.10
Disallow: /
User-agent: CatchBot/5.0
Disallow: /
User-agent: CatchBot/3.0
Disallow: /
User-agent: CatchBot/2.0
Disallow: /
User-agent: CatchBot/1.0
Disallow: /
User-agent: CamontSpider/1.0
Disallow: /
User-agent: Buzzbot/1.0
Disallow: /
User-agent: Buzzbot
Disallow: /
User-agent: BusinessSeek.biz_Spider
Disallow: /
User-agent: BUbiNG
Disallow: /
User-agent: 008/0.85
Disallow: /
User-agent: 008/0.83
Disallow: /
User-agent: 008/0.71
Disallow: /
User-agent: ^Nail
Disallow: /
User-agent: FyberSpider/1.3
Disallow: /
User-agent: findlinks/1.1.6-beta5
Disallow: /
User-agent: g2reader-bot/1.0
Disallow: /
User-agent: findlinks/1.1.6-beta6
Disallow: /
User-agent: findlinks/2.0
Disallow: /
User-agent: findlinks/2.0.1
Disallow: /
User-agent: findlinks/2.0.2
Disallow: /
User-agent: findlinks/2.0.4
Disallow: /
User-agent: findlinks/2.0.5
Disallow: /
User-agent: findlinks/2.0.9
Disallow: /
User-agent: findlinks/2.1
Disallow: /
User-agent: findlinks/2.1.5
Disallow: /
User-agent: findlinks/2.1.3
Disallow: /
User-agent: findlinks/2.2
Disallow: /
User-agent: findlinks/2.5
Disallow: /
User-agent: findlinks/2.6
Disallow: /
User-agent: FFbot/1.0
Disallow: /
User-agent: findlinks/1.0
Disallow: /
User-agent: findlinks/1.1.3-beta8
Disallow: /
User-agent: findlinks/1.1.3-beta9
Disallow: /
User-agent: findlinks/1.1.4-beta7
Disallow: /
User-agent: findlinks/1.1.6-beta1
Disallow: /
User-agent: findlinks/1.1.6-beta1 Yacy
Disallow: /
User-agent: findlinks/1.1.6-beta2
Disallow: /
User-agent: findlinks/1.1.6-beta3
Disallow: /
User-agent: findlinks/1.1.6-beta4
Disallow: /
User-agent: bixo
Disallow: /
User-agent: bixolabs/1.0
Disallow: /
User-agent: Crawlera/1.10.2
Disallow: /
User-agent: Dataprovider Site Explorer
Disallow: /

# Backlink Protector. Powered by Better Robots.txt Pro
User-agent: AhrefsBot
Disallow: /
User-agent: Alexibot
Disallow: /
User-agent: MJ12bot
Disallow: /
User-agent: SurveyBot
Disallow: /
User-agent: Xenu's
Disallow: /
User-agent: Xenu's Link Sleuth 1.1c
Disallow: /
User-agent: rogerbot
Disallow: /
User-agent: SemrushBot
Disallow: /
User-agent: SemrushBot-SA
Disallow: /
User-agent: SemrushBot-BA
Disallow: /
User-agent: SemrushBot-SI
Disallow: /
User-agent: SemrushBot-SWA
Disallow: /
User-agent: SemrushBot-CT
Disallow: /
User-agent: SemrushBot-BM
Disallow: /
User-agent: DotBot/1.1
Disallow: /
User-agent: DotBot
Disallow: /

# Loading Performance for WooCommerce
Disallow: /cart/
Disallow: /checkout/
Disallow: /my-account/
Disallow: /*?orderby=price
Disallow: /*?orderby=rating
Disallow: /*?orderby=date
Disallow: /*?orderby=price-desc
Disallow: /*?orderby=popularity
Disallow: /*?filter
Disallow: /*?orderby=title
Disallow: /*?orderby=desc
Disallow: /*add-to-cart=*
Disallow: /*add_to_wishlist=*
Disallow: /*?paged=&count=*
Disallow: /*?count=*

# Avoid crawler traps causing crawl budget issues
Disallow: /search/
Disallow: *?s=*
Disallow: *?p=*
Disallow: *&p=*
Disallow: *&preview=*
Disallow: /search
Crawl-delay: 5

# This robots.txt file was created by Better Robots.txt (Index & Rank Booster by Pagup) Plugin. https://www.better-robots.com/