diff --git a/robots.json b/robots.json index 5d5f692..60c431c 100644 --- a/robots.json +++ b/robots.json @@ -174,6 +174,13 @@ "frequency": "Up to 1 page per second", "description": "Officially used for training Meta \"speech recognition technology,\" unknown if used to train Meta AI specifically." }, + "facebookexternalhit": { + "operator": "Meta/Facebook", + "respect": "[No](https://github.com/ai-robots-txt/ai.robots.txt/issues/40#issuecomment-2524591313)", + "function": "Ostensibly only for sharing, but likely used as an AI crawler as well", + "frequency": "Unclear at this time.", + "description": "Note that excluding facebookexternalhit will prevent the incorporation of OpenGraph data when sharing on social media, including rich links in Apple's Messages app. [According to Meta](https://developers.facebook.com/docs/sharing/webmasters/web-crawlers/), its purpose is \"to crawl the content of an app or website that was shared on one of Meta’s family of apps…\". However, see discussions [here](https://github.com/ai-robots-txt/ai.robots.txt/pull/21) and [here](https://github.com/ai-robots-txt/ai.robots.txt/issues/40#issuecomment-2524591313) for evidence to the contrary." + }, "Factset_spyderbot": { "operator": "[Factset](https://www.factset.com/ai)", "respect": "Unclear at this time.",