domain: LANGUST.RU
nserver: ns1.hosting.reg.ru.
nserver: ns2.hosting.reg.ru.
state: REGISTERED, DELEGATED, UNVERIFIED
person: Private Person
registrar: REGRU-RU
admin-contact: http://www.reg.ru/whois/admin_contact
created: 2001-11-18T21:00:00Z
paid-till: 2026-11-19T21:00:00Z
free-date: 2026-12-21
source: TCI
Last updated on 2026-03-27T22:08:01Z
#robots.txt for https://www.langust.ru Host: www.langust.ru Sitemap: https://www.langust.ru/sitemap.xml User-agent: * Allow: / User-agent: Pinterestbot Disallow: / # Block all known AI crawlers and assistants # from using content for training AI models. # Source: https://robotstxt.com/ai User-Agent: GPTBot User-Agent: ClaudeBot User-Agent: Claude-User User-Agent: Claude-SearchBot User-Agent: CCBot User-Agent: Google-Extended User-Agent: Applebot-Extended User-Agent: Facebookbot User-Agent: Meta-ExternalAgent User-Agent: Meta-ExternalFetcher User-Agent: diffbot User-Agent: PerplexityBot User-Agent: Perplexity-User User-Agent: Omgili User-Agent: Omgilibot User-Agent: webzio-extended User-Agent: ImagesiftBot User-Agent: Bytespider User-Agent: TikTokSpider User-Agent: Amazonbot User-Agent: Youbot User-Agent: SemrushBot-OCOB User-Agent: Petalbot User-Agent: VelenPublicWebCrawler User-Agent: TurnitinBot User-Agent: Timpibot User-Agent: OAI-SearchBot User-Agent: ICC-Crawler User-Agent: AI2Bot User-Agent: AI2Bot-Dolma User-Agent: DataForSeoBot User-Agent: AwarioBot User-Agent: AwarioSmartBot User-Agent: AwarioRssBot User-Agent: Google-CloudVertexBot User-Agent: PanguBot User-Agent: Kangaroo Bot User-Agent: Sentibot User-Agent: img2dataset User-Agent: Meltwater User-Agent: Seekr User-Agent: peer39_crawler User-Agent: cohere-ai User-Agent: cohere-training-data-crawler User-Agent: DuckAssistBot User-Agent: Scrapy User-Agent: Cotoyogi User-Agent: aiHitBot User-Agent: Factset_spyderbot User-Agent: FirecrawlAgent Disallow: / DisallowAITraining: / # Block any non-specified AI crawlers (e.g., new # or unknown bots) from using content for training # AI models, while allowing the website to be # indexed and accessed by bots. These directives # are still experimental and may not be supported # by all AI crawlers. DisallowAITraining: / Content-Usage: ai=n Allow: /
| Pozycja | Wyrażenie | Strona | Skrawek |
|---|---|---|---|
| 1 | /unit_el/unit_034.shtml | ||
| 2 | /review/xenosc01.shtml | ||
| 2 | /rus_gram/rus_gr03.shtml | ||
| 5 | /unit_ur/ureg_044.shtml | ||
| 8 | /unit_el/unit_050.shtml |
| Pozycja | Wyrażenie | Strona | Skrawek |
|---|---|---|---|
| 6 | /unit_in/unit042i.shtml | ||
| 8 | /review/lang_h04.shtml | ||
| 8 | /unit_in/unit042i.shtml | ||
| 8 | /unit_in/unit042i.shtml | ||
| 11 | /unit_in/unit042i.shtml | ||
| 13 | /unit_in/unit025i.shtml |