Coverage for src / competitive_verifier / oj / languages / special_comments.py: 100%
31 statements
« prev ^ index » next coverage.py v7.13.1, created at 2026-04-26 12:38 +0900
« prev ^ index » next coverage.py v7.13.1, created at 2026-04-26 12:38 +0900
1# Python Version: 3.x
2import functools
3import pathlib
4import re
5from collections.abc import Iterable, Mapping
6from logging import getLogger
8from competitive_verifier.util import read_text_normalized
10logger = getLogger(__name__)
# Matches magic comments such as `# competitive-verifier: PROBLEM https://...`
# (the legacy `verify-helper` / `verification-helper` prefixes are accepted too).
# Group 1 captures the attribute key; group 2 optionally captures the value.
SPECIAL_COMMENTS_PATTERN = re.compile(
    r"(?:verify-helper|verification-helper|competitive-verifier):\s*([0-9A-Za-z_]+)(?:\s(.*))?$"
)
# special comments like Vim and Python: see https://www.python.org/dev/peps/pep-0263/
@functools.cache
def list_special_comments(path: pathlib.Path) -> Mapping[str, str]:
    """Collect ``key -> value`` attributes from special comments in *path*.

    Every line of the (normalized) file is scanned with
    ``SPECIAL_COMMENTS_PATTERN``; a later occurrence of the same key
    overwrites an earlier one. Results are memoized per path.
    """
    result: dict[str, str] = {}
    for text_line in read_text_normalized(path).splitlines():
        m = SPECIAL_COMMENTS_PATTERN.search(text_line)
        if m is None:
            continue
        # The value group is optional; normalize a missing value to "".
        result[m.group(1)] = (m.group(2) or "").strip()
    return result
30def _unquote(s: str) -> str:
31 if s.startswith(("'", '"', "`")):
32 end_quote_pos = s.rfind(s[0])
33 if end_quote_pos == 0:
34 # Remove opening quote from the URL like `"https://atcoder.jp/`
35 return s[1:]
36 # Remove quotes and trailing superfluous chars around the URL
37 return s[1:end_quote_pos]
38 return s
@functools.cache
def list_embedded_urls(path: pathlib.Path) -> Iterable[str]:
    """Return the sorted, de-duplicated http(s) URLs embedded in *path*."""
    # Deliberately loose pattern — strict URL matching is unnecessary here.
    url_pattern = re.compile(r"""['"`]?https?://\S*""")
    text = read_text_normalized(path)
    # A URL may appear quoted (e.g. `"https://atcoder.jp/"`) or embedded in
    # JSON like `{"url":"https://atcoder.jp/"}`; _unquote strips the quotes
    # and anything trailing the closing quote.
    found: set[str] = set()
    for raw in url_pattern.findall(text):
        found.add(_unquote(raw))
    return sorted(found)