Coverage for src/competitive_verifier/oj/format.py: 100% (97 statements)
import pathlib
from abc import ABC, abstractmethod
from collections.abc import Generator, Iterable
from dataclasses import dataclass
from typing import TypeAlias

from colorama import Fore, Style

_whitespace_table = str.maketrans(
    {
        " ": "_",
        "\t": "\\t",
        "\r": "\\r",
        "\n": "\\n",
    }
)

def _replace_whitespace(s: str) -> str:
    return s.translate(_whitespace_table)
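
# A quick sketch (added for illustration; not in the original module) of what
# the translation table above does: spaces become visible underscores, and
# other whitespace becomes its escaped two-character spelling.
#   _replace_whitespace("a b\tc\n") == "a_b\\tc\\n"   # prints as: a_b\tc\n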

class _PrettyToken(ABC):
    """A token for verification output."""

    value: str

    def __init__(self, value: str) -> None:
        self.value = value

    @classmethod
    @abstractmethod
    def style(cls) -> str: ...

    def render(self) -> str:
        return self.style() + self.value + Style.RESET_ALL

class _BodyToken(_PrettyToken):
    @classmethod
    def style(cls) -> str:
        return Style.BRIGHT

class _HintToken(_PrettyToken):
    @classmethod
    def style(cls) -> str:
        return Style.DIM

class _WhitespaceToken(_PrettyToken):
    def __init__(self, value: str) -> None:
        self.value = _replace_whitespace(value)

    @classmethod
    def style(cls) -> str:
        return Style.DIM

_NewlineToken: TypeAlias = _WhitespaceToken

def _tokenize_str(s: str) -> list[_PrettyToken]:
    tokens: list[_PrettyToken] = []
    l = 0
    while l < len(s):
        r = l + 1
        while r < len(s) and (s[l] in " \t") == (s[r] in " \t"):
            r += 1
        typ = _WhitespaceToken if s[l] in " \t" else _BodyToken
        tokens.append(typ(s[l:r]))
        l = r
    return tokens
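
# Illustration (not in the original module): the scanner emits one token per
# maximal run of whitespace or non-whitespace characters.
#   _tokenize_str("ab  cd")
#   -> [_BodyToken("ab"), _WhitespaceToken("  "), _BodyToken("cd")]
# (the _WhitespaceToken constructor stores the translated value "__")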

def _tokenize_line(line: str) -> list[_PrettyToken]:
    body = line.rstrip()
    newline = line[len(body) :]
    tokens: list[_PrettyToken] = []

    # Add the body of the line.
    if body:
        tokens += _tokenize_str(body)

    # Add the line ending, flagging any whitespace that precedes it.
    if newline:
        if newline in ("\n", "\r\n"):
            tokens.append(_NewlineToken(newline))
        else:
            whitespace = newline.rstrip("\r\n")
            newline = newline[len(whitespace) :]
            tokens.append(_WhitespaceToken(whitespace))
            tokens.append(_HintToken("(trailing whitespace)"))
            tokens.append(_NewlineToken(newline))

    return tokens
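
# Illustration (not in the original module): whitespace squeezed between the
# body and the newline is split out and flagged.
#   _tokenize_line("ans \n")
#   -> [_BodyToken("ans"), _WhitespaceToken(" "),
#       _HintToken("(trailing whitespace)"), _NewlineToken("\n")]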

def _merge_token(tokens: list[_PrettyToken]) -> Iterable[_PrettyToken]:
    if len(tokens) == 0:
        yield _HintToken("(empty)")
        return

    prev = tokens[0]
    for token in tokens[1:]:
        if type(prev) is type(token):
            prev = type(prev)(prev.value + token.value)
        else:
            yield prev
            prev = token
    yield prev

    if isinstance(prev, _BodyToken):
        yield _HintToken("(no trailing newline)")
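
# Illustration (not in the original module): adjacent tokens of the same type
# collapse into one, and a body token at the very end means the content had no
# final newline. Since _NewlineToken is an alias of _WhitespaceToken, a
# whitespace run also merges with the newline that follows it.
#   list(_merge_token([_BodyToken("a"), _BodyToken("b")]))
#   -> [_BodyToken("ab"), _HintToken("(no trailing newline)")]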

@dataclass
class Printer:
    content: str | pathlib.Path
    limit: int = 60
    head: int = 20
    tail: int = 20

    def __post_init__(self):  # pragma: no cover
        if self.head + self.tail >= self.limit:
            raise ValueError("head + tail must be smaller than limit")

    def __str__(self) -> str:
        return self.render_file_content()

    def render_file_content(self) -> str:
        tokens = self._tokenize_file_content()
        return "".join(token.render() for token in tokens)

    def _tokenize_file_content(self) -> Iterable[_PrettyToken]:
        if not isinstance(self.content, str):
            self.content = self.content.read_text()

        # Merge adjacent tokens of the same type before rendering.
        return _merge_token(list(self._token(self.content)))

    def _token(self, text: str) -> Generator[_PrettyToken, None, None]:
        if len(text) < self.limit:
            # Short content: render every line.
            for line in text.splitlines(keepends=True):
                yield from _tokenize_line(line)
        else:
            # Long content: keep the head and tail, elide the middle.
            left = text[: self.head]
            right = text[-self.tail :]

            for line in left.splitlines(keepends=True):
                yield from _tokenize_line(line)

            yield _HintToken(f"... ({len(text) - len(right) - len(left)} chars) ...")

            for line in right.splitlines(keepends=True):
                yield from _tokenize_line(line)
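
# Illustration (not in the original module): with the defaults (limit=60,
# head=20, tail=20), a 100-character string renders as its first 20 characters,
# a dim "... (60 chars) ..." hint, then its last 20 characters; anything under
# 60 characters is shown in full.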

def green(s: str) -> str:
    return Fore.GREEN + s + Fore.RESET


def red(s: str) -> str:
    return Fore.RED + s + Fore.RESET
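
# A minimal usage sketch (added for illustration; not part of the original
# module). Printer surfaces invisible differences in judge output: trailing
# whitespace, a missing final newline, and oversized content.
if __name__ == "__main__":
    # Trailing whitespace and the missing final newline show up as dim hints.
    print(Printer("YES \nNO"))

    # A 100-character blob is clipped to its first 20 and last 20 characters.
    print(Printer("x" * 100))

    print(green("AC"), red("WA"))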