Coverage for src / competitive_verifier / models / result.py: 100%

81 statements  

« prev     ^ index     » next       coverage.py v7.13.1, created at 2026-03-05 16:00 +0000

1import datetime 

2import pathlib 

3from logging import getLogger 

4from typing import TYPE_CHECKING, Any 

5 

6from pydantic import BaseModel, Field, field_validator 

7 

8from competitive_verifier.log import GitHubMessageParams 

9from competitive_verifier.util import to_relative 

10 

11from .path import ForcePosixPath 

12from .result_status import JudgeStatus, ResultStatus 

13 

14if TYPE_CHECKING: 

15 from _typeshed import StrPath 

16 

17logger = getLogger(__name__) 

18 

19 

class TestcaseResult(BaseModel):
    """The judge outcome of a single test case within one verification run."""

    name: str = Field(
        description="The name of test case.",
    )
    """The name of test case.
    """

    status: JudgeStatus = Field(
        description="The result status of the test case.",
    )
    """The result status of the test case.
    """

    elapsed: float = Field(
        description="Number of seconds elapsed for the test case.",
    )
    """Number of seconds elapsed for the test case.
    """

    memory: float | None = Field(
        default=None,
        description="The size of memory used in megabytes.",
    )
    """The size of memory used in megabytes.
    """

45 

46 

class VerificationResult(BaseModel):
    """The aggregated outcome of running one verification."""

    verification_name: str | None = Field(
        default=None,
        description="The name of verification.",
    )
    """The name of verification.
    """
    status: ResultStatus = Field(
        description="The result status of verification.",
    )
    """The result status of verification.
    """

    elapsed: float = Field(
        description="Total number of seconds elapsed for all test cases.",
    )
    """Total number of seconds elapsed for all test cases.
    """

    slowest: float | None = Field(
        default=None,
        description="Maximum number of seconds elapsed for each test cases.",
    )
    """Maximum number of seconds elapsed for each test cases.
    """

    heaviest: float | None = Field(
        default=None,
        description="Maximum size of memory used in megabytes.",
    )
    """Maximum size of memory used in megabytes.
    """

    testcases: list[TestcaseResult] | None = Field(
        default=None,
        description="The results of each test case.",
    )
    """The results of each test case.
    """

    last_execution_time: datetime.datetime = Field(
        default_factory=lambda: datetime.datetime.now(datetime.timezone.utc),
        description="The time at which the last validation was performed.",
    )
    """The time at which the last validation was performed.
    """

    @field_validator("status", mode="before")
    @classmethod
    def verification_list(cls, v: Any) -> Any:  # noqa: ANN401
        # Normalize status strings to lowercase before enum parsing so
        # case variants like "SUCCESS" are accepted; non-strings pass through.
        if isinstance(v, str):
            return v.lower()
        return v

    def need_reverifying(self, base_time: datetime.datetime) -> bool:
        """Return ``True`` if this verification should be executed again.

        A non-successful result always needs re-running; a successful one
        needs re-running only when it was executed before *base_time*.
        """
        return (
            self.status != ResultStatus.SUCCESS
            or self.last_execution_time < base_time
        )

104 

105 

class FileResult(BaseModel):
    """The verification results recorded for a single file."""

    verifications: list[VerificationResult] = Field(
        default_factory=list[VerificationResult],
        description="The results of each verification.",
    )
    """The results of each verification.
    """

    newest: bool = Field(
        default=True,
        description="Whether the verification was performed on the most recent run.",
    )
    """Whether the verification was performed on the most recent run.
    """

    def need_verification(self, base_time: datetime.datetime) -> bool:
        """Return ``True`` if any verification of this file must be (re)run."""
        # A file with no recorded verifications has never been verified.
        if not self.verifications:
            return True
        return any(v.need_reverifying(base_time) for v in self.verifications)

    def is_success(self, *, allow_skip: bool) -> bool:
        """Return ``True`` if every verification passed.

        With *allow_skip*, any status other than ``FAILURE`` (e.g. a skip)
        counts as passing; otherwise only ``SUCCESS`` does.
        """
        if allow_skip:
            acceptable = lambda s: s != ResultStatus.FAILURE  # noqa: E731
        else:
            acceptable = lambda s: s == ResultStatus.SUCCESS  # noqa: E731
        return all(acceptable(v.status) for v in self.verifications)

130 

131 

class VerifyCommandResult(BaseModel):
    """The overall result of a verify command across all files."""

    total_seconds: float = Field(
        description="Total number of seconds elapsed for all verification.",
    )
    """Total number of seconds elapsed for all verification.
    """

    files: dict[ForcePosixPath, FileResult] = Field(
        default_factory=dict[ForcePosixPath, FileResult],
        description="The files to be verified.",
    )
    """The files to be verified.
    """

    @classmethod
    def parse_file_relative(cls, path: "StrPath") -> "VerifyCommandResult":
        """Load a result JSON file and rewrite its keys relative to the cwd.

        Entries whose path cannot be made relative (files outside the
        current directory) are dropped with a warning.
        """
        result = cls.model_validate_json(pathlib.Path(path).read_bytes())
        relative_files: dict[pathlib.Path, FileResult] = {}
        for original_path, file_result in result.files.items():
            relative_path = to_relative(original_path)
            if relative_path:
                relative_files[relative_path] = file_result
            else:
                logger.warning(
                    "Files in other directories are not subject to verification: %s",
                    original_path,
                    extra={"github": GitHubMessageParams()},
                )
        result.files = relative_files
        return result

    def merge(self, other: "VerifyCommandResult") -> "VerifyCommandResult":
        """Combine two results into a new one.

        Elapsed times are summed. For each file, the entry from *other*
        replaces the existing one unless the existing entry is from the
        newest run while the incoming one is not.
        """
        merged = dict(self.files)
        for file_path, incoming in other.files.items():
            existing = merged.get(file_path)
            if incoming.newest or existing is None or not existing.newest:
                merged[file_path] = incoming
        return VerifyCommandResult(
            total_seconds=self.total_seconds + other.total_seconds,
            files=merged,
        )

    def is_success(self, *, allow_skip: bool = True) -> bool:
        """Return ``True`` if every file's verifications passed."""
        return all(
            file_result.is_success(allow_skip=allow_skip)
            for file_result in self.files.values()
        )