Coverage for src / competitive_verifier / documents / render.py: 94%
335 statements
« prev ^ index » next coverage.py v7.13.1, created at 2026-03-05 16:00 +0000
« prev ^ index » next coverage.py v7.13.1, created at 2026-03-05 16:00 +0000
1import datetime
2import enum
3import pathlib
4from abc import ABC, abstractmethod
5from collections.abc import Iterable
6from collections.abc import Set as AbstractSet
7from dataclasses import dataclass
8from functools import cached_property
9from itertools import chain
10from logging import getLogger
11from typing import BinaryIO
13from pydantic import BaseModel
15from competitive_verifier import git, log
16from competitive_verifier.models import (
17 DocumentOutputMode,
18 ForcePosixPath,
19 ProblemVerification,
20 ResultStatus,
21 SortedPathSet,
22 VerificationFile,
23 VerificationInput,
24 VerificationResult,
25 VerifyCommandResult,
26)
27from competitive_verifier.util import (
28 normalize_bytes_text,
29 read_text_normalized,
30 resolve_relative_or_abs_path,
31)
33from .config import ConfigYaml
34from .front_matter import FrontMatter, Markdown
35from .render_data import (
36 CategorizedIndex,
37 CodePageData,
38 Dependency,
39 EmbeddedCode,
40 EnvTestcaseResult,
41 IndexFiles,
42 IndexRenderData,
43 MultiCodePageData,
44 PageRenderData,
45 RenderLink,
46 StatusIcon,
47)
# Module-level logger; handlers and levels are configured by the application.
logger = getLogger(__name__)
def _paths_to_render_links(
    paths: SortedPathSet, page_jobs: dict[pathlib.Path, "PageRenderJob"]
) -> list[RenderLink]:
    """Convert a set of source paths into render links.

    Paths are ordered case-insensitively by their POSIX string form.
    Paths with no registered page job, and jobs whose ``to_render_link``
    returns a falsy value (e.g. hidden pages), are dropped.
    """
    ordered = sorted(paths, key=lambda p: str.casefold(p.as_posix()))
    links: list[RenderLink] = []
    for path in ordered:
        job = page_jobs.get(path)
        if not job:
            continue
        link = job.to_render_link()
        if link:
            links.append(link)
    return links
class MultiTargetMarkdown(Markdown):
    """A user markdown file whose front matter documents multiple sources.

    Narrows the optional ``path``/``front_matter`` fields of :class:`Markdown`
    to required ones and records the resolved list of source files covered.
    """

    # Narrowed to non-optional; pyright suppressions keep the override legal.
    path: ForcePosixPath  # pyright: ignore[reportIncompatibleVariableOverride, reportGeneralTypeIssues]
    front_matter: FrontMatter  # pyright: ignore[reportIncompatibleVariableOverride]
    # Source files (resolved paths) that this single document describes.
    multi_documentation_of: list[pathlib.Path]
@dataclass
class UserMarkdowns:
    """User-written markdown documents, split by how many sources they cover."""

    # One document per source file (key = the documented source path).
    single: dict[pathlib.Path, Markdown]
    # Documents whose front matter lists several source files.
    multi: list[MultiTargetMarkdown]

    @staticmethod
    def select_markdown(sources: set[pathlib.Path]) -> "UserMarkdowns":
        """Scan *sources* for ``.md`` files and classify them.

        A markdown file participates only if it has a path, front matter,
        and a ``documentation_of`` entry.  A string ``documentation_of``
        yields a single-target document; a list yields a multi-target one.
        Entries that do not resolve to a path inside *sources* are dropped
        with a warning (surfaced as a GitHub annotation).

        After classification, each source covered by a multi-target document
        (unless that document sets ``keep_single``) gets its single-target
        page demoted to a redirect stub pointing at the multi-target page.
        """
        single: dict[pathlib.Path, Markdown] = {}
        multi: list[MultiTargetMarkdown] = []
        markdowns = [Markdown.load_file(t) for t in sources if t.suffix == ".md"]
        for md in markdowns:
            if not (md.path and md.front_matter and md.front_matter.documentation_of):
                continue

            if isinstance(md.front_matter.documentation_of, str):
                # Single target: resolve relative to the markdown's directory.
                source_path = resolve_relative_or_abs_path(
                    md.front_matter.documentation_of,
                    basedir=md.path.parent,
                )
                if source_path in sources:
                    # Canonicalize documentation_of to the resolved POSIX path.
                    md.front_matter.documentation_of = source_path.as_posix()
                    single[source_path] = md
                else:
                    logger.warning(
                        "Markdown(%s) documentation_of: %s is not found.",
                        md.path,
                        md.front_matter.documentation_of,
                        extra={"github": log.GitHubMessageParams(file=md.path)},
                    )
            else:
                # Multiple targets: keep only those that resolve into sources.
                multi_documentation_of: list[pathlib.Path] = []
                for d in md.front_matter.documentation_of:
                    source_path = resolve_relative_or_abs_path(
                        d,
                        basedir=md.path.parent,
                    )
                    if source_path in sources:
                        multi_documentation_of.append(source_path)
                    else:
                        logger.warning(
                            "Markdown(%s) documentation_of: %s is not found.",
                            md.path,
                            d,
                            extra={"github": log.GitHubMessageParams(file=md.path)},
                        )
                if multi_documentation_of:
                    multi.append(
                        MultiTargetMarkdown(
                            path=md.path,
                            front_matter=md.front_matter,
                            content=md.content,
                            multi_documentation_of=multi_documentation_of,
                        )
                    )
                else:
                    logger.warning(
                        "Markdown(%s) documentation_of have no valid files.",
                        md.path,
                        extra={"github": log.GitHubMessageParams(file=md.path)},
                    )

        # Demote per-source pages covered by a multi-target document to
        # redirect stubs, unless the document opts out via keep_single.
        for m in multi:
            if m.front_matter and m.front_matter.keep_single:
                continue
            for source in m.multi_documentation_of:
                redirect_to = f"/{m.path.with_suffix('').as_posix()}"
                s = single.get(source)
                if not s:
                    # No user page for this source: synthesize an empty stub.
                    s = Markdown(content=b"", front_matter=None)
                if not s.front_matter:
                    s.front_matter = FrontMatter()

                # "never" on the multi document propagates; otherwise the
                # stub stays reachable but is kept out of the index.
                if m.front_matter.display == DocumentOutputMode.never:
                    s.front_matter.display = DocumentOutputMode.never
                else:
                    s.front_matter.display = DocumentOutputMode.no_index
                s.front_matter.redirect_to = redirect_to

                single[source] = s
        return UserMarkdowns(
            single=single,
            multi=multi,
        )
class _VerificationStatusFlag(enum.Flag):
    """Bit flags describing a file's kind and its verification outcomes.

    Bit 0 distinguishes test files from library files; the remaining bits
    record which result statuses (AC / WA / SKIP) were observed.  The
    named combinations below map onto :class:`StatusIcon` values.

    NOTE: ``IS_LIBRARY``, ``NOTHING`` and ``LIBRARY_NOTHING`` are all 0,
    i.e. aliases of the same member — "library" is the absence of the
    test bit, and the member order here fixes the canonical name.
    """

    IS_LIBRARY = 0
    NOTHING = 0
    LIBRARY_NOTHING = IS_LIBRARY
    TEST_NOTHING = enum.auto()
    IS_TEST = TEST_NOTHING
    HAVE_AC = enum.auto()
    HAVE_WA = enum.auto()
    HAVE_SKIP = enum.auto()

    # Library-file combinations of observed results.
    LIBRARY_AC_WA_SKIP = IS_LIBRARY | HAVE_AC | HAVE_WA | HAVE_SKIP
    LIBRARY_AC_WA = IS_LIBRARY | HAVE_AC | HAVE_WA
    LIBRARY_AC_SKIP = IS_LIBRARY | HAVE_AC | HAVE_SKIP
    LIBRARY_AC = IS_LIBRARY | HAVE_AC
    LIBRARY_WA_SKIP = IS_LIBRARY | HAVE_WA | HAVE_SKIP
    LIBRARY_WA = IS_LIBRARY | HAVE_WA
    LIBRARY_SKIP = IS_LIBRARY | HAVE_SKIP

    # Test-file combinations of observed results.
    TEST_AC_WA_SKIP = IS_TEST | HAVE_AC | HAVE_WA | HAVE_SKIP
    TEST_AC_WA = IS_TEST | HAVE_AC | HAVE_WA
    TEST_AC_SKIP = IS_TEST | HAVE_AC | HAVE_SKIP
    TEST_AC = IS_TEST | HAVE_AC
    TEST_WA_SKIP = IS_TEST | HAVE_WA | HAVE_SKIP
    TEST_WA = IS_TEST | HAVE_WA
    TEST_SKIP = IS_TEST | HAVE_SKIP

    def to_status(self) -> StatusIcon:
        """Collapse this flag combination to the icon shown in docs.

        Raises KeyError for combinations not listed (i.e. bit patterns
        with no named alias above).
        """
        # Any WA wins; AC+SKIP downgrades to "partial"/"waiting judge".
        d = {
            self.LIBRARY_AC_WA_SKIP: StatusIcon.LIBRARY_SOME_WA,
            self.LIBRARY_AC_WA: StatusIcon.LIBRARY_SOME_WA,
            self.LIBRARY_AC_SKIP: StatusIcon.LIBRARY_PARTIAL_AC,
            self.LIBRARY_AC: StatusIcon.LIBRARY_ALL_AC,
            self.LIBRARY_WA_SKIP: StatusIcon.LIBRARY_ALL_WA,
            self.LIBRARY_WA: StatusIcon.LIBRARY_ALL_WA,
            self.LIBRARY_SKIP: StatusIcon.LIBRARY_NO_TESTS,
            self.LIBRARY_NOTHING: StatusIcon.LIBRARY_NO_TESTS,
            self.TEST_AC_WA_SKIP: StatusIcon.TEST_WRONG_ANSWER,
            self.TEST_AC_WA: StatusIcon.TEST_WRONG_ANSWER,
            self.TEST_AC_SKIP: StatusIcon.TEST_WAITING_JUDGE,
            self.TEST_AC: StatusIcon.TEST_ACCEPTED,
            self.TEST_WA_SKIP: StatusIcon.TEST_WRONG_ANSWER,
            self.TEST_WA: StatusIcon.TEST_WRONG_ANSWER,
            self.TEST_SKIP: StatusIcon.TEST_WAITING_JUDGE,
            self.TEST_NOTHING: StatusIcon.TEST_WAITING_JUDGE,
        }
        return d[self]

    @classmethod
    def from_status(cls, status: StatusIcon) -> "_VerificationStatusFlag":
        """Inverse of :meth:`to_status` (picks one canonical flag per icon).

        Lossy round-trip: several flag combinations map to the same icon,
        so to_status(from_status(s)) == s but not vice versa.
        """
        d = {
            StatusIcon.LIBRARY_SOME_WA: cls.LIBRARY_AC_WA,
            StatusIcon.LIBRARY_PARTIAL_AC: cls.LIBRARY_AC_SKIP,
            StatusIcon.LIBRARY_ALL_AC: cls.LIBRARY_AC,
            StatusIcon.LIBRARY_ALL_WA: cls.LIBRARY_WA,
            StatusIcon.LIBRARY_NO_TESTS: cls.LIBRARY_NOTHING,
            StatusIcon.TEST_ACCEPTED: cls.TEST_AC,
            StatusIcon.TEST_WRONG_ANSWER: cls.TEST_WA,
            StatusIcon.TEST_WAITING_JUDGE: cls.TEST_NOTHING,
        }
        return d[status]
class SourceCodeStat(BaseModel):
    """Per-file statistics derived from verification inputs and results."""

    path: ForcePosixPath
    # True if the file itself is a verification (test) file.
    is_verification: bool
    verification_status: StatusIcon
    file_input: VerificationFile
    # Last-commit time over the file's transitive dependencies.
    timestamp: datetime.datetime
    depends_on: SortedPathSet
    required_by: SortedPathSet
    verified_with: SortedPathSet
    # Present for verification files; None for pure library files.
    verification_results: list[VerificationResult] | None = None

    @staticmethod
    def resolve_dependency(
        *,
        verifications: VerificationInput,
        result: VerifyCommandResult,
        included_files: AbstractSet[pathlib.Path],
    ) -> dict[pathlib.Path, "SourceCodeStat"]:
        """Build a ``SourceCodeStat`` for each included file.

        Result statuses are first aggregated per file, then merged across
        each strongly-connected component of the dependency graph (files
        in a cycle share one status), and finally propagated downward to
        each file's direct dependencies.

        Raises ValueError if a verification file has no recorded results.
        """
        d: dict[pathlib.Path, SourceCodeStat] = {}
        # Every known file starts with no observed results.
        statuses: dict[pathlib.Path, _VerificationStatusFlag] = dict.fromkeys(
            verifications.files.keys(), _VerificationStatusFlag.NOTHING
        )
        verification_results_dict: dict[pathlib.Path, list[VerificationResult]] = {}

        # Pass 1: fold each file's raw results into AC/WA/SKIP flag bits.
        for p, r in result.files.items():
            if p not in included_files:
                continue
            st = _VerificationStatusFlag.NOTHING
            for v in r.verifications:
                if v.status == ResultStatus.SUCCESS:
                    st |= _VerificationStatusFlag.HAVE_AC
                elif v.status == ResultStatus.FAILURE:
                    st |= _VerificationStatusFlag.HAVE_WA
                elif v.status == ResultStatus.SKIPPED:
                    st |= _VerificationStatusFlag.HAVE_SKIP
            statuses[p] = st
            verification_results_dict[p] = r.verifications

        # Pass 2: walk SCC groups; scc() ordering matters because group
        # statuses are pushed into dependencies' entries as we go.
        for group0 in verifications.scc():
            group = group0 & included_files
            if not group:
                continue

            group_status = _VerificationStatusFlag.NOTHING

            # Members of one SCC (a dependency cycle) share a status.
            for path in group:
                group_status |= statuses[path]

            for path in group:
                depends_on = verifications.depends_on[path] & included_files
                required_by = verifications.required_by[path] & included_files
                verified_with = verifications.verified_with[path] & included_files

                # Propagate this group's results to direct dependencies so
                # libraries inherit the outcomes of tests that use them.
                for dep in depends_on:
                    statuses[dep] |= group_status

                timestamp = git.get_commit_time(
                    verifications.transitive_depends_on[path]
                )
                file_input = verifications.files[path]
                is_verification = file_input.is_verification()

                verification_results = verification_results_dict.get(path)

                if is_verification and verification_results is None:
                    raise ValueError("needs verification_results")

                # Stamp the library/test bit before mapping to an icon.
                flag_status = group_status | (
                    _VerificationStatusFlag.IS_TEST
                    if is_verification
                    else _VerificationStatusFlag.IS_LIBRARY
                )

                d[path] = SourceCodeStat(
                    path=path,
                    file_input=file_input,
                    is_verification=is_verification,
                    depends_on=depends_on,
                    required_by=required_by,
                    verified_with=verified_with,
                    timestamp=timestamp,
                    verification_status=flag_status.to_status(),
                    verification_results=verification_results,
                )
        return d
class RenderJob(ABC):
    """A unit of documentation output: one destination file to write."""

    @property
    @abstractmethod
    def destination_name(self) -> pathlib.Path: ...

    @abstractmethod
    def write_to(self, fp: BinaryIO): ...

    @staticmethod
    def enumerate_jobs(
        *,
        sources: set[pathlib.Path],
        verifications: VerificationInput,
        result: VerifyCommandResult,
        config: ConfigYaml,
        index_md: Markdown | None = None,
    ) -> list["RenderJob"]:
        """Plan all render jobs for the documentation site.

        Produces, in order: plain copy jobs for loose files, one
        :class:`PageRenderJob` per verified source, multi-code pages for
        user markdown documenting several sources, and a final index page.
        Jobs whose display mode is ``never`` are dropped entirely.
        """

        def plain_content(source: pathlib.Path) -> RenderJob | None:
            # Copy verbatim: .html always; .md only when it is NOT a
            # documentation page (no documentation_of in front matter).
            if source.suffix == ".md":
                md = Markdown.load_file(source)
                if md.front_matter and md.front_matter.documentation_of:
                    return None
            elif source.suffix == ".html":
                pass
            else:
                return None
            return PlainRenderJob(
                source_path=source,
                content=source.read_bytes(),
            )

        user_markdowns = UserMarkdowns.select_markdown(sources)

        logger.info(" %s source files...", len(sources))

        # Local model used only to pretty-print the debug payload.
        class SourceForDebug(BaseModel):
            sources: SortedPathSet
            markdowns: UserMarkdowns

        logger.debug(
            "source: %s",
            SourceForDebug(
                sources=sources,
                markdowns=user_markdowns,
            ),
        )
        with log.group("Resolve dependency"):
            stats_dict = SourceCodeStat.resolve_dependency(
                verifications=verifications,
                result=result,
                included_files=sources,
            )

        page_jobs: dict[pathlib.Path, PageRenderJob] = {}
        jobs: list[RenderJob] = []
        for source in sources:
            markdown = user_markdowns.single.get(source) or Markdown.make_default(
                source
            )
            stat = stats_dict.get(source)
            if not stat:
                # Not a verified source: copy it verbatim if eligible,
                # otherwise skip (silently for documentation .md files).
                plain_job = plain_content(source)
                if plain_job is not None:
                    jobs.append(plain_job)
                elif source.suffix != ".md":
                    logger.info("Skip file: %s", source)
                continue
            # Consolidation: group the page under the nearest configured
            # ancestor directory, falling back to the file's own parent.
            group_dir = None
            if config.consolidate:
                consolidate = config.consolidate
                group_dir = next(
                    filter(lambda p: p in consolidate, source.parents), None
                )

            pj = PageRenderJob(
                source_path=source,
                group_dir=group_dir or source.parent,
                markdown=markdown,
                stat=stat,
                verifications=verifications,
                result=result,
                page_jobs=page_jobs,
            )

            if pj.display == DocumentOutputMode.never:
                continue

            # page_jobs is shared (by reference) with every PageRenderJob,
            # so later additions are visible when links are resolved.
            page_jobs[pj.source_path] = pj
            jobs.append(pj)

        multis: list[MultiCodePageRenderJob] = []
        for md in user_markdowns.multi:
            group_dir = None
            if config.consolidate:
                consolidate = config.consolidate
                group_dir = next(
                    filter(lambda p: p in consolidate, md.path.parents), None
                )
            job = MultiCodePageRenderJob(
                markdown=md,
                group_dir=group_dir or md.path.parent,
                page_jobs=page_jobs,
            )

            if md.front_matter.display == DocumentOutputMode.never:
                continue
            multis.append(job)

        jobs.extend(multis)
        jobs.append(
            IndexRenderJob(
                page_jobs=page_jobs,
                multicode_docs=multis,
                index_md=index_md,
            )
        )
        return jobs
@dataclass(frozen=True)
class PlainRenderJob(RenderJob):
    """Render job that copies a file's bytes to the output verbatim."""

    source_path: ForcePosixPath
    content: bytes

    @property
    def destination_name(self):
        # The destination mirrors the source path unchanged.
        return self.source_path

    def write_to(self, fp: BinaryIO):
        fp.write(self.content)
@dataclass(frozen=True)
class MarkdownRenderJob(RenderJob):
    """Render job that emits a markdown document merged with its front matter.

    FIX: added the ``@dataclass(frozen=True)`` decorator.  The class declares
    annotated fields but previously had no generated ``__init__``, unlike its
    sibling concrete jobs (``PlainRenderJob``, ``PageRenderJob``,
    ``MultiCodePageRenderJob``), so keyword construction raised TypeError.
    """

    source_path: pathlib.Path
    markdown: Markdown

    @property
    def destination_name(self):
        # The destination mirrors the source path unchanged.
        return self.source_path

    def write_to(self, fp: BinaryIO):
        # dump_merged serializes front matter and content together.
        self.markdown.dump_merged(fp)
@dataclass(frozen=True)
class PageRenderJob(RenderJob):
    """Render job for one verified source file's documentation page."""

    source_path: pathlib.Path
    # Directory the page is grouped under for index categorization.
    group_dir: pathlib.Path
    markdown: Markdown
    stat: SourceCodeStat
    verifications: VerificationInput
    result: VerifyCommandResult
    # Shared registry of all page jobs; used to resolve dependency links.
    page_jobs: dict[pathlib.Path, "PageRenderJob"]

    @property
    def is_verification(self):
        return self.stat.is_verification

    @property
    def display(self):
        # Falls back to "visible" when front matter sets no display mode.
        return self.front_matter.display or DocumentOutputMode.visible

    def __str__(self) -> str:
        return f"PageRenderJob(source_path={self.source_path!r},markdown={self.markdown!r},stat={self.stat!r})"

    def validate_front_matter(self):
        """Raise ValueError if user front matter documents a different file."""
        front_matter = self.markdown.front_matter
        if (
            front_matter
            and front_matter.documentation_of
            and (
                not isinstance(front_matter.documentation_of, str)
                or self.source_path != pathlib.Path(front_matter.documentation_of)
            )
        ):
            raise ValueError(
                "PageRenderJob.path must equal front_matter.documentation_of."
            )

    def to_render_link(self, *, index: bool = False) -> RenderLink | None:
        """Build the link to this page, or None if it should be suppressed.

        hidden/never pages never link; no_index pages link everywhere
        except on the index page (``index=True``).
        """
        if self.display in (DocumentOutputMode.hidden, DocumentOutputMode.never) or (
            index and self.display == DocumentOutputMode.no_index
        ):
            return None
        return RenderLink(
            path=self.source_path,
            filename=self.source_path.relative_to(self.group_dir).as_posix(),
            title=self.front_matter.title,
            icon=self.stat.verification_status,
        )

    @cached_property
    def front_matter(self) -> FrontMatter:
        """Effective front matter: user values merged with derived defaults.

        Works on a copy so the original markdown object is not mutated.
        Cached because it is read repeatedly (display, links, page data).
        """
        front_matter = (
            self.markdown.front_matter.model_copy()
            if self.markdown.front_matter
            else FrontMatter()
        )
        # documentation_of is always pinned to this job's source path.
        front_matter.documentation_of = self.source_path.as_posix()
        if not front_matter.layout:
            front_matter.layout = "document"

        # Title/display fall back to values declared on the input file.
        input_file = self.verifications.files.get(self.source_path)
        if not front_matter.title and (input_file and input_file.title):
            front_matter.title = input_file.title
        if not front_matter.display and (input_file and input_file.display):
            front_matter.display = input_file.display

        return front_matter

    @property
    def destination_name(self):
        # e.g. "foo/bar.py" -> "foo/bar.py.md" (suffix appended, not replaced).
        return self.source_path.with_suffix(self.source_path.suffix + ".md")

    def write_to(self, fp: BinaryIO):
        self.validate_front_matter()
        front_matter = self.front_matter
        front_matter.data = self.get_page_data()
        Markdown(
            path=self.source_path,
            front_matter=front_matter,
            content=self.markdown.content,
        ).dump_merged(fp)

    def get_page_data(self) -> PageRenderData:
        """Assemble the template payload for this page."""
        depends_on = _paths_to_render_links(self.stat.depends_on, self.page_jobs)
        required_by = _paths_to_render_links(self.stat.required_by, self.page_jobs)
        verified_with = _paths_to_render_links(self.stat.verified_with, self.page_jobs)

        # Copy so the shared file_input attributes are not mutated.
        attributes = self.stat.file_input.document_attributes.copy()
        # Surface the first problem URL (if any) as a PROBLEM attribute,
        # without overriding an explicitly declared one.
        if problem_url := next(
            (
                v.problem
                for v in self.stat.file_input.verification_list
                if isinstance(v, ProblemVerification)
            ),
            None,
        ):
            attributes.setdefault("PROBLEM", problem_url)

        code = read_text_normalized(self.source_path)

        # "default" is the main source; extra tabs come from additional sources.
        embedded = [EmbeddedCode(name="default", code=code)]
        embedded.extend(
            EmbeddedCode(name=s.name, code=read_text_normalized(s.path))
            for s in self.stat.file_input.additonal_sources
        )

        return PageRenderData(
            path=self.source_path,
            path_extension=self.source_path.suffix.lstrip("."),
            title=self.front_matter.title,
            embedded=embedded,
            timestamp=self.stat.timestamp,
            attributes=attributes,
            testcases=(
                [
                    EnvTestcaseResult(
                        name=c.name,
                        status=c.status,
                        elapsed=c.elapsed,
                        memory=c.memory,
                        environment=v.verification_name,
                    )
                    for v in self.stat.verification_results
                    for c in (v.testcases or [])
                ]
                if self.stat.verification_results
                else None
            ),
            verification_status=self.stat.verification_status,
            is_verification_file=self.stat.is_verification,
            is_failed=self.stat.verification_status.is_failed,
            document_path=self.markdown.path,
            dependencies=[
                Dependency(type="Depends on", files=depends_on),
                Dependency(type="Required by", files=required_by),
                Dependency(type="Verified with", files=verified_with),
            ],
            depends_on=[link.path for link in depends_on],
            required_by=[link.path for link in required_by],
            verified_with=[link.path for link in verified_with],
        )
@dataclass(frozen=True)
class MultiCodePageRenderJob(RenderJob):
    """Render job for a user markdown page documenting several sources."""

    markdown: MultiTargetMarkdown
    # Directory the page is grouped under for index categorization.
    group_dir: pathlib.Path
    # Shared registry of all page jobs; used to resolve member pages/links.
    page_jobs: dict[pathlib.Path, "PageRenderJob"]

    def __str__(self) -> str:
        return f"MultiCodePageRenderJob(multi_documentation_of={self.markdown.multi_documentation_of!r})"

    @cached_property
    def jobs(self) -> list[PageRenderJob]:
        """Member page jobs, in documentation_of order; unknown paths skipped."""
        jobs: list[PageRenderJob] = []
        for m in self.markdown.multi_documentation_of:
            job = self.page_jobs.get(m)
            if not job:
                continue
            jobs.append(job)
        return jobs

    @cached_property
    def verification_status(self) -> StatusIcon:
        """Combined status icon: OR of all member flags, re-collapsed."""
        flag = _VerificationStatusFlag.NOTHING
        for job in self.jobs:
            flag |= _VerificationStatusFlag.from_status(job.stat.verification_status)
        return flag.to_status()

    @property
    def is_verification(self):
        # The page counts as a test if any member carries the test bit.
        return self.verification_status.is_test

    @property
    def display(self):
        return self.markdown.front_matter.display or DocumentOutputMode.visible

    @property
    def destination_name(self) -> pathlib.Path:
        return self.markdown.path

    def to_render_link(self, *, index: bool = False) -> RenderLink:
        # Unlike PageRenderJob, always returns a link; visibility filtering
        # is done by the caller (e.g. IndexRenderJob checks display).
        return RenderLink(
            path=self.markdown.path.with_suffix(""),
            filename=self.markdown.path.relative_to(self.group_dir).as_posix(),
            title=self.markdown.front_matter.title,
            icon=self.verification_status,
        )

    def write_to(self, fp: BinaryIO):
        front_matter = self.markdown.front_matter
        front_matter.layout = "multidoc"
        front_matter.data = self.get_page_data()
        Markdown(
            path=self.markdown.path,
            front_matter=front_matter,
            content=self.markdown.content,
        ).dump_merged(fp)

    def get_page_data(self) -> MultiCodePageData:
        """Assemble the template payload combining every member page."""
        # Each member's page data plus its own markdown body text.
        codes = [
            CodePageData.model_validate(
                {"document_content": normalize_bytes_text(j.markdown.content)}
                | j.get_page_data().model_dump(),
            )
            for j in self.jobs
        ]

        # Exclude internal references: links among members (and to this
        # page itself) are not shown as external dependencies.
        multi_documentation_of_set = set(self.markdown.multi_documentation_of)
        multi_documentation_of_set.add(self.markdown.path)
        depends_on_paths = (
            set(chain.from_iterable(j.stat.depends_on for j in self.jobs))
            - multi_documentation_of_set
        )
        required_by_paths = (
            set(chain.from_iterable(j.stat.required_by for j in self.jobs))
            - multi_documentation_of_set
        )
        verified_with_paths = (
            set(chain.from_iterable(j.stat.verified_with for j in self.jobs))
            - multi_documentation_of_set
        )

        depends_on = _paths_to_render_links(depends_on_paths, self.page_jobs)
        required_by = _paths_to_render_links(required_by_paths, self.page_jobs)
        verified_with = _paths_to_render_links(verified_with_paths, self.page_jobs)

        return MultiCodePageData(
            path=self.markdown.path,
            verification_status=self.verification_status,
            is_failed=any(c.is_failed for c in codes),
            codes=codes,
            dependencies=[
                Dependency(type="Depends on", files=depends_on),
                Dependency(type="Required by", files=required_by),
                Dependency(type="Verified with", files=verified_with),
            ],
        )
@dataclass
class IndexRenderJob(RenderJob):
    """Render job for the site's top-level index page."""

    page_jobs: dict[pathlib.Path, "PageRenderJob"]
    multicode_docs: list[MultiCodePageRenderJob]
    # Optional user-provided index.md whose body is carried over.
    index_md: Markdown | None = None

    def __str__(self) -> str:
        # Render via a throwaway dataclass to get "job_paths=..." formatting,
        # then strip the leading underscore from "_IndexRenderJob(...)".
        @dataclass
        class _IndexRenderJob:
            job_paths: Iterable[pathlib.Path]

        s = repr(
            _IndexRenderJob(
                job_paths=self.page_jobs.keys(),
            )
        )
        index = s.find("_IndexRenderJob")
        return s[index + 1 :]

    @property
    def destination_name(self):
        return pathlib.Path("index.md")

    def write_to(self, fp: BinaryIO):
        Markdown(
            path=self.destination_name,
            front_matter=FrontMatter(
                layout="toppage",
                data=self.get_page_data(),
            ),
            content=self.index_md.content if self.index_md else b"",
        ).dump_merged(fp)

    def get_page_data(self) -> IndexRenderData:
        """Bucket visible pages by directory into library/verification lists."""
        library_categories: dict[str, list[RenderLink]] = {}
        verification_categories: dict[str, list[RenderLink]] = {}
        for job in chain.from_iterable([self.page_jobs.values(), self.multicode_docs]):
            # Only fully visible pages appear on the index.
            if job.display != DocumentOutputMode.visible:
                continue
            categories = (
                verification_categories if job.is_verification else library_categories
            )

            # Category key is the group directory, "" for repo root,
            # otherwise normalized to end with "/".
            directory = job.group_dir
            category = directory.as_posix()
            if category == ".":
                category = ""
            elif not category.endswith("/"):
                category = f"{category}/"

            if category not in categories:
                categories[category] = []

            link = job.to_render_link(index=True)
            if link:
                categories[category].append(link)

        def _build_categories_list(
            categories: dict[str, list[RenderLink]],
        ) -> list[CategorizedIndex]:
            # Categories sorted by name; pages sorted by POSIX path.
            return sorted(
                (
                    CategorizedIndex(
                        name=category,
                        pages=sorted(pages, key=lambda p: p.path.as_posix()),
                    )
                    for category, pages in categories.items()
                ),
                key=lambda d: d.name,
            )

        return IndexRenderData(
            top=[
                IndexFiles(
                    type="Library Files",
                    categories=_build_categories_list(library_categories),
                ),
                IndexFiles(
                    type="Verification Files",
                    categories=_build_categories_list(verification_categories),
                ),
            ],
        )