feat: migrate repo & init classes

李衍志523370910113 2024-10-23 18:53:41 +08:00
parent 16c7cb517a
commit 805a79bf10
7 changed files with 145 additions and 79 deletions

View File

@@ -1,15 +1,17 @@
-from typing import List
 from joj3_config_generator.lib.repo import getHealthcheckConfig, getTeapotConfig
-from joj3_config_generator.lib.task import (
-    fix_comment,
-    fix_diff,
-    fix_keyword,
-    fix_result_detail,
-    get_conf_stage,
-    get_executorWithConfig,
+from joj3_config_generator.models import (
+    Cmd,
+    CmdFile,
+    ExecutorConfig,
+    ExecutorWithConfig,
+    ParserConfig,
+    Repo,
+    ResultConfig,
+    Stage,
+    StageConfig,
+    Task,
+    TeapotConfig,
 )
-from joj3_config_generator.models import joj1, repo, result, task
 # FIXME: LLM generated convert function, only for demostration
@@ -19,29 +21,72 @@ def convert(repo_conf: repo.Config, task_conf: task.Config) -> result.Config:
         name=task_conf.task,
         # TODO: specify the exact folder difference
         log_path=f"{task_conf.task.replace(' ', '-')}.log",
+        # TODO: specify the exact folder difference
+        log_path=f"{task_conf.task.replace(' ', '-')}.log",
         expire_unix_timestamp=(
             int(task_conf.release.deadline.timestamp())
             if task_conf.release.deadline
             else -1
         ),
-        stage=result.Stage(stages=[], sandbox_token=repo_conf.sandbox_token),
+        stage=StageConfig(stages=[], sandbox_token=repo_conf.sandbox_token),
         teapot=getTeapotConfig(repo_conf, task_conf),
     )
     # Construct healthcheck stage
     healthcheck_stage = getHealthcheckConfig(repo_conf, task_conf)
     result_conf.stage.stages.append(healthcheck_stage)
-    cached: list[str] = []
+    cached = []
     # Convert each stage in the task configuration
     for task_stage in task_conf.stages:
-        executor_with_config, cached = get_executorWithConfig(task_stage, cached)
-        conf_stage = get_conf_stage(task_stage, executor_with_config)
-        conf_stage = fix_result_detail(task_stage, conf_stage)
-        conf_stage = fix_comment(task_stage, conf_stage)
-        conf_stage = fix_keyword(task_stage, conf_stage)
-        conf_stage = fix_diff(task_stage, conf_stage)
+        file_import = (
+            task_stage.files.import_
+            if hasattr(task_stage, "files")
+            and hasattr(task_stage.files, "import_")
+            and (task_stage.files is not None)
+            and (task_stage.files.import_ is not None)
+            else []
+        )
+        copy_in_files = [file for file in file_import if (file not in cached)]
+        file_export = (
+            task_stage.files.export
+            if hasattr(task_stage, "files")
+            and hasattr(task_stage.files, "export")
+            and (task_stage.files is not None)
+            else []
+        )
+        executor_with_config = ExecutorWithConfig(
+            default=Cmd(
+                args=task_stage.command.split(),
+                copy_in={
+                    file: CmdFile(src=f"/home/tt/.config/joj/{file}")
+                    for file in copy_in_files
+                },
+                copy_in_cached={file: file for file in copy_in_files},
+                copy_out_cached=file_export if file_export is not None else [],
+            ),
+            cases=[],  # You can add cases if needed
+        )
+        if file_export is not None:
+            for file in file_export:
+                if file not in cached:
+                    cached.append(file)
+        conf_stage = Stage(
+            name=task_stage.name,
+            # TODO: we may have cq in future
+            group="joj" if "judge" in task_stage.name else None,
+            executor=ExecutorConfig(
+                name="sandbox",
+                with_=executor_with_config,
+            ),
+            parsers=[
+                ParserConfig(name=parser, with_={}) for parser in task_stage.parsers
+            ],
+        )
+        if "result-detail" in task_stage.parsers:
+            result_detail_parser = next(
+                p for p in conf_stage.parsers if p.name == "result-detail"
+            )
+            if task_stage.result_detail is not None:
+                result_detail_parser.with_.update(task_stage.result_detail)
         result_conf.stage.stages.append(conf_stage)
     return result_conf
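In the loop above, files exported by one stage are recorded in cached so that later stages do not copy them in again from /home/tt/.config/joj. A minimal standalone sketch of that bookkeeping (the stage names and file lists below are made up for illustration):

cached: list[str] = []
stages = [
    {"name": "compile", "import": ["main.py"], "export": ["main.bin"]},
    {"name": "judge", "import": ["main.bin", "cases.txt"], "export": []},
]
for stage in stages:
    # anything an earlier stage exported is served from the cache instead
    copy_in_files = [f for f in stage["import"] if f not in cached]
    print(stage["name"], "copies in", copy_in_files)
    for f in stage["export"]:
        if f not in cached:
            cached.append(f)
# prints: compile copies in ['main.py'], then judge copies in ['cases.txt']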

View File

@@ -0,0 +1,11 @@
+from joj3_config_generator.models.repo import Repo as Repo
+from joj3_config_generator.models.result import Cmd as Cmd
+from joj3_config_generator.models.result import CmdFile as CmdFile
+from joj3_config_generator.models.result import ExecutorConfig as ExecutorConfig
+from joj3_config_generator.models.result import ExecutorWithConfig as ExecutorWithConfig
+from joj3_config_generator.models.result import ParserConfig as ParserConfig
+from joj3_config_generator.models.result import ResultConfig as ResultConfig
+from joj3_config_generator.models.result import Stage as Stage
+from joj3_config_generator.models.result import StageConfig as StageConfig
+from joj3_config_generator.models.result import TeapotConfig as TeapotConfig
+from joj3_config_generator.models.task import Task as Task
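This new models/__init__.py re-exports the model classes at package level, which is what enables the flat imports used in the other files of this commit, for example:

from joj3_config_generator.models import Cmd, CmdFile, Repo, Stage, Task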

View File

@@ -1,8 +1,22 @@
 import hashlib
-import socket
+import os
 import tempfile
-from joj3_config_generator.models import joj1, repo, result, task
+from dotenv import load_dotenv
+from joj3_config_generator.models import (
+    Cmd,
+    CmdFile,
+    ExecutorConfig,
+    ExecutorWithConfig,
+    ParserConfig,
+    Repo,
+    ResultConfig,
+    Stage,
+    StageConfig,
+    Task,
+    TeapotConfig,
+)
 def get_temp_directory() -> str:
@@ -10,12 +24,17 @@ def get_temp_directory() -> str:
 def getGradingRepoName() -> str:
-    host_name = socket.gethostname()
-    return f"{host_name.split('-')[0]}-joj"
+    path = os.path.expanduser("~/.config/teapot/teapot.env")
+    if os.path.exists(path):
+        load_dotenv(path)
+    repo_name = os.environ.get("GITEA_ORG_NAME")
+    if repo_name is not None:
+        return f"{repo_name.split('-')[0]}-joj"
+    return "ece482-joj"
-def getTeapotConfig(repo_conf: repo.Config, task_conf: task.Config) -> result.Teapot:
-    teapot = result.Teapot(
+def getTeapotConfig(repo_conf: Repo, task_conf: Task) -> TeapotConfig:
+    teapot = TeapotConfig(
         # TODO: fix the log path
         log_path=f"{task_conf.task.replace(' ', '-')}-joint-teapot-debug.log",
         scoreboard_path=f"{task_conf.task.replace(' ', '-')}-scoreboard.csv",
@@ -25,7 +44,7 @@ def getTeapotConfig(repo_conf: repo.Config, task_conf: task.Config) -> result.Te
     return teapot
-def getHealthcheckCmd(repo_conf: repo.Config) -> result.Cmd:
+def getHealthcheckCmd(repo_conf: Repo) -> Cmd:
     repoSize = repo_conf.max_size
     immutable = repo_conf.files.immutable
     repo_size = f"-repoSize={str(repoSize)} "
@@ -52,11 +71,11 @@ def getHealthcheckCmd(repo_conf: repo.Config) -> result.Cmd:
     args = args + immutable_files
-    cmd = result.Cmd(
+    cmd = Cmd(
         args=args.split(),
         # FIXME: easier to edit within global scope
         copy_in={
-            f"/{get_temp_directory()}/repo-health-checker": result.CmdFile(
+            f"/{get_temp_directory()}/repo-health-checker": CmdFile(
                 src=f"/{get_temp_directory()}/repo-health-checker"
             )
         },
@@ -64,17 +83,15 @@
     return cmd
-def getHealthcheckConfig(
-    repo_conf: repo.Config, task_conf: task.Config
-) -> result.StageDetail:
-    healthcheck_stage = result.StageDetail(
+def getHealthcheckConfig(repo_conf: Repo, task_conf: Task) -> Stage:
+    healthcheck_stage = Stage(
         name="healthcheck",
         group="",
-        executor=result.Executor(
+        executor=ExecutorConfig(
             name="sandbox",
-            with_=result.ExecutorWith(default=getHealthcheckCmd(repo_conf), cases=[]),
+            with_=ExecutorWithConfig(default=getHealthcheckCmd(repo_conf), cases=[]),
         ),
-        parsers=[result.Parser(name="healthcheck", with_={"score": 0, "comment": ""})],
+        parsers=[ParserConfig(name="healthcheck", with_={"score": 0, "comment": ""})],
     )
     return healthcheck_stage
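getGradingRepoName() above now derives the grading repo name from a dotenv file rather than the host name. Assuming ~/.config/teapot/teapot.env contains a line such as GITEA_ORG_NAME=ece482-fa24 (a hypothetical value), the name resolves as sketched below; if the file or the variable is missing, the function falls back to "ece482-joj":

repo_name = "ece482-fa24"  # hypothetical GITEA_ORG_NAME value read via load_dotenv
print(f"{repo_name.split('-')[0]}-joj")  # -> ece482-joj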

View File

@@ -62,8 +62,8 @@ def convert(root: Path = Path(".")) -> result.Config:
     task_toml = task_file.read()
     repo_obj = rtoml.loads(repo_toml)
     task_obj = rtoml.loads(task_toml)
-    result_model = convert_conf(repo.Config(**repo_obj), task.Config(**task_obj))
-    result_model = remove_nulls(result_model)
+    print(task_obj)
+    result_model = convert_conf(Repo(**repo_obj), Task(**task_obj))
     result_dict = result_model.model_dump(by_alias=True)
     with open(result_json_path, "w") as result_file:

View File

@@ -9,6 +9,7 @@ class CmdFile(BaseModel):
     file_id: Optional[str] = Field(None, serialization_alias="fileId")
     name: Optional[str] = None
     max: Optional[int] = 4 * 1024 * 1024
+    max: Optional[int] = 4 * 1024 * 1024
     symlink: Optional[str] = None
     stream_in: bool = Field(False, serialization_alias="streamIn")
     stream_out: bool = Field(False, serialization_alias="streamOut")
@@ -22,11 +23,19 @@
     stdout: Optional[CmdFile] = CmdFile(name="stdout", max=4 * 1024)
     stderr: Optional[CmdFile] = CmdFile(name="stderr", max=4 * 1024)
     cpu_limit: int = Field(4 * 1000000000, serialization_alias="cpuLimit")
+    env: list[str] = ["PATH=/usr/bin:/bin:/usr/local/bin"]
+    stdin: Optional[CmdFile] = CmdFile(content="")
+    stdout: Optional[CmdFile] = CmdFile(name="stdout", max=4 * 1024)
+    stderr: Optional[CmdFile] = CmdFile(name="stderr", max=4 * 1024)
+    cpu_limit: int = Field(4 * 1000000000, serialization_alias="cpuLimit")
     real_cpu_limit: int = Field(0, serialization_alias="realCpuLimit")
     clock_limit: int = Field(8 * 1000000000, serialization_alias="clockLimit")
     memory_limit: int = Field(4 * 1024 * 1024, serialization_alias="memoryLimit")
+    clock_limit: int = Field(8 * 1000000000, serialization_alias="clockLimit")
+    memory_limit: int = Field(4 * 1024 * 1024, serialization_alias="memoryLimit")
     stack_limit: int = Field(0, serialization_alias="stackLimit")
     proc_limit: int = Field(50, serialization_alias="procLimit")
+    proc_limit: int = Field(50, serialization_alias="procLimit")
     cpu_rate_limit: int = Field(0, serialization_alias="cpuRateLimit")
     cpu_set_limit: str = Field("", serialization_alias="cpuSetLimit")
     copy_in: Dict[str, CmdFile] = Field({}, serialization_alias="copyIn")
@@ -45,17 +54,24 @@ class Cmd(BaseModel):
 class OptionalCmd(BaseModel):
     args: Optional[list[str]] = None
     env: Optional[list[str]] = ["PATH=/usr/bin:/bin:/usr/local/bin"]
+    env: Optional[list[str]] = ["PATH=/usr/bin:/bin:/usr/local/bin"]
     stdin: Optional[CmdFile] = None
     stdout: Optional[CmdFile] = None
     stderr: Optional[CmdFile] = None
     cpu_limit: Optional[int] = Field(4 * 1000000000, serialization_alias="cpuLimit")
+    cpu_limit: Optional[int] = Field(4 * 1000000000, serialization_alias="cpuLimit")
     real_cpu_limit: Optional[int] = Field(None, serialization_alias="realCpuLimit")
     clock_limit: Optional[int] = Field(8 * 1000000000, serialization_alias="clockLimit")
     memory_limit: Optional[int] = Field(
         4 * 1024 * 1024, serialization_alias="memoryLimit"
     )
+    clock_limit: Optional[int] = Field(8 * 1000000000, serialization_alias="clockLimit")
+    memory_limit: Optional[int] = Field(
+        4 * 1024 * 1024, serialization_alias="memoryLimit"
+    )
     stack_limit: Optional[int] = Field(None, serialization_alias="stackLimit")
     proc_limit: Optional[int] = Field(50, serialization_alias="procLimit")
+    proc_limit: Optional[int] = Field(50, serialization_alias="procLimit")
     cpu_rate_limit: Optional[int] = Field(None, serialization_alias="cpuRateLimit")
     cpu_set_limit: Optional[str] = Field(None, serialization_alias="cpuSetLimit")
     copy_in: Optional[Dict[str, CmdFile]] = Field(None, serialization_alias="copyIn")
@@ -81,7 +97,14 @@ class OptionalCmd(BaseModel):
     )
-class ExecutorWith(BaseModel):
+class Stage(BaseModel):
+    name: str
+    group: Optional[str] = None
+    executor: "ExecutorConfig"
+    parsers: list["ParserConfig"]
+class ExecutorWithConfig(BaseModel):
     default: Cmd
     cases: List[OptionalCmd]
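The serialization_alias arguments throughout this model file are what map the snake_case attributes onto the camelCase keys of the generated JSON once the CLI calls model_dump(by_alias=True) (see the convert diff above). A reduced, self-contained illustration using a stand-in model that is not part of the package:

from typing import Optional
from pydantic import BaseModel, Field

class DemoFile(BaseModel):  # stand-in for CmdFile, for illustration only
    file_id: Optional[str] = Field(None, serialization_alias="fileId")
    stream_in: bool = Field(False, serialization_alias="streamIn")

print(DemoFile(file_id="abc").model_dump(by_alias=True))  # {'fileId': 'abc', 'streamIn': False}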

View File

@@ -18,26 +18,13 @@ class ParserDummy(BaseModel):
 class ParserKeyword(BaseModel):
-    keyword: Optional[list[str]] = []
-    weight: Optional[list[int]] = []
+    keyword: Optional[list[str]] = None
+    weight: Optional[list[int]] = None
-class Outputs(BaseModel):
-    score: Optional[int] = 0
-    ignorespaces: Optional[bool] = False
-    hide: Optional[bool] = False
-    forcequit: Optional[bool] = True
-class ParserDiff(BaseModel):
-    output: Optional[Outputs] = Outputs()
 class Files(BaseModel):
-    import_: Optional[List[str]] = Field(
-        [], serialization_alias="import", validation_alias="import"
-    )
-    export: Optional[List[str]] = []
+    import_: Optional[list[str]] = Field([], alias="import")
+    export: Optional[list[str]] = []
 class Limit(BaseModel):
@@ -48,37 +35,20 @@
 class Stage(BaseModel):
-    name: Optional[str] = None  # Stage name
-    command: Optional[str] = None  # Command to run
+    name: str  # Stage name
+    command: str  # Command to run
     files: Optional[Files] = None
     score: Optional[int] = 0
-    parsers: Optional[list[str]] = []  # list of parsers
-    limit: Optional[Limit] = Limit()
+    parsers: list[str]  # list of parsers
+    limit: Optional[Limit] = None
     dummy: Optional[ParserDummy] = ParserDummy()
-    result_status: Optional[ParserDummy] = Field(ParserDummy(), alias="result-status")
     keyword: Optional[ParserKeyword] = ParserKeyword()
     clangtidy: Optional[ParserKeyword] = ParserKeyword()
     cppcheck: Optional[ParserKeyword] = ParserKeyword()
-    # FIXME: determine cpplint type
-    # cpplint: Optional[ParserKeyword] = ParserKeyword()
-    cpplint: Optional[ParserDummy] = ParserDummy()
+    cpplint: Optional[ParserKeyword] = ParserKeyword()
     result_detail: Optional[ParserResultDetail] = Field(
         ParserResultDetail(), alias="result-detail"
     )
-    skip: Optional[list[str]] = []
-    diff: Optional[ParserDiff] = ParserDiff()
-    cases: Optional[Dict[str, "Stage"]] = {}
-    class Config:
-        extra = "allow"
-    @root_validator(pre=True)
-    def gather_cases(cls: Type["Stage"], values: Dict[str, Any]) -> Dict[str, Any]:
-        cases = {k: v for k, v in values.items() if k.startswith("case")}
-        for key in cases:
-            values.pop(key)
-        values["cases"] = {k: Stage(**v) for k, v in cases.items()}
-        return values
 class Release(BaseModel):
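With the stricter Stage model above, name, command, and parsers become required keys for each stage in the task TOML. A hypothetical stage table that would satisfy it (the values are illustrative), parsed with rtoml the same way the CLI does:

import rtoml

stage_toml = """
[[stages]]
name = "judge"
command = "python3 main.py"
parsers = ["result-detail", "dummy"]
[stages.files]
import = ["cases.txt"]
"""
print(rtoml.loads(stage_toml))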

View File

@@ -7,4 +7,4 @@ sandbox_token = "test"
 whitelist_patterns = ["*.py", "*.txt", "*.md"]
 whitelist_file = ".whitelist"
 required = ["main.py", "README.md"]
-immutable = []
+immutable = [".gitignore", ".gitattributes", "push.yaml", "release.yaml"]