chore: more compact code

李衍志523370910113 2024-10-26 15:28:29 +08:00
parent eefb687c91
commit 1ada796954
3 changed files with 133 additions and 48 deletions

View File

@@ -7,6 +7,8 @@ from joj3_config_generator.lib.task import (
fix_diff,
fix_keyword,
fix_result_detail,
get_conf_stage,
get_executorWithConfig,
)
from joj3_config_generator.models import (
Cmd,
@@ -44,30 +46,11 @@ def convert(repo_conf: repo.Config, task_conf: task.Config) -> result.Config:
# Construct healthcheck stage
healthcheck_stage = getHealthcheckConfig(repo_conf, task_conf)
result_conf.stage.stages.append(healthcheck_stage)
cached = []
cached: list[str] = []
# Convert each stage in the task configuration
for task_stage in task_conf.stages:
executor_with_config = result.ExecutorWith(
default=result.Cmd(
args=task_stage.command.split(),
copy_in={
file: result.CmdFile(src=file) for file in task_stage.files.import_
},
copy_out_cached=task_stage.files.export,
),
cases=[], # You can add cases if needed
)
conf_stage = result.StageDetail(
name=task_stage.name,
group=task_conf.task,
executor=result.Executor(
name="sandbox",
with_=executor_with_config,
),
parsers=[
result.Parser(name=parser, with_={}) for parser in task_stage.parsers
],
)
executor_with_config, cached = get_executorWithConfig(task_stage, cached)
conf_stage = get_conf_stage(task_stage, executor_with_config)
conf_stage = fix_result_detail(task_stage, conf_stage)
conf_stage = fix_comment(task_stage, conf_stage)
conf_stage = fix_keyword(task_stage, conf_stage)

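For readers skimming the diff: the loop body above now just threads a running cached list through get_executorWithConfig, so a file exported by one stage is not copied in again by a later stage. A minimal, self-contained sketch of that hand-off (FakeFiles and split_copy_in below are illustrative stand-ins, not the project's models):

    # Illustrative sketch only: FakeFiles is a stub, not the project's TaskStage
    # files model. It mimics how the cached list returned by get_executorWithConfig
    # keeps later stages from re-copying files an earlier stage already exported.
    from dataclasses import dataclass, field

    @dataclass
    class FakeFiles:
        import_: list[str] = field(default_factory=list)
        export: list[str] = field(default_factory=list)

    def split_copy_in(files: FakeFiles, cached: list[str]) -> tuple[list[str], list[str]]:
        copy_in = [f for f in files.import_ if f not in cached]
        for f in files.export:
            if f not in cached:
                cached.append(f)
        return copy_in, cached

    cached: list[str] = []
    copy_in, cached = split_copy_in(FakeFiles(import_=["main.py"], export=["a.out"]), cached)
    assert copy_in == ["main.py"] and cached == ["a.out"]
    copy_in, cached = split_copy_in(FakeFiles(import_=["a.out"]), cached)
    assert copy_in == []  # a.out is already cached, so it is not copied in again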
View File

@@ -1,10 +1,112 @@
from typing import Tuple
import rtoml
from joj3_config_generator.models.result import CmdFile, OptionalCmd
from joj3_config_generator.models import (
ExecutorConfig,
ExecutorWithConfig,
ParserConfig,
)
from joj3_config_generator.models.result import Cmd, CmdFile, OptionalCmd
from joj3_config_generator.models.result import Stage as ResultStage
from joj3_config_generator.models.task import Stage as TaskStage
def get_conf_stage(
task_stage: TaskStage, executor_with_config: ExecutorWithConfig
) -> ResultStage:
conf_stage = ResultStage(
name=task_stage.name if task_stage.name is not None else "",
# TODO: we may have cq in future
group=(
"joj"
if (task_stage.name is not None) and ("judge" in task_stage.name)
else None
),
executor=ExecutorConfig(
name="sandbox",
with_=executor_with_config,
),
parsers=(
[ParserConfig(name=parser, with_={}) for parser in task_stage.parsers]
if task_stage.parsers is not None
else []
),
)
return conf_stage
def get_executorWithConfig(
task_stage: TaskStage, cached: list[str]
) -> Tuple[ExecutorWithConfig, list[str]]:
file_import = (
task_stage.files.import_
if hasattr(task_stage, "files")
and hasattr(task_stage.files, "import_")
and (task_stage.files is not None)
and (task_stage.files.import_ is not None)
else []
)
copy_in_files = [file for file in file_import if (file not in cached)]
file_export = (
task_stage.files.export
if hasattr(task_stage, "files")
and hasattr(task_stage.files, "export")
and (task_stage.files is not None)
else []
)
executor_with_config = ExecutorWithConfig(
default=Cmd(
args=(task_stage.command.split() if task_stage.command is not None else []),
copy_in={
file: CmdFile(src=f"/home/tt/.config/joj/{file}")
for file in copy_in_files
},
copy_in_cached={file: file for file in copy_in_files},
copy_out_cached=file_export if file_export is not None else [],
cpu_limit=(
task_stage.limit.cpu * 1_000_000_000
if task_stage.limit is not None and task_stage.limit.cpu is not None
else 4 * 1_000_000_000
),
clock_limit=(
2 * task_stage.limit.cpu * 1_000_000_000
if task_stage.limit is not None and task_stage.limit.cpu is not None
else 8 * 1_000_000_000
),
memory_limit=(
task_stage.limit.mem * 1_024 * 1_024
if task_stage.limit is not None and task_stage.limit.mem is not None
else 4 * 1_024 * 1_024
),
stderr=CmdFile(
name="stderr",
max=(
task_stage.limit.stderr * 1_000_000_000
if task_stage.limit is not None
and task_stage.limit.stderr is not None
else 4 * 1_024 * 1_024
),
),
stdout=CmdFile(
name="stdout",
max=(
task_stage.limit.stdout * 1_000_000_000
if task_stage.limit is not None
and task_stage.limit.stdout is not None
else 4 * 1_024 * 1_024
),
),
),
cases=[], # You can add cases if needed
)
if file_export is not None:
for file in file_export:
if file not in cached:
cached.append(file)
return (executor_with_config, cached)
def fix_keyword(task_stage: TaskStage, conf_stage: ResultStage) -> ResultStage:
keyword_parser = ["clangtidy", "keyword", "cppcheck"] # TODO: may add cpplint
if task_stage.parsers is not None:

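The limit handling in get_executorWithConfig converts per-stage limits into sandbox units; only the multipliers are visible in the diff, so the assumption here is that cpu is given in seconds and mem in MB. A worked example with illustrative values that happen to match the regenerated expectations below:

    # Worked example of the conversions above; cpu_s and mem_mb are illustrative
    # values, assumed to be seconds and MB in the task config.
    cpu_s = 3
    mem_mb = 75

    cpu_limit = cpu_s * 1_000_000_000        # nanoseconds -> 3_000_000_000
    clock_limit = 2 * cpu_s * 1_000_000_000  # wall clock fixed at 2x CPU -> 6_000_000_000
    memory_limit = mem_mb * 1_024 * 1_024    # bytes -> 78_643_200

    assert (cpu_limit, clock_limit, memory_limit) == (3_000_000_000, 6_000_000_000, 78_643_200)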
View File

@@ -18,7 +18,7 @@
"/<function",
"get_temp_directory",
"at",
"0x7f67094f3240>/repo-health-checker",
"0x7fc2485231a0>/repo-health-checker",
"-root=.",
"-repoSize=50.5",
"-meta=main.py",
@@ -71,8 +71,8 @@
"cpuRateLimit": 0,
"cpuSetLimit": "",
"copyIn": {
"//tmp/repo-checker-9gy9931v/repo-health-checker": {
"src": "//tmp/repo-checker-kjnt9uw0/repo-health-checker",
"//tmp/repo-checker-5xkj4dm4/repo-health-checker": {
"src": "//tmp/repo-checker-k3fmck15/repo-health-checker",
"content": null,
"fileId": null,
"name": null,
@@ -136,7 +136,7 @@
"content": null,
"fileId": null,
"name": "stdout",
"max": 4096,
"max": 4000000000,
"symlink": null,
"streamIn": false,
"streamOut": false,
@@ -147,15 +147,15 @@
"content": null,
"fileId": null,
"name": "stderr",
"max": 4096,
"max": 128000000000,
"symlink": null,
"streamIn": false,
"streamOut": false,
"pipe": false
},
"cpuLimit": 4000000000,
"cpuLimit": 180000000000,
"realCpuLimit": 0,
"clockLimit": 8000000000,
"clockLimit": 360000000000,
"memoryLimit": 4194304,
"stackLimit": 0,
"procLimit": 50,
@@ -291,7 +291,7 @@
"content": null,
"fileId": null,
"name": "stdout",
"max": 4096,
"max": 4000000000,
"symlink": null,
"streamIn": false,
"streamOut": false,
@@ -302,7 +302,7 @@
"content": null,
"fileId": null,
"name": "stderr",
"max": 4096,
"max": 4000000000,
"symlink": null,
"streamIn": false,
"streamOut": false,
@@ -420,7 +420,7 @@
"content": null,
"fileId": null,
"name": "stdout",
"max": 4096,
"max": 65000000000,
"symlink": null,
"streamIn": false,
"streamOut": false,
@@ -431,7 +431,7 @@
"content": null,
"fileId": null,
"name": "stderr",
"max": 4096,
"max": 4000000000,
"symlink": null,
"streamIn": false,
"streamOut": false,
@@ -611,7 +611,7 @@
"content": null,
"fileId": null,
"name": "stdout",
"max": 4096,
"max": 4000000000,
"symlink": null,
"streamIn": false,
"streamOut": false,
@@ -622,7 +622,7 @@
"content": null,
"fileId": null,
"name": "stderr",
"max": 4096,
"max": 65000000000,
"symlink": null,
"streamIn": false,
"streamOut": false,
@@ -744,7 +744,7 @@
"content": null,
"fileId": null,
"name": "stdout",
"max": 4096,
"max": 65000000000,
"symlink": null,
"streamIn": false,
"streamOut": false,
@@ -755,7 +755,7 @@
"content": null,
"fileId": null,
"name": "stderr",
"max": 4096,
"max": 4000000000,
"symlink": null,
"streamIn": false,
"streamOut": false,
@@ -842,7 +842,7 @@
"content": null,
"fileId": null,
"name": "stdout",
"max": 4096,
"max": 4000000000,
"symlink": null,
"streamIn": false,
"streamOut": false,
@@ -853,16 +853,16 @@
"content": null,
"fileId": null,
"name": "stderr",
"max": 4096,
"max": 4000000000,
"symlink": null,
"streamIn": false,
"streamOut": false,
"pipe": false
},
"cpuLimit": 4000000000,
"cpuLimit": 3000000000,
"realCpuLimit": 0,
"clockLimit": 8000000000,
"memoryLimit": 4194304,
"clockLimit": 6000000000,
"memoryLimit": 78643200,
"stackLimit": 0,
"procLimit": 50,
"cpuRateLimit": 0,
@@ -1093,7 +1093,7 @@
"content": null,
"fileId": null,
"name": "stdout",
"max": 4096,
"max": 4000000000,
"symlink": null,
"streamIn": false,
"streamOut": false,
@@ -1104,16 +1104,16 @@
"content": null,
"fileId": null,
"name": "stderr",
"max": 4096,
"max": 4000000000,
"symlink": null,
"streamIn": false,
"streamOut": false,
"pipe": false
},
"cpuLimit": 4000000000,
"cpuLimit": 10000000000,
"realCpuLimit": 0,
"clockLimit": 8000000000,
"memoryLimit": 4194304,
"clockLimit": 20000000000,
"memoryLimit": 524288000,
"stackLimit": 0,
"procLimit": 50,
"cpuRateLimit": 0,