WIP: dev #6

Closed
李衍志523370910113 wants to merge 131 commits from dev into master
34 changed files with 3833 additions and 298 deletions

2
.gitignore vendored
View File

@ -15,7 +15,7 @@ dist/
downloads/ downloads/
eggs/ eggs/
.eggs/ .eggs/
lib/ # lib/
lib64/ lib64/
parts/ parts/
sdist/ sdist/

View File

@ -1,92 +1,98 @@
from typing import List import json
import os
from pathlib import Path
from typing import Any, List
import rtoml
from joj3_config_generator.models import joj1, repo, result, task from joj3_config_generator.models import joj1, repo, result, task
from joj3_config_generator.processers.joj1 import get_joj1_run_stage
from joj3_config_generator.processers.repo import ( # get_teapotcheck_config,
get_healthcheck_config,
get_teapot_stage,
)
from joj3_config_generator.processers.task import (
fix_diff,
fix_dummy,
fix_file,
fix_keyword,
fix_result_detail,
get_conf_stage,
get_executorWithConfig,
)
# FIXME: LLM generated convert function, only for demostration def convert(
def convert(repo_conf: repo.Config, task_conf: task.Config) -> result.Config: repo_conf: repo.Config, task_conf: task.Config, conf_root: Path
) -> result.Config:
# Create the base ResultConf object # Create the base ResultConf object
result_conf = result.Config( result_conf = result.Config(
name=task_conf.task, name=task_conf.task.name,
log_path=f"{task_conf.task.replace(' ', '_')}.log", # exact folder difference specified by type
log_path=f"{Path.home()}/.cache/joj3/{task_conf.task.type_}.log",
expire_unix_timestamp=( expire_unix_timestamp=(
int(task_conf.release.deadline.timestamp()) int(task_conf.release.deadline.timestamp())
if task_conf.release.deadline if task_conf.release.deadline
else -1 else -1
), ),
stage=result.Stage(stages=[], sandbox_token=repo_conf.sandbox_token), actor_csv_path=f"{Path.home()}/.config/joj/students.csv", # students.csv position
teapot=result.Teapot(), max_total_score=repo_conf.max_total_score,
stage=result.Stage(
stages=[],
sandbox_token=repo_conf.sandbox_token,
poststages=[get_teapot_stage(repo_conf)],
),
) )
# Construct healthcheck stage
healthcheck_stage = get_healthcheck_config(repo_conf)
result_conf.stage.stages.append(healthcheck_stage)
stages: List[str] = []
# Convert each stage in the task configuration # Convert each stage in the task configuration
for task_stage in task_conf.stages: for task_stage in task_conf.stages:
executor_with_config = result.ExecutorWith( executor_with_config, stages = get_executorWithConfig(
default=result.Cmd( task_stage, stages, conf_root
args=task_stage.command.split(),
copy_in={
file: result.CmdFile(src=file) for file in task_stage.files.import_
},
copy_out_cached=task_stage.files.export,
),
cases=[], # You can add cases if needed
) )
conf_stage = result.StageDetail( conf_stage = get_conf_stage(task_stage, executor_with_config)
name=task_stage.name, conf_stage = fix_result_detail(task_stage, conf_stage)
group=task_conf.task, conf_stage = fix_dummy(task_stage, conf_stage)
executor=result.Executor( conf_stage = fix_keyword(task_stage, conf_stage)
name="sandbox", conf_stage = fix_file(task_stage, conf_stage)
with_=executor_with_config, conf_stage = fix_diff(task_stage, conf_stage, task_conf, conf_root)
),
parsers=[
result.Parser(name=parser, with_={}) for parser in task_stage.parsers
],
)
if "result-detail" in task_stage.parsers:
result_detail_parser = next(
p for p in conf_stage.parsers if p.name == "result-detail"
)
result_detail_parser.with_.update(task_stage.result_detail)
result_conf.stage.stages.append(conf_stage) result_conf.stage.stages.append(conf_stage)
return result_conf return result_conf
# FIXME: LLM generated convert function, only for demostration
def convert_joj1(joj1_conf: joj1.Config) -> task.Config: def convert_joj1(joj1_conf: joj1.Config) -> task.Config:
stages = [] stages = [get_joj1_run_stage(joj1_conf)]
for language in joj1_conf.languages:
# Here you might want to create a stage for each language
# You can define a command based on language properties
command = f"run {language.language}"
# Assuming we don't have explicit files, we will set empty ones or default behavior
files = task.Files(import_=[], export=[])
# Score can be derived from the first case or set to a default
score = 0
parsers: List[str] = [] # Define parsers if applicable
if joj1_conf.cases:
score = sum(
case.score for case in joj1_conf.cases
) # Sum scores for all cases
# Creating a stage for each language
stages.append(
task.Stage(
name=language.language,
command=command,
files=files,
score=score,
parsers=parsers,
result_detail=task.ParserResultDetail(), # You can customize this further if needed
)
)
# Assuming no deadline is provided in `joj1`, you can set it accordingly
release_deadline = (
None # Placeholder for future implementation if deadlines are defined
)
return task.Config( return task.Config(
task=joj1_conf.languages[0].language if joj1_conf.languages else "Unnamed Task", task=task.Task(
release=task.Release(deadline=release_deadline), name=("Blank Task"),
),
release=task.Release(deadline=None),
stages=stages, stages=stages,
) )
def distribute_json(folder_path: str, repo_obj: Any, conf_root: Path) -> None:
for root, _, files in os.walk(folder_path):
for file in files:
if file.endswith(".toml"):
toml_file_path = os.path.join(root, file)
json_file_path = os.path.join(root, file.replace(".toml", ".json"))
with open(toml_file_path) as toml_file:
task_toml = toml_file.read()
task_obj = rtoml.loads(task_toml)
result_model = convert(
repo.Config(**repo_obj), task.Config(**task_obj), conf_root
)
result_dict = result_model.model_dump(by_alias=True, exclude_none=True)
with open(json_file_path, "w") as result_file:
json.dump(result_dict, result_file, ensure_ascii=False, indent=4)
result_file.write("\n")
print(f"Successfully convert {toml_file_path} into json!")
assert os.path.exists(
json_file_path
), f"Failed to convert {toml_file_path} into json!"

View File

@ -1,8 +1,8 @@
import json import json
import os import os
from pathlib import Path from pathlib import Path
from typing import Any, Dict
import inquirer
import rtoml import rtoml
import typer import typer
import yaml import yaml
@ -15,23 +15,6 @@ from joj3_config_generator.utils.logger import logger
app = typer.Typer(add_completion=False) app = typer.Typer(add_completion=False)
@app.command()
def create(toml: typer.FileTextWrite) -> None:
"""
Create a new JOJ3 toml config file
"""
logger.info("Creating")
questions = [
inquirer.List(
"size",
message="What size do you need?",
choices=["Jumbo", "Large", "Standard", "Medium", "Small", "Micro"],
),
]
answers = inquirer.prompt(questions)
logger.info(answers)
@app.command() @app.command()
def convert_joj1(yaml_file: typer.FileText, toml_file: typer.FileTextWrite) -> None: def convert_joj1(yaml_file: typer.FileText, toml_file: typer.FileTextWrite) -> None:
""" """
@ -41,28 +24,46 @@ def convert_joj1(yaml_file: typer.FileText, toml_file: typer.FileTextWrite) -> N
joj1_obj = yaml.safe_load(yaml_file.read()) joj1_obj = yaml.safe_load(yaml_file.read())
joj1_model = joj1.Config(**joj1_obj) joj1_model = joj1.Config(**joj1_obj)
task_model = convert_joj1_conf(joj1_model) task_model = convert_joj1_conf(joj1_model)
result_dict = task_model.model_dump(by_alias=True) result_dict = task_model.model_dump(by_alias=True, exclude_none=True)
toml_file.write(rtoml.dumps(result_dict)) toml_file.write(rtoml.dumps(result_dict))
@app.command() @app.command()
def convert(root: Path = Path(".")) -> None: def convert(
""" root: Path = typer.Option(
Convert given dir of JOJ3 toml config files to JOJ3 json config files Path("."),
""" "--conf-root",
logger.info(f"Converting files in {root.absolute()}") "-c",
repo_toml_path = os.path.join(root, "repo.toml") help="This should be consistent with the root of how you run JOJ3",
# TODO: loop through all dirs to find all task.toml ),
task_toml_path = os.path.join(root, "task.toml") repo_path: Path = typer.Option(
result_json_path = os.path.join(root, "task.json") Path("."),
with open(repo_toml_path) as repo_file: "--repo-root",
"-r",
help="This would be where you put your repo.toml file",
),
debug: bool = typer.Option(
False, "--debug", "-d", help="Enable debug mode for more verbose output"
),
) -> Dict[str, Any]:
logger.info(f"Converting files in {repo_path.absolute()}")
repo_toml_path = os.path.join(repo_path.absolute(), "basic", "repo.toml")
task_toml_path = os.path.join(repo_path.absolute(), "basic", "task.toml")
result_json_path = os.path.join(repo_path.absolute(), "basic", "task.json")
with open(repo_toml_path, encoding=None) as repo_file:
repo_toml = repo_file.read() repo_toml = repo_file.read()
with open(task_toml_path) as task_file: with open(task_toml_path, encoding=None) as task_file:
task_toml = task_file.read() task_toml = task_file.read()
repo_obj = rtoml.loads(repo_toml) repo_obj = rtoml.loads(repo_toml)
task_obj = rtoml.loads(task_toml) task_obj = rtoml.loads(task_toml)
result_model = convert_conf(repo.Config(**repo_obj), task.Config(**task_obj)) result_model = convert_conf(repo.Config(**repo_obj), task.Config(**task_obj), root)
result_dict = result_model.model_dump(by_alias=True) result_dict = result_model.model_dump(by_alias=True, exclude_none=True)
with open(result_json_path, "w") as result_file:
with open(result_json_path, "w", encoding=None) as result_file:
json.dump(result_dict, result_file, ensure_ascii=False, indent=4) json.dump(result_dict, result_file, ensure_ascii=False, indent=4)
result_file.write("\n") result_file.write("\n")
# distribution on json
# need a get folder path function
# distribute_json(folder_path, repo_obj, conf_root)
return result_dict

View File

@ -1,18 +1,22 @@
from typing import List, Optional from typing import List
from pydantic import BaseModel, Field from pydantic import BaseModel, Field
class Files(BaseModel): class Files(BaseModel):
whitelist_patterns: List[str]
whitelist_file: Optional[str]
required: List[str] required: List[str]
immutable: List[str] immutable: List[str]
class Group(BaseModel):
name: List[str]
max_count: List[int]
time_period_hour: List[int]
class Config(BaseModel): class Config(BaseModel):
teaching_team: List[str]
max_size: float = Field(..., ge=0) max_size: float = Field(..., ge=0)
release_tags: List[str]
files: Files files: Files
sandbox_token: str sandbox_token: str
max_total_score: int = Field(100)
groups: Group

View File

@ -8,31 +8,32 @@ class CmdFile(BaseModel):
content: Optional[str] = None content: Optional[str] = None
file_id: Optional[str] = Field(None, serialization_alias="fileId") file_id: Optional[str] = Field(None, serialization_alias="fileId")
name: Optional[str] = None name: Optional[str] = None
max: Optional[int] = None max: Optional[int] = 400 * 1024 * 1024
symlink: Optional[str] = None symlink: Optional[str] = None
stream_in: bool = Field(False, serialization_alias="streamIn") stream_in: Optional[bool] = Field(None, serialization_alias="streamIn")
stream_out: bool = Field(False, serialization_alias="streamOut") stream_out: Optional[bool] = Field(None, serialization_alias="streamOut")
pipe: bool = False pipe: Optional[bool] = None
class Cmd(BaseModel): class Cmd(BaseModel):
args: List[str] args: Optional[List[str]] = None
env: List[str] = [] env: Optional[List[str]] = ["PATH=/usr/bin:/bin:/usr/local/bin"]

Unify the use of list or List everywhere as we only support Python >=3.9 now.

Unify the use of `list` or `List` everywhere as we only support Python >=3.9 now.
stdin: Optional[CmdFile] = None stdin: Optional[CmdFile] = CmdFile(content="")
stdout: Optional[CmdFile] = None stdout: Optional[CmdFile] = CmdFile(name="stdout", max=4 * 1024)
stderr: Optional[CmdFile] = None stderr: Optional[CmdFile] = CmdFile(name="stderr", max=4 * 1024)
cpu_limit: int = Field(0, serialization_alias="cpuLimit") cpu_limit: int = Field(4 * 1000000000000, serialization_alias="cpuLimit")
real_cpu_limit: int = Field(0, serialization_alias="realCpuLimit") real_cpu_limit: int = Field(0, serialization_alias="realCpuLimit")
clock_limit: int = Field(0, serialization_alias="clockLimit") clock_limit: int = Field(8 * 1000000000000, serialization_alias="clockLimit")
memory_limit: int = Field(0, serialization_alias="memoryLimit") memory_limit: int = Field(800 * 1024 * 1024, serialization_alias="memoryLimit")
stack_limit: int = Field(0, serialization_alias="stackLimit") stack_limit: int = Field(0, serialization_alias="stackLimit")
proc_limit: int = Field(0, serialization_alias="procLimit") proc_limit: int = Field(50, serialization_alias="procLimit")
cpu_rate_limit: int = Field(0, serialization_alias="cpuRateLimit") cpu_rate_limit: int = Field(0, serialization_alias="cpuRateLimit")
cpu_set_limit: str = Field("", serialization_alias="cpuSetLimit") cpu_set_limit: str = Field("", serialization_alias="cpuSetLimit")
copy_in: Dict[str, CmdFile] = Field({}, serialization_alias="copyIn") copy_in: Dict[str, CmdFile] = Field({}, serialization_alias="copyIn")
copy_in_cached: Dict[str, str] = Field({}, serialization_alias="copyInCached") copy_in_cached: Dict[str, str] = Field({}, serialization_alias="copyInCached")
copy_in_dir: str = Field(".", serialization_alias="copyInDir") copy_in_dir: str = Field(".", serialization_alias="copyInDir")
copy_out: List[str] = Field([], serialization_alias="copyOut") # reconsider this default situation
copy_out: List[str] = Field(["stdout", "stderr"], serialization_alias="copyOut")
copy_out_cached: List[str] = Field([], serialization_alias="copyOutCached") copy_out_cached: List[str] = Field([], serialization_alias="copyOutCached")
copy_out_max: int = Field(0, serialization_alias="copyOutMax") copy_out_max: int = Field(0, serialization_alias="copyOutMax")
copy_out_dir: str = Field("", serialization_alias="copyOutDir") copy_out_dir: str = Field("", serialization_alias="copyOutDir")
@ -43,17 +44,21 @@ class Cmd(BaseModel):
class OptionalCmd(BaseModel): class OptionalCmd(BaseModel):
args: Optional[List[str]] = None args: Optional[list[str]] = None
env: Optional[List[str]] = None env: Optional[list[str]] = ["PATH=/usr/bin:/bin:/usr/local/bin"]
stdin: Optional[CmdFile] = None stdin: Optional[CmdFile] = None
stdout: Optional[CmdFile] = None stdout: Optional[CmdFile] = None
stderr: Optional[CmdFile] = None stderr: Optional[CmdFile] = None
cpu_limit: Optional[int] = Field(None, serialization_alias="cpuLimit") cpu_limit: Optional[int] = Field(4 * 1000000000000, serialization_alias="cpuLimit")
real_cpu_limit: Optional[int] = Field(None, serialization_alias="realCpuLimit") real_cpu_limit: Optional[int] = Field(None, serialization_alias="realCpuLimit")
clock_limit: Optional[int] = Field(None, serialization_alias="clockLimit") clock_limit: Optional[int] = Field(
memory_limit: Optional[int] = Field(None, serialization_alias="memoryLimit") 8 * 1000000000000, serialization_alias="clockLimit"
)
memory_limit: Optional[int] = Field(
800 * 1024 * 1024, serialization_alias="memoryLimit"
)
stack_limit: Optional[int] = Field(None, serialization_alias="stackLimit") stack_limit: Optional[int] = Field(None, serialization_alias="stackLimit")
proc_limit: Optional[int] = Field(None, serialization_alias="procLimit") proc_limit: Optional[int] = Field(50, serialization_alias="procLimit")
cpu_rate_limit: Optional[int] = Field(None, serialization_alias="cpuRateLimit") cpu_rate_limit: Optional[int] = Field(None, serialization_alias="cpuRateLimit")
cpu_set_limit: Optional[str] = Field(None, serialization_alias="cpuSetLimit") cpu_set_limit: Optional[str] = Field(None, serialization_alias="cpuSetLimit")
copy_in: Optional[Dict[str, CmdFile]] = Field(None, serialization_alias="copyIn") copy_in: Optional[Dict[str, CmdFile]] = Field(None, serialization_alias="copyIn")
@ -61,7 +66,9 @@ class OptionalCmd(BaseModel):
None, serialization_alias="copyInCached" None, serialization_alias="copyInCached"
) )
copy_in_dir: Optional[str] = Field(None, serialization_alias="copyInDir") copy_in_dir: Optional[str] = Field(None, serialization_alias="copyInDir")
copy_out: Optional[List[str]] = Field(None, serialization_alias="copyOut") copy_out: Optional[List[str]] = Field(
["stdout", "stderr"], serialization_alias="copyOut"
)
copy_out_cached: Optional[List[str]] = Field( copy_out_cached: Optional[List[str]] = Field(
None, serialization_alias="copyOutCached" None, serialization_alias="copyOutCached"
) )
@ -89,16 +96,16 @@ class Executor(BaseModel):
with_: ExecutorWith = Field(..., serialization_alias="with") with_: ExecutorWith = Field(..., serialization_alias="with")
class Parser(BaseModel): class ParserConfig(BaseModel):
name: str name: str
with_: Dict[str, Any] = Field(..., serialization_alias="with") with_: Dict[str, Any] = Field(..., serialization_alias="with")
class StageDetail(BaseModel): class StageDetail(BaseModel):
name: str name: str
group: str group: Optional[str] = ""
executor: Executor executor: Executor
parsers: List[Parser] parsers: List[ParserConfig]
class Stage(BaseModel): class Stage(BaseModel):
@ -110,25 +117,14 @@ class Stage(BaseModel):
"/tmp/joj3_result.json", serialization_alias="outputPath" "/tmp/joj3_result.json", serialization_alias="outputPath"
) # nosec: B108 ) # nosec: B108
stages: List[StageDetail] stages: List[StageDetail]
prestages: Optional[List[StageDetail]] = None
poststages: List[StageDetail]
class Teapot(BaseModel):
log_path: str = Field(
"/home/tt/.cache/joint-teapot-debug.log", serialization_alias="logPath"
)
scoreboard_path: str = Field("scoreboard.csv", serialization_alias="scoreboardPath")
failed_table_path: str = Field(
"failed-table.md", serialization_alias="failedTablePath"
)
grading_repo_name: str = Field("", serialization_alias="gradingRepoName")
skip_issue: bool = Field(False, serialization_alias="skipIssue")
skip_scoreboard: bool = Field(False, serialization_alias="skipScoreboard")
skip_failed_table: bool = Field(False, serialization_alias="skipFailedTable")
class Config(BaseModel): class Config(BaseModel):
name: str = "unknown" name: str = ""
log_path: str = Field("", serialization_alias="logPath") log_path: str = Field("", serialization_alias="logPath")
expire_unix_timestamp: int = Field(-1, serialization_alias="expireUnixTimestamp") expire_unix_timestamp: int = Field(-1, serialization_alias="expireUnixTimestamp")
actor_csv_path: str = Field("", serialization_alias="actorCsvPath")
max_total_score: int = Field(100, serialization_alias="maxTotalScore")
stage: Stage stage: Stage
teapot: Teapot

View File

@ -1,37 +1,112 @@
from datetime import datetime from datetime import datetime
from typing import List, Optional from typing import Any, Dict, List, Optional, Type
from pydantic import BaseModel, Field from pydantic import BaseModel, Field, model_validator
class ParserResultDetail(BaseModel): class ParserResultDetail(BaseModel):
time: bool = True # Display run time time: Optional[bool] = True # Display run time
mem: bool = True # Display memory usage mem: Optional[bool] = True # Display memory usage
stdout: bool = False # Display stdout messages stdout: Optional[bool] = False # Display stdout messages
stderr: bool = False # Display stderr messages stderr: Optional[bool] = False # Display stderr messages
exitstatus: Optional[bool] = False
class ParserFile(BaseModel):
name: Optional[str] = None
class ParserLog(BaseModel):
fileName: Optional[str] = None
msg: Optional[str] = None
level: Optional[str] = None
class ParserDummy(BaseModel):
comment: Optional[str] = ""
score: Optional[int] = 0
forcequit: Optional[bool] = False
class ParserKeyword(BaseModel):
keyword: Optional[list[str]] = []
weight: Optional[list[int]] = []
class Outputs(BaseModel):
score: Optional[int] = 0
ignorespaces: Optional[bool] = True
hide: Optional[bool] = False
forcequit: Optional[bool] = False
class ParserDiff(BaseModel):
output: Optional[Outputs] = Outputs()
class Files(BaseModel): class Files(BaseModel):
import_: List[str] = Field(serialization_alias="import", validation_alias="import") import_: Optional[list[str]] = Field([], alias="import")
export: List[str] export: Optional[list[str]] = []
class Limit(BaseModel):
mem: Optional[int] = 800
cpu: Optional[int] = 1000
stderr: Optional[int] = 800
stdout: Optional[int] = 800
class Stage(BaseModel): class Stage(BaseModel):
name: str # Stage name name: Optional[str] = None # Stage name
command: str # Command to run env: Optional[list[str]] = None
files: Files # Files to import and export command: Optional[str] = None # Command to run
score: int # Score for the task files: Optional[Files] = None
parsers: List[str] # list of parsers in_: Optional[str] = Field(None, alias="in")
result_detail: ParserResultDetail = ( out_: Optional[str] = Field(None, alias="out")
ParserResultDetail() score: Optional[int] = 0
) # for result-detail parser parsers: Optional[list[str]] = [] # list of parsers
limit: Optional[Limit] = Limit()
dummy: Optional[ParserDummy] = ParserDummy()
result_status: Optional[ParserDummy] = Field(ParserDummy(), alias="result-status")
keyword: Optional[ParserKeyword] = ParserKeyword()
clangtidy: Optional[ParserKeyword] = ParserKeyword()
cppcheck: Optional[ParserKeyword] = ParserKeyword()
cpplint: Optional[ParserKeyword] = ParserKeyword()
result_detail: Optional[ParserResultDetail] = Field(
ParserResultDetail(), alias="result-detail"
)
file: Optional[ParserFile] = ParserFile()
skip: Optional[list[str]] = []
# cases related
cases: Optional[Dict[str, "Stage"]] = {}
diff: Optional[ParserDiff] = ParserDiff()
class Config:
extra = "allow"
@model_validator(mode="before")
@classmethod
def gather_cases(cls: Type["Stage"], values: Dict[str, Any]) -> Dict[str, Any]:
cases = {k: v for k, v in values.items() if k.startswith("case")}
for key in cases:
values.pop(key)
values["cases"] = {k: Stage(**v) for k, v in cases.items()}
return values
class Release(BaseModel): class Release(BaseModel):
deadline: Optional[datetime] # RFC 3339 formatted date-time with offset deadline: Optional[datetime] # RFC 3339 formatted date-time with offset
class Task(BaseModel):
type_: Optional[str] = Field(
"", serialization_alias="type", validation_alias="type"
)
name: str
class Config(BaseModel): class Config(BaseModel):
task: str # Task name (e.g., hw3 ex5) task: Task
release: Release # Release configuration release: Release
stages: List[Stage] # list of stage configurations stages: List[Stage] # list of stage configurations

View File

@ -0,0 +1,52 @@
from typing import List
import humanfriendly
from pytimeparse.timeparse import timeparse
from joj3_config_generator.models import joj1, result, task
def get_joj1_run_stage(joj1_config: joj1.Config) -> task.Stage:
    """Convert the JOJ1 case list into a single JOJ3 "run" task stage.

    Each JOJ1 case becomes one sub-case of the stage; per-case time/memory
    limits fall back to 1 second / 32 MB when the case does not specify
    them.  (Previous version walked ``joj1_config.cases`` twice; the second
    pass is folded into the first.)
    """
    default_cpu = timeparse("1s")
    default_mem = humanfriendly.parse_size("32m")
    cases_conf = []
    # Build every sub-case in one pass: command, limits and I/O redirection.
    for case in joj1_config.cases:
        case_stage = task.Stage(
            score=case.score,
            command=case.execute_args if case.execute_args else None,
            limit=task.Limit(
                cpu=timeparse(case.time) if case.time else default_cpu,
                mem=(
                    humanfriendly.parse_size(case.memory)
                    if case.memory
                    else default_mem
                ),
            ),
        )
        # "in"/"out" are aliased model fields, so set them after construction.
        case_stage.in_ = case.input
        case_stage.out_ = case.output
        cases_conf.append(case_stage)
    run_config = task.Stage(
        name="This is the converted joj1 run stage",
        parsers=["diff", "result-status"],
        score=100,
        # The stage-level limit mirrors the first case's limit (raises
        # IndexError on an empty case list, same as before).
        limit=task.Limit(
            cpu=(
                timeparse(joj1_config.cases[0].time)
                if joj1_config.cases[0].time is not None
                else default_cpu
            ),
            mem=(
                humanfriendly.parse_size(joj1_config.cases[0].memory)
                if joj1_config.cases[0].memory is not None
                else default_mem
            ),
        ),
        cases={f"case{i}": case_conf for i, case_conf in enumerate(cases_conf)},
    )
    return run_config
# TODO: get formatted joj1 config, match the criterion in the doc

View File

@ -0,0 +1,147 @@
import hashlib
import shlex
import socket
from pathlib import Path
from joj3_config_generator.models import repo, result
def get_grading_repo_name() -> str:
    """Derive the grading repo name from the host name: "<prefix>-joj"."""
    # FIXME: uncomment back when everything is ready!
    # host_name = "engr151"
    prefix = socket.gethostname().split("-")[0]
    return f"{prefix}-joj"
def get_teapot_stage(repo_conf: repo.Config) -> result.StageDetail:
    """Build the "teapot" post-stage: a local joint-teapot run that reports
    the whole submission's results."""
    teapot_args = (
        "/usr/local/bin/joint-teapot joj3-all-env "
        "/home/tt/.config/teapot/teapot.env "
        f"--grading-repo-name {get_grading_repo_name()} "
        f"--max-total-score {repo_conf.max_total_score}"
    )
    teapot_env = [
        f"LOG_FILE_PATH={Path.home()}/.cache/joint-teapot-debug.log"
    ]  # TODO: fix it according to the task name
    return result.StageDetail(
        name="teapot",
        executor=result.Executor(
            name="local",
            with_=result.ExecutorWith(
                default=result.Cmd(args=shlex.split(teapot_args), env=teapot_env),
                cases=[],
            ),
        ),
        parsers=[result.ParserConfig(name="log", with_={"msg": "joj3 summary"})],
    )
def get_healthcheck_args(repo_conf: repo.Config) -> str:
    """Compose the repo-health-checker command line from the repo config.

    Bug fix: the previous version rewrote ``repo_conf.files.required`` in
    place and let ``get_hash`` rewrite ``repo_conf.files.immutable``, so a
    second call on the same config produced corrupted flags.  All pieces are
    now built without mutating the input.
    """
    repo_size = f"-repoSize={str(repo_conf.max_size)} "
    # One "-meta=<file> " flag per required file.
    meta_flags = "".join(f"-meta={meta} " for meta in repo_conf.files.required)
    immutable = repo_conf.files.immutable
    # Comma-separated file name list; trailing space matches the other flags.
    if immutable:
        name_list = "-checkFileNameList=" + ",".join(immutable) + " "
    else:
        name_list = "-checkFileNameList="
    args = "/usr/local/bin/repo-health-checker -root=. "
    args += repo_size
    args += meta_flags
    # get_hash rewrites its argument in place, so hand it a copy.
    args += get_hash(list(immutable))
    args += name_list
    return args
def get_debug_args(repo_conf: repo.Config) -> str:
    """Compose the joint-teapot joj3-check-env command line (group limits)."""
    prefix = (
        "/usr/local/bin/joint-teapot joj3-check-env "
        f"{Path.home()}/.config/teapot/teapot.env "
        f"--grading-repo-name {get_grading_repo_name()} --group-config "
    )
    entries = [
        f"{name}={repo_conf.groups.max_count[i]}:{repo_conf.groups.time_period_hour[i]}"
        for i, name in enumerate(repo_conf.groups.name)
    ]
    # Default group quota is hardcoded as the final entry.
    entries.append("=100:24")
    return prefix + ",".join(entries)
def get_healthcheck_config(repo_conf: repo.Config) -> result.StageDetail:
    """Build the "healthcheck" stage: a repo-health-checker run plus a
    joint-teapot environment check, both executed locally."""
    check_cmd = result.OptionalCmd(
        args=shlex.split(get_healthcheck_args(repo_conf)),
    )
    debug_cmd = result.OptionalCmd(
        args=shlex.split(get_debug_args(repo_conf)),
        env=[f"LOG_FILE_PATH={Path.home()}/.cache/joint-teapot-debug.log"],
    )
    return result.StageDetail(
        name="healthcheck",
        group="",
        executor=result.Executor(
            name="local",
            with_=result.ExecutorWith(
                default=result.Cmd(),
                cases=[check_cmd, debug_cmd],
            ),
        ),
        parsers=[
            result.ParserConfig(name="healthcheck", with_={"score": 1}),
            result.ParserConfig(name="debug", with_={"score": 0}),
        ],
    )
def calc_sha256sum(file_path: str) -> str:
    """Return the hex SHA-256 digest of the file at *file_path*.

    The file is read in 128 KiB chunks so arbitrarily large files are
    hashed without loading them fully into memory.
    """
    digest = hashlib.sha256()
    with open(file_path, "rb") as fp:
        while chunk := fp.read(65536 * 2):
            digest.update(chunk)
    return digest.hexdigest()
def get_hash(immutable_files: list[str]) -> str:  # input should be a list
    """Return the "-checkFileSumList=" flag with the SHA-256 sums of the
    immutable files.

    Only the basename of each entry is used; files are resolved against a
    hardcoded test fixture directory for now.  Bug fix: the previous
    version overwrote the caller's list in place; the input is no longer
    modified.
    """
    # FIXME: should be finalized when get into the server
    current_file_path = Path(__file__).resolve()
    project_root = current_file_path.parents[2]
    file_path = f"{project_root}/tests/immutable_p3-test/"
    # default value as hardcoded
    # file_path = "{Path.home()}/.cache/immutable"
    local_paths = [file_path + name.rsplit("/", 1)[-1] for name in immutable_files]
    immutable_hash = [calc_sha256sum(path) for path in local_paths]
    if not immutable_hash:
        return "-checkFileSumList="
    # Trailing space matches the flag format used by the caller.
    return "-checkFileSumList=" + ",".join(immutable_hash) + " "

View File

@ -0,0 +1,316 @@
import re
import shlex
from pathlib import Path
from typing import List, Tuple
from joj3_config_generator.models import result, task
def get_conf_stage(
    task_stage: task.Stage, executor_with_config: result.ExecutorWith
) -> result.StageDetail:
    """Assemble a result.StageDetail for one task stage using the prepared
    sandbox executor configuration."""
    stage_name = task_stage.name if task_stage.name is not None else ""
    # The group is the text between "[" and "]" in the stage name, if any.
    group_match = re.search(r"\[([^\[\]]+)\]", task_stage.name or "")
    group_name = group_match.group(1) if group_match else ""
    if task_stage.parsers is not None:
        parser_configs = [
            result.ParserConfig(name=parser, with_={})
            for parser in task_stage.parsers
        ]
    else:
        parser_configs = []
    return result.StageDetail(
        name=stage_name,
        group=group_name,
        executor=result.Executor(
            name="sandbox",
            with_=executor_with_config,
        ),
        parsers=parser_configs,
    )
def get_executorWithConfig(
    task_stage: task.Stage, cached: List[str], conf_root: Path
) -> Tuple[result.ExecutorWith, List[str]]:
    """Build the sandbox ExecutorWith config for one task stage.

    ``cached`` lists files already in the sandbox cache from earlier
    stages; files exported by this stage are appended to it, and the
    updated list is returned alongside the executor config.

    Bug fix: the stdout/stderr size limits were multiplied by the CPU-time
    factor (1_000_000_000_000) instead of being converted from MB to bytes
    (1_024 * 1_024), matching the 800 MB fallback.
    """
    limit = task_stage.limit
    files = task_stage.files
    file_import = (
        files.import_ if files is not None and files.import_ is not None else []
    )
    # Only copy in files not already cached by earlier stages.
    copy_in_files = [file for file in file_import if file not in cached]
    file_export = files.export if files is not None else None
    # Feed an empty stdin unless the diff parser runs (it supplies per-case
    # stdin itself).
    stdin_file = (
        result.CmdFile(content="")
        if task_stage.parsers is not None and "diff" not in task_stage.parsers
        else None
    )
    # CPU/clock limits are seconds scaled to the sandbox's time unit;
    # clock (wall) limit is twice the CPU limit.
    cpu_limit = (
        limit.cpu * 1_000_000_000_000
        if limit is not None and limit.cpu is not None
        else 80 * 1_000_000_000_000
    )
    clock_limit = (
        2 * limit.cpu * 1_000_000_000_000
        if limit is not None and limit.cpu is not None
        else 80 * 1_000_000_000_000
    )
    # Memory and stream limits are given in MB; convert to bytes.
    memory_limit = (
        limit.mem * 1_024 * 1_024
        if limit is not None and limit.mem is not None
        else 800 * 1_024 * 1_024
    )
    stderr_max = (
        limit.stderr * 1_024 * 1_024
        if limit is not None and limit.stderr is not None
        else 800 * 1_024 * 1_024
    )
    stdout_max = (
        limit.stdout * 1_024 * 1_024
        if limit is not None and limit.stdout is not None
        else 800 * 1_024 * 1_024
    )
    executor_with_config = result.ExecutorWith(
        default=result.Cmd(
            args=(
                shlex.split(task_stage.command)
                if task_stage.command is not None
                else []
            ),
            copy_in={
                # all copyin files store in this tools folder
                # are there any corner cases
                file: result.CmdFile(src=f"{Path.home()}/{conf_root}/tools/{file}")
                for file in copy_in_files
            },
            stdin=stdin_file,
            copy_out=["stdout", "stderr"],
            copy_in_cached={file: file for file in cached},
            copy_out_cached=file_export if file_export is not None else [],
            cpu_limit=cpu_limit,
            clock_limit=clock_limit,
            memory_limit=memory_limit,
            stderr=result.CmdFile(name="stderr", max=stderr_max),
            stdout=result.CmdFile(name="stdout", max=stdout_max),
        ),
        cases=[],
    )
    # Everything this stage exports becomes available (cached) to later stages.
    if file_export is not None:
        for file in file_export:
            if file not in cached:
                cached.append(file)
    return (executor_with_config, cached)
def fix_keyword(
    task_stage: task.Stage, conf_stage: result.StageDetail
) -> result.StageDetail:
    """Fill the "with" config of keyword-style parsers (keyword, clangtidy,
    cppcheck, cpplint) from the matching task-stage section.

    Keywords sharing the same weight are grouped into one match entry.
    Bug fix: the previous version derived unique weights via ``set``, which
    made the match order nondeterministic; grouping now preserves
    first-occurrence order of the weights.
    """
    keyword_parsers = ["clangtidy", "keyword", "cppcheck", "cpplint"]
    if task_stage.parsers is None:
        return conf_stage
    for parser in task_stage.parsers:
        if parser not in keyword_parsers:
            continue
        parser_conf = next(p for p in conf_stage.parsers if p.name == parser)
        stage_conf = getattr(task_stage, parser, None)
        if stage_conf is None:
            continue
        # Group keywords by score, keeping first-occurrence order.
        grouped: dict[int, list[str]] = {}
        for keyword, score in zip(stage_conf.keyword, stage_conf.weight):
            grouped.setdefault(score, []).append(keyword)
        keyword_weight = [
            {"keywords": keywords, "score": score}
            for score, keywords in grouped.items()
        ]
        parser_conf.with_.update(
            {
                "matches": keyword_weight,
                "fullscore": 0,
                "minscore": -1000,
                "files": ["stdout", "stderr"],
            }
        )
    return conf_stage
def fix_result_detail(
    task_stage: task.Stage, conf_stage: result.StageDetail
) -> result.StageDetail:
    """Fill the "with" config of the result-detail parser from the task stage."""
    if task_stage.parsers is None or "result-detail" not in task_stage.parsers:
        return conf_stage
    parser_conf = next(p for p in conf_stage.parsers if p.name == "result-detail")
    detail = task_stage.result_detail
    if detail is not None:
        # A stream is shown only when its flag is truthy.
        show_files = []
        if detail.stdout:
            show_files.append("stdout")
        if detail.stderr:
            show_files.append("stderr")
        parser_conf.with_.update(
            {
                "score": 0,
                "comment": "",
                "showFiles": show_files,
                "showExitStatus": detail.exitstatus,
                "showRuntime": detail.time,
                "showMemory": detail.mem,
            }
        )
    return conf_stage
def fix_dummy(
    task_stage: task.Stage, conf_stage: result.StageDetail
) -> result.StageDetail:
    """Populate "dummy" / "result-status" parsers with the stage's
    result-status score, comment and force-quit flag.

    ``conf_stage`` is mutated in place and also returned.
    """
    if task_stage.parsers is None:
        return conf_stage
    dummy_like = ("dummy", "result-status")
    for parser_name in task_stage.parsers:
        if parser_name not in dummy_like:
            continue
        target = next(p for p in conf_stage.parsers if p.name == parser_name)
        # The task-stage field uses "_" where the parser name uses "-".
        stage_field = getattr(task_stage, parser_name.replace("-", "_"), None)
        status = task_stage.result_status
        # NOTE(review): both parsers read from result_status, even when the
        # matched field is "dummy" — presumably intentional; confirm.
        if stage_field is not None and status is not None:
            target.with_.update(
                {
                    "score": status.score,
                    "comment": status.comment,
                    "forceQuitOnNotAccepted": status.forcequit,
                }
            )
    return conf_stage
def fix_file(
    task_stage: task.Stage, conf_stage: result.StageDetail
) -> result.StageDetail:
    """Set the "file" parser's target file name from the stage config.

    ``conf_stage`` is mutated in place and also returned.
    """
    for parser_name in task_stage.parsers or []:
        if parser_name != "file":
            continue
        target = next(p for p in conf_stage.parsers if p.name == parser_name)
        if task_stage.file is not None:
            target.with_.update({"name": task_stage.file.name})
    return conf_stage
def fix_diff(
    task_stage: task.Stage,
    conf_stage: result.StageDetail,
    task_conf: task.Config,
    conf_root: Path,
) -> result.StageDetail:
    """Build sandbox run cases and diff-parser cases for a "diff" stage.

    For every case in ``task_stage.cases`` not listed in ``task_stage.skip``,
    one executor case (stdin/args/limits) and, when diff output settings
    exist, one diff-parser case (answer path/score/flags) are produced.
    ``conf_stage`` is mutated in place and also returned.
    """
    if task_stage.parsers is not None and "diff" in task_stage.parsers:
        diff_parser = next((p for p in conf_stage.parsers if p.name == "diff"), None)
        skip = task_stage.skip or []
        cases = task_stage.cases or {}
        # Keep dict insertion order; skipped cases are dropped entirely.
        finalized_cases = [case for case in cases if case not in skip]
        stage_cases = []
        parser_cases = []
        for case in finalized_cases:
            case_stage = task_stage.cases.get(case) if task_stage.cases else None
            if not case_stage:
                continue
            # Ensure case_stage.limit is defined before accessing .cpu and .mem
            # CPU limit: seconds -> nanoseconds; 0 when unset.
            cpu_limit = (
                case_stage.limit.cpu * 1_000_000_000
                if case_stage.limit and case_stage.limit.cpu is not None
                else 0
            )
            # Wall-clock limit is fixed at twice the CPU limit.
            clock_limit = (
                2 * case_stage.limit.cpu * 1_000_000_000
                if case_stage.limit and case_stage.limit.cpu is not None
                else 0
            )
            # Memory limit: MiB -> bytes; 0 when unset.
            memory_limit = (
                case_stage.limit.mem * 1_024 * 1_024
                if case_stage.limit and case_stage.limit.mem is not None
                else 0
            )
            command = case_stage.command if case_stage.command is not None else None
            # Input/answer file names default to "<case>.in" / "<case>.out".
            stdin = case_stage.in_ if case_stage.in_ is not None else f"{case}.in"
            stdout = case_stage.out_ if case_stage.out_ is not None else f"{case}.out"
            stage_cases.append(
                result.OptionalCmd(
                    stdin=result.CmdFile(
                        src=f"{Path.home()}/{conf_root}/{task_conf.task.type_}/{stdin}"
                    ),
                    # Per-case command overrides the stage command when given.
                    args=(shlex.split(command) if command is not None else None),
                    cpu_limit=cpu_limit,
                    clock_limit=clock_limit,
                    memory_limit=memory_limit,
                    proc_limit=50,  # hard-coded process cap for every case
                )
            )
            # Ensure case_stage.diff and case_stage.diff.output are defined
            diff_output = (
                case_stage.diff.output
                if case_stage.diff and case_stage.diff.output
                else None
            )
            if diff_output:
                # The answer file is consumed by the diff parser
                # (answerPath), not wired as the executor's stdout.
                parser_cases.append(
                    {
                        "outputs": [
                            {
                                "score": diff_output.score,
                                "fileName": "stdout",
                                "answerPath": f"{Path.home()}/{conf_root}/{task_conf.task.type_}/{stdout}",
                                "forceQuitOnDiff": diff_output.forcequit,
                                "alwaysHide": diff_output.hide,
                                "compareSpace": not diff_output.ignorespaces,
                            }
                        ]
                    }
                )
        # NOTE(review): parser cases are applied only when the stage-level
        # `diff` field is set, while executor cases are always applied —
        # confirm this asymmetry is intentional.
        if diff_parser and task_stage.diff is not None:
            diff_parser.with_.update({"name": "diff", "cases": parser_cases})
        conf_stage.executor.with_.cases = stage_cases
    return conf_stage

View File

@ -1,10 +1,15 @@
teaching_team = ["prof_john", "ta_alice", "ta_bob"]
max_size = 50.5
release_tags = ["v1.0", "v2.0", "final"]
sandbox_token = "test" sandbox_token = "test"
# reconfigure later
max_total_score = 100
max_size = 50.5
# for tests
[groups]
name = ["joj", "run"]
max_count = [1000, 1000]
time_period_hour = [24, 24]
[files] [files]
whitelist_patterns = ["*.py", "*.txt", "*.md"] required = ["README.md", "Changelog.md"]
whitelist_file = ".whitelist" immutable = [".gitignore", ".gitattributes",".gitea/workflows/push.yaml", ".gitea/workflows/release.yaml"]
required = ["main.py", "README.md"]
immutable = ["config.yaml", "setup.py"]

File diff suppressed because it is too large Load Diff

View File

@ -1,30 +1,440 @@
task = "hw3 ex5" # general task configuration

Make this basic test as simple as possible, and create new test cases for each kind of stage.
task.name = "hw7 ex2" # task name
task.type = "homework/h7/e2" # remove this task type later
[release] release.deadline = 2024-12-30 23:59:59+08:00
deadline = "2024-10-18T23:59:00+08:00"
[[stages]] [[stages]]
name = "judge_base" name = "Compilation"
command = "./matlab-joj ./h3/ex5.m" command = "./tools/compile" # eg. script running cmake commands
score = 100 files.import = [ "tools/compile" ]
parsers = ["diff", "result-detail"] files.export = [ "h7/build/ex2", "h7/build/ex2-asan", "h7/build/ex2-ubsan", "h7/build/ex2-msan", "h7/build/compile_commands.json" ]
score = 1
files.import = ["tools/matlab-joj", "tools/matlab_formatter.py"] # compile parsers
files.export = ["output/ex5_results.txt", "output/ex5_logs.txt"] parsers = [ "result-detail", "result-status" ]
result-status.comment = "Congratulations! Your code compiled successfully."
result_detail.time = false result-detail.exitstatus = true
result_detail.mem = false result-detail.stderr = true
result_detail.stderr = true result-detail.time = false
result-detail.mem = false
result-status.forcequit = true
[[stages]] [[stages]]
name = "judge_base2" name = "[cq] Filelength"
command = "./matlab-joj ./h3/ex5.m" command = "./tools/filelength 400 300 *.cpp *.h"
score = 80 files.import = [ "tools/filelength" ]
parsers = ["diff", "result-detail"]
files.import = ["tools/matlab-joj", "tools/matlab_formatter.py"] parsers = [ "keyword", "result-detail" ]
files.export = ["output/ex5_results2.txt", "output/ex5_logs2.txt"] keyword.keyword = [ "max", "recommended"]
keyword.weight = [ 20, 10 ]
result-detail.exitstatus = true
result-detail.stdout = true
result-detail.time = false
result-detail.mem = false
result_detail.time = true [[stages]]
result_detail.mem = true name = "[cq] Clang-tidy"
result_detail.stderr = false command = "run-clang-tidy-18 -header-filter=.* -quiet -load=/usr/local/lib/libcodequality.so -p h7/build h7/ex2.cpp"
files.import = [ "tests/homework/h7/.clang-tidy", "h7/build/compile_commands.json" ]
limit.stdout = 65
parsers = [ "clangtidy", "result-detail" ]
clangtidy.keyword = [ "codequality-unchecked-malloc-result", "codequality-no-global-variables", "codequality-no-header-guard", "codequality-no-fflush-stdin", "readability-function-size", "readability-duplicate-include", "readability-identifier-naming", "readability-redundant", "readability-misleading-indentation", "readability-misplaced-array-index", "cppcoreguidelines-init-variables", "bugprone-suspicious-string-compare", "google-global-names-in-headers", "clang-diagnostic", "clang-analyzer", "misc", "performance", "portability" ]
clangtidy.weight = [ 5, 20, 20, 20, 10, 5, 5, 5, 15, 5, 5, 5, 5, 5, 5, 5, 5, 5]
result-detail.exitstatus = true
result-detail.stdout = true
result-detail.time = false
result-detail.mem = false
[[stages]]
name = "[cq] Cppcheck"
command = "cppcheck --template='{\"file\":\"{file}\",\"line\":{line}, \"column\":{column}, \"severity\":\"{severity}\", \"message\":\"{message}\", \"id\":\"{id}\"}' --force --enable=all --suppress=missingIncludeSystem --quiet h7/ex2.cpp"
limit.stderr = 65
parsers = [ "cppcheck", "result-detail" ]
cppcheck.keyword = ["error", "warning", "portability", "performance", "style"]
cppcheck.weight = [15, 5, 5, 5, 5]
result-detail.exitstatus = true
result-detail.stderr = true
result-detail.time = false
result-detail.mem = false
[[stages]]
name = "[cq] Cpplint"
command = "cpplint --linelength=120 --filter=-legal,-readability/casting,-whitespace,-runtime/printf,-runtime/threadsafe_fn,-runtime/int,-readability/todo,-build/include_subdir,-build/header_guard,-build/include_what_you_use --recursive --exclude=build h7/ex2.cpp"
limit.stdout = 65
parsers = [ "cpplint", "result-detail" ]
cpplint.keyword = [ "runtime", "readability", "build" ]
cpplint.weight = [ 5, 20, 10]
result-detail.exitstatus = true
result-detail.stderr = true
result-detail.time = false
result-detail.mem = false
[[stages]]
name = "[joj] ex2-asan"
command="./h7/build/ex2-asan -a"
files.import = [ "h7/build/ex2-asan" ]
limit.mem = 91224961
parsers = [ "diff", "result-detail" ]
result-detail.exitstatus = true
result-detail.stderr = true
# will be removed as long as the name is fixed
case0.diff.output.score = 5
case0.limit.cpu = 1
case0.limit.mem = 91224961
case0.diff.output.ignorespaces = true
#case0.limit.stdout = 8
#case0.command = "./h7/build/ex2"
case0.in = "case0.in"
case1.diff.output.score = 5
case1.limit.cpu = 1
case1.limit.mem = 91224961
case1.diff.output.ignorespaces = true
#case1.limit.stdout = 8
#case1.command = "./h7/build/ex2"
case1.in = "case1.in"
case2.diff.output.score = 5
case2.limit.cpu = 1
case2.limit.mem = 91224961
case2.diff.output.ignorespaces = true
#case2.limit.stdout = 8
#case2.command = "./h7/build/ex2"
case2.in = "case2.in"
case3.diff.output.score = 5
case3.limit.cpu = 1
case3.limit.mem = 91224961
case3.diff.output.ignorespaces = true
#case3.limit.stdout = 8
#case3.command = "./h7/build/ex2"
case3.in = "case3.in"
case4.diff.output.score = 10
case4.limit.cpu = 1
case4.limit.mem = 91224961
case4.diff.output.ignorespaces = true
#case4.limit.stdout = 8
#case4.command = "./h7/build/ex2"
case4.in = "case4.in"
case5.diff.output.score = 10
case5.limit.cpu = 1
case5.limit.mem = 91224961
case5.diff.output.ignorespaces = true
#case5.limit.stdout = 8
#case5.command = "./h7/build/ex2"
case5.in = "case5.in"
case6.diff.output.score = 15
case6.limit.cpu = 1
case6.limit.mem = 91224961
case6.diff.output.ignorespaces = true
#case6.limit.stdout = 8
#case6.command = "./h7/build/ex2"
case6.in = "case6.in"
case7.diff.output.score = 15
case7.limit.cpu = 1
case7.limit.mem = 91224961
case7.diff.output.ignorespaces = true
#case7.limit.stdout = 8
#case7.command = "./h7/build/ex2"
case7.in = "case7.in"
case8.diff.output.score = 15
case8.limit.cpu = 1
case8.limit.mem = 91224961
case8.diff.output.ignorespaces = true
#case8.limit.stdout = 8
#case8.command = "./h7/build/ex2"
case8.in = "case8.in"
case9.diff.output.score = 15
case9.limit.cpu = 1
case9.limit.mem = 91224961
case9.diff.output.ignorespaces = true
#case9.limit.stdout = 8
#case9.command = "./h7/build/ex2"
case9.in = "case9.in"
[[stages]]
name = "[joj] ex2-msan"
command="./h7/build/ex2-msan -a"
files.import = [ "h7/build/ex2-msan" ]
limit.mem = 91224961
parsers = [ "diff", "result-detail" ]
result-detail.exitstatus = true
result-detail.stderr = true
# will be removed as long as the name is fixed
case0.diff.output.score = 5
case0.limit.cpu = 1
case0.limit.mem = 91224961
case0.diff.output.ignorespaces = true
#case0.limit.stdout = 8
#case0.command = "./h7/build/ex2"
case0.in = "case0.in"
case1.diff.output.score = 5
case1.limit.cpu = 1
case1.limit.mem = 91224961
case1.diff.output.ignorespaces = true
#case1.limit.stdout = 8
#case1.command = "./h7/build/ex2"
case1.in = "case1.in"
case2.diff.output.score = 5
case2.limit.cpu = 1
case2.limit.mem = 91224961
case2.diff.output.ignorespaces = true
#case2.limit.stdout = 8
#case2.command = "./h7/build/ex2"
case2.in = "case2.in"
case3.diff.output.score = 5
case3.limit.cpu = 1
case3.limit.mem = 91224961
case3.diff.output.ignorespaces = true
#case3.limit.stdout = 8
#case3.command = "./h7/build/ex2"
case3.in = "case3.in"
case4.diff.output.score = 10
case4.limit.cpu = 1
case4.limit.mem = 91224961
case4.diff.output.ignorespaces = true
#case4.limit.stdout = 8
#case4.command = "./h7/build/ex2"
case4.in = "case4.in"
case5.diff.output.score = 10
case5.limit.cpu = 1
case5.limit.mem = 91224961
case5.diff.output.ignorespaces = true
#case5.limit.stdout = 8
#case5.command = "./h7/build/ex2"
case5.in = "case5.in"
case6.diff.output.score = 15
case6.limit.cpu = 1
case6.limit.mem = 91224961
case6.diff.output.ignorespaces = true
#case6.limit.stdout = 8
#case6.command = "./h7/build/ex2"
case6.in = "case6.in"
case7.diff.output.score = 15
case7.limit.cpu = 1
case7.limit.mem = 91224961
case7.diff.output.ignorespaces = true
#case7.limit.stdout = 8
#case7.command = "./h7/build/ex2"
case7.in = "case7.in"
case8.diff.output.score = 15
case8.limit.cpu = 1
case8.limit.mem = 91224961
case8.diff.output.ignorespaces = true
#case8.limit.stdout = 8
#case8.command = "./h7/build/ex2"
case8.in = "case8.in"
case9.diff.output.score = 15
case9.limit.cpu = 1
case9.limit.mem = 91224961
case9.diff.output.ignorespaces = true
#case9.limit.stdout = 8
#case9.command = "./h7/build/ex2"
case9.in = "case9.in"
[[stages]]
name = "[joj] ex2-ubsan"
command="./h7/build/ex2-ubsan -a"
files.import = [ "h7/build/ex2-ubsan" ]
parsers = [ "diff", "result-detail" ]
result-detail.exitstatus = true
result-detail.stderr = true
# will be removed as long as the name is fixed
case0.diff.output.score = 5
case0.limit.cpu = 1
case0.limit.mem = 91224961
case0.diff.output.ignorespaces = true
#case0.limit.stdout = 8
#case0.command = "./h7/build/ex2"
case0.in = "case0.in"
case1.diff.output.score = 5
case1.limit.cpu = 1
case1.limit.mem = 91224961
case1.diff.output.ignorespaces = true
#case1.limit.stdout = 8
#case1.command = "./h7/build/ex2"
case1.in = "case1.in"
case2.diff.output.score = 5
case2.limit.cpu = 1
case2.limit.mem = 91224961
case2.diff.output.ignorespaces = true
#case2.limit.stdout = 8
#case2.command = "./h7/build/ex2"
case2.in = "case2.in"
case3.diff.output.score = 5
case3.limit.cpu = 1
case3.limit.mem = 91224961
case3.diff.output.ignorespaces = true
#case3.limit.stdout = 8
#case3.command = "./h7/build/ex2"
case3.in = "case3.in"
case4.diff.output.score = 10
case4.limit.cpu = 1
case4.limit.mem = 91224961
case4.diff.output.ignorespaces = true
#case4.limit.stdout = 8
#case4.command = "./h7/build/ex2"
case4.in = "case4.in"
case5.diff.output.score = 10
case5.limit.cpu = 1
case5.limit.mem = 91224961
case5.diff.output.ignorespaces = true
#case5.limit.stdout = 8
#case5.command = "./h7/build/ex2"
case5.in = "case5.in"
case6.diff.output.score = 15
case6.limit.cpu = 1
case6.limit.mem = 91224961
case6.diff.output.ignorespaces = true
#case6.limit.stdout = 8
#case6.command = "./h7/build/ex2"
case6.in = "case6.in"
case7.diff.output.score = 15
case7.limit.cpu = 1
case7.limit.mem = 91224961
case7.diff.output.ignorespaces = true
#case7.limit.stdout = 8
#case7.command = "./h7/build/ex2"
case7.in = "case7.in"
case8.diff.output.score = 15
case8.limit.cpu = 1
case8.limit.mem = 91224961
case8.diff.output.ignorespaces = true
#case8.limit.stdout = 8
#case8.command = "./h7/build/ex2"
case8.in = "case8.in"
case9.diff.output.score = 15
case9.limit.cpu = 1
case9.limit.mem = 91224961
case9.diff.output.ignorespaces = true
#case9.limit.stdout = 8
#case9.command = "./h7/build/ex2"
case9.in = "case9.in"
[[stages]]
name = "[joj] ex2"
command="./h7/build/ex2"
files.import = [ "h7/build/ex2" ]
score = 10
parsers = [ "diff", "result-detail" ]
result-detail.exitstatus = true
result-detail.stderr = true
result-status.forcequit = true
# will be removed as long as the name is fixed
case0.diff.output.score = 5
case0.limit.cpu = 1
case0.limit.mem = 91224961
case0.diff.output.ignorespaces = true
#case0.limit.stdout = 8
#case0.command = "./h7/build/ex2"
case0.in = "case0.in"
case1.diff.output.score = 5
case1.limit.cpu = 1
case1.limit.mem = 91224961
case1.diff.output.ignorespaces = true
#case1.limit.stdout = 8
#case1.command = "./h7/build/ex2"
case1.in = "case1.in"
case2.diff.output.score = 5
case2.limit.cpu = 1
case2.limit.mem = 91224961
case2.diff.output.ignorespaces = true
#case2.limit.stdout = 8
#case2.command = "./h7/build/ex2"
case2.in = "case2.in"
case3.diff.output.score = 5
case3.limit.cpu = 1
case3.limit.mem = 91224961
case3.diff.output.ignorespaces = true
#case3.limit.stdout = 8
#case3.command = "./h7/build/ex2"
case3.in = "case3.in"
case4.diff.output.score = 10
case4.limit.cpu = 1
case4.limit.mem = 91224961
case4.diff.output.ignorespaces = true
#case4.limit.stdout = 8
#case4.command = "./h7/build/ex2"
case4.in = "case4.in"
case5.diff.output.score = 10
case5.limit.cpu = 1
case5.limit.mem = 91224961
case5.diff.output.ignorespaces = true
#case5.limit.stdout = 8
#case5.command = "./h7/build/ex2"
case5.in = "case5.in"
case6.diff.output.score = 15
case6.limit.cpu = 1
case6.limit.mem = 91224961
case6.diff.output.ignorespaces = true
#case6.limit.stdout = 8
#case6.command = "./h7/build/ex2"
case6.in = "case6.in"
case7.diff.output.score = 15
case7.limit.cpu = 1
case7.limit.mem = 91224961
case7.diff.output.ignorespaces = true
#case7.limit.stdout = 8
#case7.command = "./h7/build/ex2"
case7.in = "case7.in"
case8.diff.output.score = 15
case8.limit.cpu = 1
case8.limit.mem = 91224961
case8.diff.output.ignorespaces = true
#case8.limit.stdout = 8
#case8.command = "./h7/build/ex2"
case8.in = "case8.in"
case9.diff.output.score = 15
case9.limit.cpu = 1
case9.limit.mem = 91224961
case9.diff.output.ignorespaces = true
#case9.limit.stdout = 8
#case9.command = "./h7/build/ex2"
case9.in = "case9.in"

View File

@ -2,6 +2,7 @@ import json
import os import os
from typing import Any, Dict, Tuple from typing import Any, Dict, Tuple
import Path
import rtoml import rtoml
from joj3_config_generator.convert import convert from joj3_config_generator.convert import convert
@ -28,5 +29,7 @@ def read_convert_files(
def load_case(case_name: str) -> None: def load_case(case_name: str) -> None:
repo, task, expected_result = read_convert_files(case_name) repo, task, expected_result = read_convert_files(case_name)
result = convert(repo, task).model_dump(mode="json", by_alias=True) result = convert(repo, task, conf_root=Path(".")).model_dump(

why modify this?

ah, I guess there is some incompatibility within `convert()`? It is to make sure the process actually stays the same. If that is not what you wish, I'll revert it.
mode="json", by_alias=True, exclude_none=True
)
assert result == expected_result assert result == expected_result

33
tests/immutable_file/.gitattributes vendored Normal file
View File

@ -0,0 +1,33 @@
*.avi filter=lfs diff=lfs merge=lfs -text
*.bz2 filter=lfs diff=lfs merge=lfs -text
*.djvu filter=lfs diff=lfs merge=lfs -text
*.doc filter=lfs diff=lfs merge=lfs -text
*.docx filter=lfs diff=lfs merge=lfs -text
*.epub filter=lfs diff=lfs merge=lfs -text
*.gz filter=lfs diff=lfs merge=lfs -text
*.ipynb filter=lfs diff=lfs merge=lfs -text
*.jpeg filter=lfs diff=lfs merge=lfs -text
*.JPEG filter=lfs diff=lfs merge=lfs -text
*.jpg filter=lfs diff=lfs merge=lfs -text
*.JPG filter=lfs diff=lfs merge=lfs -text
*.mkv filter=lfs diff=lfs merge=lfs -text
*.mp4 filter=lfs diff=lfs merge=lfs -text
*.ods filter=lfs diff=lfs merge=lfs -text
*.odt filter=lfs diff=lfs merge=lfs -text
*.otf filter=lfs diff=lfs merge=lfs -text
*.pdf filter=lfs diff=lfs merge=lfs -text
*.PDF filter=lfs diff=lfs merge=lfs -text
*.png filter=lfs diff=lfs merge=lfs -text
*.PNG filter=lfs diff=lfs merge=lfs -text
*.ppt filter=lfs diff=lfs merge=lfs -text
*.pptx filter=lfs diff=lfs merge=lfs -text
*.ps filter=lfs diff=lfs merge=lfs -text
*.rar filter=lfs diff=lfs merge=lfs -text
*.tar filter=lfs diff=lfs merge=lfs -text
*.tgz filter=lfs diff=lfs merge=lfs -text
*.ttf filter=lfs diff=lfs merge=lfs -text
*.webm filter=lfs diff=lfs merge=lfs -text
*.xls filter=lfs diff=lfs merge=lfs -text
*.xlsx filter=lfs diff=lfs merge=lfs -text
*.xz filter=lfs diff=lfs merge=lfs -text
*.zip filter=lfs diff=lfs merge=lfs -text

23
tests/immutable_file/.gitignore vendored Normal file
View File

@ -0,0 +1,23 @@
################################
## White list based gitignore ##
################################
# forbidden
*
.*
# allowed
!.gitignore
!.gitattributes
!.gitea/
!.gitea/issue_template/
!.gitea/workflows/
!*.yaml
!Makefile
!CMakeLists.txt
!h[0-8]/
!*.m
!*.c
!*.cpp
!*.h
!*.md

View File

@ -0,0 +1,19 @@
name: Run JOJ3 on Push
on: [push]
jobs:
run:
container:
image: focs.ji.sjtu.edu.cn:5000/gitea/runner-images:focs-ubuntu-latest-slim
volumes:
- /home/tt/.config:/home/tt/.config
- /home/tt/.cache:/home/tt/.cache
- /home/tt/.ssh:/home/tt/.ssh
steps:
- name: Check out repository code
uses: https://gitea.com/BoYanZh/checkout@focs
with:
fetch-depth: 0
- name: run joj3
run: |
sudo -E -u tt joj3 -conf-root /home/tt/.config/joj/tests/homework

View File

@ -0,0 +1,21 @@
name: Run JOJ3 on Release
on:
release:
types: [published]
jobs:
run:
container:
image: focs.ji.sjtu.edu.cn:5000/gitea/runner-images:focs-ubuntu-latest-slim
volumes:
- /home/tt/.config:/home/tt/.config
- /home/tt/.cache:/home/tt/.cache
- /home/tt/.ssh:/home/tt/.ssh
steps:
- name: Check out repository code
uses: https://gitea.com/BoYanZh/checkout@focs
with:
fetch-depth: 0
- name: run joj3
run: |
sudo -E -u tt joj3 -conf-root "/home/tt/.config/joj/tests/homework" -conf-name "conf-release.json" -tag "${{ github.ref_name }}"

View File

@ -0,0 +1,33 @@
*.avi filter=lfs diff=lfs merge=lfs -text
*.bz2 filter=lfs diff=lfs merge=lfs -text
*.djvu filter=lfs diff=lfs merge=lfs -text
*.doc filter=lfs diff=lfs merge=lfs -text
*.docx filter=lfs diff=lfs merge=lfs -text
*.epub filter=lfs diff=lfs merge=lfs -text
*.gz filter=lfs diff=lfs merge=lfs -text
*.ipynb filter=lfs diff=lfs merge=lfs -text
*.jpeg filter=lfs diff=lfs merge=lfs -text
*.JPEG filter=lfs diff=lfs merge=lfs -text
*.jpg filter=lfs diff=lfs merge=lfs -text
*.JPG filter=lfs diff=lfs merge=lfs -text
*.mkv filter=lfs diff=lfs merge=lfs -text
*.mp4 filter=lfs diff=lfs merge=lfs -text
*.ods filter=lfs diff=lfs merge=lfs -text
*.odt filter=lfs diff=lfs merge=lfs -text
*.otf filter=lfs diff=lfs merge=lfs -text
*.pdf filter=lfs diff=lfs merge=lfs -text
*.PDF filter=lfs diff=lfs merge=lfs -text
*.png filter=lfs diff=lfs merge=lfs -text
*.PNG filter=lfs diff=lfs merge=lfs -text
*.ppt filter=lfs diff=lfs merge=lfs -text
*.pptx filter=lfs diff=lfs merge=lfs -text
*.ps filter=lfs diff=lfs merge=lfs -text
*.rar filter=lfs diff=lfs merge=lfs -text
*.tar filter=lfs diff=lfs merge=lfs -text
*.tgz filter=lfs diff=lfs merge=lfs -text
*.ttf filter=lfs diff=lfs merge=lfs -text
*.webm filter=lfs diff=lfs merge=lfs -text
*.xls filter=lfs diff=lfs merge=lfs -text
*.xlsx filter=lfs diff=lfs merge=lfs -text
*.xz filter=lfs diff=lfs merge=lfs -text
*.zip filter=lfs diff=lfs merge=lfs -text

23
tests/immutable_hteam-test/.gitignore vendored Normal file
View File

@ -0,0 +1,23 @@
################################
## White list based gitignore ##
################################
# forbidden
*
.*
# allowed
!.gitignore
!.gitattributes
!.gitea/
!.gitea/issue_template/
!.gitea/workflows/
!*.yaml
!Makefile
!CMakeLists.txt
!h[0-8]/
!*.m
!*.c
!*.cpp
!*.h
!*.md

View File

@ -0,0 +1,19 @@
name: Run JOJ3 on Push
on: [push]
jobs:
run:
container:
image: focs.ji.sjtu.edu.cn:5000/gitea/runner-images:focs-ubuntu-latest-slim
volumes:
- /home/tt/.config:/home/tt/.config
- /home/tt/.cache:/home/tt/.cache
- /home/tt/.ssh:/home/tt/.ssh
steps:
- name: Check out repository code
uses: https://gitea.com/BoYanZh/checkout@focs
with:
fetch-depth: 0
- name: run joj3
run: |
sudo -E -u tt joj3 -conf-root /home/tt/.config/joj/tests/homework

View File

@ -0,0 +1,21 @@
name: Run JOJ3 on Release
on:
release:
types: [published]
jobs:
run:
container:
image: focs.ji.sjtu.edu.cn:5000/gitea/runner-images:focs-ubuntu-latest-slim
volumes:
- /home/tt/.config:/home/tt/.config
- /home/tt/.cache:/home/tt/.cache
- /home/tt/.ssh:/home/tt/.ssh
steps:
- name: Check out repository code
uses: https://gitea.com/BoYanZh/checkout@focs
with:
fetch-depth: 0
- name: run joj3
run: |
sudo -E -u tt joj3 -conf-root "/home/tt/.config/joj/tests/homework" -conf-name "conf-release.json" -tag "${{ github.ref_name }}"

33
tests/immutable_hteam/.gitattributes vendored Normal file
View File

@ -0,0 +1,33 @@
*.avi filter=lfs diff=lfs merge=lfs -text
*.bz2 filter=lfs diff=lfs merge=lfs -text
*.djvu filter=lfs diff=lfs merge=lfs -text
*.doc filter=lfs diff=lfs merge=lfs -text
*.docx filter=lfs diff=lfs merge=lfs -text
*.epub filter=lfs diff=lfs merge=lfs -text
*.gz filter=lfs diff=lfs merge=lfs -text
*.ipynb filter=lfs diff=lfs merge=lfs -text
*.jpeg filter=lfs diff=lfs merge=lfs -text
*.JPEG filter=lfs diff=lfs merge=lfs -text
*.jpg filter=lfs diff=lfs merge=lfs -text
*.JPG filter=lfs diff=lfs merge=lfs -text
*.mkv filter=lfs diff=lfs merge=lfs -text
*.mp4 filter=lfs diff=lfs merge=lfs -text
*.ods filter=lfs diff=lfs merge=lfs -text
*.odt filter=lfs diff=lfs merge=lfs -text
*.otf filter=lfs diff=lfs merge=lfs -text
*.pdf filter=lfs diff=lfs merge=lfs -text
*.PDF filter=lfs diff=lfs merge=lfs -text
*.png filter=lfs diff=lfs merge=lfs -text
*.PNG filter=lfs diff=lfs merge=lfs -text
*.ppt filter=lfs diff=lfs merge=lfs -text
*.pptx filter=lfs diff=lfs merge=lfs -text
*.ps filter=lfs diff=lfs merge=lfs -text
*.rar filter=lfs diff=lfs merge=lfs -text
*.tar filter=lfs diff=lfs merge=lfs -text
*.tgz filter=lfs diff=lfs merge=lfs -text
*.ttf filter=lfs diff=lfs merge=lfs -text
*.webm filter=lfs diff=lfs merge=lfs -text
*.xls filter=lfs diff=lfs merge=lfs -text
*.xlsx filter=lfs diff=lfs merge=lfs -text
*.xz filter=lfs diff=lfs merge=lfs -text
*.zip filter=lfs diff=lfs merge=lfs -text

23
tests/immutable_hteam/.gitignore vendored Normal file
View File

@ -0,0 +1,23 @@
################################
## White list based gitignore ##
################################
# forbidden
*
.*
# allowed
!.gitignore
!.gitattributes
!.gitea/
!.gitea/issue_template/
!.gitea/workflows/
!*.yaml
!Makefile
!CMakeLists.txt
!h[0-8]/
!*.m
!*.c
!*.cpp
!*.h
!*.md

View File

@ -0,0 +1,19 @@
name: Run JOJ3 on Push
on: [push]
jobs:
run:
container:
image: focs.ji.sjtu.edu.cn:5000/gitea/runner-images:focs-ubuntu-latest-slim
volumes:
- /home/tt/.config:/home/tt/.config
- /home/tt/.cache:/home/tt/.cache
- /home/tt/.ssh:/home/tt/.ssh
steps:
- name: Check out repository code
uses: https://gitea.com/BoYanZh/checkout@focs
with:
fetch-depth: 0
- name: run joj3
run: |
sudo -E -u tt joj3 -conf-root /home/tt/.config/joj/homework

View File

@ -0,0 +1,21 @@
name: Run JOJ3 on Release
on:
release:
types: [published]
jobs:
run:
container:
image: focs.ji.sjtu.edu.cn:5000/gitea/runner-images:focs-ubuntu-latest-slim
volumes:
- /home/tt/.config:/home/tt/.config
- /home/tt/.cache:/home/tt/.cache
- /home/tt/.ssh:/home/tt/.ssh
steps:
- name: Check out repository code
uses: https://gitea.com/BoYanZh/checkout@focs
with:
fetch-depth: 0
- name: run joj3
run: |
sudo -E -u tt joj3 -conf-root "/home/tt/.config/joj/homework" -conf-name "conf-release.json" -tag "${{ github.ref_name }}"

33
tests/immutable_p3-test/.gitattributes vendored Normal file
View File

@ -0,0 +1,33 @@
*.avi filter=lfs diff=lfs merge=lfs -text
*.bz2 filter=lfs diff=lfs merge=lfs -text
*.djvu filter=lfs diff=lfs merge=lfs -text
*.doc filter=lfs diff=lfs merge=lfs -text
*.docx filter=lfs diff=lfs merge=lfs -text
*.epub filter=lfs diff=lfs merge=lfs -text
*.gz filter=lfs diff=lfs merge=lfs -text
*.ipynb filter=lfs diff=lfs merge=lfs -text
*.jpeg filter=lfs diff=lfs merge=lfs -text
*.JPEG filter=lfs diff=lfs merge=lfs -text
*.jpg filter=lfs diff=lfs merge=lfs -text
*.JPG filter=lfs diff=lfs merge=lfs -text
*.mkv filter=lfs diff=lfs merge=lfs -text
*.mp4 filter=lfs diff=lfs merge=lfs -text
*.ods filter=lfs diff=lfs merge=lfs -text
*.odt filter=lfs diff=lfs merge=lfs -text
*.otf filter=lfs diff=lfs merge=lfs -text
*.pdf filter=lfs diff=lfs merge=lfs -text
*.PDF filter=lfs diff=lfs merge=lfs -text
*.png filter=lfs diff=lfs merge=lfs -text
*.PNG filter=lfs diff=lfs merge=lfs -text
*.ppt filter=lfs diff=lfs merge=lfs -text
*.pptx filter=lfs diff=lfs merge=lfs -text
*.ps filter=lfs diff=lfs merge=lfs -text
*.rar filter=lfs diff=lfs merge=lfs -text
*.tar filter=lfs diff=lfs merge=lfs -text
*.tgz filter=lfs diff=lfs merge=lfs -text
*.ttf filter=lfs diff=lfs merge=lfs -text
*.webm filter=lfs diff=lfs merge=lfs -text
*.xls filter=lfs diff=lfs merge=lfs -text
*.xlsx filter=lfs diff=lfs merge=lfs -text
*.xz filter=lfs diff=lfs merge=lfs -text
*.zip filter=lfs diff=lfs merge=lfs -text

29
tests/immutable_p3-test/.gitignore vendored Normal file
View File

@ -0,0 +1,29 @@
################################
## White list based gitignore ##
################################
# forbidden
*
.*
# allowed
!.gitignore
!.gitattributes
!.gitea/
!.gitea/issue_template/
!.gitea/workflows/
!src/
src/*
!src/ipa
!src/ipa/*.h
!src/ipa/*.cpp
!src/ipa/Makefile
!src/pms
!src/pms/*.cpp
!src/pms/*.h
!src/pms/Makefile
!CMakeLists.txt
!Makefile
!*.md
!*.yaml

View File

@ -0,0 +1,19 @@
name: Run JOJ3 on Push
on: [push]
jobs:
run:
container:
image: focs.ji.sjtu.edu.cn:5000/gitea/runner-images:focs-ubuntu-latest-slim
volumes:
- /home/tt/.config:/home/tt/.config
- /home/tt/.cache:/home/tt/.cache
- /home/tt/.ssh:/home/tt/.ssh
steps:
- name: Check out repository code
uses: https://gitea.com/BoYanZh/checkout@focs
with:
fetch-depth: 0
- name: run joj3
run: |
sudo -E -u tt joj3 -conf-root /home/tt/.config/joj/tests/projects/p3

View File

@ -0,0 +1,21 @@
name: Run JOJ3 on Release
on:
release:
types: [published]
jobs:
run:
container:
image: focs.ji.sjtu.edu.cn:5000/gitea/runner-images:focs-ubuntu-latest-slim
volumes:
- /home/tt/.config:/home/tt/.config
- /home/tt/.cache:/home/tt/.cache
- /home/tt/.ssh:/home/tt/.ssh
steps:
- name: Check out repository code
uses: https://gitea.com/BoYanZh/checkout@focs
with:
fetch-depth: 0
- name: run joj3
run: |
sudo -E -u tt joj3 -conf-root "/home/tt/.config/joj/tests/projects/p3" -conf-name "conf-release.json" -tag "${{ github.ref_name }}"

33
tests/immutable_p3/.gitattributes vendored Normal file
View File

@ -0,0 +1,33 @@
*.avi filter=lfs diff=lfs merge=lfs -text
*.bz2 filter=lfs diff=lfs merge=lfs -text
*.djvu filter=lfs diff=lfs merge=lfs -text
*.doc filter=lfs diff=lfs merge=lfs -text
*.docx filter=lfs diff=lfs merge=lfs -text
*.epub filter=lfs diff=lfs merge=lfs -text
*.gz filter=lfs diff=lfs merge=lfs -text
*.ipynb filter=lfs diff=lfs merge=lfs -text
*.jpeg filter=lfs diff=lfs merge=lfs -text
*.JPEG filter=lfs diff=lfs merge=lfs -text
*.jpg filter=lfs diff=lfs merge=lfs -text
*.JPG filter=lfs diff=lfs merge=lfs -text
*.mkv filter=lfs diff=lfs merge=lfs -text
*.mp4 filter=lfs diff=lfs merge=lfs -text
*.ods filter=lfs diff=lfs merge=lfs -text
*.odt filter=lfs diff=lfs merge=lfs -text
*.otf filter=lfs diff=lfs merge=lfs -text
*.pdf filter=lfs diff=lfs merge=lfs -text
*.PDF filter=lfs diff=lfs merge=lfs -text
*.png filter=lfs diff=lfs merge=lfs -text
*.PNG filter=lfs diff=lfs merge=lfs -text
*.ppt filter=lfs diff=lfs merge=lfs -text
*.pptx filter=lfs diff=lfs merge=lfs -text
*.ps filter=lfs diff=lfs merge=lfs -text
*.rar filter=lfs diff=lfs merge=lfs -text
*.tar filter=lfs diff=lfs merge=lfs -text
*.tgz filter=lfs diff=lfs merge=lfs -text
*.ttf filter=lfs diff=lfs merge=lfs -text
*.webm filter=lfs diff=lfs merge=lfs -text
*.xls filter=lfs diff=lfs merge=lfs -text
*.xlsx filter=lfs diff=lfs merge=lfs -text
*.xz filter=lfs diff=lfs merge=lfs -text
*.zip filter=lfs diff=lfs merge=lfs -text

29
tests/immutable_p3/.gitignore vendored Normal file
View File

@ -0,0 +1,29 @@
################################
## White list based gitignore ##
################################
# forbidden
*
.*
# allowed
!.gitignore
!.gitattributes
!.gitea/
!.gitea/issue_template/
!.gitea/workflows/
!src/
src/*
!src/ipa
!src/ipa/*.h
!src/ipa/*.cpp
!src/ipa/Makefile
!src/pms
!src/pms/*.cpp
!src/pms/*.h
!src/pms/Makefile
!CMakeLists.txt
!Makefile
!*.md
!*.yaml

View File

@ -0,0 +1,19 @@
name: Run JOJ3 on Push
on: [push]
jobs:
run:
container:
image: focs.ji.sjtu.edu.cn:5000/gitea/runner-images:focs-ubuntu-latest-slim
volumes:
- /home/tt/.config:/home/tt/.config
- /home/tt/.cache:/home/tt/.cache
- /home/tt/.ssh:/home/tt/.ssh
steps:
- name: Check out repository code
uses: https://gitea.com/BoYanZh/checkout@focs
with:
fetch-depth: 0
- name: run joj3
run: |
sudo -E -u tt joj3 -conf-root /home/tt/.config/joj/projects/p3

View File

@ -0,0 +1,21 @@
name: Run JOJ3 on Release
on:
release:
types: [published]
jobs:
run:
container:
image: focs.ji.sjtu.edu.cn:5000/gitea/runner-images:focs-ubuntu-latest-slim
volumes:
- /home/tt/.config:/home/tt/.config
- /home/tt/.cache:/home/tt/.cache
- /home/tt/.ssh:/home/tt/.ssh
steps:
- name: Check out repository code
uses: https://gitea.com/BoYanZh/checkout@focs
with:
fetch-depth: 0
- name: run joj3
run: |
sudo -E -u tt joj3 -conf-root "/home/tt/.config/joj/projects/p3" -conf-name "conf-release.json" -tag "${{ github.ref_name }}"