dev #10

Merged
李衍志523370910113 merged 238 commits from dev into master 2025-03-05 16:20:39 +08:00
5 changed files with 51 additions and 39 deletions
Showing only changes of commit 70366f467f - Show all commits

View File

@ -18,12 +18,12 @@ from joj3_config_generator.processers.task import (
fix_keyword,
fix_result_detail,
get_conf_stage,
get_executorWithConfig,
get_executor_with_config,
)
def convert(
repo_conf: repo.Config, task_conf: task.Config, conf_root: Path
repo_conf: repo.Config, task_conf: task.Config, repo_root: Path
) -> result.Config:
# Create the base ResultConf object
result_conf = result.Config(
@ -45,20 +45,18 @@ def convert(
)
# Construct healthcheck stage
healthcheck_stage = get_healthcheck_config(repo_conf)
healthcheck_stage = get_healthcheck_config(repo_conf, repo_root)
jon-lee marked this conversation as resolved Outdated

forgot to uncomment 😭

forgot to uncomment 😭

fixed

fixed
result_conf.stage.stages.append(healthcheck_stage)
stages: List[str] = []
# Convert each stage in the task configuration
for task_stage in task_conf.stages:
executor_with_config, stages = get_executorWithConfig(
task_stage, stages, conf_root
)
executor_with_config, stages = get_executor_with_config(task_stage, stages)
conf_stage = get_conf_stage(task_stage, executor_with_config)
conf_stage = fix_result_detail(task_stage, conf_stage)
conf_stage = fix_dummy(task_stage, conf_stage)
conf_stage = fix_keyword(task_stage, conf_stage)
conf_stage = fix_file(task_stage, conf_stage)
conf_stage = fix_diff(task_stage, conf_stage, task_conf, conf_root)
conf_stage = fix_diff(task_stage, conf_stage, task_conf)
result_conf.stage.stages.append(conf_stage)
return result_conf
@ -75,7 +73,7 @@ def convert_joj1(joj1_conf: joj1.Config) -> task.Config:
)
def distribute_json(folder_path: str, repo_obj: Any, conf_root: Path) -> None:
def distribute_json(folder_path: str, repo_obj: Any, repo_conf: Path) -> None:
for root, _, files in os.walk(folder_path):
for file in files:
if file.endswith(".toml"): # to pass test here
@ -85,7 +83,7 @@ def distribute_json(folder_path: str, repo_obj: Any, conf_root: Path) -> None:
task_toml = toml_file.read()
task_obj = rtoml.loads(task_toml)
result_model = convert(
repo.Config(**repo_obj), task.Config(**task_obj), conf_root
repo.Config(**repo_obj), task.Config(**task_obj), repo_conf
)
result_dict = result_model.model_dump(by_alias=True, exclude_none=True)

View File

@ -35,40 +35,50 @@ def convert(
Path("."),
"--conf-root",
"-c",
help="This should be consistent with the root of how you run JOJ3",
help="This is where you want to put all your 'task.toml' type folders, default choice for your input can be '/home/tt/.config/joj/'",
),
repo_path: Path = typer.Option(
Path("."),
"--repo-root",
"-r",
help="This would be where you put your repo.toml file",
help="This would be where you put your 'repo.toml' file as well as your 'immutable files', they should all be at same place, default choice for your input can be 'immutable_files', which is the folder at the position '/home/tt/.config/joj/'",
),
distribute: bool = typer.Option(
False, "--distribute", "-d", help="This flag determine whether to distribute"
),
) -> Dict[str, Any]:
) -> None:
logger.info(f"Converting files in {root.absolute()}")
repo_toml_path = os.path.join(repo_path.absolute(), "basic", "repo.toml")
task_toml_path = os.path.join(root.absolute(), "basic", "task.toml")
result_json_path = os.path.join(root.absolute(), "basic", "task.json")
if distribute is False:
repo_toml_path = os.path.join(repo_path.absolute(), "basic", "repo.toml")
else:
repo_toml_path = os.path.join("/home/tt/.config/joj", repo_path, "repo.toml")
repo_toml_path = os.path.join(repo_path, "repo.toml")
with open(repo_toml_path, encoding=None) as repo_file:
repo_toml = repo_file.read()
with open(task_toml_path, encoding=None) as task_file:
task_toml = task_file.read()
repo_obj = rtoml.loads(repo_toml)
task_obj = rtoml.loads(task_toml)
result_model = convert_conf(repo.Config(**repo_obj), task.Config(**task_obj), root)
result_dict = result_model.model_dump(by_alias=True, exclude_none=True)
if distribute is False:
task_toml_path = os.path.join(root.absolute(), "basic", "task.toml")
result_json_path = os.path.join(root.absolute(), "basic", "task.json")
jon-lee marked this conversation as resolved

why?

why?

fixed

fixed
with open(result_json_path, "w", encoding=None) as result_file:
json.dump(result_dict, result_file, ensure_ascii=False, indent=4)
result_file.write("\n")
with open(task_toml_path, encoding=None) as task_file:
task_toml = task_file.read()
task_obj = rtoml.loads(task_toml)
result_model = convert_conf(
repo.Config(**repo_obj), task.Config(**task_obj), repo_path
)
result_dict = result_model.model_dump(by_alias=True, exclude_none=True)
with open(result_json_path, "w", encoding=None) as result_file:
json.dump(result_dict, result_file, ensure_ascii=False, indent=4)
result_file.write("\n")
# distribution on json
# need a get folder path function
if distribute:
else:
folder_path = "/home/tt/.config/joj"
folder_path = f"{Path.home()}/Desktop/engr151-joj/home/tt/.config/joj/homework"
folder_path = f"{Path.home()}/Desktop/FOCS/JOJ3-config-generator/tests/convert/"
distribute_json(folder_path, repo_obj, conf_root=root)
return result_dict
# to be used in real action
folder_path = f"{root}"
distribute_json(folder_path, repo_obj, repo_path)

View File

@ -37,7 +37,7 @@ def get_teapot_stage(repo_conf: repo.Config) -> result.StageDetail:
return stage_conf
def get_healthcheck_args(repo_conf: repo.Config) -> str:
def get_healthcheck_args(repo_conf: repo.Config, repo_root: Path) -> str:
repoSize = repo_conf.max_size
immutable = repo_conf.files.immutable
repo_size = f"-repoSize={str(repoSize)} "
@ -59,7 +59,7 @@ def get_healthcheck_args(repo_conf: repo.Config) -> str:
for meta in required_files:
args = args + meta
args = args + get_hash(immutable)
args = args + get_hash(immutable, repo_root)
args = args + immutable_files
@ -84,7 +84,9 @@ def get_debug_args(repo_conf: repo.Config) -> str:
return args
def get_healthcheck_config(repo_conf: repo.Config) -> result.StageDetail:
def get_healthcheck_config(
repo_conf: repo.Config, repo_root: Path
) -> result.StageDetail:
healthcheck_stage = result.StageDetail(
name="healthcheck",
group="",
@ -94,7 +96,7 @@ def get_healthcheck_config(repo_conf: repo.Config) -> result.StageDetail:
default=result.Cmd(),
cases=[
result.OptionalCmd(
args=shlex.split(get_healthcheck_args(repo_conf)),
args=shlex.split(get_healthcheck_args(repo_conf, repo_root)),
),
result.OptionalCmd(
args=shlex.split(get_debug_args(repo_conf)),
@ -119,14 +121,17 @@ def calc_sha256sum(file_path: str) -> str:
return sha256_hash.hexdigest()
def get_hash(immutable_files: list[str]) -> str: # input should be a list
def get_hash(
immutable_files: list[str], repo_root: Path
) -> str: # input should be a list
# FIXME: should be finalized when get into the server
current_file_path = Path(__file__).resolve()
project_root = current_file_path.parents[2]
# FIXME: givin the path
file_path = f"{project_root}/tests/immutable_file/"
# default value as hardcoded
# file_path = "{Path.home()}/.cache/immutable"
# to be use
# file_path = f"/home/tt/.config/joj/{repo_root}/"
immutable_hash = []
for i, file in enumerate(immutable_files):
immutable_files[i] = file_path + file.rsplit("/", 1)[-1]

View File

@ -33,8 +33,8 @@ def get_conf_stage(
return conf_stage
jon-lee marked this conversation as resolved Outdated

should loop through conf_stage.parsers here and update the with field according to the parser name.

should loop through `conf_stage.parsers` here and update the `with` field according to the parser name.

I think its already implemented in each of the fix_parsers functions

I think its already implemented in each of the `fix_parsers` functions

No, do not find the parser in the fix_xxx function. Instead, iterate through the parsers here and decide how to fill in the with.

No, do not find the parser in the `fix_xxx` function. Instead, iterate through the parsers here and decide how to fill in the `with`.

resolved.

resolved.

Use a dict to store the parser name, the field, and the function to process it.

    process_dict: Dict[
        str, Tuple[Callable[[result.ParserConfig, BaseModel], None], BaseModel]
    ] = {
        "clangtidy": (fix_keyword, task_stage.clangtidy),
        "keyword": (fix_keyword, task_stage.keyword),
        "diff": (fix_diff, task_stage.diff),
    }
    for i, parser in enumerate(task_stage.parsers):
        if parser in process_dict:
            func, parser_model = process_dict[parser]
            func(conf_stage.parsers[i], parser_model)
Use a dict to store parser name, field, function to process. ``` process_dict: Dict[ str, Tuple[Callable[[result.ParserConfig, BaseModel], None], BaseModel] ] = { "clangtidy": (fix_keyword, task_stage.clangtidy), "keyword": (fix_keyword, task_stage.keyword), "diff": (fix_diff, task_stage.diff), } for i, parser in enumerate(task_stage.parsers): if parser in process_dict: func, parser_model = process_dict[parser] func(conf_stage.parsers[i], parser_model) ```

resolved.

resolved.
jon-lee marked this conversation as resolved Outdated

Do we need to support both kinds of names?

Do we need to support both kinds of names?

probably yes, since it is easy for a new TA to type it wrong

probably yes, since it is easy for a new TA to type it wrong

parser names should be a str enum, to force them to use the correct names

parser names should be a str enum, to force them to use the correct names

ok, then removed.

ok, then removed.
def get_executorWithConfig(
task_stage: task.Stage, cached: List[str], conf_root: Path
def get_executor_with_config(
jon-lee marked this conversation as resolved Outdated

underscore

underscore

fixed

fixed
task_stage: task.Stage, cached: List[str]
) -> Tuple[result.ExecutorWith, List[str]]:
file_import = (
task_stage.files.import_
@ -61,7 +61,7 @@ def get_executorWithConfig(
else []
),
copy_in={
jon-lee marked this conversation as resolved Outdated

not necessary

not necessary

resolved.

resolved.
file: result.CmdFile(src=f"/home/tt/{conf_root}/tools/{file}")
file: result.CmdFile(src=f"/home/tt/.config/joj/tools/{file}")
# all copyin files store in this tools folder
# are there any corner cases
for file in copy_in_files
@ -238,7 +238,6 @@ def fix_diff(
task_stage: task.Stage,
conf_stage: result.StageDetail,
task_conf: task.Config,
conf_root: Path,
) -> result.StageDetail:
if task_stage.parsers is not None and "diff" in task_stage.parsers:
diff_parser = next((p for p in conf_stage.parsers if p.name == "diff"), None)
@ -277,7 +276,7 @@ def fix_diff(
stage_cases.append(
result.OptionalCmd(
stdin=result.CmdFile(
src=f"/home/tt/{conf_root}/{task_conf.task.type_}/{stdin}"
src=f"/home/tt/.config/joj/{task_conf.task.type_}/{stdin}"
),
args=(shlex.split(command) if command is not None else None),
cpu_limit=cpu_limit,
@ -300,7 +299,7 @@ def fix_diff(
{
"score": diff_output.score,
"fileName": "stdout",
"answerPath": f"/home/tt/{conf_root}/{task_conf.task.type_}/{stdout}",
"answerPath": f"/home/tt/.config/joj/{task_conf.task.type_}/{stdout}",
"forceQuitOnDiff": diff_output.forcequit,
"alwaysHide": diff_output.hide,
"compareSpace": not diff_output.ignorespaces,

View File

@ -29,7 +29,7 @@ def read_convert_files(
def load_case(case_name: str) -> None:
repo, task, expected_result = read_convert_files(case_name)
result = convert(repo, task, conf_root=Path(".")).model_dump(
result = convert(repo, task, repo_root=Path(".")).model_dump(
mode="json", by_alias=True, exclude_none=True
)
assert result == expected_result