dev #10

Merged
李衍志523370910113 merged 238 commits from dev into master 2025-03-05 16:20:39 +08:00
9 changed files with 1247 additions and 682 deletions
Showing only changes of commit 979185252d

View File

@ -22,19 +22,21 @@ from joj3_config_generator.processers.task import (
)
def convert(repo_conf: repo.Config, task_conf: task.Config) -> result.Config:
def convert(
repo_conf: repo.Config, task_conf: task.Config, conf_root: Path
) -> result.Config:
# Create the base ResultConf object
result_conf = result.Config(
name=task_conf.task.name,
# TODO: specify the exact folder difference
# exact folder difference specified by type
log_path=f"{Path.home()}/.cache/joj3/{task_conf.task.type_}.log",
jon-lee marked this conversation as resolved Outdated

Make this `Path.home()` default to `/home/tt`. For now, create a const for this dir.

fixed
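
A minimal sketch of what the suggested constant could look like (the constant name and helper are assumptions, not the PR's actual code):

```python
from pathlib import Path

# hypothetical module-level constant for the sandbox home directory
DEFAULT_USER_HOME = Path("/home/tt")


def build_log_path(task_type: str) -> str:
    # log path built from the constant instead of Path.home()
    return str(DEFAULT_USER_HOME / ".cache" / "joj3" / f"{task_type}.log")


print(build_log_path("homework/h7/e2"))  # /home/tt/.cache/joj3/homework/h7/e2.log
```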
expire_unix_timestamp=(
int(task_conf.release.deadline.timestamp())
if task_conf.release.deadline
else -1
),
jon-lee marked this conversation as resolved Outdated

where is it used?

this should be storing all the files that are about to be copied in or out

It is the input and output for the following functions about parsers

so this feature is not implemented?
    if not repo_conf.force_skip_health_check_on_test or not current_test:
        result_conf.stage.stages.append(get_health_check_config(repo_conf))
    cached: List[str] = []
    # Convert each stage in the task configuration
    for task_stage in task_conf.stages:
        executor_with_config, cached = get_executor_with_config(task_stage, cached)
        conf_stage = get_conf_stage(task_stage, executor_with_config)
        conf_stage = fix_result_detail(task_stage, conf_stage)
        conf_stage = fix_dummy(task_stage, conf_stage)
        conf_stage = fix_keyword(task_stage, conf_stage)
        conf_stage = fix_file(task_stage, conf_stage)
        conf_stage = fix_diff(task_stage, conf_stage, task_conf)
        result_conf.stage.stages.append(conf_stage)

it is

    for task_stage in task_conf.stages:
        executor_with_config, cached = get_executor_with_config(task_stage, cached)

this is a loop, so this `cached` will be updated in every round over the stages

The return value is unnecessary.

I have a lazy coding style here: everything that gets imported will get exported, so this should be maintained until the end of the loop. Everything exported in the previous stage will be imported in the next stage.
  1. The return value is unnecessary
  2. It should be a `set`

try it yourself

I see why

resolved.
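
A minimal sketch of the suggested shape, with an illustrative helper name and file names: pass the running `set` in and mutate it in place, so nothing needs to be returned:

```python
from typing import List, Set


def collect_copy_in(file_import: List[str], cached: Set[str]) -> List[str]:
    # files already produced by an earlier stage do not need to be copied in again
    copy_in_files = [file for file in file_import if file not in cached]
    # mutate the shared set in place; no need to return it to the caller
    cached.update(file_import)
    return copy_in_files


cached: Set[str] = set()
print(collect_copy_in(["h7/build/ex2", "h7/main.cpp"], cached))  # both copied in
print(collect_copy_in(["h7/build/ex2"], cached))                 # already cached, skipped
```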
# FIXME: don't hardcode
actor_csv_path=f"{Path.home()}/.config/joj/students.csv",
actor_csv_path=f"{Path.home()}/.config/joj/students.csv", # students.csv position
max_total_score=repo_conf.max_total_score,
stage=result.Stage(
stages=[],
sandbox_token=repo_conf.sandbox_token,
@ -44,18 +46,19 @@ def convert(repo_conf: repo.Config, task_conf: task.Config) -> result.Config:
# Construct healthcheck stage
healthcheck_stage = get_healthcheck_config(repo_conf)
jon-lee marked this conversation as resolved Outdated

forgot to uncomment 😭

fixed
# teapotcheck_stage = get_teapotcheck_config(repo_conf, task_conf)
result_conf.stage.stages.append(healthcheck_stage)
cached: List[str] = []
stages: List[str] = []
# Convert each stage in the task configuration
for task_stage in task_conf.stages:
executor_with_config, cached = get_executorWithConfig(task_stage, cached)
executor_with_config, stages = get_executorWithConfig(
task_stage, stages, conf_root
)
conf_stage = get_conf_stage(task_stage, executor_with_config)
conf_stage = fix_result_detail(task_stage, conf_stage)
conf_stage = fix_dummy(task_stage, conf_stage)
conf_stage = fix_keyword(task_stage, conf_stage)
conf_stage = fix_file(task_stage, conf_stage)
conf_stage = fix_diff(task_stage, conf_stage, task_conf)
conf_stage = fix_diff(task_stage, conf_stage, task_conf, conf_root)
result_conf.stage.stages.append(conf_stage)
return result_conf
@ -72,7 +75,7 @@ def convert_joj1(joj1_conf: joj1.Config) -> task.Config:
)
def distribute_json(folder_path: str, repo_obj: Any) -> None:
def distribute_json(folder_path: str, repo_obj: Any, conf_root: Path) -> None:
for root, _, files in os.walk(folder_path):
for file in files:
if file.endswith(".toml"):
@ -81,7 +84,9 @@ def distribute_json(folder_path: str, repo_obj: Any) -> None:
with open(toml_file_path) as toml_file:
task_toml = toml_file.read()
task_obj = rtoml.loads(task_toml)
result_model = convert(repo.Config(**repo_obj), task.Config(**task_obj))
result_model = convert(
repo.Config(**repo_obj), task.Config(**task_obj), conf_root
)
result_dict = result_model.model_dump(by_alias=True, exclude_none=True)
with open(json_file_path, "w") as result_file:

View File

@ -36,29 +36,34 @@ def convert(
"-c",
help="This should be consistent with the root of how you run JOJ3",
),
repo_path: Path = typer.Option(
Path("."),
"--repo-root",
"-r",
help="This would be where you put your repo.toml file",
),
debug: bool = typer.Option(
False, "--debug", "-d", help="Enable debug mode for more verbose output"
),
) -> Dict[str, Any]:
logger.info(f"Converting files in {root.absolute()}")
repo_toml_path = os.path.join(root.absolute(), "basic", "repo.toml")
# TODO: loop through all dirs to find all task.toml
task_toml_path = os.path.join(root.absolute(), "basic", "task.toml")
result_json_path = os.path.join(root.absolute(), "basic", "task.json")
with open(repo_toml_path) as repo_file:
logger.info(f"Converting files in {repo_path.absolute()}")
repo_toml_path = os.path.join(repo_path.absolute(), "basic", "repo.toml")
task_toml_path = os.path.join(repo_path.absolute(), "basic", "task.toml")
result_json_path = os.path.join(repo_path.absolute(), "basic", "task.json")
with open(repo_toml_path, encoding=None) as repo_file:
repo_toml = repo_file.read()
with open(task_toml_path) as task_file:
with open(task_toml_path, encoding=None) as task_file:
task_toml = task_file.read()
repo_obj = rtoml.loads(repo_toml)
task_obj = rtoml.loads(task_toml)
result_model = convert_conf(repo.Config(**repo_obj), task.Config(**task_obj))
result_model = convert_conf(repo.Config(**repo_obj), task.Config(**task_obj), root)
result_dict = result_model.model_dump(by_alias=True, exclude_none=True)
with open(result_json_path, "w") as result_file:
with open(result_json_path, "w", encoding=None) as result_file:
jon-lee marked this conversation as resolved

why?

fixed
json.dump(result_dict, result_file, ensure_ascii=False, indent=4)
result_file.write("\n")
# distribution on json
# need a get folder path function
# distribute_json(folder_path, repo_obj)
# distribute_json(folder_path, repo_obj, conf_root)
return result_dict

View File

@ -1,7 +1,7 @@
from datetime import datetime
jon-lee marked this conversation as resolved Outdated

every field in this file should not be optional. we give a default value here if any field does not exist

and use underscore naming in this file

> every field in this file should not be optional. we give a default value here if any field does not exist

fixed

> and use underscore naming in this file

fixed.
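
A minimal sketch of the convention being asked for, using the `result-detail` fields that appear in the TOML in this PR; the alias handling is an assumption rather than the PR's final code:

```python
from pydantic import BaseModel, ConfigDict, Field


class ParserResultDetail(BaseModel):
    model_config = ConfigDict(populate_by_name=True)

    # no Optional fields: each one falls back to a concrete default
    time: bool = True
    mem: bool = True
    stdout: bool = False
    stderr: bool = False
    # underscore naming in Python; the alias keeps the original TOML key working
    exit_status: bool = Field(False, alias="exitstatus")
```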
from typing import Any, Dict, List, Optional, Type
from pydantic import BaseModel, Field, root_validator
from pydantic import BaseModel, Field, model_validator
class ParserResultDetail(BaseModel):
@ -58,8 +58,6 @@ class Limit(BaseModel):
class Stage(BaseModel):
name: Optional[str] = None # Stage name
group: Optional[str] = None # TODO: may need to formulate this
path: Optional[str] = None # FIXME: this is highly possible to be removed in future
env: Optional[list[str]] = None
command: Optional[str] = None # Command to run
files: Optional[Files] = None
@ -79,6 +77,7 @@ class Stage(BaseModel):
)
file: Optional[ParserFile] = ParserFile()
skip: Optional[list[str]] = []
# cases related
cases: Optional[Dict[str, "Stage"]] = {}
diff: Optional[ParserDiff] = ParserDiff()
@ -86,7 +85,9 @@ class Stage(BaseModel):
class Config:
jon-lee marked this conversation as resolved Outdated

deprecated

@bomingzh any suggestions on the structure?
```
PydanticDeprecatedSince20: Support for class-based `config` is deprecated, use ConfigDict instead. Deprecated in Pydantic V2.0 to be removed in V3.0. See Pydantic V2 Migration Guide at https://errors.pydantic.dev/2.9/migration/
```

fixed with `model_config`

`str` here needs to be a `StrEnum` now.

But I guess we don't know the set of cases in advance, so making it a dynamic `StrEnum` is meaningless

line changed, the comment is for `parsers`
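
A minimal sketch of the Pydantic v2 replacement for the class-based `Config` (illustrative fields only, not the full model):

```python
from typing import Dict, Optional

from pydantic import BaseModel, ConfigDict


class Stage(BaseModel):
    # Pydantic v2 style: model_config replaces the deprecated class-based Config
    model_config = ConfigDict(extra="allow")

    name: Optional[str] = None
    cases: Optional[Dict[str, "Stage"]] = {}
```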
extra = "allow"
jon-lee marked this conversation as resolved Outdated

this should be the `StrEnum`

It is supported now.
def gather_cases(self, values: Dict[str, Any]) -> Dict[str, Any]:
@model_validator(mode="before")
@classmethod
def gather_cases(cls: Type["Stage"], values: Dict[str, Any]) -> Dict[str, Any]:
cases = {k: v for k, v in values.items() if k.startswith("case")}
for key in cases:
values.pop(key)

View File

@ -30,7 +30,6 @@ def get_joj1_run_stage(joj1_config: joj1.Config) -> task.Stage:
cases_conf[i].out_ = case.output
run_config = task.Stage(
name="This is the converted joj1 run stage",
group="joj",
parsers=["diff", "result-status"],
score=100,
limit=task.Limit(
@ -45,6 +44,9 @@ def get_joj1_run_stage(joj1_config: joj1.Config) -> task.Stage:
else default_mem
),
),
cases={f"case{i}": cases_conf[i] for i, case in enumerate(joj1_config.cases)},
cases={f"case{i}": cases_conf[i] for i, _ in enumerate(joj1_config.cases)},
)
return run_config
# TODO: get formatted joj1 config, match the criterion in the doc

View File

@ -1,4 +1,6 @@
import re
jon-lee marked this conversation as resolved

Path should not be relative to `JOJ3_CONFIG_ROOT` in this file, should be relative to `task.toml` dir

I reckon you said things are relative to `JOJ3_CONFIG_ROOT` in JTC before. We have a `task.type` in `task.toml` to mend the path

`config.path` is relative to `JOJ3_CONFIG_ROOT`.

could you explain further? I'm not quite sure my understanding is clear.

In `joj3_config_generator/models/task.py`, `Config.path` is relative to `JOJ3_CONFIG_ROOT`, so `task.toml` will be located at `JOJ3_CONFIG_ROOT / task_conf.path` in JTC.
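
A minimal sketch of that path convention (the concrete root value is an assumption):

```python
from pathlib import Path

JOJ3_CONFIG_ROOT = Path("/home/tt/.config/joj")  # assumed value of JOJ3_CONFIG_ROOT
task_conf_path = Path("homework/h7/e2")          # Config.path from task.toml, relative to that root

# per the comment above, JTC resolves the task config under the root
print(JOJ3_CONFIG_ROOT / task_conf_path / "task.toml")
# /home/tt/.config/joj/homework/h7/e2/task.toml
```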
import shlex
jon-lee marked this conversation as resolved

Some `with_.update` is still using raw dict, not model with `model_dump`.
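
A hedged illustration of the pattern being asked for, with a made-up model standing in for the real `with_` payload:

```python
from pydantic import BaseModel


class DiffOutput(BaseModel):  # hypothetical stand-in for a real `with_` payload model
    score: int = 0
    ignorespaces: bool = True


with_: dict = {}
# instead of updating with a hand-written raw dict, build the model and dump it,
# so keys, types, and defaults stay consistent with the schema
with_.update(DiffOutput().model_dump())
print(with_)  # {'score': 0, 'ignorespaces': True}
```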
from pathlib import Path
from typing import List, Tuple
from joj3_config_generator.models import result, task
@ -9,13 +11,12 @@ def get_conf_stage(
) -> result.StageDetail:
conf_stage = result.StageDetail(
name=task_stage.name if task_stage.name is not None else "",
# FIXME: to be deterined the way
# group=(
# re.search(r'\[([^\[\]]+)\]', task_stage.name).group(1)
# if (task_stage.name is not None and re.search(r'\[([^\[\]]+)\]', task_stage.name))
# else ""
# ),
group=(task_stage.group if (task_stage.group is not None) else ""),
# group is determined by adding between "[]" in the name of the task
group=(
match.group(1)
jon-lee marked this conversation as resolved Outdated

BTW, is this outdated?

Never heard about this rule.

@manuel what would be the current intended rule for `group`?

seems current strategy is fine, resolved.
if (match := re.search(r"\[([^\[\]]+)\]", task_stage.name or ""))
else ""
),
executor=result.Executor(
name="sandbox",
with_=executor_with_config,
@ -33,7 +34,7 @@ def get_conf_stage(
jon-lee marked this conversation as resolved Outdated

should loop through `conf_stage.parsers` here and update the `with` field according to the parser name.

I think it's already implemented in each of the `fix_parsers` functions

No, do not find the parser in the `fix_xxx` function. Instead, iterate through the parsers here and decide how to fill in the `with`.

resolved.

Use a dict to store parser name, field, function to process.

    process_dict: Dict[
        str, Tuple[Callable[[result.ParserConfig, BaseModel], None], BaseModel]
    ] = {
        "clangtidy": (fix_keyword, task_stage.clangtidy),
        "keyword": (fix_keyword, task_stage.keyword),
        "diff": (fix_diff, task_stage.diff),
    }
    for i, parser in enumerate(task_stage.parsers):
        if parser in process_dict:
            func, parser_model = process_dict[parser]
            func(conf_stage.parsers[i], parser_model)

resolved.
jon-lee marked this conversation as resolved Outdated

Do we need to support both kinds of names?

probably yes, since it is easy for a new TA to type it wrong

parser names should be a str enum, to force them to use the correct names

ok, then removed.
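
A minimal sketch of such a str enum; the members are taken from parser names that appear in this PR and may not be the full set:

```python
from enum import Enum


class ParserName(str, Enum):
    DIFF = "diff"
    KEYWORD = "keyword"
    CLANGTIDY = "clangtidy"
    CPPCHECK = "cppcheck"
    CPPLINT = "cpplint"
    RESULT_STATUS = "result-status"
    RESULT_DETAIL = "result-detail"
    DUMMY = "dummy"
    FILE = "file"


ParserName("diff")   # ok
# ParserName("dif")  # raises ValueError, so a typo fails fast instead of passing silently
```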
def get_executorWithConfig(
jon-lee marked this conversation as resolved Outdated

underscore

fixed
task_stage: task.Stage, cached: List[str]
task_stage: task.Stage, cached: List[str], conf_root: Path
) -> Tuple[result.ExecutorWith, List[str]]:
file_import = (
task_stage.files.import_
@ -43,7 +44,7 @@ def get_executorWithConfig(
and (task_stage.files.import_ is not None)
else []
)
copy_in_files = [file for file in file_import if (file not in cached)]
copy_in_files = [file for file in file_import if file not in cached]
file_export = (
task_stage.files.export
if hasattr(task_stage, "files")
@ -59,16 +60,10 @@ def get_executorWithConfig(
if task_stage.command is not None
else []
),
# FIXME: remove this trick
copy_in={
jon-lee marked this conversation as resolved Outdated

not necessary

resolved.
("./.clang-tidy" if file.endswith("clang-tidy") else file): (
result.CmdFile(src=f"/home/tt/.config/joj/{file}")
if not file.endswith("main.cpp")
else result.CmdFile(
# src=f"/home/tt/.config/joj/homework/h7/e3/ex3-main.cpp"
src=f"/home/tt/.config/joj/homework/h8/e1/ex1-main.cpp"
)
)
file: result.CmdFile(src=f"{Path.home()}/{conf_root}/tools/{file}")
# all copy-in files are stored in this tools folder
# are there any corner cases
for file in copy_in_files
},
stdin=(
@ -116,7 +111,7 @@ def get_executorWithConfig(
),
),
),
cases=[], # You can add cases if needed
cases=[],
)
if file_export is not None:
for file in file_export:
@ -239,9 +234,11 @@ def fix_file(
return conf_stage
# TODO: add the logic of looping through all the files in the conf-root and generating conf.toml accordingly, while also getting the path of the json file.
def fix_diff(
task_stage: task.Stage, conf_stage: result.StageDetail, task_conf: task.Config
task_stage: task.Stage,
conf_stage: result.StageDetail,
task_conf: task.Config,
conf_root: Path,
) -> result.StageDetail:
if task_stage.parsers is not None and "diff" in task_stage.parsers:
diff_parser = next((p for p in conf_stage.parsers if p.name == "diff"), None)
@ -280,8 +277,7 @@ def fix_diff(
stage_cases.append(
result.OptionalCmd(
stdin=result.CmdFile(
src=f"/home/tt/.config/joj/{task_conf.task.type_}/{stdin}"
# src=f"/home/tt/.config/joj/{task_stage.path}/{stdin}"
src=f"{Path.home()}/{conf_root}/{task_conf.task.type_}/{stdin}"
),
args=(shlex.split(command) if command is not None else None),
cpu_limit=cpu_limit,
@ -304,8 +300,7 @@ def fix_diff(
{
"score": diff_output.score,
"fileName": "stdout",
"answerPath": f"/home/tt/.config/joj/{task_conf.task.type_}/{stdout}",
# "answerPath": f"/home/tt/.config/joj/{task_stage.path}/{stdin}",
"answerPath": f"{Path.home()}/{conf_root}/{task_conf.task.type_}/{stdout}",
"forceQuitOnDiff": diff_output.forcequit,
"alwaysHide": diff_output.hide,
"compareSpace": not diff_output.ignorespaces,

File diff suppressed because it is too large

View File

@ -3,7 +3,6 @@ task.name = "hw7 ex2" # task name
task.type = "homework/h7/e2" # remove this task type later
release.deadline = 2024-12-30 23:59:59+08:00
release.stages = [ "compile" ]
[[stages]]
name = "Compilation"
@ -76,7 +75,6 @@ result-detail.mem = false
[[stages]]
name = "[joj] ex2-asan"
group = "run"
command="./h7/build/ex2-asan -a"
files.import = [ "h7/build/ex2-asan" ]
limit.mem = 91224961
@ -168,7 +166,6 @@ case9.in = "case9.in"
[[stages]]
name = "[joj] ex2-msan"
group = "joj"
command="./h7/build/ex2-msan -a"
files.import = [ "h7/build/ex2-msan" ]
limit.mem = 91224961
@ -352,7 +349,6 @@ case9.in = "case9.in"
[[stages]]
name = "[joj] ex2"
group = "joj"
command="./h7/build/ex2"
files.import = [ "h7/build/ex2" ]
score = 10

View File

@ -2,6 +2,7 @@ import json
import os
from typing import Any, Dict, Tuple
from pathlib import Path
import rtoml
from joj3_config_generator.convert import convert
@ -28,7 +29,7 @@ def read_convert_files(
def load_case(case_name: str) -> None:
repo, task, expected_result = read_convert_files(case_name)
result = convert(repo, task).model_dump(
result = convert(repo, task, conf_root=Path(".")).model_dump(
mode="json", by_alias=True, exclude_none=True
)
assert result == expected_result

View File

@ -1,608 +0,0 @@
[[stages]]
name = "This is the converted joj1 run stage"
group = "joj"
score = 100
parsers = ["diff", "result-status"]
skip = []
[stages.limit]
mem = 32000000
cpu = 1
stderr = 800
stdout = 800
[stages.dummy]
comment = ""
score = 0
forcequit = false
[stages.result-status]
comment = ""
score = 0
forcequit = false
[stages.keyword]
keyword = []
weight = []
[stages.clangtidy]
keyword = []
weight = []
[stages.cppcheck]
keyword = []
weight = []
[stages.cpplint]
keyword = []
weight = []
[stages.result-detail]
time = true
mem = true
stdout = false
stderr = false
exitstatus = false
[stages.file]
[stages.cases.case0]
command = "-abcd --aaaa bbbb"
in = "case0.in"
out = "case0.out"
score = 10
parsers = []
skip = []
[stages.cases.case0.limit]
mem = 32000000
cpu = 1
stderr = 800
stdout = 800
[stages.cases.case0.dummy]
comment = ""
score = 0
forcequit = false
[stages.cases.case0.result-status]
comment = ""
score = 0
forcequit = false
[stages.cases.case0.keyword]
keyword = []
weight = []
[stages.cases.case0.clangtidy]
keyword = []
weight = []
[stages.cases.case0.cppcheck]
keyword = []
weight = []
[stages.cases.case0.cpplint]
keyword = []
weight = []
[stages.cases.case0.result-detail]
time = true
mem = true
stdout = false
stderr = false
exitstatus = false
[stages.cases.case0.file]
[stages.cases.case0.cases]
[stages.cases.case0.diff.output]
score = 0
ignorespaces = true
hide = false
forcequit = false
[stages.cases.case1]
in = "case1.in"
out = "case1.out"
score = 10
parsers = []
skip = []
[stages.cases.case1.limit]
mem = 32000000
cpu = 1
stderr = 800
stdout = 800
[stages.cases.case1.dummy]
comment = ""
score = 0
forcequit = false
[stages.cases.case1.result-status]
comment = ""
score = 0
forcequit = false
[stages.cases.case1.keyword]
keyword = []
weight = []
[stages.cases.case1.clangtidy]
keyword = []
weight = []
[stages.cases.case1.cppcheck]
keyword = []
weight = []
[stages.cases.case1.cpplint]
keyword = []
weight = []
[stages.cases.case1.result-detail]
time = true
mem = true
stdout = false
stderr = false
exitstatus = false
[stages.cases.case1.file]
[stages.cases.case1.cases]
[stages.cases.case1.diff.output]
score = 0
ignorespaces = true
hide = false
forcequit = false
[stages.cases.case2]
in = "case2.in"
out = "case2.out"
score = 10
parsers = []
skip = []
[stages.cases.case2.limit]
mem = 32000000
cpu = 1
stderr = 800
stdout = 800
[stages.cases.case2.dummy]
comment = ""
score = 0
forcequit = false
[stages.cases.case2.result-status]
comment = ""
score = 0
forcequit = false
[stages.cases.case2.keyword]
keyword = []
weight = []
[stages.cases.case2.clangtidy]
keyword = []
weight = []
[stages.cases.case2.cppcheck]
keyword = []
weight = []
[stages.cases.case2.cpplint]
keyword = []
weight = []
[stages.cases.case2.result-detail]
time = true
mem = true
stdout = false
stderr = false
exitstatus = false
[stages.cases.case2.file]
[stages.cases.case2.cases]
[stages.cases.case2.diff.output]
score = 0
ignorespaces = true
hide = false
forcequit = false
[stages.cases.case3]
in = "case3.in"
out = "case3.out"
score = 10
parsers = []
skip = []
[stages.cases.case3.limit]
mem = 32000000
cpu = 1
stderr = 800
stdout = 800
[stages.cases.case3.dummy]
comment = ""
score = 0
forcequit = false
[stages.cases.case3.result-status]
comment = ""
score = 0
forcequit = false
[stages.cases.case3.keyword]
keyword = []
weight = []
[stages.cases.case3.clangtidy]
keyword = []
weight = []
[stages.cases.case3.cppcheck]
keyword = []
weight = []
[stages.cases.case3.cpplint]
keyword = []
weight = []
[stages.cases.case3.result-detail]
time = true
mem = true
stdout = false
stderr = false
exitstatus = false
[stages.cases.case3.file]
[stages.cases.case3.cases]
[stages.cases.case3.diff.output]
score = 0
ignorespaces = true
hide = false
forcequit = false
[stages.cases.case4]
in = "case4.in"
out = "case4.out"
score = 10
parsers = []
skip = []
[stages.cases.case4.limit]
mem = 32000000
cpu = 1
stderr = 800
stdout = 800
[stages.cases.case4.dummy]
comment = ""
score = 0
forcequit = false
[stages.cases.case4.result-status]
comment = ""
score = 0
forcequit = false
[stages.cases.case4.keyword]
keyword = []
weight = []
[stages.cases.case4.clangtidy]
keyword = []
weight = []
[stages.cases.case4.cppcheck]
keyword = []
weight = []
[stages.cases.case4.cpplint]
keyword = []
weight = []
[stages.cases.case4.result-detail]
time = true
mem = true
stdout = false
stderr = false
exitstatus = false
[stages.cases.case4.file]
[stages.cases.case4.cases]
[stages.cases.case4.diff.output]
score = 0
ignorespaces = true
hide = false
forcequit = false
[stages.cases.case5]
in = "case5.in"
out = "case5.out"
score = 10
parsers = []
skip = []
[stages.cases.case5.limit]
mem = 32000000
cpu = 1
stderr = 800
stdout = 800
[stages.cases.case5.dummy]
comment = ""
score = 0
forcequit = false
[stages.cases.case5.result-status]
comment = ""
score = 0
forcequit = false
[stages.cases.case5.keyword]
keyword = []
weight = []
[stages.cases.case5.clangtidy]
keyword = []
weight = []
[stages.cases.case5.cppcheck]
keyword = []
weight = []
[stages.cases.case5.cpplint]
keyword = []
weight = []
[stages.cases.case5.result-detail]
time = true
mem = true
stdout = false
stderr = false
exitstatus = false
[stages.cases.case5.file]
[stages.cases.case5.cases]
[stages.cases.case5.diff.output]
score = 0
ignorespaces = true
hide = false
forcequit = false
[stages.cases.case6]
in = "case6.in"
out = "case6.out"
score = 10
parsers = []
skip = []
[stages.cases.case6.limit]
mem = 32000000
cpu = 1
stderr = 800
stdout = 800
[stages.cases.case6.dummy]
comment = ""
score = 0
forcequit = false
[stages.cases.case6.result-status]
comment = ""
score = 0
forcequit = false
[stages.cases.case6.keyword]
keyword = []
weight = []
[stages.cases.case6.clangtidy]
keyword = []
weight = []
[stages.cases.case6.cppcheck]
keyword = []
weight = []
[stages.cases.case6.cpplint]
keyword = []
weight = []
[stages.cases.case6.result-detail]
time = true
mem = true
stdout = false
stderr = false
exitstatus = false
[stages.cases.case6.file]
[stages.cases.case6.cases]
[stages.cases.case6.diff.output]
score = 0
ignorespaces = true
hide = false
forcequit = false
[stages.cases.case7]
in = "case7.in"
out = "case7.out"
score = 10
parsers = []
skip = []
[stages.cases.case7.limit]
mem = 32000000
cpu = 1
stderr = 800
stdout = 800
[stages.cases.case7.dummy]
comment = ""
score = 0
forcequit = false
[stages.cases.case7.result-status]
comment = ""
score = 0
forcequit = false
[stages.cases.case7.keyword]
keyword = []
weight = []
[stages.cases.case7.clangtidy]
keyword = []
weight = []
[stages.cases.case7.cppcheck]
keyword = []
weight = []
[stages.cases.case7.cpplint]
keyword = []
weight = []
[stages.cases.case7.result-detail]
time = true
mem = true
stdout = false
stderr = false
exitstatus = false
[stages.cases.case7.file]
[stages.cases.case7.cases]
[stages.cases.case7.diff.output]
score = 0
ignorespaces = true
hide = false
forcequit = false
[stages.cases.case8]
in = "case8.in"
out = "case8.out"
score = 10
parsers = []
skip = []
[stages.cases.case8.limit]
mem = 32000000
cpu = 1
stderr = 800
stdout = 800
[stages.cases.case8.dummy]
comment = ""
score = 0
forcequit = false
[stages.cases.case8.result-status]
comment = ""
score = 0
forcequit = false
[stages.cases.case8.keyword]
keyword = []
weight = []
[stages.cases.case8.clangtidy]
keyword = []
weight = []
[stages.cases.case8.cppcheck]
keyword = []
weight = []
[stages.cases.case8.cpplint]
keyword = []
weight = []
[stages.cases.case8.result-detail]
time = true
mem = true
stdout = false
stderr = false
exitstatus = false
[stages.cases.case8.file]
[stages.cases.case8.cases]
[stages.cases.case8.diff.output]
score = 0
ignorespaces = true
hide = false
forcequit = false
[stages.cases.case9]
in = "case9.in"
out = "case9.out"
score = 10
parsers = []
skip = []
[stages.cases.case9.limit]
mem = 32000000
cpu = 1
stderr = 800
stdout = 800
[stages.cases.case9.dummy]
comment = ""
score = 0
forcequit = false
[stages.cases.case9.result-status]
comment = ""
score = 0
forcequit = false
[stages.cases.case9.keyword]
keyword = []
weight = []
[stages.cases.case9.clangtidy]
keyword = []
weight = []
[stages.cases.case9.cppcheck]
keyword = []
weight = []
[stages.cases.case9.cpplint]
keyword = []
weight = []
[stages.cases.case9.result-detail]
time = true
mem = true
stdout = false
stderr = false
exitstatus = false
[stages.cases.case9.file]
[stages.cases.case9.cases]
[stages.cases.case9.diff.output]
score = 0
ignorespaces = true
hide = false
forcequit = false
[stages.diff.output]
score = 0
ignorespaces = true
hide = false
forcequit = false
[task]
type = ""
name = "Blank Task"
[release]