feat: basic project layout
parent 27952126e4
commit ba19112ed4
.gitignore (vendored) | 1
@@ -159,4 +159,3 @@ cython_debug/
 # and can be added to the global gitignore or merged into this file. For a more nuclear
 # option (not recommended) you can uncomment the following to ignore the entire idea folder.
 #.idea/
-
.pre-commit-config.yaml (new file) | 47
@@ -0,0 +1,47 @@
+---
+repos:
+  - repo: https://github.com/pre-commit/pre-commit-hooks
+    rev: v4.4.0
+    hooks:
+      - id: check-yaml
+      - id: check-toml
+      - id: end-of-file-fixer
+      - id: trailing-whitespace
+      - id: requirements-txt-fixer
+  - repo: https://github.com/jumanjihouse/pre-commit-hook-yamlfmt
+    rev: 0.2.3
+    hooks:
+      - id: yamlfmt
+  - repo: https://github.com/pre-commit/mirrors-mypy
+    rev: v1.11.2
+    hooks:
+      - id: mypy
+        additional_dependencies:
+          - pydantic
+          - types-PyYAML
+  - repo: https://github.com/asottile/pyupgrade
+    rev: v3.17.0
+    hooks:
+      - id: pyupgrade
+  - repo: https://github.com/hadialqattan/pycln
+    rev: v2.4.0
+    hooks:
+      - id: pycln
+  - repo: https://github.com/PyCQA/bandit
+    rev: 1.7.10
+    hooks:
+      - id: bandit
+  - repo: https://github.com/PyCQA/isort
+    rev: 5.13.2
+    hooks:
+      - id: isort
+        args: [--profile, black, --filter-files]
+  - repo: https://github.com/psf/black
+    rev: 24.8.0
+    hooks:
+      - id: black
+  - repo: https://github.com/Lucas-C/pre-commit-hooks
+    rev: v1.5.5
+    hooks:
+      - id: remove-crlf
+      - id: remove-tabs
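These hooks only take effect once pre-commit is installed into the repository's git hooks. A minimal sketch of how the configuration above would typically be exercised locally, using standard pre-commit CLI commands (nothing project-specific assumed):

```bash
# install the git hook scripts so the hooks run on every commit
pre-commit install

# run all configured hooks against the entire tree, not just staged files
pre-commit run --all-files

# later, bump each hook's rev to its latest tag
pre-commit autoupdate
```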
README.md | 18
@@ -1,2 +1,18 @@
-# joj-config-generator
+# JOJ3-config-generator
 
+## Getting Started
+
+### Setup venv (Optional)
+
+```bash
+python3 -m venv env # you only need to do that once
+# each time when you need this venv, if on Linux / macOS use
+source env/bin/activate
+```
+
+### For developers
+
+```bash
+pip3 install ".[dev]"
+pre-commit install
+```
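As a quick sanity check after the developer install, something like the following should work inside the activated venv; it only assumes that PyYAML came in through the runtime dependencies and that pre-commit and black came in through the dev extra:

```bash
python3 -c "import yaml; print(yaml.__version__)"   # runtime dependency
pre-commit --version                                 # pulled in by the dev extra
black --version                                      # likewise
```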
(deleted file)
@@ -1,13 +0,0 @@
-import os
-import json
-
-class compiled:
-    def __init__(self, name, path, args):
-        self.name = name
-        self.path = path
-        self.args = args
-
-class define_compiled:
-    def get_compiled():
-        for i in os.listdir(os.path.dirname(__file__) + "/compiled"):
-            pass
(deleted file)
@@ -1,175 +0,0 @@
-import yaml
-import json
-import os
-
-
-def matlab_json_init():
-    output_json = {
-        "sandboxExecServer": "172.17.0.1:5051",
-        "outputPath": "/tmp/joj3_result.json",
-        "stages": []
-    }
-    healthcheck_json = {
-        "name": "healthcheck",
-        "executor": {
-            "name": "sandbox",
-            "with": {
-                "default": {
-                    "args": [
-                        "./healthcheck",
-                        "-root=.",
-                        "-meta=readme",
-                        "-whitelist=stderr",
-                        "-whitelist=stdout",
-                        "-whitelist=.*\\.toml",
-                        "-whitelist=.*\\.md",
-                        "-whitelist=healthcheck",
-                        "-whitelist=.*\\.json",
-                        "-whitelist=.git.*"
-                    ],
-                    "env": [
-                        "PATH=/usr/bin:/bin"
-                    ],
-                    "cpuLimit": 10000000000,
-                    "memoryLimit": 104857600,
-                    "procLimit": 50,
-                    "copyInDir": ".",
-                    "copyIn": {
-                        "healthcheck": {
-                            "src": "./../../../../../../build/healthcheck",
-                            "copyOut": [
-                                "stdout",
-                                "stderr"
-                            ]
-                        }
-                    },
-                    "stdin": {
-                        "content": ""
-                    },
-                    "stdout": {
-                        "name": "stdout",
-                        "max": 4096
-                    },
-                    "stderr": {
-                        "name": "stderr",
-                        "max": 4096
-                    }
-                }
-            }
-        },
-        "parser": {
-            "name": "healthcheck",
-            "with": {
-                "score": 10,
-                "comment": " + comment from json conf"
-            }
-        }
-    }
-    run_json = {
-        "name": "run",
-        "executor": {
-            "name": "sandbox",
-            "with": {
-                "default": {
-                    "args": [
-                        ""
-                    ],
-                    "env": [
-                        "PATH=/usr/bin:/bin"
-                    ],
-                    "cpuLimit": 20000000000,
-                    "memoryLimit": 104857600,
-                    "clockLimit": 40000000000,
-                    "procLimit": 50,
-                    "copyOut": [
-                        "stdout",
-                        "stderr"
-                    ],
-                    "stdout": {
-                        "name": "stdout",
-                        "max": 4096
-                    },
-                    "stderr": {
-                        "name": "stderr",
-                        "max": 4096
-                    },
-                    # matlab don't need this
-                    # "copyInCached": {
-                    #     "a": "a"
-                    # }
-                },
-                "cases": []
-            }
-        },
-        "parser": {
-            "name": "diff",
-            "with": {
-                "cases": []
-            }
-        }
-    }
-    output_json["stages"].append(healthcheck_json)
-    output_json["stages"].append(run_json)
-    return output_json
-
-def get_cases(output_json, yaml_data):
-    for case in yaml_data['cases']:
-        print(yaml_data['cases'])
-        input_entry = {
-            "stdin":{
-                "src": case["input"]
-            }
-        }
-        output_entry = {
-            "outputs": {
-                "score": 100,
-                "fileName": "stdout",
-                "answerPath": case["output"]
-            }
-        }
-        output_json["stages"][1]["executor"]["with"]["cases"].append(input_entry)
-        output_json["stages"][1]["parser"]["with"]["cases"].append(output_entry)
-    return output_json
-
-
-# Function to merge YAML content into the JSON structure
-def yaml_to_custom_json(yaml_file, json_file):
-    # Load YAML data from the input file
-    with open(yaml_file, 'r') as f:
-        yaml_data = yaml.safe_load(f)
-
-    # Define the base JSON structure as per your example
-    output_json = matlab_json_init()
-
-    # memory limit migration
-    memory_str = yaml_data['default']['memory']
-    memory_limit = int(memory_str[:-1]) * 1024 * 1024
-    output_json["stages"][0]["executor"]["with"]["default"]["memoryLimit"] = memory_limit
-
-    # time limit migration
-    time_str = yaml_data['default']['time']
-    cpu_limit = int(time_str[:-1]) * 1000000000
-    clock_limit = 2 * cpu_limit
-    output_json['stages'][1]["executor"]["with"]["default"]["cpuLimit"] = cpu_limit
-    output_json["stages"][1]["executor"]["with"]["default"]["clockLimit"] = clock_limit
-
-    # test cases migration
-    # testcases input migration
-    # # testcases output migration
-    output_json = get_cases(output_json, yaml_data)
-
-    # execution args migration
-    args = "octave " + assignment_name + ".m"
-    output_json["stages"][1]["executor"]["with"]["default"]["args"] = args.split()
-
-    # Write the output JSON to the specified file
-    with open(json_file, 'w') as f:
-        json.dump(output_json, f, indent=2)
-
-# i/p of files
-yaml_file = './ex4/config.yaml'
-json_file = './output.json'
-assignment_name = "ex4"
-yaml_to_custom_json(yaml_file, json_file)
-
-print(f"YAML content has been successfully converted to JSON and saved to {json_file}")
main.py | 5
@@ -1,7 +1,6 @@
-import os
-def main():
+def main() -> None:
     print("test")


 if __name__ == "__main__":
     main()
mypy.ini (new executable file) | 18
@@ -0,0 +1,18 @@
+[mypy]
+plugins = pydantic.mypy
+
+follow_imports = silent
+warn_redundant_casts = True
+warn_unused_ignores = True
+disallow_any_generics = True
+check_untyped_defs = True
+no_implicit_reexport = True
+
+# for strict mypy: (this is the tricky one :-))
+disallow_untyped_defs = True
+
+[pydantic-mypy]
+init_forbid_extra = True
+init_typed = True
+warn_required_dynamic_aliases = True
+warn_untyped_fields = True
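For context, a rough sketch of the kind of code these settings police; the `User` model and `greet` function below are invented for illustration and are not part of the repository:

```python
from pydantic import BaseModel


class User(BaseModel):
    name: str
    age: int


def greet(user):  # flagged by disallow_untyped_defs: parameter and return are unannotated
    return "hi " + user.name


# With the pydantic.mypy plugin, init_typed = True makes
#   User(name="a", age="not an int")
# an arg-type error, and init_forbid_extra = True rejects unexpected
# keywords such as User(name="a", nickname="b") at type-check time.
```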
pyproject.toml (new file) | 16
@@ -0,0 +1,16 @@
+[build-system]
+requires = ["setuptools >= 61.0"]
+build-backend = "setuptools.build_meta"
+
+[project]
+name = "joj3-config-generator"
+version = "0.1.0"
+dependencies = ["PyYAML>=6.0.2"]
+requires-python = ">=3.10"
+authors = []
+maintainers = []
+description = "config generator for joj3"
+readme = "README.md"
+
+[project.optional-dependencies]
+dev = ["pre-commit>=3.7.1", "black>=24.4.2"]
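With this metadata in place, the package builds and installs with stock tooling; a sketch, where only the last line assumes the separate `build` frontend is available:

```bash
pip3 install .           # runtime install, pulls in PyYAML only
pip3 install ".[dev]"    # adds pre-commit and black, as the README describes
python3 -m build         # optional: produce sdist + wheel via the setuptools backend
```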
src/language_specific/declare_compilers.py (new file) | 14
@@ -0,0 +1,14 @@
+import os
+
+
+class compiled:
+    def __init__(self, name: str, path: str, args: list[str]) -> None:
+        self.name = name
+        self.path = path
+        self.args = args
+
+
+class define_compiled:
+    def get_compiled(self) -> None:
+        for i in os.listdir(os.path.dirname(__file__) + "/compiled"):
+            pass
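The module is still a stub, and nothing in this commit shows how it is meant to be consumed. A hypothetical usage sketch of the `compiled` container; the import path and the gcc values are invented for illustration:

```python
from src.language_specific.declare_compilers import compiled

# plain data holder: compiler name, path to the binary, default argument list
gcc = compiled("gcc", "/usr/bin/gcc", ["-Wall", "-O2"])
print(gcc.name, gcc.path, gcc.args)

# define_compiled.get_compiled only iterates a "compiled/" directory next to
# the module and currently does nothing with the entries.
```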
src/language_specific/matlab_defs.py (new file) | 142
@@ -0,0 +1,142 @@
+import json
+from typing import Any
+
+import yaml
+
+
+def matlab_json_init() -> dict[str, Any]:
+    output_json: dict[str, Any] = {
+        "sandboxExecServer": "172.17.0.1:5051",
+        "outputPath": "/tmp/joj3_result.json",  # nosec
+        "stages": [],
+    }
+    healthcheck_json = {
+        "name": "healthcheck",
+        "executor": {
+            "name": "sandbox",
+            "with": {
+                "default": {
+                    "args": [
+                        "./healthcheck",
+                        "-root=.",
+                        "-meta=readme",
+                        "-whitelist=stderr",
+                        "-whitelist=stdout",
+                        "-whitelist=.*\\.toml",
+                        "-whitelist=.*\\.md",
+                        "-whitelist=healthcheck",
+                        "-whitelist=.*\\.json",
+                        "-whitelist=.git.*",
+                    ],
+                    "env": ["PATH=/usr/bin:/bin"],
+                    "cpuLimit": 10000000000,
+                    "memoryLimit": 104857600,
+                    "procLimit": 50,
+                    "copyInDir": ".",
+                    "copyIn": {
+                        "healthcheck": {
+                            "src": "./../../../../../../build/healthcheck",
+                            "copyOut": ["stdout", "stderr"],
+                        }
+                    },
+                    "stdin": {"content": ""},
+                    "stdout": {"name": "stdout", "max": 4096},
+                    "stderr": {"name": "stderr", "max": 4096},
+                }
+            },
+        },
+        "parser": {
+            "name": "healthcheck",
+            "with": {"score": 10, "comment": " + comment from json conf"},
+        },
+    }
+    run_json = {
+        "name": "run",
+        "executor": {
+            "name": "sandbox",
+            "with": {
+                "default": {
+                    "args": [""],
+                    "env": ["PATH=/usr/bin:/bin"],
+                    "cpuLimit": 20000000000,
+                    "memoryLimit": 104857600,
+                    "clockLimit": 40000000000,
+                    "procLimit": 50,
+                    "copyOut": ["stdout", "stderr"],
+                    "stdout": {"name": "stdout", "max": 4096},
+                    "stderr": {"name": "stderr", "max": 4096},
+                    # matlab don't need this
+                    # "copyInCached": {
+                    #     "a": "a"
+                    # }
+                },
+                "cases": [],
+            },
+        },
+        "parser": {"name": "diff", "with": {"cases": []}},
+    }
+    output_json["stages"].append(healthcheck_json)
+    output_json["stages"].append(run_json)
+    return output_json
+
+
+def get_cases(output_json: dict[str, Any], yaml_data: dict[str, Any]) -> dict[str, Any]:
+    for case in yaml_data["cases"]:
+        print(yaml_data["cases"])
+        input_entry = {"stdin": {"src": case["input"]}}
+        output_entry = {
+            "outputs": {
+                "score": 100,
+                "fileName": "stdout",
+                "answerPath": case["output"],
+            }
+        }
+        output_json["stages"][1]["executor"]["with"]["cases"].append(input_entry)
+        output_json["stages"][1]["parser"]["with"]["cases"].append(output_entry)
+    return output_json
+
+
+# Function to merge YAML content into the JSON structure
+def yaml_to_custom_json(yaml_file: str, json_file: str) -> None:
+    # Load YAML data from the input file
+    with open(yaml_file) as f:
+        yaml_data = yaml.safe_load(f)
+
+    # Define the base JSON structure as per your example
+    output_json = matlab_json_init()
+
+    # memory limit migration
+    memory_str = yaml_data["default"]["memory"]
+    memory_limit = int(memory_str[:-1]) * 1024 * 1024
+    output_json["stages"][0]["executor"]["with"]["default"][
+        "memoryLimit"
+    ] = memory_limit
+
+    # time limit migration
+    time_str = yaml_data["default"]["time"]
+    cpu_limit = int(time_str[:-1]) * 1000000000
+    clock_limit = 2 * cpu_limit
+    output_json["stages"][1]["executor"]["with"]["default"]["cpuLimit"] = cpu_limit
+    output_json["stages"][1]["executor"]["with"]["default"]["clockLimit"] = clock_limit
+
+    # test cases migration
+    # testcases input migration
+    # # testcases output migration
+    output_json = get_cases(output_json, yaml_data)
+
+    # execution args migration
+    args = "octave " + assignment_name + ".m"
+    output_json["stages"][1]["executor"]["with"]["default"]["args"] = args.split()
+
+    # Write the output JSON to the specified file
+    with open(json_file, "w") as f:
+        json.dump(output_json, f, indent=2)
+
+
+# i/p of files
+yaml_file = "./ex4/config.yaml"
+json_file = "./output.json"
+assignment_name = "ex4"
+yaml_to_custom_json(yaml_file, json_file)
+
+print(f"YAML content has been successfully converted to JSON and saved to {json_file}")
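Reading `yaml_to_custom_json` backwards, the YAML it expects has a `default` block whose `memory` and `time` values lose their last character before conversion, and a `cases` list of input/output file paths. A guessed example of such a `config.yaml`; the concrete values are illustrative, not taken from the repository:

```yaml
default:
  memory: 256m   # last char stripped: 256 * 1024 * 1024 bytes -> memoryLimit
  time: 10s      # last char stripped: 10 * 1000000000 ns -> cpuLimit, clockLimit = 2x
cases:
  - input: ./ex4/case1.in    # becomes the sandbox stdin src for the run stage
    output: ./ex4/case1.out  # becomes answerPath for the diff parser
  - input: ./ex4/case2.in
    output: ./ex4/case2.out
```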