integrate test
test/ctest_config (new file, 13 lines)
@@ -0,0 +1,13 @@
add_test(NAME basic_1 COMMAND ${PROJECT_SOURCE_DIR}/test/ojtest.py basic_1 --skip-check --ignore-first-dependency)
add_test(NAME basic_2 COMMAND ${PROJECT_SOURCE_DIR}/test/ojtest.py basic_2 --skip-check --ignore-first-dependency)
add_test(NAME basic_3 COMMAND ${PROJECT_SOURCE_DIR}/test/ojtest.py basic_3 --skip-check --ignore-first-dependency)
add_test(NAME basic_4 COMMAND ${PROJECT_SOURCE_DIR}/test/ojtest.py basic_4 --skip-check --ignore-first-dependency)
add_test(NAME basic_5 COMMAND ${PROJECT_SOURCE_DIR}/test/ojtest.py basic_5 --skip-check --ignore-first-dependency)
add_test(NAME basic_6 COMMAND ${PROJECT_SOURCE_DIR}/test/ojtest.py basic_6 --skip-check --ignore-first-dependency)
add_test(NAME basic_extra COMMAND ${PROJECT_SOURCE_DIR}/test/ojtest.py basic_extra --skip-check --ignore-first-dependency)
add_test(NAME pressure_1_easy COMMAND ${PROJECT_SOURCE_DIR}/test/ojtest.py pressure_1_easy --skip-check --ignore-first-dependency)
add_test(NAME pressure_2_easy COMMAND ${PROJECT_SOURCE_DIR}/test/ojtest.py pressure_2_easy --skip-check --ignore-first-dependency)
add_test(NAME pressure_3_easy COMMAND ${PROJECT_SOURCE_DIR}/test/ojtest.py pressure_3_easy --skip-check --ignore-first-dependency)
add_test(NAME pressure_1_hard COMMAND ${PROJECT_SOURCE_DIR}/test/ojtest.py pressure_1_hard --skip-check --ignore-first-dependency)
add_test(NAME pressure_2_hard COMMAND ${PROJECT_SOURCE_DIR}/test/ojtest.py pressure_2_hard --skip-check --ignore-first-dependency)
add_test(NAME pressure_3_hard COMMAND ${PROJECT_SOURCE_DIR}/test/ojtest.py pressure_3_hard --skip-check --ignore-first-dependency)
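These entries only take effect if the top-level CMakeLists.txt enables testing and pulls this file in; that wiring is not part of this commit, so the excerpt below is a hypothetical sketch of how it might look.

# CMakeLists.txt (hypothetical excerpt; not included in this commit)
enable_testing()
# register the add_test() entries from test/ctest_config with CTest
include(${PROJECT_SOURCE_DIR}/test/ctest_config)

With that in place, ctest runs every group and ctest -R basic_1 runs a single one; each entry invokes test/ojtest.py (below) with --skip-check and --ignore-first-dependency.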
test/ojtest.py (124 lines)
@@ -17,4 +17,126 @@ import threading
script_dir = os.path.dirname(os.path.realpath(__file__))
print("script_dir: " + script_dir)
path_to_exec_file = os.path.join(script_dir, "../build/code")
print("path_to_exec_file: " + path_to_exec_file)
data_dir = script_dir + "/data/ojdata/"
print("directory of data:", data_dir)
config_file_path = data_dir + "config.json"
print("config_file_path: ", config_file_path)

with open(config_file_path, 'r') as f:
    test_config = json.load(f)

# print(test_config)  # print the loaded config to verify

argc = len(sys.argv)
argv = sys.argv

print("argc: ", argc)
print("argv: ", argv)
if argc == 1:
    print("preparing to run all testgroups...")
elif argc >= 2:
    print("preparing to run testgroup: ", argv[1])

skip_check = False
ignore_first_dependency = False
for i in range(argc):
    if argv[i] == "--skip-check":
        skip_check = True
    if argv[i] == "--ignore-first-dependency":
        ignore_first_dependency = True

if not skip_check:
    command = 'cat ticket.sum | sha256sum -c'
    print("checking the sha256sum of testpoint...")
    process = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=data_dir)
    # Check the exit status of the command
    if process.returncode == 0:
        print("The test data is correct!")
    else:
        print(f"Command failed with exit code {process.returncode}")
        print(process.stderr.decode('utf-8'))  # print the error message

def FetchTestPointInfo(id):
    for it in test_config["Details"]:
        if it["ID"] == id:
            return it

passed_test = {0: True}
has_unpassed_test = False

def RunTestGroup(name):
    global passed_test
    global has_unpassed_test
    print("preparing to run testgroup: ", name)
    test_point_list = []
    for it in test_config["Groups"]:
        if it["GroupName"] == name:
            test_point_list = it["TestPoints"]
    print("test_point_list: ", test_point_list)
    playground_dir = "/tmp/" + name
    # remove directory /tmp/$name
    shutil.rmtree(playground_dir, ignore_errors=True)
    # create directory /tmp/$name
    os.makedirs(playground_dir)
    for test_point_id in test_point_list:
        test_info = FetchTestPointInfo(test_point_id)
        print("test info of test point ", test_point_id, ": ", test_info)
        disk_limit = test_info["DiskLimit"]
        dependency = test_info["Dependency"]
        if passed_test.get(dependency, False) == False and not (ignore_first_dependency and test_point_id == test_point_list[0]):
            print(f'dependency {dependency} not met, skip this test point')
            passed_test[test_point_id] = False
            has_unpassed_test = True
            continue
        if disk_limit < 0:
            disk_limit = -disk_limit
            # remove directory /tmp/$name
            shutil.rmtree(playground_dir, ignore_errors=True)
            # create directory /tmp/$name
            os.makedirs(playground_dir)
        time_limit = test_info["TimeLimit"]
        file_number_limit = test_info["FileNumberLimit"]
        memory_limit = test_info["MemoryLimit"]
        input_file = data_dir + str(test_point_id) + ".in"
        output_file = playground_dir + "/" + str(test_point_id) + ".out"
        answer_file = data_dir + str(test_point_id) + ".out"
        stderr_file = playground_dir + "/" + str(test_point_id) + ".err"
        diff_file = playground_dir + "/" + str(test_point_id) + ".diff"
        time_limit = int(time_limit / 1000)  # convert to seconds
        memory_limit = int(memory_limit / 1024)  # convert to KB
        disk_limit = int(disk_limit / 512)  # convert to 512-byte blocks
        print("input_file {}, output_file {}, answer_file {}".format(input_file, output_file, answer_file))
        print("time limit {}, disk limit {}, file number limit {}".format(time_limit, disk_limit, file_number_limit))
        # run path_to_exec_file on input_file/output_file with cwd=playground_dir
        command = f'ulimit -t {time_limit} && ulimit -m {memory_limit} && ulimit -f {disk_limit} && ulimit -n {file_number_limit} && {path_to_exec_file} < {input_file} > {output_file} 2> {stderr_file}'
        print("the test command is: ", command)
        process = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=playground_dir)
        # Check the exit status of the command
        if process.returncode == 0:
            print("Test point ", test_point_id, " successfully run!")
            # run diff command to check the output
            command = f'diff {output_file} {answer_file} > {diff_file}'
            process = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=playground_dir)
            if process.returncode == 0:
                print("Test point ", test_point_id, " passed!")
                passed_test[test_point_id] = True
            else:
                print("Test point ", test_point_id, " failed!")
                print(process.stderr.decode('utf-8'))  # print the error message
                has_unpassed_test = True
                passed_test[test_point_id] = False
        else:
            print("Test point ", test_point_id, " failed to run!")
            print(process.stderr.decode('utf-8'))  # print the error message
            has_unpassed_test = True
            passed_test[test_point_id] = False

if argc >= 2:
    RunTestGroup(argv[1])
else:
    for it in test_config["Groups"]:
        RunTestGroup(it["GroupName"])

if has_unpassed_test:
    print("Some tests failed!")
    exit(1)
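The script assumes data/ojdata/config.json provides a "Groups" list (GroupName and TestPoints) plus a "Details" list keyed by ID, where each entry carries TimeLimit (milliseconds), MemoryLimit (bytes), DiskLimit (bytes, a negative value apparently requesting a fresh playground directory), FileNumberLimit, and Dependency (the ID of the test point that must pass first, 0 for none). Below is a minimal sketch of that shape; the IDs, limits, and group layout are illustrative, not taken from the real data set.

# build_example_config.py -- hypothetical illustration of the config.json schema ojtest.py reads
import json

example_config = {
    "Groups": [
        {"GroupName": "basic_1", "TestPoints": [1, 2]},
    ],
    "Details": [
        {"ID": 1, "TimeLimit": 2000,            # ms; ojtest.py divides by 1000 for ulimit -t
         "MemoryLimit": 256 * 1024 * 1024,      # bytes; divided by 1024 for ulimit -m
         "DiskLimit": 64 * 1024 * 1024,         # bytes; divided by 512 for ulimit -f
         "FileNumberLimit": 64,                 # passed straight to ulimit -n
         "Dependency": 0},                      # 0 = no prerequisite test point
        {"ID": 2, "TimeLimit": 2000, "MemoryLimit": 256 * 1024 * 1024,
         "DiskLimit": -64 * 1024 * 1024,        # negative: wipe /tmp/<group> before running
         "FileNumberLimit": 64, "Dependency": 1},
    ],
}

print(json.dumps(example_config, indent=2))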