This commit is contained in:
QuanyiLi
2023-05-08 17:30:09 +01:00
parent 500f69f3fb
commit 25e911f22d
2 changed files with 4 additions and 2 deletions

View File

@@ -10,6 +10,7 @@ from scenarionet.verifier.utils import verify_loading_into_metadrive
 def test_combine_multiple_dataset():
     dataset_name = "nuscenes"
     original_dataset_path = os.path.join(SCENARIONET_PACKAGE_PATH, "tests", "test_dataset", dataset_name)
+    test_dataset_path = os.path.join(SCENARIONET_PACKAGE_PATH, "tests", "test_dataset")
     dataset_paths = [original_dataset_path + "_{}".format(i) for i in range(5)]
     output_path = os.path.join(SCENARIONET_PACKAGE_PATH, "tests", "tmp", "combine")
@@ -20,7 +21,7 @@ def test_combine_multiple_dataset():
     for scenario_file in sorted_scenarios:
         read_scenario(dataset_path, mapping, scenario_file)
     success, result = verify_loading_into_metadrive(
-        dataset_path, result_save_dir="test_dataset", steps_to_run=1000, num_workers=4
+        dataset_path, result_save_dir=test_dataset_path, steps_to_run=1000, num_workers=4
     )
     assert success

View File

@@ -16,6 +16,7 @@ def test_generate_from_error():
     set_random_drop(True)
     dataset_name = "nuscenes"
     original_dataset_path = os.path.join(SCENARIONET_PACKAGE_PATH, "tests", "test_dataset", dataset_name)
+    test_dataset_path = os.path.join(SCENARIONET_PACKAGE_PATH, "tests", "test_dataset")
     dataset_paths = [original_dataset_path + "_{}".format(i) for i in range(5)]
     dataset_path = os.path.join(SCENARIONET_PACKAGE_PATH, "tests", "tmp", "combine")
     combine_multiple_dataset(dataset_path, *dataset_paths, force_overwrite=True, try_generate_missing_file=True)
@@ -24,7 +25,7 @@ def test_generate_from_error():
     for scenario_file in sorted_scenarios:
         read_scenario(dataset_path, mapping, scenario_file)
     success, logs = verify_loading_into_metadrive(
-        dataset_path, result_save_dir="test_dataset", steps_to_run=1000, num_workers=3
+        dataset_path, result_save_dir=test_dataset_path, steps_to_run=1000, num_workers=3
     )
     set_random_drop(False)
     # get error file