Multi-processing test

This commit is contained in:
QuanyiLi
2023-05-07 23:14:01 +01:00
parent 5c1b2e053b
commit 97a4e61f38
4 changed files with 24 additions and 21 deletions

View File

@@ -38,22 +38,23 @@ if __name__ == '__main__':
}
)
success = []
env.reset(force_seed=2)
while True:
env.reset(force_seed=2)
for t in range(10000):
o, r, d, info = env.step([0, 0])
assert env.observation_space.contains(o)
c_lane = env.vehicle.lane
long, lat, = c_lane.local_coordinates(env.vehicle.position)
# if env.config["use_render"]:
env.render(
text={
"seed": env.engine.global_seed + env.config["start_scenario_index"],
}
)
for seed in [91]:
env.reset(force_seed=seed)
for t in range(10000):
o, r, d, info = env.step([0, 0])
assert env.observation_space.contains(o)
c_lane = env.vehicle.lane
long, lat, = c_lane.local_coordinates(env.vehicle.position)
# if env.config["use_render"]:
env.render(
text={
"seed": env.engine.global_seed + env.config["start_scenario_index"],
}
)
if d:
if info["arrive_dest"]:
print("seed:{}, success".format(env.engine.global_random_seed))
break
if d:
if info["arrive_dest"]:
print("seed:{}, success".format(env.engine.global_random_seed))
print(t)
break

View File

@@ -14,7 +14,7 @@ def _test_combine_dataset():
combine_path = os.path.join(SCENARIONET_DATASET_PATH, "combined_dataset")
combine_multiple_dataset(combine_path, *dataset_paths, force_overwrite=True, try_generate_missing_file=True)
os.makedirs("verify_results", exist_ok=True)
success, result = verify_loading_into_metadrive(combine_path, "verify_results", steps_to_run=250)
success, result = verify_loading_into_metadrive(combine_path, "verify_results")
assert success

View File

@@ -21,9 +21,11 @@ def test_combine_multiple_dataset():
summary, sorted_scenarios, mapping = read_dataset_summary(dataset_path)
for scenario_file in sorted_scenarios:
read_scenario(os.path.join(dataset_path, mapping[scenario_file], scenario_file))
num_worker = 4 if len(summary) > 4 else 1
success, result = verify_loading_into_metadrive(dataset_path,
result_save_dir="test_dataset",
steps_to_run=300)
steps_to_run=1000,
num_workers=num_worker)
assert success

View File

@@ -11,7 +11,7 @@ from metadrive.scenario.utils import get_number_of_scenarios
from functools import partial
def verify_loading_into_metadrive(dataset_path, result_save_dir, steps_to_run=300, num_workers=8):
def verify_loading_into_metadrive(dataset_path, result_save_dir, steps_to_run=1000, num_workers=8):
if result_save_dir is not None:
assert os.path.exists(result_save_dir) and os.path.isdir(
result_save_dir), "Argument result_save_dir must be an existing dir"
@@ -45,7 +45,7 @@ def verify_loading_into_metadrive(dataset_path, result_save_dir, steps_to_run=30
if result:
print("All scenarios can be loaded successfully!")
else:
print("Fail to load all scenarios, see log for more details! Number of failed scenarios: {}".format(logs))
print("Fail to load all scenarios, see log for more details! Number of failed scenarios: {}".format(len(logs)))
return result, logs