format
@@ -41,10 +41,9 @@ def try_generating_mapping(file_folder):
     return mapping
 
 
-def combine_multiple_dataset(output_path, *dataset_paths,
-                             force_overwrite=False,
-                             try_generate_missing_file=True,
-                             filters: List[Callable] = None):
+def combine_multiple_dataset(
+    output_path, *dataset_paths, force_overwrite=False, try_generate_missing_file=True, filters: List[Callable] = None
+):
     """
     Combine multiple datasets. Each dataset should have a dataset_summary.pkl
     :param output_path: The path to store the output dataset
@@ -357,9 +357,9 @@ def extract_traffic(scenario: NuPlanScenario, center):
             type=MetaDriveType.UNSET,
             state=dict(
                 position=np.zeros(shape=(episode_len, 3)),
-                heading=np.zeros(shape=(episode_len,)),
+                heading=np.zeros(shape=(episode_len, )),
                 velocity=np.zeros(shape=(episode_len, 2)),
-                valid=np.zeros(shape=(episode_len,)),
+                valid=np.zeros(shape=(episode_len, )),
                 length=np.zeros(shape=(episode_len, 1)),
                 width=np.zeros(shape=(episode_len, 1)),
                 height=np.zeros(shape=(episode_len, 1))
@@ -17,10 +17,14 @@ def convert_pg_scenario(scenario_index, version, env):
 
 
 def get_pg_scenarios(num_scenarios, policy, start_seed=0):
-    env = MetaDriveEnv(dict(start_seed=start_seed,
-                            num_scenarios=num_scenarios,
-                            traffic_density=0.2,
-                            agent_policy=policy,
-                            crash_vehicle_done=False,
-                            map=2))
+    env = MetaDriveEnv(
+        dict(
+            start_seed=start_seed,
+            num_scenarios=num_scenarios,
+            traffic_density=0.2,
+            agent_policy=policy,
+            crash_vehicle_done=False,
+            map=2
+        )
+    )
     return [i for i in range(num_scenarios)], env
@@ -67,7 +67,7 @@ def contains_explicit_return(f):
 
 
 def write_to_directory(
     convert_func, scenarios, output_path, dataset_version, dataset_name, force_overwrite=False, **kwargs
 ):
     """
     Convert a batch of scenarios.
@@ -8,10 +8,12 @@ from scenarionet import SCENARIONET_DATASET_PATH
 from scenarionet.builder.utils import combine_multiple_dataset
 
 if __name__ == '__main__':
-    dataset_paths = [os.path.join(SCENARIONET_DATASET_PATH, "nuscenes"),
-                     os.path.join(SCENARIONET_DATASET_PATH, "nuplan"),
-                     os.path.join(SCENARIONET_DATASET_PATH, "waymo"),
-                     os.path.join(SCENARIONET_DATASET_PATH, "pg")]
+    dataset_paths = [
+        os.path.join(SCENARIONET_DATASET_PATH, "nuscenes"),
+        os.path.join(SCENARIONET_DATASET_PATH, "nuplan"),
+        os.path.join(SCENARIONET_DATASET_PATH, "waymo"),
+        os.path.join(SCENARIONET_DATASET_PATH, "pg")
+    ]
 
     combine_path = os.path.join(SCENARIONET_DATASET_PATH, "combined_dataset")
     combine_multiple_dataset(combine_path, *dataset_paths, force_overwrite=True, try_generate_missing_file=True)
@@ -47,11 +49,9 @@ if __name__ == '__main__':
         c_lane = env.vehicle.lane
         long, lat, = c_lane.local_coordinates(env.vehicle.position)
         # if env.config["use_render"]:
-        env.render(
-            text={
-                "seed": env.engine.global_seed + env.config["start_scenario_index"],
-            }
-        )
+        env.render(text={
+            "seed": env.engine.global_seed + env.config["start_scenario_index"],
+        })
 
         if d:
             if info["arrive_dest"]:
@@ -6,10 +6,12 @@ from scenarionet.verifier.utils import verify_loading_into_metadrive
 
 
 def _test_combine_dataset():
-    dataset_paths = [os.path.join(SCENARIONET_DATASET_PATH, "nuscenes"),
-                     os.path.join(SCENARIONET_DATASET_PATH, "nuplan"),
-                     os.path.join(SCENARIONET_DATASET_PATH, "waymo"),
-                     os.path.join(SCENARIONET_DATASET_PATH, "pg")]
+    dataset_paths = [
+        os.path.join(SCENARIONET_DATASET_PATH, "nuscenes"),
+        os.path.join(SCENARIONET_DATASET_PATH, "nuplan"),
+        os.path.join(SCENARIONET_DATASET_PATH, "waymo"),
+        os.path.join(SCENARIONET_DATASET_PATH, "pg")
+    ]
 
     combine_path = os.path.join(SCENARIONET_DATASET_PATH, "combined_dataset")
     combine_multiple_dataset(combine_path, *dataset_paths, force_overwrite=True, try_generate_missing_file=True)
@@ -22,40 +22,34 @@ def test_filter_dataset():
     # ========================= test 1 =========================
     # nuscenes data has no light
    # light_condition = ScenarioFilter.make(ScenarioFilter.has_traffic_light)
-    sdc_driving_condition = ScenarioFilter.make(ScenarioFilter.sdc_moving_dist,
-                                                target_dist=30,
-                                                condition="greater")
-    summary, mapping = combine_multiple_dataset(output_path,
-                                                *dataset_paths,
-                                                force_overwrite=True,
-                                                try_generate_missing_file=True,
-                                                filters=[sdc_driving_condition]
-                                                )
+    sdc_driving_condition = ScenarioFilter.make(ScenarioFilter.sdc_moving_dist, target_dist=30, condition="greater")
+    summary, mapping = combine_multiple_dataset(
+        output_path,
+        *dataset_paths,
+        force_overwrite=True,
+        try_generate_missing_file=True,
+        filters=[sdc_driving_condition]
+    )
     assert len(summary) > 0
 
     # ========================= test 2 =========================
 
-    num_condition = ScenarioFilter.make(ScenarioFilter.object_number,
-                                        number_threshold=50,
-                                        object_type=MetaDriveType.PEDESTRIAN,
-                                        condition="greater")
+    num_condition = ScenarioFilter.make(
+        ScenarioFilter.object_number, number_threshold=50, object_type=MetaDriveType.PEDESTRIAN, condition="greater"
+    )
 
-    summary, mapping = combine_multiple_dataset(output_path,
-                                                *dataset_paths,
-                                                force_overwrite=True,
-                                                try_generate_missing_file=True,
-                                                filters=[num_condition])
+    summary, mapping = combine_multiple_dataset(
+        output_path, *dataset_paths, force_overwrite=True, try_generate_missing_file=True, filters=[num_condition]
+    )
     assert len(summary) > 0
 
     # ========================= test 3 =========================
 
     traffic_light = ScenarioFilter.make(ScenarioFilter.has_traffic_light)
 
-    summary, mapping = combine_multiple_dataset(output_path,
-                                                *dataset_paths,
-                                                force_overwrite=True,
-                                                try_generate_missing_file=True,
-                                                filters=[traffic_light])
+    summary, mapping = combine_multiple_dataset(
+        output_path, *dataset_paths, force_overwrite=True, try_generate_missing_file=True, filters=[traffic_light]
+    )
     assert len(summary) > 0
 
 
@@ -25,4 +25,5 @@ if __name__ == "__main__":
         dataset_version=version,
         dataset_name=dataset_name,
         force_overwrite=force_overwrite,
-        nuscenes=nusc)
+        nuscenes=nusc
+    )
@@ -49,11 +49,9 @@ if __name__ == '__main__':
         c_lane = env.vehicle.lane
         long, lat, = c_lane.local_coordinates(env.vehicle.position)
         if env.config["use_render"]:
-            env.render(
-                text={
-                    "seed": env.engine.global_seed + env.config["start_scenario_index"],
-                }
-            )
+            env.render(text={
+                "seed": env.engine.global_seed + env.config["start_scenario_index"],
+            })
 
         if d and info["arrive_dest"]:
             print("seed:{}, success".format(env.engine.global_random_seed))
@@ -12,20 +12,16 @@ def test_combine_multiple_dataset():
     dataset_paths = [original_dataset_path + "_{}".format(i) for i in range(5)]
 
     output_path = os.path.join(SCENARIONET_PACKAGE_PATH, "tests", "combine")
-    combine_multiple_dataset(output_path,
-                             *dataset_paths,
-                             force_overwrite=True,
-                             try_generate_missing_file=True)
+    combine_multiple_dataset(output_path, *dataset_paths, force_overwrite=True, try_generate_missing_file=True)
     dataset_paths.append(output_path)
     for dataset_path in dataset_paths:
         summary, sorted_scenarios, mapping = read_dataset_summary(dataset_path)
         for scenario_file in sorted_scenarios:
             read_scenario(os.path.join(dataset_path, mapping[scenario_file], scenario_file))
         num_worker = 4 if len(summary) > 4 else 1
-        success, result = verify_loading_into_metadrive(dataset_path,
-                                                        result_save_dir="test_dataset",
-                                                        steps_to_run=1000,
-                                                        num_workers=num_worker)
+        success, result = verify_loading_into_metadrive(
+            dataset_path, result_save_dir="test_dataset", steps_to_run=1000, num_workers=num_worker
+        )
         assert success
 
 
@@ -18,15 +18,15 @@ def test_filter_dataset():
     # ========================= test 1 =========================
     # nuscenes data has no light
    # light_condition = ScenarioFilter.make(ScenarioFilter.has_traffic_light)
-    sdc_driving_condition = ScenarioFilter.make(ScenarioFilter.sdc_moving_dist,
-                                                target_dist=30,
-                                                condition="smaller")
+    sdc_driving_condition = ScenarioFilter.make(ScenarioFilter.sdc_moving_dist, target_dist=30, condition="smaller")
     answer = ['scene-0553', 'scene-0757', 'scene-1100']
-    summary, mapping = combine_multiple_dataset(output_path,
-                                                *dataset_paths,
-                                                force_overwrite=True,
-                                                try_generate_missing_file=True,
-                                                filters=[sdc_driving_condition])
+    summary, mapping = combine_multiple_dataset(
+        output_path,
+        *dataset_paths,
+        force_overwrite=True,
+        try_generate_missing_file=True,
+        filters=[sdc_driving_condition]
+    )
     assert len(answer) == len(summary)
     for a in answer:
         in_ = False
@@ -36,42 +36,35 @@ def test_filter_dataset():
                 break
         assert in_
 
-    sdc_driving_condition = ScenarioFilter.make(ScenarioFilter.sdc_moving_dist,
-                                                target_dist=5,
-                                                condition="greater")
-    summary, mapping = combine_multiple_dataset(output_path,
-                                                *dataset_paths,
-                                                force_overwrite=True,
-                                                try_generate_missing_file=True,
-                                                filters=[sdc_driving_condition])
+    sdc_driving_condition = ScenarioFilter.make(ScenarioFilter.sdc_moving_dist, target_dist=5, condition="greater")
+    summary, mapping = combine_multiple_dataset(
+        output_path,
+        *dataset_paths,
+        force_overwrite=True,
+        try_generate_missing_file=True,
+        filters=[sdc_driving_condition]
+    )
     assert len(summary) == 8
 
     # ========================= test 2 =========================
 
-    num_condition = ScenarioFilter.make(ScenarioFilter.object_number,
-                                        number_threshold=50,
-                                        object_type=MetaDriveType.PEDESTRIAN,
-                                        condition="greater")
+    num_condition = ScenarioFilter.make(
+        ScenarioFilter.object_number, number_threshold=50, object_type=MetaDriveType.PEDESTRIAN, condition="greater"
+    )
 
     answer = ['sd_nuscenes_v1.0-mini_scene-0061.pkl', 'sd_nuscenes_v1.0-mini_scene-1094.pkl']
-    summary, mapping = combine_multiple_dataset(output_path,
-                                                *dataset_paths,
-                                                force_overwrite=True,
-                                                try_generate_missing_file=True,
-                                                filters=[num_condition])
+    summary, mapping = combine_multiple_dataset(
+        output_path, *dataset_paths, force_overwrite=True, try_generate_missing_file=True, filters=[num_condition]
+    )
     assert len(answer) == len(summary)
     for a in answer:
         assert a in summary
 
-    num_condition = ScenarioFilter.make(ScenarioFilter.object_number,
-                                        number_threshold=50,
-                                        condition="greater")
+    num_condition = ScenarioFilter.make(ScenarioFilter.object_number, number_threshold=50, condition="greater")
 
-    summary, mapping = combine_multiple_dataset(output_path,
-                                                *dataset_paths,
-                                                force_overwrite=True,
-                                                try_generate_missing_file=True,
-                                                filters=[num_condition])
+    summary, mapping = combine_multiple_dataset(
+        output_path, *dataset_paths, force_overwrite=True, try_generate_missing_file=True, filters=[num_condition]
+    )
     assert len(summary) > 0
 
 
@@ -13,14 +13,12 @@ from functools import partial
 
 def verify_loading_into_metadrive(dataset_path, result_save_dir, steps_to_run=1000, num_workers=8):
     if result_save_dir is not None:
-        assert os.path.exists(result_save_dir) and os.path.isdir(
-            result_save_dir), "Argument result_save_dir must be an existing dir"
+        assert os.path.exists(result_save_dir
+                              ) and os.path.isdir(result_save_dir), "Argument result_save_dir must be an existing dir"
     num_scenario = get_number_of_scenarios(dataset_path)
     argument_list = []
 
-    func = partial(loading_wrapper,
-                   dataset_path=dataset_path,
-                   steps_to_run=steps_to_run)
+    func = partial(loading_wrapper, dataset_path=dataset_path, steps_to_run=steps_to_run)
 
     num_scenario_each_worker = int(num_scenario // num_workers)
     for i in range(num_workers):
@@ -50,8 +48,11 @@ def verify_loading_into_metadrive(dataset_path, result_save_dir, steps_to_run=1000, num_workers=8):
 
 
 def loading_into_metadrive(start_scenario_index, num_scenario, dataset_path, steps_to_run):
-    print("================ Begin Scenario Loading Verification for scenario {}-{} ================ \n".format(
-        start_scenario_index, num_scenario + start_scenario_index))
+    print(
+        "================ Begin Scenario Loading Verification for scenario {}-{} ================ \n".format(
+            start_scenario_index, num_scenario + start_scenario_index
+        )
+    )
     success = True
     env = ScenarioEnv(
         {
@@ -81,9 +82,10 @@ def loading_into_metadrive(start_scenario_index, num_scenario, dataset_path, steps_to_run):
             file_path = os.path.join(dataset_path, env.engine.data_manager.mapping[file_name], file_name)
             error_file = {"scenario_index": scenario_index, "file_path": file_path, "error": str(e)}
             error_files.append(error_file)
-            logger.warning("\n Scenario Error, "
-                           "scenario_index: {}, file_path: {}.\n Error message: {}".format(scenario_index, file_path,
-                                                                                           str(e)))
+            logger.warning(
+                "\n Scenario Error, "
+                "scenario_index: {}, file_path: {}.\n Error message: {}".format(scenario_index, file_path, str(e))
+            )
             success = False
         finally:
             env.close()
@@ -92,7 +94,4 @@ def loading_into_metadrive(start_scenario_index, num_scenario, dataset_path, steps_to_run):
 
 def loading_wrapper(arglist, dataset_path, steps_to_run):
     assert len(arglist) == 2, "Too much arguments!"
-    return loading_into_metadrive(arglist[0],
-                                  arglist[1],
-                                  dataset_path=dataset_path,
-                                  steps_to_run=steps_to_run)
+    return loading_into_metadrive(arglist[0], arglist[1], dataset_path=dataset_path, steps_to_run=steps_to_run)