provide scripts
@@ -45,7 +45,7 @@ def combine_multiple_dataset(
         raise FileExistsError("Output path already exists!")
     else:
         shutil.rmtree(output_abs_path)
-    os.mkdir(output_abs_path)
+    os.makedirs(output_abs_path, exist_ok=False)
 
     summaries = {}
     mappings = {}
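
Note: replacing os.mkdir with os.makedirs(output_abs_path, exist_ok=False) creates intermediate directories while still failing loudly if the output directory survives the overwrite check. A minimal sketch of the resulting contract, using a hypothetical output path:

    import os
    import shutil
    import tempfile

    output_abs_path = os.path.join(tempfile.gettempdir(), "combined_dataset")  # hypothetical path

    if os.path.exists(output_abs_path):
        shutil.rmtree(output_abs_path)            # mirrors the force_overwrite branch above
    os.makedirs(output_abs_path, exist_ok=False)  # raises FileExistsError if recreated concurrently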
@@ -1,60 +0,0 @@
-import os
-
-from metadrive.envs.scenario_env import ScenarioEnv
-from metadrive.policy.replay_policy import ReplayEgoCarPolicy
-from metadrive.scenario.utils import get_number_of_scenarios
-
-from scenarionet import SCENARIONET_DATASET_PATH
-from scenarionet.builder.utils import combine_multiple_dataset
-
-if __name__ == '__main__':
-    dataset_paths = [
-        os.path.join(SCENARIONET_DATASET_PATH, "nuscenes"),
-        os.path.join(SCENARIONET_DATASET_PATH, "nuplan"),
-        os.path.join(SCENARIONET_DATASET_PATH, "waymo"),
-        os.path.join(SCENARIONET_DATASET_PATH, "pg")
-    ]
-
-    combine_path = os.path.join(SCENARIONET_DATASET_PATH, "combined_dataset")
-    combine_multiple_dataset(combine_path, *dataset_paths, force_overwrite=True, try_generate_missing_file=True)
-
-    env = ScenarioEnv(
-        {
-            "use_render": True,
-            "agent_policy": ReplayEgoCarPolicy,
-            "manual_control": False,
-            "show_interface": True,
-            "show_logo": False,
-            "show_fps": False,
-            "num_scenarios": get_number_of_scenarios(combine_path),
-            "horizon": 1000,
-            "no_static_vehicles": True,
-            "vehicle_config": dict(
-                show_navi_mark=False,
-                no_wheel_friction=True,
-                lidar=dict(num_lasers=120, distance=50, num_others=4),
-                lane_line_detector=dict(num_lasers=12, distance=50),
-                side_detector=dict(num_lasers=160, distance=50)
-            ),
-            "data_directory": combine_path,
-        }
-    )
-    success = []
-    while True:
-        for seed in [91]:
-            env.reset(force_seed=seed)
-            for t in range(10000):
-                o, r, d, info = env.step([0, 0])
-                assert env.observation_space.contains(o)
-                c_lane = env.vehicle.lane
-                long, lat, = c_lane.local_coordinates(env.vehicle.position)
-                # if env.config["use_render"]:
-                env.render(text={
-                    "seed": env.engine.global_seed + env.config["start_scenario_index"],
-                })
-
-                if d:
-                    if info["arrive_dest"]:
-                        print("seed:{}, success".format(env.engine.global_random_seed))
-                    print(t)
-                    break
@@ -2,17 +2,27 @@
 This script aims to convert nuplan scenarios to ScenarioDescription, so that we can load any nuplan scenarios into
 MetaDrive.
 """
+import argparse
 import os
 
 from scenarionet import SCENARIONET_DATASET_PATH
 from scenarionet.converter.nuplan.utils import get_nuplan_scenarios, convert_nuplan_scenario
 from scenarionet.converter.utils import write_to_directory
 
-if __name__ == "__main__":
-    force_overwrite = True
-    dataset_name = "nuplan"
-    output_path = os.path.join(SCENARIONET_DATASET_PATH, dataset_name)
-    version = 'v1.1'
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--dataset_name", "-n", default="nuplan",
+                        help="Dataset name, will be used to generate scenario files")
+    parser.add_argument("--dataset_path", "-d", default=os.path.join(SCENARIONET_DATASET_PATH, "nuplan"),
+                        help="The path of the dataset")
+    parser.add_argument("--version", "-v", default='v1.1', required=True, help="version")
+    parser.add_argument("--overwrite", action="store_true", help="If the dataset_path exists, overwrite it")
+    args = parser.parse_args()
+
+    force_overwrite = args.overwrite
+    dataset_name = args.dataset_name
+    output_path = args.dataset_path
+    version = args.version
 
     data_root = os.path.join(os.getenv("NUPLAN_DATA_ROOT"), "nuplan-v1.1/splits/mini")
     map_root = os.getenv("NUPLAN_MAPS_ROOT")
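
Note: each converter script now builds its configuration from an argparse CLI instead of hardcoded constants. One quirk worth flagging: --version is declared with both a default and required=True, and a required argument never falls back to its default, so callers must always pass -v. A minimal sketch of the parsing pattern (argv supplied inline purely for illustration):

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("--dataset_name", "-n", default="nuplan",
                        help="Dataset name, will be used to generate scenario files")
    parser.add_argument("--version", "-v", required=True, help="version")
    parser.add_argument("--overwrite", action="store_true",
                        help="If the dataset_path exists, overwrite it")

    # parse_args accepts an explicit argv list, which is handy for exercising the CLI.
    args = parser.parse_args(["-v", "v1.1", "--overwrite"])
    assert args.dataset_name == "nuplan" and args.version == "v1.1" and args.overwrite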
@@ -2,17 +2,27 @@
 This script aims to convert nuscenes scenarios to ScenarioDescription, so that we can load any nuscenes scenarios into
 MetaDrive.
 """
+import argparse
 import os.path
 
 from scenarionet import SCENARIONET_DATASET_PATH
 from scenarionet.converter.nuscenes.utils import convert_nuscenes_scenario, get_nuscenes_scenarios
 from scenarionet.converter.utils import write_to_directory
 
-if __name__ == "__main__":
-    dataset_name = "nuscenes"
-    output_path = os.path.join(SCENARIONET_DATASET_PATH, dataset_name)
-    version = 'v1.0-mini'
-    force_overwrite = True
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--dataset_name", "-n", default="nuscenes",
+                        help="Dataset name, will be used to generate scenario files")
+    parser.add_argument("--dataset_path", "-d", default=os.path.join(SCENARIONET_DATASET_PATH, "nuscenes"),
+                        help="The path of the dataset")
+    parser.add_argument("--version", "-v", default='v1.0-mini', required=True, help="version")
+    parser.add_argument("--overwrite", action="store_true", help="If the dataset_path exists, overwrite it")
+    args = parser.parse_args()
+
+    force_overwrite = args.overwrite
+    dataset_name = args.dataset_name
+    output_path = args.dataset_path
+    version = args.version
 
     dataroot = '/home/shady/data/nuscenes'
     scenarios, nusc = get_nuscenes_scenarios(dataroot, version)
@@ -1,18 +1,28 @@
+# from metadrive.policy.expert_policy import ExpertPolicy
+import argparse
 import os.path
 
 import metadrive
+from metadrive.policy.idm_policy import IDMPolicy
 
 from scenarionet import SCENARIONET_DATASET_PATH
 from scenarionet.converter.pg.utils import get_pg_scenarios, convert_pg_scenario
 from scenarionet.converter.utils import write_to_directory
-from metadrive.policy.idm_policy import IDMPolicy
-# from metadrive.policy.expert_policy import ExpertPolicy
 
 if __name__ == '__main__':
-    dataset_name = "pg"
-    output_path = os.path.join(SCENARIONET_DATASET_PATH, dataset_name)
-    version = metadrive.constants.DATA_VERSION
-    force_overwrite = True
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--dataset_name", "-n", default="pg",
+                        help="Dataset name, will be used to generate scenario files")
+    parser.add_argument("--dataset_path", "-d", default=os.path.join(SCENARIONET_DATASET_PATH, "pg"),
+                        help="The path of the dataset")
+    parser.add_argument("--version", "-v", default=metadrive.constants.DATA_VERSION, required=True, help="version")
+    parser.add_argument("--overwrite", action="store_true", help="If the dataset_path exists, overwrite it")
+    args = parser.parse_args()
+
+    force_overwrite = args.overwrite
+    dataset_name = args.dataset_name
+    output_path = args.dataset_path
+    version = args.version
 
     scenario_indices, env = get_pg_scenarios(30, IDMPolicy)
 
@@ -1,5 +1,7 @@
+import argparse
 import logging
 import os
 
 from scenarionet import SCENARIONET_DATASET_PATH
 from scenarionet.converter.utils import write_to_directory
 from scenarionet.converter.waymo.utils import convert_waymo_scenario, get_waymo_scenarios
@@ -7,10 +9,19 @@ from scenarionet.converter.waymo.utils import convert_waymo_scenario, get_waymo_
 logger = logging.getLogger(__name__)
 
 if __name__ == '__main__':
-    force_overwrite = True
-    dataset_name = "waymo"
-    output_path = os.path.join(SCENARIONET_DATASET_PATH, dataset_name)
-    version = 'v1.2'
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--dataset_name", "-n", default="waymo",
+                        help="Dataset name, will be used to generate scenario files")
+    parser.add_argument("--dataset_path", "-d", default=os.path.join(SCENARIONET_DATASET_PATH, "waymo"),
+                        help="The path of the dataset")
+    parser.add_argument("--version", "-v", default='v1.2', required=True, help="version")
+    parser.add_argument("--overwrite", action="store_true", help="If the dataset_path exists, overwrite it")
+    args = parser.parse_args()
+
+    force_overwrite = args.overwrite
+    dataset_name = args.dataset_name
+    output_path = args.dataset_path
+    version = args.version
 
     waymo_data_direction = os.path.join(SCENARIONET_DATASET_PATH, "waymo_origin")
     scenarios = get_waymo_scenarios(waymo_data_direction)
scenarionet/scripts/generate_from_error_file.py (new file, 12 lines)
@@ -0,0 +1,12 @@
+import argparse
+
+from scenarionet.verifier.error import ErrorFile
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--file", "-f", required=True, help="The path of the error file")
+    parser.add_argument("--dataset_path", "-d", required=True, help="The path of the generated dataset")
+    parser.add_argument("--overwrite", action="store_true", help="If the dataset_path exists, overwrite it")
+    parser.add_argument("--broken", action="store_true", help="Generate dataset containing only broken files")
+    args = parser.parse_args()
+    ErrorFile.generate_dataset(args.file, args.dataset_path, args.overwrite, args.broken)
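
Note: --overwrite and --broken use action="store_true", so each defaults to False and flips to True when the bare flag is present; no value is consumed. A quick sketch of that behavior:

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("--overwrite", action="store_true")
    parser.add_argument("--broken", action="store_true")

    assert parser.parse_args([]).overwrite is False        # flag omitted -> False
    assert parser.parse_args(["--broken"]).broken is True  # bare flag -> True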
scenarionet/scripts/run_simulation.py (new file, 52 lines)
@@ -0,0 +1,52 @@
+import argparse
+import os
+
+from metadrive.envs.scenario_env import ScenarioEnv
+from metadrive.policy.replay_policy import ReplayEgoCarPolicy
+from metadrive.scenario.utils import get_number_of_scenarios
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--dataset_path", "-d", required=True, help="The path of the dataset")
+    parser.add_argument("--render", action="store_true", help="Enable 3D rendering")
+    parser.add_argument("--scenario_index", default=None, type=int, help="Specifying a scenario to run")
+    args = parser.parse_args()
+
+    dataset_path = os.path.abspath(args.dataset_path)
+    num_scenario = get_number_of_scenarios(dataset_path)
+    if args.scenario_index is not None:
+        assert args.scenario_index < num_scenario, \
+            "The specified scenario index exceeds the scenario range: {}!".format(num_scenario)
+
+    env = ScenarioEnv(
+        {
+            "use_render": args.render,
+            "agent_policy": ReplayEgoCarPolicy,
+            "manual_control": False,
+            "show_interface": True,
+            "show_logo": False,
+            "show_fps": False,
+            "num_scenarios": num_scenario,
+            "horizon": 1000,
+            "vehicle_config": dict(
+                show_navi_mark=False,
+                no_wheel_friction=True,
+                lidar=dict(num_lasers=120, distance=50, num_others=4),
+                lane_line_detector=dict(num_lasers=12, distance=50),
+                side_detector=dict(num_lasers=160, distance=50)
+            ),
+            "data_directory": dataset_path,
+        }
+    )
+    for seed in range(num_scenario if args.scenario_index is not None else 1000000):
+        env.reset(force_seed=seed if args.scenario_index is not None else args.scenario_index)
+        for t in range(10000):
+            o, r, d, info = env.step([0, 0])
+            if env.config["use_render"]:
+                env.render(text={
+                    "seed": env.engine.global_seed + env.config["start_scenario_index"],
+                })
+
+            if d and info["arrive_dest"]:
+                print("scenario:{}, success".format(env.engine.global_random_seed))
+                break
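
Note: the seed-selection expressions above read inverted relative to the preceding assert: when --scenario_index is given, the script presumably should replay only that scenario and otherwise sweep the whole dataset, yet the ternaries do the opposite. A sketch of the presumably intended selection, using a hypothetical helper that is not part of the commit:

    def seeds_to_run(scenario_index, num_scenario):
        if scenario_index is not None:
            return [scenario_index]   # replay only the requested scenario
        return range(num_scenario)    # otherwise iterate over every scenario

    assert list(seeds_to_run(3, 10)) == [3]
    assert list(seeds_to_run(None, 3)) == [0, 1, 2]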
@@ -2,6 +2,8 @@ import copy
 import os
 import os.path
 
+from metadrive.scenario.scenario_description import ScenarioDescription as SD
+
 from scenarionet import SCENARIONET_PACKAGE_PATH
 from scenarionet.builder.utils import combine_multiple_dataset
 from scenarionet.common_utils import read_dataset_summary, read_scenario
@@ -44,15 +46,17 @@ def test_combine_multiple_dataset():
     assert recursive_equal(read_fail_summary, fail_summary)
 
     # assert pass+fail = origin
-    all_summaries = copy.deep(read_pass_summary)
+    all_summaries = copy.deepcopy(read_pass_summary)
     all_summaries.update(fail_summary)
     assert recursive_equal(all_summaries, summary)
 
     # test read
     for scenario in read_pass_summary:
-        read_scenario(pass_dataset, read_pass_mapping, scenario)
-    for scenario in read_pass_summary:
-        read_scenario(fail_dataset, read_fail_mapping, scenario)
+        sd = read_scenario(pass_dataset, read_pass_mapping, scenario)
+        SD.sanity_check(sd)
+    for scenario in read_fail_summary:
+        sd = read_scenario(fail_dataset, read_fail_mapping, scenario)
+        SD.sanity_check(sd)
 
 
 if __name__ == '__main__':
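
Note: besides fixing the copy.deep typo (no such function exists; the old call would raise AttributeError), deepcopy matters here because dict.update mutates in place. Starting from a deep copy keeps read_pass_summary intact while the merged view is compared against the original summary:

    import copy

    pass_summary = {"scenario_a": {"length": 10}}   # stand-in data for illustration
    fail_summary = {"scenario_b": {"length": 5}}

    merged = copy.deepcopy(pass_summary)
    merged.update(fail_summary)

    assert "scenario_b" not in pass_summary         # the original is untouched
    assert set(merged) == {"scenario_a", "scenario_b"}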
@@ -1,4 +1,5 @@
 import json
+import shutil
 import logging
 import os
 from typing import List
@@ -64,10 +65,13 @@ class ErrorFile:
         """
         # TODO Add test!
         new_dataset_path = os.path.abspath(new_dataset_path)
-        if os.path.exists(new_dataset_path) and not force_overwrite:
-            raise ValueError("Directory: {} already exists! "
-                             "Set force_overwrite=True to overwrite".format(new_dataset_path))
-        os.makedirs(new_dataset_path, exist_ok=True)
+        if os.path.exists(new_dataset_path):
+            if force_overwrite:
+                shutil.rmtree(new_dataset_path)
+            else:
+                raise ValueError("Directory: {} already exists! "
+                                 "Set force_overwrite=True to overwrite".format(new_dataset_path))
+        os.makedirs(new_dataset_path, exist_ok=False)
 
         with open(error_file_path, "r+") as f:
             error_file = json.load(f)
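
Note: together with the import shutil added above, generate_dataset now follows the same overwrite contract as the builder: wipe the directory when force_overwrite is set, otherwise fail, and let exist_ok=False guarantee a freshly created directory. A standalone sketch of the pattern, with a hypothetical function name:

    import os
    import shutil

    def prepare_output_dir(path, force_overwrite=False):
        path = os.path.abspath(path)
        if os.path.exists(path):
            if force_overwrite:
                shutil.rmtree(path)  # wipe the stale dataset
            else:
                raise ValueError("Directory: {} already exists! "
                                 "Set force_overwrite=True to overwrite".format(path))
        os.makedirs(path, exist_ok=False)  # must not exist at this point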