diff --git a/scenarionet/tests/local_test/_test_convert_nuplan.py b/scenarionet/tests/local_test/_test_convert_nuplan.py
new file mode 100644
index 0000000..c77f05d
--- /dev/null
+++ b/scenarionet/tests/local_test/_test_convert_nuplan.py
@@ -0,0 +1,39 @@
+"""
+This script aims to convert nuplan scenarios to ScenarioDescription, so that we can load any nuplan scenarios into
+MetaDrive.
+"""
+import argparse
+import os
+
+from scenarionet import SCENARIONET_DATASET_PATH
+from scenarionet.converter.nuplan.utils import get_nuplan_scenarios, convert_nuplan_scenario
+from scenarionet.converter.utils import write_to_directory
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--dataset_name", "-n", default="nuplan",
+                        help="Dataset name, will be used to generate scenario files")
+    parser.add_argument("--dataset_path", "-d", default=os.path.join(SCENARIONET_DATASET_PATH, "nuplan"),
+                        help="The path of the dataset")
+    parser.add_argument("--version", "-v", default='v1.1', help="version")
+    parser.add_argument("--overwrite", action="store_true", help="If the dataset_path exists, overwrite it")
+    args = parser.parse_args()
+
+    force_overwrite = True
+    dataset_name = args.dataset_name
+    output_path = args.dataset_path
+    version = args.version
+
+    data_root = os.path.join(os.getenv("NUPLAN_DATA_ROOT"), "nuplan-v1.1/splits/mini")
+    map_root = os.getenv("NUPLAN_MAPS_ROOT")
+    scenarios = get_nuplan_scenarios(data_root, map_root, logs=["2021.07.16.20.45.29_veh-35_01095_01486"])
+
+    write_to_directory(
+        convert_func=convert_nuplan_scenario,
+        scenarios=scenarios,
+        output_path=output_path,
+        dataset_version=version,
+        dataset_name=dataset_name,
+        force_overwrite=force_overwrite,
+        num_workers=8,
+    )
diff --git a/scenarionet/tests/local_test/_test_convert_nuscenes.py b/scenarionet/tests/local_test/_test_convert_nuscenes.py
new file mode 100644
index 0000000..662ee59
--- /dev/null
+++ b/scenarionet/tests/local_test/_test_convert_nuscenes.py
@@ -0,0 +1,39 @@
+"""
+This script aims to convert nuscenes scenarios to ScenarioDescription, so that we can load any nuscenes scenarios into
+MetaDrive.
+"""
+import argparse
+import os.path
+
+from scenarionet import SCENARIONET_DATASET_PATH
+from scenarionet.converter.nuscenes.utils import convert_nuscenes_scenario, get_nuscenes_scenarios
+from scenarionet.converter.utils import write_to_directory
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--dataset_name", "-n", default="nuscenes",
+                        help="Dataset name, will be used to generate scenario files")
+    parser.add_argument("--dataset_path", "-d", default=os.path.join(SCENARIONET_DATASET_PATH, "nuscenes"),
+                        help="The path of the dataset")
+    parser.add_argument("--version", "-v", default='v1.0-mini', help="version")
+    parser.add_argument("--overwrite", action="store_true", help="If the dataset_path exists, overwrite it")
+    args = parser.parse_args()
+
+    force_overwrite = True
+    dataset_name = args.dataset_name
+    output_path = args.dataset_path
+    version = args.version
+
+    dataroot = '/home/shady/data/nuscenes'
+    scenarios, nusc = get_nuscenes_scenarios(dataroot, version)
+
+    write_to_directory(
+        convert_func=convert_nuscenes_scenario,
+        scenarios=scenarios,
+        output_path=output_path,
+        dataset_version=version,
+        dataset_name=dataset_name,
+        force_overwrite=force_overwrite,
+        nuscenes=nusc,
+        num_workers=4
+    )
diff --git a/scenarionet/tests/local_test/_test_convert_pg.py b/scenarionet/tests/local_test/_test_convert_pg.py
new file mode 100644
index 0000000..c4b31e7
--- /dev/null
+++ b/scenarionet/tests/local_test/_test_convert_pg.py
@@ -0,0 +1,37 @@
+# from metadrive.policy.expert_policy import ExpertPolicy
+import argparse
+import os.path
+
+import metadrive
+from metadrive.policy.idm_policy import IDMPolicy
+
+from scenarionet import SCENARIONET_DATASET_PATH
+from scenarionet.converter.pg.utils import get_pg_scenarios, convert_pg_scenario
+from scenarionet.converter.utils import write_to_directory
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--dataset_name", "-n", default="pg",
+                        help="Dataset name, will be used to generate scenario files")
+    parser.add_argument("--dataset_path", "-d", default=os.path.join(SCENARIONET_DATASET_PATH, "pg"),
+                        help="The path of the dataset")
+    parser.add_argument("--version", "-v", default=metadrive.constants.DATA_VERSION, help="version")
+    parser.add_argument("--overwrite", action="store_true", help="If the dataset_path exists, overwrite it")
+    args = parser.parse_args()
+
+    force_overwrite = True
+    dataset_name = args.dataset_name
+    output_path = args.dataset_path
+    version = args.version
+
+    scenario_indices, env = get_pg_scenarios(100, IDMPolicy)
+
+    write_to_directory(
+        convert_func=convert_pg_scenario,
+        scenarios=scenario_indices,
+        output_path=output_path,
+        dataset_version=version,
+        dataset_name=dataset_name,
+        force_overwrite=force_overwrite,
+        env=env
+    )
diff --git a/scenarionet/tests/local_test/_test_convert_waymo.py b/scenarionet/tests/local_test/_test_convert_waymo.py
index 228d9ed..043e1bc 100644
--- a/scenarionet/tests/local_test/_test_convert_waymo.py
+++ b/scenarionet/tests/local_test/_test_convert_waymo.py
@@ -32,5 +32,4 @@ if __name__ == '__main__':
         dataset_version=version,
         dataset_name=dataset_name,
         force_overwrite=force_overwrite,
-        num_workers=8
-    )
+        num_workers=8)