get a unified func
@@ -0,0 +1,3 @@
+from scenarionet.converter.nuscenes.utils import convert_one_nuscenes_scenario
+from scenarionet.converter.nuplan.utils import convert_one_nuplan_scenario
+from scenarionet.converter.utils import write_to_directory
@@ -345,10 +345,11 @@ def get_map_features(scene_info, nuscenes: NuScenes, map_center, radius=250, poi
     return ret


-def convert_one_nuscenes_scenario(scene_token: str, nuscenes: NuScenes):
+def convert_one_nuscenes_scenario(scene, nuscenes: NuScenes):
     """
     Data will be interpolated to 0.1s time interval, while the time interval of original key frames are 0.5s.
     """
+    scene_token = scene["token"]
     scenario_log_interval = 0.1
     scene_info = nuscenes.get("scene", scene_token)
     frames = []
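With this hunk the converter receives the full nuScenes scene record and extracts the token itself. A minimal sketch of calling it directly, assuming a local v1.0-mini install (the dataroot path is illustrative):

    from nuscenes import NuScenes
    from scenarionet.converter.nuscenes.utils import convert_one_nuscenes_scenario

    nusc = NuScenes(version='v1.0-mini', dataroot='/data/nuscenes')  # illustrative path
    scene = nusc.scene[0]  # pass the whole scene record, not scene["token"]
    sd_scenario = convert_one_nuscenes_scenario(scene, nuscenes=nusc)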
@@ -14,7 +14,7 @@ from scenarionet.converter.nuplan.utils import get_nuplan_scenarios, convert_one
 from scenarionet.converter.utils import dict_recursive_remove_array


-def convert_nuplan(output_path, dataset_params, worker_index=None, force_overwrite=False):
+def convert_nuplan(dataset_params, output_path, worker_index=None, force_overwrite=False):
     save_path = copy.deepcopy(output_path)
     output_path = output_path + "_tmp"
     # meta recorder and data summary
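Note that the first two parameters of convert_nuplan swap places here, so existing call sites must be updated to match. A sketch of the new call order, with dataset_params and output_path standing in for whatever the caller already builds:

    convert_nuplan(dataset_params, output_path, force_overwrite=True)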
@@ -2,76 +2,28 @@
 This script aims to convert nuscenes scenarios to ScenarioDescription, so that we can load any nuscenes scenarios into
 MetaDrive.
 """
-import copy
-import os
-import pickle
-import shutil
-
-import tqdm
-from metadrive.scenario.scenario_description import ScenarioDescription
-
+from scenarionet import SCENARIONET_DATASET_PATH
 from scenarionet.converter.nuscenes.utils import convert_one_nuscenes_scenario
-from scenarionet.converter.utils import dict_recursive_remove_array
+from scenarionet.converter.utils import write_to_directory

 try:
     from nuscenes import NuScenes
 except ImportError:
     print("Can not find nuscenes-devkit")

-
-def convert_nuscenes(version, dataroot, output_path, worker_index=None, verbose=True, force_overwrite=False):
-    save_path = copy.deepcopy(output_path)
-    output_path = output_path + "_tmp"
-    # meta recorder and data summary
-    if os.path.exists(output_path):
-        shutil.rmtree(output_path)
-    os.makedirs(output_path, exist_ok=False)
-
-    # make real save dir
-    delay_remove = None
-    if os.path.exists(save_path):
-        if force_overwrite:
-            delay_remove = save_path
-        else:
-            raise ValueError("Directory already exists! Abort")
-
-    metadata_recorder = {}
-    total_scenarios = 0
-    desc = ""
-    summary_file = "dataset_summary.pkl"
-    if worker_index is not None:
-        desc += "Worker {} ".format(worker_index)
-        summary_file = "dataset_summary_worker{}.pkl".format(worker_index)
-
-    # Init.
-    nusc = NuScenes(version=version, verbose=verbose, dataroot=dataroot)
-    scenes = nusc.scene
-    for scene in tqdm.tqdm(scenes):
-        sd_scene = convert_one_nuscenes_scenario(scene["token"], nusc)
-        sd_scene = sd_scene.to_dict()
-        ScenarioDescription.sanity_check(sd_scene, check_self_type=True)
-        export_file_name = "sd_{}_{}.pkl".format("nuscenes_" + version, scene["token"])
-        p = os.path.join(output_path, export_file_name)
-        with open(p, "wb") as f:
-            pickle.dump(sd_scene, f)
-        metadata_recorder[export_file_name] = copy.deepcopy(sd_scene[ScenarioDescription.METADATA])
-
-    # rename and save
-    if delay_remove is not None:
-        shutil.rmtree(delay_remove)
-    os.rename(output_path, save_path)
-    summary_file = os.path.join(save_path, summary_file)
-    with open(summary_file, "wb") as file:
-        pickle.dump(dict_recursive_remove_array(metadata_recorder), file)
-    print("Summary is saved at: {}".format(summary_file))
-    assert delay_remove == save_path
-
-
+#
 if __name__ == "__main__":
-    output_path = AssetLoader.file_path("nuscenes", return_raw_style=False)
+    output_path = SCENARIONET_DATASET_PATH
     version = 'v1.0-mini'
-    verbose = True
     dataroot = '/home/shady/data/nuscenes'
-    worker_index = None
     force_overwrite = True
-    convert_nuscenes(version, dataroot, output_path, force_overwrite=force_overwrite)
+    nusc = NuScenes(version=version, dataroot=dataroot)
+    scenarios = nusc.scene
+    write_to_directory(convert_func=convert_one_nuscenes_scenario,
+                       scenarios=scenarios,
+                       output_path=output_path,
+                       version=version,
+                       dataset_name="nuscenes",
+                       force_overwrite=True,
+                       nuscenes=nusc)
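Since write_to_directory forwards its extra keyword arguments into every convert_func(scenario, **kwargs) call (see the last hunk below), the same entry point should cover nuPlan as well. A sketch under the assumption that convert_one_nuplan_scenario follows the same (scenario, **kwargs) convention and that scenarios come from get_nuplan_scenarios; the version string is illustrative:

    from scenarionet.converter.nuplan.utils import get_nuplan_scenarios, convert_one_nuplan_scenario
    from scenarionet.converter.utils import write_to_directory

    scenarios = get_nuplan_scenarios(dataset_params)  # assumed loader; its signature is not shown in this diff
    write_to_directory(convert_func=convert_one_nuplan_scenario,
                       scenarios=scenarios,
                       output_path=output_path,
                       version="v1.1",  # illustrative
                       dataset_name="nuplan",
                       force_overwrite=True)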
@@ -1,8 +1,15 @@
+import copy
 import math
+import ast
+import inspect
+import os
+import pickle
+import shutil
 from collections import defaultdict

 import numpy as np
-from metadrive.scenario import ScenarioDescription as SD
+import tqdm
+from metadrive.scenario import ScenarioDescription as SD, ScenarioDescription


 def nuplan_to_metadrive_vector(vector, nuplan_center=(0, 0)):
@@ -99,3 +106,50 @@ def get_number_summary(scenario):
     number_summary_dict["dynamic_object_states_counter"] = dict(dynamic_object_states_counter)

     return number_summary_dict
+
+
+def contains_explicit_return(f):
+    return any(isinstance(node, ast.Return) for node in ast.walk(ast.parse(inspect.getsource(f))))
+
+
+def write_to_directory(convert_func, scenarios, output_path, version, dataset_name, force_overwrite=False, **kwargs):
+    if not contains_explicit_return(convert_func):
+        raise RuntimeError("The convert function should return a metadata dict")
+
+    save_path = copy.deepcopy(output_path)
+    output_path = output_path + "_tmp"
+    # meta recorder and data summary
+    if os.path.exists(output_path):
+        shutil.rmtree(output_path)
+    os.makedirs(output_path, exist_ok=False)
+
+    # make real save dir
+    delay_remove = None
+    if os.path.exists(save_path):
+        if force_overwrite:
+            delay_remove = save_path
+        else:
+            raise ValueError("Directory already exists! Abort")
+
+    summary_file = "dataset_summary.pkl"
+
+    metadata_recorder = {}
+    for scenario in tqdm.tqdm(scenarios):
+        sd_scenario = convert_func(scenario, **kwargs)
+        sd_scenario = sd_scenario.to_dict()
+        ScenarioDescription.sanity_check(sd_scenario, check_self_type=True)
+        export_file_name = "sd_{}_{}.pkl".format(dataset_name + "_" + version, scenario["token"])
+        p = os.path.join(output_path, export_file_name)
+        with open(p, "wb") as f:
+            pickle.dump(sd_scenario, f)
+        # record metadata so the summary file is not empty (mirrors the old convert_nuscenes loop)
+        metadata_recorder[export_file_name] = copy.deepcopy(sd_scenario[ScenarioDescription.METADATA])
+
+    # rename and save
+    if delay_remove is not None:
+        shutil.rmtree(delay_remove)
+    os.rename(output_path, save_path)
+    summary_file = os.path.join(save_path, summary_file)
+    with open(summary_file, "wb") as file:
+        pickle.dump(dict_recursive_remove_array(metadata_recorder), file)
+    print("Summary is saved at: {}".format(summary_file))
+    assert delay_remove == save_path
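The guard at the top of write_to_directory parses the converter's source with ast and rejects any convert_func that lacks an explicit return statement, failing fast instead of pickling empty results. A self-contained illustration (the good/bad helpers are hypothetical):

    import ast
    import inspect

    def contains_explicit_return(f):
        # True if any `return` statement appears anywhere in f's source
        return any(isinstance(node, ast.Return) for node in ast.walk(ast.parse(inspect.getsource(f))))

    def good(scenario):
        return {"token": scenario["token"]}  # explicit return: accepted

    def bad(scenario):
        print(scenario)  # no return: write_to_directory raises RuntimeError

    assert contains_explicit_return(good)
    assert not contains_explicit_return(bad)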