show import error
@@ -8,10 +8,10 @@ logger = logging.getLogger(__name__)
 try:
     from nuplan.common.actor_state.tracked_objects_types import TrackedObjectType
     from nuplan.common.maps.maps_datatypes import TrafficLightStatusType
-except ImportError:
-    logger.warning("Can not import nuplan-devkit")

     NuPlanEgoType = TrackedObjectType.EGO
+except ImportError as e:
+    logger.warning("Can not import nuplan-devkit: {}".format(e))


 def get_traffic_obj_type(nuplan_type):
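For orientation, the guarded-import pattern this hunk ends up with looks roughly as follows. This is a sketch assembled only from the lines visible above (plus the logger setup named in the hunk header), not the full module:

import logging

logger = logging.getLogger(__name__)

try:
    from nuplan.common.actor_state.tracked_objects_types import TrackedObjectType
    from nuplan.common.maps.maps_datatypes import TrafficLightStatusType

    # Resolving the alias inside the try block keeps the module importable
    # when nuplan-devkit is missing.
    NuPlanEgoType = TrackedObjectType.EGO
except ImportError as e:
    # The warning now carries the underlying ImportError text, so the log
    # says which dependency actually failed rather than just that one did.
    logger.warning("Can not import nuplan-devkit: {}".format(e))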
@@ -31,10 +31,11 @@ try:
     from nuplan.planning.script.builders.scenario_filter_builder import build_scenario_filter
     from nuplan.planning.script.utils import set_up_common_builder
     import nuplan
-except ImportError:
-    logger.warning("Can not import nuplan-devkit")

     NUPLAN_PACKAGE_PATH = os.path.dirname(nuplan.__file__)
+except ImportError as e:
+    raise ImportError("Can not import nuplan-devkit: {}".format(e))
+

 EGO = "ego"
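Unlike the first hunk, this handler re-raises instead of only warning, since NUPLAN_PACKAGE_PATH is derived from nuplan.__file__ and nothing in the rest of the module can work without it. If preserving the original traceback also mattered, Python's exception chaining would do the same job; this is an alternative sketch, not what the commit does:

try:
    import nuplan
except ImportError as e:
    # "raise ... from e" keeps the original ImportError attached as __cause__.
    raise ImportError("Can not import nuplan-devkit") from e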
@@ -429,4 +430,4 @@ def convert_nuplan_scenario(scenario: NuPlanScenario):
     # map
     result[SD.MAP_FEATURES] = extract_map_features(scenario.map_api, scenario_center)

-    return result
+    return result, scenario.scenario_name
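Callers of convert_nuplan_scenario now receive a pair rather than a bare dict, presumably so the batch writer can name output files without re-deriving the scenario id. A hypothetical call site (variable names are illustrative; the file-name pattern is the one from the loop deleted further down) would unpack it like this:

sd_scenario, scenario_name = convert_nuplan_scenario(scenario)
export_file_name = "sd_{}_{}.pkl".format("nuplan", scenario_name)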
@@ -17,8 +17,8 @@ try:
     from nuscenes.map_expansion.arcline_path_utils import discretize_lane
     from nuscenes.map_expansion.map_api import NuScenesMap
     from pyquaternion import Quaternion
-except ImportError:
-    logger.warning("Can not import nuscenes-devkit")
+except ImportError as e:
+    logger.warning("Can not import nuscenes-devkit: {}".format(e))

 EGO = "ego"
@@ -2,64 +2,10 @@
 This script aims to convert nuplan scenarios to ScenarioDescription, so that we can load any nuplan scenarios into
 MetaDrive.
 """
-import copy
+from scenarionet import SCENARIONET_DATASET_PATH
 import os
-import pickle
-import shutil
-
-import tqdm
-from metadrive.scenario.scenario_description import ScenarioDescription
-
 from scenarionet.converter.nuplan.utils import get_nuplan_scenarios, convert_nuplan_scenario
-from scenarionet.converter.utils import dict_recursive_remove_array_and_set
-
-
-def convert_nuplan(dataset_params, output_path, worker_index=None, force_overwrite=False):
-    save_path = copy.deepcopy(output_path)
-    output_path = output_path + "_tmp"
-    # meta recorder and data summary
-    if os.path.exists(output_path):
-        shutil.rmtree(output_path)
-    os.makedirs(output_path, exist_ok=False)
-
-    # make real save dir
-    delay_remove = None
-    if os.path.exists(save_path):
-        if force_overwrite:
-            delay_remove = save_path
-        else:
-            raise ValueError("Directory already exists! Abort")
-
-    metadata_recorder = {}
-    total_scenarios = 0
-    desc = ""
-    summary_file = "dataset_summary.pkl"
-    if worker_index is not None:
-        desc += "Worker {} ".format(worker_index)
-        summary_file = "dataset_summary_worker{}.pkl".format(worker_index)
-
-    # Init.
-    scenarios = get_nuplan_scenarios(dataset_params)
-    for scenario in tqdm.tqdm(scenarios):
-        sd_scenario = convert_nuplan_scenario(scenario)
-        sd_scenario = sd_scenario.to_dict()
-        ScenarioDescription.sanity_check(sd_scenario, check_self_type=True)
-        export_file_name = "sd_{}_{}.pkl".format("nuplan", scenario.scenario_name)
-        p = os.path.join(output_path, export_file_name)
-        with open(p, "wb") as f:
-            pickle.dump(sd_scenario, f)
-        metadata_recorder[export_file_name] = copy.deepcopy(sd_scenario[ScenarioDescription.METADATA])
-    # rename and save
-    if delay_remove is not None:
-        shutil.rmtree(delay_remove)
-    os.rename(output_path, save_path)
-    summary_file = os.path.join(save_path, summary_file)
-    with open(summary_file, "wb") as file:
-        pickle.dump(dict_recursive_remove_array_and_set(metadata_recorder), file)
-    print("Summary is saved at: {}".format(summary_file))
-    if delay_remove is not None:
-        assert delay_remove == save_path, delay_remove + " vs. " + save_path
+from scenarionet.converter.utils import write_to_directory


 if __name__ == "__main__":
     # 14 types
@@ -98,7 +44,15 @@ if __name__ == "__main__":
         # "scenario_filter.limit_scenarios_per_type=10",  # use 10 scenarios per scenario type
         "scenario_filter.timestamp_threshold_s=20",  # minimal scenario duration (s)
     ]
-    output_path = AssetLoader.file_path("nuplan", return_raw_style=False)
-    worker_index = None
     force_overwrite = True
-    convert_nuplan(output_path, dataset_params, worker_index=worker_index, force_overwrite=force_overwrite)
+    output_path = os.path.join(SCENARIONET_DATASET_PATH, "nuplan")
+    version = 'v1.2'
+
+    scenarios = get_nuplan_scenarios(dataset_params)
+    write_to_directory(convert_func=convert_nuplan_scenario,
+                       scenarios=scenarios,
+                       output_path=output_path,
+                       dataset_version=version,
+                       dataset_name="nuscenes",
+                       force_overwrite=force_overwrite,
+                       )
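The rewritten __main__ block delegates the per-scenario loop and summary bookkeeping to write_to_directory; its exact signature lives in scenarionet.converter.utils and is not shown in this diff, so the keyword arguments below are only the ones visible in the hunk. Note that dataset_name is passed as "nuscenes" even though this is the nuplan converter, which looks like an oversight carried over verbatim. Stitched together, the new entry point amounts to roughly this (dataset_params stands in for the filter options listed above):

import os

from scenarionet import SCENARIONET_DATASET_PATH
from scenarionet.converter.nuplan.utils import get_nuplan_scenarios, convert_nuplan_scenario
from scenarionet.converter.utils import write_to_directory

dataset_params = [
    "scenario_filter.timestamp_threshold_s=20",  # minimal scenario duration (s)
]
output_path = os.path.join(SCENARIONET_DATASET_PATH, "nuplan")

scenarios = get_nuplan_scenarios(dataset_params)
write_to_directory(
    convert_func=convert_nuplan_scenario,
    scenarios=scenarios,
    output_path=output_path,
    dataset_version='v1.2',
    dataset_name="nuscenes",  # as in the diff; "nuplan" was likely intended
    force_overwrite=True,
)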
@@ -23,6 +23,7 @@ except ImportError:
 try:
     from waymo_open_dataset.protos import scenario_pb2
 except ImportError:
+    # TODO, redo all waymo import error!
     try:
         from metadrive.utils.waymo.protos import scenario_pb2  # Local files that only exist on PZH's computer.
     except ImportError: