scenarionet/scenarionet/converter/utils.py

import ast
import copy
import inspect
import logging
import math
import os
import pickle
import shutil

import numpy as np
import tqdm
from metadrive.scenario import ScenarioDescription as SD

from scenarionet.common_utils import save_summary_anda_mapping

logger = logging.getLogger(__file__)


def nuplan_to_metadrive_vector(vector, nuplan_center=(0, 0)):
    """All vectors in nuPlan should be centered at (0, 0) to avoid numerical explosion."""
    # Copy to a float array so in-place subtraction works even for integer input
    vector = np.array(vector, dtype=float)
    vector -= np.asarray(nuplan_center)
    return vector
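
# Example (a minimal sketch): re-centering a nuPlan coordinate around a map center.
# >>> nuplan_to_metadrive_vector([5.0, 7.0], nuplan_center=(2.0, 3.0))
# array([3., 4.])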


def compute_angular_velocity(initial_heading, final_heading, dt):
    """
    Calculate the angular velocity between two headings given in radians.

    Parameters:
        initial_heading (float): The initial heading in radians.
        final_heading (float): The final heading in radians.
        dt (float): The time interval between the two headings in seconds.

    Returns:
        float: The angular velocity in radians per second.
    """
    # Calculate the difference in headings
    delta_heading = final_heading - initial_heading

    # Wrap delta_heading into the range [-π, π)
    delta_heading = (delta_heading + math.pi) % (2 * math.pi) - math.pi

    # Compute the angular velocity
    angular_vel = delta_heading / dt
    return angular_vel
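
# Example: headings that cross the ±π boundary are handled by the wrap above.
# Turning from 3.1 rad to -3.1 rad in 0.1 s is a small positive rotation,
# not a full-circle spin:
# >>> round(compute_angular_velocity(3.1, -3.1, dt=0.1), 4)
# 0.8319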


def mph_to_kmh(speed_in_mph: float):
    speed_in_kmh = speed_in_mph * 1.609344
    return speed_in_kmh
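
# Example: 60 mph ≈ 96.56 km/h (60 * 1.609344 = 96.56064).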


def contains_explicit_return(f):
    """Return True if the source of function ``f`` contains an explicit return statement."""
    return any(isinstance(node, ast.Return) for node in ast.walk(ast.parse(inspect.getsource(f))))
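
# Example: mph_to_kmh above returns a value explicitly, so:
# >>> contains_explicit_return(mph_to_kmh)
# True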


def write_to_directory(
    convert_func, scenarios, output_path, dataset_version, dataset_name, force_overwrite=False, **kwargs
):
    """
    Convert a batch of scenarios and write them, plus summary and mapping files, to output_path.
    """
    if not contains_explicit_return(convert_func):
        raise RuntimeError("The convert function should return a metadata dict")

    if "version" in kwargs:
        kwargs.pop("version")
        logger.info("The 'version' specified in kwargs is replaced by the argument 'dataset_version'")

    save_path = copy.deepcopy(output_path)
    output_path = output_path + "_tmp"
    # Write into a temporary directory first, so a failed conversion never corrupts an existing dataset
    if os.path.exists(output_path):
        shutil.rmtree(output_path)
    os.makedirs(output_path, exist_ok=False)

    # If the real save dir already exists, postpone its removal until conversion succeeds
    delay_remove = None
    if os.path.exists(save_path):
        if force_overwrite:
            delay_remove = save_path
        else:
            raise ValueError("Directory already exists! Abort. Set force_overwrite=True to overwrite it.")

    summary_file = SD.DATASET.SUMMARY_FILE
    mapping_file = SD.DATASET.MAPPING_FILE

    summary_file_path = os.path.join(output_path, summary_file)
    mapping_file_path = os.path.join(output_path, mapping_file)
    summary = {}
    mapping = {}

    for scenario in tqdm.tqdm(scenarios):
        # convert scenario
        sd_scenario = convert_func(scenario, dataset_version, **kwargs)
        scenario_id = sd_scenario[SD.ID]
        export_file_name = SD.get_export_file_name(dataset_name, dataset_version, scenario_id)

        # add agents summary
        summary_dict = {}
        ego_car_id = sd_scenario[SD.METADATA][SD.SDC_ID]
        summary_dict[ego_car_id] = SD.get_object_summary(
            state_dict=sd_scenario.get_sdc_track()["state"], id=ego_car_id, type=sd_scenario.get_sdc_track()["type"]
        )
        for track_id, track in sd_scenario[SD.TRACKS].items():
            summary_dict[track_id] = SD.get_object_summary(state_dict=track["state"], id=track_id, type=track["type"])
        sd_scenario[SD.METADATA][SD.SUMMARY.OBJECT_SUMMARY] = summary_dict

        # count the occurrence of some object types
        sd_scenario[SD.METADATA][SD.SUMMARY.NUMBER_SUMMARY] = SD.get_number_summary(sd_scenario)

        # update summary/mapping dict
        summary[export_file_name] = copy.deepcopy(sd_scenario[SD.METADATA])
        mapping[export_file_name] = ""  # in the same dir

        # sanity check
        sd_scenario = sd_scenario.to_dict()
        SD.sanity_check(sd_scenario, check_self_type=True)

        # dump
        p = os.path.join(output_path, export_file_name)
        with open(p, "wb") as f:
            pickle.dump(sd_scenario, f)

    # store summary and mapping files
    save_summary_anda_mapping(summary_file_path, mapping_file_path, summary, mapping)

    # rename the tmp dir to the real save dir; remove the old dataset only now that conversion succeeded
    if delay_remove is not None:
        assert delay_remove == save_path
        shutil.rmtree(delay_remove)
    os.rename(output_path, save_path)

    return summary, mapping
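

# A minimal usage sketch. `convert_my_scenario` and `raw_scenarios` below are
# hypothetical placeholders for a dataset-specific converter and its inputs,
# not part of this module; the converter must explicitly return the scenario
# description, or write_to_directory raises a RuntimeError.
#
#     def convert_my_scenario(scenario, version, **kwargs):
#         sd_scenario = ...  # build a metadrive ScenarioDescription from the raw data
#         return sd_scenario
#
#     summary, mapping = write_to_directory(
#         convert_func=convert_my_scenario,
#         scenarios=raw_scenarios,
#         output_path="./converted_dataset",
#         dataset_version="v1.0",
#         dataset_name="my_dataset",
#         force_overwrite=True,
#     )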