solve import conflict
@@ -1,6 +1,5 @@
import pickle

from metadrive.engine.asset_loader import AssetLoader

if __name__ == '__main__':
@@ -28,7 +27,7 @@ if __name__ == '__main__':
        if len(new_summary) >= 3:
            break

    file_path = AssetLoader.file_path("waymo", "dataset_summary.pkl", return_raw_style=False)
    file_path = AssetLoader.file_path("../converter/waymo", "dataset_summary.pkl", return_raw_style=False)
    with open(file_path, "wb") as f:
        pickle.dump(new_summary, f)
@@ -8,14 +8,13 @@ import pickle
import shutil

import tqdm

from metadrive.engine.asset_loader import AssetLoader
from metadrive.scenario.scenario_description import ScenarioDescription
from metadrive.utils.nuplan.utils import get_nuplan_scenarios, convert_one_scenario
from metadrive.utils.utils import dict_recursive_remove_array

from scenarionet.converter.nuplan.utils import get_nuplan_scenarios, convert_one_nuplan_scenario
from scenarionet.converter.utils import dict_recursive_remove_array


def convert_scenarios(output_path, dataset_params, worker_index=None, force_overwrite=False):
def convert_nuplan(output_path, dataset_params, worker_index=None, force_overwrite=False):
    save_path = copy.deepcopy(output_path)
    output_path = output_path + "_tmp"
    # meta recorder and data summary
@@ -42,7 +41,7 @@ def convert_scenarios(output_path, dataset_params, worker_index=None, force_over
    # Init.
    scenarios = get_nuplan_scenarios(dataset_params)
    for scenario in tqdm.tqdm(scenarios):
        sd_scenario = convert_one_scenario(scenario)
        sd_scenario = convert_one_nuplan_scenario(scenario)
        sd_scenario = sd_scenario.to_dict()
        ScenarioDescription.sanity_check(sd_scenario, check_self_type=True)
        export_file_name = "sd_{}_{}.pkl".format("nuplan", scenario.scenario_name)
@@ -102,4 +101,4 @@ if __name__ == "__main__":
    output_path = AssetLoader.file_path("nuplan", return_raw_style=False)
    worker_index = None
    force_overwrite = True
    convert_scenarios(output_path, dataset_params, worker_index=worker_index, force_overwrite=force_overwrite)
    convert_nuplan(output_path, dataset_params, worker_index=worker_index, force_overwrite=force_overwrite)
scenarionet/converter/nuplan/type.py (new file, 42 lines)
@@ -0,0 +1,42 @@
import logging

from metadrive.type import MetaDriveType

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

try:
    from nuplan.common.actor_state.tracked_objects_types import TrackedObjectType
    from nuplan.common.maps.maps_datatypes import TrafficLightStatusType
except ImportError:
    logger.warning("Can not import nuplan-devkit")

NuPlanEgoType = TrackedObjectType.EGO


def get_traffic_obj_type(nuplan_type):
    if nuplan_type == TrackedObjectType.VEHICLE:
        return MetaDriveType.VEHICLE
    elif nuplan_type == TrackedObjectType.TRAFFIC_CONE:
        return MetaDriveType.TRAFFIC_CONE
    elif nuplan_type == TrackedObjectType.PEDESTRIAN:
        return MetaDriveType.PEDESTRIAN
    elif nuplan_type == TrackedObjectType.BICYCLE:
        return MetaDriveType.CYCLIST
    elif nuplan_type == TrackedObjectType.BARRIER:
        return MetaDriveType.TRAFFIC_BARRIER
    elif nuplan_type == TrackedObjectType.EGO:
        raise ValueError("Ego should not be in detected results")
    else:
        return None


def set_light_status(status):
    if status == TrafficLightStatusType.GREEN:
        return MetaDriveType.LIGHT_GREEN
    elif status == TrafficLightStatusType.RED:
        return MetaDriveType.LIGHT_RED
    elif status == TrafficLightStatusType.YELLOW:
        return MetaDriveType.LIGHT_YELLOW
    elif status == TrafficLightStatusType.UNKNOWN:
        return MetaDriveType.LIGHT_UNKNOWN
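For reference, a minimal illustrative sketch of how these two helpers behave (written for this note, not part of the commit; it assumes GENERIC_OBJECT exists in the devkit enum):

from metadrive.type import MetaDriveType
from nuplan.common.actor_state.tracked_objects_types import TrackedObjectType
from nuplan.common.maps.maps_datatypes import TrafficLightStatusType
from scenarionet.converter.nuplan.type import get_traffic_obj_type, set_light_status

# Known categories map to MetaDrive types; unsupported ones return None so callers can skip them.
assert get_traffic_obj_type(TrackedObjectType.BICYCLE) == MetaDriveType.CYCLIST
assert get_traffic_obj_type(TrackedObjectType.GENERIC_OBJECT) is None  # assumed enum member
assert set_light_status(TrafficLightStatusType.GREEN) == MetaDriveType.LIGHT_GREEN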
@@ -5,27 +5,25 @@ from dataclasses import dataclass
from os.path import join

import numpy as np
from nuplan.common.actor_state.agent import Agent
from nuplan.common.actor_state.static_object import StaticObject
from nuplan.common.actor_state.tracked_objects_types import TrackedObjectType
from nuplan.common.maps.maps_datatypes import TrafficLightStatusType
from metadrive.scenario import ScenarioDescription as SD
from metadrive.type import MetaDriveType
from shapely.geometry.linestring import LineString
from shapely.geometry.multilinestring import MultiLineString

from metadrive.scenario import ScenarioDescription as SD
from metadrive.type import MetaDriveType
from metadrive.utils.coordinates_shift import nuplan_to_metadrive_vector
from metadrive.utils.math import compute_angular_velocity
from scenarionet.converter.nuplan.type import get_traffic_obj_type, NuPlanEgoType, set_light_status
from scenarionet.converter.utils import nuplan_to_metadrive_vector, compute_angular_velocity

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
from metadrive.utils import is_win
import geopandas as gpd
from shapely.ops import unary_union

try:
    import geopandas as gpd
    from nuplan.common.actor_state.agent import Agent
    from nuplan.common.actor_state.static_object import StaticObject
    from nuplan.common.actor_state.state_representation import Point2D
    from nuplan.common.maps.maps_datatypes import SemanticMapLayer, StopLineType
    from shapely.ops import unary_union

    from nuplan.planning.scenario_builder.nuplan_db.nuplan_scenario import NuPlanScenario
    import hydra
    from nuplan.planning.scenario_builder.nuplan_db.nuplan_scenario import NuPlanScenario
@@ -213,17 +211,6 @@ def extract_map_features(map_api, center, radius=250):
    return ret


def set_light_status(status):
    if status == TrafficLightStatusType.GREEN:
        return MetaDriveType.LIGHT_GREEN
    elif status == TrafficLightStatusType.RED:
        return MetaDriveType.LIGHT_RED
    elif status == TrafficLightStatusType.YELLOW:
        return MetaDriveType.LIGHT_YELLOW
    elif status == TrafficLightStatusType.UNKNOWN:
        return MetaDriveType.LIGHT_UNKNOWN


def set_light_position(scenario, lane_id, center, target_position=8):
    lane = scenario.map_api.get_map_object(str(lane_id), SemanticMapLayer.LANE_CONNECTOR)
    assert lane is not None, "Can not find lane: {}".format(lane_id)
@@ -274,23 +261,6 @@ def extract_traffic_light(scenario, center):
    return lights


def get_traffic_obj_type(nuplan_type):
    if nuplan_type == TrackedObjectType.VEHICLE:
        return MetaDriveType.VEHICLE
    elif nuplan_type == TrackedObjectType.TRAFFIC_CONE:
        return MetaDriveType.TRAFFIC_CONE
    elif nuplan_type == TrackedObjectType.PEDESTRIAN:
        return MetaDriveType.PEDESTRIAN
    elif nuplan_type == TrackedObjectType.BICYCLE:
        return MetaDriveType.CYCLIST
    elif nuplan_type == TrackedObjectType.BARRIER:
        return MetaDriveType.TRAFFIC_BARRIER
    elif nuplan_type == TrackedObjectType.EGO:
        raise ValueError("Ego should not be in detected results")
    else:
        return None


def parse_object_state(obj_state, nuplan_center):
    ret = {}
    ret["position"] = nuplan_to_metadrive_vector([obj_state.center.x, obj_state.center.y], nuplan_center)
@@ -346,9 +316,9 @@ def extract_traffic(scenario: NuPlanScenario, center):
        type=MetaDriveType.UNSET,
        state=dict(
            position=np.zeros(shape=(episode_len, 3)),
            heading=np.zeros(shape=(episode_len, )),
            heading=np.zeros(shape=(episode_len,)),
            velocity=np.zeros(shape=(episode_len, 2)),
            valid=np.zeros(shape=(episode_len, )),
            valid=np.zeros(shape=(episode_len,)),
            length=np.zeros(shape=(episode_len, 1)),
            width=np.zeros(shape=(episode_len, 1)),
            height=np.zeros(shape=(episode_len, 1))
@@ -392,7 +362,7 @@ def extract_traffic(scenario: NuPlanScenario, center):
            obj_type = MetaDriveType.VEHICLE
        ego_track[SD.TYPE] = obj_type
        if ego_track[SD.METADATA]["nuplan_type"] is None:
            ego_track[SD.METADATA]["nuplan_type"] = int(TrackedObjectType.EGO)
            ego_track[SD.METADATA]["nuplan_type"] = int(NuPlanEgoType)
            ego_track[SD.METADATA]["type"] = obj_type
        state = obj_state
        ego_track["state"]["position"][frame_idx] = [state["position"][0], state["position"][1], 0.0]
@@ -418,7 +388,7 @@ def extract_traffic(scenario: NuPlanScenario, center):
    return tracks


def convert_one_scenario(scenario: NuPlanScenario):
def convert_one_nuplan_scenario(scenario: NuPlanScenario):
    """
    Data will be interpolated to 0.1s time interval, while the time interval of the original key frames is 0.5s.
    """
@@ -5,14 +5,13 @@ MetaDrive.
import copy
import os
import pickle
import shutil

import tqdm

from metadrive.engine.asset_loader import AssetLoader
from metadrive.scenario.scenario_description import ScenarioDescription
from metadrive.utils.nuscenes.utils import convert_one_scenario
from metadrive.utils.utils import dict_recursive_remove_array
import shutil

from scenarionet.converter.nuscenes.utils import convert_one_nuscenes_scenario
from scenarionet.converter.utils import dict_recursive_remove_array

try:
    from nuscenes import NuScenes
@@ -20,7 +19,7 @@ except ImportError:
    print("Can not find nuscenes-devkit")


def convert_scenarios(version, dataroot, output_path, worker_index=None, verbose=True, force_overwrite=False):
def convert_nuscenes(version, dataroot, output_path, worker_index=None, verbose=True, force_overwrite=False):
    save_path = copy.deepcopy(output_path)
    output_path = output_path + "_tmp"
    # meta recorder and data summary
@@ -48,7 +47,7 @@ def convert_scenarios(version, dataroot, output_path, worker_index=None, verbose
    nusc = NuScenes(version=version, verbose=verbose, dataroot=dataroot)
    scenes = nusc.scene
    for scene in tqdm.tqdm(scenes):
        sd_scene = convert_one_scenario(scene["token"], nusc)
        sd_scene = convert_one_nuscenes_scenario(scene["token"], nusc)
        sd_scene = sd_scene.to_dict()
        ScenarioDescription.sanity_check(sd_scene, check_self_type=True)
        export_file_name = "sd_{}_{}.pkl".format("nuscenes_" + version, scene["token"])
@@ -74,4 +73,4 @@ if __name__ == "__main__":
    dataroot = '/home/shady/data/nuscenes'
    worker_index = None
    force_overwrite = True
    convert_scenarios(version, dataroot, output_path, force_overwrite=force_overwrite)
    convert_nuscenes(version, dataroot, output_path, force_overwrite=force_overwrite)
@@ -1,19 +1,25 @@
import copy
import logging

import geopandas as gpd
import numpy as np
from nuscenes import NuScenes
from nuscenes.can_bus.can_bus_api import NuScenesCanBus
from nuscenes.eval.common.utils import quaternion_yaw
from nuscenes.map_expansion.arcline_path_utils import discretize_lane
from nuscenes.map_expansion.map_api import NuScenesMap
from pyquaternion import Quaternion
from shapely.ops import unary_union, cascaded_union
import logging
from metadrive.scenario import ScenarioDescription as SD
from metadrive.type import MetaDriveType
from metadrive.utils.nuscenes.detection_type import ALL_TYPE, HUMAN_TYPE, BICYCLE_TYPE, VEHICLE_TYPE
from shapely.ops import unary_union

from scenarionet.converter.nuscenes.type import ALL_TYPE, HUMAN_TYPE, BICYCLE_TYPE, VEHICLE_TYPE

logger = logging.getLogger(__name__)
try:
    from nuscenes import NuScenes
    from nuscenes.can_bus.can_bus_api import NuScenesCanBus
    from nuscenes.eval.common.utils import quaternion_yaw
    from nuscenes.map_expansion.arcline_path_utils import discretize_lane
    from nuscenes.map_expansion.map_api import NuScenesMap
    from pyquaternion import Quaternion
except ImportError:
    logger.warning("Can not import nuscenes-devkit")

EGO = "ego"
@@ -122,9 +128,9 @@ def get_tracks_from_frames(nuscenes: NuScenes, scene_info, frames, num_to_interp
        type=MetaDriveType.UNSET,
        state=dict(
            position=np.zeros(shape=(episode_len, 3)),
            heading=np.zeros(shape=(episode_len, )),
            heading=np.zeros(shape=(episode_len,)),
            velocity=np.zeros(shape=(episode_len, 2)),
            valid=np.zeros(shape=(episode_len, )),
            valid=np.zeros(shape=(episode_len,)),
            length=np.zeros(shape=(episode_len, 1)),
            width=np.zeros(shape=(episode_len, 1)),
            height=np.zeros(shape=(episode_len, 1))
@@ -177,7 +183,7 @@ def get_tracks_from_frames(nuscenes: NuScenes, scene_info, frames, num_to_interp
    interpolate_tracks[id]["metadata"]["track_length"] = new_episode_len

    # valid first
    new_valid = np.zeros(shape=(new_episode_len, ))
    new_valid = np.zeros(shape=(new_episode_len,))
    if track["state"]["valid"][0]:
        new_valid[0] = 1
    for k, valid in enumerate(track["state"]["valid"][1:], start=1):
@@ -339,7 +345,7 @@ def get_map_features(scene_info, nuscenes: NuScenes, map_center, radius=250, poi
    return ret


def convert_one_scenario(scene_token: str, nuscenes: NuScenes):
def convert_one_nuscenes_scenario(scene_token: str, nuscenes: NuScenes):
    """
    Data will be interpolated to 0.1s time interval, while the time interval of the original key frames is 0.5s.
    """
scenarionet/converter/utils.py (new file, 52 lines)
@@ -0,0 +1,52 @@
import math

import numpy as np


def nuplan_to_metadrive_vector(vector, nuplan_center=(0, 0)):
    "All vec in nuplan should be centered in (0,0) to avoid numerical explosion"
    vector = np.array(vector)
    # if len(vector.shape) == 1:
    #     vector[1] *= -1
    # else:
    #     vector[:, 1] *= -1
    vector -= np.asarray(nuplan_center)
    return vector


def compute_angular_velocity(initial_heading, final_heading, dt):
    """
    Calculate the angular velocity between two headings given in radians.

    Parameters:
        initial_heading (float): The initial heading in radians.
        final_heading (float): The final heading in radians.
        dt (float): The time interval between the two headings in seconds.

    Returns:
        float: The angular velocity in radians per second.
    """

    # Calculate the difference in headings
    delta_heading = final_heading - initial_heading

    # Adjust the delta_heading to be in the range (-π, π]
    delta_heading = (delta_heading + math.pi) % (2 * math.pi) - math.pi

    # Compute the angular velocity
    angular_vel = delta_heading / dt

    return angular_vel


def dict_recursive_remove_array(d):
    if isinstance(d, np.ndarray):
        return d.tolist()
    if isinstance(d, dict):
        for k in d.keys():
            d[k] = dict_recursive_remove_array(d[k])
    return d


def mph_to_kmh(speed_in_mph: float):
    speed_in_kmh = speed_in_mph * 1.609344
    return speed_in_kmh
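A quick illustrative check of the helpers above (numbers invented for this note, not taken from the commit):

from scenarionet.converter.utils import compute_angular_velocity, nuplan_to_metadrive_vector

# Headings wrap across the +/- pi boundary, so going from 3.1 rad to -3.1 rad in 0.1 s is a
# small positive rotation (~0.83 rad/s), not a -62 rad/s spin.
print(compute_angular_velocity(initial_heading=3.1, final_heading=-3.1, dt=0.1))
# Recentering simply subtracts the scenario center; the result is approximately [100.5, 12.2].
print(nuplan_to_metadrive_vector([664500.5, 3997012.2], nuplan_center=(664400.0, 3997000.0)))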
@@ -7,7 +7,6 @@ This script will create the output folder "processed_data" sharing the same leve
"""
import argparse
from metadrive.utils.utils import dict_recursive_remove_array
import copy
import os
import pickle
@@ -15,7 +14,7 @@ from collections import defaultdict
import numpy as np

from metadrive.constants import DATA_VERSION
from scenarionet.converter.utils import dict_recursive_remove_array

try:
    import tensorflow as tf
@@ -35,7 +34,7 @@ except ImportError:
from metadrive.scenario import ScenarioDescription as SD
from metadrive.type import MetaDriveType
from metadrive.utils.waymo.utils import extract_tracks, extract_dynamic_map_states, extract_map_features, \
from scenarionet.converter.waymo.utils import extract_tracks, extract_dynamic_map_states, extract_map_features, \
    compute_width
import sys
@@ -118,7 +117,7 @@ def _get_number_summary(scenario):
    return number_summary_dict


def parse_data(file_list, input_path, output_path, worker_index=None):
def convert_waymo(file_list, input_path, output_path, worker_index=None):
    scenario = scenario_pb2.Scenario()

    metadata_recorder = {}
@@ -145,8 +144,8 @@ def parse_data(file_list, input_path, output_path, worker_index=None):
        md_scenario = SD()

        md_scenario[SD.ID] = scenario.scenario_id

        md_scenario[SD.VERSION] = DATA_VERSION
        # TODO LQY, get version from original files
        md_scenario[SD.VERSION] = "1.2"

        # Please note that SDC track index is not identical to sdc_id.
        # sdc_id is a unique indicator to a track, while sdc_track_index is only the index of the sdc track
@@ -255,7 +254,7 @@ if __name__ == "__main__":
    # parse raw data from input path to output path,
    # there are 1000 raw data files in google cloud, each of them produces about 500 pkl files
    file_list = os.listdir(raw_data_path)
    parse_data(file_list, raw_data_path, output_path)
    convert_waymo(file_list, raw_data_path, output_path)
    sys.exit()
    # file_path = AssetLoader.file_path("waymo", "processed", "0.pkl", return_raw_style=False)
    # data = read_waymo_data(file_path)
@@ -1,4 +0,0 @@
#!/usr/bin/env bash
for i in 0 1 2 3 4 5 6 7 8 9; do
    nohup python /home/qyli/metadrive/metadrive/utils/waymo/script/convert_waymo_to_metadrive.py ./scenario_${i} > ${i}.log 2>&1 &
done
@@ -1,69 +0,0 @@
import os
import signal
import sys

from tqdm import tqdm

from metadrive.envs.real_data_envs.waymo_env import WaymoEnv
from metadrive.policy.idm_policy import WaymoIDMPolicy

try:
    from metadrive.utils.waymo.waymo_type import WaymoAgentType
    from metadrive.utils.waymo.waymo_type import WaymoRoadLineType, WaymoRoadEdgeType
finally:
    pass


def handler(signum, frame):
    raise Exception("end of time")


if __name__ == "__main__":
    scenario_data_path = sys.argv[1]
    start = int(sys.argv[2])
    processed_data_path = scenario_data_path + "_filtered"
    if not os.path.exists(processed_data_path):
        os.mkdir(processed_data_path)
    if not os.path.exists(scenario_data_path) or not os.path.exists(processed_data_path):
        raise ValueError("Path Not exist")
    num_scenarios = len(os.listdir(scenario_data_path))
    max_step = 1500
    min_step = 50

    env = WaymoEnv(
        {
            "use_render": False,
            "agent_policy": WaymoIDMPolicy,
            "data_directory": scenario_data_path,
            "start_scenario_index": start * 1000,
            "num_scenarios": num_scenarios,
            "store_map": False,
            # "manual_control": True,
            # "debug":True,
            "no_traffic": True,
            "horizon": 1500,
        }
    )
    try:
        env.reset()
    except:
        pass
    finally:
        pass
    for i in tqdm(range(num_scenarios)):
        try:
            signal.signal(signal.SIGALRM, handler)
            signal.alarm(10)
            env.reset(force_seed=i)
            while True:
                o, r, d, info = env.step([0, 0])
                if d or env.episode_step > max_step:
                    if info["arrive_dest"] and env.episode_step > min_step:
                        os.rename(
                            os.path.join(scenario_data_path, "{}.pkl".format(i + start * 1000)),
                            os.path.join(processed_data_path, "{}.pkl".format(i + start * 1000))
                        )
                    break
        except:
            # print("\n No Route or Timeout, Fail, Seed: {}".format(i))
            pass
@@ -1,4 +0,0 @@
#!/usr/bin/env bash
for i in 0 1 2 3 4 5 6 7 8 9; do
    nohup python /home/lfeng/metadrive/metadrive/utils/waymo/filter_cases.py /home/lfeng/waymo/scenarios_processed_${i} ${i} > ${i}.log 2>&1 &
done
@@ -1,18 +1,16 @@
import matplotlib.pyplot as plt
from metadrive.scenario.utils import read_scenario_data
from matplotlib.pyplot import figure

from metadrive.type import MetaDriveType
from metadrive.utils.math import mph_to_kmh
from metadrive.utils.waymo.waymo_type import WaymoLaneType, WaymoAgentType
from metadrive.utils.waymo.waymo_type import WaymoRoadLineType, WaymoRoadEdgeType
from scenarionet.converter.utils import mph_to_kmh
from scenarionet.converter.waymo.type import WaymoLaneType, WaymoAgentType, WaymoRoadLineType, WaymoRoadEdgeType

try:
    import tensorflow as tf
except ImportError:
    pass
try:
    from metadrive.utils.waymo.protos import scenario_pb2
    from scenarionet.converter.waymo.protos import scenario_pb2
except ImportError:
    pass
import pickle
@@ -100,7 +98,6 @@ def extract_edge(f):
    edge = dict()
    f_ = f.road_edge

    # TODO: Need to transform this to MetaDrive version
    edge["type"] = WaymoRoadEdgeType.from_waymo(f_.type)

    edge["polyline"] = extract_poly(f_.polyline)
@@ -302,24 +299,6 @@ class CustomUnpickler(pickle.Unpickler):
        return super().find_class(module, name)


def read_waymo_data(file_path):
    return read_scenario_data(file_path)


def draw_waymo_map(data):
    """
    TODO: Need this function in future.
    """
    figure(figsize=(8, 6), dpi=500)
    for key, value in data[ScenarioDescription.MAP_FEATURES].items():
        if value.get("type", None) == "center_lane":
            plt.scatter([x[0] for x in value["polyline"]], [y[1] for y in value["polyline"]], s=0.5)
        elif value.get("type", None) == "road_edge":
            plt.scatter([x[0] for x in value["polyline"]], [y[1] for y in value["polyline"]], s=0.5, c=(0, 0, 0))
        # elif value.get("type", None) == "road_line":
        #     plt.scatter([x[0] for x in value["polyline"]], [y[1] for y in value["polyline"]], s=0.5, c=(0.8,0.8,0.8))
    plt.show()


# return the nearest point's index of the line
def nearest_point(point, line):
@@ -369,7 +348,6 @@ def compute_width(map):
        lane["width"] = width
    return


# parse raw data from input path to output path


# def convert_polyline_to_metadrive(waymo_polyline, coordinate_transform=True):