rename scripts and update readme
This commit is contained in:
@@ -63,15 +63,15 @@ With the error file, one can build a new database excluding or including the bro
|
||||
|
||||
**Existence check**
|
||||
```
|
||||
python -m scenarionet.verify_completeness -d /database/to/check --result_save_dir /error/file/path
|
||||
python -m scenarionet.verify_existence -d /database/to/check --error_file_path /error/file/path
|
||||
```
|
||||
**Runnable check**
|
||||
```
|
||||
python -m scenarionet.verify_simulation -d /database/to/check --result_save_dir /error/file/path
|
||||
python -m scenarionet.verify_simulation -d /database/to/check --error_file_path /error/file/path
|
||||
```
|
||||
**Generating new database**
|
||||
```
|
||||
python -m scenarionet.generate_from_error_file -d /new/database/path --file /error/file/path --broken
|
||||
python -m scenarionet.generate_from_error_file -d /new/database/path --file /error/file/path
|
||||
```
|
||||
|
||||
### Visualization
|
||||
|
||||
@@ -31,7 +31,7 @@ def test_generate_from_error():
|
||||
for scenario_file in sorted_scenarios:
|
||||
read_scenario(dataset_path, mapping, scenario_file)
|
||||
success, logs = verify_database(
|
||||
dataset_path, result_save_dir="../test_dataset", steps_to_run=1000, num_workers=16, overwrite=True
|
||||
dataset_path, error_file_path="../test_dataset", steps_to_run=1000, num_workers=16, overwrite=True
|
||||
)
|
||||
set_random_drop(False)
|
||||
# get error file
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
python ../../merge_database.py --overwrite --exist_ok --database_path ../tmp/test_combine_dataset --from_datasets ../../../dataset/waymo ../../../dataset/pg ../../../dataset/nuscenes ../../../dataset/nuplan --overwrite
|
||||
python ../../verify_simulation.py --overwrite --database_path ../tmp/test_combine_dataset --result_save_dir ../tmp/test_combine_dataset --random_drop --num_workers=16
|
||||
python ../../verify_simulation.py --overwrite --database_path ../tmp/test_combine_dataset --error_file_path ../tmp/test_combine_dataset --random_drop --num_workers=16
|
||||
python ../../generate_from_error_file.py --file ../tmp/test_combine_dataset/error_scenarios_for_test_combine_dataset.json --overwrite --database_path ../tmp/verify_pass
|
||||
python ../../generate_from_error_file.py --file ../tmp/test_combine_dataset/error_scenarios_for_test_combine_dataset.json --overwrite --database_path ../tmp/verify_fail --broken
|
||||
@@ -21,7 +21,7 @@ def test_combine_multiple_dataset():
|
||||
for scenario_file in sorted_scenarios:
|
||||
read_scenario(dataset_path, mapping, scenario_file)
|
||||
success, result = verify_database(
|
||||
dataset_path, result_save_dir=test_dataset_path, steps_to_run=1000, num_workers=4, overwrite=True
|
||||
dataset_path, error_file_path=test_dataset_path, steps_to_run=1000, num_workers=4, overwrite=True
|
||||
)
|
||||
assert success
|
||||
|
||||
|
||||
@@ -24,7 +24,7 @@ def test_generate_from_error():
|
||||
for scenario_file in sorted_scenarios:
|
||||
read_scenario(dataset_path, mapping, scenario_file)
|
||||
success, logs = verify_database(
|
||||
dataset_path, result_save_dir=TMP_PATH, steps_to_run=1000, num_workers=3, overwrite=True
|
||||
dataset_path, error_file_path=TMP_PATH, steps_to_run=1000, num_workers=3, overwrite=True
|
||||
)
|
||||
set_random_drop(False)
|
||||
# get error file
|
||||
|
||||
@@ -28,7 +28,7 @@ def test_move_database():
|
||||
for scenario_file in sorted_scenarios:
|
||||
read_scenario(output_path, mapping, scenario_file)
|
||||
success, result = verify_database(
|
||||
output_path, result_save_dir=output_path, steps_to_run=0, num_workers=4, overwrite=True
|
||||
output_path, error_file_path=output_path, steps_to_run=0, num_workers=4, overwrite=True
|
||||
)
|
||||
assert success
|
||||
|
||||
@@ -45,7 +45,7 @@ def test_move_database():
|
||||
for scenario_file in sorted_scenarios:
|
||||
read_scenario(output_path, mapping, scenario_file)
|
||||
success, result = verify_database(
|
||||
output_path, result_save_dir=output_path, steps_to_run=0, num_workers=4, overwrite=True
|
||||
output_path, error_file_path=output_path, steps_to_run=0, num_workers=4, overwrite=True
|
||||
)
|
||||
assert success
|
||||
|
||||
|
||||
@@ -20,13 +20,13 @@ def test_verify_completeness():
|
||||
read_scenario(dataset_path, mapping, scenario_file)
|
||||
set_random_drop(True)
|
||||
success, result = verify_database(
|
||||
dataset_path, result_save_dir=TMP_PATH, steps_to_run=0, num_workers=4, overwrite=True
|
||||
dataset_path, error_file_path=TMP_PATH, steps_to_run=0, num_workers=4, overwrite=True
|
||||
)
|
||||
assert not success
|
||||
|
||||
set_random_drop(False)
|
||||
success, result = verify_database(
|
||||
dataset_path, result_save_dir=TMP_PATH, steps_to_run=0, num_workers=4, overwrite=True
|
||||
dataset_path, error_file_path=TMP_PATH, steps_to_run=0, num_workers=4, overwrite=True
|
||||
)
|
||||
assert success
|
||||
|
||||
|
||||
@@ -25,12 +25,12 @@ def set_random_drop(drop):
|
||||
RANDOM_DROP = drop
|
||||
|
||||
|
||||
def verify_database(dataset_path, result_save_dir, overwrite=False, num_workers=8, steps_to_run=1000):
|
||||
def verify_database(dataset_path, error_file_path, overwrite=False, num_workers=8, steps_to_run=1000):
|
||||
global RANDOM_DROP
|
||||
assert os.path.isdir(result_save_dir), "result_save_dir must be a dir, get {}".format(result_save_dir)
|
||||
os.makedirs(result_save_dir, exist_ok=True)
|
||||
assert os.path.isdir(error_file_path), "error_file_path must be a dir, get {}".format(error_file_path)
|
||||
os.makedirs(error_file_path, exist_ok=True)
|
||||
error_file_name = EF.get_error_file_name(dataset_path)
|
||||
if os.path.exists(os.path.join(result_save_dir, error_file_name)) and not overwrite:
|
||||
if os.path.exists(os.path.join(error_file_path, error_file_name)) and not overwrite:
|
||||
raise FileExistsError(
|
||||
"An error_file already exists in result_save_directory. "
|
||||
"Setting overwrite=True to cancel this alert"
|
||||
@@ -65,7 +65,7 @@ def verify_database(dataset_path, result_save_dir, overwrite=False, num_workers=
|
||||
logger.info("All scenarios can be loaded successfully!")
|
||||
else:
|
||||
# save result
|
||||
path = EF.dump(result_save_dir, errors, dataset_path)
|
||||
path = EF.dump(error_file_path, errors, dataset_path)
|
||||
logger.info(
|
||||
"Fail to load all scenarios. Number of failed scenarios: {}. "
|
||||
"See: {} more details! ".format(len(errors), path)
|
||||
|
||||
@@ -7,11 +7,11 @@ if __name__ == '__main__':
|
||||
parser.add_argument(
|
||||
"--database_path", "-d", required=True, help="Dataset path, a directory containing summary.pkl and mapping.pkl"
|
||||
)
|
||||
parser.add_argument("--result_save_dir", default="./", help="Where to save the error file")
|
||||
parser.add_argument("--error_file_path", default="./", help="Where to save the error file")
|
||||
parser.add_argument(
|
||||
"--overwrite",
|
||||
action="store_true",
|
||||
help="If an error file already exists in result_save_dir, "
|
||||
help="If an error file already exists in error_file_path, "
|
||||
"whether to overwrite it"
|
||||
)
|
||||
parser.add_argument("--num_workers", type=int, default=8, help="number of workers to use")
|
||||
@@ -20,7 +20,7 @@ if __name__ == '__main__':
|
||||
set_random_drop(args.random_drop)
|
||||
verify_database(
|
||||
args.database_path,
|
||||
args.result_save_dir,
|
||||
args.error_file_path,
|
||||
overwrite=args.overwrite,
|
||||
num_workers=args.num_workers,
|
||||
steps_to_run=0
|
||||
@@ -7,15 +7,15 @@ if __name__ == '__main__':
|
||||
parser.add_argument(
|
||||
"--database_path", "-d", required=True, help="Dataset path, a directory containing summary.pkl and mapping.pkl"
|
||||
)
|
||||
parser.add_argument("--result_save_dir", default="./", help="Where to save the error file")
|
||||
parser.add_argument("--error_file_path", default="./", help="Where to save the error file")
|
||||
parser.add_argument(
|
||||
"--overwrite",
|
||||
action="store_true",
|
||||
help="If an error file already exists in result_save_dir, "
|
||||
help="If an error file already exists in error_file_path, "
|
||||
"whether to overwrite it"
|
||||
)
|
||||
parser.add_argument("--num_workers", type=int, default=8, help="number of workers to use")
|
||||
parser.add_argument("--random_drop", action="store_true", help="Randomly make some scenarios fail. for test only!")
|
||||
args = parser.parse_args()
|
||||
set_random_drop(args.random_drop)
|
||||
verify_database(args.database_path, args.result_save_dir, overwrite=args.overwrite, num_workers=args.num_workers)
|
||||
verify_database(args.database_path, args.error_file_path, overwrite=args.overwrite, num_workers=args.num_workers)
|
||||
|
||||
Reference in New Issue
Block a user