
Commit

Merge pull request #314 from quentin-leboutet/quentin-leboutet/point_goal_nav_training

Quentin leboutet/point goal nav training
thias15 committed Dec 19, 2022
2 parents b25575e + 797f976 commit ca8e129
Showing 13 changed files with 844 additions and 330 deletions.
118 changes: 80 additions & 38 deletions policy/openbot/associate_frames.py
@@ -107,24 +107,45 @@ def associate(first_list, second_list, max_offset):
return matches


def match_frame_ctrl_cmd(
data_dir, datasets, max_offset, redo_matching=False, remove_zeros=True
def match_frame_ctrl_input(
data_dir,
datasets,
max_offset,
redo_matching=False,
remove_zeros=True,
policy="autopilot",
):
frames = []
for dataset in datasets:
for folder in utils.list_dirs(os.path.join(data_dir, dataset)):
session_dir = os.path.join(data_dir, dataset, folder)
frame_list = match_frame_session(
session_dir, max_offset, redo_matching, remove_zeros
session_dir, max_offset, redo_matching, remove_zeros, policy
)
for timestamp in list(frame_list):
frames.append(frame_list[timestamp][0])
return frames


def match_frame_session(
session_dir, max_offset, redo_matching=False, remove_zeros=True
session_dir, max_offset, redo_matching=False, remove_zeros=True, policy="autopilot"
):

if policy == "autopilot":
matched_frames_file_name = "matched_frame_ctrl_cmd.txt"
processed_frames_file_name = "matched_frame_ctrl_cmd_processed.txt"
log_file = "indicatorLog.txt"
csv_label_string = "timestamp (frame),time_offset (cmd-frame),time_offset (ctrl-frame),frame,left,right,cmd\n"
csv_label_string_processed = "timestamp,frame,left,right,cmd\n"
elif policy == "point_goal_nav":
matched_frames_file_name = "matched_frame_ctrl_goal.txt"
processed_frames_file_name = "matched_frame_ctrl_goal_processed.txt"
log_file = "goalLog.txt"
csv_label_string = "timestamp (frame),time_offset (goal-frame),time_offset (ctrl-frame),frame,left,right,dist,sinYaw,cosYaw\n"
csv_label_string_processed = "timestamp,frame,left,right,dist,sinYaw,cosYaw\n"
else:
raise Exception("Unknown policy")

sensor_path = os.path.join(session_dir, "sensor_data")
img_path = os.path.join(session_dir, "images")
print("Processing folder %s" % (session_dir))
@@ -156,66 +177,87 @@ def match_frame_session(
print(" Frames and controls matched.")

if not redo_matching and os.path.isfile(
os.path.join(sensor_path, "matched_frame_ctrl_cmd.txt")
os.path.join(sensor_path, matched_frames_file_name)
):
print(" Frames and commands already matched.")
else:
# Match frames and controls with indicator commands
frame_list = read_file_list(os.path.join(sensor_path, "matched_frame_ctrl.txt"))
if len(frame_list) == 0:
raise Exception("Empty matched_frame_ctrl.txt")
cmd_list = read_file_list(os.path.join(sensor_path, "indicatorLog.txt"))
# Set indicator signal to 0 for initial frames
if len(cmd_list) == 0 or sorted(frame_list)[0] < sorted(cmd_list)[0]:
cmd_list[sorted(frame_list)[0]] = ["0"]
cmd_list = read_file_list(os.path.join(sensor_path, log_file))

if policy == "autopilot":
# Set indicator signal to 0 for initial frames
if len(cmd_list) == 0 or sorted(frame_list)[0] < sorted(cmd_list)[0]:
cmd_list[sorted(frame_list)[0]] = ["0"]

elif policy == "point_goal_nav":
if len(cmd_list) == 0:
raise Exception("Empty goalLog.txt")

matches = associate(frame_list, cmd_list, max_offset)
with open(os.path.join(sensor_path, "matched_frame_ctrl_cmd.txt"), "w") as f:
f.write(
"timestamp (frame),time_offset (cmd-frame),time_offset (ctrl-frame),frame,left,right,cmd\n"
)
with open(os.path.join(sensor_path, matched_frames_file_name), "w") as f:
f.write(csv_label_string)
for a, b in matches:
f.write(
"%d,%d,%s,%s\n"
% (a, b - a, ",".join(frame_list[a]), ",".join(cmd_list[b]))
)
print(" Frames and commands matched.")
print(" Frames and high-level commands matched.")

if not redo_matching and os.path.isfile(
os.path.join(sensor_path, "matched_frame_ctrl_cmd_processed.txt")
os.path.join(sensor_path, processed_frames_file_name)
):
print(" Preprocessing already completed.")
else:
# Cleanup: Add path and remove frames where vehicle was stationary
frame_list = read_file_list(
os.path.join(sensor_path, "matched_frame_ctrl_cmd.txt")
)
with open(
os.path.join(sensor_path, "matched_frame_ctrl_cmd_processed.txt"), "w"
) as f:
f.write("timestamp,frame,left,right,cmd\n")
frame_list = read_file_list(os.path.join(sensor_path, matched_frames_file_name))
with open(os.path.join(sensor_path, processed_frames_file_name), "w") as f:
f.write(csv_label_string_processed)
# max_ctrl = get_max_ctrl(frame_list)
for timestamp in list(frame_list):
frame = frame_list[timestamp]
if len(frame) < 6:
continue
left = int(frame[3])
right = int(frame[4])
# left = normalize(max_ctrl, frame[3])
# right = normalize(max_ctrl, frame[4])
if remove_zeros and left == 0 and right == 0:
print(f" Removed timestamp: {timestamp}")
del frame
else:
frame_name = os.path.join(img_path, frame[2] + "_crop.jpeg")
cmd = int(frame[5])
f.write(
"%s,%s,%d,%d,%d\n" % (timestamp, frame_name, left, right, cmd)
)

if policy == "autopilot":
left = int(frame[3])
right = int(frame[4])
# left = normalize(max_ctrl, frame[3])
# right = normalize(max_ctrl, frame[4])
if remove_zeros and left == 0 and right == 0:
print(f" Removed timestamp: {timestamp}")
del frame
else:
frame_name = os.path.join(img_path, frame[2] + "_crop.jpeg")
cmd = int(frame[5])
f.write(
"%s,%s,%d,%d,%d\n"
% (timestamp, frame_name, left, right, cmd)
)

elif policy == "point_goal_nav":
left = float(frame_list[timestamp][3])
right = float(frame_list[timestamp][4])
if remove_zeros and left == 0.0 and right == 0.0:
print(" Removed timestamp:%s" % (timestamp))
del frame_list[timestamp]
else:
frame_name = os.path.join(
img_path, frame_list[timestamp][2] + ".jpeg"
)
dist = float(frame_list[timestamp][5])
sinYaw = float(frame_list[timestamp][6])
cosYaw = float(frame_list[timestamp][7])
f.write(
"%s,%s,%f,%f,%f,%f,%f\n"
% (timestamp, frame_name, left, right, dist, sinYaw, cosYaw)
)

print(" Preprocessing completed.")

return read_file_list(
os.path.join(sensor_path, "matched_frame_ctrl_cmd_processed.txt")
)
return read_file_list(os.path.join(sensor_path, processed_frames_file_name))


def normalize(max_ctrl, val):
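For context, a minimal usage sketch of the renamed entry point (not part of this diff): the new policy argument switches matching between indicatorLog.txt ("autopilot") and goalLog.txt ("point_goal_nav"). The import path, dataset locations, and max_offset value below are placeholder assumptions, not values from this commit.

from openbot import associate_frames  # assumes the script is run from the policy/ directory

# Hypothetical layout: dataset/train_data/<dataset>/<session>/{images, sensor_data}
frames = associate_frames.match_frame_ctrl_input(
    data_dir="dataset/train_data",
    datasets=["my_openbot_dataset"],
    max_offset=1e3,              # placeholder tolerance for timestamp association
    redo_matching=True,
    remove_zeros=True,
    policy="point_goal_nav",     # or "autopilot" for the original behavior
)
print(f"Matched {len(frames)} frames")
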
39 changes: 39 additions & 0 deletions policy/openbot/callbacks.py
@@ -18,6 +18,45 @@ def checkpoint_cb(checkpoint_path, steps_per_epoch=-1, num_epochs=10):
return checkpoint_callback


def checkpoint_last_cb(checkpoint_path, steps_per_epoch=-1, num_epochs=10):
checkpoint_callback = tf.keras.callbacks.ModelCheckpoint(
filepath=os.path.join(checkpoint_path, "cp-last.ckpt"),
monitor="val_loss",
verbose=0,
save_best_only=False,
save_weights_only=False,
mode="auto",
save_freq="epoch" if steps_per_epoch < 0 else int(num_epochs * steps_per_epoch),
)
return checkpoint_callback


def checkpoint_best_train_cb(checkpoint_path, steps_per_epoch=-1, num_epochs=10):
checkpoint_callback = tf.keras.callbacks.ModelCheckpoint(
filepath=os.path.join(checkpoint_path, "cp-best-train.ckpt"),
monitor="loss",
verbose=0,
save_best_only=True,
save_weights_only=False,
mode="auto",
save_freq="epoch" if steps_per_epoch < 0 else int(num_epochs * steps_per_epoch),
)
return checkpoint_callback


def checkpoint_best_val_cb(checkpoint_path, steps_per_epoch=-1, num_epochs=10):
checkpoint_callback = tf.keras.callbacks.ModelCheckpoint(
filepath=os.path.join(checkpoint_path, "cp-best-val.ckpt"),
monitor="val_loss",
verbose=0,
save_best_only=True,
save_weights_only=False,
mode="auto",
save_freq="epoch" if steps_per_epoch < 0 else int(num_epochs * steps_per_epoch),
)
return checkpoint_callback


def tensorboard_cb(log_path):
tensorboard_callback = tf.keras.callbacks.TensorBoard(
log_dir=log_path,
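A hedged sketch of how the three new checkpoint callbacks could be combined with the existing TensorBoard callback during training; model, train_ds, and val_ds are assumed to be defined elsewhere, and the checkpoint and log directories are placeholders.

from openbot import callbacks

# model, train_ds and val_ds are assumed to exist (e.g. built in the training script)
history = model.fit(
    train_ds,
    validation_data=val_ds,
    epochs=10,
    callbacks=[
        callbacks.checkpoint_last_cb("checkpoints"),        # cp-last.ckpt: most recent weights
        callbacks.checkpoint_best_train_cb("checkpoints"),  # cp-best-train.ckpt: lowest training loss
        callbacks.checkpoint_best_val_cb("checkpoints"),    # cp-best-val.ckpt: lowest validation loss
        callbacks.tensorboard_cb("logs"),
    ],
)
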
6 changes: 4 additions & 2 deletions policy/openbot/data_augmentation.py
@@ -3,6 +3,7 @@
This script implements several routines for data augmentation.
"""
import tensorflow as tf
import numpy as np


def augment_img(img):
@@ -18,6 +19,7 @@ def augment_img(img):
img = tf.image.random_saturation(img, 0.6, 1.6)
img = tf.image.random_brightness(img, 0.05)
img = tf.image.random_contrast(img, 0.7, 1.3)
img = tf.clip_by_value(img, clip_value_min=0.0, clip_value_max=1.0)
return img


@@ -32,7 +34,7 @@ def augment_cmd(cmd):
cmd: augmented command
"""
if not (cmd > 0 or cmd < 0):
coin = tf.random.uniform(shape=[1], minval=0, maxval=1, dtype=tf.dtypes.float32)
coin = np.random.default_rng().uniform(low=0.0, high=1.0, size=None)
if coin < 0.25:
cmd = -1.0
elif coin < 0.5:
@@ -41,7 +43,7 @@


def flip_sample(img, cmd, label):
coin = tf.random.uniform(shape=[1], minval=0, maxval=1, dtype=tf.dtypes.float32)
coin = np.random.default_rng().uniform(low=0.0, high=1.0, size=None)
if coin < 0.5:
img = tf.image.flip_left_right(img)
cmd = -cmd
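A small sketch applying the augmentation helpers to one sample in eager mode, assuming a float32 image in [0, 1] and that flip_sample returns the same (img, cmd, label) triple it receives. Because augment_cmd and flip_sample now draw their coin from NumPy rather than tf.random, the sketch calls them eagerly per sample instead of inside a traced tf.data map, where the NumPy draw would be fixed at trace time.

import tensorflow as tf
from openbot import data_augmentation

# Hypothetical sample: image in [0, 1], indicator command, and (left, right) control label.
img = tf.random.uniform([224, 224, 3], minval=0.0, maxval=1.0)
cmd = 0.0
label = tf.constant([0.4, 0.6])

img = data_augmentation.augment_img(img)   # color jitter, clipped back to [0, 1]
cmd = data_augmentation.augment_cmd(cmd)   # random indicator command when cmd == 0
img, cmd, label = data_augmentation.flip_sample(img, cmd, label)  # random left/right mirror
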
84 changes: 51 additions & 33 deletions policy/openbot/dataloader.py
@@ -6,17 +6,30 @@


class dataloader:
def __init__(self, data_dir: str, datasets: List[str]):
def __init__(self, data_dir: str, datasets: List[str], policy: str):
self.data_dir = data_dir
self.policy = policy # "autopilot" or "point_goal_nav"
self.datasets = datasets
self.labels = self.load_labels()
self.index_table = self.lookup_table()
self.label_values = tf.constant(
[(float(label[0]), float(label[1])) for label in self.labels.values()]
)
self.cmd_values = tf.constant(
[(float(label[2])) for label in self.labels.values()]
)

if self.policy == "autopilot":
self.label_divider = 255.0
self.processed_frames_file_name = "matched_frame_ctrl_cmd_processed.txt"
self.cmd_values = tf.constant(
[(float(label[2])) for label in self.labels.values()]
)
elif self.policy == "point_goal_nav":
self.label_divider = 1.0
self.processed_frames_file_name = "matched_frame_ctrl_goal_processed.txt"
self.cmd_values = tf.constant(
[(float(l[2]), float(l[3]), float(l[4])) for l in self.labels.values()]
)
else:
raise Exception("Unknown policy")

# Load labels
def load_labels(self):
@@ -27,34 +40,39 @@ def load_labels(self):
for f in os.listdir(os.path.join(self.data_dir, dataset))
if not f.startswith(".")
]:
with open(
os.path.join(
self.data_dir,
dataset,
folder,
"sensor_data",
"matched_frame_ctrl_cmd_processed.txt",
)
) as f_input:
# discard header
header = f_input.readline()
data = f_input.read()
lines = (
data.replace(",", " ")
.replace("\\", "/")
.replace("\r", "")
.replace("\t", " ")
.split("\n")
)
data = [
[v.strip() for v in line.split(" ") if v.strip() != ""]
for line in lines
if len(line) > 0 and line[0] != "#"
]
# Tuples containing id: framepath and label: left,right,cmd
data = [(line[1], line[2:]) for line in data if len(line) > 1]
corpus.extend(data)
return dict(corpus)
labels_file = os.path.join(
self.data_dir,
dataset,
folder,
"sensor_data",
self.processed_frames_file_name,
)

if os.path.isfile(labels_file):
with open(labels_file) as f_input:

# discard header
header = f_input.readline()
data = f_input.read()
lines = (
data.replace(",", " ")
.replace("\\", "/")
.replace("\r", "")
.replace("\t", " ")
.split("\n")
)
data = [
[v.strip() for v in line.split(" ") if v.strip() != ""]
for line in lines
if len(line) > 0 and line[0] != "#"
]
# Tuples containing id (frame path) and labels: "left,right,cmd" for the autopilot policy,
# "left,right,dist,sinYaw,cosYaw" for the point_goal_nav policy
data = [(line[1], line[2:]) for line in data if len(line) > 1]
corpus.extend(data)
else:
print(f"Skipping {folder}")
return dict(corpus)

# build a lookup table to get the frame index for the label
def lookup_table(self):
Expand All @@ -70,4 +88,4 @@ def lookup_table(self):

def get_label(self, file_path):
index = self.index_table.lookup(file_path)
return self.cmd_values[index], self.label_values[index] / 255
return self.cmd_values[index], self.label_values[index] / self.label_divider

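Finally, a short sketch of how the updated dataloader might be constructed for the new task; the data directory and dataset name are hypothetical, and each session folder is assumed to already contain the processed label file written by associate_frames. The policy string selects that file and the label scaling.

import tensorflow as tf
from openbot import dataloader

loader = dataloader.dataloader(
    data_dir="dataset/train_data",
    datasets=["my_openbot_dataset"],
    policy="point_goal_nav",
)

# get_label expects a frame path that appears in the processed label file.
some_frame = next(iter(loader.labels))
goal, controls = loader.get_label(tf.constant(some_frame))
print(goal.numpy(), controls.numpy())  # (dist, sinYaw, cosYaw) and (left, right)
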