This repository has been archived by the owner on Aug 28, 2021. It is now read-only.

removing references
danielgordon10 committed Aug 16, 2019
1 parent d52e860 commit 30d76d9
Showing 11 changed files with 21 additions and 38 deletions.
5 changes: 2 additions & 3 deletions README.md
@@ -44,16 +44,15 @@ Alternatively, run `pip install git+https://github.com/facebookresearch/habitat-
SplitNet's codebase is known to be compatible with https://github.com/facebookresearch/habitat-api/commit/7015813aefae99233864c4ffcf7e52e9097392a4

## Data
-We use the data sources linked from the public habitat-api repository. You will need to individually download SUNCG, MP3D, and Gibson from their sources. [habitat-sim](https://github.com/facebookresearch/habitat-sim#datasets) and [habitat-api](https://github.com/facebookresearch/habitat-api#data) share the links to the files. We additionally use the Point-Nav datasets from habitat-api, but we also provide a script for generating new datasets.
-Note: The SUNCG assets are no longer available for download, but we will keep our datasets up in case people already have the data.
+We use the data sources linked from the public habitat-api repository. You will need to individually download MP3D, and Gibson from their sources. [habitat-sim](https://github.com/facebookresearch/habitat-sim#datasets) and [habitat-api](https://github.com/facebookresearch/habitat-api#data) share the links to the files. We additionally use the Point-Nav datasets from habitat-api, but we also provide a script for generating new datasets.

To use the same data from the dataset:
1. Create a symlink to where you downloaded the directory containing the `scene_datasets` asset files for each of the datasets. Call this folder `data`.
```bash
ln -s /path/to/habitat/data data
```
1. Verify that this is set up in the expected way by running `ls data/scene_datasets`. You should see:
-`gibson mp3d suncg` based on which you have downloaded.
+`gibson mp3d` based on which you have downloaded.
1. Download and extract the premade training datasets by running `sh download_dataset.sh`.
1. Copy/Move the downloaded datasets into the data folder.
```bash
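For reference, the data setup described in the README steps above boils down to a fixed directory layout under `data/`. Below is a minimal sanity-check sketch of that layout, assuming the paths are exactly the ones named in the README and the Point-Nav configs; it is a hypothetical helper, not part of the repository.

```python
import os

# Hypothetical layout check based on the paths named in the README and the diff below.
EXPECTED_SCENES = ["mp3d", "gibson"]  # suncg is no longer expected after this commit
POINTNAV_TEMPLATE = "data/datasets/pointnav/{dataset}/v1/{split}/{split}.json.gz"

def check_layout(datasets=EXPECTED_SCENES, splits=("train", "val")):
    for dataset in datasets:
        scene_dir = os.path.join("data", "scene_datasets", dataset)
        print(f"{scene_dir}: {'ok' if os.path.isdir(scene_dir) else 'MISSING'}")
        for split in splits:
            episodes = POINTNAV_TEMPLATE.format(dataset=dataset, split=split)
            print(f"{episodes}: {'ok' if os.path.isfile(episodes) else 'MISSING'}")

if __name__ == "__main__":
    check_layout()
```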
2 changes: 1 addition & 1 deletion arguments.py
@@ -109,7 +109,7 @@ def get_args():
)

parser.add_argument(
"--dataset", type=str, default="mp3d", help="Picks which dataset to load: mp3d | gibson | suncg", required=True
"--dataset", type=str, default="mp3d", help="Picks which dataset to load: mp3d | gibson", required=True
)

parser.add_argument(
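The `--dataset` flag above now documents only mp3d and gibson. A minimal sketch of enforcing that restriction with argparse `choices` follows; this is an illustrative assumption about how the check could be tightened, not code from this commit.

```python
import argparse

# Illustrative only: reject dataset names the codebase no longer supports.
parser = argparse.ArgumentParser()
parser.add_argument(
    "--dataset",
    type=str,
    default="mp3d",
    choices=["mp3d", "gibson"],
    required=True,
    help="Picks which dataset to load: mp3d | gibson",
)

args = parser.parse_args(["--dataset", "gibson"])
print(args.dataset)  # gibson; "--dataset suncg" would now exit with a usage error
```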
14 changes: 3 additions & 11 deletions base_habitat_rl_runner.py
@@ -117,9 +117,7 @@ def setup(self, create_decoder):
if "SEMANTIC_SENSOR" in extra_agent_sensors:
extra_task_sensors.append("CLASS_SEGMENTATION_SENSOR")

-if self.shell_args.dataset == "suncg":
-data_path = "data/datasets/pointnav/suncg/v1/{split}/{split}.json.gz"
-elif self.shell_args.dataset == "mp3d":
+if self.shell_args.dataset == "mp3d":
data_path = "data/datasets/pointnav/mp3d/v1/{split}/{split}.json.gz"
elif self.shell_args.dataset == "gibson":
data_path = "data/datasets/pointnav/gibson/v1/{split}/{split}.json.gz"
@@ -155,19 +153,13 @@ def setup(self, create_decoder):
config.ENVIRONMENT.MAX_EPISODE_STEPS = 250
config.TASK.TOP_DOWN_MAP.DRAW_SOURCE_AND_TARGET = False
self.env_types.append(ExplorationRLEnv)
-if self.shell_args.dataset == "suncg":
-config.TASK.NUM_EPISODES_BEFORE_JUMP = 5
-else:
-config.TASK.NUM_EPISODES_BEFORE_JUMP = 5
+config.TASK.NUM_EPISODES_BEFORE_JUMP = 5
elif self.shell_args.task == "flee":
config.TASK.COLLISION_REWARD = 0 # -0.1
config.ENVIRONMENT.MAX_EPISODE_STEPS = 250
config.TASK.TOP_DOWN_MAP.DRAW_SOURCE_AND_TARGET = False
self.env_types.append(RunAwayRLEnv)
-if self.shell_args.dataset == "suncg":
-config.TASK.NUM_EPISODES_BEFORE_JUMP = 5
-else:
-config.TASK.NUM_EPISODES_BEFORE_JUMP = 5
+config.TASK.NUM_EPISODES_BEFORE_JUMP = 5
else:
raise NotImplementedError("Unknown task type")

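Each of the dataset branches above resolves a name to the same Point-Nav path template. A minimal sketch of that mapping as a single lookup is shown below; it is a hypothetical helper mirroring the branches in the diff, not part of the repository.

```python
# Hypothetical shared helper mirroring the if/elif chains shown above.
POINTNAV_DATA_PATHS = {
    "mp3d": "data/datasets/pointnav/mp3d/v1/{split}/{split}.json.gz",
    "gibson": "data/datasets/pointnav/gibson/v1/{split}/{split}.json.gz",
}

def get_pointnav_data_path(dataset: str, split: str) -> str:
    try:
        template = POINTNAV_DATA_PATHS[dataset]
    except KeyError:
        raise NotImplementedError(f"Unknown dataset type: {dataset}")
    return template.format(split=split)

print(get_pointnav_data_path("mp3d", "train"))
# data/datasets/pointnav/mp3d/v1/train/train.json.gz
```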
2 changes: 1 addition & 1 deletion configs/habitat_nav_task_config.yaml
@@ -34,5 +34,5 @@ SIMULATOR:
TYPE: HabitatSimSemanticSensor
DATASET:
TYPE: PointNav-v1
-DATA_PATH: data/datasets/pointnav/suncg/v1/{split}/{split}.json.gz
+DATA_PATH: data/datasets/pointnav/mp3d/v1/{split}/{split}.json.gz
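The `{split}` placeholder in DATA_PATH is filled from the dataset split when the episodes are loaded. A hedged sketch of selecting a split through habitat-api follows; it assumes the `habitat.get_config` and yacs defrost/freeze API of the 2019 habitat-api releases this repository targets.

```python
import habitat  # assumes habitat-api is installed as described in the README

# Load the SplitNet task config and point it at the validation split.
config = habitat.get_config("configs/habitat_nav_task_config.yaml")
config.defrost()
config.DATASET.SPLIT = "val"
config.freeze()

# The dataset loader substitutes {split} with the chosen split, e.g.
# data/datasets/pointnav/mp3d/v1/val/val.json.gz
print(config.DATASET.DATA_PATH.format(split=config.DATASET.SPLIT))
```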

6 changes: 1 addition & 5 deletions dataset_dump/create_pointnav_dataset.py
@@ -18,7 +18,7 @@
DEBUG = False

DATA_SPLIT = "train"
DATASET = "suncg"
DATASET = "mp3d"
CFG = "configs/habitat_nav_task_config.yaml"

if DATA_SPLIT == "train":
@@ -48,8 +48,6 @@
# get list of all scenes
if DATASET == "mp3d":
scenes = sorted(glob.glob("data/scene_datasets/mp3d/*"))
-elif DATASET == "suncg":
-scenes = sorted(glob.glob("data/scene_datasets/suncg/house/*"))
elif DATASET == "gibson":
scenes = sorted(glob.glob("data/scene_datasets/mp3d/*"))
else:
@@ -75,8 +73,6 @@
for ii, house in enumerate(scenes):
if DATASET == "mp3d":
scene_id = house + os.sep + house + ".glb"
-elif DATASET == "suncg":
-scene_id = (house + "/house.json",)
elif DATASET == "gibson":
scene_id = house + ".glb"
else:
4 changes: 2 additions & 2 deletions download_dataset.sh
@@ -17,8 +17,8 @@ wget https://dl.fbaipublicfiles.com/splitnet/splitnet_dataset.tar
echo "Unzipping"
tar -xf splitnet_dataset.tar
mv splitnet_dataset/* .
-rm -rf splitnet_dataset
-rm -rf splitnet_dataset.tar
+rmdir splitnet_dataset
+rm splitnet_dataset.tar
echo "Success"
cd ..

4 changes: 2 additions & 2 deletions download_weights.sh
@@ -17,8 +17,8 @@ wget https://dl.fbaipublicfiles.com/splitnet/splitnet_models.tar
echo "Unzipping"
tar -xf splitnet_models.tar
mv splitnet_models/* .
-rm -rf splitnet_models
-rm -rf splitnet_models.tar
+rmdir splitnet_models
+rm splitnet_models.tar
echo "Success"
cd ..

4 changes: 1 addition & 3 deletions eval_splitnet.py
@@ -34,9 +34,7 @@


def get_eval_dataset(shell_args, data_subset="val"):
-if shell_args.dataset == "suncg":
-data_path = "data/datasets/pointnav/suncg/v1/{split}/{split}.json.gz"
-elif shell_args.dataset == "mp3d":
+if shell_args.dataset == "mp3d":
data_path = "data/datasets/pointnav/mp3d/v1/{split}/{split}.json.gz"
elif shell_args.dataset == "gibson":
data_path = "data/datasets/pointnav/gibson/v1/{split}/{split}.json.gz"
2 changes: 1 addition & 1 deletion eval_splitnet.sh
@@ -4,7 +4,7 @@
# This source code is licensed under the Creative Commons license found in the
# LICENSE file in the root directory of this source tree.

DATASET="suncg"
DATASET="mp3d"
TASK="pointnav"

export GLOG_minloglevel=2
6 changes: 2 additions & 4 deletions flee_test.py
@@ -10,7 +10,7 @@
from reinforcement_learning.nav_rl_env import RunAwayRLEnv, ExplorationRLEnv

data_subset = "val"
dataset = "suncg"
dataset = "mp3d"
max_episode_length = 250
RESOLUTION = 10

@@ -38,9 +38,7 @@ def draw_top_down_map(info, heading, output_size):
return top_down_map


-if dataset == "suncg":
-data_path = "data/datasets/pointnav/suncg/v1/{split}/{split}.json.gz"
-elif dataset == "mp3d":
+if dataset == "mp3d":
data_path = "data/datasets/pointnav/mp3d/v1/{split}/{split}.json.gz"
elif dataset == "gibson":
data_path = "data/datasets/pointnav/gibson/v1/{split}/{split}.json.gz"
10 changes: 5 additions & 5 deletions supervised_learning/splitnet_pretrain.py
@@ -277,12 +277,12 @@ def main():
)

sensors = ["RGB_SENSOR", "DEPTH_SENSOR"] + (["SEMANTIC_SENSOR"] if USE_SEMANTIC else [])
-if args.dataset == "suncg":
+if args.dataset == "mp3d":
data_train = HabitatImageGenerator(
render_gpus,
"suncg",
"mp3d",
args.data_subset,
"data/dumps/suncg/{split}/dataset_one_ep_per_scene.json.gz",
"data/dumps/mp3d/{split}/dataset_one_ep_per_scene.json.gz",
images_before_reset=1000,
sensors=sensors,
transform=train_transforms,
@@ -293,9 +293,9 @@

data_test = HabitatImageGenerator(
render_gpus,
"suncg",
"mp3d",
"val",
"data/dumps/suncg/{split}/dataset_one_ep_per_scene.json.gz",
"data/dumps/mp3d/{split}/dataset_one_ep_per_scene.json.gz",
images_before_reset=1000,
sensors=sensors,
)
