Import sample datasets from MRPT project
jlblancoc committed Oct 1, 2015
1 parent ca10479 commit 5ef517d
Showing 25 changed files with 975 additions and 42 deletions.
5 changes: 5 additions & 0 deletions datasets/README.txt
@@ -0,0 +1,5 @@
To generate these datasets, compile RWT [1] and invoke:

rwt-dataset-simulator <FILENAME.cfg>

[1] https://github.com/jlblancoc/recursive-world-toolkit
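
For example, assuming the compiled rwt-dataset-simulator binary is on your PATH, each dataset in this directory can be regenerated by pointing the tool at its configuration file:

rwt-dataset-simulator tutorials_dataset-cartesian.cfg
rwt-dataset-simulator tutorials_dataset-range-bearing-2d.cfg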
64 changes: 64 additions & 0 deletions datasets/tutorials_dataset-cartesian-3d.cfg
@@ -0,0 +1,64 @@
// ---------------------------------------------------------------
// An example configuration file for rwt-dataset-simulator
//
// Author: Jose Luis Blanco Claraco, Oct 26 2011.
// ---------------------------------------------------------------

// World definition
// -------------------------------
[world]
// "input": RWL file to load and compile
// Can be an absolute path, or a path relative to this configuration file
// (this applies to all other file names in this config file).
input = world-rooms-3d.rwl

// If the map contains random elements, set the seed to -1 to initialize it
// from the clock, or to any value >= 0 for a fixed pseudorandom sequence.
//random_seed = -1

// Path to simulate
// -------------------------------
[path]
// The path is a sequence of 3D way-points, and may come from different sources.
source_node_path_IDs= 0 18


// Maximum length of each step:
max_step_lin = 0.30 // meters

// Show the robot moving through the path as it is simulated? (Much slower,
// but useful for debugging.)
show_live_3D = true
show_live_3D_sleep_ms = 5 // Slow down (in milliseconds) while rendering


// ----------------------------------------------------------------------
// Sensors to simulate
// See: http://code.google.com/p/recursive-world-toolkit/wiki/Sensors
// ----------------------------------------------------------------------
[sensor]
// Type of sensor to simulate:
type = cartesian_sensor

minRange = 0 // meters
maxRange = 3.0 // meters
fov_h = 140 // Horizontal Field of View (deg)
fov_v = 100 // Vertical Field of View (deg)
xyz_noise_std = 0 // One sigma of Gaussian noise (meters)
check_min_features_per_frame = 0 // Raise an error if a frame contains fewer features than this.

sensor_pose_on_robot = [0 0 0 0 0 0] // (x,y,z,yaw,pitch,roll), angles in degrees
//sensor_pose_on_robot = [0.1 0.2 0.3 10 20 30] // (x,y,z,yaw,pitch,roll), angles in degrees


// -------------------------------
// Output Format
// -------------------------------
[dataset-format]
// Path and prefix of all output files (*_SENSOR.txt, *_GT_PATH.txt, etc.)
output_files_prefix = dataset_tutorials_cartesian_3d
//output_files_prefix = dataset_tutorials_cartesian_3d_displ

//observations_as_c_structs = true // Useful for copying the dataset into tutorial code as examples
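
As a rough illustration of what the [sensor] parameters above control (a sketch with an assumed sensor-frame convention, not RWT's actual implementation), the following Python snippet gates a landmark expressed in the sensor frame by the configured range and field of view, then perturbs it with xyz_noise_std:

import math
import random

# Sensor parameters from tutorials_dataset-cartesian-3d.cfg
MIN_RANGE = 0.0               # meters
MAX_RANGE = 3.0               # meters
FOV_H = math.radians(140.0)   # horizontal field of view
FOV_V = math.radians(100.0)   # vertical field of view
XYZ_NOISE_STD = 0.0           # one sigma of Gaussian noise (meters)

def observe(x, y, z):
    """Noisy (x, y, z) observation of a landmark given in the sensor frame
    (x forward, y left, z up -- an assumed convention), or None if the
    landmark falls outside the simulated range or field of view."""
    rng = math.sqrt(x * x + y * y + z * z)
    if not (MIN_RANGE <= rng <= MAX_RANGE):
        return None
    yaw = math.atan2(y, x)                   # horizontal angle to the landmark
    pitch = math.atan2(z, math.hypot(x, y))  # vertical angle to the landmark
    if abs(yaw) > FOV_H / 2 or abs(pitch) > FOV_V / 2:
        return None
    noise = lambda: random.gauss(0.0, XYZ_NOISE_STD)
    return (x + noise(), y + noise(), z + noise())

print(observe(1.5, 0.5, 0.2))  # inside range and FOV: a (possibly noisy) point
print(observe(5.0, 0.0, 0.0))  # beyond maxRange: None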


62 changes: 62 additions & 0 deletions datasets/tutorials_dataset-cartesian.cfg
@@ -0,0 +1,62 @@
// ---------------------------------------------------------------
// An example configuration file for rwt-dataset-simulator
//
// Author: Jose Luis Blanco Claraco, Oct 26 2011.
// ---------------------------------------------------------------

// World definition
// -------------------------------
[world]
// "input": RWL file to load and compile
// Can be an absolute path, or a path relative to this configuration file
// (this applies to all other file names in this config file).
input = world-rooms-2d.rwl

// If the map contains random elements, set the seed to -1 to initialize it
// from the clock, or to any value >= 0 for a fixed pseudorandom sequence.
//random_seed = -1

// Path to simulate
// -------------------------------
[path]
// The path is a sequence of 3D way-points, and may come from different sources.
source_node_path_IDs= 0 18


// Maximum length of each step:
max_step_lin = 0.30 // meters

// Show the robot moving through the path as it is simulated? (Much slower,
// but useful for debugging.)
show_live_3D = true
show_live_3D_sleep_ms = 5 // Slow down (in milliseconds) while rendering


// ----------------------------------------------------------------------
// Sensors to simulate
// See: http://code.google.com/p/recursive-world-toolkit/wiki/Sensors
// ----------------------------------------------------------------------
[sensor]
// Type of sensor to simulate:
type = cartesian_sensor

minRange = 0 // meters
maxRange = 5.0 // meters
fov_h = 180 // Horizontal Field of View (deg)
fov_v = 140 // Vertical Field of View (deg)
xyz_noise_std = 0 // One sigma of Gaussian noise (meters)
check_min_features_per_frame = 0 // Raise an error if a frame contains fewer features than this.

//sensor_pose_on_robot (TO DO): Now it's fixed to (yaw,pitch,roll)=(0,0,0)
// -------------------------------
// Output Format
// -------------------------------
[dataset-format]
// Path and prefix of all output files (*_SENSOR.txt, *_GT_PATH.txt, etc.)
output_files_prefix = dataset_tutorials_cartesian_2d
//observations_as_c_structs = true // Useful for copying the dataset into tutorial code as examples
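
The max_step_lin parameter above bounds how far the simulated robot advances between consecutive simulation steps. Here is a small Python sketch of that idea (an illustration under assumed behavior, not RWT's code), resampling a 2D waypoint list so no step exceeds the configured maximum:

import math

MAX_STEP_LIN = 0.30  # meters, as configured above

def resample_path(waypoints):
    """Insert intermediate points so that consecutive path points are at
    most MAX_STEP_LIN apart (linear interpolation between 2D waypoints)."""
    out = [waypoints[0]]
    for (x0, y0), (x1, y1) in zip(waypoints, waypoints[1:]):
        n_steps = max(1, math.ceil(math.hypot(x1 - x0, y1 - y0) / MAX_STEP_LIN))
        for k in range(1, n_steps + 1):
            t = k / n_steps
            out.append((x0 + t * (x1 - x0), y0 + t * (y1 - y0)))
    return out

path = resample_path([(0.0, 0.0), (1.0, 0.0), (1.0, 0.5)])
print(len(path))  # 7 points: the start, plus 4 + 2 interpolated steps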
94 changes: 94 additions & 0 deletions datasets/tutorials_dataset-monocular.cfg
@@ -0,0 +1,94 @@
// ---------------------------------------------------------------
// An example configuration file for rwt-dataset-simulator
//
// Author: Jose Luis Blanco Claraco, Oct 26 2011.
// ---------------------------------------------------------------

// World definition
// -------------------------------
[world]
// "input": RWL file to load and compile
// Can be an absolute path, or a path relative to this configuration file
// (this applies to all other file names in this config file).
input = world-rooms-3d.rwl

// If the map contains random elements, set the seed to -1 to initialize it
// from the clock, or to any value >= 0 for a fixed pseudorandom sequence.
//random_seed = -1

// Path to simulate
// -------------------------------
[path]
// The path is a sequence of 3D way-points, and may come from different sources.
// Only set one of the following variables:
//
// 1) A plain text file with "X Y Z" lines:
// source_3D_text_file = <FILE.txt>
//
// 2) A sequence of node IDs, corresponding to the 0-based indices of the RWL
// program "NODE" primitives:
// source_node_IDs = ID_1 ID_2 ID_3 .... ID_N
//
// 3) A sequence of node IDs, loaded from a text file (one ID per line):
// source_node_IDs_text_file = <FILE.txt>
//
// 4) A "smart" automatic generator of paths from the RWL nodes.
// [TO DO]
//

//source_3D_text_file = dataset-demo1_waypoints.txt
source_node_IDs = 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18

// Maximum length of each step:
max_step_lin = 0.30 // meters

// Show the robot moving through the path as it is simulated? (Much slower,
// but useful for debugging.)
show_live_3D = true
show_live_3D_sleep_ms = 5 // Slow down (in milliseconds) while rendering


// ----------------------------------------------------------------------
// Sensors to simulate
// See: http://code.google.com/p/recursive-world-toolkit/wiki/Sensors
// ----------------------------------------------------------------------
[sensor]
// Type of sensor to simulate. Can be:
// * "camera":
// * "range-only":
//
type = camera

maxRange = 5.0 // meters

// Camera parameters:
resolution = [800 600]
cx = 400
cy = 300
fx = 200
fy = 200
dist = [0 0 0 0 0] // [K1 K2 T1 T2 K3]
focal_length = 0.010 // This param is ignored

check_min_features_per_frame = 4

sensor_pose_on_robot = [0 0 0 -90 0 -90] // (x,y,z,yaw,pitch,roll), angles in degrees
//sensor_pose_on_robot = [0 0 0 0 0 0] // (x,y,z,yaw,pitch,roll), angles in degrees

// Odometry noise parameters:
// ...

// For the sensor & odometry noise: set the seed to -1 to initialize it from
// the clock, or to any value >= 0 for a fixed pseudorandom sequence.
//random_seed = -1


// -------------------------------
// Output Format
// -------------------------------
[dataset-format]
// Path and prefix of all output files (*_SENSOR.txt, *_GT_PATH.txt, etc.)
output_files_prefix = OUT_dataset_tutorials_mono

//observations_as_c_structs = true // Useful for copying the dataset into tutorial code as examples
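
The camera block above (resolution, cx, cy, fx, fy, with zero distortion) describes a standard pinhole model. As a worked illustration only, this Python sketch projects a 3D point given in the camera frame onto the simulated 800x600 image with those intrinsics:

# Intrinsics taken from tutorials_dataset-monocular.cfg
RESOLUTION = (800, 600)
CX, CY = 400.0, 300.0
FX, FY = 200.0, 200.0

def project(x, y, z):
    """Pinhole projection of a point (x, y, z) expressed in the camera
    frame, with z the depth along the optical axis. Returns (u, v) pixel
    coordinates, or None if the point is behind the camera or off-image."""
    if z <= 0:
        return None
    u = CX + FX * x / z
    v = CY + FY * y / z
    if 0 <= u < RESOLUTION[0] and 0 <= v < RESOLUTION[1]:
        return (u, v)
    return None

print(project(0.5, -0.2, 2.0))  # -> (450.0, 280.0)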

64 changes: 64 additions & 0 deletions datasets/tutorials_dataset-range-bearing-2d.cfg
@@ -0,0 +1,64 @@
// ---------------------------------------------------------------
// An example configuration file for rwt-dataset-simulator
//
// Author: Jose Luis Blanco Claraco, Oct 26 2011.
// ---------------------------------------------------------------

// World definition
// -------------------------------
[world]
// "input": RWL file to load and compile
// Can be an absolute path, or a path relative to this configuration file
// (this applies to all other file names in this config file).
input = world-rooms-2d.rwl

// If the map contains random elements, set the seed to -1 to initialize it
// from the clock, or to any value >= 0 for a fixed pseudorandom sequence.
//random_seed = -1

// Path to simulate
// -------------------------------
[path]
// The path is a sequence of 3D way-points, and may come from different sources.
source_node_path_IDs= 0 18


// Maximum length of each step:
max_step_lin = 0.80 // meters
max_step_ang = 30 // degrees

// Show the robot moving through the path as it is simulated? (Much slower,
// but useful for debugging.)
show_live_3D = true
show_live_3D_sleep_ms = 5 // Slow down (in milliseconds) while rendering


// ----------------------------------------------------------------------
// Sensors to simulate
// See: http://code.google.com/p/recursive-world-toolkit/wiki/Sensors
// ----------------------------------------------------------------------
[sensor]
// Type of sensor to simulate:
type = range_bearing

minRange = 0 // meters
maxRange = 4.0 // meters
fov_h = 100 // Horizontal Field of View (deg)
fov_v = 140 // Vertical Field of View (deg)
range_noise_std = 0 // One sigma of Gaussian noise (meters)
yaw_noise_std = 0 // One sigma of Gaussian noise (deg)
pitch_noise_std = 0 // One sigma of Gaussian noise (deg)
check_min_features_per_frame = 0 // Raise an error if a frame contains fewer features than this.

//sensor_pose_on_robot (TO DO): Now it's fixed to (yaw,pitch,roll)=(0,0,0)
// -------------------------------
// Output Format
// -------------------------------
[dataset-format]
// Path and prefix of all output files (*_SENSOR.txt, *_GT_PATH.txt, etc.)
output_files_prefix = dataset_tutorials_range_bearing_2d
//observations_as_c_structs = true // Useful for copying the dataset into tutorial code as examples
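
A range_bearing observation is essentially the landmark's relative position written in polar form. The following Python sketch (an assumed, simplified measurement model, not RWT's implementation) computes the range, yaw and pitch of a landmark expressed in the sensor frame and applies the noise standard deviations configured above:

import math
import random

RANGE_NOISE_STD = 0.0   # meters, as configured above
YAW_NOISE_STD = 0.0     # degrees
PITCH_NOISE_STD = 0.0   # degrees

def range_bearing(x, y, z):
    """Range (m), yaw (deg) and pitch (deg) of a landmark at (x, y, z) in
    the sensor frame (x forward, y left, z up -- an assumed convention)."""
    rng = math.sqrt(x * x + y * y + z * z)
    yaw = math.degrees(math.atan2(y, x))
    pitch = math.degrees(math.atan2(z, math.hypot(x, y)))
    return (rng + random.gauss(0.0, RANGE_NOISE_STD),
            yaw + random.gauss(0.0, YAW_NOISE_STD),
            pitch + random.gauss(0.0, PITCH_NOISE_STD))

print(range_bearing(2.0, 1.0, 0.0))  # approx. (2.24 m, 26.6 deg, 0.0 deg)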
63 changes: 63 additions & 0 deletions datasets/tutorials_dataset-range-bearing-3d.cfg
@@ -0,0 +1,63 @@
// ---------------------------------------------------------------
// An example configuration file for rwt-dataset-simulator
//
// Author: Jose Luis Blanco Claraco, Oct 26 2011.
// ---------------------------------------------------------------

// World definition
// -------------------------------
[world]
// "input": RWL file to load and compile
// Can be an absolute path, or a path relative to this configuration file
// (this applies to all other file names in this config file).
input = world-rooms-3d.rwl

// If the map contains random elements, set the seed to -1 to initialize it
// from the clock, or to any value >= 0 for a fixed pseudorandom sequence.
//random_seed = -1

// Path to simulate
// -------------------------------
[path]
// The path is a sequence of 3D way-points, and may come from different sources.
source_node_path_IDs= 0 18


// Maximum length of each step:
max_step_lin = 0.30 // meters

// Show the robot moving through the path as it is simulated? (Much slower,
// but useful for debugging.)
show_live_3D = true
show_live_3D_sleep_ms = 5 // Slow down (in milliseconds) while rendering


// ----------------------------------------------------------------------
// Sensors to simulate
// See: http://code.google.com/p/recursive-world-toolkit/wiki/Sensors
// ----------------------------------------------------------------------
[sensor]
// Type of sensor to simulate:
type = range_bearing

minRange = 0 // meters
maxRange = 4.0 // meters
fov_h = 100 // Horizontal Field of View (deg)
fov_v = 140 // Vertical Field of View (deg)
range_noise_std = 0 // One sigma of Gaussian noise (meters)
yaw_noise_std = 0 // One sigma of Gaussian noise (deg)
pitch_noise_std = 0 // One sigma of Gaussian noise (deg)
check_min_features_per_frame = 0 // Raise an error if a frame contains fewer features than this.

//sensor_pose_on_robot (TO DO): Now it's fixed to (yaw,pitch,roll)=(0,0,0)
// -------------------------------
// Output Format
// -------------------------------
[dataset-format]
// Path and prefix of all output files (*_SENSOR.txt, *_GT_PATH.txt, etc.)
output_files_prefix = dataset_tutorials_range_bearing
//observations_as_c_structs = true // Useful for copying the dataset into tutorial code as examples
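
After a run, the files named by output_files_prefix can be inspected directly. Assuming they are plain whitespace-separated numeric text files (an assumption; consult the RWT documentation for the exact column layout), a minimal loading sketch in Python would be:

import numpy as np

# File names built from the prefix configured in [dataset-format] above;
# check the actual names written by rwt-dataset-simulator on your system.
prefix = "dataset_tutorials_range_bearing"
observations = np.loadtxt(prefix + "_SENSOR.txt")
ground_truth = np.loadtxt(prefix + "_GT_PATH.txt")

print("observation rows:", observations.shape)
print("ground-truth path poses:", ground_truth.shape)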
