diff --git a/cpp/open3d/data/Dataset.h b/cpp/open3d/data/Dataset.h
index 97b7ea9fd8f..417e3515dce 100644
--- a/cpp/open3d/data/Dataset.h
+++ b/cpp/open3d/data/Dataset.h
@@ -723,26 +723,26 @@ class PaintedPlasterTexture : public DownloadDataset {
 /// cloud, rgb sequence, clean depth sequence, noisy depth sequence, oni
 /// sequence, and ground-truth camera trajectory.
 ///
-/// RedwoodIndoorLivingRoom1
-/// ├── colors
-/// │   ├── 00000.jpg
-/// │   ├── 00001.jpg
-/// │   ├── ...
-/// │   └── 02869.jpg
-/// ├── depth
-/// │   ├── 00000.png
-/// │   ├── 00001.png
-/// │   ├── ...
-/// │   └── 02869.png
-/// ├── depth_noisy
-/// │   ├── 00000.png
-/// │   ├── 00001.png
-/// │   ├── ...
-/// │   └── 02869.png
-/// ├── dist-model.txt
-/// ├── livingroom1.oni
-/// ├── livingroom1-traj.txt
-/// └── livingroom.ply
+/// RedwoodIndoorLivingRoom1
+/// ├── colors
+/// │   ├── 00000.jpg
+/// │   ├── 00001.jpg
+/// │   ├── ...
+/// │   └── 02869.jpg
+/// ├── depth
+/// │   ├── 00000.png
+/// │   ├── 00001.png
+/// │   ├── ...
+/// │   └── 02869.png
+/// ├── depth_noisy
+/// │   ├── 00000.png
+/// │   ├── 00001.png
+/// │   ├── ...
+/// │   └── 02869.png
+/// ├── dist-model.txt
+/// ├── livingroom1.oni
+/// ├── livingroom1-traj.txt
+/// └── livingroom.ply
 class RedwoodIndoorLivingRoom1 : public DownloadDataset {
 public:
     RedwoodIndoorLivingRoom1(const std::string& data_root = "");
@@ -775,30 +775,30 @@ class RedwoodIndoorLivingRoom1 : public DownloadDataset {
 };
 
 /// \class RedwoodIndoorLivingRoom2 (Augmented ICL-NUIM Dataset)
-/// \brief Data class for `RedwoodIndoorLivingRoom1`, containing dense point
+/// \brief Data class for `RedwoodIndoorLivingRoom2`, containing dense point
 /// cloud, rgb sequence, clean depth sequence, noisy depth sequence, oni
 /// sequence, and ground-truth camera trajectory.
 ///
-/// RedwoodIndoorLivingRoom2
-/// ├── colors
-/// │   ├── 00000.jpg
-/// │   ├── 00001.jpg
-/// │   ├── ...
-/// │   └── 02349.jpg
-/// ├── depth
-/// │   ├── 00000.png
-/// │   ├── 00001.png
-/// │   ├── ...
-/// │   └── 02349.png
-/// ├── depth_noisy
-/// │   ├── 00000.png
-/// │   ├── 00001.png
-/// │   ├── ...
-/// │   └── 02349.png
-/// ├── dist-model.txt
-/// ├── livingroom2.oni
-/// ├── livingroom2-traj.txt
-/// └── livingroom.ply
+/// RedwoodIndoorLivingRoom2
+/// ├── colors
+/// │   ├── 00000.jpg
+/// │   ├── 00001.jpg
+/// │   ├── ...
+/// │   └── 02349.jpg
+/// ├── depth
+/// │   ├── 00000.png
+/// │   ├── 00001.png
+/// │   ├── ...
+/// │   └── 02349.png
+/// ├── depth_noisy
+/// │   ├── 00000.png
+/// │   ├── 00001.png
+/// │   ├── ...
+/// │   └── 02349.png
+/// ├── dist-model.txt
+/// ├── livingroom2.oni
+/// ├── livingroom2-traj.txt
+/// └── livingroom.ply
 class RedwoodIndoorLivingRoom2 : public DownloadDataset {
 public:
     RedwoodIndoorLivingRoom2(const std::string& data_root = "");
@@ -831,30 +831,30 @@ class RedwoodIndoorLivingRoom2 : public DownloadDataset {
 };
 
 /// \class RedwoodIndoorOffice1 (Augmented ICL-NUIM Dataset)
-/// \brief Data class for `RedwoodIndoorLivingRoom1`, containing dense point
+/// \brief Data class for `RedwoodIndoorOffice1`, containing dense point
 /// cloud, rgb sequence, clean depth sequence, noisy depth sequence, oni
 /// sequence, and ground-truth camera trajectory.
 ///
-/// RedwoodIndoorOffice1
-/// ├── colors
-/// │   ├── 00000.jpg
-/// │   ├── 00001.jpg
-/// │   ├── ...
-/// │   └── 02689.jpg
-/// ├── depth
-/// │   ├── 00000.png
-/// │   ├── 00001.png
-/// │   ├── ...
-/// │   └── 02689.png
-/// ├── depth_noisy
-/// │   ├── 00000.png
-/// │   ├── 00001.png
-/// │   ├── ...
-/// │   └── 02689.png
-/// ├── dist-model.txt
-/// ├── office1.oni
-/// ├── office1-traj.txt
-/// └── office.ply
+/// RedwoodIndoorOffice1
+/// ├── colors
+/// │   ├── 00000.jpg
+/// │   ├── 00001.jpg
+/// │   ├── ...
+/// │   └── 02689.jpg
+/// ├── depth
+/// │   ├── 00000.png
+/// │   ├── 00001.png
+/// │   ├── ...
+/// │   └── 02689.png
+/// ├── depth_noisy
+/// │   ├── 00000.png
+/// │   ├── 00001.png
+/// │   ├── ...
+/// │   └── 02689.png
+/// ├── dist-model.txt
+/// ├── office1.oni
+/// ├── office1-traj.txt
+/// └── office.ply
 class RedwoodIndoorOffice1 : public DownloadDataset {
 public:
     RedwoodIndoorOffice1(const std::string& data_root = "");
@@ -887,30 +887,30 @@ class RedwoodIndoorOffice1 : public DownloadDataset {
 };
 
 /// \class RedwoodIndoorOffice2 (Augmented ICL-NUIM Dataset)
-/// \brief Data class for `RedwoodIndoorLivingRoom1`, containing dense point
+/// \brief Data class for `RedwoodIndoorOffice2`, containing dense point
 /// cloud, rgb sequence, clean depth sequence, noisy depth sequence, oni
 /// sequence, and ground-truth camera trajectory.
 ///
-/// RedwoodIndoorOffice2
-/// ├── colors
-/// │   ├── 00000.jpg
-/// │   ├── 00001.jpg
-/// │   ├── ...
-/// │   └── 02537.jpg
-/// ├── depth
-/// │   ├── 00000.png
-/// │   ├── 00001.png
-/// │   ├── ...
-/// │   └── 02537.png
-/// ├── depth_noisy
-/// │   ├── 00000.png
-/// │   ├── 00001.png
-/// │   ├── ...
-/// │   └── 02537.png
-/// ├── dist-model.txt
-/// ├── office2.oni
-/// ├── office2-traj.txt
-/// └── office.ply
+/// RedwoodIndoorOffice2
+/// ├── colors
+/// │   ├── 00000.jpg
+/// │   ├── 00001.jpg
+/// │   ├── ...
+/// │   └── 02537.jpg
+/// ├── depth
+/// │   ├── 00000.png
+/// │   ├── 00001.png
+/// │   ├── ...
+/// │   └── 02537.png
+/// ├── depth_noisy
+/// │   ├── 00000.png
+/// │   ├── 00001.png
+/// │   ├── ...
+/// │   └── 02537.png
+/// ├── dist-model.txt
+/// ├── office2.oni
+/// ├── office2-traj.txt
+/// └── office.ply
 class RedwoodIndoorOffice2 : public DownloadDataset {
 public:
     RedwoodIndoorOffice2(const std::string& data_root = "");
diff --git a/cpp/pybind/data/dataset.cpp b/cpp/pybind/data/dataset.cpp
index 9cfcca0293c..b86b40c5464 100644
--- a/cpp/pybind/data/dataset.cpp
+++ b/cpp/pybind/data/dataset.cpp
@@ -974,28 +974,28 @@ void pybind_redwood_indoor_living_room1(py::module& m) {
             R"doc(RedwoodIndoorLivingRoom1 (Augmented ICL-NUIM Dataset)
 Data class for `RedwoodIndoorLivingRoom1`, containing dense point
 cloud, rgb sequence, clean depth sequence, noisy depth sequence, oni
-sequence, and ground-truth camera trajectory.
+sequence, and ground-truth camera trajectory. ::
 
-RedwoodIndoorLivingRoom1
-|-- colors
-|   |-- 00000.jpg
-|   |-- 00001.jpg
-|   |-- ...
-|   '-- 02869.jpg
-|-- depth
-|   |-- 00000.png
-|   |-- 00001.png
-|   |-- ...
-|   '-- 02869.png
-|-- depth_noisy
-|   |-- 00000.png
-|   |-- 00001.png
-|   |-- ...
-|   '-- 02869.png
-|-- dist-model.txt
-|-- livingroom1.oni
-|-- livingroom1-traj.txt
-'-- livingroom.ply
+    RedwoodIndoorLivingRoom1
+    |-- colors
+    |   |-- 00000.jpg
+    |   |-- 00001.jpg
+    |   |-- ...
+    |   '-- 02869.jpg
+    |-- depth
+    |   |-- 00000.png
+    |   |-- 00001.png
+    |   |-- ...
+    |   '-- 02869.png
+    |-- depth_noisy
+    |   |-- 00000.png
+    |   |-- 00001.png
+    |   |-- ...
+    |   '-- 02869.png
+    |-- dist-model.txt
+    |-- livingroom1.oni
+    |-- livingroom1-traj.txt
+    '-- livingroom.ply
 )doc");
     dataset.def(py::init<const std::string&>(), "data_root"_a = "");
     dataset.def_property_readonly("point_cloud_path",
@@ -1029,28 +1029,28 @@ void pybind_redwood_indoor_living_room2(py::module& m) {
             R"doc(RedwoodIndoorLivingRoom2 (Augmented ICL-NUIM Dataset)
 Data class for `RedwoodIndoorLivingRoom2`, containing dense point
 cloud, rgb sequence, clean depth sequence, noisy depth sequence, oni
-sequence, and ground-truth camera trajectory.
+sequence, and ground-truth camera trajectory. ::
 
-RedwoodIndoorLivingRoom2
-|-- colors
-|   |-- 00000.jpg
-|   |-- 00001.jpg
-|   |-- ...
-|   '-- 02349.jpg
-|-- depth
-|   |-- 00000.png
-|   |-- 00001.png
-|   |-- ...
-|   '-- 02349.png
-|-- depth_noisy
-|   |-- 00000.png
-|   |-- 00001.png
-|   |-- ...
-|   '-- 02349.png
-|-- dist-model.txt
-|-- livingroom2.oni
-|-- livingroom2-traj.txt
-'-- livingroom.ply
+    RedwoodIndoorLivingRoom2
+    |-- colors
+    |   |-- 00000.jpg
+    |   |-- 00001.jpg
+    |   |-- ...
+    |   '-- 02349.jpg
+    |-- depth
+    |   |-- 00000.png
+    |   |-- 00001.png
+    |   |-- ...
+    |   '-- 02349.png
+    |-- depth_noisy
+    |   |-- 00000.png
+    |   |-- 00001.png
+    |   |-- ...
+    |   '-- 02349.png
+    |-- dist-model.txt
+    |-- livingroom2.oni
+    |-- livingroom2-traj.txt
+    '-- livingroom.ply
 )doc");
     dataset.def(py::init<const std::string&>(), "data_root"_a = "");
     dataset.def_property_readonly("point_cloud_path",
@@ -1083,28 +1083,28 @@ void pybind_redwood_indoor_office1(py::module& m) {
             R"doc(RedwoodIndoorOffice1 (Augmented ICL-NUIM Dataset)
 Data class for `RedwoodIndoorOffice1`, containing dense point
 cloud, rgb sequence, clean depth sequence, noisy depth sequence, oni
-sequence, and ground-truth camera trajectory.
+sequence, and ground-truth camera trajectory. ::
 
-RedwoodIndoorOffice1
-|-- colors
-|   |-- 00000.jpg
-|   |-- 00001.jpg
-|   |-- ...
-|   '-- 02689.jpg
-|-- depth
-|   |-- 00000.png
-|   |-- 00001.png
-|   |-- ...
-|   '-- 02689.png
-|-- depth_noisy
-|   |-- 00000.png
-|   |-- 00001.png
-|   |-- ...
-|   '-- 02689.png
-|-- dist-model.txt
-|-- office1.oni
-|-- office1-traj.txt
-'-- office.ply
+    RedwoodIndoorOffice1
+    |-- colors
+    |   |-- 00000.jpg
+    |   |-- 00001.jpg
+    |   |-- ...
+    |   '-- 02689.jpg
+    |-- depth
+    |   |-- 00000.png
+    |   |-- 00001.png
+    |   |-- ...
+    |   '-- 02689.png
+    |-- depth_noisy
+    |   |-- 00000.png
+    |   |-- 00001.png
+    |   |-- ...
+    |   '-- 02689.png
+    |-- dist-model.txt
+    |-- office1.oni
+    |-- office1-traj.txt
+    '-- office.ply
 )doc");
     dataset.def(py::init<const std::string&>(), "data_root"_a = "");
     dataset.def_property_readonly("point_cloud_path",
@@ -1136,28 +1136,28 @@ void pybind_redwood_indoor_office2(py::module& m) {
             R"doc(RedwoodIndoorOffice2 (Augmented ICL-NUIM Dataset)
 Data class for `RedwoodIndoorOffice2`, containing dense point
 cloud, rgb sequence, clean depth sequence, noisy depth sequence, oni
-sequence, and ground-truth camera trajectory.
+sequence, and ground-truth camera trajectory. ::
 
-RedwoodIndoorOffice2
-|-- colors
-|   |-- 00000.jpg
-|   |-- 00001.jpg
-|   |-- ...
-|   '-- 02537.jpg
-|-- depth
-|   |-- 00000.png
-|   |-- 00001.png
-|   |-- ...
-|   '-- 02537.png
-|-- depth_noisy
-|   |-- 00000.png
-|   |-- 00001.png
-|   |-- ...
-|   '-- 02537.png
-|-- dist-model.txt
-|-- office2.oni
-|-- office2-traj.txt
-'-- office.ply
+    RedwoodIndoorOffice2
+    |-- colors
+    |   |-- 00000.jpg
+    |   |-- 00001.jpg
+    |   |-- ...
+    |   '-- 02537.jpg
+    |-- depth
+    |   |-- 00000.png
+    |   |-- 00001.png
+    |   |-- ...
+    |   '-- 02537.png
+    |-- depth_noisy
+    |   |-- 00000.png
+    |   |-- 00001.png
+    |   |-- ...
+    |   '-- 02537.png
+    |-- dist-model.txt
+    |-- office2.oni
+    |-- office2-traj.txt
+    '-- office.ply
 )doc");
     dataset.def(py::init<const std::string&>(), "data_root"_a = "");
    dataset.def_property_readonly("point_cloud_path",
diff --git a/docs/conf.in.py b/docs/conf.in.py
index 0954849a09b..39fd31ad673 100644
--- a/docs/conf.in.py
+++ b/docs/conf.in.py
@@ -27,12 +27,10 @@
 # import sys
 # sys.path.insert(0, os.path.abspath('.'))
 
-import sys
 import os
 import re
 import subprocess
-from pathlib import Path
-import shutil
+import sys
 
 
 def get_git_short_hash():
@@ -120,7 +118,16 @@ def get_git_short_hash():
 # List of patterns, relative to source directory, that match files and
 # directories to ignore when looking for source files.
 # This patterns also effect to html_static_path and html_extra_path
-exclude_patterns = ["_build", "Thumbs.db", ".DS_Store", "**.ipynb_checkpoints"]
+exclude_patterns = [
+    "_build",
+    "Thumbs.db",
+    ".DS_Store",
+    "**.ipynb_checkpoints",
+    "docker.in.rst",
+    "getting_started.in.rst",
+    "jupyter/*/*.ipynb",
+    "python_api_in/*.rst",
+]
 
 # The name of the Pygments (syntax highlighting) style to use.
 pygments_style = "sphinx"
diff --git a/docs/contribute/contribution_recipes.rst b/docs/contribute/contribution_recipes.rst
index b3ff00c4bfb..03102849214 100644
--- a/docs/contribute/contribution_recipes.rst
+++ b/docs/contribute/contribution_recipes.rst
@@ -264,7 +264,7 @@ Case 4: When adding a Python tutorial
 
 .. note:: When you commit a ipynb notebook file make sure to remove the output
           cells to keep the commit sizes small.
-          You can use the script ``docs/jupyter/jupyter_strip_output.sh`` for
+          You can use the script ``docs/jupyter/jupyter_strip_output.py`` for
           stripping the output cells of all tutorials.
 
 Dos
diff --git a/docs/cpp_project.rst b/docs/cpp_project.rst
index ef931912723..89bfd98b70f 100644
--- a/docs/cpp_project.rst
+++ b/docs/cpp_project.rst
@@ -10,7 +10,7 @@ We provide two example CMake projects to demonstrate how to use Open3D in your
 CMake projects.
 
 * `Find Pre-Installed Open3D Package in CMake `_
-  This option can be used if you'd like Open3D build and install Open3D first,
+  This option can be used if you'd like to build and install Open3D first,
   then link your project to Open3D.
 * `Use Open3D as a CMake External Project `_
   This option can be used if you'd like Open3D to build alongside with your
diff --git a/docs/getting_started.in.rst b/docs/getting_started.in.rst
index e3ff7529fa6..ab448443432 100644
--- a/docs/getting_started.in.rst
+++ b/docs/getting_started.in.rst
@@ -100,7 +100,7 @@ install the latest development version directly with pip:
 
 .. code-block:: bash
 
-    pip install --trusted-host www.open3d.org -f http://www.open3d.org/docs/latest/getting_started.html open3d
+    pip install -U --trusted-host www.open3d.org -f http://www.open3d.org/docs/latest/getting_started.html open3d
 
 .. note:: The development wheels for Linux are named according to PEP600. Please
@@ -144,9 +144,7 @@ demonstrate the usage of Open3D Python interface. See
 ``examples/python`` for all Python examples.
 
 .. note:: Open3D's Python tutorial utilizes some external packages: ``numpy``,
-   ``matplotlib``, ``opencv-python``. OpenCV is only used for reconstruction
-   system. Please read ``util/install-deps-python.sh`` for installing these
-   packages.
+   ``matplotlib``, ``opencv-python``.
 
 .. _install_open3d_c++:
diff --git a/docs/index.rst b/docs/index.rst
index 391c0298e64..ca0da37702d 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -26,9 +26,9 @@ Open3D: A Modern Library for 3D Data Processing
    compilation
    cpp_project
    builddocs
-   open3d_ml
-   arm
    docker
+   arm
+   open3d_ml
 
 .. toctree::
    :maxdepth: 2
@@ -46,23 +46,6 @@ Open3D: A Modern Library for 3D Data Processing
    tutorial/sensor/index
    tutorial/reference
 
-.. toctree::
-   :maxdepth: 1
-   :caption: Contribute
-
-   contribute/contribute
-   contribute/contribution_recipes
-   contribute/styleguide
-
-.. toctree::
-   :maxdepth: 1
-   :caption: C++ API
-
-   cpp_api
-
-..
-   Note: when adding new modules, please also update documented_modules.txt.
-
 .. toctree::
    :maxdepth: 1
    :caption: Python API
@@ -88,3 +71,20 @@ Open3D: A Modern Library for 3D Data Processing
    python_example/pipelines/index
    python_example/utility/index
    python_example/visualization/index
+
+.. toctree::
+   :maxdepth: 1
+   :caption: C++ API
+
+   cpp_api
+
+.. toctree::
+   :maxdepth: 1
+   :caption: Contribute
+
+   contribute/contribute
+   contribute/contribution_recipes
+   contribute/styleguide
+
+..
+   Note: when adding new modules, please also update documented_modules.txt.
diff --git a/docs/jupyter/t_pipelines/t_icp_registration.ipynb b/docs/jupyter/t_pipelines/t_icp_registration.ipynb
index 44a21f39584..cb22159bc7f 100644
--- a/docs/jupyter/t_pipelines/t_icp_registration.ipynb
+++ b/docs/jupyter/t_pipelines/t_icp_registration.ipynb
@@ -405,7 +405,7 @@
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "#### 1. Set Inputs and Parameters"
+    "### 1. Set Inputs and Parameters"
    ]
   },
   {
@@ -453,7 +453,7 @@
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "#### 2. Get Registration Result from ICP"
+    "### 2. Get Registration Result from ICP"
    ]
   },
   {
@@ -602,7 +602,7 @@
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "#### 1. Set Inputs and Parameters"
+    "### 1. Set Inputs and Parameters"
    ]
   },
   {
@@ -656,7 +656,7 @@
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "#### 2. Get Registration Result from Multi-Scale ICP"
+    "### 2. Get Registration Result from Multi-Scale ICP"
    ]
   },
   {
@@ -884,7 +884,7 @@
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "#### 1. Set Inputs and Parameters"
+    "### 1. Set Inputs and Parameters"
    ]
   },
   {
diff --git a/docs/jupyter/visualization/visualization.ipynb b/docs/jupyter/visualization/visualization.ipynb
index 63dcf2ed863..9b7030599f6 100644
--- a/docs/jupyter/visualization/visualization.ipynb
+++ b/docs/jupyter/visualization/visualization.ipynb
@@ -129,7 +129,7 @@
    "metadata": {},
    "source": [
     "## Geometry primitives\n",
-    "The code below generates a box, a sphere, and a cylinder using `create_box`, `create_sphere`, and `create_cylinder`. The box is painted in red, the sphere is painted in blue, and the cylinder is painted in green. Normals are computed for all meshes to support Phong shading (see [Visualize 3D mesh](mesh.ipynb#visualize-a-3d-mesh) and [Surface normal estimation](mesh.ipynb#surface-normal-estimation)). We can even create a coordinate axis using `create_coordinate_frame`, with its origin point set at (-2, -2, -2)."
+    "The code below generates a box, a sphere, and a cylinder using `create_box`, `create_sphere`, and `create_cylinder`. The box is painted in red, the sphere is painted in blue, and the cylinder is painted in green. Normals are computed for all meshes to support Phong shading (see [Visualize 3D mesh](../geometry/mesh.ipynb#visualize-a-3d-mesh) and [Surface normal estimation](../geometry/mesh.ipynb#surface-normal-estimation)). We can even create a coordinate axis using `create_coordinate_frame`, with its origin point set at (-2, -2, -2)."
    ]
   },
   {
diff --git a/docs/make_docs.py b/docs/make_docs.py
index 817898de3eb..8249f66b29e 100644
--- a/docs/make_docs.py
+++ b/docs/make_docs.py
@@ -11,20 +11,21 @@
 # (3) make.py calls the actual `sphinx-build`
 
 import argparse
-import subprocess
-import sys
 import importlib
-import os
 import inspect
-import shutil
+import multiprocessing
+import os
 import re
-from pathlib import Path
-import nbformat
-import nbconvert
+import shutil
 import ssl
-import certifi
+import subprocess
+import sys
 import urllib.request
-import multiprocessing
+from pathlib import Path
+
+import certifi
+import nbconvert
+import nbformat
 
 
 def _create_or_clear_dir(dir_path):
@@ -66,10 +67,10 @@ def __init__(self, output_dir="python_api", input_dir="python_api_in"):
         self.output_dir = output_dir
         self.input_dir = input_dir
         self.module_names = PyAPIDocsBuilder._get_documented_module_names()
-        print("Generating *.rst Python API docs in directory: %s" %
-              self.output_dir)
 
     def generate_rst(self):
+        print(f"Generating *.rst Python API docs in directory: "
+              f"{self.output_dir}")
         _create_or_clear_dir(self.output_dir)
 
         for module_name in self.module_names:
@@ -279,8 +280,6 @@ def __init__(self, input_dir, pwd, output_dir="python_example"):
         sys.path.append(os.path.join(pwd, "..", "python", "tools"))
         from cli import _get_all_examples_dict
         self.get_all_examples_dict = _get_all_examples_dict
-        print("Generating *.rst Python example docs in directory: %s" %
-              self.output_dir)
 
     def _get_examples_dict(self):
         examples_dict = self.get_all_examples_dict()
@@ -305,11 +304,11 @@ def _generate_index(title, output_path):
             f.write(out_string)
 
     @staticmethod
-    def _add_example_to_docs(example, output_path):
+    def _add_example_to_docs(example: Path, output_path):
         shutil.copy(example, output_path)
-        out_string = (f"{example.stem}.py"
-                      f"\n```````````````````````````````````````\n"
-                      f"\n.. literalinclude:: {example.stem}.py"
+        out_string = (f"{example.name}"
+                      f"\n{'`' * (len(example.name))}\n"
+                      f"\n.. literalinclude:: {example.name}"
                       f"\n    :language: python"
                       f"\n    :linenos:"
                       f"\n\n\n")
@@ -318,6 +317,8 @@ def _add_example_to_docs(example, output_path):
             f.write(out_string)
 
     def generate_rst(self):
+        print(f"Generating *.rst Python example docs in directory: "
+              f"{self.output_dir}")
         _create_or_clear_dir(self.output_dir)
 
         examples_dict = self._get_examples_dict()
@@ -482,27 +483,26 @@ def run(self):
         # Jupyter notebooks
         os.environ["CI"] = "true"
 
-        # Copy and execute notebooks in the tutorial folder
+        # Copy from jupyter to the tutorial folder.
         nb_paths = []
-        nb_direct_copy = [
-            'draw_plotly.ipynb',
-            'hashmap.ipynb',
-            'jupyter_visualization.ipynb',
-            't_icp_registration.ipynb',
-            'tensor.ipynb',
-        ]
+        nb_parent_src = Path(self.current_file_dir) / "jupyter"
+        nb_parent_dst = Path(self.current_file_dir) / "tutorial"
         example_dirs = [
-            "geometry", "t_geometry", "core", "data", "pipelines",
-            "visualization", "t_pipelines"
+            name for name in os.listdir(nb_parent_src)
+            if os.path.isdir(nb_parent_src / name)
         ]
+
+        print(f"Copying {nb_parent_src / 'open3d_tutorial.py'} "
+              f"to {nb_parent_dst / 'open3d_tutorial.py'}")
+        shutil.copy(
+            nb_parent_src / "open3d_tutorial.py",
+            nb_parent_dst / "open3d_tutorial.py",
+        )
+
         for example_dir in example_dirs:
-            in_dir = (Path(self.current_file_dir) / "jupyter" / example_dir)
-            out_dir = Path(self.current_file_dir) / "tutorial" / example_dir
+            in_dir = nb_parent_src / example_dir
+            out_dir = nb_parent_dst / example_dir
             out_dir.mkdir(parents=True, exist_ok=True)
-            shutil.copy(
-                in_dir.parent / "open3d_tutorial.py",
-                out_dir.parent / "open3d_tutorial.py",
-            )
 
             if self.clean_notebooks:
                 for nb_out_path in out_dir.glob("*.ipynb"):
@@ -523,6 +523,15 @@ def run(self):
                 shutil.copytree(in_dir / "images", out_dir / "images")
 
         # Execute Jupyter notebooks
+        # Files that should not be executed.
+        nb_direct_copy = [
+            'draw_plotly.ipynb',
+            'hashmap.ipynb',
+            'jupyter_visualization.ipynb',
+            't_icp_registration.ipynb',
+            'tensor.ipynb',
+        ]
+
         for nb_path in nb_paths:
             if nb_path.name in nb_direct_copy:
                 print("[Processing notebook {}, directly copied]".format(
@@ -584,7 +593,7 @@ def run(self):
         action="store_true",
         default=False,
         help=("Whether to clean existing notebooks in docs/tutorial. "
-              "Notebooks are copied from examples/python to docs/tutorial."),
+              "Notebooks are copied from docs/jupyter to docs/tutorial."),
     )
     parser.add_argument(
         "--execute_notebooks",
diff --git a/docs/tutorial/geometry/index.rst b/docs/tutorial/geometry/index.rst
index 2cade36c5ed..ea7292387c5 100644
--- a/docs/tutorial/geometry/index.rst
+++ b/docs/tutorial/geometry/index.rst
@@ -22,6 +22,7 @@ Geometry
    iss_keypoint_detector
    ray_casting
    distance_queries
+   uvmaps
 
 .. toctree::
    :caption: Interface
diff --git a/docs/tutorial/geometry/uvmaps.rst b/docs/tutorial/geometry/uvmaps.rst
index 7fccce5008f..7d4c7aa7344 100644
--- a/docs/tutorial/geometry/uvmaps.rst
+++ b/docs/tutorial/geometry/uvmaps.rst
@@ -24,7 +24,8 @@ Quick Reference to default UV Maps for some primitive shapes provided by Open3D
 
 The examples below all assume the following code preamble:
 
-.. code_block:: python
+.. code-block:: python
+
     import open3d as o3d
     import open3d.visualization.rendering as rendering
 
@@ -45,7 +46,9 @@ Example Texture Map
 Box (map uv to each face = false)
 ************************************
 
-.. code_block:: python
+.. code-block:: python
+
+
     box = o3d.geometry.TriangleMesh.create_box(create_uv_map=True)
     o3d.visualization.draw({'name': 'box', 'geometry': box, 'material': material})
 
@@ -62,7 +65,9 @@ Box (map uv to each face = false)
 Box (map uv to each face = true)
 **************************************
 
-.. code_block:: python
+.. code-block:: python
+
+
     box = o3d.geometry.TriangleMesh.create_box(create_uv_map=True, map_texture_to_each_face=True)
     o3d.visualization.draw({'name': 'box', 'geometry': box, 'material': material})
 
@@ -80,7 +85,9 @@ Box (map uv to each face = true)
 Tetrahedral
 *************
 
-.. code_block:: python
+.. code-block:: python
+
+
     tetra = o3d.geometry.TriangleMesh.create_tetrahedron(create_uv_map=True)
     o3d.visualization.draw({'name': 'tetrahedron', 'geometry': tetra, 'material': material})
 
@@ -98,7 +105,9 @@ Tetrahedral
 Octahedral
 ***************
 
-.. code_block:: python
+.. code-block:: python
+
+
     octo = o3d.geometry.TriangleMesh.create_octahedron(create_uv_map=True)
     o3d.visualization.draw({'name': 'octahedron', 'geometry': octo, 'material': material})
 
@@ -115,7 +124,9 @@ Octahedral
 Icosahedron
 **************
 
-.. code_block:: python
+.. code-block:: python
+
+
     ico = o3d.geometry.TriangleMesh.create_icosahedron(create_uv_map=True)
     o3d.visualization.draw({'name': 'icosahedron', 'geometry': ico, 'material': material})
 
@@ -132,7 +143,9 @@ Icosahedron
 Cylinder
 *************
 
-.. code_block:: python
+.. code-block:: python
+
+
     cylinder = o3d.geometry.TriangleMesh.create_cylinder(create_uv_map=True)
     o3d.visualization.draw({'name': 'cylinder', 'geometry': cylinder, 'material': material})
 
@@ -149,7 +162,9 @@ Cylinder
 Cone
 *******
 
-.. code_block:: python
+.. code-block:: python
+
+
     cone = o3d.geometry.TriangleMesh.create_cone(create_uv_map=True)
     o3d.visualization.draw({'name': 'cone', 'geometry': cone, 'material': material})
 
@@ -166,7 +181,9 @@ Cone
 Sphere
 *******
 
-.. code_block:: python
+.. code-block:: python
+
+
     sphere = o3d.geometry.TriangleMesh.create_sphere(create_uv_map=True)
     o3d.visualization.draw({'name': 'sphere', 'geometry': sphere, 'material': material})
diff --git a/docs/tutorial/t_geometry/index.rst b/docs/tutorial/t_geometry/index.rst
index e1f1e9a4bc8..7c24fb565cb 100644
--- a/docs/tutorial/t_geometry/index.rst
+++ b/docs/tutorial/t_geometry/index.rst
@@ -1,5 +1,5 @@
 Geometry (Tensor)
-========
+=================
 
 .. toctree::
    :caption: Basics
diff --git a/docs/tutorial/t_pipelines/index.rst b/docs/tutorial/t_pipelines/index.rst
index b687d2ce428..b10fa1b0e09 100644
--- a/docs/tutorial/t_pipelines/index.rst
+++ b/docs/tutorial/t_pipelines/index.rst
@@ -1,7 +1,7 @@
 .. _t_pipelines:
 
 Pipelines (Tensor)
-=========
+==================
 
 
 .. toctree::
diff --git a/docs/tutorial/t_reconstruction_system/index.rst b/docs/tutorial/t_reconstruction_system/index.rst
index 31322f613c0..e00d323e410 100644
--- a/docs/tutorial/t_reconstruction_system/index.rst
+++ b/docs/tutorial/t_reconstruction_system/index.rst
@@ -74,7 +74,7 @@ Example config file for online reconstruction system has been provided in
 ``examples/python/t_reconstruction_system/default_config.yml``, which looks like the following:
 
 .. literalinclude:: ../../../examples/python/t_reconstruction_system/default_config.yml
-   :language: yml
+   :language: yaml
    :lineno-start: 1
    :lines: 1-
    :linenos:
@@ -87,7 +87,7 @@ images using the Intel RealSense camera. For more details, please see
 :ref:`capture_your_own_dataset`.
 
 Getting started with online reconstruction system
-``````````````````````````````````````
+`````````````````````````````````````````````````
 
 .. toctree::