From 45af02a19076426a59143e73254f9b19d5479a14 Mon Sep 17 00:00:00 2001 From: Abhijeet Kasurde Date: Fri, 23 Apr 2021 00:38:52 +0530 Subject: [PATCH 01/16] Add description for COLLECTIONS_SCAN_SYS_PATH (#74351) Fixes: #74275 Signed-off-by: Abhijeet Kasurde (cherry picked from commit 567361b124e79873537704bed7625141c33f35a8) --- lib/ansible/config/base.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lib/ansible/config/base.yml b/lib/ansible/config/base.yml index 2a0019d7ed7d15..070587942d172f 100644 --- a/lib/ansible/config/base.yml +++ b/lib/ansible/config/base.yml @@ -175,7 +175,8 @@ CACHE_PLUGIN_TIMEOUT: type: integer yaml: {key: facts.cache.timeout} COLLECTIONS_SCAN_SYS_PATH: - name: enable/disable scanning sys.path for installed collections + name: Scan PYTHONPATH for installed collections + description: A boolean to enable or disable scanning the sys.path for installed collections default: true type: boolean env: From f569fb6d3df27d3e61e75a25899d9ff7a9d0ede5 Mon Sep 17 00:00:00 2001 From: Sandra McCann Date: Thu, 22 Apr 2021 14:54:27 -0400 Subject: [PATCH 02/16] lighten navigation background to make section labels easier to read for core docs (#74356) * make section labels for /ansible-core/ docs easier to read, with black text and lighter gray background (cherry picked from commit 6119fb0a9a796887d7d8cc34b4501f69251bf4d6) --- docs/docsite/_static/core.css | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/docsite/_static/core.css b/docs/docsite/_static/core.css index 8fde5e01ad4e51..5a7b0a1717c54d 100644 --- a/docs/docsite/_static/core.css +++ b/docs/docsite/_static/core.css @@ -27,4 +27,4 @@ table.documentation-table .value-name { font-weight: bold; display: inline; } -*/table.documentation-table .value-type{font-size:x-small;color:purple;display:inline}table.documentation-table .value-separator{font-size:x-small;display:inline}table.documentation-table .value-required{font-size:x-small;color:red;display:inline}.value-added-in{font-size:x-small;font-style:italic;color:green;display:inline}/*! Ansible-specific CSS pulled out of rtd theme for 2.9 */.DocSiteProduct-header{flex:1;-webkit-flex:1;padding:10px 20px 20px;display:flex;display:-webkit-flex;flex-direction:column;-webkit-flex-direction:column;align-items:center;-webkit-align-items:center;justify-content:flex-start;-webkit-justify-content:flex-start;margin-left:20px;margin-right:20px;text-decoration:none;font-weight:400;font-family:"Open Sans",sans-serif}.DocSiteProduct-header:active,.DocSiteProduct-header:focus,.DocSiteProduct-header:visited{color:#fff}.DocSiteProduct-header--core{font-size:25px;background-color:#161b1f;border:2px solid #161b1f;border-top-left-radius:4px;border-top-right-radius:4px;color:#fff;padding-left:2px;margin-left:2px}.DocSiteProduct-headerAlign{width:100%}.DocSiteProduct-logo{width:60px;height:60px;margin-bottom:-9px}.DocSiteProduct-logoText{margin-top:6px;font-size:25px;text-align:left}.DocSiteProduct-CheckVersionPara{margin-left:2px;padding-bottom:4px;margin-right:2px;margin-bottom:10px}/*! Ansible color scheme */.wy-nav-top,.wy-side-nav-search{background-color:#161b1f}.wy-menu-vertical header,.wy-menu-vertical p.caption{color:#161b1f}.wy-menu-vertical a{padding:0}.wy-menu-vertical a.reference.internal{padding:.4045em 1.618em}/*! Override sphinx rtd theme max-with of 800px */.wy-nav-content{max-width:100%}/*! Override sphinx_rtd_theme - keeps left-nav from overwriting Documentation title */.wy-nav-side{top:45px}/*! 
Ansible - changed absolute to relative to remove extraneous side scroll bar */.wy-grid-for-nav{position:relative}/*! Ansible narrow the search box */.wy-side-nav-search input[type=text]{width:90%;padding-left:24px}/*! Ansible - remove so highlight indenting is correct */.rst-content .highlighted{padding:0}.DocSiteBanner{display:flex;display:-webkit-flex;justify-content:center;-webkit-justify-content:center;flex-wrap:wrap;-webkit-flex-wrap:wrap;margin-bottom:25px}.DocSiteBanner-imgWrapper{max-width:100%}td,th{min-width:100px}table{overflow-x:auto;display:block;max-width:100%}.documentation-table td.elbow-placeholder{border-left:1px solid #000;border-top:0;width:30px;min-width:30px}.documentation-table td,.documentation-table th{padding:4px;border-left:1px solid #000;border-top:1px solid #000}.documentation-table{border-right:1px solid #000;border-bottom:1px solid #000}@media print{*{background:0 0!important;color:#000!important;text-shadow:none!important;filter:none!important;-ms-filter:none!important}#nav,a,a:visited{text-decoration:underline}a[href]:after{content:" (" attr(href) ")"}abbr[title]:after{content:" (" attr(title) ")"}.ir a:after,a[href^="javascript:"]:after,a[href^="#"]:after{content:""}/*! Don't show links for images, or javascript/internal links */pre,blockquote{border:0 solid #999;page-break-inside:avoid}thead{display:table-header-group}/*! h5bp.com/t */tr,img{page-break-inside:avoid}img{max-width:100%!important}@page{margin:.5cm}h2,h3,p{orphans:3;widows:3}h2,h3{page-break-after:avoid}#google_image_div,.DocSiteBanner{display:none!important}}#sideBanner,.DocSite-globalNav{display:none}.DocSite-sideNav{display:block;margin-bottom:40px}.DocSite-nav{display:none}.ansibleNav{background:#000;padding:0 20px;width:auto;border-bottom:1px solid #444;font-size:14px;z-index:1}.ansibleNav ul{list-style:none;padding-left:0;margin-top:0}.ansibleNav ul li{padding:7px 0;border-bottom:1px solid #444}.ansibleNav ul li:last-child{border:none}.ansibleNav ul li a{color:#fff;text-decoration:none;text-transform:uppercase;padding:6px 0}.ansibleNav ul li a:hover{color:#161b1f;background:0 0}h4{font-size:105%}h5{font-size:90%}h6{font-size:80%}@media screen and (min-width:768px){.DocSite-globalNav{display:block;position:fixed}#sideBanner{display:block}.DocSite-sideNav{display:none}.DocSite-nav{flex:initial;-webkit-flex:initial;display:flex;display:-webkit-flex;flex-direction:row;-webkit-flex-direction:row;justify-content:flex-start;-webkit-justify-content:flex-start;padding:15px;background-color:#000;text-decoration:none;font-family:"Open Sans",sans-serif}.DocSiteNav-logo{width:28px;height:28px;margin-right:8px;margin-top:-6px;position:fixed;z-index:1}.DocSiteNav-title{color:#fff;font-size:20px;position:fixed;margin-left:40px;margin-top:-4px;z-index:1}.ansibleNav{height:45px;width:100%;font-size:13px;padding:0 60px 0 0}.ansibleNav ul{float:right;display:flex;flex-wrap:nowrap;margin-top:13px}.ansibleNav ul li{padding:0;border-bottom:none}.ansibleNav ul li a{color:#fff;text-decoration:none;text-transform:uppercase;padding:8px 13px}h4{font-size:105%}h5{font-size:90%}h6{font-size:80%}}@media screen and (min-width:768px){#sideBanner,.DocSite-globalNav{display:block}.DocSite-sideNav{display:none}.DocSite-nav{flex:initial;-webkit-flex:initial;display:flex;display:-webkit-flex;flex-direction:row;-webkit-flex-direction:row;justify-content:flex-start;-webkit-justify-content:flex-start;padding:15px;background-color:#000;text-decoration:none;font-family:"Open 
Sans",sans-serif}.DocSiteNav-logo{width:28px;height:28px;margin-right:8px;margin-top:-6px;position:fixed}.DocSiteNav-title{color:#fff;font-size:20px;position:fixed;margin-left:40px;margin-top:-4px}.ansibleNav{height:45px;font-size:13px;padding:0 60px 0 0}.ansibleNav ul{float:right;display:flex;flex-wrap:nowrap;margin-top:13px}.ansibleNav ul li{padding:0;border-bottom:none}.ansibleNav ul li a{color:#fff;text-decoration:none;text-transform:uppercase;padding:8px 13px}h4{font-size:105%}h5{font-size:90%}h6{font-size:80%}}tr:hover .ansibleOptionLink::after{visibility:visible}tr .ansibleOptionLink::after{content:"";font-family:FontAwesome}tr .ansibleOptionLink{visibility:hidden;display:inline-block;font:normal normal normal 14px/1 FontAwesome;text-rendering:auto;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale}@media screen and (min-width:767px){section [id]{padding-top:45px;margin-top:-45px}section a[id]{padding-top:0;margin-top:0}} +*/table.documentation-table .value-type{font-size:x-small;color:purple;display:inline}table.documentation-table .value-separator{font-size:x-small;display:inline}table.documentation-table .value-required{font-size:x-small;color:red;display:inline}.value-added-in{font-size:x-small;font-style:italic;color:green;display:inline}/*! Ansible-specific CSS pulled out of rtd theme for 2.9 */.DocSiteProduct-header{flex:1;-webkit-flex:1;padding:10px 20px 20px;display:flex;display:-webkit-flex;flex-direction:column;-webkit-flex-direction:column;align-items:center;-webkit-align-items:center;justify-content:flex-start;-webkit-justify-content:flex-start;margin-left:20px;margin-right:20px;text-decoration:none;font-weight:400;font-family:"Open Sans",sans-serif}.DocSiteProduct-header:active,.DocSiteProduct-header:focus,.DocSiteProduct-header:visited{color:#fff}.DocSiteProduct-header--core{font-size:25px;background-color:#161b1f;border:2px solid #161b1f;border-top-left-radius:4px;border-top-right-radius:4px;color:#fff;padding-left:2px;margin-left:2px}.DocSiteProduct-headerAlign{width:100%}.DocSiteProduct-logo{width:60px;height:60px;margin-bottom:-9px}.DocSiteProduct-logoText{margin-top:6px;font-size:25px;text-align:left}.DocSiteProduct-CheckVersionPara{margin-left:2px;padding-bottom:4px;margin-right:2px;margin-bottom:10px}/*! Ansible color scheme */.wy-nav-top,.wy-side-nav-search{background-color:#161b1f}.wy-menu-vertical header,.wy-menu-vertical p.caption{color:#161b1f}.wy-menu-vertical a{padding:0}.wy-menu-vertical a.reference.internal{padding:.4045em 1.618em}/*! Override sphinx rtd theme max-with of 800px */.wy-nav-content{max-width:100%}/*! Override sphinx_rtd_theme - keeps left-nav from overwriting Documentation title */.wy-nav-side{top:45px;background:#999}/*! Ansible - changed absolute to relative to remove extraneous side scroll bar */.wy-grid-for-nav{position:relative}/*! Ansible narrow the search box */.wy-side-nav-search input[type=text]{width:90%;padding-left:24px}/*! 
Ansible - remove so highlight indenting is correct */.rst-content .highlighted{padding:0}.DocSiteBanner{display:flex;display:-webkit-flex;justify-content:center;-webkit-justify-content:center;flex-wrap:wrap;-webkit-flex-wrap:wrap;margin-bottom:25px}.DocSiteBanner-imgWrapper{max-width:100%}td,th{min-width:100px}table{overflow-x:auto;display:block;max-width:100%}.documentation-table td.elbow-placeholder{border-left:1px solid #000;border-top:0;width:30px;min-width:30px}.documentation-table td,.documentation-table th{padding:4px;border-left:1px solid #000;border-top:1px solid #000}.documentation-table{border-right:1px solid #000;border-bottom:1px solid #000}@media print{*{background:0 0!important;color:#000!important;text-shadow:none!important;filter:none!important;-ms-filter:none!important}#nav,a,a:visited{text-decoration:underline}a[href]:after{content:" (" attr(href) ")"}abbr[title]:after{content:" (" attr(title) ")"}.ir a:after,a[href^="javascript:"]:after,a[href^="#"]:after{content:""}/*! Don't show links for images, or javascript/internal links */pre,blockquote{border:0 solid #999;page-break-inside:avoid}thead{display:table-header-group}/*! h5bp.com/t */tr,img{page-break-inside:avoid}img{max-width:100%!important}@page{margin:.5cm}h2,h3,p{orphans:3;widows:3}h2,h3{page-break-after:avoid}#google_image_div,.DocSiteBanner{display:none!important}}#sideBanner,.DocSite-globalNav{display:none}.DocSite-sideNav{display:block;margin-bottom:40px}.DocSite-nav{display:none}.ansibleNav{background:#000;padding:0 20px;width:auto;border-bottom:1px solid #444;font-size:14px;z-index:1}.ansibleNav ul{list-style:none;padding-left:0;margin-top:0}.ansibleNav ul li{padding:7px 0;border-bottom:1px solid #444}.ansibleNav ul li:last-child{border:none}.ansibleNav ul li a{color:#fff;text-decoration:none;text-transform:uppercase;padding:6px 0}.ansibleNav ul li a:hover{color:#161b1f;background:0 0}h4{font-size:105%}h5{font-size:90%}h6{font-size:80%}@media screen and (min-width:768px){.DocSite-globalNav{display:block;position:fixed}#sideBanner{display:block}.DocSite-sideNav{display:none}.DocSite-nav{flex:initial;-webkit-flex:initial;display:flex;display:-webkit-flex;flex-direction:row;-webkit-flex-direction:row;justify-content:flex-start;-webkit-justify-content:flex-start;padding:15px;background-color:#000;text-decoration:none;font-family:"Open Sans",sans-serif}.DocSiteNav-logo{width:28px;height:28px;margin-right:8px;margin-top:-6px;position:fixed;z-index:1}.DocSiteNav-title{color:#fff;font-size:20px;position:fixed;margin-left:40px;margin-top:-4px;z-index:1}.ansibleNav{height:45px;width:100%;font-size:13px;padding:0 60px 0 0}.ansibleNav ul{float:right;display:flex;flex-wrap:nowrap;margin-top:13px}.ansibleNav ul li{padding:0;border-bottom:none}.ansibleNav ul li a{color:#fff;text-decoration:none;text-transform:uppercase;padding:8px 13px}h4{font-size:105%}h5{font-size:90%}h6{font-size:80%}}@media screen and (min-width:768px){#sideBanner,.DocSite-globalNav{display:block}.DocSite-sideNav{display:none}.DocSite-nav{flex:initial;-webkit-flex:initial;display:flex;display:-webkit-flex;flex-direction:row;-webkit-flex-direction:row;justify-content:flex-start;-webkit-justify-content:flex-start;padding:15px;background-color:#000;text-decoration:none;font-family:"Open Sans",sans-serif}.DocSiteNav-logo{width:28px;height:28px;margin-right:8px;margin-top:-6px;position:fixed}.DocSiteNav-title{color:#fff;font-size:20px;position:fixed;margin-left:40px;margin-top:-4px}.ansibleNav{height:45px;font-size:13px;padding:0 60px 0 0}.ansibleNav 
ul{float:right;display:flex;flex-wrap:nowrap;margin-top:13px}.ansibleNav ul li{padding:0;border-bottom:none}.ansibleNav ul li a{color:#fff;text-decoration:none;text-transform:uppercase;padding:8px 13px}h4{font-size:105%}h5{font-size:90%}h6{font-size:80%}}tr:hover .ansibleOptionLink::after{visibility:visible}tr .ansibleOptionLink::after{content:"";font-family:FontAwesome}tr .ansibleOptionLink{visibility:hidden;display:inline-block;font:normal normal normal 14px/1 FontAwesome;text-rendering:auto;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale}@media screen and (min-width:767px){section [id]{padding-top:45px;margin-top:-45px}section a[id]{padding-top:0;margin-top:0}} From 7fb119013b1bacb706ed2af301a4f58e00b7ef7d Mon Sep 17 00:00:00 2001 From: Alex Willmer Date: Thu, 22 Apr 2021 21:40:58 +0100 Subject: [PATCH 03/16] Correct splitext() description, and example (#74377) `splitext()` returns a 2-tuple of strings, and the last element of the return value includes the `.` (cherry picked from commit c295de661c8e9462f63bbf716c1d5db23b2e93e5) --- docs/docsite/rst/user_guide/playbooks_filters.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/docsite/rst/user_guide/playbooks_filters.rst b/docs/docsite/rst/user_guide/playbooks_filters.rst index 39672232bb4a08..512c39cded5317 100644 --- a/docs/docsite/rst/user_guide/playbooks_filters.rst +++ b/docs/docsite/rst/user_guide/playbooks_filters.rst @@ -1622,12 +1622,12 @@ To get the root and extension of a path or file name (new in version 2.0):: # with path == 'nginx.conf' the return would be ('nginx', '.conf') {{ path | splitext }} -The ``splitext`` filter returns a string. The individual components can be accessed by using the ``first`` and ``last`` filters:: +The ``splitext`` filter always returns a pair of strings. The individual components can be accessed by using the ``first`` and ``last`` filters:: # with path == 'nginx.conf' the return would be 'nginx' {{ path | splitext | first }} - # with path == 'nginx.conf' the return would be 'conf' + # with path == 'nginx.conf' the return would be '.conf' {{ path | splitext | last }} To join one or more path components:: From f9702866ba393b252f19a1ef6aa4cf9a4ece3627 Mon Sep 17 00:00:00 2001 From: Hublerho <43293510+Hublerho@users.noreply.github.com> Date: Fri, 23 Apr 2021 19:44:09 +0200 Subject: [PATCH 04/16] Using "~" instead of "+" for concatination (#74364) Changed FAQ examples to conform with the Jinja documentation: If both values on either side of a plus/+ are numbers, they will be added whereas using "~" will convert all operands into strings and then concatenate them. Closes #73799. (cherry picked from commit e6a5245d6088894d56b8e0406f8ffed9a57046c3) --- docs/docsite/rst/reference_appendices/faq.rst | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/docsite/rst/reference_appendices/faq.rst b/docs/docsite/rst/reference_appendices/faq.rst index b0f1e1a9693696..3a18466318e55d 100644 --- a/docs/docsite/rst/reference_appendices/faq.rst +++ b/docs/docsite/rst/reference_appendices/faq.rst @@ -424,11 +424,11 @@ How do I access a variable name programmatically? +++++++++++++++++++++++++++++++++++++++++++++++++ An example may come up where we need to get the ipv4 address of an arbitrary interface, where the interface to be used may be supplied -via a role parameter or other input. Variable names can be built by adding strings together, like so: +via a role parameter or other input. 
Variable names can be built by adding strings together using "~", like so: .. code-block:: jinja - {{ hostvars[inventory_hostname]['ansible_' + which_interface]['ipv4']['address'] }} + {{ hostvars[inventory_hostname]['ansible_' ~ which_interface]['ipv4']['address'] }} The trick about going through hostvars is necessary because it's a dictionary of the entire namespace of variables. ``inventory_hostname`` is a magic variable that indicates the current host you are looping over in the host loop. @@ -437,7 +437,7 @@ In the example above, if your interface names have dashes, you must replace them .. code-block:: jinja - {{ hostvars[inventory_hostname]['ansible_' + which_interface | replace('_', '-') ]['ipv4']['address'] }} + {{ hostvars[inventory_hostname]['ansible_' ~ which_interface | replace('_', '-') ]['ipv4']['address'] }} Also see dynamic_variables_. @@ -681,13 +681,13 @@ The above DOES NOT WORK as you expect, if you need to use a dynamic variable use .. code-block:: jinja - {{ hostvars[inventory_hostname]['somevar_' + other_var] }} + {{ hostvars[inventory_hostname]['somevar_' ~ other_var] }} For 'non host vars' you can use the :ref:`vars lookup` plugin: .. code-block:: jinja - {{ lookup('vars', 'somevar_' + other_var) }} + {{ lookup('vars', 'somevar_' ~ other_var) }} .. _why_no_wheel: From 94c8f8d7d28e417c5e8ff34f2ecdb013e067b159 Mon Sep 17 00:00:00 2001 From: Alicia Cozine <879121+acozine@users.noreply.github.com> Date: Fri, 23 Apr 2021 16:03:46 -0500 Subject: [PATCH 05/16] Docs - Split Developing collections page, add info on optional module_utils (#74105) * (cherry picked from commit c90922ee3670e049c9afbbaeaca32f16f0ebbd38) --- .../rst/dev_guide/developing_collections.rst | 830 +----------------- .../developing_collections_changelogs.rst | 80 ++ .../developing_collections_contributing.rst | 38 + .../developing_collections_creating.rst | 57 ++ .../developing_collections_distributing.rst | 241 +++++ .../developing_collections_migrating.rst | 136 +++ .../developing_collections_shared.rst | 77 ++ .../developing_collections_structure.rst | 241 +++++ .../developing_collections_testing.rst | 83 ++ 9 files changed, 979 insertions(+), 804 deletions(-) create mode 100644 docs/docsite/rst/dev_guide/developing_collections_changelogs.rst create mode 100644 docs/docsite/rst/dev_guide/developing_collections_contributing.rst create mode 100644 docs/docsite/rst/dev_guide/developing_collections_creating.rst create mode 100644 docs/docsite/rst/dev_guide/developing_collections_distributing.rst create mode 100644 docs/docsite/rst/dev_guide/developing_collections_migrating.rst create mode 100644 docs/docsite/rst/dev_guide/developing_collections_shared.rst create mode 100644 docs/docsite/rst/dev_guide/developing_collections_structure.rst create mode 100644 docs/docsite/rst/dev_guide/developing_collections_testing.rst diff --git a/docs/docsite/rst/dev_guide/developing_collections.rst b/docs/docsite/rst/dev_guide/developing_collections.rst index 8504f7e58f6bc7..b955b3d2ae1072 100644 --- a/docs/docsite/rst/dev_guide/developing_collections.rst +++ b/docs/docsite/rst/dev_guide/developing_collections.rst @@ -1,825 +1,47 @@ - .. _developing_collections: ********************** Developing collections ********************** -Collections are a distribution format for Ansible content. You can package and distribute playbooks, roles, modules, and plugins using collections. - -You can publish any collection to `Ansible Galaxy `_ or to a private Automation Hub instance. 
You can publish certified collections to the Red Hat Automation Hub, part of the Red Hat Ansible Automation Platform. - -* For details on how to *use* collections see :ref:`collections`. -* For the current development status of Collections and FAQ see `Ansible Collections Overview and FAQ `_. - -.. contents:: - :local: - :depth: 2 - -.. _collection_structure: - -Collection structure -==================== - -Collections follow a simple data structure. None of the directories are required unless you have specific content that belongs in one of them. A collection does require a ``galaxy.yml`` file at the root level of the collection. This file contains all of the metadata that Galaxy and other tools need in order to package, build and publish the collection:: - - collection/ - ├── docs/ - ├── galaxy.yml - ├── meta/ - │ └── runtime.yml - ├── plugins/ - │ ├── modules/ - │ │ └── module1.py - │ ├── inventory/ - │ └── .../ - ├── README.md - ├── roles/ - │ ├── role1/ - │ ├── role2/ - │ └── .../ - ├── playbooks/ - │ ├── files/ - │ ├── vars/ - │ ├── templates/ - │ └── tasks/ - └── tests/ - - -.. note:: - * Ansible only accepts ``.md`` extensions for the :file:`README` file and any files in the :file:`/docs` folder. - * See the `ansible-collections `_ GitHub Org for examples of collection structure. - * Not all directories are currently in use. Those are placeholders for future features. - -.. _galaxy_yml: - -galaxy.yml ----------- - -A collection must have a ``galaxy.yml`` file that contains the necessary information to build a collection artifact. -See :ref:`collections_galaxy_meta` for details. - -.. _collections_doc_dir: - -docs directory ---------------- - -Put general documentation for the collection here. Keep the specific documentation for plugins and modules embedded as Python docstrings. Use the ``docs`` folder to describe how to use the roles and plugins the collection provides, role requirements, and so on. Use markdown and do not add subfolders. - -Use ``ansible-doc`` to view documentation for plugins inside a collection: - -.. code-block:: bash - - ansible-doc -t lookup my_namespace.my_collection.lookup1 - -The ``ansible-doc`` command requires the fully qualified collection name (FQCN) to display specific plugin documentation. In this example, ``my_namespace`` is the Galaxy namespace and ``my_collection`` is the collection name within that namespace. - -.. note:: The Galaxy namespace of an Ansible collection is defined in the ``galaxy.yml`` file. It can be different from the GitHub organization or repository name. - -.. _collections_plugin_dir: - -plugins directory ------------------- - -Add a 'per plugin type' specific subdirectory here, including ``module_utils`` which is usable not only by modules, but by most plugins by using their FQCN. This is a way to distribute modules, lookups, filters, and so on without having to import a role in every play. - -Vars plugins are unsupported in collections. Cache plugins may be used in collections for fact caching, but are not supported for inventory plugins. - -.. _collection_module_utils: - -module_utils -^^^^^^^^^^^^ - -When coding with ``module_utils`` in a collection, the Python ``import`` statement needs to take into account the FQCN along with the ``ansible_collections`` convention. 
The resulting Python import will look like ``from ansible_collections.{namespace}.{collection}.plugins.module_utils.{util} import {something}`` - -The following example snippets show a Python and PowerShell module using both default Ansible ``module_utils`` and -those provided by a collection. In this example the namespace is ``community``, the collection is ``test_collection``. -In the Python example the ``module_util`` in question is called ``qradar`` such that the FQCN is -``community.test_collection.plugins.module_utils.qradar``: - -.. code-block:: python - - from ansible.module_utils.basic import AnsibleModule - from ansible.module_utils.common.text.converters import to_text - - from ansible.module_utils.six.moves.urllib.parse import urlencode, quote_plus - from ansible.module_utils.six.moves.urllib.error import HTTPError - from ansible_collections.community.test_collection.plugins.module_utils.qradar import QRadarRequest - - argspec = dict( - name=dict(required=True, type='str'), - state=dict(choices=['present', 'absent'], required=True), - ) - - module = AnsibleModule( - argument_spec=argspec, - supports_check_mode=True - ) - - qradar_request = QRadarRequest( - module, - headers={"Content-Type": "application/json"}, - not_rest_data_keys=['state'] - ) - -Note that importing something from an ``__init__.py`` file requires using the file name: - -.. code-block:: python - - from ansible_collections.namespace.collection_name.plugins.callback.__init__ import CustomBaseClass - -In the PowerShell example the ``module_util`` in question is called ``hyperv`` such that the FQCN is -``community.test_collection.plugins.module_utils.hyperv``: - -.. code-block:: powershell - - #!powershell - #AnsibleRequires -CSharpUtil Ansible.Basic - #AnsibleRequires -PowerShell ansible_collections.community.test_collection.plugins.module_utils.hyperv - - $spec = @{ - name = @{ required = $true; type = "str" } - state = @{ required = $true; choices = @("present", "absent") } - } - $module = [Ansible.Basic.AnsibleModule]::Create($args, $spec) - - Invoke-HyperVFunction -Name $module.Params.name - - $module.ExitJson() - -.. _collections_roles_dir: - -roles directory ----------------- - -Collection roles are mostly the same as existing roles, but with a couple of limitations: - - - Role names are now limited to contain only lowercase alphanumeric characters, plus ``_`` and start with an alpha character. - - Roles in a collection cannot contain plugins any more. Plugins must live in the collection ``plugins`` directory tree. Each plugin is accessible to all roles in the collection. - -The directory name of the role is used as the role name. Therefore, the directory name must comply with the -above role name rules. -The collection import into Galaxy will fail if a role name does not comply with these rules. - -You can migrate 'traditional roles' into a collection but they must follow the rules above. You may need to rename roles if they don't conform. You will have to move or link any role-based plugins to the collection specific directories. - -.. note:: - - For roles imported into Galaxy directly from a GitHub repository, setting the ``role_name`` value in the role's metadata overrides the role name used by Galaxy. For collections, that value is ignored. When importing a collection, Galaxy uses the role directory as the name of the role and ignores the ``role_name`` metadata value. - -playbooks directory --------------------- - -TBD. - -.. 
_developing_collections_tests_directory: - -tests directory ----------------- - -Ansible Collections are tested much like Ansible itself, by using the -`ansible-test` utility which is released as part of Ansible, version 2.9.0 and -newer. Because Ansible Collections are tested using the same tooling as Ansible -itself, via `ansible-test`, all Ansible developer documentation for testing is -applicable for authoring Collections Tests with one key concept to keep in mind. - -See :ref:`testing_collections` for specific information on how to test collections -with ``ansible-test``. - -When reading the :ref:`developing_testing` documentation, there will be content -that applies to running Ansible from source code via a git clone, which is -typical of an Ansible developer. However, it's not always typical for an Ansible -Collection author to be running Ansible from source but instead from a stable -release, and to create Collections it is not necessary to run Ansible from -source. Therefore, when references of dealing with `ansible-test` binary paths, -command completion, or environment variables are presented throughout the -:ref:`developing_testing` documentation; keep in mind that it is not needed for -Ansible Collection Testing because the act of installing the stable release of -Ansible containing `ansible-test` is expected to setup those things for you. - -.. _meta_runtime_yml: - -meta directory --------------- - -A collection can store some additional metadata in a ``runtime.yml`` file in the collection's ``meta`` directory. The ``runtime.yml`` file supports the top level keys: - -- *requires_ansible*: - - The version of Ansible required to use the collection. Multiple versions can be separated with a comma. - - .. code:: yaml - - requires_ansible: ">=2.10,<2.11" - - .. note:: although the version is a `PEP440 Version Specifier `_ under the hood, Ansible deviates from PEP440 behavior by truncating prerelease segments from the Ansible version. This means that Ansible 2.11.0b1 is compatible with something that ``requires_ansible: ">=2.11"``. - -- *plugin_routing*: - - Content in a collection that Ansible needs to load from another location or that has been deprecated/removed. - The top level keys of ``plugin_routing`` are types of plugins, with individual plugin names as subkeys. - To define a new location for a plugin, set the ``redirect`` field to another name. - To deprecate a plugin, use the ``deprecation`` field to provide a custom warning message and the removal version or date. If the plugin has been renamed or moved to a new location, the ``redirect`` field should also be provided. If a plugin is being removed entirely, ``tombstone`` can be used for the fatal error message and removal version or date. - - .. code:: yaml - - plugin_routing: - inventory: - kubevirt: - redirect: community.general.kubevirt - my_inventory: - tombstone: - removal_version: "2.0.0" - warning_text: my_inventory has been removed. Please use other_inventory instead. - modules: - my_module: - deprecation: - removal_date: "2021-11-30" - warning_text: my_module will be removed in a future release of this collection. Use another.collection.new_module instead. - redirect: another.collection.new_module - podman_image: - redirect: containers.podman.podman_image - module_utils: - ec2: - redirect: amazon.aws.ec2 - util_dir.subdir.my_util: - redirect: namespace.name.my_util - -- *import_redirection* - - A mapping of names for Python import statements and their redirected locations. - - .. 
code:: yaml - - import_redirection: - ansible.module_utils.old_utility: - redirect: ansible_collections.namespace_name.collection_name.plugins.module_utils.new_location - - -.. _creating_collections_skeleton: - -Creating a collection skeleton ------------------------------- - -To start a new collection: - -.. code-block:: bash - - collection_dir#> ansible-galaxy collection init my_namespace.my_collection - -.. note:: - - Both the namespace and collection names use the same strict set of requirements. See `Galaxy namespaces `_ on the Galaxy docsite for those requirements. - -Once the skeleton exists, you can populate the directories with the content you want inside the collection. See `ansible-collections `_ GitHub Org to get a better idea of what you can place inside a collection. - -.. _creating_collections: - -Creating collections -====================== - -To create a collection: - -#. Create a collection skeleton with the ``collection init`` command. See :ref:`creating_collections_skeleton` above. -#. Add your content to the collection. -#. Build the collection into a collection artifact with :ref:`ansible-galaxy collection build`. -#. Publish the collection artifact to Galaxy with :ref:`ansible-galaxy collection publish`. - -A user can then install your collection on their systems. - -Currently the ``ansible-galaxy collection`` command implements the following sub commands: - -* ``init``: Create a basic collection skeleton based on the default template included with Ansible or your own template. -* ``build``: Create a collection artifact that can be uploaded to Galaxy or your own repository. -* ``publish``: Publish a built collection artifact to Galaxy. -* ``install``: Install one or more collections. - -To learn more about the ``ansible-galaxy`` command-line tool, see the :ref:`ansible-galaxy` man page. - - -.. _docfragments_collections: - -Using documentation fragments in collections --------------------------------------------- - -To include documentation fragments in your collection: - -#. Create the documentation fragment: ``plugins/doc_fragments/fragment_name``. - -#. Refer to the documentation fragment with its FQCN. - -.. code-block:: yaml - - extends_documentation_fragment: - - community.kubernetes.k8s_name_options - - community.kubernetes.k8s_auth_options - - community.kubernetes.k8s_resource_options - - community.kubernetes.k8s_scale_options - -:ref:`module_docs_fragments` covers the basics for documentation fragments. The `kubernetes `_ collection includes a complete example. - -You can also share documentation fragments across collections with the FQCN. - -.. _building_collections: - -Building collections --------------------- - -To build a collection, run ``ansible-galaxy collection build`` from inside the root directory of the collection: - -.. code-block:: bash - - collection_dir#> ansible-galaxy collection build - -This creates a tarball of the built collection in the current directory which can be uploaded to Galaxy.:: - - my_collection/ - ├── galaxy.yml - ├── ... - ├── my_namespace-my_collection-1.0.0.tar.gz - └── ... - -.. note:: - * Certain files and folders are excluded when building the collection artifact. See :ref:`ignoring_files_and_folders_collections` to exclude other files you would not want to distribute. - * If you used the now-deprecated ``Mazer`` tool for any of your collections, delete any and all files it added to your :file:`releases/` directory before you build your collection with ``ansible-galaxy``. 
- * The current Galaxy maximum tarball size is 2 MB. - - -This tarball is mainly intended to upload to Galaxy -as a distribution method, but you can use it directly to install the collection on target systems. - -.. _ignoring_files_and_folders_collections: - -Ignoring files and folders -^^^^^^^^^^^^^^^^^^^^^^^^^^ - -By default the build step will include all the files in the collection directory in the final build artifact except for the following: - -* ``galaxy.yml`` -* ``*.pyc`` -* ``*.retry`` -* ``tests/output`` -* previously built artifacts in the root directory -* various version control directories like ``.git/`` - -To exclude other files and folders when building the collection, you can set a list of file glob-like patterns in the -``build_ignore`` key in the collection's ``galaxy.yml`` file. These patterns use the following special characters for -wildcard matching: - -* ``*``: Matches everything -* ``?``: Matches any single character -* ``[seq]``: Matches and character in seq -* ``[!seq]``:Matches any character not in seq - -For example, if you wanted to exclude the :file:`sensitive` folder within the ``playbooks`` folder as well any ``.tar.gz`` archives you -can set the following in your ``galaxy.yml`` file: - -.. code-block:: yaml - - build_ignore: - - playbooks/sensitive - - '*.tar.gz' - -.. note:: - This feature is only supported when running ``ansible-galaxy collection build`` with Ansible 2.10 or newer. - - -.. _trying_collection_locally: - -Trying collections locally --------------------------- - -You can try your collection locally by installing it from the tarball. The following will enable an adjacent playbook to -access the collection: - -.. code-block:: bash - - ansible-galaxy collection install my_namespace-my_collection-1.0.0.tar.gz -p ./collections - - -You should use one of the values configured in :ref:`COLLECTIONS_PATHS` for your path. This is also where Ansible itself will -expect to find collections when attempting to use them. If you don't specify a path value, ``ansible-galaxy collection install`` -installs the collection in the first path defined in :ref:`COLLECTIONS_PATHS`, which by default is ``~/.ansible/collections``. - -If you want to use a collection directly out of a checked out git repository, see :ref:`hacking_collections`. - -Next, try using the local collection inside a playbook. For examples and more details see :ref:`Using collections ` - -.. _collections_scm_install: - -Installing collections from a git repository --------------------------------------------- - -You can also test a version of your collection in development by installing it from a git repository. - -.. code-block:: bash - - ansible-galaxy collection install git+https://github.com/org/repo.git,devel - -.. include:: ../shared_snippets/installing_collections_git_repo.txt - -.. _publishing_collections: - -Distributing collections -======================== - -You can distribute your collections by publishing them on a distribution server. Distribution servers include Ansible Galaxy, Red Hat Automation Hub, and privately hosted Automation Hub instances. You can publish any collection to Ansible Galaxy and/or to a privately hosted Automation Hub instance. If your collection is certified by Red Hat, you can publish it to the Red Hat Automation Hub. - -Prerequisites -------------- - -1. Get a namespace on each distribution server you want to use (Galaxy, private Automation Hub, Red Hat Automation Hub). -2. Get an API token for each distribution server you want to use. -3. 
Specify your API token(s). - -Getting a namespace -^^^^^^^^^^^^^^^^^^^ - -You need a namespace on Galaxy and/or Automation Hub to upload your collection. To get a namespace: - -* For Galaxy, see `Galaxy namespaces `_ on the Galaxy docsite for details. -* For Automation Hub, see the `Ansible Certified Content FAQ `_. - -.. _galaxy_get_token: - -Getting your API token -^^^^^^^^^^^^^^^^^^^^^^ - -You need an API token for Galaxy and/or Automation Hub to upload your collection. Use the API token(s) to authenticate your connection to the distribution server(s) and protect your content. - -To get your API token: - -* For Galaxy, go to the `Galaxy profile preferences `_ page and click :guilabel:`API Key`. -* For Automation Hub, go to `the token page `_ and click :guilabel:`Load token`. - -Specifying your API token -^^^^^^^^^^^^^^^^^^^^^^^^^ - -Once you have retrieved your API token, you can specify the correct token for each distribution server in two ways: - -* Pass the token to the ``ansible-galaxy`` command using the ``--token``. -* Configure the token within a Galaxy server list in your :file:`ansible.cfg` file. - -Specifying your API token with the ``--token`` argument -....................................................... - -You can use the ``--token`` argument with the ``ansible-galaxy`` command (in conjunction with the ``--server`` argument or :ref:`GALAXY_SERVER` setting in your :file:`ansible.cfg` file). You cannot use ``apt-key`` with any servers defined in your :ref:`Galaxy server list `. - -.. code-block:: text - - ansible-galaxy collection publish ./geerlingguy-collection-1.2.3.tar.gz --token= - -Specifying your API token with a Galaxy server list -................................................... - -You can configure one or more distribution servers for Galaxy in your :file:`ansible.cfg` file under the ``galaxy_server_list`` section. For each server, you also configure the token. - - -.. code-block:: ini - - [galaxy] - server_list = release_galaxy - - [galaxy_server.release_galaxy] - url=https://galaxy.ansible.com/ - token=my_token - -See :ref:`galaxy_server_config` for complete details. - -Publishing a collection ------------------------ - -Once you have a namespace and an API token for each distribution server you want to use, you can distribute your collection by publishing it to Ansible Galaxy, Red Hat Automation Hub, or a privately hosted Automation Hub instance. You can use either the ``ansible-galaxy collection publish`` command or the distribution server (Galaxy, Automation Hub) itself. - -Each time you add features or make changes to your collection, you must publish a new version of the collection. For details on versioning, see :ref:`collection_versions`. - -.. _upload_collection_ansible_galaxy: - -Publish a collection using ``ansible-galaxy`` -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. note:: - By default, ``ansible-galaxy`` uses https://galaxy.ansible.com as the Galaxy server (as listed in the :file:`ansible.cfg` file under :ref:`galaxy_server`). If you are only publishing your collection to Ansible Galaxy, you do not need any further configuration. If you are using Red Hat Automation Hub or any other Galaxy server, see :ref:`Configuring the ansible-galaxy client `. - -To upload the collection artifact with the ``ansible-galaxy`` command: - -.. code-block:: bash - - ansible-galaxy collection publish path/to/my_namespace-my_collection-1.0.0.tar.gz - -.. 
note:: - - The above command assumes you have retrieved and stored your API token as part of a Galaxy server list. See :ref:`galaxy_get_token` for details. - -The ``ansible-galaxy collection publish`` command triggers an import process, just as if you uploaded the collection through the Galaxy website. The command waits until the import process completes before reporting the status back. If you want to continue without waiting for the import result, use the ``--no-wait`` argument and manually look at the import progress in your `My Imports `_ page. - - -.. _upload_collection_galaxy: - -Publishing a collection using the Galaxy website -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -To publish your collection directly on the Galaxy website: - -#. Go to the `My Content `_ page, and click the **Add Content** button on one of your namespaces. -#. From the **Add Content** dialogue, click **Upload New Collection**, and select the collection archive file from your local filesystem. - -When you upload a collection, it always uploads to the namespace specified in the collection metadata in the ``galaxy.yml`` file, no matter which namespace you select on the website. If you are not an owner of the namespace specified in your collection metadata, the upload request will fail. - -Once Galaxy uploads and accepts a collection, you will be redirected to the **My Imports** page, which displays output from the import process, including any errors or warnings about the metadata and content contained in the collection. - -.. _collection_versions: - -Collection versions -^^^^^^^^^^^^^^^^^^^ - -Each time you publish your collection, you create a new version. Once you publish a version of a collection, you cannot delete or modify that version. Ensure that everything looks okay before publishing. The only way to change a collection is to release a new version. The latest version of a collection (by highest version number) will be the version displayed everywhere in Galaxy or Automation Hub; however, users will still be able to download older versions. - -Collection versions use `Semantic Versioning `_ for version numbers. Please read the official documentation for details and examples. In summary: - -* Increment major (for example: x in `x.y.z`) version number for an incompatible API change. -* Increment minor (for example: y in `x.y.z`) version number for new functionality in a backwards compatible manner (for example new modules/plugins, parameters, return values). -* Increment patch (for example: z in `x.y.z`) version number for backwards compatible bug fixes. - -.. _migrate_to_collection: - -Migrating Ansible content to a different collection -==================================================== - -First, look at `Ansible Collection Checklist `_. - -To migrate content from one collection to another, if the collections are parts of `Ansible distribution `_: - -#. Copy content from the source (old) collection to the target (new) collection. -#. Deprecate the module/plugin with ``removal_version`` scheduled for the next major version in ``meta/runtime.yml`` of the old collection. The deprecation must be released after the copied content has been included in a release of the new collection. -#. 
When the next major release of the old collection is prepared: - - * remove the module/plugin from the old collection - * remove the symlink stored in ``plugin/modules`` directory if appropriate (mainly when removing from ``community.general`` and ``community.network``) - * remove related unit and integration tests - * remove specific module utils - * remove specific documentation fragments if there are any in the old collection - * add a changelog fragment containing entries for ``removed_features`` and ``breaking_changes``; you can see an example of a changelog fragment in this `pull request `_ - * change ``meta/runtime.yml`` in the old collection: - - * add ``redirect`` to the corresponding module/plugin's entry - * in particular, add ``redirect`` for the removed module utils and documentation fragments if applicable - * remove ``removal_version`` from there - * remove related entries from ``tests/sanity/ignore.txt`` files if exist - * remove changelog fragments for removed content that are not yet part of the changelog (in other words, do not modify `changelogs/changelog.yaml` and do not delete files mentioned in it) - * remove requirements that are no longer required in ``tests/unit/requirements.txt``, ``tests/requirements.yml`` and ``galaxy.yml`` - -According to the above, you need to create at least three PRs as follows: - -#. Create a PR against the new collection to copy the content. -#. Deprecate the module/plugin in the old collection. -#. Later create a PR against the old collection to remove the content according to the schedule. - - -Adding the content to the new collection ----------------------------------------- - -Create a PR in the new collection to: - -#. Copy ALL the related files from the old collection. -#. If it is an action plugin, include the corresponding module with documentation. -#. If it is a module, check if it has a corresponding action plugin that should move with it. -#. Check ``meta/`` for relevant updates to ``runtime.yml`` if it exists. -#. Carefully check the moved ``tests/integration`` and ``tests/units`` and update for FQCN. -#. Review ``tests/sanity/ignore-*.txt`` entries in the old collection. -#. Update ``meta/runtime.yml`` in the old collection. - - -Removing the content from the old collection --------------------------------------------- - -Create a PR against the source collection repository to remove the modules, module_utils, plugins, and docs_fragments related to this migration: - -#. If you are removing an action plugin, remove the corresponding module that contains the documentation. -#. If you are removing a module, remove any corresponding action plugin that should stay with it. -#. Remove any entries about removed plugins from ``meta/runtime.yml``. Ensure they are added into the new repo. -#. Remove sanity ignore lines from ``tests/sanity/ignore\*.txt`` -#. Remove associated integration tests from ``tests/integrations/targets/`` and unit tests from ``tests/units/plugins/``. -#. if you are removing from content from ``community.general`` or ``community.network``, remove entries from ``.github/BOTMETA.yml``. -#. Carefully review ``meta/runtime.yml`` for any entries you may need to remove or update, in particular deprecated entries. -#. Update ``meta/runtime.yml`` to contain redirects for EVERY PLUGIN, pointing to the new collection name. - -.. warning:: - - Maintainers for the old collection have to make sure that the PR is merged in a way that it does not break user experience and semantic versioning: - - #. 
A new version containing the merged PR must not be released before the collection the content has been moved to has been released again, with that content contained in it. Otherwise the redirects cannot work and users relying on that content will experience breakage. - #. Once 1.0.0 of the collection from which the content has been removed has been released, such PRs can only be merged for a new **major** version (in other words, 2.0.0, 3.0.0, and so on). - - -BOTMETA.yml ------------ - -The ``BOTMETA.yml``, for example in `community.general collection repository `_, is the source of truth for: - -* ansibullbot - -If the old and/or new collection has ``ansibullbot``, its ``BOTMETA.yml`` must be updated correspondingly. - -Ansibulbot will know how to redirect existing issues and PRs to the new repo. -The build process for docs.ansible.com will know where to find the module docs. - -.. code-block:: yaml - - $modules/monitoring/grafana/grafana_plugin.py: - migrated_to: community.grafana - $modules/monitoring/grafana/grafana_dashboard.py: - migrated_to: community.grafana - $modules/monitoring/grafana/grafana_datasource.py: - migrated_to: community.grafana - $plugins/callback/grafana_annotations.py: - maintainers: $team_grafana - labels: monitoring grafana - migrated_to: community.grafana - $plugins/doc_fragments/grafana.py: - maintainers: $team_grafana - labels: monitoring grafana - migrated_to: community.grafana - -`Example PR `_ - -* The ``migrated_to:`` key must be added explicitly for every *file*. You cannot add ``migrated_to`` at the directory level. This is to allow module and plugin webdocs to be redirected to the new collection docs. -* ``migrated_to:`` MUST be added for every: - - * module - * plugin - * module_utils - * contrib/inventory script - -* You do NOT need to add ``migrated_to`` for: - - * Unit tests - * Integration tests - * ReStructured Text docs (anything under ``docs/docsite/rst/``) - * Files that never existed in ``ansible/ansible:devel`` - -.. _testing_collections: - -Testing collections -=================== - -The main tool for testing collections is ``ansible-test``, Ansible's testing tool described in :ref:`developing_testing`. You can run several compile and sanity checks, as well as run unit and integration tests for plugins using ``ansible-test``. When you test collections, test against the ansible-core version(s) you are targeting. - -You must always execute ``ansible-test`` from the root directory of a collection. You can run ``ansible-test`` in Docker containers without installing any special requirements. The Ansible team uses this approach in Shippable both in the ansible/ansible GitHub repository and in the large community collections such as `community.general `_ and `community.network `_. The examples below demonstrate running tests in Docker containers. - -Compile and sanity tests ------------------------- - -To run all compile and sanity tests:: - - ansible-test sanity --docker default -v - -See :ref:`testing_compile` and :ref:`testing_sanity` for more information. See the :ref:`full list of sanity tests ` for details on the sanity tests and how to fix identified issues. - -Unit tests ----------- - -You must place unit tests in the appropriate``tests/unit/plugins/`` directory. For example, you would place tests for ``plugins/module_utils/foo/bar.py`` in ``tests/unit/plugins/module_utils/foo/test_bar.py`` or ``tests/unit/plugins/module_utils/foo/bar/test_bar.py``. For examples, see the `unit tests in community.general `_. 
- -To run all unit tests for all supported Python versions:: - - ansible-test units --docker default -v - -To run all unit tests only for a specific Python version:: - - ansible-test units --docker default -v --python 3.6 - -To run only a specific unit test:: - - ansible-test units --docker default -v --python 3.6 tests/unit/plugins/module_utils/foo/test_bar.py - -You can specify Python requirements in the ``tests/unit/requirements.txt`` file. See :ref:`testing_units` for more information, especially on fixture files. - -Integration tests ------------------ - -You must place integration tests in the appropriate ``tests/integration/targets/`` directory. For module integration tests, you can use the module name alone. For example, you would place integration tests for ``plugins/modules/foo.py`` in a directory called ``tests/integration/targets/foo/``. For non-module plugin integration tests, you must add the plugin type to the directory name. For example, you would place integration tests for ``plugins/connections/bar.py`` in a directory called ``tests/integration/targets/connection_bar/``. For lookup plugins, the directory must be called ``lookup_foo``, for inventory plugins, ``inventory_foo``, and so on. - -You can write two different kinds of integration tests: - -* Ansible role tests run with ``ansible-playbook`` and validate various aspects of the module. They can depend on other integration tests (usually named ``prepare_bar`` or ``setup_bar``, which prepare a service or install a requirement named ``bar`` in order to test module ``foo``) to set-up required resources, such as installing required libraries or setting up server services. -* ``runme.sh`` tests run directly as scripts. They can set up inventory files, and execute ``ansible-playbook`` or ``ansible-inventory`` with various settings. - -For examples, see the `integration tests in community.general `_. See also :ref:`testing_integration` for more details. - -Since integration tests can install requirements, and set-up, start and stop services, we recommended running them in docker containers or otherwise restricted environments whenever possible. By default, ``ansible-test`` supports Docker images for several operating systems. See the `list of supported docker images `_ for all options. Use the ``default`` image mainly for platform-independent integration tests, such as those for cloud modules. The following examples use the ``centos8`` image. - -To execute all integration tests for a collection:: - - ansible-test integration --docker centos8 -v - -If you want more detailed output, run the command with ``-vvv`` instead of ``-v``. Alternatively, specify ``--retry-on-error`` to automatically re-run failed tests with higher verbosity levels. - -To execute only the integration tests in a specific directory:: - - ansible-test integration --docker centos8 -v connection_bar - -You can specify multiple target names. Each target name is the name of a directory in ``tests/integration/targets/``. - -.. _hacking_collections: - -Contributing to collections -=========================== - -If you want to add functionality to an existing collection, modify a collection you are using to fix a bug, or change the behavior of a module in a collection, clone the git repository for that collection and make changes on a branch. You can combine changes to a collection with a local checkout of Ansible (``source hacking/env-setup``). - -This section describes the process for `community.general `_. 
To contribute to other collections, replace the folder names ``community`` and ``general`` with the namespace and collection name of a different collection. - -We assume that you have included ``~/dev/ansible/collections/`` in :ref:`COLLECTIONS_PATHS`, and if that path mentions multiple directories, that you made sure that no other directory earlier in the search path contains a copy of ``community.general``. Create the directory ``~/dev/ansible/collections/ansible_collections/community``, and in it clone `the community.general Git repository `_ or a fork of it into the folder ``general``:: - - mkdir -p ~/dev/ansible/collections/ansible_collections/community - cd ~/dev/ansible/collections/ansible_collections/community - git clone git@github.com:ansible-collections/community.general.git general - -If you clone a fork, add the original repository as a remote ``upstream``:: - - cd ~/dev/ansible/collections/ansible_collections/community/general - git remote add upstream git@github.com:ansible-collections/community.general.git - -Now you can use this checkout of ``community.general`` in playbooks and roles with whichever version of Ansible you have installed locally, including a local checkout of ``ansible/ansible``'s ``devel`` branch. - -For collections hosted in the ``ansible_collections`` GitHub org, create a branch and commit your changes on the branch. When you are done (remember to add tests, see :ref:`testing_collections`), push your changes to your fork of the collection and create a Pull Request. For other collections, especially for collections not hosted on GitHub, check the ``README.md`` of the collection for information on contributing to it. - -.. _collection_changelogs: - -Generating changelogs for a collection -====================================== - -We recommend that you use the `antsibull-changelog `_ tool to generate Ansible-compatible changelogs for your collection. The Ansible changelog uses the output of this tool to collate all the collections included in an Ansible release into one combined changelog for the release. - -.. note:: - - Ansible here refers to the Ansible 2.10 or later release that includes a curated set of collections. - -Understanding antsibull-changelog ---------------------------------- - -The ``antsibull-changelog`` tool allows you to create and update changelogs for Ansible collections that are compatible with the combined Ansible changelogs. This is an update to the changelog generator used in prior Ansible releases. The tool adds three new changelog fragment categories: ``breaking_changes``, ``security_fixes`` and ``trivial``. The tool also generates the ``changelog.yaml`` file that Ansible uses to create the combined ``CHANGELOG.rst`` file and Porting Guide for the release. - -See :ref:`changelogs_how_to` and the `antsibull-changelog documentation `_ for complete details. - -.. note:: - - The collection maintainers set the changelog policy for their collections. See the individual collection contributing guidelines for complete details. - -Generating changelogs ---------------------- - -To initialize changelog generation: - -#. Install ``antsibull-changelog``: :code:`pip install antsibull-changelog`. -#. Initialize changelogs for your repository: :code:`antsibull-changelog init `. -#. Optionally, edit the ``changelogs/config.yaml`` file to customize the location of the generated changelog ``.rst`` file or other options. See `Bootstrapping changelogs for collections `_ for details. 
- -To generate changelogs from the changelog fragments you created: - -#. Optionally, validate your changelog fragments: :code:`antsibull-changelog lint`. -#. Generate the changelog for your release: :code:`antsibull-changelog release [--version version_number]`. - -.. note:: - - Add the ``--reload-plugins`` option if you ran the ``antsibull-changelog release`` command previously and the version of the collection has not changed. ``antsibull-changelog`` caches the information on all plugins and does not update its cache until the collection version changes. - - -Porting Guide entries ----------------------- - -The following changelog fragment categories are consumed by the Ansible changelog generator into the Ansible Porting Guide: - -* ``major_changes`` -* ``breaking_changes`` -* ``deprecated_features`` -* ``removed_features`` +Collections are a distribution format for Ansible content. You can package and distribute playbooks, roles, modules, and plugins using collections. A typical collection addresses a set of related use cases. For example, the ``cisco.ios`` collection automates management of Cisco IOS devices. -Including collection changelogs into Ansible -============================================= +You can create a collection and publish it to `Ansible Galaxy `_ or to a private Automation Hub instance. You can publish certified collections to the Red Hat Automation Hub, part of the Red Hat Ansible Automation Platform. +.. toctree:: + :maxdepth: 2 + :caption: Developing new collections -If your collection is part of Ansible, use one of the following three options to include your changelog into the Ansible release changelog: + developing_collections_creating + developing_collections_shared + developing_collections_testing + developing_collections_distributing -* Use the ``antsibull-changelog`` tool. +.. toctree:: + :maxdepth: 2 + :caption: Working with existing collections -* If are not using this tool, include the properly formatted ``changelog.yaml`` file into your collection. See the `changelog.yaml format `_ for details. + developing_collections_migrating + developing_collections_contributing + developing_collections_changelogs -* Add a link to own changelogs or release notes in any format by opening an issue at https://github.com/ansible-community/ansible-build-data/ with the HTML link to that information. +.. toctree:: + :maxdepth: 2 + :caption: Collections references -.. note:: + developing_collections_structure + collections_galaxy_meta - For the first two options, Ansible pulls the changelog details from Galaxy so your changelogs must be included in the collection version on Galaxy that is included in the upcoming Ansible release. +For instructions on developing modules, see :ref:`developing_modules_general`. .. seealso:: :ref:`collections` - Learn how to install and use collections. - :ref:`collections_galaxy_meta` - Understand the collections metadata structure. 
- :ref:`developing_modules_general` - Learn about how to write Ansible modules + Learn how to install and use collections in playbooks and roles + :ref:`contributing_maintained_collections` + Guidelines for contributing to selected collections + `Ansible Collections Overview and FAQ `_ + Current development status of community collections and FAQ `Mailing List `_ The development mailing list `irc.freenode.net `_ diff --git a/docs/docsite/rst/dev_guide/developing_collections_changelogs.rst b/docs/docsite/rst/dev_guide/developing_collections_changelogs.rst new file mode 100644 index 00000000000000..363182e43cd7e8 --- /dev/null +++ b/docs/docsite/rst/dev_guide/developing_collections_changelogs.rst @@ -0,0 +1,80 @@ +.. _collection_changelogs: + +*************************************************************** +Generating changelogs and porting guide entries in a collection +*************************************************************** + +You can create and share changelog and porting guide entries for your collection. If your collection is part of the Ansible Community package, we recommend that you use the `antsibull-changelog `_ tool to generate Ansible-compatible changelogs. The Ansible changelog uses the output of this tool to collate all the collections included in an Ansible release into one combined changelog for the release. + +.. note:: + + Ansible here refers to the Ansible 2.10 or later release that includes a curated set of collections. + +.. contents:: + :local: + :depth: 2 + +Understanding antsibull-changelog +================================= + +The ``antsibull-changelog`` tool allows you to create and update changelogs for Ansible collections that are compatible with the combined Ansible changelogs. This is an update to the changelog generator used in prior Ansible releases. The tool adds three new changelog fragment categories: ``breaking_changes``, ``security_fixes`` and ``trivial``. The tool also generates the ``changelog.yaml`` file that Ansible uses to create the combined ``CHANGELOG.rst`` file and Porting Guide for the release. + +See :ref:`changelogs_how_to` and the `antsibull-changelog documentation `_ for complete details. + +.. note:: + + The collection maintainers set the changelog policy for their collections. See the individual collection contributing guidelines for complete details. + +Generating changelogs +--------------------- + +To initialize changelog generation: + +#. Install ``antsibull-changelog``: :code:`pip install antsibull-changelog`. +#. Initialize changelogs for your repository: :code:`antsibull-changelog init `. +#. Optionally, edit the ``changelogs/config.yaml`` file to customize the location of the generated changelog ``.rst`` file or other options. See `Bootstrapping changelogs for collections `_ for details. + +To generate changelogs from the changelog fragments you created: + +#. Optionally, validate your changelog fragments: :code:`antsibull-changelog lint`. +#. Generate the changelog for your release: :code:`antsibull-changelog release [--version version_number]`. + +.. note:: + + Add the ``--reload-plugins`` option if you ran the ``antsibull-changelog release`` command previously and the version of the collection has not changed. ``antsibull-changelog`` caches the information on all plugins and does not update its cache until the collection version changes. 
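For reference, each changelog fragment is a small YAML file in the collection's ``changelogs/fragments/`` directory, with one or more category keys mapping to lists of entries. The following is a minimal, illustrative sketch; the file name and entry text are placeholders, and :ref:`changelogs_how_to` describes the format in full:

.. code-block:: yaml

   # changelogs/fragments/123-foo-fixes.yml (illustrative file name)
   bugfixes:
     - foo module - fix a crash when the remote service returns an empty response.
   minor_changes:
     - foo module - add a ``timeout`` option for slow endpoints.

When you run ``antsibull-changelog release``, entries from fragments like this one are collated into ``changelogs/changelog.yaml`` and the generated ``CHANGELOG.rst``.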
+ +Porting Guide entries from changelog fragments +---------------------------------------------- + +The Ansible changelog generator automatically adds several changelog fragment categories to the Ansible Porting Guide: + +* ``major_changes`` +* ``breaking_changes`` +* ``deprecated_features`` +* ``removed_features`` + +Including collection changelogs into Ansible +============================================= + +If your collection is part of Ansible, use one of the following three options to include your changelog into the Ansible release changelog: + +* Use the ``antsibull-changelog`` tool. + +* If are not using this tool, include the properly formatted ``changelog.yaml`` file into your collection. See the `changelog.yaml format `_ for details. + +* Add a link to own changelogs or release notes in any format by opening an issue at https://github.com/ansible-community/ansible-build-data/ with the HTML link to that information. + +.. note:: + + For the first two options, Ansible pulls the changelog details from Galaxy so your changelogs must be included in the collection version on Galaxy that is included in the upcoming Ansible release. + +.. seealso:: + + :ref:`collections` + Learn how to install and use collections. + :ref:`contributing_maintained_collections` + Guidelines for contributing to selected collections + `Mailing List `_ + The development mailing list + `irc.freenode.net `_ + #ansible IRC chat channel diff --git a/docs/docsite/rst/dev_guide/developing_collections_contributing.rst b/docs/docsite/rst/dev_guide/developing_collections_contributing.rst new file mode 100644 index 00000000000000..20ac45486a709a --- /dev/null +++ b/docs/docsite/rst/dev_guide/developing_collections_contributing.rst @@ -0,0 +1,38 @@ +.. _hacking_collections: + +*************************** +Contributing to collections +*************************** + +If you want to add functionality to an existing collection, modify a collection you are using to fix a bug, or change the behavior of a module in a collection, clone the git repository for that collection and make changes on a branch. You can combine changes to a collection with a local checkout of Ansible (``source hacking/env-setup``). +You should first check the collection repository to see if it has specific contribution guidelines. These are typically listed in the README.md or CONTRIBUTING.md files within the repository. +Contributing to a collection: community.general +=============================================== + +This section describes the process for `community.general `_. To contribute to other collections, replace the folder names ``community`` and ``general`` with the namespace and collection name of a different collection. + +We assume that you have included ``~/dev/ansible/collections/`` in :ref:`COLLECTIONS_PATHS`, and if that path mentions multiple directories, that you made sure that no other directory earlier in the search path contains a copy of ``community.general``. 
Create the directory ``~/dev/ansible/collections/ansible_collections/community``, and in it clone `the community.general Git repository `_ or a fork of it into the folder ``general``:: + + mkdir -p ~/dev/ansible/collections/ansible_collections/community + cd ~/dev/ansible/collections/ansible_collections/community + git clone git@github.com:ansible-collections/community.general.git general + +If you clone a fork, add the original repository as a remote ``upstream``:: + + cd ~/dev/ansible/collections/ansible_collections/community/general + git remote add upstream git@github.com:ansible-collections/community.general.git + +Now you can use this checkout of ``community.general`` in playbooks and roles with whichever version of Ansible you have installed locally, including a local checkout of ``ansible/ansible``'s ``devel`` branch. + +For collections hosted in the ``ansible_collections`` GitHub org, create a branch and commit your changes on the branch. When you are done (remember to add tests, see :ref:`testing_collections`), push your changes to your fork of the collection and create a Pull Request. For other collections, especially for collections not hosted on GitHub, check the ``README.md`` of the collection for information on contributing to it. + +.. seealso:: + + :ref:`collections` + Learn how to install and use collections. + :ref:`contributing_maintained_collections` + Guidelines for contributing to selected collections + `Mailing List `_ + The development mailing list + `irc.freenode.net `_ + #ansible IRC chat channel diff --git a/docs/docsite/rst/dev_guide/developing_collections_creating.rst b/docs/docsite/rst/dev_guide/developing_collections_creating.rst new file mode 100644 index 00000000000000..721d5c1abf4a64 --- /dev/null +++ b/docs/docsite/rst/dev_guide/developing_collections_creating.rst @@ -0,0 +1,57 @@ +.. _creating_collections: + +******************** +Creating collections +******************** + +To create a collection: + +#. Create a :ref:`collection skeleton` with the ``collection init`` command. +#. Add modules and other content to the collection. +#. Build the collection into a collection artifact with :ref:`ansible-galaxy collection build`. +#. Publish the collection artifact to Galaxy with :ref:`ansible-galaxy collection publish`. + +A user can then install your collection on their systems. + +.. contents:: + :local: + :depth: 2 + +.. _creating_collections_skeleton: + +Creating a collection skeleton +============================== + +To start a new collection: + +.. code-block:: bash + + collection_dir#> ansible-galaxy collection init my_namespace.my_collection + +.. note:: + + Both the namespace and collection names use the same strict set of requirements. See `Galaxy namespaces `_ on the Galaxy docsite for those requirements. + +Once the skeleton exists, you can populate the directories with the content you want inside the collection. See `ansible-collections `_ GitHub Org to get a better idea of what you can place inside a collection. + +Reference: the ``ansible-galaxy collection`` command + +Currently the ``ansible-galaxy collection`` command implements the following sub commands: + +* ``init``: Create a basic collection skeleton based on the default template included with Ansible or your own template. +* ``build``: Create a collection artifact that can be uploaded to Galaxy or your own repository. +* ``publish``: Publish a built collection artifact to Galaxy. +* ``install``: Install one or more collections. 
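Whichever sub command you run first, the ``build`` and ``publish`` steps rely on the metadata in the skeleton's ``galaxy.yml`` file. A minimal, illustrative sketch follows; every value is a placeholder, and :ref:`collections_galaxy_meta` lists all supported keys:

.. code-block:: yaml

   # galaxy.yml at the root of the collection (placeholder values)
   namespace: my_namespace
   name: my_collection
   version: 1.0.0
   readme: README.md
   authors:
     - Your Name (@your_github_handle)
   description: Short summary of what the collection automates.
   license:
     - GPL-3.0-or-later
   tags:
     - demo
   dependencies: {}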
+ +To learn more about the ``ansible-galaxy`` command-line tool, see the :ref:`ansible-galaxy` man page. + +.. seealso:: + + :ref:`collections` + Learn how to install and use collections. + :ref:`collection_structure` + Directories and files included in the collection skeleton + `Mailing List `_ + The development mailing list + `irc.freenode.net `_ + #ansible IRC chat channel diff --git a/docs/docsite/rst/dev_guide/developing_collections_distributing.rst b/docs/docsite/rst/dev_guide/developing_collections_distributing.rst new file mode 100644 index 00000000000000..e31d53368aab34 --- /dev/null +++ b/docs/docsite/rst/dev_guide/developing_collections_distributing.rst @@ -0,0 +1,241 @@ +.. _distributing_collections: + +************************ +Distributing collections +************************ + +You can distribute your collections by publishing them on a distribution server. Distribution servers include Ansible Galaxy, Red Hat Automation Hub, and privately hosted Automation Hub instances. You can publish any collection to Ansible Galaxy and/or to a privately hosted Automation Hub instance. If your collection is certified by Red Hat, you can publish it to the Red Hat Automation Hub. + +Distributing collections involves three major steps: +#. Configuring your distribution server(s) +#. Building your collection artifact +#. Publishing your collection + +.. contents:: + :local: + :depth: 2 + +Configuring your distribution server or servers +================================================ + +1. Get a namespace on each distribution server you want to use (Galaxy, private Automation Hub, Red Hat Automation Hub). +2. Get an API token for each distribution server you want to use. +3. Specify the API token for each distribution server you want to use. + +Getting a namespace +------------------- + +You need a namespace on Galaxy and/or Automation Hub to upload your collection. To get a namespace: + +* For Galaxy, see `Galaxy namespaces `_ on the Galaxy docsite for details. +* For Automation Hub, see the `Ansible Certified Content FAQ `_. + +.. _galaxy_get_token: + +Getting your API token +---------------------- + +You need an API token for Galaxy and/or Automation Hub to upload your collection. Use the API token(s) to authenticate your connection to the distribution server(s) and protect your content. + +To get your API token: + +* For Galaxy, go to the `Galaxy profile preferences `_ page and click :guilabel:`API Key`. +* For Automation Hub, go to `the token page `_ and click :guilabel:`Load token`. + +Specifying your API token +------------------------- + +Once you have retrieved your API token, you can specify the correct token for each distribution server in two ways: + +* Pass the token to the ``ansible-galaxy`` command using the ``--token``. +* Configure the token within a Galaxy server list in your :file:`ansible.cfg` file. + +Specifying your API token with the ``--token`` argument +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +You can use the ``--token`` argument with the ``ansible-galaxy`` command (in conjunction with the ``--server`` argument or :ref:`GALAXY_SERVER` setting in your :file:`ansible.cfg` file). You cannot use ``apt-key`` with any servers defined in your :ref:`Galaxy server list `. + +.. 
code-block:: text + + ansible-galaxy collection publish ./geerlingguy-collection-1.2.3.tar.gz --token= + +Specifying your API token with a Galaxy server list +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +You can configure one or more distribution servers for Galaxy in your :file:`ansible.cfg` file under the ``galaxy_server_list`` section. For each server, you also configure the token. + + +.. code-block:: ini + + [galaxy] + server_list = release_galaxy + + [galaxy_server.release_galaxy] + url=https://galaxy.ansible.com/ + token=my_token + +See :ref:`galaxy_server_config` for complete details. + +.. _building_collections: + +Building a collection tarball +============================= + +Once you have configured one or more distribution servers, you must build a collection tarball. To build a collection, run ``ansible-galaxy collection build`` from inside the root directory of the collection: + +.. code-block:: bash + + collection_dir#> ansible-galaxy collection build + +This creates a tarball of the built collection in the current directory which can be uploaded to your distribution server:: + + my_collection/ + ├── galaxy.yml + ├── ... + ├── my_namespace-my_collection-1.0.0.tar.gz + └── ... + +.. note:: + * Certain files and folders are excluded when building the collection artifact. See :ref:`ignoring_files_and_folders_collections` to exclude other files you would not want to distribute. + * If you used the now-deprecated ``Mazer`` tool for any of your collections, delete any and all files it added to your :file:`releases/` directory before you build your collection with ``ansible-galaxy``. + * The current Galaxy maximum tarball size is 2 MB. + +This tarball is mainly intended to upload to Galaxy as a distribution method, but you can use it directly to install the collection on target systems. + +.. _ignoring_files_and_folders_collections: + +Ignoring files and folders +-------------------------- + +By default the build step will include all the files in the collection directory in the final build artifact except for the following: + +* ``galaxy.yml`` +* ``*.pyc`` +* ``*.retry`` +* ``tests/output`` +* previously built artifacts in the root directory +* various version control directories like ``.git/`` + +To exclude other files and folders when building the collection, you can set a list of file glob-like patterns in the +``build_ignore`` key in the collection's ``galaxy.yml`` file. These patterns use the following special characters for +wildcard matching: + +* ``*``: Matches everything +* ``?``: Matches any single character +* ``[seq]``: Matches and character in seq +* ``[!seq]``:Matches any character not in seq + +For example, if you wanted to exclude the :file:`sensitive` folder within the ``playbooks`` folder as well any ``.tar.gz`` archives you +can set the following in your ``galaxy.yml`` file: + +.. code-block:: yaml + + build_ignore: + - playbooks/sensitive + - '*.tar.gz' + +.. note:: + This feature is only supported when running ``ansible-galaxy collection build`` with Ansible 2.10 or newer. + +.. _collection_versions: + +Collection versions +=================== + +Each time you publish your collection, you create a new version. Once you publish a version of a collection, you cannot delete or modify that version. Ensure that everything looks okay before publishing. The only way to change a collection is to release a new version. 
The latest version of a collection (by highest version number) will be the version displayed everywhere in Galaxy or Automation Hub; however, users will still be able to download older versions. + +Collection versions use `Semantic Versioning `_ for version numbers. Please read the official documentation for details and examples. In summary: + +* Increment major (for example: x in `x.y.z`) version number for an incompatible API change. +* Increment minor (for example: y in `x.y.z`) version number for new functionality in a backwards compatible manner (for example new modules/plugins, parameters, return values). +* Increment patch (for example: z in `x.y.z`) version number for backwards compatible bug fixes. + + +.. _trying_collection_locally: + +Trying collections locally +========================== + +Before you publish your collection, test it out locally. Every time you publish a tarball, you create a :ref:`new version ` of your collection. Testing the collection locally gives you confidence that the new version will contain the functionality you want without unexpected behavior. + +Trying your collection from the tarball +--------------------------------------- + +You can try your collection locally by installing it from the tarball. The following will enable an adjacent playbook to access the collection: + +.. code-block:: bash + + ansible-galaxy collection install my_namespace-my_collection-1.0.0.tar.gz -p ./collections + + +You should use one of the values configured in :ref:`COLLECTIONS_PATHS` for your path. This is also where Ansible itself will +expect to find collections when attempting to use them. If you don't specify a path value, ``ansible-galaxy collection install`` +installs the collection in the first path defined in :ref:`COLLECTIONS_PATHS`, which by default is ``~/.ansible/collections``. + +If you want to use a collection directly out of a checked out git repository, see :ref:`hacking_collections`. + +Next, try using the local collection inside a playbook. For examples and more details see :ref:`Using collections ` + +.. _collections_scm_install: + +Trying your collection from a git repository +-------------------------------------------- + +You can also test a version of your collection in development by installing it from a git repository. + +.. code-block:: bash + + ansible-galaxy collection install git+https://github.com/org/repo.git,devel + +.. include:: ../shared_snippets/installing_collections_git_repo.txt + +Publishing a collection +======================= + +Once you have a namespace and an API token for each distribution server you want to use, and you have created and tested a collection tarball, you can distribute your collection by publishing the tarball to Ansible Galaxy, Red Hat Automation Hub, or a privately hosted Automation Hub instance. You can use either the ``ansible-galaxy collection publish`` command or the distribution server (Galaxy, Automation Hub) itself. + +Each time you add features or make changes to your collection, you must create a new collection artifact and publish a new version of the collection. For details on versioning, see :ref:`collection_versions`. + +.. _upload_collection_ansible_galaxy: + +Publish a collection using ``ansible-galaxy`` +--------------------------------------------- + +.. note:: + By default, ``ansible-galaxy`` uses https://galaxy.ansible.com as the Galaxy server (as listed in the :file:`ansible.cfg` file under :ref:`galaxy_server`). 
If you are only publishing your collection to Ansible Galaxy, you do not need any further configuration. If you are using Red Hat Automation Hub or any other Galaxy server, see :ref:`Configuring the ansible-galaxy client `. + +To upload the collection artifact with the ``ansible-galaxy`` command: + +.. code-block:: bash + + ansible-galaxy collection publish path/to/my_namespace-my_collection-1.0.0.tar.gz + +.. note:: + + The above command assumes you have retrieved and stored your API token as part of a Galaxy server list. See :ref:`galaxy_get_token` for details. + +The ``ansible-galaxy collection publish`` command triggers an import process, just as if you uploaded the collection through the Galaxy website. The command waits until the import process completes before reporting the status back. If you want to continue without waiting for the import result, use the ``--no-wait`` argument and manually look at the import progress in your `My Imports `_ page. + + +.. _upload_collection_galaxy: + +Publishing a collection using the Galaxy website +------------------------------------------------ + +To publish your collection directly on the Galaxy website: + +#. Go to the `My Content `_ page, and click the **Add Content** button on one of your namespaces. +#. From the **Add Content** dialogue, click **Upload New Collection**, and select the collection archive file from your local filesystem. + +When you upload a collection, it always uploads to the namespace specified in the collection metadata in the ``galaxy.yml`` file, no matter which namespace you select on the website. If you are not an owner of the namespace specified in your collection metadata, the upload request will fail. + +Once Galaxy uploads and accepts a collection, you will be redirected to the **My Imports** page, which displays output from the import process, including any errors or warnings about the metadata and content contained in the collection. + +.. seealso:: + + :ref:`collections` + Learn how to install and use collections. + `Mailing List `_ + The development mailing list + `irc.freenode.net `_ + #ansible IRC chat channel diff --git a/docs/docsite/rst/dev_guide/developing_collections_migrating.rst b/docs/docsite/rst/dev_guide/developing_collections_migrating.rst new file mode 100644 index 00000000000000..9980e238c4bb42 --- /dev/null +++ b/docs/docsite/rst/dev_guide/developing_collections_migrating.rst @@ -0,0 +1,136 @@ +.. _migrate_to_collection: + +*************************************************** +Migrating Ansible content to a different collection +*************************************************** + +When you move content from one collection to another, for example to extract a set of related modules out of ``community.general`` to create a more focused collection, you must make sure the transition is easy for users to follow. + +.. contents:: + :local: + :depth: 2 + +Migrating content +================= + +Before you start migrating content from one collection to another, look at `Ansible Collection Checklist `_. + +To migrate content from one collection to another, if the collections are parts of `Ansible distribution `_: + +#. Copy content from the source (old) collection to the target (new) collection. +#. Deprecate the module/plugin with ``removal_version`` scheduled for the next major version in ``meta/runtime.yml`` of the old collection. The deprecation must be released after the copied content has been included in a release of the new collection. +#. 
When the next major release of the old collection is prepared: + + * remove the module/plugin from the old collection + * remove the symlink stored in ``plugin/modules`` directory if appropriate (mainly when removing from ``community.general`` and ``community.network``) + * remove related unit and integration tests + * remove specific module utils + * remove specific documentation fragments if there are any in the old collection + * add a changelog fragment containing entries for ``removed_features`` and ``breaking_changes``; you can see an example of a changelog fragment in this `pull request `_ + * change ``meta/runtime.yml`` in the old collection: + + * add ``redirect`` to the corresponding module/plugin's entry + * in particular, add ``redirect`` for the removed module utils and documentation fragments if applicable + * remove ``removal_version`` from there + * remove related entries from ``tests/sanity/ignore.txt`` files if exist + * remove changelog fragments for removed content that are not yet part of the changelog (in other words, do not modify `changelogs/changelog.yaml` and do not delete files mentioned in it) + * remove requirements that are no longer required in ``tests/unit/requirements.txt``, ``tests/requirements.yml`` and ``galaxy.yml`` + +To implement these changes, you need to create at least three PRs: + +#. Create a PR against the new collection to copy the content. +#. Deprecate the module/plugin in the old collection. +#. Later create a PR against the old collection to remove the content according to the schedule. + + +Adding the content to the new collection +---------------------------------------- + +Create a PR in the new collection to: + +#. Copy ALL the related files from the old collection. +#. If it is an action plugin, include the corresponding module with documentation. +#. If it is a module, check if it has a corresponding action plugin that should move with it. +#. Check ``meta/`` for relevant updates to ``runtime.yml`` if it exists. +#. Carefully check the moved ``tests/integration`` and ``tests/units`` and update for FQCN. +#. Review ``tests/sanity/ignore-*.txt`` entries in the old collection. +#. Update ``meta/runtime.yml`` in the old collection. + + +Removing the content from the old collection +-------------------------------------------- + +Create a PR against the source collection repository to remove the modules, module_utils, plugins, and docs_fragments related to this migration: + +#. If you are removing an action plugin, remove the corresponding module that contains the documentation. +#. If you are removing a module, remove any corresponding action plugin that should stay with it. +#. Remove any entries about removed plugins from ``meta/runtime.yml``. Ensure they are added into the new repo. +#. Remove sanity ignore lines from ``tests/sanity/ignore\*.txt`` +#. Remove associated integration tests from ``tests/integrations/targets/`` and unit tests from ``tests/units/plugins/``. +#. if you are removing from content from ``community.general`` or ``community.network``, remove entries from ``.github/BOTMETA.yml``. +#. Carefully review ``meta/runtime.yml`` for any entries you may need to remove or update, in particular deprecated entries. +#. Update ``meta/runtime.yml`` to contain redirects for EVERY PLUGIN, pointing to the new collection name. + +.. warning:: + + Maintainers for the old collection have to make sure that the PR is merged in a way that it does not break user experience and semantic versioning: + + #. 
A new version containing the merged PR must not be released before the collection the content has been moved to has been released again, with that content contained in it. Otherwise the redirects cannot work and users relying on that content will experience breakage. + #. Once 1.0.0 of the collection from which the content has been removed has been released, such PRs can only be merged for a new **major** version (in other words, 2.0.0, 3.0.0, and so on). + + +Updating BOTMETA.yml +-------------------- + +The ``BOTMETA.yml``, for example in `community.general collection repository `_, is the source of truth for: + +* ansibullbot + +If the old and/or new collection has ``ansibullbot``, its ``BOTMETA.yml`` must be updated correspondingly. + +Ansibulbot will know how to redirect existing issues and PRs to the new repo. The build process for docs.ansible.com will know where to find the module docs. + +.. code-block:: yaml + + $modules/monitoring/grafana/grafana_plugin.py: + migrated_to: community.grafana + $modules/monitoring/grafana/grafana_dashboard.py: + migrated_to: community.grafana + $modules/monitoring/grafana/grafana_datasource.py: + migrated_to: community.grafana + $plugins/callback/grafana_annotations.py: + maintainers: $team_grafana + labels: monitoring grafana + migrated_to: community.grafana + $plugins/doc_fragments/grafana.py: + maintainers: $team_grafana + labels: monitoring grafana + migrated_to: community.grafana + +`Example PR `_ + +* The ``migrated_to:`` key must be added explicitly for every *file*. You cannot add ``migrated_to`` at the directory level. This is to allow module and plugin webdocs to be redirected to the new collection docs. +* ``migrated_to:`` MUST be added for every: + + * module + * plugin + * module_utils + * contrib/inventory script + +* You do NOT need to add ``migrated_to`` for: + + * Unit tests + * Integration tests + * ReStructured Text docs (anything under ``docs/docsite/rst/``) + * Files that never existed in ``ansible/ansible:devel`` + +.. seealso:: + + :ref:`collections` + Learn how to install and use collections. + :ref:`contributing_maintained_collections` + Guidelines for contributing to selected collections + `Mailing List `_ + The development mailing list + `irc.freenode.net `_ + #ansible IRC chat channel diff --git a/docs/docsite/rst/dev_guide/developing_collections_shared.rst b/docs/docsite/rst/dev_guide/developing_collections_shared.rst new file mode 100644 index 00000000000000..331b27203ae73a --- /dev/null +++ b/docs/docsite/rst/dev_guide/developing_collections_shared.rst @@ -0,0 +1,77 @@ +.. _collections_shared_resources: + +************************************* +Using shared resources in collections +************************************* + +Although developing Ansible modules contained in collections is similar to developing standalone Ansible modules, you use shared resources like documentation fragments and module utilities differently in collections. You can use documentation fragments within and across collections. You can use optional module utilities to support multiple versions of ansible-core in your collection. + +.. contents:: + :local: + :depth: 2 + +.. _docfragments_collections: + +Using documentation fragments in collections +============================================ + +To include documentation fragments in your collection: + +#. Create the documentation fragment: ``plugins/doc_fragments/fragment_name``. + +#. Refer to the documentation fragment with its FQCN. + +.. 
code-block:: yaml + + extends_documentation_fragment: + - community.kubernetes.k8s_name_options + - community.kubernetes.k8s_auth_options + - community.kubernetes.k8s_resource_options + - community.kubernetes.k8s_scale_options + +:ref:`module_docs_fragments` covers the basics for documentation fragments. The `kubernetes `_ collection includes a complete example. + +If you use FQCN, you can use documentation fragments from one collection in another collection. + +.. _optional_module_utils: + +Leveraging optional module utilities in collections +=================================================== + +Optional module utilities let you adopt the latest features from the most recent ansible-core release in your collection-based modules without breaking your collection on older Ansible versions. With optional module utilities, you can leverage the latest features when running against the latest versions, while still providing fallback behaviors when running against older versions. + +This implementation, widely used in Python programming, wraps optional imports in conditionals or defensive `try/except` blocks, and implements fallback behaviors for missing imports. Ansible's module payload builder supports these patterns by treating any module_utils import nested in a block (e.g., `if`, `try`) as optional. If the requested import cannot be found during the payload build, it is simply omitted from the target payload and assumed that the importing code will handle its absence at runtime. Missing top-level imports of module_utils packages (imports that are not wrapped in a block statement of any kind) will fail the module payload build, and will not execute on the target. + +For example, the `ansible.module_utils.common.respawn` package is only available in Ansible 2.11 and higher. The following module code would fail during the payload build on Ansible 2.10 or earlier (as the requested Python module does not exist, and is not wrapped in a block to signal to the payload builder that it can be omitted from the module payload): + +.. code-block:: python + + from ansible.module_utils.common.respawn import respawn_module + +By wrapping the import statement in a ``try`` block, the payload builder will omit the Python module if it cannot be located, and assume that the Ansible module will handle it at runtime: + +.. code-block:: python + + try: + from ansible.module_utils.common.respawn import respawn_module + except ImportError: + respawn_module = None + ... + if needs_respawn: + if respawn_module: + respawn_module(target) + else: + module.fail_json('respawn is not available in Ansible < 2.11, ensure that foopkg is installed') + +The optional import behavior also applies to module_utils imported from collections. + +.. seealso:: + + :ref:`collections` + Learn how to install and use collections. + :ref:`contributing_maintained_collections` + Guidelines for contributing to selected collections + `Mailing List `_ + The development mailing list + `irc.freenode.net `_ + #ansible IRC chat channel diff --git a/docs/docsite/rst/dev_guide/developing_collections_structure.rst b/docs/docsite/rst/dev_guide/developing_collections_structure.rst new file mode 100644 index 00000000000000..8d366502613c6a --- /dev/null +++ b/docs/docsite/rst/dev_guide/developing_collections_structure.rst @@ -0,0 +1,241 @@ +.. _collection_structure: + +******************** +Collection structure +******************** + +A collection is a simple data structure. 
None of the directories are required unless you have specific content that belongs in one of them. A collection does require a ``galaxy.yml`` file at the root level of the collection. This file contains all of the metadata that Galaxy and other tools need in order to package, build and publish the collection. + +.. contents:: + :local: + :depth: 2 + +Collection directories and files +================================ + +A collection can contain these directories and files:: + + collection/ + ├── docs/ + ├── galaxy.yml + ├── meta/ + │ └── runtime.yml + ├── plugins/ + │ ├── modules/ + │ │ └── module1.py + │ ├── inventory/ + │ └── .../ + ├── README.md + ├── roles/ + │ ├── role1/ + │ ├── role2/ + │ └── .../ + ├── playbooks/ + │ ├── files/ + │ ├── vars/ + │ ├── templates/ + │ └── tasks/ + └── tests/ + +.. note:: + * Ansible only accepts ``.md`` extensions for the :file:`README` file and any files in the :file:`/docs` folder. + * See the `ansible-collections `_ GitHub Org for examples of collection structure. + * Not all directories are currently in use. Those are placeholders for future features. + +.. _galaxy_yml: + +galaxy.yml +---------- + +A collection must have a ``galaxy.yml`` file that contains the necessary information to build a collection artifact. See :ref:`collections_galaxy_meta` for details. + +.. _collections_doc_dir: + +docs directory +--------------- + +Put general documentation for the collection here. Keep the specific documentation for plugins and modules embedded as Python docstrings. Use the ``docs`` folder to describe how to use the roles and plugins the collection provides, role requirements, and so on. Use markdown and do not add subfolders. + +Use ``ansible-doc`` to view documentation for plugins inside a collection: + +.. code-block:: bash + + ansible-doc -t lookup my_namespace.my_collection.lookup1 + +The ``ansible-doc`` command requires the fully qualified collection name (FQCN) to display specific plugin documentation. In this example, ``my_namespace`` is the Galaxy namespace and ``my_collection`` is the collection name within that namespace. + +.. note:: The Galaxy namespace of an Ansible collection is defined in the ``galaxy.yml`` file. It can be different from the GitHub organization or repository name. + +.. _collections_plugin_dir: + +plugins directory +----------------- + +Add a 'per plugin type' specific subdirectory here, including ``module_utils`` which is usable not only by modules, but by most plugins by using their FQCN. This is a way to distribute modules, lookups, filters, and so on without having to import a role in every play. + +Vars plugins are unsupported in collections. Cache plugins may be used in collections for fact caching, but are not supported for inventory plugins. + +.. _collection_module_utils: + +module_utils +^^^^^^^^^^^^ + +When coding with ``module_utils`` in a collection, the Python ``import`` statement needs to take into account the FQCN along with the ``ansible_collections`` convention. The resulting Python import will look like ``from ansible_collections.{namespace}.{collection}.plugins.module_utils.{util} import {something}`` + +The following example snippets show a Python and PowerShell module using both default Ansible ``module_utils`` and +those provided by a collection. In this example the namespace is ``community``, the collection is ``test_collection``. +In the Python example the ``module_util`` in question is called ``qradar`` such that the FQCN is +``community.test_collection.plugins.module_utils.qradar``: + +.. 
code-block:: python + + from ansible.module_utils.basic import AnsibleModule + from ansible.module_utils.common.text.converters import to_text + + from ansible.module_utils.six.moves.urllib.parse import urlencode, quote_plus + from ansible.module_utils.six.moves.urllib.error import HTTPError + from ansible_collections.community.test_collection.plugins.module_utils.qradar import QRadarRequest + + argspec = dict( + name=dict(required=True, type='str'), + state=dict(choices=['present', 'absent'], required=True), + ) + + module = AnsibleModule( + argument_spec=argspec, + supports_check_mode=True + ) + + qradar_request = QRadarRequest( + module, + headers={"Content-Type": "application/json"}, + not_rest_data_keys=['state'] + ) + +Note that importing something from an ``__init__.py`` file requires using the file name: + +.. code-block:: python + + from ansible_collections.namespace.collection_name.plugins.callback.__init__ import CustomBaseClass + +In the PowerShell example the ``module_util`` in question is called ``hyperv`` such that the FQCN is +``community.test_collection.plugins.module_utils.hyperv``: + +.. code-block:: powershell + + #!powershell + #AnsibleRequires -CSharpUtil Ansible.Basic + #AnsibleRequires -PowerShell ansible_collections.community.test_collection.plugins.module_utils.hyperv + + $spec = @{ + name = @{ required = $true; type = "str" } + state = @{ required = $true; choices = @("present", "absent") } + } + $module = [Ansible.Basic.AnsibleModule]::Create($args, $spec) + + Invoke-HyperVFunction -Name $module.Params.name + + $module.ExitJson() + +.. _collections_roles_dir: + +roles directory +---------------- + +Collection roles are mostly the same as existing roles, but with a couple of limitations: + + - Role names are now limited to contain only lowercase alphanumeric characters, plus ``_`` and start with an alpha character. + - Roles in a collection cannot contain plugins any more. Plugins must live in the collection ``plugins`` directory tree. Each plugin is accessible to all roles in the collection. + +The directory name of the role is used as the role name. Therefore, the directory name must comply with the above role name rules. The collection import into Galaxy will fail if a role name does not comply with these rules. + +You can migrate 'traditional roles' into a collection but they must follow the rules above. You may need to rename roles if they don't conform. You will have to move or link any role-based plugins to the collection specific directories. + +.. note:: + + For roles imported into Galaxy directly from a GitHub repository, setting the ``role_name`` value in the role's metadata overrides the role name used by Galaxy. For collections, that value is ignored. When importing a collection, Galaxy uses the role directory as the name of the role and ignores the ``role_name`` metadata value. + +playbooks directory +-------------------- + +TBD. + +.. _developing_collections_tests_directory: + +tests directory +---------------- + +Ansible Collections are tested much like Ansible itself, by using the `ansible-test` utility which is released as part of Ansible, version 2.9.0 and newer. Because Ansible Collections are tested using the same tooling as Ansible itself, via `ansible-test`, all Ansible developer documentation for testing is applicable for authoring Collections Tests with one key concept to keep in mind. + +See :ref:`testing_collections` for specific information on how to test collections with ``ansible-test``. 
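For orientation, a role-style integration test target under ``tests/integration/targets/`` consists of ordinary Ansible task files. The sketch below assumes a module named ``my_module`` in a ``my_namespace.my_collection`` collection; all names are illustrative:

.. code-block:: yaml

   # tests/integration/targets/my_module/tasks/main.yml (illustrative)
   - name: Create a test resource with the module under test
     my_namespace.my_collection.my_module:
       name: demo
       state: present
     register: result

   - name: Verify the module reported a change
     ansible.builtin.assert:
       that:
         - result is changed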
+ +When reading the :ref:`developing_testing` documentation, there will be content that applies to running Ansible from source code via a git clone, which is typical of an Ansible developer. However, it's not always typical for an Ansible Collection author to be running Ansible from source but instead from a stable release, and to create Collections it is not necessary to run Ansible from source. Therefore, when references of dealing with `ansible-test` binary paths, command completion, or environment variables are presented throughout the :ref:`developing_testing` documentation; keep in mind that it is not needed for Ansible Collection Testing because the act of installing the stable release of Ansible containing `ansible-test` is expected to setup those things for you. + +.. _meta_runtime_yml: + +meta directory +-------------- + +A collection can store some additional metadata in a ``runtime.yml`` file in the collection's ``meta`` directory. The ``runtime.yml`` file supports the top level keys: + +- *requires_ansible*: + + The version of Ansible required to use the collection. Multiple versions can be separated with a comma. + + .. code:: yaml + + requires_ansible: ">=2.10,<2.11" + + .. note:: although the version is a `PEP440 Version Specifier `_ under the hood, Ansible deviates from PEP440 behavior by truncating prerelease segments from the Ansible version. This means that Ansible 2.11.0b1 is compatible with something that ``requires_ansible: ">=2.11"``. + +- *plugin_routing*: + + Content in a collection that Ansible needs to load from another location or that has been deprecated/removed. + The top level keys of ``plugin_routing`` are types of plugins, with individual plugin names as subkeys. + To define a new location for a plugin, set the ``redirect`` field to another name. + To deprecate a plugin, use the ``deprecation`` field to provide a custom warning message and the removal version or date. If the plugin has been renamed or moved to a new location, the ``redirect`` field should also be provided. If a plugin is being removed entirely, ``tombstone`` can be used for the fatal error message and removal version or date. + + .. code:: yaml + + plugin_routing: + inventory: + kubevirt: + redirect: community.general.kubevirt + my_inventory: + tombstone: + removal_version: "2.0.0" + warning_text: my_inventory has been removed. Please use other_inventory instead. + modules: + my_module: + deprecation: + removal_date: "2021-11-30" + warning_text: my_module will be removed in a future release of this collection. Use another.collection.new_module instead. + redirect: another.collection.new_module + podman_image: + redirect: containers.podman.podman_image + module_utils: + ec2: + redirect: amazon.aws.ec2 + util_dir.subdir.my_util: + redirect: namespace.name.my_util + +- *import_redirection* + + A mapping of names for Python import statements and their redirected locations. + + .. code:: yaml + + import_redirection: + ansible.module_utils.old_utility: + redirect: ansible_collections.namespace_name.collection_name.plugins.module_utils.new_location + +.. 
seealso:: + + :ref:`distributing_collections` + Learn how to package and publish your collection + :ref:`contributing_maintained_collections` + Guidelines for contributing to selected collections + `Mailing List `_ + The development mailing list + `irc.freenode.net `_ + #ansible IRC chat channel diff --git a/docs/docsite/rst/dev_guide/developing_collections_testing.rst b/docs/docsite/rst/dev_guide/developing_collections_testing.rst new file mode 100644 index 00000000000000..c10694d1a4e091 --- /dev/null +++ b/docs/docsite/rst/dev_guide/developing_collections_testing.rst @@ -0,0 +1,83 @@ +.. _testing_collections: + +******************* +Testing collections +******************* + +Testing your collection ensures that your code works well and integrates well with the rest of the Ansible ecosystem. Your collection should pass the general compile and sanity tests for Ansible code. You should also add unit tests to cover the code in your collection and integration tests to cover the interactions between your collection and ansible-core. + +.. contents:: + :local: + :depth: 2 + +Testing tools +============= + +The main tool for testing collections is ``ansible-test``, Ansible's testing tool described in :ref:`developing_testing`. You can run several compile and sanity checks, as well as run unit and integration tests for plugins using ``ansible-test``. When you test collections, test against the ansible-core version(s) you are targeting. + +You must always execute ``ansible-test`` from the root directory of a collection. You can run ``ansible-test`` in Docker containers without installing any special requirements. The Ansible team uses this approach in Shippable both in the ansible/ansible GitHub repository and in the large community collections such as `community.general `_ and `community.network `_. The examples below demonstrate running tests in Docker containers. + +Compile and sanity tests +------------------------ + +To run all compile and sanity tests:: + + ansible-test sanity --docker default -v + +See :ref:`testing_compile` and :ref:`testing_sanity` for more information. See the :ref:`full list of sanity tests ` for details on the sanity tests and how to fix identified issues. + +Adding unit tests +----------------- + +You must place unit tests in the appropriate``tests/unit/plugins/`` directory. For example, you would place tests for ``plugins/module_utils/foo/bar.py`` in ``tests/unit/plugins/module_utils/foo/test_bar.py`` or ``tests/unit/plugins/module_utils/foo/bar/test_bar.py``. For examples, see the `unit tests in community.general `_. + +To run all unit tests for all supported Python versions:: + + ansible-test units --docker default -v + +To run all unit tests only for a specific Python version:: + + ansible-test units --docker default -v --python 3.6 + +To run only a specific unit test:: + + ansible-test units --docker default -v --python 3.6 tests/unit/plugins/module_utils/foo/test_bar.py + +You can specify Python requirements in the ``tests/unit/requirements.txt`` file. See :ref:`testing_units` for more information, especially on fixture files. + +Adding integration tests +------------------------ + +You must place integration tests in the appropriate ``tests/integration/targets/`` directory. For module integration tests, you can use the module name alone. For example, you would place integration tests for ``plugins/modules/foo.py`` in a directory called ``tests/integration/targets/foo/``. 
For non-module plugin integration tests, you must add the plugin type to the directory name. For example, you would place integration tests for ``plugins/connections/bar.py`` in a directory called ``tests/integration/targets/connection_bar/``. For lookup plugins, the directory must be called ``lookup_foo``, for inventory plugins, ``inventory_foo``, and so on. + +You can write two different kinds of integration tests: + +* Ansible role tests run with ``ansible-playbook`` and validate various aspects of the module. They can depend on other integration tests (usually named ``prepare_bar`` or ``setup_bar``, which prepare a service or install a requirement named ``bar`` in order to test module ``foo``) to set-up required resources, such as installing required libraries or setting up server services. +* ``runme.sh`` tests run directly as scripts. They can set up inventory files, and execute ``ansible-playbook`` or ``ansible-inventory`` with various settings. + +For examples, see the `integration tests in community.general `_. See also :ref:`testing_integration` for more details. + +Since integration tests can install requirements, and set-up, start and stop services, we recommended running them in docker containers or otherwise restricted environments whenever possible. By default, ``ansible-test`` supports Docker images for several operating systems. See the `list of supported docker images `_ for all options. Use the ``default`` image mainly for platform-independent integration tests, such as those for cloud modules. The following examples use the ``centos8`` image. + +To execute all integration tests for a collection:: + + ansible-test integration --docker centos8 -v + +If you want more detailed output, run the command with ``-vvv`` instead of ``-v``. Alternatively, specify ``--retry-on-error`` to automatically re-run failed tests with higher verbosity levels. + +To execute only the integration tests in a specific directory:: + + ansible-test integration --docker centos8 -v connection_bar + +You can specify multiple target names. Each target name is the name of a directory in ``tests/integration/targets/``. + +.. seealso:: + + :ref:`developing_testing` + More resources on testing Ansible + :ref:`contributing_maintained_collections` + Guidelines for contributing to selected collections + `Mailing List `_ + The development mailing list + `irc.freenode.net `_ + #ansible IRC chat channel From af716f83183925b1a3f4fcf5b6410adb9ccb2f9c Mon Sep 17 00:00:00 2001 From: Ernst Oudhof <17832702+ernst-s@users.noreply.github.com> Date: Fri, 23 Apr 2021 23:52:14 +0200 Subject: [PATCH 06/16] Add weos4 network platform to documentation (#74088) * Add weos4 network platform to documentation * Fix small format issues (cherry picked from commit 7ca5dede978d01f53551da6ae4ccacbad365c88b) --- .../rst/network/user_guide/platform_index.rst | 3 + .../rst/network/user_guide/platform_weos4.rst | 88 +++++++++++++++++++ 2 files changed, 91 insertions(+) create mode 100644 docs/docsite/rst/network/user_guide/platform_weos4.rst diff --git a/docs/docsite/rst/network/user_guide/platform_index.rst b/docs/docsite/rst/network/user_guide/platform_index.rst index 43a8103a41febd..f9a8bc5f3c7bfa 100644 --- a/docs/docsite/rst/network/user_guide/platform_index.rst +++ b/docs/docsite/rst/network/user_guide/platform_index.rst @@ -33,6 +33,7 @@ Some Ansible Network platforms support multiple connection types, privilege esca platform_slxos platform_voss platform_vyos + platform_weos4 platform_netconf_enabled .. 
_settings_by_platform: @@ -86,6 +87,7 @@ Settings by Platform `Pluribus Netvisor`_ ``community.network.netvisor`` ✓ `Ruckus ICX`_ ``community.network.icx`` ✓ `VyOS`_ `[†]`_ ``vyos.vyos.vyos`` ✓ ✓ + `Westermo WeOS 4`_ ``community.network.weos4`` ✓ OS that supports Netconf `[†]`_ ```` ✓ ✓ =============================== ================================ =========== ======= ======= =========== @@ -116,6 +118,7 @@ Settings by Platform .. _Pluribus Netvisor: https://galaxy.ansible.com/community/network .. _Ruckus ICX: https://galaxy.ansible.com/community/network .. _VyOS: https://galaxy.ansible.com/vyos/vyos +.. _Westermo WeOS 4: https://galaxy.ansible.com/community/network .. _`[†]`: **[†]** Maintained by Ansible Network Team diff --git a/docs/docsite/rst/network/user_guide/platform_weos4.rst b/docs/docsite/rst/network/user_guide/platform_weos4.rst new file mode 100644 index 00000000000000..1a3c79e2a85b5b --- /dev/null +++ b/docs/docsite/rst/network/user_guide/platform_weos4.rst @@ -0,0 +1,88 @@ +.. _weos4_platform_options: + +*************************************** +WeOS 4 Platform Options +*************************************** + +Westermo WeOS 4 is part of the `community.network `_ collection and only supports CLI connections. +This page offers details on how to use ``ansible.netcommon.network_cli`` on WeOS 4 in Ansible. + +.. contents:: + :local: + +Connections available +================================================================================ + +.. table:: + :class: documentation-table + + ==================== ========================================== + .. CLI + ==================== ========================================== + Protocol SSH + + Credentials uses SSH keys / SSH-agent if present + + accepts ``-u myuser -k`` if using password + + Indirect Access via a bastion (jump host) + + Connection Settings ``ansible_connection: community.netcommon.network_cli`` + + |enable_mode| not supported by WeOS 4 + + Returned Data Format ``stdout[0].`` + ==================== ========================================== + +.. |enable_mode| replace:: Enable Mode |br| (Privilege Escalation) + +WeOS 4 does not support ``ansible_connection: local``. You must use ``ansible_connection: ansible.netcommon.network_cli``. + +Using CLI in Ansible +==================== + +Example CLI ``group_vars/weos4.yml`` +------------------------------------ + +.. code-block:: yaml + + ansible_connection: ansible.netcommon.network_cli + ansible_network_os: community.network.weos4 + ansible_user: myuser + ansible_password: !vault... + ansible_ssh_common_args: '-o ProxyCommand="ssh -W %h:%p -q bastion01"' + + +- If you are using SSH keys (including an ssh-agent) you can remove the ``ansible_password`` configuration. +- If you are accessing your host directly (not through a bastion/jump host) you can remove the ``ansible_ssh_common_args`` configuration. +- If you are accessing your host through a bastion/jump host, you cannot include your SSH password in the ``ProxyCommand`` directive. To prevent secrets from leaking out (for example in ``ps`` output), SSH does not support providing passwords via environment variables. + +Example CLI task +---------------- + +.. code-block:: yaml + + - name: Get version information (WeOS 4) + ansible.netcommon.cli_command: + commands: "show version" + register: show_ver + when: ansible_network_os == 'community.network.weos4' + +Example Configuration task +-------------------------- + +.. 
code-block:: yaml + + - name: Replace configuration with file on ansible host (WeOS 4) + ansible.netcommon.cli_config: + config: "{{ lookup('file', 'westermo.conf') }}" + replace: "yes" + diff_match: exact + diff_replace: config + when: ansible_network_os == 'community.network.weos4' + +.. include:: shared_snippets/SSH_warning.txt + +.. seealso:: + + :ref:`timeout_options` From 19d6c28236b118c2fd8dd745972c0da7b680349f Mon Sep 17 00:00:00 2001 From: Hu Shuai Date: Tue, 27 Apr 2021 14:12:57 +0800 Subject: [PATCH 07/16] Fix typo in Makefile (#74396) Fixed minor typo specfic -> specific (cherry picked from commit 4880fee6ca89b0733c12f61505d4462d99d428f3) --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 0395802220b553..aff3d894239d18 100644 --- a/Makefile +++ b/Makefile @@ -11,7 +11,7 @@ # make deb ------------------ produce a DEB # make docs ----------------- rebuild the manpages (results are checked in) # make gettext -------------- produce POT files for docs -# make generate-po ---------- generate language specfic po file +# make generate-po ---------- generate language specific po file # make needs-translation ---- generate list of file with unstranlated or fuzzy string for a specific language # make tests ---------------- run the tests (see https://docs.ansible.com/ansible/devel/dev_guide/testing_units.html for requirements) From 9881ac812fb81555f82142e6c0588f4050176e81 Mon Sep 17 00:00:00 2001 From: dhx-mike-palandra <45608336+dhx-mike-palandra@users.noreply.github.com> Date: Tue, 27 Apr 2021 04:34:48 -0400 Subject: [PATCH 08/16] Update complex_data_manipulation.rst (#72509) (cherry picked from commit c2985c491b68ad34dd761d03da6e0e53e7c6444e) --- docs/docsite/rst/user_guide/complex_data_manipulation.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/docsite/rst/user_guide/complex_data_manipulation.rst b/docs/docsite/rst/user_guide/complex_data_manipulation.rst index fc4a16275e0dd8..1ed4ce5b48fe52 100644 --- a/docs/docsite/rst/user_guide/complex_data_manipulation.rst +++ b/docs/docsite/rst/user_guide/complex_data_manipulation.rst @@ -230,7 +230,7 @@ These example produces ``{"a": "b", "c": "d"}`` vars: single_list: [ 'a', 'b', 'c', 'd' ] - mydict: "{{ dict(single_list | slice(2) | list) }}" + mydict: "{{ dict(single_list | slice(2)) }}" .. code-block:: YAML+Jinja @@ -240,7 +240,7 @@ These example produces ``{"a": "b", "c": "d"}`` list_of_pairs: [ ['a', 'b'], ['c', 'd'] ] mydict: "{{ dict(list_of_pairs) }}" -Both end up being the same thing, with the ``slice(2) | list`` transforming ``single_list`` to the same structure as ``list_of_pairs``. +Both end up being the same thing, with ``slice(2)`` transforming ``single_list`` to a ``list_of_pairs`` generator. From abf2df30fdf66c9e5315a78f3e9f8103bf7d9f90 Mon Sep 17 00:00:00 2001 From: jakelevinez <31458570+jakelevinez@users.noreply.github.com> Date: Tue, 27 Apr 2021 05:49:03 -0400 Subject: [PATCH 09/16] Update VMware library installation docs (#71219) Depending upon OS/distro, please use pip/pip3. 
(cherry picked from commit ddfc648d37f12677935b606151d3be42bcb7d4d7) --- .../rst/scenario_guides/vmware_scenarios/vmware_intro.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/docsite/rst/scenario_guides/vmware_scenarios/vmware_intro.rst b/docs/docsite/rst/scenario_guides/vmware_scenarios/vmware_intro.rst index 1f6a0f0fd60c6e..a2458002678780 100644 --- a/docs/docsite/rst/scenario_guides/vmware_scenarios/vmware_intro.rst +++ b/docs/docsite/rst/scenario_guides/vmware_scenarios/vmware_intro.rst @@ -18,7 +18,7 @@ Requirements Ansible VMware modules are written on top of `pyVmomi `_. pyVmomi is the Python SDK for the VMware vSphere API that allows user to manage ESX, ESXi, -and vCenter infrastructure. You can install pyVmomi using pip: +and vCenter infrastructure. You can install pyVmomi using pip (you may need to use pip3, depending on your OS/distro): .. code-block:: bash From 30bfa90fb4df3a58c43f186dd90fc5df01d117b5 Mon Sep 17 00:00:00 2001 From: Mark Chappell Date: Tue, 27 Apr 2021 14:04:49 +0200 Subject: [PATCH 10/16] Update AWS dev guides to use collections utils and fragments (#72312) (cherry picked from commit cf08c23b4f5708c38728da9020441777e0eebcda) --- .../dev_guide/platforms/aws_guidelines.rst | 25 ++++++++++--------- 1 file changed, 13 insertions(+), 12 deletions(-) diff --git a/docs/docsite/rst/dev_guide/platforms/aws_guidelines.rst b/docs/docsite/rst/dev_guide/platforms/aws_guidelines.rst index 47a2f4b17e249b..27cafb722fa907 100644 --- a/docs/docsite/rst/dev_guide/platforms/aws_guidelines.rst +++ b/docs/docsite/rst/dev_guide/platforms/aws_guidelines.rst @@ -58,7 +58,7 @@ to: .. code-block:: python - from ansible.module_utils.aws.core import AnsibleAWSModule + from ansible_collections.amazon.aws.plugins.module_utils.core import AnsibleAWSModule ... module = AnsibleAWSModule(...) @@ -118,7 +118,8 @@ Unless the name of your service is quite unique, please consider using ``aws_`` Importing botocore and boto3 ---------------------------- -The ``ansible.module_utils.ec2`` module and ``ansible.module_utils.core.aws`` modules both +The ``ansible_collections.amazon.aws.plugins.module_utils.ec2`` module and +``ansible_collections.amazon.aws.plugins.module_utils.core`` modules both automatically import boto3 and botocore. If boto3 is missing from the system then the variable ``HAS_BOTO3`` will be set to false. Normally, this means that modules don't need to import boto3 directly. There is no need to check ``HAS_BOTO3`` when using AnsibleAWSModule @@ -126,7 +127,7 @@ as the module does that check: .. code-block:: python - from ansible.module_utils.aws.core import AnsibleAWSModule + from ansible_collections.amazon.aws.plugins.module_utils.core import AnsibleAWSModule try: import botocore except ImportError: @@ -137,7 +138,7 @@ or: .. code-block:: python from ansible.module_utils.basic import AnsibleModule - from ansible.module_utils.ec2 import HAS_BOTO3 + from ansible_collections.amazon.aws.plugins.module_utils.ec2 import HAS_BOTO3 try: import botocore except ImportError: @@ -220,8 +221,8 @@ and that the more esoteric connection options are documented. For example: # some lines omitted here requirements: [ 'botocore', 'boto3' ] extends_documentation_fragment: - - aws - - ec2 + - amazon.aws.aws + - amazon.aws.ec2 ''' Handling exceptions @@ -234,7 +235,7 @@ are a number of possibilities for handling it. ``is_boto3_error_code``. 
* Use ``aws_module.fail_json_aws()`` to report the module failure in a standard way * Retry using AWSRetry -* Use ``fail_json()`` to report the failure without using ``ansible.module_utils.aws.core`` +* Use ``fail_json()`` to report the failure without using ``ansible_collections.amazon.aws.plugins.module_utils.core`` * Do something custom in the case where you know how to handle the exception For more information on botocore exception handling see the `botocore error documentation `_. @@ -242,7 +243,7 @@ For more information on botocore exception handling see the `botocore error docu Using is_boto3_error_code ------------------------- -To use ``ansible.module_utils.aws.core.is_boto3_error_code`` to catch a single +To use ``ansible_collections.amazon.aws.plugins.module_utils.core.is_boto3_error_code`` to catch a single AWS error code, call it in place of ``ClientError`` in your except clauses. In this case, *only* the ``InvalidGroup.NotFound`` error code will be caught here, and any other error will be raised for handling elsewhere in the program. @@ -268,7 +269,7 @@ amounts of exception handling to existing modules, we recommend migrating the mo .. code-block:: python - from ansible.module_utils.aws.core import AnsibleAWSModule + from ansible_collections.amazon.aws.plugins.module_utils.core import AnsibleAWSModule # Set up module parameters # module params code here @@ -302,8 +303,8 @@ If you need to perform an action based on the error boto3 returned, use the erro except botocore.exceptions.BotoCoreError as e: module.fail_json_aws(e, msg="Couldn't obtain frooble %s" % name) -using fail_json() and avoiding ansible.module_utils.aws.core ------------------------------------------------------------- +using fail_json() and avoiding ansible_collections.amazon.aws.plugins.module_utils.core +--------------------------------------------------------------------------------------- Boto3 provides lots of useful information when an exception is thrown so pass this to the user along with the message. @@ -494,7 +495,7 @@ and returns True if they are different. .. code-block:: python - from ansible.module_utils.ec2 import compare_policies + from ansible_collections.amazon.aws.plugins.module_utils.ec2 import compare_policies import json From 54f70fd69f382869d48e8a9a503e722436b02ec7 Mon Sep 17 00:00:00 2001 From: Mark Chappell Date: Tue, 27 Apr 2021 14:19:00 +0200 Subject: [PATCH 11/16] Use is_boto3_error_code in 'standard' example (#72313) Use is_boto3_error_code in 'standard' example rather than e.response['Error']['Code'] (#72313) Co-authored-by: Sloane Hertel (cherry picked from commit 63afb33d8669ffc9180e017c4326cd25bf396fd3) --- .../rst/dev_guide/platforms/aws_guidelines.rst | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/docs/docsite/rst/dev_guide/platforms/aws_guidelines.rst b/docs/docsite/rst/dev_guide/platforms/aws_guidelines.rst index 27cafb722fa907..ffc41b26d14c58 100644 --- a/docs/docsite/rst/dev_guide/platforms/aws_guidelines.rst +++ b/docs/docsite/rst/dev_guide/platforms/aws_guidelines.rst @@ -287,7 +287,8 @@ amounts of exception handling to existing modules, we recommend migrating the mo Note that it should normally be acceptable to catch all normal exceptions here, however if you expect anything other than botocore exceptions you should test everything works as expected. -If you need to perform an action based on the error boto3 returned, use the error code. 
+If you need to perform an action based on the error boto3 returned, use the error code and the +``is_boto3_error_code()`` helper. .. code-block:: python @@ -295,12 +296,9 @@ If you need to perform an action based on the error boto3 returned, use the erro name = module.params.get['name'] try: result = connection.describe_frooble(FroobleName=name) - except botocore.exceptions.ClientError as e: - if e.response['Error']['Code'] == 'FroobleNotFound': - workaround_failure() # This is an error that we can work around - else: - module.fail_json_aws(e, msg="Couldn't obtain frooble %s" % name) - except botocore.exceptions.BotoCoreError as e: + except is_boto3_error_code('FroobleNotFound'): + workaround_failure() # This is an error that we can work around + except (botocore.exceptions.BotoCoreError, botocore.exceptions.ClientError) as e: # pylint: disable=duplicate-except module.fail_json_aws(e, msg="Couldn't obtain frooble %s" % name) using fail_json() and avoiding ansible_collections.amazon.aws.plugins.module_utils.core From 213929f6b67ce41de638fc8c73ab8aa24f19290f Mon Sep 17 00:00:00 2001 From: Abhijeet Kasurde Date: Tue, 27 Apr 2021 19:00:03 +0530 Subject: [PATCH 12/16] Update Kubernetes collection name in docs (#74440) (cherry picked from commit 8d499bbc83b6029f7e067f9f09abe11229202bdb) --- .../rst/dev_guide/developing_collections_shared.rst | 10 +++++----- .../rst/dev_guide/developing_modules_documenting.rst | 4 ++-- docs/docsite/rst/dev_guide/style_guide/index.rst | 2 +- .../scenario_guides/kubernetes_scenarios/k8s_intro.rst | 6 +++--- .../kubernetes_scenarios/k8s_inventory.rst | 8 ++++---- .../kubernetes_scenarios/scenario_k8s_object.rst | 2 +- .../task_outputs/Add_a_pod_using_k8s.yaml | 2 +- docs/docsite/rst/user_guide/playbooks_filters.rst | 6 +++--- 8 files changed, 20 insertions(+), 20 deletions(-) diff --git a/docs/docsite/rst/dev_guide/developing_collections_shared.rst b/docs/docsite/rst/dev_guide/developing_collections_shared.rst index 331b27203ae73a..c509c1ef828ce0 100644 --- a/docs/docsite/rst/dev_guide/developing_collections_shared.rst +++ b/docs/docsite/rst/dev_guide/developing_collections_shared.rst @@ -24,12 +24,12 @@ To include documentation fragments in your collection: .. code-block:: yaml extends_documentation_fragment: - - community.kubernetes.k8s_name_options - - community.kubernetes.k8s_auth_options - - community.kubernetes.k8s_resource_options - - community.kubernetes.k8s_scale_options + - kubernetes.core.k8s_name_options + - kubernetes.core.k8s_auth_options + - kubernetes.core.k8s_resource_options + - kubernetes.core.k8s_scale_options -:ref:`module_docs_fragments` covers the basics for documentation fragments. The `kubernetes `_ collection includes a complete example. +:ref:`module_docs_fragments` covers the basics for documentation fragments. The `kubernetes.core `_ collection includes a complete example. If you use FQCN, you can use documentation fragments from one collection in another collection. diff --git a/docs/docsite/rst/dev_guide/developing_modules_documenting.rst b/docs/docsite/rst/dev_guide/developing_modules_documenting.rst index 759d85d220931c..00bb863dfa4edf 100644 --- a/docs/docsite/rst/dev_guide/developing_modules_documenting.rst +++ b/docs/docsite/rst/dev_guide/developing_modules_documenting.rst @@ -255,7 +255,7 @@ content in a uniform way: .. note:: - To refer to a group of modules in a collection, use ``R()``. 
When a collection is not the right granularity, use ``C(..)``: - - ``Refer to the R(community.kubernetes collection, plugins_in_community.kubernetes) for information on managing kubernetes clusters.`` + - ``Refer to the R(kubernetes.core collection, plugins_in_kubernetes.core) for information on managing kubernetes clusters.`` - ``The C(win_*) modules (spread across several collections) allow you to manage various aspects of windows hosts.`` @@ -268,7 +268,7 @@ content in a uniform way: Documentation fragments ----------------------- -If you are writing multiple related modules, they may share common documentation, such as authentication details, file mode settings, ``notes:`` or ``seealso:`` entries. Rather than duplicate that information in each module's ``DOCUMENTATION`` block, you can save it once as a doc_fragment plugin and use it in each module's documentation. In Ansible, shared documentation fragments are contained in a ``ModuleDocFragment`` class in `lib/ansible/plugins/doc_fragments/ `_ or the equivalent directory in a collection. To include a documentation fragment, add ``extends_documentation_fragment: FRAGMENT_NAME`` in your module documentation. Use the fully qualified collection name for the FRAGMENT_NAME (for example, ``community.kubernetes.k8s_auth_options``). +If you are writing multiple related modules, they may share common documentation, such as authentication details, file mode settings, ``notes:`` or ``seealso:`` entries. Rather than duplicate that information in each module's ``DOCUMENTATION`` block, you can save it once as a doc_fragment plugin and use it in each module's documentation. In Ansible, shared documentation fragments are contained in a ``ModuleDocFragment`` class in `lib/ansible/plugins/doc_fragments/ `_ or the equivalent directory in a collection. To include a documentation fragment, add ``extends_documentation_fragment: FRAGMENT_NAME`` in your module documentation. Use the fully qualified collection name for the FRAGMENT_NAME (for example, ``kubernetes.core.k8s_auth_options``). Modules should only use items from a doc fragment if the module will implement all of the interface documented there in a manner that behaves the same as the existing modules which import that fragment. The goal is that items imported from the doc fragment will behave identically when used in another module that imports the doc fragment. diff --git a/docs/docsite/rst/dev_guide/style_guide/index.rst b/docs/docsite/rst/dev_guide/style_guide/index.rst index 1cbe5b799065e5..3cf50afe7a2aba 100644 --- a/docs/docsite/rst/dev_guide/style_guide/index.rst +++ b/docs/docsite/rst/dev_guide/style_guide/index.rst @@ -227,7 +227,7 @@ Modules require different suffixes from other plugins: .. code-block:: rst :ref:`arista.eos.eos_config ` - :ref:`community.kubernetes.kubectl connection plugin ` + :ref:`kubernetes.core.kubectl connection plugin ` .. note:: diff --git a/docs/docsite/rst/scenario_guides/kubernetes_scenarios/k8s_intro.rst b/docs/docsite/rst/scenario_guides/kubernetes_scenarios/k8s_intro.rst index 59636720bf9be0..7e156465c388ae 100644 --- a/docs/docsite/rst/scenario_guides/kubernetes_scenarios/k8s_intro.rst +++ b/docs/docsite/rst/scenario_guides/kubernetes_scenarios/k8s_intro.rst @@ -24,13 +24,13 @@ To use the modules, you'll need the following: Installation ============ -The Kubernetes modules are part of the `Ansible Kubernetes collection `_. +The Kubernetes modules are part of the `Ansible Kubernetes collection `_. To install the collection, run the following: .. 
code-block:: bash - $ ansible-galaxy collection install community.kubernetes + $ ansible-galaxy collection install kubernetes.core Authenticating with the API @@ -45,6 +45,6 @@ To disable SSL certificate verification, set ``verify_ssl`` to false. Reporting an issue ================== -If you find a bug or have a suggestion regarding modules, please file issues at `Ansible Kubernetes collection `_. +If you find a bug or have a suggestion regarding modules, please file issues at `Ansible Kubernetes collection `_. If you find a bug regarding OpenShift client, please file issues at `OpenShift REST Client issues `_. If you find a bug regarding Kubectl binary, please file issues at `Kubectl issue tracker `_ diff --git a/docs/docsite/rst/scenario_guides/kubernetes_scenarios/k8s_inventory.rst b/docs/docsite/rst/scenario_guides/kubernetes_scenarios/k8s_inventory.rst index 82fcac034074fb..bcca09eb15d147 100644 --- a/docs/docsite/rst/scenario_guides/kubernetes_scenarios/k8s_inventory.rst +++ b/docs/docsite/rst/scenario_guides/kubernetes_scenarios/k8s_inventory.rst @@ -30,17 +30,17 @@ To use this Kubernetes dynamic inventory plugin, you need to enable it first by .. code-block:: ini [inventory] - enable_plugins = community.kubernetes.k8s + enable_plugins = kubernetes.core.k8s Then, create a file that ends in ``.k8s.yml`` or ``.k8s.yaml`` in your working directory. -The ``community.kubernetes.k8s`` inventory plugin takes in the same authentication information as any other Kubernetes modules. +The ``kubernetes.core.k8s`` inventory plugin takes in the same authentication information as any other Kubernetes modules. Here's an example of a valid inventory file: .. code-block:: yaml - plugin: community.kubernetes.k8s + plugin: kubernetes.core.k8s Executing ``ansible-inventory --list -i .k8s.yml`` will create a list of Pods that are ready to be configured using Ansible. @@ -48,7 +48,7 @@ You can also provide the namespace to gather information about specific pods fro .. code-block:: yaml - plugin: community.kubernetes.k8s + plugin: kubernetes.core.k8s connections: - namespaces: - test diff --git a/docs/docsite/rst/scenario_guides/kubernetes_scenarios/scenario_k8s_object.rst b/docs/docsite/rst/scenario_guides/kubernetes_scenarios/scenario_k8s_object.rst index 3a0d20c0df3181..8fd46453e4a371 100644 --- a/docs/docsite/rst/scenario_guides/kubernetes_scenarios/scenario_k8s_object.rst +++ b/docs/docsite/rst/scenario_guides/kubernetes_scenarios/scenario_k8s_object.rst @@ -52,7 +52,7 @@ Since Ansible utilizes the Kubernetes API to perform actions, in this use case w To begin, there are a few bits of information we will need. Here you are using Kubeconfig which is pre-configured in your machine. The Kubeconfig is generally located at ``~/.kube/config``. It is highly recommended to store sensitive information such as password, user certificates in a more secure fashion using :ref:`ansible-vault` or using `Ansible Tower credentials `_. -Now you need to supply the information about the Pod which will be created. Using ``definition`` parameter of the ``community.kubernetes.k8s`` module, you specify `PodTemplate `_. This PodTemplate is identical to what you provide to the ``kubectl`` command. +Now you need to supply the information about the Pod which will be created. Using ``definition`` parameter of the ``kubernetes.core.k8s`` module, you specify `PodTemplate `_. This PodTemplate is identical to what you provide to the ``kubectl`` command. 
What to expect -------------- diff --git a/docs/docsite/rst/scenario_guides/kubernetes_scenarios/task_outputs/Add_a_pod_using_k8s.yaml b/docs/docsite/rst/scenario_guides/kubernetes_scenarios/task_outputs/Add_a_pod_using_k8s.yaml index 7cfd43c815b42b..6b5633f61b1561 100644 --- a/docs/docsite/rst/scenario_guides/kubernetes_scenarios/task_outputs/Add_a_pod_using_k8s.yaml +++ b/docs/docsite/rst/scenario_guides/kubernetes_scenarios/task_outputs/Add_a_pod_using_k8s.yaml @@ -1,7 +1,7 @@ --- - hosts: localhost collections: - - community.kubernetes + - kubernetes.core tasks: - name: Create a pod k8s: diff --git a/docs/docsite/rst/user_guide/playbooks_filters.rst b/docs/docsite/rst/user_guide/playbooks_filters.rst index 512c39cded5317..a50643c8f45cda 100644 --- a/docs/docsite/rst/user_guide/playbooks_filters.rst +++ b/docs/docsite/rst/user_guide/playbooks_filters.rst @@ -1732,12 +1732,12 @@ Getting Kubernetes resource names .. note:: - These filters have migrated to the `community.kubernetes `_ collection. Follow the installation instructions to install that collection. + These filters have migrated to the `kubernetes.core `_ collection. Follow the installation instructions to install that collection. Use the "k8s_config_resource_name" filter to obtain the name of a Kubernetes ConfigMap or Secret, including its hash:: - {{ configmap_resource_definition | community.kubernetes.k8s_config_resource_name }} + {{ configmap_resource_definition | kubernetes.core.k8s_config_resource_name }} This can then be used to reference hashes in Pod specifications:: @@ -1754,7 +1754,7 @@ This can then be used to reference hashes in Pod specifications:: containers: - envFrom: - secretRef: - name: {{ my_secret | community.kubernetes.k8s_config_resource_name }} + name: {{ my_secret | kubernetes.core.k8s_config_resource_name }} .. versionadded:: 2.8 From a7be4fae8b543fa5b9b3b4b58f1fbcac7b30d111 Mon Sep 17 00:00:00 2001 From: Lidiane Taquehara Date: Tue, 27 Apr 2021 11:40:37 -0300 Subject: [PATCH 13/16] Update argcomplete docs links on installation guide (#74410) Link on installation docs is outdated. Switch to current docs at: https://kislyuk.github.io/argcomplete/ (cherry picked from commit f97787ca7489c69953710746ccb228c626f6109e) --- docs/docsite/rst/installation_guide/intro_installation.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/docsite/rst/installation_guide/intro_installation.rst b/docs/docsite/rst/installation_guide/intro_installation.rst index cbf3ead5210171..77beeddeed29c0 100644 --- a/docs/docsite/rst/installation_guide/intro_installation.rst +++ b/docs/docsite/rst/installation_guide/intro_installation.rst @@ -575,7 +575,7 @@ As of Ansible 2.9, you can add shell completion of the Ansible command line util You can install ``python-argcomplete`` from EPEL on Red Hat Enterprise based distributions, and or from the standard OS repositories for many other distributions. -For more information about installation and configuration, see the `argcomplete documentation `_. +For more information about installation and configuration, see the `argcomplete documentation `_. Installing ``argcomplete`` on RHEL, CentOS, or Fedora ----------------------------------------------------- @@ -647,7 +647,7 @@ You should place the above commands into your shells profile file such as ``~/.p Using ``argcomplete`` with zsh or tcsh ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -See the `argcomplete documentation `_. +See the `argcomplete documentation `_. .. 
seealso:: From 2bfee18c300ca908ef9b85a20fa64c98b94c1900 Mon Sep 17 00:00:00 2001 From: Alicia Cozine <879121+acozine@users.noreply.github.com> Date: Tue, 27 Apr 2021 09:41:10 -0500 Subject: [PATCH 14/16] fix spacing to fix header, reorg contributing page (#74421) Co-authored-by: John R Barker (cherry picked from commit 9d9b08bece5d9c422e8e4cfa1ec2f6f5bdfa6b06) --- .../developing_collections_contributing.rst | 31 ++++++++++++++++--- 1 file changed, 26 insertions(+), 5 deletions(-) diff --git a/docs/docsite/rst/dev_guide/developing_collections_contributing.rst b/docs/docsite/rst/dev_guide/developing_collections_contributing.rst index 20ac45486a709a..8abed636643d36 100644 --- a/docs/docsite/rst/dev_guide/developing_collections_contributing.rst +++ b/docs/docsite/rst/dev_guide/developing_collections_contributing.rst @@ -6,25 +6,46 @@ Contributing to collections If you want to add functionality to an existing collection, modify a collection you are using to fix a bug, or change the behavior of a module in a collection, clone the git repository for that collection and make changes on a branch. You can combine changes to a collection with a local checkout of Ansible (``source hacking/env-setup``). You should first check the collection repository to see if it has specific contribution guidelines. These are typically listed in the README.md or CONTRIBUTING.md files within the repository. + Contributing to a collection: community.general =============================================== -This section describes the process for `community.general `_. To contribute to other collections, replace the folder names ``community`` and ``general`` with the namespace and collection name of a different collection. +These instructions apply to collections hosted in the `ansible_collections GitHub org `_. For other collections, especially for collections not hosted on GitHub, check the ``README.md`` of the collection for information on contributing to it. + +This example uses the `community.general collection `_. To contribute to other collections in the same GitHub org, replace the folder names ``community`` and ``general`` with the namespace and collection name of a different collection. + +Prerequisites +------------- + +* Include ``~/dev/ansible/collections/`` in :ref:`COLLECTIONS_PATHS` +* If that path mentions multiple directories, make sure that no other directory earlier in the search path contains a copy of ``community.general``. + +Creating a PR +------------- + -We assume that you have included ``~/dev/ansible/collections/`` in :ref:`COLLECTIONS_PATHS`, and if that path mentions multiple directories, that you made sure that no other directory earlier in the search path contains a copy of ``community.general``. 
Create the directory ``~/dev/ansible/collections/ansible_collections/community``, and in it clone `the community.general Git repository `_ or a fork of it into the folder ``general``:: + +* Create the directory ``~/dev/ansible/collections/ansible_collections/community``:: mkdir -p ~/dev/ansible/collections/ansible_collections/community + +* Clone `the community.general Git repository `_ or a fork of it into the directory ``general``:: + cd ~/dev/ansible/collections/ansible_collections/community git clone git@github.com:ansible-collections/community.general.git general -If you clone a fork, add the original repository as a remote ``upstream``:: +* If you clone from a fork, add the original repository as a remote ``upstream``:: cd ~/dev/ansible/collections/ansible_collections/community/general git remote add upstream git@github.com:ansible-collections/community.general.git -Now you can use this checkout of ``community.general`` in playbooks and roles with whichever version of Ansible you have installed locally, including a local checkout of ``ansible/ansible``'s ``devel`` branch. +* Create a branch and commit your changes on the branch. + +* Remember to add tests for your changes, see :ref:`testing_collections`. + +* Push your changes to your fork of the collection and create a Pull Request. -For collections hosted in the ``ansible_collections`` GitHub org, create a branch and commit your changes on the branch. When you are done (remember to add tests, see :ref:`testing_collections`), push your changes to your fork of the collection and create a Pull Request. For other collections, especially for collections not hosted on GitHub, check the ``README.md`` of the collection for information on contributing to it. +You can test your changes by using this checkout of ``community.general`` in playbooks and roles with whichever version of Ansible you have installed locally, including a local checkout of ``ansible/ansible``'s ``devel`` branch. .. seealso:: From 03a46367030b07cd3af36c62261b0984d6b67de2 Mon Sep 17 00:00:00 2001 From: Alex Domoradov Date: Tue, 27 Apr 2021 20:53:21 +0300 Subject: [PATCH 15/16] Update first_found documentation (#70502) * import_tasks do not work with loop, use use include_tasks instead * update documentation (cherry picked from commit bacede7a2b9615e0b1e83aeff69e5c4f080bf791) --- lib/ansible/plugins/lookup/first_found.py | 47 +++++++++++++---------- 1 file changed, 27 insertions(+), 20 deletions(-) diff --git a/lib/ansible/plugins/lookup/first_found.py b/lib/ansible/plugins/lookup/first_found.py index ffe9fd30f65e3f..03fa80d804d4c9 100644 --- a/lib/ansible/plugins/lookup/first_found.py +++ b/lib/ansible/plugins/lookup/first_found.py @@ -10,21 +10,25 @@ version_added: historical short_description: return first file found from list description: - - this lookup checks a list of files and paths and returns the full path to the first combination found. + - This lookup checks a list of files and paths and returns the full path to the first combination found. - As all lookups, when fed relative paths it will try use the current task's location first and go up the chain - to the containing role/play/include/etc's location. + to the containing locations of role / play / include and so on. - The list of files has precedence over the paths searched. - i.e, A task in a role has a 'file1' in the play's relative path, this will be used, 'file2' in role's relative path will not. 
+ For example, A task in a role has a 'file1' in the play's relative path, this will be used, 'file2' in role's relative path will not. - Either a list of files C(_terms) or a key `files` with a list of files is required for this plugin to operate. notes: - This lookup can be used in 'dual mode', either passing a list of file names or a dictionary that has C(files) and C(paths). options: _terms: - description: list of file names + description: A list of file names. files: - description: list of file names + description: A list of file names. + type: list + default: [] paths: - description: list of paths in which to look for the files + description: A list of paths in which to look for the files. + type: list + default: [] skip: type: boolean default: False @@ -33,42 +37,45 @@ EXAMPLES = """ - name: show first existing file or ignore if none do - debug: msg={{lookup('first_found', findme, errors='ignore')}} + debug: + msg: "{{ lookup('first_found', findme, errors='ignore') }}" vars: findme: - "/path/to/foo.txt" - "bar.txt" # will be looked in files/ dir relative to role and/or play - "/path/to/biz.txt" -- name: | - include tasks only if files exist. Note the use of query() to return - a blank list for the loop if no files are found. - import_tasks: '{{ item }}' +- name: include tasks only if files exist. + include_tasks: + file: "{{ query('first_found', params) }}" vars: params: files: - path/tasks.yaml - path/other_tasks.yaml - loop: "{{ query('first_found', params, errors='ignore') }}" - name: | copy first existing file found to /some/file, looking in relative directories from where the task is defined and including any play objects that contain it - copy: src={{lookup('first_found', findme)}} dest=/some/file + copy: + src: "{{ lookup('first_found', findme) }}" + dest: /some/file vars: findme: - foo - - "{{inventory_hostname}}" + - "{{ inventory_hostname }}" - bar - name: same copy but specific paths - copy: src={{lookup('first_found', params)}} dest=/some/file + copy: + src: "{{ lookup('first_found', params) }}" + dest: /some/file vars: params: files: - foo - - "{{inventory_hostname}}" + - "{{ inventory_hostname }}" - bar paths: - /tmp/production @@ -76,7 +83,7 @@ - name: INTERFACES | Create Ansible header for /etc/network/interfaces template: - src: "{{ lookup('first_found', findme)}}" + src: "{{ lookup('first_found', findme) }}" dest: "/etc/foo.conf" vars: findme: @@ -84,12 +91,12 @@ - "default_foo.conf" - name: read vars from first file found, use 'vars/' relative subdir - include_vars: "{{lookup('first_found', params)}}" + include_vars: "{{ lookup('first_found', params) }}" vars: params: files: - - '{{ansible_distribution}}.yml' - - '{{ansible_os_family}}.yml' + - '{{ ansible_distribution }}.yml' + - '{{ ansible_os_family }}.yml' - default.yml paths: - 'vars' From 76ccc4d3022cf3a20611c30b9f9a1dd82418fbcb Mon Sep 17 00:00:00 2001 From: Bill Nottingham Date: Tue, 27 Apr 2021 17:19:57 -0400 Subject: [PATCH 16/16] Product-related updates. 
(#74454) (cherry picked from commit 34c9ed8a28e6353b927427fdc72a1ec7078040e6) --- docs/docsite/rst/community/communication.rst | 14 +++++++------- .../rst/community/other_tools_and_programs.rst | 2 +- docs/docsite/rst/community/release_managers.rst | 3 +-- docs/docsite/rst/dev_guide/developing_api.rst | 2 +- .../docsite/rst/dev_guide/developing_inventory.rst | 2 +- .../dev_guide/developing_modules_documenting.rst | 4 ++-- .../dev_guide/style_guide/spelling_word_choice.rst | 2 +- .../rst/dev_guide/style_guide/trademarks.rst | 9 ++++----- 8 files changed, 18 insertions(+), 20 deletions(-) diff --git a/docs/docsite/rst/community/communication.rst b/docs/docsite/rst/community/communication.rst index 3c3c5c2ee7ed9e..587f5461c452b8 100644 --- a/docs/docsite/rst/community/communication.rst +++ b/docs/docsite/rst/community/communication.rst @@ -18,7 +18,7 @@ Mailing list information Ansible has several mailing lists. Your first post to the mailing list will be moderated (to reduce spam), so please allow up to a day or so for your first post to appear. * `Ansible Announce list `_ is a read-only list that shares information about new releases of Ansible, and also rare infrequent event information, such as announcements about an upcoming AnsibleFest, which is our official conference series. Worth subscribing to! -* `Ansible AWX List `_ is for `Ansible AWX `_ the upstream version of `Red Hat Ansible Tower `_ +* `Ansible AWX List `_ is for `Ansible AWX `_ * `Ansible Container List `_ is for users and developers of the Ansible Container project. * `Ansible Development List `_ is for learning how to develop on Ansible, asking about prospective feature design, or discussions about extending ansible or features in progress. * `Ansible Lockdown List `_ is for all things related to Ansible Lockdown projects, including DISA STIG automation and CIS Benchmarks. @@ -59,7 +59,7 @@ Many of our community `Working Groups `_ - ``#ansible-aws`` - `Ansible Lockdown Working Group `_ | `gh/ansible/ansible-lockdown `_ - ``#ansible-lockdown``- Security playbooks/roles -- `AWX Working Group `_ - ``#ansible-awx`` - Upstream for Ansible Tower +- `AWX Working Group `_ - ``#ansible-awx`` - `Azure Working Group `_ - ``#ansible-azure`` - `Community Working Group `_ - ``#ansible-community`` - Including Meetups - `Container Working Group `_ - ``#ansible-container`` @@ -95,13 +95,13 @@ IRC meetings The Ansible community holds regular IRC meetings on various topics, and anyone who is interested is invited to participate. For more information about Ansible meetings, consult the `meeting schedule and agenda page `_. -Ansible Tower support questions -=============================== +Ansible Automation Platform support questions +============================================= -Red Hat Ansible `Tower `_ is a UI, Server, and REST endpoint for Ansible. -The Red Hat Ansible Automation subscription contains support for Ansible, Ansible Tower, Ansible Automation for Networking, and more. +Red Hat Ansible `Automation Platform `_ is a subscription that contains support, certified content, and tooling for Ansible including +content management, a controller, UI and REST API. -If you have a question about Ansible Tower, visit `Red Hat support `_ rather than using the IRC channel or the general project mailing list. +If you have a question about Ansible Automation Platform, visit `Red Hat support `_ rather than using the IRC channel or the general project mailing list. 
The Bullhorn ============ diff --git a/docs/docsite/rst/community/other_tools_and_programs.rst b/docs/docsite/rst/community/other_tools_and_programs.rst index 322dc507cd3004..7a941dcc3183da 100644 --- a/docs/docsite/rst/community/other_tools_and_programs.rst +++ b/docs/docsite/rst/community/other_tools_and_programs.rst @@ -115,7 +115,7 @@ Other tools - `Ansigenome `_ - a command line tool designed to help you manage your Ansible roles. - `ARA `_ - records Ansible playbook runs and makes the recorded data available and intuitive for users and systems by integrating with Ansible as a callback plugin. - `Awesome Ansible `_ - a collaboratively curated list of awesome Ansible resources. -- `AWX `_ - provides a web-based user interface, REST API, and task engine built on top of Ansible. AWX is the upstream project for Red Hat Ansible Tower, part of the Red Hat Ansible Automation subscription. +- `AWX `_ - provides a web-based user interface, REST API, and task engine built on top of Ansible. Red Hat Ansible Automation Platform includes code from AWX. - `Mitogen for Ansible `_ - uses the `Mitogen `_ library to execute Ansible playbooks in a more efficient way (decreases the execution time). - `nanvault `_ - a standalone tool to encrypt and decrypt files in the Ansible Vault format, featuring UNIX-style composability. - `OpsTools-ansible `_ - uses Ansible to configure an environment that provides the support of `OpsTools `_, namely centralized logging and analysis, availability monitoring, and performance monitoring. diff --git a/docs/docsite/rst/community/release_managers.rst b/docs/docsite/rst/community/release_managers.rst index d7c84cd5ca8166..97949a55f7d045 100644 --- a/docs/docsite/rst/community/release_managers.rst +++ b/docs/docsite/rst/community/release_managers.rst @@ -13,7 +13,6 @@ coordinate between: * Contributors without commit privileges * The community * Ansible documentation team -* Ansible Tower team Pre-releases: what and why ========================== @@ -70,7 +69,7 @@ The last RC should be as close to the final as possible. The following things ma .. note:: We want to specifically emphasize that code (in :file:`bin/`, :file:`lib/ansible/`, and :file:`setup.py`) must be the same unless there are extraordinary extenuating circumstances. If there are extenuating circumstances, the Release Manager is responsible for notifying groups - (like the Tower Team) which would want to test the code. + which would want to test the code. Ansible release process diff --git a/docs/docsite/rst/dev_guide/developing_api.rst b/docs/docsite/rst/dev_guide/developing_api.rst index eeff46845b2150..c530b9a32ebd12 100644 --- a/docs/docsite/rst/dev_guide/developing_api.rst +++ b/docs/docsite/rst/dev_guide/developing_api.rst @@ -14,7 +14,7 @@ write plugins, and you can plug in inventory data from external data sources. T gives a basic overview and examples of the Ansible execution and playbook API. If you would like to use Ansible programmatically from a language other than Python, trigger events asynchronously, -or have access control and logging demands, please see the `Ansible Tower documentation `_. +or have access control and logging demands, please see the `AWX project `_. .. note:: Because Ansible relies on forking processes, this API is not thread safe. 
diff --git a/docs/docsite/rst/dev_guide/developing_inventory.rst b/docs/docsite/rst/dev_guide/developing_inventory.rst index ac54aefe3c64c8..0df314cb247052 100644 --- a/docs/docsite/rst/dev_guide/developing_inventory.rst +++ b/docs/docsite/rst/dev_guide/developing_inventory.rst @@ -469,7 +469,7 @@ An easy way to see how this should look is using :ref:`ansible-inventory`, which Get started with developing a module :ref:`developing_plugins` How to develop plugins - `Ansible Tower `_ + `AWX `_ REST API endpoint and GUI for Ansible, syncs with dynamic inventory `Development Mailing List `_ Mailing list for development topics diff --git a/docs/docsite/rst/dev_guide/developing_modules_documenting.rst b/docs/docsite/rst/dev_guide/developing_modules_documenting.rst index 00bb863dfa4edf..67a4bdd91e6dee 100644 --- a/docs/docsite/rst/dev_guide/developing_modules_documenting.rst +++ b/docs/docsite/rst/dev_guide/developing_modules_documenting.rst @@ -233,8 +233,8 @@ Linking and other format macros within module documentation You can link from your module documentation to other module docs, other resources on docs.ansible.com, and resources elsewhere on the internet with the help of some pre-defined macros. The correct formats for these macros are: -* ``L()`` for links with a heading. For example: ``See L(Ansible Tower,https://www.ansible.com/products/tower).`` As of Ansible 2.10, do not use ``L()`` for relative links between Ansible documentation and collection documentation. -* ``U()`` for URLs. For example: ``See U(https://www.ansible.com/products/tower) for an overview.`` +* ``L()`` for links with a heading. For example: ``See L(Ansible Automation Platform,https://www.ansible.com/products/automation-platform).`` As of Ansible 2.10, do not use ``L()`` for relative links between Ansible documentation and collection documentation. +* ``U()`` for URLs. For example: ``See U(https://www.ansible.com/products/automation-platform) for an overview.`` * ``R()`` for cross-references with a heading (added in Ansible 2.10). For example: ``See R(Cisco IOS Platform Guide,ios_platform_options)``. Use the RST anchor for the cross-reference. See :ref:`adding_anchors_rst` for details. * ``M()`` for module names. For example: ``See also M(ansible.builtin.yum) or M(community.general.apt_rpm)``. diff --git a/docs/docsite/rst/dev_guide/style_guide/spelling_word_choice.rst b/docs/docsite/rst/dev_guide/style_guide/spelling_word_choice.rst index 3f6d8d7b46196d..a39dc9c3822269 100644 --- a/docs/docsite/rst/dev_guide/style_guide/spelling_word_choice.rst +++ b/docs/docsite/rst/dev_guide/style_guide/spelling_word_choice.rst @@ -12,7 +12,7 @@ Spell out the acronym before using it in alone text, such as "The Embedded DevKi Applications +++++++++++++++++++ -When used as a proper name, use the capitalization of the product, such as GNUPro, Source-Navigator, and Ansible Tower. When used as a command, use lowercase as appropriate, such as "To start GCC, type ``gcc``." +When used as a proper name, use the capitalization of the product, such as GNUPro or Source-Navigator. When used as a command, use lowercase as appropriate, such as "To start GCC, type ``gcc``." .. 
note:: diff --git a/docs/docsite/rst/dev_guide/style_guide/trademarks.rst b/docs/docsite/rst/dev_guide/style_guide/trademarks.rst index 266f16bd1365a9..20c7380c6b8bc7 100644 --- a/docs/docsite/rst/dev_guide/style_guide/trademarks.rst +++ b/docs/docsite/rst/dev_guide/style_guide/trademarks.rst @@ -12,7 +12,7 @@ General Rules: Trademarks should be used on 1st references on a page or within a section. -Use Red Hat® Ansible Tower® or Ansible®, on first reference when referring to products. +Use Red Hat® Ansible® Automation Platform or Ansible®, on first reference when referring to products. Use "Ansible" alone as the company name, as in "Ansible announced quarterly results," which is not marked. @@ -38,7 +38,7 @@ Always use proper trademark form and spelling. Never use a trademark as a noun. Always use a trademark as an adjective modifying the noun. Correct: - Red Hat® Ansible Tower® system performance is incredible. + Red Hat® Ansible® Automation Platform system performance is incredible. Incorrect: Ansible's performance is incredible. @@ -46,7 +46,7 @@ Never use a trademark as a noun. Always use a trademark as an adjective modifyin Never use a trademark as a verb. Trademarks are products or services, never actions. Correct: - "Orchestrate your entire network using Red Hat® Ansible Tower®." + "Orchestrate your entire network using Red Hat® Ansible® Automation Platform." Incorrect: "Ansible your entire network." @@ -54,7 +54,7 @@ Never use a trademark as a verb. Trademarks are products or services, never acti Never modify a trademark to a plural form. Instead, change the generic word from the singular to the plural. Correct: - "Corporate demand for Red Hat® Ansible Tower® configuration software is surging." + "Corporate demand for Red Hat® Ansible® Automation Platform software is surging." Incorrect: "Corporate demand for Ansible is surging." @@ -81,7 +81,6 @@ The mark consists of the letter "A" in a shaded circle. As of 5/11/15, this was Common Ansible Trademarks +++++++++++++++++++++++++++++++++++++++ * Ansible® -* Ansible Tower® Other Common Trademarks and Resource Sites: ++++++++++++++++++++++++++++++++++++++++++++++++