diff --git a/_data/home-content.yml b/_data/home-content.yml
index 50871c01..3a6183ff 100644
--- a/_data/home-content.yml
+++ b/_data/home-content.yml
@@ -1,6 +1,50 @@
+
+
+- title: Pipelines
+ icon: images/home-icons/pipeline.svg
+ url: ''
+ links:
+ - title: Introduction to Pipelines
+ localurl: /docs/pipelines/introduction-to-codefresh-pipelines/
+ - title: Creating Pipelines
+ localurl: /docs/pipelines/pipelines/
+ - title: Pipeline triggers
+ localurl: /docs/pipelines/triggers/
+ - title: Monitoring pipelines
+ localurl: /docs/pipelines/monitoring-pipelines/
+ - title: Shared Configuration
+ localurl: /docs/pipelines/shared-configuration/
+ - title: Using secrets
+ localurl: /docs/pipelines/secrets-store/
+ - title: Pipeline caching
+ localurl: /docs/pipelines/pipeline-caching/
+ - title: Running pipelines locally
+ localurl: /docs/pipelines/running-pipelines-locally/
+ - title: Debugging pipelines
+ localurl: /docs/pipelines/debugging-pipelines/
+
+
+- title: Workflows
+ icon: images/home-icons/pipeline.svg
+ url: ''
+ links:
+ - title: Creating workflows
+ localurl: /docs/workflows/create-pipeline
+ - title: Nested workflows
+ localurl: /docs/workflows/nested-workflows/
+ - title: Configure artifact repository
+ localurl: /docs/workflows/configure-artifact-repository/
+ - title: Selectors for concurrency synchronization
+ localurl: /docs/workflows/concurrency-limit/
+ - title: Sharing file systems
+ localurl: /docs/workflows/sharing-file-system/
+
+
+
+
- title: Clients
icon: images/home-icons/client.svg
url: ''
@@ -32,6 +76,7 @@
- title: Adding Git Sources to GitOps Runtimes
localurl: /docs/installation/git-sources/
+
- title: Administration
icon: images/home-icons/administration.svg
url: ''
diff --git a/_data/nav.yml b/_data/nav.yml
index 0abba04e..6f9db368 100644
--- a/_data/nav.yml
+++ b/_data/nav.yml
@@ -1,6 +1,108 @@
+- title: CI pipelines
+ url: "/pipelines"
+ pages:
+ - title: Introduction to CI pipelines
+ url: "/introduction-to-codefresh-pipelines"
+ - title: Creating a CI pipeline
+ url: "/pipelines"
+ - title: Steps in CI pipelines
+ url: "/steps"
+ sub-pages:
+ - title: Git-clone
+ url: "/git-clone"
+ - title: Freestyle
+ url: "/freestyle"
+ - title: Build
+ url: "/build"
+ - title: Push
+ url: "/push"
+ - title: Composition
+ url: "/composition"
+ - title: Launch-composition
+ url: "/launch-composition"
+ - title: Deploy
+ url: "/deploy"
+ - title: Approval
+ url: "/approval"
+ - title: Triggers in CI pipelines
+ url: "/triggers"
+ sub-pages:
+ - title: Git triggers
+ url: "/git-triggers"
+ - title: DockerHub triggers
+ url: "/dockerhub-triggers"
+ - title: Azure triggers
+ url: "/azure-triggers"
+ - title: Quay triggers
+ url: "/quay-triggers"
+ - title: Helm triggers
+ url: "/helm-triggers"
+ - title: Artifactory triggers
+ url: "/jfrog-triggers"
+ - title: Timer (Cron) triggers
+ url: "/cron-triggers"
+ - title: Conditional execution of steps
+ url: "/conditional-execution-of-steps"
+ - title: Post-step operations
+ url: "/post-step-operations"
+ - title: Variables in CI pipelines
+ url: "/variables"
+ - title: Hooks in CI pipelines
+ url: "/hooks"
+ - title: Annotations in CI pipelines
+ url: "/annotations"
+ - title: Grouping steps into stages
+ url: "/stages"
+ - title: Caching for CI pipelines
+ url: "/pipeline-caching"
+ - title: Debugging CI pipelines
+ url: "/debugging-pipelines"
+ - title: Monitoring CI pipelines
+ url: "/monitoring-pipelines"
+ - title: Complex CI pipelines
+ url: "/advanced-workflows"
+ - title: Running CI pipelines locally
+ url: "/running-pipelines-locally"
+ - title: Configuration for CI pipelines
+ url: "/configuration"
+ sub-pages:
+ - title: Shared configuration
+ url: "/shared-configuration"
+ - title: Secrets for CI pipelines
+ url: "/secrets-store"
+ - title: Global CI pipeline settings
+ url: "/pipeline-settings"
+ - title: Public logs and status badges
+ url: "/build-status"
+ - title: Service containers
+ url: "/service-containers"
+ - title: Deployment environments
+ url: "/deployment-environments"
+ - title: Docker image metadata
+ url: "/docker-image-metadata"
+ - title: Pipeline definitions YAML
+ url: "/what-is-the-codefresh-yaml"
+
+
+- title: Workflows
+ url: "/workflows"
+ pages:
+ - title: Creating workflows
+ url: "/create-pipeline"
+ - title: Nested workflows
+ url: "/nested-workflows"
+ - title: Configure artifact repository
+ url: "/configure-artifact-repository"
+ - title: Selectors for concurrency synchronization
+ url: "/concurrency-limit"
+ - title: Sharing file systems
+ url: "/sharing-file-system"
+
+
+
- title: Clients
url: "/clients"
pages:
@@ -115,3 +217,4 @@
- title: Service Commitment
url: "/sla"
+
diff --git a/_docs/pipelines/advanced-workflows.md b/_docs/pipelines/advanced-workflows.md
new file mode 100644
index 00000000..1314ff7f
--- /dev/null
+++ b/_docs/pipelines/advanced-workflows.md
@@ -0,0 +1,972 @@
+---
+title: "Advanced workflows with parallel steps"
+description: "Create complex workflows in Codefresh with step dependencies"
+group: pipelines
+toc: true
+---
+
+Codefresh is very flexible when it comes to pipeline complexity and depth.
+
+You can easily create:
+ * Sequential pipelines where step order is the same as the listing order in YAML (simple)
+ * Sequential pipelines that have some parallel parts (intermediate)
+ * Parallel pipelines where step order is explicitly defined (advanced)
+
+With the parallel execution mode, you can define complex pipelines with fan-in/out configurations capable of matching even the most complicated workflows within an organization.
+
+>In Codefresh, parallel execution is unrelated to [stages]({{site.baseurl}}/docs/pipelines/stages/). Stages are only a way to visually organize your pipeline steps. The actual execution is independent from the visual layout in the logs view.
+
+Before going any further make sure that you are familiar with the [basics of Codefresh pipelines]({{site.baseurl}}/docs/pipelines/introduction-to-codefresh-pipelines/).
+
+Codefresh offers two modes of execution:
+
+1. Sequential mode (which is the default)
+1. Parallel mode
+
+## Sequential execution mode
+
+The sequential mode is very easy to understand and visualize.
+
+In sequential mode, the Codefresh execution engine starts from the first step defined at the top of the `codefresh.yml` file, and executes all steps one by one going down to the end of the file. A step is either executed or skipped according to its conditions.
+
+>The condition for each step is only examined **once**.
+
+`YAML`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+mode: sequential
+steps:
+ MyAppDockerImage:
+ title: Building Docker Image
+ type: build
+ image_name: sample-python-image
+ working_directory: ./
+ tag: ${{CF_BRANCH_TAG_NORMALIZED}}
+ dockerfile: Dockerfile
+ MyUnitTests:
+ title: Running Unit tests
+ image: ${{MyAppDockerImage}}
+ commands:
+ - python setup.py test
+{% endraw %}
+{% endhighlight %}
+
+Here we have two steps, one that creates a Docker image and a second one that runs [unit tests]({{site.baseurl}}/docs/testing/unit-tests/) inside it. The order of execution is identical to the order of the steps in the YAML file. This means that unit tests will always run after the Docker image creation.
+
+Notice that the line `mode: sequential` is shown only for illustration purposes. Sequential mode is the default, and therefore this line can be omitted.
+
+
+## Inserting parallel steps in a sequential pipeline
+
+You don't have to activate parallel execution mode for the whole pipeline if only a part of it needs to run in parallel. Codefresh allows you to insert a parallel phase inside a sequential pipeline with the following syntax:
+
+`YAML`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+steps:
+ my_task1:
+ title: My Task 1
+ [...]
+ my_parallel_tasks:
+ type: parallel
+ steps:
+ my_task2a:
+ title: My Task 2A
+ [...]
+ my_task2b:
+ title: My Task 2B
+ [...]
+ my_task3:
+ title: My Task3
+ [...]
+{% endraw %}
+{% endhighlight %}
+
+
+In this case tasks 2A and 2B will run in parallel.
+The step name that defines the parallel phase (`my_parallel_tasks` in the example above), is completely arbitrary.
+
+The final order of execution will be
+
+1. Task 1
+1. Task 2A and Task2B at the same time
+1. Task 3
+
+This is the recommended way to start using parallelism in your Codefresh pipelines. It is sufficient for most scenarios that require parallelism.
+
+>The step names must be unique within the same pipeline. The parent and child steps should NOT share the same name.
+
+### Example: pushing multiple Docker images in parallel
+
+Let's see an example where a Docker image is created and then we push it to more than one registry. This is a perfect candidate for parallelization. Here is the `codefresh.yml`:
+
+`YAML`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+stages:
+- build
+- push
+steps:
+ MyAppDockerImage:
+ title: Building Docker Image
+ stage: 'build'
+ type: build
+ image_name: trivialgoweb
+ working_directory: ./
+ tag: '${{CF_BRANCH_TAG_NORMALIZED}}'
+ dockerfile: Dockerfile
+ PushingToRegistries:
+ type: parallel
+ stage: 'push'
+ steps:
+ jfrog_PushingTo_jfrog_BintrayRegistry:
+ type: push
+ title: jfrog_Pushing To Bintray Registry
+ candidate: ${{MyAppDockerImage}}
+ tag: '${{CF_SHORT_REVISION}}'
+ registry: bintray
+ PushingToGoogleRegistry:
+ type: push
+ title: Pushing To Google Registry
+ candidate: ${{MyAppDockerImage}}
+ tag: '${{CF_SHORT_REVISION}}'
+ registry: gcr
+ PushingToDockerRegistry:
+ type: push
+ title: Pushing To Dockerhub Registry
+ candidate: ${{MyAppDockerImage}}
+ tag: '${{CF_SHORT_REVISION}}'
+ image_name: kkapelon/trivialgoweb
+ registry: dockerhub
+{% endraw %}
+{% endhighlight %}
+
+The order of execution is the following:
+
+1. MyAppDockerImage ([build step]({{site.baseurl}}/docs/pipelines/steps/build/))
+1. jfrog_PushingTo_jfrog_BintrayRegistry, PushingToGoogleRegistry, PushingToDockerRegistry ([push steps]({{site.baseurl}}/docs/pipelines/steps/push/))
+
+The pipeline view for this yaml file is the following.
+
+{% include
+image.html
+lightbox="true"
+file="/images/codefresh-yaml/parallel-push.png"
+url="/images/codefresh-yaml/parallel-push.png"
+alt="Parallel Docker push"
+caption="Parallel Docker push"
+max-width="80%"
+%}
+
+As you can see we have also marked the steps with [stages]({{site.baseurl}}/docs/pipelines/stages/) so that we get a visualization that matches the execution.
+
+
+### Example: Running multiple test suites in parallel
+
+All types of steps can be placed inside a parallel phase. Another common use case would be the parallel execution of [freestyle steps]({{site.baseurl}}/docs/pipelines/steps/freestyle/) for unit/integration tests.
+
+Let's say that you have a Docker image with a Python back-end and a JavaScript front-end. You could run both types of tests in parallel with the following yaml syntax:
+
+`YAML`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+steps:
+ MyAppDockerImage:
+ title: Building Docker Image
+ type: build
+ image_name: my-full-stack-app
+ working_directory: ./
+ tag: '${{CF_BRANCH_TAG_NORMALIZED}}'
+ dockerfile: Dockerfile
+ MyTestingPhases:
+ type: parallel
+ steps:
+ my_back_end_tests:
+ title: Running Back end tests
+ image: ${{MyAppDockerImage}}
+ commands:
+ - python setup.py test
+ my_front_end_tests:
+ title: Running Front End tests
+ image: ${{MyAppDockerImage}}
+ commands:
+ - npm run test
+{% endraw %}
+{% endhighlight %}
+
+Running different types of tests (unit/integration/load/acceptance) in parallel is a very common use case for parallelism inside an otherwise sequential pipeline.
+
+### Defining success criteria for a parallel step
+
+By default, any failed step in a Codefresh pipeline will fail the whole pipeline. There are ways to change this behavior (the `fail_fast` property is explained later in this page), but specifically for parallel steps you can define exactly when the whole step succeeds or fails.
+
+You can define steps that will be used to decide if a parallel step succeeds with this syntax:
+
+{% highlight yaml %}
+second_step:
+ title: Second step
+ success_criteria:
+ steps:
+ only:
+ - my_unit_tests
+ type: parallel
+ steps:
+ my_unit_tests:
+ title: Running Back end tests
+ image: node
+ commands:
+ - npm run test
+ my_integration_tests:
+ title: Running Integration tests
+ image: node
+ commands:
+ - npm run int-test
+ my_acceptance_tests:
+ title: Running Acceptance tests
+ image: node
+ commands:
+ - npm run acceptance-test
+{% endhighlight %}
+
+In the example above, if integration and/or acceptance tests fail, the whole pipeline will continue, because we have defined that only the results of unit test matter for the whole parallel step.
+
+The reverse relationship (i.e., defining steps to be ignored) can be defined with the following syntax
+
+{% highlight yaml %}
+second_step:
+ title: Second step
+ success_criteria:
+ steps:
+ ignore:
+ - my_integration_tests
+ - my_acceptance_tests
+ type: parallel
+ steps:
+ my_unit_tests:
+ title: Running Back end tests
+ image: node
+ commands:
+ - npm run test
+ my_integration_tests:
+ title: Running Integration tests
+ image: node
+ commands:
+ - npm run int-test
+ my_acceptance_tests:
+ title: Running Acceptance tests
+ image: node
+ commands:
+ - npm run acceptance-test
+{% endhighlight %}
+
+In the example above we have explicitly defined that even if the integration or acceptance tests fail the whole pipeline will continue.
+
+### Shared Codefresh volume and race conditions
+
+In any pipeline step, Codefresh automatically attaches a [shared volume]({{site.baseurl}}/docs/pipelines/introduction-to-codefresh-pipelines/#sharing-the-workspace-between-build-steps) that is used to transfer artifacts between steps. The same volume is also shared between steps that run in parallel.
+
+
+Here is an example where two parallel steps are writing two files. After they finish execution, we list the contents of the project folder.
+
+`YAML`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+steps:
+ WritingInParallel:
+ type: parallel
+ steps:
+ writing_file_1:
+ title: Step1A
+ image: alpine
+ commands:
+ - echo "Step1A" > first.txt
+ writing_file_2:
+ title: Step1B
+ image: alpine
+ commands:
+ - echo "Step1B" > second.txt
+ MyListing:
+ title: Listing of files
+ image: alpine
+ commands:
+ - ls
+{% endraw %}
+{% endhighlight %}
+
+The results from the `MyListing` step are the following:
+
+```
+first.txt second.txt
+```
+
+This illustrates the side effects for both parallel steps that were executed on the same volume.
+
+>It is therefore your responsibility to make sure that steps that run in parallel play nice with each other. Currently, Codefresh performs no conflict detection at all. If there are race conditions between your parallel steps (e.g. multiple steps writing to the same files), the final behavior is undefined. It is best to start with a fully sequential pipeline, and use parallelism in a gradual manner if you are unsure about the side effects of your steps.
+
+## Implicit parallel steps
+> If you use implicit parallel steps, you _cannot_ use _parallel pipeline mode_.
+
+In all the previous examples, all parallel steps have been defined explicitly in a pipeline. This works well for a small number of steps, but in some cases it can be cumbersome to write such a pipeline, especially when the parallel steps are similar.
+
+Codefresh offers two handy ways to lessen the amount of YAML you have to write and get automatic parallelization with minimum effort.
+
+* The `scale` syntax allows you to quickly create parallel steps that are mostly similar (but still differ)
+* The `matrix` syntax allows you to quickly create parallel steps for multiple combinations of properties
+
+### Scale parallel steps (one dimension)
+
+If you look back at the parallel docker push example you will see that all push steps are the same. The only thing that changes is the registry that they push to.
+
+`YAML`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+stages:
+- build
+- push
+steps:
+ MyAppDockerImage:
+ title: Building Docker Image
+ stage: 'build'
+ type: build
+ image_name: trivialgoweb
+ working_directory: ./
+ tag: '${{CF_BRANCH_TAG_NORMALIZED}}'
+ dockerfile: Dockerfile
+ PushingToRegistries:
+ type: parallel
+ stage: 'push'
+ steps:
+ jfrog_PushingTo_jfrog_BintrayRegistry:
+ type: push
+ title: jfrog_Pushing To Bintray Registry
+ candidate: ${{MyAppDockerImage}}
+ tag: '${{CF_SHORT_REVISION}}'
+ registry: bintray
+ PushingToGoogleRegistry:
+ type: push
+ title: Pushing To Google Registry
+ candidate: ${{MyAppDockerImage}}
+ tag: '${{CF_SHORT_REVISION}}'
+ registry: gcr
+ PushingToDockerRegistry:
+ type: push
+ title: Pushing To Dockerhub Registry
+ candidate: ${{MyAppDockerImage}}
+ tag: '${{CF_SHORT_REVISION}}'
+ image_name: kkapelon/trivialgoweb
+ registry: dockerhub
+{% endraw %}
+{% endhighlight %}
+
+
+This pipeline can be simplified by using the special `scale` syntax to create a common parent step with all similarities:
+
+
+`YAML`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+stages:
+- build
+- push
+steps:
+ MyAppDockerImage:
+ title: Building Docker Image
+ stage: 'build'
+ type: build
+ image_name: trivialgoweb
+ working_directory: ./
+ tag: '${{CF_BRANCH_TAG_NORMALIZED}}'
+ dockerfile: Dockerfile
+ PushingToRegistries:
+ stage: 'push'
+ type: push
+ tag: '${{CF_SHORT_REVISION}}'
+ candidate: ${{MyAppDockerImage}}
+ scale:
+ jfrog_PushingTo_jfrog_BintrayRegistry:
+ registry: bintray
+ PushingToGoogleRegistry:
+ registry: gcr
+ PushingToDockerRegistry:
+ image_name: kkapelon/trivialgoweb
+ registry: dockerhub
+{% endraw %}
+{% endhighlight %}
+
+You can see now that all common properties are defined once in the parent step (`PushingToRegistries`) while each push step only contains what differs. Codefresh will automatically create parallel steps when it encounters the `scale` syntax.
+
+The resulting pipeline is more concise but runs in the same manner as the original YAML. For a big number of parallel steps, the `scale` syntax is very helpful for making the pipeline definition more clear.
+
+You can use the `scale` syntax with all kinds of steps in Codefresh and not just push steps. Another classic example would be running tests in parallel with different environment variables.
+
+
+`YAML`
+{% highlight yaml %}
+{% raw %}
+ run_tests_in_parallel:
+ stage: 'Microservice A'
+ working_directory: './my-front-end-code'
+ image: node:latest
+ commands:
+ - npm run test
+ scale:
+ first:
+ environment:
+ - TEST_NODE=0
+ second:
+ environment:
+ - TEST_NODE=1
+ third:
+ environment:
+ - TEST_NODE=2
+ fourth:
+ environment:
+ - TEST_NODE=3
+{% endraw %}
+{% endhighlight %}
+
+This pipeline will automatically create 4 parallel freestyle steps. All of them will use the same Docker image and execute the same command (`npm run test`) but each one will receive a different value for the environment variable called `TEST_NODE`.
+
+Notice that if you define environment variables on the parent step (`run_tests_in_parallel` in the example above), they will also be available on the children parallel steps. And if those define environment variables as well, all environment variables will be available.
+
+
+### Matrix parallel steps (multiple dimensions)
+
+The `scale` syntax allows you to easily create multiple parallel steps that differ only in a single dimension. If you have multiple dimensions of properties that differ and you want to run all possible combinations (Cartesian product) then the `matrix` syntax will do that for you automatically.
+
+`YAML`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+stages:
+ - prepare
+ - test
+steps:
+ main_clone:
+ title: Cloning main repository...
+ type: git-clone
+ repo: 'codefreshdemo/cf-example-unit-test'
+ revision: 'master'
+ git: github
+ stage: prepare
+ run_my_tests_before_build:
+ stage: test
+ working_directory: './golang-app-A'
+ commands:
+ - go test -v
+ matrix:
+ image:
+ - golang:1.11
+ - golang:1.12
+ - golang:1.13
+ environment:
+ - [CGO_ENABLED=1]
+ - [CGO_ENABLED=0]
+{% endraw %}
+{% endhighlight %}
+
+Here we want to run unit tests with 3 different versions of GO and also try with CGO enabled or not. Instead of manually writing 6 parallel steps in your pipeline with all possible combinations, we can simply use the `matrix` syntax to create the following parallel steps:
+
+* Go 1.11 with CGO enabled
+* Go 1.11 with CGO disabled
+* Go 1.12 with CGO enabled
+* Go 1.12 with CGO disabled
+* Go 1.13 with CGO enabled
+* Go 1.13 with CGO disabled
+
+The resulting Codefresh YAML is much more compact. Notice that because the `environment` property in Codefresh is already an array on its own, when we use it with the `matrix` syntax we need to enclose its value with `[]` (array of arrays).
+
+You can add more dimensions to a matrix build (and not just two as shown in the example). Here is another example with 3 dimensions:
+
+`YAML`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+stages:
+ - prepare
+ - test
+steps:
+ main_clone:
+ title: Cloning main repository...
+ stage: prepare
+ type: git-clone
+ repo: 'codefresh-contrib/spring-boot-2-sample-app'
+ revision: master
+ git: github
+ MyUnitTests:
+ stage: test
+ matrix:
+ image:
+ - 'maven:3.5.2-jdk-8-alpine'
+ - 'maven:3.6.2-jdk-11-slim'
+ - 'maven:3-jdk-8'
+ commands:
+ - ["mvn --version", "mvn -Dmaven.repo.local=/codefresh/volume/m2_repository test"]
+ - ["mvn --version", "mvn -Dmaven.test.skip -Dmaven.repo.local=/codefresh/volume/m2_repository package"]
+ environment:
+ - [MAVEN_OPTS=-Xms1024m]
+ - [MAVEN_OPTS=-Xms512m]
+{% endraw %}
+{% endhighlight %}
+
+This pipeline creates 3 x 2 x 2 = 12 parallel steps with all the possible combinations of:
+
+* Maven version
+* Running or disabling tests
+* Using 1GB or 512MBs of memory.
+
+Remember that all parallel steps run within the same pipeline executor so make sure that you have enough resources as the number
+of matrix variations can quickly grow if you add too many dimensions.
+
+Notice that, as with the `scale` syntax, the defined values/properties are merged between parent step (`MyUnitTests` in the example above) and children steps. For example, if you set an environment variable on the parent and also on child matrix steps, the result will be a merged environment where all values are available.
+
+## Parallel pipeline execution
+> If you use parallel execution mode for pipelines, you _cannot_ use _implicit parallel steps_.
+
+To activate advanced parallel mode for the whole pipeline, you need to declare it explicitly at the root of the `codefresh.yml` file:
+
+```
+version: '1.0'
+mode: parallel
+steps:
+[...]
+```
+
+In full parallel mode, the order of steps inside the `codefresh.yml` **does not** affect the order of execution at all. The Codefresh pipeline engine instead:
+
+1. Evaluates all step-conditions *at the same* time
+2. Executes those that have their requirements met
+3. Starts over with the remaining steps
+4. Stops when there are no more steps to evaluate
+
+This means that in parallel mode the conditions of a step are evaluated **multiple times** as the Codefresh execution engine tries to find which steps it should run next. This implication is very important when you try to understand the order of step execution.
+
+Notice also that in parallel mode, if you don't define any step conditions, Codefresh will try to run **all** steps at once, which is probably not what you want in most cases.
+
+With parallel mode you are expected to define the order of steps in the yaml file, and the Codefresh engine will create a *graph* of execution that satisfies your instructions. This means that writing the `codefresh.yml` file requires more effort on your part, but on the other hand allows you to define the step order in ways not possible with the sequential mode. You also need to define which steps should depend on the automatic cloning of the pipeline (which is a special step named `main_clone`).
+
+In the next sections we describe how you can define the steps dependencies in a parallel pipeline.
+
+### Single step dependencies
+
+At the most basic level, you can define that a step *depends on* the execution of another step. This dependency is very flexible as Codefresh allows you run a second step once:
+
+1. The first step is finished with success
+1. The first step is finished with failure
+1. The first step completes (regardless of exit status)
+
+The syntax for this is the following post-condition:
+
+{% highlight yaml %}
+second_step:
+ title: Second step
+ when:
+ steps:
+ - name: first_step
+ on:
+ - success
+{% endhighlight %}
+
+If you want to run the second step only if the first one fails the syntax is:
+
+{% highlight yaml %}
+second_step:
+ title: Second step
+ when:
+ steps:
+ - name: first_step
+ on:
+ - failure
+{% endhighlight %}
+
+Finally, if you don't care about the completion status the syntax is:
+
+{% highlight yaml %}
+second_step:
+ title: Second step
+ when:
+ steps:
+ - name: first_step
+ on:
+ - finished
+{% endhighlight %}
+
+Notice that `success` is the default behavior so if you omit the last two lines (i.e., the `on:` part), the second step
+will wait for the first step to run successfully.
+
+>Also notice that the name `main_clone` is reserved for the automatic clone that takes place in the beginning of pipelines that are linked to a git repository. You need to define which steps depend on it (probably the start of your graph) so that `git checkout` happens before the other steps.
+
+As an example, let's assume that you have the following steps in a pipeline:
+
+1. A build step that creates a Docker image
+1. A freestyle step that runs [unit tests]({{site.baseurl}}/docs/testing/unit-tests/) inside the Docker image
+1. A freestyle step that runs [integrations tests]({{site.baseurl}}/docs/testing/integration-tests/) *After* the unit tests, even if they fail
+1. A cleanup step that runs after unit tests if they fail
+
+Here is the full pipeline. Notice the explicit dependency to the `main_clone` step that checks out the code.
+
+`YAML`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+mode: parallel
+steps:
+ MyAppDockerImage:
+ title: Building Docker Image
+ type: build
+ image_name: my-node-js-app
+ working_directory: ./
+ tag: '${{CF_BRANCH_TAG_NORMALIZED}}'
+ dockerfile: Dockerfile
+ when:
+ steps:
+ - name: main_clone
+ on:
+ - success
+ MyUnitTests:
+ title: Running unit tests
+ image: ${{MyAppDockerImage}}
+ fail_fast: false
+ commands:
+ - npm run test
+ when:
+ steps:
+ - name: MyAppDockerImage
+ on:
+ - success
+ MyIntegrationTests:
+ title: Running integration tests
+ image: ${{MyAppDockerImage}}
+ commands:
+ - npm run integration-test
+ when:
+ steps:
+ - name: MyUnitTests
+ on:
+ - finished
+ MyCleanupPhase:
+ title: Cleanup unit test results
+ image: alpine
+ commands:
+ - ./cleanup.sh
+ when:
+ steps:
+ - name: MyUnitTests
+ on:
+ - failure
+{% endraw %}
+{% endhighlight %}
+
+If you run the pipeline you will see that Codefresh automatically understands that `MyIntegrationTests` and `MyCleanupPhase` can run in parallel right after the unit tests finish.
+
+Also notice the `fail_fast: false` line in the unit tests. By default, if *any* step fails in a pipeline the whole pipeline is marked as a failure. With the `fail_fast` directive we can allow the pipeline to continue so that other steps that depend on the failed step can still run.
+
+
+### Multiple step dependencies
+
+A pipeline step can also depend on multiple other steps.
+
+The syntax is:
+
+{% highlight yaml %}
+third_step:
+ title: Third step
+ when:
+ steps:
+ all:
+ - name: first_step
+ on:
+ - success
+ - name: second_step
+ on:
+ - finished
+{% endhighlight %}
+
+In this case, the third step will run only when BOTH first and second are finished (and first is actually a success)
+
+*ALL* is the default behavior so it can be omitted if this is what you need. The example above
+is exactly the same as the one below:
+
+{% highlight yaml %}
+third_step:
+ title: Third step
+ when:
+ steps:
+ - name: first_step
+ on:
+ - success
+ - name: second_step
+ on:
+ - finished
+{% endhighlight %}
+
+Codefresh also allows you to define *ANY* behavior in an explicit manner:
+
+{% highlight yaml %}
+third_step:
+ title: Third step
+ when:
+ steps:
+ any:
+ - name: first_step
+ on:
+ - success
+ - name: second_step
+ on:
+ - finished
+{% endhighlight %}
+
+Here the third step will run when either the first one *OR* the second one has finished.
+
+As an example let's assume this time that we have:
+
+1. A build step that creates a docker image
+1. Unit tests that will run when the docker image is ready
+1. Integration tests that run either after unit tests or if the docker image is ready (contrived example)
+1. A cleanup step that runs when both kinds of tests are finished
+
+Here is the full pipeline
+
+`YAML`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+mode: parallel
+steps:
+ MyAppDockerImage:
+ title: Building Docker Image
+ type: build
+ image_name: my-node-js-app
+ working_directory: ./
+ tag: '${{CF_BRANCH_TAG_NORMALIZED}}'
+ dockerfile: Dockerfile
+ MyUnitTests:
+ title: Running unit tests
+ image: ${{MyAppDockerImage}}
+ fail_fast: false
+ commands:
+ - npm run test
+ when:
+ steps:
+ - name: MyAppDockerImage
+ on:
+ - success
+ MyIntegrationTests:
+ title: Running integration tests
+ image: ${{MyAppDockerImage}}
+ commands:
+ - npm run integration-test
+ when:
+ steps:
+ any:
+ - name: MyUnitTests
+ on:
+ - finished
+ - name: MyAppDockerImage
+ on:
+ - success
+ MyCleanupPhase:
+ title: Cleanup unit test results
+ image: alpine
+ commands:
+ - ./cleanup.sh
+ when:
+ steps:
+ all:
+ - name: MyUnitTests
+ on:
+ - finished
+ - name: MyIntegrationTests
+ on:
+ - finished
+{% endraw %}
+{% endhighlight %}
+
+In this case Codefresh will make sure that cleanup happens only when both unit and integration tests are finished.
+
+
+### Custom step dependencies
+
+For maximum flexibility you can define a custom condition for a step.
+
+It is hard to describe all possible cases, because Codefresh supports a [mini DSL]({{site.baseurl}}/docs/pipelines/conditional-execution-of-steps/#condition-expression-syntax) for conditions. All examples mentioned in conditional execution are still valid in parallel pipelines.
+
+For example, run this step only if a PR is opened against the production branch:
+
+{% highlight yaml %}
+{% raw %}
+my_step:
+ title: My step
+ when:
+ condition:
+ all:
+ validateTargetBranch: '"${{CF_PULL_REQUEST_TARGET}}" == "production"'
+ validatePRAction: '''${{CF_PULL_REQUEST_ACTION}}'' == ''opened'''
+{% endraw %}
+{% endhighlight %}
+
+Run this step only for the master branch and when the commit message does not include "skip ci":
+
+{% highlight yaml %}
+{% raw %}
+my_step:
+ title: My step
+ when:
+ condition:
+ all:
+ noSkipCiInCommitMessage: 'includes(lower("${{CF_COMMIT_MESSAGE}}"), "skip ci") == false'
+ masterBranch: '"${{CF_BRANCH}}" == "master"'
+{% endraw %}
+{% endhighlight %}
+
+You can now add extra conditions regarding the completion state of specific steps. A global object called `steps` contains all steps by name along with a `result` property with the following possible completion states:
+
+* Success
+* Failure
+* Skipped (only valid in sequential mode)
+* Finished (regardless of status)
+* Pending
+* Running
+
+Finished is a shorthand for `success` or `failure` or `skipped`. It is only valid when used in [step dependencies]({{site.baseurl}}/docs/pipelines/advanced-workflows/#single-step-dependencies), and cannot be used in custom conditions.
+
+You can mix and match completion states from any other step in your pipeline. Here are some examples:
+
+{% highlight yaml %}
+my_step:
+ title: My step
+ when:
+ condition:
+ all:
+ myCondition: steps.MyUnitTests.result == 'failure' || steps.MyIntegrationTests.result == 'failure'
+{% endhighlight %}
+
+{% highlight yaml %}
+my_step:
+ title: My step
+ when:
+ condition:
+ any:
+ myCondition: steps.MyLoadTesting.result == 'success'
+ myOtherCondition: steps.MyCleanupStep.result == 'success'
+{% endhighlight %}
+
+You can also use conditions in the success criteria for a parallel step. Here is an example
+
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+stages:
+- start
+- tests
+- cleanup
+steps:
+ MyAppDockerImage:
+ stage: 'start'
+ title: Building Docker Image
+ type: build
+ image_name: my-full-stack-app
+ working_directory: ./01_sequential/
+ tag: '${{CF_BRANCH_TAG_NORMALIZED}}'
+ dockerfile: Dockerfile
+ MyTestingPhases:
+ type: parallel
+ stage: 'tests'
+ success_criteria:
+ condition:
+ all:
+ myCondition: ${{steps.my_back_end_tests.result}} === 'success' && ${{steps.my_front_end_tests.result}} === 'success'
+ steps:
+ my_back_end_tests:
+ title: Running Back end tests
+ image: ${{MyAppDockerImage}}
+ commands:
+ - exit 1
+ my_front_end_tests:
+ title: Running Front End tests
+ image: ${{MyAppDockerImage}}
+ commands:
+ - echo "Second"
+ MyCleanupPhase:
+ stage: 'cleanup'
+ title: Cleanup unit test results
+ image: alpine
+ commands:
+ - echo "Finished"
+{% endraw %}
+{% endhighlight %}
+
+
+## Handling error conditions in a pipeline
+
+It is important to understand the capabilities offered by Codefresh when it comes to error handling. You have several options in different levels of granularity to select what constitutes a failure and what not.
+
+By default, *any* failed step in a pipeline will abort the whole pipeline and mark it as failure.
+
+You can use the directive `fail_fast: false`:
+* In a specific step to mark it as ignored if it fails
+* At the root level of the pipeline if you want to apply it to all steps
+
+Therefore, if you want your pipeline to keep running to completion regardless of errors the following syntax is possible:
+
+```
+version: '1.0'
+fail_fast: false
+steps:
+[...]
+```
+
+You also have the capability to define special steps that will run when the whole pipeline has a special completion status. Codefresh offers a special object called `workflow` that represents the whole pipeline and allows you to evaluate its status in a step.
+
+For example, you can have a cleanup step that will run only if the workflow fails (regardless of the actual step that created the error) with the following syntax:
+
+{% highlight yaml %}
+my_cleanup_step:
+ title: My Pipeline Cleanup
+ when:
+ condition:
+ all:
+ myCondition: workflow.result == 'failure'
+{% endhighlight %}
+
+As another example we have a special step that will send an email if the pipeline succeeds or if load-tests fail:
+
+{% highlight yaml %}
+my_email_step:
+ title: My Email step
+ when:
+ condition:
+ any:
+ myCondition: workflow.result == 'success'
+ myTestCondition: steps.MyLoadTesting.result == 'failure'
+{% endhighlight %}
+
+Notice that both examples assume that `fail_fast: false` is at the root of the `codefresh.yaml` file.
+
+The possible values for `workflow.result` are:
+
+* `running`
+* `terminated`
+* `failure`
+* `pending-approval`
+* `success`
+
+
+## Related articles
+[Variables in pipelines]({{site.baseurl}}/docs/pipelines/variables/)
+[Hooks in pipelines]({{site.baseurl}}/docs/pipelines/hooks/)
+
+
+
+
+
+
+
+
diff --git a/_docs/pipelines/annotations.md b/_docs/pipelines/annotations.md
new file mode 100644
index 00000000..d82fc37d
--- /dev/null
+++ b/_docs/pipelines/annotations.md
@@ -0,0 +1,302 @@
+---
+title: "Annotations in pipelines"
+description: "Mark your builds and projects with extra annotations"
+group: pipelines
+toc: true
+---
+
+Codefresh supports annotating several entities with custom annotations. You can use these annotations to store any optional information that you wish to keep associated with each entity. Examples would be storing the test coverage for a particular build, or a special settings file for a pipeline.
+
+Currently Codefresh supports extra annotations for:
+
+* Projects
+* Pipelines
+* Builds
+* Docker images
+
+You can view/edit annotations using the [Codefresh CLI](https://codefresh-io.github.io/cli/annotations/) or directly in the Codefresh Web UI.
+
+>Notice that the syntax shown in this page is deprecated but still supported. For the new syntax
+see [hooks]({{site.baseurl}}/docs/pipelines/hooks/).
+
+
+## Adding annotations
+
+In the most basic scenario you can use the [post operations]({{site.baseurl}}/docs/pipelines/post-step-operations/) of any Codefresh [step]({{site.baseurl}}/docs/pipelines/steps/) to add annotations:
+
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+steps:
+ my_custom_step:
+ title: Adding annotations to a project
+ image: alpine:3.9
+ commands:
+ - echo "Hello"
+ on_success:
+ annotations:
+ set:
+ - entity_id: annotate-examples
+ entity_type: project
+ annotations:
+ - my_annotation_example1: 10.45
+ - my_empty_annotation
+ - my_string_annotation: Hello World
+{% endraw %}
+{% endhighlight %}
+
+
+This pipeline adds three annotations to a project called `annotate-examples`. The name of each annotation can only contain letters (upper and lowercase), numbers and the underscore character. The name of each annotation must start with a letter.
+
+
+For the `entity_id` value you can also use an actual ID instead of a name. The `entity_id` and `entity_type` define which entity will hold the annotations. The possible entity types are:
+
+* `project` (for a project, even a different one)
+* `pipeline` (for a pipeline, even a different one)
+* `build` (for a build, even a different one)
+* `image` (for a docker image)
+
+If you don't define them, then by default the current build will be used with these values:
+* `entity_id` is `{% raw %}${{CF_BUILD_ID}}{% endraw %}` (i.e. the current build)
+* `entity_type` is `build`
+
+Here is another example where we add annotations to another pipeline as well as another build (instead of the current one)
+
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+steps:
+ my_custom_step:
+ title: Adding annotations to multiple entities
+ image: alpine:3.9
+ commands:
+ - echo "Hello"
+ on_success:
+ annotations:
+ set:
+ - entity_id: my-project/my-basic-pipeline
+ entity_type: pipeline
+ annotations:
+ - my_annotation_example1: 10.45
+ - my_empty_annotation
+ - my_string_annotation: Hello World
+ - entity_id: 5ce2a0e869e2ed0a60c1e203
+ entity_type: build
+ annotations:
+ - my_coverage: 70
+ - my_url_example: http://www.example.com
+{% endraw %}
+{% endhighlight %}
+
+It is therefore possible to store annotations on any Codefresh entity (and not just the ones that are connected to the build that is adding annotations).
+
+## Viewing/editing annotations
+
+You can view the annotations using the Codefresh CLI
+
+```shell
+codefresh get annotation project annotate-examples
+```
+
+You can also view annotations within the Codefresh UI.
+
+For build annotations click the *Annotations* on the build details:
+
+{% include
+image.html
+lightbox="true"
+file="/images/codefresh-yaml/annotations/view-build-annotations.png"
+url="/images/codefresh-yaml/annotations/view-build-annotations.png"
+alt="Viewing Build annotations"
+caption="Viewing Build annotations"
+max-width="80%"
+%}
+
+For pipeline annotations click the *Annotations* button in the pipeline list view:
+
+{% include
+image.html
+lightbox="true"
+file="/images/codefresh-yaml/annotations/view-pipeline-annotations.png"
+url="/images/codefresh-yaml/annotations/view-pipeline-annotations.png"
+alt="Viewing Pipeline annotations"
+caption="Viewing Pipeline annotations"
+max-width="80%"
+%}
+
+For project annotations click the *Annotations* button in the project list view:
+
+{% include
+image.html
+lightbox="true"
+file="/images/codefresh-yaml/annotations/view-project-annotations.png"
+url="/images/codefresh-yaml/annotations/view-project-annotations.png"
+alt="Viewing project annotations"
+caption="Viewing project annotations"
+max-width="80%"
+%}
+
+In all cases you will see a dialog with all existing annotations.
+
+
+{% include
+image.html
+lightbox="true"
+file="/images/codefresh-yaml/annotations/edit-project-annotations.png"
+url="/images/codefresh-yaml/annotations/edit-project-annotations.png"
+alt="Editing annotations"
+caption="Editing annotations"
+max-width="50%"
+%}
+
+You can add additional annotations manually by clicking the *Add annotation* button and entering:
+
+* The name of the annotation
+* The type of the annotation (text, number, percentage, link, boolean)
+* The desired value
+
+Click *Save* to apply your changes.
+
+## Complex annotation values
+
+Apart from scalar values, you can also store more complex expressions in annotations. You have access to all [Codefresh variables]({{site.baseurl}}/docs/pipelines/variables/), text files from the build and even evaluations from the [expression syntax]({{site.baseurl}}/docs/pipelines/condition-expression-syntax/).
+
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+steps:
+ main_clone:
+ title: Cloning main repository...
+ type: git-clone
+ repo: 'kostis-codefresh/nestjs-example'
+ revision: '${{CF_REVISION}}'
+ my_custom_step:
+ title: Complex annotations
+ image: alpine:3.9
+ commands:
+ - echo "Hello"
+ - echo "Sample content" > /tmp/my-file.txt
+ on_finish:
+ annotations:
+ set:
+ - entity_id: annotate-examples/simple
+ entity_type: pipeline
+ annotations:
+ - qa: pending
+ - commit_message: ${{CF_COMMIT_MESSAGE}}
+ - is_main_branch:
+ evaluate: "'${{CF_BRANCH}}' == 'main'"
+ - my_json_file: "file:/tmp/my-file.txt"
+ - my_docker_file: "file:Dockerfile"
+{% endraw %}
+{% endhighlight %}
+
+>Notice that this pipeline is using dynamic git repository variables, so it must be linked to at least one [git trigger]({{site.baseurl}}/docs/pipelines/triggers/git-triggers/) in order to work.
+
+The last two annotations add the text of a file as a value. You can define an absolute or relative path. No processing is done on the file before being stored. If a file is not found, the annotation will still be added verbatim.
+We suggest you only store small text files in this manner as annotations values.
+
+## Removing annotations
+
+You can also remove annotations by mentioning their name:
+
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+steps:
+ my_custom_step:
+ title: Adding annotations to a pipeline
+ image: alpine:3.9
+ commands:
+ - echo "Hello"
+ on_success:
+ annotations:
+ set:
+ - entity_id: my-project/my-basic-pipeline
+ entity_type: pipeline
+ annotations:
+ - my_annotation_example1: 10.45
+ - my_empty_annotation
+ - my_string_annotation: Hello World
+ - my_second_annotation: This one will stay
+ my_unit_tests:
+ title: Removing annotations
+ image: alpine:3.9
+ commands:
+ - echo "Tests failed"
+ - exit 1
+ on_fail:
+ annotations:
+ unset:
+ - entity_id: my-project/my-basic-pipeline
+ entity_type: pipeline
+ annotations:
+ - my_annotation_example1
+ - my_empty_annotation
+ - my_string_annotation
+{% endraw %}
+{% endhighlight %}
+
+You can also use both `unset` and `set` blocks in a single `annotations` block. And of course, you can remove annotations from multiple entities.
+
+The `unset` annotation can be used with all post-step operations (`on_success`, `on_fail`, `on_finish`).
+
+
+## Adding annotations to the current build/image
+
+As a convenience feature:
+
+1. If your pipeline has a build step
+1. If you want to add annotations to the present build or image
+
+you can also define annotations in the root level of the build step and not mention the entity id and type. Annotations will then be added in the present build.
+
+
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+steps:
+ main_clone:
+ title: Cloning main repository...
+ type: git-clone
+ repo: 'kostis-codefresh/nestjs-example'
+ revision: 'master'
+ MyAppDockerImage:
+ title: Building Docker Image
+ type: build
+ image_name: my-app-image
+ working_directory: ./
+ tag: 'sample'
+ dockerfile: Dockerfile
+ annotations:
+ set:
+ - annotations:
+ - my_number_annotation: 9999
+ - my_empty_annotation
+ - my_docker_file: "file:Dockerfile"
+ - my_text_annotation: simple_text
+{% endraw %}
+{% endhighlight %}
+
+After running this pipeline at least once, you can retrieve the annotations from any previous build by using the respective id:
+
+```shell
+codefresh get annotation build 5ce26f5ff2ed0edd561fa2fc
+```
+
+You can also define `entity_type` as `image` without entering any `entity_id`. In this case the image created from the build step will be annotated.
+
+
+Note that this syntax is optional. You can still define annotations for a build/image or any other entity using the post operations of any step by mentioning explicitly the target id and type.
+
+## Related articles
+[Image annotations]({{site.baseurl}}/docs/docker-registries/metadata-annotations/)
+[Post-Step Operations]({{site.baseurl}}/docs/pipelines/post-step-operations/)
+[Creating pipelines]({{site.baseurl}}/docs/pipelines/pipelines/)
+[Hooks in pipelines]({{site.baseurl}}/docs/pipelines/hooks/)
diff --git a/_docs/pipelines/build-status.md b/_docs/pipelines/build-status.md
new file mode 100644
index 00000000..02d9e897
--- /dev/null
+++ b/_docs/pipelines/build-status.md
@@ -0,0 +1,150 @@
+---
+title: "Public logs and status badges"
+description: "Embedding Status Images and viewing public logs"
+group: pipelines
+toc: true
+redirect_from:
+ - /docs/build-status
+ - /docs/build-status/
+ - /docs/build-badges-1
+ - /docs/build-badges-1/
+---
+
+
+Badges are simple images that show you the last build status. They support both the pipeline and branch service status.
+The badges can be embedded into your repository’s `readme.md` file or any other website.
+
+Here is an example:
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/badges/badge.png"
+url="/images/pipeline/badges/badge.png"
+alt="Build badge example"
+caption="Build badge example"
+max-width="80%"
+%}
+
+Clicking the badge takes you into the build view of the pipeline.
+
+## Finding the build badge of your project
+
+In the pipeline view of a project, select the *Settings* tab and then click *General*. Next to the *badges* section you will find a link to the build badge.
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/badges/get-build-badge.png"
+url="/images/pipeline/badges/get-build-badge.png"
+alt="Build badge setup"
+caption="Build badge setup"
+max-width="80%"
+%}
+
+Click on it and you will get a new dialog where you can select
+
+ * The graphical style of the badge (two styles are offered)
+ * The syntax for the badge
+
+{% include
+ image.html
+ lightbox="true"
+ file="/images/a0c4aed-codefresh_badges_2.png"
+ url="/images/a0c4aed-codefresh_badges_2.png"
+ alt="Codefresh badges syntax"
+ caption="Codefresh badges syntax"
+ max-width="70%"
+ %}
+
+ The following embedding options are available:
+
+ * Markdown for usage in text files (e.g. `README.MD`)
+ * Plain HTML for normal websites
+ * AsciiDoc for documentation pages
+ * Image for any other document type
+
+
+Copy the snippet in your clipboard.
+
+## Using the build badge
+
+Paste the snippet in the file/document where you want the badge to be visible (e.g. in a Readme file in GitHub).
+
+For example, the markdown syntax is
+
+```
+[]( URL_TO_PIPELINE )
+```
+
+You can also manually change the parameters of the link by using
+`https://g.codefresh.io/api/badges/build?*param1*=xxx&*param2*=yyy`\\
+where *param1*, *param2*, etc. are the parameters from the table below.
+
+{: .table .table-bordered .table-hover}
+| Query parameter | Description |
+| -----------------------|--------------------------------------------------------- |
+| **branch** - optional | Name of the branch. If not supplied, the default is `master` |
+| **repoName** | Name of the repository |
+| **pipelineName** | Name of the pipeline |
+| **accountName** | Name of the account |
+| **repoOwner** | The name of the repository owner |
+| **key** - optional | Token related to the account |
+| **type** - optional | Badge types cf-1:  - also the default badge. cf-2:  |
+
+Everybody who looks at your readme file will also see the current build status of the associated Codefresh pipeline.
+
+## Public build logs
+
+By default, even though the badge shows the build status for everybody, clicking the badge allows only Codefresh registered users that also have access to the pipeline to view the actual builds.
+
+If you are working on an open-source project and wish for greater visibility, you can enable public logs (and associated badge) for your project so that any user can see the pipeline results (even if they are not logged into Codefresh).
+
+Public logs are disabled by default and you need to explicitly enable them.
+
+>This happens for security reasons. Make sure that the logs you are exposing to the Internet do not have any sensitive information. If you are unsure, you can still use the private badge that shows project status only as explained in the previous section.
+
+To enable the public logs, toggle the respective switch in the pipeline settings:
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/badges/toggle-public-logs.png"
+url="/images/pipeline/badges/toggle-public-logs.png"
+alt="Enabling public logs"
+caption="Enabling public logs"
+max-width="80%"
+%}
+
+Then click the *Save* button to apply changes for your pipeline. Once that is done you will also get a second badge (public) as well as the public URL to your project.
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/badges/get-public-url.png"
+url="/images/pipeline/badges/get-public-url.png"
+alt="Getting the public URL log view"
+caption="Getting the public URL log view"
+max-width="70%"
+%}
+
+Now you can use this badge and/or public URL anywhere and all users can view your logs without being logged into Codefresh at all (or having access to your pipeline).
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/badges/view-public-logs.png"
+url="/images/pipeline/badges/view-public-logs.png"
+alt="Public logs"
+caption="Public logs"
+max-width="90%"
+%}
+
+Your visitors can also click on each individual pipeline step and see the logs for that step only.
+
+If you are using Codefresh to manage a public project, you should also use the capability to [trigger builds from external forks]({{site.baseurl}}/docs/pipelines/triggers/git-triggers/#support-for-building-pull-requests-from-forks).
+
+## Related articles
+[Introduction to Codefresh pipelines]({{site.baseurl}}/docs/pipelines/introduction-to-codefresh-pipelines)
+[Creating pipelines]({{site.baseurl}}/docs/pipelines/pipelines/)
+[Monitoring pipelines]({{site.baseurl}}/docs/pipelines/monitoring-pipelines/)
diff --git a/_docs/pipelines/condition-expression-syntax.md b/_docs/pipelines/condition-expression-syntax.md
new file mode 100644
index 00000000..8e1ea6c6
--- /dev/null
+++ b/_docs/pipelines/condition-expression-syntax.md
@@ -0,0 +1,107 @@
+---
+title: "Condition Expression Syntax"
+description: "Condition expressions can be included in each step in your codefresh.yml, and must be satisfied for the step to execute."
+group: pipelines
+redirect_from:
+ - /docs/condition-expression-syntax/
+ - /docs/codefresh-yaml/expression-condition-syntax/
+toc: true
+---
+Each step in `codefresh.yml` file can contain conditions expressions that must be satisfied for the step to execute.
+
+This is a small example of where a condition expression can be used:
+ `YAML`
+{% highlight yaml %}
+step-name:
+ description: Step description
+ image: image/id
+ commands:
+ - bash-command1
+ - bash-command2
+ when:
+ condition:
+ all:
+ executeForMasterBranch: "{% raw %}'${{CF_BRANCH}}{% endraw %}' == 'master'"
+{% endhighlight %}
+
+A condition expression is a basic expression that is evaluated to true/false (to decide whether to execute or not to execute), and can have the following syntax:
+
+### Types
+
+{: .table .table-bordered .table-hover}
+| Type | True/False Examples | True/False |
+| ------- | ----------------------------------------- | --------------|
+| String  | True: "hello"  False: ""  | {::nomarkdown}<ul><li>String with content = true</li><li>Empty string = false</li><li>String comparison is lexicographic.</li></ul>{:/} |
+| Number | True: 5 True: 3.4 True: 1.79E+308 | {::nomarkdown}
{:/} |
+| Null | False: null | Always false |
+
+### Variables
+
+You can use the User Provided variables as explained in [Variables]({{site.baseurl}}/docs/pipelines/variables/), including the [variables
+exposed by each individual pipeline step]({{site.baseurl}}/docs/pipelines/variables/#step-variables).
+
+### Unary Operators
+
+{: .table .table-bordered .table-hover}
+| Operator | Operation |
+| ---------- | --------------------- |
+| `-` | Negation of numbers |
+| `!` | Logical NOT |
+
+### Binary Operators
+
+{: .table .table-bordered .table-hover}
+| Operator | Operation |
+| --------------------------- | ----------- |
+| Add, String Concatenation | `+` |
+| Subtract | `-` |
+| Multiply | `*` |
+| Divide | `/` |
+| Modulus | `%` |
+| Logical AND | `&&` |
+| Logical OR | `||` |
+
+### Comparisons
+
+{: .table .table-bordered .table-hover}
+| Operator | Operation |
+| ----------- | ---------------------- |
+| `==` | Equal to |
+| `!=` | Not equal to |
+| `>` | Greater than |
+| `>=` | Greater than or equal |
+| `<` | Less than |
+| `<=` | Less than or equal |
+
+### Functions
+
+{: .table .table-bordered .table-hover}
+| Function Name | Parameters | Return value | Example |
+| ------------- | ------------------ | -------------- | ----------------------- |
+| String | 0: number or string | String of input value. | `String(40) == '40'` |
+| Number | 0: number or string | Number of input value. | `Number('50') == 50` `Number('hello')` is invalid |
+| Boolean | 0: number or string | Boolean of input value. | `Boolean('123') == true` `Boolean('') == false` `Boolean(583) == true` `Boolean(0) == false` |
+| round | 0: number | Rounded number. | `round(1.3) == 1` `round(1.95) == 2` |
+| floor | 0: number | Number rounded to floor. | `floor(1.3) == 1` `floor(1.95) == 1` |
+| upper | 0: string | String in upper case. | `upper('hello') == 'HELLO'` |
+| lower | 0: string | String in lower case. | `lower('BYE BYE') == 'bye bye'` |
+| trim | 0: string | Trimmed string. | `trim(" abc ") == "abc"` |
+| trimLeft | 0: string | Left-trimmed string. | `trimLeft(" abc ") == "abc "`|
+| trimRight | 0: string | Right-trimmed string. | `trimRight(" abc ") == " abc"` |
+| replace | 0: string - main string 1: string - substring to find 2: string - substring to replace | Replace all instances of the sub-string (1) in the main string (0) with the sub-string (2). | `replace('hello there', 'e', 'a') == 'hallo thara'`|
+| substring | 0: string - main string 1: string - index to start 2: string - index to end | Returns a sub-string of a string. | `substring("hello world", 6, 11) == "world"` |
+| length | string | Length of a string. | `length("gump") == 4` |
+| includes | 0: string - main string 1: string - string to search for | Whether a search string is located within the main string. | `includes("codefresh", "odef") == true` |
+| indexOf | 0: string - main string 1: string - string to search for | Index of a search string if it is found inside the main string | `indexOf("codefresh", "odef") == 1` |
+| match | 0: string - main string 1: string - regular expression string, [JS style](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions) (Note: in JS strings, the backslash `\` is an escape character so in order to use a literal backslash, you need to escape it. For example: `"^\\d+$"` instead of `"^\d+$"`) 2: boolean - ignore case | Search for a regular expression inside a string, ignoring or not ignoring case | `match("hello there you", "..ll.", false) == true` `match("hello there you", "..LL.", false) == false` `match("hello there you", "hell$", true) == false` `match("hello there you", "^hell", true) == true` `match("hello there you", "bye", false) == false` |
+| Variable | string | Search for the value of a variable | `Variable('some-clone')` |
+| Member | 0: string - variable name 1: string - member name | Search for the value of a variable member | `Member('some-clone', 'working-directory')` |
+
+## What to read next
+
+* [Conditional Execution of Steps]({{site.baseurl}}/docs/pipelines/conditional-execution-of-steps/)
+* [Working Directories]({{site.baseurl}}/docs/pipelines/working-directories/)
+* [Annotations]({{site.baseurl}}/docs/pipelines/annotations/)
+* [Pipeline/Step hooks]({{site.baseurl}}/docs/pipelines/hooks/)
diff --git a/_docs/pipelines/conditional-execution-of-steps.md b/_docs/pipelines/conditional-execution-of-steps.md
new file mode 100644
index 00000000..1019eaad
--- /dev/null
+++ b/_docs/pipelines/conditional-execution-of-steps.md
@@ -0,0 +1,247 @@
+---
+title: "Conditional execution of steps"
+description: "Skip specific pipeline steps according to one or more conditions"
+group: pipelines
+redirect_from:
+ - /docs/conditional-execution-of-steps/
+toc: true
+---
+For each step in a `codefresh.yml` file, you can define a set of conditions which need to be satisfied in order to execute the step. (An introduction to the `codefresh.yml` file can be found [here]({{site.baseurl}}/docs/codefresh-yaml/what-is-the-codefresh-yaml/).)
+
+There are currently two main methods to define conditions:
+* Branch conditions
+* Expression conditions
+
+## Branch Conditions
+
+Usually, you'll want to define a branch condition, be it of the type ```ignore``` for blacklisting a set of branches or of the type ```only``` for allowlisting a set of branches. Each branch specification can either be an exact branch name, e.g. ```master```, or a regular expression, e.g. ```/hotfix$/```. Case insensitive regexps (```/^FB-/i```) are also supported.
+
+Here are some examples:
+
+Only execute for the ```master``` branch:
+
+ `only-master-branch.yml`
+{% highlight yaml %}
+build-step:
+ description: Building the image.
+ type: build
+ dockerfile: Dockerfile
+ image-name: someRepo/someUser
+ when:
+ branch:
+ only:
+ - master
+{% endhighlight %}
+
+Only execute for branches whose name begins with ```FB-``` prefix (feature branches):
+
+ `only-feature-branches.yml`
+{% highlight yaml %}
+build-step:
+ description: Building the image.
+ type: build
+ dockerfile: Dockerfile
+ image-name: someRepo/someUser
+ when:
+ branch:
+ only:
+ - /^FB-.*/i
+{% endhighlight %}
+
+Ignore the develop branch and master branch:
+
+ `ignore-master-and-develop-branch.yml`
+{% highlight yaml %}
+build-step:
+ description: Building the image.
+ type: build
+ dockerfile: Dockerfile
+ image-name: someRepo/someUser
+ when:
+ branch:
+ ignore:
+ - master
+ - develop
+{% endhighlight %}
+
+
+>We use [JavaScript regular expressions](https://developer.mozilla.org/en/docs/Web/JavaScript/Guide/Regular_Expressions) for the syntax in branch conditions.
+
+
+## Condition expressions
+
+Alternatively, you can use more advanced condition expressions.
+
+This follows the standard [condition expression syntax](#condition-expression-syntax). In this case, you can choose to execute if ```all``` expression conditions evaluate to ```true```, or to execute if ```any``` expression conditions evaluate to ```true```.
+
+> Note: Use "" around variables with text to avoid errors in processing the conditions. Example: "${{CF_COMMIT_MESSAGE}}"
+
+Here are some examples. Execute if the string ```[skip ci]``` is not part of the main repository commit message AND if the branch is ```master```
+
+ `all-conditions.yml`
+{% highlight yaml %}
+build-step:
+ description: Building the image.
+ type: build
+ dockerfile: Dockerfile
+ image-name: someRepo/someUser
+ when:
+ condition:
+ all:
+ noSkipCiInCommitMessage: 'includes(lower({% raw %}"${{CF_COMMIT_MESSAGE}}"{% endraw %}), "skip ci") == false'
+ masterBranch: '{% raw %}"${{CF_BRANCH}}{% endraw %}" == "master"'
+{% endhighlight %}
+
+Execute if the string ```[skip ci]``` is not part of the main repository commit message, OR if the branch is not a feature branch (i.e. name starts with FB-)
+
+ `any-condition.yml`
+{% highlight yaml %}
+build-step:
+ description: Building the image.
+ type: build
+ dockerfile: Dockerfile
+ image-name: someRepo/someUser
+ when:
+ condition:
+ any:
+ noSkipCiInCommitMessage: 'includes(lower({% raw %}"${{CF_COMMIT_MESSAGE}}"{% endraw %}), "skip ci") == false'
+ notFeatureBranch: 'match({% raw %}"${{CF_BRANCH}}"{% endraw %}, "^FB-", true) == false'
+{% endhighlight %}
+
+Each step in `codefresh.yml` file can contain conditions expressions that must be satisfied for the step to execute.
+
+This is a small example of where a condition expression can be used:
+ `YAML`
+{% highlight yaml %}
+step-name:
+ description: Step description
+ image: image/id
+ commands:
+ - bash-command1
+ - bash-command2
+ when:
+ condition:
+ all:
+ executeForMasterBranch: "{% raw %}'${{CF_BRANCH}}{% endraw %}' == 'master'"
+{% endhighlight %}
+
+### Condition expression syntax
+A condition expression is a basic expression that is evaluated to true/false (to decide whether to execute or not to execute), and can have the following syntax:
+
+#### Types
+
+{: .table .table-bordered .table-hover}
+| Type | True/False Examples | True/False |
+| ------- | ----------------------------------------- | --------------|
+| String  | True: "hello"  False: ""  | {::nomarkdown}<ul><li>String with content = true</li><li>Empty string = false</li><li>String comparison is lexicographic.</li></ul>{:/} |
+| Number | True: 5 True: 3.4 True: 1.79E+308 | {::nomarkdown}
{:/} |
+| Null | False: null | Always false |
+
+#### Variables
+
+You can use the User Provided variables as explained in [Variables]({{site.baseurl}}/docs/pipelines/variables/), including the [variables
+exposed by each individual pipeline step]({{site.baseurl}}/docs/pipelines/variables/#step-variables).
+
+#### Unary Operators
+
+{: .table .table-bordered .table-hover}
+| Operator | Operation |
+| ---------- | --------------------- |
+| `-` | Negation of numbers |
+| `!` | Logical NOT |
+
+#### Binary Operators
+
+{: .table .table-bordered .table-hover}
+| Operator | Operation |
+| --------------------------- | ----------- |
+| Add, String Concatenation | `+` |
+| Subtract | `-` |
+| Multiply | `*` |
+| Divide | `/` |
+| Modulus | `%` |
+| Logical AND | `&&` |
+| Logical OR | `||` |
+
+#### Comparisons
+
+{: .table .table-bordered .table-hover}
+| Operator | Operation |
+| ----------- | ---------------------- |
+| `==` | Equal to |
+| `!=` | Not equal to |
+| `>` | Greater than |
+| `>=` | Greater than or equal |
+| `<` | Less than |
+| `<=` | Less than or equal |
+
+#### Functions
+
+{: .table .table-bordered .table-hover}
+| Function Name | Parameters | Return value | Example |
+| ------------- | ------------------ | -------------- | ----------------------- |
+| String | 0: number or string | String of input value. | `String(40) == '40'` |
+| Number | 0: number or string | Number of input value. | `Number('50') == 50` `Number('hello')` is invalid |
+| Boolean | 0: number or string | Boolean of input value. | `Boolean('123') == true` `Boolean('') == false` `Boolean(583) == true` `Boolean(0) == false` |
+| round | 0: number | Rounded number. | `round(1.3) == 1` `round(1.95) == 2` |
+| floor | 0: number | Number rounded to floor. | `floor(1.3) == 1` `floor(1.95) == 1` |
+| upper | 0: string | String in upper case. | `upper('hello') == 'HELLO'` |
+| lower | 0: string | String in lower case. | `lower('BYE BYE') == 'bye bye'` |
+| trim | 0: string | Trimmed string. | `trim(" abc ") == "abc"` |
+| trimLeft | 0: string | Left-trimmed string. | `trimLeft(" abc ") == "abc "`|
+| trimRight | 0: string | Right-trimmed string. | `trimRight(" abc ") == " abc"` |
+| replace | 0: string - main string 1: string - substring to find 2: string - substring to replace | Replace all instances of the sub-string (1) in the main string (0) with the sub-string (2). | `replace('hello there', 'e', 'a') == 'hallo thara'`|
+| substring | 0: string - main string 1: string - index to start 2: string - index to end | Returns a sub-string of a string. | `substring("hello world", 6, 11) == "world"` |
+| length | string | Length of a string. | `length("gump") == 4` |
+| includes | 0: string - main string 1: string - string to search for | Whether a search string is located within the main string. | `includes("codefresh", "odef") == true` |
+| indexOf | 0: string - main string 1: string - string to search for | Index of a search string if it is found inside the main string | `indexOf("codefresh", "odef") == 1` |
+| match | 0: string - main string 1: string - regular expression string, [JS style](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions) (Note: in JS strings, the backslash `\` is an escape character so in order to use a literal backslash, you need to escape it. For example: `"^\\d+$"` instead of `"^\d+$"`) 2: boolean - ignore case | Search for a regular expression inside a string, ignoring or not ignoring case | `match("hello there you", "..ll.", false) == true` `match("hello there you", "..LL.", false) == false` `match("hello there you", "hell$", true) == false` `match("hello there you", "^hell", true) == true` `match("hello there you", "bye", false) == false` |
+| Variable | string | Search for the value of a variable | `Variable('some-clone')` |
+| Member | 0: string - variable name 1: string - member name | Search for the value of a variable member | `Member('some-clone', 'working-directory')` |
+
+## Execute steps according to the presence of a variable
+
+If a variable does not exist in a Codefresh pipeline, then it will simply stay as a string inside the definition. When the `{% raw %}${{MY_VAR}}{% endraw %}` variable is not available, the engine will literally print `{% raw %}${{MY_VAR}}{% endraw %}`, because that variable doesn't exist.
+
+You can use this mechanism to decide which steps will be executed if a [variable]({{site.baseurl}}/docs/pipelines/variables/) exists or not.
+
+
+
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: "1.0"
+steps:
+ step1:
+ title: "Running if variable exists"
+ type: "freestyle"
+ image: "alpine:3.9"
+ commands:
+ - echo "Step 1 is running"
+ when:
+ condition:
+ all:
+ whenVarExists: 'includes("${{MY_VAR}}", "{{MY_VAR}}") == false'
+ step2:
+ title: "Running if variable does not exist"
+ type: "freestyle"
+ image: "alpine:3.9"
+ commands:
+ - echo "Step 2 is running"
+ when:
+ condition:
+ all:
+ whenVarIsMissing: 'includes("${{MY_VAR}}", "{{MY_VAR}}") == true'
+{% endraw %}
+{% endhighlight %}
+
+Try running the pipeline above and see how it behaves when a variable called `MY_VAR` exists (or doesn't exist).
+
+>Notice that if you use this pattern a lot it means that you are trying to create a complex pipeline that is very smart. We suggest you create instead multiple [simple pipelines for the same project]({{site.baseurl}}/docs/ci-cd-guides/pull-request-branches/#trunk-based-development).
+
+## Related articles
+[Codefresh YAML]({{site.baseurl}}/docs/codefresh-yaml/what-is-the-codefresh-yaml/)
+[Variables]({{site.baseurl}}/docs/pipelines/variables/)
+[Pull Requests and Branches]({{site.baseurl}}/docs/ci-cd-guides/pull-request-branches/)
+[Pipeline/Step hooks]({{site.baseurl}}/docs/pipelines/hooks/)
diff --git a/_docs/pipelines/debugging-pipelines.md b/_docs/pipelines/debugging-pipelines.md
new file mode 100644
index 00000000..9ece11c5
--- /dev/null
+++ b/_docs/pipelines/debugging-pipelines.md
@@ -0,0 +1,250 @@
+---
+title: "Debugging pipelines"
+description: "How to pause and inspect pipelines"
+group: pipelines
+toc: true
+---
+
+In addition to [running pipelines locally]({{site.baseurl}}/docs/pipelines/running-pipelines-locally/), Codefresh also allows you to debug pipelines by stopping their execution and manually inspecting their state (files, environment variables, tools, etc.)
+
+
+The Codefresh pipeline debugger works similar to your IDE debugger. You can place breakpoints on one or more pipeline steps and once the pipeline hits one of them, it will stop. You will then get a terminal like interface inside your pipeline step where you can run any commands that you wish in order to understand the state of the container.
+
+
+{%
+ include image.html
+ lightbox="true"
+ file="/images/pipeline/debug/debug-session.png"
+ url="/images/pipeline/debug/debug-session.png"
+ alt="A debugging session"
+ caption="A debugging session"
+ max-width="70%"
+%}
+
+There are several options for defining exactly when a step will stop.
+
+## Entering the debugger mode
+
+There are three ways to enter the debugging mode in a pipeline. You can activate the debugging button when you run the pipeline:
+
+{%
+ include image.html
+ lightbox="true"
+ file="/images/pipeline/debug/run-pipeline-debug.png"
+ url="/images/pipeline/debug/run-pipeline-debug.png"
+ alt="Running a pipeline in debug mode"
+ caption="Running a pipeline in debug mode"
+ max-width="30%"
+%}
+
+Alternatively if a pipeline is already running normally, you can enter debugging mode by clicking on the bug icon on the top right.
+
+{%
+ include image.html
+ lightbox="true"
+ file="/images/pipeline/debug/enter-debug-mode.png"
+ url="/images/pipeline/debug/enter-debug-mode.png"
+ alt="Switching to debug mode"
+ caption="Switching to debug mode"
+ max-width="60%"
+%}
+
+You can restart a pipeline that has already finished in debug mode:
+
+{%
+ include image.html
+ lightbox="true"
+ file="/images/pipeline/debug/restart-in-debug.png"
+ url="/images/pipeline/debug/restart-in-debug.png"
+ alt="Restart in debug mode"
+ caption="Restart in debug mode"
+ max-width="70%"
+%}
+
+Now you are ready to place breakpoints in steps.
+
+
+## Placing breakpoints
+
+Once the debugging mode is active, all pipeline steps will get an extra breakpoint icon on the far right of their box.
+
+{%
+ include image.html
+ lightbox="true"
+ file="/images/pipeline/debug/breakpoint.png"
+ url="/images/pipeline/debug/breakpoint.png"
+ alt="A step breakpoint"
+ caption="A step breakpoint"
+ max-width="70%"
+%}
+
+
+You can click on this icon and define a breakpoint for this particular step. You have the following options
+
+* *Before* - place a breakpoint before the step is initialized
+* *Override* - place a breakpoint after the step has initialized but before its execution ([freestyle steps]({{site.baseurl}}/docs/pipelines/steps/freestyle/))
+* *After* - place a breakpoint after the step has finished execution.
+
+You can choose multiple debugging phases. In most cases the `Override` option is the most useful one. The `before` phase allows you to inspect
+a pipeline step even before [service containers]({{site.baseurl}}/docs/pipelines/service-containers/) are up.
+
+The `after` phase is useful if you want to verify files or variables after a step has finished its execution but before the next step starts.
+
+
+## Using the debugger terminal
+
+Once the pipeline reaches a step that has a breakpoint, execution will pause and a new debugger terminal will become available:
+
+{%
+ include image.html
+ lightbox="true"
+ file="/images/pipeline/debug/debug-window.png"
+ url="/images/pipeline/debug/debug-window.png"
+ alt="The debugging terminal"
+ caption="The debugging terminal"
+ max-width="60%"
+%}
+
+You can now manually type commands to inspect your container. If your Codefresh plan has the basic debugging capabilities you can run the following commands:
+
+* `cd, ls` to see files
+* `printenv` to see environment variables
+* `cat` to read files
+* `top` to see what is running
+* `export` and [cf_export]({{site.baseurl}}/docs/pipelines/variables/#using-cf_export-command) to create environment variables
+* `exit` to finish the debugging session
+
+If you have placed a breakpoint in the `override` phase of a freestyle step then the container image is the same as the one defined in the step. Therefore you can execute all tools that you have placed in the image (e.g. compilers, linters, test frameworks etc.)
+
+In all cases the [shared Codefresh volume]({{site.baseurl}}/docs/pipelines/introduction-to-codefresh-pipelines/#sharing-the-workspace-between-build-steps) is automounted so you can examine your source code or any other intermediate artifacts placed in your project folder or the pipeline cache.
+
+If the breakpoint is on a `before` or `after` phase, the command line terminal is powered by an [alpine](https://alpinelinux.org/) image. The image already includes useful tools such as `wget`, `nc` and `vi`. If you have the advanced debugging capabilities in your Codefresh plan you can then install additional tools on your own directly in the terminal with [apk](https://wiki.alpinelinux.org/wiki/Alpine_Linux_package_management). Examples:
+
+* `apk add curl`
+* `apk add nano`
+* `apk add go`
+* `apk add python`
+
+Use the command `apk search foo` to search for a package named foo.
+
+
+## Resuming execution
+
+Once you are happy with your debugging session, click the continue button to resume.
+
+{%
+ include image.html
+ lightbox="true"
+ file="/images/pipeline/debug/resume-button.png"
+ url="/images/pipeline/debug/resume-button.png"
+ alt="Continue execution button"
+ caption="Continue execution button"
+ max-width="60%"
+%}
+
+The pipeline will continue and then stop for the next breakpoint (if any). You can still revisit the debugger window for previous steps to see what debugging commands you had executed.
+
+>Notice that to conserve resources, there is a 15 minute limit on each open debug session. If you don't resume the pipeline within 15 minutes after hitting a breakpoint the whole pipeline will stop with a timeout error.
+
+It is important to understand that if you have chosen the `override` phase in a freestyle step, then the commands mentioned in the pipeline definition are completely ignored.
+
+## Using the alternative debug window
+
+If you enable the debugger on a freestyle step with the "override" option, Codefresh will install some extra tooling on the Docker image that is needed for the debugger itself.
+
+By default, the internal debugger tooling is using Node.js, so if your image is already based on Node.js, you might get version conflicts in your application.
+
+You can enable an alternative debugger by passing the variable `DEBUGGER_RUNNER = 2` on the whole pipeline:
+
+{%
+ include image.html
+ lightbox="true"
+ file="/images/pipeline/debug/alternative-debugger.png"
+ url="/images/pipeline/debug/alternative-debugger.png"
+ alt="Enabling the Python based debugger"
+ caption="Enabling the Python based debugger"
+ max-width="60%"
+%}
+
+This debugger is based on Python instead of Node.js and it can work with both Python 2 and 3 Docker images.
+This way the debugger tools will not affect your application. You can also use the same method in a specific freestyle step like this:
+
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+steps:
+ hello_world_step:
+ title: freestyle step
+ image: node:11.1
+ environment:
+ - 'DEBUGGER_RUNNER=2'
+{% endraw %}
+{% endhighlight %}
+
+
+
+
+
+## Inserting breakpoints in the pipeline definition
+
+It is also possible to mention breakpoints in the Codefresh YAML instead of using the UI. Breakpoints mentioned in the `codefresh.yml` file have no effect when the pipeline is not running in Debug mode. You need to run the pipeline in debug mode in order for them to stop the pipeline.
+
+Here is the syntax:
+
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+stages:
+ - prepare
+ - build
+ - test
+steps:
+ main_clone:
+ title: Cloning main repository...
+ type: git-clone
+ repo: 'codefresh-contrib/python-flask-sample-app'
+ revision: 'master'
+ git: github
+ stage: prepare
+ MyAppDockerImage:
+ title: Building Docker Image
+ type: build
+ stage: build
+ image_name: my-app-image
+ working_directory: ./
+ tag: 'master'
+ dockerfile: Dockerfile
+ debug:
+ phases:
+ before: true
+ after: false
+ MyUnitTests:
+ title: Running Unit tests
+ stage: test
+ image: '${{MyAppDockerImage}}'
+ debug:
+ phases:
+ before: false
+ override: true
+ after: false
+ commands:
+ - python setup.py test
+{% endraw %}
+{% endhighlight %}
+
+Once you run this pipeline in debug mode, it will automatically have breakpoints in the respective steps (but you can still override/change them using the GUI).
+
+
+## Troubleshooting
+
+The debugger window needs some extra tools in a Docker image in order to work (such as the `bash` shell). Codefresh automatically installs these tools on your image without any configuration.
+
+If you get the message *your linux distribution is not supported* please contact us so that we can examine your docker image and make sure it is compatible with the Codefresh debugger.
+
+
+## Related articles
+[Codefresh YAML]({{site.baseurl}}/docs/pipelines/what-is-the-codefresh-yaml/)
+[Pipeline steps]({{site.baseurl}}/docs/pipelines/steps/)
+[Running pipelines locally]({{site.baseurl}}/docs/pipelines/running-pipelines-locally/)
diff --git a/_docs/pipelines/docker-image-metadata.md b/_docs/pipelines/docker-image-metadata.md
new file mode 100644
index 00000000..02cad04a
--- /dev/null
+++ b/_docs/pipelines/docker-image-metadata.md
@@ -0,0 +1,217 @@
+---
+title: "Docker image metadata"
+description: "How to use custom metadata in your Docker images"
+group: pipelines
+redirect_from:
+ - /docs/metadata-annotations/
+ - /docs/docker-registries/metadata-annotations/
+toc: true
+---
+Images built by Codefresh can be annotated with customized metadata.
+This article explains how to create an advanced view of your images and enrich them with custom metadata which perfectly fits your flow and image management process.
+
+{%
+ include image.html
+ lightbox="true"
+ file="/images/codefresh-yaml/docker-image-metadata/metadata.png"
+ url="/images/codefresh-yaml/docker-image-metadata/metadata.png"
+ alt="Codefresh Docker registry metadata"
+ max-width="65%"
+%}
+
+>We have since expanded this feature and now you are able to add custom annotations to [pipelines and builds as well]({{site.baseurl}}/docs/pipelines/annotations/). Notice also that the syntax shown in this page is deprecated but still supported. For the new syntax
+see [Hooks in pipelines]({{site.baseurl}}/docs/pipelines/hooks/).
+
+## Metadata types
+
+Images built by Codefresh can be annotated with an array of key-value metadata.
+Metadata values may be of the following types:
+
+{: .table .table-bordered .table-hover}
+| Annotation type | Guidelines | Example |
+| --------------- | ------------------------------------------------ | -------------------------------------------------------- |
+| String | Use string | 'Example note' |
+| Number | use numeric value to set this kind of annotation | 9999 |
+| Boolean | Use true / false value | true |
+| Percentage bar | use 0-100 value ending with % | 85% |
+| Link | use url | {% raw %}`${{CF_COMMIT_URL}}`{% endraw %} |
+
+You can also use [Expression evaluations]({{site.baseurl}}/docs/pipelines/conditional-execution-of-steps/#condition-expression-syntax) to set metadata.
+
+## Annotate your images using Codefresh YAML
+You can annotate an image as part of its build process and also on post build steps.
+
+{:.text-secondary}
+### Build step Image Metadata Annotation
+You can annotate an image as part of its build process by declaring the metadata value on the [Build step]({{site.baseurl}}/docs/pipelines/steps/build/):
+1. The `metadata` attribute
+2. The `set` operation
+3. An array of key-value metadata
+
+ `build-metadata-annotation`
+{% highlight yaml %}
+build_step:
+ type: build
+ ...
+ metadata: # Declare the metadata attribute
+ set: # Specify the set operation
+ - qa: pending
+ - commit_message: {% raw %}${{CF_COMMIT_MESSAGE}}{% endraw %}
+ - exit_code: 0
+ - is_main:
+ evaluate: "{% raw %}'${{CF_BRANCH}}{% endraw %}' == 'main'"
+{% endhighlight %}
+
+{:.text-secondary}
+### Adding annotations to Built images on post-build steps
+Any step in the YAML workflow can annotate built images by using [Post-Step Operations]({{site.baseurl}}/docs/pipelines/post-step-operations/).
+To annotate a built image, configure any step with:
+1. The post-step operation
+2. The `metadata` attribute
+3. The `set` operation
+4. A list of target images with the variable syntax of {% raw %}`${{build_step_name.imageId}}`{% endraw %}
+5. An array of key-value metadata
+
+ `annotating-step`
+{% highlight yaml %}
+build_step:
+ type: build
+ ...
+
+any_step:
+ ...
+ on_success: # Execute only once the step succeeded
+ metadata: # Declare the metadata attribute
+ set: # Specify the set operation
+ - {% raw %}${{build_step.imageId}}{% endraw %}: # Select any number of target images
+ - qa: pending
+
+ on_fail: # Execute only once the step failed
+ metadata: # Declare the metadata attribute
+ set: # Specify the set operation
+ - {% raw %}${{build_step.imageId}}{% endraw %}: # Select any number of target images
+ - exit_code: 1
+
+ on_finish: # Execute in any case
+ metadata: # Declare the metadata attribute
+ set: # Specify the set operation
+ - {% raw %}${{build_step.imageId}}{% endraw %}: # Select any number of target images
+ - is_main:
+ evaluate: "{% raw %}'${{CF_BRANCH}}'{% endraw %} == 'main'"
+{% endhighlight %}
+
+### Example - Quality Image Metadata Annotation
+You can set a quality indicator to images to show if they passed or failed tests. An image with the boolean annotation `CF_QUALITY` set to true will have a quality indicator in the 'Images' view.
+
+ `YAML`
+{% highlight yaml %}
+version: '1.0'
+steps:
+ build_step:
+ type: build
+ image_name: myrepo/imagename
+ working_directory: ./
+ dockerfile: Dockerfile
+
+ unit_test:
+ image: {% raw %}'${{build_step}}'{% endraw %}
+ working_directory: IMAGE_WORK_DIR
+ commands:
+ - echo test
+ on_success:
+ metadata:
+ set:
+ - {% raw %}'${{build_step.imageId}}'{% endraw %}:
+ - CF_QUALITY: true
+ on_fail:
+ metadata:
+ set:
+ - {% raw %}'${{build_step.imageId}}'{% endraw %}:
+ - CF_QUALITY: false
+{% endhighlight %}
+
+Image quality has 3 indicators:
+* True - this image is considered a quality image (e.g. passed tests).
+* False - this image is not considered a quality image (e.g. when tests failed but the image was already built).
+* No value (nobody set the annotation) - this image has no quality indicator.
+
+{% include image.html lightbox="true" file="/images/c39a9a2-QUALI.png" url="/images/c39a9a2-QUALI.png" alt="QUALI" max-width="40%" %}
+
+
+## Viewing Image Metadata Annotations
+You can view an image's metadata annotation by:
+1. Navigating to the `Images` view
+2. Selecting the target image
+3. Selecting the `Annotations` tab
+
+{%
+ include image.html
+ lightbox="true"
+ file="/images/codefresh-yaml/docker-image-metadata/annotations.png"
+ url="/images/codefresh-yaml/docker-image-metadata/annotations.png"
+ alt="Image annotations"
+ max-width="65%"
+%}
+
+In addition, you can add selected annotations to the images table on the Images page. To display an annotation in the image table, click the gear icon at the top right corner of the Images page, and then select the annotations you want to display.
+
+{% include image.html lightbox="true" file="/images/aec92e8-Screen_Shot_2017-10-17_at_3.01.26_PM.png" url="/images/aec92e8-Screen_Shot_2017-10-17_at_3.01.26_PM.png" alt="Screen Shot 2017-10-08 at 8.28.35 AM.png" max-width="40%" %}
+
+
+## Annotating images programmatically
+
+It is also possible to annotate images with the [Codefresh CLI](https://codefresh-io.github.io/cli/).
+
+First find the id of an image that you wish to annotate with the command
+
+```
+codefresh get images
+```
+
+You can also search for a specific image by name:
+
+```
+$ codefresh get images --image-name custom
+ID NAME TAG CREATED SIZE PULL
+b5f103a87856 my-custom-docker-image bla Fri Feb 01 2019 91.01 MB r.cfcr.io/kostis-codefresh/my-custom-docker-image:bla
+```
+Then once you have the ID of the image you can use the [annotate command](https://codefresh-io.github.io/cli/images/annotate-image/) to add extra metadata:
+
+```
+codefresh annotate image b5f103a87856 -l coverage=75
+```
+
+## Using custom metadata in Codefresh pipelines
+
+You can also use the Codefresh CLI to fetch existing metadata from images. It is then very easy to extract and process specific fields with [yq](https://github.com/kislyuk/yq)
+
+Here is an example
+```
+$ codefresh get image b5f103a87856 --output=yaml | yq -r .annotations.coverage
+75
+```
+
+You can then easily process the metadata (e.g. with scripts) and take decisions according to them. Here is an example
+step that will fail the build if test coverage on an image is less than 80%
+
+ `YAML`
+{% highlight yaml %}
+version: '1.0'
+steps:
+ findLabel:
+ title: Get image label for coverage
+ image: codefresh/cli
+ commands:
+ - export MY_COVERAGE=$(codefresh get image b5f103a87856 --output=yaml | yq -r .annotations.coverage)
+ - echo "Coverage is $MY_COVERAGE"
+ - if [[ $MY_COVERAGE -lt "80" ]]; then exit 1 ; fi
+
+{% endhighlight %}
+
+The possibilities are endless as you can take any combination of image metadata and use any complex conditional
+in order to process them in a Codefresh pipeline.
+
+
+## Related articles
+[External Docker Registries]({{site.baseurl}}/docs/docker-registries/external-docker-registries/)
+[Accessing a Docker registry from your Kubernetes cluster]({{site.baseurl}}/docs/deploy-to-kubernetes/access-docker-registry-from-kubernetes/)
diff --git a/_docs/pipelines/docker-operations.md b/_docs/pipelines/docker-operations.md
deleted file mode 100644
index 4678a46d..00000000
--- a/_docs/pipelines/docker-operations.md
+++ /dev/null
@@ -1,8 +0,0 @@
----
-title: "Using Docker"
-description: ""
-group: pipelines
-toc: true
----
-
-Coming soon
diff --git a/_docs/ci-cd-guides/first-pipeline.md b/_docs/pipelines/first-pipeline.md
similarity index 100%
rename from _docs/ci-cd-guides/first-pipeline.md
rename to _docs/pipelines/first-pipeline.md
diff --git a/_docs/pipelines/hooks.md b/_docs/pipelines/hooks.md
new file mode 100644
index 00000000..5505f688
--- /dev/null
+++ b/_docs/pipelines/hooks.md
@@ -0,0 +1,634 @@
+---
+title: "Hooks in pipelines"
+description: "Execute commands before/after each pipeline or step"
+group: pipelines
+toc: true
+---
+
+Hooks in pipelines allow you to run specific actions at the end and the beginning of the pipeline, as well as before/after a step.
+
+Hooks can be a [freestyle step]({{site.baseurl}}/docs/pipelines/steps/freestyle/), as you need to define:
+
+1. A Docker image that will be used to run specific commands.
+1. One or more commands to run within the context of that Docker image.
+
+For simple commands we suggest you use a small image such as `alpine`, but any Docker image can be used in hooks.
+
+## Pipeline hooks
+
+Codefresh allows you to run a specific step before each pipeline as well as after it has finished.
+
+### Running a step at the end of the pipeline
+
+You can easily run a step at the end of the pipeline, that will execute even if one of the steps has failed (and thus the pipeline is stopped in the middle):
+
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: "1.0"
+hooks:
+ on_finish:
+ exec:
+ image: alpine:3.9
+ commands:
+ - echo "cleanup after end of pipeline"
+
+steps:
+ step1:
+ title: "Step 1"
+ type: "freestyle"
+ image: node:10-buster
+ commands:
+ - echo "Hello world"
+ step2:
+ title: "Step 2"
+ type: "freestyle"
+ image: node:10-buster
+ commands:
+ - echo "There was an error"
+ - exit 1
+{% endraw %}
+{% endhighlight %}
+
+In the example above we define a hook for the whole pipeline that will run a step (the `exec` keyword) inside `alpine:3.9` and will simply execute an `echo` command. Because we have used the `on_finish` keyword, this step will execute even if the whole pipeline fails.
+
+This scenario is very common if you have a cleanup step or a notification step that you always want to run at the end of the pipeline. You will see the cleanup logs in the top pipeline step.
+
+ {% include
+image.html
+lightbox="true"
+file="/images/codefresh-yaml/hooks/cleanup-step.png"
+url="/images/codefresh-yaml/hooks/cleanup-step.png"
+alt="Running a cleanup step"
+caption="Running a cleanup step"
+max-width="80%"
+%}
+
+Apart from the `on_finish` keyword you can also use `on_success` and `on_fail` if you want the step to only execute according to a specific result of the pipeline. It is also possible to use multiple hooks at the same time:
+
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: "1.0"
+hooks:
+ on_finish:
+ exec:
+ image: alpine:3.9
+ commands:
+ - echo "cleanup after end of pipeline"
+ on_success:
+ exec:
+ image: alpine:3.9
+ commands:
+ - echo "Send a notification only if pipeline was successful"
+ on_fail:
+ exec:
+ image: alpine:3.9
+ commands:
+ - echo "Send a notification only if pipeline has failed"
+steps:
+ step1:
+ title: "Step 1"
+ type: "freestyle"
+ image: node:10-buster
+ commands:
+ - echo "Hello world"
+ step2:
+ title: "Step 2"
+ type: "freestyle"
+ image: node:10-buster
+ commands:
+ - echo "There was an error"
+ - exit 1 #Comment this line out to see how hooks change
+
+{% endraw %}
+{% endhighlight %}
+
+Note that if you have multiple hooks like the example above, the `on_finish` segment will always execute after any `on_success`/`on_fail` segments (if they are applicable).
+
+
+### Running a step at the start of the pipeline
+
+Similar to the end of the pipeline, you can also execute a step at the beginning of the pipeline with the `on_elected` keyword:
+
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: "1.0"
+hooks:
+ on_elected:
+ exec:
+ image: alpine:3.9
+ commands:
+ - echo "Creating an adhoc test environment"
+ on_finish:
+ exec:
+ image: alpine:3.9
+ commands:
+ - echo "Destroying test environment"
+steps:
+ step1:
+ title: "Step 1"
+ type: "freestyle"
+ image: node:10-buster
+ commands:
+ - echo "Running Integration tests on test environment"
+ step2:
+ title: "Step 2"
+ type: "freestyle"
+ image: node:10-buster
+ commands:
+ - echo "Running acceptance tests on test environment"
+
+{% endraw %}
+{% endhighlight %}
+
+All pipeline hooks will be shown in the "initializing process" logs:
+
+ {% include
+image.html
+lightbox="true"
+file="/images/codefresh-yaml/hooks/before-pipeline.png"
+url="/images/codefresh-yaml/hooks/before-pipeline.png"
+alt="Hooks before a pipeline"
+caption="Hooks before a pipeline"
+max-width="80%"
+%}
+
+It is possible to define all possible hooks (`on_elected`, `on_finish`, `on_success`, `on_fail`) in a single pipeline, if this is required by your development process.
+
+## Step hooks
+
+Hooks can also be defined for individual steps inside a pipeline. This capability allows for more granular control on defining prepare/cleanup phases for specific steps.
+
+The syntax for step hooks is the same as pipeline hooks (`on_elected`, `on_finish`, `on_success`, `on_fail`), you just need to put the respective segment under a step instead of the root of the pipeline.
+
+For example, this pipeline will always run a cleanup step after integration tests (even if the tests themselves fail).
+
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: "1.0"
+steps:
+ step1:
+ title: "Compile application"
+ type: "freestyle"
+ image: node:10-buster
+ commands:
+ - echo "Building application"
+ step2:
+ title: "Unit testing"
+ type: "freestyle"
+ image: node:10-buster
+ commands:
+ - echo "Running unit tests"
+ hooks:
+ on_finish:
+ exec:
+ image: alpine:3.9
+ commands:
+ - echo "Create test report"
+ step3:
+ title: "Uploading artifact"
+ type: "freestyle"
+ image: node:10-buster
+ commands:
+ - echo "Upload to artifactory"
+{% endraw %}
+{% endhighlight %}
+
+
+Logs for step hooks are shown in the log window of the step itself.
+
+ {% include
+image.html
+lightbox="true"
+file="/images/codefresh-yaml/hooks/step-after.png"
+url="/images/codefresh-yaml/hooks/step-after.png"
+alt="Hooks before a pipeline"
+caption="Hooks before a pipeline"
+max-width="80%"
+%}
+
+As with pipeline hooks, it is possible to define multiple hook conditions for each step.
+
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: "1.0"
+steps:
+ step1:
+ title: "Compile application"
+ type: "freestyle"
+ image: node:10-buster
+ commands:
+ - echo "Building application"
+ step2:
+ title: "Security scanning"
+ type: "freestyle"
+ image: node:10-buster
+ commands:
+ - echo "Running Security scan"
+ hooks:
+ on_elected:
+ exec:
+ image: alpine:3.9
+ commands:
+ - echo "Authenticating to security scanning service"
+ on_finish:
+ exec:
+ image: alpine:3.9
+ commands:
+ - echo "Uploading security scan report"
+ on_fail:
+ exec:
+ image: alpine:3.9
+ commands:
+ - echo "Sending slack notification"
+
+{% endraw %}
+{% endhighlight %}
+
+The order of events in the example above is the following:
+
+1. The `on_elected` segment executes first (authentication)
+1. The step itself executes (the security scan)
+1. The `on_fail` segment executes (only if the step throws an error code)
+1. The `on_finish` segment always executes at the end
+
+
+## Running steps/plugins in hooks
+
+Hooks can use [steps/plugins](https://steps.codefresh.io). With plugins you have to specify:
+
+- The type field for the step/plugin.
+- The arguments needed for the step/plugin.
+
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: "1.0"
+
+hooks: #run slack-notifier hook on build completion
+ on_finish:
+ steps:
+ exec:
+ type: slack-notifier
+ arguments:
+ SLACK_HOOK_URL: '${{SLACK_WEBHOOK_URL}}'
+ SLACK_TEXT: '${{SLACK_TEXT}}'
+
+steps:
+ step1:
+ title: "Freestyle step"
+ type: "freestyle"
+ image: alpine
+ commands:
+ - echo "Codefresh"
+ hooks: #run slack-notifier hook on step completion
+ on_finish:
+ steps:
+ exec:
+ type: slack-notifier
+ arguments:
+ SLACK_HOOK_URL: '${{SLACK_WEBHOOK_URL}}'
+ SLACK_TEXT: '${{SLACK_TEXT}}'
+{% endraw %}
+{% endhighlight %}
+
+## Controlling errors inside pipeline/step hooks
+
+By default if a step fails within a pipeline, the whole pipeline will stop and be marked as failed.
+This is true for `on_elected` segments as well. If they fail, then the whole pipeline will fail (regardless of the position of the segment in a pipeline or step). However, this only applies to `on_elected` segments.
+`on_success`, `on_fail` and `on_finish` segments do not affect the pipeline outcome at all, and a pipeline will continue even if one of these segments fails.
+
+For example the following pipeline will fail right away, because the pipeline hook fails at the beginning.
+
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: "1.0"
+hooks:
+ on_elected:
+ exec:
+ image: alpine:3.9
+ commands:
+ - echo "failing on purpose"
+ - exit 1
+steps:
+ step1:
+ title: "Step 1"
+ type: "freestyle"
+ image: node:10-buster
+ commands:
+ - echo "Running Integration tests on test environment"
+{% endraw %}
+{% endhighlight %}
+
+You can change this behavior by using the existing [fail_fast property]({{site.baseurl}}/docs/pipelines/what-is-the-codefresh-yaml/#execution-flow) inside an `on_elected` hook.
+
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: "1.0"
+hooks:
+ on_elected:
+ exec:
+ image: alpine:3.9
+ fail_fast: false
+ commands:
+ - echo "failing on purpose"
+ - exit 1
+steps:
+ step1:
+ title: "Step 1"
+ type: "freestyle"
+ image: node:10-buster
+ commands:
+ - echo "Running Integration tests on test environment"
+{% endraw %}
+{% endhighlight %}
+
+This pipeline will now execute successfully and `step1` will still run as normal, because we have used the `fail_fast` property. You can also use the `fail_fast` property on step hooks as well:
+
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: "1.0"
+steps:
+ step1:
+ title: "Step 1"
+ type: "freestyle"
+ image: node:10-buster
+ commands:
+ - echo "Running Integration tests on test environment"
+ hooks:
+ on_elected:
+ exec:
+ image: alpine:3.9
+ fail_fast: false
+ commands:
+ - echo "failing on purpose"
+ - exit 1
+{% endraw %}
+{% endhighlight %}
+
+
+>Notice that the `fail_fast` property is only available for `on_elected` hooks. The other types of hooks (`on_finish`, `on_success`, `on_fail`) do not affect the outcome of the pipeline in any way. Even if they fail, the pipeline will continue running to completion. This behavior is not configurable.
+
+
+## Using multiple steps for hooks
+
+In all the previous examples, each hook was a single step running on a single Docker image. You can also define multiple steps for each hook. This is possible by inserting an extra `steps` keyword inside the hook and listing multiple Docker images under it:
+
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: "1.0"
+hooks:
+ on_finish:
+ steps:
+ mycleanup:
+ image: alpine:3.9
+ commands:
+ - echo "echo cleanup step"
+ mynotification:
+ image: cloudposse/slack-notifier
+ commands:
+ - echo "Notify slack"
+steps:
+ step1:
+ title: "Step 1"
+ type: "freestyle"
+ image: node:10-buster
+ commands:
+ - echo "Running Integration tests on test environment"
+{% endraw %}
+{% endhighlight %}
+
+By default all steps in a single hook segment are executed one after the other. But you can also run them in [parallel]({{site.baseurl}}/docs/pipelines/advanced-workflows/#inserting-parallel-steps-in-a-sequential-pipeline):
+
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: "1.0"
+steps:
+ step1:
+ title: "Compile application"
+ type: "freestyle"
+ image: node:10-buster
+ commands:
+ - echo "Building application"
+ step2:
+ title: "Unit testing"
+ type: "freestyle"
+ image: node:10-buster
+ commands:
+ - echo "Running Integration tests"
+ - exit 1
+ hooks:
+ on_fail:
+ mode: parallel
+ steps:
+ upload-my-artifact:
+ image: maven:3.5.2-jdk-8-alpine
+ commands:
+ - echo "uploading artifact"
+ my-report:
+ image: alpine:3.9
+ commands:
+ - echo "creating test report"
+{% endraw %}
+{% endhighlight %}
+
+You can use multiple steps in a hook in both the pipeline and the step level.
+
+
+## Using annotations and labels in hooks
+
+The hook syntax can also be used as a unified interface for encompassing the existing syntax of [build annotations]({{site.baseurl}}/docs/pipelines/annotations/) and [metadata]({{site.baseurl}}/docs/pipelines/docker-image-metadata/).
+
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: "1.0"
+hooks:
+ on_elected:
+ exec:
+ image: alpine:3.9
+ commands:
+ - echo "Creating an adhoc test environment"
+ annotations:
+ set:
+ - entity_type: build
+ annotations:
+ - my_annotation_example1: 10.45
+ - my_string_annotation: Hello World
+steps:
+ clone:
+ title: Cloning source code
+ type: git-clone
+ arguments:
+ repo: 'codefresh-contrib/golang-sample-app'
+ revision: master
+ build-image:
+ type: build
+ image_name: my-golang-image
+ working_directory: '${{clone}}'
+ tag: master
+ hooks:
+ on_success:
+ exec:
+ image: alpine:3.9
+ commands:
+ - echo "Scanning docker image"
+ metadata: # setting metadata
+ set:
+ - '${{build-image.imageId}}':
+ - status: 'Success'
+{% endraw %}
+{% endhighlight %}
+
+Note however, that if you decide to use annotations and metadata inside hooks, you cannot mix and match the old syntax with the new syntax.
+
+The following pipeline is **NOT** valid:
+
+`invalid-codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: "1.0"
+steps:
+ test:
+ image: alpine
+ on_success: # you cannot use old style together with hooks
+ annotations:
+ set:
+ - entity_type: build
+ annotations:
+ - status: 'success'
+ commands:
+ - echo block
+ hooks:
+ on_success:
+ annotations:
+ set:
+ - entity_type: build
+ annotations:
+ - status: 'success'
+{% endraw %}
+{% endhighlight %}
+
+The pipeline is not correct, because the first segment of annotations is directly under `on_success` (the old syntax), while the second segment is under `hooks/on_success` (the new syntax).
+
+
+## Syntactic sugar syntax
+
+To simplify the syntax for hooks, the following simplifications are also offered:
+
+If you do not want to use metadata or annotations in your hook the keyword `exec` can be omitted:
+
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: "1.0"
+hooks:
+ on_finish: # no exec keyword
+ image: notifications:master
+ commands:
+ - ./send_workflow_finished.js
+steps:
+ build:
+ type: build
+ image_name: my_image
+ tag: master
+ hooks:
+ on_fail: # no exec keyword
+ image: notifications:master
+ commands:
+ - ./send_build_failed.js
+{% endraw %}
+{% endhighlight %}
+
+
+If you do not want to specify the Docker image you can simply omit it. Codefresh will use the `alpine` image in that case to run the hook:
+
+
+ `codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: "1.0"
+hooks:
+ on_elected:
+ exec: # no image keyword - alpine image will be used
+ - echo "Pipeline starting"
+steps:
+ build:
+ type: build
+ image_name: my_image
+ tag: master
+ hooks:
+ on_success: # no image keyword - alpine image will be used
+ exec:
+ - echo "Docker image was built successfully"
+ annotations:
+ set:
+ - entity_type: build
+ annotations:
+ - status: 'Success'
+{% endraw %}
+{% endhighlight %}
+
+
+ If you don't use metadata or annotations, you can also completely remove the `exec` keyword and just mention the commands you want to run (`alpine` image will be used by default):
+
+ `codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: "1.0"
+hooks:
+ on_elected: # no exec/image keyword - alpine image will be used
+ - echo "Pipeline starting"
+steps:
+ build:
+ type: build
+ image_name: my_image
+ tag: master
+ hooks:
+ on_success: # no exec/image keyword - alpine image will be used
+ - echo "Docker image was built successfully"
+{% endraw %}
+{% endhighlight %}
+
+## Using Type Steps / Plugins in hooks
+
+You can use a type step / plugins in hooks. With this you will need to change `exec` into `steps` with the information needed for the step.
+
+Below is an example pipeline hook using the `slack-notifier` step/plugin for when the pipeline starts.
+
+```yaml
+hooks:
+ on_elected:
+ steps:
+      slack_pending:
+        type: slack-notifier
+        arguments:
+          SLACK_HOOK_URL: {% raw %}'${{SLACK_WEBHOOK_URL}}'{% endraw %}
+          SLACK_TEXT: '*Build Started* :crossed_fingers:'
+```
+
+## Limitations of pipeline/step hooks
+
+With the current implementation of hooks, the following limitations are present:
+
+* The [debugger]({{site.baseurl}}/docs/pipelines/debugging-pipelines/) cannot inspect commands inside hook segments
+* Hooks are not supported for [parallel steps]({{site.baseurl}}/docs/pipelines/advanced-workflows/)
+* Storage integrations don't resolve in hooks (for example, [test reports]({{site.baseurl}}/docs/testing/test-reports/#producing-allure-test-reports-from-codefresh-pipelines))
+* Step hooks do not support the `working_directory` field, e.g. `working_directory: ${{clone}}`
+
+## Related articles
+[Conditional Execution of Steps]({{site.baseurl}}/docs/pipelines/conditional-execution-of-steps/)
+[Working Directories]({{site.baseurl}}/docs/pipelines/working-directories/)
+[Annotations]({{site.baseurl}}/docs/pipelines/annotations/)
+
+
diff --git a/_docs/pipelines/introduction-to-codefresh-pipelines.md b/_docs/pipelines/introduction-to-codefresh-pipelines.md
new file mode 100644
index 00000000..c8d26498
--- /dev/null
+++ b/_docs/pipelines/introduction-to-codefresh-pipelines.md
@@ -0,0 +1,336 @@
+---
+title: "Introduction to Codefresh pipelines"
+description: "Understand how Codefresh pipelines work"
+group: pipelines
+redirect_from:
+ - /docs/introduction-to-codefresh-pipelines/
+ - /docs/configure-ci-cd-pipeline/
+toc: true
+---
+
+
+The central component of the Codefresh platform for continuous integration (CI) are pipelines. Pipelines are workflows that contain individual steps, with each step responsible for a specific action in the CI process.
+
+Use CI pipelines to:
+
+* Compile and package code
+* Build Docker images
+* Push Docker images to any [Docker Registry]({{site.baseurl}}/docs/docker-registries/external-docker-registries/)
+* Deploy applications/artifacts to VMs, Kubernetes clusters, FTP sites, S3 buckets etc.
+* Run [unit tests]({{site.baseurl}}/docs/testing/unit-tests/), [integration tests]({{site.baseurl}}/docs/testing/integration-tests/), acceptance tests etc.
+* Any custom action that you define
+
+{% include
+image.html
+lightbox="true"
+file="/images/codefresh-yaml/stages/complex-pipeline.png"
+url="/images/codefresh-yaml/stages/complex-pipeline.png"
+alt="Codefresh CI pipelines"
+caption="Codefresh CI pipelines"
+max-width="90%"
+%}
+
+## Why are Codefresh CI pipelines different?
+
+Codefresh offers unique characteristics in CI pipelines that serve as the cornerstone of the build/deploy process:
+
+1. All [steps]({{site.baseurl}}/docs/pipelines/steps/) in Codefresh pipelines are executed inside a Docker container of your choosing.
+1. All steps in Codefresh share the same "workspace" in the form of a shared Docker volume.
+1. The shared Docker volume is automatically cached between pipeline executions.
+1. Every successful pipeline automatically pushes its Docker image to the default Docker registry defined in your account.
+1. Codefresh has a distributed Docker cache for all build nodes and caches layers similar to the docker daemon on your workstation. This is fully automated, and does not need to be configured to activate it.
+
+### Using Docker containers as build tooling
+
+Unlike traditional solutions, Codefresh was built from the ground up to have full Docker support. All Codefresh pipelines
+deal with Docker images, either using them as runtime tools or creating them as deployment artifacts.
+Everything that happens in Codefresh uses a Docker image behind the scenes.
+
+It is important that you understand how to take advantage of Docker-based pipelines as they are much more powerful than
+traditional VM solutions. The capability to define your own tooling cannot be understated. It is the fastest way to take
+full control of your build tools and to upgrade them easily.
+
+With traditional VM-based build solutions, you are constrained on the build and deployment tools provided by the vendor.
+If for example you need a new version of Node/Java/Python other than the one that is provided on the build slave, you have to wait for your vendor to add it. If you need to use a special tool (e.g., terraform, gcloud) and the vendor does
+not support it, you are out of luck.
+
+With Codefresh you don't have to care about what is installed in the Runners that execute your builds. They can run *any* Docker image of your choosing. You are free to update the version of the image used at any given time.
+
+Here is an example:
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/introduction/steps-example1.png"
+url="/images/pipeline/introduction/steps-example1.png"
+alt="Pipeline with three steps"
+caption="Pipeline with three steps"
+max-width="70%"
+%}
+
+
+1. The first step runs under the context of a Node image that prepares the application and runs [unit tests]({{site.baseurl}}/docs/testing/unit-tests/).
+1. The second step uses an image with s3 command line tools and uploads the test results to a bucket that holds test reports.
+1. The helm step creates a Helm chart and pushes it to a Helm repository.
+
+You don't need to contact Codefresh and ask them to add the S3 executable on the build runners. You just use a prebuilt Docker image that contains it. The version used for Node is defined by you and if you wish to upgrade to another version
+you simply change the definition of the pipeline.
+
+
+Here is another example:
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/introduction/steps-example2.png"
+url="/images/pipeline/introduction/steps-example2.png"
+alt="Codefresh steps example 2"
+caption="Pipeline with 4 steps"
+max-width="70%"
+%}
+
+1. The first step runs under the context of a Maven image that compiles the code and creates an executable.
+1. The second step uses a Docker image that contains terraform and creates a single ECS instance in AWS.
+1. The third step uses a custom Docker image that deploys to the ECS container that was just created.
+1. The last step uploads the Maven reports that were created in step 1 to an FTP site.
+
+You should now start seeing the endless possibilities. You can mix and match any Docker image (either a public one
+or your own) to use a build context in your step. This makes Codefresh a future-proof solution for all build tools
+that exist now and all of them that will appear in the future. As long as there is a Docker image for a tool, Codefresh
+can use it in a pipeline without any extra configuration.
+
+Codefresh also offers a [marketplace](https://codefresh.io/steps/){:target="\_blank"} with several existing plugins.
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/plugin-directory.png"
+url="/images/pipeline/plugin-directory.png"
+alt="Codefresh steps directory"
+caption="Codefresh steps directory"
+max-width="80%"
+%}
+
+
+All plugins in the marketplace are open-source, and we accept external contributions so you can easily add your own.
+
+
+### Sharing the workspace between build steps
+
+We have seen in the previous section that Codefresh can use Docker images as the context of a build step. The second
+important point to understand regarding Codefresh CI pipelines is that the default workspace of each step is shared between all steps in a pipeline.
+
+This happens via a Docker volume which is attached to all Docker containers that represent each step. This volume is
+always available at `/codefresh/volume`, and is used as the parent folder where the project is cloned.
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/introduction/codefresh-volume.png"
+url="/images/pipeline/introduction/codefresh-volume.png"
+alt="Codefresh volume"
+caption="All steps share the same volume"
+max-width="90%"
+%}
+
+Anything in this volume is available to all steps of the pipeline (as well as to subsequent executions of the same pipeline as we will see later).
+
+Again, this places Codefresh ahead of traditional solutions that execute build steps in a completely isolated manner.
+In traditional VM-based builds, using artifacts produced from one step in another step, is a complicated process as one
+must declare which artifact folders should be re-used. Artifact re-use sometimes happens with compression/decompression
+of the respective folder resulting in really slow builds if a project is very big.
+
+Codefresh does not need to bother the user with artifact reuse across steps. *Anything* that is placed in the shared Codefresh volume will automatically be available to the next steps in the pipeline without any extra configuration.
+
+Example 1
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/introduction/codefresh-volume-example1.png"
+url="/images/pipeline/introduction/codefresh-volume-example1.png"
+alt="Codefresh volume example 1"
+caption="Re-using Node Modules"
+max-width="90%"
+%}
+
+1. The first step runs `npm install` and downloads all libraries in `node_modules` into the shared Codefresh volume.
+1. The second step runs `npm test`. The folder `node_modules` is still present from the previous step.
+
+Example 2
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/introduction/codefresh-volume-example2.png"
+url="/images/pipeline/introduction/codefresh-volume-example2.png"
+alt="Codefresh volume example 2"
+caption="Re-using Test reports"
+max-width="90%"
+%}
+
+1. The first step runs `mvn test` and produces some test reports in `target/surefire-reports` into the shared Codefresh volume.
+1. The next step uploads these reports using FTP to an external site.
+
+
+The common volume shared among build steps makes it very easy to create pipelines that work in a gradual manner
+where any step in the pipeline is using artifacts produced by a previous one.
+
+>The shared volume is **NOT available** in [build steps]({{site.baseurl}}/docs/pipelines/steps/build/). This is not a Codefresh limitation. Docker itself [does not allow volumes during builds](https://github.com/moby/moby/issues/14080){:target="\_blank"}. There is no folder `/codefresh/volume` inside a Dockerfile for you to access.
+
+You can also use [environment variables]({{site.baseurl}}/docs/pipelines/variables/) to share information between steps. All predefined environment variables are available to all steps, and each individual step can use `cf_export` to dynamically inject extra environment variables during the build process.
+
+
+## Working with Codefresh pipelines
+
+Now that we know the basics, we can see how you can take advantage of Docker-based pipelines in order to build and deploy your projects.
+
+
+### Cloning the source code
+
+You can clone source code using the built-in [Git-clone step]({{site.baseurl}}/docs/pipelines/steps/git-clone/) as the first step in a CI pipeline, or manually run your own Git clone commands in a freestyle step. Codefresh has built-in [Git integration]({{site.baseurl}}/docs/integrations/git-providers/) with all popular git providers (both cloud and on-premises installations).
+
+Codefresh uses the shared volume as the parent folder of the project. So if your pipeline is connected to a Git repo that contains `my-project` the following will happen:
+
+* `/codefresh/volume` is the shared directory for all steps
+* `/codefresh/volume/my-project` is where the source code exists. This is also the current working directory
+* Any other directory (e.g. `/bin`, `/var`, `/opt`) depends on the current container image that is used as build context
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/introduction/checkout.png"
+url="/images/pipeline/introduction/checkout.png"
+alt="Codefresh checkout folder"
+caption="Codefresh checkout folder"
+max-width="80%"
+%}
+
+There are three important points to consider regarding these folders:
+
+1. The [working directory]({{ site.baseurl }}/docs/pipelines/what-is-the-codefresh-yaml/#working-directories) of each step is by default the project folder (e.g. `/codefresh/volume/my-project`). Therefore
+your build step can run commands exactly as you would run them locally (e.g. `npm install, pip install, mvn package, bundle install`).
+
+1. Notice that the project folder is placed on the Codefresh volume, so by default it is also available to all other steps. The code that you check out in the beginning, as well as all other files that are created on it, are available to all steps. Once you create `node_modules`, or any other folder that exists inside the project folder, it will automatically persist for all other steps.
+
+1. Finally, `/codefresh/volume` is an internal folder name, and you should use `{% raw %}${{CF_VOLUME_PATH}}{% endraw %}` in your codefresh.yml file
+if you really want to reference this folder. You can also reference your project folder as `{% raw %}${{CF_VOLUME_PATH}}/${{CF_REPO_NAME}}{% endraw %}` if you need it.
+
+See the [System Provided Variables]({{site.baseurl}}/docs/pipelines/variables/#system-provided-variables) section for more information.
+
+### Working with Docker inside a Codefresh pipeline
+
+We have already seen that Codefresh pipelines are based on Docker images and that each step runs inside the context of a Docker container. You might be wondering how you can run Docker commands directly inside a Codefresh pipeline.
+
+The answer is that you don't. Even though in the future Codefresh might allow for Docker-in-Docker capabilities, at the moment this is not supported for security reasons (only enterprise customers have access to the underlying Docker daemon). Any scripts that you already have that run Docker commands on their own will need to be adapted to Codefresh pipelines.
+
+Usually you want to run a docker command for four reasons:
+
+1. To build a Docker image
+1. To push a Docker image
+1. To run a docker-compose setup
+1. To run a Docker container
+
+For all these situations Codefresh gives you special pipeline steps that perform the respective action. These are:
+
+1. The [build step]({{site.baseurl}}/docs/pipelines/steps/build/)
+1. The [push step]({{site.baseurl}}/docs/pipelines/steps/push/)
+1. The [compositions step]({{site.baseurl}}/docs/pipelines/steps/composition/)
+1. The [freestyle step]({{site.baseurl}}/docs/pipelines/steps/freestyle/)
+
+The commands you define in a freestyle step run automatically in a Docker container that is attached to that step once the pipeline executes.
+
+Therefore, this command on your local workstation:
+
+```
+docker run python:3.6.4-alpine3.6 pip install .
+```
+
+will become in Codefresh
+
+```
+CollectAllMyDeps:
+ title: Install dependencies
+ image: python:3.6.4-alpine3.6
+ commands:
+ - pip install .
+```
+For the plugins in the [Step Marketplace](https://codefresh.io/steps/) we already give an example of the YAML part that must be included in your pipeline:
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/plugin-example.png"
+url="/images/pipeline/plugin-example.png"
+alt="Codefresh steps directory"
+caption="Codefresh steps directory"
+max-width="50%"
+%}
+
+Each plugin also defines its input/output in the form of environment variables and files.
+
+### Creating Docker images dynamically as build tools
+
+
+Now we reach one of the most powerful features of Codefresh pipelines. We have already seen that [freestyle pipeline steps]({{site.baseurl}}/docs/pipelines/steps/freestyle/) are just a series of commands that run inside the context of a Docker container. In most cases the images used
+for the freestyle steps are known in advance and come from public (e.g. Dockerhub) or [private Docker registries]({{site.baseurl}}/docs/docker-registries/external-docker-registries/).
+
+Codefresh is one of the few CI/CD solutions that not only offers easy Docker registry integration accessible to all pipelines,
+but also allows you to **build docker images on demand in the same pipeline where they are required**.
+
+This means that you can create a special Docker image in an early step inside a Codefresh pipeline and then reference it in a later step in the same pipeline.
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/introduction/dynamic-docker-builds.png"
+url="/images/pipeline/introduction/dynamic-docker-builds.png"
+alt="Codefresh dynamic docker builds"
+caption="Creating dynamically Docker images as build steps"
+max-width="90%"
+%}
+
+Let's say for example that you are moving a legacy application to Codefresh which is deployed using a special Python script. Your main application is a Ruby-On-Rails app. Both applications exist in the same git repository (we mention this for simplicity reasons, Codefresh also supports checking out code from multiple repositories).
+
+You can create a single pipeline with Codefresh that does the following:
+
+1. Checks out the code
+1. Creates a Docker image based on Python for the deployment tool
+1. Uploads the Python tool Docker image to the internal registry
+1. Builds the Ruby application using a freestyle step with the R-O-R image from Dockerhub
+1. Deploys the Ruby application by running the Python based deployment tool image (after pulling it first)
+
+This concept is ground-breaking as it allows you to automatically update your build tools that are used in any pipeline.
+Even though you could manually create the Docker images yourself before-hand, it is better to completely automate them
+inside the pipeline they are actually needed. This ensures that both the application and its tooling are always at the latest version.
+
+### How caching works in Codefresh
+
+Codefresh employs several caching mechanisms for both Dockerized and non-dockerized applications. The shared volume is also cached behind the scenes automatically. See our [caching guide]({{site.baseurl}}/docs/pipelines/pipeline-caching/) for more details.
+
+### Calling other pipelines
+
+It is also possible to chain multiple pipelines together in Codefresh. To accomplish this, Codefresh offers
+a special Docker image that contains the [Codefresh CLI](https://codefresh-io.github.io/cli/){:target="\_blank"} and allows you to trigger another pipeline using its name.
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/introduction/call-pipeline.png"
+url="/images/pipeline/introduction/call-pipeline.png"
+alt="Codefresh call pipeline"
+caption="Calling another pipeline"
+max-width="80%"
+%}
+
+Notice that each pipeline in Codefresh is completely isolated from the other. They use a different Docker volume so the build context of each one cannot access files from the other. This may change in the future, but for the time being
+you should know that only steps within the same pipeline can share artifacts.
+
+## Related articles
+[Creating pipelines]({{site.baseurl}}/docs/pipelines/pipelines/)
+[Codefresh YAML]({{site.baseurl}}/docs/pipelines/what-is-the-codefresh-yaml/)
+[Build and Docker caching]({{site.baseurl}}/docs/pipelines/pipeline-caching/)
+
+
+
diff --git a/_docs/pipelines/marketplace.md b/_docs/pipelines/marketplace.md
deleted file mode 100644
index 4295f74e..00000000
--- a/_docs/pipelines/marketplace.md
+++ /dev/null
@@ -1,13 +0,0 @@
----
-title: "Codefresh marketplace"
-description: ""
-group: pipelines
-toc: true
----
-
-The Codefresh Hub for Argo documentation can be found in its [official repository](https://github.com/codefresh-io/argo-hub).
-
-Codefresh is fully backing this project and will help all developers that want to contribute to succeed.
-
-You can find documentation about how to contribute to the argo hub in the [official repository contribute section](https://github.com/codefresh-io/argo-hub#How-to-Contribute)
-
diff --git a/_docs/pipelines/monitoring-pipelines.md b/_docs/pipelines/monitoring-pipelines.md
new file mode 100644
index 00000000..c5bab5ea
--- /dev/null
+++ b/_docs/pipelines/monitoring-pipelines.md
@@ -0,0 +1,463 @@
+---
+title: "Monitoring pipelines"
+description: "Viewing your builds and logs"
+group: pipelines
+toc: true
+---
+
+
+All pipeline activity in Codefresh can be viewed in the *Builds* tab.
+* The global build view shows builds for all projects across your organization
+* The project-based view from the settings inside an individual project shows the builds for the selected project
+
+Both views have the same controls and filters.
+
+## Viewing pipeline status
+
+Each screen contains all builds sorted from the most recent to the oldest. The first time you visit
+the screen there are no filters defined.
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/monitoring/builds-dashboard.png"
+url="/images/pipeline/monitoring/builds-dashboard.png"
+alt="Pipeline Activity in Codefresh"
+caption="Pipeline activity"
+max-width="80%"
+%}
+
+By default, it shows all builds that are happening in Codefresh. To narrow the list, you can use the filters at the top
+of the screen.
+
+### Applying filters on the build view
+
+Directly above the list you can find several filters.
+
+At the most basic level you can choose between
+
+ * *Running* builds that are currently executing
+ * *Pending* builds which are queued and waiting to start
+ * *Delayed* builds which cannot run yet, because there are no free pipeline builders.
+ A build can be delayed for a maximum of seven days, and each account can have up to 1000 delayed builds at any time.
+ * Builds that are delayed for more than seven days are terminated with a _Delay time limit exceeded_ reason.
+  * If the total number of delayed builds exceeds 1000, older builds are terminated with a _Maximum delayed workflows exceeded_ reason.
+
+ * *All* builds regardless of running stage (this is the default)
+
+You can further filter the builds by choosing the various filter types that specify the build job.
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/monitoring/build-filtering.png"
+url="/images/pipeline/monitoring/build-filtering.png"
+alt="Pipeline filters in Codefresh"
+caption="Available filters"
+max-width="50%"
+%}
+
+The available filters are:
+
+* *Pipeline* - any of the pipelines available.
+* *Provider* - type of [Git provider]({{site.baseurl}}/docs/integrations/git-providers/).
+* *Repository* - Git repository from the attached [trigger]({{site.baseurl}}/docs/pipelines/triggers/).
+* *Type* - build, [launch a test environment]({{site.baseurl}}/docs/getting-started/on-demand-environments/#launching-a-docker-image-using-codefresh).
+* *Branch* - any of the available branches from the attached Git trigger.
+* *Committer* - person that made the commit that triggered the build.
+* *Environment* - which [environment]({{site.baseurl}}/docs/deploy-to-kubernetes/environment-dashboard/) was affected.
+* *Status* - success, error, in-progress, pending, terminated etc. A Pending status can also indicate that [pipeline build execution has been paused]({{site.baseurl}}/docs/administration/pipeline-settings/#pause-pipeline-executions) for the account.
+* *Trigger type* - what type of trigger was responsible for this build
+* *Git event* - in the case of [git triggers]({{site.baseurl}}/docs/pipelines/triggers/git-triggers/) the exact event
+
+Notice that all filters are multiple-choice so you can select multiple values for each filter category.
+At any given point you can see all the active filters on top of the screen.
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/monitoring/possible-filters.png"
+url="/images/pipeline/monitoring/possible-filters.png"
+alt="Pipeline filters in Codefresh"
+caption="Active filters"
+max-width="50%"
+%}
+
+You can easily remove active filters, by clicking on them and adding/removing values.
+
+On the right hand side you can also find a filtering toolbar with time options:
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/monitoring/build-filter-date.png"
+url="/images/pipeline/monitoring/build-filter-date.png"
+alt="Filtering options for time"
+caption="Filtering options for time"
+max-width="60%"
+%}
+
+You can combine all previously mentioned filters with the time based filters.
+
+### Creating build views
+
+Once you have a set of filters that you use regularly, you can save them as a custom *Build View* by clicking the *Save as View* button
+and providing a name.
+
+Now you can select at the top of the window any of the available build views to automatically filter results according to the respective sets of filters.
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/monitoring/build-view-selection.png"
+url="/images/pipeline/monitoring/build-view-selection.png"
+alt="Build View selection"
+caption="Build View selection (click to enlarge)"
+max-width="50%"
+%}
+
+You can delete existing build-views by clicking on the *manage views* button.
+You can change the filters of an existing build view by making a new filter selection and then saving the view with an existing name (effectively overwriting it).
+
+
+### Build details
+
+
+For each individual build you can see several details such as the git hash, the person who made the commit, the pipeline that was triggered as well as how much time it took. For each event type you will also see additional context related information.
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/monitoring/build-details-entry.png"
+url="/images/pipeline/monitoring/build-details-entry.png"
+alt="build details in Codefresh"
+caption="Build details"
+max-width="100%"
+%}
+
+Child builds triggered by other builds are identified in the Event column by the icon {::nomarkdown} {:/}.
+The Parent Build column shows the link to the parent build. Mouse over to see the tooltip with information on the parent build. The tooltip includes links to the parent build, repo, branch, commit message, and the ability to filter by repo and branch.
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/monitoring/child-parent-build-info.png"
+url="/images/pipeline/monitoring/child-parent-build-info.png"
+alt="Child build in Builds list"
+caption="Child build in Builds list"
+max-width="70%"
+%}
+
+There are also extra options if you click the small "3-dot" menu button on the right. For a particular build, you can:
+
+- View the logs
+- View the YAML
+- View or add [annotations]({{site.baseurl}}/docs/pipelines/annotations/)
+- View the images produced (and consequently launch an on-demand [test environment]({{site.baseurl}}/docs/getting-started/on-demand-environments/#launching-a-docker-image-using-codefresh))
+
+Notice that if you restart a pipeline it will trigger with the exact settings it *originally* had. So
+if this was a manual trigger where you [disabled caching]({{site.baseurl}}/docs/troubleshooting/common-issues/disabling-codefresh-caching-mechanisms/) or changed the [notification options](#monitoring-pipelines-that-check-pull-requests), the new
+execution will still honor those settings (even if you have changed them for later builds).
+
+An extra button for test reports will be visible if you are using the [test report feature]({{site.baseurl}}/docs/testing/test-reports/) of Codefresh.
+
+
+## Viewing details for an individual pipeline build
+
+If you click on any individual pipeline build, you will enter the pipeline build information screen.
+From here you can see more details for a build such as the logs, running time and resource metrics.
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/monitoring/pipeline-view.png"
+url="/images/pipeline/monitoring/pipeline-view.png"
+alt="Pipeline view"
+caption="Pipeline view"
+max-width="80%"
+%}
+
+Each section in this screen corresponds to each pipeline step. There are two special steps:
+
+* *Initializing Process*
+* *Cloning Main Repository*
+
+These are Codefresh built-in steps and will appear for most builds (you can also create a pipeline that doesn't clone a git repository by default). The rest of the step names depend on your `codefresh.yml` (or the default step names provided by Codefresh). The different columns take the names from the defined [pipeline stages]({{site.baseurl}}/docs/pipelines/stages/).
+
+### Viewing status for pipeline steps
+
+Monitor the status of the steps in the pipeline as they are executed.
+
+{: .table .table-bordered .table-hover}
+| Step Status Icon | Description |
+| ------------------------| ---------------- |
+|{::nomarkdown} {:/}| Pipeline step completed successfully. |
+|{::nomarkdown} {:/}| Pipeline step pending approval has been approved, either manually or automatically. |
+|{::nomarkdown} {:/}| Pipeline step pending approval has been denied approval. |
+|{::nomarkdown} {:/}| Pipeline step currently running. |
+|{::nomarkdown} {:/}| Pipeline step running in debug mode. See [Debugging pipelines]({{site.baseurl}}/docs/pipelines/debugging-pipelines/) for more information. |
+|{::nomarkdown} {:/}| Pipeline step gracefully terminating execution. |
+|{::nomarkdown} {:/}| Pipeline step execution has been manually or automatically terminated. |
+|{::nomarkdown} {:/}| Pipeline step execution has been terminated because of error. |
+
+
+
+### Viewing/downloading logs for builds and build steps
+
+View logs for running and completed builds and download them in HTML or text formats.
+You can view logs online, for the entire build or for single or specific steps in the build. Similarly, you can download the logs for the entire build, or for single or specific steps.
+The Filter Logs option is useful to view and manage logs, especially for large builds as there is a max size limit for logs. You can also search logs.
+
+>Note:
+ The max log size for the entire build is 100MB, and 20MB per step. The system stops generating logs once the build size is exceeded.
+ For large builds, it is easier to filter the logs by single or multiple steps, and then view/download them.
+
+1. In the **Builds** page, select a build.
+1. To view logs online for the selected build, click **Output** in the lower part of the Build page.
+1. Optional. Select **Filter Logs** and then select the step or steps for which view/download logs.
+ Logs are displayed for the selected steps.
+1. From either the context menu on the top-right of the toolbar or from the Output pane, select **Download as HTML** or **Download as text**.
+   The log file is downloaded with the build ID as the filename, including also the step name if the log is for a single step, in the format `<build ID>_<step name>`.
+
+ {% include
+image.html
+lightbox="true"
+file="/images/pipeline/monitoring/build-logs.png"
+url="/images/pipeline/monitoring/build-logs.png"
+alt="Build log in Codefresh"
+caption="Build log in Codefresh"
+max-width="60%"
+%}
+
+
+### Viewing variables in pipeline builds
+
+Variables, both system (environment) and custom (user-defined), are injected into pipelines from different sources and at different levels.
+The variables actually used by a specific build of the pipeline varies according to the events that triggered the pipeline.
+Select a build to view all its variables, and identify their source, and overrides if any.
+
+1. In the **Builds** page, either select the build and then open the context-menu, or open the context-menu on the right of the build entry.
+1. Select **Variables**.
+
+ {% include
+image.html
+lightbox="true"
+file="/images/pipeline/monitoring/build-variables-view-option.png"
+url="/images/pipeline/monitoring/build-variables-view-option.png"
+alt="Variables option in context menu of build entry"
+caption="Variables option in context menu of build entry"
+max-width="70%"
+%}
+
+{:start="3"}
+1. If required, click the Sort icon for the group to sort in alphabetical order.
+1. To copy the group's variables to the clipboard, click the Copy icon.
+
+
+Here's an example of the list of variables for a pipeline build.
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/monitoring/build-variables-list.png"
+url="/images/pipeline/monitoring/build-variables-list.png"
+alt="List of variables in selected build"
+caption="List of variables in selected build"
+max-width="50%"
+%}
+
+The variables are grouped by granularity, starting with the global project-level variables and ending with the trigger-level variables with the highest granularity:
+* Project
+* Shared configuration
+* Pipeline
+* Trigger
+
+A variable with a strikethrough indicates an override by the same variable in a lower-level group. For rules on precedence and overrides for variables in builds, see [Variables]({{site.baseurl}}/docs/pipelines/variables/).
+
+>Notes:
+ * Variables exported across steps with `cf_export` are not identified as `cf-exported` variables in the list.
+ * Secret-type variables are always masked.
+
+
+
+### Reviewing the yaml for the pipeline
+
+From the step details you can also click on the yaml tab to see the yaml segment for that individual step:
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/monitoring/yaml-from-step.png"
+url="/images/pipeline/monitoring/yaml-from-step.png"
+alt="Step Yaml"
+caption="Step Yaml"
+max-width="60%"
+%}
+
+If you want to see the yaml for the whole pipeline,
+- Click the *YAML* tab on the bottom left corner without selecting a step first or
+- Select the three dots next to the "RESTART" button on the top-right, and click on *Show YAML*
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/monitoring/view-pipeline-yaml.png"
+url="/images/pipeline/monitoring/view-pipeline-yaml.png"
+alt="Pipeline Yaml"
+caption="Pipeline Yaml"
+max-width="60%"
+%}
+
+In both cases you can copy to clipboard the yaml shown using the button at the top left corner.
+
+### Viewing pipeline metrics
+
+Codefresh offers several metrics for pipeline steps that allow you to get a better overview on the resources
+consumed by your pipeline.
+
+At the most basic level Codefresh will show some quick metrics while the pipeline is running that include
+memory consumed and size of logs:
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/monitoring/quick-pipeline-metrics.png"
+url="/images/pipeline/monitoring/quick-pipeline-metrics.png"
+alt="Pipeline running metrics"
+caption="Pipeline running metrics"
+max-width="70%"
+%}
+
+You can then get the memory usage for the whole pipeline by clicking on the metrics tab at the bottom of the screen.
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/monitoring/pipeline-metrics.png"
+url="/images/pipeline/monitoring/pipeline-metrics.png"
+alt="Pipeline detailed metrics"
+caption="Pipeline detailed metrics"
+max-width="70%"
+%}
+
+
+If you click on an individual step before clicking the *Metrics* tab you will get metrics for that specific step only.
+
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/monitoring/step-metrics.png"
+url="/images/pipeline/monitoring/step-metrics.png"
+alt="Step metrics"
+caption="Step metrics"
+max-width="70%"
+%}
+
+
+### Restarting the pipeline
+
+You can choose to restart any pipeline by clicking the button at the top right corner.
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/monitoring/restart-pipeline.png"
+url="/images/pipeline/monitoring/restart-pipeline.png"
+alt="Restart a pipeline"
+caption="Restart a pipeline"
+max-width="70%"
+%}
+
+>It is important to note that "Restart from beginning" will restart a pipeline with the **same** state that it had in its original execution (including the original git commit). If you want to execute a pipeline again with a new state instead, you need to use the *Run* button in the [pipeline editor]({{site.baseurl}}/docs/pipelines/pipelines/#using-the-inline-pipeline-editor) and select any of the available [triggers]({{site.baseurl}}/docs/pipelines/triggers/).
+
+
+
+If the pipeline has failed, you can choose to restart it only from the failed step and onwards.
+
+You can also restart from a failed step right from the graphical view:
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/monitoring/restart-failed.png"
+url="/images/pipeline/monitoring/restart-failed.png"
+alt="Restart from a failed step"
+caption="Restart from a failed step"
+max-width="70%"
+%}
+
+>Notice again that restarting a pipeline from a failed step means restarting the pipeline with the **same** state that it had at that point in time (including the original git commit).
+
+If your pipeline has some flaky steps, you can also use the [retry syntax]({{site.baseurl}}/docs/pipelines/what-is-the-codefresh-yaml/#retrying-a-step) in your yaml instead of restarting them manually each time they fail.
+
+
+## Monitoring pipelines outside the Codefresh UI
+
+You don't always have to be in the Codefresh UI in order to monitor the status of your builds.
+
+
+### Monitoring pipelines that check Pull Requests
+
+One of the most
+important roles of a CI platform is to automatically update the status of a GIT Pull request with the result
+of the respective build.
+
+{% include
+image.html
+lightbox="true"
+file="/images/getting-started/quick-start-test-pr/auto-build-pr.png"
+url="/images/getting-started/quick-start-test-pr/auto-build-pr.png"
+alt="Pull Request Status"
+caption="Pull Request Status (click image to enlarge)"
+max-width="50%"
+%}
+
+If you have setup a [GIT trigger]({{site.baseurl}}/docs/pipelines/triggers/git-triggers/) in Codefresh then by default this happens automatically without any other configuration
+for all automated commits (that are coming from webhooks).
+
+If you start a build manually then by default the git status will **not** be updated (i.e. the result of the pipeline
+will not affect the status of the pull request).
+
+If you want the git status to be updated even for manual builds, enable the git status update checkbox when you launch the pipeline.
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/monitoring/report-notification-checkbox.png"
+url="/images/pipeline/monitoring/report-notification-checkbox.png"
+alt="Update git status for pipelines triggered manually "
+caption="Update git status for pipelines triggered manually (click image to enlarge)"
+max-width="50%"
+%}
+
+This way the pipeline result *will* update the git status even for manually triggered builds.
+
+The same behavior is also available to the [Codefresh CLI](https://codefresh-io.github.io/cli/pipelines/run-pipeline/). In that case use the parameter `--enable-notifications`
+to specify if manually triggering a build will also change the GIT status.
+
+For open source projects you also have the ability to [trigger builds from external forks]({{site.baseurl}}/docs/pipelines/triggers/git-triggers/#support-for-building-pull-requests-from-forks).
+
+### Viewing pipeline status from text/html files
+
+Codefresh also supports build badges that allow you to show the
+status of a Pipeline in Text files or web pages.
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/monitoring/build-badge.png"
+url="/images/pipeline/monitoring/build-badge.png"
+alt="Codefresh build badges"
+caption="Codefresh build badges"
+max-width="100%"
+%}
+
+See the [build badges page]({{site.baseurl}}/docs/pipelines/build-status/) for more information.
+
+
+## Related articles
+[Codefresh YAML]({{site.baseurl}}/docs/pipelines/what-is-the-codefresh-yaml/)
+[Pipeline steps]({{site.baseurl}}/docs/pipelines/steps/)
+[Test report]({{site.baseurl}}/docs/pipelines/test-reports/)
+[Status badges]({{site.baseurl}}/docs/pipelines/build-status/)
diff --git a/_docs/pipelines/pipeline-caching.md b/_docs/pipelines/pipeline-caching.md
new file mode 100644
index 00000000..95ff6dd8
--- /dev/null
+++ b/_docs/pipelines/pipeline-caching.md
@@ -0,0 +1,314 @@
+---
+title: "Caching in pipelines"
+description: "Faster builds with Codefresh caching"
+group: pipelines
+toc: true
+
+---
+
+One of the unique features of Codefresh is the multitude of caching systems that take part in a pipeline, and in particular the caching mechanisms targeted specifically at Docker builds. Most types of caching are completely automatic and require zero configuration in order to activate. Caching is a built-in feature in all Codefresh accounts regardless of pricing tier (even free accounts have all types of caching enabled).
+
+## Types of caching
+
+Here is a quick overview of all types of caching used in a Codefresh pipeline:
+
+{: .table .table-bordered .table-hover}
+| Caching mechanism | Activation | Used in | Comments |
+| -------------- | ---------------------------- |-------------------------| -------------------------|
+| Distributed Docker step/image caching | Automatic | All pipeline [steps]({{site.baseurl}}/docs/pipelines/steps/) | |
+| Distributed Docker layer caching | Automatic | Pipeline [build steps]({{site.baseurl}}/docs/pipelines/steps/build/) | Mimics local Docker layer cache|
+| Caching from previous built image | Automatic | Pipeline build steps | Distributed version of `--cache-from`|
+| Docker registry caching | Automatic | Pipeline build steps | Works for all [connected Docker registries]({{site.baseurl}}/docs/docker-registries/external-docker-registries/)|
+| Traditional build caching | Automatic/manual | Pipeline [freestyle steps]({{site.baseurl}}/docs/pipelines/steps/freestyle/) | See notes for [parallel builds]({{site.baseurl}}/docs/pipelines/advanced-workflows/)|
+
+All these caching mechanisms are enabled by default and you can [freely disable them]({{site.baseurl}}/docs/troubleshooting/common-issues/disabling-codefresh-caching-mechanisms/) if you encounter any issues with caching.
+
+Let's see these caches in order and how to use them effectively.
+
+## Distributed Docker image caching
+
+This is the simplest mode of caching available. All Codefresh steps [are in fact docker images]({{site.baseurl}}/docs/pipelines/introduction-to-codefresh-pipelines/). Once a pipeline runs for the first time, Codefresh will pull all required images from their registries (either public or private) and will cache them for the next build:
+
+
+{% include image.html
+lightbox="true"
+file="/images/pipeline/caching/image-caching.png"
+url="/images/pipeline/caching/image-caching.png"
+alt="Caching pipeline steps"
+caption="Caching pipeline steps"
+max-width="60%"
+%}
+
+The next time the pipeline runs all images will be fetched from cache. This includes built-in steps (e.g the [clone step]({{site.baseurl}}/docs/pipelines/steps/git-clone/)), custom steps from [the marketplace](https://codefresh.io/steps/) or your own [dynamic pipeline steps]({{site.baseurl}}/docs/pipelines/introduction-to-codefresh-pipelines/#creating-docker-images-dynamically-as-build-tools).
+
+This cache mechanism is completely automatic and is not user configurable. Some ways that you can affect it are:
+
+* If you use well-known images in your pipeline (such as `alpine`, `node`, `maven` etc.) they are more likely to be already cached by the Codefresh platform
+* Use specific tags for your images (e.g. `alpine:3.9.2` and `maven:3-jdk-11-openj9`) instead of generic ones (e.g. `alpine:latest` and `node:buster`) that change all the time
+* Using small images in the pipeline will make caching/restoring of pipeline steps much faster.
+
+
+You can see in the [pipeline build logs]({{site.baseurl}}/docs/pipelines/steps/build/) if the images of your steps are found in cache or not. Here is an example of a cache hit:
+
+{% include image.html
+lightbox="true"
+file="/images/pipeline/caching/image-cache-hit.png"
+url="/images/pipeline/caching/image-cache-hit.png"
+alt="Docker image cache hit"
+caption="Docker image cache hit"
+max-width="50%"
+%}
+
+and a cache miss:
+
+{% include image.html
+lightbox="true"
+file="/images/pipeline/caching/image-cache-miss.png"
+url="/images/pipeline/caching/image-cache-miss.png"
+alt="Docker image cache miss"
+caption="Docker image cache miss"
+max-width="50%"
+%}
+
+This cache mechanism is applicable to all Codefresh pipelines and steps.
+
+
+## Distributed Docker layer caching
+
+This type of caching is **only** applicable to [build steps]({{site.baseurl}}/docs/pipelines/steps/build/) and mimics the ways docker layer caching behaves locally on your workstation.
+
+When you build images locally, Docker will cache intermediate layers making future builds much faster. You can see when caches are used in your build logs.
+
+{% highlight shell %}
+{% raw %}
+> docker build . -t my-app
+Sending build context to Docker daemon 81.92kB
+Step 1/10 : FROM golang:1.12-alpine
+ ---> 6a17089e5a3a
+Step 2/10 : RUN apk add --no-cache git
+ ---> Using cache
+ ---> 7b65bc6a6690
+Step 3/10 : WORKDIR /app/go-sample-app
+ ---> Using cache
+ ---> 8755d1490fe2
+Step 4/10 : COPY go.mod .
+ ---> Using cache
+ ---> 476d868ceddd
+Step 5/10 : COPY go.sum .
+ ---> Using cache
+ ---> 3239097e9bde
+[...]
+{% endraw %}
+{% endhighlight %}
+
+In a distributed build environment however, things work much differently as each build node has its own cache. If you run a pipeline on one node and then run a second build on another node everything will be recreated again because (normally) build nodes don't share any cache.
+
+{% include image.html
+lightbox="true"
+file="/images/pipeline/caching/no-distributed-layer-cache.png"
+url="/images/pipeline/caching/no-distributed-layer-cache.png"
+alt="Without a distributed docker layer cache"
+caption="Without a distributed docker layer cache"
+max-width="60%"
+%}
+
+In the example above if you run another build that is picked up by build node 18 all Docker filesystem layers will be recreated again even though they are already present in other nodes.
+
+Codefresh is one of the few CI/CD solutions that has a *distributed* Docker layer cache. This makes layer caching available to all build nodes. It doesn't matter any more which build node runs which pipeline as all of them are equal regarding their caching capabilities.
+
+{% include image.html
+lightbox="true"
+file="/images/pipeline/caching/distributed-layer-cache.png"
+url="/images/pipeline/caching/distributed-layer-cache.png"
+alt="With a distributed docker layer cache"
+caption="With a distributed docker layer cache"
+max-width="60%"
+%}
+
+With the distributed docker layer cache all build nodes are now equal. Any of the available nodes can pick your next pipeline build as all of them have access to all the previous docker filesystem layers.
+
+You can see if this cache is used in your [pipeline logs]({{site.baseurl}}/docs/pipelines/steps/build/):
+
+{% include image.html
+lightbox="true"
+file="/images/pipeline/caching/distributed-docker-layer-cache.png"
+url="/images/pipeline/caching/distributed-docker-layer-cache.png"
+alt="Docker layer caching regardless of build node"
+caption="Docker layer caching regardless of build node"
+max-width="60%"
+%}
+
+Codefresh will also automatically pass the `--cache-from` directive to docker builds with the previous successful build artifacts:
+
+{% include image.html
+lightbox="true"
+file="/images/pipeline/caching/cache-from.png"
+url="/images/pipeline/caching/cache-from.png"
+alt="Distributed version of `--cache-from`"
+caption="Distributed version of `--cache-from`"
+max-width="60%"
+%}
+
+To take advantage of this build cache just follow the official Docker guidelines and best practices such as
+
+* Download dependencies in a separate docker layer
+* Put layers that will not change frequently at the top of dockerfile (e.g. OS libs)
+* Put things that will change frequently at the bottom of the dockerfile (e.g. source code)
+* Don't use side effects in Dockerfiles
+
+Basically, if your Dockerfile is already optimized on your local workstation, it should also be optimized for Codefresh. More information can be found in the official documentation:
+
+* [https://www.docker.com/blog/intro-guide-to-dockerfile-best-practices/](https://www.docker.com/blog/intro-guide-to-dockerfile-best-practices/)
+* [https://docs.docker.com/develop/develop-images/dockerfile_best-practices/](https://docs.docker.com/develop/develop-images/dockerfile_best-practices/)
+
+## Docker registry caching
+
+This is a caching mechanism unique to Codefresh and applicable only to [build steps]({{site.baseurl}}/docs/pipelines/steps/build/) when any of [connected Docker registries]({{site.baseurl}}/docs/docker-registries/external-docker-registries/) is used.
+
+Codefresh will check the internal Docker registry *before* a build step and if the exact same image is found (using the image hash), it will skip the build step completely:
+
+{% include image.html
+lightbox="true"
+file="/images/pipeline/caching/skip-build.png"
+url="/images/pipeline/caching/skip-build.png"
+alt="Skipping a previously built Docker image"
+caption="Skipping a previously built Docker image"
+max-width="60%"
+%}
+
+This is a very effective way to cut down the amount of time needed by pipelines but it obviously works only for Docker images that don't change often (helper images, plugins, build tools etc.) as the deployment docker images will always be different when a new git commit happens in the source code.
+
+You can take advantage of this mechanism by [not mixing deployment docker images with development docker images](https://codefresh.io/containers/docker-anti-patterns/). The former will change all the time, while the latter should be recreated less often.
+
+## Traditional build caching
+
+If you have read the [introduction to pipelines]({{site.baseurl}}/docs/pipelines/introduction-to-codefresh-pipelines) page you will already be familiar with the shared volume that is automatically mounted on all pipeline steps. This volume is not only used for data exchange between steps of the same pipeline, but is also stored/fetched for each subsequent build as well.
+
+{% include image.html
+lightbox="true"
+file="/images/pipeline/caching/pipeline-volume-caching.png"
+url="/images/pipeline/caching/pipeline-volume-caching.png"
+alt="Pipeline workspace caching"
+caption="Pipeline workspace caching"
+max-width="90%"
+%}
+
+This means that unlike other CI solutions where you have to manually describe what folder you wish to cache, in Codefresh **everything that exists in `/codefresh/volume` and its subfolders is automatically cached between different builds** of the same pipeline. The volume mounting and caching/restoring process is completely automatic. You don't need any configuration about it.
+
+The main choice that you have is which files to place on the volume. For example, Node.js uses the folder `node_modules` for its dependencies which are placed under the project folder [which is automatically placed under the volume]({{site.baseurl}}/docs/pipelines/introduction-to-codefresh-pipelines/#cloning-the-source-code). So all contents of `node_modules` will be cached by default without any further action on your part.
+
+>Note that if you are using [Codefresh on-prem]({{site.baseurl}}/docs/installation/codefresh-on-prem/), this kind of caching is not available for the built-in runtime and you need to use the [Codefresh Runner]({{site.baseurl}}/docs/installation/codefresh-runner/)
+with your own runtime to activate volume caching.
+
+The simplest way to see this caching mechanism in action is this pipeline:
+
+ `codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+steps:
+ write_sample_file:
+ title: Writing to shared volume
+ image: alpine:3.10.3
+ commands:
+ - date >> /codefresh/volume/sample.txt
+ read_sample_file:
+ title: Reading from shared volume
+ image: alpine:3.10.3
+ commands:
+ - cat /codefresh/volume/sample.txt
+{% endraw %}
+{% endhighlight %}
+
+If you run this pipeline multiple times you will see multiple entries in the file `sample.txt`.
+
+>Note that if you run concurrent builds too quickly after one another, the Codefresh Volume will refresh [from scratch]({{site.baseurl}}/docs/pipelines/pipeline-caching/#issues-with-parallel-builds-and-parallel-pipelines) instead of being cached between builds.
+
+{% include image.html
+lightbox="true"
+file="/images/pipeline/caching/codefresh-shared-volume.png"
+url="/images/pipeline/caching/codefresh-shared-volume.png"
+alt="Shared volume after 3 builds of the same pipeline"
+caption="Shared volume after 3 builds of the same pipeline"
+max-width="60%"
+%}
+
+Notice also the complete lack of `volume` directives in the `codefresh.yml` file. The pipeline volume is mounted and cached/restored by Codefresh with no configuration on your part.
+
+Some important points on this caching mechanism:
+
+* The volume is handled and managed by Codefresh in a completely transparent manner. You **DO NOT** need any `volume` directives in your pipelines to take advantage of it. The volume is even present in [service containers]({{site.baseurl}}/docs/pipelines/service-containers/) for integration tests.
+* On each build the [clone step]({{site.baseurl}}/docs/pipelines/steps/git-clone/) will purge/delete everything that is not placed in `.gitignore`. So make sure that your `.gitignore` files contain all the things that you want to see cached (e.g. `node_modules`)
+* If you use the SAAS version of Codefresh, volumes will be reused across all your account pipelines. If you use the On-prem or Hybrid version of Codefresh, pipeline volumes can be scoped to different pipelines or triggers as well
+* You need at least one build of your pipeline in order for the cache mechanism to take any effect.
+* The volume is **NOT available** in [build steps]({{site.baseurl}}/docs/pipelines/steps/build/). This is not a Codefresh limitation. Docker itself [does not allow volumes during builds](https://github.com/moby/moby/issues/14080). There is no folder `/codefresh/volume` inside a Dockerfile for you to access.
+* This is the only caching mechanism that is not related to Docker images. So if you compile/package a traditional application with Codefresh that is not packaged as a Docker image this is the only way to get faster builds.
+
+See also a [full example]({{site.baseurl}}/docs/yaml-examples/examples/shared-volumes-between-builds/) that uses the volume at [https://github.com/codefreshdemo/cf-example-shared-volumes-between-builds](https://github.com/codefreshdemo/cf-example-shared-volumes-between-builds).
+
+### Caching folders which are outside your project folder
+
+By default if you checkout a Git project named `foo`, the source code [is placed under]({{site.baseurl}}/docs/pipelines/introduction-to-codefresh-pipelines/#cloning-the-source-code) `/codefresh/volume/foo`. This means that with zero configuration the following things are cached:
+
+* your source code of `foo` project
+* all dependencies under the project folder (e.g. `foo/node_modules`)
+* all project logs, test results that are inside the project module.
+
+Everything else found in external folders is NOT cached by default. So if you have things in folders such as `/root`, `/tmp/`, `/home/`, `/var/` that you need to cache you need to manually copy them to the volume.
+
+In practice, this means that you need to look at the documentation of your build system and test framework and make sure that all folders you want cached are placed under the Codefresh volume. This is a typical pattern with Java applications.
+
+ * For Maven use `mvn -Dmaven.repo.local=/codefresh/volume/m2_repository package` as shown in the [example]({{site.baseurl}}/docs/learn-by-example/java/spring-boot-2/).
+ * For Gradle use `gradle -g /codefresh/volume/.gradle -Dmaven.repo.local=/codefresh/volume/m2` as explained in the [example]({{site.baseurl}}/docs/learn-by-example/java/gradle/).
+ * For SBT use `-Dsbt.ivy.home=/codefresh/volume/ivy_cache`.
+ * For Pip use `pip install -r requirements.txt --cache-dir=/codefresh/volume/pip-cache` as shown in the [example]({{site.baseurl}}/docs/learn-by-example/python/django/)
+ * For Golang pass an environment variable `GOPATH=/codefresh/volume/go` to the freestyle step that is running go commands
+ * For Rust pass an environment variable `CARGO_HOME=/codefresh/volume/cargo` to the freestyle step that is running rust/cargo commands
+
+ This is only needed for traditional applications that are not dockerized. If you already use Docker containers the previous caching mechanisms are already enough.
+
+### Issues with parallel builds and parallel pipelines
+
+Codefresh supports two forms of parallelism, parallel steps within the same pipeline and parallel pipelines (as well as concurrent builds).
+
+All parallel steps inside the same pipeline use the same volume. Codefresh [does not perform any conflict detection in that case]({{site.baseurl}}/docs/pipelines/advanced-workflows/#shared-codefresh-volume-and-race-conditions).
+
+For concurrent builds of the same pipeline, notice that if you make too many commits very fast (triggering a second build while the previous one is still running), Codefresh will allocate a brand new volume for the subsequent builds. This will force all builds to start with a clean shared volume, resulting in longer build times. Be sure to set your [build termination settings]({{site.baseurl}}/docs/pipelines/pipelines/#pipeline-settings) correctly.
+
+{% include image.html
+lightbox="true"
+file="/images/pipeline/caching/concurrent-build-caching.png"
+url="/images/pipeline/caching/concurrent-build-caching.png"
+alt="Concurrent build caching"
+caption="Concurrent build caching"
+max-width="80%"
+%}
+
+The diagram above shows the following sequence of events:
+
+1. The first build of a pipeline is triggered. Codefresh allocates a brand new volume and automatically mounts it as a workspace at `/codefresh/volume`.
+1. The first build runs and stores artifacts on the volume
+1. The first build finishes. Codefresh stores the volume in the cache
+1. A second build is triggered for the same pipeline and same git branch. Codefresh sees that there is already a volume in the cache and passes it to the second build. The second build correctly finds all artifacts in the cache
+1. *Before the second build finishes*, a third build is triggered.
+1. The pipeline volume is still locked by the second build and Codefresh cannot use it in the third build. Codefresh allocates a **brand new volume** that has no artifacts at all and passes it to the third build
+1. The second build finishes and its volume is saved into cache
+1. The third build finishes and its volume is saved into cache *overwriting* the volume of the second build.
+1. If a fourth build starts it will use the volume from the third build since this was the last saved volume.
+
+
+
+## Codefresh cache size and eviction policy
+
+If you use the SAAS version of Codefresh, then you don't have any control of cache policies.
+The SAAS version is fully controlled by Codefresh personnel and the cache policies in place might clear caches sooner than you think.
+
+If you run a pipeline very infrequently it is possible to suffer many cache misses. If you also use obscure Docker images you might see them downloaded again and again.
+
+If you run the [hybrid or on-prem versions]({{site.baseurl}}/docs/enterprise/installation-security/) of Codefresh, then your system administrator is responsible for fine-tuning the cache settings.
+
+## Related articles
+[Introduction to Codefresh pipelines]({{site.baseurl}}/docs/pipelines/introduction-to-codefresh-pipelines)
+[Codefresh YAML]({{site.baseurl}}/docs/pipelines/what-is-the-codefresh-yaml/)
+[Pipeline steps]({{site.baseurl}}/docs/pipelines/steps/)
+[Parallel pipelines]({{site.baseurl}}/docs/pipelines/advanced-workflows/)
diff --git a/_docs/pipelines/pipeline-settings.md b/_docs/pipelines/pipeline-settings.md
new file mode 100644
index 00000000..6b857fd3
--- /dev/null
+++ b/_docs/pipelines/pipeline-settings.md
@@ -0,0 +1,86 @@
+---
+title: "Global settings for pipelines"
+description: "Define global options for pipeline templates, yaml sources and approval behavior"
+group: administration
+toc: true
+---
+
+To access your global pipeline settings navigate to [https://g.codefresh.io/account-admin/account-conf/pipeline-settings](https://g.codefresh.io/account-admin/account-conf/pipeline-settings) or click on *Account settings* on the left sidebar and then choose *Pipeline settings* item on the next screen.
+
+On this page, you can define global parameters for the whole Codefresh account regarding pipeline options. Users can still override some of these options for individual pipelines.
+
+{% include image.html
+lightbox="true"
+file="/images/administration/pipeline-settings/pipeline-settings-ui.png"
+url="/images/administration/pipeline-settings/pipeline-settings-ui.png"
+alt="Pipeline settings"
+caption="Pipeline settings"
+max-width="80%"
+%}
+
+
+## Pause pipeline executions
+
+Pause builds for pipelines at the account level, for example, during maintenance.
+
+* **Pause build execution** is disabled by default.
+* When enabled:
+ * New pipelines in the account are paused immediately.
+ * Existing pipelines with running builds are paused only after the builds have completed execution.
+* Paused pipelines are set to status Pending, and remain in this status until **Pause build execution** is manually disabled for the account.
+
+{% include image.html
+lightbox="true"
+file="/images/pipeline/pipeline-settings/pause-pipeline-enabled.png"
+url="/images/pipeline/pipeline-settings/pause-pipeline-enabled.png"
+alt="Pause Build Execution pipeline setting enabled"
+caption="Pause Build Execution pipeline setting enabled"
+max-width="80%"
+%}
+
+## Template section
+
+Here you can define global template behavior. The options are:
+
+* Enable [pipeline templates]({{site.baseurl}}/docs/pipelines/pipelines/#using-pipeline-templates) for users. If this is enabled some pipelines can be marked as templates and users can still select them when creating a new pipeline.
+* Decide if users can clone an existing pipeline (along with its triggers and associated parameters) when [creating a new pipeline]({{site.baseurl}}/docs/pipelines/pipelines/#creating-new-pipelines).
+
+Note that templates are simply normal pipelines “marked” as a template. There is no technical difference between templates and actual pipelines.
+
+## Pipeline YAML section
+
+Here you can restrict the sources of pipeline YAML that users can select. The options are:
+
+* Enable/disable the [inline editor]({{site.baseurl}}/docs/pipelines/pipelines/#using-the-inline-pipeline-editor) where YAML is stored in Codefresh SaaS
+* Enable/disable pipeline YAML from connected Git repositories
+* Enable/disable pipeline YAML from [external URLs]({{site.baseurl}}/docs/pipelines/pipelines/#loading-codefreshyml-from-version-control)
+
+You need to allow at least one of these options so that users can create new pipelines. We suggest leaving the first option enabled when users are still learning about Codefresh and want to experiment.
+
+## Advanced pipeline options
+
+Here you can set the defaults for advanced pipeline behavior. The options are:
+
+* [Keep or discard]({{site.baseurl}}/docs/pipelines/steps/approval/#keeping-the-shared-volume-after-an-approval) the volume when a pipeline is entering approval state
+* Whether pipelines in approval state [count or not against concurrency]({{site.baseurl}}/docs/pipelines/steps/approval/#define-concurrency-limits)
+* Define the [Service Account]({{site.baseurl}}/docs/integrations/docker-registries/amazon-ec2-container-registry/#setting-up-ecr-integration---service-account) for Amazon ECR integration.
+* Set the default registry where all Public Marketplace Step images are pulled from. Registries listed are from the [Docker Registry]({{site.baseurl}}/docs/integrations/docker-registries/) integration page.
+ * Example: Public Marketplace Step image is defined to use Docker Hub. If you select a quay.io integration, all Public Marketplace Step images will be pulled from quay.io instead of Docker Hub.
+ * Note: This does not affect Freestyle Steps.
+
+Note that the first option affects pipeline resources and/or billing in the case of SaaS pricing. It will also affect users of existing pipelines that depend on this behavior. It is best to enable/disable this option only once at the beginning.
+
+## Default Behavior for Build Step
+
+Here you can decide if the build step will push images or not according to your organization’s needs. The options are:
+
+1. Users need to decide if an image will be pushed or not after it is built
+2. All built images are automatically pushed to the default registry
+3. All built images are NOT pushed anywhere by default
+
+Note that this behavior is simply a convenience feature for legacy pipelines. Users can still use a [push step]({{site.baseurl}}/docs/pipelines/steps/push/) in a pipeline and always push an image to a registry regardless of what was chosen in the build step.
+
+## Related articles
+[Creating Pipelines]({{site.baseurl}}/docs/pipelines/pipelines/)
+[Codefresh YAML]({{site.baseurl}}/docs/pipelines/what-is-the-codefresh-yaml/)
+[Git Integration]({{site.baseurl}}/docs/integrations/git-providers/)
diff --git a/_docs/pipelines/pipelines.md b/_docs/pipelines/pipelines.md
new file mode 100644
index 00000000..b7b41c0c
--- /dev/null
+++ b/_docs/pipelines/pipelines.md
@@ -0,0 +1,330 @@
+---
+title: "Creating pipelines"
+description: "How to define pipelines in Codefresh"
+group: pipelines
+redirect_from:
+ - /docs/pipeline
+ - /docs/pipeline/
+ - /docs/pipelines
+ - /docs/pipelines/
+ - /docs/pipelines/introduction/
+ - /docs/pipelines/introduction
+ - /docs/inline-yaml-editing
+ - /docs/inline-yaml-editing/
+toc: true
+---
+
+Before creating a pipeline, make sure you are familiar with the theory behind [Codefresh pipelines]({{site.baseurl}}/docs/pipelines/introduction-to-codefresh-pipelines/).
+
+## Pipeline concepts
+
+The aim of Codefresh pipelines is to have re-usable sequences of steps that can be used for different applications (or micro-services) via the use of Git triggers.
+
+The main concepts are shown below:
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/create/concepts.png"
+url="/images/pipeline/create/concepts.png"
+alt="Pipeline concepts"
+caption="Pipeline concepts"
+max-width="60%"
+%}
+
+* **Projects**: The top-level concept in Codefresh CI/CD. Projects are used to group related CI pipelines. In most cases, a single project will be a single application that itself contains many microservices. You are free to use projects as you see fit. For example, you could create a project for a specific Kubernetes cluster or for a specific team/department.
+
+* **Pipelines**: Each project can have multiple pipelines. Pipelines that belong to a single project can be managed as a unit. You can also create a new pipeline by copying an existing pipeline. Notice that unlike other CI solutions, a pipeline in Codefresh is **NOT** tied to a specific Git repository. You should try to make your pipelines generic enough so that they can be reused for similar applications even when they exist in different Git repositories (a fairly typical setup for microservices).
+
+* **Pipeline steps**: Each pipeline has a definition that defines the [pipeline steps]({{site.baseurl}}/docs/pipelines/steps/) that are executed each time the pipeline is triggered. The definition of a pipeline is described in a special [codefresh.yml]({{site.baseurl}}/docs/pipelines/what-is-the-codefresh-yaml/) file. The `codefresh.yml` file can be fetched from the same repository as that of the source code, from a completely different repository, or even defined in-place in the Codefresh pipeline editor. Again, notice you can have a pipeline that checks out its source code from Git repository A, but actually defines its steps in a `codefresh.yml` file that is fetched from Git repository B.
+
+* **Triggers**: A pipeline can have zero, one, or many [triggers]({{site.baseurl}}/docs/pipelines/triggers/). Triggers are the linking medium between a pipeline and a Git repository. Codefresh supports several kinds of triggers such as Git, Cron, and Docker push triggers.
+Triggers that happen with Git webhooks can come from the same Git repository that contains the source code, **OR** a completely different repository. You can have a pipeline with multiple triggers to be executed when a code change happens to any of them.
+
+With these basic building blocks, you can define many complex workflows. In particular, it is very easy in Codefresh to create a scenario where:
+
+1. A pipeline is launched because a trigger exists for Git repository A
+1. The pipeline reads its `codefresh.yml` file from Git repository B
+1. The pipeline clones source code from Git repository C (and starts packaging/compiling it)
+
+Of course, you can also have a simpler scenario where the trigger, the pipeline steps and the source code of the application are all defined for the same Git repository.
+
+
+## Creating a pipeline
+
+You can create new projects by clicking on *Projects* in the left sidebar and then selecting the *New Project* button on the top right corner. A dialog will appear that will ask you for the project name and optional tags that you can use for [access control]({{site.baseurl}}/docs/enterprise/access-control/).
+
+Once you are inside the project view you can start editing pipelines with a UI environment that works similar to a traditional IDE.
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/create/pipeline-manager.png"
+url="/images/pipeline/create/pipeline-manager.png"
+alt="Pipeline manager"
+caption="Pipeline manager"
+max-width="70%"
+%}
+
+1. On the top left you can see your current project. You can also change it by clicking on the drop-down on the top left corner.
+
+1. On the left side of the screen you can see all pipelines that currently belong to this project. Click on each one to edit it.
+On the bottom part of this panel the *New pipeline* button allows you to create a new pipeline on the same project either from scratch
+or by copying an existing one from the same project or a completely different project.
+
+1. The name of the currently edited pipeline is shown at the top of the window.
+
+1. The main window shows the definition of the current pipeline. The screenshot shows the inline editor but pipelines can also be defined from external files (checked into source control) as explained later.
+
+1. The right part of the window shows extra settings for this pipeline such as [predefined steps]({{site.baseurl}}/docs/codefresh-yaml/steps/), [triggers]({{site.baseurl}}/docs/pipelines/triggers/) and launch variables/parameters.
+
+
+
+
+### Using the Inline pipeline editor
+
+When first creating a pipeline you will see an inline editor that allows you to define the [pipeline yml]({{site.baseurl}}/docs/pipelines/what-is-the-codefresh-yaml/) right there in the Codefresh UI. This is great when you are starting a new project because it offers you really quick feedback. You can edit the yml steps, run a build, edit again, run a build and so on.
+
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/create/inline-editor.png"
+url="/images/pipeline/create/inline-editor.png"
+alt="Inline Pipeline editor"
+caption="Inline Pipeline editor"
+max-width="60%"
+%}
+
+On the top right of the panel you have additional controls:
+
+* The *import* button allows you to bring a `codefresh.yml` from your local workstation into the editor
+* The *comment* button allows you to quickly comment/uncomment the currently selected text. The hotkey `Ctrl-/` also performs the same action
+* The *formatting* button enriches the editor with special symbols for line breaks, spaces and tabs. This allows you to easily fix common formatting errors
+* The *copy* button quickly copies the **whole** pipeline text in your clipboard
+* You can use `Ctrl-]` and `Ctrl-[` to change indentation of the current line (use the Command key instead on MacOsX)
+
+
+Notice that in the editor you can expand/collapse individual yaml blocks using the arrow triangles on the left of each block. The initial pipeline presented in the editor is suggested by Codefresh according to the contents of your Git repository.
+
+> You can also see the suggested Codefresh pipeline for any public git repository by using the [analyze option](https://codefresh-io.github.io/cli/analyzer/){:target="\_blank"} of the Codefresh CLI.
+
+
+## Loading codefresh.yml from Version Control
+
+Working with the inline editor is very convenient in the beginning, but it makes your pipeline definition only exist within the Codefresh UI and therefore goes against the basic principles of [infrastructure as code](https://en.wikipedia.org/wiki/Infrastructure_as_Code){:target="\_blank"}. Once you are happy with how your pipeline works you should commit it to a Git repository (which can be the same one that has the source code of the application or a completely different one).
+
+You can click on the *Inline YAML* header and switch it to *Use YAML from URL* or *Use YAML from Repository*.
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/create/pipeline-from-internal-repo.png"
+url="/images/pipeline/create/pipeline-from-internal-repo.png"
+alt="Pipeline from internal repo"
+caption="Pipeline from internal repo"
+max-width="60%"
+%}
+
+You can then copy and paste a URL to a raw Codefresh YAML file. This will allow you to load a Codefresh YAML from any public URL. Notice that a raw URL is needed in the case of GitHub.
+
+As an example, instead of using `https://github.com/codefresh-contrib/example-voting-app/blob/master/codefresh.yml` you should enter `https://raw.githubusercontent.com/codefresh-contrib/example-voting-app/master/codefresh.yml`
+
+## Pipeline settings
+
+Once you create your pipeline you can also click on the top tab called *Settings* for some extra parameters.
+
+### General
+
+- **Pipeline Name**: The name of your pipeline (useful for working with the [Codefresh CLI](https://codefresh-io.github.io/cli/){:target="\_blank"})
+- **Pipeline ID**: The ID of your pipeline (useful for working with the [Codefresh CLI](https://codefresh-io.github.io/cli/){:target="\_blank"})
+ > When working with the Codefresh CLI, the Pipeline Name and ID are interchangeable.
+- **Pipeline Description**: Free-text description of the pipeline.
+- **Pipeline Tags**: One or more tags used for [access control]({{site.baseurl}}/docs/administration/access-control/)
+- **Public Build Logs**: If enabled, [users without a Codefresh account]({{site.baseurl}}/docs/pipelines/build-status/#public-build-logs) can view the builds of this pipeline.
+- **Template**: Convert this pipeline to a template (see the next section for details on templates).
+- **Badges**: Simple images that show you the last [build status]({{site.baseurl}}/docs/pipelines/build-status/).
+
+### Policies
+
+- **Pipeline Concurrency**: The maximum number of concurrent builds (0-14 or unlimited). Set the concurrency when your pipeline has only one trigger.
+ > A Pipeline Concurrency of **0** freezes execution of the pipeline, switching it to maintenance mode. Use this concurrency setting to modify existing pipelines and freeze execution until you complete the changes.
+- **Trigger Concurrency**: The maximum number of concurrent builds per trigger (1-15 or unlimited). Define the trigger concurrency when your pipeline has multiple triggers.
+- **Branch Concurrency**: The maximum number of concurrent builds per branch (1-15 or unlimited). Define this when your pipeline can build different branches.
+- **Build Termination**: Options that determine when a build from the pipeline should terminate:
+ - Once a build is created terminate previous builds from the same branch
+ - Once a build is created terminate previous builds only from a specific branch (name matches a regular expression)
+ - Once a build is created, terminate all other running builds
+ - Once a build is terminated, terminate all child builds initiated from it
+- **Pending approval volume**: Choose what happens with the [pipeline volume]({{site.baseurl}}/docs/pipelines/introduction-to-codefresh-pipelines/#sharing-the-workspace-between-build-steps) when a pipeline is waiting for [approval]({{site.baseurl}}/docs/pipelines/steps/approval/#keeping-the-shared-volume-after-an-approval)
+ - Keep the volume available
+ - Discard the volume
+ - Honor the option defined globally in your Codefresh account
+- **Pending approval concurrency limit effect**: Determines if a build that is pending approval [counts against]({{site.baseurl}}/docs/pipelines/steps/approval/#define-concurrency-limits) the concurrency limits or not
+ - Builds in pending approval will **not** be counted when determining the concurrency limit for a pipeline
+ - Builds in pending approval will **be** counted when determining the concurrency limit for a pipeline
+ - Honor the option defined globally in your Codefresh account
+
+The **Pipeline and Trigger Concurrency** limits are very important as they allow you to define how many instances of a pipeline can run in parallel when multiple commits or multiple pull requests take place.
+
+> Notice that these limits are *unrelated* to [parallelism within a single pipeline]({{site.baseurl}}/docs/pipelines/advanced-workflows/).
+
+Some common scenarios are:
+
+* a pipeline that uses a shared resource such as a database or queue and you want to limit how many pipelines can access it
+* a pipeline that deploys to a single production environment (in most cases you only want one active pipeline touching production)
+
+The **Build Termination** settings are useful for pipelines where you commit too fast (i.e. faster than the actual runtime of the pipeline).
+All these settings allow you to reduce the number of running builds for a pipeline when too many triggers are launched at the same time.
+You will find them very useful in cases where many developers are performing small commits and builds take a long time to finish (i.e. a build takes 10 minutes to finish and developers perform multiple pushes every 2 minutes).
+
+Some common scenarios are:
+
+* You are interested only in the latest commit of a branch. If pipelines from earlier commits are still running you want to terminate them.
+* You don't want to wait for child pipelines to finish (i.e. when a pipeline calls another pipeline) or when a new build starts for a parent pipeline.
+
+For the volume behavior during approvals, notice that if [you keep the volume available]({{site.baseurl}}/docs/pipelines/steps/approval/#keeping-the-shared-volume-after-an-approval) on the pipeline while it is waiting for approval it will still count as "running" against your pricing tier limit.
+
+### External resources
+
+In a big organization you might have some reusable scripts or other resources (such as Dockerfiles) that you want to use in multiple pipelines. Instead of fetching them manually in freestyle steps you can simply define them as *external resources*. When a pipeline runs, Codefresh will fetch them automatically and once the pipeline starts the files/folders will already be available in the paths that you define.
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/create/external-resources.png"
+url="/images/pipeline/create/external-resources.png"
+alt="Bringing external resources into a pipeline"
+caption="Bringing external resources into a pipeline"
+max-width="80%"
+%}
+
+Currently Codefresh supports the automatic fetching of files or folders from another Git repository. To create an external resource click the *Add Resource* button and choose:
+
+* The Git repository that contains the files/folder you wish to bring in the pipeline workspace
+* The branch from the Git repository that contains the files/folders you wish to bring in the pipeline workspace
+* The source folder in the GIT repo (use relative path)
+* The target folder in the pipeline workspace where the file folder will be copied to (use absolute path)
+
+Once the pipeline starts, all files will be available to all freestyle steps in the paths mentioned in the target folder field.
+You can define multiple external resources in a single pipeline.
+
+### Runtime
+
+- **Runtime Environment**: (by default this is set to SaaS)
+- **Runtime OS**: (by default this is set to Linux)
+- **Resources Size**:
+ - Small (recommended for 1-2 concurrent steps)
+ - Medium (recommended 3-4 steps)
+ - Large (recommended 5-6 steps)
+
+#### Set disk space for pipeline builds
+Set the disk space you need for the pipeline's build volume. Configuring the disk space per pipeline build volume prevents out-of-space scenarios that lead to failed builds. The disk space set for the pipeline is inherited by all the builds run for the pipeline.
+
+Codefresh calculates the available range according to the disk size, and automatically sets the disk space for the build volume to 70% of the total disk space. You can either retain the default allocation or change as needed.
+
+>You can also configure the disk space for a [specific trigger]({{site.baseurl}}/docs/pipelines/triggers/git-triggers/#set-minimum-disk-space-for-build-volume-by-trigger) used by the pipeline or for a specific run, and override what's set for the pipeline.
+
+1. Select the pipeline for which to set the disk space.
+1. Select **Settings**, and then **Runtime**.
+1. Enable **Set minimum required disk space** and either retain the default displayed or change as needed.
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/create/set-build-disk-space.png"
+url="/images/pipeline/create/set-build-disk-space.png"
+alt="Set disk space for pipeline builds"
+caption="Set disk space for pipeline builds"
+max-width="60%"
+%}
+
+
+## Using Pipeline Templates
+
+Codefresh also supports the creation of pipeline "templates", which are blueprints for creating new pipelines.
+To enable the creation of pipelines from templates first visit the global pipeline configuration at [https://g.codefresh.io/account-admin/account-conf/pipeline-settings](https://g.codefresh.io/account-admin/account-conf/pipeline-settings){:target="\_blank"} and toggle the *Enable Pipeline Templates* button.
+
+The easiest way to create a new template is by clicking the "3 dots menu" on the pipeline name:
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/create/create-template-menu.png"
+url="/images/pipeline/create/create-template-menu.png"
+alt="Create template from pipeline"
+caption="Create template from pipeline"
+max-width="30%"
+%}
+
+From the dialog you can select if you want to copy this pipeline as a brand new template, or simply convert the pipeline itself to a template:
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/create/template-dialog.png"
+url="/images/pipeline/create/template-dialog.png"
+alt="The template dialog"
+caption="The template dialog"
+max-width="80%"
+%}
+
+Once the template is created, you can edit it like any other pipeline. Pipeline templates are marked with the `template` tag and also have a special mark in the pipeline menu:
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/create/template-tag.png"
+url="/images/pipeline/create/template-tag.png"
+alt="template identification"
+caption="template identification"
+max-width="90%"
+%}
+
+Now when you create a new pipeline, you can also select which pipeline template will be used as an initial pipeline definition:
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/create/use-template.png"
+url="/images/pipeline/create/use-template.png"
+alt="Using a template"
+caption="Using a template"
+max-width="70%"
+%}
+
+>Notice that templates only take effect during pipeline creation. Changing a template afterwards, has no effect on pipelines that are already created from it.
+
+You can also quickly convert a pipeline to a template, by visiting the pipeline settings and clicking the *template* button under the *General* tab.
+
+
+## Pipelines that do not belong to any project
+
+Although we recommend adding all your pipelines to a project, this is not a hard requirement. You can create pipelines that do not belong to a project from the *Pipelines* section on the left sidebar.
+If you have a Codefresh account created before May 2019 you might already have several pipelines that are like this.
+
+If you change your mind, you can also add detached pipelines (i.e. pipelines that are not part of a project) manually from the 3-dot menu that is found on the right of each pipeline.
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/create/add-pipeline-to-project.png"
+url="/images/pipeline/create/add-pipeline-to-project.png"
+alt="Changing the project of a pipeline"
+caption="Changing the project of a pipeline"
+max-width="90%"
+%}
+
+Pipelines that belong to a project will mention it below their name so it is very easy to understand which pipelines belong to a project and which do not.
+
+
+## Related articles
+[Codefresh YAML]({{site.baseurl}}/docs/pipelines/what-is-the-codefresh-yaml/)
+[Pipeline steps]({{site.baseurl}}/docs/pipelines/steps/)
+[External Docker Registries]({{site.baseurl}}/docs/docker-registries/external-docker-registries/)
+[YAML Examples]({{site.baseurl}}/docs/yaml-examples/examples/)
+
+
+
+
+
diff --git a/_docs/pipelines/post-step-operations.md b/_docs/pipelines/post-step-operations.md
new file mode 100644
index 00000000..1d367309
--- /dev/null
+++ b/_docs/pipelines/post-step-operations.md
@@ -0,0 +1,117 @@
+---
+title: "Post-Step Operations"
+description: "Annotate your builds and run extra steps"
+group: codefresh-yaml
+redirect_from:
+ - /docs/post-step-operations/
+toc: true
+---
+Post-step operations are a set of optional predefined processes that can be configured on any step. These operations will be executed once the step has completed. The post-step operations allow you to annotate your builds, images and pipelines with extra metadata or run other steps.
+
+
+## Result Aware Post-Step Operations
+You may execute post-step operations conditionally, based on the outcome of the step itself.
+
+To execute operations only when the step has completed successfully, use `on_success`:
+
+
+{% highlight yaml %}
+step_name:
+ ...
+ on_success:
+ ...
+{% endhighlight %}
+
+To execute operations only when the step has failed, use `on_fail`:
+
+
+{% highlight yaml %}
+step_name:
+ ...
+ on_fail:
+ ...
+{% endhighlight %}
+
+## Result Agnostic Post-Step Operations
+You may execute post-step operations regardless of the outcome of the step itself.
+
+To execute operations regardless of the result, use `on_finish`:
+
+
+{% highlight yaml %}
+step_name:
+ ...
+ on_finish:
+ ...
+{% endhighlight %}
+
+## Available Post-Step Operations
+
+- [Image Metadata]({{site.baseurl}}/docs/docker-registries/metadata-annotations/)
+- [Custom Annotations]({{site.baseurl}}/docs/codefresh-yaml/annotations/)
+- [Hooks]({{site.baseurl}}/docs/codefresh-yaml/hooks/)
+
+## Example
+
+Marking a Docker image with the results of unit tests:
+
+{% highlight yaml %}
+{% raw %}
+build_step:
+ title: Building My Docker image
+ type: build
+ image_name: my-app-image
+ tag: 1.0.1
+ dockerfile: Dockerfile
+run_tests:
+ title: Running unit tests
+ image: ${{build_step}}
+ commands:
+ - npm install
+ - npm run test
+ on_success: # Execute only once the step succeeded
+ metadata:
+ set:
+ - ${{build_step.imageId}}:
+ - unit_tests: passed
+{% endraw %}
+{% endhighlight %}
+
+## Running other steps
+
+If you want to run another step in the pipeline when another step fails or succeeds you need to use [conditional execution of steps]({{site.baseurl}}/docs/codefresh-yaml/conditional-execution-of-steps/) and the `fail_fast` property. You can also use [step hooks]({{site.baseurl}}/docs/codefresh-yaml/hooks/) for dedicated post step actions.
+
+{% highlight yaml %}
+{% raw %}
+run_tests:
+ title: Running unit tests
+ image: node:11
+ fail_fast: false
+ commands:
+ - npm install
+ - npm run test
+print_error_message:
+ image: alpine:latest
+ title: Marking pipeline status
+ commands:
+ - echo "Unit tests failed"
+ when:
+ condition:
+ all:
+ myCondition: run_tests.result == 'failure'
+{% endraw %}
+{% endhighlight %}
+
+In this example the step `print_error_message` will only run if step `run_tests` has failed.
+
+See also [advanced workflows]({{site.baseurl}}/docs/codefresh-yaml/advanced-workflows/#single-step-dependencies) and [Pipeline/Step hooks]({{site.baseurl}}/docs/codefresh-yaml/hooks/).
+
+## What to read next
+
+* [Conditional Execution of Steps]({{site.baseurl}}/docs/codefresh-yaml/conditional-execution-of-steps/)
+* [Condition Expression Syntax]({{site.baseurl}}/docs/codefresh-yaml/condition-expression-syntax/)
+* [Working Directories]({{site.baseurl}}/docs/codefresh-yaml/working-directories/)
+* [Annotations]({{site.baseurl}}/docs/codefresh-yaml/annotations/)
+* [Pipeline/Step hooks]({{site.baseurl}}/docs/codefresh-yaml/hooks/)
+
+
diff --git a/_docs/pipelines/running-pipelines-locally.md b/_docs/pipelines/running-pipelines-locally.md
new file mode 100644
index 00000000..3c513987
--- /dev/null
+++ b/_docs/pipelines/running-pipelines-locally.md
@@ -0,0 +1,124 @@
+---
+title: "Running pipelines locally"
+description: "How to run Codefresh pipelines on your workstation"
+group: configure-ci-cd-pipeline
+toc: true
+redirect_from:
+ - /docs/troubleshooting/common-issues/debugging-codefresh-builds-locally/
+ - /docs/troubleshooting/common-issues/access-and-debug-the-pipeline-volume-image/
+---
+
+Codefresh can run your pipelines locally. This is very handy when you need to debug a pipeline, or when you want to do quick changes to the [codefresh.yml file]({{site.baseurl}}/docs/pipelines/what-is-the-codefresh-yaml/) with the fastest turn-around time possible.
+
+## Prerequisites
+
+You need to have Docker installed on your local workstation. You can follow the [official instructions](https://docs.docker.com/install/) to install it. Notice that if you use Linux, the Docker version offered by your native
+package manager is not always the latest version.
+
+Once docker is installed, check that it runs correctly with:
+
+```
+docker run hello-world
+```
+
+You should get a short welcome message.
+
+>At the time of writing local builds can only run on Linux and Mac workstations. We are working to remove this limitation and allow developers with Windows machines to also run Codefresh pipelines locally.
+
+Then install the [open-source Codefresh CLI](https://codefresh-io.github.io/cli/installation/) and [setup authentication](https://codefresh-io.github.io/cli/getting-started/) with your Codefresh account.
+
+Once this is done check that your account is locally accessible by running
+
+```
+codefresh get pipelines
+```
+
+You should see a long list with your pipelines on the terminal output.
+
+## Running a pipeline locally
+
+The Codefresh Command Line Interface (CLI) comes with a [run parameter](https://codefresh-io.github.io/cli/pipelines/run-pipeline/) that allows you to trigger pipelines externally (outside the Codefresh UI).
+
+Normally, if you run a pipeline this way the CLI will just trigger it remotely (the pipeline itself will still run in the Codefresh infrastructure).
+
+You can pass however the `--local` option, and this will instruct the CLI to automatically:
+
+1. Download the Codefresh build engine locally to your workstation (which itself is a docker image at [codefresh/engine](https://hub.docker.com/r/codefresh/engine))
+1. Run the build locally using the Codefresh engine on your workstation
+1. Print all build logs to your terminal
+
+Note that the engine has transparent network access to all the other settings in your Codefresh account and therefore will work exactly the same way as if it was run on Codefresh infrastructure (e.g. use the connected Docker registries you have setup in the UI)
+
+Here is a full example:
+
+```
+codefresh run francisco-codefresh/jan_19/my-basic-pipeline --local -b master -t my-trigger
+```
+
+
+
+### Keeping the pipeline volume in the local workstation
+
+If you are familiar with
+[how Codefresh pipelines work]({{site.baseurl}}/docs/pipelines/introduction-to-codefresh-pipelines) you should know about the unique docker volume that is automatically shared between all pipeline steps.
+
+This volume (which also includes the project folder) makes data sharing between all steps very easy (e.g. with things such as test reports or binary dependencies).
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/introduction/codefresh-volume.png"
+url="/images/pipeline/introduction/codefresh-volume.png"
+alt="Codefresh volume"
+caption="All steps share the same volume"
+max-width="80%"
+%}
+
+By default, if you run a Codefresh pipeline locally, this shared volume will automatically be discarded at the end of the build. You can still keep the volume after the build by adding the `--local-volume` parameter in your [run command](https://codefresh-io.github.io/cli/pipelines/run-pipeline/). Here is an example:
+
+```
+codefresh run francisco-codefresh/jan_19/my-basic-pipeline --local --local-volume -b master -t my-trigger
+```
+
+
+Once the build runs you will see in your terminal the path that holds the contents of the volume:
+
+```
+[...build logs...]
+Using /Users/fcocozza/.Codefresh/francisco-codefresh/jan_19/my-basic-pipeline as a local volume.
+[...more build logs]
+```
+
+After the build has finished you can freely explore this folder in your filesystem with any file manager.
+
+```
+$ ls -alh /Users/fcocozza/.Codefresh/francisco-codefresh/jan_19/my-basic-pipeline/
+total 16
+drwxr-xr-x 5 fcocozza staff 160B Jan 14 12:52 .
+drwxr-xr-x 3 fcocozza staff 96B Jan 14 12:52 ..
+-rwxr-xr-x 1 fcocozza staff 388B Jan 14 12:52 cf_export
+-rw-rw-r-- 1 fcocozza staff 189B Jan 14 12:52 env_vars_to_export
+drwxr-xr-x 5 fcocozza staff 160B Jan 14 12:52 jan_19
+```
+This way you can verify whether the pipeline has access to the data you think it should have.
+
+
+### Using a custom codefresh.yml file
+
+The ultimate way to run a pipeline locally is to override completely the `codefresh.yml` file it uses. A pipeline by default will read its steps from the respective file in git.
+
+You can force it to ignore that git version of the pipeline spec and instead load a custom `codefresh.yml` from your local file-system (which might not be even committed yet).
+
+The extra parameter is `--yaml` in that case.
+Here is a complete example
+
+```
+codefresh run francisco-codefresh/jan_19/my-basic-pipeline --local --local-volume --yaml=my-codefresh.yml -b master -t my-trigger
+```
+
+When this pipeline runs locally, it will use whatever steps exist in `my-codefresh.yml` instead of the git version. The shared data volume will also be left intact after the build is finished as explained in the previous section.
+
+## Related articles
+[Codefresh YAML]({{site.baseurl}}/docs/pipelines/what-is-the-codefresh-yaml/)
+[Pipeline steps]({{site.baseurl}}/docs/pipelines/steps/)
+[Introduction to Codefresh pipelines]({{site.baseurl}}/docs/pipelines/introduction-to-codefresh-pipelines)
diff --git a/_docs/pipelines/secrets-store.md b/_docs/pipelines/secrets-store.md
new file mode 100644
index 00000000..f9fc1101
--- /dev/null
+++ b/_docs/pipelines/secrets-store.md
@@ -0,0 +1,96 @@
+---
+title: "Secrets in pipelines"
+description: "Use Kubernetes secrets in Codefresh"
+group: configure-ci-cd-pipeline
+toc: true
+---
+
+Once you have [connected Codefresh to your secrets storage]({{site.baseurl}}/docs/integrations/secret-storage/), you can use them in any pipeline or UI screen.
+
+> Note: This feature is for Enterprise accounts only.
+
+## Using secrets in pipelines
+
+The syntax for using the secret is {% raw %}`${{secrets.NAME_IN_CODEFRESH.KEY}}`{% endraw %}.
+
+> If you did not include the resource-name as a part of your secret store context creation, the syntax for using your secret differs slightly:
+ {% raw %}${{secrets.NAME_IN_CODEFRESH.RESOURCE-NAME@KEY}}{% endraw %}
+ The previous KEY portion is now made of two parts separated using @, where the left side is the name of the resource in the namespace, and the right side the key in that resource.
+
+To use the secret in your pipeline, you have two options:
+
+* Define it as a pipeline variable:
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/secrets/secrets-pipeline-var.png"
+url="/images/pipeline/secrets/secrets-pipeline-var.png"
+alt="Secrets Pipeline Variable"
+caption="Secrets stored in Pipeline Variable"
+max-width="80%"
+%}
+
+`codefresh.yaml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+steps:
+ step:
+ type: freestyle
+ arguments:
+ image: alpine
+ commands:
+ - echo $SECRET
+{% endraw %}
+{% endhighlight %}
+
+* Use the secret directly in your YAML
+
+`codefresh.yaml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+steps:
+ step:
+ type: freestyle
+ arguments:
+ image: alpine
+ environment:
+ - SECRET=${{secrets.test.key1}}
+ commands:
+ - echo $SECRET
+{% endraw %}
+{% endhighlight %}
+
+
+## Using secrets in the Codefresh UI
+
+You can also use secrets in the GUI screens that support them. Currently you can use secrets in:
+
+* Values in [shared configuration]({{site.baseurl}}/docs/pipelines/shared-configuration/)
+* Integration with [cloud storage]({{site.baseurl}}/docs/testing/test-reports/#connecting-your-storage-account)
+
+Where secret integration is supported, click on the lock icon and enable the toggle button. You will get a list of your connected secrets:
+
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/shared-configuration/shared-conf-secret-integration.png"
+url="/images/pipeline/shared-configuration/shared-conf-secret-integration.png"
+alt="Using external secrets in shared configuration values"
+caption="Using external secrets in shared configuration values"
+max-width="50%"
+%}
+
+If you have already specified the resource field during secret definition, just enter the name of the secret directly in the text field, i.e. `my-secret-key`.
+If you didn't include a resource name during secret creation, then enter the full name in the field, like `my-secret-resource@my-secret-key`.
+
+
+## Related articles
+[Shared Configuration]({{site.baseurl}}/docs/pipelines/shared-configuration/)
+[Git triggers]({{site.baseurl}}/docs/pipelines/triggers/git-triggers/)
+[Running pipelines locally]({{site.baseurl}}/docs/pipelines/running-pipelines-locally/)
+[Debugging Pipelines]({{site.baseurl}}/docs/pipelines/debugging-pipelines/)
+
diff --git a/_docs/pipelines/service-containers.md b/_docs/pipelines/service-containers.md
new file mode 100644
index 00000000..13adfc2e
--- /dev/null
+++ b/_docs/pipelines/service-containers.md
@@ -0,0 +1,571 @@
+---
+title: "Service Containers"
+description: "How to use sidecar services in your pipelines"
+group: codefresh-yaml
+toc: true
+---
+
+Sometimes you wish to run sidecar containers in a pipeline that offer additional services for your builds. The most common scenario is launching services such as databases in order to accommodate [integration tests]({{site.baseurl}}/docs/testing/integration-tests/). Or you might wish to launch the application itself in order to run integration tests **against** it as part of the pipeline.
+
+>Note that while [composition steps]({{site.baseurl}}/docs/codefresh-yaml/steps/composition/) are still supported, the recommended way to run integrations tests going forward is with service containers. The underlying implementation is shared so check the composition documentation page for more available options
+and properties.
+
+Codefresh includes a handy mechanism (based on Docker compose) that can help you run sidecar containers along your main pipeline. Here is a very simple example.
+
+ `codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: "1.0"
+services:
+ name: my_database
+ composition:
+ my-redis-db-host:
+ image: redis:latest
+ ports:
+ - 6379
+steps:
+ my_integration_tests:
+ image: my-app-image
+ title: Running integration tests
+ commands:
+ - npm run test
+ services:
+ - my_database
+{% endraw %}
+{% endhighlight %}
+
+This pipeline will run integration tests during the freestyle step called `my_integration_tests` and at that point a Redis instance will be available at hostname `my-redis-db-host` and port 6379. Note how in this example, the service container is placed at the root of the pipeline (as opposed to inside a specific step). This ensures that the Redis instance is running for [the duration of the pipeline]({{site.baseurl}}/docs/codefresh-yaml/service-containers/#running-services-for-the-duration-of-the-pipeline).
+
+>Service Containers are based on Docker Compose. This document does not include the complete list of available options. Please refer to Docker Compose versions [2](https://docs.docker.com/compose/compose-file/compose-file-v2/) and [3](https://docs.docker.com/compose/compose-file/), but not point releases such as 2.1.
+
+
+## Viewing Service containers
+
+The service containers have their own output tab in Codefresh UI
+
+{% include image.html
+ lightbox="true"
+ file="/images/codefresh-yaml/services/services-tab.png"
+ url="/images/codefresh-yaml/services/services-tab.png"
+ alt="Output tab from extra services"
+ caption="Output tab from extra services"
+ max-width="100%"
+ %}
+
+This way it is very easy to differentiate between the output logs of the step itself and its supporting container services.
+
+
+## Launching multiple sidecar containers
+
+Like Docker compose it is possible to launch multiple services this way. For example, let's say that a Java application needs both Redis and MongoDB during integration tests. Here is the respective pipeline:
+
+ `codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: "1.0"
+services:
+ name: my_extra_services
+ composition:
+ my-redis-db-host:
+ image: redis:latest
+ ports:
+ - 6379
+ my-mongo-db-host:
+ image: mongo:latest
+ ports:
+ - 27017
+steps:
+ main_clone:
+ type: "git-clone"
+ description: "Cloning main repository..."
+ repo: "kostis-codefresh/my-java-app"
+ git: github
+ revision: "master"
+ my_tests:
+ image: maven:3.5.2-jdk-8-alpine
+ title: "Running Integration tests"
+ commands:
+ - 'mvn integration-test'
+{% endraw %}
+{% endhighlight %}
+
+The Redis instance will be available through the networks at `my-redis-db-host:6379` while the MongoDB instance will run at `my-mongo-db-host:27017`.
+
+Instead of mentioning all your services directly in the YAML file you might also reuse an existing composition you have already defined in Codefresh by mentioning it by name.
+
+ `codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: "1.0"
+services:
+ name: my_extra_services
+ composition: redis_and_mongo
+steps:
+ main_clone:
+ type: "git-clone"
+ description: "Cloning main repository..."
+ repo: "kostis-codefresh/my-java-app"
+ revision: "master"
+ git: github
+ my_tests:
+ image: maven:3.5.2-jdk-8-alpine
+ title: "Unit tests"
+ commands:
+ - 'mvn integration-test'
+{% endraw %}
+{% endhighlight %}
+
+This pipeline mentions an existing composition called `redis_and_mongo`:
+
+{% include
+image.html
+lightbox="true"
+file="/images/codefresh-yaml/existing-composition.png"
+url="/images/codefresh-yaml/existing-composition.png"
+alt="Using an existing composition"
+caption="Using an existing composition"
+max-width="70%"
+%}
+
+This makes it very easy to reuse compositions that you have already defined for other reasons [in the Codefresh UI](https://codefresh.io/docs/docs/testing/create-composition/).
+
+
+## Running services for the duration of the pipeline
+
+Notice that unlike compositions, the services defined in the root of the pipeline yaml are present for the **whole** pipeline duration. They are available in all pipeline steps. This can be seen in the following example:
+
+ `codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: "1.0"
+services:
+ name: my_database
+ composition:
+ my-redis-db-host:
+ image: redis:latest
+ ports:
+ - 6379
+steps:
+ my_first_step:
+ image: alpine:latest
+ title: Storing Redis data
+ commands:
+ - apk --update add redis
+ - redis-cli -u redis://my-redis-db-host:6379 -n 0 LPUSH mylist "hello world"
+ - echo finished
+ services:
+ - my_database
+ my_second_step:
+ image: alpine:latest
+ commands:
+ - echo "Another step in the middle of the pipeline"
+ my_third_step:
+ image: alpine:latest
+ title: Reading Redis data
+ commands:
+ - apk --update add redis
+ - redis-cli -u redis://my-redis-db-host:6379 -n 0 LPOP mylist
+ services:
+ - my_database
+{% endraw %}
+{% endhighlight %}
+
+This pipeline:
+
+1. Starts a single Redis instance
+1. Saves some data in the first step on the pipeline
+1. Runs an unrelated step (that itself is not using the redis instance)
+1. Reads the saved data in the third step
+
+If you run this pipeline you will see that the data read in the third step of the pipeline is the same as the data saved in the first step.
+
+{% include
+image.html
+lightbox="true"
+file="/images/codefresh-yaml/redis-example.png"
+url="/images/codefresh-yaml/redis-example.png"
+alt="Redis read/write example"
+caption="Redis read/write example"
+max-width="90%"
+%}
+
+This means that you can easily use the extra services in different steps of a single pipeline, without relaunching them each time (which is what happens with composition steps).
+
+## Using sidecar services in specific steps
+
+It is important to understand that any services you launch in a pipeline, are sharing its memory. If for example your pipeline has 4GBs of memory and your service (e.g. a MongoDB instance) consumes 1GB, then you only have 3GB available for the actual pipeline.
+
+It is therefore possible to assign a service to a specific step if you don't wish to have it running for the duration of the whole pipeline:
+
+ `codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: "1.0"
+steps:
+ main_clone:
+ type: "git-clone"
+ description: "Cloning main repository..."
+ repo: "kostis-codefresh/my-java-example"
+ revision: "master"
+ git: github
+ build_image:
+ title: "Building Docker Image"
+ type: "build"
+ image_name: "my-java-app"
+ dockerfile: "Dockerfile"
+ tag: latest
+ my_unit_tests:
+ image: '${{build_image}}'
+ title: "Unit tests"
+ commands:
+ - 'echo start testing my app'
+ services:
+ composition:
+ my_redis_service:
+ image: 'redis:latest'
+ ports:
+ - 6379
+ my_integration_tests:
+ image: '${{build_image}}'
+ title: "Integration tests"
+ commands:
+ - 'echo start testing my app'
+ services:
+ composition:
+ my_mongo_Service:
+ image: 'mongo:latest'
+ ports:
+ - 27017
+{% endraw %}
+{% endhighlight %}
+
+In this pipeline, the Redis instance is only launched during the Unit test step, while the MongoDB service is active only during integration tests.
+
+You can also use a `docker-compose.yml` file that you might have in your git repository.
+
+ `codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: "1.0"
+steps:
+ main_clone:
+ type: "git-clone"
+ description: "Cloning main repository..."
+ repo: "kostis-codefresh/my-java-example"
+ revision: "master"
+ git: github
+ build_image:
+ title: "Building Docker Image"
+ type: "build"
+ image_name: "my-java-app"
+ dockerfile: "Dockerfile"
+ tag: latest
+ my_unit_tests:
+ image: '${{build_image}}'
+ title: "Unit tests"
+ commands:
+ - 'echo start testing my app'
+ services:
+ composition:
+ my_redis_service:
+ image: 'redis:latest'
+ ports:
+ - 6379
+ my_integration_tests:
+ image: '${{build_image}}'
+ title: "Integration tests"
+ commands:
+ - 'echo start testing my app'
+ services:
+ composition: 'docker-compose.yml'
+{% endraw %}
+{% endhighlight %}
+
+Note that in this case the `docker-compose.yml` file must mention [specific images](https://docs.docker.com/compose/compose-file/#image) (and not use [build properties](https://docs.docker.com/compose/compose-file/#build)).
+
+
+## Launching a custom service
+
+So far all the examples of extra services used predefined docker images (i.e. Redis and Mongo). You are free however to launch any custom docker image you have already created or even the main application of the pipeline.
+
+This happens by mentioning a build step as a service image. Here is an example:
+
+ `codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: "1.0"
+steps:
+ main_clone:
+ type: "git-clone"
+ description: "Cloning main repository..."
+ repo: "kostis-codefresh/my-back-end"
+ revision: "master"
+ git: github
+ build_image:
+ title: "Building Docker Image"
+ type: "build"
+ image_name: "my-backend-app"
+ tag: latest
+ dockerfile: "Dockerfile"
+ run_integration_tests:
+ title: Test backend
+ image: 'my-front-end:latest'
+ commands:
+ - 'curl my_backend_app:8080'
+ - 'echo Backend is up. Starting tests'
+ - npm run integration-test
+ services:
+ composition:
+ my_backend_app:
+ image: '${{build_image}}'
+ ports:
+ - 8080
+{% endraw %}
+{% endhighlight %}
+
+Here a Dockerfile for a backend application is built on the spot and then is launched as sidecar container in the next step (with a hostname of `my_backend_app`). Notice that the `image` property in the sidecar service actually refers to a [Codefresh variable]({{site.baseurl}}/docs/codefresh-yaml/variables/) that holds the name of the build step.
+
+We then run a `curl` command against the sidecar container to verify the correct health of the application. This is a great way to run integration tests against multiple micro-services.
+
+
+## Checking readiness of a service
+
+When you launch multiple services in your pipelines, you don't know exactly when they will start. Maybe they will be ready when you expect them, but they might also take too long to start. For example if you use a MySQL database in your integration tests, your integration tests need to know that the database is actually up before trying to use it.
+
+This is the same issue that is present in [vanilla Docker compose](https://docs.docker.com/compose/startup-order/). You can use solutions such as [wait-for-it](https://github.com/vishnubob/wait-for-it) to overcome this limitation, but Codefresh offers a better way in the form of *service readiness*.
+
+With a readiness block you can guarantee that a sidecar service will be actually up before the pipeline will continue. Here is an example:
+
+
+ `codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: "1.0"
+steps:
+ main_clone:
+ type: "git-clone"
+ description: "Cloning main repository..."
+ repo: "kostis-codefresh/my-back-end"
+ revision: "master"
+ git: github
+ build_image:
+ title: "Building Docker Image"
+ type: "build"
+ image_name: "my-backend-app"
+ tag: latest
+ dockerfile: "Dockerfile"
+ run_integration_tests:
+ title: Test backend
+ image: 'my-front-end:latest'
+ commands:
+ # Backend is certainly up at this point.
+ - npm run integration-test
+ services:
+ composition:
+ my_backend_app:
+ image: '${{build_image}}'
+ ports:
+ - 8080
+ readiness:
+ image: 'byrnedo/alpine-curl'
+ timeoutSeconds: 30
+ commands:
+ - "curl my_backend_app:8080"
+{% endraw %}
+{% endhighlight %}
+
+
+This is an improvement over the previous example because the healthcheck of the back-end is managed by Codefresh. The added `readiness` block makes sure that the back-end service is actually up before the integration tests start by using a `curl` command to check that `my_backend_app:8080` is up and running. Codefresh will run the commands defined in the `readiness` in a loop until they succeed. You are free to use any of your favorite commands there (ping, curl, nc etc) that check one or more services. We also define a timeout for the healthcheck. The `readiness` block supports the following options:
+
+* `periodSeconds`: How often (in seconds) to perform the probe. Defaults to 10 seconds. Minimum value is 1.
+* `timeoutSeconds`: Number of seconds after which the probe times out. Defaults to 10 seconds. Minimum value is 1.
+* `successThreshold`: Minimum consecutive successes for the probe to be considered successful after having failed. Defaults to 1. Must be 1 for readiness. Minimum value is 1.
+* `failureThreshold`: Number of failed probes before giving up, after which the service is considered unavailable. Defaults to 3. Minimum value is 1.
+
+If you know already how [Kubernetes readiness probes](https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-probes/) work, then these settings will be very familiar to you.
+
+Here is another example where we use the `pg_isready` command to make sure that a PostgreSQL database is ready to accept connections
+before we run the integration tests.
+
+ `codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: "1.0"
+steps:
+ main_clone:
+ type: "git-clone"
+ description: "Cloning main repository..."
+ repo: "kostis-codefresh/my-rails-app"
+ revision: "master"
+ git: github
+ build_image:
+ title: "Building Docker Image"
+ type: "build"
+ image_name: "my-rails-app"
+ tag: "latest"
+ dockerfile: "Dockerfile"
+ run_integration_tests:
+ image: '${{build_image}}'
+ commands:
+ # PostgreSQL is certainly up at this point
+ - rails db:migrate
+ - rails test
+ services:
+ composition:
+ my_postgresql_db:
+ image: postgres:latest
+ ports:
+ - 5432
+ readiness:
+ timeoutSeconds: 30
+ periodSeconds: 15
+ image: 'postgres:latest'
+ commands:
+ - "pg_isready -h my_postgresql_db"
+{% endraw %}
+{% endhighlight %}
+
+In summary, `readiness` makes sure that your services are actually up before you use them in a Codefresh pipeline.
+
+## Preloading data to databases
+
+A very common scenario when using databases in integration tests is the need to preload some test data in the database.
+While you could do that in a normal pipeline step, sidecar services have a special `setup` block for this purpose. This way, not only can you make sure that the database is up (using the `readiness` property explained in the previous section), but also that it is preloaded with the correct data.
+
+To use this capability add a `setup` block in your pipeline service container:
+
+ `codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: "1.0"
+steps:
+ main_clone:
+ type: "git-clone"
+ description: "Cloning main repository..."
+ repo: "kostis-codefresh/my-rails-app"
+ revision: "master"
+ git: github
+ build_image:
+ title: "Building Docker Image"
+ type: "build"
+ image_name: "my-rails-app"
+ tag: "latest"
+ dockerfile: "Dockerfile"
+ run_integration_tests:
+ image: '${{build_image}}'
+ commands:
+ # PostgreSQL is certainly up at this point and has the correct data
+ - rails test
+ services:
+ composition:
+ my_postgresql_db:
+ image: postgres:latest
+ ports:
+ - 5432
+ readiness:
+ timeoutSeconds: 30
+ periodSeconds: 15
+ image: 'postgres:latest'
+ commands:
+ - "pg_isready -h my_postgresql_db"
+ setup:
+ image: 'postgres:latest'
+ commands:
+ - "wget my-staging-server.exaple.com/testdata/preload.sql"
+ - "psql -h my_postgresql_db < testdata/preload.sql"
+{% endraw %}
+{% endhighlight %}
+
+Notice that in this case the sequence of events is the following:
+
+1. Codefresh will launch the container image(s) mentioned in the composition block
+1. The `readiness` block will run until the service image is ready to accept connections
+1. The `setup` block will run and preload data or setup any custom commands you have placed in the property
+1. The actual pipeline step will now run with the service container attached in the same network.
+
+## Accessing containers via localhost
+
+Ideally, your application should be able to access other services by other DNS names that are fully configurable (this is a very good practice for [integration tests]({{site.baseurl}}/docs/testing/integration-tests/) as well).
+
+Sometimes, however, and especially in legacy applications, your application might be hardcoded to look at other services at `localhost`.
+In that case, you can use the attribute `shared_host_network: true` on the services definition. Now all linked containers can access each other's services via localhost.
+When `composition: ./docker-compose.yml` is used, this parameter is supported only in on-premises and hybrid environments. In cloud environments, for security reasons, this parameter is ignored.
+
+Here is an example:
+
+ `codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+steps:
+ my_first_step:
+ image: goodsmileduck/redis-cli
+ title: Storing Redis data
+ commands:
+ - apk add curl
+ - 'redis-cli -u redis://localhost:6379 -n 0 LPUSH mylist "hello world"'
+ - 'curl http://localhost:80'
+ - echo finished
+ services:
+ shared_host_network: true
+ composition:
+ my_redis_service:
+ image: 'redis:latest'
+ my_nginx:
+ image: nginx
+{% endraw %}
+{% endhighlight %}
+
+You can also do the same thing with top level services:
+
+ `codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+services:
+ name: my_database
+ shared_host_network: true
+ composition:
+ my_redis_service:
+ image: 'redis:latest'
+ my_nginx:
+ image: nginx
+steps:
+ my_first_step:
+ image: goodsmileduck/redis-cli
+ title: Storing Redis data
+ commands:
+ - apk add curl
+ - 'redis-cli -u redis://localhost:6379 -n 0 LPUSH mylist "hello world"'
+ - 'curl http://localhost:80'
+ - echo finished
+ services:
+ - my_database
+{% endraw %}
+{% endhighlight %}
+
+Note: we do recommend you only use this option as a last resort. You should not hardcode "localhost" as a requirement in your services as
+it adds extra constraints with integration tests (and especially with dynamic test environments).
+
+
+## Limitations
+
+Service containers are not compatible with [custom pipeline steps]({{site.baseurl}}/docs/codefresh-yaml/steps/#limitations-of-custom-plugins).
+
+
+
+
+## What to read next
+
+* [Unit tests]({{site.baseurl}}/docs/testing/unit-tests/)
+* [Integration tests]({{site.baseurl}}/docs/testing/integration-tests/)
+* [Integration test with database]({{site.baseurl}}/docs/yaml-examples/examples/integration-tests-with-database/)
+* [Creating Compositions]({{site.baseurl}}/docs/on-demand-test-environment/create-composition/)
+
+
+
+
+
+
+
+
diff --git a/_docs/pipelines/shared-configuration.md b/_docs/pipelines/shared-configuration.md
new file mode 100644
index 00000000..a738b7d5
--- /dev/null
+++ b/_docs/pipelines/shared-configuration.md
@@ -0,0 +1,264 @@
+---
+title: "Shared configuration for piplines"
+description: "How to keep your pipelines DRY"
+group: configure-ci-cd-pipeline
+toc: true
+---
+
+After creating several pipelines in Codefresh, you will start to notice several common values between them. Common examples are access tokens, environment URLs, configuration properties etc.
+
+Codefresh allows you to create those shared values in a central place and then reuse them in your pipelines
+avoiding the use of copy-paste.
+
+You can share:
+
+* Environment parameters (easy)
+* Helm values (easy)
+* Any kind of YAML data (advanced)
+
+
+## Creating shared configuration
+
+From the left sidebar click *Account settings* to enter your global settings. Then choose *Shared Configuration* from the left menu.
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/shared-configuration/shared-configuration.png"
+url="/images/pipeline/shared-configuration/shared-configuration.png"
+alt="Creating shared configuration snippets"
+caption="Creating shared configuration snippets"
+max-width="50%"
+%}
+
+You can create four types of shared configuration:
+
+* **Shared Configuration**: for environment variables
+* **Shared Secret**: for encrypted environment variables of sensitive data (access tokens, etc.)
+* **YAML**: for Helm values or any other generic information
+* **Secret YAML**: for above, but encrypts the contents
+
+>RBAC is supported for all types of shared configurations.
+
+You can create as many shared snippets as you want (with unique names).
+
+### Using external secrets as values
+
+Note that the default "shared secrets" and "secret yaml" entities use the built-in secret storage of Codefresh. You can also
+use any [external secrets that you have defined]({{site.baseurl}}/docs/integrations/secret-storage/) (such as Kubernetes secrets), by using the normal entities and then clicking on the lock icon that appears.
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/shared-configuration/shared-conf-secret-integration.png"
+url="/images/pipeline/shared-configuration/shared-conf-secret-integration.png"
+alt="Using external secrets in shared configuration values"
+caption="Using external secrets in shared configuration values"
+max-width="50%"
+%}
+
+If you have already specified the resource field during the secret definition, just enter the name of the secret directly in the text field, i.e. `my-secret-key`.
+If you did not include a resource name during secret creation, enter the full name in the field, i.e. `my-secret-resource@my-secret-key`.
+
+### Level of access
+
+For each set of values you can toggle the level of access by [non-admin users]({{site.baseurl}}/docs/administration/access-control/#users-and-administrators). If it is off, users will **not** be able to use the [CLI](https://codefresh-io.github.io/cli/) or [API]({{site.baseurl}}/docs/integrations/codefresh-api/)
+to access these [values](https://codefresh-io.github.io/cli/contexts/). If it is on, all users from all your Codefresh teams will be able to access this set of values
+with CLI commands or API calls.
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/shared-configuration/shared-config-access.png"
+url="/images/pipeline/shared-configuration/shared-config-access.png"
+alt="Allow access to non-admin users"
+caption="Allow access to non-admin users"
+max-width="60%"
+%}
+
+We recommend that you disable access for all values of type *shared secret* and *secret YAML* unless your organization has different needs.
+
+
+## Using shared environment variables
+
+Each pipeline has a set of environment variables that can be defined in the *Workflow* screen.
+To import a shared configuration open the pipeline editor, and from the tabs on the right side select *VARIABLES*. Then click the gear icon to *Open Advanced Options*:
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/shared-configuration/environment-variables.png"
+url="/images/pipeline/shared-configuration/environment-variables.png"
+alt="Pipeline environment variables"
+caption="Pipeline environment variables"
+max-width="50%"
+%}
+
+To use your shared configuration, click the *Import from shared configuration* button and select the snippet from the list:
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/shared-configuration/import-variables.png"
+url="/images/pipeline/shared-configuration/import-variables.png"
+alt="Importing shared configuration"
+caption="Importing shared configuration"
+max-width="50%"
+%}
+
+Once you click *Add* the values from the shared configuration will be appended to the ones
+you have in your pipelines. In case of similar values the shared configuration will follow the [precedence rules]({{site.baseurl}}/docs/pipelines/variables/#user-provided-variables).
+
+
+## Using shared Helm values
+
+To use a shared YAML snippet for Helm values you can install a new Helm chart either from:
+
+* The [Helm chart list]({{site.baseurl}}/docs/new-helm/add-helm-repository/#install-chart-from-your-helm-repository)
+* The [Helm environment board]({{site.baseurl}}/docs/new-helm/helm-environment-promotion/#moving-releases-between-environments).
+
+In both cases, when you see the Helm installation dialog you can import any of your YAML snippets
+to override the default chart values.
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/shared-configuration/helm-import.png"
+url="/images/pipeline/shared-configuration/helm-import.png"
+alt="Importing Helm values"
+caption="Importing Helm values"
+max-width="50%"
+%}
+
+From the same dialog you can also create a brand-new shared configuration snippet of type YAML.
+Not only will it be used for this Helm chart, but it will also be added in your global shared configuration.
+
+## Using values from the Shared Configuration in your Helm step
+
+Additionally, you can define shared variables in your account settings and reuse those across your Helm steps, and specifically, in your [custom Helm values]({{site.baseurl}}/docs/new-helm/using-helm-in-codefresh-pipeline/#helm-values).
+
+Under *Account Setting* > *Shared Configuration*, add the variable to your shared configuration.
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/shared-configuration/helm-shared-variables.png"
+url="/images/pipeline/shared-configuration/helm-version-shared.png"
+alt="Adding shared configuration variables"
+caption="Adding shared configuration variables"
+max-width="50%"
+%}
+
+Go to the workflow of the Codefresh pipeline to which you want to add the variable. Then select *variables* from the right sidebar. *Open advanced configuration* and select *Import from shared configuration*.
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/shared-configuration/environment-variables.png"
+url="/images/pipeline/shared-configuration/environment-variables.png"
+alt="Pipeline environment variables"
+caption="Pipeline environment variables"
+max-width="50%"
+%}
+
+This will allow you to add shared variables.
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/shared-configuration/shared-helm-variables.png"
+url="/images/pipeline/shared-configuration/shared-helm-variables.png"
+alt="Shared helm variable"
+caption="Shared helm variable"
+max-width="50%"
+%}
+
+Add the shared variables to your Helm step:
+
+{% highlight shell %}
+{% raw %}
+deploy:
+ type: "helm"
+ working_directory: "./react-article-display"
+ stage: "deploy"
+ arguments:
+ action: "install"
+ chart_name: "charts/example-chart"
+ release_name: "test-chart"
+ helm_version: "${{HELM_VERSION}}"
+ kube_context: "anais-cluster@codefresh-sa"
+ custom_values:
+ - 'pullPolicy=${{PULL_POLICY}}'
+{% endraw %}
+{% endhighlight %}
+
+The shared variables can now be used across your pipelines.
+
+## Sharing any kind of YAML data in pipelines
+
+All the snippets from shared configuration are also available as context in the [Codefresh CLI](https://codefresh-io.github.io/cli/contexts/)
+
+This means that you can manipulate them programmatically and read their values in the pipeline in any way you see fit.
+
+If for example you have a shared configuration named `my-global-config` you can easily read its contents programmatically using the CLI:
+
+{% highlight shell %}
+$codefresh get context my-global-config --output=yaml
+
+apiVersion: v1
+kind: context
+metadata:
+ default: false
+ system: false
+ name: my-global-config
+type: config
+spec:
+ type: config
+ data:
+ foo: bar
+{% endhighlight %}
+
+### Example - custom value manipulation
+
+Let's say that you have a YAML segment with the following contents:
+
+{% highlight yaml %}
+favorite:
+ drink: coffee
+ food: pizza
+{% endhighlight %}
+
+Here is a pipeline step that is reading the yaml snippet and extracts a value
+
+ `YAML`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+steps:
+ MyFavoriteFoodStep:
+ title: Favorite food
+ image: codefresh/cli
+ commands:
+ - echo I love eating $(codefresh get context my-food-values --output=json | jq -r '.spec.data.favorite.food')
+{% endraw %}
+{% endhighlight %}
+
+Once the pipeline runs, you will see in the logs:
+
+```
+I love eating pizza
+```
+
+## Manipulating shared configuration programmatically
+
+You can also create/update/delete shared configuration via the [Codefresh CLI](https://codefresh-io.github.io/cli/) or [API]({{site.baseurl}}/docs/integrations/codefresh-api/).
+
+See the [context section](https://codefresh-io.github.io/cli/contexts/create-context/) in the CLI documentation.
+
+
+
+## Related articles
+[Variables]({{site.baseurl}}/docs/pipelines/variables/)
+[Codefresh YAML]({{site.baseurl}}/docs/pipelines/what-is-the-codefresh-yaml/)
+[Pipeline steps]({{site.baseurl}}/docs/pipelines/steps/)
+
diff --git a/_docs/pipelines/stages.md b/_docs/pipelines/stages.md
new file mode 100644
index 00000000..e6e0b103
--- /dev/null
+++ b/_docs/pipelines/stages.md
@@ -0,0 +1,195 @@
+---
+title: "Grouping steps in pipelines"
+description: "Group steps into stages for better visualization"
+group: pipelines
+toc: true
+---
+
+With Codefresh you can [create really complex pipelines]({{site.baseurl}}/docs/pipelines/pipelines/) with any number of steps.
+
+To better visualize the pipeline, you can group several steps into a single _stage_. The _stage_ with the group of steps are displayed as a separate column in the [pipeline view]({{site.baseurl}}/docs/pipelines/monitoring-pipelines/).
+
+{% include
+image.html
+lightbox="true"
+file="/images/codefresh-yaml/stages/complex-pipeline.png"
+url="/images/codefresh-yaml/stages/complex-pipeline.png"
+alt="Complex pipeline"
+caption="Complex pipeline"
+max-width="70%"
+%}
+
+In this example, the pipeline has four stages.
+
+## Assigning steps to a stage
+
+Stages are completely optional, and for really small pipelines they are not needed at all.
+By default, all pipeline steps are shown one after the other.
+
+{% include
+image.html
+lightbox="true"
+file="/images/codefresh-yaml/stages/linear-view.png"
+url="/images/codefresh-yaml/stages/linear-view.png"
+alt="Default pipeline view"
+caption="Default pipeline view"
+max-width="50%"
+%}
+
+This view works ok for small pipelines, but for a big number of steps it is better to group them into pipeline *stages* like shown below:
+
+{% include
+image.html
+lightbox="true"
+file="/images/codefresh-yaml/stages/example.png"
+url="/images/codefresh-yaml/stages/example.png"
+alt="Different pipeline stages"
+caption="Different pipeline stages"
+max-width="80%"
+%}
+
+The number of stages (i.e., columns) and their titles are completely configurable.
+To enable this view, you need to make two modifications at the `codefresh.yml` file:
+
+Here is the skeleton:
+
+ `codefresh.yml`
+{% highlight yaml %}
+version: '1.0'
+stages:
+ - [stage-name-1]
+ - [stage-name-2]
+
+steps:
+ step-name:
+ [step-contents]
+ stage: [name-of-stage]
+ another-step:
+ [step-contents]
+ stage: [name-of-stage]
+ the-very-last-step:
+ [step-contents]
+ stage: [name-of-stage]
+{% endhighlight %}
+
+As you can see the modifications needed are:
+
+1. To list all the stage names at the root of the pipeline file
+1. To use the `stage` property on each step to assign it to a stage
+
+>This updated pipeline view affects only the visualization of the pipeline. It does not affect the order of step execution. Steps are still executed in the same order as listed in the `codefresh.yml` file.
 If you wish to use parallel execution and advanced workflows see the [parallel steps]({{site.baseurl}}/docs/pipelines/advanced-workflows/) page.
+
+
+## Example pipeline with several stages
+
+Here is a more concrete example that you can use as a starting point:
+
+ `codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+stages:
+ - prepare
+ - test
+ - build
+ - scan
+ - integration
+ - deploy
+steps:
+ step1:
+ stage: 'prepare'
+ image: node
+ commands:
+ - 'echo "Hello Step 1!"'
+ step2:
+ image: node
+ stage: 'prepare'
+ commands:
+ - 'echo "Hello Step 2!"'
+ step3:
+ image: node
+ stage: 'test'
+ commands:
+ - 'echo "Hello Step 3!"'
+ step4:
+ image: node
+ stage: 'build'
+ commands:
+ - 'echo "Hello Step 4!"'
+ step5:
+ image: node
+ stage: 'scan'
+ commands:
+ - 'echo "Hello Step 5!"'
+ step6:
+ image: node
+ stage: 'scan'
+ commands:
+ - 'echo "Hello Step 6!"'
+ step7:
+ image: node
+ stage: 'integration'
+ commands:
+ - 'echo "Hello Step 7!"'
+ step8:
+ image: node
+ stage: 'deploy'
+ commands:
+ - 'echo "Hello Step 8!"'
+ step9:
+ image: node
+ stage: 'deploy'
+ commands:
+ - 'echo "Hello Step 9!"'
+{% endraw %}
+{% endhighlight %}
+
+If you run the pipeline you will see this view
+
+{% include
+image.html
+lightbox="true"
+file="/images/codefresh-yaml/stages/complex.png"
+url="/images/codefresh-yaml/stages/complex.png"
+alt="Complex Pipeline view"
+caption="Complex Pipeline view"
+max-width="80%"
+%}
+
+Remember that the assignment of a step to a stage is happening only for graphical grouping purposes. It does
+not affect the way your steps run. All steps will still run in the same order mentioned in the `codefresh.yml` file.
+
+Also notice that if you enable this view, a stage called *default* will show all build steps that are not explicitly assigned to a stage.
+
+## Using spaces in stage names
+
+If you wish to have spaces in stage names you need to quote them like this:
+
+ `codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+stages:
+- 'my build phase'
+- 'my test phase'
+steps:
+ MyAppDockerImage:
+ title: Building Docker Image
+ stage: 'my build phase'
+ type: build
+ image_name: my-app
+ dockerfile: Dockerfile
+ MyUnitTests:
+ title: Unit testing
+ stage: 'my test phase'
+ image: ${{MyAppDockerImage}}
+ commands:
+ - npm run test
+{% endraw %}
+{% endhighlight %}
+
+
+## Related articles
+[Pipeline steps]({{site.baseurl}}/docs/pipelines/steps/)
+[Parallel workflows]({{site.baseurl}}/docs/pipelines/advanced-workflows/)
diff --git a/_docs/pipelines/steps.md b/_docs/pipelines/steps.md
new file mode 100644
index 00000000..468aef4d
--- /dev/null
+++ b/_docs/pipelines/steps.md
@@ -0,0 +1,1224 @@
+---
+title: "Steps in pipelines"
+description: "Types of steps in Codefresh pipelines"
+group: pipelines
+redirect_from:
+ - /docs/steps/
+toc: true
+---
+
+Codefresh [pipelines]({{site.baseurl}}/docs/pipelines/introduction-to-codefresh-pipelines/) are composed of a series of steps.
+
+You can create your own pipelines by writing a [codefresh.yml]({{site.baseurl}}/docs/pipelines/what-is-the-codefresh-yaml/) file that describes your pipeline. This file can then be version controlled on its own (pipeline as code).
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipelines/stages/complex-pipeline.png"
+url="/images/pipelines/stages/complex-pipeline.png"
+alt="Pipeline steps"
+caption="Pipeline steps"
+max-width="80%"
+%}
+
+## Built-in step types
+
+The steps offered by Codefresh are:
+
+* [Git clone]({{site.baseurl}}/docs/pipelines/steps/git-clone/)
+ **Git clone** steps allow you to checkout code in your pipeline from any internal or external repository. Existing accounts that still use repositories instead of [projects]({{site.baseurl}}/docs/configure-ci-cd-pipeline/pipelines/#pipeline-concepts) have an implicit clone step in the pipelines.
+
+* [Freestyle]({{site.baseurl}}/docs/pipelines/steps/freestyle/)
+ **Freestyle** steps are the cornerstone of Codefresh pipelines. They allow you to run any command within the context of a Docker container. A lot of Codefresh optimizations such as the [shared docker volume]({{site.baseurl}}/docs/configure-ci-cd-pipeline/introduction-to-codefresh-pipelines/#sharing-the-workspace-between-build-steps) are designed specifically for freestyle steps.
+Freestyle steps are a secure replacement for `docker run` commands.
+
+* [Build]({{site.baseurl}}/docs/pipelines/steps/build/)
+ **Build** steps are the main way where you get access to the Docker daemon (Docker as a service) in Codefresh pipelines. Build steps take as input any Dockerfile and run it on the cloud in a similar manner to what you do on your workstation. Build steps automatically push the result to the default Docker registry of your account (no need for docker login commands). Codefresh also comes with a global Docker cache that automatically gets attached to all build nodes. Build steps are a secure replacement for `docker build` commands.
+
+* [Push]({{site.baseurl}}/docs/pipelines/steps/push/)
+**Push** steps allow you to push and tag your docker images (created by the build step) in any [external Docker registry]({{site.baseurl}}/docs/docker-registries/external-docker-registries/). Push steps are *not* needed at all if you work with only the internal Codefresh registry. Push steps are a secure replacement for the `docker tag` and `docker push` commands.
+
+* [Composition]({{site.baseurl}}/docs/pipelines/steps/composition/)
+ **Composition** steps allow you to run multiple services together in the Codefresh infrastructure and execute unit tests or other commands against them. They are discarded once a pipeline finishes. Composition steps are a secure replacement for `docker-compose` definitions.
+
+* [Launch test environment]({{site.baseurl}}/docs/pipelines/steps/launch-composition/)
+  **Launch test environment** steps behave similar to compositions, but they persist after the pipeline ends. This is a great way to create preview environments from your pull requests and send them to colleagues.
+
+* [Deploy]({{site.baseurl}}/docs/pipelines/steps/deploy/)
+ **Deploy steps** allow you to [perform Kubernetes deployments]({{site.baseurl}}/docs/deploy-to-kubernetes/deployment-options-to-kubernetes/) in a declarative manner. They embody the Continuous Deployment aspect of Codefresh.
+
+* [Approval]({{site.baseurl}}/docs/pipelines/steps/approval/)
+ **Approval steps** allow you to pause pipelines and wait for human intervention before resuming. They allow you to embrace the concepts of Continuous Delivery.
+
+
+
+>Codefresh also supports [parallel workflows]({{site.baseurl}}/docs/pipelines/advanced-workflows/), as well as running pipelines [locally on your workstation]({{site.baseurl}}/docs/pipelines/running-pipelines-locally/).
+
+## Step directory
+
+In the case of freestyle steps we also offer a [plugin marketplace](https://codefresh.io/steps/) with several existing plugins for popular integrations.
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/plugin-directory.png"
+url="/images/pipeline/plugin-directory.png"
+alt="Codefresh steps directory"
+caption="Codefresh steps directory"
+max-width="80%"
+%}
+
+Codefresh steps can be:
+
+* Private (visible only to you and your team) or public (visible to everybody via the marketplace)
+* Official (supported by the Codefresh team) or community based
+* Ready for production or still incubating.
+
+You can use in your pipelines any of the public steps already in the marketplace, any steps created by your team, and any steps that you create for yourself.
+
+## Using custom pipeline steps
+
+When you create a pipeline, you will have access to two categories of steps:
+
+* Public steps that exist in the marketplace
+* Steps that you or your team have created (visible only to you)
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipelines/steps/choose-step.png"
+url="/images/pipelines/steps/choose-step.png"
+alt="Choosing a custom step"
+caption="Choosing a custom step"
+max-width="60%"
+%}
+
+To use a step, first click on the pipeline section where you want to insert the step.
+You will get a new dialog with all the details of the step along with a live preview of the exact
+[yaml]({{site.baseurl}}/docs/pipelines/what-is-the-codefresh-yaml/) that will be inserted in your pipeline.
+
+For all steps you can define:
+
+* The title of the step (which will also be visible in the pipeline UI)
+* A freetext description
+* The [stage]({{site.baseurl}}/docs/pipelines/stages/) that will contain the step
+
+The rest of the fields are specific to each step. See the documentation of each step in order to understand what each field should contain. There are fields for each step that are marked as required and are essential for the step to work. These are marked with an asterisk.
+
+Once a step is added to the pipeline, you are free to change the resulting yaml even further by just typing in the pipeline editor.
+
+## Creating your own step
+
+There are two ways to create custom steps in Codefresh. The simplest way is to package an existing CLI tool into a Docker image and use it as a freestyle step. The more advanced way is creating a typed step with explicit input and output parameters.
+
+Here is a summary on the two ways:
+
+{: .table .table-bordered .table-hover}
+| | Custom freestyle step | Codefresh typed plugin |
+| -------------- | ---------------------------- |-------------------------|
+| Assets needed | A Docker image | A Docker image and a plugin manifest|
+| Knowledge required | Docker building/pushing | Docker and Codefresh CLI |
+| Step can be used | In any Docker based CI/CD platform | In Codefresh |
+| Effort required | Minimal | Medium |
+| Distribution via | Dockerhub | Codefresh marketplace |
+| Input variables | Yes | Yes|
+| Output variables | No | Yes |
+| Versioning via | Docker tags | Manifest entry |
+| Grouping of multiple steps | No | Yes |
+| Marketplace entry | Not possible| Possible/optional |
+| Best for sharing steps | with your team/company | with the world |
+
+
+
+We suggest that you start with custom freestyle steps first and only create typed plugins once you are familiar with Codefresh pipelines or want your plugin to appear in the marketplace.
+
+
+### Creating a custom freestyle step
+
+As an example let's say that you need to use the [JFrog CLI](https://jfrog.com/getcli/) in a pipeline in order to interact with Artifactory or Bintray. JFrog does not offer any Docker image that contains the CLI and you already know that all Codefresh steps [are actually Docker images]({{site.baseurl}}/docs/pipelines/introduction-to-codefresh-pipelines/).
+
+Therefore you can easily package the CLI into a Docker image and then make it available to any Codefresh pipeline that wishes to use it.
+First you create [a Dockerfile](https://github.com/kostis-codefresh/step-examples/blob/master/jfrog-cli-wrapper/Dockerfile) that packages the CLI
+
+ `Dockerfile`
+{% highlight docker %}
+{% raw %}
+FROM debian:stable-slim
+
+WORKDIR /jfrog-cli
+
+ENV DEBIAN_FRONTEND noninteractive
+
+RUN apt-get update && apt-get install -y curl && rm -rf /var/lib/apt/lists/*
+
+RUN curl -fL https://getcli.jfrog.io | sh
+
+ENV JFROG_CLI_OFFER_CONFIG false
+ENV BINTRAY_LICENCES MIT
+
+RUN /jfrog-cli/jfrog bt config --licenses $BINTRAY_LICENCES
+
+RUN ln -s /jfrog-cli/jfrog /usr/local/bin/jfrog
+
+CMD ["/jfrog-cli/jfrog"]
+{% endraw %}
+{% endhighlight %}
+
+This is a standard Dockerfile. There is nothing specific to Codefresh in the image that gets created. You can test this Dockerfile locally with
+
+{% highlight shell %}
+{% raw %}
+docker build . -t jfrog-cli
+docker run jfrog-cli
+{% endraw %}
+{% endhighlight %}
+
+In a similar manner you can package any other executable and its dependencies. You could even just package `curl` with an external URL that hosts the service that you want to interact in a Codefresh pipeline.
+
+Once the Dockerfile is ready, you need to push it to Dockerhub. You can do it manually from your workstation, but it is best to create a [Codefresh pipeline](https://github.com/kostis-codefresh/step-examples/blob/master/jfrog-cli-wrapper/codefresh.yml) that does it for you.
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipelines/steps/create-custom-step.png"
+url="/images/pipelines/steps/create-custom-step.png"
+alt="Creating a custom freestyle step"
+caption="Creating a custom freestyle step"
+max-width="80%"
+%}
+
+Now that the image is ready and public you can notify your team that the new plugin is ready.
+Everybody who wants to interact with JFrog Bintray and/or Artifactory can place [the following snippet](https://github.com/kostis-codefresh/step-examples/blob/master/jfrog-cli-wrapper/codefresh-example.yml) in a pipeline:
+
+ `codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+steps:
+ run_frog_cli:
+ title: Running jfrog CLI inside Docker
+ image: kkapelon/jfrog-cli
+ commands:
+ - jfrog bt --help
+ - jfrog rt --help
+{% endraw %}
+{% endhighlight %}
+
+You can then customize the exact command(s) that you want to run with the tool. All capabilities of freestyle steps are possible, such as passing environment variables as input parameters.
+
+ `codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+steps:
+ run_frog_cli:
+ title: Running jfrog CLI inside Docker
+ image: kkapelon/jfrog-cli
+ commands:
+ - jfrog bt package-show google/tensorflow/tensorflow
+ environment:
+ - BINTRAY_USER=my-user
+ - BINTRAY_KEY=my-secret-key
+{% endraw %}
+{% endhighlight %}
+
+If you want to use multiple versions of the step in the same pipeline, you can just create different docker tags. Notice that you can also use a [private registry]({{site.baseurl}}/docs/docker-registries/external-docker-registries/) instead of Dockerhub if you wish your step to be used only within your organization.
+
+
+
+### Creating a typed Codefresh plugin
+
+You can use the [Codefresh CLI](https://codefresh-io.github.io/cli/) and more specifically the [step-type resource](https://codefresh-io.github.io/cli/steps/) to create your own typed step. Each Codefresh step is composed from two parts:
+
+1. The step description in the special yaml syntax for describing Codefresh steps
+1. A Docker image that implements the step (optional)
+
+The easiest way to create your own step is to start by using the definition of an existing step.
+
+{% highlight bash %}
+codefresh get step-type vault -o yaml > vault-step.yml
+{% endhighlight %}
+
+Here is the resulting yaml:
+
+ `vault-step.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+kind: step-type
+metadata:
+ name: /
+ isPublic: false
+ description: >-
+ The plugin exports KV pairs from Hashicorp Vault to Codefresh pipeline ENV
+ variables
+ sources:
+ - 'https://github.com/codefresh-io/steps/tree/master/incubating/vault'
+ stage: incubating
+ maintainers:
+ - name: Alexander Aladov
+ categories:
+ - featured
+ official: false
+ tags: []
+ icon:
+ type: svg
+ url: 'https://cdn.jsdelivr.net/gh/codefresh-io/steps/incubating/vault/icon.svg'
+ background: '#f4f4f4'
+ examples:
+ - description: example-1
+ workflow:
+ version: '1.0'
+ steps:
+ Vault_to_Env:
+ title: Importing vault values
+ type: vault
+ arguments:
+ VAULT_ADDR: '${{VAULT_ADDR}}'
+ VAULT_PATH: '${{VAULT_PATH}}'
+ VAULT_AUTH_TOKEN: '${{VAULT_AUTH_TOKEN}}'
+ VAULT_CLIENT_CERT_BASE64: '${{VAULT_CLIENT_CERT_BASE64}}'
+ VAULT_CLIENT_KEY_BASE64: '${{VAULT_CLIENT_KEY_BASE64}}'
+ created_at: '2019-07-03T14:57:02.057Z'
+ updated_at: '2019-09-18T08:15:28.476Z'
+ latest: true
+ version: 0.0.1
+ id: 5d1cc23ea7e22e40227ea75d
+spec:
+ arguments: |-
+ {
+ "definitions": {},
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "type": "object",
+ "additionalProperties": false,
+ "patterns": [],
+ "required": [
+ "VAULT_ADDR",
+ "VAULT_PATH",
+ "VAULT_AUTH_TOKEN"
+ ],
+ "properties": {
+ "VAULT_ADDR": {
+ "type": "string",
+ "description": "Vault server URI. Example: https://vault.testdomain.io:8200 (required)"
+ },
+ "VAULT_PATH": {
+ "type": "string",
+ "description": "Path to secrets in vault. Example: secret/codefreshsecret (required)"
+ },
+ "VAULT_AUTH_TOKEN": {
+ "type": "string",
+ "description": "Vault authentication token (required)"
+ },
+ "VAULT_CLIENT_CERT_BASE64": {
+ "type": "string",
+ "description": "Base64 encoded client cerificate"
+ },
+ "VAULT_CLIENT_KEY_BASE64": {
+ "type": "string",
+ "description": "Base64 encoded client key"
+ }
+ }
+ }
+ steps:
+ main:
+ name: vault
+ image: codefreshplugins/vault
+ environment:
+ - 'VAULT_ADDR=${{VAULT_ADDR}}'
+ - 'VAULT_PATH=${{VAULT_PATH}}'
+ - 'VAULT_AUTH_TOKEN=${{VAULT_AUTH_TOKEN}}'
+ - 'VAULT_CLIENT_CERT_BASE64=${{VAULT_CLIENT_CERT_BASE64}}'
+ - 'VAULT_CLIENT_KEY_BASE64=${{VAULT_CLIENT_KEY_BASE64}}'
+{% endraw %}
+{% endhighlight %}
+
+For each step you define the following sections:
+
+* Metadata to describe the characteristics of the step
+* The description of its arguments
+* The implementation (i.e. what yaml gets inserted in the pipeline)
+
+For the metadata section note the following:
+
+* `isPublic` decides if this step is visible only to you and your team, or visible to all (in the marketplace)
+* The `name` of the step **must** be prefixed with your Codefresh account name. Steps created by the Codefresh team are on the root level of the hierarchy (without prefix). This is the same pattern that Dockerhub is using for images.
+* `stage` shows whether this step is ready for production or still incubating. This is just an indication to users. It doesn't affect the implementation of the step in any way
+* `icon`. Ideally you provide a transparent svg so that the icon is scalable. The icon for a step is used both in the marketplace as well as the pipeline view. You can also select a default background to be used. Alternatively, you can define jpg/png icons for large/medium/small sizes. We suggest the svg approach
+* The `version` property allows you to update your plugin and keep multiple variants of it in the marketplace
+* The `examples` section will be shown in the marketplace as documentation for your step
+
+For the argument section we follow the [JSON Schema](http://json-schema.org/learn/miscellaneous-examples.html). You can use the [Schema generator](https://jsonschema.net/) to easily create a schema. JSON schema is used for arguments (i.e. input parameters) as well as output parameters as we will see later on.
+
+The property `additionalProperties` defines how strict the plugin will be with its arguments. If you set it to `false` (which is usually what you want) the pipeline will fail if the plugin is given more arguments than it is expecting. If you set it to `true`, then the plugin will only use the arguments it understands and will ignore the rest.
+
+The final part is the step implementation. Here you can define exactly the yaml that this step will insert in the pipeline. You can use any of the built-in steps in Codefresh and even add multiple steps.
+
+>Note that currently you cannot nest custom pipeline steps. We are aware of this limitation and are actively working on it, but at the time of writing you cannot use a typed step inside another typed step.
+
+Once you are done with your step, use the Codefresh CLI to upload it to the marketplace. If you want the step to be available only to you and your team make sure that the property `isPublic` is false (and then it will not be shown in the marketplace).
+
+{% highlight bash %}
+codefresh create step-type -f my-custom-step.yml
+{% endhighlight %}
+
+If you make further changes to your step you can update it:
+
+{% highlight bash %}
+codefresh replace step-type -f my-custom-step.yml
+{% endhighlight %}
+
+If you want to remove your step from the marketplace, you can delete it completely:
+
+{% highlight bash %}
+codefresh delete step-type kostis-codefresh/sample
+{% endhighlight %}
+
+### Versioning of typed steps
+
+The `version` property under `metadata` in the plugin manifest allows you to publish multiple releases of the same plugin in the marketplace. Codefresh will keep all previous plugins and users are free to choose which version they want.
+
+To create a new version of your plugin:
+
+1. Update the `version` property under `metadata` in your custom YAML.
+2. Run:
+
+{% highlight bash %}
+codefresh create step-type -f custom-plugin.yaml
+{% endhighlight %}
+
+You will now be able to see the new versions of your plugin in the step marketplace drop-down:
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipelines/steps/step-versions.png"
+url="/images/pipelines/steps/step-versions.png"
+alt="Different step versions"
+caption="Different step versions"
+max-width="60%"
+%}
+
+You can also use the Codefresh CLI to list all versions:
+
+{% highlight bash %}
+codefresh get step-types kostis-codefresh/sample --versions
+{% endhighlight %}
+
+To delete a specific version, use:
+
+{% highlight bash %}
+codefresh delete step-type 'account/plugin:'
+{% endhighlight %}
+
+Note that Codefresh step versions function like Docker tags in the sense that they are *mutable*. You can overwrite an existing plugin version with a new plugin manifest by using the `codefresh replace step-type` command.
+
+If users do not define a version once they use the plugin, the latest one (according to [semantic versioning](https://semver.org/)) will be used. Alternatively they can specify the exact version they need (even different versions within the same pipeline).
+
+
+ `codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+steps:
+ my_step_1:
+ title: Running old custom step
+ type: kostis-codefresh/sample:1.2.1
+ my_step_2:
+ title: Running new custom step
+ type: kostis-codefresh/sample:1.3.5
+{% endraw %}
+{% endhighlight %}
+
+### Example with input parameters
+
+Let's create a very simple step called *node-version*. This step will read the application version from a NodeJS project and expose it as an environment variable. This way we can use the application version later in the pipeline (for example to tag a docker image).
+
+Here is the respective [step yaml](https://github.com/kostis-codefresh/step-examples/blob/master/node-version-plugin/read-app-version.yml).
+
+ `plugin.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+kind: step-type
+metadata:
+ name: kostis-codefresh/node-version
+ isPublic: false
+ description: >-
+ The plugin exports as an environment variable the application version from package.json
+ sources:
+ - 'https://github.com/kostis-codefresh/step-examples'
+ stage: incubating
+ maintainers:
+ - name: Kostis Kapelonis
+ categories:
+ - utility
+ official: false
+ tags: []
+ icon:
+ type: svg
+ url: https://cdn.worldvectorlogo.com/logos/nodejs-icon.svg
+ background: '#f4f4f4'
+ examples:
+ - description: example-1
+ workflow:
+ version: '1.0'
+ steps:
+ main_clone:
+ title: Cloning main repository...
+ type: git-clone
+ repo: 'my-github-user/my-github-repo'
+ revision: 'master'
+ git: github
+ read_app_version:
+ title: Reading app version
+ type: kostis-codefresh/node-version
+ arguments:
+ PACKAGE_JSON_FOLDER: './my-github-repo'
+ print_app_version:
+ title: Printing app version
+ image: alpine
+ commands:
+ - echo $APP_VERSION
+ latest: true
+ version: 1.0.0
+spec:
+ arguments: |-
+ {
+ "definitions": {},
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "type": "object",
+ "additionalProperties": false,
+ "patterns": [],
+ "required": [
+ "PACKAGE_JSON_FOLDER"
+ ],
+ "properties": {
+ "PACKAGE_JSON_FOLDER": {
+ "type": "string",
+ "description": "folder where package.json is located"
+ }
+ }
+ }
+ steps:
+ main:
+ name: kostis-codefresh/node-version
+ image: node
+ commands:
+ - cd $WORK_DIR
+ - pwd
+ - APP_VERSION=$(node -p -e "require('./package.json').version")
+ - echo $APP_VERSION
+ - export APP_VERSION
+ - cf_export APP_VERSION
+ environment:
+ - 'WORK_DIR=${{PACKAGE_JSON_FOLDER}}'
+{% endraw %}
+{% endhighlight %}
+
+If you look at the `spec` section you will see that the plugin expects a single parameter called `PACKAGE_JSON_FOLDER`. This will
+be passed by the plugin user to specify the folder that contains the `package.json` file. This way this plugin can be used for multiple applications. For example, the plugin user might check out 3 different Node.js projects and use the plugin to read the versions of all of them.
+
+The plugin implementation is specified in the `steps` sections. We use the standard [Node Docker image](https://hub.docker.com/_/node) to read the version from the `package.json` file. Notice how we convert the plugin argument to an environment variable called `WORK_DIR`
+
+By default all plugins start with the Codefresh volume at `/codefresh/volume` as a working folder. So with the `cd` command we enter the project folder (which we assume was checked out in a previous pipeline step). Once the version is read it is made available to all the other pipeline steps with the [cf_export command]({{site.baseurl}}/docs/pipelines/variables/#using-cf_export-command).
+
+We now insert our plugin in the marketplace with the following command:
+
+{% highlight bash %}
+codefresh create step-type -f read-app-version.yml
+{% endhighlight %}
+
+The step is now ready to be used by anybody.
+
+An example user pipeline is shown at [codefresh.yml](https://github.com/kostis-codefresh/step-examples/blob/master/node-version-plugin/codefresh.yml)
+
+ `codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+steps:
+ main_clone:
+ title: Cloning main repository...
+ type: git-clone
+ repo: 'codefreshdemo/example_nodejs_postgres'
+ revision: 'master'
+ git: github
+ read_app_version:
+ title: Reading app version
+ type: kostis-codefresh/node-version
+ arguments:
+ PACKAGE_JSON_FOLDER: './example_nodejs_postgres'
+ print_app_version:
+ title: Printing app version
+ image: alpine
+ commands:
+ - echo $APP_VERSION
+{% endraw %}
+{% endhighlight %}
+
+This is a very simple pipeline that checks out a NodeJS project and uses our plugin. Notice how we pass as argument the required parameter `example_nodejs_postgres` to tell the plugin where our `package.json` file is located. Once the plugin runs the application version is available as an environment variable that we can use in other steps as `APP_VERSION`.
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipelines/steps/input-parameters.png"
+url="/images/pipelines/steps/input-parameters.png"
+alt="Step input parameters"
+caption="Step input parameters"
+max-width="60%"
+%}
+
+The input parameter is also shown as required in the marketplace.
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipelines/steps/input-parameters-definition.png"
+url="/images/pipelines/steps/input-parameters-definition.png"
+alt="Input parameters on marketplace"
+caption="Input parameters on marketplace"
+max-width="40%"
+%}
+
+This is a trivial example, but it still shows how a Codefresh pipeline can be declarative while actually doing a lot of imperative actions behind the scenes.
+
+### Example with output parameters
+
+In the previous example our plugin had an output parameter (`APP_VERSION`) that is created by the custom step and given back to the user. Even though creating an output parameter using only `cf_export` will work just fine at the technical level, it is best to formally define output parameters in the step definition.
+
+If you define output parameters in the step definition their names will appear on the marketplace and users will have an easier time understanding what your step produces. You will be able to define complete JSON objects in addition to output strings. Formal output parameters are also available under a special notation (`step.outputs`) that we will explain in this example.
+
+We suggest you always formalize your output parameters in your step definition, especially when your step has a large number of output parameters.
+
+The same [JSON Schema](http://json-schema.org/learn/miscellaneous-examples.html) is also used for output parameters as with input ones.
+Here is a [very simple example](https://github.com/kostis-codefresh/step-examples/blob/master/output-parameters/output-parameters-sample.yml) that shows the different types of output parameters you can have.
+
+ `plugin.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+kind: step-type
+metadata:
+ name: kostis-codefresh/output-parameters-example
+ isPublic: false
+ description: >-
+ The plugin shows how you can export output parameters
+ sources:
+ - 'https://github.com/kostis-codefresh/step-examples'
+ stage: incubating
+ maintainers:
+ - name: Kostis Kapelonis
+ categories:
+ - utility
+ official: false
+ tags: []
+ icon:
+ type: svg
+ url: https://cdn.worldvectorlogo.com/logos/bash-1.svg
+ background: '#f4f4f4'
+ examples:
+ - description: example-1
+ workflow:
+ version: '1.0'
+ steps:
+ dummy_parameters:
+ title: Creating output parameters
+ type: kostis-codefresh/output-parameters-example
+ print_my_variables:
+ title: Printing dummy content
+ image: alpine
+ commands:
+ - echo $MY_NUMBER
+ - echo $MY_CITY
+ - echo $MY_FAVORITE_FOOD
+ - echo ${{steps.dummy_parameters.output.MY_NUMBER}}
+ - echo ${{steps.dummy_parameters.output.MY_CITY}}
+ - echo ${{steps.dummy_parameters.output.MY_FAVORITE_FOOD}}
+ latest: true
+ version: 1.0.0
+spec:
+ returns: |-
+ {
+ "definitions": {},
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "type": "object",
+ "additionalProperties": true,
+ "patterns": [],
+ "required": [
+ "MY_NUMBER",
+ "MY_CITY",
+ "MY_FAVORITE_FOOD"
+ ]
+ ,
+ "properties": {
+ "MY_NUMBER": {
+ "type": "number",
+ "description": "an example variable that holds a number"
+ },
+ "MY_CITY": {
+ "type": "object",
+ "description": "an example variable that holds a JSON object",
+ "required": ["city_name", "country", "population"],
+ "properties": {
+ "city_name": {"type": "string"},
+ "country": {"type": "string"},
+ "population": {"type": "integer"}
+ }
+ },
+ "MY_FAVORITE_FOOD": {
+          "description": "an example variable that holds an array",
+ "type": "array",
+ "maxItems": 3,
+ "items": {
+ "type": "string"
+ }
+ }
+ }
+ }
+ steps:
+ main:
+ name: kostis-codefresh/output-parameters-example
+ image: alpine
+ commands:
+ - cf_export MY_NUMBER=42
+ - cf_export MY_CITY='{"city_name":"San Francisco", "country":"usa","population":884363}'
+ - cf_export MY_FAVORITE_FOOD='["pizza", "ramen", "hot dogs"]'
+
+{% endraw %}
+{% endhighlight %}
+
+This plugin exports 3 output parameters
+
+* `MY_NUMBER` - a single number
+* `MY_CITY` - an object with fields `city_name`, `country`, `population`
+* `MY_FAVORITE_FOOD` - an array.
+
+Output parameters are defined in the `returns` block.
+The output parameters of the step are now shown in the marketplace so consumers of this plugin know what to expect when they use it.
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipelines/steps/output-parameters-definition.png"
+url="/images/pipelines/steps/output-parameters-definition.png"
+alt="Output parameters on marketplace"
+caption="Output parameters on marketplace"
+max-width="40%"
+%}
+
+As can be seen from the `examples` block, when you have formal output parameters you can also access them by mentioning the specific step in your pipeline that creates them. The following are two equivalent ways to use an output parameter in your pipeline:
+
+```
+{% raw %}
+echo $MY_NUMBER
+echo ${{steps.dummy_parameters.output.MY_NUMBER}}
+{% endraw %}
+```
+
+In the case of output parameters that are objects you can also use `jq` to get specific properties like this:
+
+```
+{% raw %}
+echo ${{steps.dummy_parameters.output.MY_CITY}} | jq '.city_name'
+{% endraw %}
+```
+
+This will print "San Francisco".
+
+
+### Example with input/output parameters
+
+Let's take everything we learned from the previous examples and create a custom step that has
+
+1. A custom Docker image
+1. Formal input parameters
+1. Formal output parameters
+
+In this simple example we will create a custom step that reads the Maven coordinates from a `pom.xml` file. Unlike `package.json`, a Maven file has 3 characteristics (group, artifact name and version). First we create a [very simple executable](https://github.com/kostis-codefresh/step-examples/blob/master/maven-version-plugin/mvncoords.go) that reads a Maven file and gives us these coordinates in JSON format.
+
+{% highlight shell %}
+{% raw %}
+mvncoords -f pom.xml
+{"groupId":"com.example.codefresh","artifactId":"my-java-app","version":"3.0.2"}
+{% endraw %}
+{% endhighlight %}
+
+Next, we package this executable in a [Dockerfile](https://github.com/kostis-codefresh/step-examples/blob/master/maven-version-plugin/Dockerfile).
+
+ `Dockerfile`
+{% highlight docker %}
+{% raw %}
+FROM golang:1.12-alpine AS build_base
+
+WORKDIR /tmp/
+
+COPY . .
+
+# Unit tests
+RUN go test -v
+
+# Build the Go app
+RUN go build -o ./out/mvncoords .
+
+# Start fresh from a smaller image
+FROM alpine:3.9
+
+COPY --from=build_base /tmp/out/mvncoords /usr/local/bin/mvncoords
+
+CMD ["mvncoords"]
+{% endraw %}
+{% endhighlight %}
+
+We now have a custom Docker image that contains our executable. If we want other people to use it, we need to push it to Dockerhub. You can do this manually from your workstation using `docker login` and `docker push` commands, but it is much better to automate this with a Codefresh pipeline.
+
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipelines/steps/create-plugin-image.png"
+url="/images/pipelines/steps/create-plugin-image.png"
+alt="Building a public Docker image"
+caption="Building a public Docker image"
+max-width="60%"
+%}
+
+This [pipeline](https://github.com/kostis-codefresh/step-examples/blob/master/maven-version-plugin/codefresh.yml) checks out the Dockerfile plus source code, builds the docker image and then pushes it to Dockerhub (so that the image is public).
+
+Finally we are ready to create our Codefresh plugin. Here is the [specification](https://github.com/kostis-codefresh/step-examples/blob/master/maven-version-plugin/read-maven-version.yml):
+
+
+
+ `plugin.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+kind: step-type
+metadata:
+ name: kostis-codefresh/mvn-version
+ isPublic: false
+ description: >-
+ The plugin exports as an environment variable the mvn coordinates from pom.xml
+ sources:
+ - 'https://github.com/kostis-codefresh/step-examples'
+ stage: incubating
+ maintainers:
+ - name: Kostis Kapelonis
+ categories:
+ - utility
+ official: false
+ tags: []
+ icon:
+ type: svg
+ url: https://cdn.worldvectorlogo.com/logos/java-4.svg
+ background: '#f4f4f4'
+ examples:
+ - description: example-1
+ workflow:
+ version: '1.0'
+ steps:
+ main_clone:
+ title: Cloning main repository...
+ type: git-clone
+ repo: 'my-github-user/my-github-repo'
+ revision: 'master'
+ git: github
+ read_app_version:
+ title: Reading app version
+ type: kostis-codefresh/mvn-version
+ arguments:
+ POM_XML_FOLDER: './my-github-repo'
+ print_app_version:
+ title: Printing app coordinates
+ image: alpine
+ commands:
+ - echo $MVN_COORDS
+ - echo ${{steps.read_app_version.output.MVN_COORDS}}
+ latest: true
+ version: 1.0.0
+spec:
+ arguments: |-
+ {
+ "definitions": {},
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "type": "object",
+ "additionalProperties": false,
+ "patterns": [],
+ "required": [
+ "POM_XML_FOLDER"
+ ],
+ "properties": {
+ "POM_XML_FOLDER": {
+ "type": "string",
+ "description": "folder where pom.xml is located"
+ }
+ }
+ }
+ returns: |-
+ {
+ "definitions": {},
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "type": "object",
+ "additionalProperties": true,
+ "patterns": [],
+ "required": [
+ "MVN_COORDS"
+ ],
+ "properties": {
+ "MVN_COORDS": {
+ "type": "object",
+ "required": ["groupId", "artifactId", "version"],
+ "properties": {
+ "groupId": {"type": "string"},
+ "artifactId": {"type": "string"},
+ "version": {"type": "string"}
+ }
+ }
+ }
+ }
+ steps:
+ main:
+ name: kostis-codefresh/mvn-version
+ image: kkapelon/maven-version-extract
+ commands:
+ - cd $WORK_DIR
+ - MVN_COORDS=$(mvncoords -json)
+ - export MVN_COORDS
+ - cf_export MVN_COORDS
+ environment:
+ - 'WORK_DIR=${{POM_XML_FOLDER}}'
+{% endraw %}
+{% endhighlight %}
+
+We place this plugin into the marketplace with
+
+```
+codefresh create step-type -f read-maven-version.yml
+```
+
+If you look at the plugin entry in the marketplace you will see both input (the folder of the pom.xml) and output parameters (mvn coordinates) defined:
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipelines/steps/plugin-parameters.png"
+url="/images/pipelines/steps/plugin-parameters.png"
+alt="Input and output parameters"
+caption="Input and output parameters"
+max-width="60%"
+%}
+
+The plugin is now ready to be used in a pipeline:
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipelines/steps/plugin-usage.png"
+url="/images/pipelines/steps/plugin-usage.png"
+alt="Plugin usage"
+caption="Plugin usage"
+max-width="60%"
+%}
+
+If you look at the [pipeline definition](https://github.com/kostis-codefresh/step-examples/blob/master/maven-version-plugin/codefresh-example.yml) you will see how we pass arguments in the plugin and get its output with the `steps.output` syntax.
+
+
+ `codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+steps:
+ main_clone:
+ title: Cloning main repository...
+ type: git-clone
+ repo: 'codefresh-contrib/spring-boot-2-sample-app'
+ revision: 'master'
+ git: github
+ read_app_version:
+ title: Reading app version
+ type: kostis-codefresh/mvn-version
+ arguments:
+ POM_XML_FOLDER: './spring-boot-2-sample-app'
+ print_app_version:
+ title: Printing app version
+ image: alpine
+ commands:
+ - echo $MVN_COORDS
+ - echo ${{steps.read_app_version.output.MVN_COORDS}}
+{% endraw %}
+{% endhighlight %}
+
+This was a trivial example, but it clearly demonstrates how a custom step communicates with the rest of the pipeline by getting input from the previous steps and preparing output for the steps that follow it.
+
+### Exporting parameters manually inside a plugin
+
+Normally, in a pipeline you can either use the [cf_export]({{site.baseurl}}/docs/pipelines/variables/#using-cf_export-command) command or write directly to the [/codefresh/volume/env_vars_to_export]({{site.baseurl}}/docs/pipelines/variables/#directly-writing-to-the-file) file.
+
+However, inside a plugin you can also use the `/meta/env_vars_to_export` file that has the same semantics, but is used for exporting variables in the same scope as the plugin only.
+
+The rules for using `/meta/env_vars_to_export` are:
+- When the step-type (plugin) does not define the `returns` schema, all the output variables from substeps will be projected and exported as the root step (they may override each other).
+- When the `returns` schema is defined, only the variables that match the definition will be exported as the root step.
+
+`plugin.yaml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+kind: step-type
+metadata:
+ name: /my-step
+ ...
+spec:
+ arguments: |-
+ {
+ ...
+ }
+ returns: |-
+ {
+ "definitions": {},
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "type": "object",
+ "additionalProperties": true,
+ "patterns": [],
+ "required": [
+ "ROOT_VAR"
+ ]
+ ,
+ "properties": {
+ "ROOT_VAR": {
+ "type": "string",
+ "description": "an example variable"
+ }
+ }
+ }
+ steps:
+ export_my_variable:
+ title: "Exporting custom variable"
+ image: alpine
+ commands:
+ - echo PLUGIN_VAR=Alice >> /meta/env_vars_to_export
+ - echo ROOT_VAR=Bob >> /meta/env_vars_to_export
+ read_my_variable:
+ title: "Reading custom variable"
+ image: alpine
+ commands:
+ - source /meta/env_vars_to_export
+ - echo $PLUGIN_VAR #Alice
+ - echo $ROOT_VAR #Bob
+{% endraw %}
+{% endhighlight %}
+
+
+`codefresh.yaml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+steps:
+ plugin:
+ type: /my-step
+ echo:
+ image: alpine
+ commands:
+ - echo $PLUGIN_VAR #empty
+ - echo $ROOT_VAR #Bob
+{% endraw %}
+{% endhighlight %}
+
+You can still use `cf_export` command inside the plugin as well (as shown in the previous examples).
+
+
+### Example with step templating
+
+As an advanced technique, Codefresh allows you to define a custom step using templating instead of fixed YAML. We support templates inside the `spec:` block of a plugin definition by taking advantage of the [Gomplate](https://github.com/hairyhenderson/gomplate) library that offers additional templating functions on top of vanilla [Go templates](https://golang.org/pkg/text/template/).
+
+> Note: Gomplate Data functions will not work since Codefresh does not pass the Data object to gomplate functions.
+
+As a simple example, let's say we want to create a single step that checks out any number of git repositories. Of course you could just copy-paste the git clone step multiple times in a single pipeline. To make things easier we will create a single step that takes an array of git repositories and checks them out on its own:
+
+{% highlight yaml %}
+{% raw %}
+checkout_many_projects:
+ title: Checking out my Git projects
+ type: kostis-codefresh/multi-git-clone
+ arguments:
+ GIT_PROJECTS:
+ - 'codefresh-contrib/ruby-on-rails-sample-app'
+ - 'kubernetes/sample-apiserver'
+ - 'kostis-codefresh/nestjs-example'
+ - 'spring-projects/spring-petclinic'
+{% endraw %}
+{% endhighlight %}
+
+The GitHub projects are passed as an array, so if we want to check out an additional project, we simply add items to that array.
+
+Here is the [step specification](https://github.com/kostis-codefresh/step-examples/blob/master/multi-clone/multi-clone-step.yml):
+
+ `plugin.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+kind: step-type
+metadata:
+ name: kostis-codefresh/multi-git-clone
+ isPublic: false
+ description: >-
+ This pipeline plugin shows templating of custom steps
+ sources:
+ - 'https://github.com/kostis-codefresh/step-examples'
+ stage: incubating
+ maintainers:
+ - name: Kostis Kapelonis
+ categories:
+ - git
+ official: false
+ tags: []
+ icon:
+ type: svg
+ url: https://cdn.worldvectorlogo.com/logos/git.svg
+ background: '#f4f4f4'
+ examples:
+ - description: example-1
+ workflow:
+ version: '1.0'
+ steps:
+ checkout_many_projects:
+ title: Checking out my Git projects
+ type: kostis-codefresh/multi-git-clone
+ arguments:
+ GIT_REVISION: 'master'
+ GIT_PROVIDER: 'github'
+ GIT_PROJECTS:
+ - 'codefresh-contrib/ruby-on-rails-sample-app'
+ - 'kubernetes/sample-apiserver'
+ - 'kostis-codefresh/nestjs-example'
+ - 'spring-projects/spring-petclinic'
+ latest: true
+ version: 1.0.0
+spec:
+ arguments: |-
+ {
+ "definitions": {},
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "type": "object",
+ "additionalProperties": false,
+ "patterns": [],
+ "required": [
+ "GIT_PROJECTS",
+ "GIT_REVISION",
+ "GIT_PROVIDER"
+ ],
+ "properties": {
+ "GIT_REVISION": {
+ "type": "string",
+ "description": "branch or tag or revision to checkout (same for all projects)"
+ },
+ "GIT_PROVIDER": {
+ "type": "string",
+ "description": "Name of git provider to use from Codefresh integrations screen"
+ },
+ "GIT_PROJECTS": {
+ "description": "A list/array of git projects to checkout",
+ "type": "array",
+ "maxItems": 10,
+ "items": {
+ "type": "string"
+ }
+ }
+ }
+ }
+ delimiters:
+ left: '[['
+ right: ']]'
+ stepsTemplate: |-
+ print_info_message:
+ name: kostis-codefresh/multi-git-clone
+ title: Info message
+ image: alpine
+ commands:
+ - echo "Checking out [[ len .Arguments.GIT_PROJECTS ]] git projects"
+ [[ range $index, $git_project :=.Arguments.GIT_PROJECTS ]]
+ clone_project_[[$index]]:
+ title: Cloning [[$git_project]] ...
+ type: git-clone
+ repo: '[[$git_project]]'
+ revision: [[$.Arguments.GIT_REVISION]]
+ git: [[$.Arguments.GIT_PROVIDER]]
+ [[end]]
+{% endraw %}
+{% endhighlight %}
+
+There are two important points here:
+
+1. Instead of using a `steps:` block, we instead define a block called `stepsTemplate:`. This block name instructs Codefresh that we will use templates
+1. Because the Codefresh runtime is already using the double curly braces for variables mentioned as {% raw %}`${{MY_VARIABLE_EXAMPLE}}`{% endraw %}, we instead define templates with the characters {% raw %}`[[]]`{% endraw %}. You can see the definitions for these characters inside the `delimiters:` block. You are free to use any other replacement characters of your choosing.
+
+In the `stepsTemplate` block we use Golang template keywords such as `range`, `len` and template variables (such as `git_project`). You can use all the capabilities of Go templates (e.g. `if`, `range`, `with`) as well as the extra methods of [gomplate](https://docs.gomplate.ca/) such as math and net functions.
+
+Creating the [marketplace entry](https://codefresh.io/steps/step/kostis-codefresh%2Fmulti-git-clone) for a step with templates is exactly the same as any other step:
+
+```
+codefresh create step-type -f multi-clone-step.yml
+```
+
+You can then use the step in [any pipeline](https://github.com/kostis-codefresh/step-examples/blob/master/multi-clone/codefresh.yml) and pass the arguments that will fill the template:
+
+ `codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+steps:
+ checkout_many_projects:
+ title: Checking out my Git projects
+ type: kostis-codefresh/multi-git-clone
+ arguments:
+ GIT_REVISION: 'master'
+ GIT_PROVIDER: 'github'
+ GIT_PROJECTS:
+ - 'codefresh-contrib/ruby-on-rails-sample-app'
+ - 'kubernetes/sample-apiserver'
+ - 'kostis-codefresh/nestjs-example'
+ - 'spring-projects/spring-petclinic'
+ print_my_workspace:
+ title: Show projects
+ image: alpine
+ commands:
+ - ls -l
+ - pwd
+{% endraw %}
+{% endhighlight %}
+
+We have also added two extra parameters, one for the git revision and one for the [git provider]({{site.baseurl}}/docs/integrations/git-providers/) that will be used during checkout.
+
+The end result is that with a single step you can checkout many projects. Checking out an additional project is as simple as adding a new entry in the `GIT_PROJECTS` array.
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipelines/steps/multi-checkout.png"
+url="/images/pipelines/steps/multi-checkout.png"
+alt="Checking out multiple Git repositories in a single step"
+caption="Checking out multiple Git repositories in a single step"
+max-width="60%"
+%}
+
+This was a contrived example to demonstrate how you can use templates in the Codefresh plugin specification. Note that using templates in Codefresh steps is an advanced technique and should be used sparingly.
+
+### Limitations of custom plugins
+
+[Parallel steps]({{site.baseurl}}/docs/pipelines/advanced-workflows/) are not supported inside custom steps.
+
+Within a custom step, the [fail_fast field]({{site.baseurl}}/docs/pipelines/what-is-the-codefresh-yaml/#execution-flow) does not work. Use the `failFast` field instead.
+
+Custom steps are not compatible with [service containers]({{site.baseurl}}/docs/pipelines/service-containers/).
+More specifically:
+
+ * If you have a [service container in the pipeline-level]({{site.baseurl}}/docs/pipelines/service-containers/#running-services-for-the-duration-of-the-pipeline), steps inside the custom plugin will not be able to access it
+ * If you try to attach a service container to a custom plugin, the plugin will fail when executed
+ * If you try to define a custom plugin where a step inside it has a service container attached, the custom plugin will fail when executed
+
+## Related articles
+[Introduction to Pipelines]({{site.baseurl}}/docs/pipelines/introduction-to-codefresh-pipelines/)
+[Freestyle step]({{site.baseurl}}/docs/pipelines/steps/freestyle/)
+[Build step]({{site.baseurl}}/docs/pipelines/steps/build/)
+[Push step]({{site.baseurl}}/docs/pipelines/steps/push/)
+
diff --git a/_docs/pipelines/steps/approval.md b/_docs/pipelines/steps/approval.md
new file mode 100644
index 00000000..dee497d2
--- /dev/null
+++ b/_docs/pipelines/steps/approval.md
@@ -0,0 +1,348 @@
+---
+title: "Approval"
+description: "How to Pause Pipelines for Manual Approval"
+group: codefresh-yaml
+sub_group: steps
+toc: true
+---
+
+The approval step allows you to pause a pipeline and wait for human intervention before going on.
+
+{% include
+image.html
+lightbox="true"
+file="/images/codefresh-yaml/approval/approval-waiting.png"
+url="/images/codefresh-yaml/approval/approval-waiting.png"
+alt="Manual Approval step"
+caption="Manual Approval step"
+max-width="80%"
+%}
+
+Some example scenarios for using the approval step:
+
+* Pause before deploying to production
+* Pause before destroying an environment
+* Pause for some manual smoke tests or metric collection
+
+## Usage
+
+ `YAML`
+{% highlight yaml %}
+{% raw %}
+step_name:
+ type: pending-approval
+ title: Step Title
+ description: Step description
+ timeout:
+ duration: 2
+ finalState: approved
+ timeUnit: minutes
+ when:
+ branch:
+ only: [ master ]
+
+{% endraw %}
+{% endhighlight %}
+
+## Fields
+
+{: .table .table-bordered .table-hover}
+| Field | Description | Required/Optional/Default |
+| ------------------------------------------ | ---------------------------------------------- | ------------------------- |
+| `title` | The free-text display name of the step. | Optional |
+| `description` | A basic, free-text description of the step. | Optional |
+| `timeout` | Defines an automatic approval/rejection if a specified amount of time has passed. The `duration` field is hours. By default it is set to 168 (i.e, 7 days). The `finalState` field defines what will happen after the duration time has elapsed. Possible values are `approved`/`denied`/`terminated` | Optional |
+| `timeUnit`                                 | Possible options are `minutes` or `hours`. If the field is not set, the default is `hours` | Optional
+| `fail_fast` | If set to false, the pipeline will continue even when the step is rejected | Optional |
+| `stage` | Parent group of this step. See [using stages]({{site.baseurl}}/docs/codefresh-yaml/stages/) for more information. | Optional |
+| `when` | Define a set of conditions that need to be satisfied in order to execute this step. You can find more information in the [Conditional Execution of Steps]({{site.baseurl}}/docs/codefresh-yaml/conditional-execution-of-steps/) article. | Optional |
+
+
+## Pausing the Pipeline
+
+Once the pipeline reaches an approval step it will stop. At this point it **does not** consume any resources.
+In the Codefresh UI you will see the *Approve/Reject* buttons.
+
+{% include
+image.html
+lightbox="true"
+file="/images/codefresh-yaml/approval/build-waiting.png"
+url="/images/codefresh-yaml/approval/build-waiting.png"
+alt="Build waiting for input"
+caption="Build waiting for input"
+max-width="80%"
+%}
+
+Once you click any of them the pipeline will continue. Further steps in the pipeline can be enabled/disabled
+according to the approval result.
+
+## Automatic Approvals/Rejections
+
+By default, a pipeline that contains an approval step will pause for 7 days (168 hours) once it reaches that step. If you want some automatic action to happen after a specified time period you can define it in advance with the `timeout` property:
+
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+steps:
+ waitForInputBeforeProduction:
+ type: pending-approval
+ title: Deploy to Production?
+ timeout:
+ duration: 2
+ finalState: denied
+{% endraw %}
+{% endhighlight %}
+
+This pipeline will wait for approval for two hours. If somebody approves it, it will continue. If nothing happens after two hours
+the approval step will be automatically rejected.
+
+## Approval Restrictions
+
+By default, any Codefresh user can approve any pipeline that is paused at the approval state. If you want to restrict
+the approval action to a subset of people, you can use the [Access Control facilities]({{site.baseurl}}/docs/enterprise/access-control/) that Codefresh provides.
+
+This is a two-step process. First you need to tag your pipeline with one or more tags (tag names are arbitrary). You can edit tags in the pipeline settings screen.
+
+{% include
+image.html
+lightbox="true"
+file="/images/codefresh-yaml/approval/pipeline-tag.png"
+url="/images/codefresh-yaml/approval/pipeline-tag.png"
+alt="Marking a pipeline with tags"
+caption="Marking a pipeline with tags"
+max-width="40%"
+%}
+
+Once you have tagged your pipelines you can create one or more access rules that restrict approval to specific teams within your organization.
+
+{% include
+image.html
+lightbox="true"
+file="/images/codefresh-yaml/approval/approval-rule.png"
+url="/images/codefresh-yaml/approval/approval-rule.png"
+alt="Rules for approvals"
+caption="Rules for approvals"
+max-width="80%"
+%}
+
+
+For more details on access control and users see also the [access control page]({{site.baseurl}}/docs/administration/access-control/).
+
+## Keeping the Shared Volume after an Approval
+
+As soon as a pipeline starts waiting for an approval, all contents of the [shared Codefresh volume]({{site.baseurl}}/docs/pipelines/introduction-to-codefresh-pipelines/#sharing-the-workspace-between-build-steps) are lost. Once the pipeline continues running all files that were created manually inside the volume are not available any more.
+
+If you want to keep any temporary files that were there before the approval, you need to enable the respective policy in your [pipeline settings]({{site.baseurl}}/docs/pipelines/pipelines/#policies).
+
+You can either set this option differently per pipeline, or globally in your account at your [account settings](https://g.codefresh.io/account-admin/account-conf/pipeline-settings).
+
+{% include
+image.html
+lightbox="true"
+file="/images/codefresh-yaml/approval/keep-volume.png"
+url="/images/codefresh-yaml/approval/keep-volume.png"
+alt="Preserve Codefresh volume after an approval"
+caption="Preserve Codefresh volume after an approval"
+max-width="90%"
+%}
+
+>Notice that if you do decide to keep the volume after an approval, the pipeline will still count as "running" against your pricing plan (if you use the SAAS version of Codefresh). If you don't keep the volume, the pipeline is stopped/paused while it is waiting for approval and doesn't count against your pricing plan. We advise you to keep the volume only for pipelines that really need this capability.
+
+>Notice also that if you use the [Codefresh Runner]({{site.baseurl}}/docs/reference/behind-the-firewall/) and your [Runner]({{site.baseurl}}/docs/installation/codefresh-runner/) is setup with local volumes, then the volume will only be present if the dind pod
+is scheduled in the same node once the pipeline resumes. Otherwise the volume will not be reused.
+
+## Controlling the Rejection Behavior
+
+By default if you reject a pipeline, it will stop right away and it will be marked as failed. All subsequent steps after the approval one will not run at all.
+
+You might want to continue running the pipeline even when it is rejected by adding the `fail_fast` property in the approval step:
+
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+steps:
+ waitForInputBeforeProduction:
+ fail_fast: false
+ type: pending-approval
+ title: Deploy to Production?
+{% endraw %}
+{% endhighlight %}
+
+In this case you can also read the approval result and make the pipeline work differently according to each choice (demonstrated in the following section).
+
+
+## Getting the Approval Result
+
+As also explained in [step dependencies]({{site.baseurl}}/docs/pipelines/advanced-workflows/#custom-steps-dependencies) all steps in the Codefresh pipeline belong to a global object
+called `steps` (indexed by name). You can read the `result` property for an approval step to see if it was approved or rejected.
+
+Here is an example:
+
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+steps:
+ askForPermission:
+ type: pending-approval
+ title: Destroy QA environment?
+ destroyQaEnvNow:
+ image: alpine:3.8
+ title: Destroying env
+ commands:
+ - echo "Destroy command running"
+ when:
+ steps:
+ - name: askForPermission
+ on:
+ - approved
+{% endraw %}
+{% endhighlight %}
+
+In this example the second step that is destroying an environment will only run if the user
+approves the first step. In case of rejection the second step will be skipped.
+
+You can follow the same pattern for running steps when an approval step was rejected.
+Here is a full example with both cases.
+
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+stages:
+- prepare
+- yesPleaseDo
+- noDont
+
+steps:
+ step_1:
+ image: alpine:3.8
+ title: building chart
+ stage: prepare
+ commands:
+ - echo "prepare"
+ deployToProdNow:
+ fail_fast: false
+ type: pending-approval
+ title: Should we deploy to prod
+ stage: prepare
+ step_2:
+ image: alpine:3.8
+ title: prepare environment
+ stage: yesPleaseDo
+ commands:
+ - echo "world"
+ when:
+ steps:
+ - name: deployToProdNow
+ on:
+ - approved
+ step_3:
+ image: alpine:3.8
+ title: deploy to production
+ stage: yesPleaseDo
+ commands:
+ - echo "world"
+ when:
+ steps:
+ - name: deployToProdNow
+ on:
+ - approved
+ step_4:
+ image: alpine:3.8
+ title: prepare environment
+ stage: noDont
+ commands:
+ - echo "world"
+ when:
+ steps:
+ - name: deployToProdNow
+ on:
+ - denied
+ step_5:
+ image: alpine:3.8
+ title: deploy to staging
+ stage: noDont
+ commands:
+ - echo "world"
+ when:
+ steps:
+ - name: deployToProdNow
+ on:
+ - denied
+{% endraw %}
+{% endhighlight %}
+
+Here is the pipeline state after a rejection:
+
+{% include
+image.html
+lightbox="true"
+file="/images/codefresh-yaml/approval/pipeline-rejected.png"
+url="/images/codefresh-yaml/approval/pipeline-rejected.png"
+alt="Rejecting a pipeline"
+caption="Rejecting a pipeline"
+max-width="80%"
+%}
+
+>Note that we have added the `fail_fast` property in the approval step because we want the pipeline to continue even when the step is rejected.
+
+
+You can see that only two steps were ignored. If you rerun the pipeline and approve
+it, the other two steps will be ignored.
+
+## Define Concurrency Limits
+
+Codefresh has the ability to limit the amount of running builds for a specific pipeline with several concurrency policies in the pipeline settings. You can choose if a build that is in a pending approval state will count against the concurrency limits or not.
+
+As an example let's say that the concurrency limit for a specific pipeline is set to 2. Currently there is one active/running build and a second build that is pending approval.
+
+1. If the pipeline settings define that builds in pending approval **count** against concurrency, then if you launch a third build it will wait until one of the first two has finished
+1. If the pipeline settings define that builds in pending approval **do not** count against concurrency, then if you launch a third build it will execute right away.
+
+There isn't a correct or wrong way to set this option. It depends on your organization and whether you consider builds pending approval as "active" or not.
+
+You can either set this option [differently per pipeline]({{site.baseurl}}/docs/configure-ci-cd-pipeline/pipelines/#policies), or globally in your account at your [account settings](https://g.codefresh.io/account-admin/account-conf/pipeline-settings).
+
+
+## Slack Integration
+
+If you also enable [Slack integration]({{site.baseurl}}/docs/integrations/notifications/slack-integration/) in Codefresh you will have the choice of approving/rejecting a pipeline
+via a Slack channel.
+
+{% include
+image.html
+lightbox="true"
+file="/images/codefresh-yaml/approval/slack-approval.png"
+url="/images/codefresh-yaml/approval/slack-approval.png"
+alt="Approval step in a slack channel"
+caption="Approval step in a slack channel"
+max-width="80%"
+%}
+
+To enable this behavior, you need to activate it in the Slack settings page:
+
+{% include
+image.html
+lightbox="true"
+file="/images/codefresh-yaml/approval/slack-settings.png"
+url="/images/codefresh-yaml/approval/slack-settings.png"
+alt="Slack settings"
+caption="Slack settings"
+max-width="50%"
+%}
+
+Also, if you run a pipeline manually that includes an approval step you should check
+the "Report notification of pipeline execution" checkbox as explained in [Monitoring Pipelines]({{site.baseurl}}/docs/pipelines/monitoring-pipelines/#monitoring-pipelines-outside-the-codefresh-ui).
+
+
+
+## Related articles
+[Post-Step Operations]({{site.baseurl}}/docs/pipelines/post-step-operations/)
+[Advanced Workflows ]({{site.baseurl}}/docs/pipelines/advanced-workflows/)
+[Conditional Execution of Steps]({{site.baseurl}}/docs/pipelines/conditional-execution-of-steps/)
+[Creating pipelines]({{site.baseurl}}/docs/pipelines/pipelines/)
+
+
diff --git a/_docs/pipelines/steps/build.md b/_docs/pipelines/steps/build.md
new file mode 100644
index 00000000..26fdb2fb
--- /dev/null
+++ b/_docs/pipelines/steps/build.md
@@ -0,0 +1,379 @@
+---
+title: "Build"
+description: "Building Docker images in Codefresh pipelines"
+group: pipelines
+sub_group: steps
+redirect_from:
+ - /docs/build-1/
+ - /docs/codefresh-yaml/steps/build-1/
+toc: true
+---
+Use Docker to build an image and store it in Codefresh.
+
+## Purpose of build steps
+
+In Codefresh, docker containers are first-class citizens
+and special typed steps are offered for the most usual docker commands. Build steps are a secure replacement for `docker build` commands.
+
+Therefore, this command on your local workstation:
+
+```
+docker build . -t my-app-image:1.0.1
+```
+
+will become in Codefresh the following build step.
+
+```yaml
+BuildMyImage:
+ title: Building My Docker image
+ type: build
+ image_name: my-app-image
+ tag: 1.0.1
+```
+
+## Usage
+
+ `YAML`
+{% highlight yaml %}
+step_name:
+ type: build
+ title: Step Title
+ description: Free text description
+ working_directory: {% raw %}${{clone_step_name}}{% endraw %}
+ dockerfile: path/to/Dockerfile
+ image_name: owner/new-image-name
+ tag: develop
+ build_arguments:
+ - key=value
+ target: stage1
+ no_cache: false
+ no_cf_cache: false
+ tag_policy: original
+ fail_fast: false
+ metadata:
+ set:
+ - qa: pending
+ when:
+ condition:
+ all:
+ noDetectedSkipCI: "includes('{% raw %}${{CF_COMMIT_MESSAGE}}{% endraw %}', '[skip ci]') == false"
+ on_success:
+ ...
+ on_fail:
+ ...
+ on_finish:
+ ...
+ retry:
+ ...
+{% endhighlight %}
+
+## Fields
+
+{: .table .table-bordered .table-hover}
+| Field | Description | Required/Optional/Default |
+| ------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------- |
+| `title` | The free-text display name of the step. | Optional |
+| `description` | A basic, free-text description of the step. | Optional |
+| `stage` | Parent group of this step. See [using stages]({{site.baseurl}}/docs/codefresh-yaml/stages/) for more information. | Optional |
+| `working_directory` | The directory in which the build command is executed. It can be an explicit path in the container's file system, or a variable that references another step. The default is {% raw %} `${{main_clone}}` {% endraw %}. This only changes the Docker build context and is unrelated to the `WORKDIR` inside the Dockerfile. | Default |
+| `dockerfile` | The path to the `Dockerfile` from which the image is built. The default is `Dockerfile`. | Default |
+| `image_name` | The name for the image you build. | Required |
+| `region` | Relevant only for [Amazon ECR]({{site.baseurl}}/docs/integrations/docker-registries/amazon-ec2-container-registry/) integrations using either service accounts or explicit credentials. The names of the regions for which to perform cross-region replication. The names of the source region and the destination region name must be defined in separate steps. | Optional |
+| `tag` | The tag that is assigned to the image you build. The default is the name of the branch or revision that is built. | Default |
+| `tags` | Multiple tags under which to push the image. Use either this or `tag`. This is an array, so should be of the following style: {::nomarkdown}
tags: -tag1 -tag2 -{% raw %}${{CF_BRANCH_TAG_NORMALIZED}}{% endraw %} -tag4
{:/}or {::nomarkdown}
tags:['tag1','tag2','{% raw %}${{CF_BRANCH_TAG_NORMALIZED}}{% endraw %}','tag4']
{:/} | Optional |
+| `registry` | The registry logical name of one of the inserted registries from the integration view. The default value will be your default registry [if you have more than one]({{site.baseurl}}/docs/docker-registries/external-docker-registries/). | Optional |
+| `registry_contexts` | Advanced property for resolving Docker images when [working with multiple registries with the same domain]({{site.baseurl}}/docs/docker-registries/working-with-docker-registries/#working-with-multiple-registries-with-the-same-domain) | Optional |
+|`disable_push` | Do not push to any registry automatically. | Optional |
+|`tag_policy` | Push the tag name without change or lowercase it automatically. By default `tag: MixedCase` will be pushed as `image_name:mixedcase`. Possible options are `original` and `lowercase`. Default is `lowercase` | Default |
+| `no_cache` | Disable Docker engine cache for the build [more info](https://codefresh.io/docs/docs/troubleshooting/common-issues/disabling-codefresh-caching-mechanisms/) | Optional |
+| `no_cf_cache` | Disable Codefresh build optimization for the build [more info](https://codefresh.io/docs/docs/troubleshooting/common-issues/disabling-codefresh-caching-mechanisms/)
+| `build_arguments` | A set of [Docker build arguments](https://docs.docker.com/engine/reference/commandline/build/#set-build-time-variables-build-arg) to pass to the build process. | Optional |
+| `target` | target stage in a multistage build (build will run until this stage) | Optional |
+| `fail_fast` | If a step fails, and the process is halted. The default value is `true`. | Default |
+| `when` | Define a set of conditions that need to be satisfied in order to execute this step. You can find more information in the [Conditional Execution of Steps]({{site.baseurl}}/docs/pipelines/conditional-execution-of-steps/) article. | Optional |
+| `metadata` | Annotate the built image with [key-value metadata]({{site.baseurl}}/docs/docker-registries/metadata-annotations/). | Optional |
+| `on_success`, `on_fail` and `on_finish` | Define operations to perform upon step completion using a set of predefined [Post-Step Operations]({{site.baseurl}}/docs/pipelines/post-step-operations/). | Optional |
+| `retry` | Define retry behavior as described in [Retrying a step]({{site.baseurl}}/docs/codefresh-yaml/what-is-the-codefresh-yaml/#retrying-a-step). | Optional |
+| `buildkit` | Set to `true` to enable [Buildkit]({{site.baseurl}}/docs/pipelines/steps/build/#buildkit-support) and all of its enhancements | Optional |
+
+**Exported resources:**
+- Working Directory
+- Image ID
+
+## Examples
+
+Build an image using a Dockerfile in the root project folder:
+
+`codefresh.yml`
+{% highlight yaml %}
+version: '1.0'
+steps:
+ BuildMyImage:
+ title: Building My Docker image
+ image_name: my-app-image
+ type: build
+{% endhighlight %}
+
+Build an image using a different Dockerfile and a specific version tag
+
+`codefresh.yml`
+{% highlight yaml %}
+version: '1.0'
+steps:
+ BuildMyImage:
+ title: Building My Docker image
+ type: build
+ image_name: my-app-image
+ dockerfile: my-custom.Dockerfile
+ tag: 1.0.1
+{% endhighlight %}
+
+Build an image using a different Dockerfile and push multiple tags to the default registry.
+
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+steps:
+ BuildMyImage:
+ title: Building My Docker image
+ type: build
+ image_name: my-app-image
+ dockerfile: my-custom.Dockerfile
+ tags:
+ - latest
+ - ${{CF_BRANCH_TAG_NORMALIZED_LOWER_CASE}}
+ - v1.1
+{% endraw %}
+{% endhighlight %}
+
+Build an image and automatically push to the [registry]({{site.baseurl}}/docs/docker-registries/external-docker-registries/) with name `my-registry`.
+
+`codefresh.yml`
+{% highlight yaml %}
+version: '1.0'
+steps:
+ BuildMyImage:
+ title: Building My Docker image
+ type: build
+ image_name: my-app-image
+ dockerfile: my-custom.Dockerfile
+ tag: 1.0.1
+ registry: my-registry
+{% endhighlight %}
+
+Build two images in two different folders using [Codefresh variables]({{site.baseurl}}/docs/pipelines/variables/) as tags.
+
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+steps:
+ BuildNodeImage:
+ title: Building My Node app
+ type: build
+ image_name: my-department/my-team/my-node-image
+ dockerfile: Dockerfile
+ working_directory: ./project1
+ tag: ${{CF_BRANCH_TAG_NORMALIZED}}-${{CF_SHORT_REVISION}}
+ BuildGoImage:
+ title: Building My Go app
+ type: build
+ image_name: my-company/my-go-image
+ dockerfile: Dockerfile
+ working_directory: ./project2
+ tag: ${{CF_BRANCH_TAG_NORMALIZED_LOWER_CASE}}
+{% endraw %}
+{% endhighlight %}
+
+It is also possible to build Docker images in [parallel]({{site.baseurl}}/docs/pipelines/advanced-workflows/) for faster builds.
+
+### Inline Dockerfile
+
+If your project does not already have a Dockerfile, you can also define one within the pipeline:
+
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+steps:
+ BuildingDockerImage:
+ title: Building Docker Image
+ type: build
+ image_name: my-own-go-app
+ working_directory: ./
+ tag: '${{CF_BRANCH_TAG_NORMALIZED}}'
+ dockerfile:
+ content: |-
+ # ---
+ # Go Builder Image
+ FROM golang:1.8-alpine AS builder
+ # set build arguments: GitHub user and repository
+ ARG GH_USER
+ ARG GH_REPO
+ # Create and set working directory
+ RUN mkdir -p /go/src/github.com/$GH_USER/$GH_REPO
+ # copy file from builder image
+ COPY --from=builder /go/src/github.com/$GH_USER/$GH_REPO/dist/myapp
+ /usr/bin/myapp
+ CMD ["myapp", "--help"]
+{% endraw %}
+{% endhighlight %}
+
+Use this technique only as a last resort. It is better if the Dockerfile exists as an actual file in source control.
+
+
+## Automatic pushing
+
+All images built successfully with the build step, will be automatically pushed to the default Docker registry in your account. This behavior is completely automatic and happens without any extra configuration on your part. If you want to disable this then add the `disable_push` property in your build step.
+
+>Notice that the [push step]({{site.baseurl}}/docs/pipelines/steps/push/) in Codefresh is optional and is only needed if you want to push to [external Docker registries]({{site.baseurl}}/docs/docker-registries/external-docker-registries/).
+
+{%
+ include image.html
+ lightbox="true"
+ file="/images/artifacts/cfcr/codefresh-registry-list.png"
+ url="/images/artifacts/cfcr/codefresh-registry-list.png"
+ alt="Docker Images pushed automatically"
+ caption="Docker Images pushed automatically"
+ max-width="80%"
+%}
+
+## Buildkit support
+
+Codefresh also allows you to use [buildkit](https://github.com/moby/buildkit) with all its [enhancements](https://docs.docker.com/develop/develop-images/build_enhancements/) and [experimental features](https://github.com/moby/buildkit/blob/master/frontend/dockerfile/docs/experimental.md#experimental-syntaxes).
+
+Using buildkit you can get:
+
+* Improved build output logs
+* Mounting of external secrets that will never be stored in the image
+* Access to SSH keys and sockets from within the Dockerfile
+* Use cache and bind-mounts at build time
+
+These capabilities are offered as extra arguments in the build step and using any of them will automatically enable buildkit. You can utilize the different mount-options for the Dockerfile instruction `RUN` as long as buildkit is enabled for your build step. Mounts of type [`cache`](https://github.com/moby/buildkit/blob/master/frontend/dockerfile/docs/experimental.md#example-cache-go-packages) work out of the box and are persisted between pipeline runs.
+
+The simplest way to use buildkit is by enabling it explicitly:
+
+`codefresh.yml`
+{% highlight yaml %}
+version: '1.0'
+steps:
+ BuildMyImage:
+ title: Building My Docker image
+ image_name: my-app-image
+ type: build
+ buildkit: true
+{% endhighlight %}
+
+Buildkit is also automatically enabled if you use any of its features such as the `progress` property:
+
+`codefresh.yml`
+{% highlight yaml %}
+version: '1.0'
+steps:
+ BuildMyImage:
+ title: Building My Docker image
+ image_name: my-app-image
+ type: build
+ progress: tty
+{% endhighlight %}
+
+Possible values for `progress` are `tty` and `plain`.
+
+For secrets you can either mention them in a single line:
+
+`codefresh.yml`
+{% highlight yaml %}
+version: '1.0'
+steps:
+ BuildMyImage:
+ title: Building My Docker image
+ image_name: my-app-image
+ type: build
+ secrets:
+ - id=secret1,src=./my-secret-file1.txt
+ - id=secret2,src=./my-secret-file2.txt
+{% endhighlight %}
+
+or multiple lines:
+
+`codefresh.yml`
+{% highlight yaml %}
+version: '1.0'
+steps:
+ BuildMyImage:
+ title: Building My Docker image
+ image_name: my-app-image
+ type: build
+ secrets:
+ - id: secret1
+ src: ./my-secret-file1.txt
+ - id: secret2
+ src: ./my-secret-file2.txt
+{% endhighlight %}
+
+For the SSH connection you can either use the default:
+
+`codefresh.yml`
+{% highlight yaml %}
+version: '1.0'
+steps:
+ BuildMyImage:
+ title: Building My Docker image
+ image_name: my-app-image
+ type: build
+ ssh: default
+{% endhighlight %}
+
+
+or define different keys:
+
+`codefresh.yml`
+{% highlight yaml %}
+version: '1.0'
+steps:
+ BuildMyImage:
+ title: Building My Docker image
+ image_name: my-app-image
+ type: build
+ ssh:
+ - github=~/.ssh/github_rsa
+ - bitbucket=~/.ssh/bitbucket_rsa
+{% endhighlight %}
+
+You might want to use an environment variable to store and retrieve an SSH key. This can be achieved by converting your SSH key into a one-line string:
+```
+tr '\n' ',' < /path/to/id_rsa
+```
+
+Copy the output and place it in an [environment variable]({{site.baseurl}}/docs/pipelines/variables/#user-provided-variables). To make the SSH key available to the build step, you can write it to the codefresh volume:
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+steps:
+ SetupSshKeys:
+ title: Setting up ssh key
+ image: alpine:latest
+ commands:
+ - mkdir /codefresh/volume/keys
+ - echo "${SSH_KEY}" | tr ',' '\n' > /codefresh/volume/keys/github_rsa
+
+ BuildMyImage:
+ title: Building My Docker image
+ image_name: my-app-image
+ type: build
+ tag: latest
+ ssh:
+ - github=/codefresh/volume/keys/github_rsa
+{% endraw %}
+{% endhighlight %}
+
+
+You can combine all options (`ssh`, `progress`, `secrets`) in a single build step if desired.
+
+
+
+## Related articles
+[Codefresh YAML]({{site.baseurl}}/docs/codefresh-yaml/what-is-the-codefresh-yaml/)
+[Pipeline steps]({{site.baseurl}}/docs/pipelines/steps/)
diff --git a/_docs/pipelines/steps/composition.md b/_docs/pipelines/steps/composition.md
new file mode 100644
index 00000000..40764e68
--- /dev/null
+++ b/_docs/pipelines/steps/composition.md
@@ -0,0 +1,434 @@
+---
+title: "Composition step"
+description: "Run a Docker container with its dependencies inside a pipeline"
+group: pipelines
+sub_group: steps
+redirect_from:
+ - /docs/composition-1/
+ - /docs/codefresh-yaml/steps/composition-1/
+toc: true
+---
+The composition step runs a Docker Composition as a means to execute finite commands in a more complex interaction of services.
+
+>Note that while composition steps are still supported, the recommended way to run integrations tests going forward is with [service containers]({{site.baseurl}}/docs/codefresh-yaml/service-containers/).
+
+## Motivation for Compositions
+
+The primary purpose of compositions is to run tests that require multiple services for their execution (often known as integration tests).
+
+The syntax offered by Codefresh closely follows the syntax for [Docker-compose](https://docs.docker.com/compose/overview/) files, but is technically not 100% the same (there are some important differences). However, if you are already familiar with Docker compose, you will be immediately familiar with Codefresh compositions.
+
+> Codefresh only understands Docker compose versions [2](https://docs.docker.com/compose/compose-file/compose-file-v2/) and [3](https://docs.docker.com/compose/compose-file/), but not point releases such as 2.1.
+
+The big difference between Codefresh and Docker compose is that Codefresh distinguishes between two kinds of services:
+
+* Composition Services
+* Composition Candidates
+
+**Composition Services** are helper services that are needed for the tests to run. These can be a database, a queue, a cache, or the backend docker image of your application -- these closely parallel the services that you might define in Docker compose.
+
+**Composition Candidates** are special services that will execute the tests. Codefresh will monitor their execution and the build will fail if they do not succeed. Composition candidates are almost always Docker images that contain unit/integration tests or other kinds of tests (e.g. performance)
+
+You need at least one composition service and one candidate for the composition step.
+
+
+## Usage
+
+Here is an example of a composition step. Note that there is one composition service (PostgreSQL database, named `db`) and one composition candidate (tests executed with gulp)
+
+The most important part is the `command` line that executes the tests: `command: gulp integration_test`. If it fails, then the whole composition step will fail.
+
+
+
+ `codefresh.yml`
+{% highlight yaml %}
+step_name:
+ type: composition
+ title: Step Title
+ description: Free text description
+ working_directory: {% raw %}${{a_clone_step}}{% endraw %}
+ composition:
+ version: '2'
+ services:
+ db:
+ image: postgres
+ composition_candidates:
+ test_service:
+ image: {% raw %}${{build_step}}{% endraw %}
+ command: gulp integration_test
+ working_dir: /app
+ environment:
+ - key=value
+ composition_variables:
+ - key=value
+ fail_fast: false
+ when:
+ condition:
+ all:
+ notFeatureBranch: 'match("{% raw %}${{CF_BRANCH}}{% endraw %}", "/FB-/", true) == false'
+ on_success:
+ ...
+ on_fail:
+ ...
+ on_finish:
+ ...
+ retry:
+ ...
+{% endhighlight %}
+
+## Caveats on sharing a docker-compose.yml
+
+Although Codefresh's composition syntax closely follows the syntax used in `docker-compose.yml` files, it is not 100% the same. If you are using `docker-compose.yml` locally, you may experience some problems if you try to have Codefresh reference the file (by passing it as an argument to `compose`, e.g. `compose: docker-compose.yml`).
+
+One subtle difference is that Docker compose will interpolate environment variables that are quoted in single-braces, e.g. `${DATABASE_URL}`, whereas Codefresh interpolates variables that are quoted in double-braces, e.g. {% raw %}`${{DATABASE_URL}}`{% endraw %}. So if your `docker-compose.yml` file relies on the parsing of ENV variables, it may not be a good candidate for sharing with Codefresh.
+
+## Fields
+
+The following describes the fields available in a step of type `composition`
+
+{: .table .table-bordered .table-hover}
+| Field | Description | Required/Optional/Default |
+| ------------------------------------------ | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------- |
+| `title` | The free-text display name of the step. | Optional |
+| `description` | A basic, free-text description of the step. | Optional |
+| `stage` | Parent group of this step. See [using stages]({{site.baseurl}}/docs/codefresh-yaml/stages/) for more information. | Optional |
+| `working_directory` | The directory in which to search for the composition file. It can be an explicit path in the container's file system, or a variable that references another step. The default is {% raw %}`${{main_clone}}`{% endraw %}. Note that this is completely different from `working_dir` which is on the service level. | Default |
+| `composition` | The composition you want to run. This can be an inline YAML definition or a path to a composition file on the file system, e.g. `docker-compose.yml`, or the logical name of a composition stored in the Codefresh system. We support most features of [Docker compose version 2.0](https://docs.docker.com/compose/compose-file/compose-file-v2/) and [3.0](https://docs.docker.com/compose/compose-file/) | Required |
+| `version` | Version for docker compose. Use `2` or `3` | Required |
+| `composition_candidates` | The definition of the service to monitor. Each candidate has a **single** `command` parameter that decides what will be tested. | Required |
+| `environment` (service level) | environment that will be accessible to the container | Optional |
+| `working_dir` (service level) | defines the working directory that will be used in a service before running a command. By default it is defined by the docker image that is used by the service. | Optional |
+| `registry_contexts` | Advanced property for resolving Docker images when [working with multiple registries with the same domain]({{site.baseurl}}/docs/docker-registries/working-with-docker-registries/#working-with-multiple-registries-with-the-same-domain) | Optional |
+| `volumes` (service level) | Extra volumes for individual services. Used for transferring information between your steps. Explained in detail later in this page. | Optional |
+| `composition_variables` | A set of environment variables to substitute in the composition. Notice that these variables are docker-compose variables and **NOT** environment variables | Optional |
+| `fail_fast` | If a step fails, and the process is halted. The default value is `true`. | Default |
+| `when` | Define a set of conditions which need to be satisfied in order to execute this step. You can find more information in the [Conditional Execution of Steps]({{site.baseurl}}/docs/pipelines/conditional-execution-of-steps/) article. | Optional |
+| `on_success`, `on_fail` and `on_finish` | Define operations to perform upon step completion using a set of predefined [Post-Step Operations]({{site.baseurl}}/docs/pipelines/post-step-operations/). | Optional |
+| `retry` | Define retry behavior as described in [Retrying a step]({{site.baseurl}}/docs/codefresh-yaml/what-is-the-codefresh-yaml/#retrying-a-step). | Optional |
+
+## Composition versus Composition Candidates
+
+For Codefresh to determine if the step and operations were successfully executed, you must specify at least one `composition_candidate`.
+
+A `composition_candidate` is a single service component of the normal Docker composition that is monitored for a successful exit code and determines the outcome of the step. During runtime, the `composition_candidate` is merged into the specified `composition` and is monitored for successful execution.
+
+The critical part of each candidate is the `command` parameter. This takes [a single command](https://docs.docker.com/compose/compose-file/#command) that will
+be executed inside the Docker container of the candidate and will decide if the whole composition is successful or not. Only one command is allowed (similar to Docker compose). If you wish to test multiple commands you need to connect them with `&&` like this.
+
+{% highlight yaml %}
+ composition_candidates:
+ my_unit_tests:
+ image: node
+ command: bash -c "sleep 60 && pwd && npm run test"
+{% endhighlight %}
+
+
+## Working directories in a composition
+
+By default, all services that take part in a composition will use as working directory the one defined by the respective image. If you want to change that, you need to use the `working_dir` parameter at the service level.
+
+Here is an example:
+
+`codefresh.yml`
+{% highlight yaml %}
+version: '1.0'
+steps:
+ my_composition:
+ type: composition
+ title: Sample composition
+ composition:
+ version: '2'
+ services:
+ my_service:
+ image: alpine
+ command: 'pwd'
+ working_dir: /tmp
+ composition_candidates:
+ my_test_service:
+ image: python
+ working_dir: /root
+ command: 'pwd'
+{% endhighlight %}
+
+If you run this composition, you will see in the logs that the alpine image will use `/tmp` as a working directory and the python one will use `/root`
+
+```
+my_service_1 | /tmp
+my_test_service_1 | /root
+```
+
+## Composition networking
+
+The networking in Codefresh compositions works just like normal Docker-compose. Each service is assigned a hostname that matches
+its name and is accessible by other services.
+
+Here is an example
+
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+steps:
+ build_step:
+ type: build
+ image_name: my-node-app
+ dockerfile: Dockerfile
+ tag: ${{CF_BRANCH}}
+ my_db_tests:
+ type: composition
+ composition:
+ version: '2'
+ services:
+ db:
+ image: mysql:latest
+ ports:
+ - 3306
+ environment:
+ MYSQL_ROOT_PASSWORD: admin
+ MYSQL_USER: my_user
+ MYSQL_PASSWORD: admin
+ MYSQL_DATABASE: nodejs
+ composition_candidates:
+ test:
+ image: ${{build_step}}
+ links:
+ - db
+ command: bash -c 'sleep 30 && MYSQL_ROOT_PASSWORD=admin MYSQL_USER=my_user MYSQL_HOST=db MYSQL_PASSWORD=admin MYSQL_DATABASE=nodejs npm test'
+{% endraw %}
+{% endhighlight %}
+
+In this composition the MySql instance will be available at host `db:3306` accessible from the node image. When the node tests run, they will be pointed to that host and port combination to access it.
+
+Notice also that, as with Docker compose, the order in which the services are launched is not guaranteed. A quick way to solve this issue
+is with a sleep statement like shown above. This will make sure that the database is truly up before the tests run.
+
+A better approach would be to use solutions such as [wait-for-it](https://github.com/vishnubob/wait-for-it) which are much more robust. Here is an example:
+
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+steps:
+ build_image:
+ type: build
+ description: Building the image...
+ image_name: my-spring-boot-app
+ tag: ${{CF_BRANCH_TAG_NORMALIZED}}
+ build_image_with_tests:
+ type: build
+ description: Building the Test image...
+ image_name: maven-integration-tests
+ dockerfile: Dockerfile.testing
+ integration_tests:
+ type: composition
+ title: Launching QA environment
+ description: Temporary test environment
+ composition:
+ version: '2'
+ services:
+ app:
+ image: ${{build_image}}
+ ports:
+ - 8080
+ composition_candidates:
+ test_service:
+ image: ${{build_image_with_tests}}
+ links:
+ - app
+ command: bash -c '/usr/bin/wait-for-it.sh -t 20 app:8080 -- mvn verify -Dserver.host=app'
+{% endraw %}
+{% endhighlight %}
+
+In this composition a Java application is launched at `app:8080` and then a second image is used for integration tests that target that URL (passed as a parameter to Maven).
+
+The `wait-for-it.sh` script will make sure that the Java application is truly up before the tests are started. Notice that in the example above the script is included in the testing image (created by `Dockerfile.testing`)
+
+## Using public Docker images in a composition
+
+It is important to notice that Docker images used in a composition (both as services and candidates) will be looked up in your connected registries first, before Docker Hub:
+
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: "1.0"
+steps:
+ my_composition:
+ type: composition
+ title: Sample composition
+ composition:
+ version: '2'
+ services:
+ my_service:
+ image: mysql
+ ports:
+ - 3306
+ composition_candidates:
+ my_test_service:
+ image: alpine
+ working_dir: /root
+ command: 'pwd'
+
+{% endraw %}
+{% endhighlight %}
+
+In the example above if you already have two images in your private registries named `mysql` and `alpine`, then *THEY* will be used instead of the respective images in Dockerhub.
+
+You can see which images are used in the logs of the builds:
+
+```
+Running composition step: Sample composition
+Pulling kostisazureregistry.azurecr.io/mysql@sha256:1ee5515fed3dae4f13d0f7320e600a38522fd7e510b225e68421e1f90
+Pulling kostisazureregistry.azurecr.io/alpine@sha256:eddb7866364ec96861a7eb83ae7977b3efb98e8e978c1c9277262d327
+```
+
+
+## Accessing your project folder from a composition
+
+By default, the services of a composition run in a completely isolated manner. There are several scenarios however where you wish to access your Git files such as:
+
+* Using test data that is available in the project folder
+* Preloading a database with a data script found in Git
+* Running integration tests and then using their [results for reporting]({{site.baseurl}}/docs/testing/test-reports/)
+
+The Codefresh [shared volume]({{site.baseurl}}/docs/pipelines/introduction-to-codefresh-pipelines/#sharing-the-workspace-between-build-steps) is automatically mounted in [freestyle steps]({{site.baseurl}}/docs/pipelines/steps/freestyle/) but **NOT** in compositions. You have to mount it yourself if you use that functionality.
+
+Here is an example where the shared volume is mounted in a composition -- {% raw %}`'${{CF_VOLUME_NAME}}:${{CF_VOLUME_PATH}}'`{% endraw %} is listed under `volumes`:
+
+
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+steps:
+ create_test_data_step:
+ title: Creating dummy data
+ image: alpine
+ commands:
+ - echo "Writing in shared volume" > /codefresh/volume/sample_text.txt
+ my_sample_composition:
+ type: composition
+ title: Composition with volume
+ composition:
+ version: '2'
+ services:
+ my_sample_service:
+ image: node
+ volumes:
+ - '${{CF_VOLUME_NAME}}:${{CF_VOLUME_PATH}}'
+ working_dir: '${{CF_VOLUME_PATH}}'
+ command: bash -c "pwd && cat sample_text.txt"
+ composition_candidates:
+ my_unit_tests:
+ image: python
+ volumes:
+ - '${{CF_VOLUME_NAME}}:${{CF_VOLUME_PATH}}'
+ working_dir: '${{CF_VOLUME_PATH}}'
+ command: bash -c "pwd && echo 'Finished tests' > test_result.txt"
+ read_test_data_step:
+ title: Reading dummy data
+ image: alpine
+ commands:
+ - ls -l /codefresh/volume
+ - cat /codefresh/volume/test_result.txt
+{% endraw %}
+{% endhighlight %}
+
+In this pipeline:
+
+1. The first freestyle step writes a simple test file in the shared volume.
+1. The composition starts and both services (`my_sample_service` and `my_unit_tests`) attach the same volume.
+1. The sample service reads from the shared volume (i.e. using test data that was created before).
+1. The sample unit test service writes to the shared volume (emulating test results).
+1. The last freestyle step reads the file that was written by the composition.
+
+Therefore, in this pipeline you can see both ways of data sharing, bringing files into a composition and getting results out of it. Notice that we need to mount the shared volume only in the composition services. The freestyle steps automatically mount `/codefresh/volume` on their own.
+
+
+>Note: In order to mount the shared volume in one of your composition services, you must mount it in the `composition_candidate` also. It is not compulsory to mount the shared volume in all services of a composition. Only those that actually use it for file transfer, should mount it.
+
+
+## Composition variables versus environment variables
+
+Docker compose supports [two kinds of variables in its syntax](https://docs.docker.com/compose/environment-variables/):
+
+* There are environment variables that are used in the docker-compose file itself (`${VAR}` syntax).
+* There are environment variables that are passed in containers (`environment:` yaml group).
+
+Codefresh supports both kinds, but notice that variables mentioned in the
+`composition_variables` yaml group refer to the *first* kind. Any variables defined there are **NOT** passed automatically to containers (use the `environment` yaml group for that purpose).
+
+This can be illustrated with the following example:
+
+ `codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+steps:
+ comp1:
+ type: composition
+ title: Composition example 1
+ description: Free text description
+ composition:
+ version: '2'
+ services:
+ db:
+ image: alpine
+ composition_candidates:
+ test_service:
+ image: alpine
+ command: printenv
+ environment:
+ - FIRST_KEY=VALUE
+ composition_variables:
+ - ANOTHER_KEY=ANOTHER_VALUE
+{% endraw %}
+{% endhighlight %}
+
+If you run the composition, you will see that the `printenv` command shows the following:
+
+```
+test_service_1 | FIRST_KEY=VALUE
+```
+
+The `FIRST_KEY` variable which is defined explicitly in the `environment` yaml part is correctly passed to the alpine container. The `ANOTHER_KEY` is not visible in the container at all.
+
+You should use the `composition_variables` yaml group for variables that you wish to reuse in other parts of your composition using the `${ANOTHER_KEY}` syntax.
+
+## Merging services
+
+If the `composition` already contains a service with the same name as the `composition_candidate`, the two service definitions are combined, with preference given to the `composition_candidate`'s definition.
+
+For example, we create a new Codefresh composition named 'test_composition':
+
+ `test-composition.yml`
+{% highlight yaml %}
+version: '2'
+ services:
+ db:
+ image: postgres
+ test_service:
+ image: myuser/mytestservice:latest
+ command: gulp integration_test
+{% endhighlight %}
+
+Now we want to reuse this composition during our build for testing purposes.
+We can add the following composition step to our `codefresh.yml` file and define the composition step so that `test_service` always uses the latest image that was built.
+
+ `YAML`
+{% highlight yaml %}
+run_tests:
+ type: composition
+ composition: test_composition
+ composition_candidates:
+ test_service:
+ image: {% raw %}${{build_step}}{% endraw %}
+{% endhighlight %}
+
+In the above example, both `composition` and `composition_candidates` define a service named `test_service`. After merging these definitions, `test_service` will maintain the `command` that was defined in the original composition but will refer to the image built by the step named `build_step`.
+
+## Related articles
+[Pipeline steps]({{site.baseurl}}/docs/pipelines/steps/)
+[Variables]({{site.baseurl}}/docs/pipelines/variables/)
+[Introduction to pipelines]({{site.baseurl}}/docs/pipelines/introduction-to-codefresh-pipelines/)
+[Integration tests]({{site.baseurl}}/docs/testing/integration-tests/)
+
diff --git a/_docs/pipelines/steps/deploy.md b/_docs/pipelines/steps/deploy.md
new file mode 100644
index 00000000..6ad0a4e38
--- /dev/null
+++ b/_docs/pipelines/steps/deploy.md
@@ -0,0 +1,185 @@
+---
+title: "Deploy"
+description: "Deploying to Kubernetes from a Codefresh pipeline"
+group: codefresh-yaml
+sub_group: steps
+redirect_from:
+ - /docs/deploy/
+toc: true
+---
+The *Deploy* step can be used as a step to deploy a pre-built Docker image to a cluster.
+
+This step allows you to (re)deploy a Kubernetes application in your cluster
+
+It has two modes:
+
+1. Using the `service` option. In this case it will redeploy to an [existing service/deployment in your cluster]({{site.baseurl}}/docs/getting-started/deployment-to-kubernetes-quick-start-guide/) . Codefresh will
+automatically update the service/deployment with the new docker image.
+1. Using the `file_path` option. In this case you provide your own Kubernetes manifest and Codefresh deploys it as-is. It is **your
+own responsibility** to do [custom replacements]({{site.baseurl}}/docs/deploy-to-kubernetes/kubernetes-templating/) here (for example using [awk](https://en.wikipedia.org/wiki/AWK), [sed](https://www.gnu.org/software/sed/manual/sed.html) or [yq](http://mikefarah.github.io/yq/)). The deploy step is also using the [Codefresh templating mechanism]({{site.baseurl}}/docs/deploy-to-kubernetes/kubernetes-templating/#using-the-codefresh-deploy-image) behind the scenes if you want to take advantage of it. For a full templating solution we also
+suggest you look at [Helm]({{site.baseurl}}/docs/getting-started/helm-quick-start-guide/).
+
+You need to define either one of these fields in the deploy step. If you define `service` you also can select the exact Docker image
+with the `candidate` field (otherwise Codefresh will just reuse the docker image defined in the existing deployment)
+
+## Usage
+
+ `YAML`
+{% highlight yaml %}
+ step_name:
+ title: deploying to cluster
+ type: deploy
+ kind: kubernetes
+ ## cluster name as shown in the account's integration page
+ cluster: --my-cluster-name--
+ # desired namespace
+ namespace: default
+
+ ## Two ways to distinguish which deployment YAML to deploy - service or file_path:
+ # The Kubernetes service that is associated with the deployment using a selector
+ service: --my-service--
+ # Path to deployment.yml location inside the image volume
+ file_path: ./deployment.yml
+ # In seconds, how long the step will wait until the rolling update is complete (default is 120)
+ timeout: '150'
+ # Candidate is optional, if not specified will redeploy the same image that is specified in the deployment file
+ # When candidate exists it should have both: image and registry
+ candidate:
+ # The image that will replace the original deployment image
+ # The image that was built using the Build step
+ image: {% raw %}${{build_step}}{% endraw %}
+ # The registry that the user's Kubernetes cluster can pull the image from
+ # Codefresh will generate (if not found) secret and add it to the deployment so the Kubernetes master can pull it
+ registry: dockerhub
+ # Condition to run the step
+ when:
+ branch:
+ only:
+ - master
+ on_success:
+ ...
+ on_fail:
+ ...
+ on_finish:
+ ...
+ retry:
+ ...
+{% endhighlight %}
+
+## Fields
+
+{: .table .table-bordered .table-hover}
+| Field | Description | Required/Optional/Default |
+| ------------------------------------------ | -------------------------------------------------------------------- | ------------------------- |
+| `title` | The free-text display name of the step | Optional |
+| `description` | A basic, free-text description of the step. | Optional |
+| `stage` | Parent group of this step. See [using stages]({{site.baseurl}}/docs/codefresh-yaml/stages/) for more information. | Optional |
+| `kind` | Currently only `kubernetes` is supported | Required |
+| `cluster` | Name of your K8s cluster as found in the dashboard | Required |
+| `namespace` | Namespace where the deployment will take place | Required |
+| `service` | Name of the existing service that will be updated. You need to provide `service` OR `file_path` | Required/Optional |
+| `file_path` | A deployment manifest. You need to provide `service` OR `file_path` | Required/Optional |
+| `timeout` | Seconds to wait for the deployment to be completed. Default is 120 seconds | Default |
+| `candidate` | Docker image that will be deployed. Only valid if `service` is defined. Should contain `image` and name of registry as it appears in the [registry integration page]({{site.baseurl}}/docs/docker-registries/external-docker-registries/). | Optional |
+| `fail_fast` | If a step fails, the process is halted. The default value is `true`. | Default |
+| `when` | Define a set of conditions which need to be satisfied in order to execute this step. You can find more information in the [Conditional Execution of Steps]({{site.baseurl}}/docs/pipelines/conditional-execution-of-steps/) article. | Optional |
+| `on_success`, `on_fail` and `on_finish` | Define operations to perform upon step completion using a set of predefined [Post-Step Operations]({{site.baseurl}}/docs/pipelines/post-step-operations/). | Optional |
+| `retry` | Define retry behavior as described in [Retrying a step]({{site.baseurl}}/docs/codefresh-yaml/what-is-the-codefresh-yaml/#retrying-a-step). | Optional |
+
+## Examples
+
+Update an existing service using the same Docker image (tagged with branch)
+
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+steps:
+ MyAppDockerImage:
+ title: Building Docker Image
+ type: build
+ image_name: python-flask-sample-app
+ working_directory: ./
+ tag: ${{CF_BRANCH_TAG_NORMALIZED}}
+ dockerfile: Dockerfile
+ deploy_to_k8:
+ title: deploying to cluster
+ type: deploy
+ kind: kubernetes
+ cluster: myDemoAKSCluster
+ namespace: demo
+ service: my-python-app
+{% endraw %}
+{% endhighlight %}
+
+Update an existing service using a different Docker image (tagged with git hash)
+
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+steps:
+ MyAppDockerImage:
+ title: Building Docker Image
+ type: build
+ image_name: python-flask-sample-app
+ working_directory: ./
+ tag: ${{CF_SHORT_REVISION}}
+ dockerfile: Dockerfile
+ deploy_to_k8:
+ title: deploying to cluster
+ type: deploy
+ kind: kubernetes
+ cluster: myDemoAKSCluster
+ namespace: demo
+ service: my-python-app
+ candidate:
+ # The image that will replace the original deployment image
+ # The image that was built using the Build step
+ image: ${{MyAppDockerImage}}
+ # The registry that the user's Kubernetes cluster can pull the image from
+ # Codefresh will generate (if not found) secret and add it to the deployment so the Kubernetes master can pull it
+ registry: cfcr
+{% endraw %}
+{% endhighlight %}
+
+
+Deploy a custom Kubernetes manifest as-is. (Only a deployment will be created)
+
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+steps:
+ MyAppDockerImage:
+ title: Building Docker Image
+ type: build
+ image_name: python-flask-sample-app
+ working_directory: ./
+ tag: ${{CF_BRANCH}}
+ dockerfile: Dockerfile
+ deploy_to_k8:
+ title: deploying to cluster
+ type: deploy
+ kind: kubernetes
+ cluster: myDemoAKSCluster
+ namespace: demo
+ file_path: ./deploy/deployment.yml
+{% endraw %}
+{% endhighlight %}
+
+## Advanced Kubernetes deployments
+
+If you find the deploy step limited, feel free to look at the other deployment options offered by Codefresh:
+
+* [The cf-deploy-kubernetes step]({{site.baseurl}}/docs/deploy-to-kubernetes/kubernetes-templating/)
+* [Custom kubectl commands]({{site.baseurl}}/docs/deploy-to-kubernetes/custom-kubectl-commands/)
+* [Helm]({{site.baseurl}}/docs/getting-started/helm-quick-start-guide/)
+
+## Related articles
+[Kubernetes Quick start guide]({{site.baseurl}}/docs/getting-started/deployment-to-kubernetes-quick-start-guide/)
+[Manage your Kubernetes cluster]({{site.baseurl}}/docs/deploy-to-kubernetes/manage-kubernetes/)
+[Install HELM chart using Codefresh pipeline]({{site.baseurl}}/docs/new-helm/using-helm-in-codefresh-pipeline/)
+
+
+
diff --git a/_docs/pipelines/steps/freestyle.md b/_docs/pipelines/steps/freestyle.md
new file mode 100644
index 00000000..4610bd95
--- /dev/null
+++ b/_docs/pipelines/steps/freestyle.md
@@ -0,0 +1,352 @@
+---
+title: "Freestyle"
+description: "Run commands inside a Docker container"
+group: codefresh-yaml
+sub_group: steps
+redirect_from:
+ - /docs/freestyle/
+toc: true
+---
+The Freestyle step is designed so you can execute a series of commands in a container. Freestyle steps
+are the bread and butter of [Codefresh pipelines]({{site.baseurl}}/docs/configure-ci-cd-pipeline/introduction-to-codefresh-pipelines/).
+
+## Purpose of freestyle steps
+
+In Codefresh, docker containers are first-class citizens
+and special typed steps are offered for the most usual docker commands. Freestyle steps are a secure replacement for `docker run` commands.
+
+
+Therefore, this command on your local workstation:
+
+```
+docker run python:3.6.4-alpine3.6 pip install .
+```
+
+will become in Codefresh the following freestyle step.
+
+```yaml
+CollectAllMyDeps:
+ title: Install dependencies
+ image: python:3.6.4-alpine3.6
+ commands:
+ - pip install .
+```
+
+
+Select an image to start a container, then you can specify a working directory, and commands.
+If you do not specify a working directory or commands, the step runs the default commands specified by the image.
+In all freestyle steps Codefresh automatically [uses a shared docker volume]({{site.baseurl}}/docs/pipelines/introduction-to-codefresh-pipelines/#sharing-the-workspace-between-build-steps) that contains your git source code.
+
+## Usage
+
+ `YAML`
+{% highlight yaml %}
+{% raw %}
+step_name:
+ title: Step Title
+ description: Step description
+ image: image/id
+ working_directory: ${{step_id}}
+ commands:
+ - bash-command1
+ - bash-command2
+ cmd:
+ - arg1
+ - arg2
+ environment:
+ - key=value
+ entry_point:
+ - cmd
+ - arg1
+ shell: sh
+ fail_fast: false
+ volumes:
+ - ./relative-dir-under-cf-volume1:/absolute-dir-in-container1
+ - ./relative-dir-under-cf-volume2:/absolute-dir-in-container2
+ when:
+ branch:
+ only: [ master ]
+ on_success:
+ ...
+ on_fail:
+ ...
+ on_finish:
+ ...
+ retry:
+ ...
+{% endraw %}
+{% endhighlight %}
+
+## Fields
+
+{: .table .table-bordered .table-hover}
+| Field | Description | Required/Optional/Default |
+| ------------------------------------------ | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------- |
+| `title` | The free-text display name of the step. | Optional |
+| `description` | A basic, free-text description of the step. | Optional |
+| `stage` | Parent group of this step. See [using stages]({{site.baseurl}}/docs/pipelines/stages/) for more information. | Optional |
+| `image` | The image from which the executable container is created. It can be an explicit ID of a Docker image, or a variable that references a **Build** or **Push** step. | Required |
+| `working_directory` | The directory from which the commands are executed. It can be an explicit path in the container's file system, or a variable that references another step. The default `working_directory` is the cloned repository directory and not the working directory specified by the image. If you need to use the default working directory of the image use `IMAGE_WORK_DIR`. | Default |
+| `commands` | One or more commands to execute in a shell in the container, as array of strings. | Optional |
+| `cmd` | docker CMD arguments to use along with the container entry point. can be string or array of strings. | Optional |
+| `entry_point` | Override the default container entry point. can be string or array of strings. | Optional |
+| `shell` | Explicitly set the executing shell to bash or sh. If not set the default will be sh. Note the `bash` option requires that you specify an `image` that includes `/bin/bash`; many images do not. | Optional |
+| `environment` | A set of environment variables for the container. | Optional |
+| `fail_fast` | If a step fails, the process is halted. The default value is `true`. | Default |
+| `registry_context` | Advanced property for resolving Docker images when [working with multiple registries with the same domain]({{site.baseurl}}/docs/docker-registries/working-with-docker-registries/#working-with-multiple-registries-with-the-same-domain) | Optional |
+| `volumes` | One or more volumes for the container. All volumes must be mounted from the existing shared volume (see details below) |Optional
+| `when` | Define a set of conditions that need to be satisfied in order to execute this step. You can find more information in the [Conditional Execution of Steps]({{site.baseurl}}/docs/pipelines/conditional-execution-of-steps/) article. | Optional |
+| `on_success`, `on_fail` and `on_finish` | Define operations to perform upon step completion using a set of predefined [Post-Step Operations]({{site.baseurl}}/docs/pipelines/post-step-operations/). | Optional |
+| `retry` | Define retry behavior as described in [Retrying a step]({{site.baseurl}}/docs/codefresh-yaml/what-is-the-codefresh-yaml/#retrying-a-step). | Optional |
+
+**Exported resources:**
+- Working Directory.
+
+## Examples
+
+Here are some full pipelines with freestyle steps. Notice that in all cases the pipelines are connected to [git repositories]({{site.baseurl}}/docs/pipelines/pipelines/#pipeline-creation-modes)
+so the source code is already checked out and available to all pipeline steps.
+
+**Creating a [JAR file]({{site.baseurl}}/docs/learn-by-example/java/spring-boot-2/):**
+
+`codefresh.yml`
+{% highlight yaml %}
+version: '1.0'
+steps:
+ my_jar_compilation:
+ title: Compile/Unit test
+ image: maven:3.5.2-jdk-8-alpine
+ commands:
+ - mvn -Dmaven.repo.local=/codefresh/volume/m2_repository package
+{% endhighlight %}
+
+Note how we [cache Maven dependencies]({{site.baseurl}}/docs/learn-by-example/java/spring-boot-2/#caching-the-maven-dependencies) using the internal Codefresh Volume.
+
+**Running unit tests in [Node.JS]({{site.baseurl}}/docs/learn-by-example/nodejs/):**
+
+`codefresh.yml`
+{% highlight yaml %}
+version: '1.0'
+steps:
+ my_node_app:
+ title: Running unit tests
+ image: node:11
+ commands:
+ - npm install
+ - npm run test
+{% endhighlight %}
+
+**Packaging a [GO application]({{site.baseurl}}/docs/learn-by-example/golang/golang-hello-world/):**
+
+`codefresh.yml`
+{% highlight yaml %}
+version: '1.0'
+steps:
+ my_go_app:
+ title: Compiling GO code
+ image: golang:1.7.1
+ commands:
+ - go get github.com/example-user/example-repo
+ - go build
+{% endhighlight %}
+
+**Performing a [blue/green deployment](https://github.com/codefresh-io/k8s-blue-green-deployment):**
+
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+steps:
+ blueGreenDeploy:
+ title: Deploying new version
+ image: codefresh/k8s-blue-green:master
+ environment:
+ - SERVICE_NAME=my-demo-app
+ - DEPLOYMENT_NAME=my-demo-app
+ - NEW_VERSION=${{CF_SHORT_REVISION}}
+ - HEALTH_SECONDS=60
+ - NAMESPACE=colors
+ - KUBE_CONTEXT=myDemoAKSCluster
+{% endraw %}
+{% endhighlight %}
+
+## Dynamic freestyle steps
+
+Codefresh has the unique ability to allow you to run freestyle steps in the context of a docker image
+created on the same pipeline. This means that you can dynamically [create docker images]({{site.baseurl}}/docs/pipelines/steps/build/) on demand within the pipeline
+that needs them.
+
+Creating a custom docker image with extra tools (Terraform and Ansible)
+
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+steps:
+ CreateMyCustomImage:
+ title: Creating custom Docker image
+ type: build
+ dockerfile: tf_and_ansible.Dockerfile
+ image_name: my-iac-tools-container
+ UseMyCustomImage:
+ title: Running IAC tools
+ image: ${{CreateMyCustomImage}}
+ commands:
+ - terraform --version
+ - ansible --version
+{% endraw %}
+{% endhighlight %}
+
+Here the `UseMyCustomImage` freestyle step is running in the [context]({{site.baseurl}}/docs/pipelines/variables/#context-related-variables) of the Docker image that was created in the previous step.
+In fact, a very common pattern that you will see in Codefresh pipelines is the executions of [unit tests]({{site.baseurl}}/docs/testing/unit-tests/) in the image that was created in a build step:
+
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+steps:
+ MyAppDockerImage:
+ title: Building Docker Image
+ type: build
+ image_name: my-own-app
+ MyUnitTests:
+ title: Running Unit tests
+ image: ${{MyAppDockerImage}}
+ commands:
+ - ./my-unit-tests.sh
+{% endraw %}
+{% endhighlight %}
+
+Here the `MyAppDockerImage` step is creating a custom docker image. That image is used to run the `MyUnitTests` step.
+This pattern works very well for cases where testing tools are already part of the image (usually with dynamic languages).
+In other cases, you can have a second Dockerfile in your application that is designed explicitly to hold all your testing tools.
+
+## Entry point
+
+When using the original container entry point, you can use the `cmd` field to specify additional arguments to be used with the entry point. This can be a string, or an array of strings. For example:
+
+```yaml
+image: mwendler/cowsay
+cmd:
+ - "Hello"
+```
+
+is equivalent to running `docker run mwendler/cowsay Hello` which is equivalent to running `cowsay Hello` inside the container.
+
+
+You can override the container's default entry point using the `entry_point` field. This can be a string, or an array of strings. For example:
+
+```yaml
+
+image: mwendler/cowsay
+entry_point:
+ - echo
+ - Hello
+```
+
+## Commands
+
+When you use the `commands` field, it will override the container original `entry_point` and will execute the commands in a shell inside the container.
+The provided commands are concatenated into a single command using the shell's `;` operator, and are run using the default shell `/bin/sh` as an entry point.
+Additional settings that are set only when using commands are `set -e`, and the [`cf_export`]({{site.baseurl}}/docs/pipelines/variables/#using-cf_export-command) utility.
+
+> Using complex commands in the freestyle step requires use of [YAML block scalars](http://stackoverflow.com/questions/3790454/in-yaml-how-do-i-break-a-string-over-multiple-lines).
+
+### Commands and Entry point
+
+If you want to retain the original entry point, do not use the `commands` field.
+
+However, this example:
+
+```yaml
+image: mwendler/cowsay
+commands:
+ - "Hello"
+```
+
+will cause an error because the engine will attempt to run the command `Hello` in a shell inside the container, and the command `Hello` is not a valid command.
+In order to use the `commands` form with an `entrypoint`-enabled container, you can add the commands from the entry point to the list of commands, like so:
+
+```yaml
+image: mwendler/cowsay
+commands:
+ - cowsay "Hello"
+```
+
+## Custom volumes
+
+If you are familiar with [Codefresh pipelines]({{site.baseurl}}/docs/pipelines/introduction-to-codefresh-pipelines/#sharing-the-workspace-between-build-steps) you should know that all freestyle steps automatically share a [volume](https://docs.docker.com/storage/) mounted at `/codefresh/volume` which can be used to transfer data (e.g. dependencies and test results) from each step to the next.
+
+**This volume is automatically mounted by Codefresh and needs no configuration at all**. All you have to do to access it, is read/write the `/codefresh/volume` folder from your application. This folder also [includes by default the source code]({{site.baseurl}}/docs/pipelines/introduction-to-codefresh-pipelines/#cloning-the-source-code) of the git repository connected to the pipeline (at the `/codefresh/volume/` subfolder)
+
+You can use the `volumes` property to create your own custom volumes that can be mounted in different folders. **For security reasons however all source volume data (i.e. the "host" folder) still needs to be bound with `/codefresh/volume` or any of its subdirectories**:
+
+Attempting to mount a folder outside of `/codefresh/volume` will result in an error.
+
+### Simple volume example
+
+Let's assume that your application expects to find a configuration folder at `/config`. The folder however that contains the needed files in GIT is under `my-app-repo/my-sample-config`. When the application is checked out the files actually reside at `/codefresh/volume/my-app-repo/my-sample-config`.
+
+You can still run your application without any code changes by doing the following bind:
+
+```yaml
+title: Running my application with custom volume
+image: my-docker-app:latest
+volumes:
+ - ./my-app-repo/my-sample-config:/config # host path is relative to /codefresh/volume
+```
+
+Now the `my-docker-app` application will run and find all its needed files at `/config`.
+
+Notice that we use a relative path here but even if you used an absolute one (`/my-app/my-sample-config`) the result would be the same because Codefresh does not allow you to bind anything outside the shared Codefresh volume.
+
+### Injecting custom folders in a running container
+
+Here is another example pipeline with two steps. The first one creates a custom config file in the shared Codefresh volume (that is always available) at `/codefresh/volume/my-config`. The second step reads the config file at a different folder in `/my-own-config-folder-injected`.
+
+```yaml
+version: '1.0'
+steps:
+ CreateCustomConfiguration:
+ title: Creating configuration
+ image: alpine
+ commands:
+ - mkdir -p /codefresh/volume/my-config
+ - echo "foo=bar" > /codefresh/volume/my-config/custom.txt
+ - ls /codefresh/volume/my-config
+ InjectConfiguration:
+ title: Reading configuration
+ image: alpine
+ commands:
+ - ls /codefresh/volume/my-config # Codefresh default volume shared between all steps
+ - ls /my-own-config-folder-injected # Special volume just for this container
+ - cat /my-own-config-folder-injected/custom.txt
+ volumes:
+ - ./my-config:/my-own-config-folder-injected
+```
+
+When the second steps runs, the `custom.txt` file is available both at `/codefresh/volume/my-config` (the shared volume of all steps) as well as the `/my-own-config-folder-injected` folder which was mounted specifically for this step.
+
+
+## More freestyle steps
+
+You can use in a freestyle step any Docker image available in a public repository such as Dockerhub. This makes the integration of Codefresh and various cloud tools very easy.
+
+Codefresh also offers a plugin directory at [http://codefresh.io/steps/](http://codefresh.io/steps/) created specifically for CI/CD operations.
+
+{% include
+image.html
+lightbox="true"
+file="/images/pipeline/plugin-directory.png"
+url="/images/pipeline/plugin-directory.png"
+alt="Codefresh steps directory"
+caption="Codefresh steps directory"
+max-width="80%"
+%}
+
+
+## Related articles
+[Introduction to Pipelines]({{site.baseurl}}/docs/pipelines/introduction-to-codefresh-pipelines/)
+[Codefresh YAML]({{site.baseurl}}/docs/codefresh-yaml/what-is-the-codefresh-yaml/)
+[Pipeline steps]({{site.baseurl}}/docs/pipelines/steps/)
+
diff --git a/_docs/pipelines/steps/git-clone.md b/_docs/pipelines/steps/git-clone.md
new file mode 100644
index 00000000..628841a6
--- /dev/null
+++ b/_docs/pipelines/steps/git-clone.md
@@ -0,0 +1,438 @@
+---
+title: "Git-Clone"
+description: "Checkout code in your pipelines"
+group: pipelines
+sub_group: steps
+redirect_from:
+ - /docs/git-clone/
+toc: true
+---
+Clones a Git repository to the filesystem.
+
+A pipeline can have any number of Git clone steps (even none). You can checkout code from any private or public repository. Cloning a repository is not constrained to the trigger of a pipeline. You can trigger a pipeline from a commit that happened on Git repository A while the pipeline is checking out code from Git Repository B.
+
+>Notice that if you are an existing customer before May 2019, Codefresh will automatically checkout the code from a [connected git repository]({{site.baseurl}}/docs/integrations/git-providers/) when a pipeline is created on that repository. In this case an implicit git clone step is included in your pipeline. You can still override it with your own git clone step as explained in this page
+
+## Usage
+
+ `YAML`
+{% highlight yaml %}
+step_name:
+ type: git-clone
+ title: Step Title
+ description: Step description
+ working_directory: /path
+ repo: owner/repo
+ git: my-git-provider
+ revision: abcdef12345
+ use_proxy: false
+ credentials:
+ username: user
+ password: credentials
+ fail_fast: false
+ when:
+ branch:
+ ignore: [ develop ]
+ on_success:
+ ...
+ on_fail:
+ ...
+ on_finish:
+ ...
+ retry:
+ ...
+{% endhighlight %}
+
+## Fields
+
+{: .table .table-bordered .table-hover}
+| Field | Description | Required/Optional/Default |
+| ------------------------------------------ | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------- |
+| `title` | The free-text display name of the step. | Optional |
+| `description` | A basic, free-text description of the step. | Optional |
+| `stage` | Parent group of this step. See [using stages]({{site.baseurl}}/docs/pipelines/stages/) for more information. | Optional |
+| `working_directory` | The directory to which the repository is cloned. It can be an explicit path in the container's file system, or a variable that references another step. The default value is {% raw %}`${{main_clone}}`{% endraw %}, but note that the default will only be used if you name your step `main_clone`. See the example on [working inside the cloned directory]({{site.baseurl}}/docs/yaml-examples/examples/git-checkout/#working-inside-the-cloned-directory) for more information. | Default |
+| `git` | The name of the [git integration]({{site.baseurl}}/docs/integrations/git-providers/) you want to use. If left empty, Codefresh will attempt to use the git provider that was used during account sign-up. Note that this might have unexpected results if you are changing your Git integrations.| Required|
+| `repo` | path of the repository without the domain name in the form of `my_username/my_repo` | Required |
+| `revision` | The revision of the repository you are checking out. It can be a revision hash or a branch name. The default value is the branch you have specified in your Git provider (e.g `master` or `main`). | Default |
+| `use_proxy` | If set to true the Git clone process will honor `HTTP_PROXY` and `HTTPS_PROXY` variables if present for [working via a proxy](#using-git-behind-a-proxy). Default value is `false`. | Default |
+| `credentials` | Credentials to access the repository, if it requires authentication. It can be an object containing `username` and `password` fields. Credentials are optional if you are using the [built-in git integrations]({{site.baseurl}}/docs/integrations/git-providers/). | Optional |
+| `fail_fast` | If a step fails, the process is halted. The default value is `true`. | Default |
+| `when` | Define a set of conditions that need to be satisfied in order to execute this step. You can find more information in the [Conditional Execution of Steps]({{site.baseurl}}/docs/codefresh-yaml/conditional-execution-of-steps/) article. | Optional |
+| `on_success`, `on_fail` and `on_finish` | Define operations to perform upon step completion using a set of predefined [Post-Step Operations]({{site.baseurl}}/docs/codefresh-yaml/post-step-operations/). | Optional |
+| `retry` | Define retry behavior as described in [Retrying a step]({{site.baseurl}}/docs/codefresh-yaml/what-is-the-codefresh-yaml/#retrying-a-step). | Optional |
+
+**Exported resources:**
+- Working Directory
+
+{{site.data.callout.callout_info}}
+If you want to extend the git-clone step you can use the freestyle step. Example how to do it you can find [here]({{site.baseurl}}/docs/yaml-examples/examples/git-clone-private-repository-using-freestyle-step/)
+{{site.data.callout.end}}
+
+## Basic clone step (project-based pipeline)
+
+The easiest way to use a git clone step is to use your default git provider as configured in [built-in git integrations]({{site.baseurl}}/docs/integrations/git-providers/).
+
+Here is an example of a pipeline that will automatically check out the repository that triggered it (i.e. a commit happened on that repository).
+
+>Notice that the name of the clone step is `main_clone`. This will automatically set the working directory of all other steps that follow it **inside** the folder of the project that was checked out. This only applies to [built-in]({{site.baseurl}}/docs/pipelines/steps/#built-in-steps) Codefresh steps and not [custom plugins]({{site.baseurl}}/docs/pipelines/steps/#creating-a-typed-codefresh-plugin). This is normally what you want for a pipeline that only checks out a single project. If you use any other name apart from `main_clone` the working directory for all subsequent steps will not be affected and it will default on the [shared volume]({{site.baseurl}}/docs/pipelines/introduction-to-codefresh-pipelines/#sharing-the-workspace-between-build-steps) which is the [parent folder]({{site.baseurl}}/docs/pipelines/introduction-to-codefresh-pipelines/#cloning-the-source-code) of checkouts.
+
+
+
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+steps:
+ main_clone:
+ title: 'Cloning main repository...'
+ type: git-clone
+ repo: '${{CF_REPO_OWNER}}/${{CF_REPO_NAME}}'
+ revision: '${{CF_REVISION}}'
+ git: my-git-provider
+ PrintFileList:
+ title: 'Listing files'
+ image: alpine:latest
+ commands:
+ - 'ls -l'
+{% endraw %}
+{% endhighlight %}
+
+The CF values will be automatically filled by Codefresh from the git trigger. See the [variables page]({{site.baseurl}}/docs/pipelines/variables/) for more details.
+
+## Choosing a specific git provider (project-based pipeline)
+
+If you don't want to use the default git provider you can explicitly set the provider by using the same name of the integration as it is shown in [the git integrations page]({{site.baseurl}}/docs/integrations/git-providers/).
+
+{% include
+image.html
+lightbox="true"
+file="/images/codefresh-yaml/steps/example-git-providers.png"
+url="/images/codefresh-yaml/steps/example-git-providers.png"
+alt="Example git integrations"
+caption="Example git integrations"
+max-width="40%"
+%}
+
+Here is an example for an integration with the GitLab provider already connected:
+
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+steps:
+ main_clone:
+ title: 'Cloning main repository...'
+ type: git-clone
+ repo: '${{CF_REPO_OWNER}}/${{CF_REPO_NAME}}'
+ revision: '${{CF_REVISION}}'
+ git: my-gitlab
+ PrintFileList:
+ title: 'Listing files'
+ image: alpine:latest
+ commands:
+ - 'ls -l'
+{% endraw %}
+{% endhighlight %}
+
+## Checkout a specific repository/revision (project based pipeline)
+
+If you want to check out a specific git repository regardless of what repository actually created the trigger
+you can just define all values in a non-static manner. For example, if you want your pipeline to always checkout git repository `foo` even when the trigger happened from repository `bar` you can define the checkout step as below:
+
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+steps:
+ main_clone:
+ title: 'Cloning main repository...'
+ type: git-clone
+ repo: 'my-github-username/foo'
+ revision: '${{CF_REVISION}}'
+ git: my-github-integration
+ PrintFileList:
+ title: 'Listing files'
+ image: alpine:latest
+ commands:
+ - 'ls -l'
+{% endraw %}
+{% endhighlight %}
+
+In a similar manner you can also define that the pipeline will always checkout master, regardless of the commit that actually triggered it.
+
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+steps:
+ main_clone:
+ title: 'Cloning main repository...'
+ type: git-clone
+ repo: '${{CF_REPO_OWNER}}/${{CF_REPO_NAME}}'
+ revision: 'master'
+ git: my-git-provider
+ PrintFileList:
+ title: 'Listing files'
+ image: alpine:latest
+ commands:
+ - 'ls -l'
+{% endraw %}
+{% endhighlight %}
+
+## Checkout code using the Codefresh Runner
+
+If you are using the [Codefresh runner]({{site.baseurl}}/docs/installation/codefresh-runner/), you need to use
+the fully qualified path of the Git repository:
+
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+steps:
+ main_clone:
+ title: 'Cloning main repository...'
+ type: git-clone
+ repo: https://github-internal.example.com/my-username/my-app
+ revision: '${{CF_REVISION}}'
+ git: my-internal-git-provider
+ PrintFileList:
+ title: 'Listing files'
+ image: alpine:latest
+ commands:
+ - 'ls -l'
+{% endraw %}
+{% endhighlight %}
+
+More details can be found in the [private Git instructions page]({{site.baseurl}}/docs/reference/behind-the-firewall/#checking-out-code-from-a-private-git-repository).
+
+
+## Checking out multiple Git repositories
+
+It is very easy to checkout additional repositories in a single pipeline by adding more `git-clone` steps.
+In that case you should use different names for the steps (instead of `main_clone`) as this will make the working
+folder for all steps the [shared volume]({{site.baseurl}}/docs/pipelines/introduction-to-codefresh-pipelines/#sharing-the-workspace-between-build-steps).
+
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+steps:
+ my_first_checkout:
+ title: 'Cloning first repository...'
+ type: git-clone
+ repo: 'my-gitlab-username/foo'
+ revision: '${{CF_REVISION}}'
+ git: my-gitlab-integration
+ my_second_checkout:
+ title: 'Cloning second repository...'
+ type: git-clone
+ repo: 'my-github-username/bar'
+ revision: '${{CF_REVISION}}'
+ git: my-github-integration
+ PrintFileList:
+ title: 'Listing files'
+ image: alpine:latest
+ commands:
+ - 'ls -l'
+{% endraw %}
+{% endhighlight %}
+
+
+## Skip or customize default clone (repository-based pipeline)
+
+If you have existing pipelines connected to repositories (only for Codefresh accounts created before May 2019)
+a git clone step is transparently added to git attached pipelines without you having to explicitly add a step into the pipeline. This is a convenience to enable easy CI pipelines.
+If you do not require git cloning, or you would like to customize the implicit git cloning behavior, you can choose to skip the automatically added git clone step.
+
+There are 2 ways to do that:
+
+1. Add a pipeline environment variable called `CF_SKIP_MAIN_CLONE` with value of `true`.
+
+-or-
+
+2. Add a step with key `main_clone` to your pipeline. This step can be of any type and can do any action. This step will override the default clone implementation. For example:
+
+```yaml
+version: '1.0'
+steps:
+ main_clone:
+ title: Checking out code
+ image: alpine/git:latest
+ commands:
+ - git clone ...
+ another_step:
+ ...
+```
+
+## Reuse a Git token from Codefresh integrations
+
+You also have the capability to use one of your existing [git integrations]({{site.baseurl}}/docs/integrations/git-providers/)
+as an authentication mechanism.
+
+The [Codefresh CLI](https://codefresh-io.github.io/cli/) can read one of the connected [git authentication contexts](https://codefresh-io.github.io/cli/contexts/get-context/) and use that token for a custom clone step.
+
+Here is an example for GitHub
+
+
+```yaml
+version: '1.0'
+steps:
+ get_git_token:
+ title: Reading GitHub token
+ image: codefresh/cli
+ commands:
+ - cf_export GITHUB_TOKEN=$(codefresh get context github --decrypt -o yaml | yq -r .spec.data.auth.password)
+ main_clone:
+ title: Checking out code
+ image: alpine/git:latest
+ commands:
+ - git clone https://my-github-username:$GITHUB_TOKEN@github.com/my-github-username/my-repo.git
+ another_step:
+ ...
+```
+
+## Working with GIT submodules
+
+To checkout a git project including its submodules you can use the [Codefresh submodule plugin](https://github.com/codefresh-io/plugins/tree/master/plugins/gitsubmodules). This plugin is already offered as a public docker image at [Dockerhub](https://hub.docker.com/r/codefresh/cfstep-gitsubmodules/tags).
+
+To use this module in your pipeline, add a new step like the one shown below.
+
+```yaml
+version: '1.0'
+steps:
+ updateSubmodules:
+ image: codefresh/cfstep-gitsubmodules
+ environment:
+ - GITHUB_TOKEN=
+ - CF_SUBMODULE_SYNC=
+ - CF_SUBMODULE_UPDATE_RECURSIVE=
+```
+
+The GitHub token can be either defined in the pipeline on its own as an environment variable, or fetched from
+the existing [GIT integration]({{site.baseurl}}/docs/integrations/git-providers/) as shown in the previous section.
+
+Here is a full pipeline example:
+
+ `codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+stages:
+ - checkout
+ - prepare
+ - build
+steps:
+ clone:
+ title: Cloning the repository
+ type: git-clone
+ stage: checkout
+ arguments:
+ repo: '${{CF_REPO_OWNER}}/${{CF_REPO_NAME}}'
+ git: github
+ revision: '${{CF_REVISION}}'
+
+ updateSubmodules:
+ image: codefresh/cfstep-gitsubmodules
+ stage: prepare
+ working_directory: '${{clone}}'
+ environment:
+ - GITHUB_TOKEN=${{MY_GITHUB_TOKEN}}
+ docker_build:
+ title: Building docker image
+ type: build
+ stage: build
+ working_directory: '${{clone}}/k8s/docker'
+ tag: current
+ disable_push: true
+ image_name: 'my-docker-image'
+
+{% endraw %}
+{% endhighlight %}
+
+This pipeline does the following:
+
+1. Clones the main source code
+1. Updates submodules
+1. Creates a docker image
+
+
+## Use an SSH key with Git
+
+It is also possible to use an SSH key with git. When [creating your pipeline]({{site.baseurl}}/docs/pipelines/pipelines/) add your SSH key as an encrypted
+environment variable after processing it with `tr`:
+
+```
+cat ~/.ssh/my_ssh_key_file | tr '\n' ','
+```
+
+
+Then in the pipeline use it like this:
+
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+steps:
+ main_clone:
+ title: Checking out code
+ image: alpine/git:latest
+ commands:
+ - mkdir -p ~/.ssh
+ - echo "${SSH_KEY}" | tr \'"${SPLIT_CHAR}"\' '\n' > ~/.ssh/id_rsa
+ - chmod 600 ~/.ssh/id_rsa
+ - git clone git@github.com:my-github-username/my-repo.git
+ # can also use go get or other similar command that uses git internally
+ another_step:
+ ...
+{% endraw %}
+{% endhighlight %}
+
+## Using Git behind a proxy
+
+If you use the [Codefresh Runner]({{site.baseurl}}/docs/installation/codefresh-runner/) and need to use a network proxy in your clone step you need to set the [variables]({{site.baseurl}}/docs/pipelines/variables/) `HTTP_PROXY` and/or `HTTPS_PROXY` in the pipeline
+and then activate the property `use_proxy: true` in the clone step. Example:
+
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: "1.0"
+steps:
+ clone:
+ title: "Cloning repository"
+ type: "git-clone"
+ repo: "https://github.com/my-github-user/my-repo/"
+ revision: "master"
+ use_proxy: true
+ git: my-git-provider
+{% endraw %}
+{% endhighlight %}
+
+For setting the values of the proxy variables you can use any of the supported methods for defining variables such as [shared configuration]({{site.baseurl}}/docs/configure-ci-cd-pipeline/shared-configuration/).
+
+
+{% include
+image.html
+lightbox="true"
+file="/images/codefresh-yaml/steps/proxy-variables.png"
+url="/images/codefresh-yaml/steps/proxy-variables.png"
+alt="Pipeline variable"
+caption="Pipeline variable"
+max-width="40%"
+%}
+
+For more details see the [behind the firewall page]({{site.baseurl}}/docs/installation/behind-the-firewall/).
+
+
+## Related articles
+[Creating pipelines]({{site.baseurl}}/docs/pipelines/pipelines/)
+[Git integrations]({{site.baseurl}}/docs/integrations/git-providers/)
+[YAML steps]({{site.baseurl}}/docs/pipelines/steps/)
+[Git Checkout Examples]({{site.baseurl}}/docs/yaml-examples/examples/git-checkout/)
+[Custom Git Commands]({{site.baseurl}}/docs/yaml-examples/examples/git-checkout-custom/)
+
+
+
+
+
+
diff --git a/_docs/pipelines/steps/launch-composition.md b/_docs/pipelines/steps/launch-composition.md
new file mode 100644
index 00000000..a3bce2d5
--- /dev/null
+++ b/_docs/pipelines/steps/launch-composition.md
@@ -0,0 +1,92 @@
+---
+title: "Launch-Composition"
+description: "Create a test environment with its dependencies in Codefresh infrastructure"
+group: pipelines
+sub_group: steps
+redirect_from:
+ - /docs/launch-composition-2/
+ - /docs/codefresh-yaml/steps/launch-composition-2/
+toc: true
+---
+The Launch Composition step provides the ability to launch long term running environments that can live outside the context of a running pipeline.
+You can use this step to automate your test environment creation through a codefresh.yml file instead of manually launching an environment from the UI.
+
+>Note that "launch-composition" creates a permanent test environment that keeps running even after a pipeline has finished. If you just want temporary test environments that run *only while* a pipeline is running, see [service containers]({{site.baseurl}}/docs/pipelines/service-containers/) and the documentation page for [integration tests]({{site.baseurl}}/docs/testing/integration-tests/).
+
+## Usage
+
+ `ui defined composition`
+{% highlight yaml %}
+step_name:
+ title: Step Title
+ type: launch-composition
+ composition: 'ui_defined_composition_name'
+ environment_name: 'environment name'
+ on_success:
+ ...
+ on_fail:
+ ...
+ on_finish:
+ ...
+{% endhighlight %}
+
+ `inline composition`
+{% highlight yaml %}
+step_name:
+ type: launch-composition
+ composition:
+ version: '2'
+ services:
+ app:
+ image: owner/app:latest
+ db:
+ image: mongo
+ environment_name: 'environment name'
+ on_success:
+ ...
+ on_fail:
+ ...
+ on_finish:
+ ...
+ retry:
+ ...
+{% endhighlight %}
+
+ `from file composition`
+{% highlight yaml %}
+step_name:
+ type: launch-composition
+ working_directory: ${{a_clone_step}}
+ composition: './path/to/docker-compose.yaml'
+ environment_name: 'environment name'
+ on_success:
+ ...
+ on_fail:
+ ...
+ on_finish:
+ ...
+{% endhighlight %}
+
+## Fields
+
+{: .table .table-bordered .table-hover}
+| Field | Description | Required/Optional/Default |
+| ------------------------------------------ | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------- |
+| `title` | The free-text display name of the step. | Optional |
+| `description` | A basic, free-text description of the step. | Optional |
+| `stage` | Parent group of this step. See [using stages]({{site.baseurl}}/docs/pipelines/stages/) for more information. | Optional |
+| `working_directory` | The directory in which to search for the composition file. It can be an explicit path in the container's file system, or a variable that references another step. The default is {% raw %}`${{main_clone}}`{% endraw %}. | Default |
+| `composition` | The composition you want to run. It can be an inline YAML definition, a path to a composition file on the file system, or the logical name of a composition stored in the Codefresh system. | Required |
+| `environment_name` | The environment name that will be given. In case a previous environment exists with the same name, it will first be terminated. The default value will be the name/path provided in the 'composition' field. | Default |
+| `composition_variables` | A set of environment variables to substitute in the composition. | Optional |
+| `fail_fast` | If a step fails, and the process is halted. The default value is `true`. | Default |
+| `when` | Define a set of conditions which need to be satisfied in order to execute this step. You can find more information in the [Conditional Execution of Steps]({{ site.baseurl }}/docs/pipelines/conditional-execution-of-steps/) article. | Optional |
+| `on_success`, `on_fail` and `on_finish` | Define operations to perform upon step completion using a set of predefined [Post-Step Operations]({{ site.baseurl }}/docs/pipelines/post-step-operations/). | Optional |
+| `entry_point` | The name of the main service. | Optional |
+| `retry` | Define retry behavior as described in [Retrying a step]({{site.baseurl}}/docs/codefresh-yaml/what-is-the-codefresh-yaml/#retrying-a-step). | Optional |
+
+## Related articles
+[Preview environments]({{site.baseurl}}/docs/getting-started/on-demand-environments/)
+[Launch Composition example]({{site.baseurl}}/docs/yaml-examples/examples/launch-composition/)
+[Integration tests]({{site.baseurl}}/docs/testing/integration-tests/)
+[Service Containers]({{site.baseurl}}/docs/pipelines/service-containers/)
\ No newline at end of file
diff --git a/_docs/pipelines/steps/push.md b/_docs/pipelines/steps/push.md
new file mode 100644
index 00000000..699f0200
--- /dev/null
+++ b/_docs/pipelines/steps/push.md
@@ -0,0 +1,257 @@
+---
+title: "Push step"
+description: "Pushing Docker images from your pipeline"
+group: pipelines
+sub_group: steps
+redirect_from:
+ - /docs/push-1/
+ - /docs/codefresh-yaml/steps/push-1/
+toc: true
+---
+
+{{site.data.callout.callout_info}}
+
+If you use only the default Docker registry of your account this step is optional as all successful Codefresh pipelines automatically push the Docker image they create in the default Docker registry. No further configuration is needed to achieve this behavior.
+{{site.data.callout.end}}
+
+Push a built image to a remote Docker registry with one or more tags. Supports standard Docker registries and ECR.
+
+Notice that when you use [any external registry]({{site.baseurl}}/docs/docker-registries/external-docker-registries/), you need to comply with the naming pattern used by that registry, otherwise the build step will fail. For example, if your Codefresh image is tagged as `foo_username/my_image` but your Dockerhub account is `bar_username` then the build will fail and you need to customize the push step to use `bar_username` instead. This is a limitation of external registries such as Dockerhub.
+
+## Usage
+
+ `YAML`
+{% highlight yaml %}
+step_name:
+ type: push
+ title: Step Title
+ description: Free text description
+ candidate: {% raw %}${{build_step_name}}{% endraw %}
+ tag: latest
+ image_name: codefresh/app
+ registry: my-registry
+ fail_fast: false
+ when:
+ branch:
+ only:
+ - /FB-/i
+ on_success:
+ ...
+ on_fail:
+ ...
+ on_finish:
+ ...
+ retry:
+ ...
+
+{% endhighlight %}
+
+## Fields
+
+{: .table .table-bordered .table-hover}
+| Field | Description | Required/Optional/Default |
+| ------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------- |
+| `title` | The free-text display name of the step. | Optional |
+| `description` | A basic, free-text description of the step. | Optional |
+| `stage` | Parent group of this step. See [using stages]({{site.baseurl}}/docs/pipelines/stages/) for more information. | Optional |
+| `candidate` | The identifier of the image to push to the remote Docker registry. It can be an explicit identifier of an image to push, or a variable that references a `Build` step. | Required |
+| `tag` | The tag under which to push the image. Use either this or `tags`. The default is `latest`. | Default |
+| `region` | Relevant only for [Amazon ECR]({{site.baseurl}}/docs/integrations/docker-registries/amazon-ec2-container-registry/) integrations using either service accounts or explicit credentials. The names of the regions for which to perform cross-region replication. The names of the source region and the destination region name must be defined in separate steps. | Optional |
+| `role_arn` | Relevant only for [Amazon ECR]({{site.baseurl}}/docs/integrations/docker-registries/amazon-ec2-container-registry/) integrations using either service accounts or explicit credentials. The role with the required permissions to use to pull the image. For example, `arn:aws:iam:::role/` | Required |
+| `aws_session_name` | Relevant only for [Amazon ECR]({{site.baseurl}}/docs/integrations/docker-registries/amazon-ec2-container-registry/) integrations using either service accounts or explicit credentials. The name of the AWS session. If not defined, `default-session-name` is used. | Default |
+| `aws_duration_seconds` | Relevant only for [Amazon ECR]({{site.baseurl}}/docs/integrations/docker-registries/amazon-ec2-container-registry/) integrations using either service accounts or explicit credentials. The length of time, in seconds, for which the role credentials are considered valid, and must be between `900-3600` seconds. If not defined, the duration is set to the default of `3600` seconds. | Default |
+| `tags` | Multiple tags under which to push the image. Use either this or `tag`. This is an array, so should be of the following style: {::nomarkdown}
tags: -tag1 -tag2 -{% raw %}${{CF_BRANCH_TAG_NORMALIZED_LOWER_CASE}}{% endraw %} -tag4
{:/}or {::nomarkdown}
tags:['tag1','tag2','{% raw %}${{CF_BRANCH_TAG_NORMALIZED_LOWER_CASE}}{% endraw %}','tag4']
{:/} | Default |
+| `image_name` | The tagged image name that will be used. The default value will be the same image name as of the candidate. | Default |
+| `registry` | The registry logical name of one of the inserted registries from the integration view. The default value will be your default registry [if you have more than one]({{site.baseurl}}/docs/docker-registries/external-docker-registries/). | Default |
+| `registry_context` | Advanced property for resolving Docker images when [working with multiple registries with the same domain]({{site.baseurl}}/docs/docker-registries/working-with-docker-registries/#working-with-multiple-registries-with-the-same-domain) | Optional |
+| `fail_fast` | If a step fails, and the process is halted. The default value is `true`. | Default |
+| `when` | Define a set of conditions which need to be satisfied in order to execute this step. You can find more information in the [Conditional Execution of Steps]({{site.baseurl}}/docs/pipelines/conditional-execution-of-steps/) article. | Optional |
+| `on_success`, `on_fail` and `on_finish` | Define operations to perform upon step completion using a set of predefined [Post-Step Operations]({{site.baseurl}}/docs/pipelines/post-step-operations/). | Optional |
+| `retry` | Define retry behavior as described in [Retrying a step]({{site.baseurl}}/docs/pipelines/what-is-the-codefresh-yaml/#retrying-a-step). | Optional |
+
+## Examples
+
+Push an image to a registry connected with the [integration name]({{site.baseurl}}/docs/docker-registries/external-docker-registries/) of `myazureregistry`.
+
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+stages:
+- 'my build phase'
+- 'my push phase'
+steps:
+ MyAppDockerImage:
+ title: Building Docker Image
+ stage: 'my build phase'
+ type: build
+ image_name: my-app-image
+ dockerfile: Dockerfile
+ pushToMyRegistry:
+ stage: 'my push phase'
+ type: push
+ title: Pushing to a registry
+ candidate: ${{MyAppDockerImage}}
+ tag: ${{CF_SHORT_REVISION}}
+ registry: myazureregistry
+{% endraw %}
+{% endhighlight %}
+
+Push an image as the name of the branch in the [external registry]({{site.baseurl}}/docs/docker-registries/external-docker-registries/) and also use a different image than the default. The same image will also be pushed as `latest` in the internal Codefresh registry (with the default name of `my-app-image`).
+
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+stages:
+- 'my build phase'
+- 'my push phase'
+steps:
+ MyAppDockerImage:
+ title: Building Docker Image
+ stage: 'my build phase'
+ type: build
+ image_name: my-app-image
+ dockerfile: Dockerfile
+ tag: latest
+ pushToMyRegistry:
+ stage: 'my push phase'
+ type: push
+ title: Pushing to a registry
+ candidate: ${{MyAppDockerImage}}
+ tag: ${{CF_BRANCH_TAG_NORMALIZED_LOWER_CASE}}
+ registry: myazureregistry
+ image_name: my-user-name/a-different-image-name
+{% endraw %}
+{% endhighlight %}
+
+
+Push an image with multiple tags.
+
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+stages:
+- 'my build phase'
+- 'my push phase'
+steps:
+ MyAppDockerImage:
+ title: Building Docker Image
+ stage: 'my build phase'
+ type: build
+ image_name: my-app-image
+ dockerfile: Dockerfile
+ pushToMyRegistry:
+ stage: 'my push phase'
+ type: push
+ title: Pushing to a registry
+ candidate: ${{MyAppDockerImage}}
+ tags:
+ - ${{CF_SHORT_REVISION}}
+ - latest
+ - 2.0.0
+ registry: myazureregistry
+{% endraw %}
+{% endhighlight %}
+
+Push an image with multiple tags to multiple Docker registries in [parallel]({{site.baseurl}}/docs/pipelines/advanced-workflows/).
+Both registries are connected first in the [integrations page]({{site.baseurl}}/docs/docker-registries/external-docker-registries/).
+
+
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+stages:
+- 'my build phase'
+- 'my push phase'
+steps:
+ MyAppDockerImage:
+ title: Building Docker Image
+ stage: 'my build phase'
+ type: build
+ image_name: my-app-image
+ dockerfile: Dockerfile
+ PushingToRegistries:
+ type: parallel
+    stage: 'my push phase'
+ steps:
+ PushingToGoogleRegistry:
+ type: push
+ title: Pushing To Google Registry
+ candidate: ${{MyAppDockerImage}}
+ tags:
+ - ${{CF_BUILD_ID}}
+ - latest
+ - production
+ registry: gcr
+ PushingToDockerRegistry:
+ type: push
+ title: Pushing To Dockerhub Registry
+ candidate: ${{MyAppDockerImage}}
+ tag: '${{CF_SHORT_REVISION}}'
+ image_name: my-docker-hub-username/my-app-name
+ registry: dockerhub
+{% endraw %}
+{% endhighlight %}
+
+
+## Using passed credentials without pre-saving them
+
+This option enables you to push your images without pre-saving the credentials in Codefresh's registry integration view.
+
+>Note that this method of pushing images is offered as a workaround. The suggested way is to use the [central Codefresh integration for registries]({{site.baseurl}}/docs/docker-registries/external-docker-registries/) as explained in the previous section.
+
+ `YAML`
+{% highlight yaml %}
+step_name:
+ type: push
+ title: Step Title
+ description: Free text description
+ candidate: {% raw %}${{build_step_name}}{% endraw %}
+ tags: [ latest, {% raw %}${{CF_BRANCH}}{% endraw %} ]
+ image_name: codefresh/app
+ registry: dtr.host.com
+ credentials:
+ username: subject
+ password: credentials
+ fail_fast: false
+ when:
+ branch:
+ only:
+ - /FB-/i
+ on_success:
+ ...
+ on_fail:
+ ...
+ on_finish:
+ ...
+{% endhighlight %}
+
+{: .table .table-bordered .table-hover}
+| Field | Description | Required/Optional/Default |
+| ---------------------------- | ------------------------------------ | ----------------------------------------------- |
+| `title` | The free-text display name of the step. | Optional |
+| `description` | A basic, free-text description of the step. | Optional |
+| `provider` | The type of Docker registry provider. Can currently be either `docker` for a standard Docker registry, or `ecr` for the [Amazon EC2 Container Registry (ECR)](https://aws.amazon.com/ecr/). | Optional *Default value*: `docker` |
+| `candidate` | The identifier of the image to push to the remote Docker registry. It can be an explicit identifier of an image to push, or a variable that references a `Build` step. | Required |
+| `tag` | The tag under which to push the image. Use either this or `tags`. The default is `latest`. | Default |
+| `tags` | Multiple tags under which to push the image. Use either this or 'tag'. This is an array, so should be of the following style: {::nomarkdown}
tags: -tag1 -tag2 -{% raw %}${{CF_BRANCH_TAG_NORMALIZED}}{% endraw %} -tag4
{:/}or {::nomarkdown}
tags:['tag1','tag2','{% raw %}${{CF_BRANCH_TAG_NORMALIZED}}{% endraw %}','tag4']
{:/} | Default |
+| `image_name` | The tagged image name that will be used. The default value will be the same image name as of the candidate. | Default |
+| `registry` | The host address where the registry is located. The default is the registry configured in your Codefresh account, or Dockerhub. | Default **Ignored when provider is** `ecr` |
+| `credentials` | Credentials to access the registry if it requires authentication. It can be a hash object containing `username` and `password` fields. The default is the credentials configured in your Codefresh account. | Optional **Ignored when provider is** `ecr` |
+| `accessKeyId` | Your AWS access key. | Optional **Ignored when provider is** `docker` |
+| `secretAccessKey` | Your AWS secret access key. | Optional **Ignored when provider is** `docker` |
+| `region` | The region where the ECR registry is accessible. | Optional **Ignored when provider is** `docker` |
+| `fail_fast` | If a step fails, and the process is halted. The default value is `true`. |Default |
+| `when` | Define a set of conditions which need to be satisfied in order to execute this step. You can find more information in [Conditional Execution of Steps]({{site.baseurl}}/docs/pipelines/conditional-execution-of-steps/). | Optional |
+| `on_success`, `on_fail` and `on_finish` | Define operations to perform upon step completion using a set of predefined [Post-Step Operations]({{site.baseurl}}/docs/pipelines/post-step-operations/).| Optional |
+
+**Exported resources:**
+- Image ID.
+
+## Related articles
+[External Registry integrations]({{site.baseurl}}/docs/docker-registries/external-docker-registries/)
+[Custom Image annotations]({{site.baseurl}}/docs/docker-registries/metadata-annotations/)
+[Pipeline steps]({{site.baseurl}}/docs/pipelines/steps/)
\ No newline at end of file
diff --git a/_docs/pipelines/triggers.md b/_docs/pipelines/triggers.md
new file mode 100644
index 00000000..a074d9c5
--- /dev/null
+++ b/_docs/pipelines/triggers.md
@@ -0,0 +1,114 @@
+---
+title: "Triggers for pipelines"
+description: "Choose when your pipelines should run"
+group: pipelines
+redirect_from:
+ - /docs/pipeline-triggers/
+ - /docs/pipeline-triggers/introduction-triggers/
+toc: true
+---
+
+
+To create an effective CI/CD process, it should be possible to trigger a Codefresh pipeline execution not only on code repository events (like `push` or `PR`), but also on any "interesting" CD-related event, coming from some external system.
+
+Codefresh not only allows you to define different pipelines on a single project but it also offers you the capability to trigger them with completely separate mechanisms.
+
+
+## Pipeline trigger types
+
+The following types of triggers are currently supported for pipelines:
+
+* [Git triggers](git-triggers)
+* [Dockerhub triggers](dockerhub-triggers)
+* [Azure Registry triggers](azure-triggers)
+* [Quay triggers](quay-triggers)
+* [Helm triggers](helm-triggers)
+* [Artifactory triggers](jfrog-triggers)
+* [Cron trigger](cron-triggers)
+* [API/CLI trigger]({{site.baseurl}}/docs/integrations/codefresh-api/)
+
+As an example, this project contains four pipelines:
+
+{% include image.html
+lightbox="true"
+file="/images/pipeline/triggers/pipeline-examples.png"
+url="/images/pipeline/triggers/pipeline-examples.png"
+alt="Sample pipelines"
+caption="Sample pipelines"
+max-width="70%"
+%}
+
+Behind the scenes these pipelines are triggered from different events:
+
+* Pipeline "CI-build" uses a GIT trigger and starts after every commit to the code repository
+* Pipeline "Sonarcloud" is executed every weekend using a cron (timed) trigger
+* Pipeline "integration-test" is executed whenever a commit happens in a Pull request on the code
+* Pipeline "deploy-prod-k8s" is executed whenever a Docker image is pushed to the Docker registry
+
+This is just an example. You are free to create your own triggers that match your own internal process.
+It is also possible to add multiple triggers for a pipeline so that it is executed for more than one type of event.
+
+If a pipeline has no defined trigger you can still start it manually.
+
+For all trigger types you can also use the [Codefresh CLI](https://codefresh-io.github.io/cli/triggers/) to manage them.
+
+
+
+## Creating a new trigger for a pipeline
+
+By default, when you create a new project from a Git provider, it will start with a Git trigger that runs on every commit.
+
+{% include image.html
+lightbox="true"
+file="/images/pipeline/triggers/default-git-trigger.png"
+url="/images/pipeline/triggers/default-git-trigger.png"
+alt="Default GIT Trigger"
+caption="Default GIT Trigger"
+max-width="50%"
+%}
+
+You can either delete this trigger, modify it, or add new ones.
+
+To add a new trigger, go to the *Triggers* tab in your pipeline editor and click the *Add Trigger* button. This will bring up the respective dialog where you are adding a new trigger.
+
+{% include image.html
+lightbox="true"
+file="/images/pipeline/triggers/add-trigger-dialog.png"
+url="/images/pipeline/triggers/add-trigger-dialog.png"
+alt="Adding new Trigger dialog"
+caption="Adding new Trigger dialog"
+max-width="70%"
+%}
+
+For more information see:
+
+* [Git triggers](git-triggers)
+* [Dockerhub triggers](dockerhub-triggers)
+* [Azure Registry triggers](azure-triggers)
+* [Quay triggers](quay-triggers)
+* [Helm triggers](helm-triggers)
+* [Artifactory triggers](jfrog-triggers)
+* [Cron trigger](cron-triggers)
+
+## Disabling triggers
+
+You can easily disable a trigger manually if you don't want it to be active anymore.
+On the triggers tab, click the gear icon on the top right (*Open advanced options*).
+
+{% include image.html
+lightbox="true"
+file="/images/pipeline/triggers/enable-triggers.png"
+url="/images/pipeline/triggers/enable-triggers.png"
+alt="Toggle a trigger on/off"
+caption="Toggle a trigger on/off"
+max-width="70%"
+%}
+
+
+Then click the toggle switch on each trigger that you want to enable/disable. You can later enable the same trigger again
+by clicking the same switch.
+
+## Related articles
+[Creating pipelines]({{site.baseurl}}/docs/pipelines/pipelines/)
+[Running pipelines locally]({{site.baseurl}}/docs/pipelines/running-pipelines-locally/)
+[Trigger a Kubernetes Deployment from a Dockerhub Push Event]({{site.baseurl}}/docs/yaml-examples/examples/trigger-a-k8s-deployment-from-docker-registry/)
diff --git a/_docs/pipelines/triggers/azure-triggers.md b/_docs/pipelines/triggers/azure-triggers.md
new file mode 100644
index 00000000..5356be27
--- /dev/null
+++ b/_docs/pipelines/triggers/azure-triggers.md
@@ -0,0 +1,88 @@
+---
+title: "Azure Registry trigger"
+description: "Trigger Codefresh pipelines from Azure Registry events"
+group: pipelines
+sub_group: triggers
+redirect_from:
+ - /docs/pipeline-triggers/configure-azure-trigger/
+toc: true
+---
+
+Define and manage Azure Registry triggers for pipelines with the Codefresh UI.
+
+This allows you to trigger Codefresh pipelines when an Azure Registry event happens (e.g. a new Docker image is uploaded).
+
+## Manage Azure triggers with Codefresh UI
+
+
+The process involves two parts:
+
+1. Creating a trigger in Codefresh. This will result in a special Codefresh webhook URL.
+1. Creating a new notification in the Azure Registry that will use this URL to call Codefresh.
+
+> Make sure that you have an Azure cloud account and have already [created a registry](https://docs.microsoft.com/en-us/azure/container-registry/).
+
+
+### Create a new Azure trigger
+
+To add a new Azure trigger, navigate to a Codefresh Pipeline *Configuration* view and expand the *Triggers* section. Press the `Add Trigger` button and select a `Registry` trigger type to add.
+
+{% include image.html
+lightbox="true"
+file="/images/pipeline/triggers/add-trigger-dialog.png"
+url="/images/pipeline/triggers/add-trigger-dialog.png"
+alt="Adding new Trigger dialog"
+max-width="40%"
+%}
+
+Fill the following information:
+
+* *Registry Provider* - select `Azure`.
+* *Name of Registry* - put Azure name of registry (without `.azurecr.io`).
+* *Image Repository Name* - Azure image repository name.
+* *Action* - select `Push Image` action.
+* *Tags* - optional filter to specify which image *tags* will trigger pipeline execution: [Re2](https://github.com/google/re2/wiki/Syntax) regular expression.
+
+{% include image.html
+lightbox="true"
+file="/images/pipeline/triggers/azure/add-trigger-dialog.png"
+url="/images/pipeline/triggers/azure/add-trigger-dialog.png"
+alt="Azure Registry settings"
+max-width="50%"
+%}
+
+Click next and a new dialog will appear that shows you the Codefresh webhook URL. Copy it to your clipboard.
+
+
+{% include image.html
+lightbox="true"
+file="/images/pipeline/triggers/azure/view-trigger-dialog.png"
+url="/images/pipeline/triggers/azure/view-trigger-dialog.png"
+alt="Codefresh webhook URL"
+max-width="50%"
+%}
+
+Now we must set Azure to call this URL when an event takes place.
+
+### Set up Azure notification
+
+The easiest way to create an Azure trigger is with the [Azure CLI](https://docs.microsoft.com/en-us/cli/azure/acr/webhook?view=azure-cli-latest#az-acr-webhook-create) (Also available in the Azure portal)
+
+Here is the command:
+
+{% highlight shell %}
+{% raw %}
+az acr webhook create -n MyWebhook -r kostisregistry --uri "https://g.codefresh.io/nomios/azure?account=409f15bdd444&secret=7zyg5Zhb8xYBn4ms" --actions push delete
+{% endraw %}
+{% endhighlight %}
+
+The name can be anything you want. The URI is the Codefresh URL that was created in the previous step.
+
+
+### Triggering a Codefresh pipeline with Azure push
+
+Now, every time you push a new Docker image to the selected Azure Docker repository, manually, with Codefresh or any other CI/CD tool, Codefresh will trigger execution of all pipelines associated with that Azure Push trigger event.
+
+## Related articles
+[Triggers for pipelines]({{site.baseurl}}/docs/pipelines/triggers)
+[Creating pipelines]({{site.baseurl}}/docs/pipelines/pipelines/)
\ No newline at end of file
diff --git a/_docs/pipelines/triggers/cron-triggers.md b/_docs/pipelines/triggers/cron-triggers.md
new file mode 100644
index 00000000..93838253
--- /dev/null
+++ b/_docs/pipelines/triggers/cron-triggers.md
@@ -0,0 +1,104 @@
+---
+title: "Cron Trigger"
+description: "Run pipelines with a time schedule"
+group: pipelines
+sub_group: triggers
+redirect_from:
+ - /docs/configure-cron-trigger/
+ - /docs/pipeline-triggers/configure-cron-trigger/
+toc: true
+---
+
+Cron triggers allow you to create pipelines that start on a specific time schedule. This is very useful for cleanup jobs or periodic checks or any other workflow that needs to run after a time interval.
+
+>All times mentioned in Cron triggers use the UTC time zone.
+
+## Manage Cron Triggers with Codefresh UI
+
+It is possible to define and manage Cron-based pipeline triggers with Codefresh UI.
+
+### Create a new Cron Trigger
+
+To add a new Cron trigger, navigate to Codefresh Pipeline *Configuration* view and expand *Triggers* section. Press the `Add Trigger` button and select a `Cron` trigger type to add.
+
+{% include image.html
+lightbox="true"
+file="/images/pipeline/triggers/add-trigger-dialog.png"
+url="/images/pipeline/triggers/add-trigger-dialog.png"
+alt="Adding new Trigger dialog"
+max-width="60%"
+%}
+
+
+Visit [this page](https://github.com/codefresh-io/cronus/blob/master/docs/expression.md) to learn about supported `cron` expression format and aliases.
+
+
+Fill the following information:
+
+* Use Cron helper wizard to build a valid `cron` expression or write custom `cron` expression on the last tab.
+* Add a free text message, that will be sent as an additional event payload every time `cron` is executed.
+
+{% include image.html
+lightbox="true"
+file="/images/cron_trigger.png"
+url="/images/cron_trigger.png"
+alt="Add Cron Trigger"
+max-width="70%"
+%}
+
+
+### Trigger Codefresh pipeline with cron timer
+
+Now, `cron` will trigger a recurrent pipeline execution based on the defined `cron expression`.
+
+## Manage Cron Triggers with Codefresh CLI
+
+It is also possible to use the Codefresh Command Line client (`CLI`) to manage Cron based pipeline triggers.
+
+### Cron trigger
+
+It is possible to trigger a Codefresh CD pipeline(s) periodically, using `cron` expression.
+
+You can use [Codefresh CLI](https://cli.codefresh.io/) to setup a Codefresh `cron` trigger.
+
+#### Create Cron trigger-event
+
+First, you need to create a new `cron` `trigger-event` to define a recurrent event.
+
+```sh
+# create a recurrent cron event 'once in 20 minutes'
+codefresh create trigger-event --type cron --kind codefresh --value expression="0 */20 * * * *" --value message="hello-once-in-20-min"
+
+# on success trigger-event UID will be printed out
+Trigger event: "cron:codefresh:codefresh:0 */20 * * * *:hello-once-in-20-min:107e9db97062" was successfully created.
+```
+
+When creating a `cron trigger-event`, it is possible to specify a short text message, that will be passed to linked pipelines, every time the specified `cron` timer is triggered.
+
+Visit [this page](https://github.com/codefresh-io/cronus/blob/master/docs/expression.md) to learn about the supported `cron` expression format and aliases.
+
+#### Set up pipeline trigger
+
+Now, let's create a new pipeline trigger, linking the previously defined `cron` `trigger-event` to one or more Codefresh pipelines.
+
+```sh
+# create trigger, linking trigger-event UID to the pipeline UID
+codefresh create trigger "cron:codefresh:codefresh:0 */20 * * * *:hello-once-in-20-min:107e9db97062" 7a5622e4b1ad5ba0018a3c9c
+
+# create another trigger, linking the same trigger-event to another pipeline
+codefresh create trigger "cron:codefresh:codefresh:0 */20 * * * *:hello-once-in-20-min:107e9db97062" 4a5634e4b2cd6baf021a3c0a
+```
+
+From now on, every 20 minutes Codefresh will trigger a pipeline execution for 2 pipelines linked to the previously specified `cron` `trigger-event` (once in 20 minutes)
+
+#### Cron Event payload
+
+The following variables will be available for any Codefresh pipeline linked to a `cron` `trigger-event`:
+
+- `EVENT_MESSAGE` - free text message (specified during creation)
+- `EVENT_TIMESTAMP` - event timestamp in RFC 3339 format
+
+## Related articles
+[Triggers for pipelines]({{site.baseurl}}/docs/pipelines/triggers)
+[Creating pipelines]({{site.baseurl}}/docs/pipelines/pipelines/)
+
diff --git a/_docs/pipelines/triggers/dockerhub-triggers.md b/_docs/pipelines/triggers/dockerhub-triggers.md
new file mode 100644
index 00000000..1268e4d8
--- /dev/null
+++ b/_docs/pipelines/triggers/dockerhub-triggers.md
@@ -0,0 +1,152 @@
+---
+title: "DockerHub triggers"
+description: ""
+group: pipelines
+sub_group: triggers
+redirect_from:
+ - /docs/configure-dockerhub-trigger/
+ - /docs/pipeline-triggers/configure-dockerhub-trigger/
+toc: true
+---
+
+
+You can define and manage DockerHub triggers in Codefresh.
+
+### Create a new DockerHub trigger in Codefresh UI
+
+To add a new DockerHub trigger, navigate to Codefresh Pipeline *Configuration* view and expand *Triggers* section. Press the `Add Trigger` button and select a `Registry` trigger type to add.
+
+{% include image.html
+lightbox="true"
+file="/images/pipeline/triggers/add-trigger-dialog.png"
+url="/images/pipeline/triggers/add-trigger-dialog.png"
+alt="Adding new Trigger dialog"
+max-width="60%"
+%}
+
+Fill the following information:
+
+* *Registry Provider* - select `DockerHub`.
+* *User/Organization Name* - put DockerHub user name or organization name here.
+* *Image Repository Name* - DockerHub image repository name.
+* *Action* - select `Push Image` action.
+* *Tag* - optional filter to specify which image *tags* will trigger pipeline execution: [Re2](https://github.com/google/re2/wiki/Syntax) regular expression.
+
+{% include image.html
+lightbox="true"
+file="/images/pipeline/triggers/dockerhub/dockerhub_trigger_1.png"
+url="/images/pipeline/triggers/dockerhub/dockerhub_trigger_1.png"
+alt="Add Registry Trigger"
+max-width="70%"
+%}
+
+### Setup DockerHub Webhook
+
+Currently Codefresh does not support automatically setting up a DockerHub webhook. You need to do this manually. Press the *Next* button to see detailed instructions, with URL links and secrets, on how to set up a DockerHub webhook.
+
+
+{% include image.html
+lightbox="true"
+file="/images/pipeline/triggers/dockerhub/dockerhub_trigger_2.png"
+url="/images/pipeline/triggers/dockerhub/dockerhub_trigger_2.png"
+alt="Add Webhook"
+max-width="70%"
+%}
+
+1. Copy `Endpoint` URL
+1. Visit DockerHub image settings page following link in help
+1. Add a new DockerHub Webhook with previously copied `Endpoint` URL
+
+### Triggering Codefresh pipeline with DockerHub push
+
+Now, every time you push a new Docker image to selected DockerHub repository, manually, with Codefresh or any other CI/CD tool, Codefresh will trigger execution of all pipelines associated with this DockerHub Push trigger event.
+
+## Manage DockerHub triggers with Codefresh CLI
+
+It is possible to use `codefresh` command line client (`CLI`) to manage DockerHub pipeline triggers.
+
+### Docker Hub Trigger
+
+It is possible to trigger Codefresh CD pipeline(s) when a new Docker image is pushed to DockerHub.
+
+You can use [Codefresh CLI](https://cli.codefresh.io/) to setup a Codefresh trigger for DockerHub.
+
+#### Create DockerHub trigger-event
+
+First, create a `trigger-event` for every DockerHub image for which you would like to set up a Codefresh trigger.
+
+```
+# create DockerHub trigger event for codefresh/fortune
+codefresh create trigger-event --type registry --kind dockerhub --value namespace=codefresh --value name=fortune --value action=push
+
+# on success trigger-event UID will be printed out
+Trigger event: registry:dockerhub:codefresh:fortune:push:107e9db97062 was successfully created.
+```
+
+#### Set up DockerHub webhook
+
+Currently, an additional manual action is required to bind DockerHub `push` image event to the Codefresh `trigger-event`.
+
+```
+# get trigger-event details for previously created trigger-event
+codefresh get trigger-event -o yaml registry:dockerhub:codefresh:fortune:push:107e9db97062
+```
+
+... command output:
+
+```yaml
+uri: 'registry:dockerhub:codefresh:fortune:push:107e9db97062'
+type: registry
+kind: dockerhub
+public: false
+secret: aGao5weuez2G6WF9
+status: active
+endpoint: >-
+ https://g.codefresh.io/nomios/dockerhub?account=107e9db97062&secret=aGao5weuez2G6WF9
+description: Docker Hub codefresh/fortune push event
+help: >-
+ Docker Hub webhooks fire when an image is built in, pushed or a new tag is
+ added to, your repository.
+
+
+ Configure Docker Hub webhooks on
+ https://hub.docker.com/r/codefresh/fortune/~/settings/webhooks/
+
+
+ Add following Codefresh Docker Hub webhook endpoint
+ https://g.codefresh.io/nomios/dockerhub?account=107e9db97062&secret=aGao5weuez2G6WF9
+```
+
+1. Copy `endpoint` URL
+1. Visit DockerHub settings page [https://hub.docker.com/r/codefresh/fortune/~/settings/webhooks/](https://hub.docker.com/r/codefresh/fortune/~/settings/webhooks/).
+1. Add a new Webhook with previously copied `endpoint` URL.
+
+
+#### Set up pipeline trigger
+
+Now, let's set up a new pipeline trigger, linking the previously defined DockerHub push `codefresh/fortune` `trigger-event` to one or more Codefresh pipelines.
+
+```
+# create trigger, linking trigger-event UID to the pipeline UID
+codefresh create trigger "registry:dockerhub:codefresh:fortune:push:107e9db97062" 7a5622e4b1ad5ba0018a3c9c
+
+# create another trigger, linking the same trigger-event to another pipeline
+codefresh create trigger "registry:dockerhub:codefresh:fortune:push:107e9db97062" 4a5634e4b2cd6baf021a3c0a
+```
+
+From now on, Codefresh will trigger pipeline execution when new `codefresh/fortune` image is pushed to the DockerHub.
+
+#### DockerHub Event payload
+
+The following variables will be available for any Codefresh pipeline linked to a DockerHub `trigger-event`:
+
+- `EVENT_NAMESPACE` - DockerHub namespace (alias `organization`).
+- `EVENT_NAME` - DockerHub image name (alias `repository`).
+- `EVENT_TAG` - Docker image tag.
+- `EVENT_PUSHER` - user who pushed this Docker image.
+- `EVENT_PUSHED_AT` - timestamp for push event.
+- `EVENT_PAYLOAD` - original DockerHub Webhook JSON payload.
+
+## Related articles
+[Triggers for pipelines]({{site.baseurl}}/docs/pipelines/triggers)
+[Creating pipelines]({{site.baseurl}}/docs/pipelines/pipelines/)
diff --git a/_docs/pipelines/triggers/git-triggers.md b/_docs/pipelines/triggers/git-triggers.md
new file mode 100644
index 00000000..de0a551e
--- /dev/null
+++ b/_docs/pipelines/triggers/git-triggers.md
@@ -0,0 +1,371 @@
+---
+title: "Git triggers"
+description: "Learn how to run pipelines from Git events"
+group: pipelines
+sub_group: triggers
+toc: true
+---
+
+Git triggers are the most basic of the trigger types for performing [Continuous Integration](https://en.wikipedia.org/wiki/Continuous_integration) with Codefresh.
+
+At the trigger level, you can select:
+
+* Which code repository will be used as a trigger
+* Which branches will be affected by a pipeline
+* If a trigger will apply to a Pull Request (PR) or not
+
+> You can select a repository other than the one the project itself belongs to. It is possible
+ to trigger a build on project A even though a commit happened on project B.
+
+You can also use [conditional expressions]({{site.baseurl}}/docs/codefresh-yaml/conditional-execution-of-steps/) at the pipeline level to further fine-tune the way specific steps (or other transitive pipelines) are executed.
+
+## Manage GIT triggers with Codefresh UI
+
+To add a new GIT trigger, navigate to the Codefresh Pipeline *Configuration* view and expand the *Triggers* section on the right side. Press the *Add Trigger* button and select a *GIT* trigger type to add.
+
+{% include image.html
+lightbox="true"
+file="/images/pipeline/triggers/add-trigger-dialog.png"
+url="/images/pipeline/triggers/add-trigger-dialog.png"
+alt="Adding new Trigger dialog"
+max-width="60%"
+%}
+
+## General trigger Settings
+
+{% include image.html
+lightbox="true"
+file="/images/pipeline/triggers/add-git-trigger.png"
+url="/images/pipeline/triggers/add-git-trigger.png"
+alt="Adding GIT Trigger"
+max-width="50%"
+%}
+
+The Git trigger is comprised of the following settings:
+
+* *Trigger Name* - a freetext trigger name (required).
+* *Description* - a freetext description (optional).
+* *Repository* - you can select any repository even something different than the one that is used for the code checkout.
+* *Commit Checkbox* - if enabled will trigger this pipeline for any commit.
+* *PR Checkboxes* - various checkboxes for filtering the Pull request event.
+
+The commit checkbox (by default it is enabled) means that this pipeline will run for *any* commit as long as its source branch matches the naming scheme. This includes commits on pull requests.
+
+The PR checkboxes mean that this pipeline will run only on the respective events that happen on a Pull Request. You can select multiple checkboxes to further fine-tune the exact event. If you are interested in all events, select the checkbox *Any Pull Request event*.
+
+>The individual Pull request checkboxes are available only for GitHub repositories.
+
+## Configure Filter Settings
+
+{% include image.html
+lightbox="true"
+file="/images/pipeline/triggers/configure-filter-settings.png"
+url="/images/pipeline/triggers/configure-filter-settings.png"
+alt="Configure Filter Settings"
+max-width="50%"
+%}
+
+* *Support pull request events from forks* - toggle that is useful for open source projects.
+* *Branch Field* - this is a regular expression and will only trigger for branches that match this naming pattern.
+* *PR Comment (restricted) and PR Comment Fields* - useful for open source projects.
+* *Pull Request Target* branch - this is a regular expression and will trigger only when a Pull request is created against any branch that matches it.
+* *Modified Files* - allows you to constrain the build and trigger it only if the modified files from the commit match this [glob expression](https://en.wikipedia.org/wiki/Glob_(programming)).
+
+### Pull Request Target Branch and Branch
+
+The Pull Request Target Branch field allows you to trigger this pipeline only when the target of a Pull Request (i.e. the branch where the PR is going to be merged) matches the
+branch name regular expression. Common examples for branch names would be `master` or `production`.
+
+This field is only meaningful when a commit happens in the context of a pull request, and in that case:
+
+1. The Branch field will look at the branch that the commit is happening on
+1. The PR Target Branch field will look at the branch the PR is happening against
+
+For example, if you create a commit on a branch that is named `my-feature` which is currently part of PR against branch `staging` (i.e. somebody wants to merge `my-feature` **TO** `staging`) then:
+
+1. The `BRANCH` field value will try to match against `my-feature`
+1. the `PULL REQUEST TARGET BRANCH` will try to match against `staging`
+
+Here are some more syntax examples:
+
+* `/^((qa-release)$).*/g` - only run if branch is named `qa-release`.
+* `/^((production)$).*/g` - only run if branch is named `production`.
+* `/release/g` - only run if branch name contains `release` as substring.
+* `/feature-/gi` - only run if branch is `feature-foo`, `feature-bar`, `my-feature-123` etc.
+* `/^((?!^feature).)*$/gi` - only run if branch name does **not** start with `feature`.
+
+>The field *Pull Request Target* is available for all Git providers apart from Atlassian stash.
+>
+>When using the Terraform Provider, please use the [Go regex syntax](https://github.com/google/re2/wiki/Syntax) as some perl regex syntax is not compatible.
+
+The concept behind these checkboxes and branch name fields is to allow you to define which pipelines run for various workflows in your organization.
+
+As a simple example you can have a *production* pipeline that runs only on *master* branch (and therefore the branch field says "master") and a *testing* pipeline that runs user acceptance tests where only the Pull Request Open checkbox is active. This means that User Acceptance tests will run whenever a PR is created. Once it is merged the *production* pipeline will deploy the changes.
+
+In a more advanced example, you could add regular expressions in the branch field with names such as *feature-*, *hotfix-* etc. and the PR checkbox active on different pipelines. This way you could trigger the pull requests only when they happen on specific branches. So, a developer that creates a temporary feature with a name that doesn't match these naming patterns will not trigger those pipelines.
+
+Notice also that you can use Negative Lookahead in your Branch (Regex Expression) filter. An example to exclude tag events: `/^((?!tag)).*/gi` (the pattern here for tags to exclude is that they begin with `tag…`).
+
+This will exclude all push events (including tags) that follow the `tag...` pattern.
+Therefore, all tags like `tag1`, `tag-X` **won't** trigger the pipeline.
+
+### Pull Requests from comments
+
+Pull Requests from comments are supported for all Git providers, for both private and public repositories.
+There are two options:
+* **Pull request comment added (restricted)**
+ This option triggers an event only when the PR comments are made by repository owners or collaborators.
+* **Pull request comment added**
+ This option triggers an event when PR comments are made by any user, regardless of their permissions.
+ Because it is not restricted to owners and collaborators, this option is useful in GitHub, to enable triggers for PR comments made by users in GitHub teams.
+
+ > We strongly recommend selecting this option only for _private repositories_.
+
+
+{% include image.html
+lightbox="true"
+file="/images/pipeline/triggers/pr-comment-trigger-options.png"
+url="/images/pipeline/triggers/pr-comment-trigger-options.png"
+alt="Trigger options for PR comments"
+caption="Trigger options for PR comments"
+max-width="50%"
+%}
+
+
+### Support for building pull requests from forks
+
+By default, the Git trigger works for events coming from your personal repository. You can also use triggers from events that are coming from forks. This is a very useful feature for open source projects, as it allows you to run your own unit tests and other checks against a new feature *before* actually merging it in your repo.
+
+To enable this behavior:
+
+* Toggle the *support pull request events from forks* switch
+* Select *Pull request comment added (restricted)*
+* In the *pr comment* field enter a custom string (accepts regex)
+
+Then once a contributor creates a fork of your repository and submits a pull request, you can review the code and then add a comment on your own that matches the PR comment expression.
+
+{% include image.html
+lightbox="true"
+file="/images/pipeline/triggers/pr-from-fork.png"
+url="/images/pipeline/triggers/pr-from-fork.png"
+alt="Triggering a public build from a comment"
+caption="Triggering a public build from a comment"
+max-width="50%"
+%}
+
+Once that is done, Codefresh will launch your pipeline against the Pull Request. If you manage an open source project with Codefresh, remember to enable [public builds]({{site.baseurl}}/docs/configure-ci-cd-pipeline/build-status/#public-build-logs) as well.
+
+When supporting building of pull requests from forks there are a few "gotchas" to look out for:
+
+* Only comments made by repository owners and [collaborators](https://help.github.com/en/github/setting-up-and-managing-organizations-and-teams/adding-outside-collaborators-to-repositories-in-your-organization) will result in the pipeline being triggered.
+* Only Git pushes by collaborators within the GitHub organization will result in the pipeline being triggered
+* If the repository is in a GitHub organization, comments made by private members of the organization will not activate the trigger, even if they are set as an owner or collaborator. Private members means that they need to be explicitly added to the repository.
+Access cannot be "inherited" by the GitHub team. Currently, only comments from Admins, or Collaborators (directly added, not via teams) are allowed, in order to be caught by this filter.
+* The *Pull request comment added* checkbox should likely be the only one checked, or your pipeline may trigger on other events that you don't anticipate.
+
+
+
+### Monorepo support (Modified files)
+
+The *modified files* field is a very powerful Codefresh feature that allows you to trigger a build only if the
+files affected by a commit are in a specific folder (or match a specific naming pattern). This means that
+you can have a big GIT repository with multiple projects and build only the parts that actually change.
+
+>Currently the field *modified files* is available only for GitHub, GitLab, Azure DevOps and [Bitbucket Server and Data Center](https://confluence.atlassian.com/bitbucketserver/add-a-post-service-webhook-776640367.html) repositories, since they are the only GIT providers
+that send this information in the webhook. We will support other GIT providers as soon as they add the respective feature.
+
+### Using the Modified files field to constrain triggers to specific folder/files
+
+The *modified files* field accepts glob expressions. The paths are relative to the root folder of the project (where the git repository was checked out). Some possible examples are:
+
+```
+**/package.json
+**/Dockerfile*
+my-subproject/**
+my-subproject/sub-subproject/package.json
+my-subproject/**/pom.xml
+!config/**
+
+```
+
+>You can also use relative paths with dot-slash. Therefore `./package.json` and `package.json` are exactly the same thing. They both refer to the file `package.json` found at the root of the git project that was checked out as part of the build.
+
+You can also define [multiple expressions](http://tldp.org/LDP/GNU-Linux-Tools-Summary/html/x11655.htm) like this (but notice that there is a limit of 150 characters for the field):
+
+```
+{app/**,test/**}
+{**/package.json,my-subproject/**}
+!{deployment/**,**/version.cfg}
+```
+
+Once a commit happens to a code repository, Codefresh will see which files are changed from the git provider and trigger the build **only** if the changed files match the glob expression. If there is no match no build will be triggered.
+
+> Notice that the `{}` characters are only needed if you have more than one expression. Do not use them if you have a single glob expression in the field.
+
+This is a very useful feature for organizations who have chosen to have multiple projects on the same GIT repository (monorepos). Let's assume for example that a single system has a Java backend, a NestJS frontend and a Ruby-on-Rails internal dashboard.
+
+{% include image.html
+lightbox="true"
+file="/images/pipeline/triggers/monorepo.png"
+url="/images/pipeline/triggers/monorepo.png"
+alt="GIT monorepo"
+max-width="60%"
+%}
+
+Now we can define 3 different pipelines in Codefresh where each one builds the respective project
+
+{% include image.html
+lightbox="true"
+file="/images/pipeline/triggers/monorepo-pipelines.png"
+url="/images/pipeline/triggers/monorepo-pipelines.png"
+alt="GIT monorepo pipelines"
+max-width="70%"
+%}
+
+And then in the GIT trigger for each one we set the modified files field to the following values:
+
+* For the *build-nestjs-only* pipeline *MODIFIED FILES* has `my-nestjs-project/**`.
+* For the *build-java-only* pipeline *MODIFIED FILES* has `my-java-project/**`.
+* For the *build-rails-only* pipeline *MODIFIED FILES* has `my-rails-project/**`.
+
+This way as multiple developers work on the git repository only the affected projects will actually build. A change to the NestJS project will *not* build the Rails project as well. Also, if somebody changes *only* the README file and nothing else, no build will be triggered at all (which is a good thing as the source code is exactly the same).
+
+You can also use Glob expressions for files. For example:
+
+* An expression such as `my-subproject/sub-subproject/package.json` will trigger a build **only** if the dependencies of this specific project are changed
+* A pipeline with the expression `my-subproject/**/pom.xml` will trigger only if the Java dependencies for any project that belongs to `my-subproject` actually change
+* An expression such as `!config/manifest.yaml` will trigger a build if any file was changed *apart from* `config/manifest.yaml`
+
+Glob expressions have many more options not shown here. Visit the [official documentation](https://en.wikipedia.org/wiki/Glob_(programming)) to learn more. You can also use the [Glob Tester web application](https://www.digitalocean.com/community/tools/glob) to test your glob expressions beforehand so that you are certain they match the files you expect them to match.
+
+## Advanced Options
+
+{% include image.html
+lightbox="true"
+file="/images/pipeline/triggers/advanced-options.png"
+url="/images/pipeline/triggers/advanced-options.png"
+alt="Advanced Options"
+max-width="60%"
+%}
+
+* *Commit Status Title* - the commit status title pushed to the GIT version control system. By default, is the pipeline name, but you can override the name on GIT trigger.
+* *Build Variables* - import a [shared configuration]({{site.baseurl}}/docs/configure-ci-cd-pipeline/shared-configuration/) or manually add variables
+* *More Options*
+ * *Ignore Docker engine cache for build* - selecting this option may slow down your build. See #1 [here]({{site.baseurl}}/docs/troubleshooting/common-issues/disabling-codefresh-caching-mechanisms/)
+ * *Ignore Codefresh cache optimizations for build* - selecting this option may slow down your build. See #2 [here]({{site.baseurl}}/docs/troubleshooting/common-issues/disabling-codefresh-caching-mechanisms/)
+ * *Reset pipeline volume* - useful for troubleshooting a build that hangs on the first step. See [here]({{site.baseurl}}/docs/troubleshooting/common-issues/restoring-data-from-pre-existing-image-hangs-on/)
+ * *Report notification on pipeline execution* - Decide if [Slack notifications]({{site.baseurl}}/docs/integrations/notifications/slack-integration/) will be sent (as well as status updates back to your Git provider)
+* *Runtime Environment* - choose to use pipeline [settings]({{site.baseurl}}/docs/configure-ci-cd-pipeline/pipelines/#pipeline-settings) or override them
+
+### Set minimum disk space for build volume by trigger
+Set the disk space you need for the build volume in the context of the selected trigger. Setting the disk space for the trigger overrides that set for the pipeline.
+
+1. In **Workflow > Triggers**, expand **Advanced Options**.
+1. From the Runtime Environment list, select **Override pipeline settings**, and then select the runtime for which to override the pipeline setting.
+1. If required, change the resource size.
+1. Enable **Set minimum disk space**, and then change as required.
+
+## Manually adding the trigger to GitHub
+
+When creating a Git trigger in codefresh, sometimes the Git Integration does not have the permissions to create a webhook on the designated repository. When this happens, you get the following error: `Failed to add Trigger`.
+
+This error means that Codefresh could not create the webhook and verify that it works. With that, Codefresh will mark the Trigger as Unverified. Two additional fields (Endpoint and Secret) will appear under the "Verify Trigger" button when you get this error.
+
+- **Endpoint**: This will be the Webhook URL for the created Trigger
+- **Secret**: Token to add to Github for verification.
+
+### Adding Webhook to Github
+
+1. When you receive the `Failed to add Trigger`, log into GitHub.
+ - Make sure this user can access the repository settings and create Webhooks
+1. Go to the repository mentioned in the "REPOSITORY" section from Unverified Trigger.
+1. Go to Settings > Webhooks and click the "Add webhook" button.
+1. Fill in the form
+ - **Payload URL**: The URL from the Endpoint field from the Trigger
+ - **Content type**: application/json
+ - **Secret**: The token in the Secret field from the Trigger
+ - **SSL verification**: Enable SSL verification
+ - **Events**:
+ 1. Select let me select individual events
+ 2. Match the items selected in the Trigger By field from the Trigger
+ - **Active**: Make sure this is selected
+1. Click "Add webhook" when done.
+1. Click "Done" in the Add Trigger form.
+1. Test your webhook by making an event in the repository that will cause the Trigger to start the build.
+
+> **Note**:
+> * You will be responsible for syncing the Trigger By to the Events sent to us for the webhook. You can select "Send me everything" if you do not want to manually match the Trigger By in the Trigger with the Webhook Events in GitHub.
+> * The Trigger will remain "Unverified" until the integration has the correct permissions to the repository.
+
+## Accessing webhook content of the trigger directly
+
+If your Git trigger is coming from Github, you can also access the whole payload of the webhook that was responsible for the trigger.
+The webhook content is available at `/codefresh/volume/event.json`. You can read this file in any pipeline step and process it like any other json file (e.g. with the jq utility).
+
+`codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+steps:
+  read_trigger_webhook:
+ title: "Reading Github webhook content"
+ type: "freestyle"
+ image: "alpine:3.9"
+ commands:
+ - 'cat /codefresh/volume/event.json'
+{% endraw %}
+{% endhighlight %}
+
+Notice however that this file is only available when the pipeline was triggered from a GitHub event. If you manually run the pipeline, the file is not present.
+
+## Using YAML and the Codefresh CLI to filter specific Webhook events
+
+The default GUI options exposed by Codefresh are just a starting point for GIT triggers and pull requests. Using [Codefresh YAML]({{site.baseurl}}/docs/codefresh-yaml/what-is-the-codefresh-yaml/) and the [Codefresh CLI plugin](https://codefresh-io.github.io/cli/) you can further create two-phase pipelines where the first one decides
+which webhook events will be honored and the second one contains the actual build.
+
+{% include image.html
+lightbox="true"
+file="/images/pipeline/triggers/two-phase-pipeline.png"
+url="/images/pipeline/triggers/two-phase-pipeline.png"
+alt="Two phase pipeline"
+max-width="80%"
+%}
+
+The generic GIT trigger is placed on Pipeline A. This pipeline then filters the applicable webhooks using [conditional expressions]({{site.baseurl}}/docs/codefresh-yaml/conditional-execution-of-steps/). Then it uses the Codefresh CLI plugin (and specifically the [run pipeline capability](https://codefresh-io.github.io/cli/pipelines/run-pipeline/)) to trigger pipeline B that performs build.
+
+Some of the YAML variables that you might find useful (from the [full list]({{site.baseurl}}/docs/codefresh-yaml/variables/)):
+
+* `CF_PULL_REQUEST_ACTION` - open, close, synchronize, assign etc.
+* `CF_PULL_REQUEST_TARGET` - target branch of the pull request.
+* `CF_BRANCH` - the branch that contains the pull request.
+
+As an example, here is the `codefresh.yml` file of pipeline A where we want to run pipeline B only when a Pull Request is opened against a branch named *production*.
+
+`codefresh.yml` of pipeline A
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+steps:
+ triggerstep:
+ title: trigger
+ image: codefresh/cli
+ commands:
+    - 'codefresh run <pipeline-b-name> -b=${{CF_BRANCH}} -t <trigger-name>'
+ when:
+ condition:
+ all:
+ validateTargetBranch: '"${{CF_PULL_REQUEST_TARGET}}" == "production"'
+ validatePRAction: '''${{CF_PULL_REQUEST_ACTION}}'' == ''opened'''
+{% endraw %}
+{% endhighlight %}
+
+This is the build definition for the first pipeline that has a GIT trigger (with the Pull request checkbox enabled).
+It has only a single step which uses conditionals that check the name of the branch where the pull request is targeted to, as well as the pull request action. Only if *both* of these conditions are true then the build step is executed.
+
+The build step calls the second pipeline. The end result is that pipeline B runs only when the Pull Request is opened the first time. Any further commits on the pull request branch will **not** trigger pipeline B (pipeline A will still run but the conditionals will fail).
+
+## Related articles
+[Triggers for pipelines]({{site.baseurl}}/docs/pipelines/triggers)
+[Cron triggers]({{site.baseurl}}/docs/pipelines/triggers/cron-triggers/)
+[Creating pipelines]({{site.baseurl}}/docs/pipelines/pipelines/)
+[Multi-git trigger]({{site.baseurl}}/docs/troubleshooting/common-issues/multi-git-triggers/)
diff --git a/_docs/pipelines/triggers/helm-triggers.md b/_docs/pipelines/triggers/helm-triggers.md
new file mode 100644
index 00000000..98ede0e9
--- /dev/null
+++ b/_docs/pipelines/triggers/helm-triggers.md
@@ -0,0 +1,61 @@
+---
+title: "Helm Trigger"
+description: ""
+group: pipelines
+sub_group: triggers
+toc: true
+---
+
+Codefresh has the option to create pipelines that respond to Helm events. For instance, one pipeline can be set up to create a Docker image and chart. Once those are created, another pipeline is triggered to implement the actual deployment.
+
+Define and manage Helm pipeline triggers with the Codefresh UI.
+
+## Create a new Helm Trigger
+
+To add a new Helm trigger, navigate to Codefresh Pipeline *Configuration* view and expand *Triggers* section. Press the `Add Trigger` button and select the `Helm` trigger type to add.
+
+{% include image.html
+lightbox="true"
+file="/images/pipeline/triggers/add-trigger-dialog.png"
+url="/images/pipeline/triggers/add-trigger-dialog.png"
+alt="Adding new Trigger dialog"
+max-width="60%"
+%}
+
+Fill the following information:
+* *Helm Provider* - select `JFrog Artifactory`.
+* *Repository* - enter the name of the Artifactory repository.
+* *Chart Name* - enter the name of the chart in the Artifactory repository.
+* *Action* - select the `Push Chart` action.
+
+{% include image.html
+lightbox="true"
+file="/images/pipeline/triggers/jfrog/configure-artifactory.png"
+url="/images/pipeline/triggers/jfrog/configure-artifactory.png"
+alt="Helm Artifactory settings"
+max-width="50%"
+%}
+
+Click next and a new Dialog will appear that shows you the Codefresh webhook URL. Copy it to your clipboard.
+
+
+{% include image.html
+lightbox="true"
+file="/images/pipeline/triggers/jfrog/view-trigger-dialog.png"
+url="/images/pipeline/triggers/jfrog/view-trigger-dialog.png"
+alt="Codefresh webhook URL"
+max-width="50%"
+%}
+
+Now we must set JFrog Artifactory to call this URL when an event takes place. This can either be done through the [JFrog Artifactory webhook plugin]({{site.baseurl}}/docs/pipelines/triggers/jfrog-triggers/) or through [setting up Webhooks](https://www.jfrog.com/confluence/display/JFROG/Webhooks) in the UI.
+
+## Trigger Codefresh pipeline with an Artifactory push
+
+Now, every time you push a Helm chart to the selected Artifactory repository, manually, with Codefresh or any other CI/CD tool, Codefresh will trigger execution of all pipelines associated with that Artifactory Push trigger event.
+
+
+## Related articles
+[Helm Releases management](https://codefresh.io/docs/docs/new-helm/helm-releases-management/)
+[Custom Helm uploads](https://codefresh.io/docs/docs/new-helm/custom-helm-uploads/)
+[Triggers for pipelines]({{site.baseurl}}/docs/pipelines/triggers)
+[Creating pipelines]({{site.baseurl}}/docs/pipelines/pipelines/)
diff --git a/_docs/pipelines/triggers/jfrog-triggers.md b/_docs/pipelines/triggers/jfrog-triggers.md
new file mode 100644
index 00000000..97471f4d
--- /dev/null
+++ b/_docs/pipelines/triggers/jfrog-triggers.md
@@ -0,0 +1,101 @@
+---
+title: "Artifactory trigger"
+description: "Trigger Codefresh pipelines from Artifactory"
+group: pipelines
+sub_group: triggers
+redirect_from:
+ - /docs/pipeline-triggers/configure-jfrog-trigger/
+toc: true
+---
+
+Define and manage Artifactory pipeline triggers with the Codefresh UI.
+This allows you to trigger Codefresh pipelines when an Artifactory event occurs (i.e. a new Docker image is uploaded).
+
+## Manage Artifactory Triggers with Codefresh UI
+
+
+The process involves two parts:
+
+1. Creating a trigger in Codefresh. This will result in a special Codefresh webhook URL
+1. Activating the [webhook plugin](https://github.com/jfrog/artifactory-user-plugins/tree/master/webhook) in Artifactory and setting it up to call the Codefresh URL
+
+> Make sure that you have admin access to your Artifactory instance in order to setup its webhook plugin.
+
+### Create a new Artifactory trigger
+
+To add a new Artifactory trigger, navigate to a Codefresh Pipeline *Configuration* view and expand the *Triggers* section. Press the `Add Trigger` button and select a `Registry` trigger type to add.
+
+{% include image.html
+lightbox="true"
+file="/images/pipeline/triggers/add-trigger-dialog.png"
+url="/images/pipeline/triggers/add-trigger-dialog.png"
+alt="Adding new Trigger dialog"
+max-width="40%"
+%}
+
+Fill the following information:
+
+* *Registry Provider* - select `JFrog`.
+* *Repository Name* - enter the name of the JFrog repository.
+* *Docker Image Name* - enter the name of the Docker image.
+* *Action* - select `Push Image` action.
+* *Tag* - optional filter to specify which image *tags* will trigger pipeline execution: [Re2](https://github.com/google/re2/wiki/Syntax) regular expression.
+
+{% include image.html
+lightbox="true"
+file="/images/pipeline/triggers/jfrog/configure-trigger.png"
+url="/images/pipeline/triggers/jfrog/configure-trigger.png"
+alt="Artifactory Registry settings"
+max-width="50%"
+%}
+
+Click next and a new Dialog will appear that shows you the Codefresh webhook URL. Copy it to your clipboard.
+
+
+{% include image.html
+lightbox="true"
+file="/images/pipeline/triggers/jfrog/view-trigger-dialog.png"
+url="/images/pipeline/triggers/jfrog/view-trigger-dialog.png"
+alt="Codefresh webhook URL"
+max-width="50%"
+%}
+
+Now we must set JFrog Artifactory to call this URL when an event takes place.
+
+### Set up JFrog Artifactory webhook plugin
+
+The [webhook functionality](https://github.com/jfrog/artifactory-user-plugins/tree/master/webhook) in JFrog Artifactory is provided as a plugin.
+You can read the [detailed documentation](https://www.jfrog.com/confluence/display/RTF/User+Plugins) for JFrog plugins but in summary:
+
+* The file `webhook.groovy` needs to be copied to `ARTIFACTORY_HOME/etc/plugins` (the plugin itself)
+* A file `webhook.config.json` should also be placed in the same folder (the plugin setup)
+
+Here is an example for Codefresh.
+
+`webhook.config.json`
+{% highlight json %}
+{% raw %}
+{
+ "webhooks": {
+ "mywebhook": {
+      "url": "https://g.codefresh.io/nomios/jfrog?account=2dfdf89f235bfe&secret=EvQf9bBS55UPekCu",
+ "events": [
+ "docker.tagCreated"
+ ]
+ }
+ },
+ "debug": false,
+ "timeout": 15000
+}
+{% endraw %}
+{% endhighlight %}
+
+
+
+### Trigger a Codefresh pipeline with an Artifactory push
+
+Now, every time you push/tag a Docker image to the selected Artifactory repository, manually, with Codefresh or any other CI/CD tool, Codefresh will trigger execution of all pipelines associated with that Artifactory Push trigger event.
+
+## Related articles
+[Triggers for pipelines]({{site.baseurl}}/docs/pipelines/triggers)
+[Creating pipelines]({{site.baseurl}}/docs/pipelines/pipelines/)
\ No newline at end of file
diff --git a/_docs/pipelines/triggers/quay-triggers.md b/_docs/pipelines/triggers/quay-triggers.md
new file mode 100644
index 00000000..1e7e275f
--- /dev/null
+++ b/_docs/pipelines/triggers/quay-triggers.md
@@ -0,0 +1,102 @@
+---
+title: "Quay Trigger"
+description: "Trigger Codefresh pipelines from Quay"
+group: pipelines
+sub_group: triggers
+redirect_from:
+ - /docs/pipeline-triggers/configure-quay-trigger/
+toc: true
+---
+
+Define and manage Quay triggers for pipelines with the Codefresh UI.
+This allows you to trigger Codefresh pipelines when a Quay event happens (e.g. a new Docker image is uploaded).
+
+## Manage Quay triggers with Codefresh UI
+
+
+The process involves two parts:
+
+1. Creating a trigger in Codefresh (this will result in a special Codefresh webhook URL)
+1. Creating a new notification in Quay that will use this URL to call Codefresh
+
+> Make sure that you have a Quay account and have already [created a repository](https://docs.quay.io/guides/create-repo.html) (or pushed a Docker image at least once).
+
+
+### Create a new Quay Trigger
+
+To add a new Quay trigger, navigate to a Codefresh Pipeline *Configuration* view and expand the *Triggers* section. Press the `Add Trigger` button and select a `Registry` trigger type to add.
+
+{% include image.html
+lightbox="true"
+file="/images/pipeline/triggers/add-trigger-dialog.png"
+url="/images/pipeline/triggers/add-trigger-dialog.png"
+alt="Adding new Trigger dialog"
+max-width="40%"
+%}
+
+Fill the following information:
+
+* *Registry Provider* - select `Quay`.
+* *User/Organization Name* - put Quay username or organization name here.
+* *Image Repository Name* - Quay image repository name.
+* *Action* - select `Push Image` action.
+* *Tag* - optional filter to specify which image *tags* will trigger pipeline execution: [Re2](https://github.com/google/re2/wiki/Syntax) regular expression.
+
+{% include image.html
+lightbox="true"
+file="/images/pipeline/triggers/quay/add-trigger-dialog.png"
+url="/images/pipeline/triggers/quay/add-trigger-dialog.png"
+alt="Quay Registry settings"
+max-width="50%"
+%}
+
+Click next and a new Dialog will appear that shows you the Codefresh webhook URL. Copy it to your clipboard.
+
+
+{% include image.html
+lightbox="true"
+file="/images/pipeline/triggers/quay/view-trigger-dialog.png"
+url="/images/pipeline/triggers/quay/view-trigger-dialog.png"
+alt="Codefresh webhook URL"
+max-width="50%"
+%}
+
+Now we must set Quay to call this URL when an event takes place.
+
+### Set up Quay notification
+
+Log in to your Quay account and go to the respective repository. You can also click the link shown in the Codefresh dialog to go directly to the settings of that repository.
+
+Scroll down and under *Events and Notifications* click *Create Notification*.
+
+
+{% include image.html
+lightbox="true"
+file="/images/pipeline/triggers/quay/add-quay-notification.png"
+url="/images/pipeline/triggers/quay/add-quay-notification.png"
+alt="Add Quay Notification"
+max-width="50%"
+%}
+
+In the new screen select *Push to repository* from the drop-down or any other event that you wish the Codefresh pipeline to trigger.
+
+{% include image.html
+lightbox="true"
+file="/images/pipeline/triggers/quay/edit-quay-notification.png"
+url="/images/pipeline/triggers/quay/edit-quay-notification.png"
+alt="Edit Quay Notification"
+max-width="50%"
+%}
+
+From the next dropdown choose *Webhook Post*. In the *Webhook URL entry* paste the Codefresh URL that was created in the Codefresh Trigger dialog.
+
+Finally click *Create Notification*.
+
+
+### Triggering a Codefresh pipeline with Quay push
+
+Now, every time you push a new Docker image to the selected Quay repository, manually, with Codefresh or any other CI/CD tool, Codefresh will trigger execution of all pipelines associated with that Quay Push trigger event.
+
+## Related articles
+[Triggers for pipelines]({{site.baseurl}}/docs/pipelines/triggers)
+[Creating pipelines]({{site.baseurl}}/docs/pipelines/pipelines/)
\ No newline at end of file
diff --git a/_docs/pipelines/using-secrets.md b/_docs/pipelines/using-secrets.md
deleted file mode 100644
index 58204057..00000000
--- a/_docs/pipelines/using-secrets.md
+++ /dev/null
@@ -1,8 +0,0 @@
----
-title: "Using secrets"
-description: ""
-group: pipelines
-toc: true
----
-
-Coming soon
diff --git a/_docs/pipelines/variables.md b/_docs/pipelines/variables.md
new file mode 100644
index 00000000..19f2c8a4
--- /dev/null
+++ b/_docs/pipelines/variables.md
@@ -0,0 +1,338 @@
+---
+title: "Variables in pipelines"
+description: ""
+group: pipelines
+redirect_from:
+ - /docs/variables/
+toc: true
+---
+Codefresh provides a set of predefined variables automatically in each build, that you can use to parameterize the way your pipeline works. You can also define your own variables. Some common examples of predefined variables include:
+
+* `CF_BRANCH` is the Git branch that was used for this pipeline.
+* `CF_REVISION` is the Git hash that was used for this pipeline.
+* `CF_BUILD_URL` is the url of the pipeline build.
+
+## Using Codefresh variables in your pipelines
+
+There are two ways to use a Codefresh variable in your pipelines:
+
+1. By default all variables will be exposed as UNIX environment variables in all freestyle steps as `$MY_VARIABLE_EXAMPLE`.
+1. Variables can be used in YAML properties with the syntax {% raw %}`${{MY_VARIABLE_EXAMPLE}}`{% endraw %}.
+
+> If you are unsure about which form you need to use, feel free to use {% raw %}`${{MY_VARIABLE_EXAMPLE}}`{% endraw %} everywhere. This is the Codefresh specific form and should function in all sections of `codefresh.yml`.
+
+For example, you can print out the branch as an environment variable like this:
+
+`YAML`
+{% highlight yaml %}
+{% raw %}
+MyOwnStep:
+ title: Variable example
+ image: alpine
+ commands:
+ - echo $CF_BUILD_ID
+ - echo $CF_BRANCH_TAG_NORMALIZED
+{% endraw %}
+{% endhighlight %}
+
+In the example above we are using simple `echo` commands, but any program or script that reads environment variables could also read them in the same manner.
+
+Using variables directly in yaml properties can be done like this:
+
+`YAML`
+{% highlight yaml %}
+{% raw %}
+MyAppDockerImage:
+ title: Building Docker Image
+ type: build
+ image_name: my-own-app
+ tag: ${{CF_BRANCH_TAG_NORMALIZED}}
+{% endraw %}
+{% endhighlight %}
+
+You can also concatenate variables:
+
+`YAML`
+{% highlight yaml %}
+{% raw %}
+MyAppDockerImage:
+ title: Building Docker Image
+ type: build
+ image_name: my-own-app
+ tag: ${{CF_BRANCH_TAG_NORMALIZED}}-${{CF_SHORT_REVISION}}
+{% endraw %}
+{% endhighlight %}
+
+This will create Docker images with tags such as:
+
+```
+master-df6a04c
+develop-ba1cd68
+feature-vb145dh
+```
+
+
+
+
+Notice that this syntax is specific to Codefresh and is **only** available within the Codefresh YAML file itself. If you want to write scripts or programs that use the Codefresh variables, you need to make them aware of the environment variable form.
+
+
+## System variables
+
+System variables are automatically injected to any freestyle step as environment variables.
+
+> It is important to understand that all Git related variables such as `CF_BRANCH`, `CF_COMMIT_MESSAGE`, `CF_REVISION` etc. are coming directly from the Git provider you use and have the same limitations of that provider. For example GitLab is sending less information in pull request events than normal pushes, and Bitbucket sends only the short hash of a commit in pull request events. We suggest you read the documentation of your Git provider first to understand what information is available for every Git event.
+
+{: .table .table-bordered .table-hover}
+| Variable | Description |
+| ------------------------------------------------- | ------------------------------------------------------ |
+| {% raw %}`${{CF_REPO_OWNER}} `{% endraw %} | Repository owner. |
+| {% raw %}`${{CF_REPO_NAME}}`{% endraw %} | Repository name. |
+| {% raw %}`${{CF_BRANCH}}`{% endraw %} | Branch name (or Tag depending on the payload json) of the Git repository of the main pipeline, at the time of execution. You can also use {% raw %}`${{CF_BRANCH_TAG_NORMALIZED}}`{% endraw %} to get the branch name normalized. It will be without any chars that are illegal in case the branch name were to be used as the Docker image tag name. You can also use {% raw %}`${{CF_BRANCH_TAG_NORMALIZED_LOWER_CASE}}`{% endraw %} to force lowercase. |
+| {% raw %}`${{CF_BASE_BRANCH}}`{% endraw %} | The base branch used during creation of Tag |
+| {% raw %}`${{CF_PULL_REQUEST_ACTION}}`{% endraw %} | The pull request action. Values are those defined by your Git provider such as [GitHub](https://developer.github.com/webhooks/), [GitLab](https://docs.gitlab.com/ee/user/project/integrations/webhooks.html), [Bitbucket](https://confluence.atlassian.com/bitbucket/manage-webhooks-735643732.html) etc. |
+| {% raw %}`${{CF_PULL_REQUEST_TARGET}}`{% endraw %} | The pull request target branch |
+| {% raw %}`${{CF_PULL_REQUEST_NUMBER}}`{% endraw %} | The pull request number |
+| {% raw %}`${{CF_PULL_REQUEST_ID}}`{% endraw %} | The pull request id |
+| {% raw %}`${{CF_PULL_REQUEST_LABELS}}`{% endraw %} | The labels of pull request (GitHub and GitLab only) |
+| {% raw %}`${{CF_COMMIT_AUTHOR}}`{% endraw %} | Commit author. |
+| {% raw %}`${{CF_BUILD_INITIATOR}}`{% endraw %} | The person (username) that started the build. If the build was started by a Git webhook (e.g. from a Pull request) it will hold the webhook user. Notice that if a build is restarted manually it will always hold the username of the person that restarted it. |
+| {% raw %}`${{CF_ACCOUNT}}`{% endraw %} | Codefresh account for this build |
+| {% raw %}`${{CF_COMMIT_URL}}`{% endraw %} | Commit url. |
+| {% raw %}`${{CF_COMMIT_MESSAGE}}`{% endraw %}      | Commit message of the Git repository revision, at the time of execution. The message's quotes are escaped (i.e. ' is now \', " is now \"). |
+| {% raw %}`${{CF_COMMIT_MESSAGE_ESCAPED}}`{% endraw %} | Commit message of the Git repository revision, at the time of execution. Special characters are escaped. |
+| {% raw %}`${{CF_REVISION}}`{% endraw %} | Revision of the Git repository of the main pipeline, at the time of execution. You can also use {% raw %}`${{CF_SHORT_REVISION}}`{% endraw %} to get the abbreviated 7-character revision hash, as used in Git. Note: use this variable as string with quotes to tag the image {% raw %}`${{CF_SHORT_REVISION}}`{% endraw %} |
+| {% raw %}`${{CF_VOLUME_NAME}}`{% endraw %} | Refers to the [shared volume]({{site.baseurl}}/docs/pipelines/introduction-to-codefresh-pipelines/#sharing-the-workspace-between-build-steps) between [freestyle steps]({{site.baseurl}}/docs/pipelines/steps/freestyle/). Normally you only need to define this in [compositions]({{site.baseurl}}/docs/pipelines/steps/composition/). In freestyle steps, it is automatically present without any extra configuration. |
+| {% raw %}`${{CF_VOLUME_PATH}}`{% endraw %} | Refers to the mounted path of the [shared volume]({{site.baseurl}}/docs/pipelines/introduction-to-codefresh-pipelines/#sharing-the-workspace-between-build-steps) inside a Freestyle container. In the current implementation it expands to `/codefresh/volume`. |
+| {% raw %}`${{CF_BUILD_TRIGGER}}`{% endraw %}       | Indicates how the current build was triggered: *build* - the build was triggered from the build button; *webhook* - the build was triggered from a version control webhook |
+| {% raw %}`${{CF_BUILD_ID}}`{% endraw %} | The build id. Note: use this variable as string with quotes to tag the image {% raw %}`${{CF_BUILD_ID}}`{% endraw %} |
+| {% raw %}`${{CF_BUILD_TIMESTAMP}}`{% endraw %} | The timestamp the build was created. Note: use this variable as string with quotes to tag the image {% raw %}`${{CF_BUILD_TIMESTAMP}}`{% endraw %} |
+| {% raw %}`${{CF_BUILD_URL}}`{% endraw %} | The URL to the build in Codefresh |
+| {% raw %}`${{CF_PIPELINE_NAME}}`{% endraw %} | The full path of the pipeline, i.e. "project/pipeline" |
+| {% raw %}`${{CF_STEP_NAME}}`{% endraw %} | the name of the step, i.e. "MyUnitTests" |
+| {% raw %}`${{CF_URL}}`{% endraw %} | The URL of Codefresh system |
+| {% raw %}`${{CI}}`{% endraw %} | The value is always `true` |
+| {% raw %}`${{CF_KUBECONFIG_PATH}}`{% endraw %} | Path to injected kubeconfig if at least one Kubernetes cluster [is configured]({{site.baseurl}}/docs/deploy-to-kubernetes/add-kubernetes-cluster/). You can easily run [custom kubectl commands]({{site.baseurl}}/docs/deploy-to-kubernetes/custom-kubectl-commands/) since it is automatically setup by Codefresh in all pipelines. |
+| Any variable specified in the pipeline settings | For example, if you configure the pipeline settings with a variable named PORT, you can put the variable in your YAML build descriptor as {% raw %}`${{PORT}}`{% endraw %}. |
+
+## Context-related Variables
+Context-related variables are created dynamically during the workflow execution and according to the used steps.
+
+{: .table .table-bordered .table-hover}
+| Variable | Description |
+| ------------------------------------------------- | ------------------------------------------------------ |
+| **Working Directories** | For example, you can set the working directory of step `A` with a variable named after a previously executed step, step `B`. Therefore, setting step `A` with {% raw %}`working-directory:${{B}}`{% endraw %} means that step `A` executes in the same working directory as step `B`. |
+| **Images** | You can set the candidate field of the push step with a variable named after a previously executed build step. Since the details of a created image are not necessarily known ahead of time, the variable can create an association to an optionally dynamic image name. Therefore, setting push step `A` with {% raw %}`candidate:${{B}}`{% endraw %} means that step `A` will push the image built by step `B`. Note that this capability works only for `candidate` and `image` fields in Codefresh steps. |
+
+A very common pattern in Codefresh pipelines, is to create a Docker image in one step, and then run a command on its container in the next step (e.g. run [unit tests]({{site.baseurl}}/docs/testing/unit-tests/)):
+
+`YAML`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+steps:
+ MyAppDockerImage:
+ title: Building Docker Image
+ type: build
+ image_name: my-own-app
+ MyUnitTests:
+ title: Running Unit tests
+ image: ${{MyAppDockerImage}}
+ commands:
+ - ./my-unit-tests.sh
+{% endraw %}
+{% endhighlight %}
+
+In the example above you can see the `MyAppDockerImage` variable that denotes a Docker image created dynamically within this single pipeline. In the second step we use it as a Docker context in order to run unit tests. See also the [unit testing example app]({{site.baseurl}}/docs/yaml-examples/examples/run-unit-tests/).
+
+## Step variables
+
+Every [step]({{site.baseurl}}/docs/pipelines/steps/) in a Codefresh pipeline also exposes several built-in variables. You can access them via the global `steps` parent variable.
+
+ * Each step creates a variable based on the name of the step. You can then use the members of each variable for status conditions such as: `steps.MyUnitTests.result == 'error'` for a step called `MyUnitTests`.
+ * To access variables that have non-standard names (i.e. names containing characters other than alphanumeric and _), use the Variable() function.
+
+### Step Member variables
+
+Variables that are created by steps can have members. The members depend on the step type. For example if you have a build step named `myBuildStep` you can get the ID of the docker image that gets created with {% raw %}`echo ${{steps.myBuildStep.imageId}}`{% endraw %}
+
+{: .table .table-bordered .table-hover}
+| Step Type | Members |
+| ----------------------- | -------------------------------------- |
+| All step types          | {::nomarkdown}<ul><li>name</li><li>type</li><li>description</li><li>workingDirectory</li><li>result</li></ul>{:/} |
+
+
+
+## GitHub release variables
+
+GitHub allows you to create [releases](https://help.github.com/articles/creating-releases/) for marking specific Git tags for general availability.
+
+You can set a [trigger]({{site.baseurl}}/docs/configure-ci-cd-pipeline/triggers/git-triggers/) for GitHub releases. When a GitHub release happens, the following variables are also available:
+
+
+
+{: .table .table-bordered .table-hover}
+| Variable | Description |
+| --------------- | ------------------------------------------------------ |
+| {% raw %}`${{CF_RELEASE_NAME}}`{% endraw %} | GitHub release title |
+| {% raw %}`${{CF_RELEASE_TAG}}`{% endraw %} | Git tag version |
+| {% raw %}`${{CF_RELEASE_ID}}`{% endraw %} | Internal ID for this release |
+| {% raw %}`${{CF_PRERELEASE_FLAG}}`{% endraw %} | true if the release is marked as non-production ready, false if it is ready for production |
+
+## GitHub Pull Request variables
+
+When a pull request is closed in GitHub, the following variables are also available
+
+{: .table .table-bordered .table-hover}
+| Variable | Description |
+| --------------- | ------------------------------------------------------ |
+| {% raw %}`${{CF_PULL_REQUEST_MERGED}}`{% endraw %} | true if the pull request was merged to base branch |
+| {% raw %}`${{CF_PULL_REQUEST_HEAD_BRANCH}}`{% endraw %} | the head branch of the PR (the branch that we want to merge to master) |
+| {% raw %}`${{CF_PULL_REQUEST_MERGED_COMMIT_SHA}}`{% endraw %} | the commit SHA on the base branch after the pull request was merged (in most cases it will be master) |
+| {% raw %}`${{CF_PULL_REQUEST_HEAD_COMMIT_SHA}}`{% endraw %} | the commit SHA on the head branch (the branch that we want to push) |
+
+## User-defined variables
+
+User variables can be defined at 6 levels:
+
+1. Manually within a step using the [export](http://linuxcommand.org/lc3_man_pages/exporth.html) command or in any **subsequent** step with the [cf_export]({{site.baseurl}}/docs/codefresh-yaml/variables/#using-cf_export-command) command
+1. [Freestyle Step Definition]({{site.baseurl}}/docs/codefresh-yaml/steps/freestyle/#examples) (using the `environment` field)
+1. Specific build Execution (after clicking the "Build" button open the "Build Variables" section, or use the [CLI]({{site.baseurl}}/docs/integrations/codefresh-api/#example---triggering-pipelines))
+1. Pipeline Definition (under "Environment variables" section in the [pipeline view]({{site.baseurl}}/docs/configure-ci-cd-pipeline/pipelines/#creating-new-pipelines))
+1. [Shared Configuration]({{site.baseurl}}/docs/configure-ci-cd-pipeline/shared-configuration/) (defined under your account settings, and used using the "Import from shared configuration" button under the "Environment Variables" section in the pipeline view)
+1. Variables defined on the Project level (Under the variables tab on any project view)
+
+The options are listed in order of priority (from the most important to the least important), so in case of multiple variables defined at different locations with the same name, the order of overriding will be as listed here.
+
+For example if a pipeline variable is defined both in project level and as an execution parameter of a specific build, then the final result will be the value defined as a build parameter and the project level variable will not take effect.
+
+## Exporting environment variables from a freestyle step
+
+Variables defined inside steps are scoped to the step they were created in (even if you used the `export` command). In order to allow using variables across steps, we provide a shared file that facilitates variables importing and exporting. There are two ways to add variables to this file:
+
+### Using cf_export command
+Within every freestyle step, the `cf_export` command allows you to export variables across steps (by writing to the shared variables file).
+
+> The variables exported with cf_export override those defined at the pipeline level.
+
+You can either:
+- Explicitly state a VAR=VAL pair
+- State the name of an existing *exported* environment variable (like EXISTING_VAR).
+
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+steps:
+ freestyle-step-1:
+ description: Freestyle step..
+ title: Free styling
+ image: alpine:latest
+ commands:
+ # Normal export will only work in a single step
+ - export EXISTING_VAR=www.example.com
+
+ # CF export will now work in all other subsequent steps
+ - cf_export VAR1=alpine:latest VAR2=VALUE2 EXISTING_VAR
+
+ freestyle-step-2:
+ description: Freestyle step..
+ title: Free styling 2
+ image: ${{VAR1}}
+ commands:
+ - echo $VAR2
+ - echo http://$EXISTING_VAR/index.php
+{% endraw %}
+{% endhighlight %}
+
+Notice that `cf_export` has the same syntax structure as the [bash export command](https://www.gnu.org/software/bash/manual/html_node/Environment.html). This means that when you use it you **don't** need any dollar signs for the variable created/assigned.
+
+```
+cf_export $MY_VAR # Don't do this
+cf_export MY_VAR # Correct syntax
+```
+
+Also notice that `cf_export` works on *subsequent* steps only. If you want to export a variable right away in the present step and all the rest of the steps you need to do the following:
+
+```
+export MY_VAR='example' # Will make MY_VAR available in this step only
+cf_export MY_VAR='example' # Will also make MY_VAR available to all steps after this one
+```
+
+There is nothing really magic about `cf_export`. It is a normal script. You can see its contents on your own by entering the command `cat /codefresh/volume/cf_export` on any [Codefresh freestyle step]({{site.baseurl}}/docs/codefresh-yaml/steps/freestyle/) inside a pipeline.
+
+For more information on its limitations see the [troubleshooting page]({{site.baseurl}}/docs/troubleshooting/common-issues/cf-export-limitations/).
+
+
+
+### Directly writing to the file
+
+For more advanced use cases, you can write directly to the shared variable file that Codefresh reads to understand which variables need to be available to all steps. This file has a simple format where each line is a variable and its value in the form of `VARIABLE=VALUE`. The `cf_export` command mentioned in the previous section is just a shorthand for writing on this file.
+
+The variables file is available inside freestyle steps in the following path: **`{% raw %}${{CF_VOLUME_PATH}}{% endraw %}/env_vars_to_export`**
+
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+steps:
+ freestyle-step-1:
+ description: Freestyle step..
+ title: Free styling
+ image: alpine:latest
+ commands:
+ - echo VAR1=192.168.0.1 >> ${{CF_VOLUME_PATH}}/env_vars_to_export
+ - echo hey=alpine:3.9 >> ${{CF_VOLUME_PATH}}/env_vars_to_export
+
+ freestyle-step-2:
+ description: Freestyle step..
+ title: Free styling 2
+ image: ${{hey}}
+ commands:
+ - echo http://$VAR1/index.php
+{% endraw %}
+{% endhighlight %}
+
+Use this technique if you have complex expressions that have issues with the `cf_export` command.
+
+## Masking variables in logs
+
+Codefresh has the built-in capability to automatically mask variables in logs if they are encrypted. The values of encrypted variables will be replaced with asterisks in build logs.
+
+{% include
+image.html
+lightbox="true"
+file="/images/codefresh-yaml/variables/masked-variables.png"
+url="/images/codefresh-yaml/variables/masked-variables.png"
+alt="Masked variables"
+caption="Masked variables"
+max-width="80%"
+%}
+
+The variables can be defined in any of the usual ways Codefresh offers such as [shared configuration]({{site.baseurl}}/docs/configure-ci-cd-pipeline/shared-configuration/) or [within the pipeline]({{site.baseurl}}/docs/configure-ci-cd-pipeline/pipelines/#pipeline-settings):
+
+{% include
+image.html
+lightbox="true"
+file="/images/codefresh-yaml/variables/encrypted-variables.png"
+url="/images/codefresh-yaml/variables/encrypted-variables.png"
+alt="Encrypted variables"
+caption="Encrypted variables"
+max-width="60%"
+%}
+
+>Notice that this feature is currently available only in Enterprise accounts.
+
+
+## Escape characters
+When passing special characters through environment variables, `\` can be used as an escape character. For example, if you were passing a Cassandra connection string you might do something like `Points\=hostname\;Port\=16376\;Username\=user\;Password\=password`.
+
+This will safely escape `;` and `=`.
+
+## Related articles
+[Pipeline steps]({{site.baseurl}}/docs/codefresh-yaml/steps/)
+[Codefresh Conditionals]({{site.baseurl}}/docs/codefresh-yaml/conditional-execution-of-steps/)
diff --git a/_docs/pipelines/what-is-the-codefresh-yaml.md b/_docs/pipelines/what-is-the-codefresh-yaml.md
new file mode 100644
index 00000000..51ba8a73
--- /dev/null
+++ b/_docs/pipelines/what-is-the-codefresh-yaml.md
@@ -0,0 +1,378 @@
+---
+title: "Codefresh YAML"
+description: "How to define Codefresh pipelines in a declarative manner"
+group: codefresh-yaml
+redirect_from:
+ - /docs/codefresh-yaml/
+ - /docs/what-is-the-codefresh-yaml
+ - /docs/what-is-the-codefresh-yaml/
+ - /docs/codefresh-yaml/working-directories/
+ - /docs/working-directories/
+toc: true
+---
+
+Codefresh offers its own built-in format for creating pipelines. The pipeline specification is
+based on the YAML syntax allowing you to describe your pipelines in a completely declarative manner.
+
+Using Codefresh yaml is the recommended way to [create pipelines]({{site.baseurl}}/docs/pipelines/pipelines/).
+
+## Simple example for codefresh.yml
+
+Here is a very minimal example:
+
+ `codefresh.yml`
+{% highlight yaml %}
+version: '1.0'
+steps:
+ build_image:
+ type: build
+ description: Building the image...
+ image-name: myuser/myservice
+ tag: develop # {% raw %}${{CF_BRANCH}}{% endraw %}
+
+ perform_tests:
+ image: node:5
+ working_directory: {% raw %}${{main_clone}}{% endraw %}
+ description: Performing unit tests...
+ commands:
+ - npm install gulp -g
+ - npm install
+ - gulp unit_test
+{% endhighlight %}
+
+It contains two [steps]({{site.baseurl}}/docs/pipelines/steps/), one named *build_image* that creates a docker image, and another one called *perform_tests* that runs unit test with `gulp`.
+
+If you want to know more about how steps work in Codefresh make sure to read [the introduction to pipelines]({{site.baseurl}}/docs/pipelines/introduction-to-codefresh-pipelines/) first, before moving on.
+
+## Basic pipeline syntax
+
+You can customize your build environment (pipeline) by using the Codefresh YAML file, ```codefresh.yml```. Codefresh uses the build specifications in the ```codefresh.yml``` file to execute your build. The ```codefresh.yml``` can be basic or it can include intricate build specifications.
+
+A YAML file is comprised of a series of steps that are executed in the order in which they are specified.
+
+ `codefresh.yml`
+{% highlight yaml %}
+version: '1.0'
+
+steps:
+ step-name:
+ [step-contents]
+ another-step:
+ [step-contents]
+ the-very-last-step:
+ [step-contents]
+{% endhighlight %}
+
+You must define a step type for each step, unless you are using a [freestyle step]({{site.baseurl}}/docs/pipelines/steps/freestyle/). Each step uses Docker images and containers as facilitators for execution. For example, the **Freestyle** step spins up a container and executes the specified shell commands from the YAML file.
+
+The step names should be unique within the same pipeline. This mainly affects the visualization of the pipeline when it runs.
+
+Each step produces a resource, which you can [reference](https://github.com/codefresh-contrib/python-flask-sample-app/blob/master/codefresh.yml#L23) in other steps, and are executed in real-time. For example, a **Freestyle** step can reference an image that was produced by a [**Build**]({{site.baseurl}}/docs/pipelines/steps/build/) step. This allows you to chain steps together and create highly-customized builds.
+
+
+##### Variables
+
+Step chaining and referencing is possible due to the implementation of variables in the YAML file - read more in the relevant [section]({{site.baseurl}}/docs/pipelines/variables/).
+
+
+{: .table .table-bordered .table-hover}
+| Step Type | Description |
+| ----------------------------------------------------------------------------------------------------------------- | ---------------------------------------------- |
+| [Freestyle]({{site.baseurl}}/docs/pipelines/steps/freestyle/) | Executes one or more shell commands in a container similar to `docker run`. |
+| [Build]({{site.baseurl}}/docs/pipelines/steps/build/) | Builds a Docker image like `docker build`. |
+| [Push]({{site.baseurl}}/docs/pipelines/steps/push/) | Pushes a Docker image to an external registry similar to `docker tag` and `docker push`. |
+| [Git Clone]({{site.baseurl}}/docs/pipelines/steps/git-clone/) | Overrides the default git clone behavior. |
+| [Composition]({{site.baseurl}}/docs/pipelines/steps/composition/) | Starts a Docker Composition like `docker-compose`. Discarded once the pipeline finishes. |
+| [Launch Composition]({{site.baseurl}}/docs/pipelines/steps/launch-composition/) | Starts a long term Docker composition that stays up after the end of the pipeline. |
+| [Deploy]({{site.baseurl}}/docs/pipelines/steps/deploy/) | Deploys to Kubernetes clusters. |
+| [Approval]({{site.baseurl}}/docs/pipelines/steps/approval/) | Pauses a pipeline and waits for human intervention. |
+
+
+For more information on creating your own step, see [Steps in pipelines]({{site.baseurl}}/docs/pipelines/steps/).
+
+You can also see the [full YAML specification]({{site.baseurl}}/docs/integrations/codefresh-api/#full-pipeline-specification) supported for pipelines. Note however that several fields are only accessible by using the [Codefresh API]({{site.baseurl}}/docs/integrations/codefresh-api) or [CLI](https://codefresh-io.github.io/cli/).
+
+## Yaml validation
+
+If you are editing Codefresh yaml within the Codefresh UI, the editor will automatically highlight errors as they happen.
+
+This allows you to make quick edits (and possibly run some builds) straight from the GUI. Once you are happy with your pipeline you should commit it to your repository as `codefresh.yml` (pipeline as code).
+
+{% include
+image.html
+lightbox="true"
+file="/images/codefresh-yaml/inline-editor.png"
+url="/images/codefresh-yaml/inline-editor.png"
+alt="Graphical Inline Yaml Editor"
+caption="Graphical Inline Yaml Editor"
+max-width="50%"
+%}
+
+You can also validate the pipeline yaml outside of the UI by using the [Codefresh CLI](https://codefresh-io.github.io/cli/). The CLI has a [validate parameter](https://codefresh-io.github.io/cli/validation/) that can check one or more files for syntax errors
+
+{% highlight shell %}
+{% raw %}
+$ codefresh validate codefresh.yml
+Yaml not valid:
+ - "invalid-property" is not allowed
+{% endraw %}
+{% endhighlight %}
+
+For more information on where the YAML file can be stored see the [creating pipelines page]({{site.baseurl}}/docs/configure-ci-cd-pipeline/pipelines/).
+
+## Execution flow
+
+By default, Codefresh will execute all steps in the yaml file and instantly fail the build, if any step
+presents an error. To change this behavior add the `fail_fast:false` property in any step that you wish to be ignored
+in case of errors.
+
+For example, if you have a [freestyle step]({{site.baseurl}}/docs/pipelines/steps/freestyle/) that runs integration tests, and you don't want the whole pipeline
+to fail if any of the tests fail, add the `fail_fast` line to that step:
+
+
+{% highlight yaml %}
+perform_tests:
+ image: node:9
+ description: Running integration tests
+ fail_fast: false
+ commands:
+ - gulp integration_test
+{% endhighlight %}
+
+Now the pipeline will continue to run even if the step `perform_tests` fails.
+
+Notice also that by default Codefresh pipelines run in *sequential mode*. All steps will be executed one after
+the other and in the same order as included in the `codefresh.yml` file.
+
+If you wish to use parallel steps in your pipelines, see the [parallel steps]({{site.baseurl}}/docs/pipelines/advanced-workflows/) page.
+
+## Working directories
+
+In the context of a step, a working directory can be of the following type:
+
+{: .table .table-bordered .table-hover}
+| Working Directory | Description |
+| --------------------- | -------------------------------------------- |
+| Empty | Defaults to the [Codefresh volume]({{site.baseurl}}/docs/pipelines/introduction-to-codefresh-pipelines/#sharing-the-workspace-between-build-steps) (found at `/codefresh/volume`). If there is a [git clone step]({{site.baseurl}}/docs/pipelines/steps/git-clone/) with the special name `main_clone` then the default working directory for built-in steps is now the [project folder]({{site.baseurl}}/docs/pipelines/introduction-to-codefresh-pipelines/#cloning-the-source-code) that was checked out - this only applies to [built-in]({{site.baseurl}}/docs/pipelines/steps/#built-in-steps) Codefresh steps and not [custom plugins]({{site.baseurl}}/docs/pipelines/steps/#creating-a-typed-codefresh-plugin). |
+| Variable that contains the ID of a [Git-Clone]({{site.baseurl}}/docs/codefresh-yaml/steps/git-clone/) step | Runs the step within the cloned directory. |
+| Variable that contains the ID of any other step | Runs the step within the same working directory that the specified step was executed. This option is not available for [**Git-Clone**]({{site.baseurl}}/docs/codefresh-yaml/steps/git-clone/) steps. |
+| Absolute filesystem path | Treated as is within the container. |
+| Relative filesystem path | Treated as relative path from the cloned directory of the service |
+| 'IMAGE_WORK_DIR' | Use this value in order to use the image working directory for example: `working_directory: IMAGE_WORK_DIR` |
+
+
+## Retrying a step
+
+Sometimes you want to retry a step that has a problem. Network hiccups, transient failures and flaky test environments are common problems that prevent pipelines from working in a predictable manner.
+
+Codefresh allows you to retry any of your steps with the built-in syntax:
+
+ `yaml`
+{% highlight yaml %}
+{% raw %}
+step-name:
+ [step-contents]
+ retry:
+ maxAttempts: 5
+ delay: 5
+ exponentialFactor: 2
+{% endraw %}
+{% endhighlight %}
+
+The `retry:` block has the following parameters:
+
+ * `maxAttempts` defines how many times this step will run again if there are execution errors (default is 1 and the Max. is 10).
+ * `delay` is the number of seconds to wait before each attempt (default is 5 seconds and the Max. is 60 seconds).
+ * `exponentialFactor` defines how many times the delay should be multiplied by itself after each attempt (default is 1 and Max. is 5).
+
+All parameters are optional. The exponentialFactor works like this:
+* exponentialFactor=1, delay=5 => each time wait 5 seconds before trying again, no matter the number of attempts.
+* exponentialFactor=2, delay=5 => first retry will have a delay of 25 seconds, third will have 125 and so on.
+
+
+Here is a full example:
+
+ `codefresh.yml`
+{% highlight yaml %}
+{% raw %}
+version: '1.0'
+steps:
+ MyAppDockerImage:
+ title: Building Docker Image
+ type: build
+ image_name: my-own-app
+ retry:
+ maxAttempts: 2
+ MyUnitTests:
+ title: Running Unit tests
+ image: ${{MyAppDockerImage}}
+ commands:
+ - ./my_unit_tests.sh
+ retry:
+ maxAttempts: 3
+ delay: 5
+ PushingToRegistry:
+ type: push
+ title: Pushing To Registry
+ candidate: ${{MyAppDockerImage}}
+ tag: '${{CF_BRANCH}}'
+ retry:
+ maxAttempts: 3
+ delay: 3
+ exponentialFactor: 2
+{% endraw %}
+{% endhighlight %}
+
+Notice that Codefresh also provides the following variables that allow you change your script/applications according to the retry attempts:
+
+* `CF_CURRENT_ATTEMPT` contains the number of the current retry attempt.
+* `CF_MAX_ATTEMPTS` contains the total number of attempts defined.
+
+The retry mechanism is available for all kinds of [steps]({{site.baseurl}}/docs/pipelines/steps/).
+
+## Escaping strings
+
+If you want to use strings inside your pipeline that create conflicts with the Codefresh syntax parser (for example they are YAML themselves) you need
+to escape them using multi-line strings with the `>-` and `|-` characters.
+
+The following pipeline is not parsed correctly because the echo command is using the yaml `:` character
+
+{% highlight yaml %}
+{% raw %}
+version: "1.0"
+steps:
+ test:
+ title: "Running test"
+ type: "freestyle"
+ image: "alpine:3.9"
+ commands:
+ - echo hello: world
+{% endraw %}
+{% endhighlight %}
+
+You can fix this issue by using a multi-line YAML string:
+
+{% highlight yaml %}
+{% raw %}
+version: "1.0"
+steps:
+ test:
+ title: "Running test"
+ type: "freestyle"
+ image: "alpine:3.9"
+ commands:
+ - |-
+ echo hello: world
+{% endraw %}
+{% endhighlight %}
+
+The `|-` character keeps the line breaks of the text (but removes the last one). Use the `>-` character if you want to convert line breaks to spaces.
+For more information see the [YAML specification](https://yaml.org/spec/1.2/spec.html).
+
+## Using YAML anchors to avoid repetition
+
+Codefresh also supports yaml anchors, references and extends. These allow you to keep
+your pipeline [DRY](https://en.wikipedia.org/wiki/Don%27t_repeat_yourself).
+
+For example, let's say that you have two freestyle steps:
+
+1. The first one fills a MySQL server with data.
+1. The second one runs integration tests that use the MySQL server.
+
+Here is the respective pipeline:
+
+ `codefresh.yml`
+{% highlight yaml %}
+version: '1.0'
+steps:
+ preLoadDatabase:
+ title: Loading Data
+ image: alpine
+ commands:
+ - printenv
+ - echo "Loading DB"
+ environment: &my_common_envs
+ - MYSQL_HOST=mysql
+ - MYSQL_USER=user
+ - MYSQL_PASS=password
+ - MYSQL_PORT=3351
+ runTests:
+ title: Integration tests
+ image: alpine
+ commands:
+ - printenv
+ - echo "Running tests"
+ environment: *my_common_envs # Same MYSQL_HOST, MYSQL_USER etc.
+{% endhighlight %}
+
+Instead of repeating the same environment variables in both steps, we can create them once and then just reference them in the second step with the `*` character.
+
+You can also define anchors at the top of the pipeline in the special `indicators` block:
+
+ `codefresh.yml`
+{% highlight yaml %}
+version: '1.0'
+
+indicators:
+ - environment: &my_common_envs
+ - MYSQL_HOST=mysql
+ - MYSQL_USER=user
+ - MYSQL_PASS=password
+ - MYSQL_PORT=3351
+
+steps:
+ preLoadDatabase:
+ title: Loading Data
+ image: alpine
+ commands:
+ - printenv
+ - echo "Loading DB"
+ environment: *my_common_envs # Same MYSQL_HOST, MYSQL_USER etc.
+ runTests:
+ title: Integration tests
+ image: alpine
+ commands:
+ - printenv
+ - echo "Running tests"
+ environment: *my_common_envs # Same MYSQL_HOST, MYSQL_USER etc.
+
+{% endhighlight %}
+
+
+Finally, you can also extend steps like below:
+
+ `codefresh.yml`
+{% highlight yaml %}
+version: '1.0'
+steps:
+ deploy_to_k8_staging: &my_basic_deployment
+ title: deploying to cluster
+ type: deploy
+ kind: kubernetes
+ cluster: myStagingCluster
+ namespace: sales
+ service: my-python-app
+ deploy_to_k8_prod:
+ <<: *my_basic_deployment
+ cluster: myProdCluster # only cluster differs, everything else is the same
+
+{% endhighlight %}
+
+Here we deploy to two kubernetes clusters. The first step defines the staging deployment.
+For the second step, we extend the first one and only change the name of the cluster
+to point to production. Everything else (i.e. namespace and service) is exactly the same.
+
+
+## Related articles
+[Pipeline steps]({{site.baseurl}}/docs/pipelines/steps/)
+[Variables]({{site.baseurl}}/docs/pipelines/variables/)
+[Advanced workflows]({{site.baseurl}}/docs/pipelines/advanced-workflows/)
+[Creating pipelines]({{site.baseurl}}/docs/pipelines/pipelines/)
+[YAML examples]({{site.baseurl}}/docs/yaml-examples/examples/)
+
+
+
+
+
+
+
diff --git a/_docs/pipelines/concurrency-limit.md b/_docs/workflows/concurrency-limit.md
similarity index 98%
rename from _docs/pipelines/concurrency-limit.md
rename to _docs/workflows/concurrency-limit.md
index 780a53a4..405361dd 100644
--- a/_docs/pipelines/concurrency-limit.md
+++ b/_docs/workflows/concurrency-limit.md
@@ -1,13 +1,13 @@
---
title: "Selectors for concurrency synchronization"
description: ""
-group: pipelines
+group: workflows
toc: true
---
Argo Workflows has a synchronization mechanism to limit parallel execution of specific workflows or templates within workflows, as required.
-The mechanism enforces this with either semaphore or mutex synchronization configurations. For detailed information, see [Synchronization](https://argoproj.github.io/argo-workflows/synchronization/).
+The mechanism enforces this with either semaphore or mutex synchronization configurations. For detailed information, see [Synchronization](https://argoproj.github.io/argo-workflows/synchronization/){:target="\_blank"}.
Codefresh supports an additional level of concurrency synchronization, with _selectors_, for both workflows and templates.
@@ -163,4 +163,6 @@ synchronization:
- synchronization-wf-8lf9b
semaphore: argo/ConfigMap/semaphore-config/workflow?repository=denis-codefresh/argo-workflows&branch=feature
```
+## Related articles
+[Creating workflows]({{site.baseurl}}/docs/workflows/create-pipeline)
diff --git a/_docs/pipelines/configure-artifact-repository.md b/_docs/workflows/configure-artifact-repository.md
similarity index 98%
rename from _docs/pipelines/configure-artifact-repository.md
rename to _docs/workflows/configure-artifact-repository.md
index 894ec98c..3f4b6057 100644
--- a/_docs/pipelines/configure-artifact-repository.md
+++ b/_docs/workflows/configure-artifact-repository.md
@@ -1,7 +1,7 @@
---
title: "Configure artifact repository"
description: ""
-group: pipelines
+group: workflows
toc: true
---
@@ -179,3 +179,6 @@ As the final step in configuring the artifact repository, for the `argo-server`
1. Wait for the configuration changes to take effect on the cluster.
1. Check the `argo-server` service account and verify that it is updated with the user-provided `annotation`.
1. Select the `argo-server-<#>` pod or pods and delete them.
+
+## Related articles
+[Creating workflows]({{site.baseurl}}/docs/workflows/create-pipeline)
\ No newline at end of file
diff --git a/_docs/pipelines/create-pipeline.md b/_docs/workflows/create-pipeline.md
similarity index 97%
rename from _docs/pipelines/create-pipeline.md
rename to _docs/workflows/create-pipeline.md
index 0f11bf6a..60cb8076 100644
--- a/_docs/pipelines/create-pipeline.md
+++ b/_docs/workflows/create-pipeline.md
@@ -1,7 +1,7 @@
---
-title: "Pipeline creation"
+title: "Creating workflows"
description: ""
-group: pipelines
+group: workflows
toc: true
---
@@ -33,7 +33,7 @@ An intuitive selection mechanism enables you to easily select and configure each
### Delivery Pipeline creation flow
Here's a high-level overview of the Delivery Pipeline creation flow.
-For step-by-step instructions, see [How to: create a Delivery Pipeline]({{site.baseurl}}/docs/pipelines/create-pipeline/#how-to-create-a-delivery-pipeline).
+For step-by-step instructions, see [How to: create a Delivery Pipeline]({{site.baseurl}}/docs/workflows/create-pipeline/#how-to-create-a-delivery-pipeline).
1. Define pipeline name and select Workflow Template to execute
1. Define default values for pipeline workflow template arguments
@@ -56,7 +56,7 @@ In the Delivery Pipeline wizard, we have our starter Workflow Template to use as
-> To share artifacts between steps in workflows, and to view archived logs for completed workflows, you must [configure an artifact repository in Codefresh]({{site.baseurl}}/docs/pipelines/configure-artifact-repository).
+> To share artifacts between steps in workflows, and to view archived logs for completed workflows, you must [configure an artifact repository in Codefresh]({{site.baseurl}}/docs/workflows/configure-artifact-repository).
@@ -278,3 +278,7 @@ Follow the step-by-step instructions to guide you through Delivery Pipeline wiza
Codefresh commits the pipeline to the Git repository, and then syncs it to the cluster. Wait a few seconds for the sync to complete, and verify that the pipeline is displayed in the [Delivery Pipelines](https://g.codefresh.io/2.0/pipelines){:target="\_blank"} page.
+## Related articles
+[Selectors for concurrency synchronization]({{site.baseurl}}/docs/workflows/concurrency-limit)
+[Nested workflows]({{site.baseurl}}/docs/workflows/nested-workflows)
+[Configure artifact repository]({{site.baseurl}}/docs/workflows/configure-artifact-repository)
diff --git a/_docs/pipelines/nested-workflows.md b/_docs/workflows/nested-workflows.md
similarity index 99%
rename from _docs/pipelines/nested-workflows.md
rename to _docs/workflows/nested-workflows.md
index 7264b1bf..7539c44d 100644
--- a/_docs/pipelines/nested-workflows.md
+++ b/_docs/workflows/nested-workflows.md
@@ -1,7 +1,7 @@
---
title: "Nested workflows"
description: ""
-group: pipelines
+group: workflows
toc: true
---
diff --git a/_docs/pipelines/sharing-file-system.md b/_docs/workflows/sharing-file-system.md
similarity index 99%
rename from _docs/pipelines/sharing-file-system.md
rename to _docs/workflows/sharing-file-system.md
index 12722a43..c01e0125 100644
--- a/_docs/pipelines/sharing-file-system.md
+++ b/_docs/workflows/sharing-file-system.md
@@ -1,7 +1,7 @@
---
title: "Sharing file systems"
description: ""
-group: pipelines
+group: workflows
toc: true
---
diff --git a/_docs/pipelines/workflows.md b/_docs/workflows/workflows.md
similarity index 100%
rename from _docs/pipelines/workflows.md
rename to _docs/workflows/workflows.md
diff --git a/images/pipeline/badges/view-public-logs.png b/images/pipeline/badges/view-public-logs.png
index 395c4046..5589bd62 100644
Binary files a/images/pipeline/badges/view-public-logs.png and b/images/pipeline/badges/view-public-logs.png differ
diff --git a/images/pipeline/create/add-pipeline-to-project.png b/images/pipeline/create/add-pipeline-to-project.png
new file mode 100644
index 00000000..41fd8dd8
Binary files /dev/null and b/images/pipeline/create/add-pipeline-to-project.png differ
diff --git a/images/pipeline/create/create-template-menu.png b/images/pipeline/create/create-template-menu.png
new file mode 100644
index 00000000..1259fa3e
Binary files /dev/null and b/images/pipeline/create/create-template-menu.png differ
diff --git a/images/pipeline/create/custom-yml.png b/images/pipeline/create/custom-yml.png
new file mode 100644
index 00000000..925d9ad8
Binary files /dev/null and b/images/pipeline/create/custom-yml.png differ
diff --git a/images/pipeline/create/editor.png b/images/pipeline/create/editor.png
new file mode 100644
index 00000000..cea04bef
Binary files /dev/null and b/images/pipeline/create/editor.png differ
diff --git a/images/pipeline/create/external-resources.png b/images/pipeline/create/external-resources.png
new file mode 100644
index 00000000..559439dc
Binary files /dev/null and b/images/pipeline/create/external-resources.png differ
diff --git a/images/pipeline/create/inline-editor.png b/images/pipeline/create/inline-editor.png
new file mode 100644
index 00000000..94c82166
Binary files /dev/null and b/images/pipeline/create/inline-editor.png differ
diff --git a/images/pipeline/create/pipeline-from-internal-repo.png b/images/pipeline/create/pipeline-from-internal-repo.png
new file mode 100644
index 00000000..29d47914
Binary files /dev/null and b/images/pipeline/create/pipeline-from-internal-repo.png differ
diff --git a/images/pipeline/create/pipelines-from-repository.png b/images/pipeline/create/pipelines-from-repository.png
new file mode 100644
index 00000000..b1205e53
Binary files /dev/null and b/images/pipeline/create/pipelines-from-repository.png differ
diff --git a/images/pipeline/create/pipelines-no-repository.png b/images/pipeline/create/pipelines-no-repository.png
new file mode 100644
index 00000000..7b9c7e39
Binary files /dev/null and b/images/pipeline/create/pipelines-no-repository.png differ
diff --git a/images/pipeline/create/predefined-steps.png b/images/pipeline/create/predefined-steps.png
new file mode 100644
index 00000000..15a6939d
Binary files /dev/null and b/images/pipeline/create/predefined-steps.png differ
diff --git a/images/pipeline/create/set-build-disk-space.png b/images/pipeline/create/set-build-disk-space.png
new file mode 100644
index 00000000..374d2cd0
Binary files /dev/null and b/images/pipeline/create/set-build-disk-space.png differ
diff --git a/images/pipeline/pipeline-settings/pause-pipeline-enabled.png b/images/pipeline/pipeline-settings/pause-pipeline-enabled.png
new file mode 100644
index 00000000..8c7c4305
Binary files /dev/null and b/images/pipeline/pipeline-settings/pause-pipeline-enabled.png differ
diff --git a/images/pipeline/pipeline-settings/pipeline-settings-ui.png b/images/pipeline/pipeline-settings/pipeline-settings-ui.png
new file mode 100644
index 00000000..105544c9
Binary files /dev/null and b/images/pipeline/pipeline-settings/pipeline-settings-ui.png differ