diff --git a/samples-v2/fan_in_fan_out/.funcignore b/samples-v2/fan_in_fan_out/.funcignore
new file mode 100644
index 00000000..0678ea2b
--- /dev/null
+++ b/samples-v2/fan_in_fan_out/.funcignore
@@ -0,0 +1,5 @@
+.git*
+.vscode
+local.settings.json
+test
+.venv
\ No newline at end of file
diff --git a/samples-v2/fan_in_fan_out/.gitignore b/samples-v2/fan_in_fan_out/.gitignore
new file mode 100644
index 00000000..a10127be
--- /dev/null
+++ b/samples-v2/fan_in_fan_out/.gitignore
@@ -0,0 +1,130 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+pip-wheel-metadata/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a Python script from a template
+# before PyInstaller builds the exe, so as to inject the date and other info into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+.hypothesis/
+.pytest_cache/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+.python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in collaborative projects with platform-specific dependencies or dependencies that
+# lack cross-platform support, pipenv may install dependencies that don't work, or fail to
+# install all needed dependencies.
+#Pipfile.lock
+
+# celery beat schedule file
+celerybeat-schedule
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# Azure Functions artifacts
+bin
+obj
+appsettings.json
+local.settings.json
+.python_packages
\ No newline at end of file
diff --git a/samples-v2/fan_in_fan_out/README.md b/samples-v2/fan_in_fan_out/README.md
new file mode 100644
index 00000000..2c8a8484
--- /dev/null
+++ b/samples-v2/fan_in_fan_out/README.md
@@ -0,0 +1,21 @@
+# Fan-Out Fan-In
+
+This directory contains an executable version of [the fan-out/fan-in (cloud backup) tutorial](https://docs.microsoft.com/en-us/azure/azure-functions/durable/durable-functions-cloud-backup?tabs=python). Refer to that tutorial for instructions on how to run this sample.
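+
+As a quick local smoke test (a sketch only: it assumes the app is running via `func start` on the default port 7071, and `"."` is just a placeholder for the directory to back up), the `E2_BackupSiteContent` orchestration can be started with a short script like this:
+
+```python
+import json
+import urllib.request
+
+# Start the E2_BackupSiteContent orchestration on a locally running Functions host.
+url = "http://localhost:7071/api/orchestrators/E2_BackupSiteContent"
+payload = json.dumps(".").encode()  # JSON-encoded root directory to back up; "." is only an example
+
+request = urllib.request.Request(url, data=payload, method="POST")
+with urllib.request.urlopen(request) as response:
+    status = json.load(response)
+
+# The 202 response body contains management URLs such as statusQueryGetUri for polling progress.
+print(status["statusQueryGetUri"])
+```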
\ No newline at end of file
diff --git a/samples-v2/fan_in_fan_out/extensions.csproj b/samples-v2/fan_in_fan_out/extensions.csproj
new file mode 100644
index 00000000..1a58b47a
--- /dev/null
+++ b/samples-v2/fan_in_fan_out/extensions.csproj
@@ -0,0 +1,11 @@
+<Project Sdk="Microsoft.NET.Sdk">
+  <PropertyGroup>
+    <TargetFramework>netcoreapp3.1</TargetFramework>
+    <WarningsAsErrors></WarningsAsErrors>
+    <DefaultItemExcludes>**</DefaultItemExcludes>
+  </PropertyGroup>
+
+  <ItemGroup>
+  </ItemGroup>
+
+</Project>
\ No newline at end of file
diff --git a/samples-v2/fan_in_fan_out/function_app.py b/samples-v2/fan_in_fan_out/function_app.py
new file mode 100644
index 00000000..ff313c2e
--- /dev/null
+++ b/samples-v2/fan_in_fan_out/function_app.py
@@ -0,0 +1,89 @@
+from typing import List
+
+import os
+from os.path import dirname
+
+import json
+import pathlib
+import logging
+
+from azure.storage.blob import BlobServiceClient
+from azure.core.exceptions import ResourceExistsError
+
+import azure.functions as func
+import azure.durable_functions as df
+
+myApp = df.DFApp(http_auth_level=func.AuthLevel.ANONYMOUS)
+
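+# HTTP starter: decodes the JSON request body (a root directory path) and starts the orchestrator named in the route.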
+@myApp.route(route="orchestrators/{functionName}")
+@myApp.durable_client_input(client_name="client")
+async def HttpStart(req: func.HttpRequest, client):
+ payload: str = json.loads(req.get_body().decode()) # Load JSON post request data
+ instance_id = await client.start_new(req.route_params["functionName"], client_input=payload)
+
+ logging.info(f"Started orchestration with ID = '{instance_id}'.")
+
+ return client.create_check_status_response(req, instance_id)
+
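+# Orchestrator: fans out one E2_CopyFileToBlob call per file returned by E2_GetFileList, then fans in by summing the bytes copied.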
+@myApp.orchestration_trigger(context_name="context")
+def E2_BackupSiteContent(context: df.DurableOrchestrationContext):
+ root_directory: str = context.get_input()
+
+ if not root_directory:
+ raise Exception("A directory path is required as input")
+
+ files = yield context.call_activity("E2_GetFileList", root_directory)
+ tasks = []
+ for file in files:
+ tasks.append(context.call_activity("E2_CopyFileToBlob", file))
+
+ results = yield context.task_all(tasks)
+ total_bytes = sum(results)
+ return total_bytes
+
+connect_str = os.getenv('AzureWebJobsStorage')
+
+@myApp.activity_trigger(input_name="rootDirectory")
+def E2_GetFileList(rootDirectory):
+ all_file_paths = []
+ # We walk the file system
+ for path, _, files in os.walk(rootDirectory):
+ # We copy the code for activities and orchestrators
+ if "E2_" in path:
+ # For each file, we add their full-path to the list
+ for name in files:
+ if name == "__init__.py" or name == "function.json":
+ file_path = os.path.join(path, name)
+ all_file_paths.append(file_path)
+
+ return all_file_paths
+
+@myApp.activity_trigger(input_name="filePath")
+def E2_CopyFileToBlob(filePath):
+ # Create the BlobServiceClient object which will be used to create a container client
+ blob_service_client = BlobServiceClient.from_connection_string(connect_str)
+
+ # Create a unique name for the container
+ container_name = "backups"
+
+ # Create the container if it does not exist
+ try:
+ blob_service_client.create_container(container_name)
+ except ResourceExistsError:
+ pass
+
+ # Create a blob client using the local file name as the name for the blob
+ parent_dir, fname = pathlib.Path(filePath).parts[-2:] # Get last two path components
+ blob_name = parent_dir + "_" + fname
+ blob_client = blob_service_client.get_blob_client(container=container_name, blob=blob_name)
+
+ # Count bytes in file
+ byte_count = os.path.getsize(filePath)
+
+ # Upload the created file
+ with open(filePath, "rb") as data:
+ blob_client.upload_blob(data)
+
+ return byte_count
\ No newline at end of file
diff --git a/samples-v2/fan_in_fan_out/host.json b/samples-v2/fan_in_fan_out/host.json
new file mode 100644
index 00000000..278b52cd
--- /dev/null
+++ b/samples-v2/fan_in_fan_out/host.json
@@ -0,0 +1,11 @@
+{
+ "version": "2.0",
+ "logging": {
+ "applicationInsights": {
+ "samplingSettings": {
+ "isEnabled": true,
+ "excludedTypes": "Request"
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/samples-v2/fan_in_fan_out/proxies.json b/samples-v2/fan_in_fan_out/proxies.json
new file mode 100644
index 00000000..b385252f
--- /dev/null
+++ b/samples-v2/fan_in_fan_out/proxies.json
@@ -0,0 +1,4 @@
+{
+ "$schema": "http://json.schemastore.org/proxies",
+ "proxies": {}
+}
diff --git a/samples-v2/fan_in_fan_out/requirements.txt b/samples-v2/fan_in_fan_out/requirements.txt
new file mode 100644
index 00000000..1b13a440
--- /dev/null
+++ b/samples-v2/fan_in_fan_out/requirements.txt
@@ -0,0 +1,7 @@
+# DO NOT include azure-functions-worker in this file
+# The Python Worker is managed by the Azure Functions platform
+# Manually managing azure-functions-worker may cause unexpected issues
+
+azure-functions
+azure-functions-durable
+azure-storage-blob
\ No newline at end of file
diff --git a/samples-v2/function_chaining/.funcignore b/samples-v2/function_chaining/.funcignore
new file mode 100644
index 00000000..0678ea2b
--- /dev/null
+++ b/samples-v2/function_chaining/.funcignore
@@ -0,0 +1,5 @@
+.git*
+.vscode
+local.settings.json
+test
+.venv
\ No newline at end of file
diff --git a/samples-v2/function_chaining/.gitignore b/samples-v2/function_chaining/.gitignore
new file mode 100644
index 00000000..a10127be
--- /dev/null
+++ b/samples-v2/function_chaining/.gitignore
@@ -0,0 +1,130 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+pip-wheel-metadata/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a Python script from a template
+# before PyInstaller builds the exe, so as to inject the date and other info into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+.hypothesis/
+.pytest_cache/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+.python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in collaborative projects with platform-specific dependencies or dependencies that
+# lack cross-platform support, pipenv may install dependencies that don't work, or fail to
+# install all needed dependencies.
+#Pipfile.lock
+
+# celery beat schedule file
+celerybeat-schedule
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# Azure Functions artifacts
+bin
+obj
+appsettings.json
+local.settings.json
+.python_packages
\ No newline at end of file
diff --git a/samples-v2/function_chaining/README.md b/samples-v2/function_chaining/README.md
new file mode 100644
index 00000000..b4e3123b
--- /dev/null
+++ b/samples-v2/function_chaining/README.md
@@ -0,0 +1,19 @@
+# Function Chaining
+
+This directory contains an executable version of [the function chaining tutorial](https://docs.microsoft.com/en-us/azure/azure-functions/durable/durable-functions-sequence?tabs=python). Refer to that tutorial for instructions on how to run this sample.
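+
+As a quick local smoke test (a sketch only: it assumes the app is running via `func start` on the default port 7071), the `E1_SayHello` orchestration can be started with a short script like this:
+
+```python
+import json
+import urllib.request
+
+# Start the E1_SayHello orchestration on a locally running Functions host (a plain GET is enough here).
+url = "http://localhost:7071/api/orchestrators/E1_SayHello"
+
+with urllib.request.urlopen(url) as response:
+    status = json.load(response)
+
+# The 202 response body contains management URLs such as statusQueryGetUri for polling progress.
+print(status["statusQueryGetUri"])
+```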
\ No newline at end of file
diff --git a/samples-v2/function_chaining/extensions.csproj b/samples-v2/function_chaining/extensions.csproj
new file mode 100644
index 00000000..1a58b47a
--- /dev/null
+++ b/samples-v2/function_chaining/extensions.csproj
@@ -0,0 +1,11 @@
+<Project Sdk="Microsoft.NET.Sdk">
+  <PropertyGroup>
+    <TargetFramework>netcoreapp3.1</TargetFramework>
+    <WarningsAsErrors></WarningsAsErrors>
+    <DefaultItemExcludes>**</DefaultItemExcludes>
+  </PropertyGroup>
+
+  <ItemGroup>
+  </ItemGroup>
+
+</Project>
\ No newline at end of file
diff --git a/samples-v2/function_chaining/function_app.py b/samples-v2/function_chaining/function_app.py
new file mode 100644
index 00000000..d92c83ac
--- /dev/null
+++ b/samples-v2/function_chaining/function_app.py
@@ -0,0 +1,28 @@
+import logging
+import azure.functions as func
+import azure.durable_functions as df
+
+myApp = df.DFApp(http_auth_level=func.AuthLevel.ANONYMOUS)
+
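+# HTTP starter: starts the orchestrator named in the route and returns the standard status-check response.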
+@myApp.route(route="orchestrators/{functionName}")
+@myApp.durable_client_input(client_name="client")
+async def HttpStart(req: func.HttpRequest, client):
+ function_name = req.route_params.get('functionName')
+ instance_id = await client.start_new(function_name)
+
+ logging.info(f"Started orchestration with ID = '{instance_id}'.")
+ return client.create_check_status_response(req, instance_id)
+
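+# Orchestrator: calls the say_hello activity for three cities in sequence and returns the collected results.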
+@myApp.orchestration_trigger(context_name="context")
+def E1_SayHello(context: df.DurableOrchestrationContext):
+ result1 = yield context.call_activity('say_hello', "Tokyo")
+ result2 = yield context.call_activity('say_hello', "Seattle")
+ result3 = yield context.call_activity('say_hello', "London")
+ return [result1, result2, result3]
+
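+# Activity: returns a greeting for the given city.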
+@myApp.activity_trigger(input_name="city")
+def say_hello(city: str) -> str:
+ return f"Hello {city}!"
\ No newline at end of file
diff --git a/samples-v2/function_chaining/host.json b/samples-v2/function_chaining/host.json
new file mode 100644
index 00000000..278b52cd
--- /dev/null
+++ b/samples-v2/function_chaining/host.json
@@ -0,0 +1,11 @@
+{
+ "version": "2.0",
+ "logging": {
+ "applicationInsights": {
+ "samplingSettings": {
+ "isEnabled": true,
+ "excludedTypes": "Request"
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/samples-v2/function_chaining/proxies.json b/samples-v2/function_chaining/proxies.json
new file mode 100644
index 00000000..b385252f
--- /dev/null
+++ b/samples-v2/function_chaining/proxies.json
@@ -0,0 +1,4 @@
+{
+ "$schema": "http://json.schemastore.org/proxies",
+ "proxies": {}
+}
diff --git a/samples-v2/function_chaining/requirements.txt b/samples-v2/function_chaining/requirements.txt
new file mode 100644
index 00000000..58ba02bf
--- /dev/null
+++ b/samples-v2/function_chaining/requirements.txt
@@ -0,0 +1,6 @@
+# DO NOT include azure-functions-worker in this file
+# The Python Worker is managed by the Azure Functions platform
+# Manually managing azure-functions-worker may cause unexpected issues
+
+azure-functions
+azure-functions-durable