5 changes: 5 additions & 0 deletions samples-v2/fan_in_fan_out/.funcignore
@@ -0,0 +1,5 @@
.git*
.vscode
local.settings.json
test
.venv
130 changes: 130 additions & 0 deletions samples-v2/fan_in_fan_out/.gitignore
@@ -0,0 +1,130 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other info into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
.hypothesis/
.pytest_cache/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
.python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in collaborative projects with platform-specific dependencies, or dependencies
# that lack cross-platform support, pipenv may install dependencies that don't work, or
# fail to install all needed dependencies.
#Pipfile.lock

# celery beat schedule file
celerybeat-schedule

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# Azure Functions artifacts
bin
obj
appsettings.json
local.settings.json
.python_packages
3 changes: 3 additions & 0 deletions samples-v2/fan_in_fan_out/README.md
@@ -0,0 +1,3 @@
# Fan-Out Fan-In

This directory contains an executable version of the [fan-out/fan-in cloud backup](https://docs.microsoft.com/en-us/azure/azure-functions/durable/durable-functions-cloud-backup?tabs=python) tutorial. See the linked article for instructions on how to run it.
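Once the Functions host is running (for example via `func start`, which listens on port 7071 by default), the orchestration can be kicked off by POSTing a JSON-encoded directory path to the HTTP starter. A minimal sketch, assuming a local run and a placeholder backup path:

```python
import requests

# Route defined in function_app.py; host and port assume a local `func start`.
url = "http://localhost:7071/api/orchestrators/E2_BackupSiteContent"

# HttpStart json.loads() the request body, so the directory path is sent as a JSON string.
resp = requests.post(url, json="/path/to/directory/to/backup")
resp.raise_for_status()

# The response is the standard Durable Functions check-status payload.
print(resp.json()["statusQueryGetUri"])
```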
11 changes: 11 additions & 0 deletions samples-v2/fan_in_fan_out/extensions.csproj
@@ -0,0 +1,11 @@
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>netcoreapp3.1</TargetFramework>
    <WarningsAsErrors></WarningsAsErrors>
    <DefaultItemExcludes>**</DefaultItemExcludes>
  </PropertyGroup>
  <ItemGroup>
    <PackageReference Include="Microsoft.Azure.WebJobs.Extensions.DurableTask" Version="2.9.1" />
    <PackageReference Include="Microsoft.Azure.WebJobs.Script.ExtensionsMetadataGenerator" Version="1.1.3" />
  </ItemGroup>
</Project>
87 changes: 87 additions & 0 deletions samples-v2/fan_in_fan_out/function_app.py
@@ -0,0 +1,87 @@
from typing import List

import os
from os.path import dirname

import json
import pathlib
import logging

from azure.storage.blob import BlobServiceClient
from azure.core.exceptions import ResourceExistsError

import azure.functions as func
import azure.durable_functions as df

myApp = df.DFApp(http_auth_level=func.AuthLevel.ANONYMOUS)

@myApp.route(route="orchestrators/{functionName}")
@myApp.durable_client_input(client_name="client")
async def HttpStart(req: func.HttpRequest, client):
    payload: str = json.loads(req.get_body().decode())  # Parse the JSON request body
    instance_id = await client.start_new(req.route_params["functionName"], client_input=payload)

    logging.info(f"Started orchestration with ID = '{instance_id}'.")

    return client.create_check_status_response(req, instance_id)

@myApp.orchestration_trigger(context_name="context")
def E2_BackupSiteContent(context: df.DurableOrchestrationContext):
    root_directory: str = context.get_input()

    if not root_directory:
        raise Exception("A directory path is required as input")

    # Fan out: schedule one copy activity per file, all of which run in parallel
    files = yield context.call_activity("E2_GetFileList", root_directory)
    tasks = []
    for file in files:
        tasks.append(context.call_activity("E2_CopyFileToBlob", file))

    # Fan in: wait for all copies to finish, then aggregate the uploaded byte counts
    results = yield context.task_all(tasks)
    total_bytes = sum(results)
    return total_bytes

# Storage connection string used by E2_CopyFileToBlob
connect_str = os.getenv('AzureWebJobsStorage')

@myApp.activity_trigger(input_name="rootDirectory")
def E2_GetFileList(rootDirectory):
    all_file_paths = []
    # Walk the file system under the given root directory
    for path, _, files in os.walk(rootDirectory):
        # Back up only the sample's own function code (directories named "E2_*")
        if "E2_" in path:
            # Collect the full path of each matching source file
            for name in files:
                if name == "__init__.py" or name == "function.json":
                    file_path = os.path.join(path, name)
                    all_file_paths.append(file_path)

    return all_file_paths

@myApp.activity_trigger(input_name="filePath")
def E2_CopyFileToBlob(filePath):
    # Create the BlobServiceClient used to talk to the storage account
    blob_service_client = BlobServiceClient.from_connection_string(connect_str)

    # Name of the container that holds the backups
    container_name = "backups"

    # Create the container if it does not exist yet
    try:
        blob_service_client.create_container(container_name)
    except ResourceExistsError:
        pass

    # Derive the blob name from the last two components of the local file path
    parent_dir, fname = pathlib.Path(filePath).parts[-2:]
    blob_name = parent_dir + "_" + fname
    blob_client = blob_service_client.get_blob_client(container=container_name, blob=blob_name)

    # Count the bytes in the file so the orchestrator can sum them
    byte_count = os.path.getsize(filePath)

    # Upload the file's contents to the blob
    with open(filePath, "rb") as data:
        blob_client.upload_blob(data)

    return byte_count
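For context, HttpStart above returns the standard Durable Functions check-status payload, so a caller can poll statusQueryGetUri until the orchestration finishes. A minimal sketch, assuming the requests package is available on the caller's side:

import time
import requests

def wait_for_completion(status_query_get_uri: str, poll_seconds: float = 2.0) -> dict:
    # Poll the statusQueryGetUri from create_check_status_response until the
    # orchestration reaches a terminal state, then return the final status payload.
    while True:
        status = requests.get(status_query_get_uri).json()
        if status["runtimeStatus"] in ("Completed", "Failed", "Terminated"):
            return status
        time.sleep(poll_seconds)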
11 changes: 11 additions & 0 deletions samples-v2/fan_in_fan_out/host.json
@@ -0,0 +1,11 @@
{
  "version": "2.0",
  "logging": {
    "applicationInsights": {
      "samplingSettings": {
        "isEnabled": true,
        "excludedTypes": "Request"
      }
    }
  }
}
4 changes: 4 additions & 0 deletions samples-v2/fan_in_fan_out/proxies.json
@@ -0,0 +1,4 @@
{
  "$schema": "http://json.schemastore.org/proxies",
  "proxies": {}
}
7 changes: 7 additions & 0 deletions samples-v2/fan_in_fan_out/requirements.txt
@@ -0,0 +1,7 @@
# DO NOT include azure-functions-worker in this file
# The Python Worker is managed by Azure Functions platform
# Manually managing azure-functions-worker may cause unexpected issues

azure-functions
azure-functions-durable
azure-storage-blob
5 changes: 5 additions & 0 deletions samples-v2/function_chaining/.funcignore
@@ -0,0 +1,5 @@
.git*
.vscode
local.settings.json
test
.venv