This repository was archived by the owner on Nov 27, 2024. It is now read-only.
Merged
40 changes: 40 additions & 0 deletions OnnxStack.Console/appsettings.json
@@ -132,6 +132,46 @@
"OnnxModelPath": "D:\\Repositories\\photon\\vae_decoder\\model.onnx"
}
]
},
{
"Name": "InstaFlow",
"IsEnabled": true,
"PadTokenId": 49407,
"BlankTokenId": 49407,
"TokenizerLimit": 77,
"EmbeddingsLength": 768,
"ScaleFactor": 0.18215,
"PipelineType": "InstaFlow",
"Diffusers": [
"TextToImage"
],
"DeviceId": 0,
"InterOpNumThreads": 0,
"IntraOpNumThreads": 0,
"ExecutionMode": "ORT_SEQUENTIAL",
"ExecutionProvider": "DirectML",
"ModelConfigurations": [
{
"Type": "Tokenizer",
"OnnxModelPath": "D:\\Repositories\\InstaFlow-0.9B-ONNX\\tokenizer\\model.onnx"
},
{
"Type": "Unet",
"OnnxModelPath": "D:\\Repositories\\InstaFlow-0.9B-ONNX\\unet\\model.onnx"
},
{
"Type": "TextEncoder",
"OnnxModelPath": "D:\\Repositories\\InstaFlow-0.9B-ONNX\\text_encoder\\model.onnx"
},
{
"Type": "VaeEncoder",
"OnnxModelPath": "D:\\Repositories\\InstaFlow-0.9B-ONNX\\vae_encoder\\model.onnx"
},
{
"Type": "VaeDecoder",
"OnnxModelPath": "D:\\Repositories\\InstaFlow-0.9B-ONNX\\vae_decoder\\model.onnx"
}
]
}
]
}
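As a side note on the configuration added above: the new model set is a plain JSON entry, so it can be bound with the standard Microsoft.Extensions.Configuration binder. The sketch below is a minimal, hypothetical illustration — InstaFlowModelSettings and the "OnnxStackConfig:ModelSets" section path are assumptions for this example, not OnnxStack's confirmed types or section names; only the JSON keys mirror the entry in this diff.

using System.Linq;
using Microsoft.Extensions.Configuration;

public sealed class InstaFlowModelSettings
{
    public string Name { get; set; }
    public bool IsEnabled { get; set; }
    public int PadTokenId { get; set; }
    public int BlankTokenId { get; set; }
    public int TokenizerLimit { get; set; }
    public int EmbeddingsLength { get; set; }
    public float ScaleFactor { get; set; }
    public string PipelineType { get; set; }
    public string ExecutionProvider { get; set; }
}

public static class InstaFlowConfigSketch
{
    // Reads appsettings.json and returns the "InstaFlow" entry, if present.
    // The section path is an assumption; adjust it to the real configuration root.
    public static InstaFlowModelSettings Load(string basePath)
    {
        var configuration = new ConfigurationBuilder()
            .SetBasePath(basePath)
            .AddJsonFile("appsettings.json", optional: false)
            .Build();

        var modelSets = configuration
            .GetSection("OnnxStackConfig:ModelSets")
            .Get<InstaFlowModelSettings[]>();

        return modelSets?.FirstOrDefault(m => m.Name == "InstaFlow");
    }
}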
6 changes: 0 additions & 6 deletions OnnxStack.StableDiffusion/Common/IScheduler.cs
@@ -1,5 +1,4 @@
using Microsoft.ML.OnnxRuntime.Tensors;
using OnnxStack.StableDiffusion.Enums;
using OnnxStack.StableDiffusion.Schedulers;
using System;
using System.Collections.Generic;
@@ -8,11 +7,6 @@ namespace OnnxStack.StableDiffusion.Common
{
public interface IScheduler : IDisposable
{
/// <summary>
/// Gets the compatible pipeline
/// </summary>
DiffuserPipelineType PipelineType { get; }

/// <summary>
/// Gets the initial noise sigma.
/// </summary>
144 changes: 144 additions & 0 deletions OnnxStack.StableDiffusion/Diffusers/InstaFlow/InstaFlowDiffuser.cs
@@ -0,0 +1,144 @@
using Microsoft.Extensions.Logging;
using Microsoft.ML.OnnxRuntime.Tensors;
using OnnxStack.Core;
using OnnxStack.Core.Config;
using OnnxStack.Core.Model;
using OnnxStack.Core.Services;
using OnnxStack.StableDiffusion.Common;
using OnnxStack.StableDiffusion.Config;
using OnnxStack.StableDiffusion.Enums;
using OnnxStack.StableDiffusion.Helpers;
using OnnxStack.StableDiffusion.Schedulers.InstaFlow;
using System;
using System.Diagnostics;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;

namespace OnnxStack.StableDiffusion.Diffusers.InstaFlow
{
public abstract class InstaFlowDiffuser : DiffuserBase, IDiffuser
{
/// <summary>
/// Initializes a new instance of the <see cref="InstaFlowDiffuser"/> class.
/// </summary>
/// <param name="configuration">The configuration.</param>
/// <param name="onnxModelService">The onnx model service.</param>
public InstaFlowDiffuser(IOnnxModelService onnxModelService, IPromptService promptService, ILogger<InstaFlowDiffuser> logger)
: base(onnxModelService, promptService, logger) { }


/// <summary>
/// Gets the type of the pipeline.
/// </summary>
public override DiffuserPipelineType PipelineType => DiffuserPipelineType.InstaFlow;


/// <summary>
/// Runs the scheduler steps.
/// </summary>
/// <param name="modelOptions">The model options.</param>
/// <param name="promptOptions">The prompt options.</param>
/// <param name="schedulerOptions">The scheduler options.</param>
/// <param name="promptEmbeddings">The prompt embeddings.</param>
/// <param name="performGuidance">if set to <c>true</c> [perform guidance].</param>
/// <param name="progressCallback">The progress callback.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns></returns>
protected override async Task<DenseTensor<float>> SchedulerStepAsync(IModelOptions modelOptions, PromptOptions promptOptions, SchedulerOptions schedulerOptions, DenseTensor<float> promptEmbeddings, bool performGuidance, Action<int, int> progressCallback = null, CancellationToken cancellationToken = default)
{
// Get Scheduler
using (var scheduler = GetScheduler(schedulerOptions))
{
// Get timesteps
var timesteps = GetTimesteps(schedulerOptions, scheduler);

// Create latent sample
var latents = await PrepareLatentsAsync(modelOptions, promptOptions, schedulerOptions, scheduler, timesteps);

// Get Model metadata
var metadata = _onnxModelService.GetModelMetadata(modelOptions, OnnxModelType.Unet);

// Get the distilled Timestep
var distilledTimestep = 1.0f / timesteps.Count;

// Loop through the timesteps
var step = 0;
foreach (var timestep in timesteps)
{
step++;
var stepTime = Stopwatch.GetTimestamp();
cancellationToken.ThrowIfCancellationRequested();

// Create input tensor.
var inputLatent = performGuidance ? latents.Repeat(2) : latents;
var inputTensor = scheduler.ScaleInput(inputLatent, timestep);
var timestepTensor = CreateTimestepTensor(inputLatent, timestep);

var outputChannels = performGuidance ? 2 : 1;
var outputDimension = schedulerOptions.GetScaledDimension(outputChannels);
using (var inferenceParameters = new OnnxInferenceParameters(metadata))
{
inferenceParameters.AddInputTensor(inputTensor);
inferenceParameters.AddInputTensor(timestepTensor);
inferenceParameters.AddInputTensor(promptEmbeddings);
inferenceParameters.AddOutputBuffer(outputDimension);

var results = await _onnxModelService.RunInferenceAsync(modelOptions, OnnxModelType.Unet, inferenceParameters);
using (var result = results.First())
{
var noisePred = result.ToDenseTensor();

// Perform guidance
if (performGuidance)
noisePred = PerformGuidance(noisePred, schedulerOptions.GuidanceScale);

// Scheduler Step
latents = scheduler.Step(noisePred, timestep, latents).Result;

latents = noisePred
.MultiplyTensorByFloat(distilledTimestep)
.AddTensors(latents);
}
}

progressCallback?.Invoke(step, timesteps.Count);
_logger?.LogEnd($"Step {step}/{timesteps.Count}", stepTime);
}

// Decode Latents
return await DecodeLatentsAsync(modelOptions, promptOptions, schedulerOptions, latents);
}
}


/// <summary>
/// Creates the timestep tensor.
/// </summary>
/// <param name="latents">The latents.</param>
/// <param name="timestep">The timestep.</param>
/// <returns></returns>
private DenseTensor<float> CreateTimestepTensor(DenseTensor<float> latents, int timestep)
{
var timestepTensor = new DenseTensor<float>(new[] { latents.Dimensions[0] });
timestepTensor.Fill(timestep);
return timestepTensor;
}


/// <summary>
/// Gets the scheduler.
/// </summary>
/// <param name="options">The options.</param>
/// <param name="schedulerConfig">The scheduler configuration.</param>
/// <returns></returns>
protected override IScheduler GetScheduler(SchedulerOptions options)
{
return options.SchedulerType switch
{
SchedulerType.InstaFlow => new InstaFlowScheduler(options),
_ => default
};
}
}
}
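For readers unfamiliar with InstaFlow, the loop above is effectively a rectified-flow Euler update: the UNet output is treated as a velocity and folded into the latents, scaled by 1 / timesteps.Count (the distilledTimestep). A minimal sketch of that arithmetic on plain arrays — illustrative only, mirroring the MultiplyTensorByFloat/AddTensors calls rather than OnnxStack's tensor API:

public static class InstaFlowStepSketch
{
    // latents <- latents + velocity * dt, where dt = 1 / total number of timesteps.
    public static float[] ApplyStep(float[] latents, float[] velocity, int timestepCount)
    {
        var dt = 1.0f / timestepCount;        // corresponds to distilledTimestep above
        var next = new float[latents.Length];
        for (var i = 0; i < latents.Length; i++)
            next[i] = latents[i] + velocity[i] * dt;
        return next;
    }
}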
56 changes: 56 additions & 0 deletions OnnxStack.StableDiffusion/Diffusers/InstaFlow/TextDiffuser.cs
@@ -0,0 +1,56 @@
using Microsoft.Extensions.Logging;
using Microsoft.ML.OnnxRuntime.Tensors;
using OnnxStack.Core.Services;
using OnnxStack.StableDiffusion.Common;
using OnnxStack.StableDiffusion.Config;
using OnnxStack.StableDiffusion.Enums;
using System.Collections.Generic;
using System.Threading.Tasks;

namespace OnnxStack.StableDiffusion.Diffusers.InstaFlow
{
public sealed class TextDiffuser : InstaFlowDiffuser
{
/// <summary>
/// Initializes a new instance of the <see cref="TextDiffuser"/> class.
/// </summary>
/// <param name="configuration">The configuration.</param>
/// <param name="onnxModelService">The onnx model service.</param>
public TextDiffuser(IOnnxModelService onnxModelService, IPromptService promptService, ILogger<InstaFlowDiffuser> logger)
: base(onnxModelService, promptService, logger)
{
}


/// <summary>
/// Gets the type of the diffuser.
/// </summary>
public override DiffuserType DiffuserType => DiffuserType.TextToImage;


/// <summary>
/// Gets the timesteps.
/// </summary>
/// <param name="prompt">The prompt.</param>
/// <param name="options">The options.</param>
/// <param name="scheduler">The scheduler.</param>
/// <returns></returns>
protected override IReadOnlyList<int> GetTimesteps(SchedulerOptions options, IScheduler scheduler)
{
return scheduler.Timesteps;
}


/// <summary>
/// Prepares the latents for inference.
/// </summary>
/// <param name="prompt">The prompt.</param>
/// <param name="options">The options.</param>
/// <param name="scheduler">The scheduler.</param>
/// <returns></returns>
protected override Task<DenseTensor<float>> PrepareLatentsAsync(IModelOptions model, PromptOptions prompt, SchedulerOptions options, IScheduler scheduler, IReadOnlyList<int> timesteps)
{
return Task.FromResult(scheduler.CreateRandomSample(options.GetScaledDimension(), scheduler.InitNoiseSigma));
}
}
}
3 changes: 2 additions & 1 deletion OnnxStack.StableDiffusion/Enums/DiffuserPipelineType.cs
@@ -3,6 +3,7 @@
public enum DiffuserPipelineType
{
StableDiffusion = 0,
LatentConsistency = 10
LatentConsistency = 10,
InstaFlow = 11,
}
}
7 changes: 5 additions & 2 deletions OnnxStack.StableDiffusion/Enums/SchedulerType.cs
@@ -22,7 +22,10 @@ public enum SchedulerType
[Display(Name = "KDPM2")]
KDPM2 = 5,

[Display(Name = "LCM")]
LCM = 20
[Display(Name = "LCM")]
LCM = 20,

[Display(Name = "InstaFlow")]
InstaFlow = 21
}
}
20 changes: 10 additions & 10 deletions OnnxStack.StableDiffusion/Extensions.cs
@@ -1,12 +1,9 @@
using Microsoft.ML.OnnxRuntime;
using NumSharp;
using OnnxStack.StableDiffusion.Config;
using OnnxStack.StableDiffusion.Enums;

using System;
using System.Linq;
using System.Numerics;
using System.Threading.Tasks;

namespace OnnxStack.StableDiffusion
{
@@ -102,20 +99,23 @@ public static SchedulerType[] GetSchedulerTypes(this DiffuserPipelineType pipelineType)
{
return pipelineType switch
{
DiffuserPipelineType.StableDiffusion => new[]
DiffuserPipelineType.InstaFlow => new[]
{
SchedulerType.InstaFlow
},
DiffuserPipelineType.LatentConsistency => new[]
{
SchedulerType.LCM
},
_ => new[]
{
SchedulerType.LMS,
SchedulerType.Euler,
SchedulerType.EulerAncestral,
SchedulerType.DDPM,
SchedulerType.DDIM,
SchedulerType.KDPM2
},
DiffuserPipelineType.LatentConsistency => new[]
{
SchedulerType.LCM
},
_ => default
}
};
}

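With the reordered switch above, the extension now resolves scheduler compatibility per pipeline; for example:

// Illustrative usage of the updated extension method.
var instaFlowSchedulers = DiffuserPipelineType.InstaFlow.GetSchedulerTypes();
// -> new[] { SchedulerType.InstaFlow }

var lcmSchedulers = DiffuserPipelineType.LatentConsistency.GetSchedulerTypes();
// -> new[] { SchedulerType.LCM }

var defaultSchedulers = DiffuserPipelineType.StableDiffusion.GetSchedulerTypes();
// -> LMS, Euler, EulerAncestral, DDPM, DDIM, KDPM2 (the fallback arm)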
56 changes: 56 additions & 0 deletions OnnxStack.StableDiffusion/Pipelines/InstaFlowPipeline.cs
@@ -0,0 +1,56 @@
using Microsoft.Extensions.Logging;
using OnnxStack.Core;
using OnnxStack.StableDiffusion.Common;
using OnnxStack.StableDiffusion.Diffusers;
using OnnxStack.StableDiffusion.Enums;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;

namespace OnnxStack.StableDiffusion.Pipelines
{
public sealed class InstaFlowPipeline : IPipeline
{
private readonly DiffuserPipelineType _pipelineType;
private readonly ILogger<InstaFlowPipeline> _logger;
private readonly ConcurrentDictionary<DiffuserType, IDiffuser> _diffusers;

/// <summary>
/// Initializes a new instance of the <see cref="InstaFlowPipeline"/> class.
/// </summary>
/// <param name="onnxModelService">The onnx model service.</param>
/// <param name="promptService">The prompt service.</param>
public InstaFlowPipeline(IEnumerable<IDiffuser> diffusers, ILogger<InstaFlowPipeline> logger)
{
_logger = logger;
_pipelineType = DiffuserPipelineType.InstaFlow;
_diffusers = diffusers
.Where(x => x.PipelineType == _pipelineType)
.ToConcurrentDictionary(k => k.DiffuserType, v => v);
}


/// <summary>
/// Gets the type of the pipeline.
/// </summary>
public DiffuserPipelineType PipelineType => _pipelineType;


/// <summary>
/// Gets the diffusers.
/// </summary>
public ConcurrentDictionary<DiffuserType, IDiffuser> Diffusers => _diffusers;


/// <summary>
/// Gets the diffuser.
/// </summary>
/// <param name="diffuserType">Type of the diffuser.</param>
/// <returns></returns>
public IDiffuser GetDiffuser(DiffuserType diffuserType)
{
_diffusers.TryGetValue(diffuserType, out var diffuser);
return diffuser;
}
}
}
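A short usage sketch for the new pipeline, assuming the IEnumerable&lt;IDiffuser&gt; comes from dependency injection as the constructor implies (the wiring here is hypothetical; only InstaFlowPipeline, GetDiffuser and DiffuserType.TextToImage come from this pull request):

using System.Collections.Generic;
using Microsoft.Extensions.Logging;
using OnnxStack.StableDiffusion.Common;
using OnnxStack.StableDiffusion.Enums;
using OnnxStack.StableDiffusion.Pipelines;

public static class InstaFlowPipelineSketch
{
    // Builds the pipeline from already-constructed diffusers and resolves the
    // text-to-image diffuser; returns null if no InstaFlow TextToImage diffuser was supplied.
    public static IDiffuser ResolveTextToImage(IEnumerable<IDiffuser> diffusers, ILogger<InstaFlowPipeline> logger)
    {
        var pipeline = new InstaFlowPipeline(diffusers, logger);
        return pipeline.GetDiffuser(DiffuserType.TextToImage);
    }
}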