This repository was archived by the owner on Nov 27, 2024. It is now read-only.

Commit 3c3600c

Merge pull request #79 from saddam213/LCM_LoRA
Revert commit 3808599

2 parents 6b27283 + 5863c6d

1 file changed (+1, -5 lines)

OnnxStack.StableDiffusion/Diffusers/LatentConsistency/LatentConsistencyDiffuser.cs

Lines changed: 1 addition & 5 deletions
@@ -108,9 +108,6 @@ protected override async Task<DenseTensor<float>> SchedulerStepAsync(StableDiffu
             // Get Model metadata
             var metadata = _onnxModelService.GetModelMetadata(modelOptions, OnnxModelType.Unet);
 
-            // Some LCM variants require no guidance embeds
-            var guidanceEmbeddingsRequired = metadata.Inputs.Count == 4;
-
             // Loop though the timesteps
             var step = 0;
             foreach (var timestep in timesteps)
@@ -130,8 +127,7 @@ protected override async Task<DenseTensor<float>> SchedulerStepAsync(StableDiffu
                 inferenceParameters.AddInputTensor(inputTensor);
                 inferenceParameters.AddInputTensor(timestepTensor);
                 inferenceParameters.AddInputTensor(promptEmbeddings.PromptEmbeds);
-                if(guidanceEmbeddingsRequired)
-                    inferenceParameters.AddInputTensor(guidanceEmbeddings);
+                inferenceParameters.AddInputTensor(guidanceEmbeddings);
                 inferenceParameters.AddOutputBuffer(outputDimension);
 
                 var results = await _onnxModelService.RunInferenceAsync(modelOptions, OnnxModelType.Unet, inferenceParameters);
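
For context on the change above: the guard being reverted bound the guidance-embedding tensor only when the UNet's ONNX metadata declared a fourth input, whereas after this merge the tensor is always bound. The standalone sketch below illustrates that metadata check using the Microsoft.ML.OnnxRuntime API directly rather than OnnxStack's service layer; the model path, the input names (sample, timestep, encoder_hidden_states, timestep_cond) and the tensor shapes are illustrative assumptions, not values taken from this repository.

    using System;
    using System.Collections.Generic;
    using Microsoft.ML.OnnxRuntime;
    using Microsoft.ML.OnnxRuntime.Tensors;

    class GuidanceInputCheck
    {
        static void Main()
        {
            // Hypothetical path to an ONNX-exported LCM / LCM-LoRA UNet.
            using var session = new InferenceSession("unet/model.onnx");

            // Inspect the graph's declared inputs. The reverted guard relied on the
            // same signal: four declared inputs means the model expects a guidance
            // embedding, three means it does not.
            bool guidanceEmbeddingsRequired = session.InputMetadata.Count == 4;

            // Placeholder zero tensors; shapes assume a 512x512 SD 1.5-style UNet.
            var inputs = new List<NamedOnnxValue>
            {
                NamedOnnxValue.CreateFromTensor("sample", new DenseTensor<float>(new[] { 1, 4, 64, 64 })),
                NamedOnnxValue.CreateFromTensor("timestep", new DenseTensor<long>(new[] { 1 })),
                NamedOnnxValue.CreateFromTensor("encoder_hidden_states", new DenseTensor<float>(new[] { 1, 77, 768 }))
            };

            // Only bind the guidance embedding when the model declares the input;
            // passing an undeclared input name makes ONNX Runtime throw.
            if (guidanceEmbeddingsRequired)
                inputs.Add(NamedOnnxValue.CreateFromTensor("timestep_cond", new DenseTensor<float>(new[] { 1, 256 })));

            using var results = session.Run(inputs);
            Console.WriteLine($"UNet ran with {inputs.Count} bound inputs.");
        }
    }

In OnnxStack itself the same decision was taken from _onnxModelService.GetModelMetadata(...).Inputs.Count, as the removed lines show; this merge drops that check and always passes guidanceEmbeddings.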
