actually pass input prompt length to inference

This commit is contained in:
mrq 2024-11-11 20:39:48 -06:00
parent b1df6a7bed
commit ec92613847
2 changed files with 2 additions and 0 deletions

View File

@ -96,6 +96,7 @@ def main():
layer_skip_varentropy_threshold=args.layer_skip_varentropy_threshold,
refine_on_stop=args.refine_on_stop,
denoise_start=args.denoise_start,
input_prompt_length=args.input_prompt_length,
input_prompt_prefix=args.input_prompt_prefix,
prefix_silence=args.prefix_silence,
cfg_strength=args.cfg_strength,

View File

@ -277,6 +277,7 @@ def do_inference_tts( progress=gr.Progress(track_tqdm=True), *args, **kwargs ):
denoise_start=args.denoise_start,
prefix_silence=args.prefix_silence,
input_prompt_prefix=args.input_prompt_prefix,
input_prompt_length=args.input_prompt_length,
)
with timer("Inferenced in", callback=lambda msg: gr.Info( msg )) as t: