added override for large training plots

parent 3718e9d0fb
commit 6d7e143f53
@@ -991,6 +991,13 @@ def run_training(config_path, verbose=False, gpus=1, keep_x_past_datasets=0, pro
     if training_state and training_state.process:
         return "Training already in progress"
 
+    try:
+        import altair as alt
+        alt.data_transformers.enable('default', max_rows=None)
+    except Exception as e:
+        print(e)
+        pass
+
     # ensure we have the dvae.pth
     get_model_path('dvae.pth')
 
@@ -1043,7 +1050,7 @@ def reconnect_training(verbose=False, progress=gr.Progress(track_tqdm=True)):
         return "Training not in progress"
 
     for line in iter(training_state.process.stdout.readline, ""):
-        result, percent, message = training_state.parse( line=line, verbose=verbose, keep_x_past_datasets=keep_x_past_datasets, progress=progress )
+        result, percent, message = training_state.parse( line=line, verbose=verbose, progress=progress )
         print(f"[Training] [{datetime.now().isoformat()}] {line[:-1]}")
         if result:
             yield result
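
For context on the override added in the first hunk: altair's default data transformer refuses to serialize a chart whose data exceeds 5,000 rows and raises MaxRowsError, a limit that training-metric plots with many steps easily hit; passing max_rows=None lifts that cap. Below is a minimal, self-contained sketch of the behaviour, not code from this repository; the dataframe and column names are invented for illustration.

# Standalone illustration of the altair row-limit override; the dataframe
# below is invented for this example and is not part of the repository.
import altair as alt
import pandas as pd

# Lift altair's default 5,000-row cap so large charts serialize instead of
# raising MaxRowsError.
alt.data_transformers.enable('default', max_rows=None)

df = pd.DataFrame({
    "step": range(20000),
    "loss": [1.0 / (i + 1) for i in range(20000)],
})

chart = alt.Chart(df).mark_line().encode(x="step:Q", y="loss:Q")
spec = chart.to_json()  # would raise MaxRowsError past 5,000 rows without the override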
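
The reconnect loop in the second hunk streams the trainer's stdout with the standard iter(readline, "") idiom: readline() returns an empty string only at EOF, so the loop runs until the child process closes its output. A generic sketch of that pattern, assuming nothing about this repository's training state object; the spawned command is only a stand-in.

# Generic sketch of line-by-line subprocess streaming, as used by the
# reconnect loop above; the child command is a stand-in for the trainer.
import subprocess
from datetime import datetime

process = subprocess.Popen(
    ["python", "-u", "-c", "print('step 1'); print('step 2')"],
    stdout=subprocess.PIPE,
    stderr=subprocess.STDOUT,
    universal_newlines=True,  # text mode, so readline() yields str and "" at EOF
)

# iter(callable, sentinel) keeps calling readline() until it returns "" (EOF).
for line in iter(process.stdout.readline, ""):
    print(f"[Training] [{datetime.now().isoformat()}] {line[:-1]}")

process.wait()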