alan committed
Commit · 6a925b7
1 Parent(s): 2ec60c2
zerogpu test
app.py CHANGED

@@ -6,6 +6,7 @@ import gradio as gr
 import numpy as np
 import torch
 import torchaudio
+import spaces
 from transformers import AutoProcessor, SeamlessM4TModel
 
 from lang_list import (
@@ -39,10 +40,13 @@ DEFAULT_TARGET_LANGUAGE = "French"
 
 device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
 
-processor = AutoProcessor.from_pretrained("ylacombe/hf-seamless-m4t-large")
-model = SeamlessM4TModel.from_pretrained("ylacombe/hf-seamless-m4t-large").to(device)
+# processor = AutoProcessor.from_pretrained("ylacombe/hf-seamless-m4t-large")
+# model = SeamlessM4TModel.from_pretrained("ylacombe/hf-seamless-m4t-large").to(device)
+processor = AutoProcessor.from_pretrained("facebook/hf-seamless-m4t-medium")
+model = SeamlessM4TModel.from_pretrained("facebook/hf-seamless-m4t-medium").to(device)
 
 
+@spaces.GPU
 def predict(
 task_name: str,
 audio_source: str,
@@ -430,7 +434,8 @@ with gr.Blocks(css="style.css") as demo:
 outputs=[output_audio, output_text],
 api_name="run",
 )
-demo.queue(max_size=50).launch()
+demo.launch()
+# demo.queue(max_size=50).launch()
 
 # Linking models to the space
 # 'facebook/seamless-m4t-large'
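For context on what this "zerogpu test" exercises: on a ZeroGPU Space, importing the spaces package and decorating the GPU-bound function with @spaces.GPU attaches a GPU only while that function runs. Below is a minimal sketch of the same pattern; the translate() function and the text-to-text task are illustrative stand-ins, not the Space's actual predict(), while the facebook/hf-seamless-m4t-medium checkpoint and the decorator usage are taken from the diff above.

# Minimal ZeroGPU sketch (illustrative; assumes a ZeroGPU Space with spaces, gradio,
# torch, and transformers installed).
import spaces                     # ZeroGPU helper, imported as in the commit
import torch
import gradio as gr
from transformers import AutoProcessor, SeamlessM4TModel

device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
processor = AutoProcessor.from_pretrained("facebook/hf-seamless-m4t-medium")
model = SeamlessM4TModel.from_pretrained("facebook/hf-seamless-m4t-medium").to(device)

@spaces.GPU  # a GPU is attached only for the duration of this call
def translate(text: str, tgt_lang: str = "fra") -> str:
    # Text-to-text translation, following the SeamlessM4T usage example.
    inputs = processor(text=text, src_lang="eng", return_tensors="pt").to(device)
    tokens = model.generate(**inputs, tgt_lang=tgt_lang, generate_speech=False)
    return processor.decode(tokens[0].tolist()[0], skip_special_tokens=True)

demo = gr.Interface(fn=translate, inputs=["text", "text"], outputs="text")
demo.launch()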