defmodule WhisperServer.Large do
  use Supervisor

  def start_link(_opts) do
    Supervisor.start_link(__MODULE__, [], name: __MODULE__)
  end

  @impl true
  def init(_opts) do
    model_name = "openai/whisper-large-v3"

    # Load the model, featurizer, tokenizer, and generation config from Hugging Face.
    {:ok, model} = Bumblebee.load_model({:hf, model_name})
    {:ok, featurizer} = Bumblebee.load_featurizer({:hf, model_name})
    {:ok, tokenizer} = Bumblebee.load_tokenizer({:hf, model_name})
    {:ok, generation_config} = Bumblebee.load_generation_config({:hf, model_name})

    # Build a speech-to-text serving: 30-second chunks, Spanish transcription,
    # compiled with EXLA against the CUDA client.
    serving =
      Bumblebee.Audio.speech_to_text_whisper(
        model,
        featurizer,
        tokenizer,
        generation_config,
        chunk_num_seconds: 30,
        language: "es",
        defn_options: [compiler: EXLA, client: :cuda]
      )

    # Run the serving as a supervised, named process; requests are batched
    # one at a time with a 5-second batch timeout.
    children = [
      {Nx.Serving,
       serving: serving,
       name: __MODULE__.Serving,
       batch_size: 1,
       batch_timeout: 5000}
    ]

    Supervisor.init(children, strategy: :one_for_one)
  end
end
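
# For reference, a minimal usage sketch once this supervisor is running (for
# example, after adding WhisperServer.Large to your application's supervision
# tree). The audio path below is a placeholder; Bumblebee decodes files via
# ffmpeg, so any format ffmpeg can read should work.
#
#   Nx.Serving.batched_run(WhisperServer.Large.Serving, {:file, "/path/to/audio.mp3"})
#   #=> %{chunks: [%{text: "..."} | _]}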