mejora dictado (improve dictation)

2025-07-18 10:50:51 +00:00
parent 92e307db34
commit 5249af7d23
7 changed files with 330 additions and 145 deletions

@@ -65,135 +65,136 @@ defmodule WhisperLiveWeb.Live.Recorder do
</button>
</div>
<div id="status" class="text-sm text-gray-600"></div>
<div id="status" class="hidden"></div>
<div id="transcriptionContainer" class="space-y-2">
<div class="p-2 bg-gray-100 rounded shadow">
<h2 class="text-sm font-semibold text-gray-700 mb-1">🟠 Transcripción en vivo</h2>
<p id="transcription" class="text-orange-600 whitespace-pre-wrap"><%= @transcription %></p>
</div>
<%= if @transcription_m != "" do %>
<div class="p-2 bg-gray-100 rounded shadow">
<h2 class="text-sm font-semibold text-gray-700 mb-1">✅ Transcripción mejorada</h2>
<p class="text-green-600 whitespace-pre-wrap"><%= @transcription_m %></p>
<div id="transcriptionContainer" class="w-full max-w-2xl space-y-4">
<div class="p-4 bg-gray-100 rounded shadow-md">
<h2 class="text-sm font-semibold text-gray-700 mb-2">🟠 Transcripción en vivo</h2>
<p id="transcription" class="text-orange-600 whitespace-pre-wrap break-words text-sm leading-relaxed"><%= @transcription %></p>
</div>
<% end %>
<%= if @transcription_m != "" do %>
<div class="p-4 bg-gray-100 rounded shadow-md">
<h2 class="text-sm font-semibold text-gray-700 mb-2">✅ Transcripción mejorada</h2>
<p class="text-green-600 whitespace-pre-wrap break-words text-sm leading-relaxed"><%= @transcription_m %></p>
</div>
<% end %>
</div>
<script type="module">
import { Socket } from "https://cdn.skypack.dev/phoenix"

const startButton = document.getElementById("startButton")
const stopButton = document.getElementById("stopButton")
const statusDiv = document.getElementById("status")

let socket = null
let channel = null
let audioContext = null
let processor = null
let mediaStream = null
let buffer = []
let sendInterval = null

const sampleRate = 48000
async function startRecording() {
  startButton.disabled = true
  stopButton.disabled = false
  statusDiv.textContent = "🎙 Grabando..."

  socket = new Socket("ws://localhost:4004/socket")
  socket.connect()
  channel = socket.channel("audio:lobby")

  await channel.join()
    .receive("ok", () => {
      console.log("✅ Canal conectado")
      statusDiv.textContent = "✅ Canal conectado"
    })
    .receive("error", () => {
      console.error("❌ Error al conectar canal")
      statusDiv.textContent = "❌ Error canal"
    })
  try {
    audioContext = new AudioContext({ sampleRate })
    mediaStream = await navigator.mediaDevices.getUserMedia({ audio: true })
  } catch (err) {
    console.error("❌ Micrófono error:", err)
    statusDiv.textContent = "❌ Error accediendo al micrófono"
    return
  }

  const source = audioContext.createMediaStreamSource(mediaStream)
  processor = audioContext.createScriptProcessor(4096, 1, 1)
  source.connect(processor)
  processor.connect(audioContext.destination)

  buffer = []
  // Convert each Float32 block from the microphone to 16-bit PCM and buffer it
  processor.onaudioprocess = e => {
    const input = e.inputBuffer.getChannelData(0)
    const pcm = new Int16Array(input.length)
    for (let i = 0; i < input.length; i++) {
      let s = Math.max(-1, Math.min(1, input[i]))
      pcm[i] = s < 0 ? s * 0x8000 : s * 0x7FFF
    }
    buffer.push(pcm)
  }

  // Every 2 s, flatten the buffered PCM, base64-encode it and push it over the channel
  sendInterval = setInterval(() => {
    if (buffer.length === 0) return
    const merged = flattenInt16(buffer)
    buffer = []

    function encodeBase64(uint8Array) {
      let binary = ''
      const len = uint8Array.byteLength
      for (let i = 0; i < len; i++) {
        binary += String.fromCharCode(uint8Array[i])
      }
      return btoa(binary)
    }

    const base64 = encodeBase64(new Uint8Array(merged.buffer))
    channel.push("audio_chunk", { data: base64, sample_rate: sampleRate })
    console.log("📤 Enviado chunk")
  }, 2000)
}
function stopRecording() {
  stopButton.disabled = true
  startButton.disabled = false
  statusDiv.textContent = "🛑 Grabación detenida."

  if (processor) processor.disconnect()
  if (audioContext) audioContext.close()
  if (mediaStream) mediaStream.getTracks().forEach(t => t.stop())
  if (sendInterval) clearInterval(sendInterval)

  if (channel) {
    channel.push("stop_audio")
    setTimeout(() => {
      channel.leave()
      socket.disconnect()
      console.log("🔌 Socket cerrado")
    }, 500)
  }
}
function flattenInt16(buffers) {
  const length = buffers.reduce((acc, b) => acc + b.length, 0)
  const out = new Int16Array(length)
  let offset = 0
  for (const b of buffers) {
    out.set(b, offset)
    offset += b.length
  }
  return out
}

startButton.onclick = startRecording
stopButton.onclick = stopRecording
</script>
</div>
"""