// app.js — Phoenix LiveView entry point (audio recorder / transcription hooks).
// If you want to use Phoenix channels, run `mix help phx.gen.channel`
// to get started and then uncomment the line below.
// import "./user_socket.js"
// You can include dependencies in two ways.
//
// The simplest option is to put them in assets/vendor and
// import them using relative paths:
//
// import "../vendor/some-package.js"
//
// Alternatively, you can `npm install some-package --prefix assets` and import
// them using a path starting with the package name:
//
// import "some-package"
//
// Include phoenix_html to handle method=PUT/DELETE in forms and buttons.
import "phoenix_html"
// Establish Phoenix Socket and LiveView configuration.
import {Socket} from "phoenix"
import {LiveSocket} from "phoenix_live_view"
import topbar from "../vendor/topbar"
// Client-side hooks attached to LiveView elements via phx-hook.
let Hooks = {};

// Convert one Float32 audio frame (samples in [-1, 1]) to 16-bit signed
// little-endian PCM and return it as raw bytes for the WebSocket.
function floatTo16BitPCM(input) {
  const pcm = new Int16Array(input.length);
  for (let i = 0; i < input.length; i++) {
    // Clamp before scaling so out-of-range samples can't wrap around.
    const s = Math.max(-1, Math.min(1, input[i]));
    // Asymmetric scale: int16 range is [-0x8000, 0x7FFF].
    pcm[i] = s < 0 ? s * 0x8000 : s * 0x7FFF;
  }
  return new Uint8Array(pcm.buffer);
}

// Streams microphone audio as raw PCM over a WebSocket and appends the
// transcription text pushed back by the server into #transcription.
Hooks.RecorderHook = {
  mounted() {
    console.log("[RecorderHook] mounted");
    const el = document.getElementById("transcription");
    // Guard: don't crash the hook if the target node is missing.
    if (el) el.innerText = "🎤 Hook activo!";
    this.socket = null;
    this.audioContext = null;
    this.processor = null;
    this.mediaStream = null;
    // LiveView server events drive the recorder lifecycle.
    this.handleEvent("start-recording", () => this.start());
    this.handleEvent("stop-recording", () => this.stop());
  },

  // Open the transcription WebSocket, grab the microphone, and start
  // pushing 4096-sample PCM chunks until stop() is called.
  start() {
    // Already recording — avoid leaking a second socket/mic pipeline.
    if (this.socket) return;
    // TODO(review): host is hard-coded for dev; derive ws/wss URL from
    // window.location before deploying.
    this.socket = new WebSocket("ws://localhost:4000/ws/transcribe");
    this.socket.onopen = () => console.log("✅ WebSocket abierto");
    this.socket.onmessage = (event) => {
      const data = JSON.parse(event.data);
      const el = document.getElementById("transcription");
      if (el) el.innerText += " " + data.text;
    };
    navigator.mediaDevices.getUserMedia({ audio: true }).then(stream => {
      console.log("🎤 Micrófono OK");
      this.audioContext = new AudioContext({ sampleRate: 48000 });
      this.mediaStream = stream;
      const source = this.audioContext.createMediaStreamSource(stream);
      // NOTE: ScriptProcessorNode is deprecated; an AudioWorklet is the
      // modern replacement, but this works in current browsers.
      this.processor = this.audioContext.createScriptProcessor(4096, 1, 1);
      source.connect(this.processor);
      this.processor.connect(this.audioContext.destination);
      this.processor.onaudioprocess = (e) => {
        const bytes = floatTo16BitPCM(e.inputBuffer.getChannelData(0));
        // Re-check the socket: stop() may have nulled/closed it while a
        // frame was still in flight.
        if (this.socket && this.socket.readyState === WebSocket.OPEN) {
          this.socket.send(bytes);
        }
      };
      console.log("⏺️ Grabación iniciada");
    }).catch(err => {
      console.error("❌ Error acceso micrófono", err);
    });
  },

  // Tear down the audio graph, microphone, and socket; safe to call
  // repeatedly or before start().
  stop() {
    if (this.processor) {
      // Detach the callback first so no frames fire during teardown.
      this.processor.onaudioprocess = null;
      this.processor.disconnect();
      this.processor = null;
    }
    if (this.audioContext) {
      this.audioContext.close();
      this.audioContext = null;
    }
    if (this.mediaStream) {
      this.mediaStream.getTracks().forEach(t => t.stop());
      this.mediaStream = null;
    }
    if (this.socket) {
      this.socket.close();
      this.socket = null;
    }
    console.log("🛑 Grabación detenida");
  }
};
// Read the CSRF token the server embedded in the page head so LiveView
// connections are authenticated.
const csrfMeta = document.querySelector("meta[name='csrf-token']");
const csrfToken = csrfMeta.getAttribute("content");

// One LiveView socket for the whole page; falls back to long polling if
// the WebSocket cannot be established within 2.5s.
const liveSocket = new LiveSocket("/live", Socket, {
  hooks: Hooks,
  params: {_csrf_token: csrfToken},
  longPollFallbackMs: 2500,
});
// Show a progress bar on live navigation and form submits.
topbar.config({barColors: {0: "#29d"}, shadowColor: "rgba(0, 0, 0, .3)"});

const showProgressBar = (_info) => topbar.show(300);
const hideProgressBar = (_info) => topbar.hide();
window.addEventListener("phx:page-loading-start", showProgressBar);
window.addEventListener("phx:page-loading-stop", hideProgressBar);

// Connect if there are any LiveViews on the page.
liveSocket.connect();

// Expose liveSocket on window for web console debug logs and latency simulation:
// >> liveSocket.enableDebug()
// >> liveSocket.enableLatencySim(1000) // enabled for duration of browser session
// >> liveSocket.disableLatencySim()
window.liveSocket = liveSocket;