// @ricky0123/vad-web — minified UMD bundle (original file is a single line, ~19 KiB of JavaScript).
/*
 * @ricky0123/vad-web — minified UMD bundle (Voice Activity Detection in the browser
 * using Silero ONNX models run through onnxruntime-web).
 *
 * UMD wrapper: CommonJS -> module.exports, AMD -> define, otherwise attaches
 * `vad` to the global (`self`), taking onnxruntime-web from `self.ort`.
 *
 * NOTE(review): reconstructed from a scraped copy — a trailing "|" artifact was
 * removed and display soft-wraps were rejoined (one fell directly after a
 * `return`, which would have changed behavior via automatic semicolon
 * insertion). All code tokens are otherwise identical to the scraped source;
 * only comments and safe line breaks between webpack modules were added.
 */
!function(e,t){"object"==typeof exports&&"object"==typeof module?module.exports=t(require("onnxruntime-web")):"function"==typeof define&&define.amd?define(["onnxruntime-web"],t):"object"==typeof exports?exports.vad=t(require("onnxruntime-web")):e.vad=t(e.ort)}(self,(e=>(()=>{"use strict";var t={
// 485: baseAssetPath — derives the asset base URL from document.currentScript.src (strips #fragment, ?query, and the script filename); falls back to "/" outside a document context.
485:(e,t)=>{Object.defineProperty(t,"__esModule",{value:!0}),t.baseAssetPath=void 0;const s="undefined"!=typeof window&&void 0!==window.document?window.document.currentScript:null;let r="/";s&&(r=s.src.replace(/#.*$/,"").replace(/\?.*$/,"").replace(/\/[^\/]+$/,"/")),t.baseAssetPath=r},
// 973: defaultModelFetcher — fetch(url) resolved to an ArrayBuffer (the ONNX model bytes).
973:(e,t)=>{Object.defineProperty(t,"__esModule",{value:!0}),t.defaultModelFetcher=void 0,t.defaultModelFetcher=e=>fetch(e).then((e=>e.arrayBuffer()))},
// 362: FrameProcessor — state machine turning per-frame speech probabilities into SpeechStart / SpeechRealStart / SpeechEnd / VADMisfire events, with pre-speech padding and "redemption" frames before ending a segment. Also exports default option sets for the legacy and v5 Silero models, and validateOptions (logs warnings/errors only; does not throw). Helper `a` concatenates Float32Array frames.
362:(e,t,s)=>{Object.defineProperty(t,"__esModule",{value:!0}),t.FrameProcessor=t.validateOptions=t.defaultV5FrameProcessorOptions=t.defaultLegacyFrameProcessorOptions=void 0;const r=s(710),o=s(954),i=[512,1024,1536];t.defaultLegacyFrameProcessorOptions={positiveSpeechThreshold:.5,negativeSpeechThreshold:.35,preSpeechPadFrames:1,redemptionFrames:8,frameSamples:1536,minSpeechFrames:3,submitUserSpeechOnPause:!1},t.defaultV5FrameProcessorOptions={positiveSpeechThreshold:.5,negativeSpeechThreshold:.35,preSpeechPadFrames:3,redemptionFrames:24,frameSamples:512,minSpeechFrames:9,submitUserSpeechOnPause:!1},t.validateOptions=function(e){i.includes(e.frameSamples)||r.log.warn("You are using an unusual frame size"),(e.positiveSpeechThreshold<0||e.positiveSpeechThreshold>1)&&r.log.error("positiveSpeechThreshold should be a number between 0 and 1"),(e.negativeSpeechThreshold<0||e.negativeSpeechThreshold>e.positiveSpeechThreshold)&&r.log.error("negativeSpeechThreshold should be between 0 and positiveSpeechThreshold"),e.preSpeechPadFrames<0&&r.log.error("preSpeechPadFrames should be positive"),e.redemptionFrames<0&&r.log.error("redemptionFrames should be positive")};const a=e=>{const t=e.reduce(((e,t)=>(e.push(e.at(-1)+t.length),e)),[0]),s=new Float32Array(t.at(-1));return e.forEach(((e,r)=>{const o=t[r];s.set(e,o)})),s};t.FrameProcessor=class{constructor(e,t,s){this.modelProcessFunc=e,this.modelResetFunc=t,this.options=s,this.speaking=!1,this.redemptionCounter=0,this.speechFrameCount=0,this.active=!1,this.speechRealStartFired=!1,this.reset=()=>{this.speaking=!1,this.speechRealStartFired=!1,this.audioBuffer=[],this.modelResetFunc(),this.redemptionCounter=0,this.speechFrameCount=0},this.pause=e=>{this.active=!1,this.options.submitUserSpeechOnPause?this.endSegment(e):this.reset()},this.resume=()=>{this.active=!0},this.endSegment=e=>{const t=this.audioBuffer;this.audioBuffer=[];const s=this.speaking;if(this.reset(),s)if(t.reduce(((e,t)=>t.isSpeech?e+1:e),0)>=this.options.minSpeechFrames){const s=a(t.map((e=>e.frame)));e({msg:o.Message.SpeechEnd,audio:s})}else e({msg:o.Message.VADMisfire});return{}},this.process=async(e,t)=>{if(!this.active)return;const s=await this.modelProcessFunc(e),r=s.isSpeech>=this.options.positiveSpeechThreshold;if(t({probs:s,msg:o.Message.FrameProcessed,frame:e}),this.audioBuffer.push({frame:e,isSpeech:r}),r&&(this.speechFrameCount++,this.redemptionCounter=0),r&&!this.speaking&&(this.speaking=!0,t({msg:o.Message.SpeechStart})),this.speaking&&this.speechFrameCount===this.options.minSpeechFrames&&!this.speechRealStartFired&&(this.speechRealStartFired=!0,t({msg:o.Message.SpeechRealStart})),s.isSpeech<this.options.negativeSpeechThreshold&&this.speaking&&++this.redemptionCounter>=this.options.redemptionFrames){this.redemptionCounter=0,this.speechFrameCount=0,this.speaking=!1,this.speechRealStartFired=!1;const e=this.audioBuffer;if(this.audioBuffer=[],e.reduce(((e,t)=>t.isSpeech?e+1:e),0)>=this.options.minSpeechFrames){const s=a(e.map((e=>e.frame)));t({msg:o.Message.SpeechEnd,audio:s})}else t({msg:o.Message.VADMisfire})}if(!this.speaking){for(;this.audioBuffer.length>this.options.preSpeechPadFrames;)this.audioBuffer.shift();this.speechFrameCount=0}},this.audioBuffer=[],this.reset()}}},
// 710: log — console error/debug/warn wrappers prefixed with "[VAD]".
710:(e,t)=>{Object.defineProperty(t,"__esModule",{value:!0}),t.log=t.LOG_PREFIX=void 0,t.LOG_PREFIX="[VAD]";const s=["error","debug","warn"].reduce(((e,s)=>(e[s]=function(e){return(...s)=>{console[e](t.LOG_PREFIX,...s)}}(s),e)),{});t.log=s},
// 954: Message — string enum of VAD event names (compiled TypeScript enum pattern).
954:(e,t)=>{var s;Object.defineProperty(t,"__esModule",{value:!0}),t.Message=void 0,function(e){e.AudioFrame="AUDIO_FRAME",e.SpeechStart="SPEECH_START",e.VADMisfire="VAD_MISFIRE",e.SpeechEnd="SPEECH_END",e.SpeechStop="SPEECH_STOP",e.SpeechRealStart="SPEECH_REAL_START",e.FrameProcessed="FRAME_PROCESSED"}(s||(t.Message=s={}))},
// 650: empty module — presumably a types-only source file compiled away; only sets __esModule.
650:(e,t)=>{Object.defineProperty(t,"__esModule",{value:!0})},
// 559: models index — re-exports SileroLegacy (143) and SileroV5 (508), plus star-export of 650.
559:function(e,t,s){var r=this&&this.__createBinding||(Object.create?function(e,t,s,r){void 0===r&&(r=s);var o=Object.getOwnPropertyDescriptor(t,s);o&&!("get"in o?!t.__esModule:o.writable||o.configurable)||(o={enumerable:!0,get:function(){return t[s]}}),Object.defineProperty(e,r,o)}:function(e,t,s,r){void 0===r&&(r=s),e[r]=t[s]}),o=this&&this.__exportStar||function(e,t){for(var s in e)"default"===s||Object.prototype.hasOwnProperty.call(t,s)||r(t,e,s)};Object.defineProperty(t,"__esModule",{value:!0}),t.SileroV5=t.SileroLegacy=void 0,o(s(650),t);var i=s(143);Object.defineProperty(t,"SileroLegacy",{enumerable:!0,get:function(){return i.SileroLegacy}});var a=s(508);Object.defineProperty(t,"SileroV5",{enumerable:!0,get:function(){return a.SileroV5}})},
// 143: SileroLegacy — wraps an onnxruntime InferenceSession for the legacy Silero model; keeps h/c recurrent state tensors of shape [2,1,64], sr fixed at 16000; process() feeds one frame and returns {notSpeech, isSpeech}.
143:(e,t,s)=>{var r;Object.defineProperty(t,"__esModule",{value:!0}),t.SileroLegacy=void 0;const o=s(710);class i{constructor(e,t,s,r,o){this.ortInstance=e,this._session=t,this._h=s,this._c=r,this._sr=o,this.reset_state=()=>{const e=Array(128).fill(0);this._h=new this.ortInstance.Tensor("float32",e,[2,1,64]),this._c=new this.ortInstance.Tensor("float32",e,[2,1,64])},this.process=async e=>{const t={input:new this.ortInstance.Tensor("float32",e,[1,e.length]),h:this._h,c:this._c,sr:this._sr},s=await this._session.run(t);this._h=s.hn,this._c=s.cn;const[r]=s.output?.data;return{notSpeech:1-r,isSpeech:r}}}}t.SileroLegacy=i,r=i,i.new=async(e,t)=>{o.log.debug("initializing vad");const s=await t(),i=await e.InferenceSession.create(s),a=new e.Tensor("int64",[16000n]),n=Array(128).fill(0),c=new e.Tensor("float32",n,[2,1,64]),h=new e.Tensor("float32",n,[2,1,64]);return o.log.debug("vad is initialized"),new r(e,i,c,h,a)}},
// 508: SileroV5 — same idea as 143 but the v5 model uses a single state tensor of shape [2,1,128] (input "state", output "stateN").
508:(e,t,s)=>{var r;Object.defineProperty(t,"__esModule",{value:!0}),t.SileroV5=void 0;const o=s(710);function i(e){const t=Array(256).fill(0);return new e.Tensor("float32",t,[2,1,128])}class a{constructor(e,t,s,r){this._session=e,this._state=t,this._sr=s,this.ortInstance=r,this.reset_state=()=>{this._state=i(this.ortInstance)},this.process=async e=>{const t={input:new this.ortInstance.Tensor("float32",e,[1,e.length]),state:this._state,sr:this._sr},s=await this._session.run(t);this._state=s.stateN;const[r]=s.output?.data;return{notSpeech:1-r,isSpeech:r}}}}t.SileroV5=a,r=a,a.new=async(e,t)=>{o.log.debug("Loading VAD...");const s=await t(),a=await e.InferenceSession.create(s),n=new e.Tensor("int64",[16000n]),c=i(e);return o.log.debug("...finished loading VAD"),new r(a,c,n,e)}},
// 202: NonRealTimeVAD — offline VAD over a whole audio buffer. run(audio, sampleRate) is an async generator yielding {audio, start, end}; start/end are in ms (frameIndex * frameSamples / 16, i.e. 16 samples per ms at the 16 kHz target rate). Always uses the legacy model. NOTE(review): events accumulated in `n` are never cleared between frames, so earlier events are re-scanned each iteration — harmless for output (SpeechEnd yields once per event in order) but worth confirming upstream.
202:function(e,t,s){var r=this&&this.__createBinding||(Object.create?function(e,t,s,r){void 0===r&&(r=s);var o=Object.getOwnPropertyDescriptor(t,s);o&&!("get"in o?!t.__esModule:o.writable||o.configurable)||(o={enumerable:!0,get:function(){return t[s]}}),Object.defineProperty(e,r,o)}:function(e,t,s,r){void 0===r&&(r=s),e[r]=t[s]}),o=this&&this.__setModuleDefault||(Object.create?function(e,t){Object.defineProperty(e,"default",{enumerable:!0,value:t})}:function(e,t){e.default=t}),i=this&&this.__importStar||function(e){if(e&&e.__esModule)return e;var t={};if(null!=e)for(var s in e)"default"!==s&&Object.prototype.hasOwnProperty.call(e,s)&&r(t,e,s);return o(t,e),t};Object.defineProperty(t,"__esModule",{value:!0}),t.NonRealTimeVAD=t.defaultNonRealTimeVADOptions=void 0;const a=i(s(656)),n=s(485),c=s(973),h=s(362),d=s(954),u=s(559),l=s(825);t.defaultNonRealTimeVADOptions={...h.defaultLegacyFrameProcessorOptions,ortConfig:void 0,modelURL:n.baseAssetPath+"silero_vad_legacy.onnx",modelFetcher:c.defaultModelFetcher},t.NonRealTimeVAD=class{static async new(e={}){const s={...t.defaultNonRealTimeVADOptions,...e};(0,h.validateOptions)(s),void 0!==s.ortConfig&&s.ortConfig(a);const r=()=>s.modelFetcher(s.modelURL),o=await u.SileroLegacy.new(a,r),i=new h.FrameProcessor(o.process,o.reset_state,{frameSamples:s.frameSamples,positiveSpeechThreshold:s.positiveSpeechThreshold,negativeSpeechThreshold:s.negativeSpeechThreshold,redemptionFrames:s.redemptionFrames,preSpeechPadFrames:s.preSpeechPadFrames,minSpeechFrames:s.minSpeechFrames,submitUserSpeechOnPause:s.submitUserSpeechOnPause});return i.resume(),new this(r,a,s,i)}constructor(e,t,s,r){this.modelFetcher=e,this.ort=t,this.options=s,this.frameProcessor=r}async*run(e,t){const s={nativeSampleRate:t,targetSampleRate:16e3,targetFrameSize:this.options.frameSamples},r=new l.Resampler(s);let o=0,i=0,a=0,n=[];for await(const t of r.stream(e)){await this.frameProcessor.process(t,(e=>{n.push(e)}));for(const e of n)switch(e.msg){case d.Message.SpeechStart:o=a*this.options.frameSamples/16;break;case d.Message.SpeechEnd:i=(a+1)*this.options.frameSamples/16,yield{audio:e.audio,start:o,end:i}}a++}const{msg:c,audio:h}=this.frameProcessor.endSegment((e=>{n.push(e)}));for(const e of n)e.msg===d.Message.SpeechEnd&&(yield{audio:e.audio,start:o,end:a*this.options.frameSamples/16})}}},
// 746: real-time VAD. MicVAD (class p) opens getUserMedia with echoCancellation/autoGainControl/noiseSuppression and feeds an AudioNodeVAD (class f). AudioNodeVAD loads the model (v5 or legacy) from baseAssetPath, then prefers an AudioWorklet ("vad-helper-worklet" from vad.worklet.bundle.min.js); on failure it falls back to a 4096-sample ScriptProcessor routed through a zero-gain node (keeps the graph pulled without audible output) plus the JS Resampler. Frame events are dispatched to the user's onSpeechStart/onSpeechEnd/etc. callbacks.
746:function(e,t,s){var r=this&&this.__createBinding||(Object.create?function(e,t,s,r){void 0===r&&(r=s);var o=Object.getOwnPropertyDescriptor(t,s);o&&!("get"in o?!t.__esModule:o.writable||o.configurable)||(o={enumerable:!0,get:function(){return t[s]}}),Object.defineProperty(e,r,o)}:function(e,t,s,r){void 0===r&&(r=s),e[r]=t[s]}),o=this&&this.__setModuleDefault||(Object.create?function(e,t){Object.defineProperty(e,"default",{enumerable:!0,value:t})}:function(e,t){e.default=t}),i=this&&this.__importStar||function(e){if(e&&e.__esModule)return e;var t={};if(null!=e)for(var s in e)"default"!==s&&Object.prototype.hasOwnProperty.call(e,s)&&r(t,e,s);return o(t,e),t};Object.defineProperty(t,"__esModule",{value:!0}),t.AudioNodeVAD=t.MicVAD=t.getDefaultRealTimeVADOptions=t.ort=t.DEFAULT_MODEL=void 0;const a=i(s(656)),n=s(973),c=s(362),h=s(710),d=s(954),u=s(559),l=s(825);t.DEFAULT_MODEL="legacy",t.ort=a,t.getDefaultRealTimeVADOptions=e=>({..."v5"===e?c.defaultV5FrameProcessorOptions:c.defaultLegacyFrameProcessorOptions,onFrameProcessed:(e,t)=>{},onVADMisfire:()=>{h.log.debug("VAD misfire")},onSpeechStart:()=>{h.log.debug("Detected speech start")},onSpeechEnd:()=>{h.log.debug("Detected speech end")},onSpeechRealStart:()=>{h.log.debug("Detected real speech start")},baseAssetPath:"https://cdn.jsdelivr.net/npm/@ricky0123/vad-web@latest/dist/",onnxWASMBasePath:"https://cdn.jsdelivr.net/npm/onnxruntime-web@1.14.0/dist/",stream:void 0,ortConfig:void 0,model:e,workletOptions:{}});class p{static async new(e={}){const s={...(0,t.getDefaultRealTimeVADOptions)(e.model??t.DEFAULT_MODEL),...e};let r;(0,c.validateOptions)(s),r=void 0===s.stream?await navigator.mediaDevices.getUserMedia({audio:{...s.additionalAudioConstraints,channelCount:1,echoCancellation:!0,autoGainControl:!0,noiseSuppression:!0}}):s.stream;const o=new AudioContext,i=new MediaStreamAudioSourceNode(o,{mediaStream:r}),a=await f.new(o,s);return a.receive(i),new p(s,o,r,a,i)}constructor(e,t,s,r,o,i=!1){this.options=e,this.audioContext=t,this.stream=s,this.audioNodeVAD=r,this.sourceNode=o,this.listening=i,this.pause=()=>{this.audioNodeVAD.pause(),this.listening=!1},this.start=()=>{this.audioNodeVAD.start(),this.listening=!0},this.destroy=()=>{this.listening&&this.pause(),void 0===this.options.stream&&this.stream.getTracks().forEach((e=>e.stop())),this.sourceNode.disconnect(),this.audioNodeVAD.destroy(),this.audioContext.close()},this.setOptions=e=>{this.audioNodeVAD.setFrameProcessorOptions(e)}}}t.MicVAD=p;class f{static async new(e,s={}){const r={...(0,t.getDefaultRealTimeVADOptions)(s.model??t.DEFAULT_MODEL),...s};(0,c.validateOptions)(r),t.ort.env.wasm.wasmPaths=r.onnxWASMBasePath,void 0!==r.ortConfig&&r.ortConfig(t.ort);const o="v5"===r.model?"silero_vad_v5.onnx":"silero_vad_legacy.onnx",i=r.baseAssetPath+o,a="v5"===r.model?u.SileroV5.new:u.SileroLegacy.new;let h;try{h=await a(t.ort,(()=>(0,n.defaultModelFetcher)(i)))}catch(e){throw console.error(`Encountered an error while loading model file ${i}`),e}const d=new c.FrameProcessor(h.process,h.reset_state,{frameSamples:r.frameSamples,positiveSpeechThreshold:r.positiveSpeechThreshold,negativeSpeechThreshold:r.negativeSpeechThreshold,redemptionFrames:r.redemptionFrames,preSpeechPadFrames:r.preSpeechPadFrames,minSpeechFrames:r.minSpeechFrames,submitUserSpeechOnPause:r.submitUserSpeechOnPause}),l=new f(e,r,d);return await l.setupAudioNode(),l}constructor(e,t,s){this.ctx=e,this.options=t,this.bufferIndex=0,this.pause=()=>{this.frameProcessor.pause(this.handleFrameProcessorEvent)},this.start=()=>{this.frameProcessor.resume()},this.receive=e=>{e.connect(this.audioNode)},this.processFrame=async e=>{await this.frameProcessor.process(e,this.handleFrameProcessorEvent)},this.handleFrameProcessorEvent=e=>{switch(e.msg){case d.Message.FrameProcessed:this.options.onFrameProcessed(e.probs,e.frame);break;case d.Message.SpeechStart:this.options.onSpeechStart();break;case d.Message.SpeechRealStart:this.options.onSpeechRealStart();break;case d.Message.VADMisfire:this.options.onVADMisfire();break;case d.Message.SpeechEnd:this.options.onSpeechEnd(e.audio)}},this.destroy=()=>{this.audioNode instanceof AudioWorkletNode&&this.audioNode.port.postMessage({message:d.Message.SpeechStop}),this.audioNode.disconnect(),this.gainNode?.disconnect()},this.setFrameProcessorOptions=e=>{this.frameProcessor.options={...this.frameProcessor.options,...e}},this.frameProcessor=s}async setupAudioNode(){if("audioWorklet"in this.ctx&&"function"==typeof AudioWorkletNode)try{const e=this.options.baseAssetPath+"vad.worklet.bundle.min.js";await this.ctx.audioWorklet.addModule(e);const t=this.options.workletOptions??{};return t.processorOptions={...t.processorOptions??{},frameSamples:this.options.frameSamples},this.audioNode=new AudioWorkletNode(this.ctx,"vad-helper-worklet",t),void(this.audioNode.port.onmessage=async e=>{if(e.data?.message===d.Message.AudioFrame){let t=e.data.data;t instanceof ArrayBuffer||(t=new ArrayBuffer(e.data.data.byteLength),new Uint8Array(t).set(new Uint8Array(e.data.data)));const s=new Float32Array(t);await this.processFrame(s)}})}catch(e){console.log("AudioWorklet setup failed, falling back to ScriptProcessor",e)}this.resampler=new l.Resampler({nativeSampleRate:this.ctx.sampleRate,targetSampleRate:16e3,targetFrameSize:this.options.frameSamples??480}),this.audioNode=this.ctx.createScriptProcessor(4096,1,1),this.gainNode=this.ctx.createGain(),this.gainNode.gain.value=0;let e=!1;this.audioNode.onaudioprocess=async t=>{if(!e){e=!0;try{const e=t.inputBuffer.getChannelData(0);if(t.outputBuffer.getChannelData(0).fill(0),this.resampler){const t=this.resampler.process(e);for(const e of t)await this.processFrame(e)}}catch(e){console.error("Error processing audio:",e)}finally{e=!1}}},this.audioNode.connect(this.gainNode),this.gainNode.connect(this.ctx.destination)}}t.AudioNodeVAD=f},
// 825: Resampler — buffers input samples and emits fixed-size 16 kHz frames by averaging each run of native-rate samples that maps onto one target sample (downsampling only; errors if nativeSampleRate < 16000). process() returns completed frames; stream() yields them from an iterable.
825:(e,t,s)=>{Object.defineProperty(t,"__esModule",{value:!0}),t.Resampler=void 0;const r=s(710);t.Resampler=class{constructor(e){this.options=e,this.process=e=>{const t=[];for(const s of e)for(this.inputBuffer.push(s);this.hasEnoughDataForFrame();){const e=this.generateOutputFrame();t.push(e)}return t},e.nativeSampleRate<16e3&&r.log.error("nativeSampleRate is too low. Should have 16000 = targetSampleRate <= nativeSampleRate"),this.inputBuffer=[]}async*stream(e){for(const t of e)for(this.inputBuffer.push(t);this.hasEnoughDataForFrame();){const e=this.generateOutputFrame();yield e}}hasEnoughDataForFrame(){return this.inputBuffer.length*this.options.targetSampleRate/this.options.nativeSampleRate>=this.options.targetFrameSize}generateOutputFrame(){const e=new Float32Array(this.options.targetFrameSize);let t=0,s=0;for(;t<this.options.targetFrameSize;){let r=0,o=0;for(;s<Math.min(this.inputBuffer.length,(t+1)*this.options.nativeSampleRate/this.options.targetSampleRate);){const e=this.inputBuffer[s];void 0!==e&&(r+=e,o++),s++}e[t]=r/o,t++}return this.inputBuffer=this.inputBuffer.slice(s),e}}},
// 787: utils — minFramesForTargetMS (frames needed to cover a duration in ms), arrayBufferToBase64, encodeWAV (44-byte RIFF header; format 1 = 16-bit PCM, otherwise 32-bit float), and audioFileToArray (decode a File via OfflineAudioContext/FileReader and mix channels by summation).
787:(e,t)=>{function s(e,t,s){for(var r=0;r<s.length;r++)e.setUint8(t+r,s.charCodeAt(r))}Object.defineProperty(t,"__esModule",{value:!0}),t.audioFileToArray=t.encodeWAV=t.arrayBufferToBase64=t.minFramesForTargetMS=void 0,t.minFramesForTargetMS=function(e,t,s=16e3){return Math.ceil(e*s/1e3/t)},t.arrayBufferToBase64=function(e){const t=new Uint8Array(e),s=t.byteLength,r=new Array(s);for(var o=0;o<s;o++){const e=t[o];if(void 0===e)break;r[o]=String.fromCharCode(e)}return btoa(r.join(""))},t.encodeWAV=function(e,t=3,r=16e3,o=1,i=32){var a=i/8,n=o*a,c=new ArrayBuffer(44+e.length*a),h=new DataView(c);return s(h,0,"RIFF"),h.setUint32(4,36+e.length*a,!0),s(h,8,"WAVE"),s(h,12,"fmt "),h.setUint32(16,16,!0),h.setUint16(20,t,!0),h.setUint16(22,o,!0),h.setUint32(24,r,!0),h.setUint32(28,r*n,!0),h.setUint16(32,n,!0),h.setUint16(34,i,!0),s(h,36,"data"),h.setUint32(40,e.length*a,!0),1===t?function(e,t,s){for(var r=0;r<s.length;r++,t+=2){var o=Math.max(-1,Math.min(1,s[r]));e.setInt16(t,o<0?32768*o:32767*o,!0)}}(h,44,e):function(e,t,s){for(var r=0;r<s.length;r++,t+=4)e.setFloat32(t,s[r],!0)}(h,44,e),c},t.audioFileToArray=async function(e){const t=new OfflineAudioContext(1,1,44100),s=new FileReader;let r=null;if(await new Promise((o=>{s.addEventListener("loadend",(e=>{const i=s.result;t.decodeAudioData(i,(e=>{r=e,t.startRendering().then((e=>{console.log("Rendering completed successfully"),o()})).catch((e=>{console.error(`Rendering failed: ${e}`)}))}),(e=>{console.log(`Error with decoding audio data: ${e}`)}))})),s.readAsArrayBuffer(e)})),null===r)throw Error("some shit");let o=r,i=new Float32Array(o.length);for(let e=0;e<o.length;e++)for(let t=0;t<o.numberOfChannels;t++)i[e]+=o.getChannelData(t)[e];return{audio:i,sampleRate:o.sampleRate}}},
// 656: external "onnxruntime-web" — re-exports the ort instance passed into the UMD factory.
656:t=>{t.exports=e}
// Webpack runtime: module cache + __webpack_require__ (r), then the entry module wiring the public exports.
};var s={};function r(e){var o=s[e];if(void 0!==o)return o.exports;var i=s[e]={exports:{}};return t[e].call(i.exports,i,i.exports,r),i.exports}var o={};return(()=>{var e=o;Object.defineProperty(e,"__esModule",{value:!0}),e.getDefaultRealTimeVADOptions=e.MicVAD=e.DEFAULT_MODEL=e.AudioNodeVAD=e.utils=e.NonRealTimeVAD=e.Message=e.FrameProcessor=e.defaultModelFetcher=e.baseAssetPath=void 0;var t=r(485);Object.defineProperty(e,"baseAssetPath",{enumerable:!0,get:function(){return t.baseAssetPath}});var s=r(973);Object.defineProperty(e,"defaultModelFetcher",{enumerable:!0,get:function(){return s.defaultModelFetcher}});var i=r(362);Object.defineProperty(e,"FrameProcessor",{enumerable:!0,get:function(){return i.FrameProcessor}});var a=r(954);Object.defineProperty(e,"Message",{enumerable:!0,get:function(){return a.Message}});var n=r(202);Object.defineProperty(e,"NonRealTimeVAD",{enumerable:!0,get:function(){return n.NonRealTimeVAD}});const c=r(787);e.utils={audioFileToArray:c.audioFileToArray,minFramesForTargetMS:c.minFramesForTargetMS,arrayBufferToBase64:c.arrayBufferToBase64,encodeWAV:c.encodeWAV};var h=r(746);Object.defineProperty(e,"AudioNodeVAD",{enumerable:!0,get:function(){return h.AudioNodeVAD}}),Object.defineProperty(e,"DEFAULT_MODEL",{enumerable:!0,get:function(){return h.DEFAULT_MODEL}}),Object.defineProperty(e,"MicVAD",{enumerable:!0,get:function(){return h.MicVAD}}),Object.defineProperty(e,"getDefaultRealTimeVADOptions",{enumerable:!0,get:function(){return h.getDefaultRealTimeVADOptions}})})(),o})()));