sdk interim state

2025-10-05 12:55:59 +00:00
parent 33cab69410
commit 2be72187ec
9 changed files with 468 additions and 492 deletions

View File

@@ -21,7 +21,6 @@ meta:
     - type: simpleList
       mediaQuery: "(max-width:599px)"
       primaryText: status
-      secondaryText: request.email
     # Desktop
     - type: table
@@ -32,21 +31,7 @@ meta:
           de: Status
           en: Status
         filter: true
-      - source: request.email
-        label:
-          de: E-Mail
-          en: E-Mail
-        filter: true
-      - source: request.name
-        label:
-          de: Name
-          en: Name
-        filter: true
-      - source: request.description
-        label:
-          de: Beschreibung
-          en: Description
-        filter: true
       - source: insertTime
         label:
           de: Erstellt am
@@ -138,48 +123,15 @@ fields:
           id: inProgress
         - name: abgeschlossen
           id: done
   - name: request
     type: object
     meta:
       label:
         de: Anfrage
         en: Request
-      subFields:
-        - name: email
-          type: string
-          meta:
-            label:
-              de: E-Mail
-              en: E-Mail
-            containerProps:
-              layout:
-                size:
-                  default: "col-6"
-                  small: "col-12"
-                  large: "col-6"
-        - name: name
-          type: string
-          meta:
-            label:
-              de: Name
-              en: Name
-            containerProps:
-              layout:
-                size:
-                  default: "col-6"
-                  small: "col-12"
-                  large: "col-6"
-        - name: description
-          type: string
-          meta:
-            label:
-              de: Beschreibung
-              en: Description
-            inputProps:
-              multiline: true
+      widget: jsonField
 indexes:
   - name: fulltext # Ein eindeutiger Name für den Index. Es ist optional, wird jedoch empfohlen, um den Index später leicht identifizieren zu können.
     key: # Bestimmt, auf welche Felder der Index angewendet werden soll. Dies kann ein einfacher String sein, wenn der Index nur ein Feld umfasst, oder ein Array von Strings, wenn der Index mehrere Felder umfasst.

View File

@@ -1,10 +1,10 @@
 const apiSsrBaseURL = "http://localhost:8080/api/v1/_/bkdf_tibi_2024/"
 const { frontendBase, tibiUrl } = require("./config-client")
 module.exports = {
-    operatorEmail: "info@binkrassdufass.de",
-    operatorName: "BinKrassDuFass",
-    contactEmail: "support@binkrassdufass.de",
-    noReplyEmail: "noreply@binkrassdufass.de",
+    operatorEmail: "about@kontextwerk.info",
+    operatorName: "KontextWerk",
+    contactEmail: "about@kontextwerk.info",
+    noReplyEmail: "about@kontextwerk.info",
     frontendBase,
     apiBase: frontendBase + "/api/",
     tibiUrl,

View File

@@ -1,8 +1,3 @@
 const { cryptchaCheck } = require("../lib/utils")
 ;(function () {
-    throw {
-        status: 500,
-        data: "Hello, World!",
-    }
-    cryptchaCheck()
 })()

View File

@@ -3,7 +3,7 @@ const { noReplyEmail, contactEmail } = require("../config")
 context.smtp.sendMail({
     to: contactEmail,
     from: noReplyEmail,
-    fromName: "BinKrassDuFass",
+    fromName: "Kontextwerk",
     replyTo: noReplyEmail,
     subject: "New Contact Request",
     plain: "Neue Kontaktanfrage",

View File

@@ -1,10 +1,9 @@
<script lang="ts"> <script lang="ts">
import { onDestroy } from "svelte" import { onMount } from "svelte"
import { mdiBookAccountOutline, mdiCreation, mdiFaceAgent, mdiHours24 } from "@mdi/js" import { mdiBookAccountOutline, mdiCreation, mdiFaceAgent, mdiHours24 } from "@mdi/js"
import ProductCategoryFrame from "../widgets/ProductCategoryFrame.svelte" import ProductCategoryFrame from "../widgets/ProductCategoryFrame.svelte"
import CrinkledSection from "../CrinkledSection.svelte" import CrinkledSection from "../CrinkledSection.svelte"
import { base64ToUint8, createPlayer, createRecorder, SAMPLE_RATE, uint8ToBase64 } from "../voicebotDemo/helper" import { createVoicebotPreviewController } from "./voicebotPreviewController"
import { RealtimeServerEvent as RSE } from "../voicebotDemo/events"
const voiceProperties: Array<{ title: string; icon: string; color: string }> = [ const voiceProperties: Array<{ title: string; icon: string; color: string }> = [
{ {
@@ -29,300 +28,18 @@
         },
     ]
-    const browser = typeof window !== "undefined"
-    const VOICE_WS_URL =
-        browser && window.location.protocol === "http:"
-            ? "ws://2svoice-server.kontextwerk.info/api/v1/voicebot/ws"
-            : "wss://2svoice-server.kontextwerk.info/api/v1/voicebot/ws"
-    const CHUNK_DURATION_MS = 200
-    const CHUNK_SIZE_BYTES = Math.round((SAMPLE_RATE * CHUNK_DURATION_MS) / 1000) * 2
-    type VoiceStatus = "idle" | "connecting" | "connected" | "error"
-    let status: VoiceStatus = "idle"
-    let errorMessage = ""
-    let ws: WebSocket | null = null
-    let recorder: ReturnType<typeof createRecorder> | null = null
-    let player: ReturnType<typeof createPlayer> | null = null
-    let outboundBuffer = new Uint8Array(0)
-    let closing = false
-    let cleanupPromise: Promise<void> | null = null
-    let startPromise: Promise<void> | null = null
-    $: statusHint =
-        status === "idle"
-            ? "Tippen, um die Voice-Demo zu starten"
-            : status === "connecting"
-              ? "Verbindung wird aufgebaut …"
-              : status === "connected"
-                ? "Live sprechen Sie jetzt"
-                : errorMessage || "Verbindung fehlgeschlagen"
-    const toggleVoiceDemo = async () => {
-        if (status === "connecting") return
-        if (status === "connected") {
-            await stopVoiceDemo()
-            return
-        }
-        await startVoiceDemo()
-    }
+    const controller = createVoicebotPreviewController()
+    const { status, statusHint, toggle, setup, teardown } = controller
     const handleKeydown = (event: KeyboardEvent) => {
         if (event.key !== "Enter" && event.key !== " ") return
         event.preventDefault()
-        void toggleVoiceDemo()
+        void toggle()
     }
-    const startVoiceDemo = async () => {
-        if (!browser) {
-            status = "error"
-            errorMessage = "Die Sprach-Demo steht nur im Browser zur Verfügung."
-            return
-        }
-        if (startPromise || status === "connecting" || status === "connected") return
-        startPromise = (async () => {
-            await stopVoiceDemo({ resetStatus: false })
-            status = "connecting"
-            errorMessage = ""
-            outboundBuffer = new Uint8Array(0)
-            closing = false
-            try {
-                const newPlayer = createPlayer()
-                await newPlayer.init()
-                player = newPlayer
-                const handleChunk = (pcm: Int16Array) => {
-                    if (pcm.length === 0) return
-                    const bytes = new Uint8Array(pcm.byteLength)
-                    bytes.set(new Uint8Array(pcm.buffer, pcm.byteOffset, pcm.byteLength))
-                    appendToOutboundBuffer(bytes)
-                }
-                const newRecorder = createRecorder(handleChunk)
-                await newRecorder.start()
-                recorder = newRecorder
-            } catch (err) {
-                const message = extractErrorMessage(err, "Mikrofon konnte nicht gestartet werden.")
-                handleConnectionError(message, err)
-                return
-            }
-            try {
-                ws = new WebSocket(VOICE_WS_URL)
-            } catch (err) {
-                const message = extractErrorMessage(err, "WebSocket-Verbindung konnte nicht aufgebaut werden.")
-                handleConnectionError(message, err)
-                return
-            }
-            if (!ws) return
-            ws.onopen = () => {
-                status = "connected"
-                flushOutboundBuffer(true)
-            }
-            ws.onmessage = (event) => handleServerMessage(event)
-            ws.onerror = (event) => {
-                handleConnectionError("WebSocket-Fehler bitte später erneut versuchen.", event)
-            }
-            ws.onclose = () => {
-                if (!closing && status === "connected") {
-                    status = "idle"
-                    errorMessage = ""
-                }
-            }
-        })()
-        try {
-            await startPromise
-        } finally {
-            startPromise = null
-        }
-    }
-    const stopVoiceDemo = async ({ resetStatus = true }: { resetStatus?: boolean } = {}) => {
-        if (cleanupPromise) {
-            await cleanupPromise
-            if (resetStatus && status !== "error") {
-                status = "idle"
-                errorMessage = ""
-            }
-            return
-        }
-        closing = true
-        cleanupPromise = (async () => {
-            try {
-                flushOutboundBuffer(true)
-            } catch {
-                /* ignore */
-            }
-            if (recorder) {
-                try {
-                    await recorder.stop()
-                } catch {
-                    /* ignore */
-                }
-            }
-            recorder = null
-            if (player) {
-                try {
-                    player.stop()
-                    await player.destroy()
-                } catch {
-                    /* ignore */
-                }
-            }
-            player = null
-            if (ws && ws.readyState === WebSocket.OPEN) {
-                try {
-                    ws.close(1000, "client-stop")
-                } catch {
-                    /* ignore */
-                }
-            }
-            ws = null
-            outboundBuffer = new Uint8Array(0)
-        })()
-        try {
-            await cleanupPromise
-        } finally {
-            cleanupPromise = null
-            closing = false
-            if (resetStatus && status !== "error") {
-                status = "idle"
-                errorMessage = ""
-            }
-        }
-    }
-    const handleServerMessage = (event: MessageEvent) => {
-        if (!player) return
-        let payload: unknown = event.data
-        if (typeof payload !== "string") return
-        try {
-            payload = JSON.parse(payload)
-        } catch (err) {
-            console.warn("VoiceBot Preview: Konnte Nachricht nicht parsen.", err)
-            return
-        }
-        const message = payload as Record<string, unknown>
-        const type = typeof message.type === "string" ? message.type : "<unbekannt>"
-        if (type === RSE.INPUT_AUDIO_BUFFER_SPEECH_STARTED) {
-            const { item_id, played_ms } = player.getNowPlaying()
-            if (item_id) {
-                player.stop()
-                ws?.send(
-                    JSON.stringify({
-                        type: "last_item_played_ms.truncate",
-                        details: { item_id, played_ms: played_ms || 0 },
-                    })
-                )
-            }
-            return
-        }
-        if (type === RSE.RESPONSE_AUDIO_DELTA) {
-            const bytes = base64ToUint8((message as any).delta)
-            const pcm = new Int16Array(bytes.buffer, bytes.byteOffset, bytes.byteLength / 2)
-            player.play({
-                response_id: message.response_id,
-                item_id: message.item_id,
-                delta: message.delta,
-                pcmInt16: pcm,
-            })
-            return
-        }
-        if (type === "error") {
-            console.log("VoiceBot Preview: Server hat einen Fehler gemeldet.", message)
-        }
-    }
-    const appendToOutboundBuffer = (chunk: Uint8Array) => {
-        if (!chunk.length) return
-        const combined = new Uint8Array(outboundBuffer.length + chunk.length)
-        combined.set(outboundBuffer)
-        combined.set(chunk, outboundBuffer.length)
-        outboundBuffer = combined
-        flushOutboundBuffer()
-    }
-    const flushOutboundBuffer = (force = false) => {
-        if (!ws || ws.readyState !== WebSocket.OPEN || outboundBuffer.length === 0) {
-            return
-        }
-        const chunkSize = CHUNK_SIZE_BYTES > 0 ? CHUNK_SIZE_BYTES : outboundBuffer.length
-        let buffer = outboundBuffer
-        outboundBuffer = new Uint8Array(0)
-        while (buffer.length >= chunkSize && chunkSize > 0) {
-            const part = buffer.slice(0, chunkSize)
-            buffer = buffer.slice(chunkSize)
-            sendChunk(part)
-        }
-        if (force && buffer.length > 0) {
-            sendChunk(buffer)
-        } else if (buffer.length > 0) {
-            outboundBuffer = buffer
-        }
-    }
-    const sendChunk = (chunk: Uint8Array) => {
-        if (!ws || ws.readyState !== WebSocket.OPEN) return
-        try {
-            ws.send(JSON.stringify({ type: "input_audio_buffer.append", audio: uint8ToBase64(chunk) }))
-        } catch (err) {
-            if (!closing) {
-                handleConnectionError("Senden des Audiostreams fehlgeschlagen.", err)
-            }
-        }
-    }
-    const handleConnectionError = (message: string, err?: unknown) => {
-        console.error("VoiceBot Preview Fehler:", err ?? message)
-        errorMessage = message
-        status = "error"
-        void stopVoiceDemo({ resetStatus: false })
-    }
-    const extractErrorMessage = (err: unknown, fallback: string) => {
-        if (err instanceof DOMException) {
-            if (err.name === "NotAllowedError") {
-                return "Zugriff auf das Mikrofon wurde verweigert."
-            }
-            if (err.name === "NotFoundError") {
-                return "Kein Mikrofon gefunden oder verfügbar."
-            }
-            if (err.name === "NotReadableError") {
-                return "Auf das Mikrofon konnte nicht zugegriffen werden (ggf. bereits in Verwendung)."
-            }
-            if (err.name === "SecurityError") {
-                return "Der Browser blockiert den Zugriff bitte die Seite über HTTPS öffnen."
-            }
-        }
-        if (err instanceof Error && err.message) return err.message
-        return fallback
-    }
-    onDestroy(() => {
-        void stopVoiceDemo({ resetStatus: false })
+    onMount(() => {
+        setup()
+        return () => teardown()
     })
 </script>
@@ -341,26 +58,27 @@
     {#snippet primaryContent()}
         <div
             class="img"
-            class:connected={status === "connected"}
-            class:errored={status === "error"}
+            class:connected={$status === "connected"}
+            class:errored={$status === "error"}
             role="button"
             tabindex="0"
-            aria-pressed={status === "connected"}
-            aria-busy={status === "connecting"}
+            aria-pressed={$status === "connected"}
+            aria-busy={$status === "connecting"}
             aria-label="Voicebot Demo starten"
-            on:click={() => void toggleVoiceDemo()}
+            on:click={() => void toggle()}
             on:keydown={handleKeydown}
         >
             <img
                 src="/media/iphone.png"
                 alt="Kontextwerk is calling"
             />
+            <div class="shadow"></div>
             <div
                 class="voice-overlay"
-                data-status={status}
+                data-status={$status}
                 aria-live="polite"
             >
-                <span>{statusHint}</span>
+                <span>{$statusHint}</span>
             </div>
         </div>
     {/snippet}
@@ -432,11 +150,9 @@
             }
             &.connected {
-                border-color: rgba(76, 175, 80, 0.4);
             }
             &.errored {
-                border-color: rgba(235, 87, 87, 0.45);
             }
             &:focus-visible {

View File

@@ -0,0 +1,207 @@
import { derived, get, writable, type Readable } from "svelte/store"
import {
ConnectorLifecycleEvents,
createVoiceConnector,
WS_URL,
} from "../voicebotDemo/helper"
import type { WerkRealtimeConnector } from "../voicebotDemo/helper"
export type VoiceStatus = "idle" | "connecting" | "connected" | "error"
interface VoicebotPreviewController {
status: Readable<VoiceStatus>
errorMessage: Readable<string>
statusHint: Readable<string>
setup: () => void
teardown: () => void
start: () => Promise<void>
stop: (silent?: boolean) => Promise<void>
toggle: () => Promise<void>
}
const isBrowser = typeof window !== "undefined"
const extractErrorMessage = (err: unknown, fallback: string) => {
if (err instanceof DOMException) {
if (err.name === "NotAllowedError") return "Zugriff auf das Mikrofon wurde verweigert."
if (err.name === "NotFoundError") return "Kein Mikrofon gefunden oder verfügbar."
if (err.name === "NotReadableError") return "Auf das Mikrofon konnte nicht zugegriffen werden."
if (err.name === "SecurityError") return "Der Browser blockiert den Zugriff bitte die Seite über HTTPS öffnen."
}
if (err instanceof Error && err.message) return err.message
return fallback
}
export const createVoicebotPreviewController = (): VoicebotPreviewController => {
const statusStore = writable<VoiceStatus>("idle")
const errorStore = writable("")
const statusHint = derived([statusStore, errorStore], ([$status, $error]) => {
switch ($status) {
case "idle":
return "Tippen, um die Voice-Demo zu starten"
case "connecting":
return "Verbindung wird aufgebaut …"
case "connected":
return "Live sprechen Sie jetzt"
case "error":
return $error || "Verbindung fehlgeschlagen"
default:
return "Voice-Demo"
}
})
let connector: WerkRealtimeConnector | null = null
let detachHandlers: Array<() => void> = []
let startPromise: Promise<void> | null = null
let stopPromise: Promise<void> | null = null
let closing = false
const handleRealtimeEvent = (rawType: string, msg: any) => {
const type = rawType
if (!type) return
const now = new Date()
}
const ensureConnector = () => {
if (!isBrowser || connector) return
const instance = createVoiceConnector()
const detachConnecting = instance.onLifecycle(ConnectorLifecycleEvents.CONNECTING, () => {
statusStore.set("connecting")
errorStore.set("")
})
const detachConnected = instance.onLifecycle(ConnectorLifecycleEvents.CONNECTED, () => {
statusStore.set("connected")
closing = false
})
const detachDisconnected = instance.onLifecycle(ConnectorLifecycleEvents.DISCONNECTED, () => {
if (closing) {
closing = false
return
}
if (get(statusStore) !== "error") {
statusStore.set("idle")
errorStore.set("")
}
})
const detachError = instance.onLifecycle(ConnectorLifecycleEvents.ERROR, (evt) => {
const message =
typeof evt?.message === "string" && evt.message.trim().length
? evt.message
: "Verbindung fehlgeschlagen"
errorStore.set(`${message} (${WS_URL})`)
statusStore.set("error")
closing = false
})
detachHandlers = [detachConnecting, detachConnected, detachDisconnected, detachError]
connector = instance
}
const cleanupConnector = () => {
detachHandlers.forEach((fn) => fn())
detachHandlers = []
const instance = connector
connector = null
if (instance) {
void instance.stop()
}
startPromise = null
stopPromise = null
closing = false
}
const stop = async (silent = false) => {
if (!connector) return
if (stopPromise) {
await stopPromise
return
}
closing = true
if (!silent && get(statusStore) !== "error") {
statusStore.set("idle")
errorStore.set("")
}
stopPromise = connector.stop()
try {
await stopPromise
} catch (err) {
console.error("VoicebotPreview stop error", err)
if (!silent) {
errorStore.set(extractErrorMessage(err, "Verbindung konnte nicht beendet werden."))
statusStore.set("error")
}
} finally {
stopPromise = null
closing = false
}
}
const start = async () => {
if (!isBrowser) {
statusStore.set("error")
errorStore.set("Die Sprach-Demo steht nur im Browser zur Verfügung.")
return
}
ensureConnector()
if (!connector || startPromise) return
await stop(true)
statusStore.set("connecting")
errorStore.set("")
startPromise = (async () => {
await connector!.start()
connector!.setInputMuted(false)
connector!.setOutputMuted(false)
})()
try {
await startPromise
} catch (err) {
console.error("VoicebotPreview start error", err)
errorStore.set(extractErrorMessage(err, "Verbindung konnte nicht aufgebaut werden."))
statusStore.set("error")
closing = false
} finally {
startPromise = null
}
}
const toggle = async () => {
const current = get(statusStore)
if (current === "connecting") return
if (current === "connected") {
await stop()
return
}
await start()
}
const setup = () => {
ensureConnector()
}
const teardown = () => {
void stop(true)
cleanupConnector()
}
return {
status: { subscribe: statusStore.subscribe },
errorMessage: { subscribe: errorStore.subscribe },
statusHint,
setup,
teardown,
start,
stop,
toggle,
}
}
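For orientation, the Svelte page above consumes this controller through its stores and the setup/teardown/toggle calls. The following is only a minimal TypeScript sketch of that same contract for standalone use; every name is taken from this diff, and the flow (not part of the commit) is an assumption for illustration:

    // Hypothetical standalone usage of createVoicebotPreviewController.
    import { createVoicebotPreviewController } from "./voicebotPreviewController"

    const controller = createVoicebotPreviewController()
    controller.setup() // lazily creates the connector (browser only)

    // status/statusHint follow the Svelte store contract, so they can also
    // be consumed outside a component via subscribe().
    const unsubscribe = controller.statusHint.subscribe((hint) => {
        console.log("voicebot:", hint)
    })

    // One call toggles between connecting and stopping, mirroring the click handler.
    void controller.toggle()

    // Later: release handlers, stop the connection, drop the connector.
    unsubscribe()
    controller.teardown()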

View File

@@ -1,148 +1,236 @@
-const SAMPLE_RATE = 24_000
-const BUFFER_SIZE = 4_800
-const AUDIO_PLAYBACK_WORKLET_URL = "/assets/audio-playback-worklet.js"
-const AUDIO_PROCESSOR_WORKLET_URL = "/assets/audio-processor-worklet.js"
-const uint8ToBase64 = (u8: Uint8Array): string => {
-    let bin = ""
-    for (let i = 0; i < u8.length; i++) bin += String.fromCharCode(u8[i])
-    return btoa(bin)
-}
-const base64ToUint8 = (b64: string): Uint8Array => {
-    const bin = atob(b64)
-    const out = new Uint8Array(bin.length)
-    for (let i = 0; i < bin.length; i++) out[i] = bin.charCodeAt(i)
-    return out
-}
-interface NowPlayingMessage {
-    type: "nowPlaying"
-    item_id: string
-    played_ms: number
-}
-interface NowPlayingState {
-    item_id: string | null
-    played_ms: number
-}
-interface Player {
-    init: (sampleRate?: number) => Promise<void>
-    play: (delta) => void
-    deleteItem: (item_id: string) => void
-    stop: () => void
-    setSourceRate: (hz: number) => void
-    getNowPlaying: () => NowPlayingState
-    destroy: () => Promise<void>
-    mute: () => void
-    unmute: () => void
-    node?: AudioWorkletNode | null
-}
-const createPlayer = (defaultSampleRate = 48000): Player => {
-    let ctx: AudioContext | null = null
-    let node: AudioWorkletNode | null = null
-    let nowItemId: string | null = null
-    let playedMs = 0
-    const isNowPlayingMessage = (m: unknown): m is NowPlayingMessage => {
-        if (!m || typeof m !== "object") return false
-        const x = m as Record<string, unknown>
-        return x["type"] === "nowPlaying" && "played_ms" in x
-    }
-    const init = async (sampleRate = defaultSampleRate): Promise<void> => {
-        ctx = new AudioContext({ sampleRate })
-        await ctx.audioWorklet.addModule(AUDIO_PLAYBACK_WORKLET_URL)
-        node = new AudioWorkletNode(ctx, "audio-playback-worklet")
-        node.port.onmessage = (e: MessageEvent) => {
-            const m = e.data
-            if (isNowPlayingMessage(m)) {
-                nowItemId = m.item_id
-                playedMs = m.played_ms | 0
-            }
-        }
-        node.connect(ctx.destination)
-    }
-    const play = (delta: ResponseAudioDelta): void => {
-        if (!node) return
-        const buf = delta.pcmInt16.buffer
-        node.port.postMessage({ type: "appendDelta", delta }, [buf])
-    }
-    const deleteItem = (item_id: string): void => {
-        node?.port.postMessage({ type: "deleteItem", item_id })
-    }
-    const stop = (): void => {
-        node?.port.postMessage({ type: "clear" })
-    }
-    const setSourceRate = (hz: number): void => {
-        node?.port.postMessage({ type: "setSourceRate", hz })
-    }
-    const getNowPlaying = (): NowPlayingState => {
-        return { item_id: nowItemId, played_ms: playedMs }
-    }
-    const mute = (): void => {
-        node?.port.postMessage({ type: "mute" })
-    }
-    const unmute = (): void => {
-        node?.port.postMessage({ type: "unmute" })
-    }
-    const destroy = async (): Promise<void> => {
-        if (!ctx) return
-        try {
-            await ctx.close()
-        } finally {
-            ctx = null
-            node = null
-            nowItemId = null
-            playedMs = 0
-        }
-    }
-    return { init, play, deleteItem, stop, setSourceRate, getNowPlaying, destroy, mute, unmute }
-}
-const createRecorder = (onChunk: (pcm: Int16Array) => void) => {
-    let ctx: AudioContext | null = null
-    let stream: MediaStream | null = null
-    let source: MediaStreamAudioSourceNode | null = null
-    let worklet: AudioWorkletNode | null = null
-    const start = async () => {
-        stream = await navigator.mediaDevices.getUserMedia({ audio: true })
-        if (ctx) await ctx.close()
-        ctx = new (window.AudioContext || (window as any).webkitAudioContext)({ sampleRate: SAMPLE_RATE })
-        await ctx.audioWorklet.addModule(AUDIO_PROCESSOR_WORKLET_URL)
-        source = ctx.createMediaStreamSource(stream)
-        worklet = new AudioWorkletNode(ctx, "audio-processor-worklet")
-        worklet.port.onmessage = (ev: MessageEvent<Int16Array>) => onChunk(ev.data)
-        source.connect(worklet)
-        worklet.connect(ctx.destination)
-    }
-    const stop = async () => {
-        if (stream) {
-            stream.getTracks().forEach((t) => t.stop())
-            stream = null
-        }
-        if (ctx) {
-            try {
-                await ctx.close()
-            } finally {
-                ctx = null
-            }
-        }
-        source = null
-        worklet = null
-    }
-    return { start, stop }
-}
-export { uint8ToBase64, base64ToUint8, createPlayer, createRecorder, SAMPLE_RATE }
+import { ConnectorLifecycleEvents, WerkRealtimeConnector, inlineWorklet } from "@kontextwerk/web-sdk"
+const isBrowser = typeof window !== "undefined"
+const WS_HOST = "2svoice-server.kontextwerk.info"
+export const SAMPLE_RATE = 24_000
+export const BUFFER_SIZE = 4_800
+export const WS_URL = isBrowser && window.location.protocol === "http:"
+    ? `ws://${WS_HOST}/api/v1/voicebot/ws`
+    : `wss://${WS_HOST}/api/v1/voicebot/ws`
+const playbackWorkletCode = `class AudioPlaybackWorklet extends AudioWorkletProcessor {
+    constructor() {
+        super()
+        this.srcRate = ${SAMPLE_RATE}
+        this.dstRate = sampleRate
+        this.step = this.srcRate / this.dstRate
+        this.queue = []
+        this.cur = null
+        this.hold = 0
+        this.phase = 0
+        this._x0 = undefined
+        this._x1 = undefined
+        this._nextItemId = null
+        this.nowItemId = null
+        this.nowItemSamples = 0
+        this._notifyFrames = 0
+        this.muted = false
+        this.port.onmessage = (e) => this._onMessage(e.data)
+    }
+    _onMessage(msg) {
+        if (!msg || !msg.type) return
+        if (msg.type === "appendDelta" && msg.delta && msg.delta.pcmInt16 instanceof Int16Array) {
+            this.queue.push({ item_id: msg.delta.item_id, data: msg.delta.pcmInt16, off: 0 })
+            return
+        }
+        if (msg.type === "deleteItem") {
+            const id = msg.item_id
+            this.queue = this.queue.filter((ch) => ch.item_id !== id)
+            if (this.cur && this.cur.item_id === id) {
+                this.cur = null
+                this.hold = 0
+            }
+            if (this.nowItemId === id) {
+                this.nowItemId = null
+                this.nowItemSamples = 0
+                this._notifyFrames = 0
+            }
+            return
+        }
+        if (msg.type === "clear") {
+            this.queue.length = 0
+            this.cur = null
+            this.hold = 0
+            this.phase = 0
+            this._x0 = undefined
+            this._x1 = undefined
+            this._nextItemId = null
+            this.nowItemId = null
+            this.nowItemSamples = 0
+            this._notifyFrames = 0
+            return
+        }
+        if (msg.type === "setSourceRate" && Number.isFinite(msg.hz) && msg.hz > 0) {
+            this.srcRate = msg.hz | 0
+            this.step = this.srcRate / this.dstRate
+            return
+        }
+        if (msg.type === "mute") {
+            this.muted = true
+            return
+        }
+        if (msg.type === "unmute") {
+            this.muted = false
+        }
+    }
+    _ensureCurrent() {
+        if (this.cur == null) {
+            if (this.queue.length === 0) return false
+            this.cur = this.queue.shift() || null
+            if (this.cur == null) return false
+        }
+        return true
+    }
+    _nextInt16() {
+        for (;;) {
+            if (!this._ensureCurrent()) {
+                this._nextItemId = null
+                this.hold = 0
+                return 0
+            }
+            const d = this.cur.data
+            const o = this.cur.off | 0
+            if (o < d.length) {
+                const s = d[o]
+                this.cur.off = o + 1
+                this.hold = s
+                this._nextItemId = this.cur.item_id
+                return s
+            }
+            this.cur = null
+        }
+    }
+    process(_inputs, outputs) {
+        const out = outputs[0]
+        if (!out || out.length === 0) return true
+        const ch0 = out[0]
+        const N = ch0.length
+        if (this._x1 === undefined) {
+            this._x1 = this._nextInt16()
+            this._x0 = this._x1
+            this.phase = 0
+            this.nowItemId = this._nextItemId
+            this.nowItemSamples = 0
+        }
+        const advance = () => {
+            this.phase += this.step
+            while (this.phase >= 1) {
+                this.phase -= 1
+                this._x0 = this._x1
+                this._x1 = this._nextInt16()
+                if (this.nowItemId !== this._nextItemId) {
+                    this.nowItemId = this._nextItemId
+                    this.nowItemSamples = 0
+                }
+                if (this.nowItemId) this.nowItemSamples += 1
+            }
+        }
+        if (this.muted) {
+            for (let i = 0; i < N; i++) {
+                ch0[i] = 0
+                for (let c = 1; c < out.length; c++) out[c][i] = 0
+                advance()
+            }
+        } else {
+            for (let i = 0; i < N; i++) {
+                const x0 = this._x0 ?? 0
+                const x1 = this._x1 ?? x0
+                const value = x0 + (x1 - x0) * this.phase
+                const sample = value <= -32768 ? -1 : value >= 32767 ? 1 : value / 32768
+                ch0[i] = sample
+                for (let c = 1; c < out.length; c++) out[c][i] = sample
+                advance()
+                this._notifyFrames += 1
+                if (this._notifyFrames >= this.dstRate / 10) {
+                    this._notifyFrames = 0
+                }
+            }
+        }
+        return true
+    }
+}
+registerProcessor("audio-playback-worklet", AudioPlaybackWorklet)
+`
+const processorWorkletCode = `class PCMAudioProcessor extends AudioWorkletProcessor {
+    constructor() {
+        super()
+        this._inRate = sampleRate
+        this._pos = 0
+        this._carry = null
+        this._outRate = ${SAMPLE_RATE}
+    }
+    process(inputs) {
+        const chs = inputs[0]
+        if (!chs || chs.length === 0) return true
+        const inF32 = chs[0]
+        const step = this._inRate / this._outRate
+        let src = inF32
+        if (this._carry !== null) {
+            const tmp = new Float32Array(1 + inF32.length)
+            tmp[0] = this._carry
+            tmp.set(inF32, 1)
+            src = tmp
+        }
+        const avail = src.length - 1 - this._pos
+        const outLen = avail > 0 ? Math.ceil(avail / step) : 0
+        const outI16 = new Int16Array(outLen)
+        let pos = this._pos
+        for (let k = 0; k < outLen; k++) {
+            const i = Math.floor(pos)
+            const frac = pos - i
+            const x0 = src[i]
+            const x1 = src[i + 1]
+            let y = x0 + frac * (x1 - x0)
+            if (y > 1) y = 1
+            else if (y < -1) y = -1
+            const s = y <= -1 ? -0x8000 : Math.round(y * 0x7fff)
+            outI16[k] = s
+            pos += step
+        }
+        this._pos = pos - (src.length - 1)
+        if (this._pos < 0) this._pos = 0
+        this._carry = src[src.length - 1]
+        try {
+            this.port.postMessage(outI16, [outI16.buffer])
+        } catch {
+            this.port.postMessage(outI16)
+        }
+        return true
+    }
+}
+registerProcessor("audio-processor-worklet", PCMAudioProcessor)
+`
+export const playbackWorklet = inlineWorklet(playbackWorkletCode)
+export const processorWorklet = inlineWorklet(processorWorkletCode)
+export const createVoiceConnector = () =>
+    new WerkRealtimeConnector({
+        wsUrl: WS_URL,
+        playbackWorklet,
+        processorWorklet,
+        sampleRate: SAMPLE_RATE,
+        bufferSize: BUFFER_SIZE,
+    })
+export { ConnectorLifecycleEvents }
+export type { WerkRealtimeConnector }
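The helper now only assembles SDK pieces. Based solely on the calls used by voicebotPreviewController.ts above (onLifecycle returning a detach function, start/stop, setInputMuted/setOutputMuted), driving the connector directly would look roughly like this TypeScript sketch; the event names and signatures are assumptions taken from this diff, not from SDK documentation:

    import { ConnectorLifecycleEvents, createVoiceConnector } from "../voicebotDemo/helper"

    const connector = createVoiceConnector()

    // onLifecycle() returns a detach function, as the controller above relies on.
    const detach = connector.onLifecycle(ConnectorLifecycleEvents.CONNECTED, () => {
        // Unmute both directions once the realtime session is up.
        connector.setInputMuted(false)
        connector.setOutputMuted(false)
    })

    void connector.start() // connects to WS_URL with the inline worklets configured above

    // Later, e.g. on page teardown:
    // detach()
    // void connector.stop()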

View File

@@ -55,6 +55,7 @@
"typescript": "^5.8.3" "typescript": "^5.8.3"
}, },
"dependencies": { "dependencies": {
"@kontextwerk/web-sdk": "0.1.0",
"@mdi/js": "^7.4.47", "@mdi/js": "^7.4.47",
"@microsoft/fetch-event-source": "^2.0.1", "@microsoft/fetch-event-source": "^2.0.1",
"@okrad/svelte-progressbar": "^2.2.0", "@okrad/svelte-progressbar": "^2.2.0",

View File

@@ -1779,6 +1779,15 @@ __metadata:
   languageName: node
   linkType: hard
 
+"@kontextwerk/web-sdk@npm:0.1.0":
+  version: 0.1.0
+  resolution: "@kontextwerk/web-sdk@npm:0.1.0"
+  dependencies:
+    zod: ^3.23.8
+  checksum: b99d6a71584c1db40ab2eb83f5b269bca6a70c4eec4c644eed0873e909c08fbae9600556599b7012ff13fb6150086687612c59c98c2f03699ad8559302090f83
+  languageName: node
+  linkType: hard
+
 "@mdi/js@npm:^7.0.96, @mdi/js@npm:^7.4.47":
   version: 7.4.47
   resolution: "@mdi/js@npm:7.4.47"
@@ -6087,6 +6096,7 @@ __metadata:
"@babel/core": ^7.27.1 "@babel/core": ^7.27.1
"@babel/plugin-transform-async-to-generator": ^7.27.1 "@babel/plugin-transform-async-to-generator": ^7.27.1
"@babel/preset-env": ^7.27.2 "@babel/preset-env": ^7.27.2
"@kontextwerk/web-sdk": 0.1.0
"@mdi/js": ^7.4.47 "@mdi/js": ^7.4.47
"@microsoft/fetch-event-source": ^2.0.1 "@microsoft/fetch-event-source": ^2.0.1
"@okrad/svelte-progressbar": ^2.2.0 "@okrad/svelte-progressbar": ^2.2.0
@@ -7364,3 +7374,10 @@ __metadata:
   checksum: f7917916db73ad09c4870dc7045fdefb9f0122257878ec53e75ff6ea633718369b99185a21aae1fed1d258e7d66d95080169ef1a386c599b8b912467f17932bc
   languageName: node
   linkType: hard
+
+"zod@npm:^3.23.8":
+  version: 3.25.76
+  resolution: "zod@npm:3.25.76"
+  checksum: c9a403a62b329188a5f6bd24d5d935d2bba345f7ab8151d1baa1505b5da9f227fb139354b043711490c798e91f3df75991395e40142e6510a4b16409f302b849
+  languageName: node
+  linkType: hard