2025-12-01 17:21:38 +08:00
parent 32fee2b8ab
commit fab8c13cb3
7511 changed files with 996300 additions and 0 deletions

View File

@@ -0,0 +1,50 @@
import AudioPlayer from '@/app/components/base/audio-btn/audio'
declare global {
// eslint-disable-next-line ts/consistent-type-definitions
interface AudioPlayerManager {
instance: AudioPlayerManager
}
}
export class AudioPlayerManager {
private static instance: AudioPlayerManager
private audioPlayers: AudioPlayer | null = null
private msgId: string | undefined
public static getInstance(): AudioPlayerManager {
if (!AudioPlayerManager.instance) {
AudioPlayerManager.instance = new AudioPlayerManager()
this.instance = AudioPlayerManager.instance
}
return AudioPlayerManager.instance
}
public getAudioPlayer(url: string, isPublic: boolean, id: string | undefined, msgContent: string | null | undefined, voice: string | undefined, callback: ((event: string) => void) | null): AudioPlayer {
if (this.msgId && this.msgId === id && this.audioPlayers) {
this.audioPlayers.setCallback(callback)
return this.audioPlayers
}
else {
if (this.audioPlayers) {
        try {
          // Stop and tear down the previous player before switching to a new message
          this.audioPlayers.pauseAudio()
          this.audioPlayers.cacheBuffers = []
          this.audioPlayers.sourceBuffer?.abort()
        }
        catch {
          // Ignore teardown errors from the previous player
        }
      }
this.msgId = id
this.audioPlayers = new AudioPlayer(url, isPublic, id, msgContent, voice, callback)
return this.audioPlayers
}
}
public resetMsgId(msgId: string) {
this.msgId = msgId
this.audioPlayers?.resetMsgId(msgId)
}
}
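
For context, a minimal usage sketch of the singleton above, mirroring how the AudioBtn component later in this commit consumes it; the endpoint, ids, voice, and callback below are placeholder values, not part of the commit:

// Hedged sketch: all argument values are illustrative.
import { AudioPlayerManager } from '@/app/components/base/audio-btn/audio.player.manager'

const player = AudioPlayerManager.getInstance().getAudioPlayer(
  '/text-to-audio',      // url: endpoint resolved by the caller
  true,                  // isPublic: true on the public share route (see AudioBtn below)
  'message-1',           // id: reusing the same id returns the cached AudioPlayer
  'Text to synthesize',  // msgContent: text sent to the text-to-audio service
  'alloy',               // voice: optional voice profile
  event => console.log('audio event:', event), // callback: 'play' | 'paused' | 'ended' | 'error' | ...
)
player.playAudio()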

View File

@@ -0,0 +1,246 @@
import Toast from '@/app/components/base/toast'
import { textToAudioStream } from '@/service/share'
declare global {
// eslint-disable-next-line ts/consistent-type-definitions
interface Window {
ManagedMediaSource: any
}
}
export default class AudioPlayer {
mediaSource: MediaSource | null
audio: HTMLAudioElement
audioContext: AudioContext
sourceBuffer?: any
cacheBuffers: ArrayBuffer[] = []
pauseTimer: number | null = null
msgId: string | undefined
msgContent: string | null | undefined = null
voice: string | undefined = undefined
isLoadData = false
url: string
isPublic: boolean
callback: ((event: string) => void) | null
constructor(streamUrl: string, isPublic: boolean, msgId: string | undefined, msgContent: string | null | undefined, voice: string | undefined, callback: ((event: string) => void) | null) {
this.audioContext = new AudioContext()
this.msgId = msgId
this.msgContent = msgContent
this.url = streamUrl
this.isPublic = isPublic
this.voice = voice
this.callback = callback
    // Prefer ManagedMediaSource for compatibility with iPhone (iOS 17.1+)
const MediaSource = window.ManagedMediaSource || window.MediaSource
if (!MediaSource) {
Toast.notify({
        message: 'Your browser does not support audio streaming. If you are using an iPhone, please update to iOS 17.1 or later.',
type: 'error',
})
}
this.mediaSource = MediaSource ? new MediaSource() : null
this.audio = new Audio()
this.setCallback(callback)
    if (!window.MediaSource) { // falling back to ManagedMediaSource
this.audio.disableRemotePlayback = true
this.audio.controls = true
}
this.audio.src = this.mediaSource ? URL.createObjectURL(this.mediaSource) : ''
this.audio.autoplay = true
const source = this.audioContext.createMediaElementSource(this.audio)
source.connect(this.audioContext.destination)
this.listenMediaSource('audio/mpeg')
}
public resetMsgId(msgId: string) {
this.msgId = msgId
}
private listenMediaSource(contentType: string) {
this.mediaSource?.addEventListener('sourceopen', () => {
if (this.sourceBuffer)
return
this.sourceBuffer = this.mediaSource?.addSourceBuffer(contentType)
})
}
public setCallback(callback: ((event: string) => void) | null) {
this.callback = callback
if (callback) {
this.audio.addEventListener('ended', () => {
callback('ended')
}, false)
      // The native media event name is 'pause'; the callback keeps the 'paused' label used by the UI layer
      this.audio.addEventListener('pause', () => {
        callback('paused')
      }, true)
this.audio.addEventListener('loaded', () => {
callback('loaded')
}, true)
this.audio.addEventListener('play', () => {
callback('play')
}, true)
this.audio.addEventListener('timeupdate', () => {
callback('timeupdate')
}, true)
      this.audio.addEventListener('loadeddata', () => {
        callback('loadeddata')
      }, true)
this.audio.addEventListener('canplay', () => {
callback('canplay')
}, true)
this.audio.addEventListener('error', () => {
callback('error')
}, true)
}
}
private async loadAudio() {
try {
const audioResponse: any = await textToAudioStream(this.url, this.isPublic, { content_type: 'audio/mpeg' }, {
message_id: this.msgId,
streaming: true,
voice: this.voice,
text: this.msgContent,
})
      if (audioResponse.status !== 200) {
        this.isLoadData = false
        if (this.callback)
          this.callback('error')
        return
      }
const reader = audioResponse.body.getReader()
while (true) {
const { value, done } = await reader.read()
        if (done) {
          // A final undefined chunk flushes the cached buffers and closes the MediaSource stream
          this.receiveAudioData(value)
          break
        }
this.receiveAudioData(value)
}
}
catch {
this.isLoadData = false
this.callback?.('error')
}
}
// play audio
public playAudio() {
if (this.isLoadData) {
if (this.audioContext.state === 'suspended') {
this.audioContext.resume().then((_) => {
this.audio.play()
this.callback?.('play')
})
}
else if (this.audio.ended) {
this.audio.play()
this.callback?.('play')
}
this.callback?.('play')
}
else {
this.isLoadData = true
this.loadAudio()
}
}
private theEndOfStream() {
const endTimer = setInterval(() => {
if (!this.sourceBuffer?.updating) {
this.mediaSource?.endOfStream()
clearInterval(endTimer)
}
}, 10)
}
private finishStream() {
const timer = setInterval(() => {
if (!this.cacheBuffers.length) {
this.theEndOfStream()
clearInterval(timer)
}
if (this.cacheBuffers.length && !this.sourceBuffer?.updating) {
const arrayBuffer = this.cacheBuffers.shift()!
this.sourceBuffer?.appendBuffer(arrayBuffer)
}
}, 10)
}
public async playAudioWithAudio(audio: string, play = true) {
if (!audio || !audio.length) {
this.finishStream()
return
}
const audioContent = Buffer.from(audio, 'base64')
this.receiveAudioData(new Uint8Array(audioContent))
if (play) {
this.isLoadData = true
if (this.audio.paused) {
this.audioContext.resume().then((_) => {
this.audio.play()
this.callback?.('play')
})
}
else if (this.audio.ended) {
this.audio.play()
this.callback?.('play')
}
      else if (this.audio.played) { /* already playing; nothing to do */ }
else {
this.audio.play()
this.callback?.('play')
}
}
}
public pauseAudio() {
this.callback?.('paused')
this.audio.pause()
this.audioContext.suspend()
}
  private receiveAudioData(uint8Array: Uint8Array) {
    if (!uint8Array) {
      this.finishStream()
      return
    }
    const audioData = this.byteArrayToArrayBuffer(uint8Array)
if (!audioData.byteLength) {
if (this.mediaSource?.readyState === 'open')
this.finishStream()
return
}
if (this.sourceBuffer?.updating) {
this.cacheBuffers.push(audioData)
}
else {
if (this.cacheBuffers.length && !this.sourceBuffer?.updating) {
this.cacheBuffers.push(audioData)
const cacheBuffer = this.cacheBuffers.shift()!
this.sourceBuffer?.appendBuffer(cacheBuffer)
}
else {
this.sourceBuffer?.appendBuffer(audioData)
}
}
}
private byteArrayToArrayBuffer(byteArray: Uint8Array): ArrayBuffer {
const arrayBuffer = new ArrayBuffer(byteArray.length)
const uint8Array = new Uint8Array(arrayBuffer)
uint8Array.set(byteArray)
return arrayBuffer
}
}
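
For reference, a hedged sketch of the push-style entry point above: playAudio() pulls audio from the text-to-audio endpoint itself, while playAudioWithAudio() accepts base64-encoded MP3 chunks as they arrive (for example from a streaming response). The chunk source and variable names below are illustrative; only the AudioPlayer calls come from the class above:

// Hedged sketch: the chunk source is assumed.
const player = new AudioPlayer('/text-to-audio', false, 'message-1', null, undefined, null)
const incomingChunks: string[] = [] // assumed: base64 MP3 fragments pushed in by a streaming response
for (const chunk of incomingChunks)
  player.playAudioWithAudio(chunk, true) // decode, buffer into the SourceBuffer, and start playback
player.playAudioWithAudio('', true)      // an empty chunk drains the cache and ends the MediaSource stream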

View File

@@ -0,0 +1,75 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import { useEffect } from 'react'
import type { ComponentProps } from 'react'
import AudioBtn from '.'
import { ensureMockAudioManager } from '../../../../.storybook/utils/audio-player-manager.mock'
ensureMockAudioManager()
const StoryWrapper = (props: ComponentProps<typeof AudioBtn>) => {
useEffect(() => {
ensureMockAudioManager()
}, [])
return (
<div className="flex items-center justify-center space-x-3">
<AudioBtn {...props} />
<span className="text-xs text-gray-500">Click to toggle playback</span>
</div>
)
}
const meta = {
title: 'Base/General/AudioBtn',
component: AudioBtn,
tags: ['autodocs'],
parameters: {
layout: 'centered',
docs: {
description: {
component: 'Audio playback toggle that streams assistant responses. The story uses a mocked audio player so you can inspect loading and playback states without calling the real API.',
},
},
nextjs: {
appDirectory: true,
navigation: {
pathname: '/apps/demo-app/text-to-audio',
params: { appId: 'demo-app' },
},
},
},
argTypes: {
id: {
control: 'text',
description: 'Message identifier used to scope the audio stream.',
},
value: {
control: 'text',
description: 'Text content that would be converted to speech.',
},
voice: {
control: 'text',
description: 'Voice profile used for playback.',
},
isAudition: {
control: 'boolean',
description: 'Switches to the audition style with minimal padding.',
},
className: {
control: 'text',
description: 'Optional custom class for the wrapper.',
},
},
} satisfies Meta<typeof AudioBtn>
export default meta
type Story = StoryObj<typeof meta>
export const Default: Story = {
render: args => <StoryWrapper {...args} />,
args: {
id: 'message-1',
value: 'This is an audio preview for the current assistant response.',
voice: 'alloy',
},
}

View File

@@ -0,0 +1,110 @@
'use client'
import { useState } from 'react'
import { t } from 'i18next'
import { useParams, usePathname } from 'next/navigation'
import s from './style.module.css'
import Tooltip from '@/app/components/base/tooltip'
import Loading from '@/app/components/base/loading'
import { AudioPlayerManager } from '@/app/components/base/audio-btn/audio.player.manager'
type AudioBtnProps = {
id?: string
voice?: string
value?: string
className?: string
isAudition?: boolean
noCache?: boolean
}
type AudioState = 'initial' | 'loading' | 'playing' | 'paused' | 'ended'
const AudioBtn = ({
id,
voice,
value,
className,
isAudition,
}: AudioBtnProps) => {
const [audioState, setAudioState] = useState<AudioState>('initial')
const params = useParams()
const pathname = usePathname()
const audio_finished_call = (event: string): void => {
switch (event) {
case 'ended':
setAudioState('ended')
break
case 'paused':
setAudioState('ended')
break
case 'loaded':
setAudioState('loading')
break
case 'play':
setAudioState('playing')
break
case 'error':
setAudioState('ended')
break
}
}
let url = ''
let isPublic = false
if (params.token) {
url = '/text-to-audio'
isPublic = true
}
else if (params.appId) {
if (pathname.search('explore/installed') > -1)
url = `/installed-apps/${params.appId}/text-to-audio`
else
url = `/apps/${params.appId}/text-to-audio`
}
const handleToggle = async () => {
if (audioState === 'playing' || audioState === 'loading') {
setTimeout(() => setAudioState('paused'), 1)
AudioPlayerManager.getInstance().getAudioPlayer(url, isPublic, id, value, voice, audio_finished_call).pauseAudio()
}
else {
setTimeout(() => setAudioState('loading'), 1)
AudioPlayerManager.getInstance().getAudioPlayer(url, isPublic, id, value, voice, audio_finished_call).playAudio()
}
}
const tooltipContent = {
initial: t('appApi.play'),
ended: t('appApi.play'),
paused: t('appApi.pause'),
playing: t('appApi.playing'),
loading: t('appApi.loading'),
}[audioState]
return (
<div className={`inline-flex items-center justify-center ${(audioState === 'loading' || audioState === 'playing') ? 'mr-1' : className}`}>
<Tooltip
popupContent={tooltipContent}
>
<button type="button"
disabled={audioState === 'loading'}
className={`box-border flex h-6 w-6 cursor-pointer items-center justify-center ${isAudition ? 'p-0.5' : 'rounded-md bg-white p-0'}`}
onClick={handleToggle}
>
{audioState === 'loading'
? (
<div className='flex h-full w-full items-center justify-center rounded-md'>
<Loading />
</div>
)
: (
<div className={'flex h-full w-full items-center justify-center rounded-md hover:bg-gray-50'}>
<div className={`h-4 w-4 ${(audioState === 'playing') ? s.pauseIcon : s.playIcon}`}></div>
</div>
)}
</button>
</Tooltip>
</div>
)
}
export default AudioBtn
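
A hedged example of embedding the button in a chat answer; the wrapper component name, the message shape, and the voice value are assumptions for illustration:

// Hedged sketch: `ChatAnswerActions` and the `message` shape are illustrative only.
import AudioBtn from '@/app/components/base/audio-btn'

type ChatMessage = { id: string, content: string }

const ChatAnswerActions = ({ message }: { message: ChatMessage }) => (
  <AudioBtn
    id={message.id}         // scopes the cached AudioPlayer to this message
    value={message.content} // text converted to speech by the text-to-audio endpoint
    voice="alloy"           // assumed voice profile
  />
)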

View File

@@ -0,0 +1,10 @@
.playIcon {
background-image: url(~@/app/components/develop/secret-key/assets/play.svg);
background-position: center;
background-repeat: no-repeat;
}
.pauseIcon {
background-image: url(~@/app/components/develop/secret-key/assets/pause.svg);
background-position: center;
background-repeat: no-repeat;
}