Compare commits

..

2 Commits

Author SHA1 Message Date
David Zhao
02e2e00f9d hide other languages, slightly louder original audio 2025-07-28 00:48:25 +01:00
David Zhao
8ca5bc9f40 Customized meet for multi-user translation agent 2025-07-28 00:44:33 +01:00
16 changed files with 1652 additions and 681 deletions

View File

@ -1,16 +1,33 @@
# .github/workflows/sync-to-production.yaml
name: Sync main to sandbox-production name: Sync main to sandbox-production
on: on:
workflow_dispatch: push:
branches:
- main
permissions:
contents: write
pull-requests: write
jobs: jobs:
sync: sync:
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions:
contents: write
steps: steps:
- uses: livekit-examples/sandbox-deploy-action@v1 - name: Checkout code
uses: actions/checkout@v4
with: with:
production_branch: 'sandbox-production' fetch-depth: 0 # Fetch all history so we can force push
token: ${{ secrets.GITHUB_TOKEN }}
- name: Set up Git
run: |
git config --global user.name 'github-actions[bot]'
git config --global user.email 'github-actions[bot]@livekit.io'
- name: Sync to sandbox-production
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
git checkout sandbox-production || git checkout -b sandbox-production
git merge --strategy-option theirs main
git push origin sandbox-production

View File

@ -11,12 +11,12 @@ jobs:
test: test:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v6 - uses: actions/checkout@v4
- uses: pnpm/action-setup@v4 - uses: pnpm/action-setup@v4
- name: Use Node.js 22 - name: Use Node.js 20
uses: actions/setup-node@v4 uses: actions/setup-node@v4
with: with:
node-version: 24 node-version: 20
cache: 'pnpm' cache: 'pnpm'
- name: Install dependencies - name: Install dependencies

View File

@ -2,6 +2,7 @@ import { randomString } from '@/lib/client-utils';
import { getLiveKitURL } from '@/lib/getLiveKitURL'; import { getLiveKitURL } from '@/lib/getLiveKitURL';
import { ConnectionDetails } from '@/lib/types'; import { ConnectionDetails } from '@/lib/types';
import { AccessToken, AccessTokenOptions, VideoGrant } from 'livekit-server-sdk'; import { AccessToken, AccessTokenOptions, VideoGrant } from 'livekit-server-sdk';
import { RoomAgentDispatch, RoomConfiguration } from '@livekit/protocol';
import { NextRequest, NextResponse } from 'next/server'; import { NextRequest, NextResponse } from 'next/server';
const API_KEY = process.env.LIVEKIT_API_KEY; const API_KEY = process.env.LIVEKIT_API_KEY;
@ -17,9 +18,11 @@ export async function GET(request: NextRequest) {
const participantName = request.nextUrl.searchParams.get('participantName'); const participantName = request.nextUrl.searchParams.get('participantName');
const metadata = request.nextUrl.searchParams.get('metadata') ?? ''; const metadata = request.nextUrl.searchParams.get('metadata') ?? '';
const region = request.nextUrl.searchParams.get('region'); const region = request.nextUrl.searchParams.get('region');
const language = request.nextUrl.searchParams.get('language') ?? 'en';
if (!LIVEKIT_URL) { if (!LIVEKIT_URL) {
throw new Error('LIVEKIT_URL is not defined'); throw new Error('LIVEKIT_URL is not defined');
} }
const livekitServerUrl = region ? getLiveKitURL(LIVEKIT_URL, region) : LIVEKIT_URL; const livekitServerUrl = region ? getLiveKitURL(LIVEKIT_URL, region) : LIVEKIT_URL;
let randomParticipantPostfix = request.cookies.get(COOKIE_KEY)?.value; let randomParticipantPostfix = request.cookies.get(COOKIE_KEY)?.value;
if (livekitServerUrl === undefined) { if (livekitServerUrl === undefined) {
@ -33,7 +36,6 @@ export async function GET(request: NextRequest) {
return new NextResponse('Missing required query parameter: participantName', { status: 400 }); return new NextResponse('Missing required query parameter: participantName', { status: 400 });
} }
// Generate participant token
if (!randomParticipantPostfix) { if (!randomParticipantPostfix) {
randomParticipantPostfix = randomString(4); randomParticipantPostfix = randomString(4);
} }
@ -42,10 +44,15 @@ export async function GET(request: NextRequest) {
identity: `${participantName}__${randomParticipantPostfix}`, identity: `${participantName}__${randomParticipantPostfix}`,
name: participantName, name: participantName,
metadata, metadata,
attributes: {
language,
}
}, },
roomName, roomName,
); );
console.info("token:", participantToken);
// Return connection details // Return connection details
const data: ConnectionDetails = { const data: ConnectionDetails = {
serverUrl: livekitServerUrl, serverUrl: livekitServerUrl,
@ -75,8 +82,14 @@ function createParticipantToken(userInfo: AccessTokenOptions, roomName: string)
canPublish: true, canPublish: true,
canPublishData: true, canPublishData: true,
canSubscribe: true, canSubscribe: true,
canUpdateOwnMetadata: true,
}; };
at.addGrant(grant); at.addGrant(grant);
at.roomConfig = new RoomConfiguration({
agents: [new RoomAgentDispatch({
agentName: "translator",
})],
})
return at.toJwt(); return at.toJwt();
} }

View File

@ -21,7 +21,6 @@ export function VideoConferenceClientImpl(props: {
liveKitUrl: string; liveKitUrl: string;
token: string; token: string;
codec: VideoCodec | undefined; codec: VideoCodec | undefined;
singlePeerConnection: boolean | undefined;
}) { }) {
const keyProvider = new ExternalE2EEKeyProvider(); const keyProvider = new ExternalE2EEKeyProvider();
const { worker, e2eePassphrase } = useSetupE2EE(); const { worker, e2eePassphrase } = useSetupE2EE();
@ -44,7 +43,6 @@ export function VideoConferenceClientImpl(props: {
worker, worker,
} }
: undefined, : undefined,
singlePeerConnection: props.singlePeerConnection,
}; };
}, [e2eeEnabled, props.codec, keyProvider, worker]); }, [e2eeEnabled, props.codec, keyProvider, worker]);

View File

@ -7,10 +7,9 @@ export default async function CustomRoomConnection(props: {
liveKitUrl?: string; liveKitUrl?: string;
token?: string; token?: string;
codec?: string; codec?: string;
singlePC?: string;
}>; }>;
}) { }) {
const { liveKitUrl, token, codec, singlePC } = await props.searchParams; const { liveKitUrl, token, codec } = await props.searchParams;
if (typeof liveKitUrl !== 'string') { if (typeof liveKitUrl !== 'string') {
return <h2>Missing LiveKit URL</h2>; return <h2>Missing LiveKit URL</h2>;
} }
@ -23,12 +22,7 @@ export default async function CustomRoomConnection(props: {
return ( return (
<main data-lk-theme="default" style={{ height: '100%' }}> <main data-lk-theme="default" style={{ height: '100%' }}>
<VideoConferenceClientImpl <VideoConferenceClientImpl liveKitUrl={liveKitUrl} token={token} codec={codec} />
liveKitUrl={liveKitUrl}
token={token}
codec={codec}
singlePeerConnection={singlePC === 'true'}
/>
</main> </main>
); );
} }

View File

@ -44,6 +44,8 @@ function Tabs(props: React.PropsWithChildren<{}>) {
function DemoMeetingTab(props: { label: string }) { function DemoMeetingTab(props: { label: string }) {
const router = useRouter(); const router = useRouter();
const [e2ee, setE2ee] = useState(false); const [e2ee, setE2ee] = useState(false);
// TODO(dz): we need to set this to the default language of the browser
const [language, setLanguage] = useState("en")
const [sharedPassphrase, setSharedPassphrase] = useState(randomString(64)); const [sharedPassphrase, setSharedPassphrase] = useState(randomString(64));
const startMeeting = () => { const startMeeting = () => {
if (e2ee) { if (e2ee) {
@ -60,6 +62,12 @@ function DemoMeetingTab(props: { label: string }) {
</button> </button>
<div style={{ display: 'flex', flexDirection: 'column', gap: '1rem' }}> <div style={{ display: 'flex', flexDirection: 'column', gap: '1rem' }}>
<div style={{ display: 'flex', flexDirection: 'row', gap: '1rem' }}> <div style={{ display: 'flex', flexDirection: 'row', gap: '1rem' }}>
<select
id="language"
onChange={(ev) => setLanguage(ev.target.value)}
>
</select>
<input <input
id="use-e2ee" id="use-e2ee"
type="checkbox" type="checkbox"

View File

@ -6,13 +6,12 @@ import { DebugMode } from '@/lib/Debug';
import { KeyboardShortcuts } from '@/lib/KeyboardShortcuts'; import { KeyboardShortcuts } from '@/lib/KeyboardShortcuts';
import { RecordingIndicator } from '@/lib/RecordingIndicator'; import { RecordingIndicator } from '@/lib/RecordingIndicator';
import { SettingsMenu } from '@/lib/SettingsMenu'; import { SettingsMenu } from '@/lib/SettingsMenu';
import { ConnectionDetails } from '@/lib/types'; import { ConnectionDetails, LocalUserChoices } from '@/lib/types';
import { VideoConference } from './VideoConference';
import { PreJoin } from './PreJoin';
import { import {
formatChatMessageLinks, formatChatMessageLinks,
LocalUserChoices,
PreJoin,
RoomContext, RoomContext,
VideoConference,
} from '@livekit/components-react'; } from '@livekit/components-react';
import { import {
ExternalE2EEKeyProvider, ExternalE2EEKeyProvider,
@ -43,6 +42,7 @@ export function PageClientImpl(props: {
const [preJoinChoices, setPreJoinChoices] = React.useState<LocalUserChoices | undefined>( const [preJoinChoices, setPreJoinChoices] = React.useState<LocalUserChoices | undefined>(
undefined, undefined,
); );
const preJoinDefaults = React.useMemo(() => { const preJoinDefaults = React.useMemo(() => {
return { return {
username: '', username: '',
@ -50,6 +50,7 @@ export function PageClientImpl(props: {
audioEnabled: true, audioEnabled: true,
}; };
}, []); }, []);
const [connectionDetails, setConnectionDetails] = React.useState<ConnectionDetails | undefined>( const [connectionDetails, setConnectionDetails] = React.useState<ConnectionDetails | undefined>(
undefined, undefined,
); );
@ -59,6 +60,9 @@ export function PageClientImpl(props: {
const url = new URL(CONN_DETAILS_ENDPOINT, window.location.origin); const url = new URL(CONN_DETAILS_ENDPOINT, window.location.origin);
url.searchParams.append('roomName', props.roomName); url.searchParams.append('roomName', props.roomName);
url.searchParams.append('participantName', values.username); url.searchParams.append('participantName', values.username);
if (values.language) {
url.searchParams.append('language', values.language);
}
if (props.region) { if (props.region) {
url.searchParams.append('region', props.region); url.searchParams.append('region', props.region);
} }
@ -129,7 +133,6 @@ function VideoConferenceComponent(props: {
adaptiveStream: true, adaptiveStream: true,
dynacast: true, dynacast: true,
e2ee: keyProvider && worker && e2eeEnabled ? { keyProvider, worker } : undefined, e2ee: keyProvider && worker && e2eeEnabled ? { keyProvider, worker } : undefined,
singlePeerConnection: true,
}; };
}, [props.userChoices, props.options.hq, props.options.codec]); }, [props.userChoices, props.options.hq, props.options.codec]);

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,60 @@
import { getTrackReferenceId } from '@livekit/components-core';
import { Track, ParticipantKind } from 'livekit-client';
import * as React from 'react';
import { useLocalParticipant, useTracks } from '@livekit/components-react/hooks';
import { AudioTrack, TrackReference } from '@livekit/components-react';
export function RoomAudioRenderer() {
const tracks = useTracks(
[Track.Source.Microphone, Track.Source.ScreenShareAudio, Track.Source.Unknown],
{
updateOnlyOn: [],
onlySubscribed: true,
},
).filter((ref) => !ref.participant.isLocal && ref.publication.kind === Track.Kind.Audio);
const {localParticipant} = useLocalParticipant();
const currentLanguage = localParticipant?.attributes?.language;
// we don't have a language set so we don't know how to handle the multiple audio tracks
// this should not happen
if (!currentLanguage) {
return null;
}
const matchingTracks: TrackReference[] = [];
const originalTracks: TrackReference[] = [];
for (const track of tracks) {
if (track.participant.attributes?.language === currentLanguage ||
(track.participant.kind === ParticipantKind.AGENT && track.publication.trackName.endsWith(`-${currentLanguage}`))
) {
matchingTracks.push(track);
} else if (track.participant.kind !== ParticipantKind.AGENT) {
originalTracks.push(track);
}
}
return (
<div style={{ display: 'none' }}>
{matchingTracks.map((trackRef) => (
<AudioTrack
key={getTrackReferenceId(trackRef)}
trackRef={trackRef}
volume={1.0}
muted={false}
/>
))}
{originalTracks.map((trackRef) => (
<AudioTrack
key={getTrackReferenceId(trackRef)}
trackRef={trackRef}
volume={0.5}
muted={false}
/>
))}
</div>
);
}

View File

@ -0,0 +1,179 @@
import * as React from 'react';
import { useEnsureRoom, useLocalParticipant } from '@livekit/components-react';
/**
 * One caption entry accumulated from an `lk.transcription` text stream.
 * Chunks arriving on the same stream id are appended to `text`.
 */
export interface Transcript {
  // Text-stream id from the reader; used as the accumulation/display key.
  id: string;
  // Caption text accumulated so far (grows as stream chunks arrive).
  text: string;
  // True when this entry came from a stream marked `translated: "true"`.
  isTranslation: boolean;
  // Identity of the participant the stream was attributed to, if known.
  participantId?: string;
  // Last-updated time (ms since epoch); used to pick the newest entries.
  timestamp: number;
  // Set once the stream has finished (or the final-flag attribute was seen).
  complete?: boolean;
}

/**
 * Props for {@link TranscriptDisplay}. Currently empty — the component reads
 * everything it needs from room context — but kept exported for future use.
 */
export interface TranscriptDisplayProps {
}
/**
* TranscriptDisplay component shows captions of what users are saying
* It displays up to two different transcripts (original and translation)
* and removes them after 5 seconds of no changes or when new transcripts arrive
*/
export function TranscriptDisplay() {
  // The (at most two) captions currently rendered: newest original + newest translation.
  const [visibleTranscripts, setVisibleTranscripts] = React.useState<Transcript[]>([]);
  // Auto-hide timer; reset on every transcript update.
  const timeoutRef = React.useRef<NodeJS.Timeout | null>(null);
  // All in-flight transcripts keyed by stream id; a ref so the async stream
  // handler can mutate it without re-rendering on every chunk.
  const transcriptsRef = React.useRef<Record<string, Transcript>>({});
  const room = useEnsureRoom();
  const {localParticipant} = useLocalParticipant();
  // Language the local user selected; set as a participant attribute by the token server.
  const currentLanguage = localParticipant?.attributes?.language;

  // Recompute which captions are visible and (re)arm the 5s auto-hide timer.
  const updateTranscriptState = React.useCallback(() => {
    const allTranscripts = Object.values(transcriptsRef.current);

    // Sort by timestamp (newest first) and take the most recent 2
    // One original and one translation if available
    const sortedTranscripts = allTranscripts
      .sort((a, b) => b.timestamp - a.timestamp);

    // Find the most recent original transcript
    const originalTranscript = sortedTranscripts.find(t => !t.isTranslation);

    // Find the most recent translation transcript
    const translationTranscript = sortedTranscripts.find(t => t.isTranslation);

    // Combine them into the visible transcripts array
    const newVisibleTranscripts: Transcript[] = [];
    if (originalTranscript) newVisibleTranscripts.push(originalTranscript);
    if (translationTranscript) newVisibleTranscripts.push(translationTranscript);

    setVisibleTranscripts(newVisibleTranscripts);

    // Reset the timeout
    if (timeoutRef.current) {
      clearTimeout(timeoutRef.current);
    }

    // Set timeout to clear transcripts after 5 seconds
    timeoutRef.current = setTimeout(() => {
      setVisibleTranscripts([]);
      // Also clear the transcripts reference
      transcriptsRef.current = {};
    }, 5000);
  }, []);

  // Register the transcription text-stream handler. The effect re-runs when
  // currentLanguage changes; the cleanup unregisters the old handler first,
  // so registration does not collide with the previous one.
  React.useEffect(() => {
    if (room) {
      room.registerTextStreamHandler('lk.transcription', async (reader, participantInfo) => {
        const info = reader.info;
        const isTranslation = info.attributes?.translated === "true";
        // ignore translations for other languages
        if (isTranslation && info.attributes?.language !== currentLanguage) {
          return;
        }
        const id = info.id;
        const participantId = participantInfo?.identity;
        // NOTE(review): the final flag is read ONCE from the stream attributes
        // before iterating; if the flag only becomes true on a later update,
        // `complete` stays false until the stream ends — confirm against the
        // agent's stream attribute behavior.
        const isFinal = info.attributes?.["lk.transcription_final"] === "true";
        console.log("transcript", id, isFinal);

        // Create or update the transcript in our reference object
        if (!transcriptsRef.current[id]) {
          transcriptsRef.current[id] = {
            id,
            text: '',
            isTranslation,
            participantId,
            timestamp: Date.now(),
          };
        }

        try {
          // Append each incoming chunk and refresh the visible captions.
          for await (const chunk of reader) {
            // Update the transcript with the new chunk
            if (chunk) {
              const transcript = transcriptsRef.current[id];
              transcript.text += chunk;
              transcript.timestamp = Date.now();
              transcript.complete = isFinal;
              updateTranscriptState();
            }
          }

          // Stream ended: mark complete unconditionally (the entry may have
          // been cleared by the 5s timer in the meantime, hence the guard).
          if (transcriptsRef.current[id]) {
            transcriptsRef.current[id].complete = true;
            updateTranscriptState();
          }
        } catch (e) {
          console.error('Error processing transcript stream:', e);
        }
      });

      return () => {
        room.unregisterTextStreamHandler('lk.transcription');
        if (timeoutRef.current) {
          clearTimeout(timeoutRef.current);
        }
      };
    }
  }, [room, currentLanguage, updateTranscriptState]);

  // Safety net: clear any pending auto-hide timer on unmount.
  React.useEffect(() => {
    return () => {
      if (timeoutRef.current) {
        clearTimeout(timeoutRef.current);
      }
    };
  }, []);

  // No language selected — captions cannot be filtered meaningfully, so hide.
  // (The stream handler above is still registered in this state.)
  if (!currentLanguage) {
    return null;
  }

  if (visibleTranscripts.length === 0) {
    return null;
  }

  return (
    <div className="lk-transcript-container">
      {visibleTranscripts.map((transcript) => (
        <div
          key={transcript.id}
          className={`lk-transcript ${transcript.isTranslation ? 'lk-transcript-translation' : 'lk-transcript-original'}`}
        >
          {transcript.text}
        </div>
      ))}
      <style jsx>{`
        .lk-transcript-container {
          position: absolute;
          bottom: 80px;
          left: 20%;
          right: 20%;
          display: flex;
          flex-direction: column;
          align-items: center;
          z-index: 10;
        }
        .lk-transcript {
          background-color: rgba(0, 0, 0, 0.7);
          color: white;
          padding: 8px 16px;
          margin-bottom: 8px;
          border-radius: 4px;
          max-width: 100%;
          text-align: center;
          font-size: 1rem;
          line-height: 1.5;
        }
        .lk-transcript-translation {
          font-style: italic;
          background-color: rgba(0, 0, 0, 0.6);
        }
      `}</style>
    </div>
  );
}

View File

@ -0,0 +1,176 @@
import * as React from 'react';
import type {
MessageDecoder,
MessageEncoder,
TrackReferenceOrPlaceholder,
WidgetState,
} from '@livekit/components-core';
import { isEqualTrackRef, isTrackReference, isWeb, log } from '@livekit/components-core';
import { ParticipantKind, RoomEvent, Track } from 'livekit-client';
import { RoomAudioRenderer } from './RoomAudioRenderer';
import { TranscriptDisplay } from './TranscriptDisplay';
import {
CarouselLayout,
ConnectionStateToast,
FocusLayout,
FocusLayoutContainer,
GridLayout,
LayoutContextProvider,
ParticipantTile,
useCreateLayoutContext,
Chat,
ControlBar,
MessageFormatter,
} from '@livekit/components-react';
import { usePinnedTracks, useTracks } from '@livekit/components-react/hooks';
/**
* @public
*/
export interface VideoConferenceProps extends React.HTMLAttributes<HTMLDivElement> {
  // Formatter applied to chat message bodies (e.g. linkifying URLs).
  // NOTE(review): the chat widget is disabled in this customized component
  // (ControlBar is rendered with `chat: false`), so these three chat props
  // are currently accepted but unused — kept for upstream API compatibility.
  chatMessageFormatter?: MessageFormatter;
  chatMessageEncoder?: MessageEncoder;
  chatMessageDecoder?: MessageDecoder;
  /** @alpha */
  SettingsComponent?: React.ComponentType;
}
/**
* The `VideoConference` ready-made component is your drop-in solution for a classic video conferencing application.
* It provides functionality such as focusing on one participant, grid view with pagination to handle large numbers
* of participants, basic non-persistent chat, screen sharing, and more.
*
* @remarks
* The component is implemented with other LiveKit components like `FocusContextProvider`,
* `GridLayout`, `ControlBar`, `FocusLayoutContainer` and `FocusLayout`.
* You can use these components as a starting point for your own custom video conferencing application.
*
* @example
* ```tsx
* <LiveKitRoom>
* <VideoConference />
* <LiveKitRoom>
* ```
* @public
*/
export function VideoConference({
  chatMessageFormatter,
  chatMessageDecoder,
  chatMessageEncoder,
  SettingsComponent,
  ...props
}: VideoConferenceProps) {
  // Widget (chat/settings panel) visibility state, updated via layout context.
  const [widgetState, setWidgetState] = React.useState<WidgetState>({
    showChat: false,
    unreadMessages: 0,
    showSettings: false,
  });
  // Remembers which screen share we auto-pinned so we can auto-unpin it later.
  const lastAutoFocusedScreenShareTrack = React.useRef<TrackReferenceOrPlaceholder | null>(null);

  let tracks = useTracks(
    [
      { source: Track.Source.Camera, withPlaceholder: true },
      { source: Track.Source.ScreenShare, withPlaceholder: false },
    ],
    { updateOnlyOn: [RoomEvent.ActiveSpeakersChanged], onlySubscribed: false },
  );
  // Customization: hide translation-agent participants from the video layout
  // (their audio is handled separately by the custom RoomAudioRenderer).
  tracks = tracks.filter((track) => track.participant.kind !== ParticipantKind.AGENT)

  const widgetUpdate = (state: WidgetState) => {
    log.debug('updating widget state', state);
    setWidgetState(state);
  };

  const layoutContext = useCreateLayoutContext();

  const screenShareTracks = tracks
    .filter(isTrackReference)
    .filter((track) => track.publication.source === Track.Source.ScreenShare);

  // Pinned (focused) track, if any; everything else goes into the carousel.
  const focusTrack = usePinnedTracks(layoutContext)?.[0];
  const carouselTracks = tracks.filter((track) => !isEqualTrackRef(track, focusTrack));

  React.useEffect(() => {
    // If screen share tracks are published, and no pin is set explicitly, auto set the screen share.
    if (
      screenShareTracks.some((track) => track.publication.isSubscribed) &&
      lastAutoFocusedScreenShareTrack.current === null
    ) {
      log.debug('Auto set screen share focus:', { newScreenShareTrack: screenShareTracks[0] });
      layoutContext.pin.dispatch?.({ msg: 'set_pin', trackReference: screenShareTracks[0] });
      lastAutoFocusedScreenShareTrack.current = screenShareTracks[0];
    } else if (
      lastAutoFocusedScreenShareTrack.current &&
      !screenShareTracks.some(
        (track) =>
          track.publication.trackSid ===
          lastAutoFocusedScreenShareTrack.current?.publication?.trackSid,
      )
    ) {
      // The auto-pinned screen share ended — clear the pin we set.
      log.debug('Auto clearing screen share focus.');
      layoutContext.pin.dispatch?.({ msg: 'clear_pin' });
      lastAutoFocusedScreenShareTrack.current = null;
    }
    // If the focused item is a placeholder that has since become a real
    // track reference, re-pin the updated reference.
    if (focusTrack && !isTrackReference(focusTrack)) {
      const updatedFocusTrack = tracks.find(
        (tr) =>
          tr.participant.identity === focusTrack.participant.identity &&
          tr.source === focusTrack.source,
      );
      if (updatedFocusTrack !== focusTrack && isTrackReference(updatedFocusTrack)) {
        layoutContext.pin.dispatch?.({ msg: 'set_pin', trackReference: updatedFocusTrack });
      }
    }
  }, [
    // Dep is a fingerprint string so the effect fires on subscribe-state
    // changes without depending on array identity.
    screenShareTracks
      .map((ref) => `${ref.publication.trackSid}_${ref.publication.isSubscribed}`)
      .join(),
    focusTrack?.publication?.trackSid,
    tracks,
  ]);

  return (
    <div className="lk-video-conference" {...props}>
      {isWeb() && (
        <LayoutContextProvider
          value={layoutContext}
          onWidgetChange={widgetUpdate}
        >
          {/* Customization: language-aware audio renderer (local module, not the stock one). */}
          <RoomAudioRenderer />
          <div className="lk-video-conference-inner">
            {!focusTrack ? (
              <div className="lk-grid-layout-wrapper">
                <GridLayout tracks={tracks}>
                  <ParticipantTile />
                </GridLayout>
              </div>
            ) : (
              <div className="lk-focus-layout-wrapper">
                <FocusLayoutContainer>
                  <CarouselLayout tracks={carouselTracks}>
                    <ParticipantTile />
                  </CarouselLayout>
                  {focusTrack && <FocusLayout trackRef={focusTrack} />}
                </FocusLayoutContainer>
              </div>
            )}
            {/* Customization: live captions overlay; chat is disabled below. */}
            <TranscriptDisplay />
            <ControlBar controls={{ chat: false, settings: !!SettingsComponent }} />
          </div>
          {SettingsComponent && (
            <div
              className="lk-settings-menu-modal"
              style={{ display: widgetState.showSettings ? 'block' : 'none' }}
            >
              <SettingsComponent />
            </div>
          )}
        </LayoutContextProvider>
      )}
      <ConnectionStateToast />
    </div>
  );
}

View File

@ -23,7 +23,3 @@ export function randomString(length: number): string {
export function isLowPowerDevice() { export function isLowPowerDevice() {
return navigator.hardwareConcurrency < 6; return navigator.hardwareConcurrency < 6;
} }
export function isMeetStaging() {
return new URL(location.origin).host === 'meet.staging.livekit.io';
}

View File

@ -1,5 +1,15 @@
import { LocalAudioTrack, LocalVideoTrack, videoCodecs } from 'livekit-client'; import { LocalAudioTrack, LocalVideoTrack, videoCodecs } from 'livekit-client';
import { VideoCodec } from 'livekit-client'; import { VideoCodec } from 'livekit-client';
import { LocalUserChoices as LiveKitLocalUserChoices } from '@livekit/components-core';
// Extends the library's LocalUserChoices with the language the user picked
// in the pre-join UI; it is forwarded to the connection-details endpoint and
// ends up as the participant's `language` attribute.
export interface LocalUserChoices extends LiveKitLocalUserChoices {
  /**
   * The language code selected by the user.
   * @defaultValue 'en'
   */
  language?: string;
}
export interface SessionProps { export interface SessionProps {
roomName: string; roomName: string;

View File

@ -14,31 +14,33 @@
}, },
"dependencies": { "dependencies": {
"@datadog/browser-logs": "^5.23.3", "@datadog/browser-logs": "^5.23.3",
"@livekit/components-react": "2.9.19", "@livekit/components-core": "^0.12.9",
"@livekit/components-styles": "1.2.0", "@livekit/components-react": "2.9.13",
"@livekit/krisp-noise-filter": "0.4.1", "@livekit/components-styles": "1.1.6",
"@livekit/track-processors": "^0.7.0", "@livekit/krisp-noise-filter": "0.3.4",
"livekit-client": "2.17.2", "@livekit/protocol": "^1.39.3",
"livekit-server-sdk": "2.15.0", "@livekit/track-processors": "^0.5.4",
"next": "15.2.8", "livekit-client": "2.15.2",
"livekit-server-sdk": "2.13.1",
"next": "15.2.4",
"react": "18.3.1", "react": "18.3.1",
"react-dom": "18.3.1", "react-dom": "18.3.1",
"react-hot-toast": "^2.5.2", "react-hot-toast": "^2.5.2",
"tinykeys": "^3.0.0" "tinykeys": "^3.0.0"
}, },
"devDependencies": { "devDependencies": {
"@types/node": "24.10.13", "@types/node": "22.15.31",
"@types/react": "18.3.27", "@types/react": "18.3.23",
"@types/react-dom": "18.3.7", "@types/react-dom": "18.3.7",
"eslint": "9.39.1", "eslint": "9.29.0",
"eslint-config-next": "15.5.6", "eslint-config-next": "15.3.3",
"prettier": "3.7.3", "prettier": "3.5.3",
"source-map-loader": "^5.0.0", "source-map-loader": "^5.0.0",
"typescript": "5.9.3", "typescript": "5.8.3",
"vitest": "^3.2.4" "vitest": "^3.2.4"
}, },
"engines": { "engines": {
"node": ">=18" "node": ">=18"
}, },
"packageManager": "pnpm@10.18.2" "packageManager": "pnpm@10.9.0"
} }

1233
pnpm-lock.yaml generated

File diff suppressed because it is too large Load Diff

View File

@ -65,3 +65,15 @@ h2 a {
h2 a { h2 a {
text-decoration: none; text-decoration: none;
} }
.lk-form-control-wrapper {
margin-top: 10px;
width: 100%;
}
.lk-form-label {
display: block;
margin-bottom: 5px;
font-size: 0.9rem;
color: #666;
}