Compare commits

...

3 Commits

65 changed files with 10937 additions and 6791 deletions

View File

@ -12,17 +12,17 @@ build-docker.ps1
build-docker.bat
start-docker.bat
run-docker.ps1
src/
public/
# src/
# public/
.yarn/cache
.eslintcache
.github
.github_build
.build
NONPUBLIC/
package.json
package-lock.json
yarn.lock
# package.json
# package-lock.json
# yarn.lock
.yarnrc.yml
.env
.env.local

View File

@ -1,3 +1,3 @@
REACT_APP_CORE_URL=https://restreamer.nextream.sytes.net
REACT_APP_YTDLP_URL=http://192.168.1.20:8282
REACT_APP_YTDLP_URL=http://100.73.244.28:8080
REACT_APP_FB_SERVER_URL=http://localhost:3002

View File

@ -1,3 +1,10 @@
{
# Limitar protocolos a HTTP/1.1 y HTTP/2 (deshabilita HTTP/3) para soporte WebSocket
servers {
protocols h1 h2
}
}
:3000
encode zstd gzip
@ -5,7 +12,35 @@ encode zstd gzip
# ── Facebook OAuth2 microserver (Node.js en puerto 3002) ──────────────────────
handle /fb-server/* {
uri strip_prefix /fb-server
reverse_proxy localhost:3002
reverse_proxy 127.0.0.1:3002
}
# ── LiveKit token endpoint (Node.js en puerto 3002) ───────────────────────────
# POST /livekit/token genera AccessToken JWT firmado
# GET /livekit/config devuelve wsUrl público (sin secretos)
handle /livekit/* {
reverse_proxy 127.0.0.1:3002
}
# ── WebRTC relay WebSocket + status (Node.js en puerto 3002) ─────────────────
# 127.0.0.1 evita problema de resolución IPv6 en Alpine ("localhost" ::1)
# HTTP/1.1 necesario para WebSocket upgrade (Caddy requiere versión explícita)
handle /webrtc-relay/* {
reverse_proxy 127.0.0.1:3002 {
transport http {
versions 1.1
}
}
}
# ── WebRTC Room HTML (sala para el presentador) ───────────────────────────────
# Sirve la página estática sin fallback al index.html de la SPA
handle /webrtc-room/* {
root * /ui/build
file_server
}
handle /webrtc-room {
redir /webrtc-room/ 302
}
# ── yt-dlp stream extractor (servicio externo configurable via env) ───────────

View File

@ -10,8 +10,8 @@ RUN npm install --omit=dev --no-audit --prefer-offline
# ── Stage 2: Production image (Caddy + Node.js) ───────────────────────────────
FROM $CADDY_IMAGE
# Install Node.js to run the Facebook OAuth2 microserver
RUN apk add --no-cache nodejs
# Install Node.js and ffmpeg to run the Facebook OAuth2 microserver + relay
RUN apk add --no-cache nodejs ffmpeg
# Copy pre-built React app (built on host with: npm run build / yarn build)
COPY build /ui/build
@ -39,5 +39,12 @@ ENV CORE_ADDRESS=""
ENV YTDLP_URL=""
ENV FB_SERVER_URL=""
ENV YTDLP_HOST="192.168.1.20:8282"
ENV RTMP_HOST="127.0.0.1"
ENV RTMP_PORT="1935"
ENV RTMP_APP="live"
ENV FFMPEG_BIN="ffmpeg"
ENV LIVEKIT_API_KEY=""
ENV LIVEKIT_API_SECRET=""
ENV LIVEKIT_WS_URL=""
CMD ["/ui/docker-entrypoint.sh"]

57
Dockerfile.build Normal file
View File

@ -0,0 +1,57 @@
ARG CADDY_IMAGE=caddy:2.8.4-alpine
ARG NODE_IMAGE=node:21-alpine3.20
# ── Stage 1: Build React app ──────────────────────────────────────────────────
FROM $NODE_IMAGE AS builder
WORKDIR /app
# Install deps first (layer cache)
COPY package.json package-lock.json* yarn.lock* ./
RUN NODE_OPTIONS=--max-old-space-size=4096 npm install --legacy-peer-deps --prefer-offline 2>/dev/null || \
NODE_OPTIONS=--max-old-space-size=4096 npm install --legacy-peer-deps
# Copy source and build
COPY . .
RUN NODE_OPTIONS=--max-old-space-size=4096 npm run build
# ── Stage 2: Install server deps ──────────────────────────────────────────────
FROM $NODE_IMAGE AS server-deps
WORKDIR /srv
COPY server/package.json server/package-lock.json* ./
RUN npm install --omit=dev --no-audit --prefer-offline
# ── Stage 3: Production image (Caddy + Node.js) ───────────────────────────────
FROM $CADDY_IMAGE
# Install Node.js to run the Facebook OAuth2 microserver
RUN apk add --no-cache nodejs
# Copy built React app from builder
COPY --from=builder /app/build /ui/build
# Copy Caddy config
COPY Caddyfile /ui/Caddyfile
# Copy Node.js FB server + its deps
COPY server /ui/server
COPY --from=server-deps /srv/node_modules /ui/server/node_modules
# Copy entrypoint script
COPY docker-entrypoint.sh /ui/docker-entrypoint.sh
RUN chmod +x /ui/docker-entrypoint.sh
# Persistent volume for FB OAuth2 tokens (config.json)
VOLUME ["/data/fb"]
WORKDIR /ui
EXPOSE 3000
# Runtime environment variables (overridden at runtime via -e or docker-compose)
ENV CORE_ADDRESS=""
ENV YTDLP_URL=""
ENV FB_SERVER_URL=""
ENV YTDLP_HOST="192.168.1.20:8282"
CMD ["/ui/docker-entrypoint.sh"]

View File

@ -0,0 +1,20 @@
node_modules/
server/node_modules/
docker-build.log
yarn-build.log
build-docker.ps1
build-docker.bat
start-docker.bat
run-docker.ps1
.yarn/cache
.eslintcache
.github
.github_build
.build
NONPUBLIC/
.env
.env.local
.prettierignore
.prettierrc
.eslintignore

View File

@ -4,7 +4,7 @@ services:
# Luego construye la imagen con: docker build --tag restreamer-ui-v2:latest .
# O usa el script: build-docker.bat
image: restreamer-ui-v2:latest
container_name: restreamer-ui-test
container_name: restreamer-ui
restart: unless-stopped
ports:
- "3000:3000"
@ -17,8 +17,8 @@ services:
# ── yt-dlp / stream extractor ──────────────────────────────────────────
# Host:puerto del servicio extractor (usado por Caddy para reverse_proxy).
# Caddy expondrá el servicio en http://localhost:3000/yt-stream/
YTDLP_HOST: "192.168.1.20:8282"
YTDLP_HOST: "100.73.244.28:8080"
#YTDLP_HOST: "192.168.1.20:8282"
# YTDLP_URL: URL completa del servicio yt-dlp vista desde el NAVEGADOR.
# Dejar vacío → la UI usará /yt-stream/ (Caddy proxy, mismo origen = sin CORS).
YTDLP_URL: ""
@ -38,9 +38,25 @@ services:
# Clave de cifrado para tokens almacenados (cámbiala en producción)
FB_ENCRYPTION_SECRET: "restreamer-ui-fb-secret-key-32x!"
# ── WebRTC Relay → FFmpeg → RTMP ──────────────────────────────────────
# Host del Restreamer Core para el relay RTMP (en Docker = nombre del servicio)
# Si la UI corre junto al Core en la misma red Docker, usar el service name.
# Si corre independiente, apuntar a la IP/hostname del Core.
RTMP_HOST: "restreamer.nextream.sytes.net"
RTMP_PORT: "1935"
RTMP_APP: "live"
# ── LiveKit ────────────────────────────────────────────────────────────
# Credenciales del servidor LiveKit (self-hosted o LiveKit Cloud)
# SECURITY(review): las credenciales de abajo están en texto plano en el repositorio —
# rotarlas y moverlas a un archivo .env (no versionado) o a docker secrets.
LIVEKIT_API_KEY: "APIBTqTGxf9htMK"
LIVEKIT_API_SECRET: "0dOHWPffwneaPg7OYpe4PeAes21zLJfeYJB9cKzSTtXW"
LIVEKIT_WS_URL: "wss://livekit-server.nextream.sytes.net"
volumes:
# Persistencia de tokens OAuth2 (Facebook, YouTube, etc.)
- restreamer-ui-fb-data:/data/fb
# devices:
# - "/dev/video1:/dev/video1" # Descomentar si hay cámara USB disponible
volumes:
restreamer-ui-fb-data:

View File

@ -27,11 +27,18 @@ export YTDLP_HOST="${YTDLP_HOST:-192.168.1.20:8282}"
mkdir -p /data/fb
export FB_DATA_DIR="${FB_DATA_DIR:-/data/fb}"
# ── Start Facebook OAuth2 microserver in background ──────────────────────────
echo "[entrypoint] Starting Facebook OAuth2 server on :3002 ..."
# ── Start Facebook OAuth2 microserver + WebRTC relay in background ────────────
echo "[entrypoint] Starting FB OAuth2 + WebRTC relay server on :3002 ..."
FB_SERVER_PORT=3002 \
FB_DATA_DIR="$FB_DATA_DIR" \
FB_ENCRYPTION_SECRET="${FB_ENCRYPTION_SECRET:-restreamer-ui-fb-secret-key-32x!}" \
RTMP_HOST="${RTMP_HOST:-127.0.0.1}" \
RTMP_PORT="${RTMP_PORT:-1935}" \
RTMP_APP="${RTMP_APP:-live}" \
FFMPEG_BIN="${FFMPEG_BIN:-ffmpeg}" \
LIVEKIT_API_KEY="${LIVEKIT_API_KEY:-}" \
LIVEKIT_API_SECRET="${LIVEKIT_API_SECRET:-}" \
LIVEKIT_WS_URL="${LIVEKIT_WS_URL:-}" \
node /ui/server/index.js &
FB_PID=$!
echo "[entrypoint] FB server PID: $FB_PID"

View File

@ -10,7 +10,7 @@
"@babel/plugin-transform-react-jsx": "^7.25.2",
"@emotion/react": "^11.13.3",
"@emotion/styled": "^11.13.0",
"@fontsource/dosis": "^5.0.21",
"@fontsource/dosis": "^5.2.8",
"@fontsource/roboto": "^5.0.14",
"@fortawesome/fontawesome-svg-core": "^6.6.0",
"@fortawesome/free-brands-svg-icons": "^6.6.0",

10
public/force-fonts.css Normal file
View File

@ -0,0 +1,10 @@
html, body, #root, * {
font-family: "Roboto", "Dosis", "Helvetica", "Arial", sans-serif !important;
-webkit-font-smoothing: antialiased !important;
-moz-osx-font-smoothing: grayscale !important;
}
/* Ensure buttons and headings also use the same font */
button, input, select, textarea, h1, h2, h3, h4, h5, h6 {
font-family: "Roboto", "Dosis", "Helvetica", "Arial", sans-serif !important;
}

View File

@ -9,6 +9,11 @@
<link rel="apple-touch-icon" href="logo192.png" />
<link rel="manifest" href="manifest.json" />
<title>Restreamer</title>
<!-- Load fonts reliably -->
<link rel="preconnect" href="https://fonts.googleapis.com">
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
<link href="https://fonts.googleapis.com/css2?family=Dosis:wght@300;400;500;700&family=Roboto:wght@300;400;500;700&display=swap" rel="stylesheet">
<link href="/force-fonts.css" rel="stylesheet">
<script src="config.js"></script>
</head>
<body>

View File

@ -0,0 +1,551 @@
<!DOCTYPE html>
<html lang="es">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>WebRTC Room — Restreamer</title>
<script src="https://cdn.jsdelivr.net/npm/livekit-client@1.15.9/dist/livekit-client.umd.min.js"></script>
<style>
*, *::before, *::after { box-sizing: border-box; margin: 0; padding: 0; }
:root {
--bg: #0f0f14; --surface: #1a1a24; --border: #2a2a3a;
--accent: #4f8ef7; --red: #e74c3c; --green: #2ecc71; --orange: #f39c12;
--text: #e0e0ee; --muted: #6e6e8a; --radius: 10px;
}
body { background: var(--bg); color: var(--text); font-family: 'Segoe UI', system-ui, sans-serif;
min-height: 100vh; display: flex; flex-direction: column; align-items: center; padding: 24px 16px; }
h1 { font-size: 1.4rem; font-weight: 700; margin-bottom: 4px; }
.subtitle { color: var(--muted); font-size: 0.85rem; margin-bottom: 20px; }
.card { background: var(--surface); border: 1px solid var(--border); border-radius: var(--radius);
padding: 20px; width: 100%; max-width: 720px; margin-bottom: 14px; }
.card h2 { font-size: .95rem; font-weight: 600; margin-bottom: 12px; color: var(--accent); }
#status-bar { display: flex; align-items: center; gap: 10px; padding: 10px 16px;
border-radius: var(--radius); border: 1px solid var(--border); background: var(--surface);
width: 100%; max-width: 720px; margin-bottom: 14px; font-size: 0.85rem; flex-wrap: wrap; }
.dot { width: 10px; height: 10px; border-radius: 50%; background: var(--muted); flex-shrink: 0; transition: background .3s; }
.dot.connecting { background: var(--orange); animation: pulse 1s infinite; }
.dot.live { background: var(--green); animation: pulse .8s infinite; }
.dot.error { background: var(--red); }
@keyframes pulse { 0%,100%{opacity:1} 50%{opacity:.3} }
.badge { padding: 2px 10px; border-radius: 20px; font-size: .7rem; font-weight: 700; }
#badge-lk { display:none; background: var(--accent); color:#fff; }
#badge-rtmp { display:none; background: var(--red); color:#fff; }
#badge-live { display:none; background: var(--red); color:#fff; }
#timer { color: var(--muted); font-size: .8rem; margin-left: auto; }
video { width: 100%; border-radius: 8px; background: #000; max-height: 280px; object-fit: contain; display: block; }
.btn-row { display: flex; gap: 8px; flex-wrap: wrap; margin-top: 12px; }
button { padding: 8px 16px; border-radius: 8px; border: none; font-size: .82rem;
font-weight: 600; cursor: pointer; transition: opacity .15s; }
button:disabled { opacity: .4; cursor: default; }
.btn-sec { background: var(--surface); color: var(--text); border: 1px solid var(--border); }
.btn-go { background: var(--green); color: #fff; }
.btn-stop { background: var(--red); color: #fff; }
button:hover:not(:disabled) { opacity: .85; }
select { background: var(--bg); color: var(--text); border: 1px solid var(--border);
border-radius: 6px; padding: 7px 10px; font-size: .82rem; width: 100%; }
.row2 { display: grid; grid-template-columns: 1fr 1fr; gap: 10px; }
label { font-size: .76rem; color: var(--muted); display: block; margin-bottom: 4px; }
.channel-row { display: flex; align-items: center; gap: 8px; margin-top: 6px; }
.ch-dot { width: 8px; height: 8px; border-radius: 50%; background: var(--muted); flex-shrink: 0; }
.ch-dot.ok { background: var(--green); }
.ch-dot.err { background: var(--red); }
.ch-dot.run { background: var(--green); animation: pulse .8s infinite; }
.ch-dot.con { background: var(--orange); animation: pulse 1s infinite; }
#log { height: 150px; overflow-y: auto; background: #0a0a0f; border-radius: 6px;
padding: 10px; font-family: monospace; font-size: .74rem; line-height: 1.6; }
.line { color: var(--muted); }
.line.ok { color: var(--green); }
.line.err { color: var(--red); }
.line.warn { color: var(--orange); }
</style>
</head>
<body>
<h1>🎥 WebRTC Room</h1>
<p class="subtitle" id="room-label">Sala: —</p>
<div id="status-bar">
<div class="dot" id="dot"></div>
<span id="status-text">Esperando...</span>
<span class="badge" id="badge-live">● EN VIVO</span>
<span class="badge" id="badge-lk">LiveKit ✓</span>
<span class="badge" id="badge-rtmp">RTMP ✓</span>
<span id="timer"></span>
</div>
<div class="card">
<h2>📹 Vista previa local</h2>
<video id="preview" autoplay muted playsinline></video>
<div class="btn-row">
<button id="btn-screen" class="btn-sec" onclick="captureScreen()">🖥 Pantalla</button>
<button id="btn-camera" class="btn-sec" onclick="captureCamera()">📷 Cámara</button>
<button id="btn-mute" class="btn-sec" onclick="toggleMute()" disabled>🔇 Silenciar</button>
<button id="btn-stop-src" class="btn-sec" onclick="stopCapture()" disabled>⏹ Detener fuente</button>
</div>
</div>
<div class="card" id="settings-card">
<h2>⚙️ Ajustes</h2>
<div class="row2">
<div>
<label>FPS</label>
<select id="sel-fps">
<option value="15">15 fps</option>
<option value="24">24 fps</option>
<option value="30" selected>30 fps</option>
<option value="60">60 fps</option>
</select>
</div>
<div>
<label>Calidad de vídeo</label>
<select id="sel-quality">
<option value="500000">Baja (500 kbps)</option>
<option value="1500000" selected>Media (1.5 Mbps)</option>
<option value="3000000">Alta (3 Mbps)</option>
<option value="6000000">Ultra (6 Mbps)</option>
</select>
</div>
</div>
<div style="margin-top:14px">
<label style="margin-bottom:6px;display:block;font-size:.8rem">Canales de transmisión</label>
<div class="channel-row">
<div class="ch-dot" id="dot-lk"></div>
<span style="font-size:.78rem;color:var(--muted)">LiveKit — señalización WebRTC</span>
</div>
<div class="channel-row">
<div class="ch-dot" id="dot-rtmp"></div>
<span style="font-size:.78rem;color:var(--muted)">RTMP relay → Restreamer Core (preview + distribución)</span>
</div>
</div>
<div class="btn-row" style="margin-top:14px">
<button id="btn-go" class="btn-go" onclick="toggleStream()" disabled>🚀 Iniciar transmisión</button>
</div>
</div>
<div class="card">
<h2>📋 Registro</h2>
<div id="log"></div>
</div>
<script>
// ─── Parámetros ───────────────────────────────────────────────────────────────
const params = new URLSearchParams(location.search);
const ROOM_ID = params.get('room') || params.get('channel') || 'default';
const TOKEN_URL = '/livekit/token';
const WS_PROTO = location.protocol === 'https:' ? 'wss://' : 'ws://';
const RELAY_URL = WS_PROTO + location.host + '/webrtc-relay/' + encodeURIComponent(ROOM_ID);
document.getElementById('room-label').textContent = 'Sala: ' + ROOM_ID + ' • ' + location.hostname;
// ─── Estado ───────────────────────────────────────────────────────────────────
let localStream = null;
let room = null;
let relayWs = null;
let mediaRec = null;
let streaming = false;
let muted = false;
let startTime = 0;
let timerHandle = null;
let coreProcId = null; // id del proceso creado en el Core (si corresponde)
const LK = window.LivekitClient;
// ─── Helpers ──────────────────────────────────────────────────────────────────
// Append a timestamped line to the on-page log panel and keep the panel
// scrolled to the newest entry. `cls` selects a color class: '', 'ok',
// 'err' or 'warn' (see the .line CSS rules above).
function log(msg, cls = '') {
const panel = document.getElementById('log');
const line = document.createElement('div');
line.className = 'line ' + cls;
line.textContent = '[' + new Date().toLocaleTimeString() + '] ' + msg;
panel.appendChild(line);
panel.scrollTop = panel.scrollHeight;
}
// Update the status-bar text and the colored state dot.
// `st` is '', 'connecting', 'live' or 'error' (CSS classes on .dot).
function setStatus(txt, st = '') {
const textEl = document.getElementById('status-text');
const dotEl = document.getElementById('dot');
textEl.textContent = txt;
dotEl.className = 'dot ' + st;
}
// Set a channel indicator dot (#dot-lk / #dot-rtmp) to state '', 'ok',
// 'err', 'run' or 'con' (CSS classes on .ch-dot).
function setCh(id, st) {
const dot = document.getElementById(id);
dot.className = 'ch-dot ' + st;
}
// Show or hide one of the status badges by element id.
function badge(id, show) {
const el = document.getElementById(id);
el.style.display = show ? '' : 'none';
}
// Start the elapsed-time ticker: records the start instant and updates
// the #timer element once per second, formatted as HH:MM:SS.
function startTimer() {
startTime = Date.now();
timerHandle = setInterval(() => {
const total = Math.floor((Date.now() - startTime) / 1000);
const hours = Math.floor(total / 3600);
const mins = Math.floor((total % 3600) / 60);
const secs = total % 60;
document.getElementById('timer').textContent = pad(hours) + ':' + pad(mins) + ':' + pad(secs);
}, 1000);
}
// Stop the elapsed-time ticker and blank out the display.
function stopTimer() {
clearInterval(timerHandle);
document.getElementById('timer').textContent = '';
}
// Zero-pad a number to at least two digits (7 → "07", 42 → "42").
function pad(n) {
const s = String(n);
return s.length < 2 ? '0' + s : s;
}
// ─── Captura ──────────────────────────────────────────────────────────────────
// Capture the screen via getDisplayMedia at the FPS selected in the UI.
// Screen shares often carry no audio track; in that case fall back to the
// microphone (best effort — failure to get the mic is silently ignored).
async function captureScreen() {
try {
const fps = +document.getElementById('sel-fps').value;
const stream = await navigator.mediaDevices.getDisplayMedia({ video: { frameRate: fps }, audio: true });
if (stream.getAudioTracks().length === 0) {
try {
const mic = await navigator.mediaDevices.getUserMedia({ audio: true });
stream.addTrack(mic.getAudioTracks()[0]);
} catch (_) { /* continue without audio */ }
}
setLocalStream(stream, 'pantalla');
} catch (e) {
log('Error pantalla: ' + e.message, 'err');
}
}
// Capture the webcam (720p ideal) at the FPS selected in the UI,
// with microphone audio.
async function captureCamera() {
try {
const fps = +document.getElementById('sel-fps').value;
const constraints = {
video: { frameRate: fps, width: { ideal: 1280 }, height: { ideal: 720 } },
audio: true,
};
const stream = await navigator.mediaDevices.getUserMedia(constraints);
setLocalStream(stream, 'cámara');
} catch (e) {
log('Error cámara: ' + e.message, 'err');
}
}
// Install `stream` as the active local source: stop any previous capture,
// wire it to the preview <video>, and update button enable/disable state.
// `type` is 'pantalla' or 'cámara' and is also used in user-facing log text.
function setLocalStream(stream, type) {
// Release the previous source's tracks before swapping in the new one.
if (localStream) localStream.getTracks().forEach(t => t.stop());
localStream = stream;
document.getElementById('preview').srcObject = stream;
stream.getVideoTracks().forEach(t => {
// Fired when the user ends the share from the browser chrome — tear down
// the outgoing stream so we don't keep broadcasting a dead source.
t.onended = () => { log('Captura finalizada', 'warn'); stopStream(); };
});
document.getElementById('btn-stop-src').disabled = false;
document.getElementById('btn-mute').disabled = false;
document.getElementById('btn-go').disabled = false;
// Disable the button matching the active source so it can't be re-selected.
document.getElementById('btn-screen').disabled = (type === 'pantalla');
document.getElementById('btn-camera').disabled = (type === 'cámara');
log('Captura ' + type + ' (' + stream.getVideoTracks().length + 'V+' + stream.getAudioTracks().length + 'A) ✓', 'ok');
setStatus('Fuente lista — pulsa Iniciar transmisión');
}
// Toggle audio mute on both the raw local tracks and (if connected) the
// LiveKit-published audio, then update the button label.
function toggleMute() {
muted = !muted;
// Disable the raw tracks — this also mutes the MediaRecorder/RTMP path.
if (localStream) localStream.getAudioTracks().forEach(t => { t.enabled = !muted; });
// NOTE(review): in livekit-client v1, localParticipant.audioTracks is a Map of
// TrackPublications; confirm mute()/unmute() exist on the publication object.
if (room) room.localParticipant.audioTracks.forEach(p => { muted ? p.mute() : p.unmute(); });
document.getElementById('btn-mute').textContent = muted ? '🔊 Activar audio' : '🔇 Silenciar';
log(muted ? 'Audio silenciado' : 'Audio activado');
}
// Tear down the local capture entirely: stop streaming first, release every
// track, clear the preview, and restore the source-selection buttons.
function stopCapture() {
stopStream();
if (localStream) {
localStream.getTracks().forEach((t) => t.stop());
localStream = null;
}
document.getElementById('preview').srcObject = null;
document.getElementById('btn-stop-src').disabled = true;
document.getElementById('btn-mute').disabled = true;
document.getElementById('btn-go').disabled = true;
document.getElementById('btn-screen').disabled = false;
document.getElementById('btn-camera').disabled = false;
setStatus('Esperando...');
log('Captura detenida');
}
// ─── Canal 1: LiveKit ─────────────────────────────────────────────────────────
// Channel 1: connect to LiveKit and publish the local video/audio tracks.
// Steps: (1) fetch a signed AccessToken + wsUrl from the Node backend,
// (2) connect the Room, (3) publish tracks. Returns true when connected
// (publish failures are logged as warnings but do not fail the channel);
// returns false if the token fetch or the room connection fails.
async function startLiveKit(videoBitrate, fps) {
setCh('dot-lk', 'con');
log('LiveKit: solicitando token...');
let token, wsUrl;
// Step 1 — token from /livekit/token (publish-only grant).
try {
const r = await fetch(TOKEN_URL, {
method: 'POST', headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ roomName: ROOM_ID, participantName: 'presenter-' + Date.now(),
canPublish: true, canSubscribe: false }),
});
if (!r.ok) throw new Error('HTTP ' + r.status);
const d = await r.json();
token = d.token; wsUrl = d.wsUrl;
log('LiveKit: token OK (' + wsUrl + ')', 'ok');
} catch(e) { log('LiveKit: token error — ' + e.message, 'err'); setCh('dot-lk','err'); return false; }
// Step 2 — connect. This page only publishes, so adaptiveStream/dynacast
// (subscriber-side optimizations) are off.
room = new LK.Room({ adaptiveStream: false, dynacast: false, stopLocalTrackOnUnpublish: true });
room.on(LK.RoomEvent.Disconnected, () => {
log('LiveKit: desconectado', 'warn'); setCh('dot-lk','err'); badge('badge-lk', false);
});
try {
await room.connect(wsUrl, token, { autoSubscribe: false });
log('LiveKit: sala "' + room.name + '" conectada ✓', 'ok');
} catch(e) { log('LiveKit: conexión fallida — ' + e.message, 'err'); setCh('dot-lk','err'); room = null; return false; }
// Step 3 — publish the first video and audio track of the local stream.
try {
const vt = localStream.getVideoTracks()[0];
const at = localStream.getAudioTracks()[0];
if (vt) {
const lkVid = new LK.LocalVideoTrack(vt, undefined, true);
await room.localParticipant.publishTrack(lkVid, {
videoEncoding: { maxBitrate: videoBitrate, maxFramerate: fps },
simulcast: false, source: LK.Track.Source.ScreenShare,
});
log('LiveKit: vídeo publicado ✓', 'ok');
}
if (at) {
const lkAud = new LK.LocalAudioTrack(at, undefined, true);
await room.localParticipant.publishTrack(lkAud, { source: LK.Track.Source.Microphone });
log('LiveKit: audio publicado ✓', 'ok');
}
} catch(e) { log('LiveKit: publish error — ' + e.message, 'warn'); }
setCh('dot-lk', 'run'); badge('badge-lk', true);
return true;
}
// ─── Canal 2: WebSocket Relay → FFmpeg → RTMP → Restreamer Core ──────────────
// Return the first WebM MIME type this browser's MediaRecorder supports,
// preferring vp8, then vp9, then h264; falls back to plain 'video/webm'.
function getSupportedMimeType() {
const candidates = [
'video/webm;codecs=vp8,opus',
'video/webm;codecs=vp9,opus',
'video/webm;codecs=h264,opus',
'video/webm',
];
const found = candidates.find((t) => MediaRecorder.isTypeSupported(t));
return found || 'video/webm';
}
// Channel 2: open the WebSocket to the Node relay, which pipes incoming WebM
// chunks through FFmpeg into RTMP on the Restreamer Core.
// Resolves true once the relay answers {type:'ready'} (MediaRecorder is then
// started), false on WS error or after an 8-second timeout. The promise is
// guarded by `done()` so it resolves exactly once whichever event fires first.
function startRtmpRelay(videoBitrate) {
return new Promise((resolve) => {
setCh('dot-rtmp', 'con');
log('RTMP relay: conectando → ' + RELAY_URL);
let resolved = false;
const done = (ok) => { if (!resolved) { resolved = true; resolve(ok); } };
try { relayWs = new WebSocket(RELAY_URL); } catch(e) {
log('RTMP relay: WS create error — ' + e.message, 'err');
setCh('dot-rtmp','err'); done(false); return;
}
// Media chunks are sent as binary; arraybuffer avoids Blob conversion.
relayWs.binaryType = 'arraybuffer';
relayWs.onopen = () => {
log('RTMP relay: WS conectado ✓', 'ok');
// First frame is a JSON config telling the server how to run FFmpeg.
relayWs.send(JSON.stringify({
type: 'config', room: ROOM_ID,
videoBitrate, audioBitrate: 128000,
mimeType: getSupportedMimeType(),
}));
};
relayWs.onmessage = (ev) => {
try {
const msg = JSON.parse(ev.data);
if (msg.type === 'ready') {
// Server-side FFmpeg pipeline is up — start pushing chunks.
log('RTMP relay: FFmpeg → RTMP listo ✓', 'ok');
setCh('dot-rtmp','run'); badge('badge-rtmp', true);
startMediaRecorder(videoBitrate);
done(true);
// Notify the embedding page (UI iframe host), best effort.
try { window.parent.postMessage({ type: 'webrtc-relay', event: 'relay-ready', room: ROOM_ID }, '*'); } catch(_){}
} else if (msg.type === 'info') {
log('RTMP relay: ' + msg.message);
try { window.parent.postMessage({ type: 'webrtc-relay', event: 'relay-info', message: msg.message }, '*'); } catch(_){}
}
} catch(_){}
};
relayWs.onerror = () => { log('RTMP relay: error WS', 'err'); setCh('dot-rtmp','err'); done(false); };
relayWs.onclose = () => {
if (streaming) log('RTMP relay: desconectado inesperadamente', 'warn');
setCh('dot-rtmp','err'); badge('badge-rtmp', false);
stopMediaRecorder();
};
// Give the relay 8 s to report 'ready' before declaring the channel failed.
setTimeout(() => { if (!resolved) { log('RTMP relay: timeout (8s)', 'warn'); setCh('dot-rtmp','err'); done(false); } }, 8000);
});
}
// Begin chunked recording of the local stream and forward each ~500 ms WebM
// chunk over the relay WebSocket. No-op unless a source is active and the
// relay WS is open.
function startMediaRecorder(videoBitrate) {
const wsOpen = relayWs && relayWs.readyState === WebSocket.OPEN;
if (!localStream || !wsOpen) return;
const mimeType = getSupportedMimeType();
try {
mediaRec = new MediaRecorder(localStream, { mimeType, videoBitsPerSecond: videoBitrate });
mediaRec.ondataavailable = (e) => {
const hasData = e.data && e.data.size > 0;
if (hasData && relayWs && relayWs.readyState === WebSocket.OPEN) {
relayWs.send(e.data);
}
};
mediaRec.onerror = (e) => log('MediaRecorder: ' + e.error, 'err');
mediaRec.start(500);
log('MediaRecorder activo (' + mimeType + ', 500ms chunks) ✓', 'ok');
} catch (e) {
log('MediaRecorder error: ' + e.message, 'err');
}
}
// Stop the active MediaRecorder (if any) and drop the reference.
function stopMediaRecorder() {
if (mediaRec && mediaRec.state !== 'inactive') {
try { mediaRec.stop(); } catch (_) { /* already stopping */ }
}
mediaRec = null;
}
// --- Helpers: crear/registrar relay y crear proceso en Core ---
/**
 * Register the relay for this room on the Node backend (POST /livekit/relay/start).
 * On success returns the server's JSON payload (observed fields used by callers:
 * rtmpUrl, streamName); on HTTP error or network failure logs the problem,
 * notifies the parent window, and returns null.
 */
async function registerRelayOnServer(roomName, preferredStreamName) {
try {
const resp = await fetch('/livekit/relay/start', {
method: 'POST', headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ roomName: roomName, streamName: preferredStreamName })
});
if (!resp.ok) {
const txt = await resp.text();
log('Relay register failed: HTTP ' + resp.status + ' ' + txt, 'err');
return null;
}
const data = await resp.json();
log('Relay registrado: ' + (data.rtmpUrl || 'unknown'), 'ok');
// notify parent
try { window.parent.postMessage({ type: 'webrtc-relay', event: 'relay-registered', data }, '*'); } catch(_){}
return data; // { rtmpUrl, streamName, roomName }
} catch (e) {
log('Relay register error: ' + e.message, 'err');
try { window.parent.postMessage({ type: 'webrtc-relay', event: 'relay-register-error', error: e.message }, '*'); } catch(_){}
return null;
}
}
/**
 * Create an FFmpeg egress process on the Restreamer Core (POST /v3/process)
 * that reads the relay's RTMP stream and writes an HLS playlist into memfs.
 * Records the created process id in the module-level `coreProcId` so
 * stopStream() can clean it up. Returns { id, created } or null on failure.
 * `options` may override the resource limits (cpu_usage, memory_mbytes,
 * waitfor_seconds).
 * NOTE(review): the {rtmp,...}/{memfs} address placeholders assume the Core's
 * address-template syntax — confirm against the Core API docs.
 */
async function createCoreProcessForRelay(roomName, streamName, options = {}) {
// Deterministic id so repeated starts for the same room reuse/replace it.
const procId = `webrtc-relay:egress:${roomName}`;
const inputAddress = `{rtmp,name=${streamName}.stream}`;
const outputAddress = `{memfs}/${roomName}.m3u8`;
const config = {
type: 'ffmpeg',
id: procId,
reference: roomName,
// '-re' paces the input at native frame rate.
input: [ { id: 'input_0', address: inputAddress, options: ['-re'] } ],
output: [ { id: 'output_0', address: outputAddress, options: [] } ],
options: ['-loglevel','level+info','-err_detect','ignore_err'],
autostart: true,
reconnect: true,
reconnect_delay_seconds: 3,
stale_timeout_seconds: 10,
limits: {
cpu_usage: options.cpu_usage || 80,
memory_mbytes: options.memory_mbytes || 512,
waitfor_seconds: options.waitfor_seconds || 10
}
};
try {
const resp = await fetch('/v3/process', {
method: 'POST', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify(config)
});
if (!resp.ok) {
const txt = await resp.text();
log('Core process create failed: HTTP ' + resp.status + ' ' + txt, 'err');
try { window.parent.postMessage({ type: 'webrtc-relay', event: 'core-process-error', status: resp.status, text: txt }, '*'); } catch(_){}
return null;
}
const created = await resp.json();
coreProcId = procId;
log('Proceso Core creado: ' + procId, 'ok');
try { window.parent.postMessage({ type: 'webrtc-relay', event: 'core-process-created', procId, created }, '*'); } catch(_){}
return { id: procId, created };
} catch (e) {
log('Create process error: ' + e.message, 'err');
try { window.parent.postMessage({ type: 'webrtc-relay', event: 'core-process-error', error: e.message }, '*'); } catch(_){}
return null;
}
}
// Stop and then delete the Core process we created. Both requests are
// best-effort: individual network failures are swallowed so cleanup never
// blocks the rest of the shutdown sequence.
async function stopCoreProcess(procId) {
if (!procId) return;
const base = `/v3/process/${encodeURIComponent(procId)}`;
try {
await fetch(`${base}/command`, {
method: 'PUT', headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ command: 'stop' })
}).catch(()=>{});
await fetch(base, { method: 'DELETE' }).catch(()=>{});
log('Proceso Core detenido/eliminado: ' + procId, 'ok');
try { window.parent.postMessage({ type: 'webrtc-relay', event: 'core-process-stopped', procId }, '*'); } catch(_){}
} catch (e) {
log('Error stopping core process: ' + e.message, 'err');
}
}
// Register the relay on the Node server, then create the matching egress
// process on the Core. Returns null if registration failed; otherwise
// { rtmpUrl, streamName, procId } (procId is null if process creation failed).
async function prepareRelayAndProcess(roomName) {
const registration = await registerRelayOnServer(roomName, roomName);
if (!registration) return null;
const streamName = registration.streamName || roomName;
const proc = await createCoreProcessForRelay(roomName, streamName, { memory_mbytes: 512, cpu_usage: 80 });
return {
rtmpUrl: registration.rtmpUrl,
streamName,
procId: proc ? proc.id : null,
};
}
// --- End helpers ---
// NOTE: the postMessage notifications described in earlier revisions are
// already integrated above — startRtmpRelay's onmessage handler posts
// 'relay-ready' / 'relay-info' events to the parent window.
// --- Integración en startStream: preparar relay + crear proceso antes de abrir WS ---
// Start the broadcast: register the relay + Core process, then bring up both
// channels (LiveKit and the RTMP relay) in parallel. Succeeds if at least one
// channel comes up; updates all status UI and notifies the parent window.
async function startStream() {
if (!localStream) { log('No hay fuente activa', 'err'); return; }
const videoBitrate = +document.getElementById('sel-quality').value;
const fps = +document.getElementById('sel-fps').value;
document.getElementById('btn-go').disabled = true;
setStatus('Iniciando canales...', 'connecting');
// Prepare the relay registration and the Core egress process first.
// Failure here is non-fatal: the channels below are still attempted.
const preparation = await prepareRelayAndProcess(ROOM_ID);
if (!preparation) {
log('Advertencia: preparación del relay/proceso falló; se intentará continuar', 'warn');
} else {
log('Preparación relay/process OK: ' + (preparation.rtmpUrl || ''), 'ok');
}
// Start LiveKit and the RTMP relay in parallel; startRtmpRelay's 'ready'
// message is what kicks off the MediaRecorder.
const [lkOk, rtmpOk] = await Promise.all([
startLiveKit(videoBitrate, fps),
startRtmpRelay(videoBitrate),
]);
if (!lkOk && !rtmpOk) {
log('⚠️ Ningún canal disponible — verifica la configuración', 'err');
setStatus('Error de conexión', 'error');
document.getElementById('btn-go').disabled = false;
return;
}
streaming = true;
startTimer();
// Flip the start button into a stop button and lock the settings panel.
document.getElementById('btn-go').textContent = '⏹ Detener';
document.getElementById('btn-go').className = 'btn-stop';
document.getElementById('btn-go').disabled = false;
document.getElementById('settings-card').style.opacity = '.6';
document.getElementById('settings-card').style.pointerEvents= 'none';
badge('badge-live', true);
const ch = [lkOk && 'LiveKit', rtmpOk && 'RTMP→Core'].filter(Boolean).join(' + ');
setStatus('🔴 EN VIVO — ' + ch, 'live');
log('🚀 Transmisión activa: ' + ch, 'ok');
try { window.parent.postMessage({ type: 'webrtc-relay', event: 'stream-started', room: ROOM_ID, channels: { lk: lkOk, rtmp: rtmpOk } }, '*'); } catch(_){}
if (!rtmpOk) log('⚠️ Sin RTMP relay — el Core NO recibirá la señal. Revisa el servidor Node.', 'warn');
if (!lkOk) log('⚠️ Sin LiveKit — solo relay RTMP activo.', 'warn');
}
// Modificar stopStream para limpiar proc y notificar parent
// Stop the broadcast: tear down recorder, LiveKit room and relay WebSocket,
// ask the server to stop the relay, delete the Core process we created, and
// reset all status UI. Safe to call when nothing is streaming.
async function stopStream() {
streaming = false;
stopTimer();
stopMediaRecorder();
if (room) { try { await room.disconnect(); } catch(_){} room = null; }
if (relayWs) { try { relayWs.close(); } catch(_){} relayWs = null; }
// Stop relay on server (best effort).
try {
await fetch('/livekit/relay/stop', { method: 'POST', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ roomName: ROOM_ID }) });
} catch(_){}
// Stop and delete core process if we created one.
if (coreProcId) {
await stopCoreProcess(coreProcId);
coreProcId = null;
}
// Reset channel dots, badges and controls to the idle state.
setCh('dot-lk',''); setCh('dot-rtmp','');
badge('badge-lk',false); badge('badge-rtmp',false); badge('badge-live',false);
document.getElementById('btn-go').textContent = '🚀 Iniciar transmisión';
document.getElementById('btn-go').className = 'btn-go';
// Re-enable the start button only if a capture source is still active.
document.getElementById('btn-go').disabled = !localStream;
document.getElementById('settings-card').style.opacity = '';
document.getElementById('settings-card').style.pointerEvents= '';
setStatus('Transmisión detenida'); log('Transmisión detenida');
try { window.parent.postMessage({ type: 'webrtc-relay', event: 'stream-stopped', room: ROOM_ID }, '*'); } catch(_){}
}
// ─── Init ────────────────────────────────────────────────────────────────────
log('WebRTC Room inicializado');
log('Sala: ' + ROOM_ID);
log('LiveKit URL: ' + TOKEN_URL);
log('RTMP relay: ' + RELAY_URL);
if (!LK || !LK.Room) log('⚠️ LiveKit SDK no disponible', 'warn');
else log('LiveKit SDK v1.x cargado ✓', 'ok');
window.addEventListener('beforeunload', () => {
if (room) try { room.disconnect(); } catch(_){}
if (relayWs) try { relayWs.close(); } catch(_){}
});
</script>
</body>
</html>

View File

@ -7,6 +7,10 @@ const fs = require('fs');
const https = require('https');
const http = require('http');
const crypto = require('crypto');
const { spawn } = require('child_process');
const WebSocket = require('ws');
const { WebSocketServer } = require('ws');
const { AccessToken, RoomServiceClient } = require('livekit-server-sdk');
const PORT = parseInt(process.env.FB_SERVER_PORT || '3002', 10);
const DATA_DIR = process.env.FB_DATA_DIR
@ -16,6 +20,18 @@ const CFG_PATH = path.join(DATA_DIR, 'config.json');
const ENCRYPTION_SECRET = process.env.FB_ENCRYPTION_SECRET || 'restreamer-ui-fb-secret-key-32x!';
// ── LiveKit config ─────────────────────────────────────────────────────────────
const LK_API_KEY = process.env.LIVEKIT_API_KEY || '';
const LK_API_SECRET = process.env.LIVEKIT_API_SECRET || '';
const LK_WS_URL = process.env.LIVEKIT_WS_URL || '';
// HTTP URL para RoomServiceClient (wss:// → https://)
const LK_HTTP_URL = LK_WS_URL.replace(/^wss:\/\//, 'https://').replace(/^ws:\/\//, 'http://');
// ── RTMP relay config (relay → Restreamer Core) ───────────────────────────────
const RTMP_HOST = process.env.RTMP_HOST || '127.0.0.1';
const RTMP_PORT = process.env.RTMP_PORT || '1935';
const RTMP_APP = process.env.RTMP_APP || 'live';
// ─────────────────────────────────────────────────────────────────────────────
// Schema unificado de config.json
// ─────────────────────────────────────────────────────────────────────────────
@ -328,6 +344,132 @@ app.get('/health', (_, res) => {
res.json({ ok: true, config: CFG_PATH, port: PORT, ts: new Date().toISOString() });
});
// ═════════════════════════════════════════════════════════════════════════════
// LIVEKIT
// ═════════════════════════════════════════════════════════════════════════════
/**
 * GET /livekit/config
 * Returns the public websocket URL of the LiveKit server (no secrets).
 * Responds 503 when LIVEKIT_WS_URL is not configured.
 */
app.get('/livekit/config', (_, res) => {
	if (LK_WS_URL) {
		res.json({ wsUrl: LK_WS_URL });
	} else {
		res.status(503).json({ error: 'LiveKit not configured' });
	}
});
/**
 * POST /livekit/token
 * Body: { roomName, participantName?, canPublish?, canSubscribe? }
 * Issues a signed LiveKit AccessToken (JWT) so a browser can join the room.
 * Responds 503 without API credentials, 400 without a roomName.
 */
app.post('/livekit/token', async (req, res) => {
	if (!LK_API_KEY || !LK_API_SECRET) {
		return res.status(503).json({ error: 'LiveKit API credentials not configured' });
	}

	const body = req.body || {};
	const roomName = body.roomName;
	if (!roomName) return res.status(400).json({ error: 'roomName is required' });

	// Defaults mirror destructuring defaults: only an undefined value falls back.
	const canPublish = body.canPublish === undefined ? true : body.canPublish;
	const canSubscribe = body.canSubscribe === undefined ? false : body.canSubscribe;

	// Fall back to a unique identity when the client does not provide one.
	const identity = body.participantName || ('presenter-' + Date.now());

	try {
		const accessToken = new AccessToken(LK_API_KEY, LK_API_SECRET, { identity, ttl: '4h' });
		accessToken.addGrant({ roomJoin: true, room: roomName, canPublish, canSubscribe, canPublishData: true });
		const token = await accessToken.toJwt();
		res.json({ token, identity, wsUrl: LK_WS_URL });
	} catch (err) {
		console.error('[livekit] token error:', err.message);
		res.status(500).json({ error: 'Failed to generate token' });
	}
});
// ── Relay sessions: roomName → { ffmpeg, rtmpUrl, streamName } ────────────────
const lkRelaySessions = new Map();
/**
 * POST /livekit/relay/start
 * Body: { roomName: string, streamName?: string }
 *
 * Registers a relay intent for a LiveKit room and returns the RTMP push
 * target on the Restreamer Core:
 *   rtmp://RTMP_HOST:RTMP_PORT/RTMP_APP/<streamName>.stream
 *
 * NOTE: despite the name, no FFmpeg process is spawned here — the session is
 * stored with ffmpeg: null. The actual media bridge is performed by the
 * browser through the WebSocket relay below (see wss.on('connection')).
 *
 * The Core listens for the input as {rtmp,name=<channelid>.stream}, which is
 * equivalent to receiving an RTMP push on
 * rtmp://localhost:1935/live/<channelid>.stream
 */
app.post('/livekit/relay/start', async (req, res) => {
	if (!LK_API_KEY || !LK_API_SECRET) {
		return res.status(503).json({ error: 'LiveKit not configured' });
	}
	const { roomName, streamName } = req.body || {};
	if (!roomName) return res.status(400).json({ error: 'roomName is required' });
	// Stop any relay already registered for this room before re-registering.
	if (lkRelaySessions.has(roomName)) {
		const old = lkRelaySessions.get(roomName);
		// old.ffmpeg may be null (sessions below are stored with ffmpeg: null);
		// the try/catch also absorbs the resulting TypeError in that case.
		try { old.ffmpeg.kill('SIGTERM'); } catch(_) {}
		lkRelaySessions.delete(roomName);
		await new Promise(r => setTimeout(r, 500)); // grace period for the old process
	}
	// Stream name for RTMP push to Core: <channelid>.stream (suffix normalized).
	const sName = (streamName || roomName).replace(/\.stream$/, '') + '.stream';
	const rtmpUrl = `rtmp://${RTMP_HOST}:${RTMP_PORT}/${RTMP_APP}/${sName}`;
	// Verify the LiveKit room exists via RoomServiceClient. Best-effort: a
	// lookup error only logs a warning; an empty result returns 404.
	try {
		const rsc = new RoomServiceClient(LK_HTTP_URL, LK_API_KEY, LK_API_SECRET);
		const rooms = await rsc.listRooms([roomName]);
		if (!rooms || rooms.length === 0) {
			return res.status(404).json({ error: `LiveKit room "${roomName}" not found or empty` });
		}
	} catch (err) {
		console.warn('[livekit-relay] RoomService check failed:', err.message, '(continuing anyway)');
	}
	// LiveKit exposes no direct RTMP pull, so this endpoint only confirms the
	// relay intent and returns the push URL; the browser performs the actual
	// WebRTC → RTMP bridge (via MediaRecorder → WebSocket → FFmpeg below).
	console.log(`[livekit-relay] ▶ Relay registered: room="${roomName}" → ${rtmpUrl}`);
	lkRelaySessions.set(roomName, { rtmpUrl, streamName: sName, ffmpeg: null });
	res.json({ ok: true, rtmpUrl, streamName: sName, roomName });
});
/**
 * POST /livekit/relay/stop
 * Body: { roomName: string }
 * Tears down the relay session registered for the given room, if any.
 */
app.post('/livekit/relay/stop', (req, res) => {
	const roomName = (req.body || {}).roomName;
	if (!roomName) {
		return res.status(400).json({ error: 'roomName is required' });
	}

	const sess = lkRelaySessions.get(roomName);
	if (!sess) {
		return res.json({ ok: true, message: 'No active relay' });
	}

	// Terminate the bridge process when one was actually spawned.
	if (sess.ffmpeg) {
		try {
			sess.ffmpeg.kill('SIGTERM');
		} catch (_) {}
	}

	lkRelaySessions.delete(roomName);
	console.log(`[livekit-relay] ⏹ Relay stopped: room="${roomName}"`);
	res.json({ ok: true });
});
/**
 * GET /livekit/relay/status
 * Lists the currently registered relay sessions (room, RTMP target, stream).
 */
app.get('/livekit/relay/status', (_, res) => {
	const active = Array.from(lkRelaySessions, ([roomName, s]) => ({
		roomName,
		rtmpUrl: s.rtmpUrl,
		streamName: s.streamName,
	}));
	res.json({ ok: true, active });
});
// ═════════════════════════════════════════════════════════════════════════════
// FACEBOOK
// ═════════════════════════════════════════════════════════════════════════════
@ -795,12 +937,223 @@ app.post('/yt/exchange', async (req, res) => {
}
});
// ═════════════════════════════════════════════════════════════════════════════
// WEBRTC RELAY (WebM MediaRecorder → FFmpeg → RTMP Core)
// ═════════════════════════════════════════════════════════════════════════════
/**
* Cada conexión WebSocket abre un proceso FFmpeg que lee WebM desde stdin
* y lo empuja como RTMP al Core de Restreamer en localhost.
*
* WS URL: ws://<host>:<port>/webrtc-relay/<roomId>
*
* El roomId corresponde al channelid de Restreamer (p.ej. "external" o
* el nombre que se configure en el source).
*
* El cliente WebRTC (browser) envía:
* 1. Un JSON de config { type:'config', room, videoBitrate, audioBitrate, mimeType }
* 2. Chunks binarios fragmentos WebM del MediaRecorder
*/
// FFmpeg binary used for the WebM → RTMP bridge (override with FFMPEG_BIN env).
const FFMPEG_BIN = process.env.FFMPEG_BIN || 'ffmpeg';

// Active relay sessions for the status endpoint: roomId → { ws, ffmpeg, startedAt }
const relaySessions = new Map();

/**
 * Builds the Restreamer-Core-compatible RTMP push URL for a room:
 * rtmp://<host>:<port>/<app>/<roomId>.stream
 */
function buildRtmpUrl(roomId) {
	const target = [RTMP_APP, roomId + '.stream'].join('/');
	return 'rtmp://' + RTMP_HOST + ':' + RTMP_PORT + '/' + target;
}
/**
 * Spawns an FFmpeg process that reads a WebM stream from stdin, transcodes
 * it to H.264 + AAC and pushes the result to the Restreamer Core via RTMP.
 *
 * @param {string} roomId         Restreamer channel id (selects the RTMP target).
 * @param {number} [videoBitrate] Video bitrate in bit/s; defaults to 1500000.
 * @param {number} [audioBitrate] Audio bitrate in bit/s; defaults to 128000.
 * @returns {import('child_process').ChildProcess} the spawned FFmpeg process
 */
function startFfmpegRelay(roomId, videoBitrate, audioBitrate) {
	const rtmpUrl = buildRtmpUrl(roomId);
	// bit/s → "NNNk" strings in the form FFmpeg expects.
	const vbr = Math.floor((videoBitrate || 1500000) / 1000) + 'k';
	const abr = Math.floor((audioBitrate || 128000) / 1000) + 'k';
	console.log(`[webrtc-relay] 🎬 Starting FFmpeg relay for room="${roomId}" → ${rtmpUrl}`);
	// FFmpeg reads WebM from stdin, re-encodes to H.264+AAC, pushes RTMP
	const args = [
		'-loglevel', 'warning',
		// NOTE(review): '-re' throttles input reads to native frame rate; for a
		// live stdin pipe this is usually unnecessary — confirm it is wanted.
		'-re',
		'-fflags', '+nobuffer+genpts',
		'-analyzeduration', '1000000',
		'-probesize', '512000',
		'-i', 'pipe:0', // stdin = WebM stream
		// Video: transcode to H.264 (most compatible with RTMP/HLS)
		'-c:v', 'libx264',
		'-preset', 'ultrafast',
		'-tune', 'zerolatency',
		'-b:v', vbr,
		'-maxrate', vbr,
		// parseInt stops at the 'k' suffix, so this is 2× the numeric bitrate.
		'-bufsize', String(parseInt(vbr) * 2) + 'k',
		'-g', '60', // GOP = 2s @ 30fps
		'-keyint_min', '60',
		'-sc_threshold', '0',
		'-pix_fmt', 'yuv420p',
		// Audio: transcode to AAC
		'-c:a', 'aac',
		'-b:a', abr,
		'-ar', '44100',
		'-ac', '2',
		// Output: RTMP
		'-f', 'flv',
		rtmpUrl,
	];
	const ffmpeg = spawn(FFMPEG_BIN, args, { stdio: ['pipe', 'pipe', 'pipe'] });
	// Mirror FFmpeg's stderr into the server log, prefixed per room.
	ffmpeg.stderr.on('data', (data) => {
		const msg = data.toString().trim();
		if (msg) console.log(`[ffmpeg/${roomId}] ${msg}`);
	});
	ffmpeg.on('error', (err) => {
		console.error(`[webrtc-relay] FFmpeg spawn error (room=${roomId}): ${err.message}`);
		if (err.code === 'ENOENT') {
			console.error(`[webrtc-relay] ⚠️ ffmpeg not found. Set FFMPEG_BIN env or install ffmpeg.`);
		}
	});
	ffmpeg.on('close', (code) => {
		console.log(`[webrtc-relay] FFmpeg closed (room=${roomId}) code=${code}`);
		// Drop the session so /webrtc-relay/status stays accurate.
		relaySessions.delete(roomId);
	});
	return ffmpeg;
}
// ── Status endpoint ───────────────────────────────────────────────────────────
// Reports each active relay session and its uptime in whole seconds.
app.get('/webrtc-relay/status', (_, res) => {
	const sessions = [...relaySessions.entries()].map(([roomId, sess]) => ({
		roomId,
		startedAt: sess.startedAt,
		uptime: Math.floor((Date.now() - sess.startedAt) / 1000),
	}));
	res.json({ ok: true, sessions });
});
// ─────────────────────────────────────────────────────────────────────────────
// HTTP server (wraps Express) + WebSocket server
// ─────────────────────────────────────────────────────────────────────────────
const httpServer = http.createServer(app);

// The WSS is created detached ('noServer') and wired through the 'upgrade'
// event, so any path under /webrtc-relay/ is accepted (prefix match instead
// of the exact-path matching 'ws' performs when given { server, path }).
const wss = new WebSocketServer({ noServer: true });

// Forward only /webrtc-relay(/…) upgrades to the WSS; refuse everything else.
httpServer.on('upgrade', (req, socket, head) => {
	const reqUrl = req.url || '';
	const isRelayPath = reqUrl === '/webrtc-relay' || reqUrl.startsWith('/webrtc-relay/');
	if (!isRelayPath) {
		socket.destroy();
		return;
	}
	wss.handleUpgrade(req, socket, head, (ws) => wss.emit('connection', ws, req));
});
/**
 * Per-connection WebM → RTMP bridge.
 *
 * Protocol: the client first sends a JSON config message
 *   { type: 'config', videoBitrate?, audioBitrate? }
 * and then streams binary WebM chunks (MediaRecorder output), which are
 * piped into FFmpeg's stdin.
 *
 * BUGFIX: previously, when the first message was not valid JSON the handler
 * set `configured = true` but never started FFmpeg, so every subsequent
 * chunk was silently dropped. Now that path starts the relay with default
 * bitrates and forwards the chunk, matching the documented intent
 * ("treat as binary from the start").
 */
wss.on('connection', (ws, req) => {
	// Extract roomId from path: /webrtc-relay/<roomId>
	const url = req.url || '';
	const parts = url.split('/').filter(Boolean);
	const roomId = parts[1] ? decodeURIComponent(parts[1]) : 'default';
	console.log(`[webrtc-relay] ✅ New connection room="${roomId}" from ${req.socket.remoteAddress}`);
	let ffmpeg = null;
	let configured = false;
	let headerSent = false;

	// Kills any relay already registered for this room, then spawns a fresh
	// FFmpeg process and registers the new session.
	const beginRelay = (videoBitrate, audioBitrate) => {
		const existing = relaySessions.get(roomId);
		if (existing) {
			try { existing.ffmpeg.stdin.destroy(); } catch(_) {}
			try { existing.ffmpeg.kill('SIGKILL'); } catch(_) {}
			relaySessions.delete(roomId);
		}
		ffmpeg = startFfmpegRelay(roomId, videoBitrate, audioBitrate);
		relaySessions.set(roomId, { ws, ffmpeg, startedAt: Date.now() });
		ffmpeg.on('spawn', () => {
			ws.send(JSON.stringify({ type: 'ready', message: 'FFmpeg relay started' }));
		});
		ffmpeg.on('close', () => {
			if (ws.readyState === WebSocket.OPEN) {
				ws.send(JSON.stringify({ type: 'info', message: 'FFmpeg process ended' }));
			}
		});
	};

	ws.on('message', (data, isBinary) => {
		// First message: JSON config
		if (!configured) {
			try {
				const cfg = JSON.parse(data.toString());
				if (cfg.type === 'config') {
					configured = true;
					beginRelay(cfg.videoBitrate, cfg.audioBitrate);
					ws.send(JSON.stringify({ type: 'info', message: `Relay configurado → ${buildRtmpUrl(roomId)}` }));
					return;
				}
				// Parsable JSON of another type: ignore and keep waiting for config.
			} catch(_) {
				// Not JSON → the client skipped the config message and is
				// streaming immediately. Start the relay with default bitrates
				// and fall through to forward this first chunk.
				configured = true;
				beginRelay(undefined, undefined);
			}
		}
		// Binary data: forward to FFmpeg stdin
		if (ffmpeg && ffmpeg.stdin.writable) {
			if (!headerSent) {
				headerSent = true;
				console.log(`[webrtc-relay] First binary chunk (${isBinary ? 'binary' : 'text'}) for room="${roomId}"`);
			}
			try {
				ffmpeg.stdin.write(data);
			} catch(e) {
				console.warn(`[webrtc-relay] stdin write error (room=${roomId}): ${e.message}`);
			}
		}
	});

	ws.on('close', (code, reason) => {
		console.log(`[webrtc-relay] 🔌 Client disconnected room="${roomId}" code=${code}`);
		if (ffmpeg) {
			// Close stdin so FFmpeg can flush, then hard-stop after a grace period.
			try { ffmpeg.stdin.end(); } catch(_) {}
			setTimeout(() => {
				try { ffmpeg.kill('SIGTERM'); } catch(_) {}
			}, 2000);
		}
		relaySessions.delete(roomId);
	});
	ws.on('error', (err) => {
		console.error(`[webrtc-relay] WS error (room=${roomId}): ${err.message}`);
	});
});
// ── Start server ──────────────────────────────────────────────────────────────
app.listen(PORT, '0.0.0.0', () => {
httpServer.listen(PORT, '0.0.0.0', () => {
console.log(`\n[server] ✅ http://0.0.0.0:${PORT}`);
console.log(`[server] 💾 Config: ${CFG_PATH}`);
console.log(`[server] 🔐 Encryption: AES-256-GCM\n`);
console.log(`[server] 🔐 Encryption: AES-256-GCM`);
console.log(`[server] 🎬 WebRTC Relay (legacy): ws://0.0.0.0:${PORT}/webrtc-relay/<roomId>`);
console.log(`[server] 🔴 LiveKit: ${LK_WS_URL || '(not configured)'}`);
console.log(`[server] 📡 RTMP target: rtmp://${RTMP_HOST}:${RTMP_PORT}/${RTMP_APP}/<stream>.stream\n`);
console.log(' GET /health');
console.log(' ── LiveKit ───────────────────────────────────────────');
console.log(' GET /livekit/config');
console.log(' POST /livekit/token { roomName, participantName?, canPublish?, canSubscribe? }');
console.log(' POST /livekit/relay/start { roomName, streamName? }');
console.log(' POST /livekit/relay/stop { roomName }');
console.log(' GET /livekit/relay/status');
console.log(' GET /webrtc-relay/status (legacy WebSocket relay)');
console.log(' ── Facebook ──────────────────────────────────────────');
console.log(' GET /fb/config');
console.log(' PUT /fb/config { app_id, app_secret }');
@ -826,3 +1179,4 @@ app.listen(PORT, '0.0.0.0', () => {
process.on('SIGINT', () => process.exit(0));
process.on('SIGTERM', () => process.exit(0));

View File

@ -9,10 +9,11 @@
},
"dependencies": {
"cors": "^2.8.5",
"express": "^4.18.2"
"express": "^4.18.2",
"livekit-server-sdk": "^2.15.0",
"ws": "^8.19.0"
},
"engines": {
"node": ">=18"
}
}

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

View File

@ -78,14 +78,14 @@ module.exports = function (app) {
}),
);
// Facebook OAuth server: /fb-server/* → http://localhost:3002/*
// Routes: /fb/config, /fb/accounts, /fb/exchange, /fb/refresh/:id, /fb/upgrade
// Facebook OAuth server + WebRTC relay: /fb-server/* → http://localhost:3002/*
app.use(
'/fb-server',
createProxyMiddleware({
target: FB_SERVER_TARGET,
changeOrigin: true,
secure: false,
ws: true,
pathRewrite: { '^/fb-server': '' },
onError: (err, req, res) => {
console.error(`[setupProxy] fb-server proxy error: ${err.code}${err.message}`);
@ -97,6 +97,42 @@ module.exports = function (app) {
}),
);
// LiveKit token endpoint: /livekit/* → http://localhost:3002/livekit/*
app.use(
'/livekit',
createProxyMiddleware({
target: FB_SERVER_TARGET,
changeOrigin: true,
secure: false,
ws: false,
onError: (err, req, res) => {
console.error(`[setupProxy] livekit proxy error: ${err.code}${err.message}`);
if (!res.headersSent) {
res.writeHead(502, { 'Content-Type': 'application/json' });
res.end(JSON.stringify({ error: 'livekit endpoint unavailable', message: err.message }));
}
},
}),
);
// WebRTC relay WebSocket (legacy — mantenido por compatibilidad)
app.use(
'/webrtc-relay',
createProxyMiddleware({
target: FB_SERVER_TARGET,
changeOrigin: true,
secure: false,
ws: true,
onError: (err, req, res) => {
console.error(`[setupProxy] webrtc-relay proxy error: ${err.code}${err.message}`);
if (res && !res.headersSent) {
res.writeHead(502, { 'Content-Type': 'application/json' });
res.end(JSON.stringify({ error: 'webrtc-relay unavailable', message: err.message }));
}
},
}),
);
// OAuth2 callback: sirve el HTML de callback para YouTube y otras plataformas
app.get('/oauth2callback', (req, res) => {
const callbackFile = path.join(__dirname, '..', 'public', 'oauth2callback.html');

View File

@ -6,16 +6,18 @@ export default {
html: {
width: '100%',
height: '100%',
fontSize: '16px/1.5',
fontSize: '16px',
lineHeight: '1.5',
},
body: {
background: `${base.palette.background.button_disabled} url(${universe}) no-repeat fixed left top`,
backgroundSize: 'cover',
overflowX: 'hidden',
overflowY: 'scroll',
fontFamily: '"Roboto", "Helvetica", "Arial", sans-serif',
},
code: {
fontFamily: 'soure-code-pro, monospace',
fontFamily: 'source-code-pro, monospace',
},
textarea: {
width: '100%',

View File

@ -62,6 +62,7 @@ export default function Profile(props) {
const load = async () => {
// Add pseudo sources
props.skills.sources.noaudio = [];
props.skills.sources.webrtcroom = [];
let audio = $sources.audio;
@ -127,6 +128,7 @@ export default function Profile(props) {
// Add pseudo sources
props.skills.sources.noaudio = [];
props.skills.sources.webrtcroom = [];
let hasAudio = false;
for (let i = 0; i < res.streams.length; i++) {

View File

@ -21,6 +21,15 @@ function initConfig(initialConfig) {
...initialConfig,
};
// Propagate top-level channelid into each source sub-config
const channelid = config.channelid || initialConfig?.channelid || 'external';
for (let s of Sources.List()) {
if (!config[s.id]) config[s.id] = {};
if (!config[s.id].channelid) {
config[s.id].channelid = channelid;
}
}
return config;
}

View File

@ -0,0 +1,310 @@
/**
* WebRTC Room Source LiveKit
*
* Architecture (identical to Network Source push-RTMP):
*
* Browser (webrtc-room page)
* LiveKit SDK (WebRTC)
* LiveKit Server (wss://livekit-server.nextream.sytes.net)
* [future] Node.js relay subscriber FFmpeg
* RTMP push: rtmp://RTMP_HOST/RTMP_APP/<channelid>.stream
* Restreamer Core input: {rtmp,name=<channelid>.stream}
*
* The Core process config input address uses the same internal RTMP push
* format as the Network Source in push mode.
*/
import React from 'react';
import { useLingui } from '@lingui/react';
import { Trans, t } from '@lingui/macro';
import Grid from '@mui/material/Grid';
import Icon from '@mui/icons-material/ScreenShare';
import Typography from '@mui/material/Typography';
import TextField from '@mui/material/TextField';
import Button from '@mui/material/Button';
import Chip from '@mui/material/Chip';
import Divider from '@mui/material/Divider';
import ContentCopyIcon from '@mui/icons-material/ContentCopy';
import OpenInNewIcon from '@mui/icons-material/OpenInNew';
import ScreenShareIcon from '@mui/icons-material/ScreenShare';
import QrCode2Icon from '@mui/icons-material/QrCode2';
import makeStyles from '@mui/styles/makeStyles';
import FormInlineButton from '../../../misc/FormInlineButton';
import BoxText from '../../../misc/BoxText';
// Styles for the WebRTC Room settings panel.
const useStyles = makeStyles((theme) => ({
	// Row holding the read-only room URL plus the copy/open buttons.
	roomUrlBox: {
		display: 'flex',
		alignItems: 'center',
		gap: '8px',
	},
	// Centers the QR image under its toggle button.
	qrContainer: {
		display: 'flex',
		justifyContent: 'center',
		marginTop: '8px',
	},
	// Spacing between the feature chips in the info box.
	chip: {
		marginRight: '4px',
		marginBottom: '4px',
	},
	// Soft blue callout used for the introductory info box.
	infoBox: {
		background: 'rgba(79,142,247,0.08)',
		border: '1px solid rgba(79,142,247,0.3)',
		borderRadius: '8px',
		padding: '12px 16px',
	},
}));
// ─── Settings init ────────────────────────────────────────────────────────────
// Builds the default settings, letting any saved settings override them.
// The room id defaults to the channel id (same convention as push.name in
// the Network Source).
const initSettings = (initialSettings, config) => {
	const defaults = {
		roomId: (config && config.channelid) || 'external',
		showQR: true,
	};
	return Object.assign(defaults, initialSettings || {});
};
// ─── Create FFmpeg inputs ─────────────────────────────────────────────────────
// Mirrors the Network Source push-RTMP pattern:
//   getLocalRTMP(name) → '{rtmp,name=' + name + '}'  with name = <channelid>.stream
const createInputs = (settings, config) => {
	const cfg = config || {};
	const channelId = cfg.channelid || settings.roomId || 'external';
	// Match Network.js: append '.stream' unless the id already carries it.
	const streamName = channelId.endsWith('.stream') ? channelId : `${channelId}.stream`;
	const options = [
		'-fflags', '+genpts',
		'-analyzeduration', '3000000', // 3s — same as push RTMP in Network.js
		'-probesize', '5000000',
		'-thread_queue_size','512',
	];
	return [{ address: '{rtmp,name=' + streamName + '}', options }];
};
// ─── Build room URL ───────────────────────────────────────────────────────────
// Presenter-facing URL for the room page, derived from the current origin.
function buildRoomUrl(settings) {
	const loc = window.location;
	const origin = loc.origin ? loc.origin : loc.protocol + '//' + loc.host;
	const room = encodeURIComponent(settings.roomId || 'external');
	return origin + '/webrtc-room/?room=' + room;
}
// ─── QR Code via API ──────────────────────────────────────────────────────────
function QRImage({ url }) {
if (!url) return null;
const src = `https://api.qrserver.com/v1/create-qr-code/?data=${encodeURIComponent(url)}&size=160x160&margin=4&bgcolor=ffffff&color=000000`;
return (
<img src={src} alt="QR sala" width={160} height={160}
style={{ border: '4px solid white', borderRadius: '8px', display: 'block' }} />
);
}
// ─── Source component ─────────────────────────────────────────────────────────
/**
 * Settings panel for the "WebRTC Room" source.
 *
 * Shows the presenter-facing room URL (copy / open / QR), lets the operator
 * edit the room id, displays the RTMP-push input the Core process will use,
 * and exposes a probe button that forwards the computed inputs to the caller.
 *
 * Props: settings, config, onChange(settings), onProbe(settings, inputs).
 */
function Source(props) {
	const { i18n } = useLingui();
	const classes = useStyles();
	const settings = initSettings(props.settings, props.config);
	const [copied, setCopied] = React.useState(false);
	const [lkStatus, setLkStatus] = React.useState(null); // null=checking, true=ok, false=err
	const [showQR, setShowQR] = React.useState(settings.showQR !== false);
	const roomUrl = buildRoomUrl(settings);
	// ── Check LiveKit config ──
	// NOTE(review): lkStatus only reflects the HTTP status of /livekit/config —
	// a 503 (server up but LiveKit unconfigured) also yields false.
	React.useEffect(() => {
		fetch('/livekit/config', { signal: AbortSignal.timeout(3000) })
			.then(r => setLkStatus(r.ok))
			.catch(() => setLkStatus(false));
	}, []);
	// Generic field updater: merges one key into settings and notifies the caller.
	const handleChange = (key) => (e) => {
		props.onChange({ ...settings, [key]: e.target.value });
	};
	const handleProbe = () => {
		props.onProbe(settings, createInputs(settings, props.config));
	};
	// Copies the room URL; silently does nothing when the Clipboard API is absent.
	const handleCopy = () => {
		if (navigator.clipboard) {
			navigator.clipboard.writeText(roomUrl).then(() => {
				setCopied(true);
				setTimeout(() => setCopied(false), 2000);
			});
		}
	};
	// Opens the presenter room in a centered popup window, one per room id.
	const handleOpenRoom = () => {
		const w = 820, h = 700;
		const left = Math.max(0, Math.round(window.screen.width / 2 - w / 2));
		const top = Math.max(0, Math.round(window.screen.height / 2 - h / 2));
		window.open(
			roomUrl,
			'webrtc-room-' + (settings.roomId || 'default'),
			`width=${w},height=${h},left=${left},top=${top},resizable=yes,scrollbars=yes,toolbar=no,menubar=no,location=no,status=no`,
		);
	};
	// Stream name that will be pushed to Restreamer Core
	const streamName = (settings.roomId || 'external').replace(/\.stream$/, '') + '.stream';
	return (
		<Grid container alignItems="flex-start" spacing={2} style={{ marginTop: '0.5em' }}>
			{/* ── Info ── */}
			<Grid item xs={12}>
				<div className={classes.infoBox}>
					<Typography variant="body2" style={{ marginBottom: '6px' }}>
						<ScreenShareIcon style={{ fontSize: '1rem', verticalAlign: 'middle', marginRight: '4px' }} />
						<Trans>
							Un cliente (navegador) se conecta a la sala LiveKit para compartir su
							pantalla o cámara. El video llega al Core vía RTMP push.
						</Trans>
					</Typography>
					<div>
						<Chip size="small" label="LiveKit" className={classes.chip}
							style={{ backgroundColor: '#4f8ef7', color: '#fff' }} />
						<Chip size="small" label="Screen share" className={classes.chip}
							style={{ backgroundColor: '#7c5ce4', color: '#fff' }} />
						<Chip size="small" label="Camera" className={classes.chip}
							style={{ backgroundColor: '#27ae60', color: '#fff' }} />
					</div>
				</div>
			</Grid>
			{/* ── LiveKit status ── */}
			{lkStatus === false && (
				<Grid item xs={12}>
					<BoxText color="dark">
						<Typography variant="body2" style={{ color: '#e74c3c' }}>
							<Trans>
								El servidor LiveKit no está disponible. Verifica LIVEKIT_API_KEY,
								LIVEKIT_API_SECRET y LIVEKIT_WS_URL en el docker-compose.
							</Trans>
						</Typography>
					</BoxText>
				</Grid>
			)}
			{lkStatus === true && (
				<Grid item xs={12}>
					<Typography variant="body2" style={{ color: '#2ecc71', fontSize: '0.8rem' }}>
						<Trans>LiveKit configurado correctamente</Trans>
					</Typography>
				</Grid>
			)}
			<Grid item xs={12}><Divider /></Grid>
			{/* ── Room ID (= channelid) ── */}
			<Grid item xs={12}>
				<TextField
					variant="outlined" fullWidth
					label={i18n._(t`Room ID`)}
					value={settings.roomId}
					onChange={handleChange('roomId')}
					helperText={i18n._(t`Identificador de la sala LiveKit. Por defecto usa el ID del canal (${streamName}).`)}
				/>
			</Grid>
			<Grid item xs={12}><Divider /></Grid>
			{/* ── Room URL ── */}
			<Grid item xs={12}>
				<Typography variant="subtitle2" style={{ marginBottom: '6px' }}>
					<Trans>URL de la sala para el presentador</Trans>
				</Typography>
				<div className={classes.roomUrlBox}>
					<TextField
						variant="outlined" fullWidth size="small"
						value={roomUrl}
						InputProps={{ readOnly: true, style: { fontFamily: 'monospace', fontSize: '0.78rem' } }}
					/>
					<Button variant="outlined" size="small" onClick={handleCopy}
						startIcon={<ContentCopyIcon />} style={{ whiteSpace: 'nowrap', minWidth: '80px' }}>
						{copied ? <Trans>¡Copiado!</Trans> : <Trans>Copiar</Trans>}
					</Button>
					<Button variant="outlined" size="small" onClick={handleOpenRoom}
						startIcon={<OpenInNewIcon />} style={{ whiteSpace: 'nowrap' }}>
						<Trans>Abrir</Trans>
					</Button>
				</div>
			</Grid>
			{/* ── QR ── */}
			<Grid item xs={12}>
				<Button variant="text" size="small" startIcon={<QrCode2Icon />}
					onClick={() => setShowQR(v => !v)}
					style={{ color: 'rgba(255,255,255,0.6)', marginBottom: '4px' }}>
					{showQR ? <Trans>Ocultar QR</Trans> : <Trans>Mostrar QR</Trans>}
				</Button>
				{showQR && <div className={classes.qrContainer}><QRImage url={roomUrl} /></div>}
			</Grid>
			<Grid item xs={12}><Divider /></Grid>
			{/* ── RTMP stream info ── */}
			<Grid item xs={12}>
				<Typography variant="subtitle2" style={{ marginBottom: '4px' }}>
					<Trans>Configuración del proceso</Trans>
				</Typography>
				<Typography variant="body2" style={{ color: 'rgba(255,255,255,0.5)', fontFamily: 'monospace', fontSize: '0.78rem' }}>
					input: {`{rtmp,name=${streamName}}`}
				</Typography>
			</Grid>
			<Grid item xs={12}><Divider /></Grid>
			{/* ── Instructions ── */}
			<Grid item xs={12}>
				<Typography variant="subtitle2" gutterBottom><Trans>Cómo usar</Trans>:</Typography>
				<Typography variant="body2" style={{ color: 'rgba(255,255,255,0.6)', lineHeight: 1.7 }}>
					1. <Trans>Haz clic en</Trans> <strong><Trans>Probar</Trans></strong> <Trans>para registrar el input RTMP push</Trans>.<br />
					2. <Trans>Guarda el canal y activa el proceso</Trans>.<br />
					3. <Trans>Abre la sala (botón</Trans> <strong><Trans>Abrir</Trans></strong>) <Trans>y elige Pantalla o Cámara</Trans>.<br />
					4. <Trans>Pulsa</Trans> <strong>🚀 Iniciar transmisión</strong> <Trans>en la sala</Trans>.<br />
					5. <Trans>El video llega al Core vía RTMP push interno</Trans>.
				</Typography>
			</Grid>
			{/* ── Probe ── */}
			<Grid item xs={12}>
				<FormInlineButton onClick={handleProbe}><Trans>Probar</Trans></FormInlineButton>
			</Grid>
		</Grid>
	);
}
Source.defaultProps = {
	settings: {},
	config: {},
	onChange: function(settings) {},
	onProbe: function(settings, inputs) {},
};

// White icon wrapper so the source list renders the glyph on a dark background.
function SourceIcon(props) {
	return <Icon style={{ color: '#FFF' }} {...props} />;
}

// Registry metadata consumed by the sources index.
const id = 'webrtcroom';
const name = <Trans>WebRTC Room</Trans>;
// NOTE(review): capabilities here omit 'audio', while the RawSources variant
// of this source declares ['audio', 'video'] — confirm which is intended.
const capabilities = ['video'];
const ffversion = '^4.1.0 || ^5.0.0 || ^6.1.0';
const func = { initSettings, createInputs };

export { id, name, capabilities, ffversion, SourceIcon as icon, Source as component, func };

View File

@ -9,6 +9,7 @@ import * as VideoLoop from './VideoLoop';
import * as AudioLoop from './AudioLoop';
import * as VirtualAudio from './VirtualAudio';
import * as VirtualVideo from './VirtualVideo';
import * as WebRTCRoom from './WebRTCRoom';
class Registry {
constructor() {
@ -50,5 +51,6 @@ registry.Register(NoAudio);
registry.Register(VideoAudio);
registry.Register(VideoLoop);
registry.Register(AudioLoop);
registry.Register(WebRTCRoom);
export default registry;

View File

@ -17,8 +17,9 @@ export default function Source(props) {
<Grid item xs={12}>
<Typography>
<Trans>
Select whether you pull the stream from a <strong>network source</strong> (such as a network camera) or the{' '}
<strong>internal RTMP server</strong> (e.g., OBS streams to the Restreamer).
Select whether you pull the stream from a <strong>network source</strong> (such as a network camera), the{' '}
<strong>internal RTMP server</strong> (e.g., OBS streams to the Restreamer), or use a{' '}
<strong>WebRTC Room</strong> to stream directly from a browser (screen share or camera).
</Trans>
</Typography>
</Grid>

View File

@ -0,0 +1,78 @@
import React from 'react';
import { Trans } from '@lingui/macro';
import Grid from '@mui/material/Grid';
import Icon from '@mui/icons-material/ScreenShare';
import TextField from '@mui/material/TextField';
import Typography from '@mui/material/Typography';
import * as S from '../../Sources/WebRTCRoom';
// Delegate to the shared Sources/WebRTCRoom implementation so both source
// variants derive their settings identically.
const initSettings = (initialSettings, config) => S.func.initSettings(initialSettings, config);
/**
 * Wizard variant of the WebRTC Room source: shows the presenter-facing room
 * URL for the channel and registers the RTMP-push inputs via props.onChange.
 */
function Source(props) {
	const config = { channelid: 'external', ...(props.config || {}) };
	const settings = initSettings(props.settings, config);
	const roomId = settings.roomId || config.channelid || 'external';
	const origin = window.location.origin;
	// NOTE(review): nothing visible here sets settings.relayHost — confirm
	// which caller provides it; otherwise the current page host is used.
	const host = settings.relayHost || window.location.host;
	const roomUrl = `${origin}/webrtc-room/?room=${encodeURIComponent(roomId)}&host=${encodeURIComponent(host)}`;
	// Recomputes the FFmpeg inputs and notifies the wizard (ready=true).
	// NOTE(review): createInputs is called without the config argument, so the
	// stream name derives from settings.roomId — confirm this is intended.
	const handleChange = (newSettings) => {
		newSettings = newSettings || settings;
		const inputs = S.func.createInputs(newSettings);
		props.onChange(S.id, newSettings, inputs, true);
	};
	// Register the inputs once on mount.
	React.useEffect(() => {
		handleChange();
		// eslint-disable-next-line react-hooks/exhaustive-deps
	}, []);
	return (
		<React.Fragment>
			<Grid item xs={12}>
				<Typography variant="body2" style={{ color: 'rgba(255,255,255,0.6)', lineHeight: 1.7 }}>
					<Trans>
						Un cliente (navegador) abrirá la sala WebRTC para compartir su pantalla o cámara.
						La señal llegará al Core vía RTMP y se distribuirá a todos los destinos configurados.
					</Trans>
				</Typography>
			</Grid>
			<Grid item xs={12}>
				<TextField
					variant="outlined"
					fullWidth
					label={<Trans>URL de la sala (compartir con el presentador)</Trans>}
					value={roomUrl}
					InputProps={{ readOnly: true, style: { fontFamily: 'monospace', fontSize: '0.8rem' } }}
					helperText={<Trans>Room ID: {roomId}</Trans>}
				/>
			</Grid>
		</React.Fragment>
	);
}
Source.defaultProps = {
	knownDevices: [],
	settings: {},
	config: null,
	skills: null,
	onChange: function (type, settings, inputs, ready) {},
	onRefresh: function () {},
};

// White icon for the source picker.
function SourceIcon(props) {
	return <Icon style={{ color: '#FFF' }} {...props} />;
}

// Registry metadata — id/type match the Sources/WebRTCRoom module id.
const id = 'webrtcroom';
const type = 'webrtcroom';
const name = <Trans>WebRTC Room</Trans>;
// NOTE(review): declares ['audio', 'video'] while Sources/WebRTCRoom declares
// only ['video'] — confirm which capability set is intended.
const capabilities = ['audio', 'video'];

export { id, type, name, capabilities, SourceIcon as icon, Source as component };

View File

@ -4,6 +4,7 @@ import * as InternalSRT from './InternalSRT';
import * as Network from './Network';
import * as Raspicam from './Raspicam';
import * as V4L from './V4L';
import * as WebRTCRoom from './WebRTCRoom';
class Registry {
constructor() {
@ -41,5 +42,6 @@ registry.Register(InternalSRT);
registry.Register(AVFoundation);
registry.Register(Raspicam);
registry.Register(V4L);
registry.Register(WebRTCRoom);
export default registry;

View File

@ -269,6 +269,9 @@ export default function Wizard(props) {
}
}
// WebRTC Room is always available (pseudo-source, no hardware required)
knownSources.push('webrtcroom');
let availableSources = [];
for (let s of Sources.List()) {
@ -294,11 +297,31 @@ export default function Wizard(props) {
return <Source onAbort={handleAbort} onHelp={handleHelp('video-setup')} onAdvanced={handleAdvanced} sources={availableSources} />;
} else if ($step === 'VIDEO SETTINGS') {
handleNext = async () => {
// probing ...
setStep('VIDEO PROBE');
const source = $sources.video;
// WebRTC Room: skip probe — relay sends H.264+AAC, use predefined streams
if ($sourceid === 'webrtcroom') {
const webrtcStreams = [
{ url: '', index: 0, stream: 0, type: 'video', codec: 'h264', width: 1280, height: 720, pix_fmt: 'yuv420p', sampling_hz: 0, layout: '', channels: 0 },
{ url: '', index: 0, stream: 1, type: 'audio', codec: 'aac', width: 0, height: 0, pix_fmt: '', sampling_hz: 44100, layout: 'stereo', channels: 2 },
];
const profile = M.preselectProfile('video', webrtcStreams, $profile, $skills.encoders);
setProfile({ ...$profile, ...profile });
setSources({
...$sources,
video: { ...source, streams: webrtcStreams },
});
setProbe({ ...$probe, probing: false, status: 'success' });
setStep('VIDEO RESULT');
return;
}
// Normal probe flow
setStep('VIDEO PROBE');
const status = await probe('video', source);
if (status === true) {
setStep('VIDEO RESULT');
@ -350,6 +373,11 @@ export default function Wizard(props) {
const Component = s.component;
// Config: para webrtcroom usar el channelid directamente
const sourceConfig = $sourceid === 'webrtcroom'
? { channelid: _channelid }
: ($config.source ? $config.source[s.type] : null);
// STEP 2 - Source Settings
return (
<Video
@ -362,8 +390,8 @@ export default function Wizard(props) {
ready={$sources.video.ready}
>
<Component
knownDevices={$skills.sources[s.type]}
config={$config.source[s.type]}
knownDevices={$skills.sources[s.type] || []}
config={sourceConfig}
settings={$sources.video.settings}
skills={$skills}
onChange={handleChange}

View File

@ -9,6 +9,10 @@ import Link from '@mui/material/Link';
import Stack from '@mui/material/Stack';
import Typography from '@mui/material/Typography';
import WarningIcon from '@mui/icons-material/Warning';
import ScreenShareIcon from '@mui/icons-material/ScreenShare';
import OpenInNewIcon from '@mui/icons-material/OpenInNew';
import ContentCopyIcon from '@mui/icons-material/ContentCopy';
import Button from '@mui/material/Button';
import * as M from '../../utils/metadata';
import { anonymize } from '../../utils/anonymizer';
@ -36,7 +40,6 @@ const useStyles = makeStyles((theme) => ({
marginLeft: 10,
},
playerL1: {
//padding: '4px 1px 4px 8px',
paddingTop: 10,
paddingLeft: 18
},
@ -57,6 +60,29 @@ const useStyles = makeStyles((theme) => ({
color: theme.palette.warning.main,
fontSize: 'xxx-large',
},
webrtcPanel: {
position: 'absolute',
top: 0, left: 0, bottom: 0, right: 0,
backgroundColor: theme.palette.common.black,
display: 'flex',
flexDirection: 'column',
alignItems: 'center',
justifyContent: 'center',
gap: 12,
padding: 16,
},
webrtcLiveDot: {
display: 'inline-block',
width: 10, height: 10,
borderRadius: '50%',
backgroundColor: '#2ecc71',
marginRight: 6,
animation: '$pulse 1.2s ease-in-out infinite',
},
'@keyframes pulse': {
'0%, 100%': { opacity: 1 },
'50%': { opacity: 0.3 },
},
}));
export default function Main(props) {
@ -78,6 +104,15 @@ export default function Main(props) {
log: [],
},
});
// WebRTC Room detection
const [$webrtcRoom, setWebrtcRoom] = React.useState({
active: false, // source type = webrtcroom
roomUrl: '',
roomId: '',
copied: false,
relayActive: false, // hay sesión FFmpeg activa en el relay
sessions: [], // sesiones activas del relay
});
const processLogTimer = React.useRef();
const [$processDebug, setProcessDebug] = React.useState({
open: false,
@ -90,6 +125,26 @@ export default function Main(props) {
await update();
}, 1000);
// Poll relay sessions when source is webrtcroom
useInterval(async () => {
if (!$webrtcRoom.active) return;
try {
const resp = await fetch('/webrtc-relay/status', { signal: AbortSignal.timeout(2000) });
if (resp.ok) {
const data = await resp.json();
const sessions = data.sessions || [];
const roomSessions = sessions.filter(
(s) => !$webrtcRoom.roomId || s.roomId === $webrtcRoom.roomId
);
setWebrtcRoom((prev) => ({
...prev,
relayActive: roomSessions.length > 0,
sessions: roomSessions,
}));
}
} catch (_) {}
}, 2000);
React.useEffect(() => {
(async () => {
await load();
@ -114,6 +169,23 @@ export default function Main(props) {
...metadata,
});
// Detect if the video source is a WebRTC Room
const videoSource = metadata.sources && metadata.sources[0];
if (videoSource && videoSource.type === 'webrtcroom') {
const settings = videoSource.settings || {};
const roomId = settings.roomId || _channelid;
const origin = window.location.origin;
const roomUrl = `${origin}/webrtc-room/?room=${encodeURIComponent(roomId)}`;
setWebrtcRoom({
active: true,
roomUrl,
roomId,
copied: false,
});
} else {
setWebrtcRoom({ active: false, roomUrl: '', roomId: '', copied: false });
}
await update();
};
@ -265,6 +337,28 @@ export default function Main(props) {
H(topic);
};
const handleCopyRoomUrl = () => {
if (navigator.clipboard && $webrtcRoom.roomUrl) {
navigator.clipboard.writeText($webrtcRoom.roomUrl).then(() => {
setWebrtcRoom((prev) => ({ ...prev, copied: true }));
setTimeout(() => setWebrtcRoom((prev) => ({ ...prev, copied: false })), 2000);
});
}
};
const handleOpenRoom = () => {
if ($webrtcRoom.roomUrl) {
const w = 820, h = 700;
const left = Math.max(0, Math.round(window.screen.width / 2 - w / 2));
const top = Math.max(0, Math.round(window.screen.height / 2 - h / 2));
window.open(
$webrtcRoom.roomUrl,
'webrtc-room-' + ($webrtcRoom.roomId || 'default'),
`width=${w},height=${h},left=${left},top=${top},resizable=yes,scrollbars=yes,toolbar=no,menubar=no,location=no,status=no`
);
}
};
if ($state.ready === false) {
return (
<Paper xs={8} sm={6} md={4} className="PaperM">
@ -304,53 +398,74 @@ export default function Main(props) {
<Grid item xs={12}>
<Grid container spacing={0} className={classes.playerL1}>
<Grid item xs={12} className={classes.playerL2}>
{($state.state === 'disconnected' || $state.state === 'disconnecting') && (
<Grid
container
direction="column"
className={classes.playerL3}
justifyContent="center"
alignItems="center"
spacing={1}
>
<Grid item>
<Typography variant="h2">
<Trans>No video</Trans>
{/* ── WebRTC Room source ── */}
{$webrtcRoom.active ? (
$webrtcRoom.relayActive && $state.state === 'connected' ? (
/* Relay activo → mostrar HLS preview normal */
<Player type="videojs-internal" source={manifest} poster={poster} autoplay mute controls />
) : (
/* Relay inactivo → panel de control */
<div className={classes.webrtcPanel}>
<ScreenShareIcon style={{ fontSize: '3rem', color: '#4f8ef7', opacity: 0.8 }} />
<Typography variant="h3" style={{ color: '#e0e0ee', textAlign: 'center' }}>
<Trans>WebRTC Room</Trans>
</Typography>
<Typography variant="body2" style={{ color: '#6e6e8a', textAlign: 'center', maxWidth: 320 }}>
{$state.state === 'connecting' ? (
<Trans>Esperando señal del presentador</Trans>
) : $state.state === 'connected' ? (
<Trans>Canal activo esperando que el presentador inicie la transmisión en la sala.</Trans>
) : (
<Trans>Comparte el enlace de la sala con el presentador para iniciar la transmisión.</Trans>
)}
</Typography>
<Stack direction="row" spacing={1} justifyContent="center" flexWrap="wrap">
<Button
variant="contained"
size="small"
startIcon={<OpenInNewIcon />}
onClick={handleOpenRoom}
style={{ background: '#4f8ef7', color: '#fff' }}
>
<Trans>Abrir sala</Trans>
</Button>
<Button
variant="outlined"
size="small"
startIcon={<ContentCopyIcon />}
onClick={handleCopyRoomUrl}
style={{ borderColor: '#4f8ef7', color: '#4f8ef7' }}
>
{$webrtcRoom.copied ? <Trans>¡Copiado!</Trans> : <Trans>Copiar URL</Trans>}
</Button>
</Stack>
{$webrtcRoom.relayActive && (
<Typography variant="caption" style={{ color: '#2ecc71' }}>
<span className={classes.webrtcLiveDot} />
<Trans>Relay activo</Trans>
</Typography>
)}
</div>
)
) : (
/* Source normal: estados HLS estándar */
<React.Fragment>
{($state.state === 'disconnected' || $state.state === 'disconnecting') && (
<Grid container direction="column" className={classes.playerL3} justifyContent="center" alignItems="center" spacing={1}>
<Grid item>
<Typography variant="h2"><Trans>No video</Trans></Typography>
</Grid>
</Grid>
)}
{$state.state === 'connecting' && (
<Grid
container
direction="column"
className={classes.playerL3}
justifyContent="center"
alignItems="center"
spacing={1}
>
<Grid item>
<CircularProgress color="inherit" />
</Grid>
<Grid item>
<Typography>
<Trans>Connecting ...</Trans>
</Typography>
</Grid>
<Grid container direction="column" className={classes.playerL3} justifyContent="center" alignItems="center" spacing={1}>
<Grid item><CircularProgress color="inherit" /></Grid>
<Grid item><Typography><Trans>Connecting ...</Trans></Typography></Grid>
</Grid>
)}
{$state.state === 'error' && (
<Grid
container
direction="column"
className={classes.playerL3}
justifyContent="center"
alignItems="center"
spacing={1}
>
<Grid item>
<WarningIcon className={classes.playerWarningIcon} />
</Grid>
<Grid container direction="column" className={classes.playerL3} justifyContent="center" alignItems="center" spacing={1}>
<Grid item><WarningIcon className={classes.playerWarningIcon} /></Grid>
<Grid item>
<Typography>
<Trans>Error: {anonymize($state.progress.error) || 'unknown'}</Trans>
@ -360,31 +475,23 @@ export default function Main(props) {
<Typography>
<Trans>
Please check the{' '}
<Link href="#!" onClick={handleProcessDetails}>
process log
</Link>
<Link href="#!" onClick={handleProcessDetails}>process log</Link>
</Trans>
</Typography>
</Grid>
{$state.progress.reconnect !== -1 && (
<Grid item>
<Typography>
<Trans>Reconnecting in {$state.progress.reconnect}s</Trans>
</Typography>
</Grid>
<Grid item><Typography><Trans>Reconnecting in {$state.progress.reconnect}s</Trans></Typography></Grid>
)}
{$state.progress.reconnect === -1 && (
<Grid item>
<Typography>
<Trans>You have to reconnect manually</Trans>
</Typography>
</Grid>
<Grid item><Typography><Trans>You have to reconnect manually</Trans></Typography></Grid>
)}
</Grid>
)}
{$state.state === 'connected' && (
<Player type="videojs-internal" source={manifest} poster={poster} autoplay mute controls />
)}
</React.Fragment>
)}
</Grid>
</Grid>
</Grid>
@ -394,9 +501,39 @@ export default function Main(props) {
<Grid item xs={12} marginTop="-.2em">
<Stack direction="row" justifyContent="space-between" alignItems="center" spacing={2}>
<Typography variant="body">
{$webrtcRoom.active ? (
<Stack direction="row" alignItems="center" spacing={0.5}>
<ScreenShareIcon fontSize="small" style={{ color: '#4f8ef7', marginBottom: -3 }} />
<Trans>WebRTC Room</Trans>
</Stack>
) : (
<Trans>Content URL</Trans>
)}
</Typography>
<Stack direction="row" justifyContent="flex-end" alignItems="center" spacing={0.5}>
{$webrtcRoom.active ? (
<React.Fragment>
<Button
variant="outlined"
color="default"
size="small"
startIcon={<ContentCopyIcon />}
onClick={handleCopyRoomUrl}
>
{$webrtcRoom.copied ? <Trans>¡Copiado!</Trans> : <Trans>Room URL</Trans>}
</Button>
<Button
variant="outlined"
color="default"
size="small"
startIcon={<OpenInNewIcon />}
onClick={handleOpenRoom}
>
<Trans>Open room</Trans>
</Button>
</React.Fragment>
) : (
<React.Fragment>
<CopyButton
variant="outlined"
color="default"
@ -433,6 +570,8 @@ export default function Main(props) {
>
<Trans>Snapshot</Trans>
</CopyButton>
</React.Fragment>
)}
</Stack>
</Stack>
</Grid>

View File

@ -7,6 +7,7 @@ import TextField from '@mui/material/TextField';
import Logo from './logos/dlive.svg';
import FormInlineButton from '../../../misc/FormInlineButton';
import YtMetadataInput from './YtMetadataInput';
const id = 'dlive';
const name = 'dlive';
@ -70,6 +71,10 @@ function Service(props) {
props.onChange([output], settings);
};
const pushSettings = () => {
props.onChange([createOutput(settings)], settings);
};
const createOutput = (settings) => {
const output = {
address: 'rtmp://stream.dlive.tv/live/' + settings.key,
@ -81,7 +86,7 @@ function Service(props) {
return (
<Grid container spacing={2}>
<Grid item xs={12} md={9}>
<Grid item xs={12}>
<TextField variant="outlined" fullWidth label={<Trans>Stream key</Trans>} value={settings.key} onChange={handleChange('key')} />
</Grid>
<Grid item xs={12} md={3}>
@ -89,6 +94,11 @@ function Service(props) {
<Trans>GET</Trans>
</FormInlineButton>
</Grid>
<YtMetadataInput onFetch={(title, desc) => {
if (title) settings.title = title;
if (desc) settings.description = desc;
pushSettings();
}} />
<Grid item xs={12}>
<TextField
variant="outlined"

View File

@ -29,6 +29,7 @@ import Checkbox from '../../../misc/Checkbox';
import FormInlineButton from '../../../misc/FormInlineButton';
import Select from '../../../misc/Select';
import fbOAuth from '../../../utils/fbOAuth';
import YtMetadataInput from './YtMetadataInput';
const id = 'facebook';
const name = 'Facebook Live';
@ -449,6 +450,12 @@ function Service(props) {
<Typography variant="h4" style={{ marginBottom: 4 }}>Stream settings</Typography>
</Grid>
<YtMetadataInput onFetch={(title, desc) => {
if (title) settings.title = title;
if (desc) settings.description = desc;
props.onChange(createOutput(settings), settings);
}} />
<Grid item xs={12}>
<TextField variant="outlined" fullWidth
label="Live title"

View File

@ -7,6 +7,7 @@ import Grid from '@mui/material/Grid';
import TextField from '@mui/material/TextField';
import FormInlineButton from '../../../misc/FormInlineButton';
import YtMetadataInput from './YtMetadataInput';
const id = 'instagram';
const name = 'Instagram';
@ -68,14 +69,15 @@ function Service(props) {
const handleChange = (what) => (event) => {
const value = event.target.value;
settings[what] = value;
const output = createOutput(settings);
props.onChange([output], settings);
};
const pushSettings = () => {
props.onChange([createOutput(settings)], settings);
};
const createOutput = (settings) => {
const output = {
address: 'http://instagram.com:443/rtmp/' + settings.key,
@ -95,6 +97,11 @@ function Service(props) {
<Trans>GET</Trans>
</FormInlineButton>
</Grid>
<YtMetadataInput onFetch={(title, desc) => {
if (title) settings.title = title;
if (desc) settings.description = desc;
pushSettings();
}} />
<Grid item xs={12}>
<TextField
variant="outlined"

View File

@ -7,6 +7,7 @@ import LinkedInIcon from '@mui/icons-material/LinkedIn';
import MenuItem from '@mui/material/MenuItem';
import Select from '../../../misc/Select';
import TextField from '@mui/material/TextField';
import YtMetadataInput from './YtMetadataInput';
const id = 'linkedin';
const name = 'LinkedIn';
@ -71,14 +72,15 @@ function Service(props) {
const handleChange = (what) => (event) => {
const value = event.target.value;
settings[what] = value;
const output = createOutput(settings);
props.onChange([output], settings);
};
const pushSettings = () => {
props.onChange([createOutput(settings)], settings);
};
const createOutput = (settings) => {
const output = {
address: settings.protocol + settings.address,
@ -106,6 +108,11 @@ function Service(props) {
placeholder="{custom_id}.channel.media.azure.net:2935/live/{custom_id}"
/>
</Grid>
<YtMetadataInput onFetch={(title, desc) => {
if (title) settings.title = title;
if (desc) settings.description = desc;
pushSettings();
}} />
<Grid item xs={12}>
<TextField
variant="outlined"

View File

@ -7,6 +7,7 @@ import TextField from '@mui/material/TextField';
import Logo from './logos/rumble.svg';
import FormInlineButton from '../../../misc/FormInlineButton';
import YtMetadataInput from './YtMetadataInput';
const id = 'rumble';
const name = 'Rumble';
@ -71,14 +72,15 @@ function Service(props) {
const handleChange = (what) => (event) => {
const value = event.target.value;
settings[what] = value;
const output = createOutput(settings);
props.onChange([output], settings);
};
const pushSettings = () => {
props.onChange([createOutput(settings)], settings);
};
const createOutput = (settings) => {
const output = {
address: settings.server_url + '/' + settings.stream_key,
@ -117,6 +119,11 @@ function Service(props) {
<Trans>GET</Trans>
</FormInlineButton>
</Grid>
<YtMetadataInput onFetch={(title, desc) => {
if (title) settings.title = title;
if (desc) settings.description = desc;
pushSettings();
}} />
<Grid item xs={12}>
<TextField
variant="outlined"

View File

@ -9,6 +9,7 @@ import TextField from '@mui/material/TextField';
import FormInlineButton from '../../../misc/FormInlineButton';
import Select from '../../../misc/Select';
import YtMetadataInput from './YtMetadataInput';
const id = 'twitch';
const name = 'Twitch';
@ -65,14 +66,15 @@ function Service(props) {
const handleChange = (what) => (event) => {
const value = event.target.value;
settings[what] = value;
const output = createOutput(settings);
props.onChange([output], settings);
};
const pushSettings = () => {
props.onChange([createOutput(settings)], settings);
};
const createOutput = (settings) => {
let region_postfix = '.twitch.tv';
if (settings.region.includes('live-video.net')) {
@ -165,6 +167,11 @@ function Service(props) {
<Trans>GET</Trans>
</FormInlineButton>
</Grid>
<YtMetadataInput onFetch={(title, desc) => {
if (title) settings.title = title;
if (desc) settings.description = desc;
pushSettings();
}} />
<Grid item xs={12}>
<TextField
variant="outlined"

View File

@ -11,6 +11,7 @@ import TextField from '@mui/material/TextField';
import FormInlineButton from '../../../misc/FormInlineButton';
import Select from '../../../misc/Select';
import YtMetadataInput from './YtMetadataInput';
const id = 'twitter';
const name = 'Twitter';
@ -87,14 +88,15 @@ function Service(props) {
const handleChange = (what) => (event) => {
const value = event.target.value;
settings[what] = value;
const outputs = createOutput(settings);
props.onChange(outputs, settings);
};
const pushSettings = () => {
props.onChange(createOutput(settings), settings);
};
const createOutput = (settings) => {
const outputs = [];
@ -189,6 +191,11 @@ function Service(props) {
<Trans>GET</Trans>
</FormInlineButton>
</Grid>
<YtMetadataInput onFetch={(title, desc) => {
if (title) settings.title = title;
if (desc) settings.description = desc;
pushSettings();
}} />
<Grid item xs={12}>
<TextField
variant="outlined"

View File

@ -14,6 +14,7 @@ import Checkbox from '../../../misc/Checkbox';
import FormInlineButton from '../../../misc/FormInlineButton';
import Select from '../../../misc/Select';
import ytOAuth from '../../../utils/ytOAuth';
import YtMetadataInput from './YtMetadataInput';
const id = 'youtube';
const name = 'YouTube Live';
@ -87,6 +88,7 @@ function Service(props) {
const [$connecting, setConnecting] = React.useState(false);
const [$globalCreds, setGlobalCreds] = React.useState(() => ytOAuth.getCredentials());
// Sincronizar credenciales y cuentas desde el servidor al montar
React.useEffect(() => {
(async () => {
@ -546,6 +548,21 @@ function Service(props) {
<Checkbox label={<Trans>Backup stream</Trans>} checked={settings.backup} onChange={handleChange('backup')} />
</Grid>
{/* ── Importar título/descripción desde URL de YouTube ──────── */}
<Grid item xs={12}>
<Typography variant="h4" style={{ marginBottom: 6 }}>
<Trans>Stream Settings</Trans>
</Typography>
<Typography variant="caption" style={{ color: '#aaa', display: 'block', marginBottom: 0 }}>
<Trans>Optionally paste a YouTube URL or Video ID to auto-fill the title and description below.</Trans>
</Typography>
</Grid>
<YtMetadataInput onFetch={(title, desc) => {
if (title) settings.title = title;
if (desc) settings.description = desc;
pushSettings(settings);
}} />
{/* ── Título y descripción ────────────────────────────────── */}
<Grid item xs={12}>
<MuiTextField

View File

@ -0,0 +1,125 @@
/**
* YtMetadataInput
*
* Input reutilizable para pegar una URL de YouTube (o Video ID) y obtener
* el título y la descripción automáticamente vía el servicio yt-dlp.
*
* Uso:
* <YtMetadataInput
* onFetch={(title, description) => { settings.title = title; ... }}
* />
*/
import React from 'react';
import CircularProgress from '@mui/material/CircularProgress';
import Grid from '@mui/material/Grid';
import IconButton from '@mui/material/IconButton';
import InputAdornment from '@mui/material/InputAdornment';
import MuiTextField from '@mui/material/TextField';
import Typography from '@mui/material/Typography';
import YouTubeIcon from '@mui/icons-material/YouTube';
// ── yt-dlp service base ────────────────────────────────────────────────────
// Runtime config injected on window; falls back to an empty object when absent
// (e.g. in tests or non-browser environments).
const _runtimeCfg = (typeof window !== 'undefined' && window.__RESTREAMER_CONFIG__) || {};
// Base URL for metadata requests: explicit YTDLP_URL (trailing slash stripped,
// '/stream/' appended) or the relative '/yt-stream/' proxy path as fallback.
const STREAM_SERVICE_BASE = _runtimeCfg.YTDLP_URL
	? _runtimeCfg.YTDLP_URL.replace(/\/$/, '') + '/stream/'
	: '/yt-stream/';
/**
 * Extract an 11-character YouTube video id from a URL or a bare id.
 *
 * Accepts watch URLs (?v=…), youtu.be short links, and /live|embed|shorts|v/
 * path forms, as well as a raw id pasted directly. Returns '' when no valid
 * id is found (including empty/undefined input).
 *
 * Fix: the delimiter classes now also accept '#' so ids followed by a URL
 * fragment (e.g. watch?v=ID#t=30, youtu.be/ID#x) are recognized; previously
 * those URLs fell through and returned ''.
 */
export const extractYouTubeVideoId = (url) => {
	if (!url) return '';
	const trimmed = url.trim();
	// Each pattern captures an 11-char id followed by a URL delimiter
	// (query, path, fragment) or end of string, so ids embedded in longer
	// tokens are rejected.
	const patterns = [
		/[?&]v=([a-zA-Z0-9_-]{11})(?:[&?#/]|$)/,
		/youtu\.be\/([a-zA-Z0-9_-]{11})(?:[?&#/]|$)/,
		/\/(?:live|embed|shorts|v)\/([a-zA-Z0-9_-]{11})(?:[?&#/]|$)/,
	];
	for (const pattern of patterns) {
		const match = trimmed.match(pattern);
		if (match) return match[1];
	}
	// A bare 11-character id pasted directly.
	if (/^[a-zA-Z0-9_-]{11}$/.test(trimmed)) return trimmed;
	return '';
};
export default function YtMetadataInput({ onFetch }) {
const [$url, setUrl] = React.useState('');
const [$fetching, setFetching] = React.useState(false);
const [$error, setError] = React.useState('');
const videoId = extractYouTubeVideoId($url);
const handleFetch = async () => {
setError('');
if (!videoId) {
setError('No se detectó un ID de YouTube válido.');
return;
}
setFetching(true);
const controller = new AbortController();
const timeoutId = setTimeout(() => controller.abort(), 90000);
try {
const res = await fetch(STREAM_SERVICE_BASE + videoId, { signal: controller.signal });
clearTimeout(timeoutId);
if (!res.ok) throw new Error('HTTP ' + res.status + ' ' + res.statusText);
const data = await res.json();
const title = data.title || data.video_title || '';
const description = data.description || data.video_description || '';
if (title || description) {
if (typeof onFetch === 'function') onFetch(title, description);
} else {
setError('No se encontró título ni descripción en la respuesta del servicio.');
}
} catch (e) {
clearTimeout(timeoutId);
setError(e.name === 'AbortError' ? 'Tiempo de espera agotado. Intenta nuevamente.' : 'Error: ' + e.message);
} finally {
setFetching(false);
}
};
return (
<Grid item xs={12}>
<MuiTextField
variant="outlined"
fullWidth
label="YouTube URL or Video ID (optional)"
placeholder="https://www.youtube.com/watch?v=… or video ID"
value={$url}
onChange={(e) => { setUrl(e.target.value); setError(''); }}
InputProps={{
endAdornment: (
<InputAdornment position="end">
<IconButton
onClick={handleFetch}
disabled={$fetching || !videoId}
title="Fetch title & description from YouTube"
size="small"
style={{ color: videoId ? '#FF0000' : undefined }}
>
{$fetching
? <CircularProgress size={20} color="inherit" />
: <YouTubeIcon />}
</IconButton>
</InputAdornment>
),
}}
/>
{videoId && !$fetching && !$error && (
<Typography variant="caption" style={{ color: '#4caf50', display: 'block', marginTop: 2 }}>
Video ID: <strong>{videoId}</strong> click the button to auto-fill title &amp; description
</Typography>
)}
{$fetching && (
<Typography variant="caption" style={{ color: '#2196f3', display: 'block', marginTop: 2 }}>
Fetching metadata from YouTube
</Typography>
)}
{$error && (
<Typography variant="caption" style={{ color: '#f44336', display: 'block', marginTop: 2 }}>
{$error}
</Typography>
)}
</Grid>
);
}

View File

@ -1496,10 +1496,10 @@
resolved "https://registry.npmjs.org/@floating-ui/utils/-/utils-0.2.7.tgz"
integrity sha512-X8R8Oj771YRl/w+c1HqAC1szL8zWQRwFvgDwT129k9ACdBoud/+/rX9V0qiMl6LWUdP9voC2nDVZYPMQQsb6eA==
"@fontsource/dosis@^5.0.21":
version "5.1.0"
resolved "https://registry.npmjs.org/@fontsource/dosis/-/dosis-5.1.0.tgz"
integrity sha512-CO0WBvpuOAyqoGbgV3AnMUHuzzKMudcGnjJ9+5oWFsuQ+DsrmDWM8nHRkGuznZBHOzSYyJyQ3UAOm43ELPSn4A==
"@fontsource/dosis@^5.2.8":
version "5.2.8"
resolved "https://registry.npmjs.org/@fontsource/dosis/-/dosis-5.2.8.tgz"
integrity sha512-8c7kJgWFeGr2/Oe+EqDPf3onSGs5bQmng7nZtKkdto62313CQCa0E7vSyFiKuh5jsXIVi8wz1mHih4kRhB2p+A==
"@fontsource/roboto@^5.0.14":
version "5.1.0"