Compare commits

..

No commits in common. "dev/ffmpeg_update" and "main" have entirely different histories.

14 changed files with 31 additions and 523 deletions

View File

@ -282,7 +282,6 @@ app.get('/version.js', (req, res) => {
app.use('/images', express.static(path.join(process.env.DATABASE, 'images')))
app.use(express.static(path.join(__dirname, 'web','public')))
app.use('/images', express.static(path.join(process.env.DATABASE, 'images')))
app.use('/videos', express.static(path.join(process.env.DATABASE, 'videos')))
app.use('/cache/images', cacheImageService.routerInterceptor())
app.use('/cache/images', express.static(path.join(process.env.DATABASE, 'cache','images')))
app.use('/favicon.svg', express.static(

View File

@ -1,6 +1,6 @@
{
"name": "dizquetv",
"version": "1.0.1",
"version": "1.0.0",
"description": "Create LiveTV channels from your Plex media",
"main": "index.js",
"scripts": {
@ -12,8 +12,6 @@
"compile": "babel index.js -d dist && babel src -d dist/src",
"package": "sh ./make_dist.sh",
"clean": "del-cli --force ./bin ./dist ./.dizquetv ./web/public/bundle.js"
,"docker:build": "docker build -t dizquetv:local -f Dockerfile .",
"docker:build-nvidia": "docker build -t dizquetv:local -f Dockerfile-nvidia ."
},
"author": "vexorian",
"license": "Zlib",

View File

@ -383,113 +383,6 @@ function api(db, channelService, fillerDB, customShowDB, xmltvInterval, guideSe
}
})
router.post('/api/upload/video', async (req, res) => {
try {
if(!req.files) {
res.send({
status: false,
message: 'No file uploaded'
});
return;
}
const vid = req.files.video;
// Basic validation: extension
const allowed = ['.mp4', '.m4v'];
const ext = path.extname(vid.name).toLowerCase();
if (!allowed.includes(ext)) {
return res.status(400).send({ status: false, message: 'Unsupported file type' });
}
// If upload is associated to a channel, enforce 15 MB limit and store under channel hierarchy
let channelNumber = null;
try {
if (req.body && typeof(req.body.channel) !== 'undefined' && req.body.channel !== null && req.body.channel !== '') {
channelNumber = parseInt(req.body.channel, 10);
if (isNaN(channelNumber)) channelNumber = null;
}
} catch (e) {
channelNumber = null;
}
const MAX_BYTES_CHANNEL = 15 * 1024 * 1024;
const MAX_BYTES_GENERIC = 25 * 1024 * 1024;
const maxAllowed = (channelNumber !== null) ? MAX_BYTES_CHANNEL : MAX_BYTES_GENERIC;
if (vid.size > maxAllowed) {
return res.status(400).send({ status: false, message: 'File too large' });
}
if (channelNumber !== null) {
const uploadDir = path.join(process.env.DATABASE, '/videos/', '' + channelNumber, '/split-screen/');
if (!fs.existsSync(uploadDir)) {
fs.mkdirSync(uploadDir, { recursive: true });
}
// avoid collisions
const safeName = `${Date.now()}-${vid.name}`;
const dest = path.join(uploadDir, safeName);
await vid.mv(dest);
const fileUrl = `${req.protocol}://${req.get('host')}/videos/${channelNumber}/split-screen/${safeName}`;
// Persist into channel record if possible
try {
let channel = await channelService.getChannel(channelNumber);
if (channel != null) {
if (typeof(channel.splitScreen) === 'undefined' || channel.splitScreen == null) {
channel.splitScreen = {
useGlobal: false,
enabled: true,
source: fileUrl,
widthPercent: 35,
loop: true,
}
} else {
channel.splitScreen.source = fileUrl;
channel.splitScreen.useGlobal = false;
channel.splitScreen.enabled = true;
}
await channelService.saveChannel(channelNumber, channel);
}
} catch (e) {
console.error('Error persisting channel splitScreen info', e);
}
return res.send({
status: true,
message: 'File is uploaded',
data: {
name: safeName,
mimetype: vid.mimetype,
size: vid.size,
fileUrl: fileUrl
}
});
} else {
// Generic uploads (no channel) -> previous behavior
const uploadDir = path.join(process.env.DATABASE, '/videos/uploads/');
if (!fs.existsSync(uploadDir)) {
fs.mkdirSync(uploadDir, { recursive: true });
}
const dest = path.join(uploadDir, vid.name);
await vid.mv(dest);
return res.send({
status: true,
message: 'File is uploaded',
data: {
name: vid.name,
mimetype: vid.mimetype,
size: vid.size,
fileUrl: `${req.protocol}://${req.get('host')}/videos/uploads/${vid.name}`
}
});
}
} catch (err) {
console.error('Error in /api/upload/video', err);
res.status(500).send(err);
}
})
// Filler
router.get('/api/fillers', async (req, res) => {
try {

View File

@ -411,14 +411,6 @@ function ffmpeg() {
maxFPS: 60,
scalingAlgorithm: "bicubic",
deinterlaceFilter: "none",
// Split-screen/secondary looped video (optional)
splitScreenEnabled: false,
// URL or local path to the secondary video to loop (can be http(s) or file path)
splitScreenSource: "",
// Percentage width of the secondary (right) video relative to output width
splitScreenWidthPercent: 35,
// If true, ffmpeg will attempt to loop the secondary input (via -stream_loop)
splitScreenLoop: true,
}
}

View File

@ -2,12 +2,12 @@ const spawn = require('child_process').spawn
const events = require('events')
const MAXIMUM_ERROR_DURATION_MS = 60000;
const REALLY_RIDICULOUSLY_HIGH_FPS_FOR_DIZQUETVS_USECASE = 120;
class FFMPEG extends events.EventEmitter {
constructor(opts, channel) {
super()
// Clone opts so per-channel overrides don't mutate the global settings
this.opts = JSON.parse(JSON.stringify(opts || {}));
this.opts = opts;
this.errorPicturePath = `http://localhost:${process.env.PORT}/images/generic-error-screen.png`;
this.ffmpegName = "unnamed ffmpeg";
if (! this.opts.enableFFMPEGTranscoding) {
@ -22,9 +22,9 @@ class FFMPEG extends events.EventEmitter {
this.opts.maxFPS = REALLY_RIDICULOUSLY_HIGH_FPS_FOR_DIZQUETVS_USECASE;
}
this.channel = channel
this.ffmpegPath = this.opts.ffmpegPath
this.ffmpegPath = opts.ffmpegPath
let resString = this.opts.targetResolution;
let resString = opts.targetResolution;
if (
(typeof(channel.transcoding) !== 'undefined')
&& (channel.transcoding.targetResolution != null)
@ -40,7 +40,7 @@ class FFMPEG extends events.EventEmitter {
&& (typeof(channel.transcoding.videoBitrate) != 'undefined')
&& (channel.transcoding.videoBitrate != 0)
) {
this.opts.videoBitrate = channel.transcoding.videoBitrate;
opts.videoBitrate = channel.transcoding.videoBitrate;
}
if (
@ -49,36 +49,13 @@ class FFMPEG extends events.EventEmitter {
&& (typeof(channel.transcoding.videoBufSize) != 'undefined')
&& (channel.transcoding.videoBufSize != 0)
) {
this.opts.videoBufSize = channel.transcoding.videoBufSize;
opts.videoBufSize = channel.transcoding.videoBufSize;
}
let parsed = parseResolutionString(resString);
this.wantedW = parsed.w;
this.wantedH = parsed.h;
// Apply per-channel transcoding overrides
if (channel && typeof(channel.transcoding) !== 'undefined') {
if (typeof channel.transcoding.targetResolution === 'string' && channel.transcoding.targetResolution !== '') {
let parsed = resolutionMap.parseResolutionString(channel.transcoding.targetResolution);
this.wantedW = parsed.w;
this.wantedH = parsed.h;
}
if (typeof channel.transcoding.videoBitrate === 'number' && channel.transcoding.videoBitrate > 0) {
this.opts.videoBitrate = channel.transcoding.videoBitrate;
}
if (typeof channel.transcoding.videoBufSize === 'number' && channel.transcoding.videoBufSize > 0) {
this.opts.videoBufSize = channel.transcoding.videoBufSize;
}
// Get videoFlip from channel transcoding settings
if (typeof channel.transcoding.videoFlip === 'string' && channel.transcoding.videoFlip !== '') {
this.opts.videoFlip = channel.transcoding.videoFlip;
}
}
// Ensure videoFlip default
if (typeof this.opts.videoFlip !== 'string') {
this.opts.videoFlip = 'none';
}
this.sentData = false;
this.apad = this.opts.normalizeAudio;
this.audioChannelsSampleRate = this.opts.normalizeAudio;
@ -90,11 +67,11 @@ class FFMPEG extends events.EventEmitter {
setAudioOnly(audioOnly) {
this.audioOnly = audioOnly;
}
async spawnConcat(streamUrl, splitScreen) {
return await this.spawn(streamUrl, undefined, undefined, undefined, true, false, undefined, true, splitScreen)
async spawnConcat(streamUrl) {
return await this.spawn(streamUrl, undefined, undefined, undefined, true, false, undefined, true)
}
async spawnStream(streamUrl, streamStats, startTime, duration, enableIcon, type, splitScreen) {
return await this.spawn(streamUrl, streamStats, startTime, duration, true, enableIcon, type, false, splitScreen);
async spawnStream(streamUrl, streamStats, startTime, duration, enableIcon, type) {
return await this.spawn(streamUrl, streamStats, startTime, duration, true, enableIcon, type, false);
}
async spawnError(title, subtitle, duration) {
if (! this.opts.enableFFMPEGTranscoding || this.opts.errorScreen == 'kill') {
@ -113,7 +90,7 @@ class FFMPEG extends events.EventEmitter {
videoHeight : this.wantedH,
duration : duration,
};
return await this.spawn({ errorTitle: title , subtitle: subtitle }, streamStats, undefined, `${streamStats.duration}ms`, true, false, 'error', false, null)
return await this.spawn({ errorTitle: title , subtitle: subtitle }, streamStats, undefined, `${streamStats.duration}ms`, true, false, 'error', false)
}
async spawnOffline(duration) {
if (! this.opts.enableFFMPEGTranscoding) {
@ -127,11 +104,10 @@ class FFMPEG extends events.EventEmitter {
videoHeight : this.wantedH,
duration : duration,
};
return await this.spawn( {errorTitle: 'offline'}, streamStats, undefined, `${duration}ms`, true, false, 'offline', false, null);
return await this.spawn( {errorTitle: 'offline'}, streamStats, undefined, `${duration}ms`, true, false, 'offline', false);
}
async spawn(streamUrl, streamStats, startTime, duration, limitRead, watermark, type, isConcatPlaylist, splitScreen) {
async spawn(streamUrl, streamStats, startTime, duration, limitRead, watermark, type, isConcatPlaylist) {
console.log("[DEBUG] FFMPEG.spawn received splitScreen:", JSON.stringify(splitScreen));
let ffmpegArgs = [
`-threads`, isConcatPlaylist? 1 : this.opts.threads,
`-fflags`, `+genpts+discardcorrupt+igndts`];
@ -343,48 +319,12 @@ class FFMPEG extends events.EventEmitter {
currentVideo = "[videox]";
}
if (doOverlay) {
// Support for channel watermark (image) and optional split-screen
// secondary video. If the watermark is provided as before, use it
// as an image overlay. If ffmpeg settings enable a split-screen
// secondary, add it as another input and later compose using
// filter_complex.
if (watermark.animated === true) {
ffmpegArgs.push('-ignore_loop', '0');
}
ffmpegArgs.push(`-i`, `${watermark.url}` );
overlayFile = inputFiles++;
this.ensureResolution = true;
// If split-screen secondary is provided via parameter, attach it
if (splitScreen && splitScreen.enabled && splitScreen.source && splitScreen.source.trim() !== '' && this.audioOnly !== true) {
// If requested, try to loop the source using -stream_loop
if (splitScreen.loop === true) {
// -stream_loop value must come before the -i for the input
ffmpegArgs.push('-stream_loop', '-1');
}
ffmpegArgs.push('-i', `${splitScreen.source}`);
var splitOverlayFile = inputFiles++;
// mark that we will need to compose the secondary video later
this._splitOverlayFile = splitOverlayFile;
this._splitScreenConfig = splitScreen;
this.ensureResolution = true;
}
}
// If watermark handling was skipped (watermark === null) we still
// want to allow adding the split-screen secondary input. Add the
// secondary input here if it wasn't already added above.
if (typeof this._splitOverlayFile === 'undefined') {
if (splitScreen && splitScreen.enabled && splitScreen.source && splitScreen.source.trim() !== '' && this.audioOnly !== true) {
if (splitScreen.loop === true) {
ffmpegArgs.push('-stream_loop', '-1');
}
ffmpegArgs.push('-i', `${splitScreen.source}`);
var splitOverlayFile = inputFiles++;
this._splitOverlayFile = splitOverlayFile;
this._splitScreenConfig = splitScreen;
this.ensureResolution = true;
}
}
// Resolution fix: Add scale filter, current stream becomes [siz]
@ -449,15 +389,6 @@ class FFMPEG extends events.EventEmitter {
iH = this.wantedH;
}
// Apply videoFlip to main video before watermark/split-screen composition
if (this.opts.videoFlip === 'hflip') {
videoComplex += `;${currentVideo}hflip[flipped]`;
currentVideo = '[flipped]';
} else if (this.opts.videoFlip === 'vflip') {
videoComplex += `;${currentVideo}vflip[flipped]`;
currentVideo = '[flipped]';
}
// Channel watermark:
if (doOverlay && (this.audioOnly !== true) ) {
var pW =watermark.width;
@ -494,67 +425,6 @@ class FFMPEG extends events.EventEmitter {
currentVideo = '[comb]';
}
// Split-screen composition (independent of watermark)
// If we have a split-screen secondary input configured, compose
// it with the main video based on position (right/left/top/bottom)
if (typeof(this._splitOverlayFile) !== 'undefined' && this._splitScreenConfig && this.audioOnly !== true) {
console.log("[DEBUG] Building split-screen composition...");
try {
const splitPercent = Number(this._splitScreenConfig.widthPercent) || 35;
const position = this._splitScreenConfig.position || 'right';
const stretch = !!this._splitScreenConfig.stretch;
let mainW, mainH, sideW, sideH;
let stackType; // 'hstack' or 'vstack'
let stackOrder; // 'main-first' or 'side-first'
// Calculate dimensions based on position
if (position === 'right' || position === 'left') {
// Horizontal split
sideW = Math.max(16, Math.round(this.wantedW * splitPercent / 100.0));
mainW = Math.max(16, this.wantedW - sideW);
mainH = this.wantedH;
sideH = this.wantedH;
stackType = 'hstack';
stackOrder = (position === 'left') ? 'side-first' : 'main-first';
} else {
// Vertical split (top or bottom)
sideH = Math.max(16, Math.round(this.wantedH * splitPercent / 100.0));
mainH = Math.max(16, this.wantedH - sideH);
mainW = this.wantedW;
sideW = this.wantedW;
stackType = 'vstack';
stackOrder = (position === 'top') ? 'side-first' : 'main-first';
}
console.log(`[DEBUG] Split-screen: position=${position}, stretch=${stretch}, main=${mainW}x${mainH}, side=${sideW}x${sideH}`);
// Scale filters based on stretch mode
if (stretch) {
// Stretch mode: scale exactly to dimensions (no aspect ratio preservation)
videoComplex += `;[${this._splitOverlayFile}:v]scale=${sideW}:${sideH}[side_scaled]`;
videoComplex += `;${currentVideo}scale=${mainW}:${mainH}[main_scaled]`;
} else {
// Aspect ratio mode: preserve aspect ratio with padding
videoComplex += `;[${this._splitOverlayFile}:v]scale=${sideW}:${sideH}:force_original_aspect_ratio=decrease[side_prescale]`;
videoComplex += `;[side_prescale]pad=${sideW}:${sideH}:(ow-iw)/2:(oh-ih)/2[side_scaled]`;
videoComplex += `;${currentVideo}scale=${mainW}:${mainH}:force_original_aspect_ratio=decrease[main_prescale]`;
videoComplex += `;[main_prescale]pad=${mainW}:${mainH}:(ow-iw)/2:(oh-ih)/2[main_scaled]`;
}
// Stack based on type and order
if (stackOrder === 'main-first') {
videoComplex += `;[main_scaled][side_scaled]${stackType}=inputs=2[comb2]`;
} else {
videoComplex += `;[side_scaled][main_scaled]${stackType}=inputs=2[comb2]`;
}
currentVideo = '[comb2]';
console.log("[DEBUG] Split-screen composition added to filter_complex");
} catch (e) {
console.error("Error while building split-screen filters:", e);
}
}
if (this.volumePercent != 100) {
var f = this.volumePercent / 100.0;
@ -699,14 +569,7 @@ class FFMPEG extends events.EventEmitter {
if (this.hasBeenKilled) {
return ;
}
// Log the full ffmpeg command when requested so it's easy to debug
if (this.opts.logFfmpeg) {
try {
console.log("FFMPEG CMD:", this.ffmpegPath, ffmpegArgs.join(' '));
} catch (e) {
console.log("FFMPEG CMD (could not join args)");
}
}
//console.log(this.ffmpegPath + " " + ffmpegArgs.join(" ") );
this.ffmpeg = spawn(this.ffmpegPath, ffmpegArgs, { stdio: ['ignore', 'pipe', (doLogs?process.stderr:"ignore") ] } );
if (this.hasBeenKilled) {
console.log("Send SIGKILL to ffmpeg");

View File

@ -2,7 +2,6 @@ module.exports = {
getCurrentProgramAndTimeElapsed: getCurrentProgramAndTimeElapsed,
createLineup: createLineup,
getWatermark: getWatermark,
getSplitScreen: getSplitScreen,
generateChannelContext: generateChannelContext,
}
@ -347,43 +346,6 @@ function getWatermark( ffmpegSettings, channel, type) {
}
// Return split-screen configuration. This is intentionally independent
// from watermark logic and reads global settings with optional per-channel
// overrides when `channel.splitScreen.useGlobal` is false.
function getSplitScreen(ffmpegSettings, channel) {
let result = null;
// Check if channel has overrides (not using global)
if (channel && typeof channel.splitScreen !== 'undefined' && channel.splitScreen !== null && channel.splitScreen.useGlobal !== true) {
// Use channel-specific settings
result = {
enabled: !!channel.splitScreen.enabled,
source: (typeof channel.splitScreen.source === 'string') ? channel.splitScreen.source : '',
widthPercent: Number(channel.splitScreen.widthPercent) || 35,
position: (typeof channel.splitScreen.position === 'string') ? channel.splitScreen.position : 'right',
stretch: !!channel.splitScreen.stretch,
loop: !!channel.splitScreen.loop,
};
} else {
// Use global settings
if (!ffmpegSettings || !ffmpegSettings.splitScreenEnabled) {
return null;
}
result = {
enabled: !!ffmpegSettings.splitScreenEnabled,
source: (typeof ffmpegSettings.splitScreenSource === 'string') ? ffmpegSettings.splitScreenSource : '',
widthPercent: Number(ffmpegSettings.splitScreenWidthPercent) || 35,
position: (typeof ffmpegSettings.splitScreenPosition === 'string') ? ffmpegSettings.splitScreenPosition : 'right',
stretch: !!ffmpegSettings.splitScreenStretch,
loop: !!ffmpegSettings.splitScreenLoop,
};
}
if (!result.enabled || !result.source || result.source.trim() === '') return null;
return result;
}
function getFillerMedian(programPlayTime, channel, filler) {
let times = [];

View File

@ -61,7 +61,6 @@ class PlexPlayer {
let plexTranscoder = new PlexTranscoder(this.clientId, server, plexSettings, channel, lineupItem);
this.plexTranscoder = plexTranscoder;
let watermark = this.context.watermark;
let splitScreen = this.context.splitScreen;
let ffmpeg = new FFMPEG(ffmpegSettings, channel); // Set the transcoder options
ffmpeg.setAudioOnly( this.context.audioOnly );
this.ffmpeg = ffmpeg;
@ -86,7 +85,7 @@ class PlexPlayer {
let emitter = new EventEmitter();
//setTimeout( () => {
let ff = await ffmpeg.spawnStream(stream.streamUrl, stream.streamStats, streamStart, streamDuration, watermark, lineupItem.type, splitScreen); // Spawn the ffmpeg process
let ff = await ffmpeg.spawnStream(stream.streamUrl, stream.streamStats, streamStart, streamDuration, watermark, lineupItem.type); // Spawn the ffmpeg process
ff.pipe(outStream, {'end':false} );
//}, 100);
plexTranscoder.startUpdatingPlex();

View File

@ -58,9 +58,6 @@ class ProgramPlayer {
this.delegate = new PlexPlayer(context);
}
this.context.watermark = helperFuncs.getWatermark( context.ffmpegSettings, context.channel, context.lineupItem.type);
// Compute split-screen separately from watermark so it's independent
// of overlay settings and filler overlay disabling.
this.context.splitScreen = helperFuncs.getSplitScreen( context.ffmpegSettings, context.channel );
}
cleanUp() {

View File

@ -125,13 +125,7 @@ function video( channelService, fillerDB, db, programmingService, activeChannelS
})
let channelNum = parseInt(req.query.channel, 10)
// For concat mode, attempt to read splitScreen from channel
let splitScreen = null;
try {
const helperFuncs = require('./helperFuncs');
splitScreen = helperFuncs.getSplitScreen(ffmpegSettings, channel);
} catch (e) {}
let ff = await ffmpeg.spawnConcat(`http://localhost:${process.env.PORT}/playlist?channel=${channelNum}&audioOnly=${audioOnly}&stepNumber={step}`, splitScreen);
let ff = await ffmpeg.spawnConcat(`http://localhost:${process.env.PORT}/playlist?channel=${channelNum}&audioOnly=${audioOnly}&stepNumber={step}`);
ff.pipe(res, { end: false} );
};
router.get('/video', async(req, res) => {
@ -533,24 +527,25 @@ function video( channelService, fillerDB, db, programmingService, activeChannelS
var data = "#EXTM3U\n"
data += `#EXT-X-VERSION:3\n#EXT-X-MEDIA-SEQUENCE:0\n#EXT-X-ALLOW-CACHE:YES\n#EXT-X-TARGETDURATION:60\n#EXT-X-PLAYLIST-TYPE:VOD\n`;
data += `#EXT-X-VERSION:3
#EXT-X-MEDIA-SEQUENCE:0
#EXT-X-ALLOW-CACHE:YES
#EXT-X-TARGETDURATION:60
#EXT-X-PLAYLIST-TYPE:VOD\n`;
let ffmpegSettings = db['ffmpeg-settings'].find()[0]
// Duration to advertise for each pseudo-segment (seconds). Keep slightly
// below TARGETDURATION to avoid premature reloads.
let cur = "59.0";
function addSegment(url) {
data += `#EXTINF:${cur},\n${url}\n`;
}
cur ="59.0";
if ( ffmpegSettings.enableFFMPEGTranscoding === true) {
addSegment(`${req.protocol}://${req.get('host')}/stream?channel=${channelNum}&first=0&m3u8=1&session=${sessionId}`)
//data += `#EXTINF:${cur},\n`;
data += `${req.protocol}://${req.get('host')}/stream?channel=${channelNum}&first=0&m3u8=1&session=${sessionId}\n`;
}
addSegment(`${req.protocol}://${req.get('host')}/stream?channel=${channelNum}&first=1&m3u8=1&session=${sessionId}`)
//data += `#EXTINF:${cur},\n`;
data += `${req.protocol}://${req.get('host')}/stream?channel=${channelNum}&first=1&m3u8=1&session=${sessionId}\n`
for (var i = 0; i < maxStreamsToPlayInARow - 1; i++) {
addSegment(`${req.protocol}://${req.get('host')}/stream?channel=${channelNum}&m3u8=1&session=${sessionId}`)
//data += `#EXTINF:${cur},\n`;
data += `${req.protocol}://${req.get('host')}/stream?channel=${channelNum}&m3u8=1&session=${sessionId}\n`
}
res.send(data)

View File

@ -86,18 +86,6 @@ module.exports = function ($timeout, $location, dizquetv, resolutionOptions, get
scope.showRotatedNote = false;
scope.channel.transcoding = {
targetResolution: "",
videoFlip: 'none',
}
scope.channel.splitScreen = {
// If true, use the global ffmpeg split-screen settings instead
// of the channel specific ones
useGlobal: true,
enabled: false,
source: "",
widthPercent: 35,
position: 'right',
stretch: false,
loop: true,
}
scope.channel.onDemand = {
isOnDemand : false,
@ -149,29 +137,7 @@ module.exports = function ($timeout, $location, dizquetv, resolutionOptions, get
}
if (typeof(scope.channel.transcoding) ==='undefined') {
scope.channel.transcoding = {
videoFlip: 'none',
};
}
if (typeof(scope.channel.transcoding.videoFlip) === 'undefined') {
scope.channel.transcoding.videoFlip = 'none';
}
if (typeof(scope.channel.splitScreen) === 'undefined') {
scope.channel.splitScreen = {
useGlobal: true,
enabled: false,
source: "",
widthPercent: 35,
position: 'right',
stretch: false,
loop: true,
}
}
if (typeof(scope.channel.splitScreen.position) === 'undefined') {
scope.channel.splitScreen.position = 'right';
}
if (typeof(scope.channel.splitScreen.stretch) === 'undefined') {
scope.channel.splitScreen.stretch = false;
scope.channel.transcoding = {};
}
if (
(scope.channel.transcoding.targetResolution == null)
@ -1830,20 +1796,6 @@ module.exports = function ($timeout, $location, dizquetv, resolutionOptions, get
})
}
scope.splitScreenOnChange = (event) => {
const formData = new FormData();
formData.append('video', event.target.files[0]);
// include channel number so server stores file under channel hierarchy and persists it
if (typeof(scope.channel) !== 'undefined' && scope.channel != null && typeof(scope.channel.number) !== 'undefined') {
formData.append('channel', scope.channel.number);
}
dizquetv.uploadVideo(formData).then((response) => {
scope.channel.splitScreen.source = response.data.fileUrl;
}).catch((err) => {
console.error('Error uploading split-screen video', err);
})
}
},

View File

@ -9,18 +9,11 @@ module.exports = function (dizquetv, resolutionOptions) {
//add validations to ffmpeg settings, especially commas in codec names
dizquetv.getFfmpegSettings().then((settings) => {
scope.settings = settings
// ensure videoFlip default exists
scope.settings.videoFlip = scope.settings.videoFlip || 'none';
scope.settings.splitScreenPosition = scope.settings.splitScreenPosition || 'right';
scope.settings.splitScreenStretch = scope.settings.splitScreenStretch || false;
})
scope.updateSettings = (settings) => {
delete scope.settingsError;
dizquetv.updateFfmpegSettings(settings).then((_settings) => {
scope.settings = _settings
scope.settings.videoFlip = scope.settings.videoFlip || 'none';
scope.settings.splitScreenPosition = scope.settings.splitScreenPosition || 'right';
scope.settings.splitScreenStretch = scope.settings.splitScreenStretch || false;
}).catch( (err) => {
if ( typeof(err.data) === "string") {
scope.settingsError = err.data;
@ -30,9 +23,6 @@ module.exports = function (dizquetv, resolutionOptions) {
scope.resetSettings = (settings) => {
dizquetv.resetFfmpegSettings(settings).then((_settings) => {
scope.settings = _settings
scope.settings.videoFlip = scope.settings.videoFlip || 'none';
scope.settings.splitScreenPosition = scope.settings.splitScreenPosition || 'right';
scope.settings.splitScreenStretch = scope.settings.splitScreenStretch || false;
})
}
scope.isTranscodingNotNeeded = () => {
@ -90,18 +80,6 @@ module.exports = function (dizquetv, resolutionOptions) {
{value: "yadif=1", description: "yadif send field"}
];
scope.uploadGlobalSplitVideoOnChange = (event) => {
const formData = new FormData();
formData.append('video', event.target.files[0]);
// No channel parameter -> generic upload
dizquetv.uploadVideo(formData).then((response) => {
scope.settings.splitScreenSource = response.data.fileUrl;
scope.$applyAsync();
}).catch((err) => {
console.error('Error uploading global split-screen video', err);
})
}
}
}
}

View File

@ -758,58 +758,6 @@
<small class='text-muted form-ext' >Renders a channel icon (also known as bug or Digital On-screen Graphic) on top of the channel&apos;s stream.</small>
</div>
<div class='row'>
<div class='col-sm-12'>
</div>
</div>
<!-- Split-Screen (per-channel) - moved out from watermark block -->
<hr />
<h7>Split-Screen (per-channel)</h7>
<div class='form-group'>
<input id="channelUseGlobalSplit" type="checkbox" ng-model="channel.splitScreen.useGlobal"></input>
<label for="channelUseGlobalSplit">Use Global Split-Screen Settings</label>
<small class='form-text text-muted'>If unchecked, the channel-specific split-screen settings below will be used instead of the global ones.</small>
</div>
<div ng-show="!channel.splitScreen.useGlobal">
<div class='form-check'>
<input class="form-check-input" type="checkbox" ng-model="channel.splitScreen.enabled" id="channelSplitEnabled"></input>
<label class="form-check-label" for="channelSplitEnabled">Enable Split-Screen Secondary Loop</label>
</div>
<div class='form-group' ng-show="channel.splitScreen.enabled">
<label>Secondary Source URL or Path</label>
<div class="input-group">
<input type="text" class="form-control form-control-sm" ng-model="channel.splitScreen.source" placeholder="http://example.com/side.m3u8 or /path/to/file.mp4"></input>
<div class="input-group-append">
<input type="file" accept="video/mp4,video/x-m4v" class="form-control-file" onchange="angular.element(this).scope().splitScreenOnChange(event)" />
</div>
</div>
<br></br>
<label>Secondary Width (%)</label>
<input type="number" class="form-control form-control-sm" ng-model="channel.splitScreen.widthPercent"></input>
<small class='form-text text-muted'>Percentage of width/height depending on position.</small>
<br></br>
<label>Position</label>
<select class="form-control form-control-sm" ng-model="channel.splitScreen.position">
<option value="right">Right</option>
<option value="left">Left</option>
<option value="top">Top</option>
<option value="bottom">Bottom</option>
</select>
<br></br>
<input id="channelSplitStretch" type="checkbox" ng-model="channel.splitScreen.stretch"></input>
<label for="channelSplitStretch">Stretch secondary (fill entire area)</label>
<small class='form-text text-muted'>If unchecked, maintains aspect ratio with padding.</small>
<br></br>
<input id="channelSplitLoop" type="checkbox" ng-model="channel.splitScreen.loop"></input>
<label for="channelSplitLoop">Loop secondary source</label>
</div>
</div>
<br>
<div ng-show="channel.watermark.enabled" class='row' >
<div class='col-md-3 col-lg-4 col-xl-5'>
@ -892,7 +840,7 @@
<small class='text-muted form-text' >Tick this if and only if the watermark is an animated GIF or PNG. It will make it loop or not loop according to the image&apos;s configuration. If the image is not animated, there will be playback errors.</small>
</div>
<br>
<div class='form-group'>
<label for="overlayDuration">
@ -932,16 +880,6 @@
</input>
<small class='text-muted form-text'>Leave unassigned to use the global setting</small>
</div>
<div class="form-group col-sm-auto">
<label for="channelVideoFlip">Video Flip:</label>
<select class="form-control custom-select" id="channelVideoFlip" ng-model="channel.transcoding.videoFlip">
<option value="none">None</option>
<option value="hflip">Horizontal Flip</option>
<option value="vflip">Vertical Flip</option>
</select>
<small class='text-muted form-text'>Leave unassigned to use the global setting</small>
</div>
</div>

View File

@ -151,15 +151,6 @@
<select class='form-control custom-select' ng-model="settings.deinterlaceFilter" aria-describedby="deinterlaceHelp"
ng-options="o.value as o.description for o in deinterlaceOptions" ></select>
<small id='deinterlaceHelp' class='form-text text-muted'>Deinterlace filter to use when video is interlaced. This is only needed when Plex transcoding is not used.</small>
<br ></br>
<label>Video Flip</label>
<select class='form-control custom-select' ng-model="settings.videoFlip" aria-describedby="videoFlipHelp">
<option value="none">None</option>
<option value="hflip">Horizontal Flip</option>
<option value="vflip">Vertical Flip</option>
</select>
<small id='videoFlipHelp' class='form-text text-muted'>Apply horizontal or vertical flip to the video stream.</small>
</div>
<div class="form-group">
@ -248,47 +239,6 @@
</div>
</div>
<br ></br>
<div class="row">
<div class="col-sm-9">
<div class="form-group">
<input id="enableSplitScreen" type="checkbox" ng-model="settings.splitScreenEnabled" ng-disabled="isTranscodingNotNeeded()" ></input>
<label for="enableSplitScreen">Enable Split-Screen Secondary Loop</label>
<small class="form-text text-muted">When enabled, a secondary video (local path or remote URL) will be played repeatedly and composited on the side of the main video.</small>
</div>
</div>
</div>
<div class="form-group" ng-show="settings.splitScreenEnabled">
<label>Secondary Source URL or Path</label>
<div class="input-group">
<input type="text" class="form-control form-control-sm" ng-model="settings.splitScreenSource" placeholder="http://example.com/side.m3u8 or /path/to/file.mp4"></input>
<div class="input-group-append">
<input type="file" accept="video/mp4,video/x-m4v" class="form-control-file" onchange="angular.element(this).scope().uploadGlobalSplitVideoOnChange(event)" />
</div>
</div>
<br></br>
<label>Secondary Width (%)</label>
<input type="number" class="form-control form-control-sm" ng-model="settings.splitScreenWidthPercent"></input>
<small class="form-text text-muted">Percentage of width/height depending on position.</small>
<br></br>
<label>Position</label>
<select class="form-control form-control-sm" ng-model="settings.splitScreenPosition">
<option value="right">Right</option>
<option value="left">Left</option>
<option value="top">Top</option>
<option value="bottom">Bottom</option>
</select>
<br></br>
<input id="splitScreenStretch" type="checkbox" ng-model="settings.splitScreenStretch"></input>
<label for="splitScreenStretch">Stretch secondary (fill entire area)</label>
<small class="form-text text-muted">If unchecked, maintains aspect ratio with padding.</small>
<br></br>
<input id="splitScreenLoop" type="checkbox" ng-model="settings.splitScreenLoop"></input>
<label for="splitScreenLoop">Loop secondary source</label>
<small class="form-text text-muted">If checked, dizqueTV will request ffmpeg to loop the secondary source when possible.</small>
</div>
<br ></br>
<div class="row">
<div class="col-sm-9">

View File

@ -173,14 +173,6 @@ module.exports = function ($http, $q) {
headers: { 'Content-Type': undefined }
}).then((d) => { return d.data })
},
uploadVideo: (file) => {
return $http({
method: 'POST',
url: '/api/upload/video',
data: file,
headers: { 'Content-Type': undefined }
}).then((d) => { return d.data })
},
updateChannel: (channel) => {
return $http({
method: 'PUT',