Merge branch 'dev/1.4.x' into main

This commit is contained in:
vexorian 2021-09-19 14:07:56 -04:00
commit 601d52a4c2
11 changed files with 183 additions and 110 deletions

View File

@ -1,4 +1,4 @@
# dizqueTV 1.4.3
# dizqueTV 1.4.4-development
![Discord](https://img.shields.io/discord/711313431457693727?logo=discord&logoColor=fff&style=flat-square) ![GitHub top language](https://img.shields.io/github/languages/top/vexorian/dizquetv?logo=github&style=flat-square) ![Docker Pulls](https://img.shields.io/docker/pulls/vexorian/dizquetv?logo=docker&logoColor=fff&style=flat-square)
Create live TV channel streams from media on your Plex servers.

View File

@ -1053,6 +1053,19 @@ function api(db, channelDB, fillerDB, customShowDB, xmltvInterval, guideService
delete program.streams;
delete program.durationStr;
delete program.commercials;
if (
(typeof(program.duration) === 'undefined')
||
(program.duration <= 0)
) {
console.error(`Input contained a program with invalid duration: ${program.duration}. This program has been deleted`);
return [];
}
if (! Number.isInteger(program.duration) ) {
console.error(`Input contained a program with invalid duration: ${program.duration}. Duration got fixed to be integer.`);
program.duration = Math.ceil(program.duration);
}
return [ program ];
}
function cleanUpChannel(channel) {
@ -1063,10 +1076,15 @@ function api(db, channelDB, fillerDB, customShowDB, xmltvInterval, guideService
) {
channel.groupTitle = "dizqueTV";
}
channel.programs.forEach( cleanUpProgram );
channel.programs = channel.programs.flatMap( cleanUpProgram );
delete channel.fillerContent;
delete channel.filler;
channel.fallback.forEach( cleanUpProgram );
channel.fallback = channel.fallback.flatMap( cleanUpProgram );
channel.duration = 0;
for (let i = 0; i < channel.programs.length; i++) {
channel.duration += channel.programs[i].duration;
}
}
async function streamToolResult(toolRes, res) {

View File

@ -140,6 +140,10 @@ function recordPlayback(channelId, t0, lineupItem) {
}
}
// Drop the cached playback state for one channel so the next stream request
// recomputes its lineup position from scratch.
// NOTE(review): `cache` is a module-level map keyed by channel id —
// presumably the same store used by recordPlayback above; confirm in full file.
function clearPlayback(channelId) {
delete cache[channelId];
}
function clear() {
//it's not necessary to clear the playback cache and it may be undesirable
configCache = {};
@ -156,4 +160,5 @@ module.exports = {
getChannelConfig: getChannelConfig,
saveChannelConfig: saveChannelConfig,
getFillerLastPlayTime: getFillerLastPlayTime,
clearPlayback: clearPlayback,
}

View File

@ -5,5 +5,5 @@ module.exports = {
TVGUIDE_MAXIMUM_FLEX_DURATION : 6 * 60 * 60 * 1000,
TOO_FREQUENT: 100,
VERSION_NAME: "1.4.3"
VERSION_NAME: "1.4.4-development"
}

View File

@ -20,7 +20,7 @@
const path = require('path');
var fs = require('fs');
const TARGET_VERSION = 802;
const TARGET_VERSION = 803;
const STEPS = [
// [v, v2, x] : if the current version is v, call x(db), and version becomes v2
@ -42,6 +42,7 @@ const STEPS = [
[ 702, 800, (db,channels,dir) => reAddIcon(dir) ],
[ 800, 801, (db) => addImageCache(db) ],
[ 801, 802, () => addGroupTitle() ],
[ 802, 803, () => fixNonIntegerDurations() ],
]
const { v4: uuidv4 } = require('uuid');
@ -834,6 +835,48 @@ function addGroupTitle() {
console.log("Done migrating group titles in channels.");
}
// Migration step 802 -> 803: repair channel JSON files whose program
// durations were written as non-integer values by the random-slots bug (#350).
// Reads every channel file under $DATABASE/channels, rounds fractional
// durations up, recomputes the channel total, and rewrites only files that
// actually changed.
function fixNonIntegerDurations() {
    // Round every fractional program duration in one channel up to a whole
    // number and rebuild the channel's total duration. Returns whether
    // anything changed together with the (mutated) channel object.
    function migrateChannel(channel) {
        let repaired = 0;
        channel.duration = 0;
        for (let i = 0; i < channel.programs.length; i++) {
            const entry = channel.programs[i];
            if (!Number.isInteger(entry.duration)) {
                repaired++;
                entry.duration = Math.ceil(entry.duration);
                channel.programs[i] = entry;
            }
            channel.duration += entry.duration;
        }
        if (repaired != 0) {
            console.log(`Found ${repaired} non-integer durations in channel ${channel.number}, they were fixed but you should consider running random slots again so that the milliseconds are accurate.`);
        }
        return {
            fixed: (repaired != 0),
            newChannel: channel,
        };
    }
    console.log("Checking channels to make sure they weren't corrupted by random slots bug #350...");
    const channelsDir = path.join(process.env.DATABASE, 'channels');
    const files = fs.readdirSync(channelsDir);
    for (const file of files) {
        if (path.extname(file) !== '.json') {
            continue;
        }
        console.log("Checking durations in channel : " + file + "...");
        const channelPath = path.join(channelsDir, file);
        const channel = JSON.parse(fs.readFileSync(channelPath, 'utf-8'));
        const { fixed, newChannel } = migrateChannel(channel);
        if (fixed) {
            // Only rewrite the file when a duration was actually repaired.
            fs.writeFileSync(channelPath, JSON.stringify(newChannel), 'utf-8');
        }
    }
    console.log("Done checking channels.");
}

View File

@ -2,6 +2,7 @@ module.exports = {
getCurrentProgramAndTimeElapsed: getCurrentProgramAndTimeElapsed,
createLineup: createLineup,
getWatermark: getWatermark,
generateChannelContext: generateChannelContext,
}
let channelCache = require('./channel-cache');
@ -10,6 +11,17 @@ const randomJS = require("random-js");
const Random = randomJS.Random;
const random = new Random( randomJS.MersenneTwister19937.autoSeed() );
const CHANNEL_CONTEXT_KEYS = [
"disableFillerOverlay",
"watermark",
"icon",
"offlinePicture",
"offlineSoundtrack",
"name",
"transcoding",
"number",
];
module.exports.random = random;
function getCurrentProgramAndTimeElapsed(date, channel) {
@ -17,7 +29,6 @@ function getCurrentProgramAndTimeElapsed(date, channel) {
if (channelStartTime > date) {
let t0 = date;
let t1 = channelStartTime;
console.log(t0, t1);
console.log("Channel start time is above the given date. Flex time is picked till that.");
return {
program: {
@ -173,10 +184,11 @@ function pickRandomWithMaxDuration(channel, fillers, maxDuration) {
list = list.concat(fillers[i].content);
}
let pick1 = null;
let pick2 = null;
let t0 = (new Date()).getTime();
let minimumWait = 1000000000;
const D = 7*24*60*60*1000;
const E = 5*60*60*1000;
if (typeof(channel.fillerRepeatCooldown) === 'undefined') {
channel.fillerRepeatCooldown = 30*60*1000;
}
@ -186,7 +198,7 @@ function pickRandomWithMaxDuration(channel, fillers, maxDuration) {
list = fillers[j].content;
let pickedList = false;
let n = 0;
let m = 0;
for (let i = 0; i < list.length; i++) {
let clip = list[i];
// a few extra milliseconds won't hurt anyone, would it? dun dun dun
@ -194,7 +206,6 @@ function pickRandomWithMaxDuration(channel, fillers, maxDuration) {
let t1 = channelCache.getProgramLastPlayTime( channel.number, clip );
let timeSince = ( (t1 == 0) ? D : (t0 - t1) );
if (timeSince < channel.fillerRepeatCooldown - SLACK) {
let w = channel.fillerRepeatCooldown - timeSince;
if (clip.duration + w <= maxDuration + SLACK) {
@ -211,6 +222,7 @@ function pickRandomWithMaxDuration(channel, fillers, maxDuration) {
if ( weighedPick(fillers[j].weight, listM) ) {
pickedList = true;
fillerId = fillers[j].id;
n = 0;
} else {
break;
}
@ -223,29 +235,20 @@ function pickRandomWithMaxDuration(channel, fillers, maxDuration) {
break;
}
}
if (timeSince >= D) {
let p = 200, q = Math.max( maxDuration - clip.duration, 1 );
let pq = Math.min( Math.ceil(p / q), 10 );
let w = pq;
n += w;
if ( weighedPick(w, n) ) {
pick1 = clip;
}
} else {
let adjust = Math.floor(timeSince / (60*1000));
if (adjust > 0) {
adjust = adjust * adjust;
//weighted
m += adjust;
if ( weighedPick(adjust, m) ) {
pick2 = clip;
}
}
if (timeSince <= 0) {
continue;
}
let s = norm_s( (timeSince >= E) ? E : timeSince );
let d = norm_d( clip.duration);
let w = s + d;
n += w;
if (weighedPick(w,n)) {
pick1 = clip;
}
}
}
}
let pick = (pick1 == null) ? pick2: pick1;
let pick = pick1;
let pickTitle = "null";
if (pick != null) {
pickTitle = pick.title;
@ -260,6 +263,23 @@ function pickRandomWithMaxDuration(channel, fillers, maxDuration) {
}
}
// Normalized weight for a filler clip's duration: convert to minutes,
// log-damp anything past three minutes so very long clips don't dominate,
// then quantize into a small positive integer bucket.
function norm_d(x) {
    let minutes = x / (60 * 1000);
    if (minutes >= 3.0) {
        minutes = 3.0 + Math.log(minutes);
    }
    const scaled = 10000 * (Math.ceil(minutes * 1000) + 1);
    return Math.ceil(scaled / 1000000) + 1;
}
// Normalized weight for time-since-last-play: bucket the elapsed
// milliseconds into 600ms steps, square so longer waits dominate,
// then scale down into a small positive integer.
function norm_s(x) {
    const bucket = Math.ceil(x / 600) + 1;
    return Math.ceil((bucket * bucket) / 1000000) + 1;
}
// any channel thing used here should be added to channel context
function getWatermark( ffmpegSettings, channel, type) {
if (! ffmpegSettings.enableFFMPEGTranscoding || ffmpegSettings.disableChannelOverlay ) {
return null;
@ -301,3 +321,15 @@ function getWatermark( ffmpegSettings, channel, type) {
return result;
}
// Build a deep-copied snapshot of only the channel fields the player
// pipeline needs (the whitelist in CHANNEL_CONTEXT_KEYS), so a running
// stream keeps a stable copy even if the channel object changes later.
function generateChannelContext(channel) {
    const context = {};
    for (const key of CHANNEL_CONTEXT_KEYS) {
        if (typeof channel[key] !== 'undefined') {
            // JSON round-trip yields a detached deep copy of the value.
            context[key] = JSON.parse(JSON.stringify(channel[key]));
        }
    }
    return context;
}

View File

@ -41,7 +41,7 @@ class M3uService {
channels.sort((a, b) => {
return a.number < b.number ? -1 : 1
return parseInt(a.number) < parseInt(b.number) ? -1 : 1
});
const tvg = `{{host}}/api/xmltv.xml`;

View File

@ -405,10 +405,14 @@ module.exports = async( programs, schedule ) => {
}
} else if (flexBetween) {
//just distribute it equitably
let div = rem / pads.length;
let div = Math.floor( rem / pads.length );
let totalAdded = 0;
for (let i = 0; i < pads.length; i++) {
pads[i].pad += div;
totalAdded += div;
}
pads[0].pad += rem - totalAdded;
} else {
//also add div to the latest item
pads[ pads.length - 1].pad += rem;

View File

@ -2,7 +2,7 @@ const express = require('express')
const helperFuncs = require('./helperFuncs')
const FFMPEG = require('./ffmpeg')
const FFMPEG_TEXT = require('./ffmpegText')
const PlexTranscoder = require('./plexTranscoder')
const constants = require('./constants')
const fs = require('fs')
const ProgramPlayer = require('./program-player');
const channelCache = require('./channel-cache')
@ -121,7 +121,7 @@ function video( channelDB , fillerDB, db) {
} );
// Stream individual video to ffmpeg concat above. This is used by the server, NOT the client
router.get('/stream', async (req, res) => {
let streamFunction = async (req, res, t0, allowSkip) => {
// Check if channel queried is valid
res.on("error", (e) => {
console.error("There was an unexpected error in stream.", e);
@ -166,7 +166,6 @@ function video( channelDB , fillerDB, db) {
// Get video lineup (array of video urls with calculated start times and durations.)
let t0 = (new Date()).getTime();
let lineupItem = channelCache.getCurrentLineupItem( channel.number, t0);
let prog = null;
let brandChannel = channel;
@ -242,12 +241,15 @@ function video( channelDB , fillerDB, db) {
duration: t,
isOffline : true,
};
} else if (prog.program.isOffline && prog.program.duration - prog.timeElapsed <= 10000) {
} else if ( allowSkip && (prog.program.isOffline && prog.program.duration - prog.timeElapsed <= constants.SLACK + 1) ) {
//it's pointless to show the offline screen for such a short time, might as well
//skip to the next program
prog.programIndex = (prog.programIndex + 1) % channel.programs.length;
prog.program = channel.programs[prog.programIndex ];
prog.timeElapsed = 0;
let dt = prog.program.duration - prog.timeElapsed;
for (let i = 0; i < redirectChannels.length; i++) {
channelCache.clearPlayback(redirectChannels[i].number );
}
console.log("Too litlle time before the filler ends, skip to next slot");
return await streamFunction(req, res, t0 + dt + 1, false);
}
if ( (prog == null) || (typeof(prog) === 'undefined') || (prog.program == null) || (typeof(prog.program) == "undefined") ) {
throw "No video to play, this means there's a serious unexpected bug or the channel db is corrupted."
@ -305,7 +307,7 @@ function video( channelDB , fillerDB, db) {
};
}
let combinedChannel = JSON.parse( JSON.stringify(brandChannel) );
let combinedChannel = helperFuncs.generateChannelContext(brandChannel);
combinedChannel.transcoding = channel.transcoding;
let playerContext = {
@ -360,6 +362,11 @@ function video( channelDB , fillerDB, db) {
console.log("Client Closed");
stop();
});
};
router.get('/stream', async (req, res) => {
let t0 = (new Date).getTime();
return await streamFunction(req, res, t0, true);
});

View File

@ -123,8 +123,19 @@ module.exports = function (plex, dizquetv, $timeout, commonProgramTools) {
}
scope.fillNestedIfNecessary = async (x, isLibrary) => {
if ( (typeof(x.nested) === 'undefined') && (x.type !== 'collection') ) {
if (typeof(x.nested) === 'undefined') {
x.nested = await plex.getNested(scope.plexServer, x, isLibrary, scope.errors);
if (x.type === "collection" && x.collectionType === "show") {
let nested = x.nested;
x.nested = [];
for (let i = 0; i < nested.length; i++) {
let subNested = await plex.getNested(scope.plexServer, nested[i], false, scope.errors);
for (let j = 0; j < subNested.length; j++) {
subNested[j].title = nested[i].title + " - " + subNested[j].title;
x.nested.push( subNested[j] );
}
}
}
}
}
scope.getNested = (list, isLibrary) => {

View File

@ -172,13 +172,13 @@ module.exports = function ($http, $window, $interval) {
var client = new Plex(server)
const key = lib.key
const res = await client.Get(key)
const size = res.Metadata !== 'undefined' ? res.Metadata.length : 0;
const size = (typeof(res.Metadata) !== 'undefined') ? res.Metadata.length : 0;
var nested = []
if (typeof (lib.genres) !== 'undefined') {
nested = Array.from(lib.genres)
}
var seenFiles = {};
var collections = {};
let albumKeys = {};
let albums = {};
@ -276,43 +276,6 @@ module.exports = function ($http, $window, $interval) {
program.episode = 1
program.season = 1
}
if (typeof (res.Metadata[i].Collection) !== 'undefined') {
let coll = res.Metadata[i].Collection;
if (coll.length == 2) {
// the /all endpoint returns incomplete data, so we
// might have to complete the list of collections
// when there are already 2 collections there.
//console.log(res.Metadata[i]);
let complete = {}
try {
complete = await client.Get(`/library/metadata/${res.Metadata[i].ratingKey}`);
} catch (err) {
console.error("Error attempting to load collections", err);
}
if (
(typeof(complete.Metadata) !== 'undefined')
&&
(complete.Metadata.length == 1)
&&
(typeof(complete.Metadata[0].Collection) !== 'undefined')
&&
( complete.Metadata[0].Collection.length > 2)
) {
coll = complete.Metadata[0].Collection;
}
}
for (let j = 0; j < coll.length; j++) {
let tag = coll[j].tag;
if ( (typeof(tag)!== "undefined") && (tag.length > 0) ) {
let collection = collections[tag];
if (typeof(collection) === 'undefined') {
collection = [];
collections[tag] = collection;
}
collection.push( program );
}
}
}
nested.push(program)
} catch(err) {
let msg = "Error when attempting to read nested data for " + key + " " + res.Metadata[i].title;
@ -320,40 +283,30 @@ module.exports = function ($http, $window, $interval) {
console.error(msg , err);
}
}
if (includeCollections === true) {
if ( (includeCollections === true) && (res.viewGroup !== "artist" ) ) {
let k = res.librarySectionID;
k = `/library/sections/${k}/collections`;
let collections = await client.Get(k);
if ( typeof(collections.Metadata) === 'undefined') {
collections.Metadata = [];
}
let directories = collections.Metadata;
let nestedCollections = [];
let keys = [];
Object.keys(collections).forEach(function(key,index) {
keys.push(key);
});
for (let k = 0; k < keys.length; k++) {
let key = keys[k];
if ( !(collections[key].length >= 1) ) {
//it's pointless to include it.
continue;
for (let i = 0; i < directories.length; i++) {
let title;
if (res.viewGroup === "show") {
title = directories[i].title + " Collection"
} else {
title = directories[i].title;
}
let collection = {
title: key,
key: "#collection",
icon : "",
type : "collection",
nested: collections[key],
}
if (res.viewGroup === 'show') {
collection.title = collection.title + " Collection";
//nest the seasons directly because that's way too many depth levels already
let shows = collection.nested;
let collectionContents = [];
for (let i = 0; i < shows.length; i++) {
let seasons = await exported.getNested(server, shows[i], false);
for (let j = 0; j < seasons.length; j++) {
seasons[j].title = shows[i].title + " - " + seasons[j].title;
collectionContents.push(seasons[j]);
}
}
collection.nested = collectionContents;
}
nestedCollections.push( collection );
nestedCollections.push( {
key : directories[i].key,
title : title,
type: "collection",
collectionType : res.viewGroup,
} );
}
nested = nestedCollections.concat(nested);
}