Merge branch 'dev/1.4.x' into dev/1.5.x

This commit is contained in:
vexorian 2021-09-13 10:27:46 -04:00
commit a8f8622072
4 changed files with 70 additions and 96 deletions

View File

@ -29,7 +29,6 @@ function getCurrentProgramAndTimeElapsed(date, channel) {
if (channelStartTime > date) {
let t0 = date;
let t1 = channelStartTime;
console.log(t0, t1);
console.log("Channel start time is above the given date. Flex time is picked till that.");
return {
program: {
@ -185,10 +184,11 @@ function pickRandomWithMaxDuration(channel, fillers, maxDuration) {
list = list.concat(fillers[i].content);
}
let pick1 = null;
let pick2 = null;
let t0 = (new Date()).getTime();
let minimumWait = 1000000000;
const D = 7*24*60*60*1000;
const E = 5*60*60*1000;
if (typeof(channel.fillerRepeatCooldown) === 'undefined') {
channel.fillerRepeatCooldown = 30*60*1000;
}
@ -198,7 +198,7 @@ function pickRandomWithMaxDuration(channel, fillers, maxDuration) {
list = fillers[j].content;
let pickedList = false;
let n = 0;
let m = 0;
for (let i = 0; i < list.length; i++) {
let clip = list[i];
// a few extra milliseconds won't hurt anyone, would it? dun dun dun
@ -206,7 +206,6 @@ function pickRandomWithMaxDuration(channel, fillers, maxDuration) {
let t1 = channelCache.getProgramLastPlayTime( channel.number, clip );
let timeSince = ( (t1 == 0) ? D : (t0 - t1) );
if (timeSince < channel.fillerRepeatCooldown - SLACK) {
let w = channel.fillerRepeatCooldown - timeSince;
if (clip.duration + w <= maxDuration + SLACK) {
@ -223,6 +222,7 @@ function pickRandomWithMaxDuration(channel, fillers, maxDuration) {
if ( weighedPick(fillers[j].weight, listM) ) {
pickedList = true;
fillerId = fillers[j].id;
n = 0;
} else {
break;
}
@ -235,29 +235,23 @@ function pickRandomWithMaxDuration(channel, fillers, maxDuration) {
break;
}
}
if (timeSince >= D) {
let p = 200, q = Math.max( maxDuration - clip.duration, 1 );
let pq = Math.min( Math.ceil(p / q), 10 );
let w = pq;
n += w;
if ( weighedPick(w, n) ) {
pick1 = clip;
}
if (timeSince <= 0) {
continue;
}
let s = norm_s( (timeSince >= E) ? E : timeSince );
let d = norm_d( clip.duration);
let w = s + d;
n += w;
if (weighedPick(w,n)) {
console.log(`${s} ${d} ${clip.title} picked `);
pick1 = clip;
} else {
let adjust = Math.floor(timeSince / (60*1000));
if (adjust > 0) {
adjust = adjust * adjust;
//weighted
m += adjust;
if ( weighedPick(adjust, m) ) {
pick2 = clip;
}
}
console.log(`${s} ${d} ${clip.title} not picked `);
}
}
}
}
let pick = (pick1 == null) ? pick2: pick1;
let pick = pick1;
let pickTitle = "null";
if (pick != null) {
pickTitle = pick.title;
@ -272,6 +266,22 @@ function pickRandomWithMaxDuration(channel, fillers, maxDuration) {
}
}
/**
 * Normalize a clip duration into a small integer weight for the
 * weighted filler-pick loop.
 *
 * The duration is converted from milliseconds to minutes; durations of
 * three minutes or more are damped logarithmically so very long clips
 * don't dominate the weighting. The result is scaled and ceiled so the
 * smallest possible return value is 2.
 *
 * @param {number} x - clip duration in milliseconds.
 * @returns {number} integer weight >= 2.
 */
function norm_d(x) {
    // Work on a local so the caller's argument is never mutated.
    let minutes = x / (60 * 1000);
    if (minutes >= 3.0) {
        // Logarithmic damping for clips of 3+ minutes.
        minutes = 3.0 + Math.log(minutes);
    }
    const y = 10000 * (Math.ceil(minutes * 1000) + 1);
    return Math.ceil(y / 1000000) + 1;
}
/**
 * Normalize a "time since last played" value into a small integer
 * weight for the weighted filler-pick loop.
 *
 * The elapsed time is bucketed into 600 ms steps, squared (so older
 * clips gain weight quadratically), then scaled down and ceiled so the
 * smallest possible return value is 2.
 *
 * @param {number} x - elapsed time in milliseconds.
 * @returns {number} integer weight >= 2.
 */
function norm_s(x) {
    const bucket = Math.ceil(x / 600) + 1;
    const squared = bucket * bucket;
    return Math.ceil(squared / 1000000) + 1;
}
// any channel thing used here should be added to channel context
function getWatermark( ffmpegSettings, channel, type) {
if (! ffmpegSettings.enableFFMPEGTranscoding || ffmpegSettings.disableChannelOverlay ) {
@ -319,7 +329,10 @@ function generateChannelContext(channel) {
let channelContext = {};
for (let i = 0; i < CHANNEL_CONTEXT_KEYS.length; i++) {
let key = CHANNEL_CONTEXT_KEYS[i];
channelContext[key] = JSON.parse( JSON.stringify(channel[key] ) );
if (typeof(channel[key]) !== 'undefined') {
channelContext[key] = JSON.parse( JSON.stringify(channel[key] ) );
}
}
return channelContext;
}

View File

@ -43,7 +43,7 @@ class M3uService {
channels.sort((a, b) => {
return a.number < b.number ? -1 : 1
return parseInt(a.number) < parseInt(b.number) ? -1 : 1
});
const tvg = `{{host}}/api/xmltv.xml`;

View File

@ -123,8 +123,19 @@ module.exports = function (plex, dizquetv, $timeout, commonProgramTools) {
}
scope.fillNestedIfNecessary = async (x, isLibrary) => {
if ( (typeof(x.nested) === 'undefined') && (x.type !== 'collection') ) {
if (typeof(x.nested) === 'undefined') {
x.nested = await plex.getNested(scope.plexServer, x, isLibrary, scope.errors);
if (x.type === "collection" && x.collectionType === "show") {
let nested = x.nested;
x.nested = [];
for (let i = 0; i < nested.length; i++) {
let subNested = await plex.getNested(scope.plexServer, nested[i], false, scope.errors);
for (let j = 0; j < subNested.length; j++) {
subNested[j].title = nested[i].title + " - " + subNested[j].title;
x.nested.push( subNested[j] );
}
}
}
}
}
scope.getNested = (list, isLibrary) => {

View File

@ -172,13 +172,13 @@ module.exports = function ($http, $window, $interval) {
var client = new Plex(server)
const key = lib.key
const res = await client.Get(key)
const size = res.Metadata !== 'undefined' ? res.Metadata.length : 0;
const size = (typeof(res.Metadata) !== 'undefined') ? res.Metadata.length : 0;
var nested = []
if (typeof (lib.genres) !== 'undefined') {
nested = Array.from(lib.genres)
}
var seenFiles = {};
var collections = {};
let albumKeys = {};
let albums = {};
@ -276,43 +276,6 @@ module.exports = function ($http, $window, $interval) {
program.episode = 1
program.season = 1
}
if (typeof (res.Metadata[i].Collection) !== 'undefined') {
let coll = res.Metadata[i].Collection;
if (coll.length == 2) {
// the /all endpoint returns incomplete data, so we
// might have to complete the list of collections
// when there are already 2 collections there.
//console.log(res.Metadata[i]);
let complete = {}
try {
complete = await client.Get(`/library/metadata/${res.Metadata[i].ratingKey}`);
} catch (err) {
console.error("Error attempting to load collections", err);
}
if (
(typeof(complete.Metadata) !== 'undefined')
&&
(complete.Metadata.length == 1)
&&
(typeof(complete.Metadata[0].Collection) !== 'undefined')
&&
( complete.Metadata[0].Collection.length > 2)
) {
coll = complete.Metadata[0].Collection;
}
}
for (let j = 0; j < coll.length; j++) {
let tag = coll[j].tag;
if ( (typeof(tag)!== "undefined") && (tag.length > 0) ) {
let collection = collections[tag];
if (typeof(collection) === 'undefined') {
collection = [];
collections[tag] = collection;
}
collection.push( program );
}
}
}
nested.push(program)
} catch(err) {
let msg = "Error when attempting to read nested data for " + key + " " + res.Metadata[i].title;
@ -320,40 +283,27 @@ module.exports = function ($http, $window, $interval) {
console.error(msg , err);
}
}
if (includeCollections === true) {
if ( (includeCollections === true) && (res.viewGroup !== "artist" ) ) {
let k = res.librarySectionID;
k = `/library/sections/${k}/collection`;
let collections = await client.Get(k);
let directories = collections.Directory;
let nestedCollections = [];
let keys = [];
Object.keys(collections).forEach(function(key,index) {
keys.push(key);
});
for (let k = 0; k < keys.length; k++) {
let key = keys[k];
if ( !(collections[key].length >= 1) ) {
//it's pointless to include it.
continue;
for (let i = 0; i < directories.length; i++) {
let title;
if (res.viewGroup === "show") {
title = directories[i].title + " Collection"
} else {
title = directories[i].title;
}
let collection = {
title: key,
key: "#collection",
icon : "",
type : "collection",
nested: collections[key],
}
if (res.viewGroup === 'show') {
collection.title = collection.title + " Collection";
//nest the seasons directly because that's way too many depth levels already
let shows = collection.nested;
let collectionContents = [];
for (let i = 0; i < shows.length; i++) {
let seasons = await exported.getNested(server, shows[i], false);
for (let j = 0; j < seasons.length; j++) {
seasons[j].title = shows[i].title + " - " + seasons[j].title;
collectionContents.push(seasons[j]);
}
}
collection.nested = collectionContents;
}
nestedCollections.push( collection );
nestedCollections.push( {
key : directories[i].fastKey,
title : title,
type: "collection",
collectionType : res.viewGroup,
} );
}
nested = nestedCollections.concat(nested);
}