Mirror of https://github.com/iptv-org/iptv.git (synced 2025-05-12 10:00:05 -04:00)

Commit bf25ad1a12: Update commands/
Parent: eade07df8e

9 changed files with 14 additions and 403 deletions
@@ -4,9 +4,7 @@ mkdir -p scripts/data
 curl -L -o scripts/data/blocklist.json https://iptv-org.github.io/api/blocklist.json
 curl -L -o scripts/data/categories.json https://iptv-org.github.io/api/categories.json
 curl -L -o scripts/data/channels.json https://iptv-org.github.io/api/channels.json
-curl -L -o scripts/data/streams.json https://iptv-org.github.io/api/streams.json
 curl -L -o scripts/data/countries.json https://iptv-org.github.io/api/countries.json
-curl -L -o scripts/data/guides.json https://iptv-org.github.io/api/guides.json
 curl -L -o scripts/data/languages.json https://iptv-org.github.io/api/languages.json
 curl -L -o scripts/data/regions.json https://iptv-org.github.io/api/regions.json
 curl -L -o scripts/data/subdivisions.json https://iptv-org.github.io/api/subdivisions.json
@@ -1,65 +0,0 @@
-const { db, logger, timer, checker, store, file, parser } = require('../../core')
-const { program } = require('commander')
-
-const options = program
-  .requiredOption('-c, --cluster-id <cluster-id>', 'The ID of cluster to load', parser.parseNumber)
-  .option('-t, --timeout <timeout>', 'Set timeout for each request', parser.parseNumber, 60000)
-  .option('-d, --delay <delay>', 'Set delay for each request', parser.parseNumber, 0)
-  .option('--debug', 'Enable debug mode', false)
-  .parse(process.argv)
-  .opts()
-
-const config = {
-  timeout: options.timeout,
-  delay: options.delay,
-  debug: options.debug
-}
-
-const LOGS_DIR = process.env.LOGS_DIR || 'scripts/logs/cluster/load'
-
-async function main() {
-  logger.info('starting...')
-  logger.info(`timeout: ${options.timeout}ms`)
-  logger.info(`delay: ${options.delay}ms`)
-  timer.start()
-
-  const clusterLog = `${LOGS_DIR}/cluster_${options.clusterId}.log`
-  logger.info(`loading cluster: ${options.clusterId}`)
-  logger.info(`creating '${clusterLog}'...`)
-  await file.create(clusterLog)
-  await db.streams.load()
-  const items = await db.streams.find({ cluster_id: options.clusterId })
-  const total = items.length
-  logger.info(`found ${total} links`)
-
-  logger.info('checking...')
-  const results = {}
-  for (const [i, item] of items.entries()) {
-    const message = `[${i + 1}/${total}] ${item.filepath}: ${item.url}`
-    const request = {
-      _id: item._id,
-      url: item.url,
-      http: {
-        referrer: item.http_referrer,
-        'user-agent': item.user_agent
-      }
-    }
-    const result = await checker.check(request, config)
-    if (!result.error) {
-      logger.info(message)
-    } else {
-      logger.info(`${message} (${result.error.message})`)
-    }
-    const output = {
-      _id: result._id,
-      error: result.error,
-      streams: result.streams,
-      requests: result.requests
-    }
-    await file.append(clusterLog, JSON.stringify(output) + '\n')
-  }
-
-  logger.info(`done in ${timer.format('HH[h] mm[m] ss[s]')}`)
-}
-
-main()
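Note: the deleted loader above wrote one JSON object per checked stream (the output object: _id, error, streams, requests) to scripts/logs/cluster/load/cluster_<id>.log, one line per result. A minimal sketch of reading such a log back into a map keyed by _id, using made-up sample lines:

// Hypothetical sample of two log lines in the format produced by the removed script.
const sampleLog = [
  '{"_id":"abc","error":null,"streams":[{"codec_type":"video","height":720}],"requests":[]}',
  '{"_id":"def","error":{"code":"HTTP_REQUEST_TIMEOUT","message":"timed out"},"streams":[],"requests":[]}'
].join('\n')

// Parse one result per line and index by _id, roughly what the (also removed)
// update script did through parser.parseLogs().
const results = {}
for (const line of sampleLog.split('\n').filter(Boolean)) {
  const item = JSON.parse(line)
  results[item._id] = item
}

console.log(Object.keys(results).length) // 2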
@@ -1,47 +0,0 @@
-const { logger, parser, db, date } = require('../../core')
-const { program } = require('commander')
-
-const options = program
-  .option(
-    '-t, --threshold <threshold>',
-    'Number of days after which the stream should be deleted',
-    parser.parseNumber,
-    7
-  )
-  .option('--input-dir <input-dir>', 'Set path to input directory', 'streams')
-  .parse(process.argv)
-  .opts()
-
-async function main() {
-  await db.streams.load()
-
-  const streams = await db.streams.all()
-
-  let buffer = {}
-  let removed = 0
-  logger.info('searching...')
-  for (const stream of streams) {
-    if (
-      stream.status === 'error' &&
-      date.utc().diff(stream.updated_at, 'day') >= options.threshold
-    ) {
-      logger.info(`${stream.url} (offline)`)
-      removed += await db.streams.remove({ url: stream.url }, { multi: true })
-    }
-
-    const key = stream.url.toLowerCase()
-    if (buffer[key]) {
-      logger.info(`${stream.url} (duplicate)`)
-      await db.streams.remove({ _id: stream._id })
-      removed++
-    } else {
-      buffer[key] = true
-    }
-  }
-
-  await db.streams.compact()
-
-  logger.info(`removed ${removed} streams`)
-}
-
-main()
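Note: the deleted cleanup script above pruned the streams database in two passes per stream: streams whose status had been 'error' for at least --threshold days were removed as offline, and later occurrences of the same URL (compared case-insensitively) were removed as duplicates. A standalone sketch of the duplicate check, with hypothetical sample data:

// First stream with a given URL (lowercased) wins; later ones count as duplicates.
const streams = [
  { _id: 1, url: 'https://example.com/live.m3u8' },
  { _id: 2, url: 'https://EXAMPLE.com/live.m3u8' },
  { _id: 3, url: 'https://example.com/other.m3u8' }
]

const buffer = {}
const duplicates = []
for (const stream of streams) {
  const key = stream.url.toLowerCase()
  if (buffer[key]) duplicates.push(stream._id)
  else buffer[key] = true
}

console.log(duplicates) // [ 2 ]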
@@ -1,34 +1,25 @@
-const { db, file, parser, store, logger, id, api } = require('../../core')
+const { db, file, parser, store, logger, api } = require('../../core')
 const { program } = require('commander')
 const _ = require('lodash')
 
 const options = program
-  .option(
-    '--max-clusters <max-clusters>',
-    'Set maximum number of clusters',
-    parser.parseNumber,
-    256
-  )
   .option('--input-dir <input-dir>', 'Set path to input directory', 'streams')
   .parse(process.argv)
   .opts()
 
 async function main() {
   logger.info('starting...')
-  logger.info(`number of clusters: ${options.maxClusters}`)
 
   await saveToDatabase(await findStreams())
-
-  logger.info('done')
 }
 
 main()
 
 async function findStreams() {
-  logger.info(`looking for streams...`)
+  logger.info(`loading channels...`)
 
   await api.channels.load()
-  await api.streams.load()
+  logger.info(`looking for streams...`)
   await db.streams.load()
 
   const streams = []
@@ -40,7 +31,6 @@ async function findStreams() {
 
     const stream = store.create()
     const channel = await api.channels.find({ id: item.tvg.id })
-    const cached = (await api.streams.find({ url: item.url })) || {}
 
     stream.set('channel', { channel: channel ? channel.id : null })
     stream.set('title', { title: item.name })
@@ -48,14 +38,6 @@
     stream.set('url', { url: item.url })
     stream.set('http_referrer', { http_referrer: item.http.referrer })
     stream.set('user_agent', { user_agent: item.http['user-agent'] })
-    stream.set('status', { status: cached.status || 'online' })
-    stream.set('width', { width: cached.width || 0 })
-    stream.set('height', { height: cached.height || 0 })
-    stream.set('bitrate', { bitrate: cached.bitrate || 0 })
-    stream.set('frame_rate', { frame_rate: cached.frame_rate || 0 })
-    stream.set('added_at', { added_at: cached.added_at })
-    stream.set('updated_at', { updated_at: cached.updated_at })
-    stream.set('checked_at', { checked_at: cached.checked_at })
 
     streams.push(stream)
   }
@@ -69,20 +51,7 @@ async function saveToDatabase(streams = []) {
   logger.info('saving to the database...')
 
   await db.streams.reset()
-  const chunks = split(_.shuffle(streams), options.maxClusters)
-  for (const [i, chunk] of chunks.entries()) {
-    for (const stream of chunk) {
-      stream.set('cluster_id', { cluster_id: i + 1 })
-
-      await db.streams.insert(stream.data())
-    }
-  }
-}
-
-function split(arr, n) {
-  let result = []
-  for (let i = n; i > 0; i--) {
-    result.push(arr.splice(0, Math.ceil(arr.length / i)))
-  }
-  return result
+  for (const stream of streams) {
+    await db.streams.insert(stream.data())
+  }
 }
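Note: before this change, streams were shuffled and distributed across --max-clusters chunks by the removed split helper, and each stream was tagged with a cluster_id; afterwards every stream is inserted as-is. The helper below is copied from the removed lines, followed by a small usage example:

// Splits an array into n roughly equal chunks (copied from the removed code).
function split(arr, n) {
  let result = []
  for (let i = n; i > 0; i--) {
    result.push(arr.splice(0, Math.ceil(arr.length / i)))
  }
  return result
}

console.log(split([1, 2, 3, 4, 5, 6, 7], 3)) // [ [ 1, 2, 3 ], [ 4, 5 ], [ 6, 7 ] ]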
@@ -1,15 +1,12 @@
-const { logger, db, api, file } = require('../../core')
+const { logger, db, file } = require('../../core')
 const _ = require('lodash')
-const dayjs = require('dayjs')
-const utc = require('dayjs/plugin/utc')
-dayjs.extend(utc)
 
 const PUBLIC_DIR = process.env.PUBLIC_DIR || '.api'
 
 async function main() {
-  await api.streams.load()
+  logger.info(`loading streams...`)
   await db.streams.load()
-  const now = dayjs.utc().format()
   let streams = await db.streams.find({})
   streams = _.sortBy(streams, 'channel')
   streams = streams.map(stream => {
@@ -17,36 +14,14 @@ async function main() {
       channel: stream.channel,
       url: stream.url,
       http_referrer: stream.http_referrer,
-      user_agent: stream.user_agent,
-      status: stream.status,
-      width: stream.width,
-      height: stream.height,
-      bitrate: stream.bitrate,
-      frame_rate: stream.frame_rate
+      user_agent: stream.user_agent
     }
 
-    let addedAt = now
-    let updatedAt = now
-    let found = api.streams.find({ url: stream.url })
-    if (found) {
-      data = JSON.parse(JSON.stringify(data))
-      normalized = _.omit(found, ['added_at', 'updated_at', 'checked_at'])
-      if (_.isEqual(data, normalized)) {
-        addedAt = found.added_at || now
-        updatedAt = found.updated_at || now
-      } else {
-        addedAt = found.added_at || now
-        updatedAt = now
-      }
-    }
-
-    data.added_at = addedAt
-    data.updated_at = updatedAt
-    data.checked_at = now
-
     return data
   })
+  logger.info(`found ${streams.length} streams`)
 
+  logger.info('saving to .api/streams.json...')
   await file.create(`${PUBLIC_DIR}/streams.json`, JSON.stringify(streams))
 }
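Note: with the status, resolution, bitrate and timestamp fields dropped, each record written to .api/streams.json now carries only the four fields kept above. A hedged sketch of one resulting entry (the values are invented for illustration):

// Hypothetical entry in the generated .api/streams.json after this commit;
// only the field names are taken from the diff above.
const entry = {
  channel: 'ExampleTV.us', // channel id, or null when the stream is unmatched
  url: 'https://example.com/stream.m3u8',
  http_referrer: null,
  user_agent: null
}

console.log(JSON.stringify(entry))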
@@ -1,16 +0,0 @@
-const { logger, db } = require('../../core')
-
-async function main() {
-  await db.streams.load()
-  const docs = await db.streams.find({}).sort({ cluster_id: 1 })
-  const cluster_id = docs.reduce((acc, curr) => {
-    if (!acc.includes(curr.cluster_id)) acc.push(curr.cluster_id)
-    return acc
-  }, [])
-
-  const matrix = { cluster_id }
-  const output = `MATRIX=${JSON.stringify(matrix)}`
-  logger.info(output)
-}
-
-main()
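Note: the deleted script above collected the distinct cluster_id values and printed them as a MATRIX=... line, presumably so a CI workflow could fan out one job per cluster. A standalone sketch of that reduce step, with hypothetical sample documents:

// docs is made-up sample data; the reduce is copied from the removed script.
const docs = [{ cluster_id: 1 }, { cluster_id: 2 }, { cluster_id: 1 }, { cluster_id: 3 }]

const cluster_id = docs.reduce((acc, curr) => {
  if (!acc.includes(curr.cluster_id)) acc.push(curr.cluster_id)
  return acc
}, [])

console.log(`MATRIX=${JSON.stringify({ cluster_id })}`) // MATRIX={"cluster_id":[1,2,3]}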
@@ -1,161 +0,0 @@
-const { db, store, parser, file, logger } = require('../../core')
-const _ = require('lodash')
-const dayjs = require('dayjs')
-const utc = require('dayjs/plugin/utc')
-dayjs.extend(utc)
-
-const LOGS_DIR = process.env.LOGS_DIR || 'scripts/logs/cluster/load'
-
-async function main() {
-  const streams = await loadStreams()
-  const results = await loadResults()
-  const origins = await loadOrigins(results)
-
-  await updateStreams(streams, results, origins)
-}
-
-main()
-
-async function updateStreams(items = [], results = {}, origins = {}) {
-  logger.info('updating streams...')
-
-  let updated = 0
-  const now = dayjs.utc().format()
-  for (const item of items) {
-    const stream = store.create(item)
-    const result = results[item._id]
-    if (result) {
-      const status = parseStatus(result.error)
-      if (status) {
-        stream.set('status', { status })
-      }
-
-      if (result.streams.length) {
-        const { width, height, bitrate, frame_rate } = parseMediaInfo(result.streams)
-        stream.set('width', { width })
-        stream.set('height', { height })
-        stream.set('bitrate', { bitrate })
-        stream.set('frame_rate', { frame_rate })
-      }
-
-      if (result.requests.length) {
-        const origin = findOrigin(result.requests, origins)
-        if (origin) {
-          stream.set('url', { url: origin })
-        }
-      }
-    }
-
-    if (stream.changed) {
-      stream.set('updated_at', { updated_at: now })
-      await db.streams.update({ _id: stream.get('_id') }, stream.data())
-      updated++
-    }
-  }
-
-  db.streams.compact()
-
-  logger.info(`updated ${updated} streams`)
-  logger.info('done')
-}
-
-async function loadStreams() {
-  logger.info('loading streams...')
-
-  await db.streams.load()
-  const streams = await db.streams.find({})
-
-  logger.info(`found ${streams.length} streams`)
-
-  return streams
-}
-
-async function loadResults() {
-  logger.info('loading check results...')
-
-  const results = {}
-  const files = await file.list(`${LOGS_DIR}/cluster_*.log`)
-  for (const filepath of files) {
-    const parsed = await parser.parseLogs(filepath)
-    for (const item of parsed) {
-      results[item._id] = item
-    }
-  }
-
-  logger.info(`found ${Object.values(results).length} results`)
-
-  return results
-}
-
-async function loadOrigins(results = {}) {
-  logger.info('loading origins...')
-
-  const origins = {}
-  for (const { error, requests } of Object.values(results)) {
-    if (error || !Array.isArray(requests) || !requests.length) continue
-
-    let origin = requests.shift()
-    origin = new URL(origin.url)
-    for (const request of requests) {
-      const curr = new URL(request.url)
-      const key = curr.href.replace(/(^\w+:|^)/, '')
-      if (!origins[key] && curr.host === origin.host) {
-        origins[key] = origin.href
-      }
-    }
-  }
-
-  logger.info(`found ${_.uniq(Object.values(origins)).length} origins`)
-
-  return origins
-}
-
-function findOrigin(requests = [], origins = {}) {
-  if (origins && Array.isArray(requests)) {
-    requests = requests.map(r => r.url.replace(/(^\w+:|^)/, ''))
-    for (const url of requests) {
-      if (origins[url]) {
-        return origins[url]
-      }
-    }
-  }
-
-  return null
-}
-
-function parseMediaInfo(streams) {
-  streams = streams.filter(s => s.codec_type === 'video')
-  streams = streams.map(s => {
-    s.bitrate = s.tags && s.tags.variant_bitrate ? parseInt(s.tags.variant_bitrate) : 0
-    s.frame_rate = parseFrameRate(s.avg_frame_rate)
-
-    return s
-  })
-  streams = _.orderBy(streams, ['height', 'bitrate'], ['desc', 'desc'])
-
-  return _.head(streams) || {}
-}
-
-function parseFrameRate(frame_rate = '0/0') {
-  const parts = frame_rate.split('/')
-  const number = parseInt(parts[0]) / parseInt(parts[1])
-
-  return number > 0 ? Math.round(number * 100) / 100 : 0
-}
-
-function parseStatus(error) {
-  if (!error) return 'online'
-
-  switch (error.code) {
-    case 'FFMPEG_UNDEFINED':
-      return null
-    case 'HTTP_REQUEST_TIMEOUT':
-      return 'timeout'
-    case 'HTTP_FORBIDDEN':
-    case 'HTTP_UNAUTHORIZED':
-    case 'HTTP_UNAVAILABLE_FOR_LEGAL_REASONS':
-      return 'blocked'
-    default:
-      return 'error'
-  }
-}
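Note: the deleted updater above folded the checker results back into the database: parseStatus mapped checker error codes to a stream status, parseMediaInfo picked the best video variant, and parseFrameRate turned avg_frame_rate fractions (e.g. "30000/1001") into numbers. The fraction parser is copied below with a few example inputs:

// Copied from the removed script: converts "num/den" frame-rate strings to a
// number rounded to two decimals, falling back to 0.
function parseFrameRate(frame_rate = '0/0') {
  const parts = frame_rate.split('/')
  const number = parseInt(parts[0]) / parseInt(parts[1])

  return number > 0 ? Math.round(number * 100) / 100 : 0
}

console.log(parseFrameRate('30000/1001')) // 29.97
console.log(parseFrameRate('25/1'))       // 25
console.log(parseFrameRate())             // 0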
@@ -32,13 +32,7 @@ main()
 async function loadStreams() {
   await db.streams.load()
   let streams = await db.streams.find({})
-  streams = _.filter(streams, stream => stream.status !== 'error')
-  const levels = { online: 1, blocked: 2, timeout: 3, error: 4, default: 5 }
-  streams = orderBy(
-    streams,
-    ['channel', s => levels[s.status] || levels['default'], 'height', 'frame_rate', 'url'],
-    ['asc', 'asc', 'desc', 'desc', 'asc']
-  )
+  streams = orderBy(streams, ['channel', 'url'], ['asc', 'asc'])
   streams = _.uniqBy(streams, stream => stream.channel || _.uniqueId())
 
   await api.channels.load()
@@ -53,10 +47,6 @@ async function loadStreams() {
   let languages = await api.languages.all()
   languages = _.keyBy(languages, 'code')
 
-  await api.guides.load()
-  let guides = await api.guides.all()
-  guides = _.groupBy(guides, 'channel')
-
   streams = streams.map(stream => {
     const channel = channels[stream.channel] || null
     const filename = file.getFilename(stream.filepath)
@@ -64,14 +54,12 @@ async function loadStreams() {
     const defaultBroadcastArea = code ? [`c/${code.toUpperCase()}`] : []
 
     if (channel) {
-      stream.guides = Array.isArray(guides[channel.id]) ? guides[channel.id] : []
       stream.categories = channel.categories.map(id => categories[id]).filter(i => i)
       stream.languages = channel.languages.map(id => languages[id]).filter(i => i)
       stream.broadcast_area = channel.broadcast_area
       stream.is_nsfw = channel.is_nsfw
       stream.logo = channel.logo
     } else {
-      stream.guides = []
       stream.categories = []
       stream.languages = []
       stream.broadcast_area = defaultBroadcastArea
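Note: after this change the stream loader no longer filters out error streams or ranks them by status, height and frame_rate, and guide data is no longer attached; streams are ordered by channel and url and then reduced to one per channel (streams without a channel id are all kept). A small sketch of that dedupe step, using hypothetical data and the project's lodash dependency:

// One stream per channel id; unmatched streams each get a unique key and survive.
const _ = require('lodash')

const streams = [
  { channel: 'ExampleTV.us', url: 'https://example.com/a.m3u8' },
  { channel: 'ExampleTV.us', url: 'https://example.com/b.m3u8' },
  { channel: null, url: 'https://example.com/c.m3u8' },
  { channel: null, url: 'https://example.com/d.m3u8' }
]

const unique = _.uniqBy(streams, stream => stream.channel || _.uniqueId())
console.log(unique.length) // 3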
@@ -1,30 +0,0 @@
-const { create: createPlaylist } = require('../../core/playlist')
-const { db, logger, file } = require('../../core')
-const { orderBy } = require('natural-orderby')
-const _ = require('lodash')
-
-async function main() {
-  await db.streams.load()
-  let streams = await db.streams.find({})
-  const levels = { online: 1, blocked: 2, timeout: 3, error: 4, default: 5 }
-  streams = orderBy(
-    streams,
-    [
-      'channel',
-      s => (s.channel ? '' : s.title),
-      s => levels[s.status] || levels['default'],
-      'height',
-      'frame_rate',
-      'url'
-    ],
-    ['asc', 'asc', 'asc', 'desc', 'desc', 'asc']
-  )
-
-  const files = _.groupBy(streams, 'filepath')
-  for (const filepath in files) {
-    const playlist = createPlaylist(files[filepath], { public: false })
-    await file.create(filepath, playlist.toString())
-  }
-}
-
-main()
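Note: the deleted script above regenerated the playlist files in place, ordering entries with a status ranking before grouping them by filepath. A standalone sketch of that ranking (the levels map is copied from the removed code):

// Lower level sorts first; unknown or missing statuses fall back to 'default'.
const levels = { online: 1, blocked: 2, timeout: 3, error: 4, default: 5 }
const rank = status => levels[status] || levels['default']

console.log(['error', 'online', 'timeout', undefined].sort((a, b) => rank(a) - rank(b)))
// [ 'online', 'timeout', 'error', undefined ]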