Update commands/

This commit is contained in:
Aleksandr Statciuk 2023-04-27 17:41:54 +03:00
parent eade07df8e
commit bf25ad1a12
9 changed files with 14 additions and 403 deletions

View file

@@ -1,34 +1,25 @@
const { db, file, parser, store, logger, id, api } = require('../../core')
const { db, file, parser, store, logger, api } = require('../../core')
const { program } = require('commander')
const _ = require('lodash')
const options = program
.option(
'--max-clusters <max-clusters>',
'Set maximum number of clusters',
parser.parseNumber,
256
)
.option('--input-dir <input-dir>', 'Set path to input directory', 'streams')
.parse(process.argv)
.opts()
async function main() {
logger.info('starting...')
logger.info(`number of clusters: ${options.maxClusters}`)
await saveToDatabase(await findStreams())
logger.info('done')
}
main()
async function findStreams() {
logger.info(`looking for streams...`)
logger.info(`loading channels...`)
await api.channels.load()
await api.streams.load()
logger.info(`looking for streams...`)
await db.streams.load()
const streams = []
@@ -40,7 +31,6 @@ async function findStreams() {
const stream = store.create()
const channel = await api.channels.find({ id: item.tvg.id })
const cached = (await api.streams.find({ url: item.url })) || {}
stream.set('channel', { channel: channel ? channel.id : null })
stream.set('title', { title: item.name })
@@ -48,14 +38,6 @@ async function findStreams() {
stream.set('url', { url: item.url })
stream.set('http_referrer', { http_referrer: item.http.referrer })
stream.set('user_agent', { user_agent: item.http['user-agent'] })
stream.set('status', { status: cached.status || 'online' })
stream.set('width', { width: cached.width || 0 })
stream.set('height', { height: cached.height || 0 })
stream.set('bitrate', { bitrate: cached.bitrate || 0 })
stream.set('frame_rate', { frame_rate: cached.frame_rate || 0 })
stream.set('added_at', { added_at: cached.added_at })
stream.set('updated_at', { updated_at: cached.updated_at })
stream.set('checked_at', { checked_at: cached.checked_at })
streams.push(stream)
}
@@ -69,20 +51,7 @@ async function saveToDatabase(streams = []) {
logger.info('saving to the database...')
await db.streams.reset()
const chunks = split(_.shuffle(streams), options.maxClusters)
for (const [i, chunk] of chunks.entries()) {
for (const stream of chunk) {
stream.set('cluster_id', { cluster_id: i + 1 })
await db.streams.insert(stream.data())
}
for (const stream of streams) {
await db.streams.insert(stream.data())
}
}
/**
 * Splits an array into `n` chunks of near-equal size, earlier chunks
 * receiving the extra elements when the division is uneven.
 *
 * Unlike the previous implementation, the caller's array is NOT mutated:
 * the chunking consumes a shallow copy.
 *
 * @param {Array} arr - Array to split (left untouched).
 * @param {number} n - Number of chunks to produce; trailing chunks are
 *   empty when `n` exceeds `arr.length`, and `n <= 0` yields `[]`.
 * @returns {Array[]} Array of `n` chunks.
 */
function split(arr, n) {
  const rest = [...arr] // work on a copy so the caller's array is not emptied
  const result = []
  for (let i = n; i > 0; i--) {
    result.push(rest.splice(0, Math.ceil(rest.length / i)))
  }
  return result
}