Update scripts

This commit is contained in:
freearhey 2023-09-15 18:40:35 +03:00
parent 8a83f23243
commit f1d2add19a
98 changed files with 2423 additions and 1499 deletions

View file

@@ -1,33 +0,0 @@
const { create: createPlaylist } = require('../../core/playlist')
const { normalize: normalizeUrl } = require('../../core/url')
const { db, logger, file } = require('../../core')
const { orderBy } = require('natural-orderby')
const _ = require('lodash')
// Normalizes every stream URL, sorts the streams naturally (channel-less
// entries by title), then rewrites each playlist file grouped by filepath.
async function main() {
  logger.info('loading streams...')
  await db.streams.load()
  let streams = await db.streams.find({})

  for (const stream of streams) {
    stream.url = normalizeUrl(stream.url)
  }

  logger.info('sorting links...')
  const sorted = orderBy(
    streams,
    ['channel', stream => (stream.channel ? '' : stream.title), 'url'],
    ['asc', 'asc', 'asc']
  )

  logger.info('saving...')
  const grouped = _.groupBy(sorted, 'filepath')
  for (const [filepath, items] of Object.entries(grouped)) {
    const playlist = createPlaylist(items, { public: false })
    await file.create(filepath, playlist.toString())
  }
}

main()

View file

@@ -1,76 +0,0 @@
const { db, generator, api, logger, file } = require('../../core')
const { orderBy } = require('natural-orderby')
const _ = require('lodash')
// Regenerates every public playlist variant (categories/, countries/,
// languages/, regions/ and the index.*.m3u files) from the streams database.
async function main() {
  const streams = await loadStreams()

  // [log label, generator id] pairs, run in the original order.
  const targets = [
    ['categories/', 'categories'],
    ['countries/', 'countries'],
    ['languages/', 'languages'],
    ['regions/', 'regions'],
    ['index.category.m3u', 'index_category_m3u'],
    ['index.country.m3u', 'index_country_m3u'],
    ['index.language.m3u', 'index_language_m3u'],
    ['index.m3u', 'index_m3u'],
    ['index.nsfw.m3u', 'index_nsfw_m3u'],
    ['index.region.m3u', 'index_region_m3u']
  ]

  for (const [label, generatorId] of targets) {
    logger.info(`generating ${label}...`)
    await generator.generate(generatorId, streams)
  }
}

main()
// Builds the stream list for the generators: one stream per channel
// (channel-less streams all kept), enriched with channel, category and
// language metadata from the API, sorted by title.
async function loadStreams() {
  await db.streams.load()
  const found = await db.streams.find({})
  const sorted = orderBy(found, ['channel', 'url'], ['asc', 'asc'])
  let streams = _.uniqBy(sorted, stream => stream.channel || _.uniqueId())

  await api.channels.load()
  const channels = _.keyBy(await api.channels.all(), 'id')

  await api.categories.load()
  const categories = _.keyBy(await api.categories.all(), 'id')

  await api.languages.load()
  const languages = _.keyBy(await api.languages.all(), 'code')

  streams = streams.map(stream => {
    const channel = channels[stream.channel] || null
    // Playlists are named "<country code>[_*].m3u"; derive a fallback
    // broadcast area from that code when there is no channel record.
    const filename = file.getFilename(stream.filepath)
    const match = filename.match(/^([a-z]{2})(_|$)/)
    const code = match ? match[1] : null
    const defaultBroadcastArea = code ? [`c/${code.toUpperCase()}`] : []

    if (channel) {
      stream.categories = channel.categories.map(id => categories[id]).filter(Boolean)
      stream.languages = channel.languages.map(id => languages[id]).filter(Boolean)
      stream.broadcast_area = channel.broadcast_area
      stream.is_nsfw = channel.is_nsfw
      stream.logo = channel.logo
    } else {
      stream.categories = []
      stream.languages = []
      stream.broadcast_area = defaultBroadcastArea
      stream.is_nsfw = false
      stream.logo = null
    }

    return stream
  })

  return orderBy(streams, ['title'], ['asc'])
}

View file

@@ -0,0 +1,148 @@
import { File, Storage } from '../../core'
import { Stream, Category, Channel, Language, Country, Region, Subdivision } from '../../models'
import { Database } from '../../core/database'
import { Collection } from '../../core/collection'
import { Logger } from '../../core/logger'
import _ from 'lodash'
import {
CategoriesGenerator,
CountriesGenerator,
LanguagesGenerator,
RegionsGenerator,
IndexGenerator,
IndexNsfwGenerator,
IndexCategoryGenerator,
IndexCountryGenerator,
IndexLanguageGenerator,
IndexRegionGenerator
} from '../../generators'
import { DATA_DIR, DB_DIR, LOGS_DIR } from '../../constants'
// Regenerates every public playlist variant from the streams database and the
// API data files, logging generator output to LOGS_DIR/generators.log.
async function main() {
  const logger = new Logger()
  const storage = new Storage(DATA_DIR)

  // Load each API data file and wrap its records in the matching model class.
  const channels = new Collection(await storage.json('channels.json')).map(
    data => new Channel(data)
  )
  const categories = new Collection(await storage.json('categories.json')).map(
    data => new Category(data)
  )
  const countries = new Collection(await storage.json('countries.json')).map(
    data => new Country(data)
  )
  const languages = new Collection(await storage.json('languages.json')).map(
    data => new Language(data)
  )
  const regions = new Collection(await storage.json('regions.json')).map(data => new Region(data))
  const subdivisions = new Collection(await storage.json('subdivisions.json')).map(
    data => new Subdivision(data)
  )

  const streams = await loadStreams({ channels, categories, languages })

  const generatorsLogger = new Logger({
    stream: await new Storage(LOGS_DIR).createStream(`generators.log`)
  })

  // Log the target, then run its generator; order matches the output listing.
  const run = async (label: string, generate: () => Promise<void>) => {
    logger.info(`generating ${label}...`)
    await generate()
  }

  await run('categories/', () =>
    new CategoriesGenerator({ categories, streams, logger: generatorsLogger }).generate()
  )
  await run('countries/', () =>
    new CountriesGenerator({
      countries,
      streams,
      regions,
      subdivisions,
      logger: generatorsLogger
    }).generate()
  )
  await run('languages/', () =>
    new LanguagesGenerator({ streams, logger: generatorsLogger }).generate()
  )
  await run('regions/', () =>
    new RegionsGenerator({ streams, regions, subdivisions, logger: generatorsLogger }).generate()
  )
  await run('index.m3u', () => new IndexGenerator({ streams, logger: generatorsLogger }).generate())
  await run('index.nsfw.m3u', () =>
    new IndexNsfwGenerator({ streams, logger: generatorsLogger }).generate()
  )
  await run('index.category.m3u', () =>
    new IndexCategoryGenerator({ streams, logger: generatorsLogger }).generate()
  )
  await run('index.country.m3u', () =>
    new IndexCountryGenerator({
      streams,
      countries,
      regions,
      subdivisions,
      logger: generatorsLogger
    }).generate()
  )
  await run('index.language.m3u', () =>
    new IndexLanguageGenerator({ streams, logger: generatorsLogger }).generate()
  )
  await run('index.region.m3u', () =>
    new IndexRegionGenerator({ streams, regions, logger: generatorsLogger }).generate()
  )
}

main()
/**
 * Loads the streams database and joins in channel metadata (categories,
 * languages, broadcast area, NSFW flag, logo) for the generators.
 *
 * Keeps one stream per channel; streams without a channel id are all kept
 * and get a default broadcast area derived from their playlist filename.
 */
async function loadStreams({
  channels,
  categories,
  languages
}: {
  channels: Collection
  categories: Collection
  languages: Collection
}) {
  const groupedChannels = channels.keyBy(channel => channel.id)
  const groupedCategories = categories.keyBy(category => category.id)
  const groupedLanguages = languages.keyBy(language => language.code)

  const db = new Database(DB_DIR)
  const dbStreams = await db.load('streams.db')
  const docs = await dbStreams.find({})
  const streams = new Collection(docs as any[])
    .map((data: any) => new Stream(data))
    .orderBy([(stream: Stream) => stream.channel, (stream: Stream) => stream.url], ['asc', 'asc'])
    // _.uniqueId() gives every channel-less stream a distinct key, so only
    // streams sharing a real channel id are collapsed.
    .uniqBy((stream: Stream) => stream.channel || _.uniqueId())
    .map((stream: Stream) => {
      const channel: Channel | undefined = groupedChannels.get(stream.channel)
      if (channel) {
        const channelCategories = channel.categories
          .map((id: string) => groupedCategories.get(id))
          .filter(Boolean)
        const channelLanguages = channel.languages
          .map((id: string) => groupedLanguages.get(id))
          .filter(Boolean)
        stream.categories = channelCategories
        stream.languages = channelLanguages
        stream.broadcastArea = channel.broadcastArea
        stream.isNSFW = channel.isNSFW
        if (channel.logo) stream.logo = channel.logo
      } else {
        // Playlists are named "<country code>[_*].m3u". Use array elision
        // instead of `_`, which shadowed the lodash import with an unused
        // binding (and matches the validate script's destructuring).
        const file = new File(stream.filepath)
        const [, countryCode] = file.getFilename().match(/^([a-z]{2})(_|$)/) || [null, null]
        const defaultBroadcastArea = countryCode ? [`c/${countryCode.toUpperCase()}`] : []
        stream.broadcastArea = new Collection(defaultBroadcastArea)
      }

      return stream
    })

  return streams
}

View file

@@ -0,0 +1,143 @@
import { DB_DIR, DATA_DIR, STREAMS_DIR } from '../../constants'
import { Database, Storage, Logger, Collection, Dictionary, IssueLoader } from '../../core'
import { Stream, Playlist, Channel } from '../../models'
// Module-level state shared by main() and the issue handlers below.
let processedIssues = new Collection() // issue numbers applied in this run
let streams: Collection // streams loaded from streams.db, mutated in place
let groupedChannels: Dictionary // channels from channels.json, keyed by channel id
/**
 * Applies approved GitHub issues (streams:remove / streams:edit /
 * streams:add) to the streams database, rewrites the playlist files, and
 * prints an `OUTPUT=` line listing the processed issue numbers.
 */
async function main() {
  const logger = new Logger({ disabled: true })
  const loader = new IssueLoader()

  logger.info('loading streams...')
  const db = new Database(DB_DIR)
  const docs = await db.load('streams.db')
  const dbStreams = await docs.find({})
  streams = new Collection(dbStreams as any[]).map(data => new Stream(data))

  const storage = new Storage(DATA_DIR)
  const channelsContent = await storage.json('channels.json')
  groupedChannels = new Collection(channelsContent)
    .map(data => new Channel(data))
    .keyBy((channel: Channel) => channel.id)

  logger.info('removing broken streams...')
  await removeStreams(loader)

  logger.info('edit stream description...')
  await editStreams(loader)

  logger.info('add new streams...')
  await addStreams(loader)

  logger.info('normalizing links...')
  streams = streams.map(stream => {
    stream.normalizeURL()
    return stream
  })

  logger.info('sorting links...')
  streams = streams.orderBy(
    [
      (stream: Stream) => stream.name,
      (stream: Stream) => parseInt(stream.quality.replace('p', '')),
      (stream: Stream) => stream.label,
      (stream: Stream) => stream.url
    ],
    ['asc', 'desc', 'asc', 'asc']
  )

  logger.info('saving...')
  const streamsStorage = new Storage(STREAMS_DIR)
  const groupedStreams = streams.groupBy((stream: Stream) => stream.filepath)
  for (const filepath of groupedStreams.keys()) {
    // Renamed from `streams` to avoid shadowing the module-level collection.
    const filepathStreams = groupedStreams.get(filepath) || []
    // Fixed: was `return`, which aborted main() on the first empty group,
    // skipping the remaining playlists and the OUTPUT line below.
    if (!filepathStreams.length) continue

    const playlist = new Playlist(filepathStreams, { public: false })
    await streamsStorage.save(filepath, playlist.toString())
  }

  // Consumed by the CI workflow to auto-close the processed issues.
  const output = processedIssues.map(issue_number => `closes #${issue_number}`).join(', ')
  console.log(`OUTPUT=${output}`)
}

main()
// Handles approved "streams:remove" issues: drops every stream whose URL
// matches the issue's stream_url and records the issue number as processed.
async function removeStreams(loader: IssueLoader) {
  const issues = await loader.load({ labels: ['streams:remove', 'approved'] })
  issues.forEach((issue: Dictionary) => {
    if (issue.missing('stream_url')) return

    const targetUrl = issue.get('stream_url')
    const removed = streams.remove((stream: Stream) => stream.url === targetUrl)
    if (removed.notEmpty()) processedIssues.add(issue.get('issue_number'))
  })
}
/**
 * Handles approved "streams:edit" issues: finds the stream by its URL and
 * overwrites whichever fields the issue provides, then re-inserts it.
 */
async function editStreams(loader: IssueLoader) {
  const issues = await loader.load({ labels: ['streams:edit', 'approved'] })
  issues.forEach((data: Dictionary) => {
    if (data.missing('stream_url')) return

    const stream = streams.first(
      (_stream: Stream) => _stream.url === data.get('stream_url')
    ) as Stream
    if (!stream) return

    if (data.has('channel_id')) {
      const channel = groupedChannels.get(data.get('channel_id'))
      if (!channel) return

      stream.channel = data.get('channel_id')
      stream.filepath = `${channel.country.toLowerCase()}.m3u`
      stream.line = -1
      stream.name = channel.name
    }
    // Fixed: the channel_name check was duplicated (it appeared again after
    // http_referrer with the identical effect); one assignment is enough.
    if (data.has('channel_name')) stream.name = data.get('channel_name')
    if (data.has('label')) stream.label = data.get('label')
    if (data.has('quality')) stream.quality = data.get('quality')
    if (data.has('user_agent')) stream.userAgent = data.get('user_agent')
    if (data.has('http_referrer')) stream.httpReferrer = data.get('http_referrer')

    // NOTE(review): this removes *every* stream with the same channel id, not
    // just the edited one, before re-adding the edited stream — confirm that
    // dropping other variants of the channel is intended.
    streams.remove((_stream: Stream) => _stream.channel === stream.channel)
    streams.add(stream)
    processedIssues.add(data.get('issue_number'))
  })
}
async function addStreams(loader: IssueLoader) {
const issues = await loader.load({ labels: ['streams:add', 'approved'] })
issues.forEach((data: Dictionary) => {
if (data.missing('channel_id') || data.missing('stream_url')) return
if (streams.includes((_stream: Stream) => _stream.url === data.get('stream_url'))) return
const channel = groupedChannels.get(data.get('channel_id'))
if (!channel) return
const stream = new Stream({
channel: data.get('channel_id'),
url: data.get('stream_url'),
label: data.get('label'),
quality: data.get('quality'),
userAgent: data.get('user_agent'),
httpReferrer: data.get('http_referrer'),
filepath: `${channel.country.toLowerCase()}.m3u`,
line: -1,
name: data.get('channel_name') || channel.name
})
streams.add(stream)
processedIssues.add(data.get('issue_number'))
})
}

View file

@@ -1,106 +0,0 @@
const { file, logger, api, parser, id } = require('../../core')
const { program } = require('commander')
const chalk = require('chalk')
const _ = require('lodash')
program.argument('[filepath]', 'Path to file to validate').parse(process.argv)
// Validates playlist files: warns about tvg-ids missing from the database and
// duplicate URLs, errors when a channel matches the copyright blocklist.
// Exits with code 1 if any error was found.
async function main() {
  // Validate only the files passed on the command line, or every playlist.
  const files = program.args.length ? program.args : await file.list('streams/*.m3u')

  logger.info(`loading blocklist...`)
  await api.channels.load()
  await api.blocklist.load()

  let blocklist = await api.blocklist.all()
  // Attach the channel name to each blocklist record; drop records whose
  // channel no longer exists in the database.
  blocklist = blocklist
    .map(blocked => {
      const channel = api.channels.find({ id: blocked.channel })
      if (!channel) return null
      return { ...blocked, name: channel.name }
    })
    .filter(i => i)
  logger.info(`found ${blocklist.length} records`)

  let errors = []
  let warnings = []
  for (const filepath of files) {
    if (!filepath.endsWith('.m3u')) continue

    const basename = file.basename(filepath)
    // Playlist filenames look like "<country code>[_*].m3u".
    const [__, country] = basename.match(/([a-z]{2})(|_.*)\.m3u/i) || [null, null]

    const buffer = {} // URLs already seen in this file (duplicate check)
    const fileLog = []
    try {
      const playlist = await parser.parsePlaylist(filepath)
      for (const item of playlist.items) {
        // tvg-id not present in the channels database -> warning
        if (item.tvg.id && !api.channels.find({ id: item.tvg.id })) {
          fileLog.push({
            type: 'warning',
            line: item.line,
            message: `"${item.tvg.id}" is not in the database`
          })
        }
        // Duplicate URL within the same playlist -> warning
        if (item.url && buffer[item.url]) {
          fileLog.push({
            type: 'warning',
            line: item.line,
            message: `"${item.url}" is already on the playlist`
          })
        } else {
          buffer[item.url] = true
        }
        // Blocklist match, by tvg-id or by the id derived from the item
        // name and country code -> error
        const channel_id = id.generate(item.name, country)
        const found = blocklist.find(
          blocked =>
            item.tvg.id.toLowerCase() === blocked.channel.toLowerCase() ||
            channel_id.toLowerCase() === blocked.channel.toLowerCase()
        )
        if (found) {
          fileLog.push({
            type: 'error',
            line: item.line,
            message: `"${found.name}" is on the blocklist due to claims of copyright holders (${found.ref})`
          })
        }
      }
    } catch (err) {
      // A parse failure is reported as a single error for the whole file.
      fileLog.push({
        type: 'error',
        line: 0,
        message: err.message.toLowerCase()
      })
    }

    if (fileLog.length) {
      logger.info(`\n${chalk.underline(filepath)}`)
      fileLog.forEach(err => {
        const position = err.line.toString().padEnd(6, ' ')
        const type = err.type.padEnd(9, ' ')
        const status = err.type === 'error' ? chalk.red(type) : chalk.yellow(type)
        logger.info(` ${chalk.gray(position)}${status}${err.message}`)
      })
      errors = errors.concat(fileLog.filter(e => e.type === 'error'))
      warnings = warnings.concat(fileLog.filter(e => e.type === 'warning'))
    }
  }

  logger.error(
    chalk.red(
      `\n${errors.length + warnings.length} problems (${errors.length} errors, ${
        warnings.length
      } warnings)`
    )
  )

  // Warnings alone do not fail the run.
  if (errors.length) {
    process.exit(1)
  }
}

main()

View file

@@ -0,0 +1,132 @@
import { Logger, Storage, PlaylistParser, Collection, File, Dictionary } from '../../core'
import { Channel, Stream, Blocked } from '../../models'
import { program } from 'commander'
import chalk from 'chalk'
import { transliterate } from 'transliteration'
import _ from 'lodash'
import { DATA_DIR, STREAMS_DIR } from '../../constants'
// Optional CLI argument: validate only the given playlists instead of every
// file under STREAMS_DIR.
program.argument('[filepath]', 'Path to file to validate').parse(process.argv)

// A single warning/error entry reported for a playlist file.
type LogItem = {
  type: string // 'error' | 'warning'
  line: number // line number in the playlist (0 for parse failures)
  message: string
}
// Validates playlists: warns when a stream's channel id is unknown or its URL
// is duplicated within the file, errors when the channel is on the copyright
// blocklist. Exits with code 1 if any error was found.
async function main() {
  const logger = new Logger()

  logger.info(`loading blocklist...`)
  const storage = new Storage(DATA_DIR)
  const channelsContent = await storage.json('channels.json')
  const channels = new Collection(channelsContent).map(data => new Channel(data))
  const blocklistContent = await storage.json('blocklist.json')
  const blocklist = new Collection(blocklistContent).map(data => new Blocked(data))
  logger.info(`found ${blocklist.count()} records`)

  let errors = new Collection()
  let warnings = new Collection()
  const streamsStorage = new Storage(STREAMS_DIR)
  const parser = new PlaylistParser({ storage: streamsStorage })
  // Validate only the files passed on the command line, or every playlist.
  const files = program.args.length ? program.args : await streamsStorage.list('**/*.m3u')
  for (const filepath of files) {
    const file = new File(filepath)
    if (file.extension() !== 'm3u') continue

    // Playlist filenames look like "<country code>[_*].m3u".
    const [, countryCode] = file.basename().match(/([a-z]{2})(|_.*)\.m3u/i) || [null, '']

    const log = new Collection()
    const buffer = new Dictionary() // URLs already seen in this file (duplicate check)
    try {
      // The parser resolves paths relative to STREAMS_DIR.
      const relativeFilepath = filepath.replace(STREAMS_DIR, '')
      const playlist = await parser.parse(relativeFilepath)
      playlist.streams.forEach((stream: Stream) => {
        // Channel id not present in the channels database -> warning
        const channelNotInDatabase =
          stream.channel && !channels.first((channel: Channel) => channel.id === stream.channel)
        if (channelNotInDatabase) {
          log.add({
            type: 'warning',
            line: stream.line,
            message: `"${stream.channel}" is not in the database`
          })
        }
        // Duplicate URL within the same playlist -> warning
        const alreadyOnPlaylist = stream.url && buffer.has(stream.url)
        if (alreadyOnPlaylist) {
          log.add({
            type: 'warning',
            line: stream.line,
            message: `"${stream.url}" is already on the playlist`
          })
        } else {
          buffer.set(stream.url, true)
        }
        // Blocklist match, by the stored channel id or by the id derived
        // from the stream name and country code -> error
        const channelId = generateChannelId(stream.name, countryCode)
        const blocked = blocklist.first(
          blocked =>
            stream.channel.toLowerCase() === blocked.channel.toLowerCase() ||
            channelId.toLowerCase() === blocked.channel.toLowerCase()
        )
        if (blocked) {
          log.add({
            type: 'error',
            line: stream.line,
            message: `"${stream.name}" is on the blocklist due to claims of copyright holders (${blocked.ref})`
          })
        }
      })
    } catch (error) {
      // A parse failure is reported as a single error for the whole file.
      log.add({
        type: 'error',
        line: 0,
        message: error.message.toLowerCase()
      })
    }

    if (log.notEmpty()) {
      logger.info(`\n${chalk.underline(filepath)}`)
      log.forEach((logItem: LogItem) => {
        const position = logItem.line.toString().padEnd(6, ' ')
        const type = logItem.type.padEnd(9, ' ')
        const status = logItem.type === 'error' ? chalk.red(type) : chalk.yellow(type)
        logger.info(` ${chalk.gray(position)}${status}${logItem.message}`)
      })
      errors = errors.concat(log.filter((logItem: LogItem) => logItem.type === 'error'))
      warnings = warnings.concat(log.filter((logItem: LogItem) => logItem.type === 'warning'))
    }
  }

  logger.error(
    chalk.red(
      `\n${
        errors.count() + warnings.count()
      } problems (${errors.count()} errors, ${warnings.count()} warnings)`
    )
  )

  // Warnings alone do not fail the run.
  if (errors.count()) {
    process.exit(1)
  }
}

main()
/**
 * Derives a channel id ("<normalized name>.<country code>") from a stream
 * name, for matching against the blocklist.
 *
 * @param name raw channel name from the playlist
 * @param code two-letter country code taken from the playlist filename
 * @returns the derived id, or '' when either input is empty
 */
function generateChannelId(name: string, code: string) {
  if (!name || !code) return ''

  name = name.replace(/ *\([^)]*\) */g, '') // strip "(...)" annotations
  // Fixed: the character class was [^)] (copy-pasted from the parens rule
  // above), so bracketed segments containing ")" were never stripped.
  name = name.replace(/ *\[[^\]]*\] */g, '') // strip "[...]" annotations
  name = name.replace(/\+/gi, 'Plus')
  // NOTE(review): non-[a-z0-9] characters are stripped *before*
  // transliteration, so accented letters are dropped rather than
  // transliterated — confirm this matches how database ids are generated.
  name = name.replace(/[^a-z\d]+/gi, '')
  name = name.trim()
  name = transliterate(name)
  code = code.toLowerCase()

  return `${name}.${code}`
}