mirror of https://github.com/iptv-org/iptv.git
synced 2025-05-11 17:40:03 -04:00

Update scripts

commit f1d2add19a (parent 8a83f23243)

98 changed files with 2423 additions and 1499 deletions
@@ -1,28 +0,0 @@
-const { logger, db, file } = require('../../core')
-const _ = require('lodash')
-
-const PUBLIC_DIR = process.env.PUBLIC_DIR || '.api'
-
-async function main() {
-  logger.info(`loading streams...`)
-  await db.streams.load()
-
-  let streams = await db.streams.find({})
-  streams = _.sortBy(streams, 'channel')
-  streams = streams.map(stream => {
-    let data = {
-      channel: stream.channel,
-      url: stream.url,
-      http_referrer: stream.http_referrer,
-      user_agent: stream.user_agent
-    }
-
-    return data
-  })
-  logger.info(`found ${streams.length} streams`)
-
-  logger.info('saving to .api/streams.json...')
-  await file.create(`${PUBLIC_DIR}/streams.json`, JSON.stringify(streams))
-}
-
-main()
25  scripts/commands/api/generate.ts  Normal file
@@ -0,0 +1,25 @@
+import { API_DIR, DB_DIR } from '../../constants'
+import { Logger, Database, Collection, Storage } from '../../core'
+import { Stream } from '../../models'
+
+async function main() {
+  const logger = new Logger()
+
+  logger.info(`loading streams...`)
+  const db = new Database(DB_DIR)
+  const dbStreams = await db.load('streams.db')
+  const docs = await dbStreams.find({})
+
+  const streams = new Collection(docs as any[])
+    .map(data => new Stream(data))
+    .orderBy((stream: Stream) => stream.channel)
+    .map((stream: Stream) => stream.toJSON())
+
+  logger.info(`found ${streams.count()} streams`)
+
+  logger.info('saving to .api/streams.json...')
+  const storage = new Storage(API_DIR)
+  await storage.save('streams.json', streams.toJSON())
+}
+
+main()
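Note: the old script shaped each JSON record inline, while the rewrite delegates that to the Stream model, which is not part of this diff. A minimal sketch of what its toJSON() would have to return for the API output to stay unchanged (the class body is an assumption; only the field names come from the old script):

class Stream {
  channel: string
  url: string
  http_referrer: string
  user_agent: string

  constructor(data: any) {
    // Assumed shape: mirrors the fields the old generate.js read off each db record.
    this.channel = data.channel
    this.url = data.url
    this.http_referrer = data.http_referrer
    this.user_agent = data.user_agent
  }

  // Emits exactly the fields the old script wrote to .api/streams.json.
  toJSON() {
    return {
      channel: this.channel,
      url: this.url,
      http_referrer: this.http_referrer,
      user_agent: this.user_agent
    }
  }
}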
@@ -1,11 +1,11 @@
 #!/bin/bash
 
-mkdir -p scripts/tmp/data
-curl -L -o scripts/tmp/data/blocklist.json https://iptv-org.github.io/api/blocklist.json
-curl -L -o scripts/tmp/data/categories.json https://iptv-org.github.io/api/categories.json
-curl -L -o scripts/tmp/data/channels.json https://iptv-org.github.io/api/channels.json
-curl -L -o scripts/tmp/data/streams.json https://iptv-org.github.io/api/streams.json
-curl -L -o scripts/tmp/data/countries.json https://iptv-org.github.io/api/countries.json
-curl -L -o scripts/tmp/data/languages.json https://iptv-org.github.io/api/languages.json
-curl -L -o scripts/tmp/data/regions.json https://iptv-org.github.io/api/regions.json
-curl -L -o scripts/tmp/data/subdivisions.json https://iptv-org.github.io/api/subdivisions.json
+mkdir -p temp/data
+curl -L -o temp/data/blocklist.json https://iptv-org.github.io/api/blocklist.json
+curl -L -o temp/data/categories.json https://iptv-org.github.io/api/categories.json
+curl -L -o temp/data/channels.json https://iptv-org.github.io/api/channels.json
+curl -L -o temp/data/streams.json https://iptv-org.github.io/api/streams.json
+curl -L -o temp/data/countries.json https://iptv-org.github.io/api/countries.json
+curl -L -o temp/data/languages.json https://iptv-org.github.io/api/languages.json
+curl -L -o temp/data/regions.json https://iptv-org.github.io/api/regions.json
+curl -L -o temp/data/subdivisions.json https://iptv-org.github.io/api/subdivisions.json
@@ -1,40 +0,0 @@
-const { db, file, parser, store, logger } = require('../../core')
-const { program } = require('commander')
-const _ = require('lodash')
-
-const options = program
-  .option('--input-dir <input-dir>', 'Set path to input directory', 'streams')
-  .parse(process.argv)
-  .opts()
-
-async function main() {
-  logger.info(`looking for streams...`)
-  const streams = []
-  const files = await file.list(`${options.inputDir}/**/*.m3u`)
-  for (const filepath of files) {
-    const playlist = await parser.parsePlaylist(filepath)
-    for (const item of playlist.items) {
-      item.filepath = filepath
-
-      const stream = store.create()
-
-      stream.set('channel', item.tvg.id)
-      stream.set('title', item.name)
-      stream.set('filepath', item.filepath)
-      stream.set('url', item.url)
-      stream.set('http_referrer', item.http.referrer)
-      stream.set('user_agent', item.http['user-agent'])
-
-      streams.push(stream)
-    }
-  }
-  logger.info(`found ${streams.length} streams`)
-
-  logger.info('saving to the database...')
-  await db.streams.load()
-  await db.streams.reset()
-  const data = streams.map(stream => stream.data())
-  await db.streams.insert(data)
-}
-
-main()
33  scripts/commands/database/create.ts  Normal file
@@ -0,0 +1,33 @@
+import { Storage, Logger, PlaylistParser, Collection, Database } from '../../core'
+import { Stream, Playlist } from '../../models'
+import { STREAMS_DIR, DB_DIR } from '../../constants'
+
+async function main() {
+  const logger = new Logger()
+
+  logger.info(`looking for streams...`)
+  const storage = new Storage(STREAMS_DIR)
+  const parser = new PlaylistParser({
+    storage
+  })
+  const files = await storage.list(`**/*.m3u`)
+  let streams = new Collection()
+  for (let filepath of files) {
+    const playlist: Playlist = await parser.parse(filepath)
+    streams = streams.concat(playlist.streams)
+  }
+
+  logger.info(`found ${streams.count()} streams`)
+
+  logger.info('clean up the storage...')
+  const dbStorage = new Storage(DB_DIR)
+  await dbStorage.clear('streams.db')
+
+  logger.info('saving streams to the database...')
+  const db = new Database(DB_DIR)
+  const dbStreams = await db.load('streams.db')
+  const data = streams.map((stream: Stream) => stream.data()).all()
+  await dbStreams.insert(data)
+}
+
+main()
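Note: the rewrite leans on a chainable Collection wrapper (map, orderBy, uniqBy, keyBy, groupBy, concat, count, ...) whose implementation is outside this diff. A minimal sketch of the subset used in the two files above, assuming lodash underneath as the old scripts used (names and internals are assumptions):

import _ from 'lodash'

// Hypothetical sketch of scripts/core/collection — only the methods used above.
class Collection {
  constructor(private items: any[] = []) {}

  map(fn: (item: any) => any): Collection {
    return new Collection(this.items.map(fn))
  }

  // Accepts a single iteratee or an array of them, like _.orderBy.
  orderBy(iteratees: any, orders?: any): Collection {
    return new Collection(_.orderBy(this.items, iteratees, orders))
  }

  concat(other: Collection): Collection {
    return new Collection(this.items.concat(other.all()))
  }

  count(): number {
    return this.items.length
  }

  all(): any[] {
    return this.items
  }
}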
@@ -1,33 +0,0 @@
-const { create: createPlaylist } = require('../../core/playlist')
-const { normalize: normalizeUrl } = require('../../core/url')
-const { db, logger, file } = require('../../core')
-const { orderBy } = require('natural-orderby')
-const _ = require('lodash')
-
-async function main() {
-  logger.info('loading streams...')
-  await db.streams.load()
-  let streams = await db.streams.find({})
-
-  streams = streams.map(stream => {
-    stream.url = normalizeUrl(stream.url)
-
-    return stream
-  })
-
-  logger.info('sorting links...')
-  streams = orderBy(
-    streams,
-    ['channel', s => (s.channel ? '' : s.title), 'url'],
-    ['asc', 'asc', 'asc']
-  )
-
-  logger.info('saving...')
-  const files = _.groupBy(streams, 'filepath')
-  for (const filepath in files) {
-    const playlist = createPlaylist(files[filepath], { public: false })
-    await file.create(filepath, playlist.toString())
-  }
-}
-
-main()
@@ -1,76 +0,0 @@
-const { db, generator, api, logger, file } = require('../../core')
-const { orderBy } = require('natural-orderby')
-const _ = require('lodash')
-
-async function main() {
-  const streams = await loadStreams()
-
-  logger.info('generating categories/...')
-  await generator.generate('categories', streams)
-  logger.info('generating countries/...')
-  await generator.generate('countries', streams)
-  logger.info('generating languages/...')
-  await generator.generate('languages', streams)
-  logger.info('generating regions/...')
-  await generator.generate('regions', streams)
-  logger.info('generating index.category.m3u...')
-  await generator.generate('index_category_m3u', streams)
-  logger.info('generating index.country.m3u...')
-  await generator.generate('index_country_m3u', streams)
-  logger.info('generating index.language.m3u...')
-  await generator.generate('index_language_m3u', streams)
-  logger.info('generating index.m3u...')
-  await generator.generate('index_m3u', streams)
-  logger.info('generating index.nsfw.m3u...')
-  await generator.generate('index_nsfw_m3u', streams)
-  logger.info('generating index.region.m3u...')
-  await generator.generate('index_region_m3u', streams)
-}
-
-main()
-
-async function loadStreams() {
-  await db.streams.load()
-  let streams = await db.streams.find({})
-  streams = orderBy(streams, ['channel', 'url'], ['asc', 'asc'])
-  streams = _.uniqBy(streams, stream => stream.channel || _.uniqueId())
-
-  await api.channels.load()
-  let channels = await api.channels.all()
-  channels = _.keyBy(channels, 'id')
-
-  await api.categories.load()
-  let categories = await api.categories.all()
-  categories = _.keyBy(categories, 'id')
-
-  await api.languages.load()
-  let languages = await api.languages.all()
-  languages = _.keyBy(languages, 'code')
-
-  streams = streams.map(stream => {
-    const channel = channels[stream.channel] || null
-    const filename = file.getFilename(stream.filepath)
-    const [_, code] = filename.match(/^([a-z]{2})(_|$)/) || [null, null]
-    const defaultBroadcastArea = code ? [`c/${code.toUpperCase()}`] : []
-
-    if (channel) {
-      stream.categories = channel.categories.map(id => categories[id]).filter(i => i)
-      stream.languages = channel.languages.map(id => languages[id]).filter(i => i)
-      stream.broadcast_area = channel.broadcast_area
-      stream.is_nsfw = channel.is_nsfw
-      stream.logo = channel.logo
-    } else {
-      stream.categories = []
-      stream.languages = []
-      stream.broadcast_area = defaultBroadcastArea
-      stream.is_nsfw = false
-      stream.logo = null
-    }
-
-    return stream
-  })
-
-  streams = orderBy(streams, ['title'], ['asc'])
-
-  return streams
-}
148  scripts/commands/playlist/generate.ts  Normal file
@@ -0,0 +1,148 @@
+import { File, Storage } from '../../core'
+import { Stream, Category, Channel, Language, Country, Region, Subdivision } from '../../models'
+import { Database } from '../../core/database'
+import { Collection } from '../../core/collection'
+import { Logger } from '../../core/logger'
+import _ from 'lodash'
+import {
+  CategoriesGenerator,
+  CountriesGenerator,
+  LanguagesGenerator,
+  RegionsGenerator,
+  IndexGenerator,
+  IndexNsfwGenerator,
+  IndexCategoryGenerator,
+  IndexCountryGenerator,
+  IndexLanguageGenerator,
+  IndexRegionGenerator
+} from '../../generators'
+import { DATA_DIR, DB_DIR, LOGS_DIR } from '../../constants'
+
+async function main() {
+  const logger = new Logger()
+
+  const storage = new Storage(DATA_DIR)
+
+  const channelsContent = await storage.json('channels.json')
+  const channels = new Collection(channelsContent).map(data => new Channel(data))
+
+  const categoriesContent = await storage.json('categories.json')
+  const categories = new Collection(categoriesContent).map(data => new Category(data))
+
+  const countriesContent = await storage.json('countries.json')
+  const countries = new Collection(countriesContent).map(data => new Country(data))
+
+  const languagesContent = await storage.json('languages.json')
+  const languages = new Collection(languagesContent).map(data => new Language(data))
+
+  const regionsContent = await storage.json('regions.json')
+  const regions = new Collection(regionsContent).map(data => new Region(data))
+
+  const subdivisionsContent = await storage.json('subdivisions.json')
+  const subdivisions = new Collection(subdivisionsContent).map(data => new Subdivision(data))
+
+  const streams = await loadStreams({ channels, categories, languages })
+
+  const generatorsLogger = new Logger({
+    stream: await new Storage(LOGS_DIR).createStream(`generators.log`)
+  })
+
+  logger.info('generating categories/...')
+  await new CategoriesGenerator({ categories, streams, logger: generatorsLogger }).generate()
+
+  logger.info('generating countries/...')
+  await new CountriesGenerator({
+    countries,
+    streams,
+    regions,
+    subdivisions,
+    logger: generatorsLogger
+  }).generate()
+
+  logger.info('generating languages/...')
+  await new LanguagesGenerator({ streams, logger: generatorsLogger }).generate()
+
+  logger.info('generating regions/...')
+  await new RegionsGenerator({
+    streams,
+    regions,
+    subdivisions,
+    logger: generatorsLogger
+  }).generate()
+
+  logger.info('generating index.m3u...')
+  await new IndexGenerator({ streams, logger: generatorsLogger }).generate()
+
+  logger.info('generating index.nsfw.m3u...')
+  await new IndexNsfwGenerator({ streams, logger: generatorsLogger }).generate()
+
+  logger.info('generating index.category.m3u...')
+  await new IndexCategoryGenerator({ streams, logger: generatorsLogger }).generate()
+
+  logger.info('generating index.country.m3u...')
+  await new IndexCountryGenerator({
+    streams,
+    countries,
+    regions,
+    subdivisions,
+    logger: generatorsLogger
+  }).generate()
+
+  logger.info('generating index.language.m3u...')
+  await new IndexLanguageGenerator({ streams, logger: generatorsLogger }).generate()
+
+  logger.info('generating index.region.m3u...')
+  await new IndexRegionGenerator({ streams, regions, logger: generatorsLogger }).generate()
+}
+
+main()
+
+async function loadStreams({
+  channels,
+  categories,
+  languages
+}: {
+  channels: Collection
+  categories: Collection
+  languages: Collection
+}) {
+  const groupedChannels = channels.keyBy(channel => channel.id)
+  const groupedCategories = categories.keyBy(category => category.id)
+  const groupedLanguages = languages.keyBy(language => language.code)
+
+  const db = new Database(DB_DIR)
+  const dbStreams = await db.load('streams.db')
+  const docs = await dbStreams.find({})
+  const streams = new Collection(docs as any[])
+    .map((data: any) => new Stream(data))
+    .orderBy([(stream: Stream) => stream.channel, (stream: Stream) => stream.url], ['asc', 'asc'])
+    .uniqBy((stream: Stream) => stream.channel || _.uniqueId())
+    .map((stream: Stream) => {
+      const channel: Channel | undefined = groupedChannels.get(stream.channel)
+
+      if (channel) {
+        const channelCategories = channel.categories
+          .map((id: string) => groupedCategories.get(id))
+          .filter(Boolean)
+        const channelLanguages = channel.languages
+          .map((id: string) => groupedLanguages.get(id))
+          .filter(Boolean)
+
+        stream.categories = channelCategories
+        stream.languages = channelLanguages
+        stream.broadcastArea = channel.broadcastArea
+        stream.isNSFW = channel.isNSFW
+        if (channel.logo) stream.logo = channel.logo
+      } else {
+        const file = new File(stream.filepath)
+        const [_, countryCode] = file.getFilename().match(/^([a-z]{2})(_|$)/) || [null, null]
+        const defaultBroadcastArea = countryCode ? [`c/${countryCode.toUpperCase()}`] : []
+
+        stream.broadcastArea = new Collection(defaultBroadcastArea)
+      }
+
+      return stream
+    })

+  return streams
+}
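Note: when a stream has no matching channel, the country is inferred from the playlist's filename. A standalone demo of that fallback regex (sample names are hypothetical):

// A two-letter country prefix must be followed by "_" or the end of the name.
const countryOf = (basename: string) => (basename.match(/^([a-z]{2})(_|$)/) || [null, null])[1]

console.log(countryOf('us_pluto')) // "us"  -> broadcastArea ["c/US"]
console.log(countryOf('uk'))       // "uk"  -> broadcastArea ["c/UK"]
console.log(countryOf('int'))      // null -> broadcastArea stays empty ("int" is three letters)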
143  scripts/commands/playlist/update.ts  Normal file
@@ -0,0 +1,143 @@
+import { DB_DIR, DATA_DIR, STREAMS_DIR } from '../../constants'
+import { Database, Storage, Logger, Collection, Dictionary, IssueLoader } from '../../core'
+import { Stream, Playlist, Channel } from '../../models'
+
+let processedIssues = new Collection()
+let streams: Collection
+let groupedChannels: Dictionary
+
+async function main() {
+  const logger = new Logger({ disabled: true })
+  const loader = new IssueLoader()
+
+  logger.info('loading streams...')
+  const db = new Database(DB_DIR)
+  const docs = await db.load('streams.db')
+  const dbStreams = await docs.find({})
+
+  streams = new Collection(dbStreams as any[]).map(data => new Stream(data))
+
+  const storage = new Storage(DATA_DIR)
+  const channelsContent = await storage.json('channels.json')
+  groupedChannels = new Collection(channelsContent)
+    .map(data => new Channel(data))
+    .keyBy((channel: Channel) => channel.id)
+
+  logger.info('removing broken streams...')
+  await removeStreams(loader)
+
+  logger.info('edit stream description...')
+  await editStreams(loader)
+
+  logger.info('add new streams...')
+  await addStreams(loader)
+
+  logger.info('normalizing links...')
+  streams = streams.map(stream => {
+    stream.normalizeURL()
+    return stream
+  })
+
+  logger.info('sorting links...')
+  streams = streams.orderBy(
+    [
+      (stream: Stream) => stream.name,
+      (stream: Stream) => parseInt(stream.quality.replace('p', '')),
+      (stream: Stream) => stream.label,
+      (stream: Stream) => stream.url
+    ],
+    ['asc', 'desc', 'asc', 'asc']
+  )
+
+  logger.info('saving...')
+  const streamsStorage = new Storage(STREAMS_DIR)
+  const groupedStreams = streams.groupBy((stream: Stream) => stream.filepath)
+  for (let filepath of groupedStreams.keys()) {
+    const streams = groupedStreams.get(filepath) || []
+
+    if (!streams.length) return
+
+    const playlist = new Playlist(streams, { public: false })
+    await streamsStorage.save(filepath, playlist.toString())
+  }
+
+  const output = processedIssues.map(issue_number => `closes #${issue_number}`).join(', ')
+  console.log(`OUTPUT=${output}`)
+}
+
+main()
+
+async function removeStreams(loader: IssueLoader) {
+  const issues = await loader.load({ labels: ['streams:remove', 'approved'] })
+  issues.forEach((data: Dictionary) => {
+    if (data.missing('stream_url')) return
+
+    const removed = streams.remove((_stream: Stream) => _stream.url === data.get('stream_url'))
+    if (removed.notEmpty()) {
+      processedIssues.add(data.get('issue_number'))
+    }
+  })
+}
+
+async function editStreams(loader: IssueLoader) {
+  const issues = await loader.load({ labels: ['streams:edit', 'approved'] })
+  issues.forEach((data: Dictionary) => {
+    if (data.missing('stream_url')) return
+
+    let stream = streams.first(
+      (_stream: Stream) => _stream.url === data.get('stream_url')
+    ) as Stream
+
+    if (!stream) return
+
+    if (data.has('channel_id')) {
+      const channel = groupedChannels.get(data.get('channel_id'))
+
+      if (!channel) return
+
+      stream.channel = data.get('channel_id')
+      stream.filepath = `${channel.country.toLowerCase()}.m3u`
+      stream.line = -1
+      stream.name = channel.name
+    }
+
+    if (data.has('channel_name')) stream.name = data.get('channel_name')
+    if (data.has('label')) stream.label = data.get('label')
+    if (data.has('quality')) stream.quality = data.get('quality')
+    if (data.has('user_agent')) stream.userAgent = data.get('user_agent')
+    if (data.has('http_referrer')) stream.httpReferrer = data.get('http_referrer')
+    if (data.has('channel_name')) stream.name = data.get('channel_name')
+
+    streams.remove((_stream: Stream) => _stream.channel === stream.channel)
+    streams.add(stream)
+
+    processedIssues.add(data.get('issue_number'))
+  })
+}
+
+async function addStreams(loader: IssueLoader) {
+  const issues = await loader.load({ labels: ['streams:add', 'approved'] })
+  issues.forEach((data: Dictionary) => {
+    if (data.missing('channel_id') || data.missing('stream_url')) return
+    if (streams.includes((_stream: Stream) => _stream.url === data.get('stream_url'))) return
+
+    const channel = groupedChannels.get(data.get('channel_id'))
+
+    if (!channel) return
+
+    const stream = new Stream({
+      channel: data.get('channel_id'),
+      url: data.get('stream_url'),
+      label: data.get('label'),
+      quality: data.get('quality'),
+      userAgent: data.get('user_agent'),
+      httpReferrer: data.get('http_referrer'),
+      filepath: `${channel.country.toLowerCase()}.m3u`,
+      line: -1,
+      name: data.get('channel_name') || channel.name
+    })
+
+    streams.add(stream)
+    processedIssues.add(data.get('issue_number'))
+  })
+}
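Note: the sort above orders streams by name ascending, then by numeric quality descending (so '1080p' lands before '720p'), then by label and URL. A standalone illustration of the quality key (sample values are hypothetical):

// The quality key strips the trailing "p" and compares numerically, descending.
const qualityKey = (quality: string) => parseInt(quality.replace('p', ''))

const qualities = ['720p', '1080p', '480p']
qualities.sort((a, b) => qualityKey(b) - qualityKey(a))
console.log(qualities) // [ '1080p', '720p', '480p' ]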
@@ -1,106 +0,0 @@
-const { file, logger, api, parser, id } = require('../../core')
-const { program } = require('commander')
-const chalk = require('chalk')
-const _ = require('lodash')
-
-program.argument('[filepath]', 'Path to file to validate').parse(process.argv)
-
-async function main() {
-  const files = program.args.length ? program.args : await file.list('streams/*.m3u')
-
-  logger.info(`loading blocklist...`)
-  await api.channels.load()
-  await api.blocklist.load()
-
-  let blocklist = await api.blocklist.all()
-  blocklist = blocklist
-    .map(blocked => {
-      const channel = api.channels.find({ id: blocked.channel })
-      if (!channel) return null
-      return { ...blocked, name: channel.name }
-    })
-    .filter(i => i)
-  logger.info(`found ${blocklist.length} records`)
-
-  let errors = []
-  let warnings = []
-  for (const filepath of files) {
-    if (!filepath.endsWith('.m3u')) continue
-
-    const basename = file.basename(filepath)
-    const [__, country] = basename.match(/([a-z]{2})(|_.*)\.m3u/i) || [null, null]
-
-    const buffer = {}
-    const fileLog = []
-    try {
-      const playlist = await parser.parsePlaylist(filepath)
-      for (const item of playlist.items) {
-        if (item.tvg.id && !api.channels.find({ id: item.tvg.id })) {
-          fileLog.push({
-            type: 'warning',
-            line: item.line,
-            message: `"${item.tvg.id}" is not in the database`
-          })
-        }
-
-        if (item.url && buffer[item.url]) {
-          fileLog.push({
-            type: 'warning',
-            line: item.line,
-            message: `"${item.url}" is already on the playlist`
-          })
-        } else {
-          buffer[item.url] = true
-        }
-
-        const channel_id = id.generate(item.name, country)
-        const found = blocklist.find(
-          blocked =>
-            item.tvg.id.toLowerCase() === blocked.channel.toLowerCase() ||
-            channel_id.toLowerCase() === blocked.channel.toLowerCase()
-        )
-        if (found) {
-          fileLog.push({
-            type: 'error',
-            line: item.line,
-            message: `"${found.name}" is on the blocklist due to claims of copyright holders (${found.ref})`
-          })
-        }
-      }
-    } catch (err) {
-      fileLog.push({
-        type: 'error',
-        line: 0,
-        message: err.message.toLowerCase()
-      })
-    }
-
-    if (fileLog.length) {
-      logger.info(`\n${chalk.underline(filepath)}`)
-
-      fileLog.forEach(err => {
-        const position = err.line.toString().padEnd(6, ' ')
-        const type = err.type.padEnd(9, ' ')
-        const status = err.type === 'error' ? chalk.red(type) : chalk.yellow(type)
-        logger.info(`  ${chalk.gray(position)}${status}${err.message}`)
-      })
-
-      errors = errors.concat(fileLog.filter(e => e.type === 'error'))
-      warnings = warnings.concat(fileLog.filter(e => e.type === 'warning'))
-    }
-  }
-
-  logger.error(
-    chalk.red(
-      `\n${errors.length + warnings.length} problems (${errors.length} errors, ${
-        warnings.length
-      } warnings)`
-    )
-  )
-
-  if (errors.length) {
-    process.exit(1)
-  }
-}
-
-main()
132  scripts/commands/playlist/validate.ts  Normal file
@@ -0,0 +1,132 @@
+import { Logger, Storage, PlaylistParser, Collection, File, Dictionary } from '../../core'
+import { Channel, Stream, Blocked } from '../../models'
+import { program } from 'commander'
+import chalk from 'chalk'
+import { transliterate } from 'transliteration'
+import _ from 'lodash'
+import { DATA_DIR, STREAMS_DIR } from '../../constants'
+
+program.argument('[filepath]', 'Path to file to validate').parse(process.argv)
+
+type LogItem = {
+  type: string
+  line: number
+  message: string
+}
+
+async function main() {
+  const logger = new Logger()
+
+  logger.info(`loading blocklist...`)
+  const storage = new Storage(DATA_DIR)
+  const channelsContent = await storage.json('channels.json')
+  const channels = new Collection(channelsContent).map(data => new Channel(data))
+  const blocklistContent = await storage.json('blocklist.json')
+  const blocklist = new Collection(blocklistContent).map(data => new Blocked(data))
+
+  logger.info(`found ${blocklist.count()} records`)
+
+  let errors = new Collection()
+  let warnings = new Collection()
+  const streamsStorage = new Storage(STREAMS_DIR)
+  const parser = new PlaylistParser({ storage: streamsStorage })
+  const files = program.args.length ? program.args : await streamsStorage.list('**/*.m3u')
+  for (const filepath of files) {
+    const file = new File(filepath)
+    if (file.extension() !== 'm3u') continue
+
+    const [, countryCode] = file.basename().match(/([a-z]{2})(|_.*)\.m3u/i) || [null, '']
+
+    const log = new Collection()
+    const buffer = new Dictionary()
+    try {
+      const relativeFilepath = filepath.replace(STREAMS_DIR, '')
+      const playlist = await parser.parse(relativeFilepath)
+      playlist.streams.forEach((stream: Stream) => {
+        const channelNotInDatabase =
+          stream.channel && !channels.first((channel: Channel) => channel.id === stream.channel)
+        if (channelNotInDatabase) {
+          log.add({
+            type: 'warning',
+            line: stream.line,
+            message: `"${stream.channel}" is not in the database`
+          })
+        }
+
+        const alreadyOnPlaylist = stream.url && buffer.has(stream.url)
+        if (alreadyOnPlaylist) {
+          log.add({
+            type: 'warning',
+            line: stream.line,
+            message: `"${stream.url}" is already on the playlist`
+          })
+        } else {
+          buffer.set(stream.url, true)
+        }
+
+        const channelId = generateChannelId(stream.name, countryCode)
+        const blocked = blocklist.first(
+          blocked =>
+            stream.channel.toLowerCase() === blocked.channel.toLowerCase() ||
+            channelId.toLowerCase() === blocked.channel.toLowerCase()
+        )
+        if (blocked) {
+          log.add({
+            type: 'error',
+            line: stream.line,
+            message: `"${stream.name}" is on the blocklist due to claims of copyright holders (${blocked.ref})`
+          })
+        }
+      })
+    } catch (error) {
+      log.add({
+        type: 'error',
+        line: 0,
+        message: error.message.toLowerCase()
+      })
+    }
+
+    if (log.notEmpty()) {
+      logger.info(`\n${chalk.underline(filepath)}`)
+
+      log.forEach((logItem: LogItem) => {
+        const position = logItem.line.toString().padEnd(6, ' ')
+        const type = logItem.type.padEnd(9, ' ')
+        const status = logItem.type === 'error' ? chalk.red(type) : chalk.yellow(type)
+
+        logger.info(`  ${chalk.gray(position)}${status}${logItem.message}`)
+      })
+
+      errors = errors.concat(log.filter((logItem: LogItem) => logItem.type === 'error'))
+      warnings = warnings.concat(log.filter((logItem: LogItem) => logItem.type === 'warning'))
+    }
+  }
+
+  logger.error(
+    chalk.red(
+      `\n${
+        errors.count() + warnings.count()
+      } problems (${errors.count()} errors, ${warnings.count()} warnings)`
+    )
+  )
+
+  if (errors.count()) {
+    process.exit(1)
+  }
+}
+
+main()
+
+function generateChannelId(name: string, code: string) {
+  if (!name || !code) return ''
+
+  name = name.replace(/ *\([^)]*\) */g, '')
+  name = name.replace(/ *\[[^)]*\] */g, '')
+  name = name.replace(/\+/gi, 'Plus')
+  name = name.replace(/[^a-z\d]+/gi, '')
+  name = name.trim()
+  name = transliterate(name)
+  code = code.toLowerCase()
+
+  return `${name}.${code}`
+}
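Note: generateChannelId inlines what the old script delegated to core id.generate. A worked trace with a hypothetical channel name (the blocklist comparison above lowercases both sides, so the mixed case in the result is harmless):

// 'Channel One+ (Backup)' with code 'US':
//   strip '(...)'           -> 'Channel One+'
//   '+' -> 'Plus'           -> 'Channel OnePlus'
//   drop non-alphanumerics  -> 'ChannelOnePlus'
//   transliterate (a no-op for ASCII), lowercase the code
generateChannelId('Channel One+ (Backup)', 'US') // => 'ChannelOnePlus.us'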
@@ -1,143 +0,0 @@
-const { file, markdown, parser, logger, api } = require('../../core')
-const { create: createTable } = require('../../core/table')
-const { program } = require('commander')
-
-const LOGS_DIR = process.env.LOGS_DIR || 'scripts/tmp/logs/generators'
-
-const options = program
-  .option('-c, --config <config>', 'Set path to config file', '.readme/config.json')
-  .parse(process.argv)
-  .opts()
-
-async function main() {
-  await createCategoryTable()
-  await createCountryTable()
-  await createLanguageTable()
-  await createRegionTable()
-  await updateReadme()
-}
-
-main()
-
-async function createCategoryTable() {
-  logger.info('creating category table...')
-  const rows = []
-  await api.categories.load()
-  const items = await parser.parseLogs(`${LOGS_DIR}/categories.log`)
-  for (const item of items) {
-    const id = file.getFilename(item.filepath)
-    const category = await api.categories.find({ id })
-    rows.push({
-      name: category ? category.name : 'Undefined',
-      channels: item.count,
-      playlist: `<code>https://iptv-org.github.io/iptv/${item.filepath}</code>`
-    })
-  }
-
-  const table = createTable(rows, [
-    { name: 'Category' },
-    { name: 'Channels', align: 'right' },
-    { name: 'Playlist', nowrap: true }
-  ])
-
-  await file.create('./.readme/_categories.md', table)
-}
-
-async function createCountryTable() {
-  logger.info('creating country table...')
-  const rows = []
-  await api.countries.load()
-  await api.subdivisions.load()
-  const items = await parser.parseLogs(`${LOGS_DIR}/countries.log`)
-  for (const item of items) {
-    const code = file.getFilename(item.filepath)
-    const country = await api.countries.find({ code: code.toUpperCase() })
-    if (country) {
-      rows.push({
-        name: `${country.flag} ${country.name}`,
-        channels: item.count,
-        playlist: `<code>https://iptv-org.github.io/iptv/${item.filepath}</code>`
-      })
-    } else if (code === 'int') {
-      rows.push({
-        name: `🌍 International`,
-        channels: item.count,
-        playlist: `<code>https://iptv-org.github.io/iptv/${item.filepath}</code>`
-      })
-    } else {
-      const subdivision = await api.subdivisions.find({ code: code.toUpperCase() })
-      if (subdivision) {
-        rows.push({
-          name: ` ${subdivision.name}`,
-          channels: item.count,
-          playlist: `<code>https://iptv-org.github.io/iptv/${item.filepath}</code>`
-        })
-      }
-    }
-  }
-
-  const table = createTable(rows, [
-    { name: 'Country' },
-    { name: 'Channels', align: 'right' },
-    { name: 'Playlist', nowrap: true }
-  ])
-
-  await file.create('./.readme/_countries.md', table)
-}
-
-async function createLanguageTable() {
-  logger.info('creating language table...')
-  const rows = []
-  await api.languages.load()
-  const items = await parser.parseLogs(`${LOGS_DIR}/languages.log`)
-  for (const item of items) {
-    const code = file.getFilename(item.filepath)
-    const language = await api.languages.find({ code })
-    rows.push({
-      name: language ? language.name : 'Undefined',
-      channels: item.count,
-      playlist: `<code>https://iptv-org.github.io/iptv/${item.filepath}</code>`
-    })
-  }
-
-  const table = createTable(rows, [
-    { name: 'Language', align: 'left' },
-    { name: 'Channels', align: 'right' },
-    { name: 'Playlist', align: 'left', nowrap: true }
-  ])
-
-  await file.create('./.readme/_languages.md', table)
-}
-
-async function createRegionTable() {
-  logger.info('creating region table...')
-  const rows = []
-  await api.regions.load()
-  const items = await parser.parseLogs(`${LOGS_DIR}/regions.log`)
-  for (const item of items) {
-    const code = file.getFilename(item.filepath)
-    const region = await api.regions.find({ code: code.toUpperCase() })
-    if (region) {
-      rows.push({
-        name: region.name,
-        channels: item.count,
-        playlist: `<code>https://iptv-org.github.io/iptv/${item.filepath}</code>`
-      })
-    }
-  }
-
-  const table = createTable(rows, [
-    { name: 'Region', align: 'left' },
-    { name: 'Channels', align: 'right' },
-    { name: 'Playlist', align: 'left', nowrap: true }
-  ])
-
-  await file.create('./.readme/_regions.md', table)
-}
-
-async function updateReadme() {
-  logger.info('updating readme.md...')
-  const config = require(file.resolve(options.config))
-  await file.createDir(file.dirname(config.build))
-  await markdown.compile(options.config)
-}
24  scripts/commands/readme/update.ts  Normal file
@@ -0,0 +1,24 @@
+import { CategoryTable, CountryTable, LanguageTable, RegionTable } from '../../tables'
+import { Logger, Markdown } from '../../core'
+import { README_DIR } from '../../constants'
+import path from 'path'
+
+async function main() {
+  const logger = new Logger()
+
+  logger.info('creating category table...')
+  await new CategoryTable().make()
+  logger.info('creating country table...')
+  await new CountryTable().make()
+  logger.info('creating language table...')
+  await new LanguageTable().make()
+  logger.info('creating region table...')
+  await new RegionTable().make()
+
+  logger.info('updating readme.md...')
+  const configPath = path.join(README_DIR, 'config.json')
+  const readme = new Markdown(configPath)
+  readme.compile()
+}
+
+main()
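Note: the per-table helper functions from the old script became table classes driven by a single make() call. A hedged sketch of that contract (the class body is an assumption; the column options mirror the old createTable() calls):

interface Column {
  name: string
  align?: 'left' | 'right'
  nowrap?: boolean
}

// Hypothetical sketch of what CategoryTable.make() covers, based on the old createCategoryTable().
class CategoryTableSketch {
  async make(): Promise<void> {
    const columns: Column[] = [
      { name: 'Category' },
      { name: 'Channels', align: 'right' },
      { name: 'Playlist', nowrap: true }
    ]
    // ...parse the categories generator log, resolve names against the data files,
    // render rows under these columns, and write ./.readme/_categories.md
  }
}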
@@ -1,106 +0,0 @@
-const { api } = require('../../core')
-const { Octokit } = require('@octokit/core')
-const { paginateRest } = require('@octokit/plugin-paginate-rest')
-const CustomOctokit = Octokit.plugin(paginateRest)
-const _ = require('lodash')
-
-const octokit = new CustomOctokit()
-
-const DATA_DIR = process.env.DATA_DIR || './tmp/data'
-const OWNER = 'iptv-org'
-const REPO = 'iptv'
-
-async function main() {
-  try {
-    await api.channels.load()
-    let channels = await api.channels.all()
-    channels = _.keyBy(channels, 'id')
-
-    await api.blocklist.load()
-    let blocklist = await api.blocklist.all()
-    blocklist = _.keyBy(blocklist, 'channel')
-
-    await api.streams.load()
-    let streams = await api.streams.all()
-    streams = _.keyBy(streams, 'channel')
-
-    const channelRequests = await loadChannelRequests()
-    const buffer = {}
-    const report = channelRequests.map(r => {
-      let result = {
-        issueNumber: r.issue.number,
-        channelId: r.channel.id || undefined,
-        status: undefined
-      }
-
-      if (!r.channel || !r.channel.id) result.status = 'error'
-      else if (blocklist[r.channel.id]) result.status = 'blocked'
-      else if (!channels[r.channel.id]) result.status = 'invalid_id'
-      else if (streams[r.channel.id]) result.status = 'fullfilled'
-      else if (buffer[r.channel.id] && !r.channel.url) result.status = 'duplicate'
-      else result.status = 'pending'
-
-      buffer[r.channel.id] = true
-
-      return result
-    })
-    console.table(report)
-  } catch (err) {
-    console.log(err.message)
-  }
-}
-
-main()
-
-async function loadChannelRequests() {
-  const issues = await fetchIssues('channel request')
-
-  return issues.map(parseIssue)
-}
-
-async function fetchIssues(labels) {
-  const issues = await octokit.paginate('GET /repos/{owner}/{repo}/issues', {
-    owner: OWNER,
-    repo: REPO,
-    per_page: 100,
-    labels,
-    direction: 'asc',
-    headers: {
-      'X-GitHub-Api-Version': '2022-11-28'
-    }
-  })
-
-  return issues
-}
-
-function parseIssue(issue) {
-  const buffer = {}
-  const channel = {}
-  const fields = {
-    'Channel ID (required)': 'id',
-    'Channel ID': 'id',
-    'Stream URL (optional)': 'url',
-    'Stream URL': 'url',
-    'Notes (optional)': 'notes',
-    Notes: 'notes'
-  }
-
-  const matches = issue.body.match(/### ([^\r\n]+)\s+([^\r\n]+)/g)
-
-  if (!matches) return { issue, channel: null }
-
-  matches.forEach(item => {
-    const [, fieldLabel, value] = item.match(/### ([^\r\n]+)\s+([^\r\n]+)/)
-    const field = fields[fieldLabel]
-
-    if (!field) return
-
-    buffer[field] = value === '_No response_' ? undefined : value.trim()
-  })
-
-  for (let field in buffer) {
-    channel[field] = buffer[field]
-  }
-
-  return { issue, channel }
-}
53  scripts/commands/report/create.ts  Normal file
@@ -0,0 +1,53 @@
+import { DATA_DIR } from '../../constants'
+import { Collection, Dictionary, IssueLoader, Storage } from '../../core'
+import { Blocked, Channel, Stream } from '../../models'
+
+async function main() {
+  const loader = new IssueLoader()
+
+  const storage = new Storage(DATA_DIR)
+
+  const channelsContent = await storage.json('channels.json')
+  const groupedChannels = new Collection(channelsContent)
+    .map(data => new Channel(data))
+    .groupBy((channel: Channel) => channel.id)
+
+  const streamsContent = await storage.json('streams.json')
+  const groupedStreams = new Collection(streamsContent)
+    .map(data => new Stream(data))
+    .groupBy((stream: Stream) => stream.url)
+
+  const blocklistContent = await storage.json('blocklist.json')
+  const groupedBlocklist = new Collection(blocklistContent)
+    .map(data => new Blocked(data))
+    .groupBy((blocked: Blocked) => blocked.channel)
+
+  const issues = await loader.load({ labels: ['streams:add'] })
+
+  const buffer = new Dictionary()
+  const report = issues.map(data => {
+    const channelId = data.get('channel_id') || undefined
+    const streamUrl = data.get('stream_url') || undefined
+
+    const result = new Dictionary({
+      issueNumber: data.get('issue_number'),
+      channelId,
+      status: undefined
+    })
+
+    if (!channelId || !streamUrl) result.set('status', 'error')
+    else if (groupedBlocklist.has(channelId)) result.set('status', 'blocked')
+    else if (groupedChannels.missing(channelId)) result.set('status', 'invalid_id')
+    else if (groupedStreams.has(streamUrl)) result.set('status', 'fullfilled')
+    else if (buffer.has(streamUrl)) result.set('status', 'duplicate')
+    else result.set('status', 'pending')
+
+    buffer.set(streamUrl, true)
+
+    return result.data()
+  })
+
+  console.table(report.all())
+}
+
+main()
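Note: several scripts above depend on a Dictionary wrapper (get/set/has/missing/data). Its implementation is not part of this diff; a minimal sketch of the subset used, assuming a thin wrapper over a plain object:

// Hypothetical sketch of scripts/core/dictionary — only the methods this diff uses.
class Dictionary {
  constructor(private store: Record<string, any> = {}) {}

  get(key: string): any {
    return this.store[key]
  }

  set(key: string, value: any): void {
    this.store[key] = value
  }

  has(key: string): boolean {
    return this.get(key) !== undefined
  }

  missing(key: string): boolean {
    return !this.has(key)
  }

  data(): Record<string, any> {
    return this.store
  }
}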