mirror of https://github.com/iptv-org/epg.git
synced 2025-05-09 08:30:06 -04:00

Commit ca254a6df0 ("Update scripts"), parent b7214db4fb
37 changed files with 1091 additions and 915 deletions
Deleted file:
@@ -1,7 +0,0 @@
#!/bin/bash

mkdir -p scripts/tmp/data
curl -L -o scripts/tmp/data/channels.json https://iptv-org.github.io/api/channels.json
curl -L -o scripts/tmp/data/countries.json https://iptv-org.github.io/api/countries.json
curl -L -o scripts/tmp/data/regions.json https://iptv-org.github.io/api/regions.json
curl -L -o scripts/tmp/data/subdivisions.json https://iptv-org.github.io/api/subdivisions.json
scripts/commands/api/load.ts (new file, 18 lines)
@@ -0,0 +1,18 @@
import { Logger } from '@freearhey/core'
import { ApiClient } from '../../core'

async function main() {
  const logger = new Logger()
  const client = new ApiClient({ logger })

  const requests = [
    client.download('channels.json'),
    client.download('countries.json'),
    client.download('regions.json'),
    client.download('subdivisions.json')
  ]

  await Promise.all(requests)
}

main()
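For context on how the files fetched by api/load.ts are consumed, other commands in this commit read them back through Storage and wrap each record in ApiChannel. A minimal sketch of that consumer side, assembled from the validate.ts and queue.ts code later in this diff (the helper name loadApiChannels and the relative import paths are assumptions, not part of the commit):

// (sketch, not part of the commit)
import { Storage, Collection } from '@freearhey/core'
import { ApiChannel } from '../../core'
import { DATA_DIR } from '../../constants'

async function loadApiChannels(): Promise<Collection> {
  // channels.json is one of the files downloaded by api/load.ts
  const dataStorage = new Storage(DATA_DIR)
  const channelsContent = await dataStorage.json('channels.json')

  // Wrap each raw record in ApiChannel, as validate.ts and queue.ts do
  return new Collection(channelsContent).map(data => new ApiChannel(data))
}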
scripts/commands/channels/.gitignore (new file, vendored, 1 line)
@@ -0,0 +1 @@
/replace.ts
Deleted file:
@@ -1,160 +0,0 @@
const { api, parser, xml, file, logger } = require('../../core')
const { transliterate } = require('transliteration')
const nodeCleanup = require('node-cleanup')
const { program } = require('commander')
const inquirer = require('inquirer')

program
  .argument('<filepath>', 'Path to *.channels.xml file to edit')
  .option('-c, --country <name>', 'Source country', 'us')
  .parse(process.argv)

const filepath = program.args[0]
const options = program.opts()
const defaultCountry = options.country
const newLabel = ` [new]`

let site
let channels = []

async function main() {
  if (!(await file.exists(filepath))) {
    throw new Error(`File "${filepath}" does not exists`)
    return
  }

  let result = await parser.parseChannels(filepath)
  site = result.site
  channels = result.channels
  channels = channels.map(c => {
    c.xmltv_id = c.xmltv_id
    return c
  })
  await api.channels.load()
  const buffer = []
  for (const channel of channels) {
    if (channel.xmltv_id) {
      if (channel.xmltv_id !== '-') {
        buffer.push(`${channel.xmltv_id}/${channel.lang}`)
      }
      continue
    }
    let choices = await getOptions(channel)
    const question = {
      name: 'option',
      message: `Choose an option:`,
      type: 'list',
      choices,
      pageSize: 10
    }
    await inquirer.prompt(question).then(async selected => {
      switch (selected.option) {
        case 'Overwrite':
          const input = await getInput(channel)
          channel.xmltv_id = input.xmltv_id
          break
        case 'Skip':
          channel.xmltv_id = '-'
          break
        default:
          const [name, xmltv_id] = selected.option
            .replace(/ \[.*\]/, '')
            .split('|')
            .map(i => i.trim().replace(newLabel, ''))
          channel.xmltv_id = xmltv_id
          break
      }

      const found = buffer.includes(`${channel.xmltv_id}/${channel.lang}`)
      if (found) {
        const question = {
          name: 'option',
          message: `"${channel.xmltv_id}" already on the list. Choose an option:`,
          type: 'list',
          choices: ['Skip', 'Add', 'Delete'],
          pageSize: 5
        }
        await inquirer.prompt(question).then(async selected => {
          switch (selected.option) {
            case 'Skip':
              channel.xmltv_id = '-'
              break
            case 'Delete':
              channel.delete = true
              break
            default:
              break
          }
        })
      } else {
        if (channel.xmltv_id !== '-') {
          buffer.push(`${channel.xmltv_id}/${channel.lang}`)
        }
      }
    })
  }
}

main()

function save() {
  if (!file.existsSync(filepath)) return

  channels = channels.filter(c => !c.delete)

  const output = xml.create(channels, site)

  file.writeSync(filepath, output)

  logger.info(`\nFile '${filepath}' successfully saved`)
}

nodeCleanup(() => {
  save()
})

async function getInput(channel) {
  const name = channel.name.trim()
  const input = await inquirer.prompt([
    {
      name: 'xmltv_id',
      message: ' ID:',
      type: 'input',
      default: generateCode(name, defaultCountry)
    }
  ])

  return { name, xmltv_id: input['xmltv_id'] }
}

async function getOptions(channel) {
  const channels = await api.channels.all()
  const channelId = generateCode(channel.name, defaultCountry)
  const similar = await getSimilar(channels, channelId)
  let variants = []
  variants.push(`${channel.name.trim()} | ${channelId}${newLabel}`)
  similar.forEach(i => {
    let alt_names = i.alt_names.length ? ` (${i.alt_names.join(',')})` : ''
    let closed = i.closed ? `[closed:${i.closed}]` : ``
    let replaced_by = i.replaced_by ? `[replaced_by:${i.replaced_by}]` : ''
    variants.push(`${i.name}${alt_names} | ${i.id} ${closed}${replaced_by}[api]`)
  })
  variants.push(`Overwrite`)
  variants.push(`Skip`)

  return variants
}

async function getSimilar(list, channelId) {
  const normChannelId = channelId.split('.')[0].slice(0, 8).toLowerCase()
  return list.filter(i => i.id.split('.')[0].toLowerCase().startsWith(normChannelId))
}

function generateCode(name, country) {
  const id = transliterate(name)
    .replace(/\+/gi, 'Plus')
    .replace(/^\&/gi, 'And')
    .replace(/[^a-z\d]+/gi, '')

  return `${id}.${country}`
}
scripts/commands/channels/editor.ts (new file, 181 lines)
@@ -0,0 +1,181 @@
import { DATA_DIR } from '../../constants'
import { Storage, Collection, Dictionary, Logger } from '@freearhey/core'
import { ChannelsParser, XML, ApiChannel } from '../../core'
import { Channel } from 'epg-grabber'
import { transliterate } from 'transliteration'
import nodeCleanup from 'node-cleanup'
import { program } from 'commander'
import inquirer, { QuestionCollection } from 'inquirer'

program
  .argument('<filepath>', 'Path to *.channels.xml file to edit')
  .option('-c, --country <name>', 'Source country', 'us')
  .parse(process.argv)

const filepath = program.args[0]
const programOptions = program.opts()
const defaultCountry = programOptions.country
const newLabel = ` [new]`

let site: string
let options = new Collection()

async function main() {
  const storage = new Storage()

  if (!(await storage.exists(filepath))) {
    throw new Error(`File "${filepath}" does not exists`)
  }

  const parser = new ChannelsParser({ storage })

  const parsedChannels = await parser.parse(filepath)
  options = parsedChannels.map((channel: Channel) => {
    return {
      channel,
      delete: false
    }
  })

  const dataStorage = new Storage(DATA_DIR)
  const channelsContent = await dataStorage.json('channels.json')
  const channels = new Collection(channelsContent).map(data => new ApiChannel(data))

  const buffer = new Dictionary()
  options.forEach(async (option: { channel: Channel; delete: boolean }) => {
    const channel = option.channel
    if (channel.xmltv_id) {
      if (channel.xmltv_id !== '-') {
        buffer.set(`${channel.xmltv_id}/${channel.lang}`, true)
      }
      return
    }
    let choices = getOptions(channels, channel)
    const question: QuestionCollection = {
      name: 'option',
      message: `Choose an option:`,
      type: 'list',
      choices,
      pageSize: 10
    }

    await inquirer.prompt(question).then(async selected => {
      switch (selected.option) {
        case 'Overwrite':
          const input = await getInput(channel)
          channel.xmltv_id = input.xmltv_id
          break
        case 'Skip':
          channel.xmltv_id = '-'
          break
        default:
          const [, xmltv_id] = selected.option
            .replace(/ \[.*\]/, '')
            .split('|')
            .map((i: string) => i.trim().replace(newLabel, ''))
          channel.xmltv_id = xmltv_id
          break
      }

      const found = buffer.has(`${channel.xmltv_id}/${channel.lang}`)
      if (found) {
        const question: QuestionCollection = {
          name: 'option',
          message: `"${channel.xmltv_id}" already on the list. Choose an option:`,
          type: 'list',
          choices: ['Skip', 'Add', 'Delete'],
          pageSize: 5
        }
        await inquirer.prompt(question).then(async selected => {
          switch (selected.option) {
            case 'Skip':
              channel.xmltv_id = '-'
              break
            case 'Delete':
              option.delete = true
              break
            default:
              break
          }
        })
      } else {
        if (channel.xmltv_id !== '-') {
          buffer.set(`${channel.xmltv_id}/${channel.lang}`, true)
        }
      }
    })
  })
}

main()

function save() {
  const logger = new Logger()
  const storage = new Storage()

  if (!storage.existsSync(filepath)) return

  const channels = options
    .filter((option: { channel: Channel; delete: boolean }) => !option.delete)
    .map((option: { channel: Channel; delete: boolean }) => option.channel)

  const xml = new XML(channels, site)

  storage.saveSync(filepath, xml.toString())

  logger.info(`\nFile '${filepath}' successfully saved`)
}

nodeCleanup(() => {
  save()
})

async function getInput(channel: Channel) {
  const name = channel.name.trim()
  const input = await inquirer.prompt([
    {
      name: 'xmltv_id',
      message: ' ID:',
      type: 'input',
      default: generateCode(name, defaultCountry)
    }
  ])

  return { name, xmltv_id: input['xmltv_id'] }
}

function getOptions(channels: Collection, channel: Channel) {
  const channelId = generateCode(channel.name, defaultCountry)
  const similar = getSimilar(channels, channelId)

  const variants = new Collection()
  variants.add(`${channel.name.trim()} | ${channelId}${newLabel}`)
  similar.forEach((_channel: ApiChannel) => {
    const altNames = _channel.altNames.notEmpty() ? ` (${_channel.altNames.join(',')})` : ''
    const closed = _channel.closed ? `[closed:${_channel.closed}]` : ``
    const replacedBy = _channel.replacedBy ? `[replaced_by:${_channel.replacedBy}]` : ''

    variants.add(`${_channel.name}${altNames} | ${_channel.id} ${closed}${replacedBy}[api]`)
  })
  variants.add(`Overwrite`)
  variants.add(`Skip`)

  return variants.all()
}

function getSimilar(channels: Collection, channelId: string) {
  const normChannelId = channelId.split('.')[0].slice(0, 8).toLowerCase()

  return channels.filter((channel: ApiChannel) =>
    channel.id.split('.')[0].toLowerCase().startsWith(normChannelId)
  )
}

function generateCode(name: string, country: string) {
  const channelId: string = transliterate(name)
    .replace(/\+/gi, 'Plus')
    .replace(/^\&/gi, 'And')
    .replace(/[^a-z\d]+/gi, '')

  return `${channelId}.${country}`
}
Modified file:
@@ -1,18 +1,10 @@
const chalk = require('chalk')
const libxml = require('libxmljs2')
const { program } = require('commander')
const { logger, file } = require('../../core')
import chalk from 'chalk'
import libxml, { ValidationError } from 'libxmljs2'
import { program } from 'commander'
import { Logger, Storage, File } from '@freearhey/core'

const xsd = `<?xml version="1.0" encoding="UTF-8"?>
<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema" elementFormDefault="qualified">
  <xs:element name="site">
    <xs:complexType>
      <xs:sequence>
        <xs:element ref="channels"/>
      </xs:sequence>
      <xs:attribute name="site" use="required" type="xs:string"/>
    </xs:complexType>
  </xs:element>
  <xs:element name="channels">
    <xs:complexType>
      <xs:sequence>
@@ -22,43 +14,53 @@ const xsd = `<?xml version="1.0" encoding="UTF-8"?>
  </xs:element>
  <xs:element name="channel">
    <xs:complexType mixed="true">
      <xs:attribute name="site" use="required" type="xs:string"/>
      <xs:attribute name="lang" use="required" type="xs:string"/>
      <xs:attribute name="site_id" use="required" type="xs:string"/>
      <xs:attribute name="xmltv_id" use="required" type="xs:string"/>
      <xs:attribute name="logo" type="xs:string"/>
    </xs:complexType>
  </xs:element>
</xs:schema>`

program.argument('<filepath>', 'Path to file to validate').parse(process.argv)
program
  .option(
    '-c, --channels <path>',
    'Path to channels.xml file to validate',
    'sites/**/*.channels.xml'
  )
  .parse(process.argv)

const options = program.opts()

async function main() {
  if (!program.args.length) {
    logger.error('required argument "filepath" not specified')
  }
  const logger = new Logger()
  const storage = new Storage()

  let errors = []
  logger.info('options:')
  logger.tree(options)

  for (const filepath of program.args) {
    if (!filepath.endsWith('.xml')) continue
  let errors: ValidationError[] = []

    const xml = await file.read(filepath)
  let files: string[] = await storage.list(options.channels)
  for (const filepath of files) {
    const file = new File(filepath)
    if (file.extension() !== 'xml') continue

    let localErrors = []
    const xml = await storage.load(filepath)

    try {
      const xsdDoc = libxml.parseXml(xsd)
      const doc = libxml.parseXml(xml)
    let localErrors: ValidationError[] = []

      if (!doc.validate(xsdDoc)) {
        localErrors = doc.validationErrors
      }
    } catch (error) {
      localErrors.push(error)
    const xsdDoc = libxml.parseXml(xsd)
    const doc = libxml.parseXml(xml)

    if (!doc.validate(xsdDoc)) {
      localErrors = doc.validationErrors
    }

    if (localErrors.length) {
      console.log(`\n${chalk.underline(filepath)}`)
      localErrors.forEach(error => {
      localErrors.forEach((error: ValidationError) => {
        const position = `${error.line}:${error.column}`
        console.log(` ${chalk.gray(position.padEnd(4, ' '))} ${error.message.trim()}`)
      })
Deleted file:
@@ -1,65 +0,0 @@
const { logger, file, xml, parser } = require('../../core')
const { Command } = require('commander')
const path = require('path')
const _ = require('lodash')

const program = new Command()
program
  .requiredOption('-c, --config <config>', 'Config file')
  .option('-s, --set [args...]', 'Set custom arguments', [])
  .option('-o, --output <output>', 'Output file')
  .option('--clean', 'Delete the previous *.channels.xml if exists')
  .parse(process.argv)

const options = program.opts()

async function main() {
  const config = require(path.resolve(options.config))
  const dir = file.dirname(options.config)
  const outputFilepath = options.output || `${dir}/${config.site}.channels.xml`

  let channels = []
  if (!options.clean && (await file.exists(outputFilepath))) {
    let result = await parser.parseChannels(outputFilepath)

    channels = result.channels
  }

  const args = {}
  options.set.forEach(arg => {
    const [key, value] = arg.split(':')
    args[key] = value
  })

  let parsedChannels = config.channels(args)
  if (isPromise(parsedChannels)) {
    parsedChannels = await parsedChannels
  }
  parsedChannels = parsedChannels.map(c => {
    c.lang = c.lang || 'en'

    return c
  })

  channels = channels.concat(parsedChannels)

  channels = _.uniqBy(channels, c => c.site_id + c.lang)

  channels = _.sortBy(channels, [
    'lang',
    c => (c.xmltv_id ? c.xmltv_id.toLowerCase() : '_'),
    'site_id'
  ])

  const output = xml.create(channels, config.site)

  await file.write(outputFilepath, output)

  logger.info(`File '${outputFilepath}' successfully saved`)
}

main()

function isPromise(promise) {
  return !!promise && typeof promise.then === 'function'
}
scripts/commands/channels/parse.ts (new file, 76 lines)
@@ -0,0 +1,76 @@
import { Logger, File, Collection, Storage } from '@freearhey/core'
import { ChannelsParser, XML } from '../../core'
import { Channel } from 'epg-grabber'
import { Command, OptionValues } from 'commander'
import path from 'path'

const program = new Command()
program
  .requiredOption('-c, --config <config>', 'Config file')
  .option('-s, --set [args...]', 'Set custom arguments')
  .option('-o, --output <output>', 'Output file')
  .option('--clean', 'Delete the previous *.channels.xml if exists')
  .parse(process.argv)

type ParseOptions = {
  config: string
  set?: string
  output?: string
  clean?: boolean
}

const options: ParseOptions = program.opts()

async function main() {
  const storage = new Storage()
  const parser = new ChannelsParser({ storage })
  const logger = new Logger()
  const file = new File(options.config)
  const dir = file.dirname()
  const config = require(path.resolve(options.config))
  const outputFilepath = options.output || `${dir}/${config.site}.channels.xml`

  let channels = new Collection()
  if (!options.clean && (await storage.exists(outputFilepath))) {
    channels = await parser.parse(outputFilepath)
  }

  const args: {
    [key: string]: any
  } = {}

  if (Array.isArray(options.set)) {
    options.set.forEach((arg: string) => {
      const [key, value] = arg.split(':')
      args[key] = value
    })
  }

  let parsedChannels = config.channels(args)
  if (isPromise(parsedChannels)) {
    parsedChannels = await parsedChannels
  }

  channels = channels
    .mergeBy(
      new Collection(parsedChannels),
      (channel: Channel) => channel.site_id.toString() + channel.lang
    )
    .orderBy([
      (channel: Channel) => channel.lang,
      (channel: Channel) => (channel.xmltv_id ? channel.xmltv_id.toLowerCase() : '_'),
      (channel: Channel) => channel.site_id
    ])

  const xml = new XML(channels, config.site)

  await storage.save(outputFilepath, xml.toString())

  logger.info(`File '${outputFilepath}' successfully saved`)
}

main()

function isPromise(promise: any) {
  return !!promise && typeof promise.then === 'function'
}
Deleted file:
@@ -1,68 +0,0 @@
const { parser, logger, api } = require('../../core')
const { program } = require('commander')
const chalk = require('chalk')
const langs = require('langs')

program.argument('<filepath>', 'Path to file to validate').parse(process.argv)

async function main() {
  await api.channels.load()

  const stats = {
    files: 0,
    errors: 0
  }

  if (!program.args.length) {
    logger.error('required argument "filepath" not specified')
  }

  for (const filepath of program.args) {
    if (!filepath.endsWith('.xml')) continue

    const { site, channels } = await parser.parseChannels(filepath)

    const bufferById = {}
    const bufferBySiteId = {}
    const errors = []
    for (const channel of channels) {
      if (!bufferById[channel.xmltv_id + channel.lang]) {
        bufferById[channel.xmltv_id + channel.lang] = channel
      } else {
        errors.push({ type: 'duplicate', ...channel })
        stats.errors++
      }

      if (!bufferBySiteId[channel.site_id + channel.lang]) {
        bufferBySiteId[channel.site_id + channel.lang] = channel
      } else {
        errors.push({ type: 'duplicate', ...channel })
        stats.errors++
      }

      if (!api.channels.find({ id: channel.xmltv_id })) {
        errors.push({ type: 'wrong_xmltv_id', ...channel })
        stats.errors++
      }

      if (!langs.where('1', channel.lang)) {
        errors.push({ type: 'wrong_lang', ...channel })
        stats.errors++
      }
    }

    if (errors.length) {
      console.log(chalk.underline(filepath))
      console.table(errors, ['type', 'lang', 'xmltv_id', 'site_id', 'name'])
      console.log()
      stats.files++
    }
  }

  if (stats.errors > 0) {
    console.log(chalk.red(`${stats.errors} error(s) in ${stats.files} file(s)`))
    process.exit(1)
  }
}

main()
scripts/commands/channels/validate.ts (new file, 95 lines)
@@ -0,0 +1,95 @@
import { Storage, Collection, Dictionary, File, Logger } from '@freearhey/core'
import { ChannelsParser, ApiChannel } from '../../core'
import { program } from 'commander'
import chalk from 'chalk'
import langs from 'langs'
import { DATA_DIR } from '../../constants'
import { Channel } from 'epg-grabber'

program
  .option(
    '-c, --channels <path>',
    'Path to channels.xml file to validate',
    'sites/**/*.channels.xml'
  )
  .parse(process.argv)

const options = program.opts()

type ValidationError = {
  type: 'duplicate' | 'wrong_xmltv_id' | 'wrong_lang'
  name: string
  lang?: string
  xmltv_id?: string
  site_id?: string
  logo?: string
}

async function main() {
  const logger = new Logger()

  logger.info('options:')
  logger.tree(options)

  const parser = new ChannelsParser({ storage: new Storage() })

  const dataStorage = new Storage(DATA_DIR)
  const channelsContent = await dataStorage.json('channels.json')
  const channels = new Collection(channelsContent).map(data => new ApiChannel(data))

  let totalFiles = 0
  let totalErrors = 0
  const storage = new Storage()
  let files: string[] = await storage.list(options.channels)
  for (const filepath of files) {
    const file = new File(filepath)
    if (file.extension() !== 'xml') continue

    const parsedChannels = await parser.parse(filepath)

    const bufferById = new Dictionary()
    const bufferBySiteId = new Dictionary()
    const errors: ValidationError[] = []
    parsedChannels.forEach((channel: Channel) => {
      const bufferId: string = `${channel.xmltv_id}:${channel.lang}`
      if (bufferById.missing(bufferId)) {
        bufferById.set(bufferId, true)
      } else {
        errors.push({ type: 'duplicate', ...channel })
        totalErrors++
      }

      const bufferSiteId: string = `${channel.site_id}:${channel.lang}`
      if (bufferBySiteId.missing(bufferSiteId)) {
        bufferBySiteId.set(bufferSiteId, true)
      } else {
        errors.push({ type: 'duplicate', ...channel })
        totalErrors++
      }

      if (channels.missing((_channel: ApiChannel) => _channel.id === channel.xmltv_id)) {
        errors.push({ type: 'wrong_xmltv_id', ...channel })
        totalErrors++
      }

      if (!langs.where('1', channel.lang)) {
        errors.push({ type: 'wrong_lang', ...channel })
        totalErrors++
      }
    })

    if (errors.length) {
      console.log(chalk.underline(filepath))
      console.table(errors, ['type', 'lang', 'xmltv_id', 'site_id', 'name'])
      console.log()
      totalFiles++
    }
  }

  if (totalErrors > 0) {
    console.log(chalk.red(`${totalErrors} error(s) in ${totalFiles} file(s)`))
    process.exit(1)
  }
}

main()
Deleted file:
@@ -1,220 +0,0 @@
const { program } = require('commander')
const _ = require('lodash')
const { EPGGrabber, generateXMLTV, Channel, Program } = require('epg-grabber')
const { db, logger, date, timer, file, parser, api, zip } = require('../../core')
const path = require('path')
const dayjs = require('dayjs')
const utc = require('dayjs/plugin/utc')
const CronJob = require('cron').CronJob

dayjs.extend(utc)

const BASE_DIR = process.env.BASE_DIR || '.'
const CURR_DATE = process.env.CURR_DATE || new Date()

program
  .requiredOption('-s, --site <name>', 'Name of the site to parse')
  .option('-l, --lang <code>', 'Filter channels by language (ISO 639-2 code)')
  .option('-o, --output <path>', 'Path to output file')
  .option('--days <days>', 'Override the number of days for which the program will be loaded')
  .option('--cron <expression>', 'Schedule a script run')
  .option('--gzip', 'Create a compressed version of the guide as well', false)
  .parse(process.argv)

const options = program.opts()

options.output = options.output || file.resolve(`${BASE_DIR}/guides/{lang}/{site}.xml`)
options.config = file.resolve(`${BASE_DIR}/sites/${options.site}/${options.site}.config.js`)
options.channels = file.resolve(`${BASE_DIR}/sites/${options.site}/${options.site}*.channels.xml`)

let channels = []
let programs = []
let runIndex = 0

async function main() {
  logger.start('staring...')

  logger.info('settings:')
  for (let prop in options) {
    logger.info(` ${prop}: ${options[prop]}`)
  }

  const config = await loadConfig(options.config)
  const queue = await createQueue(options.channels, config)
  const outputPath = options.output

  if (options.cron) {
    const job = new CronJob(options.cron, function () {
      runJob(config, queue, outputPath)
    })
    job.start()
  } else {
    await runJob(config, queue, outputPath)
  }
}

async function loadConfig(configPath) {
  let config = require(file.resolve(configPath))
  config = _.merge(config, {})
  config.days = config.days || 1

  logger.info('config:')
  logConfig(config)

  return config
}

function logConfig(config, level = 1) {
  let padLeft = ' '.repeat(level)
  for (let prop in config) {
    if (typeof config[prop] === 'string' || typeof config[prop] === 'number') {
      logger.info(`${padLeft}${prop}: ${config[prop]}`)
    } else if (typeof config[prop] === 'object') {
      level++
      logger.info(`${padLeft}${prop}:`)
      logConfig(config[prop], level)
    }
  }
}

async function runJob(config, queue, outputPath) {
  runIndex++
  logger.info(`run #${runIndex}:`)

  timer.start()

  await grab(queue, config)

  await save(outputPath, channels, programs)

  logger.success(` done in ${timer.format('HH[h] mm[m] ss[s]')}`)
}

async function grab(queue, config) {
  const grabber = new EPGGrabber(config)
  const total = queue.length

  let i = 1
  for (const item of queue) {
    let channel = item.channel
    let date = item.date
    channels.push(item.channel)
    await grabber
      .grab(channel, date, (data, err) => {
        logger.info(
          ` [${i}/${total}] ${channel.site} (${channel.lang}) - ${channel.xmltv_id} - ${dayjs
            .utc(data.date)
            .format('MMM D, YYYY')} (${data.programs.length} programs)`
        )
        if (i < total) i++

        if (err) {
          logger.info(` ERR: ${err.message}`)
        }
      })
      .then(results => {
        programs = programs.concat(results)
      })
  }
}

async function createQueue(channelsPath, config) {
  logger.info('creating queue...')
  let queue = {}
  await api.channels.load().catch(logger.error)
  const files = await file.list(channelsPath).catch(logger.error)
  const utcDate = date.getUTC(CURR_DATE)
  const days = options.days ? parseInt(options.days) : config.days
  for (const filepath of files) {
    logger.info(` loading "${filepath}"...`)
    try {
      const dir = file.dirname(filepath)
      const { channels } = await parser.parseChannels(filepath)
      const filename = file.basename(filepath)
      const dates = Array.from({ length: days }, (_, i) => utcDate.add(i, 'd'))
      for (const channel of channels) {
        if (!channel.site || !channel.xmltv_id) continue
        if (options.lang && channel.lang !== options.lang) continue
        const found = api.channels.find({ id: channel.xmltv_id })
        if (found) {
          channel.logo = found.logo
        }
        for (const d of dates) {
          const dateString = d.toJSON()
          const key = `${channel.site}:${channel.lang}:${channel.xmltv_id}:${dateString}`
          if (!queue[key]) {
            queue[key] = {
              channel,
              date: dateString,
              config,
              error: null
            }
          }
        }
      }
    } catch (err) {
      logger.error(err)
      continue
    }
  }

  queue = Object.values(queue)

  logger.info(` added ${queue.length} items`)

  return queue
}

async function save(template, parsedChannels, programs = []) {
  const variables = file.templateVariables(template)

  const groups = _.groupBy(parsedChannels, channel => {
    let groupId = ''
    for (let key in channel) {
      if (variables.includes(key)) {
        groupId += channel[key]
      }
    }

    return groupId
  })

  for (let groupId in groups) {
    const channels = groups[groupId]

    let output = {
      channels,
      programs: [],
      date: CURR_DATE
    }

    for (let program of programs) {
      let programLang = program.titles[0].lang
      let channel = channels.find(c => c.xmltv_id === program.channel && c.lang === programLang)
      if (!channel) continue

      output.programs.push(new Program(program, channel))
    }

    output.channels = _.sortBy(output.channels, 'xmltv_id')
    output.channels = _.uniqBy(output.channels, 'xmltv_id')

    output.programs = _.sortBy(output.programs, ['channel', 'start'])
    output.programs = _.uniqBy(output.programs, p => p.channel + p.start)

    const outputPath = file.templateFormat(template, output.channels[0])
    const xmlFilepath = outputPath
    const xmltv = generateXMLTV(output)
    logger.info(` saving to "${xmlFilepath}"...`)
    await file.create(xmlFilepath, xmltv)

    if (options.gzip) {
      const gzFilepath = `${outputPath}.gz`
      const compressed = await zip.compress(xmltv)
      logger.info(` saving to "${gzFilepath}"...`)
      await file.create(gzFilepath, compressed)
    }
  }
}

main()
scripts/commands/epg/grab.ts (new file, 115 lines)
@@ -0,0 +1,115 @@
import { Logger, Timer, Storage, Collection } from '@freearhey/core'
import { program } from 'commander'
import { CronJob } from 'cron'
import { Queue, Job, ChannelsParser } from '../../core'
import { Channel } from 'epg-grabber'
import path from 'path'
import { SITES_DIR } from '../../constants'

program
  .option('-s, --site <name>', 'Name of the site to parse')
  .option(
    '-c, --channels <path>',
    'Path to *.channels.xml file (required if the "--site" attribute is not specified)'
  )
  .option('-o, --output <path>', 'Path to output file', 'guide.xml')
  .option('-l, --lang <code>', 'Filter channels by language (ISO 639-2 code)')
  .option('-t, --timeout <milliseconds>', 'Override the default timeout for each request')
  .option(
    '--days <days>',
    'Override the number of days for which the program will be loaded (defaults to the value from the site config)',
    value => parseInt(value)
  )
  .option(
    '--maxConnections <number>',
    'Limit on the number of concurrent requests',
    value => parseInt(value),
    1
  )
  .option('--cron <expression>', 'Schedule a script run (example: "0 0 * * *")')
  .option('--gzip', 'Create a compressed version of the guide as well', false)
  .parse(process.argv)

export type GrabOptions = {
  site?: string
  channels?: string
  output: string
  gzip: boolean
  maxConnections: number
  timeout?: string
  lang?: string
  days?: number
  cron?: string
}

const options: GrabOptions = program.opts()

async function main() {
  if (!options.site && !options.channels)
    throw new Error('One of the arguments must be presented: `--site` or `--channels`')

  const logger = new Logger()

  logger.start('staring...')

  logger.info('config:')
  logger.tree(options)

  logger.info(`loading channels...`)
  const storage = new Storage()
  const parser = new ChannelsParser({ storage })

  let files: string[] = []
  if (options.site) {
    files = await storage.list(path.join(SITES_DIR, `${options.site}/*.channels.xml`))
  } else if (options.channels) {
    files = await storage.list(options.channels)
  }

  let parsedChannels = new Collection()
  for (let filepath of files) {
    parsedChannels = parsedChannels.concat(await parser.parse(filepath))
  }
  if (options.lang) {
    parsedChannels = parsedChannels.filter((channel: Channel) => channel.lang === options.lang)
  }
  logger.info(` found ${parsedChannels.count()} channels`)

  logger.info('creating queue...')
  const queue = new Queue({
    parsedChannels,
    logger,
    options
  })
  await queue.create()
  logger.info(` added ${queue.size()} items`)

  const job = new Job({
    queue,
    logger,
    options
  })

  let runIndex = 1
  if (options.cron) {
    const cronJob = new CronJob(options.cron, async () => {
      logger.info(`run #${runIndex}:`)
      const timer = new Timer()
      timer.start()
      await job.run()
      runIndex++
      logger.success(` done in ${timer.format('HH[h] mm[m] ss[s]')}`)
    })
    cronJob.start()
  } else {
    logger.info(`run #${runIndex}:`)
    const timer = new Timer()
    timer.start()
    await job.run()
    logger.success(` done in ${timer.format('HH[h] mm[m] ss[s]')}`)
  }

  logger.info('finished')
}

main()
scripts/constants.ts (new file, 4 lines)
@@ -0,0 +1,4 @@
export const SITES_DIR = process.env.SITES_DIR || './sites'
export const GUIDES_DIR = process.env.GUIDES_DIR || './guides'
export const DATA_DIR = process.env.DATA_DIR || './temp/data'
export const CURR_DATE = process.env.CURR_DATE || new Date().toISOString()
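Each constant falls back to a default unless the corresponding environment variable is set; a small sketch of that override behaviour (the example value is hypothetical, not used anywhere in the commit):

// (sketch, not part of the commit)
import { DATA_DIR, CURR_DATE } from './constants'

// Without overrides: './temp/data' and the current ISO timestamp.
// With e.g. DATA_DIR=/tmp/epg-data in the environment, every command that
// reads or writes channels.json uses that directory instead.
console.log(DATA_DIR, CURR_DATE)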
Deleted file:
@@ -1,32 +0,0 @@
const _ = require('lodash')
const file = require('./file')

const DATA_DIR = process.env.DATA_DIR || './scripts/tmp/data'

class API {
  constructor(filepath) {
    this.filepath = file.resolve(filepath)
  }

  async load() {
    const data = await file.read(this.filepath)
    this.collection = JSON.parse(data)
  }

  find(query) {
    return _.find(this.collection, query)
  }

  all() {
    return this.collection
  }
}

const api = {}

api.channels = new API(`${DATA_DIR}/channels.json`)
api.regions = new API(`${DATA_DIR}/regions.json`)
api.countries = new API(`${DATA_DIR}/countries.json`)
api.subdivisions = new API(`${DATA_DIR}/subdivisions.json`)

module.exports = api
scripts/core/apiChannel.ts (new file, 79 lines)
@@ -0,0 +1,79 @@
import { Collection } from '@freearhey/core'

type ApiChannelProps = {
  id: string
  name: string
  alt_names: string[]
  network: string
  owners: string[]
  country: string
  subdivision: string
  city: string
  broadcast_area: string[]
  languages: string[]
  categories: string[]
  is_nsfw: boolean
  launched: string
  closed: string
  replaced_by: string
  website: string
  logo: string
}

export class ApiChannel {
  id: string
  name: string
  altNames: Collection
  network: string
  owners: Collection
  country: string
  subdivision: string
  city: string
  broadcastArea: Collection
  languages: Collection
  categories: Collection
  isNSFW: boolean
  launched: string
  closed: string
  replacedBy: string
  website: string
  logo: string

  constructor({
    id,
    name,
    alt_names,
    network,
    owners,
    country,
    subdivision,
    city,
    broadcast_area,
    languages,
    categories,
    is_nsfw,
    launched,
    closed,
    replaced_by,
    website,
    logo
  }: ApiChannelProps) {
    this.id = id
    this.name = name
    this.altNames = new Collection(alt_names)
    this.network = network
    this.owners = new Collection(owners)
    this.country = country
    this.subdivision = subdivision
    this.city = city
    this.broadcastArea = new Collection(broadcast_area)
    this.languages = new Collection(languages)
    this.categories = new Collection(categories)
    this.isNSFW = is_nsfw
    this.launched = launched
    this.closed = closed
    this.replacedBy = replaced_by
    this.website = website
    this.logo = logo
  }
}
scripts/core/apiClient.ts (new file, 59 lines)
@@ -0,0 +1,59 @@
import { Logger, Storage } from '@freearhey/core'
import axios, { AxiosInstance, AxiosResponse, AxiosProgressEvent } from 'axios'
import cliProgress, { MultiBar } from 'cli-progress'
import numeral from 'numeral'

export class ApiClient {
  progressBar: MultiBar
  client: AxiosInstance
  storage: Storage
  logger: Logger

  constructor({ logger }: { logger: Logger }) {
    this.logger = logger
    this.client = axios.create({
      responseType: 'stream'
    })
    this.storage = new Storage()
    this.progressBar = new cliProgress.MultiBar({
      stopOnComplete: true,
      hideCursor: true,
      forceRedraw: true,
      barsize: 36,
      format(options, params, payload) {
        const filename = payload.filename.padEnd(18, ' ')
        const barsize = options.barsize || 40
        const percent = (params.progress * 100).toFixed(2)
        const speed = payload.speed ? numeral(payload.speed).format('0.0 b') + '/s' : 'N/A'
        const total = numeral(params.total).format('0.0 b')
        const completeSize = Math.round(params.progress * barsize)
        const incompleteSize = barsize - completeSize
        const bar =
          options.barCompleteString && options.barIncompleteString
            ? options.barCompleteString.substr(0, completeSize) +
              options.barGlue +
              options.barIncompleteString.substr(0, incompleteSize)
            : '-'.repeat(barsize)

        return `${filename} [${bar}] ${percent}% | ETA: ${params.eta}s | ${total} | ${speed}`
      }
    })
  }

  async download(filename: string) {
    const stream = await this.storage.createStream(`/temp/data/${filename}`)

    const bar = this.progressBar.create(0, 0, { filename })

    this.client
      .get(`https://iptv-org.github.io/api/${filename}`, {
        onDownloadProgress({ total, loaded, rate }: AxiosProgressEvent) {
          if (total) bar.setTotal(total)
          bar.update(loaded, { speed: rate })
        }
      })
      .then((response: AxiosResponse) => {
        response.data.pipe(stream)
      })
  }
}
scripts/core/channelsParser.ts (new file, 24 lines)
@@ -0,0 +1,24 @@
import { parseChannels } from 'epg-grabber'
import { Storage, Collection } from '@freearhey/core'

type ChannelsParserProps = {
  storage: Storage
}

export class ChannelsParser {
  storage: Storage

  constructor({ storage }: ChannelsParserProps) {
    this.storage = storage
  }

  async parse(filepath: string) {
    let parsedChannels = new Collection()

    const content = await this.storage.load(filepath)
    const channels = parseChannels(content)
    parsedChannels = parsedChannels.concat(new Collection(channels))

    return parsedChannels
  }
}
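A short usage sketch of ChannelsParser, mirroring how grab.ts and editor.ts call it elsewhere in this diff (the file path and the example wrapper function are placeholders, not part of the commit):

// (sketch, not part of the commit)
import { Storage } from '@freearhey/core'
import { ChannelsParser } from './channelsParser'

async function example() {
  const parser = new ChannelsParser({ storage: new Storage() })
  // Returns a Collection of epg-grabber Channel objects parsed from the XML
  const channels = await parser.parse('sites/example.com/example.com.channels.xml')
  console.log(channels.count())
}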
scripts/core/configLoader.ts (new file, 19 lines)
@@ -0,0 +1,19 @@
import { SiteConfig } from 'epg-grabber'
import _ from 'lodash'

export class ConfigLoader {
  async load(filepath: string): Promise<SiteConfig> {
    const config = (await import(filepath)).default

    return _.merge(
      {
        delay: 0,
        maxConnections: 1,
        request: {
          timeout: 30000
        }
      },
      config
    )
  }
}
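The loader merges a site config over a set of defaults, so a config that only defines its own fields still receives delay, maxConnections and request.timeout. A sketch under that assumption (the path and the example wrapper are placeholders, not part of the commit):

// (sketch, not part of the commit)
import { ConfigLoader } from './configLoader'

async function example() {
  const loader = new ConfigLoader()
  const config = await loader.load('/absolute/path/to/example.com.config.js')

  console.log(config.maxConnections) // 1 unless the site config overrides it
  console.log(config.request?.timeout) // 30000 unless the site config overrides it
}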
Deleted file:
@@ -1,76 +0,0 @@
const nedb = require('nedb-promises')
const file = require('./file')

const DB_DIR = process.env.DB_DIR || './scripts/tmp/database'

class Database {
  constructor(filepath) {
    this.filepath = filepath
  }

  load() {
    this.db = nedb.create({
      filename: file.resolve(this.filepath),
      autoload: true,
      onload: err => {
        if (err) console.error(err)
      },
      compareStrings: (a, b) => {
        a = a.replace(/\s/g, '_')
        b = b.replace(/\s/g, '_')

        return a.localeCompare(b, undefined, {
          sensitivity: 'accent',
          numeric: true
        })
      }
    })
  }

  removeIndex(field) {
    return this.db.removeIndex(field)
  }

  addIndex(options) {
    return this.db.ensureIndex(options)
  }

  compact() {
    return this.db.persistence.compactDatafile()
  }

  stopAutocompact() {
    return this.db.persistence.stopAutocompaction()
  }

  reset() {
    return file.clear(this.filepath)
  }

  count(query) {
    return this.db.count(query)
  }

  insert(doc) {
    return this.db.insert(doc)
  }

  update(query, update) {
    return this.db.update(query, update)
  }

  find(query) {
    return this.db.find(query)
  }

  remove(query, options) {
    return this.db.remove(query, options)
  }
}

const db = {}

db.queue = new Database(`${DB_DIR}/queue.db`)
db.programs = new Database(`${DB_DIR}/programs.db`)

module.exports = db
Deleted file:
@@ -1,93 +0,0 @@
const path = require('path')
const glob = require('glob')
const fs = require('fs-extra')

const file = {}

file.templateVariables = function (template) {
  const match = template.match(/{[^}]+}/g)

  return Array.isArray(match) ? match.map(s => s.substring(1, s.length - 1)) : []
}

file.templateFormat = function (template, obj) {
  let output = template
  for (let key in obj) {
    const regex = new RegExp(`{${key}}`, 'g')
    const value = obj[key] || undefined
    output = output.replace(regex, value)
  }

  return output
}

file.list = function (pattern) {
  return new Promise(resolve => {
    glob(pattern, function (err, files) {
      resolve(files)
    })
  })
}

file.getFilename = function (filepath) {
  return path.parse(filepath).name
}

file.createDir = async function (dir) {
  if (await file.exists(dir)) return

  return fs.mkdir(dir, { recursive: true }).catch(console.error)
}

file.exists = function (filepath) {
  return fs.exists(path.resolve(filepath))
}

file.existsSync = function (filepath) {
  return fs.existsSync(path.resolve(filepath))
}

file.read = function (filepath) {
  return fs.readFile(path.resolve(filepath), { encoding: 'utf8' }).catch(console.error)
}

file.append = function (filepath, data) {
  return fs.appendFile(path.resolve(filepath), data).catch(console.error)
}

file.create = function (filepath, data = '') {
  filepath = path.resolve(filepath)
  const dir = path.dirname(filepath)

  return file
    .createDir(dir)
    .then(() => file.write(filepath, data))
    .catch(console.error)
}

file.write = function (filepath, data = '') {
  return fs.writeFile(path.resolve(filepath), data, { encoding: 'utf8' }).catch(console.error)
}

file.writeSync = function (filepath, data = '') {
  return fs.writeFileSync(path.resolve(filepath), data, { encoding: 'utf8' })
}

file.clear = async function (filepath) {
  if (await file.exists(filepath)) return file.write(filepath, '')
  return true
}

file.resolve = function (filepath) {
  return path.resolve(filepath)
}

file.dirname = function (filepath) {
  return path.dirname(filepath)
}

file.basename = function (filepath) {
  return path.basename(filepath)
}

module.exports = file
scripts/core/grabber.ts (new file, 75 lines)
@@ -0,0 +1,75 @@
import { EPGGrabber, GrabCallbackData, EPGGrabberMock, SiteConfig, Channel } from 'epg-grabber'
import { Logger, Collection } from '@freearhey/core'
import { Queue } from './'
import { GrabOptions } from '../commands/epg/grab'
import { TaskQueue, PromisyClass } from 'cwait'

type GrabberProps = {
  logger: Logger
  queue: Queue
  options: GrabOptions
}

export class Grabber {
  logger: Logger
  queue: Queue
  options: GrabOptions

  constructor({ logger, queue, options }: GrabberProps) {
    this.logger = logger
    this.queue = queue
    this.options = options
  }

  async grab(): Promise<{ channels: Collection; programs: Collection }> {
    const taskQueue = new TaskQueue(Promise as PromisyClass, this.options.maxConnections)

    const total = this.queue.size()

    const channels = new Collection()
    let programs = new Collection()
    let i = 1

    await Promise.all(
      this.queue.items().map(
        taskQueue.wrap(
          async (queueItem: { channel: Channel; config: SiteConfig; date: string }) => {
            const { channel, config, date } = queueItem

            channels.add(channel)

            if (this.options.timeout !== undefined) {
              const timeout = parseInt(this.options.timeout)
              config.request = { ...config.request, ...{ timeout } }
            }

            const grabber =
              process.env.NODE_ENV === 'test' ? new EPGGrabberMock(config) : new EPGGrabber(config)
            const _programs = await grabber.grab(
              channel,
              date,
              (data: GrabCallbackData, error: Error | null) => {
                const { programs, date } = data

                this.logger.info(
                  ` [${i}/${total}] ${channel.site} (${channel.lang}) - ${
                    channel.xmltv_id
                  } - ${date.format('MMM D, YYYY')} (${programs.length} programs)`
                )
                if (i < total) i++

                if (error) {
                  this.logger.info(` ERR: ${error.message}`)
                }
              }
            )

            programs = programs.concat(new Collection(_programs))
          }
        )
      )
    )

    return { channels, programs }
  }
}
scripts/core/guide.ts (new file, 55 lines)
@@ -0,0 +1,55 @@
import { Collection, Logger, DateTime, Storage, Zip } from '@freearhey/core'
import { Channel } from 'epg-grabber'
import { XMLTV } from '../core'
import { CURR_DATE } from '../constants'

type GuideProps = {
  channels: Collection
  programs: Collection
  logger: Logger
  filepath: string
  gzip: boolean
}

export class Guide {
  channels: Collection
  programs: Collection
  logger: Logger
  storage: Storage
  filepath: string
  gzip: boolean

  constructor({ channels, programs, logger, filepath, gzip }: GuideProps) {
    this.channels = channels
    this.programs = programs
    this.logger = logger
    this.storage = new Storage()
    this.filepath = filepath
    this.gzip = gzip || false
  }

  async save() {
    const channels = this.channels.uniqBy(
      (channel: Channel) => `${channel.xmltv_id}:${channel.site}`
    )
    const programs = this.programs

    const xmltv = new XMLTV({
      channels,
      programs,
      date: new DateTime(CURR_DATE, { zone: 'UTC' })
    })

    const xmlFilepath = this.filepath
    this.logger.info(` saving to "${xmlFilepath}"...`)
    await this.storage.save(xmlFilepath, xmltv.toString())

    if (this.gzip) {
      const zip = new Zip()
      const compressed = await zip.compress(xmltv.toString())
      const gzFilepath = `${this.filepath}.gz`
      this.logger.info(` saving to "${gzFilepath}"...`)
      await this.storage.save(gzFilepath, compressed)
    }
  }
}
scripts/core/guideManager.ts (new file, 61 lines)
@@ -0,0 +1,61 @@
import { Collection, Logger, Storage, StringTemplate } from '@freearhey/core'
import { OptionValues } from 'commander'
import { Channel, Program } from 'epg-grabber'
import { Guide } from '.'

type GuideManagerProps = {
  options: OptionValues
  logger: Logger
  channels: Collection
  programs: Collection
}

export class GuideManager {
  options: OptionValues
  storage: Storage
  logger: Logger
  channels: Collection
  programs: Collection

  constructor({ channels, programs, logger, options }: GuideManagerProps) {
    this.options = options
    this.logger = logger
    this.channels = channels
    this.programs = programs
    this.storage = new Storage()
  }

  async createGuides() {
    const pathTemplate = new StringTemplate(this.options.output)

    const groupedChannels = this.channels
      .orderBy([(channel: Channel) => channel.xmltv_id])
      .uniqBy((channel: Channel) => `${channel.xmltv_id}:${channel.site}:${channel.lang}`)
      .groupBy((channel: Channel) => {
        return pathTemplate.format({ lang: channel.lang || 'en', site: channel.site || '' })
      })

    const groupedPrograms = this.programs
      .orderBy([(program: Program) => program.channel, (program: Program) => program.start])
      .groupBy((program: Program) => {
        const lang =
          program.titles && program.titles.length && program.titles[0].lang
            ? program.titles[0].lang
            : 'en'

        return pathTemplate.format({ lang, site: program.site || '' })
      })

    for (const groupKey of groupedPrograms.keys()) {
      const guide = new Guide({
        filepath: groupKey,
        gzip: this.options.gzip,
        channels: new Collection(groupedChannels.get(groupKey)),
        programs: new Collection(groupedPrograms.get(groupKey)),
        logger: this.logger
      })

      await guide.save()
    }
  }
}
Deleted file:
@@ -1,11 +0,0 @@
exports.db = require('./db')
exports.logger = require('./logger')
exports.file = require('./file')
exports.parser = require('./parser')
exports.timer = require('./timer')
exports.markdown = require('./markdown')
exports.api = require('./api')
exports.date = require('./date')
exports.table = require('./table')
exports.xml = require('./xml')
exports.zip = require('./zip')
scripts/core/index.ts (new file, 11 lines)
@@ -0,0 +1,11 @@
export * from './xml'
export * from './channelsParser'
export * from './xmltv'
export * from './configLoader'
export * from './grabber'
export * from './job'
export * from './queue'
export * from './guideManager'
export * from './guide'
export * from './apiChannel'
export * from './apiClient'
scripts/core/job.ts (new file, 34 lines)
@@ -0,0 +1,34 @@
import { Logger } from '@freearhey/core'
import { Queue, Grabber, GuideManager } from '.'
import { GrabOptions } from '../commands/epg/grab'

type JobProps = {
  options: GrabOptions
  logger: Logger
  queue: Queue
}

export class Job {
  options: GrabOptions
  logger: Logger
  grabber: Grabber

  constructor({ queue, logger, options }: JobProps) {
    this.options = options
    this.logger = logger
    this.grabber = new Grabber({ logger, queue, options })
  }

  async run() {
    const { channels, programs } = await this.grabber.grab()

    const manager = new GuideManager({
      channels,
      programs,
      options: this.options,
      logger: this.logger
    })

    await manager.createGuides()
  }
}
Deleted file:
@@ -1,3 +0,0 @@
const { consola } = require('consola')

module.exports = consola
Deleted file:
@@ -1,10 +0,0 @@
const markdownInclude = require('markdown-include')
const file = require('./file')

const markdown = {}

markdown.compile = function (filepath) {
  markdownInclude.compileFiles(file.resolve(filepath))
}

module.exports = markdown
Deleted file:
@@ -1,29 +0,0 @@
const file = require('./file')
const grabber = require('epg-grabber')

const parser = {}

parser.parseChannels = async function (filepath) {
  const content = await file.read(filepath)

  return grabber.parseChannels(content)
}

parser.parseLogs = async function (filepath) {
  const content = await file.read(filepath)
  if (!content) return []
  const lines = content.split('\n')

  return lines.map(line => (line ? JSON.parse(line) : null)).filter(l => l)
}

parser.parseNumber = function (string) {
  const parsed = parseInt(string)
  if (isNaN(parsed)) {
    throw new Error('scripts/core/parser.js:parseNumber() Input value is not a number')
  }

  return parsed
}

module.exports = parser
94 scripts/core/queue.ts Normal file
@@ -0,0 +1,94 @@
import { Storage, Collection, DateTime, Logger, Dictionary } from '@freearhey/core'
import { ChannelsParser, ConfigLoader, ApiChannel } from './'
import { SITES_DIR, DATA_DIR, CURR_DATE } from '../constants'
import { Channel, SiteConfig } from 'epg-grabber'
import path from 'path'
import { GrabOptions } from '../commands/epg/grab'

export type QueueItem = {
  channel: Channel
  date: string
  config: SiteConfig
  error: string | null
}

type QueueProps = {
  logger: Logger
  options: GrabOptions
  parsedChannels: Collection
}

export class Queue {
  configLoader: ConfigLoader
  logger: Logger
  sitesStorage: Storage
  dataStorage: Storage
  parser: ChannelsParser
  parsedChannels: Collection
  options: GrabOptions
  date: DateTime
  _items: QueueItem[] = []

  constructor({ parsedChannels, logger, options }: QueueProps) {
    this.parsedChannels = parsedChannels
    this.logger = logger
    this.sitesStorage = new Storage()
    this.dataStorage = new Storage(DATA_DIR)
    this.parser = new ChannelsParser({ storage: new Storage() })
    this.date = new DateTime(CURR_DATE)
    this.options = options
    this.configLoader = new ConfigLoader()
  }

  async create() {
    const channelsContent = await this.dataStorage.json('channels.json')
    const channels = new Collection(channelsContent).map(data => new ApiChannel(data))

    const queue = new Dictionary()

    for (const channel of this.parsedChannels.all()) {
      if (!channel.site || !channel.xmltv_id) continue
      if (this.options.lang && channel.lang !== this.options.lang) continue

      const configPath = path.resolve(SITES_DIR, `${channel.site}/${channel.site}.config.js`)
      const config: SiteConfig = await this.configLoader.load(configPath)

      const found: ApiChannel = channels.first(
        (_channel: ApiChannel) => _channel.id === channel.xmltv_id
      )
      if (found) {
        channel.logo = found.logo
      }

      const days = this.options.days || config.days || 1
      const dates = Array.from({ length: days }, (_, day) => this.date.add(day, 'd'))
      dates.forEach((date: DateTime) => {
        const dateString = date.toJSON()
        const key = `${channel.site}:${channel.lang}:${channel.xmltv_id}:${dateString}`

        if (queue.missing(key)) {
          queue.set(key, {
            channel,
            date: dateString,
            config,
            error: null
          })
        }
      })
    }

    this._items = Object.values(queue.data())
  }

  size(): number {
    return this._items.length
  }

  items(): QueueItem[] {
    return this._items
  }

  isEmpty(): boolean {
    return this._items.length === 0
  }
}
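A simplified illustration of the de-duplication key used in create() above; the Map here stands in for the Dictionary from @freearhey/core, and the channel values are made up. It only shows that one queue item is created per site/lang/channel/date combination.

// Simplified stand-in for Dictionary: one entry per site:lang:xmltv_id:date key.
const items = new Map<string, { xmltv_id: string; date: string }>()
const channel = { site: 'example.com', lang: 'en', xmltv_id: 'Channel1.us' } // hypothetical values
const dates = ['2025-01-01T00:00:00.000Z', '2025-01-02T00:00:00.000Z']

for (const date of dates) {
  const key = `${channel.site}:${channel.lang}:${channel.xmltv_id}:${date}`
  if (!items.has(key)) items.set(key, { xmltv_id: channel.xmltv_id, date }) // same role as queue.missing() / queue.set()
}
// items.size === 2 -> one grab per channel per requested day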
@@ -1,47 +0,0 @@
const table = {}

table.create = function (data, cols) {
  let output = '<table>\r\n'

  output += '  <thead>\r\n    <tr>'
  for (let column of cols) {
    output += `<th align="left">${column}</th>`
  }
  output += '</tr>\r\n  </thead>\r\n'

  output += '  <tbody>\r\n'
  output += getHTMLRows(data)
  output += '  </tbody>\r\n'

  output += '</table>'

  return output
}

function getHTMLRows(data) {
  let output = ''
  for (let group of data) {
    let rowspan = group.length
    for (let [j, row] of group.entries()) {
      output += '    <tr>'
      for (let [i, value] of row.entries()) {
        if (i === 0 && j === 0) {
          output += `<td valign="top" rowspan="${rowspan}">${value}</td>`
        } else if (i > 0) {
          if (typeof value === 'number') {
            output += `<td align="right" nowrap>${value}</td>`
          } else {
            output += `<td nowrap>${value}</td>`
          }
        }
      }
      output += '</tr>\r\n'
    }
  }

  return output
}

function getSpan() {}

module.exports = table
@@ -1,29 +0,0 @@
const { performance } = require('perf_hooks')
const dayjs = require('dayjs')
const duration = require('dayjs/plugin/duration')
const relativeTime = require('dayjs/plugin/relativeTime')

dayjs.extend(relativeTime)
dayjs.extend(duration)

const timer = {}

let t0 = 0

timer.start = function () {
  t0 = performance.now()
}

timer.format = function (f) {
  let t1 = performance.now()

  return dayjs.duration(t1 - t0).format(f)
}

timer.humanize = function (suffix = true) {
  let t1 = performance.now()

  return dayjs.duration(t1 - t0).humanize(suffix)
}

module.exports = timer
@@ -1,25 +1,36 @@
-const xml = {}
+import { Collection } from '@freearhey/core'
+import { Channel } from 'epg-grabber'
 
-xml.create = function (items, site) {
-  let output = `<?xml version="1.0" encoding="UTF-8"?>\r\n<site site="${site}">\r\n  <channels>\r\n`
+export class XML {
+  items: Collection
+  site: string
 
-  items.forEach(channel => {
-    const logo = channel.logo ? ` logo="${channel.logo}"` : ''
-    const xmltv_id = channel.xmltv_id || ''
-    const lang = channel.lang || ''
-    const site_id = channel.site_id || ''
-    output += `    <channel lang="${lang}" xmltv_id="${escapeString(
-      xmltv_id
-    )}" site_id="${site_id}"${logo}>${escapeString(channel.name)}</channel>\r\n`
-  })
+  constructor(items: Collection, site: string) {
+    this.items = items
+    this.site = site
+  }
 
-  output += `  </channels>\r\n</site>\r\n`
+  toString() {
+    let output = '<?xml version="1.0" encoding="UTF-8"?>\r\n<channels>\r\n'
 
-  return output
+    this.items.forEach((channel: Channel) => {
+      const logo = channel.logo ? ` logo="${channel.logo}"` : ''
+      const xmltv_id = channel.xmltv_id || ''
+      const lang = channel.lang || ''
+      const site_id = channel.site_id || ''
+      output += `  <channel site="${this.site}" lang="${lang}" xmltv_id="${escapeString(
+        xmltv_id
+      )}" site_id="${site_id}"${logo}>${escapeString(channel.name)}</channel>\r\n`
+    })
+
+    output += '</channels>\r\n'
+
+    return output
+  }
 }
 
-function escapeString(string, defaultValue = '') {
-  if (!string) return defaultValue
+function escapeString(value: string, defaultValue: string = '') {
+  if (!value) return defaultValue
 
   const regex = new RegExp(
     '((?:[\0-\x08\x0B\f\x0E-\x1F\uFFFD\uFFFE\uFFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF]))|([\\x7F-\\x84]|[\\x86-\\x9F]|[\\uFDD0-\\uFDEF]|(?:\\uD83F[\\uDFFE\\uDFFF])|(?:\\uD87F[\\uDF' +
@@ -33,9 +44,9 @@ function escapeString(string, defaultValue = '') {
     'g'
   )
 
-  string = String(string || '').replace(regex, '')
+  value = String(value || '').replace(regex, '')
 
-  return string
+  return value
     .replace(/&/g, '&amp;')
     .replace(/</g, '&lt;')
     .replace(/>/g, '&gt;')
@@ -45,5 +56,3 @@ function escapeString(string, defaultValue = '') {
     .replace(/ +/g, ' ')
     .trim()
 }
-
-module.exports = xml
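A hedged usage sketch of the new XML class: given a Collection of channel-like objects (the array constructor and the field names are assumptions based on the code above), toString() now emits a flat <channels> list with the site recorded on each channel instead of a wrapping <site> element.

import { Collection } from '@freearhey/core'
import { XML } from '../../core' // assumed import path

const channels = new Collection([
  { lang: 'en', xmltv_id: 'Channel1.us', site_id: '140', name: 'Channel 1' } // hypothetical channel
])
const output = new XML(channels, 'example.com').toString()
// <?xml version="1.0" encoding="UTF-8"?>
// <channels>
//   <channel site="example.com" lang="en" xmltv_id="Channel1.us" site_id="140">Channel 1</channel>
// </channels>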
28 scripts/core/xmltv.ts Normal file
@@ -0,0 +1,28 @@
import { DateTime, Collection } from '@freearhey/core'
import { generateXMLTV } from 'epg-grabber'

type XMLTVProps = {
  channels: Collection
  programs: Collection
  date: DateTime
}

export class XMLTV {
  channels: Collection
  programs: Collection
  date: DateTime

  constructor({ channels, programs, date }: XMLTVProps) {
    this.channels = channels
    this.programs = programs
    this.date = date
  }

  toString() {
    return generateXMLTV({
      channels: this.channels.all(),
      programs: this.programs.all(),
      date: this.date.toJSON()
    })
  }
}
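A short sketch of the XMLTV wrapper in use: it forwards its collections to generateXMLTV from epg-grabber, so building a guide string looks roughly like this. The import path and the DateTime constructor argument are assumptions.

import { Collection, DateTime } from '@freearhey/core'
import { XMLTV } from '../../core' // assumed import path

const xmltv = new XMLTV({
  channels: new Collection([]), // channel objects from the grab step
  programs: new Collection([]), // program objects from the grab step
  date: new DateTime('2025-01-01') // assumed constructor format
})
const guide = xmltv.toString() // full XMLTV document as a string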
@@ -1,13 +0,0 @@
const { gzip, ungzip } = require('node-gzip')

const zip = {}

zip.compress = async function (string) {
  return gzip(string)
}

zip.decompress = async function (string) {
  return ungzip(string)
}

module.exports = zip
2 scripts/tmp/.gitignore vendored
@@ -1,2 +0,0 @@
*
!.gitignore
1 scripts/types/langs.d.ts vendored Normal file
@@ -0,0 +1 @@
declare module 'langs'