mirror of https://github.com/iptv-org/epg.git
synced 2025-05-09 08:30:06 -04:00

Merge branch 'master' into patch-2025.01.2

This commit is contained in:
commit 979db51d7e

182 changed files with 7599 additions and 8012 deletions

@@ -1,51 +1,51 @@
 import { Logger, Storage, Collection } from '@freearhey/core'
 import { ChannelsParser } from '../../core'
 import path from 'path'
 import { SITES_DIR, API_DIR } from '../../constants'
 import { Channel } from 'epg-grabber'

 type OutputItem = {
   channel: string | null
   site: string
   site_id: string
   site_name: string
   lang: string
 }

 async function main() {
   const logger = new Logger()

   logger.start('staring...')

   logger.info('loading channels...')
   const sitesStorage = new Storage(SITES_DIR)
   const parser = new ChannelsParser({ storage: sitesStorage })

   let files: string[] = []
   files = await sitesStorage.list('**/*.channels.xml')

   let parsedChannels = new Collection()
   for (const filepath of files) {
     parsedChannels = parsedChannels.concat(await parser.parse(filepath))
   }

   logger.info(` found ${parsedChannels.count()} channel(s)`)

   const output = parsedChannels.map((channel: Channel): OutputItem => {
     return {
       channel: channel.xmltv_id || null,
       site: channel.site || '',
       site_id: channel.site_id || '',
       site_name: channel.name,
       lang: channel.lang || ''
     }
   })

   const apiStorage = new Storage(API_DIR)
   const outputFilename = 'guides.json'
   await apiStorage.save('guides.json', output.toJSON())

   logger.info(`saved to "${path.join(API_DIR, outputFilename)}"`)
 }

 main()
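
For context, this script flattens every *.channels.xml file under SITES_DIR into a single guides.json for the API, and each OutputItem keys a site-specific listing to its xmltv_id (or null when the channel is unmatched). A hypothetical entry, with all values invented for illustration:

  const sample: OutputItem = {
    channel: 'BBCOne.uk',  // xmltv_id, or null if unmatched
    site: 'example.com',   // site the listing comes from
    site_id: 'bbc-one',    // channel ID within that site
    site_name: 'BBC One',  // display name as the site spells it
    lang: 'en'
  }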

@@ -43,7 +43,7 @@ async function main() {
   const channelsIndex = sj.createIndex(channelsContent)

   const buffer = new Dictionary()
-  for (let option of options.all()) {
+  for (const option of options.all()) {
     const channel: Channel = option.channel
-    if (channel.xmltv_id) {
+    if (channel.xmltv_id !== '-') {
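
Besides the let-to-const cleanup, the guard changes meaning: before, any truthy xmltv_id passed, so the placeholder '-' slipped through while empty IDs were skipped; now '-' is the one value excluded. Assuming '-' marks channels deliberately left unmatched, a minimal sketch of the difference:

  const acceptedBefore = (id: string) => Boolean(id) // '' → false, '-' → true
  const acceptedAfter = (id: string) => id !== '-'   // '' → true,  '-' → false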

@@ -150,7 +150,7 @@ function getOptions(channelsIndex, channel: Channel) {
   const query = channel.name
     .replace(/\s(SD|TV|HD|SD\/HD|HDTV)$/i, '')
     .replace(/(\(|\)|,)/gi, '')
-    .replace(/\-/gi, ' ')
+    .replace(/-/gi, ' ')
     .replace(/\+/gi, '')
   const similar = channelsIndex.search(query).map(item => new ApiChannel(item))
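
This one is purely cosmetic: outside a character class, - has no special meaning, so /\-/gi and /-/gi compile to the same pattern (the i flag is likewise a no-op on a pattern with no letters). For example, with an invented channel name:

  '4-Seven (UK)'.replace(/-/gi, ' ') // '4 Seven (UK)'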

@@ -1,7 +1,7 @@
 import chalk from 'chalk'
 import libxml, { ValidationError } from 'libxmljs2'
 import { program } from 'commander'
-import { Logger, Storage, File } from '@freearhey/core'
+import { Storage, File } from '@freearhey/core'

 const xsd = `<?xml version="1.0" encoding="UTF-8"?>
 <xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema" elementFormDefault="qualified">

@@ -23,26 +23,14 @@ const xsd = `<?xml version="1.0" encoding="UTF-8"?>
   </xs:element>
 </xs:schema>`

-program
-  .option(
-    '-c, --channels <path>',
-    'Path to channels.xml file to validate',
-    'sites/**/*.channels.xml'
-  )
-  .parse(process.argv)
-
-const options = program.opts()
+program.argument('[filepath]', 'Path to *.channels.xml files to validate').parse(process.argv)

 async function main() {
-  const logger = new Logger()
   const storage = new Storage()

-  logger.info('options:')
-  logger.tree(options)
-
   let errors: ValidationError[] = []

-  const files: string[] = await storage.list(options.channels)
+  const files = program.args.length ? program.args : await storage.list('sites/**/*.channels.xml')
   for (const filepath of files) {
     const file = new File(filepath)
     if (file.extension() !== 'xml') continue
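
The CLI surface changes from a --channels option to positional arguments: program.args carries whatever paths were passed, and the script falls back to the old default glob when it is empty. A standalone sketch of the same commander pattern (default glob copied from the diff, example file path invented):

  import { program } from 'commander'

  program.argument('[filepath]', 'Path to *.channels.xml files to validate').parse(process.argv)

  // `validate sites/example.com/example.com.channels.xml` checks that one file;
  // no arguments means every channels.xml under sites/ is checked
  const files: string[] = program.args.length ? program.args : ['sites/**/*.channels.xml']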

@@ -51,11 +39,15 @@ async function main() {

     let localErrors: ValidationError[] = []

-    const xsdDoc = libxml.parseXml(xsd)
-    const doc = libxml.parseXml(xml)
+    try {
+      const xsdDoc = libxml.parseXml(xsd)
+      const doc = libxml.parseXml(xml)

-    if (!doc.validate(xsdDoc)) {
-      localErrors = doc.validationErrors
-    }
+      if (!doc.validate(xsdDoc)) {
+        localErrors = doc.validationErrors
+      }
+    } catch (error) {
+      localErrors.push(error)
+    }

     if (localErrors.length) {
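
The try/catch matters because libxmljs2's parseXml throws on XML that is not well-formed, while validate() merely returns false for schema violations; before this change a single malformed channels.xml aborted the whole run instead of being reported as a local error. A minimal sketch of the distinction (mismatched tag invented for illustration):

  import libxml from 'libxmljs2'

  try {
    libxml.parseXml('<channels><channel></channels>') // not well-formed: throws
  } catch (error) {
    console.log('parse error:', (error as Error).message)
  }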

@@ -2,7 +2,7 @@ import { Logger, File, Collection, Storage } from '@freearhey/core'
 import { ChannelsParser, XML } from '../../core'
 import { Channel } from 'epg-grabber'
 import { Command } from 'commander'
-import path from 'path'
+import { pathToFileURL } from 'node:url'

 const program = new Command()
 program

@@ -26,7 +26,7 @@ async function main() {
   const logger = new Logger()
   const file = new File(options.config)
   const dir = file.dirname()
-  const config = require(path.resolve(options.config))
+  const config = (await import(pathToFileURL(options.config))).default
   const outputFilepath = options.output || `${dir}/${config.site}.channels.xml`

   let channels = new Collection()
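
Swapping require() for a dynamic import() is the usual step when a script moves from CommonJS to ES modules: pathToFileURL turns the config path into a file:// URL that import() resolves reliably (an absolute Windows path would otherwise be misparsed as a URL), and the config's exports surface under .default. A standalone sketch, with the config path invented:

  import { pathToFileURL } from 'node:url'

  const configPath = 'sites/example.com/example.com.config.js' // hypothetical
  const config = (await import(pathToFileURL(configPath).href)).default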

@@ -47,7 +47,6 @@ async function main() {
   const parsedChannels = await parser.parse(filepath)

   const bufferById = new Dictionary()
-  const bufferBySiteId = new Dictionary()
   const errors: ValidationError[] = []
   parsedChannels.forEach((channel: Channel) => {

@@ -1,58 +1,58 @@
-import { Logger, Storage, Collection, Dictionary } from '@freearhey/core'
+import { Logger, Storage, Collection } from '@freearhey/core'
 import { IssueLoader, HTMLTable, Markdown } from '../../core'
 import { Issue, Site } from '../../models'
 import { SITES_DIR, DOT_SITES_DIR } from '../../constants'
 import path from 'path'

 async function main() {
   const logger = new Logger({ disabled: true })
   const loader = new IssueLoader()
   const storage = new Storage(SITES_DIR)
   const sites = new Collection()

   logger.info('loading list of sites')
   const folders = await storage.list('*/')

   logger.info('loading issues...')
   const issues = await loadIssues(loader)

   logger.info('putting the data together...')
   folders.forEach((domain: string) => {
     const filteredIssues = issues.filter((issue: Issue) => domain === issue.data.get('site'))
     const site = new Site({
       domain,
       issues: filteredIssues
     })

     sites.add(site)
   })

   logger.info('creating sites table...')
-  let data = new Collection()
+  const data = new Collection()
   sites.forEach((site: Site) => {
     data.add([
       `<a href="sites/${site.domain}">${site.domain}</a>`,
       site.getStatus().emoji,
       site.getIssues().all().join(', ')
     ])
   })

   const table = new HTMLTable(data.all(), [{ name: 'Site' }, { name: 'Status' }, { name: 'Notes' }])

   const readmeStorage = new Storage(DOT_SITES_DIR)
   await readmeStorage.save('_table.md', table.toString())

   logger.info('updating sites.md...')
   const configPath = path.join(DOT_SITES_DIR, 'config.json')
   const sitesMarkdown = new Markdown(configPath)
   sitesMarkdown.compile()
 }

 main()

 async function loadIssues(loader: IssueLoader) {
   const issuesWithStatusWarning = await loader.load({ labels: ['broken guide', 'status:warning'] })
   const issuesWithStatusDown = await loader.load({ labels: ['broken guide', 'status:down'] })

   return issuesWithStatusWarning.concat(issuesWithStatusDown)
 }
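
The changes in this last file are mechanical cleanups: the unused Dictionary import goes away, and data becomes const since the binding is never reassigned; the Collection is still mutated in place through add(), which const permits:

  const data = new Collection()
  data.add(['row'])           // fine: mutation, not reassignment
  // data = new Collection()  // would now be a compile-time error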