Merge branch 'patch-2025.01.7' into patch-2025.01.2

This commit is contained in:
freearhey 2025-02-01 03:25:54 +03:00
commit 7b008ccf71
106 changed files with 22862 additions and 11624 deletions

View file

@ -26,7 +26,7 @@ async function main() {
const logger = new Logger()
const file = new File(options.config)
const dir = file.dirname()
const config = (await import(pathToFileURL(options.config))).default
const config = (await import(pathToFileURL(options.config).toString())).default
const outputFilepath = options.output || `${dir}/${config.site}.channels.xml`
let channels = new Collection()

View file

@ -25,6 +25,7 @@ program
'DELAY'
)
)
.addOption(new Option('-x, --proxy <url>', 'Use the specified proxy').env('PROXY'))
.addOption(
new Option(
'--days <days>',
@ -55,6 +56,7 @@ export type GrabOptions = {
delay?: string
lang?: string
days?: number
proxy?: string
}
const options: GrabOptions = program.opts()

View file

@ -1,51 +1,64 @@
import { Channel } from 'epg-grabber'
import { Logger, Storage, Collection } from '@freearhey/core'
import { IssueLoader, HTMLTable, Markdown } from '../../core'
import { IssueLoader, HTMLTable, ChannelsParser } from '../../core'
import { Issue, Site } from '../../models'
import { SITES_DIR, DOT_SITES_DIR } from '../../constants'
import path from 'path'
import { SITES_DIR, ROOT_DIR } from '../../constants'
async function main() {
const logger = new Logger({ disabled: true })
const loader = new IssueLoader()
const storage = new Storage(SITES_DIR)
const sitesStorage = new Storage(SITES_DIR)
const channelsParser = new ChannelsParser({ storage: sitesStorage })
const sites = new Collection()
logger.info('loading list of sites')
const folders = await storage.list('*/')
const folders = await sitesStorage.list('*/')
logger.info('loading issues...')
const issues = await loadIssues(loader)
logger.info('putting the data together...')
folders.forEach((domain: string) => {
for (const domain of folders) {
const filteredIssues = issues.filter((issue: Issue) => domain === issue.data.get('site'))
const site = new Site({
domain,
issues: filteredIssues
})
const files = await sitesStorage.list(`${domain}/*.channels.xml`)
for (const filepath of files) {
const channels = await channelsParser.parse(filepath)
site.totalChannels += channels.count()
site.markedChannels += channels.filter((channel: Channel) => channel.xmltv_id).count()
}
sites.add(site)
})
}
logger.info('creating sites table...')
const data = new Collection()
sites.forEach((site: Site) => {
data.add([
`<a href="sites/${site.domain}">${site.domain}</a>`,
site.getStatus().emoji,
site.getIssues().all().join(', ')
{ value: `<a href="sites/${site.domain}">${site.domain}</a>` },
{ value: site.totalChannels, align: 'right' },
{ value: site.markedChannels, align: 'right' },
{ value: site.getStatus().emoji, align: 'center' },
{ value: site.getIssues().all().join(', ') }
])
})
const table = new HTMLTable(data.all(), [{ name: 'Site' }, { name: 'Status' }, { name: 'Notes' }])
const readmeStorage = new Storage(DOT_SITES_DIR)
await readmeStorage.save('_table.md', table.toString())
logger.info('updating sites.md...')
const configPath = path.join(DOT_SITES_DIR, 'config.json')
const sitesMarkdown = new Markdown(configPath)
sitesMarkdown.compile()
const table = new HTMLTable(data.all(), [
{ name: 'Site', align: 'left' },
{ name: 'Channels<br>(total / with xmltv-id)', colspan: 2, align: 'left' },
{ name: 'Status', align: 'left' },
{ name: 'Notes', align: 'left' }
])
const rootStorage = new Storage(ROOT_DIR)
const sitesTemplate = await new Storage().load('scripts/templates/_sites.md')
const sitesContent = sitesTemplate.replace('_TABLE_', table.toString())
await rootStorage.save('SITES.md', sitesContent)
}
main()

View file

@ -1,3 +1,4 @@
// Filesystem locations used across the scripts. Each path may be overridden
// through the environment variable of the same name; the fallback is a path
// relative to the repository root.
export const ROOT_DIR = process.env.ROOT_DIR || '.'
export const SITES_DIR = process.env.SITES_DIR || './sites'
export const GUIDES_DIR = process.env.GUIDES_DIR || './guides'
export const DATA_DIR = process.env.DATA_DIR || './temp/data'

View file

@ -7,7 +7,7 @@ export class ConfigLoader {
const fileUrl = pathToFileURL(filepath).toString()
const config = (await import(fileUrl)).default
const defaultConfig = {
days: 2,
days: 1,
delay: 0,
output: 'guide.xml',
request: {

View file

@ -1,8 +1,9 @@
import { EPGGrabber, GrabCallbackData, EPGGrabberMock, SiteConfig, Channel } from 'epg-grabber'
import { Logger, Collection } from '@freearhey/core'
import { Queue } from './'
import { Queue, ProxyParser } from './'
import { GrabOptions } from '../commands/epg/grab'
import { TaskQueue, PromisyClass } from 'cwait'
import { SocksProxyAgent } from 'socks-proxy-agent'
type GrabberProps = {
logger: Logger
@ -14,6 +15,7 @@ export class Grabber {
logger: Logger
queue: Queue
options: GrabOptions
grabber: EPGGrabber | EPGGrabberMock
constructor({ logger, queue, options }: GrabberProps) {
this.logger = logger
@ -23,6 +25,7 @@ export class Grabber {
}
async grab(): Promise<{ channels: Collection; programs: Collection }> {
const proxyParser = new ProxyParser()
const taskQueue = new TaskQueue(Promise as PromisyClass, this.options.maxConnections)
const total = this.queue.size()
@ -49,6 +52,24 @@ export class Grabber {
config.delay = delay
}
if (this.options.proxy !== undefined) {
const proxy = proxyParser.parse(this.options.proxy)
if (
proxy.protocol &&
['socks', 'socks5', 'socks5h', 'socks4', 'socks4a'].includes(String(proxy.protocol))
) {
const socksProxyAgent = new SocksProxyAgent(this.options.proxy)
config.request = {
...config.request,
...{ httpAgent: socksProxyAgent, httpsAgent: socksProxyAgent }
}
} else {
config.request = { ...config.request, ...{ proxy } }
}
}
const _programs = await this.grabber.grab(
channel,
date,

View file

@ -2,9 +2,15 @@ type Column = {
name: string
nowrap?: boolean
align?: string
colspan?: number
}
type DataItem = string[]
type DataItem = {
value: string
nowrap?: boolean
align?: string
colspan?: number
}[]
export class HTMLTable {
data: DataItem[]
@ -20,20 +26,23 @@ export class HTMLTable {
output += ' <thead>\r\n <tr>'
for (const column of this.columns) {
output += `<th align="left">${column.name}</th>`
const nowrap = column.nowrap ? ' nowrap' : ''
const align = column.align ? ` align="${column.align}"` : ''
const colspan = column.colspan ? ` colspan="${column.colspan}"` : ''
output += `<th${align}${nowrap}${colspan}>${column.name}</th>`
}
output += '</tr>\r\n </thead>\r\n'
output += ' <tbody>\r\n'
for (const item of this.data) {
for (const row of this.data) {
output += ' <tr>'
let i = 0
for (const prop in item) {
const column = this.columns[i]
const nowrap = column.nowrap ? ' nowrap' : ''
const align = column.align ? ` align="${column.align}"` : ''
output += `<td${align}${nowrap}>${item[prop]}</td>`
i++
for (const item of row) {
const nowrap = item.nowrap ? ' nowrap' : ''
const align = item.align ? ` align="${item.align}"` : ''
const colspan = item.colspan ? ` colspan="${item.colspan}"` : ''
output += `<td${align}${nowrap}${colspan}>${item.value}</td>`
}
output += '</tr>\r\n'
}

View file

@ -13,4 +13,4 @@ export * from './queueCreator'
export * from './issueLoader'
export * from './issueParser'
export * from './htmlTable'
export * from './markdown'
export * from './proxyParser'

View file

@ -1,13 +0,0 @@
import markdownInclude from 'markdown-include'
/**
 * Thin wrapper around the `markdown-include` package: compiles a markdown
 * file, resolving any `markdown-include` directives it contains.
 */
export class Markdown {
// Path handed to markdown-include's compileFiles — presumably a
// markdown-include config file; TODO confirm against callers.
filepath: string
constructor(filepath: string) {
this.filepath = filepath
}
/** Compile the configured file in place via markdown-include. */
compile() {
markdownInclude.compileFiles(this.filepath)
}
}

View file

@ -0,0 +1,27 @@
import { URL } from 'node:url'
/** Components extracted from a proxy URL. Absent parts are `null`. */
type ProxyParserResult = {
  protocol: string | null
  auth: {
    username: string | null
    password: string | null
  }
  host: string
  port: number | null
}

/**
 * Parses a proxy URL string (e.g. `socks5://user:pass@127.0.0.1:1080`)
 * into its individual components.
 */
export class ProxyParser {
  /**
   * @param _url absolute proxy URL; an invalid URL makes `new URL` throw a TypeError
   * @returns parsed components; `port` is `null` when not present in the URL
   *          (NOTE(review): the WHATWG URL parser also normalizes a special
   *          scheme's default port — e.g. `http://x:80` — to an empty port)
   */
  parse(_url: string): ProxyParserResult {
    const parsed = new URL(_url)
    return {
      // URL.protocol carries a trailing ':' — strip it
      protocol: parsed.protocol.replace(':', '') || null,
      auth: {
        // URL yields '' for absent credentials; map that to null
        username: parsed.username || null,
        password: parsed.password || null
      },
      host: parsed.hostname,
      // explicit radix-10 guards against legacy parseInt surprises
      port: parsed.port ? Number.parseInt(parsed.port, 10) : null
    }
  }
}

View file

@ -43,12 +43,14 @@ export class QueueCreator {
const config: SiteConfig = await this.configLoader.load(configPath)
if (channel.xmltv_id) {
const found: ApiChannel = channels.first(
(_channel: ApiChannel) => _channel.id === channel.xmltv_id
)
if (found) {
channel.icon = found.logo
channel.name = found.name
if (!channel.icon) {
const found: ApiChannel = channels.first(
(_channel: ApiChannel) => _channel.id === channel.xmltv_id
)
if (found) {
channel.icon = found.logo
}
}
} else {
channel.xmltv_id = channel.site_id

View file

@ -14,15 +14,21 @@ type Status = {
// Constructor arguments for Site.
type SiteProps = {
domain: string
totalChannels?: number // optional; the Site constructor defaults it to 0
markedChannels?: number // optional; the Site constructor defaults it to 0
issues: Collection
}
export class Site {
domain: string
totalChannels: number
markedChannels: number
issues: Collection
constructor({ domain, issues }: SiteProps) {
constructor({ domain, totalChannels = 0, markedChannels = 0, issues }: SiteProps) {
this.domain = domain
this.totalChannels = totalChannels
this.markedChannels = markedChannels
this.issues = issues
}

View file

@ -0,0 +1,3 @@
# Sites
_TABLE_

View file

@ -6,7 +6,7 @@ dayjs.extend(customParseFormat)
dayjs.extend(utc)
const date = dayjs.utc('2025-01-12', 'YYYY-MM-DD').startOf('d')
const channel = { site_id: 'bbc1', xmltv_id: 'BBCOne.uk' }
const channel = { site_id: 'bbc1' }
it('can generate valid url', () => {
expect(url({ channel, date })).toBe('https://example.com/api/bbc1/2025-01-12')
@ -32,11 +32,7 @@ it('can parse response', () => {
})
it('can handle empty guide', () => {
const result = parser({
date,
channel,
content: ''
})
const results = parser({ content: '' })
expect(result).toMatchObject([])
expect(results).toMatchObject([])
})