Merge branch 'patch-1'

Aleksandr Statciuk 2022-02-28 21:20:34 +03:00
commit 73c9c005e5
52 changed files with 670 additions and 572 deletions

View file

@@ -10,12 +10,22 @@ jobs:
- uses: actions/checkout@v2
with:
fetch-depth: 2
- name: Download channels from API
- name: Download data from API
run: |
mkdir -p scripts/data
curl -L -o scripts/data/channels.json https://iptv-org.github.io/api/channels.json
- id: files
uses: jitterbit/get-changed-files@v1
- run: npm install
- run: npm run lint -- ${{ steps.files.outputs.added_modified }}
- run: npm run validate -- ${{ steps.files.outputs.added_modified }}
- uses: actions/setup-node@v2
if: ${{ !env.ACT && steps.files.outputs.any_changed == 'true' }}
with:
node-version: '14'
cache: 'npm'
- uses: tj-actions/changed-files@v12.2
id: files
with:
files: 'sites'
- name: validate
if: steps.files.outputs.any_changed == 'true'
run: |
npm install
npm run channels:lint -- ${{ steps.files.outputs.all_changed_files }}
npm run channels:validate -- ${{ steps.files.outputs.all_changed_files }}

View file

@@ -14,7 +14,6 @@ jobs:
load:
runs-on: ubuntu-latest
steps:
- run: echo running on branch ${GITHUB_REF##*/}
- uses: actions/checkout@v2
- name: Download data from API
run: |
@@ -22,10 +21,10 @@ jobs:
curl -L -o scripts/data/channels.json https://iptv-org.github.io/api/channels.json
- uses: FedericoCarboni/setup-ffmpeg@v1
- run: npm install
- run: CHANNELS_PATH=sites/${{inputs.site}}/*.channels.xml node scripts/commands/create-queue.js --max-clusters=1 --days=2
- run: NODE_OPTIONS=--insecure-http-parser node scripts/commands/load-cluster.js --timeout=30000 --cluster-id=1
- run: node scripts/commands/save-results.js
- run: node scripts/commands/update-guides.js
- run: CHANNELS_PATH=sites/${{inputs.site}}/*.channels.xml npm run queue:create -- --max-clusters=1 --days=2
- run: NODE_OPTIONS=--insecure-http-parser npm run cluster:load -- --timeout=30000 --cluster-id=1
- run: npm run programs:save
- run: npm run guides:update
- uses: tibdex/github-app-token@v1
if: ${{ !env.ACT }}
id: create-app-token

View file

@@ -9,7 +9,7 @@ jobs:
steps:
- uses: actions/checkout@v2
- run: npm install
- run: node scripts/commands/update-api.js
- run: npm run api:update
- uses: tibdex/github-app-token@v1
if: ${{ !env.ACT }}
id: create-app-token

.github/workflows/_update-readme.yml vendored Normal file
View file

@@ -0,0 +1,52 @@
name: _update-readme
on:
workflow_dispatch:
schedule:
- cron: '0 12 * * *'
jobs:
check:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- run: echo "::set-output name=branch_name::$(date +'bot/auto-update-%s')"
id: create-branch-name
- run: git config user.name 'iptv-bot[bot]'
- run: git config user.email '84861620+iptv-bot[bot]@users.noreply.github.com'
- run: git checkout -b ${{ steps.create-branch-name.outputs.branch_name }}
- name: Download data from API
run: |
mkdir -p scripts/data
curl -L -o scripts/data/countries.json https://iptv-org.github.io/api/countries.json
- run: npm install
- run: npm run readme:update
- name: Commit Changes
if: ${{ !env.ACT }}
run: |
git add README.md
git commit -m "[Bot] Update README.md"
git status
git push -u origin ${{ steps.create-branch-name.outputs.branch_name }}
- uses: tibdex/github-app-token@v1
if: ${{ !env.ACT }}
id: create-app-token
with:
app_id: ${{ secrets.APP_ID }}
private_key: ${{ secrets.APP_PRIVATE_KEY }}
- uses: repo-sync/pull-request@v2
if: ${{ !env.ACT && github.ref == 'refs/heads/master' }}
id: pull-request
with:
github_token: ${{ steps.create-app-token.outputs.token }}
source_branch: ${{ steps.create-branch-name.outputs.branch_name }}
destination_branch: 'master'
pr_title: '[Bot] Daily update'
pr_body: |
This pull request is created via the [update-readme][1] workflow.
[1]: https://github.com/iptv-org/epg/actions/runs/${{ github.run_id }}
- uses: juliangruber/merge-pull-request-action@v1
if: ${{ !env.ACT && github.ref == 'refs/heads/master' }}
with:
github-token: ${{ secrets.PAT }}
number: ${{ steps.pull-request.outputs.pr_number }}
method: squash

View file

@@ -17,10 +17,6 @@ The API documentation can be found in the [iptv-org/api](https://github.com/iptv
Links to other useful IPTV-related resources can be found in the [iptv-org/awesome-iptv](https://github.com/iptv-org/awesome-iptv) repository.
## API
The API documentation can be found in the [iptv-org/api](https://github.com/iptv-org/api) repository.
## Contribution
If you find a bug or want to contribute to the code or documentation, you can help by submitting an [issue](https://github.com/iptv-org/epg/issues) or a [pull request](https://github.com/iptv-org/epg/pulls).

package-lock.json generated
View file

@@ -16,6 +16,7 @@
"epg-grabber": "^0.20.0",
"epg-parser": "^0.1.6",
"form-data": "^4.0.0",
"fs-extra": "^10.0.1",
"glob": "^7.2.0",
"iconv-lite": "^0.4.24",
"jest": "^27.3.1",
@@ -2377,6 +2378,27 @@
"node": ">= 6"
}
},
"node_modules/fs-extra": {
"version": "10.0.1",
"resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-10.0.1.tgz",
"integrity": "sha512-NbdoVMZso2Lsrn/QwLXOy6rm0ufY2zEOKCDzJR/0kBsb0E6qed0P3iYK+Ath3BfvXEeu4JhEtXLgILx5psUfag==",
"dependencies": {
"graceful-fs": "^4.2.0",
"jsonfile": "^6.0.1",
"universalify": "^2.0.0"
},
"engines": {
"node": ">=12"
}
},
"node_modules/fs-extra/node_modules/universalify": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz",
"integrity": "sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==",
"engines": {
"node": ">= 10.0.0"
}
},
"node_modules/fs-minipass": {
"version": "1.2.7",
"resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-1.2.7.tgz",
@@ -3614,6 +3636,25 @@
"node": ">=6"
}
},
"node_modules/jsonfile": {
"version": "6.1.0",
"resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz",
"integrity": "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==",
"dependencies": {
"universalify": "^2.0.0"
},
"optionalDependencies": {
"graceful-fs": "^4.1.6"
}
},
"node_modules/jsonfile/node_modules/universalify": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz",
"integrity": "sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==",
"engines": {
"node": ">= 10.0.0"
}
},
"node_modules/keyv": {
"version": "4.0.4",
"resolved": "https://registry.npmjs.org/keyv/-/keyv-4.0.4.tgz",
@@ -7382,6 +7423,23 @@
"mime-types": "^2.1.12"
}
},
"fs-extra": {
"version": "10.0.1",
"resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-10.0.1.tgz",
"integrity": "sha512-NbdoVMZso2Lsrn/QwLXOy6rm0ufY2zEOKCDzJR/0kBsb0E6qed0P3iYK+Ath3BfvXEeu4JhEtXLgILx5psUfag==",
"requires": {
"graceful-fs": "^4.2.0",
"jsonfile": "^6.0.1",
"universalify": "^2.0.0"
},
"dependencies": {
"universalify": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz",
"integrity": "sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ=="
}
}
},
"fs-minipass": {
"version": "1.2.7",
"resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-1.2.7.tgz",
@@ -8308,6 +8366,22 @@
"minimist": "^1.2.5"
}
},
"jsonfile": {
"version": "6.1.0",
"resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz",
"integrity": "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==",
"requires": {
"graceful-fs": "^4.1.6",
"universalify": "^2.0.0"
},
"dependencies": {
"universalify": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz",
"integrity": "sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ=="
}
}
},
"keyv": {
"version": "4.0.4",
"resolved": "https://registry.npmjs.org/keyv/-/keyv-4.0.4.tgz",

View file

@@ -1,13 +1,21 @@
{
"name": "epg",
"scripts": {
"lint": "node scripts/commands/lint.js",
"validate": "node scripts/commands/validate.js",
"channels:validate": "node scripts/commands/channels/validate.js",
"channels:lint": "node scripts/commands/channels/lint.js",
"channels:parse": "node scripts/commands/channels/parse.js",
"queue:create": "node scripts/commands/queue/create.js",
"cluster:load": "node scripts/commands/cluster/load.js",
"programs:save": "node scripts/commands/programs/save.js",
"guides:update": "node scripts/commands/guides/update.js",
"api:update": "node scripts/commands/api/update.js",
"readme:update": "node scripts/commands/readme/update.js",
"test": "npx jest --runInBand",
"test:commands": "npx jest --runInBand -- commands",
"test:sites": "npx jest --runInBand -- sites",
"act": "act workflow_dispatch",
"update-readme": "node scripts/commands/update-readme.js"
"act:check": "act workflow_dispatch -W .github/workflows/_check.yml",
"act:update-readme": "act workflow_dispatch -W .github/workflows/_update-readme.yml",
"act:update-api": "act workflow_dispatch -W .github/workflows/_update-api.yml"
},
"private": true,
"author": "Arhey",
@@ -25,6 +33,7 @@
"epg-grabber": "^0.20.0",
"epg-parser": "^0.1.6",
"form-data": "^4.0.0",
"fs-extra": "^10.0.1",
"glob": "^7.2.0",
"iconv-lite": "^0.4.24",
"jest": "^27.3.1",

View file

@@ -0,0 +1,42 @@
const { file, parser, logger } = require('../../core')
const { program } = require('commander')
const _ = require('lodash')
const CHANNELS_PATH = process.env.CHANNELS_PATH || 'sites/**/*.channels.xml'
const OUTPUT_DIR = process.env.OUTPUT_DIR || '.gh-pages/api'
async function main() {
let guides = []
try {
const files = await file.list(CHANNELS_PATH)
for (const filepath of files) {
const { site, channels } = await parser.parseChannels(filepath)
const dir = file.dirname(filepath)
const config = require(file.resolve(`${dir}/${site}.config.js`))
if (config.ignore) continue
const filename = file.basename(filepath)
const [__, suffix] = filename.match(/\_(.*)\.channels\.xml$/) || [null, null]
for (const channel of channels) {
guides.push({
channel: channel.xmltv_id,
site,
lang: channel.lang,
url: `https://iptv-org.github.io/epg/guides/${suffix}/${site}.epg.xml`
})
}
}
} catch (err) {
console.error(err)
}
guides = _.sortBy(guides, 'channel')
const outputFilepath = `${OUTPUT_DIR}/guides.json`
await file.create(outputFilepath, JSON.stringify(guides))
logger.info(`saved to "${outputFilepath}"...`)
}
main()
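A minimal sketch of the suffix extraction above, assuming a hypothetical input file named like the example.com_ca fixture used in the tests further down:

// The regex captures the part between '_' and '.channels.xml':
const filename = 'example.com_ca.channels.xml' // hypothetical input
const [__, suffix] = filename.match(/\_(.*)\.channels\.xml$/) || [null, null]
// suffix === 'ca', so the generated guide URL becomes
// https://iptv-org.github.io/epg/guides/ca/example.com.epg.xml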

View file

@@ -1,7 +1,7 @@
const chalk = require('chalk')
const libxml = require('libxmljs')
const { program } = require('commander')
const { logger, file } = require('../core')
const { logger, file } = require('../../core')
const xsd = `<?xml version="1.0" encoding="UTF-8"?>
<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema" elementFormDefault="qualified">

View file

@@ -0,0 +1,43 @@
const { logger, file, xml } = require('../../core')
const { Command } = require('commander')
const path = require('path')
const _ = require('lodash')
const program = new Command()
program
.requiredOption('-c, --config <config>', 'Config file')
.option('-s, --set [args...]', 'Set custom arguments', [])
.option('-o, --output <output>', 'Output file')
.parse(process.argv)
const options = program.opts()
async function main() {
const config = require(path.resolve(options.config))
const args = {}
options.set.forEach(arg => {
const [key, value] = arg.split(':')
args[key] = value
})
let channels = config.channels(args)
if (isPromise(channels)) {
channels = await channels
}
channels = _.sortBy(channels, 'xmltv_id')
const dir = file.dirname(options.config)
const outputFilepath = options.output || `${dir}/${config.site}.channels.xml`
const output = xml.create(channels, config.site)
await file.write(outputFilepath, output)
logger.info(`File '${outputFilepath}' successfully saved`)
}
main()
function isPromise(promise) {
return !!promise && typeof promise.then === 'function'
}
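The isPromise() check above lets a site config return channels either synchronously or as a Promise. A sketch of an accepted config (hypothetical site and values, modeled on the parse-channels.config.js fixture further down):

// Hypothetical config: channels() may return an array or a Promise of one
module.exports = {
  site: 'example.com',
  async channels() {
    return [{ lang: 'en', xmltv_id: 'Example.us', site_id: '1', name: 'Example' }]
  }
}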

View file

@@ -1,4 +1,4 @@
const { parser, logger, api } = require('../core')
const { parser, logger, api } = require('../../core')
const { program } = require('commander')
const chalk = require('chalk')

View file

@@ -1,7 +1,7 @@
const _ = require('lodash')
const grabber = require('epg-grabber')
const { program } = require('commander')
const { db, logger, timer, file, parser } = require('../core')
const { db, logger, timer, file, parser } = require('../../core')
const options = program
.requiredOption('-c, --cluster-id <cluster-id>', 'The ID of cluster to load', parser.parseNumber)
@@ -16,7 +16,7 @@ const options = program
.opts()
const LOGS_DIR = process.env.LOGS_DIR || 'scripts/logs'
const CLUSTER_PATH = `${LOGS_DIR}/load-cluster/cluster_${options.clusterId}.log`
const CLUSTER_PATH = `${LOGS_DIR}/cluster/load/cluster_${options.clusterId}.log`
async function main() {
logger.info('Starting...')

View file

@@ -1,16 +0,0 @@
const { logger, db } = require('../core')
async function main() {
await db.queue.load()
const docs = await db.queue.find({}).sort({ cluster_id: 1 })
const cluster_id = docs.reduce((acc, curr) => {
if (!acc.includes(curr.cluster_id)) acc.push(curr.cluster_id)
return acc
}, [])
const matrix = { cluster_id }
const output = `::set-output name=matrix::${JSON.stringify(matrix)}`
logger.info(output)
}
main()

View file

@@ -0,0 +1,76 @@
const { db, logger, file, api } = require('../../core')
const grabber = require('epg-grabber')
const _ = require('lodash')
const PUBLIC_DIR = process.env.PUBLIC_DIR || '.gh-pages'
async function main() {
logger.info(`Generating guides/...`)
logger.info('Loading "database/programs.db"...')
await db.programs.load()
await api.channels.load()
const grouped = groupByGroup(await loadQueue())
for (const key in grouped) {
let channels = {}
let programs = []
for (const item of grouped[key]) {
if (item.error) continue
const itemPrograms = await loadProgramsForItem(item)
programs = programs.concat(itemPrograms)
if (channels[item.channel.xmltv_id]) continue
const channel = api.channels.find({ id: item.channel.xmltv_id })
if (channel) {
channels[channel.id] = {
xmltv_id: channel.id,
name: item.channel.display_name,
logo: channel.logo,
site: item.channel.site
}
}
}
channels = Object.values(channels)
channels = _.sortBy(channels, 'xmltv_id')
programs = _.sortBy(programs, ['channel', 'start'])
const filepath = `${PUBLIC_DIR}/guides/${key}.epg.xml`
logger.info(`Creating "${filepath}"...`)
const output = grabber.convertToXMLTV({ channels, programs })
await file.create(filepath, output)
}
logger.info(`Done`)
}
main()
function groupByGroup(items = []) {
const groups = {}
items.forEach(item => {
item.groups.forEach(key => {
if (!groups[key]) {
groups[key] = []
}
groups[key].push(item)
})
})
return groups
}
async function loadQueue() {
logger.info('Loading queue...')
await db.queue.load()
return await db.queue.find({}).sort({ xmltv_id: 1 })
}
async function loadProgramsForItem(item) {
return await db.programs.find({ _qid: item._id }).sort({ channel: 1, start: 1 })
}
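A small sketch of the shape groupByGroup() operates on (hypothetical queue items; the group keys match the guides generated in the tests further down):

// Each queue item lists every group it belongs to:
const items = [
  { channel: { xmltv_id: 'A.fr' }, groups: ['fr/chaines-tv.orange.fr'] },
  { channel: { xmltv_id: 'B.zw' }, groups: ['zw/dstv.com'] }
]
// groupByGroup(items) returns:
// { 'fr/chaines-tv.orange.fr': [items[0]], 'zw/dstv.com': [items[1]] }
// and each key then maps to `${PUBLIC_DIR}/guides/${key}.epg.xml`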

View file

@@ -1,4 +1,4 @@
const { db, logger, file, parser } = require('../core')
const { db, logger, file, parser } = require('../../core')
const _ = require('lodash')
const LOGS_DIR = process.env.LOGS_DIR || 'scripts/logs'
@@ -7,7 +7,7 @@ async function main() {
await db.queue.load()
await db.programs.load()
await db.programs.reset()
const files = await file.list(`${LOGS_DIR}/load-cluster/cluster_*.log`)
const files = await file.list(`${LOGS_DIR}/cluster/load/cluster_*.log`)
for (const filepath of files) {
logger.info(`Parsing "${filepath}"...`)
const results = await parser.parseLogs(filepath)

View file

@@ -1,4 +1,4 @@
const { db, file, parser, logger, date, api } = require('../core')
const { db, file, parser, logger, date, api } = require('../../core')
const { program } = require('commander')
const _ = require('lodash')
@@ -37,6 +37,7 @@ async function createQueue() {
const utcDate = date.getUTC()
const dates = Array.from({ length: options.days }, (_, i) => utcDate.add(i, 'd'))
for (const filepath of files) {
try {
const dir = file.dirname(filepath)
const { site, channels: items } = await parser.parseChannels(filepath)
if (!site) continue
@@ -85,6 +86,9 @@ async function createQueue() {
}
}
}
} catch (err) {
console.error(err)
}
}
queue = Object.values(queue)

View file

@@ -1,4 +1,4 @@
const { file, markdown, parser, logger, api, table } = require('../core')
const { file, markdown, parser, logger, api, table } = require('../../core')
const { program } = require('commander')
const _ = require('lodash')
@@ -10,9 +10,11 @@ const options = program
.opts()
async function main() {
const items = []
try {
await api.countries.load()
const files = await file.list(CHANNELS_PATH)
const items = []
for (const filepath of files) {
const { site, channels } = await parser.parseChannels(filepath)
const dir = file.dirname(filepath)
@@ -30,6 +32,9 @@ async function main() {
group: `${suffix}/${site}`
})
}
} catch (err) {
console.error(err)
}
await generateCountriesTable(items)
await updateReadme()

View file

@@ -1,37 +0,0 @@
const { file, parser, logger } = require('../core')
const { program } = require('commander')
const _ = require('lodash')
const CHANNELS_PATH = process.env.CHANNELS_PATH || 'sites/**/*.channels.xml'
const OUTPUT_DIR = process.env.OUTPUT_DIR || '.gh-pages/api'
async function main() {
const files = await file.list(CHANNELS_PATH)
let guides = []
for (const filepath of files) {
const { site, channels } = await parser.parseChannels(filepath)
const dir = file.dirname(filepath)
const config = require(file.resolve(`${dir}/${site}.config.js`))
if (config.ignore) continue
const filename = file.basename(filepath)
const [__, suffix] = filename.match(/\_(.*)\.channels\.xml$/) || [null, null]
for (const channel of channels) {
guides.push({
channel: channel.xmltv_id,
site,
lang: channel.lang,
url: `https://iptv-org.github.io/epg/guides/${suffix}/${site}.epg.xml`
})
}
}
guides = _.sortBy(guides, 'channel')
const outputFilepath = `${OUTPUT_DIR}/guides.json`
await file.create(outputFilepath, JSON.stringify(guides))
logger.info(`saved to "${outputFilepath}"...`)
}
main()

View file

@@ -1,149 +0,0 @@
const { db, logger, file, api } = require('../core')
const grabber = require('epg-grabber')
const _ = require('lodash')
const LOGS_DIR = process.env.LOGS_DIR || 'scripts/logs'
const PUBLIC_DIR = process.env.PUBLIC_DIR || '.gh-pages'
const GUIDES_PATH = `${LOGS_DIR}/guides.log`
async function main() {
await setUp()
await generateGuides()
}
main()
async function generateGuides() {
logger.info(`Generating guides/...`)
logger.info('Loading "database/programs.db"...')
await db.programs.load()
await api.channels.load()
const grouped = groupByGroup(await loadQueue())
for (const key in grouped) {
const filepath = `${PUBLIC_DIR}/guides/${key}.epg.xml`
const criticalErrors = []
let channels = {}
let programs = []
for (const item of grouped[key]) {
const itemPrograms = await loadProgramsForItem(item)
programs = programs.concat(itemPrograms)
if (channels[item.channel.xmltv_id]) continue
if (item.error) {
const error = {
xmltv_id: item.channel.xmltv_id,
site: item.channel.site,
site_id: item.channel.site_id,
lang: item.channel.lang,
date: item.date,
error: item.error
}
criticalErrors.push(error)
await logError(key, error)
} else {
if (!itemPrograms.length) {
await logError(key, {
xmltv_id: item.channel.xmltv_id,
site: item.channel.site,
site_id: item.channel.site_id,
lang: item.channel.lang,
date: item.date,
error: 'Programs not found'
})
continue
}
const channel = api.channels.find({ id: item.channel.xmltv_id })
if (!channel) {
await logError(key, {
xmltv_id: item.channel.xmltv_id,
site: item.channel.site,
site_id: item.channel.site_id,
lang: item.channel.lang,
date: item.date,
error: 'The channel has the wrong xmltv_id'
})
continue
}
channels[channel.id] = {
xmltv_id: channel.id,
name: item.channel.display_name,
logo: channel.logo,
site: item.channel.site
}
}
}
channels = Object.values(channels)
channels = _.sortBy(channels, 'xmltv_id')
programs = _.sortBy(programs, ['channel', 'start'])
logger.info(`Creating "${filepath}"...`)
const output = grabber.convertToXMLTV({ channels, programs })
await file.create(filepath, output)
let status = 0
if (criticalErrors.length > 0 || !channels.length) {
status = 1
}
await logGuide({
group: key,
count: channels.length,
status
})
}
logger.info(`Done`)
}
function groupByGroup(items = []) {
const groups = {}
items.forEach(item => {
item.groups.forEach(key => {
if (!groups[key]) {
groups[key] = []
}
groups[key].push(item)
})
})
return groups
}
async function loadQueue() {
logger.info('Loading queue...')
await db.queue.load()
return await db.queue.find({}).sort({ xmltv_id: 1 })
}
async function loadProgramsForItem(item) {
return await db.programs.find({ _qid: item._id }).sort({ channel: 1, start: 1 })
}
async function setUp() {
logger.info(`Creating '${GUIDES_PATH}'...`)
await file.create(GUIDES_PATH)
await file.createDir(`${LOGS_DIR}/errors`)
}
async function logGuide(data) {
await file.append(GUIDES_PATH, JSON.stringify(data) + '\r\n')
}
async function logError(key, data) {
const filepath = `${LOGS_DIR}/errors/${key}.log`
if (!(await file.exists(filepath))) {
await file.create(filepath)
}
await file.append(filepath, JSON.stringify(data) + '\r\n')
}

View file

@@ -7,3 +7,4 @@ exports.markdown = require('./markdown')
exports.api = require('./api')
exports.date = require('./date')
exports.table = require('./table')
exports.xml = require('./xml')

View file

@@ -1,61 +1,6 @@
const { Command } = require('commander')
const { db, logger } = require('../core')
const path = require('path')
const _ = require('lodash')
const fs = require('fs')
const xml = {}
const program = new Command()
program
.requiredOption('-c, --config <config>', 'Config file')
.option('-s, --set [args...]', 'Set custom arguments', [])
.option('-o, --output <output>', 'Output file')
.parse(process.argv)
const options = program.opts()
async function main() {
await db.channels.load()
const config = require(path.resolve(options.config))
const args = {}
options.set.forEach(arg => {
const [key, value] = arg.split(':')
args[key] = value
})
let channels = config.channels(args)
if (isPromise(channels)) {
channels = await channels
}
channels = _.uniqBy(channels, 'site_id')
const siteChannels = await db.channels.find({ site: config.site })
for (const channel of channels) {
if (channel.xmltv_id) continue
const data = siteChannels.find(c => c.site_id === channel.site_id.toString())
if (data) {
channel.xmltv_id = data.xmltv_id
channel.name = data.name
}
}
channels = _.sortBy(channels, 'xmltv_id')
const xml = json2xml(channels, config.site)
const dir = path.parse(options.config).dir
const output = options.output || `${dir}/${config.site}.channels.xml`
fs.writeFileSync(path.resolve(output), xml)
logger.info(`File '${output}' successfully saved`)
}
main()
function isPromise(promise) {
return !!promise && typeof promise.then === 'function'
}
function json2xml(items, site) {
xml.create = function (items, site) {
let output = `<?xml version="1.0" encoding="UTF-8"?>\r\n<site site="${site}">\r\n <channels>\r\n`
items.forEach(channel => {
@@ -101,4 +46,4 @@ function escapeString(string, defaultValue = '') {
.trim()
}
module.exports = { json2xml }
module.exports = xml
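A usage sketch of the new core/xml module (the import path matches how the commands above require the core; the commented output mirrors the parse-channels.channels.xml fixture further down):

// As required from scripts/commands/*/ (path is an assumption):
const { xml } = require('../../core')
const output = xml.create(
  [{ lang: 'en', xmltv_id: 'CNNInternational.us', site_id: '140', name: 'CNN International' }],
  'parse-channels.com'
)
// output:
// <?xml version="1.0" encoding="UTF-8"?>
// <site site="parse-channels.com">
//   <channels>
//     <channel lang="en" xmltv_id="CNNInternational.us" site_id="140">CNN International</channel>
//   </channels>
// </site>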

View file

View file

@@ -16,14 +16,14 @@ To load a program guide, all you need to do is copy the link to one or more of t
</tbody>
</table>
## EPG Codes
📋&nbsp;&nbsp;[iptv-org.github.io](https://iptv-org.github.io/)
## API
The API documentation can be found in the [iptv-org/api](https://github.com/iptv-org/api) repository.
## Resources
Links to other useful IPTV-related resources can be found in the [iptv-org/awesome-iptv](https://github.com/iptv-org/awesome-iptv) repository.
## Contribution
If you find a bug or want to contribute to the code or documentation, you can help by submitting an [issue](https://github.com/iptv-org/epg/issues) or a [pull request](https://github.com/iptv-org/epg/pulls).

View file

@@ -1 +0,0 @@
{"xmltv_id":"CNNInternationalEurope2.us","site":"example.com","site_id":"141","lang":"en","error":"The channel has the wrong xmltv_id"}

View file

@@ -1 +0,0 @@
{"xmltv_id":"CNNInternationalEurope.us","site":"magticom.ge","site_id":"140","lang":"ru","date":"2022-01-21T00:00:00Z","error":"Programs not found"}

View file

@@ -1 +0,0 @@
{"xmltv_id":"Perviykanal.ru","site":"yandex.ru","site_id":"1","lang":"ru","date":"2022-01-21T00:00:00Z","error":"Some error"}

View file

@@ -1 +0,0 @@
{"xmltv_id":"BravoEast.us","site":"directv.com","site_id":"237","lang":"en","date":"2022-01-21T00:00:00Z","error":"Invalid header value char"}

View file

@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<site site="parse-channels.com">
<channels>
<channel lang="en" xmltv_id="CNNInternational.us" site_id="140">CNN International</channel>
</channels>
</site>

View file

@@ -1 +0,0 @@
{"lang":"en","xmltv_id":"BravoEast.us","site_id":"237","site":"directv.com","configPath":"sites/directv.com/directv.com.config.js","groups":["us/directv.com"],"cluster_id":84,"_id":"00AluKCrCnfgrl8W","date":"2022-01-21T00:00:00Z","error":"Invalid header value char"}

View file

@@ -0,0 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<site site="duplicate.com">
<channels>
<channel lang="en" xmltv_id="CNNInternationalEurope.us" site_id="140">CNN International</channel>
<channel lang="en" xmltv_id="CNNInternationalEurope.us" site_id="140">CNN International</channel>
</channels>
</site>

View file

@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<site site="lint.com">
<channels>
<channel xmltv_id="CNNInternationalEurope.us" site_id="140">CNN International</channel>
</channels>
</site>

View file

@@ -0,0 +1,19 @@
module.exports = {
site: 'parse-channels.com',
url() {
return `https://parse-channels.com`
},
parser() {
return []
},
channels() {
return [
{
lang: 'en',
xmltv_id: 'CNNInternational.us',
site_id: 140,
name: 'CNN International'
}
]
}
}

View file

@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<site site="wrong_xmltv_id.com">
<channels>
<channel lang="en" xmltv_id="CNNInternational" site_id="140">CNN International</channel>
</channels>
</site>

View file

@@ -0,0 +1,26 @@
const { execSync } = require('child_process')
const fs = require('fs-extra')
const path = require('path')
beforeEach(() => {
fs.emptyDirSync('tests/__data__/output')
const stdout = execSync(
'CHANNELS_PATH=tests/__data__/input/sites/example.com_ca.channels.xml OUTPUT_DIR=tests/__data__/output/api npm run api:update',
{ encoding: 'utf8' }
)
})
it('can generate guides.json', () => {
expect(content('tests/__data__/output/api/guides.json')).toBe(
content('tests/__data__/expected/api/guides.json')
)
})
function content(filepath) {
const data = fs.readFileSync(path.resolve(filepath), {
encoding: 'utf8'
})
return JSON.stringify(data)
}

View file

@@ -0,0 +1,19 @@
const { execSync } = require('child_process')
it('will show a message if the file contains a syntax error', () => {
try {
const stdout = execSync(
'npm run channels:lint -- tests/__data__/input/sites/lint.channels.xml',
{
encoding: 'utf8'
}
)
console.log(stdout)
process.exit(1)
} catch (err) {
expect(err.status).toBe(1)
expect(err.stdout).toBe(
`\n> channels:lint\n> node scripts/commands/channels/lint.js "tests/__data__/input/sites/lint.channels.xml"\n\n\ntests/__data__/input/sites/lint.channels.xml\n 4:0 Element 'channel': The attribute 'lang' is required but missing.\n\n1 error(s)\n`
)
}
})

View file

@@ -0,0 +1,24 @@
const { execSync } = require('child_process')
const fs = require('fs-extra')
const path = require('path')
beforeEach(() => {
fs.emptyDirSync('tests/__data__/output')
const stdout = execSync(
'npm run channels:parse -- --config=tests/__data__/input/sites/parse-channels.config.js --output=tests/__data__/output/channels.xml',
{ encoding: 'utf8' }
)
})
it('can parse channels', () => {
expect(content('tests/__data__/output/channels.xml')).toEqual(
content('tests/__data__/expected/sites/parse-channels.channels.xml')
)
})
function content(filepath) {
return fs.readFileSync(path.resolve(filepath), {
encoding: 'utf8'
})
}

View file

@@ -0,0 +1,49 @@
const { execSync } = require('child_process')
it('will show a message if the file contains a duplicate', () => {
try {
const stdout = execSync(
'npm run channels:validate -- tests/__data__/input/sites/duplicate.channels.xml',
{
encoding: 'utf8'
}
)
console.log(stdout)
process.exit(1)
} catch (err) {
expect(err.status).toBe(1)
expect(err.stdout).toBe(
`\n> channels:validate\n> node scripts/commands/channels/validate.js "tests/__data__/input/sites/duplicate.channels.xml"\n\ntests/__data__/input/sites/duplicate.channels.xml
(index) type lang xmltv_id site_id name
0 'duplicate' 'en' 'CNNInternationalEurope.us' '140' 'CNN International'
\n1 error(s) in 1 file(s)\n`
)
}
})
it('will show a message if the file contains a channel with wrong xmltv_id', () => {
try {
const stdout = execSync(
'npm run channels:validate -- tests/__data__/input/sites/wrong_xmltv_id.channels.xml',
{
encoding: 'utf8'
}
)
console.log(stdout)
process.exit(1)
} catch (err) {
expect(err.status).toBe(1)
expect(err.stdout).toBe(
`\n> channels:validate\n> node scripts/commands/channels/validate.js "tests/__data__/input/sites/wrong_xmltv_id.channels.xml"\n\ntests/__data__/input/sites/wrong_xmltv_id.channels.xml
(index) type lang xmltv_id site_id name
0 'wrong_xmltv_id' 'en' 'CNNInternational' '140' 'CNN International'
\n1 error(s) in 1 file(s)\n`
)
}
})

View file

@@ -0,0 +1,32 @@
const { execSync } = require('child_process')
const fs = require('fs-extra')
const path = require('path')
beforeEach(() => {
fs.emptyDirSync('tests/__data__/output')
fs.copyFileSync('tests/__data__/input/database/queue.db', 'tests/__data__/output/queue.db')
execSync(
'DB_DIR=tests/__data__/output LOGS_DIR=tests/__data__/output/logs npm run cluster:load -- --cluster-id=1 --timeout=10000',
{ encoding: 'utf8' }
)
})
it('can load cluster', () => {
expect(content('tests/__data__/output/logs/cluster/load/cluster_1.log')).toEqual(
content('tests/__data__/expected/logs/cluster/load/cluster_1.log')
)
})
function content(filepath) {
const data = fs.readFileSync(path.resolve(filepath), {
encoding: 'utf8'
})
return data
.split('\n')
.filter(l => l)
.map(l => {
return JSON.parse(l)
})
}

View file

@@ -1,25 +0,0 @@
const fs = require('fs')
const path = require('path')
const { execSync } = require('child_process')
beforeEach(() => {
fs.rmdirSync('tests/__data__/output', { recursive: true })
fs.mkdirSync('tests/__data__/output')
fs.mkdirSync('tests/__data__/temp/database', { recursive: true })
fs.copyFileSync('tests/__data__/input/database/queue.db', 'tests/__data__/temp/database/queue.db')
})
afterEach(() => {
fs.rmdirSync('tests/__data__/temp', { recursive: true })
})
it('can create valid matrix', () => {
const result = execSync(
'DB_DIR=tests/__data__/temp/database node scripts/commands/create-matrix.js',
{
encoding: 'utf8'
}
)
expect(result).toBe('::set-output name=matrix::{"cluster_id":[1,4,84,120]}\n')
})

View file

@@ -0,0 +1,32 @@
const { execSync } = require('child_process')
const fs = require('fs-extra')
const path = require('path')
beforeEach(() => {
fs.emptyDirSync('tests/__data__/output')
fs.copyFileSync('tests/__data__/input/database/queue.db', 'tests/__data__/output/queue.db')
fs.copyFileSync('tests/__data__/input/database/programs.db', 'tests/__data__/output/programs.db')
const stdout = execSync(
'DB_DIR=tests/__data__/output DATA_DIR=tests/__data__/input/data PUBLIC_DIR=tests/__data__/output npm run guides:update',
{ encoding: 'utf8' }
)
})
it('can generate /guides', () => {
expect(content('tests/__data__/output/guides/fr/chaines-tv.orange.fr.epg.xml')).toBe(
content('tests/__data__/expected/guides/fr/chaines-tv.orange.fr.epg.xml')
)
expect(content('tests/__data__/output/guides/zw/dstv.com.epg.xml')).toBe(
content('tests/__data__/expected/guides/zw/dstv.com.epg.xml')
)
})
function content(filepath) {
const data = fs.readFileSync(path.resolve(filepath), {
encoding: 'utf8'
})
return JSON.stringify(data)
}

View file

@@ -1,40 +0,0 @@
const fs = require('fs')
const path = require('path')
const dayjs = require('dayjs')
const utc = require('dayjs/plugin/utc')
const { execSync } = require('child_process')
dayjs.extend(utc)
beforeEach(() => {
fs.rmdirSync('tests/__data__/temp', { recursive: true })
fs.rmdirSync('tests/__data__/output', { recursive: true })
fs.mkdirSync('tests/__data__/output')
fs.mkdirSync('tests/__data__/temp/database', { recursive: true })
fs.copyFileSync('tests/__data__/input/database/queue.db', 'tests/__data__/temp/database/queue.db')
execSync(
'DB_DIR=tests/__data__/temp/database LOGS_DIR=tests/__data__/output/logs node scripts/commands/load-cluster.js --cluster-id=1 --timeout=10000',
{ encoding: 'utf8' }
)
})
it('can load cluster', () => {
let output = content('tests/__data__/output/logs/load-cluster/cluster_1.log')
let expected = content('tests/__data__/expected/logs/load-cluster/cluster_1.log')
expect(output).toEqual(expected)
})
function content(filepath) {
const data = fs.readFileSync(path.resolve(filepath), {
encoding: 'utf8'
})
return data
.split('\n')
.filter(l => l)
.map(l => {
return JSON.parse(l)
})
}

View file

@@ -1,25 +1,19 @@
const fs = require('fs')
const path = require('path')
const { execSync } = require('child_process')
const fs = require('fs-extra')
const path = require('path')
beforeEach(() => {
fs.rmdirSync('tests/__data__/output', { recursive: true })
fs.mkdirSync('tests/__data__/output')
fs.mkdirSync('tests/__data__/output/database', { recursive: true })
fs.copyFileSync(
'tests/__data__/input/database/queue.db',
'tests/__data__/output/database/queue.db'
)
fs.emptyDirSync('tests/__data__/output')
fs.copyFileSync('tests/__data__/input/database/queue.db', 'tests/__data__/output/queue.db')
const stdout = execSync(
'DB_DIR=tests/__data__/output/database LOGS_DIR=tests/__data__/input/logs node scripts/commands/save-results.js',
'DB_DIR=tests/__data__/output LOGS_DIR=tests/__data__/input/logs npm run programs:save',
{ encoding: 'utf8' }
)
})
it('can save programs to database', () => {
let output = content('tests/__data__/output/database/programs.db')
let output = content('tests/__data__/output/programs.db')
let expected = content('tests/__data__/expected/database/programs.db')
output = output.map(i => {
@@ -35,10 +29,9 @@ it('can save programs to database', () => {
})
it('can update queue', () => {
const output = content('tests/__data__/output/database/queue.db')
const expected = content('tests/__data__/expected/database/queue-with-errors.db')
expect(output).toEqual(expected)
expect(content('tests/__data__/output/queue.db')).toEqual(
content('tests/__data__/expected/database/queue-with-errors.db')
)
})
function content(filepath) {

View file

@@ -1,13 +1,12 @@
const fs = require('fs')
const path = require('path')
const { execSync } = require('child_process')
const fs = require('fs-extra')
const path = require('path')
beforeEach(() => {
fs.rmdirSync('tests/__data__/output', { recursive: true })
fs.mkdirSync('tests/__data__/output')
fs.emptyDirSync('tests/__data__/output')
const stdout = execSync(
'DB_DIR=tests/__data__/output/database LOGS_DIR=tests/__data__/output/logs CHANNELS_PATH=tests/__data__/input/sites/*.channels.xml node scripts/commands/create-queue.js --max-clusters=1 --days=2',
'DB_DIR=tests/__data__/output/database LOGS_DIR=tests/__data__/output/logs CHANNELS_PATH=tests/__data__/input/sites/example.com_ca.channels.xml npm run queue:create -- --max-clusters=1 --days=2',
{ encoding: 'utf8' }
)
})
@@ -35,13 +34,6 @@ it('can create queue', () => {
)
})
it('can log errors', () => {
let output = content('tests/__data__/output/logs/errors/ca/example.com.log')
let expected = content('tests/__data__/expected/logs/errors/ca/example.com.log')
expect(output).toEqual(expected)
})
function content(filepath) {
const data = fs.readFileSync(path.resolve(filepath), {
encoding: 'utf8'

View file

@@ -0,0 +1,26 @@
const { execSync } = require('child_process')
const fs = require('fs-extra')
const path = require('path')
beforeEach(() => {
fs.emptyDirSync('tests/__data__/output')
const stdout = execSync(
'CHANNELS_PATH=tests/__data__/input/sites/example.com_ca.channels.xml DATA_DIR=tests/__data__/input/data npm run readme:update -- --config=tests/__data__/input/readme.json',
{ encoding: 'utf8' }
)
})
it('can update readme.md', () => {
expect(content('tests/__data__/output/readme.md')).toBe(
content('tests/__data__/expected/_readme.md')
)
})
function content(filepath) {
const data = fs.readFileSync(path.resolve(filepath), {
encoding: 'utf8'
})
return JSON.stringify(data)
}

View file

@@ -1,28 +0,0 @@
const fs = require('fs')
const path = require('path')
const { execSync } = require('child_process')
beforeEach(() => {
fs.rmdirSync('tests/__data__/output', { recursive: true })
fs.mkdirSync('tests/__data__/output')
const stdout = execSync(
'CHANNELS_PATH=tests/__data__/input/sites/**.channels.xml OUTPUT_DIR=tests/__data__/output/api node scripts/commands/update-api.js',
{ encoding: 'utf8' }
)
})
it('can generate guides.json', () => {
const output = content('tests/__data__/output/api/guides.json')
const expected = content('tests/__data__/expected/api/guides.json')
expect(output).toBe(expected)
})
function content(filepath) {
const data = fs.readFileSync(path.resolve(filepath), {
encoding: 'utf8'
})
return JSON.stringify(data)
}

View file

@@ -1,67 +0,0 @@
const fs = require('fs')
const path = require('path')
const { execSync } = require('child_process')
beforeEach(() => {
fs.rmdirSync('tests/__data__/output', { recursive: true })
fs.mkdirSync('tests/__data__/output')
fs.mkdirSync('tests/__data__/temp/database', { recursive: true })
fs.copyFileSync('tests/__data__/input/database/queue.db', 'tests/__data__/temp/database/queue.db')
fs.copyFileSync(
'tests/__data__/input/database/programs.db',
'tests/__data__/temp/database/programs.db'
)
const stdout = execSync(
'DB_DIR=tests/__data__/temp/database DATA_DIR=tests/__data__/input/data PUBLIC_DIR=tests/__data__/output LOGS_DIR=tests/__data__/output/logs node scripts/commands/update-guides.js',
{ encoding: 'utf8' }
)
})
afterEach(() => {
fs.rmdirSync('tests/__data__/temp', { recursive: true })
})
it('can generate /guides', () => {
const output1 = content('tests/__data__/output/guides/fr/chaines-tv.orange.fr.epg.xml')
const expected1 = content('tests/__data__/expected/guides/fr/chaines-tv.orange.fr.epg.xml')
expect(output1).toBe(expected1)
const output2 = content('tests/__data__/output/guides/zw/dstv.com.epg.xml')
const expected2 = content('tests/__data__/expected/guides/zw/dstv.com.epg.xml')
expect(output2).toBe(expected2)
})
it('can create guides.log', () => {
const output = content('tests/__data__/output/logs/guides.log')
const expected = content('tests/__data__/expected/logs/guides.log')
expect(output).toBe(expected)
})
it('can log errors', () => {
const output1 = content('tests/__data__/output/logs/errors/ru/yandex.ru.log')
const expected1 = content('tests/__data__/expected/logs/errors/ru/yandex.ru.log')
expect(output1).toBe(expected1)
const output2 = content('tests/__data__/output/logs/errors/us/directv.com.log')
const expected2 = content('tests/__data__/expected/logs/errors/us/directv.com.log')
expect(output2).toBe(expected2)
const output3 = content('tests/__data__/output/logs/errors/ge/magticom.ge.log')
const expected3 = content('tests/__data__/expected/logs/errors/ge/magticom.ge.log')
expect(output3).toBe(expected3)
})
function content(filepath) {
const data = fs.readFileSync(path.resolve(filepath), {
encoding: 'utf8'
})
return JSON.stringify(data)
}

View file

@@ -1,28 +0,0 @@
const fs = require('fs')
const path = require('path')
const { execSync } = require('child_process')
beforeEach(() => {
fs.rmdirSync('tests/__data__/output', { recursive: true })
fs.mkdirSync('tests/__data__/output')
const stdout = execSync(
'CHANNELS_PATH=tests/__data__/input/sites/*.channels.xml DATA_DIR=tests/__data__/input/data node scripts/commands/update-readme.js --config=tests/__data__/input/_readme.json',
{ encoding: 'utf8' }
)
})
it('can update readme.md', () => {
const output = content('tests/__data__/output/readme.md')
const expected = content('tests/__data__/expected/readme.md')
expect(output).toBe(expected)
})
function content(filepath) {
const data = fs.readFileSync(path.resolve(filepath), {
encoding: 'utf8'
})
return JSON.stringify(data)
}