Mirror of https://github.com/iptv-org/epg.git (synced 2025-05-09 16:40:07 -04:00)

Commit 720ed71b54

25 changed files with 1544 additions and 1198 deletions

Jenkinsfile (vendored): 63 lines deleted

@@ -1,63 +0,0 @@
List target_sites = (env.TARGET_SITES == null) ? [] : env.TARGET_SITES.split(';')
List exclude_sites = (env.EXCLUDE_SITES == null) ? [] : env.EXCLUDE_SITES.split(';')

target_sites.removeAll { it in exclude_sites }

Map matrix_axes = [
    SITE: target_sites
]

@NonCPS
List getMatrixAxes(Map matrix_axes) {
    List axes = []
    matrix_axes.each { axis, values ->
        List axisList = []
        values.each { value ->
            axisList << [(axis): value]
        }
        axes << axisList
    }
    axes.combinations()*.sum()
}

List axes = getMatrixAxes(matrix_axes)

Map tasks = [failFast: false]

for(int i = 0; i < axes.size(); i++) {
    Map axis = axes[i]
    List axisEnv = axis.collect { k, v ->
        "${k}=${v}"
    }
    tasks[axisEnv.join(', ')] = { ->
        env.NODEJS_HOME = "${tool 'node'}"
        env.PATH="${env.NODEJS_HOME}/bin:${env.PATH}"

        node {
            skipDefaultCheckout()
            withEnv(axisEnv) {
                try {
                    cleanWs()
                    checkout scm
                    sh 'npm install'
                    sh "npm run grab"
                } finally {
                    archiveArtifacts artifacts: "guides/**/*.xml", onlyIfSuccessful: true
                    cleanWs(
                        cleanWhenNotBuilt: false,
                        deleteDirs: true,
                        disableDeferredWipeout: true,
                        notFailBuild: true,
                        patterns: [[pattern: '.gitignore', type: 'INCLUDE'],
                                   [pattern: '.propsfile', type: 'EXCLUDE']])
                }
            }
        }
    }
}

node {
    stage('Load') {
        parallel(tasks)
    }
}

README.md: 29 lines changed

@@ -42,19 +42,34 @@ npm install

Now choose one of the sources (the complete list can be found in the [/sites](https://github.com/iptv-org/epg/tree/master/sites) folder) and start downloading the guide using the command:

```sh
-# Windows
-set SITE=example.com&& npm run grab
-
-# Linux/macOS
-SITE=example.com npm run grab
+npm run grab -- --site=example.com
```

+To download a guide in a specific language, pass its [ISO 639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) code to the `--lang` argument:
+
+```sh
+npm run grab -- --site=example.com --lang=fr
+```
+
+To also create a compressed version of the guide, add the `--gzip` flag:
+
+```sh
+npm run grab -- --site=example.com --gzip
+```
+
After the download is completed, a new `guides` folder will appear in the current directory, containing all the XML files:

```sh
guides
└── en
└── fr
    └── example.com.xml
    └── example.com.xml.gz
```

If you want to download the guide automatically on a schedule, pass a valid [cron expression](https://crontab.guru/) to the script using the `--cron` attribute:

```sh
npm run grab -- --site=example.com --cron="0 0 * * *"
```

You can also make these guides available via URL by running your own server:

@@ -66,13 +81,13 @@ npm run serve

After that, all the downloaded guides will be available at a link like this:

```
http://localhost:3000/guides/en/example.com.xml
http://localhost:3000/guides/fr/example.com.xml
```

In addition, they will be available on your local network at:

```
http://<your_local_ip_address>:3000/guides/en/example.com.xml
http://<your_local_ip_address>:3000/guides/fr/example.com.xml
```

## Playlists

package-lock.json (generated): 1480 lines changed
File diff suppressed because it is too large.

package.json

@@ -11,7 +11,7 @@
    "test:commands": "npx jest --runInBand -- commands",
    "test:sites": "TZ=Pacific/Nauru npx jest --runInBand -- sites",
    "check": "npm run api:load && npm run channels:lint sites/**/*.js && npm run channels:validate sites/**/*.xml",
-   "grab": "cross-var epg-grabber --config=sites/$SITE/$SITE.config.js --channels=sites/$SITE/$SITE.channels.xml --output=guides/{lang}/{site}.xml",
+   "grab": "node scripts/commands/epg/grab.js",
    "serve": "npx serve"
  },
  "private": true,

@@ -33,10 +33,12 @@
    "chalk": "^4.1.2",
    "cheerio": "^1.0.0-rc.10",
    "commander": "^8.2.0",
+   "consola": "^3.2.3",
+   "cron": "^2.3.1",
    "cross-var": "^1.1.0",
    "csv-parser": "^3.0.0",
    "dayjs": "^1.11.7",
-   "epg-grabber": "^0.31.0",
+   "epg-grabber": "^0.32.0",
    "epg-parser": "^0.2.0",
    "form-data": "^4.0.0",
    "fs-extra": "^10.0.1",

@@ -57,10 +57,10 @@ async function main() {
    }

    if (localErrors.length) {
-     logger.info(`\n${chalk.underline(filepath)}`)
+     console.log(`\n${chalk.underline(filepath)}`)
      localErrors.forEach(error => {
        const position = `${error.line}:${error.column}`
-       logger.error(` ${chalk.gray(position.padEnd(4, ' '))} ${error.message.trim()}`)
+       console.log(` ${chalk.gray(position.padEnd(4, ' '))} ${error.message.trim()}`)
      })

      errors = errors.concat(localErrors)

@@ -68,7 +68,7 @@ async function main() {
  }

  if (errors.length) {
-   logger.error(chalk.red(`\n${errors.length} error(s)`))
+   console.log(chalk.red(`\n${errors.length} error(s)`))
    process.exit(1)
  }
}

@@ -52,7 +52,7 @@ async function main() {
    }

    if (errors.length) {
-     logger.info(chalk.underline(filepath))
+     console.log(chalk.underline(filepath))
      console.table(errors, ['type', 'lang', 'xmltv_id', 'site_id', 'name'])
      console.log()
      stats.files++

@@ -60,7 +60,7 @@ async function main() {
  }

  if (stats.errors > 0) {
-   logger.error(chalk.red(`${stats.errors} error(s) in ${stats.files} file(s)`))
+   console.log(chalk.red(`${stats.errors} error(s) in ${stats.files} file(s)`))
    process.exit(1)
  }
}

scripts/commands/epg/grab.js (new file): 218 lines

@@ -0,0 +1,218 @@
const { program } = require('commander')
const _ = require('lodash')
const { EPGGrabber, generateXMLTV, Channel, Program } = require('epg-grabber')
const { db, logger, date, timer, file, parser, api, zip } = require('../../core')
const path = require('path')
const dayjs = require('dayjs')
const utc = require('dayjs/plugin/utc')
const CronJob = require('cron').CronJob

dayjs.extend(utc)

const BASE_DIR = process.env.BASE_DIR || '.'
const CURR_DATE = process.env.CURR_DATE || new Date()

program
  .requiredOption('-s, --site <name>', 'Name of the site to parse')
  .option('-l, --lang <code>', 'Filter channels by language (ISO 639-2 code)')
  .option('-o, --output <path>', 'Path to output file')
  .option('--cron <expression>', 'Schedule a script run')
  .option('--gzip', 'Create a compressed version of the guide as well', false)
  .parse(process.argv)

const options = program.opts()

options.output = options.output || file.resolve(`${BASE_DIR}/guides/{lang}/{site}.xml`)
options.config = file.resolve(`${BASE_DIR}/sites/${options.site}/${options.site}.config.js`)
options.channels = file.resolve(`${BASE_DIR}/sites/${options.site}/${options.site}*.channels.xml`)

let channels = []
let programs = []
let runIndex = 0

async function main() {
  logger.start('starting...')

  logger.info('settings:')
  for (let prop in options) {
    logger.info(` ${prop}: ${options[prop]}`)
  }

  const config = await loadConfig(options.config)
  const queue = await createQueue(options.channels, config)
  const outputPath = options.output

  if (options.cron) {
    const job = new CronJob(options.cron, function () {
      runJob(config, queue, outputPath)
    })
    job.start()
  } else {
    await runJob(config, queue, outputPath)
  }
}

async function loadConfig(configPath) {
  let config = require(file.resolve(configPath))
  config = _.merge(config, {})
  config.days = config.days || 1

  logger.info('config:')
  logConfig(config)

  return config
}

function logConfig(config, level = 1) {
  let padLeft = ' '.repeat(level)
  for (let prop in config) {
    if (typeof config[prop] === 'string' || typeof config[prop] === 'number') {
      logger.info(`${padLeft}${prop}: ${config[prop]}`)
    } else if (typeof config[prop] === 'object') {
      level++
      logger.info(`${padLeft}${prop}:`)
      logConfig(config[prop], level)
    }
  }
}

async function runJob(config, queue, outputPath) {
  runIndex++
  logger.info(`run #${runIndex}:`)

  timer.start()

  await grab(queue, config)

  await save(outputPath, channels, programs)

  logger.success(` done in ${timer.format('HH[h] mm[m] ss[s]')}`)
}

async function grab(queue, config) {
  const grabber = new EPGGrabber(config)
  const total = queue.length

  let i = 1
  for (const item of queue) {
    let channel = item.channel
    let date = item.date
    channels.push(item.channel)
    await grabber
      .grab(channel, date, (data, err) => {
        logger.info(
          ` [${i}/${total}] ${channel.site} (${channel.lang}) - ${channel.xmltv_id} - ${dayjs
            .utc(data.date)
            .format('MMM D, YYYY')} (${data.programs.length} programs)`
        )
        if (i < total) i++

        if (err) {
          logger.info(` ERR: ${err.message}`)
        }
      })
      .then(results => {
        programs = programs.concat(results)
      })
  }
}

async function createQueue(channelsPath, config) {
  logger.info('creating queue...')
  let queue = {}
  await api.channels.load().catch(logger.error)
  const files = await file.list(channelsPath).catch(logger.error)
  const utcDate = date.getUTC(CURR_DATE)
  for (const filepath of files) {
    logger.info(` loading "${filepath}"...`)
    try {
      const dir = file.dirname(filepath)
      const { channels } = await parser.parseChannels(filepath)
      const filename = file.basename(filepath)
      const dates = Array.from({ length: config.days }, (_, i) => utcDate.add(i, 'd'))
      for (const channel of channels) {
        if (!channel.site || !channel.xmltv_id) continue
        if (options.lang && channel.lang !== options.lang) continue
        const found = api.channels.find({ id: channel.xmltv_id })
        if (found) {
          channel.logo = found.logo
        }
        for (const d of dates) {
          const dateString = d.toJSON()
          const key = `${channel.site}:${channel.lang}:${channel.xmltv_id}:${dateString}`
          if (!queue[key]) {
            queue[key] = {
              channel,
              date: dateString,
              config,
              error: null
            }
          }
        }
      }
    } catch (err) {
      logger.error(err)
      continue
    }
  }

  queue = Object.values(queue)

  logger.info(` added ${queue.length} items`)

  return queue
}

async function save(template, parsedChannels, programs = []) {
  const variables = file.templateVariables(template)

  const groups = _.groupBy(parsedChannels, channel => {
    let groupId = ''
    for (let key in channel) {
      if (variables.includes(key)) {
        groupId += channel[key]
      }
    }

    return groupId
  })

  for (let groupId in groups) {
    const channels = groups[groupId]

    let output = {
      channels,
      programs: [],
      date: CURR_DATE
    }

    for (let program of programs) {
      let programLang = program.titles[0].lang
      let channel = channels.find(c => c.xmltv_id === program.channel && c.lang === programLang)
      if (!channel) continue

      output.programs.push(new Program(program, channel))
    }

    output.channels = _.sortBy(output.channels, 'id')
    output.channels = _.uniqBy(output.channels, 'id')

    output.programs = _.sortBy(output.programs, ['channel', 'start'])
    output.programs = _.uniqBy(output.programs, p => p.channel + p.start)

    const outputPath = file.templateFormat(template, output.channels[0])
    const xmlFilepath = outputPath
    const xmltv = generateXMLTV(output)
    logger.info(` saving to "${xmlFilepath}"...`)
    await file.create(xmlFilepath, xmltv)

    if (options.gzip) {
      const gzFilepath = `${outputPath}.gz`
      const compressed = await zip.compress(xmltv)
      logger.info(` saving to "${gzFilepath}"...`)
      await file.create(gzFilepath, compressed)
    }
  }
}

main()

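The `save()` step above is what splits the output into one XMLTV file per `{lang}/{site}` combination, which is why the test fixtures further down expect both `guides/en/example.com.xml` and `guides/fr/example.com.xml`. A minimal sketch of that grouping step, using lodash and illustrative channel objects shaped like the test fixtures:

```js
// Sketch of the grouping inside save(), assuming the default output template
// 'guides/{lang}/{site}.xml' (so the template variables are ['lang', 'site']).
const _ = require('lodash')

const variables = ['lang', 'site'] // file.templateVariables('guides/{lang}/{site}.xml')

const channels = [
  { site: 'example.com', lang: 'en', xmltv_id: 'Channel1.us' },
  { site: 'example.com', lang: 'fr', xmltv_id: 'Channel1.us' }
]

// Each channel's group id is the concatenation of its template-variable values,
// so every distinct {lang}/{site} pair ends up in its own output file.
const groups = _.groupBy(channels, channel => {
  let groupId = ''
  for (let key in channel) {
    if (variables.includes(key)) groupId += channel[key]
  }
  return groupId
})

console.log(Object.keys(groups)) // [ 'example.comen', 'example.comfr' ]
```

Two groups means two calls to generateXMLTV() and two files written to disk.
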
@@ -4,6 +4,23 @@ const fs = require('fs-extra')

const file = {}

+file.templateVariables = function (template) {
+  const match = template.match(/{[^}]+}/g)
+
+  return Array.isArray(match) ? match.map(s => s.substring(1, s.length - 1)) : []
+}
+
+file.templateFormat = function (template, obj) {
+  let output = template
+  for (let key in obj) {
+    const regex = new RegExp(`{${key}}`, 'g')
+    const value = obj[key] || undefined
+    output = output.replace(regex, value)
+  }
+
+  return output
+}
+
file.list = function (pattern) {
  return new Promise(resolve => {
    glob(pattern, function (err, files) {

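The two helpers added here are what expand the `{lang}` and `{site}` placeholders in the grab command's `--output` path. A self-contained restatement with a worked example (the input values are illustrative):

```js
// Same bodies as the functions added in the hunk above, lifted out so the
// behaviour can be checked in isolation.
function templateVariables(template) {
  const match = template.match(/{[^}]+}/g)

  return Array.isArray(match) ? match.map(s => s.substring(1, s.length - 1)) : []
}

function templateFormat(template, obj) {
  let output = template
  for (let key in obj) {
    const regex = new RegExp(`{${key}}`, 'g')
    const value = obj[key] || undefined
    output = output.replace(regex, value)
  }

  return output
}

console.log(templateVariables('guides/{lang}/{site}.xml'))
// [ 'lang', 'site' ]
console.log(templateFormat('guides/{lang}/{site}.xml', { lang: 'en', site: 'example.com' }))
// guides/en/example.com.xml
```
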
@@ -1,19 +1,3 @@
-const { Signale } = require('signale')
+const { consola } = require('consola')

-const options = {}
-
-const logger = new Signale(options)
-
-logger.config({
-  displayLabel: false,
-  displayScope: false,
-  displayBadge: false
-})
-
-logger.memoryUsage = function () {
-  const used = process.memoryUsage().heapUsed / 1024 / 1024
-
-  logger.info(`memory: ${Math.round(used * 100) / 100} MB`)
-}
-
-module.exports = logger
+module.exports = consola

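With the shared logger now re-exporting consola directly, the logger calls made in grab.js (start, info, success, error) resolve to consola's built-in log types. A minimal sketch, assuming consola ^3 as pinned in package.json above:

```js
// Hypothetical standalone usage; the real code imports this via '../../core'.
const { consola } = require('consola')

consola.start('starting...')           // replaces the old Signale-based logger
consola.info('settings:')
consola.success('done in 00h 00m 05s') // example message, format as in grab.js
consola.error(new Error('ERR: request failed'))
```
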
@@ -1,4 +0,0 @@
<?xml version="1.0" encoding="UTF-8" ?><tv date="20221020">
<channel id="6eren.dk"><display-name>6'eren</display-name><icon src="https://upload.wikimedia.org/wikipedia/commons/6/64/6%27eren_2015.png"/><url>https://allente.se</url></channel>
<programme start="20221024040000 +0000" stop="20221024042500 +0000" channel="6eren.dk"><title lang="da">Diners, Drive-Ins and Dives</title><desc lang="da">Underholdning</desc><category lang="da">series</category><icon src="https://viasatps.api.comspace.se/PS/channeldate/image/viasat.ps/487/2022-10-24/se.cs.6eren.event.B_0254194276971024040000.jpg?size=2560x1440"/><episode-num system="xmltv_ns">23.5.0/1</episode-num><episode-num system="onscreen">S24E06</episode-num></programme>
</tv>

Binary file not shown.

@@ -1,3 +0,0 @@
<?xml version="1.0" encoding="UTF-8" ?><tv date="20221020">
<channel id="BravoEast.us"><display-name>Bravo East</display-name><icon src="https://www.directv.com/images/logos/channels/dark/large/579.png"/><url>https://directv.com</url></channel>
</tv>

Binary file not shown.

tests/__data__/expected/guides/en/example.com.xml (new file): 4 lines

@@ -0,0 +1,4 @@
<?xml version="1.0" encoding="UTF-8" ?><tv date="20221020">
<channel id="Channel1.us"><display-name>Channel 1</display-name><url>https://example.com</url></channel>
<programme start="20220306043000 +0000" stop="20220306071000 +0000" channel="Channel1.us"><title lang="en">Program1</title></programme>
</tv>

@@ -1,4 +0,0 @@
<?xml version="1.0" encoding="UTF-8" ?><tv date="20221020">
<channel id="BBCNews.uk"><display-name>BBC News</display-name><icon src="https://i.imgur.com/rPzH88J.png"/><url>https://sky.com</url></channel>
<programme start="20221027120000 +0000" stop="20221027123000 +0000" channel="BBCNews.uk"><title lang="en">BBC News at One</title><desc lang="en">The latest national and international news from the BBC. [S,SL]</desc><icon src="http://epgstatic.sky.com/epgdata/1.0/paimage/46/1/lisa/5.2.2/linear/channel/ca247bc8-6be0-48f9-88d1-865f87f7680e/2011"/></programme>
</tv>

Binary file not shown.

@@ -1,4 +0,0 @@
<?xml version="1.0" encoding="UTF-8" ?><tv date="20221020">
<channel id="BBCNews.uk"><display-name>BBC News</display-name><icon src="https://i.imgur.com/rPzH88J.png"/><url>https://virginmedia.com</url></channel>
<programme start="20221027120000 +0000" stop="20221027123000 +0000" channel="BBCNews.uk"><title lang="en">BBC News at One</title><desc lang="en">The latest national and international news, followed by weather.</desc><category lang="en">News</category><episode-num system="xmltv_ns">96839999.145799123.0/1</episode-num><episode-num system="onscreen">S96840000E145799124</episode-num></programme>
</tv>

Binary file not shown.

tests/__data__/expected/guides/fr/example.com.xml (new file): 4 lines

@@ -0,0 +1,4 @@
<?xml version="1.0" encoding="UTF-8" ?><tv date="20221020">
<channel id="Channel1.us"><display-name>Channel 1</display-name><url>https://example.com</url></channel>
<programme start="20220306043000 +0000" stop="20220306071000 +0000" channel="Channel1.us"><title lang="fr">Program1</title></programme>
</tv>

@@ -1,6 +0,0 @@
<?xml version="1.0" encoding="UTF-8" ?><tv date="20221020">
<channel id="BBCNews.uk"><display-name>BBC News</display-name><icon src="https://i.imgur.com/rPzH88J.png"/><url>https://sky.com</url></channel>
<channel id="CNN.us"><display-name>CNN</display-name><icon src="https://www.directv.com/images/logos/channels/dark/large/579.png"/><url>https://sky.com</url></channel>
<programme start="20221027120000 +0000" stop="20221027123000 +0000" channel="BBCNews.uk"><title lang="fr">BBC News at One</title><desc lang="fr">Les dernières nouvelles nationales et internationales de la BBC. [S,SL]</desc><icon src="http://epgstatic.sky.com/epgdata/1.0/paimage/46/1/lisa/5.2.2/linear/channel/ca247bc8-6be0-48f9-88d1-865f87f7680e/2011"/></programme>
<programme start="20221027120000 +0000" stop="20221027123000 +0000" channel="CNN.us"><title lang="fr">French title</title></programme>
</tv>

Binary file not shown.

@@ -0,0 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<site site="example.com">
  <channels>
    <channel lang="en" xmltv_id="Channel1.us" site_id="140">Channel 1</channel>
    <channel lang="fr" xmltv_id="Channel1.us" site_id="140">Channel 1</channel>
  </channels>
</site>

tests/__data__/input/sites/epg-grab/epg-grab.config.js (new file): 16 lines

@@ -0,0 +1,16 @@
module.exports = {
  site: 'example.com',
  days: 2,
  url() {
    return `https://example.com`
  },
  parser() {
    return [
      {
        title: 'Program1',
        start: '2022-03-06T04:30:00.000Z',
        stop: '2022-03-06T07:10:00.000Z'
      }
    ]
  }
}

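The `days: 2` in this fixture is what drives the per-channel date expansion in createQueue() above: each channel/language pair gets one queue entry per day starting from CURR_DATE. A minimal sketch of that expansion, assuming `date.getUTC()` returns a UTC dayjs object at the start of the given day, with CURR_DATE=2022-10-20 as in the tests below:

```js
// Sketch of the dates computed in createQueue() for this fixture.
const dayjs = require('dayjs')
const utc = require('dayjs/plugin/utc')
dayjs.extend(utc)

const config = { days: 2 }                           // from epg-grab.config.js
const utcDate = dayjs.utc('2022-10-20').startOf('d') // stand-in for date.getUTC(CURR_DATE)

const dates = Array.from({ length: config.days }, (_, i) => utcDate.add(i, 'd'))

console.log(dates.map(d => d.toJSON()))
// [ '2022-10-20T00:00:00.000Z', '2022-10-21T00:00:00.000Z' ]
```

With two channels (en and fr in the channels fixture above) and two dates, the fixture therefore yields four queue items.
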
tests/commands/epg/grab.test.js (new file): 39 lines

@@ -0,0 +1,39 @@
const { execSync } = require('child_process')
const fs = require('fs-extra')
const path = require('path')

beforeEach(() => {
  fs.emptyDirSync('tests/__data__/output')
})

it('can grab epg', () => {
  const stdout = execSync(
    'BASE_DIR=tests/__data__/input CURR_DATE=2022-10-20 DATA_DIR=tests/__data__/input/tmp/data npm run grab -- --site=epg-grab --output=tests/__data__/output/{lang}/{site}.xml',
    { encoding: 'utf8' }
  )

  expect(content('tests/__data__/output/en/example.com.xml')).toEqual(
    content('tests/__data__/expected/guides/en/example.com.xml')
  )

  expect(content('tests/__data__/output/fr/example.com.xml')).toEqual(
    content('tests/__data__/expected/guides/fr/example.com.xml')
  )
})

it('can grab epg with language filter enabled', () => {
  const stdout = execSync(
    'BASE_DIR=tests/__data__/input CURR_DATE=2022-10-20 DATA_DIR=tests/__data__/input/tmp/data npm run grab -- --site=epg-grab --lang=fr --output=tests/__data__/output/fr/guide.xml',
    { encoding: 'utf8' }
  )

  expect(content('tests/__data__/output/fr/guide.xml')).toEqual(
    content('tests/__data__/expected/guides/fr/example.com.xml')
  )
})

function content(filepath) {
  return fs.readFileSync(path.resolve(filepath), {
    encoding: 'utf8'
  })
}

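The BASE_DIR and CURR_DATE variables in these commands are the environment overrides read at the top of grab.js, which is how the tests point the grabber at the fixtures instead of the real sites/ folder. A minimal sketch of the resulting paths (grab.js additionally passes them through file.resolve()):

```js
// Paths as resolved by grab.js for the first test above.
const BASE_DIR = 'tests/__data__/input' // from the BASE_DIR env override
const site = 'epg-grab'                 // from --site=epg-grab

console.log(`${BASE_DIR}/sites/${site}/${site}.config.js`)
// tests/__data__/input/sites/epg-grab/epg-grab.config.js
console.log(`${BASE_DIR}/sites/${site}/${site}*.channels.xml`)
// tests/__data__/input/sites/epg-grab/epg-grab*.channels.xml
```
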