Use shared concurrent fetch helper.

- Introduce a shared fetch helper (scripts/core/fetch.js) so site scrapers can fetch guide data concurrently.
- Clean up unused requires.
- Update tv.yandex.ru cookies.

Signed-off-by: Toha <tohenk@yahoo.com>
Toha 2024-12-17 10:24:58 +07:00
parent f00d53cb7b
commit 916b5f5234
12 changed files with 273 additions and 429 deletions

scripts/core/fetch.js (new file, 109 lines added)

@@ -0,0 +1,109 @@
const axios = require('axios')
/**
 * A callback invoked when a queued fetch has completed.
 *
 * To check whether the request succeeded, check that res is not undefined.
 *
 * @callback completeCallback
 * @param {string|object} queue The queue entry that has completed
 * @param {string|object} res Response body returned by axios
 * @param {object} headers Response headers returned by axios
 */
/**
* @type {number}
*/
let nworker = 25
/**
* @type {boolean}
*/
let checkResult = true
/**
* @type {any}
*/
let debug
/**
 * Fetch queued URLs concurrently using a pool of workers.
 *
 * @param {Array<string|object>} queues Queue entries: a URL string, or an object with u (url), optional m (method, default 'get') and params
 * @param {completeCallback} cb Callback invoked for each completed entry
 */
async function doFetch(queues, cb) {
let n = Math.min(nworker, queues.length)
const workers = []
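// grow the pool (up to nworker) when more entries remain queued than there are active workers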
const adjustWorker = () => {
if (queues.length > workers.length && workers.length < nworker) {
let nw = Math.min(nworker, queues.length)
if (n < nw) {
n = nw
createWorker()
}
}
}
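// spawn workers until the pool reaches the current target size n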
const createWorker = () => {
while (workers.length < n) {
startWorker()
}
}
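// each worker repeatedly shifts an entry off the shared queue and removes itself once the queue is empty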
const startWorker = () => {
const worker = () => {
if (queues.length) {
const queue = queues.shift()
const done = (res, headers) => {
if ((checkResult && res) || !checkResult) {
cb(queue, res, headers)
adjustWorker()
}
worker()
}
const url = typeof queue === 'string' ? queue : queue.u
const params = typeof queue === 'object' && queue.params ? queue.params : {}
const method = typeof queue === 'object' && queue.m ? queue.m : 'get'
if (typeof debug === 'function') {
debug(`fetch %s with %s`, url, JSON.stringify(params))
}
axios[method](url, params)
.then(response => {
done(response.data, response.headers)
})
.catch(err => {
console.error(`Unable to fetch ${url}: ${err.message}!`)
done()
})
} else {
workers.splice(workers.indexOf(worker), 1)
}
}
workers.push(worker)
worker()
}
createWorker()
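// resolve once every worker has removed itself from the pool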
await new Promise(resolve => {
const interval = setInterval(() => {
if (workers.length === 0) {
clearInterval(interval)
resolve()
}
}, 500)
})
}
module.exports = doFetch
Object.assign(module.exports, {
setMaxWorker(n) {
nworker = n
return module.exports
},
setCheckResult(enabled) {
checkResult = enabled
return module.exports
},
setDebugger(dbg) {
debug = dbg
return module.exports
}
})
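
For reference, a minimal usage sketch of the helper above; the URLs, the extra id field, and the debug namespace are illustrative placeholders, not part of this commit:

const doFetch = require('./scripts/core/fetch')
const debug = require('debug')('example')
// configure the shared helper; each setter returns the module, so calls chain
doFetch
  .setMaxWorker(10)      // cap the pool at 10 concurrent requests (default is 25)
  .setCheckResult(false) // invoke the callback even when a request fails (res is undefined)
  .setDebugger(debug)    // log each request through the supplied debug function
// a queue entry is either a plain URL string or an object: u is the URL,
// m the axios method (default 'get'), params is passed straight to the axios
// call (request config for get, request body for post); any extra keys such
// as id come back untouched with the entry via the completion callback
const queues = [
  'https://example.com/guide.json',
  { u: 'https://example.com/detail?id=42', params: { timeout: 30000 }, id: 42 }
]
async function main() {
  await doFetch(queues, (queue, res, headers) => {
    if (res !== undefined) {
      // handle the response body and headers for this entry
    }
  })
}
main()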


@@ -43,7 +43,6 @@ module.exports = {
},
async channels() {
const axios = require('axios')
const cheerio = require('cheerio')
const result = await axios
.get(
`https://api.firstmedia.com/api/content/tv-guide/list?date=${dayjs().format(


@@ -1,19 +1,23 @@
const _ = require('lodash')
const axios = require('axios')
const cheerio = require('cheerio')
const dayjs = require('dayjs')
const utc = require('dayjs/plugin/utc')
const timezone = require('dayjs/plugin/timezone')
const customParseFormat = require('dayjs/plugin/customParseFormat')
const doFetch = require('../../scripts/core/fetch')
const debug = require('debug')('site:mncvision.id')
dayjs.extend(utc)
dayjs.extend(timezone)
dayjs.extend(customParseFormat)
doFetch
.setCheckResult(false)
.setDebugger(debug)
const languages = { en: 'english', id: 'indonesia' }
const cookies = {}
const timeout = 30000
const nworker = 25
module.exports = {
site: 'mncvision.id',
@@ -55,8 +59,6 @@ module.exports = {
return await parseItems(content, date, cookies[channel.lang])
},
async channels({ lang = 'id' }) {
const axios = require('axios')
const cheerio = require('cheerio')
const result = await axios
.get('https://www.mncvision.id/schedule')
.then(response => response.data)
@@ -117,13 +119,20 @@ async function parseItems(content, date, cookies) {
const $ = cheerio.load(content)
const items = $('tr[valign="top"]').toArray()
if (items.length) {
const workers = []
const n = Math.min(nworker, items.length)
while (workers.length < n) {
const worker = () => {
if (items.length) {
const $item = $(items.shift())
const done = (description = null) => {
const queues = []
for (const item of items) {
const $item = $(item)
const url = $item.find('a').attr('href')
const headers = {
'X-Requested-With': 'XMLHttpRequest',
Cookie: cookies,
}
queues.push({ i: $item, u: url, params: { headers, timeout } })
}
await doFetch(queues, (queue, res) => {
const $item = queue.i
const $page = cheerio.load(res)
const description = $page('.synopsis').text().trim()
const start = parseStart($item, date)
const duration = parseDuration($item)
const stop = start.add(duration, 'm')
@@ -131,28 +140,10 @@ async function parseItems(content, date, cookies) {
title: parseTitle($item),
season: parseSeason($item),
episode: parseEpisode($item),
description,
description: description && description !== '-' ? description : null,
start,
stop
})
worker()
}
loadDescription($item, cookies)
.then(description => done(description))
} else {
workers.splice(workers.indexOf(worker), 1)
}
}
workers.push(worker)
worker()
}
await new Promise(resolve => {
const interval = setInterval(() => {
if (workers.length === 0) {
clearInterval(interval)
resolve()
}
}, 500)
})
}
@@ -168,24 +159,6 @@ function loadLangCookies(channel) {
.catch(error => console.error(error.message))
}
async function loadDescription($item, cookies) {
const url = $item.find('a').attr('href')
if (!url) return null
const content = await axios
.get(url, {
headers: { 'X-Requested-With': 'XMLHttpRequest', Cookie: cookies },
timeout
})
.then(r => r.data)
.catch(error => console.error(error.message))
if (!content) return null
const $page = cheerio.load(content)
const description = $page('.synopsis').text().trim()
return description !== '-' ? description : null
}
function parseCookies(headers) {
const cookies = []
if (Array.isArray(headers['set-cookie'])) {


@@ -3,15 +3,17 @@ const dayjs = require('dayjs')
const utc = require('dayjs/plugin/utc')
const timezone = require('dayjs/plugin/timezone')
const customParseFormat = require('dayjs/plugin/customParseFormat')
const doFetch = require('../../scripts/core/fetch')
const debug = require('debug')('site:mytelly.co.uk')
dayjs.extend(utc)
dayjs.extend(timezone)
dayjs.extend(customParseFormat)
doFetch.setDebugger(debug)
const detailedGuide = true
const tz = 'Europe/London'
const nworker = 25
module.exports = {
site: 'mytelly.co.uk',
@@ -108,7 +110,6 @@ module.exports = {
},
async channels() {
const channels = {}
const axios = require('axios')
const queues = [{ t: 'p', m: 'post', u: 'https://www.mytelly.co.uk/getform' }]
await doFetch(queues, (queue, res) => {
// process form -> provider
@@ -191,67 +192,3 @@ function parseText($item) {
return text
}
async function doFetch(queues, cb) {
const axios = require('axios')
let n = Math.min(nworker, queues.length)
const workers = []
const adjustWorker = () => {
if (queues.length > workers.length && workers.length < nworker) {
let nw = Math.min(nworker, queues.length)
if (n < nw) {
n = nw
createWorker()
}
}
}
const createWorker = () => {
while (workers.length < n) {
startWorker()
}
}
const startWorker = () => {
const worker = () => {
if (queues.length) {
const queue = queues.shift()
const done = res => {
if (res) {
cb(queue, res)
adjustWorker()
}
worker()
}
const url = typeof queue === 'string' ? queue : queue.u
const params = typeof queue === 'object' && queue.params ? queue.params : {}
const method = typeof queue === 'object' && queue.m ? queue.m : 'get'
debug(`fetch %s with %s`, url, JSON.stringify(params))
if (method === 'post') {
axios
.post(url, params)
.then(response => done(response.data))
.catch(console.error)
} else {
axios
.get(url, params)
.then(response => done(response.data))
.catch(console.error)
}
} else {
workers.splice(workers.indexOf(worker), 1)
}
}
workers.push(worker)
worker()
}
createWorker()
await new Promise(resolve => {
const interval = setInterval(() => {
if (workers.length === 0) {
clearInterval(interval)
resolve()
}
}, 500)
})
}


@@ -4,16 +4,19 @@ const dayjs = require('dayjs')
const timezone = require('dayjs/plugin/timezone')
const utc = require('dayjs/plugin/utc')
const customParseFormat = require('dayjs/plugin/customParseFormat')
const doFetch = require('../../scripts/core/fetch')
const debug = require('debug')('site:rotana.net')
dayjs.extend(timezone)
dayjs.extend(utc)
dayjs.extend(customParseFormat)
const tz = 'Asia/Riyadh'
const nworker = 25
doFetch
.setCheckResult(false)
.setDebugger(debug)
const headers = {
const tz = 'Asia/Riyadh'
const defaultHeaders = {
'User-Agent':
'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/118.0.0.0 Safari/537.36 OPR/104.0.0.0'
}
@@ -26,7 +29,7 @@ module.exports = {
return `https://rotana.net/${channel.lang}/streams?channel=${channel.site_id}&tz=`
},
request: {
headers,
headers: defaultHeaders,
timeout: 15000
},
async parser({ content, headers, channel, date }) {
@@ -37,31 +40,20 @@ module.exports = {
const items = parseItems(content, date)
if (items.length) {
const workers = []
const n = Math.min(nworker, items.length)
while (workers.length < n) {
const worker = () => {
if (items.length) {
const item = items.shift()
parseProgram(item, channel)
.then(() => {
programs.push(item)
worker()
})
} else {
workers.splice(workers.indexOf(worker), 1)
const queues = []
for (const item of items) {
const url = `https://rotana.net/${channel.lang}/streams?channel=${channel.site_id}&itemId=${item.program}`
const params = {
headers: {
...defaultHeaders,
'X-Requested-With': 'XMLHttpRequest',
cookie: cookies[channel.lang],
}
}
workers.push(worker)
worker()
queues.push({ i: item, u: url, params })
}
await new Promise(resolve => {
const interval = setInterval(() => {
if (workers.length === 0) {
clearInterval(interval)
resolve()
}
}, 500)
await doFetch(queues, (queue, res) => {
programs.push(parseProgram(queue.i, res))
})
}
@@ -83,19 +75,7 @@ module.exports = {
}
}
async function parseProgram(item, channel) {
if (item.program) {
const url = `https://rotana.net/${channel.lang}/streams?channel=${channel.site_id}&itemId=${item.program}`
const params = {
headers: Object.assign({}, headers, { 'X-Requested-With': 'XMLHttpRequest' }),
Cookie: cookies[channel.lang]
}
debug(`fetching description ${url}`)
const result = await axios
.get(url, params)
.then(response => response.data)
.catch(console.error)
function parseProgram(item, result) {
const $ = cheerio.load(result)
const details = $('.trending-info .row div > span')
if (details.length) {
@@ -144,7 +124,7 @@ async function parseProgram(item, channel) {
item.image = img.attr('src')
}
delete item.program
}
return item
}
function parseItems(content, date) {


@@ -52,12 +52,11 @@ it('can generate valid arabic url', () => {
})
it('can parse english response', async () => {
let result = await parser({
const result = (await parser({
channel,
date,
content: fs.readFileSync(path.join(__dirname, '/__data__/content_en.html'))
})
result = result.map(a => {
})).map(a => {
a.start = a.start.toJSON()
a.stop = a.stop.toJSON()
return a
@@ -76,12 +75,11 @@ it('can parse english response', async () => {
})
it('can parse arabic response', async () => {
let result = await parser({
const result = (await parser({
channel: channelAr,
date,
content: fs.readFileSync(path.join(__dirname, '/__data__/content_ar.html'))
})
result = result.map(a => {
})).map(a => {
a.start = a.start.toJSON()
a.stop = a.stop.toJSON()
return a


@@ -1,11 +1,12 @@
const cheerio = require('cheerio')
const dayjs = require('dayjs')
const utc = require('dayjs/plugin/utc')
const doFetch = require('../../scripts/core/fetch')
const debug = require('debug')('site:sky.com')
dayjs.extend(utc)
const nworker = 10
doFetch.setDebugger(debug)
module.exports = {
site: 'sky.com',
@@ -78,64 +79,3 @@ module.exports = {
return Object.values(channels)
}
}
async function doFetch(queues, cb) {
const axios = require('axios')
let n = Math.min(nworker, queues.length)
const workers = []
const adjustWorker = () => {
if (queues.length > workers.length && workers.length < nworker) {
let nw = Math.min(nworker, queues.length)
if (n < nw) {
n = nw
createWorker()
}
}
}
const createWorker = () => {
while (workers.length < n) {
startWorker()
}
}
const startWorker = () => {
const worker = () => {
if (queues.length) {
const queue = queues.shift()
const done = (res, headers) => {
if (res) {
cb(queue, res, headers)
adjustWorker()
}
worker()
}
const url = typeof queue === 'string' ? queue : queue.u
const params = typeof queue === 'object' && queue.params ? queue.params : {}
const method = typeof queue === 'object' && queue.m ? queue.m : 'get'
if (typeof debug === 'function') {
debug(`fetch %s with %s`, url, JSON.stringify(params))
}
axios[method](url, params)
.then(response => {
done(response.data, response.headers)
})
.catch(err => {
console.error(`Unable to fetch ${url}: ${err.message}!`)
done()
})
} else {
workers.splice(workers.indexOf(worker), 1)
}
}
workers.push(worker)
worker()
}
createWorker()
await new Promise(resolve => {
const interval = setInterval(() => {
if (workers.length === 0) {
clearInterval(interval)
resolve()
}
}, 500)
})
}


@@ -1,14 +1,16 @@
const axios = require('axios')
const cheerio = require('cheerio')
const dayjs = require('dayjs')
const utc = require('dayjs/plugin/utc')
const customParseFormat = require('dayjs/plugin/customParseFormat')
const doFetch = require('../../scripts/core/fetch')
const debug = require('debug')('site:startimestv.com')
dayjs.extend(utc)
dayjs.extend(customParseFormat)
const nworker = 5
doFetch
.setDebugger(debug)
.setMaxWorker(5)
module.exports = {
site: 'startimestv.com',
@@ -110,66 +112,3 @@ function parseText($item) {
return text
}
async function doFetch(queues, cb) {
const axios = require('axios')
let n = Math.min(nworker, queues.length)
const workers = []
const adjustWorker = () => {
if (queues.length > workers.length && workers.length < nworker) {
let nw = Math.min(nworker, queues.length)
if (n < nw) {
n = nw
createWorker()
}
}
}
const createWorker = () => {
while (workers.length < n) {
startWorker()
}
}
const startWorker = () => {
const worker = () => {
if (queues.length) {
const queue = queues.shift()
const done = res => {
if (res) {
cb(queue, res)
adjustWorker()
}
worker()
}
const url = typeof queue === 'string' ? queue : queue.u
const params = typeof queue === 'object' && queue.params ? queue.params : {}
const method = typeof queue === 'object' && queue.m ? queue.m : 'get'
debug(`fetch %s with %s`, url, JSON.stringify(params))
if (method === 'post') {
axios
.post(url, params)
.then(response => done(response.data))
.catch(console.error)
} else {
axios
.get(url, params)
.then(response => done(response.data))
.catch(console.error)
}
} else {
workers.splice(workers.indexOf(worker), 1)
}
}
workers.push(worker)
worker()
}
createWorker()
await new Promise(resolve => {
const interval = setInterval(() => {
if (workers.length === 0) {
clearInterval(interval)
resolve()
}
}, 500)
})
}


@@ -1,18 +1,22 @@
const dayjs = require('dayjs')
const doFetch = require('../../scripts/core/fetch')
const debug = require('debug')('site:tv.yandex.ru')
doFetch
.setDebugger(debug)
.setMaxWorker(10)
// enable to fetch guide descriptions, but it takes longer
const detailedGuide = true
const nworker = 10
// update this data by heading to https://tv.yandex.ru and changing the values accordingly
const cookies = {
i: 'dkim62pClrWWC4CShVQYMpVw1ELNVw4XJdL/lzT4E2r05IgcST1GtCA4ho/UyGgW2AO4qftDfZzGX2OHqCzwY7GUkpM=',
spravka: 'dD0xNzMyNjgzMTEwO2k9MTgwLjI0OC41OS40MDtEPTkyOUM2MkQ0Mzc3OUNBMUFCNzg3NTIyMEQ4OEJBMEVBMzQ2RUNGNUU5Q0FEQUM5RUVDMTFCNjc1ODA2MThEQTQ3RTY3RTUyRUNBRDdBMTY2OTY1MjMzRDU1QjNGMTc1MDA0NDM3MjBGMUNGQTM5RjA3OUQwRjE2MzQxMUNFOTgxQ0E0RjNGRjRGODNCMEM1QjlGNTg5RkI4NDk0NEM2QjNDQUQ5NkJGRTBFNTVCQ0Y1OTEzMEY0O3U9MTczMjY4MzExMDY3MTA1MzIzNDtoPTA1YWJmMTY0ZmI2MGViNTBhMDUwZWUwMThmYWNiYjhm',
i: 'eIUfSP+/mzQWXcH+Cuz8o1vY+D2K8fhBd6Sj0xvbPZeO4l3cY+BvMp8fFIuM17l6UE1Z5+R2a18lP00ex9iYVJ+VT+c=',
spravka: 'dD0xNzM0MjA0NjM4O2k9MTI1LjE2NC4xNDkuMjAwO0Q9QTVCQ0IyOTI5RDQxNkU5NkEyOTcwMTNDMzZGMDAzNjRDNTFFNDM4QkE2Q0IyOTJDRjhCOTZDRDIzODdBQzk2MzRFRDc5QTk2Qjc2OEI1MUY5MTM5M0QzNkY3OEQ2OUY3OTUwNkQ3RjBCOEJGOEJDMjAwMTQ0RDUwRkFCMDNEQzJFMDI2OEI5OTk5OUJBNEFERUYwOEQ1MjUwQTE0QTI3RDU1MEQwM0U0O3U9MTczNDIwNDYzODUyNDYyNzg1NDtoPTIxNTc0ZTc2MDQ1ZjcwMDBkYmY0NTVkM2Q2ZWMyM2Y1',
yandexuid: '1197179041732383499',
yashr: '4682342911732383504',
yuidss: '1197179041732383499',
user_display: 930,
user_display: 824,
}
const headers = {
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/128.0.0.0 Safari/537.36 OPR/114.0.0.0',
@@ -91,32 +95,35 @@ async function fetchSchedules({ date, content = null }) {
let mainApi
// parse content as schedules and add to the queue if more requests are needed
const f = (data, src) => {
const f = (src, res, headers) => {
if (src) {
fetches.push(src)
}
const [q, s] = parseContent(data, date)
if (headers) {
parseCookies(headers)
}
const [q, s] = parseContent(res, date)
if (!mainApi) {
mainApi = true
if (caches.region) {
queues.push(getUrl(date, caches.region))
queues.push(getQueue(getUrl(date, caches.region), src))
}
}
for (const url of q) {
if (fetches.indexOf(url) < 0) {
queues.push(url)
queues.push(getQueue(url, src))
}
}
schedules.push(...s)
}
// is main html already fetched?
if (content) {
f(content)
f(url, content)
} else {
queues.push(url)
queues.push(getQueue(url, 'https://tv.yandex.ru/'))
}
// fetch all queues
await doFetch(queues, url, f)
await doFetch(queues, f)
return schedules
}
@@ -129,17 +136,20 @@ async function fetchPrograms({ schedules, date, channel }) {
queues.push(
...schedule.events
.filter(event => date.isSame(event.start, 'day'))
.map(event => getUrl(null, caches.region, null, event))
.map(event => getQueue(getUrl(null, caches.region, null, event), 'https://tv.yandex.ru/'))
)
})
await doFetch(queues, getUrl(date), content => {
await doFetch(queues, (queue, res, headers) => {
if (headers) {
parseCookies(headers)
}
// is it a program?
if (content?.program) {
if (res?.program) {
let updated = false
schedules.forEach(schedule => {
schedule.events.forEach(event => {
if (event.channelFamilyId === content.channelFamilyId && event.id === content.id) {
Object.assign(event, content)
if (event.channelFamilyId === res.channelFamilyId && event.id === res.id) {
Object.assign(event, res)
updated = true
return true
}
@@ -152,61 +162,6 @@ })
})
}
async function doFetch(queues, referer, cb) {
if (queues.length) {
const workers = []
let n = Math.min(nworker, queues.length)
while (workers.length < n) {
const worker = () => {
if (queues.length) {
const url = queues.shift()
debug(`Fetching ${url}`)
const data = {
'Origin': 'https://tv.yandex.ru',
}
if (referer) {
data['Referer'] = referer
}
if (url.indexOf('api') > 0) {
data['X-Requested-With'] = 'XMLHttpRequest'
}
const headers = getHeaders(data)
doRequest(url, { headers })
.then(res => {
cb(res, url)
worker()
})
} else {
workers.splice(workers.indexOf(worker), 1)
}
}
workers.push(worker)
worker()
}
await new Promise(resolve => {
const interval = setInterval(() => {
if (workers.length === 0) {
clearInterval(interval)
resolve()
}
}, 500)
})
}
}
async function doRequest(url, params) {
const axios = require('axios')
const content = await axios
.get(url, params)
.then(response => {
parseCookies(response.headers)
return response.data
})
.catch(err => console.error(err.message))
return content
}
function parseContent(content, date, checkOnly = false) {
const queues = []
const schedules = []
@@ -308,3 +263,20 @@ function getUrl(date, region = null, page = null, event = null) {
}
return url
}
function getQueue(url, referer) {
const data = {
'Origin': 'https://tv.yandex.ru',
}
if (referer) {
data['Referer'] = referer
}
if (url.indexOf('api') > 0) {
data['X-Requested-With'] = 'XMLHttpRequest'
}
const headers = getHeaders(data)
return {
u: url,
params: { headers }
}
}


@@ -52,12 +52,12 @@ it('can generate valid url', () => {
it('can generate valid request headers', () => {
expect(request.headers).toMatchObject({
Cookie:
'i=dkim62pClrWWC4CShVQYMpVw1ELNVw4XJdL/lzT4E2r05IgcST1GtCA4ho/UyGgW2AO4qftDfZzGX2OHqCzwY7GUkpM=; ' +
'spravka=dD0xNzMyNjgzMTEwO2k9MTgwLjI0OC41OS40MDtEPTkyOUM2MkQ0Mzc3OUNBMUFCNzg3NTIyMEQ4OEJBMEVBMzQ2RUNGNUU5Q0FEQUM5RUVDMTFCNjc1ODA2MThEQTQ3RTY3RTUyRUNBRDdBMTY2OTY1MjMzRDU1QjNGMTc1MDA0NDM3MjBGMUNGQTM5RjA3OUQwRjE2MzQxMUNFOTgxQ0E0RjNGRjRGODNCMEM1QjlGNTg5RkI4NDk0NEM2QjNDQUQ5NkJGRTBFNTVCQ0Y1OTEzMEY0O3U9MTczMjY4MzExMDY3MTA1MzIzNDtoPTA1YWJmMTY0ZmI2MGViNTBhMDUwZWUwMThmYWNiYjhm; ' +
'i=eIUfSP+/mzQWXcH+Cuz8o1vY+D2K8fhBd6Sj0xvbPZeO4l3cY+BvMp8fFIuM17l6UE1Z5+R2a18lP00ex9iYVJ+VT+c=; ' +
'spravka=dD0xNzM0MjA0NjM4O2k9MTI1LjE2NC4xNDkuMjAwO0Q9QTVCQ0IyOTI5RDQxNkU5NkEyOTcwMTNDMzZGMDAzNjRDNTFFNDM4QkE2Q0IyOTJDRjhCOTZDRDIzODdBQzk2MzRFRDc5QTk2Qjc2OEI1MUY5MTM5M0QzNkY3OEQ2OUY3OTUwNkQ3RjBCOEJGOEJDMjAwMTQ0RDUwRkFCMDNEQzJFMDI2OEI5OTk5OUJBNEFERUYwOEQ1MjUwQTE0QTI3RDU1MEQwM0U0O3U9MTczNDIwNDYzODUyNDYyNzg1NDtoPTIxNTc0ZTc2MDQ1ZjcwMDBkYmY0NTVkM2Q2ZWMyM2Y1; ' +
'yandexuid=1197179041732383499; ' +
'yashr=4682342911732383504; ' +
'yuidss=1197179041732383499; ' +
'user_display=930'
'user_display=824'
})
})


@@ -1,16 +1,13 @@
const axios = require('axios')
const dayjs = require('dayjs')
const utc = require('dayjs/plugin/utc')
const timezone = require('dayjs/plugin/timezone')
const customParseFormat = require('dayjs/plugin/customParseFormat')
const cheerio = require('cheerio')
dayjs.extend(utc)
dayjs.extend(timezone)
dayjs.extend(customParseFormat)
const languages = { en: 'ENG', id: 'IND' }
const tz = 'Asia/Jakarta'
module.exports = {
site: 'visionplus.id',
@@ -22,7 +19,7 @@ module.exports = {
'YYYY-MM-DD'
)}T00%3A00%3A00Z&view=cd-events-grid-view`
},
parser({ content, channel, date }) {
parser({ content, channel }) {
const programs = []
const json = JSON.parse(content)
if (Array.isArray(json.evs)) {


@@ -1,9 +1,10 @@
const { parser, url, request } = require('./visionplus.id.config.js')
const { parser, url } = require('./visionplus.id.config.js')
const fs = require('fs')
const path = require('path')
const dayjs = require('dayjs')
const utc = require('dayjs/plugin/utc')
const customParseFormat = require('dayjs/plugin/customParseFormat')
dayjs.extend(customParseFormat)
dayjs.extend(utc)
@@ -17,7 +18,6 @@ const channel = {
}
const channelId = { ...channel, lang: 'id' }
it('can generate valid url', () => {
expect(url({ channel, date })).toBe(
'https://www.visionplus.id/managetv/tvinfo/events/schedule?language=ENG&serviceId=00000000000000000079&start=2024-11-24T00%3A00%3A00Z&end=2024-11-25T00%3A00%3A00Z&view=cd-events-grid-view'
@@ -30,7 +30,7 @@ it('can generate valid url', () => {
it('can parse response', () => {
let content = fs.readFileSync(path.resolve(__dirname, '__data__/content_en.json'))
let results = parser({ content, channel, date })
results = results.map(p => {
.map(p => {
p.start = p.start.toJSON()
p.stop = p.stop.toJSON()
return p
@@ -48,7 +48,7 @@ it('can parse response', () => {
content = fs.readFileSync(path.resolve(__dirname, '__data__/content_id.json'))
results = parser({ content, channel: channelId, date })
results = results.map(p => {
.map(p => {
p.start = p.start.toJSON()
p.stop = p.stop.toJSON()
return p