Mirror of https://github.com/iptv-org/epg.git (synced 2025-05-10 00:50:09 -04:00)
Use simultaneous fetch helper.
- Introduce a fetch helper for the site so the guide can be fetched simultaneously.
- Clean up unused requires.
- Update tv.yandex.ru cookies.

Signed-off-by: Toha <tohenk@yahoo.com>
This commit is contained in:
parent f00d53cb7b
commit 916b5f5234
12 changed files with 273 additions and 429 deletions
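The rotana.net changes below show how the new helper is wired in: requests are collected into a queue of { i, u, params } entries and handed to doFetch, which fetches them concurrently and invokes a callback per response. Here is a minimal sketch of that pattern, assuming the helper exported by scripts/core/fetch behaves as the diff implies; the fetchDetails function, the example URL, and the id field are illustrative and not part of the commit.

// Sketch only: queue-based concurrent fetching with the shared helper.
const doFetch = require('../../scripts/core/fetch')
const debug = require('debug')('site:example')

doFetch
  .setCheckResult(false) // do not fail on unexpected response bodies
  .setDebugger(debug)    // route the helper's logging through this site's debugger

async function fetchDetails(items) {
  // One queue entry per item; `i` is carried through to the callback untouched.
  const queues = items.map(item => ({
    i: item,
    u: `https://example.com/details/${item.id}`, // hypothetical detail URL
    params: { headers: { 'X-Requested-With': 'XMLHttpRequest' } }
  }))
  const results = []
  // All queued requests run concurrently; the callback receives (queue, response).
  await doFetch(queues, (queue, res) => {
    results.push({ item: queue.i, data: res })
  })
  return results
}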
@@ -4,16 +4,19 @@ const dayjs = require('dayjs')
 const timezone = require('dayjs/plugin/timezone')
 const utc = require('dayjs/plugin/utc')
 const customParseFormat = require('dayjs/plugin/customParseFormat')
+const doFetch = require('../../scripts/core/fetch')
 const debug = require('debug')('site:rotana.net')
 
 dayjs.extend(timezone)
 dayjs.extend(utc)
 dayjs.extend(customParseFormat)
 
-const tz = 'Asia/Riyadh'
-const nworker = 25
+doFetch
+  .setCheckResult(false)
+  .setDebugger(debug)
 
-const headers = {
+const tz = 'Asia/Riyadh'
+const defaultHeaders = {
   'User-Agent':
     'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/118.0.0.0 Safari/537.36 OPR/104.0.0.0'
 }
@@ -26,7 +29,7 @@ module.exports = {
     return `https://rotana.net/${channel.lang}/streams?channel=${channel.site_id}&tz=`
   },
   request: {
-    headers,
+    headers: defaultHeaders,
    timeout: 15000
   },
   async parser({ content, headers, channel, date }) {
@@ -37,31 +40,20 @@ module.exports = {
 
     const items = parseItems(content, date)
     if (items.length) {
-      const workers = []
-      const n = Math.min(nworker, items.length)
-      while (workers.length < n) {
-        const worker = () => {
-          if (items.length) {
-            const item = items.shift()
-            parseProgram(item, channel)
-              .then(() => {
-                programs.push(item)
-                worker()
-              })
-          } else {
-            workers.splice(workers.indexOf(worker), 1)
-          }
-        }
-        workers.push(worker)
-        worker()
-      }
-      await new Promise(resolve => {
-        const interval = setInterval(() => {
-          if (workers.length === 0) {
-            clearInterval(interval)
-            resolve()
-          }
-        }, 500)
-      })
+      const queues = []
+      for (const item of items) {
+        const url = `https://rotana.net/${channel.lang}/streams?channel=${channel.site_id}&itemId=${item.program}`
+        const params = {
+          headers: {
+            ...defaultHeaders,
+            'X-Requested-With': 'XMLHttpRequest',
+            cookie: cookies[channel.lang],
+          }
+        }
+        queues.push({ i: item, u: url, params })
+      }
+      await doFetch(queues, (queue, res) => {
+        programs.push(parseProgram(queue.i, res))
+      })
     }
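For readers unfamiliar with the code being deleted above: the old parser capped concurrency at nworker recursive workers and polled every 500 ms until the pool drained. The sketch below restates that removed pattern in a more conventional form (generic names; processItem stands in for the old parseProgram call) to make clear what the doFetch helper now takes care of.

// Generic restatement of the removed bounded-concurrency pattern (sketch only).
async function runWithWorkerPool(items, processItem, limit = 25) {
  const pending = [...items]
  // Spawn up to `limit` workers; each pulls the next item until the list is empty.
  const worker = async () => {
    while (pending.length) {
      const item = pending.shift()
      await processItem(item)
    }
  }
  const workers = Array.from({ length: Math.min(limit, pending.length) }, worker)
  await Promise.all(workers) // no polling interval needed
}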
@@ -83,68 +75,56 @@ module.exports = {
   }
 }
 
-async function parseProgram(item, channel) {
-  if (item.program) {
-    const url = `https://rotana.net/${channel.lang}/streams?channel=${channel.site_id}&itemId=${item.program}`
-    const params = {
-      headers: Object.assign({}, headers, { 'X-Requested-With': 'XMLHttpRequest' }),
-      Cookie: cookies[channel.lang]
-    }
-    debug(`fetching description ${url}`)
-    const result = await axios
-      .get(url, params)
-      .then(response => response.data)
-      .catch(console.error)
-
-    const $ = cheerio.load(result)
-    const details = $('.trending-info .row div > span')
-    if (details.length) {
-      for (const el of details[0].children) {
-        switch (el.constructor.name) {
-          case 'Text':
-            if (item.description === undefined) {
-              const desc = $(el).text().trim()
-              if (desc) {
-                item.description = desc
-              }
-            }
-            break;
-          case 'Element':
-            if (el.name === 'span') {
-              const [k, v] = $(el).text().split(':').map(a => a.trim())
-              switch (k) {
-                case 'Category':
-                case 'التصنيف':
-                  item.category = v;
-                  break;
-                case 'Country':
-                case 'البلد':
-                  item.country = v;
-                  break;
-                case 'Director':
-                case 'المخرج':
-                  item.director = v;
-                  break;
-                case 'Language':
-                case 'اللغة':
-                  item.language = v;
-                  break;
-                case 'Release Year':
-                case 'سنة الإصدار':
-                  item.date = v;
-                  break;
-              }
-            }
-            break;
-        }
-      }
-    }
-    const img = $('.row > div > img')
-    if (img.length) {
-      item.image = img.attr('src')
-    }
-    delete item.program
-  }
-}
+function parseProgram(item, result) {
+  const $ = cheerio.load(result)
+  const details = $('.trending-info .row div > span')
+  if (details.length) {
+    for (const el of details[0].children) {
+      switch (el.constructor.name) {
+        case 'Text':
+          if (item.description === undefined) {
+            const desc = $(el).text().trim()
+            if (desc) {
+              item.description = desc
+            }
+          }
+          break;
+        case 'Element':
+          if (el.name === 'span') {
+            const [k, v] = $(el).text().split(':').map(a => a.trim())
+            switch (k) {
+              case 'Category':
+              case 'التصنيف':
+                item.category = v;
+                break;
+              case 'Country':
+              case 'البلد':
+                item.country = v;
+                break;
+              case 'Director':
+              case 'المخرج':
+                item.director = v;
+                break;
+              case 'Language':
+              case 'اللغة':
+                item.language = v;
+                break;
+              case 'Release Year':
+              case 'سنة الإصدار':
+                item.date = v;
+                break;
+            }
+          }
+          break;
+      }
+    }
+  }
+  const img = $('.row > div > img')
+  if (img.length) {
+    item.image = img.attr('src')
+  }
+  delete item.program
+  return item
+}
 
 function parseItems(content, date) {
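With the refactor above, parseProgram no longer fetches anything itself: it is handed the already-fetched detail HTML by the doFetch callback and mutates and returns the item synchronously. A rough usage sketch under that assumption follows; the HTML fragment is made up, but mirrors the .trending-info markup the selectors expect.

const cheerio = require('cheerio') // already required by the config

// Hypothetical detail markup in the shape the selectors above look for.
const html = `
  <div class="trending-info"><div class="row"><div>
    <span>
      A short plot summary.
      <span>Category: Drama</span>
      <span>Language: Arabic</span>
    </span>
  </div></div></div>`

const item = { title: 'Example Show', program: 123 }
const program = parseProgram(item, html) // parseProgram from the diff above
// Expected, per the switch above: description 'A short plot summary.',
// category 'Drama', language 'Arabic', and the `program` key removed.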
@@ -52,12 +52,11 @@ it('can generate valid arabic url', () => {
 })
 
 it('can parse english response', async () => {
-  let result = await parser({
+  const result = (await parser({
     channel,
     date,
     content: fs.readFileSync(path.join(__dirname, '/__data__/content_en.html'))
-  })
-  result = result.map(a => {
+  })).map(a => {
     a.start = a.start.toJSON()
     a.stop = a.stop.toJSON()
     return a
@@ -76,12 +75,11 @@ it('can parse english response', async () => {
 })
 
 it('can parse arabic response', async () => {
-  let result = await parser({
+  const result = (await parser({
     channel: channelAr,
     date,
     content: fs.readFileSync(path.join(__dirname, '/__data__/content_ar.html'))
-  })
-  result = result.map(a => {
+  })).map(a => {
     a.start = a.start.toJSON()
     a.stop = a.stop.toJSON()
     return a
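The test changes above are a small tidy-up: parser() resolves to an array of program objects whose start/stop expose toJSON(), so the tests now serialize them in a single chained expression instead of reassigning a let binding. If that normalization grows, it could be factored into a helper along these lines (a sketch, not part of the commit):

// Possible shared helper for the tests (assumes start/stop expose toJSON()).
const normalize = programs =>
  programs.map(p => ({ ...p, start: p.start.toJSON(), stop: p.stop.toJSON() }))

// const result = normalize(await parser({ channel, date, content }))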