From 7a9ece8e59d5e605676ce5dfcbf28b88aea12d2e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Isma=C3=ABl=20Moret?= <30985701+BellezaEmporium@users.noreply.github.com> Date: Thu, 19 Dec 2024 15:52:45 +0100 Subject: [PATCH 01/11] Update i24news.tv.channels.xml --- sites/i24news.tv/i24news.tv.channels.xml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/sites/i24news.tv/i24news.tv.channels.xml b/sites/i24news.tv/i24news.tv.channels.xml index b8957434..3f6b355a 100644 --- a/sites/i24news.tv/i24news.tv.channels.xml +++ b/sites/i24news.tv/i24news.tv.channels.xml @@ -1,7 +1,7 @@ - I24NEWS عربى - I24NEWS English (USA) - I24NEWS English (World) - I24NEWS Français + I24NEWS عربى + I24NEWS English (USA) + I24NEWS Français + I24NEWS עברית From ed3ede914c99d12e004bc5484ea459ed7ea88428 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Isma=C3=ABl=20Moret?= <30985701+BellezaEmporium@users.noreply.github.com> Date: Thu, 19 Dec 2024 15:52:56 +0100 Subject: [PATCH 02/11] Update i24news.tv.config.js --- sites/i24news.tv/i24news.tv.config.js | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/sites/i24news.tv/i24news.tv.config.js b/sites/i24news.tv/i24news.tv.config.js index 556bea40..b8539910 100644 --- a/sites/i24news.tv/i24news.tv.config.js +++ b/sites/i24news.tv/i24news.tv.config.js @@ -11,9 +11,7 @@ module.exports = { site: 'i24news.tv', days: 2, url: function ({ channel }) { - const [lang, region] = channel.site_id.split('#') - - return `https://api.i24news.tv/v2/${lang}/schedules/${region}` + return `https://api.i24news.tv/v2/${channel.site_id}/schedules` }, parser: function ({ content, date }) { let programs = [] From 38ab58d03ac2654f08e900f6878f3dc336d37c1d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Isma=C3=ABl=20Moret?= <30985701+BellezaEmporium@users.noreply.github.com> Date: Thu, 19 Dec 2024 15:53:07 +0100 Subject: [PATCH 03/11] Update i24news.tv.test.js --- sites/i24news.tv/i24news.tv.test.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sites/i24news.tv/i24news.tv.test.js b/sites/i24news.tv/i24news.tv.test.js index 54c05f7a..2525cdd8 100644 --- a/sites/i24news.tv/i24news.tv.test.js +++ b/sites/i24news.tv/i24news.tv.test.js @@ -7,12 +7,12 @@ dayjs.extend(utc) const date = dayjs.utc('2022-03-06', 'YYYY-MM-DD').startOf('d') const channel = { - site_id: 'ar#', + site_id: 'ar', xmltv_id: 'I24NewsArabic.il' } it('can generate valid url', () => { - expect(url({ channel })).toBe('https://api.i24news.tv/v2/ar/schedules/world') + expect(url({ channel })).toBe('https://api.i24news.tv/v2/ar/schedules') }) it('can parse response', () => { From 8374cd588584391dbedb9b80f429cdd694ec9c9d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Isma=C3=ABl=20Moret?= <30985701+BellezaEmporium@users.noreply.github.com> Date: Thu, 19 Dec 2024 15:55:23 +0100 Subject: [PATCH 04/11] Update pickx.be.config.js --- sites/pickx.be/pickx.be.config.js | 92 ++++++++++++------------------- 1 file changed, 35 insertions(+), 57 deletions(-) diff --git a/sites/pickx.be/pickx.be.config.js b/sites/pickx.be/pickx.be.config.js index 3fc67e31..38d1ac21 100644 --- a/sites/pickx.be/pickx.be.config.js +++ b/sites/pickx.be/pickx.be.config.js @@ -3,29 +3,22 @@ const dayjs = require('dayjs') const utc = require('dayjs/plugin/utc') let apiVersion -let isApiVersionFetched = false - -;(async () => { - try { - await fetchApiVersion() - isApiVersionFetched = true - } catch (error) { - console.error('Error during script initialization:', error) - } -})() dayjs.extend(utc) module.exports = { site: 'pickx.be', days: 
2, - apiVersion: function () { + setApiVersion: function (version) { + apiVersion = version + }, + getApiVersion: function () { return apiVersion }, - fetchApiVersion: fetchApiVersion, // Export fetchApiVersion + fetchApiVersion: fetchApiVersion, url: async function ({ channel, date }) { - while (!isApiVersionFetched) { - await new Promise(resolve => setTimeout(resolve, 100)) // Wait for 100 milliseconds + if (!apiVersion) { + await fetchApiVersion() } return `https://px-epg.azureedge.net/airings/${apiVersion}/${date.format( 'YYYY-MM-DD' @@ -116,7 +109,7 @@ module.exports = { }` } const result = await axios - .post('https://api.proximusmwc.be/tiams/v2/graphql', query) + .post('https://api.proximusmwc.be/tiams/v3/graphql', query) .then(r => r.data) .catch(console.error) @@ -140,53 +133,38 @@ function fetchApiVersion() { return new Promise(async (resolve, reject) => { try { // you'll never find what happened here :) - // load pickx bundle and get react version hash (regex). + // load the pickx page and get the hash from the MWC configuration. // it's not the best way to get the version but it's the only way to get it. - // find bundle version - const minBundleVer = "https://www.pickx.be/minimal-bundle-version" - const bundleVerData = await axios.get(minBundleVer, { - headers: { - Origin: 'https://www.pickx.be', - Referer: 'https://www.pickx.be/' - } + const hashUrl = 'https://www.pickx.be/nl/televisie/tv-gids'; + + const hashData = await axios.get(hashUrl) + .then(r => { + const re = /"hashes":\["(.*)"\]/ + const match = r.data.match(re) + if (match && match[1]) { + return match[1] + } else { + throw new Error('React app version hash not found') + } + }) + .catch(console.error); + + const versionUrl = `https://www.pickx.be/api/s-${hashData}` + + const response = await axios.get(versionUrl, { + headers: { + Origin: 'https://www.pickx.be', + Referer: 'https://www.pickx.be/' + } }) - if (bundleVerData.status !== 200) { - console.error(`Failed to fetch bundle version. Status: ${bundleVerData.status}`) - reject(`Failed to fetch bundle version. Status: ${bundleVerData.status}`) + if (response.status === 200) { + apiVersion = response.data.version + resolve() } else { - const bundleVer = bundleVerData.data.version - // get the minified JS app bundle - const bundleUrl = `https://components.pickx.be/pxReactPlayer/${bundleVer}/bundle.min.js` - - // now, find the react hash inside the bundle URL - const bundle = await axios.get(bundleUrl).then(r => { - const re = /REACT_APP_VERSION_HASH:"([^"]+)"/ - const match = r.data.match(re) - if (match && match[1]) { - return match[1] - } else { - throw new Error('React app version hash not found') - } - }).catch(console.error) - - const versionUrl = `https://www.pickx.be/api/s-${bundle.replace('/REACT_APP_VERSION_HASH:"', '')}` - - const response = await axios.get(versionUrl, { - headers: { - Origin: 'https://www.pickx.be', - Referer: 'https://www.pickx.be/' - } - }) - - if (response.status === 200) { - apiVersion = response.data.version - resolve() - } else { - console.error(`Failed to fetch API version. Status: ${response.status}`) - reject(`Failed to fetch API version. Status: ${response.status}`) - } + console.error(`Failed to fetch API version. Status: ${response.status}`) + reject(`Failed to fetch API version. 
Status: ${response.status}`) } } catch (error) { console.error('Error during fetchApiVersion:', error) From 782f911d3e5653e06d62fa37778627de903bce17 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Isma=C3=ABl=20Moret?= <30985701+BellezaEmporium@users.noreply.github.com> Date: Thu, 19 Dec 2024 15:55:39 +0100 Subject: [PATCH 05/11] Update pickx.be.test.js --- sites/pickx.be/pickx.be.test.js | 26 ++++++++++++++++++++++---- 1 file changed, 22 insertions(+), 4 deletions(-) diff --git a/sites/pickx.be/pickx.be.test.js b/sites/pickx.be/pickx.be.test.js index eef0a82f..aa89f09c 100644 --- a/sites/pickx.be/pickx.be.test.js +++ b/sites/pickx.be/pickx.be.test.js @@ -1,4 +1,20 @@ -const { parser, url, request, fetchApiVersion, apiVersion } = require('./pickx.be.config.js') +jest.mock('./pickx.be.config.js', () => { + const originalModule = jest.requireActual('./pickx.be.config.js') + return { + ...originalModule, + fetchApiVersion: jest.fn(() => Promise.resolve()) + } +}) + +const { + parser, + url, + request, + fetchApiVersion, + setApiVersion, + getApiVersion +} = require('./pickx.be.config.js') + const fs = require('fs') const path = require('path') const dayjs = require('dayjs') @@ -13,12 +29,14 @@ const channel = { xmltv_id: 'Vedia.be' } +beforeEach(() => { + setApiVersion('mockedApiVersion') +}) + it('can generate valid url', async () => { - await fetchApiVersion() const generatedUrl = await url({ channel, date }) - const resolvedApiVersion = apiVersion() expect(generatedUrl).toBe( - `https://px-epg.azureedge.net/airings/${resolvedApiVersion}/2023-12-13/channel/UID0118?timezone=Europe%2FBrussels` + `https://px-epg.azureedge.net/airings/mockedApiVersion/2023-12-13/channel/UID0118?timezone=Europe%2FBrussels` ) }) From 67e7f13c02283464059c0167607bc3a8734e4919 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Isma=C3=ABl=20Moret?= <30985701+BellezaEmporium@users.noreply.github.com> Date: Thu, 19 Dec 2024 16:03:48 +0100 Subject: [PATCH 06/11] Update iltalehti.fi.test.js --- sites/iltalehti.fi/iltalehti.fi.test.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sites/iltalehti.fi/iltalehti.fi.test.js b/sites/iltalehti.fi/iltalehti.fi.test.js index b117e3d0..954eb422 100644 --- a/sites/iltalehti.fi/iltalehti.fi.test.js +++ b/sites/iltalehti.fi/iltalehti.fi.test.js @@ -9,7 +9,7 @@ dayjs.extend(utc) const date = dayjs.utc('2022-10-29', 'YYYY-MM-DD').startOf('d') const channel = { - site_id: 'default_builtin_channelgroup1#yle-tv1', + site_id: '1#yle-tv1', xmltv_id: 'YleTV1.fi' } From e00efc77b9c88bdb3eb7668dd54a74ee2ee5b07d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Isma=C3=ABl=20Moret?= <30985701+BellezaEmporium@users.noreply.github.com> Date: Thu, 19 Dec 2024 16:04:03 +0100 Subject: [PATCH 07/11] Update iltalehti.fi.config.js From 916b5f52343f5df0bf2b4387da0b501d8f26b9b6 Mon Sep 17 00:00:00 2001 From: Toha Date: Tue, 17 Dec 2024 10:24:58 +0700 Subject: [PATCH 08/11] Use simultaneous fetch helper. - Introduce fetch helper for site to fetch guide simultaneously. - Clean-up unused requires. - Updated tv.yandex.ru cookies. 
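
For reference, a minimal sketch of how a site config consumes the new
helper (the URLs, the `grab` function and the extra `t` key below are
illustrative; the doFetch(queues, cb) call, the { u, m, params } queue
shape and the setMaxWorker/setCheckResult/setDebugger chain are the ones
defined in scripts/core/fetch.js):

    const doFetch = require('../../scripts/core/fetch')
    const debug = require('debug')('site:example')

    doFetch.setMaxWorker(10).setCheckResult(false).setDebugger(debug)

    async function grab() {
      // a queue entry is either a plain URL string or an object; extra
      // keys (here `t`) are handed back untouched as `queue` in the callback
      const queues = [
        'https://example.com/guide.json',
        { t: 'form', u: 'https://example.com/getform', m: 'post', params: {} }
      ]
      await doFetch(queues, (queue, res, headers) => {
        // with setCheckResult(false) the callback also fires for failed
        // requests, in which case res and headers are undefined
        if (res) console.log(typeof queue === 'string' ? queue : queue.t, headers['content-type'])
      })
    }

    grab()
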
Signed-off-by: Toha --- scripts/core/fetch.js | 109 +++++++++++++ sites/firstmedia.com/firstmedia.com.config.js | 1 - sites/mncvision.id/mncvision.id.config.js | 85 ++++------ sites/mytelly.co.uk/mytelly.co.uk.config.js | 69 +-------- sites/rotana.net/rotana.net.config.js | 146 ++++++++---------- sites/rotana.net/rotana.net.test.js | 10 +- sites/sky.com/sky.com.config.js | 64 +------- .../startimestv.com/startimestv.com.config.js | 69 +-------- sites/tv.yandex.ru/tv.yandex.ru.config.js | 114 ++++++-------- sites/tv.yandex.ru/tv.yandex.ru.test.js | 6 +- sites/visionplus.id/visionplus.id.config.js | 5 +- sites/visionplus.id/visionplus.id.test.js | 24 +-- 12 files changed, 273 insertions(+), 429 deletions(-) create mode 100644 scripts/core/fetch.js diff --git a/scripts/core/fetch.js b/scripts/core/fetch.js new file mode 100644 index 00000000..a8e7ae81 --- /dev/null +++ b/scripts/core/fetch.js @@ -0,0 +1,109 @@ +const axios = require('axios') + +/** + * A callback when fetch queue is completely done. + * + * To check for successful operation simply check if res is not undefined. + * + * @callback completeCallback + * @param {string|object} queue Fetched queue which is complete + * @param {string|object} res Response content returned by axios + * @param {object} headers Response headers returned by axios + */ + +/** + * @type {number} + */ +let nworker = 25 + +/** + * @type {boolean} + */ +let checkResult = true + +/** + * @type {any} + */ +let debug + +/** + * Queued url fetch. + * + * @param {array} queues The queues + * @param {completeCallback} cb Queue completion callback + */ +async function doFetch(queues, cb) { + let n = Math.min(nworker, queues.length) + const workers = [] + const adjustWorker = () => { + if (queues.length > workers.length && workers.length < nworker) { + let nw = Math.min(nworker, queues.length) + if (n < nw) { + n = nw + createWorker() + } + } + } + const createWorker = () => { + while (workers.length < n) { + startWorker() + } + } + const startWorker = () => { + const worker = () => { + if (queues.length) { + const queue = queues.shift() + const done = (res, headers) => { + if ((checkResult && res) || !checkResult) { + cb(queue, res, headers) + adjustWorker() + } + worker() + } + const url = typeof queue === 'string' ? queue : queue.u + const params = typeof queue === 'object' && queue.params ? queue.params : {} + const method = typeof queue === 'object' && queue.m ? 
queue.m : 'get' + if (typeof debug === 'function') { + debug(`fetch %s with %s`, url, JSON.stringify(params)) + } + axios[method](url, params) + .then(response => { + done(response.data, response.headers) + }) + .catch(err => { + console.error(`Unable to fetch ${url}: ${err.message}!`) + done() + }) + } else { + workers.splice(workers.indexOf(worker), 1) + } + } + workers.push(worker) + worker() + } + createWorker() + await new Promise(resolve => { + const interval = setInterval(() => { + if (workers.length === 0) { + clearInterval(interval) + resolve() + } + }, 500) + }) +} + +module.exports = doFetch +Object.assign(module.exports, { + setMaxWorker(n) { + nworker = n + return module.exports + }, + setCheckResult(enabled) { + checkResult = enabled + return module.exports + }, + setDebugger(dbg) { + debug = dbg + return module.exports + } +}) \ No newline at end of file diff --git a/sites/firstmedia.com/firstmedia.com.config.js b/sites/firstmedia.com/firstmedia.com.config.js index 9ca29486..7c6865fc 100644 --- a/sites/firstmedia.com/firstmedia.com.config.js +++ b/sites/firstmedia.com/firstmedia.com.config.js @@ -43,7 +43,6 @@ module.exports = { }, async channels() { const axios = require('axios') - const cheerio = require('cheerio') const result = await axios .get( `https://api.firstmedia.com/api/content/tv-guide/list?date=${dayjs().format( diff --git a/sites/mncvision.id/mncvision.id.config.js b/sites/mncvision.id/mncvision.id.config.js index f2bf93bc..46994b9a 100644 --- a/sites/mncvision.id/mncvision.id.config.js +++ b/sites/mncvision.id/mncvision.id.config.js @@ -1,19 +1,23 @@ -const _ = require('lodash') const axios = require('axios') const cheerio = require('cheerio') const dayjs = require('dayjs') const utc = require('dayjs/plugin/utc') const timezone = require('dayjs/plugin/timezone') const customParseFormat = require('dayjs/plugin/customParseFormat') +const doFetch = require('../../scripts/core/fetch') +const debug = require('debug')('site:mncvision.id') dayjs.extend(utc) dayjs.extend(timezone) dayjs.extend(customParseFormat) +doFetch + .setCheckResult(false) + .setDebugger(debug) + const languages = { en: 'english', id: 'indonesia' } const cookies = {} const timeout = 30000 -const nworker = 25 module.exports = { site: 'mncvision.id', @@ -55,8 +59,6 @@ module.exports = { return await parseItems(content, date, cookies[channel.lang]) }, async channels({ lang = 'id' }) { - const axios = require('axios') - const cheerio = require('cheerio') const result = await axios .get('https://www.mncvision.id/schedule') .then(response => response.data) @@ -117,42 +119,31 @@ async function parseItems(content, date, cookies) { const $ = cheerio.load(content) const items = $('tr[valign="top"]').toArray() if (items.length) { - const workers = [] - const n = Math.min(nworker, items.length) - while (workers.length < n) { - const worker = () => { - if (items.length) { - const $item = $(items.shift()) - const done = (description = null) => { - const start = parseStart($item, date) - const duration = parseDuration($item) - const stop = start.add(duration, 'm') - programs.push({ - title: parseTitle($item), - season: parseSeason($item), - episode: parseEpisode($item), - description, - start, - stop - }) - worker() - } - loadDescription($item, cookies) - .then(description => done(description)) - } else { - workers.splice(workers.indexOf(worker), 1) - } + const queues = [] + for (const item of items) { + const $item = $(item) + const url = $item.find('a').attr('href') + const headers = { + 'X-Requested-With': 
'XMLHttpRequest', + Cookie: cookies, } - workers.push(worker) - worker() + queues.push({ i: $item, u: url, params: { headers, timeout } }) } - await new Promise(resolve => { - const interval = setInterval(() => { - if (workers.length === 0) { - clearInterval(interval) - resolve() - } - }, 500) + await doFetch(queues, (queue, res) => { + const $item = queue.i + const $page = cheerio.load(res) + const description = $page('.synopsis').text().trim() + const start = parseStart($item, date) + const duration = parseDuration($item) + const stop = start.add(duration, 'm') + programs.push({ + title: parseTitle($item), + season: parseSeason($item), + episode: parseEpisode($item), + description: description && description !== '-' ? description : null, + start, + stop + }) }) } @@ -168,24 +159,6 @@ function loadLangCookies(channel) { .catch(error => console.error(error.message)) } -async function loadDescription($item, cookies) { - const url = $item.find('a').attr('href') - if (!url) return null - const content = await axios - .get(url, { - headers: { 'X-Requested-With': 'XMLHttpRequest', Cookie: cookies }, - timeout - }) - .then(r => r.data) - .catch(error => console.error(error.message)) - if (!content) return null - - const $page = cheerio.load(content) - const description = $page('.synopsis').text().trim() - - return description !== '-' ? description : null -} - function parseCookies(headers) { const cookies = [] if (Array.isArray(headers['set-cookie'])) { diff --git a/sites/mytelly.co.uk/mytelly.co.uk.config.js b/sites/mytelly.co.uk/mytelly.co.uk.config.js index f08be0bd..0a42f01e 100644 --- a/sites/mytelly.co.uk/mytelly.co.uk.config.js +++ b/sites/mytelly.co.uk/mytelly.co.uk.config.js @@ -3,15 +3,17 @@ const dayjs = require('dayjs') const utc = require('dayjs/plugin/utc') const timezone = require('dayjs/plugin/timezone') const customParseFormat = require('dayjs/plugin/customParseFormat') +const doFetch = require('../../scripts/core/fetch') const debug = require('debug')('site:mytelly.co.uk') dayjs.extend(utc) dayjs.extend(timezone) dayjs.extend(customParseFormat) +doFetch.setDebugger(debug) + const detailedGuide = true const tz = 'Europe/London' -const nworker = 25 module.exports = { site: 'mytelly.co.uk', @@ -108,7 +110,6 @@ module.exports = { }, async channels() { const channels = {} - const axios = require('axios') const queues = [{ t: 'p', m: 'post', u: 'https://www.mytelly.co.uk/getform' }] await doFetch(queues, (queue, res) => { // process form -> provider @@ -191,67 +192,3 @@ function parseText($item) { return text } - -async function doFetch(queues, cb) { - const axios = require('axios') - - let n = Math.min(nworker, queues.length) - const workers = [] - const adjustWorker = () => { - if (queues.length > workers.length && workers.length < nworker) { - let nw = Math.min(nworker, queues.length) - if (n < nw) { - n = nw - createWorker() - } - } - } - const createWorker = () => { - while (workers.length < n) { - startWorker() - } - } - const startWorker = () => { - const worker = () => { - if (queues.length) { - const queue = queues.shift() - const done = res => { - if (res) { - cb(queue, res) - adjustWorker() - } - worker() - } - const url = typeof queue === 'string' ? queue : queue.u - const params = typeof queue === 'object' && queue.params ? queue.params : {} - const method = typeof queue === 'object' && queue.m ? 
queue.m : 'get' - debug(`fetch %s with %s`, url, JSON.stringify(params)) - if (method === 'post') { - axios - .post(url, params) - .then(response => done(response.data)) - .catch(console.error) - } else { - axios - .get(url, params) - .then(response => done(response.data)) - .catch(console.error) - } - } else { - workers.splice(workers.indexOf(worker), 1) - } - } - workers.push(worker) - worker() - } - createWorker() - await new Promise(resolve => { - const interval = setInterval(() => { - if (workers.length === 0) { - clearInterval(interval) - resolve() - } - }, 500) - }) -} - diff --git a/sites/rotana.net/rotana.net.config.js b/sites/rotana.net/rotana.net.config.js index 50da4de9..6237d756 100644 --- a/sites/rotana.net/rotana.net.config.js +++ b/sites/rotana.net/rotana.net.config.js @@ -4,16 +4,19 @@ const dayjs = require('dayjs') const timezone = require('dayjs/plugin/timezone') const utc = require('dayjs/plugin/utc') const customParseFormat = require('dayjs/plugin/customParseFormat') +const doFetch = require('../../scripts/core/fetch') const debug = require('debug')('site:rotana.net') dayjs.extend(timezone) dayjs.extend(utc) dayjs.extend(customParseFormat) -const tz = 'Asia/Riyadh' -const nworker = 25 +doFetch + .setCheckResult(false) + .setDebugger(debug) -const headers = { +const tz = 'Asia/Riyadh' +const defaultHeaders = { 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/118.0.0.0 Safari/537.36 OPR/104.0.0.0' } @@ -26,7 +29,7 @@ module.exports = { return `https://rotana.net/${channel.lang}/streams?channel=${channel.site_id}&tz=` }, request: { - headers, + headers: defaultHeaders, timeout: 15000 }, async parser({ content, headers, channel, date }) { @@ -37,31 +40,20 @@ module.exports = { const items = parseItems(content, date) if (items.length) { - const workers = [] - const n = Math.min(nworker, items.length) - while (workers.length < n) { - const worker = () => { - if (items.length) { - const item = items.shift() - parseProgram(item, channel) - .then(() => { - programs.push(item) - worker() - }) - } else { - workers.splice(workers.indexOf(worker), 1) + const queues = [] + for (const item of items) { + const url = `https://rotana.net/${channel.lang}/streams?channel=${channel.site_id}&itemId=${item.program}` + const params = { + headers: { + ...defaultHeaders, + 'X-Requested-With': 'XMLHttpRequest', + cookie: cookies[channel.lang], } } - workers.push(worker) - worker() + queues.push({ i: item, u: url, params }) } - await new Promise(resolve => { - const interval = setInterval(() => { - if (workers.length === 0) { - clearInterval(interval) - resolve() - } - }, 500) + await doFetch(queues, (queue, res) => { + programs.push(parseProgram(queue.i, res)) }) } @@ -83,68 +75,56 @@ module.exports = { } } -async function parseProgram(item, channel) { - if (item.program) { - const url = `https://rotana.net/${channel.lang}/streams?channel=${channel.site_id}&itemId=${item.program}` - const params = { - headers: Object.assign({}, headers, { 'X-Requested-With': 'XMLHttpRequest' }), - Cookie: cookies[channel.lang] - } - debug(`fetching description ${url}`) - const result = await axios - .get(url, params) - .then(response => response.data) - .catch(console.error) - - const $ = cheerio.load(result) - const details = $('.trending-info .row div > span') - if (details.length) { - for (const el of details[0].children) { - switch (el.constructor.name) { - case 'Text': - if (item.description === undefined) { - const desc = $(el).text().trim() - if (desc) 
{ - item.description = desc - } +function parseProgram(item, result) { + const $ = cheerio.load(result) + const details = $('.trending-info .row div > span') + if (details.length) { + for (const el of details[0].children) { + switch (el.constructor.name) { + case 'Text': + if (item.description === undefined) { + const desc = $(el).text().trim() + if (desc) { + item.description = desc } - break; - case 'Element': - if (el.name === 'span') { - const [k, v] = $(el).text().split(':').map(a => a.trim()) - switch (k) { - case 'Category': - case 'التصنيف': - item.category = v; - break; - case 'Country': - case 'البلد': - item.country = v; - break; - case 'Director': - case 'المخرج': - item.director = v; - break; - case 'Language': - case 'اللغة': - item.language = v; - break; - case 'Release Year': - case 'سنة الإصدار': - item.date = v; - break; - } + } + break; + case 'Element': + if (el.name === 'span') { + const [k, v] = $(el).text().split(':').map(a => a.trim()) + switch (k) { + case 'Category': + case 'التصنيف': + item.category = v; + break; + case 'Country': + case 'البلد': + item.country = v; + break; + case 'Director': + case 'المخرج': + item.director = v; + break; + case 'Language': + case 'اللغة': + item.language = v; + break; + case 'Release Year': + case 'سنة الإصدار': + item.date = v; + break; } - break; - } + } + break; } } - const img = $('.row > div > img') - if (img.length) { - item.image = img.attr('src') - } - delete item.program } + const img = $('.row > div > img') + if (img.length) { + item.image = img.attr('src') + } + delete item.program + return item } function parseItems(content, date) { diff --git a/sites/rotana.net/rotana.net.test.js b/sites/rotana.net/rotana.net.test.js index 1718f715..17803fdd 100644 --- a/sites/rotana.net/rotana.net.test.js +++ b/sites/rotana.net/rotana.net.test.js @@ -52,12 +52,11 @@ it('can generate valid arabic url', () => { }) it('can parse english response', async () => { - let result = await parser({ + const result = (await parser({ channel, date, content: fs.readFileSync(path.join(__dirname, '/__data__/content_en.html')) - }) - result = result.map(a => { + })).map(a => { a.start = a.start.toJSON() a.stop = a.stop.toJSON() return a @@ -76,12 +75,11 @@ it('can parse english response', async () => { }) it('can parse arabic response', async () => { - let result = await parser({ + const result = (await parser({ channel: channelAr, date, content: fs.readFileSync(path.join(__dirname, '/__data__/content_ar.html')) - }) - result = result.map(a => { + })).map(a => { a.start = a.start.toJSON() a.stop = a.stop.toJSON() return a diff --git a/sites/sky.com/sky.com.config.js b/sites/sky.com/sky.com.config.js index 1a0fcfcc..cf24b538 100644 --- a/sites/sky.com/sky.com.config.js +++ b/sites/sky.com/sky.com.config.js @@ -1,11 +1,12 @@ const cheerio = require('cheerio') const dayjs = require('dayjs') const utc = require('dayjs/plugin/utc') +const doFetch = require('../../scripts/core/fetch') const debug = require('debug')('site:sky.com') dayjs.extend(utc) -const nworker = 10 +doFetch.setDebugger(debug) module.exports = { site: 'sky.com', @@ -78,64 +79,3 @@ module.exports = { return Object.values(channels) } } - -async function doFetch(queues, cb) { - const axios = require('axios') - let n = Math.min(nworker, queues.length) - const workers = [] - const adjustWorker = () => { - if (queues.length > workers.length && workers.length < nworker) { - let nw = Math.min(nworker, queues.length) - if (n < nw) { - n = nw - createWorker() - } - } - } - const createWorker = 
() => { - while (workers.length < n) { - startWorker() - } - } - const startWorker = () => { - const worker = () => { - if (queues.length) { - const queue = queues.shift() - const done = (res, headers) => { - if (res) { - cb(queue, res, headers) - adjustWorker() - } - worker() - } - const url = typeof queue === 'string' ? queue : queue.u - const params = typeof queue === 'object' && queue.params ? queue.params : {} - const method = typeof queue === 'object' && queue.m ? queue.m : 'get' - if (typeof debug === 'function') { - debug(`fetch %s with %s`, url, JSON.stringify(params)) - } - axios[method](url, params) - .then(response => { - done(response.data, response.headers) - }) - .catch(err => { - console.error(`Unable to fetch ${url}: ${err.message}!`) - done() - }) - } else { - workers.splice(workers.indexOf(worker), 1) - } - } - workers.push(worker) - worker() - } - createWorker() - await new Promise(resolve => { - const interval = setInterval(() => { - if (workers.length === 0) { - clearInterval(interval) - resolve() - } - }, 500) - }) -} diff --git a/sites/startimestv.com/startimestv.com.config.js b/sites/startimestv.com/startimestv.com.config.js index b97e8738..e84f433c 100644 --- a/sites/startimestv.com/startimestv.com.config.js +++ b/sites/startimestv.com/startimestv.com.config.js @@ -1,14 +1,16 @@ -const axios = require('axios') const cheerio = require('cheerio') const dayjs = require('dayjs') const utc = require('dayjs/plugin/utc') const customParseFormat = require('dayjs/plugin/customParseFormat') +const doFetch = require('../../scripts/core/fetch') const debug = require('debug')('site:startimestv.com') dayjs.extend(utc) dayjs.extend(customParseFormat) -const nworker = 5 +doFetch + .setDebugger(debug) + .setMaxWorker(5) module.exports = { site: 'startimestv.com', @@ -110,66 +112,3 @@ function parseText($item) { return text } - -async function doFetch(queues, cb) { - const axios = require('axios') - - let n = Math.min(nworker, queues.length) - const workers = [] - const adjustWorker = () => { - if (queues.length > workers.length && workers.length < nworker) { - let nw = Math.min(nworker, queues.length) - if (n < nw) { - n = nw - createWorker() - } - } - } - const createWorker = () => { - while (workers.length < n) { - startWorker() - } - } - const startWorker = () => { - const worker = () => { - if (queues.length) { - const queue = queues.shift() - const done = res => { - if (res) { - cb(queue, res) - adjustWorker() - } - worker() - } - const url = typeof queue === 'string' ? queue : queue.u - const params = typeof queue === 'object' && queue.params ? queue.params : {} - const method = typeof queue === 'object' && queue.m ? 
queue.m : 'get' - debug(`fetch %s with %s`, url, JSON.stringify(params)) - if (method === 'post') { - axios - .post(url, params) - .then(response => done(response.data)) - .catch(console.error) - } else { - axios - .get(url, params) - .then(response => done(response.data)) - .catch(console.error) - } - } else { - workers.splice(workers.indexOf(worker), 1) - } - } - workers.push(worker) - worker() - } - createWorker() - await new Promise(resolve => { - const interval = setInterval(() => { - if (workers.length === 0) { - clearInterval(interval) - resolve() - } - }, 500) - }) -} diff --git a/sites/tv.yandex.ru/tv.yandex.ru.config.js b/sites/tv.yandex.ru/tv.yandex.ru.config.js index 22000db5..99c908f8 100644 --- a/sites/tv.yandex.ru/tv.yandex.ru.config.js +++ b/sites/tv.yandex.ru/tv.yandex.ru.config.js @@ -1,18 +1,22 @@ const dayjs = require('dayjs') +const doFetch = require('../../scripts/core/fetch') const debug = require('debug')('site:tv.yandex.ru') +doFetch + .setDebugger(debug) + .setMaxWorker(10) + // enable to fetch guide description but its take a longer time const detailedGuide = true -const nworker = 10 // update this data by heading to https://tv.yandex.ru and change the values accordingly const cookies = { - i: 'dkim62pClrWWC4CShVQYMpVw1ELNVw4XJdL/lzT4E2r05IgcST1GtCA4ho/UyGgW2AO4qftDfZzGX2OHqCzwY7GUkpM=', - spravka: 'dD0xNzMyNjgzMTEwO2k9MTgwLjI0OC41OS40MDtEPTkyOUM2MkQ0Mzc3OUNBMUFCNzg3NTIyMEQ4OEJBMEVBMzQ2RUNGNUU5Q0FEQUM5RUVDMTFCNjc1ODA2MThEQTQ3RTY3RTUyRUNBRDdBMTY2OTY1MjMzRDU1QjNGMTc1MDA0NDM3MjBGMUNGQTM5RjA3OUQwRjE2MzQxMUNFOTgxQ0E0RjNGRjRGODNCMEM1QjlGNTg5RkI4NDk0NEM2QjNDQUQ5NkJGRTBFNTVCQ0Y1OTEzMEY0O3U9MTczMjY4MzExMDY3MTA1MzIzNDtoPTA1YWJmMTY0ZmI2MGViNTBhMDUwZWUwMThmYWNiYjhm', + i: 'eIUfSP+/mzQWXcH+Cuz8o1vY+D2K8fhBd6Sj0xvbPZeO4l3cY+BvMp8fFIuM17l6UE1Z5+R2a18lP00ex9iYVJ+VT+c=', + spravka: 'dD0xNzM0MjA0NjM4O2k9MTI1LjE2NC4xNDkuMjAwO0Q9QTVCQ0IyOTI5RDQxNkU5NkEyOTcwMTNDMzZGMDAzNjRDNTFFNDM4QkE2Q0IyOTJDRjhCOTZDRDIzODdBQzk2MzRFRDc5QTk2Qjc2OEI1MUY5MTM5M0QzNkY3OEQ2OUY3OTUwNkQ3RjBCOEJGOEJDMjAwMTQ0RDUwRkFCMDNEQzJFMDI2OEI5OTk5OUJBNEFERUYwOEQ1MjUwQTE0QTI3RDU1MEQwM0U0O3U9MTczNDIwNDYzODUyNDYyNzg1NDtoPTIxNTc0ZTc2MDQ1ZjcwMDBkYmY0NTVkM2Q2ZWMyM2Y1', yandexuid: '1197179041732383499', yashr: '4682342911732383504', yuidss: '1197179041732383499', - user_display: 930, + user_display: 824, } const headers = { 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/128.0.0.0 Safari/537.36 OPR/114.0.0.0', @@ -91,32 +95,35 @@ async function fetchSchedules({ date, content = null }) { let mainApi // parse content as schedules and add to queue if more requests is needed - const f = (data, src) => { + const f = (src, res, headers) => { if (src) { fetches.push(src) } - const [q, s] = parseContent(data, date) + if (headers) { + parseCookies(headers) + } + const [q, s] = parseContent(res, date) if (!mainApi) { mainApi = true if (caches.region) { - queues.push(getUrl(date, caches.region)) + queues.push(getQueue(getUrl(date, caches.region), src)) } } for (const url of q) { if (fetches.indexOf(url) < 0) { - queues.push(url) + queues.push(getQueue(url, src)) } } schedules.push(...s) } // is main html already fetched? 
if (content) { - f(content) + f(url, content) } else { - queues.push(url) + queues.push(getQueue(url, 'https://tv.yandex.ru/')) } // fetch all queues - await doFetch(queues, url, f) + await doFetch(queues, f) return schedules } @@ -129,17 +136,20 @@ async function fetchPrograms({ schedules, date, channel }) { queues.push( ...schedule.events .filter(event => date.isSame(event.start, 'day')) - .map(event => getUrl(null, caches.region, null, event)) + .map(event => getQueue(getUrl(null, caches.region, null, event), 'https://tv.yandex.ru/')) ) }) - await doFetch(queues, getUrl(date), content => { + await doFetch(queues, (queue, res, headers) => { + if (headers) { + parseCookies(headers) + } // is it a program? - if (content?.program) { + if (res?.program) { let updated = false schedules.forEach(schedule => { schedule.events.forEach(event => { - if (event.channelFamilyId === content.channelFamilyId && event.id === content.id) { - Object.assign(event, content) + if (event.channelFamilyId === res.channelFamilyId && event.id === res.id) { + Object.assign(event, res) updated = true return true } @@ -152,61 +162,6 @@ async function fetchPrograms({ schedules, date, channel }) { }) } -async function doFetch(queues, referer, cb) { - if (queues.length) { - const workers = [] - let n = Math.min(nworker, queues.length) - while (workers.length < n) { - const worker = () => { - if (queues.length) { - const url = queues.shift() - debug(`Fetching ${url}`) - const data = { - 'Origin': 'https://tv.yandex.ru', - } - if (referer) { - data['Referer'] = referer - } - if (url.indexOf('api') > 0) { - data['X-Requested-With'] = 'XMLHttpRequest' - } - const headers = getHeaders(data) - doRequest(url, { headers }) - .then(res => { - cb(res, url) - worker() - }) - } else { - workers.splice(workers.indexOf(worker), 1) - } - } - workers.push(worker) - worker() - } - await new Promise(resolve => { - const interval = setInterval(() => { - if (workers.length === 0) { - clearInterval(interval) - resolve() - } - }, 500) - }) - } -} - -async function doRequest(url, params) { - const axios = require('axios') - const content = await axios - .get(url, params) - .then(response => { - parseCookies(response.headers) - return response.data - }) - .catch(err => console.error(err.message)) - - return content -} - function parseContent(content, date, checkOnly = false) { const queues = [] const schedules = [] @@ -307,4 +262,21 @@ function getUrl(date, region = null, page = null, event = null) { url += `${url.indexOf('?') < 0 ? '?' 
: '&'}limit=${page.limit}` } return url +} + +function getQueue(url, referer) { + const data = { + 'Origin': 'https://tv.yandex.ru', + } + if (referer) { + data['Referer'] = referer + } + if (url.indexOf('api') > 0) { + data['X-Requested-With'] = 'XMLHttpRequest' + } + const headers = getHeaders(data) + return { + u: url, + params: { headers } + } } \ No newline at end of file diff --git a/sites/tv.yandex.ru/tv.yandex.ru.test.js b/sites/tv.yandex.ru/tv.yandex.ru.test.js index 42f143a1..ea72df5b 100644 --- a/sites/tv.yandex.ru/tv.yandex.ru.test.js +++ b/sites/tv.yandex.ru/tv.yandex.ru.test.js @@ -52,12 +52,12 @@ it('can generate valid url', () => { it('can generate valid request headers', () => { expect(request.headers).toMatchObject({ Cookie: - 'i=dkim62pClrWWC4CShVQYMpVw1ELNVw4XJdL/lzT4E2r05IgcST1GtCA4ho/UyGgW2AO4qftDfZzGX2OHqCzwY7GUkpM=; ' + - 'spravka=dD0xNzMyNjgzMTEwO2k9MTgwLjI0OC41OS40MDtEPTkyOUM2MkQ0Mzc3OUNBMUFCNzg3NTIyMEQ4OEJBMEVBMzQ2RUNGNUU5Q0FEQUM5RUVDMTFCNjc1ODA2MThEQTQ3RTY3RTUyRUNBRDdBMTY2OTY1MjMzRDU1QjNGMTc1MDA0NDM3MjBGMUNGQTM5RjA3OUQwRjE2MzQxMUNFOTgxQ0E0RjNGRjRGODNCMEM1QjlGNTg5RkI4NDk0NEM2QjNDQUQ5NkJGRTBFNTVCQ0Y1OTEzMEY0O3U9MTczMjY4MzExMDY3MTA1MzIzNDtoPTA1YWJmMTY0ZmI2MGViNTBhMDUwZWUwMThmYWNiYjhm; ' + + 'i=eIUfSP+/mzQWXcH+Cuz8o1vY+D2K8fhBd6Sj0xvbPZeO4l3cY+BvMp8fFIuM17l6UE1Z5+R2a18lP00ex9iYVJ+VT+c=; ' + + 'spravka=dD0xNzM0MjA0NjM4O2k9MTI1LjE2NC4xNDkuMjAwO0Q9QTVCQ0IyOTI5RDQxNkU5NkEyOTcwMTNDMzZGMDAzNjRDNTFFNDM4QkE2Q0IyOTJDRjhCOTZDRDIzODdBQzk2MzRFRDc5QTk2Qjc2OEI1MUY5MTM5M0QzNkY3OEQ2OUY3OTUwNkQ3RjBCOEJGOEJDMjAwMTQ0RDUwRkFCMDNEQzJFMDI2OEI5OTk5OUJBNEFERUYwOEQ1MjUwQTE0QTI3RDU1MEQwM0U0O3U9MTczNDIwNDYzODUyNDYyNzg1NDtoPTIxNTc0ZTc2MDQ1ZjcwMDBkYmY0NTVkM2Q2ZWMyM2Y1; ' + 'yandexuid=1197179041732383499; ' + 'yashr=4682342911732383504; ' + 'yuidss=1197179041732383499; ' + - 'user_display=930' + 'user_display=824' }) }) diff --git a/sites/visionplus.id/visionplus.id.config.js b/sites/visionplus.id/visionplus.id.config.js index 30a779e0..f2f5ca49 100644 --- a/sites/visionplus.id/visionplus.id.config.js +++ b/sites/visionplus.id/visionplus.id.config.js @@ -1,16 +1,13 @@ -const axios = require('axios') const dayjs = require('dayjs') const utc = require('dayjs/plugin/utc') const timezone = require('dayjs/plugin/timezone') const customParseFormat = require('dayjs/plugin/customParseFormat') -const cheerio = require('cheerio') dayjs.extend(utc) dayjs.extend(timezone) dayjs.extend(customParseFormat) const languages = { en: 'ENG', id: 'IND' } -const tz = 'Asia/Jakarta' module.exports = { site: 'visionplus.id', @@ -22,7 +19,7 @@ module.exports = { 'YYYY-MM-DD' )}T00%3A00%3A00Z&view=cd-events-grid-view` }, - parser({ content, channel, date }) { + parser({ content, channel }) { const programs = [] const json = JSON.parse(content) if (Array.isArray(json.evs)) { diff --git a/sites/visionplus.id/visionplus.id.test.js b/sites/visionplus.id/visionplus.id.test.js index 1772e7e0..99b1398c 100644 --- a/sites/visionplus.id/visionplus.id.test.js +++ b/sites/visionplus.id/visionplus.id.test.js @@ -1,9 +1,10 @@ -const { parser, url, request } = require('./visionplus.id.config.js') +const { parser, url } = require('./visionplus.id.config.js') const fs = require('fs') const path = require('path') const dayjs = require('dayjs') const utc = require('dayjs/plugin/utc') const customParseFormat = require('dayjs/plugin/customParseFormat') + dayjs.extend(customParseFormat) dayjs.extend(utc) @@ -17,7 +18,6 @@ const channel = { } const channelId = { ...channel, lang: 'id' } - it('can generate valid url', () => { expect(url({ 
channel, date })).toBe( 'https://www.visionplus.id/managetv/tvinfo/events/schedule?language=ENG&serviceId=00000000000000000079&start=2024-11-24T00%3A00%3A00Z&end=2024-11-25T00%3A00%3A00Z&view=cd-events-grid-view' @@ -30,11 +30,11 @@ it('can generate valid url', () => { it('can parse response', () => { let content = fs.readFileSync(path.resolve(__dirname, '__data__/content_en.json')) let results = parser({ content, channel, date }) - results = results.map(p => { - p.start = p.start.toJSON() - p.stop = p.stop.toJSON() - return p - }) + .map(p => { + p.start = p.start.toJSON() + p.stop = p.stop.toJSON() + return p + }) expect(results.length).toBe(1) expect(results[0]).toMatchObject({ @@ -48,11 +48,11 @@ it('can parse response', () => { content = fs.readFileSync(path.resolve(__dirname, '__data__/content_id.json')) results = parser({ content, channel: channelId, date }) - results = results.map(p => { - p.start = p.start.toJSON() - p.stop = p.stop.toJSON() - return p - }) + .map(p => { + p.start = p.start.toJSON() + p.stop = p.stop.toJSON() + return p + }) expect(results.length).toBe(1) expect(results[0]).toMatchObject({ From a5229d3af0cb59c3a65ef477dec0ee9dbc088737 Mon Sep 17 00:00:00 2001 From: Toha Date: Wed, 18 Dec 2024 17:36:45 +0700 Subject: [PATCH 09/11] Simultaneous fetch is now external package. Signed-off-by: Toha --- package.json | 1 + scripts/core/fetch.js | 109 ------------------ sites/mncvision.id/mncvision.id.config.js | 4 +- sites/mytelly.co.uk/mytelly.co.uk.config.js | 8 +- sites/rotana.net/rotana.net.config.js | 4 +- sites/sky.com/sky.com.config.js | 6 +- .../startimestv.com/startimestv.com.config.js | 6 +- sites/tv.yandex.ru/tv.yandex.ru.config.js | 4 +- .../virgintvgo.virginmedia.com.config.js | 67 +---------- 9 files changed, 20 insertions(+), 189 deletions(-) delete mode 100644 scripts/core/fetch.js diff --git a/package.json b/package.json index 3d3a5213..5f15d386 100644 --- a/package.json +++ b/package.json @@ -30,6 +30,7 @@ "@alex_neo/jest-expect-message": "^1.0.5", "@freearhey/core": "^0.3.1", "@freearhey/search-js": "^0.1.1", + "@ntlab/sfetch": "^1.0.0", "@octokit/core": "^4.1.0", "@types/cli-progress": "^3.11.3", "@types/fs-extra": "^11.0.2", diff --git a/scripts/core/fetch.js b/scripts/core/fetch.js deleted file mode 100644 index a8e7ae81..00000000 --- a/scripts/core/fetch.js +++ /dev/null @@ -1,109 +0,0 @@ -const axios = require('axios') - -/** - * A callback when fetch queue is completely done. - * - * To check for successful operation simply check if res is not undefined. - * - * @callback completeCallback - * @param {string|object} queue Fetched queue which is complete - * @param {string|object} res Response content returned by axios - * @param {object} headers Response headers returned by axios - */ - -/** - * @type {number} - */ -let nworker = 25 - -/** - * @type {boolean} - */ -let checkResult = true - -/** - * @type {any} - */ -let debug - -/** - * Queued url fetch. 
- * - * @param {array} queues The queues - * @param {completeCallback} cb Queue completion callback - */ -async function doFetch(queues, cb) { - let n = Math.min(nworker, queues.length) - const workers = [] - const adjustWorker = () => { - if (queues.length > workers.length && workers.length < nworker) { - let nw = Math.min(nworker, queues.length) - if (n < nw) { - n = nw - createWorker() - } - } - } - const createWorker = () => { - while (workers.length < n) { - startWorker() - } - } - const startWorker = () => { - const worker = () => { - if (queues.length) { - const queue = queues.shift() - const done = (res, headers) => { - if ((checkResult && res) || !checkResult) { - cb(queue, res, headers) - adjustWorker() - } - worker() - } - const url = typeof queue === 'string' ? queue : queue.u - const params = typeof queue === 'object' && queue.params ? queue.params : {} - const method = typeof queue === 'object' && queue.m ? queue.m : 'get' - if (typeof debug === 'function') { - debug(`fetch %s with %s`, url, JSON.stringify(params)) - } - axios[method](url, params) - .then(response => { - done(response.data, response.headers) - }) - .catch(err => { - console.error(`Unable to fetch ${url}: ${err.message}!`) - done() - }) - } else { - workers.splice(workers.indexOf(worker), 1) - } - } - workers.push(worker) - worker() - } - createWorker() - await new Promise(resolve => { - const interval = setInterval(() => { - if (workers.length === 0) { - clearInterval(interval) - resolve() - } - }, 500) - }) -} - -module.exports = doFetch -Object.assign(module.exports, { - setMaxWorker(n) { - nworker = n - return module.exports - }, - setCheckResult(enabled) { - checkResult = enabled - return module.exports - }, - setDebugger(dbg) { - debug = dbg - return module.exports - } -}) \ No newline at end of file diff --git a/sites/mncvision.id/mncvision.id.config.js b/sites/mncvision.id/mncvision.id.config.js index 46994b9a..ba944503 100644 --- a/sites/mncvision.id/mncvision.id.config.js +++ b/sites/mncvision.id/mncvision.id.config.js @@ -4,7 +4,7 @@ const dayjs = require('dayjs') const utc = require('dayjs/plugin/utc') const timezone = require('dayjs/plugin/timezone') const customParseFormat = require('dayjs/plugin/customParseFormat') -const doFetch = require('../../scripts/core/fetch') +const doFetch = require('@ntlab/sfetch') const debug = require('debug')('site:mncvision.id') dayjs.extend(utc) @@ -127,7 +127,7 @@ async function parseItems(content, date, cookies) { 'X-Requested-With': 'XMLHttpRequest', Cookie: cookies, } - queues.push({ i: $item, u: url, params: { headers, timeout } }) + queues.push({ i: $item, url, params: { headers, timeout } }) } await doFetch(queues, (queue, res) => { const $item = queue.i diff --git a/sites/mytelly.co.uk/mytelly.co.uk.config.js b/sites/mytelly.co.uk/mytelly.co.uk.config.js index 0a42f01e..282f8dc4 100644 --- a/sites/mytelly.co.uk/mytelly.co.uk.config.js +++ b/sites/mytelly.co.uk/mytelly.co.uk.config.js @@ -3,7 +3,7 @@ const dayjs = require('dayjs') const utc = require('dayjs/plugin/utc') const timezone = require('dayjs/plugin/timezone') const customParseFormat = require('dayjs/plugin/customParseFormat') -const doFetch = require('../../scripts/core/fetch') +const doFetch = require('@ntlab/sfetch') const debug = require('debug')('site:mytelly.co.uk') dayjs.extend(utc) @@ -110,7 +110,7 @@ module.exports = { }, async channels() { const channels = {} - const queues = [{ t: 'p', m: 'post', u: 'https://www.mytelly.co.uk/getform' }] + const queues = [{ t: 'p', method: 'post', url: 
'https://www.mytelly.co.uk/getform' }] await doFetch(queues, (queue, res) => { // process form -> provider if (queue.t === 'p') { @@ -119,7 +119,7 @@ module.exports = { .forEach(el => { const opt = $(el) const provider = opt.attr('value') - queues.push({ t: 'r', m: 'post', u: 'https://www.mytelly.co.uk/getregions', params: { provider } }) + queues.push({ t: 'r', method: 'post', url: 'https://www.mytelly.co.uk/getregions', params: { provider } }) }) } // process provider -> region @@ -135,7 +135,7 @@ module.exports = { u_time: now.format('HHmm'), is_mobile: 1 } - queues.push({ t: 's', m: 'post', u: 'https://www.mytelly.co.uk/tv-guide/schedule', params }) + queues.push({ t: 's', method: 'post', url: 'https://www.mytelly.co.uk/tv-guide/schedule', params }) } } // process schedule -> channels diff --git a/sites/rotana.net/rotana.net.config.js b/sites/rotana.net/rotana.net.config.js index 6237d756..9eb0b930 100644 --- a/sites/rotana.net/rotana.net.config.js +++ b/sites/rotana.net/rotana.net.config.js @@ -4,7 +4,7 @@ const dayjs = require('dayjs') const timezone = require('dayjs/plugin/timezone') const utc = require('dayjs/plugin/utc') const customParseFormat = require('dayjs/plugin/customParseFormat') -const doFetch = require('../../scripts/core/fetch') +const doFetch = require('@ntlab/sfetch') const debug = require('debug')('site:rotana.net') dayjs.extend(timezone) @@ -50,7 +50,7 @@ module.exports = { cookie: cookies[channel.lang], } } - queues.push({ i: item, u: url, params }) + queues.push({ i: item, url, params }) } await doFetch(queues, (queue, res) => { programs.push(parseProgram(queue.i, res)) diff --git a/sites/sky.com/sky.com.config.js b/sites/sky.com/sky.com.config.js index cf24b538..cf024178 100644 --- a/sites/sky.com/sky.com.config.js +++ b/sites/sky.com/sky.com.config.js @@ -1,7 +1,7 @@ const cheerio = require('cheerio') const dayjs = require('dayjs') const utc = require('dayjs/plugin/utc') -const doFetch = require('../../scripts/core/fetch') +const doFetch = require('@ntlab/sfetch') const debug = require('debug')('site:sky.com') dayjs.extend(utc) @@ -49,7 +49,7 @@ module.exports = { }, async channels() { const channels = {} - const queues = [{ t: 'r', u: 'https://www.sky.com/tv-guide' }] + const queues = [{ t: 'r', url: 'https://www.sky.com/tv-guide' }] await doFetch(queues, (queue, res) => { // process regions if (queue.t === 'r') { @@ -57,7 +57,7 @@ module.exports = { const initialData = JSON.parse(decodeURIComponent($('#initialData').text())) initialData.state.epgData.regions .forEach(region => { - queues.push({ t: 'c', u: `https://awk.epgsky.com/hawk/linear/services/${region.bouquet}/${region.subBouquet}` }) + queues.push({ t: 'c', url: `https://awk.epgsky.com/hawk/linear/services/${region.bouquet}/${region.subBouquet}` }) }) } // process channels diff --git a/sites/startimestv.com/startimestv.com.config.js b/sites/startimestv.com/startimestv.com.config.js index e84f433c..83b6a4d4 100644 --- a/sites/startimestv.com/startimestv.com.config.js +++ b/sites/startimestv.com/startimestv.com.config.js @@ -2,7 +2,7 @@ const cheerio = require('cheerio') const dayjs = require('dayjs') const utc = require('dayjs/plugin/utc') const customParseFormat = require('dayjs/plugin/customParseFormat') -const doFetch = require('../../scripts/core/fetch') +const doFetch = require('@ntlab/sfetch') const debug = require('debug')('site:startimestv.com') dayjs.extend(utc) @@ -48,7 +48,7 @@ module.exports = { }, async channels() { const channels = {} - const queues = [{ t: 'a', u: 
'https://www.startimestv.com/tv_guide.html' }] + const queues = [{ t: 'a', url: 'https://www.startimestv.com/tv_guide.html' }] await doFetch(queues, (queue, res) => { // process area-id if (queue.t === 'a') { @@ -59,7 +59,7 @@ module.exports = { const areaId = dd.attr('area-id') queues.push({ t: 's', - u: 'https://www.startimestv.com/tv_guide.html', + url: 'https://www.startimestv.com/tv_guide.html', params: { headers: { cookie: `default_areaID=${areaId}` diff --git a/sites/tv.yandex.ru/tv.yandex.ru.config.js b/sites/tv.yandex.ru/tv.yandex.ru.config.js index 99c908f8..e12df5d0 100644 --- a/sites/tv.yandex.ru/tv.yandex.ru.config.js +++ b/sites/tv.yandex.ru/tv.yandex.ru.config.js @@ -1,5 +1,5 @@ const dayjs = require('dayjs') -const doFetch = require('../../scripts/core/fetch') +const doFetch = require('@ntlab/sfetch') const debug = require('debug')('site:tv.yandex.ru') doFetch @@ -276,7 +276,7 @@ function getQueue(url, referer) { } const headers = getHeaders(data) return { - u: url, + url, params: { headers } } } \ No newline at end of file diff --git a/sites/virgintvgo.virginmedia.com/virgintvgo.virginmedia.com.config.js b/sites/virgintvgo.virginmedia.com/virgintvgo.virginmedia.com.config.js index 75cc2fcd..a26c23fd 100644 --- a/sites/virgintvgo.virginmedia.com/virgintvgo.virginmedia.com.config.js +++ b/sites/virgintvgo.virginmedia.com/virgintvgo.virginmedia.com.config.js @@ -1,11 +1,13 @@ const dayjs = require('dayjs') const utc = require('dayjs/plugin/utc') +const doFetch = require('@ntlab/sfetch') const debug = require('debug')('site:virgintvgo.virginmedia.com') dayjs.extend(utc) +doFetch.setDebugger(debug) + const detailedGuide = true -const nworker = 25 module.exports = { site: 'virgintvgo.virginmedia.com', @@ -110,66 +112,3 @@ module.exports = { return channels } } - -async function doFetch(queues, cb) { - const axios = require('axios') - - let n = Math.min(nworker, queues.length) - const workers = [] - const adjustWorker = () => { - if (queues.length > workers.length && workers.length < nworker) { - let nw = Math.min(nworker, queues.length) - if (n < nw) { - n = nw - createWorker() - } - } - } - const createWorker = () => { - while (workers.length < n) { - startWorker() - } - } - const startWorker = () => { - const worker = () => { - if (queues.length) { - const queue = queues.shift() - const done = res => { - if (res) { - cb(queue, res) - adjustWorker() - } - worker() - } - const url = typeof queue === 'string' ? queue : queue.u - const params = typeof queue === 'object' && queue.params ? queue.params : {} - const method = typeof queue === 'object' && queue.m ? 
queue.m : 'get' - debug(`fetch %s with %s`, url, JSON.stringify(params)) - if (method === 'post') { - axios - .post(url, params) - .then(response => done(response.data)) - .catch(console.error) - } else { - axios - .get(url, params) - .then(response => done(response.data)) - .catch(console.error) - } - } else { - workers.splice(workers.indexOf(worker), 1) - } - } - workers.push(worker) - worker() - } - createWorker() - await new Promise(resolve => { - const interval = setInterval(() => { - if (workers.length === 0) { - clearInterval(interval) - resolve() - } - }, 500) - }) -} From 838620b69811f5ca406b5a86751e700379fa5ab4 Mon Sep 17 00:00:00 2001 From: freearhey <7253922+freearhey@users.noreply.github.com> Date: Wed, 18 Dec 2024 21:42:41 +0300 Subject: [PATCH 10/11] Update package-lock.json --- package-lock.json | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/package-lock.json b/package-lock.json index 92c7f236..1edf0299 100644 --- a/package-lock.json +++ b/package-lock.json @@ -11,6 +11,7 @@ "@alex_neo/jest-expect-message": "^1.0.5", "@freearhey/core": "^0.3.1", "@freearhey/search-js": "^0.1.1", + "@ntlab/sfetch": "^1.0.0", "@octokit/core": "^4.1.0", "@types/cli-progress": "^3.11.3", "@types/fs-extra": "^11.0.2", @@ -1885,6 +1886,14 @@ "node": ">=10" } }, + "node_modules/@ntlab/sfetch": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@ntlab/sfetch/-/sfetch-1.0.0.tgz", + "integrity": "sha512-AWrC43z1TncvB7S7dl9Wn8xZpCqdKFBfXqaN3BXPfJeS3gxV9Fm86eAsW95YdXTOgPWbCC/GAgVuXi6Aot6DkQ==", + "dependencies": { + "axios": "^1.7.9" + } + }, "node_modules/@octokit/auth-token": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-3.0.2.tgz", @@ -10340,6 +10349,14 @@ } } }, + "@ntlab/sfetch": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@ntlab/sfetch/-/sfetch-1.0.0.tgz", + "integrity": "sha512-AWrC43z1TncvB7S7dl9Wn8xZpCqdKFBfXqaN3BXPfJeS3gxV9Fm86eAsW95YdXTOgPWbCC/GAgVuXi6Aot6DkQ==", + "requires": { + "axios": "^1.7.9" + } + }, "@octokit/auth-token": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-3.0.2.tgz", From 37e27c8cf86de365760fc03c1bc6696336bcfbbb Mon Sep 17 00:00:00 2001 From: freearhey <7253922+freearhey@users.noreply.github.com> Date: Wed, 18 Dec 2024 21:42:44 +0300 Subject: [PATCH 11/11] Update yarn.lock --- yarn.lock | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/yarn.lock b/yarn.lock index 3f9d3eeb..1dee6b73 100644 --- a/yarn.lock +++ b/yarn.lock @@ -758,6 +758,13 @@ dependencies: semver "^7.3.5" +"@ntlab/sfetch@^1.0.0": + version "1.0.0" + resolved "https://registry.npmjs.org/@ntlab/sfetch/-/sfetch-1.0.0.tgz" + integrity sha512-AWrC43z1TncvB7S7dl9Wn8xZpCqdKFBfXqaN3BXPfJeS3gxV9Fm86eAsW95YdXTOgPWbCC/GAgVuXi6Aot6DkQ== + dependencies: + axios "^1.7.9" + "@octokit/auth-token@^3.0.0": version "3.0.2" resolved "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-3.0.2.tgz" @@ -1354,7 +1361,7 @@ axios-mock-adapter@^1.20.0: is-blob "^2.1.0" is-buffer "^2.0.5" -axios@^1.5.1, axios@^1.6.1, "axios@>= 0.9.0", axios@>=0.20.0: +axios@^1.5.1, axios@^1.6.1, axios@^1.7.9, "axios@>= 0.9.0", axios@>=0.20.0: version "1.7.9" resolved "https://registry.npmjs.org/axios/-/axios-1.7.9.tgz" integrity sha512-LhLcE7Hbiryz8oMDdDptSrWowmB4Bl6RCt6sIJKpRB4XtVf0iEgewX3au/pJqm+Py1kCASkb/FFKjxQaLtxJvw==
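
For reference, the consumer-side shape after the switch to the external
package in PATCH 09/11: the helper is required from @ntlab/sfetch and
queue entries carry url/method keys instead of the u/m keys of the
removed scripts/core/fetch.js, while the rest of the call is unchanged.
A minimal sketch, assuming the package keeps the same contract as the
in-repo helper it replaces (URLs, the `grab` function and the `t` key
are illustrative):

    const doFetch = require('@ntlab/sfetch')
    const debug = require('debug')('site:example')

    doFetch.setDebugger(debug).setMaxWorker(10)

    async function grab() {
      const queues = [
        { t: 'form', method: 'post', url: 'https://example.com/getform' },
        { t: 'schedule', url: 'https://example.com/schedule', params: { timeout: 30000 } }
      ]
      // same callback contract as before: (queue, res, headers)
      await doFetch(queues, (queue, res) => {
        if (res) console.log(queue.t, typeof res)
      })
    }

    grab()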