Mirror of https://github.com/iptv-org/database.git, synced 2025-05-09 19:20:01 -04:00
Merge pull request #17654 from iptv-org/patch-2025.04.1
Patch 2025.04.1
Commit bcab2227e0
122 changed files with 40243 additions and 39913 deletions
.github/ISSUE_TEMPLATE/2_channels_edit.yml (vendored, 2 lines changed)
@@ -7,7 +7,7 @@ body:
   - type: markdown
     attributes:
       value: |
-        Please specify exactly what should be changed. To delete an existing value without replacement use the `~` symbol.
+        Please specify exactly what should be changed. To delete an existing value without replacement use the `~` symbol. To edit `broadcast_area`, `languages`, `timezones` and `video_formats` use this [form](https://github.com/iptv-org/database/issues/new?assignees=&labels=feeds%3Aedit&projects=&template=5_feeds_edit.yml&title=Edit%3A+).

   - type: input
     id: id
.github/workflows/check.yml (vendored, 2 lines changed)
@@ -31,4 +31,4 @@ jobs:
         run: npm install
       - name: validate
         if: steps.files.outputs.any_changed == 'true'
-        run: npm run db:validate -- ${{ steps.files.outputs.all_changed_files }}
+        run: npm run db:validate
@@ -1 +1,7 @@
-npm run db:validate
+#!/bin/sh
+
+data_changed="$(git diff --staged --name-only --diff-filter=ACMR -- 'data/*.csv' | sed 's| |\\ |g')"
+
+if [ ! -z "$data_changed" ]; then
+  npm run db:validate
+fi
data/.gitignore (vendored, 3 lines changed)
@@ -1,3 +0,0 @@
-*
-!*.csv
-!.gitignore
data/channels.csv (74316 lines changed; file diff suppressed because it is too large)
@@ -1449,7 +1449,7 @@ AMCenEspanol.us,SD,SD,TRUE,c/US,America/New_York,spa,480i
 AMCEurope.uk,Bulgary,Bulgary,FALSE,c/BG,Europe/Budapest,bul,576i
 AMCEurope.uk,CzechRepublic,CzechRepublic,FALSE,c/CZ;c/SK,Europe/Budapest,eng,576i
 AMCEurope.uk,Hungary,Hungary,FALSE,c/HU,Europe/Budapest,eng,576i
-AMCEurope.uk,Polnad,Poland,FALSE,c/PL,Europe/Budapest,pol,576i
+AMCEurope.uk,Poland,Poland,FALSE,c/PL,Europe/Budapest,pol,576i
 AMCEurope.uk,Portugal,Portugal,FALSE,c/PT,Europe/Madrid,por,576i
 AMCEurope.uk,Romania,Romania,FALSE,c/RO,Europe/Budapest,eng,576i
 AMCEurope.uk,Serbia,Serbia,FALSE,c/RS,Europe/Budapest,eng,576i
@@ -2439,7 +2439,7 @@ AXNMystery.jp,SD,SD,TRUE,c/JP,Asia/Singapore,jpn,480i
 AXNSciFi.ru,SD,SD,TRUE,r/CIS,Europe/Moscow,rus,576i
 AXNSpin.hu,Poland,Poland,FALSE,c/PL,Europe/Warsaw,pol,576i
 AXNSpin.hu,SD,SD,TRUE,c/BA;c/RO;c/HR;c/MK;c/RS;c/SI,Europe/Athens,srp;pol;ron;hrv,576i
-AXNWhite.us,Bulgaria,Bulgria,FALSE,c/BG,Europe/Sofia,bul,576i
+AXNWhite.us,Bulgaria,Bulgaria,FALSE,c/BG,Europe/Sofia,bul,576i
 AXNWhite.us,Poland,Poland,FALSE,c/PL,Europe/Warsaw,pol,576i
 AXNWhite.us,Portugal,Portugal,FALSE,c/PT,Europe/Lisbon,por,576i
 AXNWhite.us,SD,SD,TRUE,c/ES;c/AD;c/CZ;c/HU;c/RO;c/SK,America/New_York,eng,576i
@@ -4764,7 +4764,7 @@ CartoonitoLiveLatinAmerica.br,SD,SD,TRUE,r/HISPAM,America/New_York,spa,480i
 CartoonitoMENA.uk,HD,HD,FALSE,r/MENA;c/GR;c/CY,America/Toronto,eng;ara;ell,1080i
 CartoonitoMENA.uk,SD,SD,TRUE,r/MENA;c/GR;c/CY,America/Toronto,eng;ara;ell,576i
 CartoonitoMENA.uk,Turkey,Turkey,FALSE,c/TR,Europe/Istanbul,tur,576i
-CartoonitoNordic.uk,SD,SD,TRUE,r/NORDIC,America/Toronto,eng,576i
+CartoonitoNordic.uk,SD,SD,TRUE,r/NORD,America/Toronto,eng,576i
 CartoonitoWesternEurope.uk,Norway,Norway,FALSE,c/NO,America/Toronto,nor,576i
 CartoonitoWesternEurope.uk,Sweden,Sweden,TRUE,c/SE,America/Toronto,swe,576i
 CartoonNetwork.au,Australia,Australia,TRUE,c/AU,Australia/Sydney,eng,576i
@@ -4793,7 +4793,7 @@ CartoonNetworkAfrica.uk,SD,SD,TRUE,r/AFR,Europe/London,eng,576i
 CartoonNetworkArabic.ae,SD,SD,TRUE,r/MENA,Asia/Dubai,ara,576i
 CartoonNetworkAsia.sg,SD,SD,TRUE,r/SEA,Asia/Singapore,eng,576i
 CartoonNetworkAsia.sg,Vietnam,Vietnam,FALSE,c/VN,Asia/Singapore,vie;eng,576i
-CartoonNetworkCEE.uk,Bulgray,Bulgary,FALSE,c/BG,America/Toronto,bul,576i
+CartoonNetworkCEE.uk,Bulgary,Bulgary,FALSE,c/BG,America/Toronto,bul,576i
 CartoonNetworkCEE.uk,CzechRepublic,Czech Republic,FALSE,c/CZ,America/Toronto,ces,576i
 CartoonNetworkCEE.uk,Denmark,Denmark,FALSE,c/DK,America/Toronto,dan,576i
 CartoonNetworkCEE.uk,France,France,FALSE,c/FR;c/BE;c/LU;c/CH;c/MA;c/TN;c/LB;c/MG;c/MU;c/HT,America/Toronto,eng,576i
package-lock.json (generated, 1555 lines changed; file diff suppressed because it is too large)
package.json (17 lines changed)
@@ -4,16 +4,13 @@
     "act:check": "act pull_request -W .github/workflows/check.yml",
     "act:update": "act workflow_dispatch -W .github/workflows/update.yml",
     "act:deploy": "act push -W .github/workflows/deploy.yml",
-    "db:validate": "tsx scripts/db/validate.ts",
-    "db:export": "tsx scripts/db/export.ts",
-    "db:update": "tsx scripts/db/update.ts",
+    "db:validate": "tsx scripts/commands/db/validate.ts",
+    "db:export": "tsx scripts/commands/db/export.ts",
+    "db:update": "tsx scripts/commands/db/update.ts",
     "lint": "npx eslint \"{scripts,tests}/**/*.{ts,js}\"",
     "test": "jest --runInBand",
     "prepare": "husky"
   },
-  "engines": {
-    "node": ">=18.0.0 <=22.12.0"
-  },
   "private": true,
   "author": "Arhey",
   "jest": {
@@ -24,16 +21,16 @@
   },
   "dependencies": {
     "@eslint/js": "^9.16.0",
-    "@freearhey/core": "^0.6.0",
+    "@freearhey/core": "^0.8.2",
     "@joi/date": "^2.1.0",
     "@json2csv/formatters": "^7.0.3",
     "@json2csv/node": "^7.0.3",
     "@json2csv/transforms": "^7.0.3",
-    "@octokit/core": "^4.2.0",
-    "@octokit/plugin-paginate-rest": "^6.0.0",
+    "@octokit/core": "^6.1.5",
+    "@octokit/plugin-paginate-rest": "^12.0.0",
     "@octokit/plugin-rest-endpoint-methods": "^7.1.3",
+    "@types/fs-extra": "^11.0.4",
     "@types/jest": "^29.5.5",
-    "@types/joi": "^17.2.3",
     "@typescript-eslint/eslint-plugin": "^8.17.0",
     "chalk": "^4.1.2",
     "commander": "^9.0.0",
@@ -1,6 +1,7 @@
+import { DATA_DIR, API_DIR } from '../../constants'
 import { Storage, File } from '@freearhey/core'
-import { DATA_DIR, API_DIR } from '../constants'
-import { CSVParser } from '../core'
+import { CSVParser } from '../../core'
+import { CSVParserRow } from '../../types/csvParser'

 async function main() {
   const dataStorage = new Storage(DATA_DIR)
@@ -12,7 +13,8 @@ async function main() {
     const file = new File(filepath)
     const filename = file.name()
     const data = await dataStorage.load(file.basename())
-    const items = await parser.parse(data)
+    const parsed = await parser.parse(data)
+    const items = parsed.map((row: CSVParserRow) => row.data)

     await apiStorage.save(`${filename}.json`, items.toJSON())
   }
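The parse step now returns rows that carry their source line number alongside the parsed values, so the export script unwraps row.data before serialising. A minimal sketch of that shape and the unwrap, using plain arrays rather than the @freearhey/core Collection; the exact CSVParserRow field types are an assumption based on the { line, data } objects built by the CSVParser change further down in this diff:

// Hypothetical illustration of the row shape produced by CSVParser.parse()
// and the unwrapping done in the export command; field types are assumed.
interface CSVParserRow {
  line: number                  // 1-based line in the CSV file (line 1 is the header)
  data: Record<string, unknown> // parsed column values for that row
}

const rows: CSVParserRow[] = [
  { line: 2, data: { id: 'AMCEurope.uk', name: 'AMC Europe' } },
  { line: 3, data: { id: 'AXNWhite.us', name: 'AXN White' } }
]

// The export step only needs the parsed values, not the line numbers:
const items = rows.map(row => row.data)
console.log(items.length) // 2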
scripts/commands/db/update.ts (new file, 410 lines)
@@ -0,0 +1,410 @@
|
||||||
|
import { CSV, IssueLoader, Issue, IssueData } from '../../core'
|
||||||
|
import { createChannelId, createFeedId } from '../../utils'
|
||||||
|
import { Channel, Feed, BlocklistRecord } from '../../models'
|
||||||
|
import { Storage, Collection, Logger } from '@freearhey/core'
|
||||||
|
import { DATA_DIR } from '../../constants'
|
||||||
|
import { DataLoader } from '../../core/dataLoader'
|
||||||
|
import { DataLoaderData } from '../../types/dataLoader'
|
||||||
|
|
||||||
|
const processedIssues = new Collection()
|
||||||
|
const dataStorage = new Storage(DATA_DIR)
|
||||||
|
const logger = new Logger({ level: -999 })
|
||||||
|
|
||||||
|
async function main() {
|
||||||
|
const issueLoader = new IssueLoader()
|
||||||
|
const dataLoader = new DataLoader({ storage: dataStorage })
|
||||||
|
|
||||||
|
logger.info('loading issues...')
|
||||||
|
const issues = await issueLoader.load()
|
||||||
|
|
||||||
|
logger.info('loading data...')
|
||||||
|
const data = await dataLoader.load()
|
||||||
|
|
||||||
|
logger.info('processing issues...')
|
||||||
|
await removeFeeds(issues, data)
|
||||||
|
await removeChannels(issues, data)
|
||||||
|
await editFeeds(issues, data)
|
||||||
|
await editChannels(issues, data)
|
||||||
|
await addFeeds(issues, data)
|
||||||
|
await addChannels(issues, data)
|
||||||
|
await blockChannels(issues, data)
|
||||||
|
await unblockChannels(issues, data)
|
||||||
|
|
||||||
|
logger.info('saving data...')
|
||||||
|
await save(data)
|
||||||
|
|
||||||
|
const output = processedIssues.map((issue: Issue) => `closes #${issue.number}`).join(', ')
|
||||||
|
process.stdout.write(`OUTPUT=${output}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
main()
|
||||||
|
|
||||||
|
async function save(data: DataLoaderData) {
|
||||||
|
const channels = data.channels
|
||||||
|
.sortBy((channel: Channel) => channel.id.toLowerCase())
|
||||||
|
.map((channel: Channel) => channel.data())
|
||||||
|
const channelsOutput = new CSV({ items: channels }).toString()
|
||||||
|
await dataStorage.save('channels.csv', channelsOutput)
|
||||||
|
|
||||||
|
const feeds = data.feeds
|
||||||
|
.sortBy((feed: Feed) => `${feed.getStreamId()}`.toLowerCase())
|
||||||
|
.map((feed: Feed) => feed.data())
|
||||||
|
const feedsOutput = new CSV({ items: feeds }).toString()
|
||||||
|
await dataStorage.save('feeds.csv', feedsOutput)
|
||||||
|
|
||||||
|
const blocklistRecords = data.blocklistRecords
|
||||||
|
.sortBy((blocklistRecord: BlocklistRecord) => blocklistRecord.channelId.toLowerCase())
|
||||||
|
.map((blocklistRecord: BlocklistRecord) => blocklistRecord.data())
|
||||||
|
const blocklistOutput = new CSV({ items: blocklistRecords }).toString()
|
||||||
|
await dataStorage.save('blocklist.csv', blocklistOutput)
|
||||||
|
}
|
||||||
|
|
||||||
|
async function removeFeeds(issues: Collection, data: DataLoaderData) {
|
||||||
|
const requests = issues.filter(
|
||||||
|
issue => issue.labels.includes('feeds:remove') && issue.labels.includes('approved')
|
||||||
|
)
|
||||||
|
|
||||||
|
requests.forEach((issue: Issue) => {
|
||||||
|
const issueData: IssueData = issue.data
|
||||||
|
|
||||||
|
if (issueData.missing('channel_id') || issueData.missing('feed_id')) return
|
||||||
|
|
||||||
|
const found: Feed = data.feeds.first(
|
||||||
|
(feed: Feed) =>
|
||||||
|
feed.channelId === issueData.getString('channel_id') &&
|
||||||
|
feed.id === issueData.getString('feed_id')
|
||||||
|
)
|
||||||
|
if (!found) return
|
||||||
|
|
||||||
|
data.feeds.remove((feed: Feed) => feed.channelId === found.channelId && feed.id === found.id)
|
||||||
|
|
||||||
|
onFeedRemoval(found.channelId, found.id, data)
|
||||||
|
|
||||||
|
processedIssues.push(issue)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
async function editFeeds(issues: Collection, data: DataLoaderData) {
|
||||||
|
const requests = issues.filter(
|
||||||
|
issue => issue.labels.includes('feeds:edit') && issue.labels.includes('approved')
|
||||||
|
)
|
||||||
|
|
||||||
|
requests.forEach((issue: Issue) => {
|
||||||
|
const issueData: IssueData = issue.data
|
||||||
|
if (issueData.missing('channel_id') || issueData.missing('feed_id')) return
|
||||||
|
|
||||||
|
const found: Feed = data.feeds.first(
|
||||||
|
(feed: Feed) =>
|
||||||
|
feed.channelId === issueData.getString('channel_id') &&
|
||||||
|
feed.id === issueData.getString('feed_id')
|
||||||
|
)
|
||||||
|
if (!found) return
|
||||||
|
|
||||||
|
const channelId: string | undefined = found.channelId
|
||||||
|
let feedId: string | undefined = found.id
|
||||||
|
if (issueData.has('feed_name')) {
|
||||||
|
const name = issueData.getString('feed_name') || found.name
|
||||||
|
if (name) {
|
||||||
|
feedId = createFeedId(name)
|
||||||
|
if (feedId) onFeedIdChange(found.channelId, found.id, feedId, data)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (issueData.has('is_main')) {
|
||||||
|
const isMain = issueData.getBoolean('is_main') || false
|
||||||
|
if (isMain) onFeedNewMain(channelId, feedId, data)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!feedId || !channelId) return
|
||||||
|
|
||||||
|
found.update(issueData).setId(feedId)
|
||||||
|
|
||||||
|
processedIssues.push(issue)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
async function addFeeds(issues: Collection, data: DataLoaderData) {
|
||||||
|
const requests = issues.filter(
|
||||||
|
issue => issue.labels.includes('feeds:add') && issue.labels.includes('approved')
|
||||||
|
)
|
||||||
|
|
||||||
|
requests.forEach((issue: Issue) => {
|
||||||
|
const issueData: IssueData = issue.data
|
||||||
|
|
||||||
|
if (
|
||||||
|
issueData.missing('channel_id') ||
|
||||||
|
issueData.missing('feed_name') ||
|
||||||
|
issueData.missing('is_main') ||
|
||||||
|
issueData.missing('broadcast_area') ||
|
||||||
|
issueData.missing('timezones') ||
|
||||||
|
issueData.missing('languages') ||
|
||||||
|
issueData.missing('video_format')
|
||||||
|
)
|
||||||
|
return
|
||||||
|
|
||||||
|
const channelId = issueData.getString('channel_id')
|
||||||
|
const feedName = issueData.getString('feed_name') || 'SD'
|
||||||
|
const feedId = createFeedId(feedName)
|
||||||
|
if (!channelId || !feedId) return
|
||||||
|
|
||||||
|
const found: Feed = data.feeds.first(
|
||||||
|
(feed: Feed) => feed.channelId === channelId && feed.id === feedId
|
||||||
|
)
|
||||||
|
if (found) return
|
||||||
|
|
||||||
|
const isMain = issueData.getBoolean('is_main') || false
|
||||||
|
if (isMain) onFeedNewMain(channelId, feedId, data)
|
||||||
|
|
||||||
|
const newFeed = new Feed({
|
||||||
|
channel: channelId,
|
||||||
|
id: feedId,
|
||||||
|
name: feedName,
|
||||||
|
is_main: issueData.getBoolean('is_main') || false,
|
||||||
|
broadcast_area: issueData.getArray('broadcast_area') || [],
|
||||||
|
timezones: issueData.getArray('timezones') || [],
|
||||||
|
languages: issueData.getArray('languages') || [],
|
||||||
|
video_format: issueData.getString('video_format')
|
||||||
|
})
|
||||||
|
|
||||||
|
data.feeds.add(newFeed)
|
||||||
|
|
||||||
|
processedIssues.push(issue)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
async function removeChannels(issues: Collection, data: DataLoaderData) {
|
||||||
|
const requests = issues.filter(
|
||||||
|
issue => issue.labels.includes('channels:remove') && issue.labels.includes('approved')
|
||||||
|
)
|
||||||
|
|
||||||
|
requests.forEach((issue: Issue) => {
|
||||||
|
const issueData: IssueData = issue.data
|
||||||
|
|
||||||
|
if (issueData.missing('channel_id')) return
|
||||||
|
|
||||||
|
const found = data.channels.first(
|
||||||
|
(channel: Channel) => channel.id === issueData.getString('channel_id')
|
||||||
|
)
|
||||||
|
if (!found) return
|
||||||
|
|
||||||
|
data.channels.remove((channel: Channel) => channel.id === found.id)
|
||||||
|
|
||||||
|
onChannelRemoval(found.id, data)
|
||||||
|
|
||||||
|
processedIssues.push(issue)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
async function editChannels(issues: Collection, data: DataLoaderData) {
|
||||||
|
const requests = issues.filter(
|
||||||
|
issue => issue.labels.includes('channels:edit') && issue.labels.includes('approved')
|
||||||
|
)
|
||||||
|
|
||||||
|
requests.forEach((issue: Issue) => {
|
||||||
|
const issueData: IssueData = issue.data
|
||||||
|
|
||||||
|
if (issueData.missing('channel_id')) return
|
||||||
|
|
||||||
|
const found: Channel = data.channels.first(
|
||||||
|
(channel: Channel) => channel.id === issueData.getString('channel_id')
|
||||||
|
)
|
||||||
|
if (!found) return
|
||||||
|
|
||||||
|
let channelId: string | undefined = found.id
|
||||||
|
if (issueData.has('channel_name') || issueData.has('country')) {
|
||||||
|
const name = issueData.getString('channel_name') || found.name
|
||||||
|
const country = issueData.getString('country') || found.countryCode
|
||||||
|
if (name && country) {
|
||||||
|
channelId = createChannelId(name, country)
|
||||||
|
if (channelId) onChannelIdChange(found.id, channelId, data)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!channelId) return
|
||||||
|
|
||||||
|
found.update(issueData).setId(channelId)
|
||||||
|
|
||||||
|
processedIssues.push(issue)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
async function addChannels(issues: Collection, data: DataLoaderData) {
|
||||||
|
const requests = issues.filter(
|
||||||
|
issue => issue.labels.includes('channels:add') && issue.labels.includes('approved')
|
||||||
|
)
|
||||||
|
|
||||||
|
requests.forEach((issue: Issue) => {
|
||||||
|
const issueData: IssueData = issue.data
|
||||||
|
|
||||||
|
if (
|
||||||
|
issueData.missing('channel_name') ||
|
||||||
|
issueData.missing('country') ||
|
||||||
|
issueData.missing('is_nsfw') ||
|
||||||
|
issueData.missing('logo') ||
|
||||||
|
issueData.missing('feed_name') ||
|
||||||
|
issueData.missing('broadcast_area') ||
|
||||||
|
issueData.missing('timezones') ||
|
||||||
|
issueData.missing('languages') ||
|
||||||
|
issueData.missing('video_format')
|
||||||
|
)
|
||||||
|
return
|
||||||
|
|
||||||
|
const channelId = createChannelId(
|
||||||
|
issueData.getString('channel_name'),
|
||||||
|
issueData.getString('country')
|
||||||
|
)
|
||||||
|
if (!channelId) return
|
||||||
|
|
||||||
|
const found: Channel = data.channels.first((channel: Channel) => channel.id === channelId)
|
||||||
|
if (found) return
|
||||||
|
|
||||||
|
const newChannel = new Channel({
|
||||||
|
id: channelId,
|
||||||
|
name: issueData.getString('channel_name') || '',
|
||||||
|
alt_names: issueData.getArray('alt_names'),
|
||||||
|
network: issueData.getString('network'),
|
||||||
|
owners: issueData.getArray('owners'),
|
||||||
|
country: issueData.getString('country') || '',
|
||||||
|
subdivision: issueData.getString('subdivision'),
|
||||||
|
city: issueData.getString('city'),
|
||||||
|
categories: issueData.getArray('categories'),
|
||||||
|
is_nsfw: issueData.getBoolean('is_nsfw') || false,
|
||||||
|
launched: issueData.getString('launched'),
|
||||||
|
closed: issueData.getString('closed'),
|
||||||
|
replaced_by: issueData.getString('replaced_by'),
|
||||||
|
website: issueData.getString('website'),
|
||||||
|
logo: issueData.getString('logo') || ''
|
||||||
|
})
|
||||||
|
data.channels.add(newChannel)
|
||||||
|
|
||||||
|
const feedName = issueData.getString('feed_name') || 'SD'
|
||||||
|
const newFeed = new Feed({
|
||||||
|
channel: channelId,
|
||||||
|
id: createFeedId(feedName),
|
||||||
|
name: feedName,
|
||||||
|
is_main: true,
|
||||||
|
broadcast_area: issueData.getArray('broadcast_area') || [],
|
||||||
|
timezones: issueData.getArray('timezones') || [],
|
||||||
|
languages: issueData.getArray('languages') || [],
|
||||||
|
video_format: issueData.getString('video_format')
|
||||||
|
})
|
||||||
|
data.feeds.add(newFeed)
|
||||||
|
|
||||||
|
processedIssues.push(issue)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
async function unblockChannels(issues: Collection, data: DataLoaderData) {
|
||||||
|
const requests = issues.filter(
|
||||||
|
issue => issue.labels.includes('blocklist:remove') && issue.labels.includes('approved')
|
||||||
|
)
|
||||||
|
|
||||||
|
requests.forEach((issue: Issue) => {
|
||||||
|
const issueData: IssueData = issue.data
|
||||||
|
|
||||||
|
if (issueData.missing('channel_id')) return
|
||||||
|
|
||||||
|
const found: BlocklistRecord = data.blocklistRecords.first(
|
||||||
|
(blocklistRecord: BlocklistRecord) =>
|
||||||
|
blocklistRecord.channelId === issueData.getString('channel_id')
|
||||||
|
)
|
||||||
|
if (!found) return
|
||||||
|
|
||||||
|
data.blocklistRecords.remove(
|
||||||
|
(blocklistRecord: BlocklistRecord) => blocklistRecord.channelId === found.channelId
|
||||||
|
)
|
||||||
|
|
||||||
|
processedIssues.push(issue)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
async function blockChannels(issues: Collection, data: DataLoaderData) {
|
||||||
|
const requests = issues.filter(
|
||||||
|
issue => issue.labels.includes('blocklist:add') && issue.labels.includes('approved')
|
||||||
|
)
|
||||||
|
|
||||||
|
requests.forEach((issue: Issue) => {
|
||||||
|
const issueData: IssueData = issue.data
|
||||||
|
|
||||||
|
if (issueData.missing('channel_id')) return
|
||||||
|
|
||||||
|
const found: BlocklistRecord = data.blocklistRecords.first(
|
||||||
|
(blocklistRecord: BlocklistRecord) =>
|
||||||
|
blocklistRecord.channelId === issueData.getString('channel_id')
|
||||||
|
)
|
||||||
|
if (found) return
|
||||||
|
|
||||||
|
const channel = issueData.getString('channel_id')
|
||||||
|
const reason = issueData.getString('reason')?.toLowerCase()
|
||||||
|
const ref = issueData.getString('ref')
|
||||||
|
if (!channel || !reason || !ref) return
|
||||||
|
|
||||||
|
const newBlocklistRecord = new BlocklistRecord({
|
||||||
|
channel,
|
||||||
|
reason,
|
||||||
|
ref
|
||||||
|
})
|
||||||
|
data.blocklistRecords.add(newBlocklistRecord)
|
||||||
|
|
||||||
|
processedIssues.push(issue)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
function onFeedIdChange(
|
||||||
|
channelId: string,
|
||||||
|
feedId: string,
|
||||||
|
newFeedId: string,
|
||||||
|
data: DataLoaderData
|
||||||
|
) {
|
||||||
|
data.channels.forEach((channel: Channel) => {
|
||||||
|
if (channel.replacedBy && channel.replacedBy === `${channelId}@${feedId}`) {
|
||||||
|
channel.replacedBy = `${channelId}@${newFeedId}`
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
function onFeedNewMain(channelId: string, feedId: string, data: DataLoaderData) {
|
||||||
|
data.feeds.forEach((feed: Feed) => {
|
||||||
|
if (feed.channelId === channelId && feed.id !== feedId && feed.isMain === true) {
|
||||||
|
feed.isMain = false
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
function onFeedRemoval(channelId: string, feedId: string, data: DataLoaderData) {
|
||||||
|
data.channels.forEach((channel: Channel) => {
|
||||||
|
if (channel.replacedBy && channel.replacedBy === `${channelId}@${feedId}`) {
|
||||||
|
channel.replacedBy = ''
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
function onChannelIdChange(channelId: string, newChannelId: string, data: DataLoaderData) {
|
||||||
|
data.channels.forEach((channel: Channel) => {
|
||||||
|
if (channel.replacedBy && channel.replacedBy.includes(channelId)) {
|
||||||
|
channel.replacedBy = channel.replacedBy.replace(channelId, newChannelId)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
data.feeds.forEach((feed: Feed) => {
|
||||||
|
if (feed.channelId === channelId) {
|
||||||
|
feed.channelId = newChannelId
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
data.blocklistRecords.forEach((blocklistRecord: BlocklistRecord) => {
|
||||||
|
if (blocklistRecord.channelId === channelId) {
|
||||||
|
blocklistRecord.channelId = newChannelId
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
function onChannelRemoval(channelId: string, data: DataLoaderData) {
|
||||||
|
data.channels.forEach((channel: Channel) => {
|
||||||
|
if (channel.replacedBy && channel.replacedBy.includes(channelId)) {
|
||||||
|
channel.replacedBy = ''
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
data.feeds.remove((feed: Feed) => feed.channelId === channelId)
|
||||||
|
}
|
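scripts/commands/db/update.ts keeps at most one main feed per channel: whenever an approved issue marks a feed as is_main, onFeedNewMain clears the flag on every other feed of that channel before the new value is applied. A minimal sketch of that invariant over plain arrays instead of the @freearhey/core Collection (the FeedLike shape is a simplified, hypothetical stand-in for the Feed model):

// Simplified stand-in for the Feed model; only the fields the invariant needs.
type FeedLike = { channelId: string; id: string; isMain: boolean }

// Same logic as onFeedNewMain(), but over a plain array:
// every other feed of the channel loses its main flag.
function markAsMain(feeds: FeedLike[], channelId: string, feedId: string): void {
  for (const feed of feeds) {
    if (feed.channelId === channelId && feed.id !== feedId && feed.isMain) {
      feed.isMain = false
    }
  }
}

const feeds: FeedLike[] = [
  { channelId: 'AMCEurope.uk', id: 'SD', isMain: true },
  { channelId: 'AMCEurope.uk', id: 'HD', isMain: false }
]

markAsMain(feeds, 'AMCEurope.uk', 'HD')
// The caller then sets is_main on the HD feed itself; at this point
// the previous main ('SD') has already been cleared.
console.log(feeds.filter(feed => feed.isMain).length) // 0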
scripts/commands/db/validate.ts (new file, 260 lines)
@@ -0,0 +1,260 @@
|
||||||
|
import { Collection, Storage, Dictionary } from '@freearhey/core'
|
||||||
|
import { DataLoaderData } from '../../types/dataLoader'
|
||||||
|
import { ValidatorError } from '../../types/validator'
|
||||||
|
import { DataLoader } from '../../core/dataLoader'
|
||||||
|
import { DATA_DIR } from '../../constants'
|
||||||
|
import chalk from 'chalk'
|
||||||
|
import {
|
||||||
|
BlocklistRecord,
|
||||||
|
Subdivision,
|
||||||
|
Category,
|
||||||
|
Language,
|
||||||
|
Timezone,
|
||||||
|
Channel,
|
||||||
|
Country,
|
||||||
|
Region,
|
||||||
|
Feed
|
||||||
|
} from '../../models'
|
||||||
|
import {
|
||||||
|
BlocklistRecordValidator,
|
||||||
|
SubdivisionValidator,
|
||||||
|
CategoryValidator,
|
||||||
|
LanguageValidator,
|
||||||
|
TimezoneValidator,
|
||||||
|
ChannelValidator,
|
||||||
|
CountryValidator,
|
||||||
|
RegionValidator,
|
||||||
|
FeedValidator
|
||||||
|
} from '../../validators'
|
||||||
|
|
||||||
|
let totalErrors = 0
|
||||||
|
|
||||||
|
async function main() {
|
||||||
|
const dataStorage = new Storage(DATA_DIR)
|
||||||
|
const dataLoader = new DataLoader({ storage: dataStorage })
|
||||||
|
const data = await dataLoader.load()
|
||||||
|
|
||||||
|
validateChannels(data)
|
||||||
|
validateFeeds(data)
|
||||||
|
validateRegions(data)
|
||||||
|
validateBlocklist(data)
|
||||||
|
validateCategories(data)
|
||||||
|
validateCountries(data)
|
||||||
|
validateSubdivisions(data)
|
||||||
|
validateLanguages(data)
|
||||||
|
validateTimezones(data)
|
||||||
|
|
||||||
|
if (totalErrors > 0) {
|
||||||
|
console.log(chalk.red(`\r\n${totalErrors} error(s)`))
|
||||||
|
process.exit(1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
main()
|
||||||
|
|
||||||
|
function validateChannels(data: DataLoaderData) {
|
||||||
|
let errors = new Collection()
|
||||||
|
|
||||||
|
findDuplicatesBy(data.channels, ['id']).forEach((channel: Channel) => {
|
||||||
|
errors.add({
|
||||||
|
line: channel.getLine(),
|
||||||
|
message: `channel with id "${channel.id}" already exists`
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
const validator = new ChannelValidator({ data })
|
||||||
|
data.channels.forEach((channel: Channel) => {
|
||||||
|
errors = errors.concat(validator.validate(channel))
|
||||||
|
})
|
||||||
|
|
||||||
|
if (errors.count()) displayErrors('channels.csv', errors)
|
||||||
|
|
||||||
|
totalErrors += errors.count()
|
||||||
|
}
|
||||||
|
|
||||||
|
function validateFeeds(data: DataLoaderData) {
|
||||||
|
let errors = new Collection()
|
||||||
|
|
||||||
|
findDuplicatesBy(data.feeds, ['channelId', 'id']).forEach((feed: Feed) => {
|
||||||
|
errors.add({
|
||||||
|
line: feed.getLine(),
|
||||||
|
message: `feed with channel "${feed.channelId}" and id "${feed.id}" already exists`
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
const validator = new FeedValidator({ data })
|
||||||
|
data.feeds.forEach((feed: Feed) => {
|
||||||
|
errors = errors.concat(validator.validate(feed))
|
||||||
|
})
|
||||||
|
|
||||||
|
if (errors.count()) displayErrors('feeds.csv', errors)
|
||||||
|
|
||||||
|
totalErrors += errors.count()
|
||||||
|
}
|
||||||
|
|
||||||
|
function validateRegions(data: DataLoaderData) {
|
||||||
|
let errors = new Collection()
|
||||||
|
|
||||||
|
findDuplicatesBy(data.regions, ['code']).forEach((region: Region) => {
|
||||||
|
errors.add({
|
||||||
|
line: region.getLine(),
|
||||||
|
message: `region with code "${region.code}" already exists`
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
const validator = new RegionValidator({ data })
|
||||||
|
data.regions.forEach((region: Region) => {
|
||||||
|
errors = errors.concat(validator.validate(region))
|
||||||
|
})
|
||||||
|
|
||||||
|
if (errors.count()) displayErrors('regions.csv', errors)
|
||||||
|
|
||||||
|
totalErrors += errors.count()
|
||||||
|
}
|
||||||
|
|
||||||
|
function validateBlocklist(data: DataLoaderData) {
|
||||||
|
let errors = new Collection()
|
||||||
|
|
||||||
|
findDuplicatesBy(data.blocklistRecords, ['channelId', 'ref']).forEach(
|
||||||
|
(blocklistRecord: BlocklistRecord) => {
|
||||||
|
errors.add({
|
||||||
|
line: blocklistRecord.getLine(),
|
||||||
|
message: `blocklist record with channel "${blocklistRecord.channelId}" and ref "${blocklistRecord.ref}" already exists`
|
||||||
|
})
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
const validator = new BlocklistRecordValidator({ data })
|
||||||
|
data.blocklistRecords.forEach((blocklistRecord: BlocklistRecord) => {
|
||||||
|
errors = errors.concat(validator.validate(blocklistRecord))
|
||||||
|
})
|
||||||
|
|
||||||
|
if (errors.count()) displayErrors('blocklist.csv', errors)
|
||||||
|
|
||||||
|
totalErrors += errors.count()
|
||||||
|
}
|
||||||
|
|
||||||
|
function validateCategories(data: DataLoaderData) {
|
||||||
|
let errors = new Collection()
|
||||||
|
|
||||||
|
findDuplicatesBy(data.categories, ['id']).forEach((category: Category) => {
|
||||||
|
errors.add({
|
||||||
|
line: category.getLine(),
|
||||||
|
message: `category with id "${category.id}" already exists`
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
const validator = new CategoryValidator({ data })
|
||||||
|
data.categories.forEach((category: Category) => {
|
||||||
|
errors = errors.concat(validator.validate(category))
|
||||||
|
})
|
||||||
|
|
||||||
|
if (errors.count()) displayErrors('categories.csv', errors)
|
||||||
|
|
||||||
|
totalErrors += errors.count()
|
||||||
|
}
|
||||||
|
|
||||||
|
function validateCountries(data: DataLoaderData) {
|
||||||
|
let errors = new Collection()
|
||||||
|
|
||||||
|
findDuplicatesBy(data.countries, ['code']).forEach((country: Country) => {
|
||||||
|
errors.add({
|
||||||
|
line: country.getLine(),
|
||||||
|
message: `country with code "${country.code}" already exists`
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
const validator = new CountryValidator({ data })
|
||||||
|
data.countries.forEach((country: Country) => {
|
||||||
|
errors = errors.concat(validator.validate(country))
|
||||||
|
})
|
||||||
|
|
||||||
|
if (errors.count()) displayErrors('countries.csv', errors)
|
||||||
|
|
||||||
|
totalErrors += errors.count()
|
||||||
|
}
|
||||||
|
|
||||||
|
function validateSubdivisions(data: DataLoaderData) {
|
||||||
|
let errors = new Collection()
|
||||||
|
|
||||||
|
findDuplicatesBy(data.subdivisions, ['code']).forEach((subdivision: Subdivision) => {
|
||||||
|
errors.add({
|
||||||
|
line: subdivision.getLine(),
|
||||||
|
message: `subdivision with code "${subdivision.code}" already exists`
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
const validator = new SubdivisionValidator({ data })
|
||||||
|
data.subdivisions.forEach((subdivision: Subdivision) => {
|
||||||
|
errors = errors.concat(validator.validate(subdivision))
|
||||||
|
})
|
||||||
|
|
||||||
|
if (errors.count()) displayErrors('subdivisions.csv', errors)
|
||||||
|
|
||||||
|
totalErrors += errors.count()
|
||||||
|
}
|
||||||
|
|
||||||
|
function validateLanguages(data: DataLoaderData) {
|
||||||
|
let errors = new Collection()
|
||||||
|
|
||||||
|
findDuplicatesBy(data.languages, ['code']).forEach((language: Language) => {
|
||||||
|
errors.add({
|
||||||
|
line: language.getLine(),
|
||||||
|
message: `language with code "${language.code}" already exists`
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
const validator = new LanguageValidator({ data })
|
||||||
|
data.languages.forEach((language: Language) => {
|
||||||
|
errors = errors.concat(validator.validate(language))
|
||||||
|
})
|
||||||
|
|
||||||
|
if (errors.count()) displayErrors('languages.csv', errors)
|
||||||
|
|
||||||
|
totalErrors += errors.count()
|
||||||
|
}
|
||||||
|
|
||||||
|
function validateTimezones(data: DataLoaderData) {
|
||||||
|
let errors = new Collection()
|
||||||
|
|
||||||
|
findDuplicatesBy(data.timezones, ['id']).forEach((timezone: Timezone) => {
|
||||||
|
errors.add({
|
||||||
|
line: timezone.getLine(),
|
||||||
|
message: `timezone with id "${timezone.id}" already exists`
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
const validator = new TimezoneValidator({ data })
|
||||||
|
data.timezones.forEach((timezone: Timezone) => {
|
||||||
|
errors = errors.concat(validator.validate(timezone))
|
||||||
|
})
|
||||||
|
|
||||||
|
if (errors.count()) displayErrors('timezones.csv', errors)
|
||||||
|
|
||||||
|
totalErrors += errors.count()
|
||||||
|
}
|
||||||
|
|
||||||
|
function findDuplicatesBy(items: Collection, keys: string[]) {
|
||||||
|
const duplicates = new Collection()
|
||||||
|
const buffer = new Dictionary()
|
||||||
|
|
||||||
|
items.forEach((item: { [key: string]: string | number }) => {
|
||||||
|
const normId = keys.map(key => item[key].toString().toLowerCase()).join()
|
||||||
|
if (buffer.has(normId)) {
|
||||||
|
duplicates.add(item)
|
||||||
|
}
|
||||||
|
|
||||||
|
buffer.set(normId, true)
|
||||||
|
})
|
||||||
|
|
||||||
|
return duplicates
|
||||||
|
}
|
||||||
|
|
||||||
|
function displayErrors(filepath: string, errors: Collection) {
|
||||||
|
console.log(`\r\n${chalk.underline(filepath)}`)
|
||||||
|
|
||||||
|
errors.forEach((error: ValidatorError) => {
|
||||||
|
const position = error.line.toString().padEnd(6, ' ')
|
||||||
|
console.log(` ${chalk.gray(position) + error.message}`)
|
||||||
|
})
|
||||||
|
}
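scripts/commands/db/validate.ts reports duplicates by building a case-insensitive compound key from the listed fields and remembering which keys it has already seen. A minimal sketch of the same idea using a plain array and a Set in place of the @freearhey/core Collection and Dictionary:

// Plain-array version of findDuplicatesBy(): items whose key fields,
// lowercased and joined, repeat an earlier item's key are reported.
function findDuplicatesBy<T extends Record<string, string | number>>(
  items: T[],
  keys: (keyof T)[]
): T[] {
  const seen = new Set<string>()
  const duplicates: T[] = []

  for (const item of items) {
    const normId = keys.map(key => String(item[key]).toLowerCase()).join()
    if (seen.has(normId)) duplicates.push(item)
    seen.add(normId)
  }

  return duplicates
}

const feeds = [
  { channelId: 'AMCEurope.uk', id: 'SD' },
  { channelId: 'amceurope.uk', id: 'sd' } // same key once lowercased
]

console.log(findDuplicatesBy(feeds, ['channelId', 'id']).length) // 1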
@@ -30,9 +30,12 @@ const opts = {

 export class CSVParser {
   async parse(data: string): Promise<Collection> {
-    const items = await csv2json(opts).fromString(data)
+    const parsed = await csv2json(opts).fromString(data)
+    const rows = parsed.map((data, i) => {
+      return { line: i + 2, data }
+    })

-    return new Collection(items)
+    return new Collection(rows)
   }
 }
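Each parsed row is tagged with line: i + 2 because data rows start after the CSV header and line numbers are 1-based: the row at index 0 sits on line 2 of the file. A small sketch of the expected numbering (the parsed array below is stand-in data, not a real csv2json call):

// Stand-in for the array csv2json would return for a two-row CSV;
// the real parser is not invoked in this sketch.
const parsed = [
  { id: 'AMCEurope.uk' }, // physically on line 2 (line 1 is the header)
  { id: 'AXNWhite.us' }   // physically on line 3
]

const rows = parsed.map((data, i) => {
  return { line: i + 2, data }
})

console.log(rows[0].line) // 2
console.log(rows[1].line) // 3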
scripts/core/dataLoader.ts (new file, 173 lines)
@@ -0,0 +1,173 @@
|
||||||
|
import { Storage, File, Dictionary, Collection } from '@freearhey/core'
|
||||||
|
import { DataLoaderData, DataLoaderProps } from '../types/dataLoader'
|
||||||
|
import { CSVParserRow } from '../types/csvParser'
|
||||||
|
import { CSVParser } from './'
|
||||||
|
import chalk from 'chalk'
|
||||||
|
import {
|
||||||
|
Feed,
|
||||||
|
Channel,
|
||||||
|
BlocklistRecord,
|
||||||
|
Language,
|
||||||
|
Country,
|
||||||
|
Subdivision,
|
||||||
|
Region,
|
||||||
|
Timezone,
|
||||||
|
Category
|
||||||
|
} from '../models'
|
||||||
|
|
||||||
|
export class DataLoader {
|
||||||
|
storage: Storage
|
||||||
|
parser: CSVParser
|
||||||
|
|
||||||
|
constructor({ storage }: DataLoaderProps) {
|
||||||
|
this.storage = storage
|
||||||
|
this.parser = new CSVParser()
|
||||||
|
}
|
||||||
|
|
||||||
|
async load(): Promise<DataLoaderData> {
|
||||||
|
const files = await this.storage.list('*.csv')
|
||||||
|
|
||||||
|
const data: DataLoaderData = {
|
||||||
|
channels: new Collection(),
|
||||||
|
feeds: new Collection(),
|
||||||
|
categories: new Collection(),
|
||||||
|
languages: new Collection(),
|
||||||
|
blocklistRecords: new Collection(),
|
||||||
|
timezones: new Collection(),
|
||||||
|
regions: new Collection(),
|
||||||
|
subdivisions: new Collection(),
|
||||||
|
countries: new Collection(),
|
||||||
|
feedsGroupedByChannelId: new Dictionary(),
|
||||||
|
feedsKeyByStreamId: new Dictionary(),
|
||||||
|
channelsKeyById: new Dictionary(),
|
||||||
|
countriesKeyByCode: new Dictionary(),
|
||||||
|
subdivisionsKeyByCode: new Dictionary(),
|
||||||
|
categoriesKeyById: new Dictionary(),
|
||||||
|
regionsKeyByCode: new Dictionary(),
|
||||||
|
timezonesKeyById: new Dictionary(),
|
||||||
|
languagesKeyByCode: new Dictionary()
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const filepath of files) {
|
||||||
|
const file = new File(filepath)
|
||||||
|
if (file.extension() !== 'csv') continue
|
||||||
|
|
||||||
|
const csv = await this.storage.load(file.basename())
|
||||||
|
const rows = csv.split(/\r\n/)
|
||||||
|
const headers = rows[0].split(',')
|
||||||
|
const errors = new Collection()
|
||||||
|
for (const [i, line] of rows.entries()) {
|
||||||
|
if (!line.trim()) continue
|
||||||
|
if (line.indexOf('\n') > -1) {
|
||||||
|
errors.add({
|
||||||
|
line: i + 1,
|
||||||
|
message: 'row has the wrong line ending character, should be CRLF'
|
||||||
|
})
|
||||||
|
}
|
||||||
|
if (line.split(/,(?=(?:(?:[^"]*"){2})*[^"]*$)/).length !== headers.length) {
|
||||||
|
errors.add({
|
||||||
|
line: i + 1,
|
||||||
|
message: 'row has the wrong number of columns'
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (errors.notEmpty()) {
|
||||||
|
displayErrors(filepath, errors)
|
||||||
|
console.log(chalk.red(`\r\n${errors.count()} error(s)`))
|
||||||
|
process.exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
const parsed = await this.parser.parse(csv)
|
||||||
|
const filename = file.name()
|
||||||
|
|
||||||
|
switch (filename) {
|
||||||
|
case 'channels': {
|
||||||
|
const channels = parsed.map((row: CSVParserRow) =>
|
||||||
|
new Channel(row.data).setLine(row.line)
|
||||||
|
)
|
||||||
|
data.channels = channels
|
||||||
|
data.channelsKeyById = channels.keyBy((channel: Channel) => channel.id)
|
||||||
|
break
|
||||||
|
}
|
||||||
|
case 'feeds': {
|
||||||
|
const feeds = parsed.map((row: CSVParserRow) => new Feed(row.data).setLine(row.line))
|
||||||
|
data.feeds = feeds
|
||||||
|
data.feedsGroupedByChannelId = feeds.groupBy((feed: Feed) => feed.channelId)
|
||||||
|
data.feedsKeyByStreamId = feeds.keyBy((feed: Feed) => feed.getStreamId())
|
||||||
|
break
|
||||||
|
}
|
||||||
|
case 'blocklist': {
|
||||||
|
const blocklistRecords = parsed.map((row: CSVParserRow) =>
|
||||||
|
new BlocklistRecord(row.data).setLine(row.line)
|
||||||
|
)
|
||||||
|
data.blocklistRecords = blocklistRecords
|
||||||
|
break
|
||||||
|
}
|
||||||
|
case 'categories': {
|
||||||
|
const categories = parsed.map((row: CSVParserRow) =>
|
||||||
|
new Category(row.data).setLine(row.line)
|
||||||
|
)
|
||||||
|
data.categories = categories
|
||||||
|
data.categoriesKeyById = categories.keyBy((category: Category) => category.id)
|
||||||
|
break
|
||||||
|
}
|
||||||
|
case 'timezones': {
|
||||||
|
const timezones = parsed.map((row: CSVParserRow) =>
|
||||||
|
new Timezone(row.data).setLine(row.line)
|
||||||
|
)
|
||||||
|
data.timezones = timezones
|
||||||
|
data.timezonesKeyById = timezones.keyBy((timezone: Timezone) => timezone.id)
|
||||||
|
break
|
||||||
|
}
|
||||||
|
case 'regions': {
|
||||||
|
const regions = parsed.map((row: CSVParserRow) => new Region(row.data).setLine(row.line))
|
||||||
|
data.regions = regions
|
||||||
|
data.regionsKeyByCode = regions.keyBy((region: Region) => region.code)
|
||||||
|
break
|
||||||
|
}
|
||||||
|
case 'languages': {
|
||||||
|
const languages = parsed.map((row: CSVParserRow) =>
|
||||||
|
new Language(row.data).setLine(row.line)
|
||||||
|
)
|
||||||
|
data.languages = languages
|
||||||
|
data.languagesKeyByCode = languages.keyBy((language: Language) => language.code)
|
||||||
|
break
|
||||||
|
}
|
||||||
|
case 'countries': {
|
||||||
|
const countries = parsed.map((row: CSVParserRow) =>
|
||||||
|
new Country(row.data).setLine(row.line)
|
||||||
|
)
|
||||||
|
data.countries = countries
|
||||||
|
data.countriesKeyByCode = countries.keyBy((country: Country) => country.code)
|
||||||
|
break
|
||||||
|
}
|
||||||
|
case 'subdivisions': {
|
||||||
|
const subdivisions = parsed.map((row: CSVParserRow) =>
|
||||||
|
new Subdivision(row.data).setLine(row.line)
|
||||||
|
)
|
||||||
|
data.subdivisions = subdivisions
|
||||||
|
data.subdivisionsKeyByCode = subdivisions.keyBy(
|
||||||
|
(subdivision: Subdivision) => subdivision.code
|
||||||
|
)
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
data.channels = data.channels.map((channel: Channel) =>
|
||||||
|
channel.withFeeds(data.feedsGroupedByChannelId)
|
||||||
|
)
|
||||||
|
|
||||||
|
return data
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function displayErrors(filepath: string, errors: Collection) {
|
||||||
|
console.log(`\r\n${chalk.underline(filepath)}`)
|
||||||
|
|
||||||
|
errors.forEach((error: ValidatorError) => {
|
||||||
|
const position = error.line.toString().padEnd(6, ' ')
|
||||||
|
console.log(` ${chalk.gray(position) + error.message}`)
|
||||||
|
})
|
||||||
|
}
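Before parsing, the DataLoader makes two structural checks per row: every row must end with CRLF, and every row must split into as many columns as the header once commas inside quoted fields are ignored. The column split relies on a lookahead that treats a comma as a separator only when it is followed by an even number of double quotes. A small sketch of that column check in isolation (the sample rows are made up):

// Split on commas that are outside quoted fields: a comma counts as a
// separator only if it is followed by an even number of double quotes.
const splitColumns = (row: string): string[] =>
  row.split(/,(?=(?:(?:[^"]*"){2})*[^"]*$)/)

const headers = splitColumns('id,name,owners')

// A quoted field containing commas still counts as one column.
const good = splitColumns('AMCEurope.uk,AMC Europe,"AMC Networks, Inc."')
console.log(good.length === headers.length) // true

// A stray comma outside quotes produces an extra column and would be reported.
const bad = splitColumns('AMCEurope.uk,AMC,Europe,AMC Networks')
console.log(bad.length === headers.length) // false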
@@ -17,7 +17,7 @@ export class IssueLoader {

     let issues: object[] = []
     if (TESTING) {
-      issues = (await import('../../tests/__data__/input/update/issues.js')).default
+      issues = (await import('../../tests/__data__/input/db/update/issues.js')).default
     } else {
       issues = await octokit.paginate(octokit.rest.issues.listForRepo, {
         owner: OWNER,
||||||
|
|
|
@@ -1,424 +0,0 @@
|
||||||
import { CSV, IssueLoader, CSVParser, Issue, IssueData } from '../core'
|
|
||||||
import { Channel, Blocked, Feed } from '../models'
|
|
||||||
import { DATA_DIR } from '../constants'
|
|
||||||
import { Storage, Collection } from '@freearhey/core'
|
|
||||||
import { createChannelId, createFeedId } from '../utils'
|
|
||||||
|
|
||||||
let blocklist = new Collection()
|
|
||||||
let channels = new Collection()
|
|
||||||
let feeds = new Collection()
|
|
||||||
let issues = new Collection()
|
|
||||||
const processedIssues = new Collection()
|
|
||||||
|
|
||||||
async function main() {
|
|
||||||
const dataStorage = new Storage(DATA_DIR)
|
|
||||||
const parser = new CSVParser()
|
|
||||||
const loader = new IssueLoader()
|
|
||||||
|
|
||||||
issues = await loader.load()
|
|
||||||
|
|
||||||
const channelsCSV = await dataStorage.load('channels.csv')
|
|
||||||
channels = (await parser.parse(channelsCSV)).map(data => new Channel(data))
|
|
||||||
|
|
||||||
const feedsCSV = await dataStorage.load('feeds.csv')
|
|
||||||
feeds = (await parser.parse(feedsCSV)).map(data => new Feed(data))
|
|
||||||
|
|
||||||
const blocklistCSV = await dataStorage.load('blocklist.csv')
|
|
||||||
blocklist = (await parser.parse(blocklistCSV)).map(data => new Blocked(data))
|
|
||||||
|
|
||||||
await removeFeeds()
|
|
||||||
await removeChannels()
|
|
||||||
await editFeeds()
|
|
||||||
await editChannels()
|
|
||||||
await addFeeds()
|
|
||||||
await addChannels()
|
|
||||||
await blockChannels()
|
|
||||||
await unblockChannels()
|
|
||||||
|
|
||||||
channels = channels.sortBy(channel => channel.id.toLowerCase())
|
|
||||||
const channelsOutput = new CSV({ items: channels }).toString()
|
|
||||||
await dataStorage.save('channels.csv', channelsOutput)
|
|
||||||
|
|
||||||
feeds = feeds.sortBy(feed => `${feed.channel}@${feed.id}`.toLowerCase())
|
|
||||||
const feedsOutput = new CSV({ items: feeds }).toString()
|
|
||||||
await dataStorage.save('feeds.csv', feedsOutput)
|
|
||||||
|
|
||||||
blocklist = blocklist.sortBy(blocked => blocked.channel.toLowerCase())
|
|
||||||
const blocklistOutput = new CSV({ items: blocklist }).toString()
|
|
||||||
await dataStorage.save('blocklist.csv', blocklistOutput)
|
|
||||||
|
|
||||||
const output = processedIssues.map((issue: Issue) => `closes #${issue.number}`).join(', ')
|
|
||||||
process.stdout.write(`OUTPUT=${output}`)
|
|
||||||
}
|
|
||||||
|
|
||||||
main()
|
|
||||||
|
|
||||||
async function removeFeeds() {
|
|
||||||
const requests = issues.filter(
|
|
||||||
issue => issue.labels.includes('feeds:remove') && issue.labels.includes('approved')
|
|
||||||
)
|
|
||||||
|
|
||||||
requests.forEach((issue: Issue) => {
|
|
||||||
if (issue.data.missing('channel_id') || issue.data.missing('feed_id')) return
|
|
||||||
|
|
||||||
const found = feeds.first(
|
|
||||||
(feed: Feed) =>
|
|
||||||
feed.channel === issue.data.getString('channel_id') &&
|
|
||||||
feed.id === issue.data.getString('feed_id')
|
|
||||||
)
|
|
||||||
if (!found) return
|
|
||||||
|
|
||||||
feeds.remove((feed: Feed) => feed.channel === found.channel && feed.id === found.id)
|
|
||||||
|
|
||||||
onFeedRemoval(found.channel, found.id)
|
|
||||||
|
|
||||||
processedIssues.push(issue)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
async function editFeeds() {
|
|
||||||
const requests = issues.filter(
|
|
||||||
issue => issue.labels.includes('feeds:edit') && issue.labels.includes('approved')
|
|
||||||
)
|
|
||||||
|
|
||||||
requests.forEach((issue: Issue) => {
|
|
||||||
const data: IssueData = issue.data
|
|
||||||
if (data.missing('channel_id') || data.missing('feed_id')) return
|
|
||||||
|
|
||||||
const found: Feed = feeds.first(
|
|
||||||
(feed: Feed) =>
|
|
||||||
feed.channel === data.getString('channel_id') && feed.id === data.getString('feed_id')
|
|
||||||
)
|
|
||||||
if (!found) return
|
|
||||||
|
|
||||||
let channelId: string | undefined = found.channel
|
|
||||||
let feedId: string | undefined = found.id
|
|
||||||
if (data.has('feed_name')) {
|
|
||||||
const name = data.getString('feed_name') || found.name
|
|
||||||
if (name) {
|
|
||||||
feedId = createFeedId(name)
|
|
||||||
if (feedId) onFeedIdChange(found.channel, found.id, feedId)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (data.has('is_main')) {
|
|
||||||
const isMain = data.getBoolean('is_main') || false
|
|
||||||
if (isMain) onFeedNewMain(channelId, feedId)
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!feedId || !channelId) return
|
|
||||||
|
|
||||||
const updated = new Feed({
|
|
||||||
channel: channelId,
|
|
||||||
id: feedId,
|
|
||||||
name: data.getString('feed_name'),
|
|
||||||
is_main: data.getBoolean('is_main'),
|
|
||||||
broadcast_area: data.getArray('broadcast_area'),
|
|
||||||
timezones: data.getArray('timezones'),
|
|
||||||
languages: data.getArray('languages'),
|
|
||||||
video_format: data.getString('video_format')
|
|
||||||
})
|
|
||||||
|
|
||||||
found.merge(updated)
|
|
||||||
|
|
||||||
processedIssues.push(issue)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
async function addFeeds() {
|
|
||||||
const requests = issues.filter(
|
|
||||||
issue => issue.labels.includes('feeds:add') && issue.labels.includes('approved')
|
|
||||||
)
|
|
||||||
|
|
||||||
requests.forEach((issue: Issue) => {
|
|
||||||
const data: IssueData = issue.data
|
|
||||||
|
|
||||||
if (
|
|
||||||
data.missing('channel_id') ||
|
|
||||||
data.missing('feed_name') ||
|
|
||||||
data.missing('is_main') ||
|
|
||||||
data.missing('broadcast_area') ||
|
|
||||||
data.missing('timezones') ||
|
|
||||||
data.missing('languages') ||
|
|
||||||
data.missing('video_format')
|
|
||||||
)
|
|
||||||
return
|
|
||||||
|
|
||||||
const channelId = data.getString('channel_id')
|
|
||||||
const feedName = data.getString('feed_name') || 'SD'
|
|
||||||
const feedId = createFeedId(feedName)
|
|
||||||
if (!channelId || !feedId) return
|
|
||||||
|
|
||||||
const found: Feed = feeds.first(
|
|
||||||
(feed: Feed) => feed.channel === channelId && feed.id === feedId
|
|
||||||
)
|
|
||||||
if (found) return
|
|
||||||
|
|
||||||
const isMain = data.getBoolean('is_main') || false
|
|
||||||
if (isMain) onFeedNewMain(channelId, feedId)
|
|
||||||
|
|
||||||
feeds.push(
|
|
||||||
new Feed({
|
|
||||||
channel: channelId,
|
|
||||||
id: feedId,
|
|
||||||
name: feedName,
|
|
||||||
is_main: data.getBoolean('is_main'),
|
|
||||||
broadcast_area: data.getArray('broadcast_area'),
|
|
||||||
timezones: data.getArray('timezones'),
|
|
||||||
languages: data.getArray('languages'),
|
|
||||||
video_format: data.getString('video_format')
|
|
||||||
})
|
|
||||||
)
|
|
||||||
|
|
||||||
processedIssues.push(issue)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
async function removeChannels() {
|
|
||||||
const requests = issues.filter(
|
|
||||||
issue => issue.labels.includes('channels:remove') && issue.labels.includes('approved')
|
|
||||||
)
|
|
||||||
|
|
||||||
requests.forEach((issue: Issue) => {
|
|
||||||
if (issue.data.missing('channel_id')) return
|
|
||||||
|
|
||||||
const found = channels.first(
|
|
||||||
(channel: Channel) => channel.id === issue.data.getString('channel_id')
|
|
||||||
)
|
|
||||||
if (!found) return
|
|
||||||
|
|
||||||
channels.remove((channel: Channel) => channel.id === found.id)
|
|
||||||
|
|
||||||
onChannelRemoval(found.id)
|
|
||||||
|
|
||||||
processedIssues.push(issue)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
async function editChannels() {
|
|
||||||
const requests = issues.filter(
|
|
||||||
issue => issue.labels.includes('channels:edit') && issue.labels.includes('approved')
|
|
||||||
)
|
|
||||||
|
|
||||||
requests.forEach((issue: Issue) => {
|
|
||||||
const data: IssueData = issue.data
|
|
||||||
if (data.missing('channel_id')) return
|
|
||||||
|
|
||||||
const found: Channel = channels.first(
|
|
||||||
(channel: Channel) => channel.id === data.getString('channel_id')
|
|
||||||
)
|
|
||||||
if (!found) return
|
|
||||||
|
|
||||||
let channelId: string | undefined = found.id
|
|
||||||
if (data.has('channel_name') || data.has('country')) {
|
|
||||||
const name = data.getString('channel_name') || found.name
|
|
||||||
const country = data.getString('country') || found.country
|
|
||||||
if (name && country) {
|
|
||||||
channelId = createChannelId(name, country)
|
|
||||||
if (channelId) onChannelIdChange(found.id, channelId)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!channelId) return
|
|
||||||
|
|
||||||
const updated = new Channel({
|
|
||||||
id: channelId,
|
|
||||||
name: data.getString('channel_name'),
|
|
||||||
alt_names: data.getArray('alt_names'),
|
|
||||||
network: data.getString('network'),
|
|
||||||
owners: data.getArray('owners'),
|
|
||||||
country: data.getString('country'),
|
|
||||||
subdivision: data.getString('subdivision'),
|
|
||||||
city: data.getString('city'),
|
|
||||||
broadcast_area: data.getArray('broadcast_area'),
|
|
||||||
languages: data.getArray('languages'),
|
|
||||||
categories: data.getArray('categories'),
|
|
||||||
is_nsfw: data.getBoolean('is_nsfw'),
|
|
||||||
launched: data.getString('launched'),
|
|
||||||
closed: data.getString('closed'),
|
|
||||||
replaced_by: data.getString('replaced_by'),
|
|
||||||
website: data.getString('website'),
|
|
||||||
logo: data.getString('logo')
|
|
||||||
})
|
|
||||||
|
|
||||||
found.merge(updated)
|
|
||||||
|
|
||||||
processedIssues.push(issue)
|
|
||||||
  })
}

async function addChannels() {
  const requests = issues.filter(
    issue => issue.labels.includes('channels:add') && issue.labels.includes('approved')
  )

  requests.forEach((issue: Issue) => {
    const data: IssueData = issue.data

    if (
      data.missing('channel_name') ||
      data.missing('country') ||
      data.missing('is_nsfw') ||
      data.missing('logo') ||
      data.missing('feed_name') ||
      data.missing('broadcast_area') ||
      data.missing('timezones') ||
      data.missing('languages') ||
      data.missing('video_format')
    )
      return

    const channelId = createChannelId(data.getString('channel_name'), data.getString('country'))
    if (!channelId) return

    const found: Channel = channels.first((channel: Channel) => channel.id === channelId)
    if (found) return

    channels.push(
      new Channel({
        id: channelId,
        name: data.getString('channel_name'),
        alt_names: data.getArray('alt_names'),
        network: data.getString('network'),
        owners: data.getArray('owners'),
        country: data.getString('country'),
        subdivision: data.getString('subdivision'),
        city: data.getString('city'),
        broadcast_area: data.getArray('broadcast_area'),
        languages: data.getArray('languages'),
        categories: data.getArray('categories'),
        is_nsfw: data.getBoolean('is_nsfw'),
        launched: data.getString('launched'),
        closed: data.getString('closed'),
        replaced_by: data.getString('replaced_by'),
        website: data.getString('website'),
        logo: data.getString('logo')
      })
    )

    const feedName = data.getString('feed_name') || 'SD'

    feeds.push(
      new Feed({
        channel: channelId,
        id: createFeedId(feedName),
        name: feedName,
        is_main: true,
        broadcast_area: data.getArray('broadcast_area'),
        timezones: data.getArray('timezones'),
        languages: data.getArray('languages'),
        video_format: data.getString('video_format'),
        launched: data.getString('launched'),
        closed: data.getString('closed'),
        replaced_by: data.getString('replaced_by')
      })
    )

    processedIssues.push(issue)
  })
}

async function unblockChannels() {
  const requests = issues.filter(
    issue => issue.labels.includes('blocklist:remove') && issue.labels.includes('approved')
  )

  requests.forEach((issue: Issue) => {
    const data = issue.data
    if (data.missing('channel_id')) return

    const found: Blocked = blocklist.first(
      (blocked: Blocked) => blocked.channel === data.getString('channel_id')
    )
    if (!found) return

    blocklist.remove((blocked: Blocked) => blocked.channel === found.channel)

    processedIssues.push(issue)
  })
}

async function blockChannels() {
  const requests = issues.filter(
    issue => issue.labels.includes('blocklist:add') && issue.labels.includes('approved')
  )

  requests.forEach((issue: Issue) => {
    const data = issue.data
    if (data.missing('channel_id')) return

    const found: Blocked = blocklist.first(
      (blocked: Blocked) => blocked.channel === data.getString('channel_id')
    )
    if (found) return

    const channel = data.getString('channel_id')
    const reason = data.getString('reason')?.toLowerCase()
    const ref = data.getString('ref')
    if (!channel || !reason || !ref) return

    blocklist.push(
      new Blocked({
        channel,
        reason,
        ref
      })
    )

    processedIssues.push(issue)
  })
}

function onFeedIdChange(channelId: string, feedId: string, newFeedId: string) {
  channels.forEach((channel: Channel) => {
    if (channel.replaced_by && channel.replaced_by === `${channelId}@${feedId}`) {
      channel.replaced_by = `${channelId}@${newFeedId}`
    }
  })
}

function onFeedNewMain(channelId: string, feedId: string) {
  feeds.forEach((feed: Feed) => {
    if (feed.channel === channelId && feed.id !== feedId && feed.is_main === true) {
      feed.is_main = false
    }
  })
}

function onFeedRemoval(channelId: string, feedId: string) {
  channels.forEach((channel: Channel) => {
    if (channel.replaced_by && channel.replaced_by === `${channelId}@${feedId}`) {
      channel.replaced_by = ''
    }
  })
}

function onChannelIdChange(channelId: string, newChannelId: string) {
  channels.forEach((channel: Channel) => {
    if (channel.replaced_by && channel.replaced_by.includes(channelId)) {
      channel.replaced_by = channel.replaced_by.replace(channelId, newChannelId)
    }
  })

  feeds.forEach((feed: Feed) => {
    if (feed.channel === channelId) {
      feed.channel = newChannelId
    }
  })

  blocklist.forEach((blocked: Blocked) => {
    if (blocked.channel === channelId) {
      blocked.channel = newChannelId
    }
  })
}

function onChannelRemoval(channelId: string) {
  channels.forEach((channel: Channel) => {
    if (channel.replaced_by && channel.replaced_by.includes(channelId)) {
      channel.replaced_by = ''
    }
  })

  feeds.remove((feed: Feed) => feed.channel === channelId)
}
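The removed issue-processing code above relies entirely on the `IssueData` accessors (`missing`, `getString`, `getArray`, `getBoolean`) imported from `scripts/core`, which is not part of this patch. A rough sketch of the surface those calls assume follows; the field storage and the `;` separator are assumptions, not the actual implementation:

```ts
// Hypothetical sketch of the IssueData surface used above; the real class lives in scripts/core.
class IssueDataSketch {
  constructor(private fields: Map<string, string>) {}

  missing(key: string): boolean {
    const value = this.fields.get(key)
    return value === undefined || value.trim() === ''
  }

  getString(key: string): string | undefined {
    return this.missing(key) ? undefined : this.fields.get(key)
  }

  getArray(key: string): string[] | undefined {
    const value = this.getString(key)
    return value ? value.split(';').map(item => item.trim()) : undefined
  }

  getBoolean(key: string): boolean | undefined {
    const value = this.getString(key)
    return value === undefined ? undefined : value.toLowerCase() === 'true'
  }
}
```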
@ -1,361 +0,0 @@
|
||||||
import { Collection, Storage, File, Dictionary, Logger } from '@freearhey/core'
|
|
||||||
import { DATA_DIR } from '../constants'
|
|
||||||
import schemesData from '../schemes'
|
|
||||||
import { program } from 'commander'
|
|
||||||
import Joi from 'joi'
|
|
||||||
import { CSVParser } from '../core'
|
|
||||||
import chalk from 'chalk'
|
|
||||||
import { createChannelId } from '../utils'
|
|
||||||
|
|
||||||
program.argument('[filepath]', 'Path to file to validate').parse(process.argv)
|
|
||||||
|
|
||||||
const logger = new Logger()
|
|
||||||
const buffer = new Dictionary()
|
|
||||||
const files = new Dictionary()
|
|
||||||
const schemes: { [key: string]: object } = schemesData
|
|
||||||
|
|
||||||
async function main() {
|
|
||||||
const dataStorage = new Storage(DATA_DIR)
|
|
||||||
const _files = await dataStorage.list('*.csv')
|
|
||||||
let globalErrors = new Collection()
|
|
||||||
const parser = new CSVParser()
|
|
||||||
|
|
||||||
for (const filepath of _files) {
|
|
||||||
const file = new File(filepath)
|
|
||||||
if (file.extension() !== 'csv') continue
|
|
||||||
|
|
||||||
const csv = await dataStorage.load(file.basename())
|
|
||||||
|
|
||||||
const rows = csv.split(/\r\n/)
|
|
||||||
const headers = rows[0].split(',')
|
|
||||||
for (const [i, line] of rows.entries()) {
|
|
||||||
if (!line.trim()) continue
|
|
||||||
if (line.indexOf('\n') > -1)
|
|
||||||
return handleError(
|
|
||||||
`Error: row ${i + 1} has the wrong line ending character, should be CRLF (${filepath})`
|
|
||||||
)
|
|
||||||
if (line.split(/,(?=(?:(?:[^"]*"){2})*[^"]*$)/).length !== headers.length)
|
|
||||||
return handleError(`Error: row ${i + 1} has the wrong number of columns (${filepath})`)
|
|
||||||
}
|
|
||||||
|
|
||||||
const data = await parser.parse(csv)
|
|
||||||
const filename = file.name()
|
|
||||||
|
|
||||||
switch (filename) {
|
|
||||||
case 'feeds':
|
|
||||||
buffer.set(
|
|
||||||
'feeds',
|
|
||||||
data.keyBy(item => item.channel + item.id)
|
|
||||||
)
|
|
||||||
buffer.set(
|
|
||||||
'feedsByChannel',
|
|
||||||
data.filter(item => item.is_main).keyBy(item => item.channel)
|
|
||||||
)
|
|
||||||
break
|
|
||||||
case 'blocklist':
|
|
||||||
buffer.set(
|
|
||||||
'blocklist',
|
|
||||||
data.keyBy(item => item.channel + item.ref)
|
|
||||||
)
|
|
||||||
break
|
|
||||||
case 'categories':
|
|
||||||
case 'channels':
|
|
||||||
case 'timezones':
|
|
||||||
buffer.set(
|
|
||||||
filename,
|
|
||||||
data.keyBy(item => item.id)
|
|
||||||
)
|
|
||||||
break
|
|
||||||
default:
|
|
||||||
buffer.set(
|
|
||||||
filename,
|
|
||||||
data.keyBy(item => item.code)
|
|
||||||
)
|
|
||||||
break
|
|
||||||
}
|
|
||||||
|
|
||||||
files.set(filename, data)
|
|
||||||
}
|
|
||||||
|
|
||||||
const filesToCheck = program.args.length ? program.args : _files
|
|
||||||
for (const filepath of filesToCheck) {
|
|
||||||
const file = new File(filepath)
|
|
||||||
const filename = file.name()
|
|
||||||
if (!schemes[filename]) return handleError(`Error: "${filename}" scheme is missing`)
|
|
||||||
|
|
||||||
const rows: Collection = files.get(filename)
|
|
||||||
const rowsCopy = JSON.parse(JSON.stringify(rows.all()))
|
|
||||||
|
|
||||||
let fileErrors = new Collection()
|
|
||||||
switch (filename) {
|
|
||||||
case 'channels':
|
|
||||||
fileErrors = fileErrors.concat(findDuplicatesBy(rowsCopy, ['id']))
|
|
||||||
for (const [i, row] of rowsCopy.entries()) {
|
|
||||||
fileErrors = fileErrors.concat(validateChannelId(row, i))
|
|
||||||
fileErrors = fileErrors.concat(validateMainFeed(row, i))
|
|
||||||
fileErrors = fileErrors.concat(validateChannelBroadcastArea(row, i))
|
|
||||||
fileErrors = fileErrors.concat(validateReplacedBy(row, i))
|
|
||||||
fileErrors = fileErrors.concat(
|
|
||||||
checkValue(i, row, 'id', 'subdivision', buffer.get('subdivisions'))
|
|
||||||
)
|
|
||||||
fileErrors = fileErrors.concat(
|
|
||||||
checkValue(i, row, 'id', 'categories', buffer.get('categories'))
|
|
||||||
)
|
|
||||||
fileErrors = fileErrors.concat(
|
|
||||||
checkValue(i, row, 'id', 'languages', buffer.get('languages'))
|
|
||||||
)
|
|
||||||
fileErrors = fileErrors.concat(
|
|
||||||
checkValue(i, row, 'id', 'country', buffer.get('countries'))
|
|
||||||
)
|
|
||||||
}
|
|
||||||
break
|
|
||||||
case 'feeds':
|
|
||||||
fileErrors = fileErrors.concat(findDuplicatesBy(rowsCopy, ['channel', 'id']))
|
|
||||||
fileErrors = fileErrors.concat(findDuplicateMainFeeds(rowsCopy))
|
|
||||||
for (const [i, row] of rowsCopy.entries()) {
|
|
||||||
fileErrors = fileErrors.concat(validateChannel(row.channel, i))
|
|
||||||
fileErrors = fileErrors.concat(validateTimezones(row, i))
|
|
||||||
}
|
|
||||||
break
|
|
||||||
case 'blocklist':
|
|
||||||
fileErrors = fileErrors.concat(findDuplicatesBy(rowsCopy, ['channel', 'ref']))
|
|
||||||
for (const [i, row] of rowsCopy.entries()) {
|
|
||||||
fileErrors = fileErrors.concat(validateChannel(row.channel, i))
|
|
||||||
}
|
|
||||||
break
|
|
||||||
case 'countries':
|
|
||||||
fileErrors = fileErrors.concat(findDuplicatesBy(rowsCopy, ['code']))
|
|
||||||
for (const [i, row] of rowsCopy.entries()) {
|
|
||||||
fileErrors = fileErrors.concat(
|
|
||||||
checkValue(i, row, 'code', 'languages', buffer.get('languages'))
|
|
||||||
)
|
|
||||||
}
|
|
||||||
break
|
|
||||||
case 'subdivisions':
|
|
||||||
fileErrors = fileErrors.concat(findDuplicatesBy(rowsCopy, ['code']))
|
|
||||||
for (const [i, row] of rowsCopy.entries()) {
|
|
||||||
fileErrors = fileErrors.concat(
|
|
||||||
checkValue(i, row, 'code', 'country', buffer.get('countries'))
|
|
||||||
)
|
|
||||||
}
|
|
||||||
break
|
|
||||||
case 'regions':
|
|
||||||
fileErrors = fileErrors.concat(findDuplicatesBy(rowsCopy, ['code']))
|
|
||||||
for (const [i, row] of rowsCopy.entries()) {
|
|
||||||
fileErrors = fileErrors.concat(
|
|
||||||
checkValue(i, row, 'code', 'countries', buffer.get('countries'))
|
|
||||||
)
|
|
||||||
}
|
|
||||||
break
|
|
||||||
case 'categories':
|
|
||||||
fileErrors = fileErrors.concat(findDuplicatesBy(rowsCopy, ['id']))
|
|
||||||
break
|
|
||||||
case 'languages':
|
|
||||||
fileErrors = fileErrors.concat(findDuplicatesBy(rowsCopy, ['code']))
|
|
||||||
break
|
|
||||||
}
|
|
||||||
|
|
||||||
const schema = Joi.object(schemes[filename])
|
|
||||||
rows.all().forEach((row: { [key: string]: string }, i: number) => {
|
|
||||||
const { error } = schema.validate(row, { abortEarly: false })
|
|
||||||
if (error) {
|
|
||||||
error.details.forEach(detail => {
|
|
||||||
fileErrors.push({ line: i + 2, row, message: detail.message })
|
|
||||||
})
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
if (fileErrors.count()) {
|
|
||||||
logger.info(`\n${chalk.underline(filepath)}`)
|
|
||||||
fileErrors.forEach(err => {
|
|
||||||
const position = err.line.toString().padEnd(6, ' ')
|
|
||||||
const id = err.row && err.row.id ? ` ${err.row.id}:` : ''
|
|
||||||
logger.info(` ${chalk.gray(position)}${id} ${err.message}`)
|
|
||||||
})
|
|
||||||
globalErrors = globalErrors.concat(fileErrors)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (globalErrors.count()) return handleError(`${globalErrors.count()} error(s)`)
|
|
||||||
}
|
|
||||||
|
|
||||||
main()
|
|
||||||
|
|
||||||
function checkValue(
|
|
||||||
i: number,
|
|
||||||
row: { [key: string]: string[] | string | boolean },
|
|
||||||
key: string,
|
|
||||||
field: string,
|
|
||||||
collection: Collection
|
|
||||||
) {
|
|
||||||
const errors = new Collection()
|
|
||||||
let values: string[] = []
|
|
||||||
if (Array.isArray(row[field])) {
|
|
||||||
values = row[field] as string[]
|
|
||||||
} else if (typeof row[field] === 'string') {
|
|
||||||
values = new Array(row[field]) as string[]
|
|
||||||
}
|
|
||||||
|
|
||||||
values.forEach((value: string) => {
|
|
||||||
if (collection.missing(value)) {
|
|
||||||
errors.push({
|
|
||||||
line: i + 2,
|
|
||||||
message: `"${row[key]}" has an invalid ${field} "${value}"`
|
|
||||||
})
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
return errors
|
|
||||||
}
|
|
||||||
|
|
||||||
function validateReplacedBy(row: { [key: string]: string }, i: number) {
|
|
||||||
const errors = new Collection()
|
|
||||||
|
|
||||||
if (!row.replaced_by) return errors
|
|
||||||
|
|
||||||
const channels = buffer.get('channels')
|
|
||||||
const feeds = buffer.get('feeds')
|
|
||||||
const [channelId, feedId] = row.replaced_by.split('@')
|
|
||||||
|
|
||||||
if (channels.missing(channelId)) {
|
|
||||||
errors.push({
|
|
||||||
line: i + 2,
|
|
||||||
message: `"${row.id}" has an invalid replaced_by "${row.replaced_by}"`
|
|
||||||
})
|
|
||||||
} else if (feedId && feeds.missing(channelId + feedId)) {
|
|
||||||
errors.push({
|
|
||||||
line: i + 2,
|
|
||||||
message: `"${row.id}" has an invalid replaced_by "${row.replaced_by}"`
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
return errors
|
|
||||||
}
|
|
||||||
|
|
||||||
function validateChannel(channelId: string, i: number) {
|
|
||||||
const errors = new Collection()
|
|
||||||
const channels = buffer.get('channels')
|
|
||||||
|
|
||||||
if (channels.missing(channelId)) {
|
|
||||||
errors.push({
|
|
||||||
line: i + 2,
|
|
||||||
message: `"${channelId}" is missing in the channels.csv`
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
return errors
|
|
||||||
}
|
|
||||||
|
|
||||||
function validateMainFeed(row: { [key: string]: string }, i: number) {
|
|
||||||
const errors = new Collection()
|
|
||||||
const feedsByChannel = buffer.get('feedsByChannel')
|
|
||||||
|
|
||||||
if (feedsByChannel.missing(row.id)) {
|
|
||||||
errors.push({
|
|
||||||
line: i + 2,
|
|
||||||
message: `"${row.id}" channel does not have a main feed`
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
return errors
|
|
||||||
}
|
|
||||||
|
|
||||||
function findDuplicatesBy(rows: { [key: string]: string }[], keys: string[]) {
|
|
||||||
const errors = new Collection()
|
|
||||||
const buffer = new Dictionary()
|
|
||||||
|
|
||||||
rows.forEach((row, i) => {
|
|
||||||
const normId = keys.map(key => row[key].toString().toLowerCase()).join()
|
|
||||||
if (buffer.has(normId)) {
|
|
||||||
const fieldsList = keys.map(key => `${key} "${row[key]}"`).join(' and ')
|
|
||||||
errors.push({
|
|
||||||
line: i + 2,
|
|
||||||
message: `entry with the ${fieldsList} already exists`
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
buffer.set(normId, true)
|
|
||||||
})
|
|
||||||
|
|
||||||
return errors
|
|
||||||
}
|
|
||||||
|
|
||||||
function findDuplicateMainFeeds(rows: { [key: string]: string }[]) {
|
|
||||||
const errors = new Collection()
|
|
||||||
const buffer = new Dictionary()
|
|
||||||
|
|
||||||
rows.forEach((row, i) => {
|
|
||||||
const normId = `${row.channel}${row.is_main}`
|
|
||||||
if (buffer.has(normId)) {
|
|
||||||
errors.push({
|
|
||||||
line: i + 2,
|
|
||||||
message: `entry with the channel "${row.channel}" and is_main "true" already exists`
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
if (row.is_main) {
|
|
||||||
buffer.set(normId, true)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
return errors
|
|
||||||
}
|
|
||||||
|
|
||||||
function validateChannelId(row: { [key: string]: string }, i: number) {
|
|
||||||
const errors = new Collection()
|
|
||||||
|
|
||||||
const expectedId = createChannelId(row.name, row.country)
|
|
||||||
|
|
||||||
if (expectedId !== row.id) {
|
|
||||||
errors.push({
|
|
||||||
line: i + 2,
|
|
||||||
message: `"${row.id}" must be derived from the channel name "${row.name}" and the country code "${row.country}"`
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
return errors
|
|
||||||
}
|
|
||||||
|
|
||||||
function validateChannelBroadcastArea(row: { [key: string]: string[] }, i: number) {
|
|
||||||
const errors = new Collection()
|
|
||||||
const regions = buffer.get('regions')
|
|
||||||
const countries = buffer.get('countries')
|
|
||||||
const subdivisions = buffer.get('subdivisions')
|
|
||||||
|
|
||||||
row.broadcast_area.forEach((areaCode: string) => {
|
|
||||||
const [type, code] = areaCode.split('/')
|
|
||||||
if (
|
|
||||||
(type === 'r' && regions.missing(code)) ||
|
|
||||||
(type === 'c' && countries.missing(code)) ||
|
|
||||||
(type === 's' && subdivisions.missing(code))
|
|
||||||
) {
|
|
||||||
errors.push({
|
|
||||||
line: i + 2,
|
|
||||||
message: `"${row.id}" has the wrong broadcast_area "${areaCode}"`
|
|
||||||
})
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
return errors
|
|
||||||
}
|
|
||||||
|
|
||||||
function validateTimezones(row: { [key: string]: string[] }, i: number) {
|
|
||||||
const errors = new Collection()
|
|
||||||
const timezones = buffer.get('timezones')
|
|
||||||
|
|
||||||
row.timezones.forEach((timezone: string) => {
|
|
||||||
if (timezones.missing(timezone)) {
|
|
||||||
errors.push({
|
|
||||||
line: i + 2,
|
|
||||||
message: `"${row.channel}@${row.id}" has the wrong timezone "${timezone}"`
|
|
||||||
})
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
return errors
|
|
||||||
}
|
|
||||||
|
|
||||||
function handleError(message: string) {
|
|
||||||
logger.error(chalk.red(message))
|
|
||||||
process.exit(1)
|
|
||||||
}
|
|
|
@ -1,17 +0,0 @@
type BlockedProps = {
  channel: string
  reason: string
  ref: string
}

export class Blocked {
  channel: string
  reason: string
  ref: string

  constructor({ ref, reason, channel }: BlockedProps) {
    this.channel = channel
    this.reason = reason
    this.ref = ref
  }
}
42
scripts/models/blocklistRecord.ts
Normal file
@ -0,0 +1,42 @@
import { BlocklistRecordData } from '../types/blocklistRecord'
import { Dictionary } from '@freearhey/core'
import { Model } from './model'
import Joi from 'joi'

export class BlocklistRecord extends Model {
  channelId: string
  reason: string
  ref: string

  constructor(data: BlocklistRecordData) {
    super()

    this.channelId = data.channel
    this.reason = data.reason
    this.ref = data.ref
  }

  hasValidChannelId(channelsKeyById: Dictionary): boolean {
    return channelsKeyById.has(this.channelId)
  }

  data(): BlocklistRecordData {
    return {
      channel: this.channelId,
      reason: this.reason,
      ref: this.ref
    }
  }

  getSchema() {
    return Joi.object({
      channel: Joi.string()
        .regex(/^[A-Za-z0-9]+\.[a-z]{2}$/)
        .required(),
      reason: Joi.string()
        .valid(...['dmca', 'nsfw'])
        .required(),
      ref: Joi.string().uri().required()
    })
  }
}
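A minimal usage sketch of the new model (the channel id, ref and import paths below are examples only; `Dictionary` is the `@freearhey/core` class already used in the scripts above):

```ts
import { Dictionary } from '@freearhey/core'
import { BlocklistRecord } from '../models'

// Hypothetical blocklist entry; setLine() records the CSV line for later error reporting.
const record = new BlocklistRecord({
  channel: 'Example.us',
  reason: 'dmca',
  ref: 'https://example.com/takedown-notice'
}).setLine(2)

// Schema check, done exactly as the validators added later in this patch do it.
const { error } = record.getSchema().validate(record.data(), { abortEarly: false })

// Referential check against channels.csv keyed by channel id.
const channelsKeyById = new Dictionary()
channelsKeyById.set('Example.us', { id: 'Example.us' })
const isKnownChannel = record.hasValidChannelId(channelsKeyById) // true
```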
33
scripts/models/category.ts
Normal file
@ -0,0 +1,33 @@
import { CategoryData } from '../types/category'
import { Model } from './model'
import Joi from 'joi'

export class Category extends Model {
  id: string
  name: string

  constructor(data: CategoryData) {
    super()

    this.id = data.id
    this.name = data.name
  }

  data(): CategoryData {
    return {
      id: this.id,
      name: this.name
    }
  }

  getSchema() {
    return Joi.object({
      id: Joi.string()
        .regex(/^[a-z]+$/)
        .required(),
      name: Joi.string()
        .regex(/^[A-Z]+$/i)
        .required()
    })
  }
}
@ -1,91 +1,237 @@
|
||||||
type ChannelProps = {
|
import { Dictionary, Collection } from '@freearhey/core'
|
||||||
|
import { ChannelData } from '../types/channel'
|
||||||
|
import { createChannelId } from '../utils'
|
||||||
|
import JoiDate from '@joi/date'
|
||||||
|
import { Model } from './model'
|
||||||
|
import { Feed } from './feed'
|
||||||
|
import BaseJoi from 'joi'
|
||||||
|
import { IssueData } from '../core'
|
||||||
|
|
||||||
|
const Joi = BaseJoi.extend(JoiDate)
|
||||||
|
|
||||||
|
export class Channel extends Model {
|
||||||
id: string
|
id: string
|
||||||
name?: string
|
name: string
|
||||||
alt_names?: string[]
|
altNames?: Collection
|
||||||
network?: string
|
networkName?: string
|
||||||
owners?: string[]
|
ownerNames?: Collection
|
||||||
country?: string
|
countryCode: string
|
||||||
subdivision?: string
|
subdivisionCode?: string
|
||||||
city?: string
|
cityName?: string
|
||||||
broadcast_area?: string[]
|
categoryIds?: Collection
|
||||||
languages?: string[]
|
isNSFW: boolean
|
||||||
categories?: string[]
|
launchedDateString?: string
|
||||||
is_nsfw?: boolean
|
closedDateString?: string
|
||||||
launched?: string
|
replacedBy?: string
|
||||||
closed?: string
|
websiteUrl?: string
|
||||||
replaced_by?: string
|
logoUrl: string
|
||||||
website?: string
|
feeds?: Collection
|
||||||
logo?: string
|
|
||||||
|
constructor(data: ChannelData) {
|
||||||
|
super()
|
||||||
|
|
||||||
|
this.id = data.id
|
||||||
|
this.name = data.name
|
||||||
|
this.altNames = data.alt_names ? new Collection(data.alt_names) : undefined
|
||||||
|
this.networkName = data.network
|
||||||
|
this.ownerNames = data.owners ? new Collection(data.owners) : undefined
|
||||||
|
this.countryCode = data.country
|
||||||
|
this.subdivisionCode = data.subdivision
|
||||||
|
this.cityName = data.city
|
||||||
|
this.categoryIds = data.categories ? new Collection(data.categories) : undefined
|
||||||
|
this.isNSFW = data.is_nsfw
|
||||||
|
this.launchedDateString = data.launched
|
||||||
|
this.closedDateString = data.closed
|
||||||
|
this.replacedBy = data.replaced_by
|
||||||
|
this.websiteUrl = data.website
|
||||||
|
this.logoUrl = data.logo
|
||||||
}
|
}
|
||||||
|
|
||||||
export class Channel {
|
setId(id: string): this {
|
||||||
id: string
|
|
||||||
name?: string
|
|
||||||
alt_names?: string[]
|
|
||||||
network?: string
|
|
||||||
owners?: string[]
|
|
||||||
country?: string
|
|
||||||
subdivision?: string
|
|
||||||
city?: string
|
|
||||||
broadcast_area?: string[]
|
|
||||||
languages?: string[]
|
|
||||||
categories?: string[]
|
|
||||||
is_nsfw?: boolean
|
|
||||||
launched?: string
|
|
||||||
closed?: string
|
|
||||||
replaced_by?: string
|
|
||||||
website?: string
|
|
||||||
logo?: string
|
|
||||||
|
|
||||||
constructor({
|
|
||||||
id,
|
|
||||||
name,
|
|
||||||
alt_names,
|
|
||||||
network,
|
|
||||||
owners,
|
|
||||||
country,
|
|
||||||
subdivision,
|
|
||||||
city,
|
|
||||||
broadcast_area,
|
|
||||||
languages,
|
|
||||||
categories,
|
|
||||||
is_nsfw,
|
|
||||||
launched,
|
|
||||||
closed,
|
|
||||||
replaced_by,
|
|
||||||
website,
|
|
||||||
logo
|
|
||||||
}: ChannelProps) {
|
|
||||||
this.id = id
|
this.id = id
|
||||||
this.name = name
|
|
||||||
this.alt_names = alt_names
|
return this
|
||||||
this.network = network
|
|
||||||
this.owners = owners
|
|
||||||
this.country = country
|
|
||||||
this.subdivision = subdivision
|
|
||||||
this.city = city
|
|
||||||
this.broadcast_area = broadcast_area
|
|
||||||
this.languages = languages
|
|
||||||
this.categories = categories
|
|
||||||
this.is_nsfw = is_nsfw
|
|
||||||
this.launched = launched
|
|
||||||
this.closed = closed
|
|
||||||
this.replaced_by = replaced_by
|
|
||||||
this.website = website
|
|
||||||
this.logo = logo
|
|
||||||
}
|
}
|
||||||
|
|
||||||
data() {
|
update(issueData: IssueData): this {
|
||||||
const { ...object } = this
|
const data = {
|
||||||
|
channel_name: issueData.getString('channel_name'),
|
||||||
return object
|
alt_names: issueData.getArray('alt_names'),
|
||||||
|
network: issueData.getString('network'),
|
||||||
|
owners: issueData.getArray('owners'),
|
||||||
|
country: issueData.getString('country'),
|
||||||
|
subdivision: issueData.getString('subdivision'),
|
||||||
|
city: issueData.getString('city'),
|
||||||
|
categories: issueData.getArray('categories'),
|
||||||
|
is_nsfw: issueData.getBoolean('is_nsfw'),
|
||||||
|
launched: issueData.getString('launched'),
|
||||||
|
closed: issueData.getString('closed'),
|
||||||
|
replaced_by: issueData.getString('replaced_by'),
|
||||||
|
website: issueData.getString('website'),
|
||||||
|
logo: issueData.getString('logo')
|
||||||
}
|
}
|
||||||
|
|
||||||
merge(channel: Channel) {
|
if (data.channel_name !== undefined) this.name = data.channel_name
|
||||||
const data: { [key: string]: string | string[] | boolean | undefined } = channel.data()
|
if (data.alt_names !== undefined) this.altNames = new Collection(data.alt_names)
|
||||||
for (const prop in data) {
|
if (data.network !== undefined) this.networkName = data.network
|
||||||
if (data[prop] === undefined) continue
|
if (data.owners !== undefined) this.ownerNames = new Collection(data.owners)
|
||||||
this[prop] = data[prop]
|
if (data.country !== undefined) this.countryCode = data.country
|
||||||
|
if (data.subdivision !== undefined) this.subdivisionCode = data.subdivision
|
||||||
|
if (data.city !== undefined) this.cityName = data.city
|
||||||
|
if (data.categories !== undefined) this.categoryIds = new Collection(data.categories)
|
||||||
|
if (data.is_nsfw !== undefined) this.isNSFW = data.is_nsfw
|
||||||
|
if (data.launched !== undefined) this.launchedDateString = data.launched
|
||||||
|
if (data.closed !== undefined) this.closedDateString = data.closed
|
||||||
|
if (data.replaced_by !== undefined) this.replacedBy = data.replaced_by
|
||||||
|
if (data.website !== undefined) this.websiteUrl = data.website
|
||||||
|
if (data.logo !== undefined) this.logoUrl = data.logo
|
||||||
|
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
withFeeds(feedsGroupedByChannelId: Dictionary): this {
|
||||||
|
this.feeds = new Collection(feedsGroupedByChannelId.get(this.id))
|
||||||
|
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
getFeeds(): Collection {
|
||||||
|
if (!this.feeds) return new Collection()
|
||||||
|
|
||||||
|
return this.feeds
|
||||||
|
}
|
||||||
|
|
||||||
|
hasValidId(): boolean {
|
||||||
|
const expectedId = createChannelId(this.name, this.countryCode)
|
||||||
|
|
||||||
|
return expectedId === this.id
|
||||||
|
}
|
||||||
|
|
||||||
|
hasMainFeed(): boolean {
|
||||||
|
const feeds = this.getFeeds()
|
||||||
|
|
||||||
|
if (feeds.isEmpty()) return false
|
||||||
|
|
||||||
|
const mainFeed = feeds.find((feed: Feed) => feed.isMain)
|
||||||
|
|
||||||
|
return !!mainFeed
|
||||||
|
}
|
||||||
|
|
||||||
|
hasMoreThanOneMainFeed(): boolean {
|
||||||
|
const mainFeeds = this.getFeeds().filter((feed: Feed) => feed.isMain)
|
||||||
|
|
||||||
|
return mainFeeds.count() > 1
|
||||||
|
}
|
||||||
|
|
||||||
|
hasValidReplacedBy(channelsKeyById: Dictionary, feedsKeyByStreamId: Dictionary): boolean {
|
||||||
|
if (!this.replacedBy) return true
|
||||||
|
|
||||||
|
const [channelId, feedId] = this.replacedBy.split('@')
|
||||||
|
|
||||||
|
if (channelsKeyById.missing(channelId)) return false
|
||||||
|
if (feedId && feedsKeyByStreamId.missing(this.replacedBy)) return false
|
||||||
|
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
hasValidCountryCode(countriesKeyByCode: Dictionary): boolean {
|
||||||
|
return countriesKeyByCode.has(this.countryCode)
|
||||||
|
}
|
||||||
|
|
||||||
|
hasValidSubdivisionCode(subdivisionsKeyByCode: Dictionary): boolean {
|
||||||
|
return !this.subdivisionCode || subdivisionsKeyByCode.has(this.subdivisionCode)
|
||||||
|
}
|
||||||
|
|
||||||
|
hasValidCategoryIds(categoriesKeyById: Dictionary): boolean {
|
||||||
|
const hasInvalid = this.getCategoryIds().find((id: string) => categoriesKeyById.missing(id))
|
||||||
|
|
||||||
|
return !hasInvalid
|
||||||
|
}
|
||||||
|
|
||||||
|
getCategoryIds(): Collection {
|
||||||
|
if (!this.categoryIds) return new Collection()
|
||||||
|
|
||||||
|
return this.categoryIds
|
||||||
|
}
|
||||||
|
|
||||||
|
getAltNames(): Collection {
|
||||||
|
if (!this.altNames) return new Collection()
|
||||||
|
|
||||||
|
return this.altNames
|
||||||
|
}
|
||||||
|
|
||||||
|
getOwnerNames(): Collection {
|
||||||
|
if (!this.ownerNames) return new Collection()
|
||||||
|
|
||||||
|
return this.ownerNames
|
||||||
|
}
|
||||||
|
|
||||||
|
data(): ChannelData {
|
||||||
|
return {
|
||||||
|
id: this.id,
|
||||||
|
name: this.name,
|
||||||
|
alt_names: this.getAltNames().all(),
|
||||||
|
network: this.networkName,
|
||||||
|
owners: this.getOwnerNames().all(),
|
||||||
|
country: this.countryCode,
|
||||||
|
subdivision: this.subdivisionCode,
|
||||||
|
city: this.cityName,
|
||||||
|
categories: this.getCategoryIds().all(),
|
||||||
|
is_nsfw: this.isNSFW,
|
||||||
|
launched: this.launchedDateString,
|
||||||
|
closed: this.closedDateString,
|
||||||
|
replaced_by: this.replacedBy,
|
||||||
|
website: this.websiteUrl,
|
||||||
|
logo: this.logoUrl
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
getSchema() {
|
||||||
|
return Joi.object({
|
||||||
|
id: Joi.string()
|
||||||
|
.regex(/^[A-Za-z0-9]+\.[a-z]{2}$/)
|
||||||
|
.required(),
|
||||||
|
name: Joi.string()
|
||||||
|
.regex(/^[a-z0-9-!:&.+'/»#%°$@?|¡–\s_—]+$/i)
|
||||||
|
.regex(/^((?!\s-\s).)*$/)
|
||||||
|
.required(),
|
||||||
|
alt_names: Joi.array().items(
|
||||||
|
Joi.string()
|
||||||
|
.regex(/^[^",]+$/)
|
||||||
|
.invalid(Joi.ref('name'))
|
||||||
|
),
|
||||||
|
network: Joi.string()
|
||||||
|
.regex(/^[^",]+$/)
|
||||||
|
.allow(null),
|
||||||
|
owners: Joi.array().items(Joi.string().regex(/^[^",]+$/)),
|
||||||
|
country: Joi.string()
|
||||||
|
.regex(/^[A-Z]{2}$/)
|
||||||
|
.required(),
|
||||||
|
subdivision: Joi.string()
|
||||||
|
.regex(/^[A-Z]{2}-[A-Z0-9]{1,3}$/)
|
||||||
|
.allow(null),
|
||||||
|
city: Joi.string()
|
||||||
|
.regex(/^[^",]+$/)
|
||||||
|
.allow(null),
|
||||||
|
categories: Joi.array().items(Joi.string().regex(/^[a-z]+$/)),
|
||||||
|
is_nsfw: Joi.boolean().strict().required(),
|
||||||
|
launched: Joi.date().format('YYYY-MM-DD').raw().allow(null),
|
||||||
|
closed: Joi.date().format('YYYY-MM-DD').raw().allow(null).greater(Joi.ref('launched')),
|
||||||
|
replaced_by: Joi.string()
|
||||||
|
.regex(/^[A-Za-z0-9]+\.[a-z]{2}($|@[A-Za-z0-9]+$)/)
|
||||||
|
.allow(null),
|
||||||
|
website: Joi.string()
|
||||||
|
.regex(/,/, { invert: true })
|
||||||
|
.uri({
|
||||||
|
scheme: ['http', 'https']
|
||||||
|
})
|
||||||
|
.allow(null),
|
||||||
|
logo: Joi.string()
|
||||||
|
.regex(/,/, { invert: true })
|
||||||
|
.uri({
|
||||||
|
scheme: ['https']
|
||||||
|
})
|
||||||
|
.required()
|
||||||
|
})
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
54
scripts/models/country.ts
Normal file
@ -0,0 +1,54 @@
import { Collection, Dictionary } from '@freearhey/core'
import { CountryData } from '../types/country'
import { Model } from './model'
import Joi from 'joi'

export class Country extends Model {
  code: string
  name: string
  flagEmoji: string
  languageCodes: Collection

  constructor(data: CountryData) {
    super()

    this.code = data.code
    this.name = data.name
    this.flagEmoji = data.flag
    this.languageCodes = new Collection(data.languages)
  }

  hasValidLanguageCodes(languagesKeyByCode: Dictionary): boolean {
    const hasInvalid = this.languageCodes.find((code: string) => languagesKeyByCode.missing(code))

    return !hasInvalid
  }

  data(): CountryData {
    return {
      code: this.code,
      name: this.name,
      flag: this.flagEmoji,
      languages: this.languageCodes.all()
    }
  }

  getSchema() {
    return Joi.object({
      name: Joi.string()
        .regex(/^[\sA-Z\u00C0-\u00FF().-]+$/i)
        .required(),
      code: Joi.string()
        .regex(/^[A-Z]{2}$/)
        .required(),
      languages: Joi.array().items(
        Joi.string()
          .regex(/^[a-z]{3}$/)
          .required()
      ),
      flag: Joi.string()
        .regex(/^[\uD83C][\uDDE6-\uDDFF][\uD83C][\uDDE6-\uDDFF]$/)
        .required()
    })
  }
}
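As a rough illustration of the referential check added here (example data only; in the real scripts the `Dictionary` of languages keyed by code would be built from languages.csv by the data loader):

```ts
import { Dictionary } from '@freearhey/core'
import { Country } from '../models'

// Example country record; the flag emoji and language list are illustrative.
const country = new Country({ code: 'PL', name: 'Poland', flag: '🇵🇱', languages: ['pol'] })

// languagesKeyByCode maps ISO 639-3 codes to language rows.
const languagesKeyByCode = new Dictionary()
languagesKeyByCode.set('pol', { code: 'pol', name: 'Polish' })

country.hasValidLanguageCodes(languagesKeyByCode) // true: every listed code is present
```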
@ -1,55 +1,147 @@
|
||||||
type FeedProps = {
|
import { Collection, Dictionary } from '@freearhey/core'
|
||||||
channel: string
|
import { FeedData } from '../types/feed'
|
||||||
|
import { createFeedId } from '../utils'
|
||||||
|
import { Model } from './model'
|
||||||
|
import JoiDate from '@joi/date'
|
||||||
|
import BaseJoi from 'joi'
|
||||||
|
import { IssueData } from '../core'
|
||||||
|
|
||||||
|
const Joi = BaseJoi.extend(JoiDate)
|
||||||
|
|
||||||
|
export class Feed extends Model {
|
||||||
|
channelId: string
|
||||||
id: string
|
id: string
|
||||||
name?: string
|
name: string
|
||||||
is_main?: boolean
|
isMain: boolean
|
||||||
broadcast_area?: string[]
|
broadcastAreaCodes: Collection
|
||||||
timezones?: string[]
|
timezoneIds: Collection
|
||||||
languages?: string[]
|
languageCodes: Collection
|
||||||
video_format?: string
|
videoFormat?: string
|
||||||
|
|
||||||
|
constructor(data: FeedData) {
|
||||||
|
super()
|
||||||
|
|
||||||
|
this.channelId = data.channel
|
||||||
|
this.id = data.id
|
||||||
|
this.name = data.name
|
||||||
|
this.isMain = data.is_main
|
||||||
|
this.broadcastAreaCodes = new Collection(data.broadcast_area)
|
||||||
|
this.timezoneIds = new Collection(data.timezones)
|
||||||
|
this.languageCodes = new Collection(data.languages)
|
||||||
|
this.videoFormat = data.video_format
|
||||||
}
|
}
|
||||||
|
|
||||||
export class Feed {
|
setId(id: string): this {
|
||||||
channel: string
|
|
||||||
id: string
|
|
||||||
name?: string
|
|
||||||
is_main?: boolean
|
|
||||||
broadcast_area: string[]
|
|
||||||
timezones: string[]
|
|
||||||
languages: string[]
|
|
||||||
video_format?: string
|
|
||||||
|
|
||||||
constructor({
|
|
||||||
channel,
|
|
||||||
id,
|
|
||||||
name,
|
|
||||||
is_main,
|
|
||||||
broadcast_area,
|
|
||||||
timezones,
|
|
||||||
languages,
|
|
||||||
video_format
|
|
||||||
}: FeedProps) {
|
|
||||||
this.channel = channel
|
|
||||||
this.id = id
|
this.id = id
|
||||||
this.name = name
|
|
||||||
this.is_main = is_main
|
return this
|
||||||
this.broadcast_area = broadcast_area || []
|
|
||||||
this.timezones = timezones || []
|
|
||||||
this.languages = languages || []
|
|
||||||
this.video_format = video_format
|
|
||||||
}
|
}
|
||||||
|
|
||||||
data() {
|
update(issueData: IssueData): this {
|
||||||
const { ...object } = this
|
const data = {
|
||||||
|
feed_name: issueData.getString('feed_name'),
|
||||||
return object
|
is_main: issueData.getBoolean('is_main'),
|
||||||
|
broadcast_area: issueData.getArray('broadcast_area'),
|
||||||
|
timezones: issueData.getArray('timezones'),
|
||||||
|
languages: issueData.getArray('languages'),
|
||||||
|
video_format: issueData.getString('video_format')
|
||||||
}
|
}
|
||||||
|
|
||||||
merge(feed: Feed) {
|
if (data.feed_name !== undefined) this.name = data.feed_name
|
||||||
const data: { [key: string]: string | string[] | boolean | undefined } = feed.data()
|
if (data.is_main !== undefined) this.isMain = data.is_main
|
||||||
for (const prop in data) {
|
if (data.broadcast_area !== undefined)
|
||||||
if (data[prop] === undefined) continue
|
this.broadcastAreaCodes = new Collection(data.broadcast_area)
|
||||||
this[prop] = data[prop]
|
if (data.timezones !== undefined) this.timezoneIds = new Collection(data.timezones)
|
||||||
|
if (data.languages !== undefined) this.languageCodes = new Collection(data.languages)
|
||||||
|
if (data.video_format !== undefined) this.videoFormat = data.video_format
|
||||||
|
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
hasValidId(): boolean {
|
||||||
|
const expectedId = createFeedId(this.name)
|
||||||
|
|
||||||
|
return expectedId === this.id
|
||||||
|
}
|
||||||
|
|
||||||
|
hasValidChannelId(channelsKeyById: Dictionary): boolean {
|
||||||
|
return channelsKeyById.has(this.channelId)
|
||||||
|
}
|
||||||
|
|
||||||
|
hasValidTimezones(timezonesKeyById: Dictionary): boolean {
|
||||||
|
const hasInvalid = this.timezoneIds.find((id: string) => timezonesKeyById.missing(id))
|
||||||
|
|
||||||
|
return !hasInvalid
|
||||||
|
}
|
||||||
|
|
||||||
|
hasValidBroadcastAreaCodes(
|
||||||
|
countriesKeyByCode: Dictionary,
|
||||||
|
subdivisionsKeyByCode: Dictionary,
|
||||||
|
regionsKeyByCode: Dictionary
|
||||||
|
): boolean {
|
||||||
|
const hasInvalid = this.broadcastAreaCodes.find((areaCode: string) => {
|
||||||
|
const [type, code] = areaCode.split('/')
|
||||||
|
switch (type) {
|
||||||
|
case 'c':
|
||||||
|
return countriesKeyByCode.missing(code)
|
||||||
|
case 's':
|
||||||
|
return subdivisionsKeyByCode.missing(code)
|
||||||
|
case 'r':
|
||||||
|
return regionsKeyByCode.missing(code)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
return !hasInvalid
|
||||||
|
}
|
||||||
|
|
||||||
|
getStreamId(): string {
|
||||||
|
return `${this.channelId}@${this.id}`
|
||||||
|
}
|
||||||
|
|
||||||
|
data(): FeedData {
|
||||||
|
return {
|
||||||
|
channel: this.channelId,
|
||||||
|
id: this.id,
|
||||||
|
name: this.name,
|
||||||
|
is_main: this.isMain,
|
||||||
|
broadcast_area: this.broadcastAreaCodes.all(),
|
||||||
|
timezones: this.timezoneIds.all(),
|
||||||
|
languages: this.languageCodes.all(),
|
||||||
|
video_format: this.videoFormat
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
getSchema() {
|
||||||
|
return Joi.object({
|
||||||
|
channel: Joi.string()
|
||||||
|
.regex(/^[A-Za-z0-9]+\.[a-z]{2}$/)
|
||||||
|
.required(),
|
||||||
|
id: Joi.string()
|
||||||
|
.regex(/^[A-Za-z0-9]+$/)
|
||||||
|
.required(),
|
||||||
|
name: Joi.string()
|
||||||
|
.regex(/^[a-z0-9-!:&.+'/»#%°$@?|¡–\s_—]+$/i)
|
||||||
|
.regex(/^((?!\s-\s).)*$/)
|
||||||
|
.required(),
|
||||||
|
is_main: Joi.boolean().strict().required(),
|
||||||
|
broadcast_area: Joi.array().items(
|
||||||
|
Joi.string()
|
||||||
|
.regex(/^(s\/[A-Z]{2}-[A-Z0-9]{1,3}|c\/[A-Z]{2}|r\/[A-Z0-9]{2,7})$/)
|
||||||
|
.required()
|
||||||
|
),
|
||||||
|
timezones: Joi.array().items(
|
||||||
|
Joi.string()
|
||||||
|
.regex(/^[a-z-_/]+$/i)
|
||||||
|
.required()
|
||||||
|
),
|
||||||
|
languages: Joi.array().items(
|
||||||
|
Joi.string()
|
||||||
|
.regex(/^[a-z]{3}$/)
|
||||||
|
.required()
|
||||||
|
),
|
||||||
|
video_format: Joi.string()
|
||||||
|
.regex(/^\d+(i|p)$/)
|
||||||
|
.allow(null)
|
||||||
|
})
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,3 +1,9 @@
export * from './channel'
export * from './blocked'
export * from './blocklistRecord'
export * from './feed'
export * from './region'
export * from './subdivision'
export * from './category'
export * from './country'
export * from './language'
export * from './timezone'
31
scripts/models/language.ts
Normal file
@ -0,0 +1,31 @@
import { LanguageData } from '../types/language'
import { Model } from './model'
import Joi from 'joi'

export class Language extends Model {
  code: string
  name: string

  constructor(data: LanguageData) {
    super()

    this.code = data.code
    this.name = data.name
  }

  data(): LanguageData {
    return {
      code: this.code,
      name: this.name
    }
  }

  getSchema() {
    return Joi.object({
      code: Joi.string()
        .regex(/^[a-z]{3}$/)
        .required(),
      name: Joi.string().required()
    })
  }
}
15
scripts/models/model.ts
Normal file
@ -0,0 +1,15 @@
export class Model {
  line?: number

  constructor() {}

  setLine(line: number): this {
    this.line = line

    return this
  }

  getLine(): number {
    return this.line || 0
  }
}
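The base class only carries the source CSV line number. A sketch of how a loader might attach it while hydrating rows, so validators can later report `line` in their error objects (the loop itself is an assumption; the `CSVParserRow` shape is declared later in this patch):

```ts
import { Category } from '../models'
import { CSVParserRow } from '../types/csvParser'

// Assumed hydration step: each parsed CSV row remembers the line it came from.
function hydrateCategories(rows: CSVParserRow[]): Category[] {
  return rows.map(row => new Category({ id: row.data.id, name: row.data.name }).setLine(row.line))
}
```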
48
scripts/models/region.ts
Normal file
@ -0,0 +1,48 @@
import { Collection, Dictionary } from '@freearhey/core'
import { RegionData } from '../types/region'
import { Model } from './model'
import Joi from 'joi'

export class Region extends Model {
  code: string
  name: string
  countryCodes: Collection

  constructor(data: RegionData) {
    super()

    this.code = data.code
    this.name = data.name
    this.countryCodes = new Collection(data.countries)
  }

  hasValidCountryCodes(countriesKeyByCode: Dictionary): boolean {
    const hasInvalid = this.countryCodes.find((code: string) => countriesKeyByCode.missing(code))

    return !hasInvalid
  }

  data(): RegionData {
    return {
      code: this.code,
      name: this.name,
      countries: this.countryCodes.all()
    }
  }

  getSchema() {
    return Joi.object({
      name: Joi.string()
        .regex(/^[\sA-Z\u00C0-\u00FF().,-]+$/i)
        .required(),
      code: Joi.string()
        .regex(/^[A-Z]{2,7}$/)
        .required(),
      countries: Joi.array().items(
        Joi.string()
          .regex(/^[A-Z]{2}$/)
          .required()
      )
    })
  }
}
42
scripts/models/subdivision.ts
Normal file
@ -0,0 +1,42 @@
import { SubdivisionData } from '../types/subdivision'
import { Dictionary } from '@freearhey/core'
import { Model } from './model'
import Joi from 'joi'

export class Subdivision extends Model {
  code: string
  name: string
  countryCode: string

  constructor(data: SubdivisionData) {
    super()

    this.code = data.code
    this.name = data.name
    this.countryCode = data.country
  }

  hasValidCountryCode(countriesKeyByCode: Dictionary): boolean {
    return countriesKeyByCode.has(this.countryCode)
  }

  data(): SubdivisionData {
    return {
      code: this.code,
      name: this.name,
      country: this.countryCode
    }
  }

  getSchema() {
    return Joi.object({
      country: Joi.string()
        .regex(/^[A-Z]{2}$/)
        .required(),
      name: Joi.string().required(),
      code: Joi.string()
        .regex(/^[A-Z]{2}-[A-Z0-9]{1,3}$/)
        .required()
    })
  }
}
44
scripts/models/timezone.ts
Normal file
@ -0,0 +1,44 @@
import { Collection, Dictionary } from '@freearhey/core'
import { TimezoneData } from '../types/timezone'
import { Model } from './model'
import Joi from 'joi'

export class Timezone extends Model {
  id: string
  utcOffset: string
  countryCodes: Collection

  constructor(data: TimezoneData) {
    super()

    this.id = data.id
    this.utcOffset = data.utc_offset
    this.countryCodes = new Collection(data.countries)
  }

  hasValidCountryCodes(countriesKeyByCode: Dictionary): boolean {
    const hasInvalid = this.countryCodes.find((code: string) => countriesKeyByCode.missing(code))

    return !hasInvalid
  }

  data(): TimezoneData {
    return {
      id: this.id,
      utc_offset: this.utcOffset,
      countries: this.countryCodes.all()
    }
  }

  getSchema() {
    return Joi.object({
      id: Joi.string()
        .regex(/^[a-z-_/]+$/i)
        .required(),
      utc_offset: Joi.string()
        .regex(/^(\+|-)\d{2}:\d{2}$/)
        .required(),
      countries: Joi.array().items(Joi.string().regex(/^[A-Z]{2}$/))
    })
  }
}
@ -1,11 +0,0 @@
import Joi from 'joi'

export default {
  channel: Joi.string()
    .regex(/^[A-Za-z0-9]+\.[a-z]{2}$/)
    .required(),
  reason: Joi.string()
    .valid(...['dmca', 'nsfw'])
    .required(),
  ref: Joi.string().uri().required()
}
@ -1,10 +0,0 @@
import Joi from 'joi'

export default {
  id: Joi.string()
    .regex(/^[a-z]+$/)
    .required(),
  name: Joi.string()
    .regex(/^[A-Z]+$/i)
    .required()
}
@ -1,71 +0,0 @@
import BaseJoi from 'joi'
import JoiDate from '@joi/date'
import path from 'path'
import url from 'url'

const Joi = BaseJoi.extend(JoiDate)

export default {
  id: Joi.string()
    .regex(/^[A-Za-z0-9]+\.[a-z]{2}$/)
    .required(),
  name: Joi.string()
    .regex(/^[a-z0-9-!:&.+'/»#%°$@?|¡–\s_—]+$/i)
    .regex(/^((?!\s-\s).)*$/)
    .required(),
  alt_names: Joi.array().items(
    Joi.string()
      .regex(/^[^",]+$/)
      .invalid(Joi.ref('name'))
  ),
  network: Joi.string()
    .regex(/^[^",]+$/)
    .allow(null),
  owners: Joi.array().items(Joi.string().regex(/^[^",]+$/)),
  country: Joi.string()
    .regex(/^[A-Z]{2}$/)
    .required(),
  subdivision: Joi.string()
    .regex(/^[A-Z]{2}-[A-Z0-9]{1,3}$/)
    .allow(null),
  city: Joi.string()
    .regex(/^[^",]+$/)
    .allow(null),
  broadcast_area: Joi.array().items(
    Joi.string()
      .regex(/^(s\/[A-Z]{2}-[A-Z0-9]{1,3}|c\/[A-Z]{2}|r\/[A-Z0-9]{2,7})$/)
      .required()
  ),
  languages: Joi.array().items(
    Joi.string()
      .regex(/^[a-z]{3}$/)
      .required()
  ),
  categories: Joi.array().items(Joi.string().regex(/^[a-z]+$/)),
  is_nsfw: Joi.boolean().strict().required(),
  launched: Joi.date().format('YYYY-MM-DD').raw().allow(null),
  closed: Joi.date().format('YYYY-MM-DD').raw().allow(null).greater(Joi.ref('launched')),
  replaced_by: Joi.string()
    .regex(/^[A-Za-z0-9]+\.[a-z]{2}($|@[A-Za-z0-9]+$)/)
    .allow(null),
  website: Joi.string()
    .regex(/,/, { invert: true })
    .uri({
      scheme: ['http', 'https']
    })
    .allow(null),
  logo: Joi.string()
    .regex(/,/, { invert: true })
    .uri({
      scheme: ['https']
    })
    .custom((value, helper) => {
      const ext = path.extname(url.parse(value).pathname)
      if (!ext || /(\.png|\.jpeg|\.jpg)/i.test(ext)) {
        return true
      } else {
        return helper.message(`"logo" has an invalid file extension "${ext}"`)
      }
    })
    .required()
}
@ -1,18 +0,0 @@
import Joi from 'joi'

export default {
  name: Joi.string()
    .regex(/^[\sA-Z\u00C0-\u00FF().-]+$/i)
    .required(),
  code: Joi.string()
    .regex(/^[A-Z]{2}$/)
    .required(),
  languages: Joi.array().items(
    Joi.string()
      .regex(/^[a-z]{3}$/)
      .required()
  ),
  flag: Joi.string()
    .regex(/^[\uD83C][\uDDE6-\uDDFF][\uD83C][\uDDE6-\uDDFF]$/)
    .required()
}
@ -1,36 +0,0 @@
import BaseJoi from 'joi'
import JoiDate from '@joi/date'

const Joi = BaseJoi.extend(JoiDate)

export default {
  channel: Joi.string()
    .regex(/^[A-Za-z0-9]+\.[a-z]{2}$/)
    .required(),
  id: Joi.string()
    .regex(/^[A-Za-z0-9]+$/)
    .required(),
  name: Joi.string()
    .regex(/^[a-z0-9-!:&.+'/»#%°$@?|¡–\s_—]+$/i)
    .regex(/^((?!\s-\s).)*$/)
    .required(),
  is_main: Joi.boolean().strict().required(),
  broadcast_area: Joi.array().items(
    Joi.string()
      .regex(/^(s\/[A-Z]{2}-[A-Z0-9]{1,3}|c\/[A-Z]{2}|r\/[A-Z0-9]{2,7})$/)
      .required()
  ),
  timezones: Joi.array().items(
    Joi.string()
      .regex(/^[a-z-_/]+$/i)
      .required()
  ),
  languages: Joi.array().items(
    Joi.string()
      .regex(/^[a-z]{3}$/)
      .required()
  ),
  video_format: Joi.string()
    .regex(/^\d+(i|p)$/)
    .allow(null)
}
@ -1,21 +0,0 @@
import { default as channels } from './channels'
import { default as categories } from './categories'
import { default as countries } from './countries'
import { default as languages } from './languages'
import { default as regions } from './regions'
import { default as subdivisions } from './subdivisions'
import { default as blocklist } from './blocklist'
import { default as feeds } from './feeds'
import { default as timezones } from './timezones'

export default {
  channels,
  categories,
  countries,
  languages,
  regions,
  subdivisions,
  blocklist,
  feeds,
  timezones
}
@ -1,8 +0,0 @@
import Joi from 'joi'

export default {
  code: Joi.string()
    .regex(/^[a-z]{3}$/)
    .required(),
  name: Joi.string().required()
}
@ -1,15 +0,0 @@
import Joi from 'joi'

export default {
  name: Joi.string()
    .regex(/^[\sA-Z\u00C0-\u00FF().,-]+$/i)
    .required(),
  code: Joi.string()
    .regex(/^[A-Z]{2,7}$/)
    .required(),
  countries: Joi.array().items(
    Joi.string()
      .regex(/^[A-Z]{2}$/)
      .required()
  )
}
@ -1,11 +0,0 @@
import Joi from 'joi'

export default {
  country: Joi.string()
    .regex(/^[A-Z]{2}$/)
    .required(),
  name: Joi.string().required(),
  code: Joi.string()
    .regex(/^[A-Z]{2}-[A-Z0-9]{1,3}$/)
    .required()
}
@ -1,11 +0,0 @@
import Joi from 'joi'

export default {
  id: Joi.string()
    .regex(/^[a-z-_/]+$/i)
    .required(),
  utc_offset: Joi.string()
    .regex(/^(\+|-)\d{2}:\d{2}$/)
    .required(),
  countries: Joi.array().items(Joi.string().regex(/^[A-Z]{2}$/))
}
5
scripts/types/blocklistRecord.d.ts
vendored
Normal file
@ -0,0 +1,5 @@
export type BlocklistRecordData = {
  channel: string
  reason: string
  ref: string
}
4
scripts/types/category.d.ts
vendored
Normal file
@ -0,0 +1,4 @@
export type CategoryData = {
  id: string
  name: string
}
17
scripts/types/channel.d.ts
vendored
Normal file
@ -0,0 +1,17 @@
export type ChannelData = {
  id: string
  name: string
  alt_names?: string[]
  network?: string
  owners?: string[]
  country: string
  subdivision?: string
  city?: string
  categories?: string[]
  is_nsfw: boolean
  launched?: string
  closed?: string
  replaced_by?: string
  website?: string
  logo: string
}
6
scripts/types/country.d.ts
vendored
Normal file
@ -0,0 +1,6 @@
export type CountryData = {
  code: string
  name: string
  flag: string
  languages: string[]
}
4
scripts/types/csvParser.d.ts
vendored
Normal file
@ -0,0 +1,4 @@
export type CSVParserRow = {
  line: number
  data: { [key: string]: string }
}
26
scripts/types/dataLoader.d.ts
vendored
Normal file
@ -0,0 +1,26 @@
import { Dictionary, Collection, Storage } from '@freearhey/core'

export type DataLoaderData = {
  feeds: Collection
  feedsGroupedByChannelId: Dictionary
  feedsKeyByStreamId: Dictionary
  channels: Collection
  categories: Collection
  countries: Collection
  languages: Collection
  blocklistRecords: Collection
  timezones: Collection
  regions: Collection
  subdivisions: Collection
  channelsKeyById: Dictionary
  countriesKeyByCode: Dictionary
  subdivisionsKeyByCode: Dictionary
  categoriesKeyById: Dictionary
  regionsKeyByCode: Dictionary
  timezonesKeyById: Dictionary
  languagesKeyByCode: Dictionary
}

export type DataLoaderProps = {
  storage: Storage
}
10
scripts/types/feed.d.ts
vendored
Normal file
@ -0,0 +1,10 @@
export type FeedData = {
  channel: string
  id: string
  name: string
  is_main: boolean
  broadcast_area: string[]
  timezones: string[]
  languages: string[]
  video_format?: string
}
4
scripts/types/language.d.ts
vendored
Normal file
@ -0,0 +1,4 @@
export type LanguageData = {
  code: string
  name: string
}
5
scripts/types/region.d.ts
vendored
Normal file
@ -0,0 +1,5 @@
export type RegionData = {
  code: string
  name: string
  countries: string[]
}
5
scripts/types/subdivision.d.ts
vendored
Normal file
@ -0,0 +1,5 @@
export type SubdivisionData = {
  country: string
  name: string
  code: string
}
5
scripts/types/timezone.d.ts
vendored
Normal file
@ -0,0 +1,5 @@
export type TimezoneData = {
  id: string
  utc_offset: string
  countries: string[]
}
10
scripts/types/validator.d.ts
vendored
Normal file
@ -0,0 +1,10 @@
import { DataLoaderData } from './dataLoader'

export type ValidatorError = {
  line: number
  message: string
}

export type ValidatorProps = {
  data: DataLoaderData
}
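The `Validator` base class that the concrete validators below extend is not included in this patch; judging from `ValidatorProps` and the way the subclasses read `this.data`, a plausible minimal shape would be the following (an assumption, not the actual file):

```ts
import { DataLoaderData } from '../types/dataLoader'
import { ValidatorProps } from '../types/validator'

// Assumed base class: the concrete validators only ever call super(props) and read this.data.
export class Validator {
  data: DataLoaderData

  constructor({ data }: ValidatorProps) {
    this.data = data
  }
}
```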
@ -20,5 +20,6 @@ function normalize(string: string) {
    .replace(/^&/i, 'And')
    .replace(/\+/gi, 'Plus')
    .replace(/\s-(\d)/gi, ' Minus$1')
    .replace(/^-(\d)/gi, 'Minus$1')
    .replace(/[^a-z\d]+/gi, '')
}
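The only functional change here is the extra rule for names that begin with a negative number; in isolation the added replacement behaves as follows (the sample name is made up):

```ts
// Added rule only, shown outside the full normalize() chain.
'-273 TV'.replace(/^-(\d)/gi, 'Minus$1') // => 'Minus273 TV'
```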
scripts/validators/blocklistRecordValidator.ts (new file)
@@ -0,0 +1,38 @@
import { DataLoaderData } from '../types/dataLoader'
import { ValidatorProps } from '../types/validator'
import { Collection } from '@freearhey/core'
import { Validator } from './validator'
import { BlocklistRecord } from '../models'

export class BlocklistRecordValidator extends Validator {
  constructor(props: ValidatorProps) {
    super(props)
  }

  validate(blocklistRecord: BlocklistRecord): Collection {
    const { channelsKeyById }: DataLoaderData = this.data

    const errors = new Collection()

    const joiResults = blocklistRecord
      .getSchema()
      .validate(blocklistRecord.data(), { abortEarly: false })
    if (joiResults.error) {
      joiResults.error.details.forEach((detail: { message: string }) => {
        errors.add({
          line: blocklistRecord.getLine(),
          message: `${blocklistRecord.channelId}: ${detail.message}`
        })
      })
    }

    if (!blocklistRecord.hasValidChannelId(channelsKeyById)) {
      errors.add({
        line: blocklistRecord.getLine(),
        message: `"${blocklistRecord.channelId}" is missing from the channels.csv`
      })
    }

    return errors
  }
}
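A minimal usage sketch for this validator. It assumes a DataLoader has already produced a DataLoaderData value and that a BlocklistRecord has been parsed from blocklist.csv; only the constructor and the validate() call are taken from the file above.

// Illustrative usage sketch; the surrounding loading/parsing code is assumed, not shown in this commit.
const validator = new BlocklistRecordValidator({ data })
const errors = validator.validate(blocklistRecord) // Collection of { line, message } entries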
scripts/validators/categoryValidator.ts (new file)
@@ -0,0 +1,23 @@
import { ValidatorProps } from '../types/validator'
import { Collection } from '@freearhey/core'
import { Validator } from './validator'
import { Category } from '../models'

export class CategoryValidator extends Validator {
  constructor(props: ValidatorProps) {
    super(props)
  }

  validate(category: Category): Collection {
    const errors = new Collection()

    const joiResults = category.getSchema().validate(category.data(), { abortEarly: false })
    if (joiResults.error) {
      joiResults.error.details.forEach((detail: { message: string }) => {
        errors.add({ line: category.getLine(), message: `${category.id}: ${detail.message}` })
      })
    }

    return errors
  }
}
scripts/validators/channelValidator.ts (new file)
@@ -0,0 +1,81 @@
import { ValidatorProps } from '../types/validator'
import { DataLoaderData } from '../types/dataLoader'
import { Collection } from '@freearhey/core'
import { Validator } from './validator'
import { Channel } from '../models'

export class ChannelValidator extends Validator {
  constructor(props: ValidatorProps) {
    super(props)
  }

  validate(channel: Channel): Collection {
    const {
      channelsKeyById,
      feedsKeyByStreamId,
      countriesKeyByCode,
      subdivisionsKeyByCode,
      categoriesKeyById
    }: DataLoaderData = this.data

    const errors = new Collection()

    const joiResults = channel.getSchema().validate(channel.data(), { abortEarly: false })
    if (joiResults.error) {
      joiResults.error.details.forEach((detail: { message: string }) => {
        errors.add({ line: channel.getLine(), message: `${channel.id}: ${detail.message}` })
      })
    }

    if (!channel.hasValidId()) {
      errors.add({
        line: channel.getLine(),
        message: `"${channel.id}" must be derived from the channel name "${channel.name}" and the country code "${channel.countryCode}"`
      })
    }

    if (!channel.hasMainFeed()) {
      errors.add({
        line: channel.getLine(),
        message: `"${channel.id}" does not have a main feed`
      })
    }

    if (channel.hasMoreThanOneMainFeed()) {
      errors.add({
        line: channel.getLine(),
        message: `"${channel.id}" has more than one main feed`
      })
    }

    if (!channel.hasValidReplacedBy(channelsKeyById, feedsKeyByStreamId)) {
      errors.add({
        line: channel.getLine(),
        message: `"${channel.id}" has an invalid replaced_by "${channel.replacedBy}"`
      })
    }

    if (!channel.hasValidCountryCode(countriesKeyByCode)) {
      errors.add({
        line: channel.getLine(),
        message: `"${channel.id}" has an invalid country "${channel.countryCode}"`
      })
    }

    if (!channel.hasValidSubdivisionCode(subdivisionsKeyByCode)) {
      errors.add({
        line: channel.getLine(),
        message: `"${channel.id}" has an invalid subdivision "${channel.subdivisionCode}"`
      })
    }

    if (!channel.hasValidCategoryIds(categoriesKeyById)) {
      errors.add({
        line: channel.getLine(),
        message: `"${channel.id}" has invalid categories "${channel.getCategoryIds().join(';')}"`
      })
    }

    return errors
  }
}
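The hasValidId() check enforces the ID convention visible in the CSV data elsewhere in this diff (for example 1000xHoraTV.uy for "1000xHora TV" in UY). Below is a sketch of that rule, under the assumption that the normalize() helper shown in the earlier hunk is what produces the name part; the helper name expectedChannelId is illustrative, not the project's actual function.

// Illustrative sketch of the expected-ID rule; assumes normalize() from the hunk above is in scope.
function expectedChannelId(name: string, countryCode: string): string {
  return `${normalize(name)}.${countryCode.toLowerCase()}`
}

expectedChannelId('1000xHora TV', 'UY') // '1000xHoraTV.uy'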
scripts/validators/countryValidator.ts (new file)
@@ -0,0 +1,33 @@
import { ValidatorProps } from '../types/validator'
import { Collection } from '@freearhey/core'
import { Validator } from './validator'
import { Country } from '../models'
import { DataLoaderData } from '../types/dataLoader'

export class CountryValidator extends Validator {
  constructor(props: ValidatorProps) {
    super(props)
  }

  validate(country: Country): Collection {
    const { languagesKeyByCode }: DataLoaderData = this.data

    const errors = new Collection()

    const joiResults = country.getSchema().validate(country.data(), { abortEarly: false })
    if (joiResults.error) {
      joiResults.error.details.forEach((detail: { message: string }) => {
        errors.add({ line: country.getLine(), message: `${country.code}: ${detail.message}` })
      })
    }

    if (!country.hasValidLanguageCodes(languagesKeyByCode)) {
      errors.add({
        line: country.getLine(),
        message: `"${country.code}" has invalid languages "${country.languageCodes.join(';')}"`
      })
    }

    return errors
  }
}
scripts/validators/feedValidator.ts (new file)
@@ -0,0 +1,66 @@
import { ValidatorProps } from '../types/validator'
import { Collection } from '@freearhey/core'
import { Validator } from './validator'
import { Feed } from '../models'
import { DataLoaderData } from '../types/dataLoader'

export class FeedValidator extends Validator {
  constructor(props: ValidatorProps) {
    super(props)
  }

  validate(feed: Feed): Collection {
    const {
      channelsKeyById,
      countriesKeyByCode,
      subdivisionsKeyByCode,
      regionsKeyByCode,
      timezonesKeyById
    }: DataLoaderData = this.data

    const errors = new Collection()

    const joiResults = feed.getSchema().validate(feed.data(), { abortEarly: false })
    if (joiResults.error) {
      joiResults.error.details.forEach((detail: { message: string }) => {
        errors.add({ line: feed.getLine(), message: `${feed.getStreamId()}: ${detail.message}` })
      })
    }

    if (!feed.hasValidId()) {
      errors.add({
        line: feed.getLine(),
        message: `"${feed.getStreamId()}" id "${feed.id}" must be derived from the name "${feed.name}"`
      })
    }

    if (!feed.hasValidChannelId(channelsKeyById)) {
      errors.add({
        line: feed.getLine(),
        message: `"${feed.getStreamId()}" has the wrong channel "${feed.channelId}"`
      })
    }

    if (
      !feed.hasValidBroadcastAreaCodes(countriesKeyByCode, subdivisionsKeyByCode, regionsKeyByCode)
    ) {
      errors.add({
        line: feed.getLine(),
        message: `"${feed.getStreamId()}" has the wrong broadcast_area "${feed.broadcastAreaCodes.join(';')}"`
      })
    }

    if (!feed.hasValidTimezones(timezonesKeyById)) {
      errors.add({
        line: feed.getLine(),
        message: `"${feed.getStreamId()}" has the wrong timezones "${feed.timezoneIds.join(';')}"`
      })
    }

    return errors
  }
}
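A note on getStreamId(): judging by the replaced_by values in the CSV fixtures elsewhere in this diff (for example M5.hu@HD and 002RadioTV.do@SD), a feed's stream ID appears to combine the channel ID and the feed ID. A sketch of that assumption, using an illustrative helper name:

// Assumption based on the "<channel>@<feed>" values seen in the CSV data; not taken from the commit itself.
function streamIdSketch(channelId: string, feedId: string): string {
  return `${channelId}@${feedId}`
}

streamIdSketch('M5.hu', 'HD') // 'M5.hu@HD'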
scripts/validators/index.ts (new file)
@@ -0,0 +1,9 @@
export * from './blocklistRecordValidator'
export * from './categoryValidator'
export * from './channelValidator'
export * from './countryValidator'
export * from './feedValidator'
export * from './languageValidator'
export * from './regionValidator'
export * from './subdivisionValidator'
export * from './timezoneValidator'
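This barrel file lets callers pull every validator from a single path. A one-line usage sketch; the importing module and its relative path are hypothetical:

// Hypothetical consumer module; only the validator names come from the commit.
import { ChannelValidator, FeedValidator } from '../validators'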
scripts/validators/languageValidator.ts (new file)
@@ -0,0 +1,23 @@
import { ValidatorProps } from '../types/validator'
import { Collection } from '@freearhey/core'
import { Validator } from './validator'
import { Language } from '../models'

export class LanguageValidator extends Validator {
  constructor(props: ValidatorProps) {
    super(props)
  }

  validate(language: Language): Collection {
    const errors = new Collection()

    const joiResults = language.getSchema().validate(language.data(), { abortEarly: false })
    if (joiResults.error) {
      joiResults.error.details.forEach((detail: { message: string }) => {
        errors.add({ line: language.getLine(), message: `${language.code}: ${detail.message}` })
      })
    }

    return errors
  }
}
scripts/validators/regionValidator.ts (new file)
@@ -0,0 +1,33 @@
import { DataLoaderData } from '../types/dataLoader'
import { ValidatorProps } from '../types/validator'
import { Collection } from '@freearhey/core'
import { Validator } from './validator'
import { Region } from '../models'

export class RegionValidator extends Validator {
  constructor(props: ValidatorProps) {
    super(props)
  }

  validate(region: Region): Collection {
    const { countriesKeyByCode }: DataLoaderData = this.data

    const errors = new Collection()

    const joiResults = region.getSchema().validate(region.data(), { abortEarly: false })
    if (joiResults.error) {
      joiResults.error.details.forEach((detail: { message: string }) => {
        errors.add({ line: region.getLine(), message: `${region.code}: ${detail.message}` })
      })
    }

    if (!region.hasValidCountryCodes(countriesKeyByCode)) {
      errors.add({
        line: region.getLine(),
        message: `"${region.code}" has the wrong countries "${region.countryCodes.join(';')}"`
      })
    }

    return errors
  }
}
scripts/validators/subdivisionValidator.ts (new file)
@@ -0,0 +1,36 @@
import { ValidatorProps } from '../types/validator'
import { Collection } from '@freearhey/core'
import { Validator } from './validator'
import { Subdivision } from '../models'
import { DataLoaderData } from '../types/dataLoader'

export class SubdivisionValidator extends Validator {
  constructor(props: ValidatorProps) {
    super(props)
  }

  validate(subdivision: Subdivision): Collection {
    const { countriesKeyByCode }: DataLoaderData = this.data

    const errors = new Collection()

    const joiResults = subdivision.getSchema().validate(subdivision.data(), { abortEarly: false })
    if (joiResults.error) {
      joiResults.error.details.forEach((detail: { message: string }) => {
        errors.add({
          line: subdivision.getLine(),
          message: `${subdivision.code}: ${detail.message}`
        })
      })
    }

    if (!subdivision.hasValidCountryCode(countriesKeyByCode)) {
      errors.add({
        line: subdivision.getLine(),
        message: `"${subdivision.code}" has an invalid country "${subdivision.countryCode}"`
      })
    }

    return errors
  }
}
scripts/validators/timezoneValidator.ts (new file)
@@ -0,0 +1,33 @@
import { DataLoaderData } from '../types/dataLoader'
import { ValidatorProps } from '../types/validator'
import { Collection } from '@freearhey/core'
import { Validator } from './validator'
import { Timezone } from '../models'

export class TimezoneValidator extends Validator {
  constructor(props: ValidatorProps) {
    super(props)
  }

  validate(timezone: Timezone): Collection {
    const { countriesKeyByCode }: DataLoaderData = this.data

    const errors = new Collection()

    const joiResults = timezone.getSchema().validate(timezone.data(), { abortEarly: false })
    if (joiResults.error) {
      joiResults.error.details.forEach((detail: { message: string }) => {
        errors.add({ line: timezone.getLine(), message: `${timezone.id}: ${detail.message}` })
      })
    }

    if (!timezone.hasValidCountryCodes(countriesKeyByCode)) {
      errors.add({
        line: timezone.getLine(),
        message: `"${timezone.id}" has the wrong countries "${timezone.countryCodes.join(';')}"`
      })
    }

    return errors
  }
}
scripts/validators/validator.ts (new file)
@@ -0,0 +1,10 @@
import { ValidatorProps } from '../types/validator'
import { DataLoaderData } from '../types/dataLoader'

export class Validator {
  data: DataLoaderData

  constructor({ data }: ValidatorProps) {
    this.data = data
  }
}
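All of the validators above follow this base class: the constructor stores the loaded DataLoaderData and each subclass implements validate() against its model. A sketch of a hypothetical additional validator written in the same pattern; CityValidator does not exist in this commit and is shown only to illustrate the extension point.

// Hypothetical example showing the pattern the files above share.
import { ValidatorProps } from '../types/validator'
import { Collection } from '@freearhey/core'
import { Validator } from './validator'

export class CityValidator extends Validator {
  constructor(props: ValidatorProps) {
    super(props)
  }

  validate(city: { getLine(): number }): Collection {
    const errors = new Collection()
    // Model-specific checks would go here, mirroring the validators above.
    return errors
  }
}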
@@ -1 +0,0 @@
-[{"id":"002RadioTV.do","name":"002 Radio TV","alt_names":[],"network":null,"owners":[],"country":"DO","subdivision":null,"city":null,"broadcast_area":["c/DO"],"languages":["spa"],"categories":["general"],"is_nsfw":false,"launched":null,"closed":null,"replaced_by":null,"website":"https://www.002radio.com/","logo":"https://i.imgur.com/7oNe8xj.png"},{"id":"BeijingSatelliteTV.cn","name":"Beijing Satellite TV","alt_names":["北京卫视"],"network":null,"owners":[],"country":"CN","subdivision":null,"city":"Beijing","broadcast_area":["c/CN"],"languages":["zho"],"categories":["general"],"is_nsfw":false,"launched":"1979-05-16","closed":null,"replaced_by":"002RadioTV.do@SD","website":"https://www.brtn.cn/btv/","logo":"https://i.imgur.com/vsktAez.png"},{"id":"M5.hu","name":"M5","alt_names":[],"network":null,"owners":[],"country":"HU","subdivision":null,"city":null,"broadcast_area":["c/HU"],"languages":["hun"],"categories":["auto"],"is_nsfw":true,"launched":null,"closed":"2001-01-01","replaced_by":"BeijingSatelliteTV.cn","website":"https://www.mediaklikk.hu/m5/","logo":"https://i.imgur.com/y21wFd0.png"}]
@@ -1,8 +0,0 @@
-id,name,alt_names,network,owners,country,subdivision,city,broadcast_area,languages,categories,is_nsfw,launched,closed,replaced_by,website,logo
-0TV.dk,0-TV,,,,DK,,København,c/DK,dan,general,FALSE,,,,https://0-tv.dk/,https://i.imgur.com/aR5q6mA.png
-1000xHoraTV.uy,1000xHora TV,,,,UY,,Montevideo,c/UY,spa,auto,FALSE,2020-01-01,2021-01-01,M5.hu@HD,https://www.1000xhoratv.com/,https://i.imgur.com/wP3bbYr.png
-beINMoviesTurk.tr,beIN Movies Turk,beIN Movies Türk,BBC,Gazprom Media,TR,US-CA,London,c/TR,tur,movies,FALSE,1979-05-16,1980-05-16,M5.hu,http://www.digiturk.com.tr/,https://i.imgur.com/nw8Sa2z.png
-M5.hu,M5,,,Duna Médiaszolgáltató Nonprofit Zrt.,HU,,,c/HU,hun,,TRUE,2020-01-01,,0TV.dk@SD,https://www.mediaklikk.hu/m5/,https://i.imgur.com/y21wFd0.png
-WenzhouEconomicandEducation.cn,Wenzhou Economic and Education,,,,CN,,Wenzhou,c/CN,zho,science,FALSE,,,,,https://www.tvchinese.net/uploads/tv/wzjjkj.jpg
-YiwuBusinessChannel.cn,Yiwu Business Channel,,,,CN,,,c/CN,zho,business,FALSE,,,,,https://www.tvchinese.net/uploads/tv/yiwutv.jpg
-YiwuNewsIntegratedChannel.cn,Yiwu News Integrated Channel,,,,CN,,,c/CN,zho,news,FALSE,,,,,https://www.tvchinese.net/uploads/tv/yiwutv.jpg
tests/__data__/expected/db/export/api/channels.json (new file)
@@ -0,0 +1 @@
[{"id":"002RadioTV.do","name":"002 Radio TV","alt_names":[],"network":null,"owners":[],"country":"DO","subdivision":null,"city":null,"categories":["general"],"is_nsfw":false,"launched":null,"closed":null,"replaced_by":null,"website":"https://www.002radio.com/","logo":"https://i.imgur.com/7oNe8xj.png"},{"id":"BeijingSatelliteTV.cn","name":"Beijing Satellite TV","alt_names":["北京卫视"],"network":null,"owners":[],"country":"CN","subdivision":null,"city":"Beijing","categories":["general"],"is_nsfw":false,"launched":"1979-05-16","closed":null,"replaced_by":"002RadioTV.do@SD","website":"https://www.brtn.cn/btv/","logo":"https://i.imgur.com/vsktAez.png"},{"id":"M5.hu","name":"M5","alt_names":[],"network":null,"owners":[],"country":"HU","subdivision":null,"city":null,"categories":["auto"],"is_nsfw":true,"launched":null,"closed":"2001-01-01","replaced_by":"BeijingSatelliteTV.cn","website":"https://www.mediaklikk.hu/m5/","logo":"https://i.imgur.com/y21wFd0.png"}]
tests/__data__/expected/db/export/data/channels.csv (new file)
@@ -0,0 +1,8 @@
id,name,alt_names,network,owners,country,subdivision,city,categories,is_nsfw,launched,closed,replaced_by,website,logo
0TV.dk,0-TV,,,,DK,,København,general,FALSE,,,,https://0-tv.dk/,https://i.imgur.com/aR5q6mA.png
1000xHoraTV.uy,1000xHora TV,,,,UY,,Montevideo,auto,FALSE,2020-01-01,2021-01-01,M5.hu@HD,https://www.1000xhoratv.com/,https://i.imgur.com/wP3bbYr.png
beINMoviesTurk.tr,beIN Movies Turk,beIN Movies Türk,BBC,Gazprom Media,TR,US-CA,London,movies,FALSE,1979-05-16,1980-05-16,M5.hu,http://www.digiturk.com.tr/,https://i.imgur.com/nw8Sa2z.png
M5.hu,M5,,,Duna Médiaszolgáltató Nonprofit Zrt.,HU,,,,TRUE,2020-01-01,,0TV.dk@SD,https://www.mediaklikk.hu/m5/,https://i.imgur.com/y21wFd0.png
WenzhouEconomicandEducation.cn,Wenzhou Economic and Education,,,,CN,,Wenzhou,science,FALSE,,,,,https://www.tvchinese.net/uploads/tv/wzjjkj.jpg
YiwuBusinessChannel.cn,Yiwu Business Channel,,,,CN,,,business,FALSE,,,,,https://www.tvchinese.net/uploads/tv/yiwutv.jpg
YiwuNewsIntegratedChannel.cn,Yiwu News Integrated Channel,,,,CN,,,news,FALSE,,,,,https://www.tvchinese.net/uploads/tv/yiwutv.jpg
tests/__data__/expected/db/update/data/blocklist.csv (new file)
@@ -0,0 +1,3 @@
channel,reason,ref
beINMoviesTurk.tr,dmca,https://github.com/iptv-org/iptv/issues/1831
HGTVHungary.hu,nsfw,https://github.com/iptv-org/iptv/issues/1831
tests/__data__/expected/db/update/data/channels.csv (new file)
@@ -0,0 +1,9 @@
id,name,alt_names,network,owners,country,subdivision,city,categories,is_nsfw,launched,closed,replaced_by,website,logo
0TV.dk,0-TV,,,,DK,,København,general,FALSE,,,,https://0-tv.dk/,https://i.imgur.com/aR5q6mA.png
1000xHoraTV.uy,1000xHora TV,,,,UY,,Montevideo,auto,FALSE,2020-01-01,2021-01-01,M5.hu@HD,https://www.1000xhoratv.com/,https://i.imgur.com/wP3bbYr.png
beINMoviesTurk.tr,beIN Movies Turk,beIN Movies Türk,BBC,Gazprom Media,TR,US-CA,London,movies,FALSE,1979-05-16,1980-05-16,M5.hu,http://www.digiturk.com.tr/,https://i.imgur.com/nw8Sa2z.png
Channel82.bm,Channel 82,,,,BM,,,,FALSE,,,,,https://i.imgur.com/y21wFd0.png
M5.hu,M5,,,Duna Médiaszolgáltató Nonprofit Zrt.,HU,,,,TRUE,2020-01-01,,0TV.dk@SD,https://www.mediaklikk.hu/m5/,https://i.imgur.com/y21wFd0.png
WenzhouEconomicandEducation.cn,Wenzhou Economic and Education,,,,CN,,Wenzhou,science,FALSE,,,,,https://www.tvchinese.net/uploads/tv/wzjjkj.jpg
YiwuBusinessChannel.cn,Yiwu Business Channel,,,,CN,,,business,FALSE,,,,,https://www.tvchinese.net/uploads/tv/yiwutv.jpg
YiwuNewsIntegratedChannel.cn,Yiwu News Integrated Channel,,,,CN,,,news,FALSE,,,,,https://www.tvchinese.net/uploads/tv/yiwutv.jpg
tests/__data__/expected/db/update/data/feeds.csv (new file)
@@ -0,0 +1,11 @@
channel,id,name,is_main,broadcast_area,timezones,languages,video_format
0TV.dk,SD,SD,TRUE,c/DK,Europe/Copenhagen,dan,576i
1000xHoraTV.uy,HD,HD,TRUE,c/CN,Africa/Johannesburg;Africa/Kigali,zho,576i
1000xHoraTV.uy,SD,SD,FALSE,c/UY,America/Montevideo,spa,576i
beINMoviesTurk.tr,SD,SD,TRUE,c/DO,America/Santo_Domingo,spa,480i
Channel82.bm,SD,SD,FALSE,c/BM,Atlantic/Bermuda,eng,480i
M5.hu,HD,HD,TRUE,c/BR,Africa/Dakar;Africa/El_Aaiun,por;spa,1080i
M5.hu,West,West,FALSE,c/DO,America/Santo_Domingo,spa,480i
WenzhouEconomicandEducation.cn,SD,SD,TRUE,c/CN,Africa/Johannesburg;Africa/Kigali,zho,576i
YiwuBusinessChannel.cn,SD,SD,TRUE,c/CN,Africa/Johannesburg;Africa/Kigali,zho,576i
YiwuNewsIntegratedChannel.cn,SD,SD,TRUE,c/CN,Africa/Johannesburg;Africa/Kigali,zho,576i
tests/__data__/input/db/export/data/channels.csv (new file)
@@ -0,0 +1,4 @@
id,name,alt_names,network,owners,country,subdivision,city,categories,is_nsfw,launched,closed,replaced_by,website,logo
002RadioTV.do,002 Radio TV,,,,DO,,,general,FALSE,,,,https://www.002radio.com/,https://i.imgur.com/7oNe8xj.png
BeijingSatelliteTV.cn,Beijing Satellite TV,北京卫视,,,CN,,Beijing,general,FALSE,1979-05-16,,002RadioTV.do@SD,https://www.brtn.cn/btv/,https://i.imgur.com/vsktAez.png
M5.hu,M5,,,,HU,,,auto,TRUE,,2001-01-01,BeijingSatelliteTV.cn,https://www.mediaklikk.hu/m5/,https://i.imgur.com/y21wFd0.png
tests/__data__/input/db/update/data/channels.csv (new file)
@@ -0,0 +1,7 @@
id,name,alt_names,network,owners,country,subdivision,city,categories,is_nsfw,launched,closed,replaced_by,website,logo
002RadioTV.do,002 Radio TV,,,,DO,,,general,FALSE,,,,https://www.002radio.com/,https://i.imgur.com/7oNe8xj.png
0TV.dk,0-TV,,,,DK,,København,general,FALSE,,,01TV.fr@SD,https://0-tv.dk/,https://i.imgur.com/aR5q6mA.png
1000xHoraTV.uy,1000xHora TV,,,,UY,,Montevideo,auto,FALSE,,,M5.hu@SD,https://www.1000xhoratv.com/,https://i.imgur.com/wP3bbYr.png
BeijingSatelliteTV.cn,Beijing Satellite TV,北京卫视,,,CN,,Beijing,general,FALSE,,,002RadioTV.do@SD,https://www.brtn.cn/btv/,https://i.imgur.com/vsktAez.png
M5.hu,M5,,,,HU,,,auto,TRUE,,2021-01-01,002RadioTV.do@SD,https://www.mediaklikk.hu/m5/,https://i.imgur.com/y21wFd0.png
Channel82.bm,Channel 82,,,,BM,,,,FALSE,,,,,https://i.imgur.com/y21wFd0.png
@@ -6,3 +6,4 @@ channel,id,name,is_main,broadcast_area,timezones,languages,video_format
 BeijingSatelliteTV.cn,SD,SD,TRUE,c/DO,America/Santo_Domingo,spa,480i
 M5.hu,SD,SD,FALSE,c/DO,America/Santo_Domingo,spa,480i
 M5.hu,West,West,TRUE,c/DO,America/Santo_Domingo,spa,480i
+Channel82.bm,SD,SD,FALSE,c/BM,America/Santo_Domingo,eng,480i
@@ -1025,5 +1025,88 @@ module.exports = [
     timeline_url: 'https://api.github.com/repos/iptv-org/database/issues/6871/timeline',
     performed_via_github_app: null,
     state_reason: null
+  },
+  {
+    url: 'https://api.github.com/repos/iptv-org/database/issues/17612',
+    repository_url: 'https://api.github.com/repos/iptv-org/database',
+    labels_url: 'https://api.github.com/repos/iptv-org/database/issues/17612/labels{/name}',
+    comments_url: 'https://api.github.com/repos/iptv-org/database/issues/17612/comments',
+    events_url: 'https://api.github.com/repos/iptv-org/database/issues/17612/events',
+    html_url: 'https://github.com/iptv-org/database/issues/17612',
+    id: 3021806112,
+    node_id: 'I_kwDOG1Kwp860HRog',
+    number: 17612,
+    title: 'Edit: Channel 82 SD [BM]',
+    user: {
+      login: 'Carlinhos027',
+      id: 111005658,
+      node_id: 'U_kgDOBp3P2g',
+      avatar_url: 'https://avatars.githubusercontent.com/u/111005658?v=4',
+      gravatar_id: '',
+      url: 'https://api.github.com/users/Carlinhos027',
+      html_url: 'https://github.com/Carlinhos027',
+      followers_url: 'https://api.github.com/users/Carlinhos027/followers',
+      following_url: 'https://api.github.com/users/Carlinhos027/following{/other_user}',
+      gists_url: 'https://api.github.com/users/Carlinhos027/gists{/gist_id}',
+      starred_url: 'https://api.github.com/users/Carlinhos027/starred{/owner}{/repo}',
+      subscriptions_url: 'https://api.github.com/users/Carlinhos027/subscriptions',
+      organizations_url: 'https://api.github.com/users/Carlinhos027/orgs',
+      repos_url: 'https://api.github.com/users/Carlinhos027/repos',
+      events_url: 'https://api.github.com/users/Carlinhos027/events{/privacy}',
+      received_events_url: 'https://api.github.com/users/Carlinhos027/received_events',
+      type: 'User',
+      user_view_type: 'public',
+      site_admin: false
+    },
+    labels: [
+      {
+        id: 5366738347,
+        node_id: 'LA_kwDOG1Kwp88AAAABP-Htqw',
+        url: 'https://api.github.com/repos/iptv-org/database/labels/approved',
+        name: 'approved',
+        color: '85DDDE',
+        default: false,
+        description: ''
+      },
+      {
+        id: 8389506496,
+        node_id: 'LA_kwDOG1Kwp88AAAAB9A21wA',
+        url: 'https://api.github.com/repos/iptv-org/database/labels/feeds:edit',
+        name: 'feeds:edit',
+        color: 'E0D817',
+        default: false,
+        description: 'Request to edit feed description'
+      }
+    ],
+    state: 'open',
+    locked: false,
+    assignee: null,
+    assignees: [],
+    milestone: null,
+    comments: 0,
+    created_at: '2025-04-26T12:21:13Z',
+    updated_at: '2025-04-26T12:21:13Z',
+    closed_at: null,
+    author_association: 'CONTRIBUTOR',
+    type: null,
+    active_lock_reason: null,
+    sub_issues_summary: { total: 0, completed: 0, percent_completed: 0 },
+    body: '### Channel ID (required)\n\nChannel82.bm\n\n### Feed ID (required)\n\nSD\n\n### Feed Name\n\n_No response_\n\n### Main Feed\n\nNone\n\n### Broadcast Area\n\n_No response_\n\n### Timezones\n\nAtlantic/Bermuda\n\n### Languages\n\n_No response_\n\n### Format\n\nNone\n\n### Notes\n\n_No response_',
+    closed_by: null,
+    reactions: {
+      url: 'https://api.github.com/repos/iptv-org/database/issues/17612/reactions',
+      total_count: 0,
+      '+1': 0,
+      '-1': 0,
+      laugh: 0,
+      hooray: 0,
+      confused: 0,
+      heart: 0,
+      rocket: 0,
+      eyes: 0
+    },
+    timeline_url: 'https://api.github.com/repos/iptv-org/database/issues/17612/timeline',
+    performed_via_github_app: null,
+    state_reason: null
   }
 ]
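The body of the added issue follows the feeds:edit issue form, so each value sits under a "###" heading. A rough sketch of pulling one field out of such a body; the parsing approach is illustrative only and is not the project's actual issue resolver.

// Illustrative only: extract the value under a given "### <label>" heading from an issue body.
function getIssueField(body: string, label: string): string | undefined {
  const match = body.match(new RegExp(`### ${label}[^\\n]*\\n\\n([^\\n]+)`))
  const value = match ? match[1].trim() : undefined
  return value === '_No response_' || value === 'None' ? undefined : value
}

// getIssueField(issue.body, 'Channel ID') === 'Channel82.bm'
// getIssueField(issue.body, 'Timezones') === 'Atlantic/Bermuda'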
tests/__data__/input/db/validate/duplicate/blocklist.csv (new file)
@@ -0,0 +1,3 @@
channel,reason,ref
002RadioTV.do,dmca,https://en.wikipedia.org/wiki/Lemurs_of_Madagascar_(book)
002RadioTV.do,dmca,https://en.wikipedia.org/wiki/Lemurs_of_Madagascar_(book)
tests/__data__/input/db/validate/duplicate/channels.csv (new file)
@@ -0,0 +1,2 @@
id,name,alt_names,network,owners,country,subdivision,city,categories,is_nsfw,launched,closed,replaced_by,website,logo
002RadioTV.do,002 Radio TV,,,,DO,,,,FALSE,,,,,https://i.imgur.com/7oNe8xj.png
@@ -1,3 +1,3 @@
 channel,id,name,is_main,broadcast_area,timezones,languages,video_format
 002RadioTV.do,SD,SD,TRUE,c/DO,America/Santo_Domingo,spa,480i
-002RadioTV.do,SD,HD,FALSE,c/DO,America/Santo_Domingo,spa,1080i
+002RadioTV.do,SD,SD,FALSE,c/DO,America/Santo_Domingo,spa,1080i
tests/__data__/input/db/validate/empty_line/channels.csv (new file)
@@ -0,0 +1 @@
id,name,alt_names,network,owners,country,subdivision,city,categories,is_nsfw,launched,closed,replaced_by,website,logo
tests/__data__/input/db/validate/invalid_line_ending/.gitattributes (vendored, new file)
@@ -0,0 +1 @@
*.csv binary
@ -0,0 +1,4 @@
|
||||||
|
id,name,alt_names,network,owners,country,subdivision,city,categories,is_nsfw,launched,closed,replaced_by,website,logo
|
||||||
|
002RadioTV.do,002 Radio TV,,,,DO,,,,FALSE,,,002RadioTV.do@4K,ttps://www.002radio.com/,https://i.imgur.com/7oNe8xj.png
|
||||||
|
10Channel.do,10 Channel,,,,DO,,,,FALSE,,,,,https://i.ibb.co/0XMM4gn/download-7.png
|
||||||
|
24B.do,24B,,,,DO,,,,FALSE,,,,,https://i.imgur.com/8LgdPst.png
|
|
tests/__data__/input/db/validate/invalid_value/feeds.csv (new file)
@@ -0,0 +1,6 @@
channel,id,name,is_main,broadcast_area,timezones,languages,video_format
0TV.dk,SD,SD,TRUE,c/BE,Europe/Copenhagen,dan,576I
002RadioTV.do,SD,SD,TRUE,c/DO,Africa/Accra,dan,576i
002RadioTV.do,HD,HD,TRUE,c/DO,Africa/Accra,dan,576i
24B.do,SD,SD,FALSE,c/DO,Africa/Accra,dan,576i
10Channel.do,SD,SD,TRUE,c/DO,Africa/Accra,dan,576i
Some files were not shown because too many files have changed in this diff.