Mirror of https://github.com/iptv-org/iptv.git (synced 2025-05-13 02:20:03 -04:00)

Update scripts

commit 02ec7e6f76
parent 74b3cff1d2
42 changed files with 1317 additions and 694 deletions
@@ -1,45 +1,63 @@
 import { Logger, Storage, Collection, Dictionary } from '@freearhey/core'
 import { DATA_DIR, STREAMS_DIR } from '../../constants'
 import { IssueLoader, PlaylistParser } from '../../core'
-import { Stream, Playlist, Channel, Issue } from '../../models'
+import { Stream, Playlist, Channel, Feed, Issue } from '../../models'
 import validUrl from 'valid-url'
+import { uniqueId } from 'lodash'

 let processedIssues = new Collection()
-let streams: Collection
-let groupedChannels: Dictionary
-let issues: Collection

 async function main() {
   const logger = new Logger({ disabled: true })
   const loader = new IssueLoader()

   logger.info('loading issues...')
-  issues = await loader.load()
+  const issues = await loader.load()

   logger.info('loading channels from api...')
   const dataStorage = new Storage(DATA_DIR)
-  const channelsContent = await dataStorage.json('channels.json')
-  groupedChannels = new Collection(channelsContent)
-    .map(data => new Channel(data))
-    .keyBy((channel: Channel) => channel.id)
+  const channelsData = await dataStorage.json('channels.json')
+  const channels = new Collection(channelsData).map(data => new Channel(data))
+  const channelsGroupedById = channels.keyBy((channel: Channel) => channel.id)
+  const feedsData = await dataStorage.json('feeds.json')
+  const feeds = new Collection(feedsData).map(data =>
+    new Feed(data).withChannel(channelsGroupedById)
+  )
+  const feedsGroupedByChannelId = feeds.groupBy((feed: Feed) =>
+    feed.channel ? feed.channel.id : uniqueId()
+  )

   logger.info('loading streams...')
   const streamsStorage = new Storage(STREAMS_DIR)
-  const parser = new PlaylistParser({ storage: streamsStorage })
+  const parser = new PlaylistParser({
+    storage: streamsStorage,
+    feedsGroupedByChannelId,
+    channelsGroupedById
+  })
   const files = await streamsStorage.list('**/*.m3u')
-  streams = await parser.parse(files)
+  const streams = await parser.parse(files)

   logger.info('removing broken streams...')
-  await removeStreams(loader)
+  await removeStreams({ streams, issues })

   logger.info('edit stream description...')
-  await editStreams(loader)
+  await editStreams({
+    streams,
+    issues,
+    channelsGroupedById,
+    feedsGroupedByChannelId
+  })

   logger.info('add new streams...')
-  await addStreams(loader)
+  await addStreams({
+    streams,
+    issues,
+    channelsGroupedById,
+    feedsGroupedByChannelId
+  })

   logger.info('saving...')
-  const groupedStreams = streams.groupBy((stream: Stream) => stream.filepath)
+  const groupedStreams = streams.groupBy((stream: Stream) => stream.getFilepath())
   for (let filepath of groupedStreams.keys()) {
     let streams = groupedStreams.get(filepath) || []
     streams = streams.filter((stream: Stream) => stream.removed === false)
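
Note on the feed grouping added in the hunk above: lodash's uniqueId() serves as the fallback grouping key, so feeds whose channel could not be resolved each land in their own bucket instead of all sharing one. A minimal sketch of the same pattern using plain lodash and a hypothetical feed shape (the real Feed and Collection classes come from the project's models and @freearhey/core):

    import { groupBy, uniqueId } from 'lodash'

    // Hypothetical stand-in for the Feed model.
    type FeedLike = { id: string; channel?: { id: string } }

    const feeds: FeedLike[] = [
      { id: 'sd', channel: { id: 'ChannelA.us' } },
      { id: 'hd', channel: { id: 'ChannelA.us' } },
      { id: 'orphan' } // channel could not be resolved
    ]

    // Feeds with a resolved channel share that channel's bucket; each orphan
    // gets its own incrementing key ('1', '2', ...) rather than an 'undefined' bucket.
    const feedsGroupedByChannelId = groupBy(feeds, feed =>
      feed.channel ? feed.channel.id : uniqueId()
    )
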
@@ -54,7 +72,7 @@ async function main() {

 main()

-async function removeStreams(loader: IssueLoader) {
+async function removeStreams({ streams, issues }: { streams: Collection; issues: Collection }) {
   const requests = issues.filter(
     issue => issue.labels.includes('streams:remove') && issue.labels.includes('approved')
   )
@@ -62,22 +80,35 @@ async function removeStreams(loader: IssueLoader) {
     const data = issue.data
     if (data.missing('brokenLinks')) return

-    const brokenLinks = data.getString('brokenLinks').split(/\r?\n/).filter(Boolean)
+    const brokenLinks = data.getString('brokenLinks') || ''

     let changed = false
-    brokenLinks.forEach(link => {
-      const found: Stream = streams.first((_stream: Stream) => _stream.url === link.trim())
-      if (found) {
-        found.removed = true
-        changed = true
-      }
-    })
+    brokenLinks
+      .split(/\r?\n/)
+      .filter(Boolean)
+      .forEach(link => {
+        const found: Stream = streams.first((_stream: Stream) => _stream.url === link.trim())
+        if (found) {
+          found.removed = true
+          changed = true
+        }
+      })

     if (changed) processedIssues.add(issue.number)
   })
 }

-async function editStreams(loader: IssueLoader) {
+async function editStreams({
+  streams,
+  issues,
+  channelsGroupedById,
+  feedsGroupedByChannelId
+}: {
+  streams: Collection
+  issues: Collection
+  channelsGroupedById: Dictionary
+  feedsGroupedByChannelId: Dictionary
+}) {
   const requests = issues.filter(
     issue => issue.labels.includes('streams:edit') && issue.labels.includes('approved')
   )
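
The signature changes in these hunks replace the module-level streams, groupedChannels and issues variables with dependencies passed explicitly into each helper, so the helpers no longer rely on hidden state set up by main(). A condensed sketch of the call convention, assuming only the Collection type from @freearhey/core:

    import { Collection } from '@freearhey/core'

    // Each helper receives exactly what it needs as one destructured options object...
    async function removeStreams({ streams, issues }: { streams: Collection; issues: Collection }) {
      // ...and operates only on the collections passed in.
    }

    // ...and main() passes the collections it just built:
    // await removeStreams({ streams, issues })
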
@@ -86,59 +117,110 @@ async function editStreams(loader: IssueLoader) {

     if (data.missing('streamUrl')) return

-    let stream = streams.first(
+    let stream: Stream = streams.first(
       (_stream: Stream) => _stream.url === data.getString('streamUrl')
-    ) as Stream
-
+    )
     if (!stream) return

-    if (data.has('channelId')) {
-      const channel = groupedChannels.get(data.getString('channelId'))
+    const streamId = data.getString('streamId') || ''
+    const [channelId, feedId] = streamId.split('@')

-      if (!channel) return
-
-      stream.channel = data.getString('channelId')
-      stream.filepath = `${channel.country.toLowerCase()}.m3u`
-      stream.line = -1
-      stream.name = channel.name
+    if (channelId) {
+      stream
+        .setChannelId(channelId)
+        .setFeedId(feedId)
+        .withChannel(channelsGroupedById)
+        .withFeed(feedsGroupedByChannelId)
+        .updateId()
+        .updateName()
+        .updateFilepath()
     }

-    if (data.has('label')) stream.label = data.getString('label')
-    if (data.has('quality')) stream.quality = data.getString('quality')
-    if (data.has('httpUserAgent')) stream.httpUserAgent = data.getString('httpUserAgent')
-    if (data.has('httpReferrer')) stream.httpReferrer = data.getString('httpReferrer')
+    const label = data.getString('label') || ''
+    const quality = data.getString('quality') || ''
+    const httpUserAgent = data.getString('httpUserAgent') || ''
+    const httpReferrer = data.getString('httpReferrer') || ''
+
+    if (data.has('label')) stream.setLabel(label)
+    if (data.has('quality')) stream.setQuality(quality)
+    if (data.has('httpUserAgent')) stream.setHttpUserAgent(httpUserAgent)
+    if (data.has('httpReferrer')) stream.setHttpReferrer(httpReferrer)

     processedIssues.add(issue.number)
   })
 }

-async function addStreams(loader: IssueLoader) {
+async function addStreams({
+  streams,
+  issues,
+  channelsGroupedById,
+  feedsGroupedByChannelId
+}: {
+  streams: Collection
+  issues: Collection
+  channelsGroupedById: Dictionary
+  feedsGroupedByChannelId: Dictionary
+}) {
   const requests = issues.filter(
     issue => issue.labels.includes('streams:add') && issue.labels.includes('approved')
   )
   requests.forEach((issue: Issue) => {
     const data = issue.data
-    if (data.missing('channelId') || data.missing('streamUrl')) return
+    if (data.missing('streamId') || data.missing('streamUrl')) return
     if (streams.includes((_stream: Stream) => _stream.url === data.getString('streamUrl'))) return
-    if (!validUrl.isUri(data.getString('streamUrl'))) return
+    const stringUrl = data.getString('streamUrl') || ''
+    if (!isUri(stringUrl)) return

-    const channel = groupedChannels.get(data.getString('channelId'))
+    const streamId = data.getString('streamId') || ''
+    const [channelId] = streamId.split('@')
+
+    const channel: Channel = channelsGroupedById.get(channelId)
     if (!channel) return

+    const label = data.getString('label') || ''
+    const quality = data.getString('quality') || ''
+    const httpUserAgent = data.getString('httpUserAgent') || ''
+    const httpReferrer = data.getString('httpReferrer') || ''
+
     const stream = new Stream({
-      channel: data.getString('channelId'),
-      url: data.getString('streamUrl'),
-      label: data.getString('label'),
-      quality: data.getString('quality'),
-      httpUserAgent: data.getString('httpUserAgent'),
-      httpReferrer: data.getString('httpReferrer'),
-      filepath: `${channel.country.toLowerCase()}.m3u`,
+      tvg: {
+        id: streamId,
+        name: '',
+        url: '',
+        logo: '',
+        rec: '',
+        shift: ''
+      },
+      name: data.getString('channelName') || channel.name,
+      url: stringUrl,
+      group: {
+        title: ''
+      },
+      http: {
+        'user-agent': httpUserAgent,
+        referrer: httpReferrer
+      },
       line: -1,
-      name: data.getString('channelName') || channel.name
+      raw: '',
+      timeshift: '',
+      catchup: {
+        type: '',
+        source: '',
+        days: ''
+      }
     })
+      .withChannel(channelsGroupedById)
+      .withFeed(feedsGroupedByChannelId)
+      .setLabel(label)
+      .setQuality(quality)
+      .updateName()
+      .updateFilepath()

     streams.add(stream)
     processedIssues.add(issue.number)
   })
 }
+
+function isUri(string: string) {
+  return validUrl.isUri(encodeURI(string))
+}
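
Two details of the reworked addStreams/editStreams flow, illustrated below: the streamId field appears to use the '<channelId>@<feedId>' form and is split on '@' (the feed part may be absent), and URL validation now runs through the new isUri() helper, which percent-encodes the input with the standard encodeURI() before handing it to valid-url, so a URL containing, say, a space can still pass. The ids and URLs in this sketch are made up:

    import validUrl from 'valid-url'

    // streamId arrives as '<channelId>' or '<channelId>@<feedId>'.
    const [channelId, feedId] = 'AnimalPlanet.us@SD'.split('@')   // 'AnimalPlanet.us', 'SD'
    const [bareChannelId, noFeedId] = 'AnimalPlanet.us'.split('@') // 'AnimalPlanet.us', undefined

    // valid-url returns the URI when it is valid and undefined otherwise; a raw space
    // fails, but the same URL passes once encodeURI() has escaped it.
    validUrl.isUri('https://example.com/stream list.m3u8')            // undefined
    validUrl.isUri(encodeURI('https://example.com/stream list.m3u8')) // 'https://example.com/stream%20list.m3u8'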