Mirror of https://github.com/iptv-org/iptv.git (synced 2024-12-26 17:37:02 +00:00)
Commit f1d2add19a ("Update scripts"), parent 8a83f23243

@@ -1,28 +0,0 @@
const { logger, db, file } = require('../../core')
const _ = require('lodash')

const PUBLIC_DIR = process.env.PUBLIC_DIR || '.api'

async function main() {
  logger.info(`loading streams...`)
  await db.streams.load()

  let streams = await db.streams.find({})
  streams = _.sortBy(streams, 'channel')
  streams = streams.map(stream => {
    let data = {
      channel: stream.channel,
      url: stream.url,
      http_referrer: stream.http_referrer,
      user_agent: stream.user_agent
    }

    return data
  })
  logger.info(`found ${streams.length} streams`)

  logger.info('saving to .api/streams.json...')
  await file.create(`${PUBLIC_DIR}/streams.json`, JSON.stringify(streams))
}

main()

scripts/commands/api/generate.ts (new file, 25 lines)
@@ -0,0 +1,25 @@
import { API_DIR, DB_DIR } from '../../constants'
import { Logger, Database, Collection, Storage } from '../../core'
import { Stream } from '../../models'

async function main() {
  const logger = new Logger()

  logger.info(`loading streams...`)
  const db = new Database(DB_DIR)
  const dbStreams = await db.load('streams.db')
  const docs = await dbStreams.find({})

  const streams = new Collection(docs as any[])
    .map(data => new Stream(data))
    .orderBy((stream: Stream) => stream.channel)
    .map((stream: Stream) => stream.toJSON())

  logger.info(`found ${streams.count()} streams`)

  logger.info('saving to .api/streams.json...')
  const storage = new Storage(API_DIR)
  await storage.save('streams.json', streams.toJSON())
}

main()

@@ -1,11 +1,11 @@
 #!/bin/bash
 
-mkdir -p scripts/tmp/data
-curl -L -o scripts/tmp/data/blocklist.json https://iptv-org.github.io/api/blocklist.json
-curl -L -o scripts/tmp/data/categories.json https://iptv-org.github.io/api/categories.json
-curl -L -o scripts/tmp/data/channels.json https://iptv-org.github.io/api/channels.json
-curl -L -o scripts/tmp/data/streams.json https://iptv-org.github.io/api/streams.json
-curl -L -o scripts/tmp/data/countries.json https://iptv-org.github.io/api/countries.json
-curl -L -o scripts/tmp/data/languages.json https://iptv-org.github.io/api/languages.json
-curl -L -o scripts/tmp/data/regions.json https://iptv-org.github.io/api/regions.json
-curl -L -o scripts/tmp/data/subdivisions.json https://iptv-org.github.io/api/subdivisions.json
+mkdir -p temp/data
+curl -L -o temp/data/blocklist.json https://iptv-org.github.io/api/blocklist.json
+curl -L -o temp/data/categories.json https://iptv-org.github.io/api/categories.json
+curl -L -o temp/data/channels.json https://iptv-org.github.io/api/channels.json
+curl -L -o temp/data/streams.json https://iptv-org.github.io/api/streams.json
+curl -L -o temp/data/countries.json https://iptv-org.github.io/api/countries.json
+curl -L -o temp/data/languages.json https://iptv-org.github.io/api/languages.json
+curl -L -o temp/data/regions.json https://iptv-org.github.io/api/regions.json
+curl -L -o temp/data/subdivisions.json https://iptv-org.github.io/api/subdivisions.json

@@ -1,40 +0,0 @@
const { db, file, parser, store, logger } = require('../../core')
const { program } = require('commander')
const _ = require('lodash')

const options = program
  .option('--input-dir <input-dir>', 'Set path to input directory', 'streams')
  .parse(process.argv)
  .opts()

async function main() {
  logger.info(`looking for streams...`)
  const streams = []
  const files = await file.list(`${options.inputDir}/**/*.m3u`)
  for (const filepath of files) {
    const playlist = await parser.parsePlaylist(filepath)
    for (const item of playlist.items) {
      item.filepath = filepath

      const stream = store.create()

      stream.set('channel', item.tvg.id)
      stream.set('title', item.name)
      stream.set('filepath', item.filepath)
      stream.set('url', item.url)
      stream.set('http_referrer', item.http.referrer)
      stream.set('user_agent', item.http['user-agent'])

      streams.push(stream)
    }
  }
  logger.info(`found ${streams.length} streams`)

  logger.info('saving to the database...')
  await db.streams.load()
  await db.streams.reset()
  const data = streams.map(stream => stream.data())
  await db.streams.insert(data)
}

main()

scripts/commands/database/create.ts (new file, 33 lines)
@@ -0,0 +1,33 @@
import { Storage, Logger, PlaylistParser, Collection, Database } from '../../core'
import { Stream, Playlist } from '../../models'
import { STREAMS_DIR, DB_DIR } from '../../constants'

async function main() {
  const logger = new Logger()

  logger.info(`looking for streams...`)
  const storage = new Storage(STREAMS_DIR)
  const parser = new PlaylistParser({
    storage
  })
  const files = await storage.list(`**/*.m3u`)
  let streams = new Collection()
  for (let filepath of files) {
    const playlist: Playlist = await parser.parse(filepath)
    streams = streams.concat(playlist.streams)
  }

  logger.info(`found ${streams.count()} streams`)

  logger.info('clean up the storage...')
  const dbStorage = new Storage(DB_DIR)
  await dbStorage.clear('streams.db')

  logger.info('saving streams to the database...')
  const db = new Database(DB_DIR)
  const dbStreams = await db.load('streams.db')
  const data = streams.map((stream: Stream) => stream.data()).all()
  await dbStreams.insert(data)
}

main()

@@ -1,33 +0,0 @@
const { create: createPlaylist } = require('../../core/playlist')
const { normalize: normalizeUrl } = require('../../core/url')
const { db, logger, file } = require('../../core')
const { orderBy } = require('natural-orderby')
const _ = require('lodash')

async function main() {
  logger.info('loading streams...')
  await db.streams.load()
  let streams = await db.streams.find({})

  streams = streams.map(stream => {
    stream.url = normalizeUrl(stream.url)

    return stream
  })

  logger.info('sorting links...')
  streams = orderBy(
    streams,
    ['channel', s => (s.channel ? '' : s.title), 'url'],
    ['asc', 'asc', 'asc']
  )

  logger.info('saving...')
  const files = _.groupBy(streams, 'filepath')
  for (const filepath in files) {
    const playlist = createPlaylist(files[filepath], { public: false })
    await file.create(filepath, playlist.toString())
  }
}

main()

@@ -1,76 +0,0 @@
const { db, generator, api, logger, file } = require('../../core')
const { orderBy } = require('natural-orderby')
const _ = require('lodash')

async function main() {
  const streams = await loadStreams()

  logger.info('generating categories/...')
  await generator.generate('categories', streams)
  logger.info('generating countries/...')
  await generator.generate('countries', streams)
  logger.info('generating languages/...')
  await generator.generate('languages', streams)
  logger.info('generating regions/...')
  await generator.generate('regions', streams)
  logger.info('generating index.category.m3u...')
  await generator.generate('index_category_m3u', streams)
  logger.info('generating index.country.m3u...')
  await generator.generate('index_country_m3u', streams)
  logger.info('generating index.language.m3u...')
  await generator.generate('index_language_m3u', streams)
  logger.info('generating index.m3u...')
  await generator.generate('index_m3u', streams)
  logger.info('generating index.nsfw.m3u...')
  await generator.generate('index_nsfw_m3u', streams)
  logger.info('generating index.region.m3u...')
  await generator.generate('index_region_m3u', streams)
}

main()

async function loadStreams() {
  await db.streams.load()
  let streams = await db.streams.find({})
  streams = orderBy(streams, ['channel', 'url'], ['asc', 'asc'])
  streams = _.uniqBy(streams, stream => stream.channel || _.uniqueId())

  await api.channels.load()
  let channels = await api.channels.all()
  channels = _.keyBy(channels, 'id')

  await api.categories.load()
  let categories = await api.categories.all()
  categories = _.keyBy(categories, 'id')

  await api.languages.load()
  let languages = await api.languages.all()
  languages = _.keyBy(languages, 'code')

  streams = streams.map(stream => {
    const channel = channels[stream.channel] || null
    const filename = file.getFilename(stream.filepath)
    const [_, code] = filename.match(/^([a-z]{2})(_|$)/) || [null, null]
    const defaultBroadcastArea = code ? [`c/${code.toUpperCase()}`] : []

    if (channel) {
      stream.categories = channel.categories.map(id => categories[id]).filter(i => i)
      stream.languages = channel.languages.map(id => languages[id]).filter(i => i)
      stream.broadcast_area = channel.broadcast_area
      stream.is_nsfw = channel.is_nsfw
      stream.logo = channel.logo
    } else {
      stream.categories = []
      stream.languages = []
      stream.broadcast_area = defaultBroadcastArea
      stream.is_nsfw = false
      stream.logo = null
    }

    return stream
  })

  streams = orderBy(streams, ['title'], ['asc'])

  return streams
}

scripts/commands/playlist/generate.ts (new file, 148 lines)
@@ -0,0 +1,148 @@
import { File, Storage } from '../../core'
import { Stream, Category, Channel, Language, Country, Region, Subdivision } from '../../models'
import { Database } from '../../core/database'
import { Collection } from '../../core/collection'
import { Logger } from '../../core/logger'
import _ from 'lodash'
import {
  CategoriesGenerator,
  CountriesGenerator,
  LanguagesGenerator,
  RegionsGenerator,
  IndexGenerator,
  IndexNsfwGenerator,
  IndexCategoryGenerator,
  IndexCountryGenerator,
  IndexLanguageGenerator,
  IndexRegionGenerator
} from '../../generators'
import { DATA_DIR, DB_DIR, LOGS_DIR } from '../../constants'

async function main() {
  const logger = new Logger()

  const storage = new Storage(DATA_DIR)

  const channelsContent = await storage.json('channels.json')
  const channels = new Collection(channelsContent).map(data => new Channel(data))

  const categoriesContent = await storage.json('categories.json')
  const categories = new Collection(categoriesContent).map(data => new Category(data))

  const countriesContent = await storage.json('countries.json')
  const countries = new Collection(countriesContent).map(data => new Country(data))

  const languagesContent = await storage.json('languages.json')
  const languages = new Collection(languagesContent).map(data => new Language(data))

  const regionsContent = await storage.json('regions.json')
  const regions = new Collection(regionsContent).map(data => new Region(data))

  const subdivisionsContent = await storage.json('subdivisions.json')
  const subdivisions = new Collection(subdivisionsContent).map(data => new Subdivision(data))

  const streams = await loadStreams({ channels, categories, languages })

  const generatorsLogger = new Logger({
    stream: await new Storage(LOGS_DIR).createStream(`generators.log`)
  })

  logger.info('generating categories/...')
  await new CategoriesGenerator({ categories, streams, logger: generatorsLogger }).generate()

  logger.info('generating countries/...')
  await new CountriesGenerator({
    countries,
    streams,
    regions,
    subdivisions,
    logger: generatorsLogger
  }).generate()

  logger.info('generating languages/...')
  await new LanguagesGenerator({ streams, logger: generatorsLogger }).generate()

  logger.info('generating regions/...')
  await new RegionsGenerator({
    streams,
    regions,
    subdivisions,
    logger: generatorsLogger
  }).generate()

  logger.info('generating index.m3u...')
  await new IndexGenerator({ streams, logger: generatorsLogger }).generate()

  logger.info('generating index.nsfw.m3u...')
  await new IndexNsfwGenerator({ streams, logger: generatorsLogger }).generate()

  logger.info('generating index.category.m3u...')
  await new IndexCategoryGenerator({ streams, logger: generatorsLogger }).generate()

  logger.info('generating index.country.m3u...')
  await new IndexCountryGenerator({
    streams,
    countries,
    regions,
    subdivisions,
    logger: generatorsLogger
  }).generate()

  logger.info('generating index.language.m3u...')
  await new IndexLanguageGenerator({ streams, logger: generatorsLogger }).generate()

  logger.info('generating index.region.m3u...')
  await new IndexRegionGenerator({ streams, regions, logger: generatorsLogger }).generate()
}

main()

async function loadStreams({
  channels,
  categories,
  languages
}: {
  channels: Collection
  categories: Collection
  languages: Collection
}) {
  const groupedChannels = channels.keyBy(channel => channel.id)
  const groupedCategories = categories.keyBy(category => category.id)
  const groupedLanguages = languages.keyBy(language => language.code)

  const db = new Database(DB_DIR)
  const dbStreams = await db.load('streams.db')
  const docs = await dbStreams.find({})
  const streams = new Collection(docs as any[])
    .map((data: any) => new Stream(data))
    .orderBy([(stream: Stream) => stream.channel, (stream: Stream) => stream.url], ['asc', 'asc'])
    .uniqBy((stream: Stream) => stream.channel || _.uniqueId())
    .map((stream: Stream) => {
      const channel: Channel | undefined = groupedChannels.get(stream.channel)

      if (channel) {
        const channelCategories = channel.categories
          .map((id: string) => groupedCategories.get(id))
          .filter(Boolean)
        const channelLanguages = channel.languages
          .map((id: string) => groupedLanguages.get(id))
          .filter(Boolean)

        stream.categories = channelCategories
        stream.languages = channelLanguages
        stream.broadcastArea = channel.broadcastArea
        stream.isNSFW = channel.isNSFW
        if (channel.logo) stream.logo = channel.logo
      } else {
        const file = new File(stream.filepath)
        const [_, countryCode] = file.getFilename().match(/^([a-z]{2})(_|$)/) || [null, null]
        const defaultBroadcastArea = countryCode ? [`c/${countryCode.toUpperCase()}`] : []

        stream.broadcastArea = new Collection(defaultBroadcastArea)
      }

      return stream
    })

  return streams
}

scripts/commands/playlist/update.ts (new file, 143 lines)
@@ -0,0 +1,143 @@
import { DB_DIR, DATA_DIR, STREAMS_DIR } from '../../constants'
import { Database, Storage, Logger, Collection, Dictionary, IssueLoader } from '../../core'
import { Stream, Playlist, Channel } from '../../models'

let processedIssues = new Collection()
let streams: Collection
let groupedChannels: Dictionary

async function main() {
  const logger = new Logger({ disabled: true })
  const loader = new IssueLoader()

  logger.info('loading streams...')
  const db = new Database(DB_DIR)
  const docs = await db.load('streams.db')
  const dbStreams = await docs.find({})

  streams = new Collection(dbStreams as any[]).map(data => new Stream(data))

  const storage = new Storage(DATA_DIR)
  const channelsContent = await storage.json('channels.json')
  groupedChannels = new Collection(channelsContent)
    .map(data => new Channel(data))
    .keyBy((channel: Channel) => channel.id)

  logger.info('removing broken streams...')
  await removeStreams(loader)

  logger.info('edit stream description...')
  await editStreams(loader)

  logger.info('add new streams...')
  await addStreams(loader)

  logger.info('normalizing links...')
  streams = streams.map(stream => {
    stream.normalizeURL()
    return stream
  })

  logger.info('sorting links...')
  streams = streams.orderBy(
    [
      (stream: Stream) => stream.name,
      (stream: Stream) => parseInt(stream.quality.replace('p', '')),
      (stream: Stream) => stream.label,
      (stream: Stream) => stream.url
    ],
    ['asc', 'desc', 'asc', 'asc']
  )

  logger.info('saving...')
  const streamsStorage = new Storage(STREAMS_DIR)
  const groupedStreams = streams.groupBy((stream: Stream) => stream.filepath)
  for (let filepath of groupedStreams.keys()) {
    const streams = groupedStreams.get(filepath) || []

    if (!streams.length) return

    const playlist = new Playlist(streams, { public: false })
    await streamsStorage.save(filepath, playlist.toString())
  }

  const output = processedIssues.map(issue_number => `closes #${issue_number}`).join(', ')
  console.log(`OUTPUT=${output}`)
}

main()

async function removeStreams(loader: IssueLoader) {
  const issues = await loader.load({ labels: ['streams:remove', 'approved'] })
  issues.forEach((data: Dictionary) => {
    if (data.missing('stream_url')) return

    const removed = streams.remove((_stream: Stream) => _stream.url === data.get('stream_url'))
    if (removed.notEmpty()) {
      processedIssues.add(data.get('issue_number'))
    }
  })
}

async function editStreams(loader: IssueLoader) {
  const issues = await loader.load({ labels: ['streams:edit', 'approved'] })
  issues.forEach((data: Dictionary) => {
    if (data.missing('stream_url')) return

    let stream = streams.first(
      (_stream: Stream) => _stream.url === data.get('stream_url')
    ) as Stream

    if (!stream) return

    if (data.has('channel_id')) {
      const channel = groupedChannels.get(data.get('channel_id'))

      if (!channel) return

      stream.channel = data.get('channel_id')
      stream.filepath = `${channel.country.toLowerCase()}.m3u`
      stream.line = -1
      stream.name = channel.name
    }

    if (data.has('channel_name')) stream.name = data.get('channel_name')
    if (data.has('label')) stream.label = data.get('label')
    if (data.has('quality')) stream.quality = data.get('quality')
    if (data.has('user_agent')) stream.userAgent = data.get('user_agent')
    if (data.has('http_referrer')) stream.httpReferrer = data.get('http_referrer')
    if (data.has('channel_name')) stream.name = data.get('channel_name')

    streams.remove((_stream: Stream) => _stream.channel === stream.channel)
    streams.add(stream)

    processedIssues.add(data.get('issue_number'))
  })
}

async function addStreams(loader: IssueLoader) {
  const issues = await loader.load({ labels: ['streams:add', 'approved'] })
  issues.forEach((data: Dictionary) => {
    if (data.missing('channel_id') || data.missing('stream_url')) return
    if (streams.includes((_stream: Stream) => _stream.url === data.get('stream_url'))) return

    const channel = groupedChannels.get(data.get('channel_id'))

    if (!channel) return

    const stream = new Stream({
      channel: data.get('channel_id'),
      url: data.get('stream_url'),
      label: data.get('label'),
      quality: data.get('quality'),
      userAgent: data.get('user_agent'),
      httpReferrer: data.get('http_referrer'),
      filepath: `${channel.country.toLowerCase()}.m3u`,
      line: -1,
      name: data.get('channel_name') || channel.name
    })

    streams.add(stream)
    processedIssues.add(data.get('issue_number'))
  })
}

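For reference, a minimal sketch of the final `OUTPUT=` line this script prints; the issue numbers are hypothetical, and whatever workflow consumes the line is not part of this diff:

// If issues #123 and #456 were handled by removeStreams/editStreams/addStreams above,
// the last line written to stdout would be:
//   OUTPUT=closes #123, closes #456
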
@@ -1,106 +0,0 @@
const { file, logger, api, parser, id } = require('../../core')
const { program } = require('commander')
const chalk = require('chalk')
const _ = require('lodash')

program.argument('[filepath]', 'Path to file to validate').parse(process.argv)

async function main() {
  const files = program.args.length ? program.args : await file.list('streams/*.m3u')

  logger.info(`loading blocklist...`)
  await api.channels.load()
  await api.blocklist.load()

  let blocklist = await api.blocklist.all()
  blocklist = blocklist
    .map(blocked => {
      const channel = api.channels.find({ id: blocked.channel })
      if (!channel) return null
      return { ...blocked, name: channel.name }
    })
    .filter(i => i)
  logger.info(`found ${blocklist.length} records`)

  let errors = []
  let warnings = []
  for (const filepath of files) {
    if (!filepath.endsWith('.m3u')) continue

    const basename = file.basename(filepath)
    const [__, country] = basename.match(/([a-z]{2})(|_.*)\.m3u/i) || [null, null]

    const buffer = {}
    const fileLog = []
    try {
      const playlist = await parser.parsePlaylist(filepath)
      for (const item of playlist.items) {
        if (item.tvg.id && !api.channels.find({ id: item.tvg.id })) {
          fileLog.push({
            type: 'warning',
            line: item.line,
            message: `"${item.tvg.id}" is not in the database`
          })
        }

        if (item.url && buffer[item.url]) {
          fileLog.push({
            type: 'warning',
            line: item.line,
            message: `"${item.url}" is already on the playlist`
          })
        } else {
          buffer[item.url] = true
        }

        const channel_id = id.generate(item.name, country)
        const found = blocklist.find(
          blocked =>
            item.tvg.id.toLowerCase() === blocked.channel.toLowerCase() ||
            channel_id.toLowerCase() === blocked.channel.toLowerCase()
        )
        if (found) {
          fileLog.push({
            type: 'error',
            line: item.line,
            message: `"${found.name}" is on the blocklist due to claims of copyright holders (${found.ref})`
          })
        }
      }
    } catch (err) {
      fileLog.push({
        type: 'error',
        line: 0,
        message: err.message.toLowerCase()
      })
    }

    if (fileLog.length) {
      logger.info(`\n${chalk.underline(filepath)}`)

      fileLog.forEach(err => {
        const position = err.line.toString().padEnd(6, ' ')
        const type = err.type.padEnd(9, ' ')
        const status = err.type === 'error' ? chalk.red(type) : chalk.yellow(type)
        logger.info(` ${chalk.gray(position)}${status}${err.message}`)
      })

      errors = errors.concat(fileLog.filter(e => e.type === 'error'))
      warnings = warnings.concat(fileLog.filter(e => e.type === 'warning'))
    }
  }

  logger.error(
    chalk.red(
      `\n${errors.length + warnings.length} problems (${errors.length} errors, ${
        warnings.length
      } warnings)`
    )
  )

  if (errors.length) {
    process.exit(1)
  }
}

main()

scripts/commands/playlist/validate.ts (new file, 132 lines)
@@ -0,0 +1,132 @@
import { Logger, Storage, PlaylistParser, Collection, File, Dictionary } from '../../core'
import { Channel, Stream, Blocked } from '../../models'
import { program } from 'commander'
import chalk from 'chalk'
import { transliterate } from 'transliteration'
import _ from 'lodash'
import { DATA_DIR, STREAMS_DIR } from '../../constants'

program.argument('[filepath]', 'Path to file to validate').parse(process.argv)

type LogItem = {
  type: string
  line: number
  message: string
}

async function main() {
  const logger = new Logger()

  logger.info(`loading blocklist...`)
  const storage = new Storage(DATA_DIR)
  const channelsContent = await storage.json('channels.json')
  const channels = new Collection(channelsContent).map(data => new Channel(data))
  const blocklistContent = await storage.json('blocklist.json')
  const blocklist = new Collection(blocklistContent).map(data => new Blocked(data))

  logger.info(`found ${blocklist.count()} records`)

  let errors = new Collection()
  let warnings = new Collection()
  const streamsStorage = new Storage(STREAMS_DIR)
  const parser = new PlaylistParser({ storage: streamsStorage })
  const files = program.args.length ? program.args : await streamsStorage.list('**/*.m3u')
  for (const filepath of files) {
    const file = new File(filepath)
    if (file.extension() !== 'm3u') continue

    const [, countryCode] = file.basename().match(/([a-z]{2})(|_.*)\.m3u/i) || [null, '']

    const log = new Collection()
    const buffer = new Dictionary()
    try {
      const relativeFilepath = filepath.replace(STREAMS_DIR, '')
      const playlist = await parser.parse(relativeFilepath)
      playlist.streams.forEach((stream: Stream) => {
        const channelNotInDatabase =
          stream.channel && !channels.first((channel: Channel) => channel.id === stream.channel)
        if (channelNotInDatabase) {
          log.add({
            type: 'warning',
            line: stream.line,
            message: `"${stream.channel}" is not in the database`
          })
        }

        const alreadyOnPlaylist = stream.url && buffer.has(stream.url)
        if (alreadyOnPlaylist) {
          log.add({
            type: 'warning',
            line: stream.line,
            message: `"${stream.url}" is already on the playlist`
          })
        } else {
          buffer.set(stream.url, true)
        }

        const channelId = generateChannelId(stream.name, countryCode)
        const blocked = blocklist.first(
          blocked =>
            stream.channel.toLowerCase() === blocked.channel.toLowerCase() ||
            channelId.toLowerCase() === blocked.channel.toLowerCase()
        )
        if (blocked) {
          log.add({
            type: 'error',
            line: stream.line,
            message: `"${stream.name}" is on the blocklist due to claims of copyright holders (${blocked.ref})`
          })
        }
      })
    } catch (error) {
      log.add({
        type: 'error',
        line: 0,
        message: error.message.toLowerCase()
      })
    }

    if (log.notEmpty()) {
      logger.info(`\n${chalk.underline(filepath)}`)

      log.forEach((logItem: LogItem) => {
        const position = logItem.line.toString().padEnd(6, ' ')
        const type = logItem.type.padEnd(9, ' ')
        const status = logItem.type === 'error' ? chalk.red(type) : chalk.yellow(type)

        logger.info(` ${chalk.gray(position)}${status}${logItem.message}`)
      })

      errors = errors.concat(log.filter((logItem: LogItem) => logItem.type === 'error'))
      warnings = warnings.concat(log.filter((logItem: LogItem) => logItem.type === 'warning'))
    }
  }

  logger.error(
    chalk.red(
      `\n${
        errors.count() + warnings.count()
      } problems (${errors.count()} errors, ${warnings.count()} warnings)`
    )
  )

  if (errors.count()) {
    process.exit(1)
  }
}

main()

function generateChannelId(name: string, code: string) {
  if (!name || !code) return ''

  name = name.replace(/ *\([^)]*\) */g, '')
  name = name.replace(/ *\[[^)]*\] */g, '')
  name = name.replace(/\+/gi, 'Plus')
  name = name.replace(/[^a-z\d]+/gi, '')
  name = name.trim()
  name = transliterate(name)
  code = code.toLowerCase()

  return `${name}.${code}`
}

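For orientation, a small sketch of what the generateChannelId() normalization above yields; the sample channel names are hypothetical:

// generateChannelId('CNN International (Europe)', 'US')  // -> 'CNNInternational.us'
// generateChannelId('Kanal 7+', 'TR')                    // -> 'Kanal7Plus.tr'
// generateChannelId('', 'US')                            // -> ''
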
@@ -1,143 +0,0 @@
const { file, markdown, parser, logger, api } = require('../../core')
const { create: createTable } = require('../../core/table')
const { program } = require('commander')

const LOGS_DIR = process.env.LOGS_DIR || 'scripts/tmp/logs/generators'

const options = program
  .option('-c, --config <config>', 'Set path to config file', '.readme/config.json')
  .parse(process.argv)
  .opts()

async function main() {
  await createCategoryTable()
  await createCountryTable()
  await createLanguageTable()
  await createRegionTable()
  await updateReadme()
}

main()

async function createCategoryTable() {
  logger.info('creating category table...')
  const rows = []
  await api.categories.load()
  const items = await parser.parseLogs(`${LOGS_DIR}/categories.log`)
  for (const item of items) {
    const id = file.getFilename(item.filepath)
    const category = await api.categories.find({ id })
    rows.push({
      name: category ? category.name : 'Undefined',
      channels: item.count,
      playlist: `<code>https://iptv-org.github.io/iptv/${item.filepath}</code>`
    })
  }

  const table = createTable(rows, [
    { name: 'Category' },
    { name: 'Channels', align: 'right' },
    { name: 'Playlist', nowrap: true }
  ])

  await file.create('./.readme/_categories.md', table)
}

async function createCountryTable() {
  logger.info('creating country table...')
  const rows = []
  await api.countries.load()
  await api.subdivisions.load()
  const items = await parser.parseLogs(`${LOGS_DIR}/countries.log`)
  for (const item of items) {
    const code = file.getFilename(item.filepath)
    const country = await api.countries.find({ code: code.toUpperCase() })
    if (country) {
      rows.push({
        name: `${country.flag} ${country.name}`,
        channels: item.count,
        playlist: `<code>https://iptv-org.github.io/iptv/${item.filepath}</code>`
      })
    } else if (code === 'int') {
      rows.push({
        name: `🌍 International`,
        channels: item.count,
        playlist: `<code>https://iptv-org.github.io/iptv/${item.filepath}</code>`
      })
    } else {
      const subdivision = await api.subdivisions.find({ code: code.toUpperCase() })
      if (subdivision) {
        rows.push({
          name: ` ${subdivision.name}`,
          channels: item.count,
          playlist: `<code>https://iptv-org.github.io/iptv/${item.filepath}</code>`
        })
      }
    }
  }

  const table = createTable(rows, [
    { name: 'Country' },
    { name: 'Channels', align: 'right' },
    { name: 'Playlist', nowrap: true }
  ])

  await file.create('./.readme/_countries.md', table)
}

async function createLanguageTable() {
  logger.info('creating language table...')
  const rows = []
  await api.languages.load()
  const items = await parser.parseLogs(`${LOGS_DIR}/languages.log`)
  for (const item of items) {
    const code = file.getFilename(item.filepath)
    const language = await api.languages.find({ code })
    rows.push({
      name: language ? language.name : 'Undefined',
      channels: item.count,
      playlist: `<code>https://iptv-org.github.io/iptv/${item.filepath}</code>`
    })
  }

  const table = createTable(rows, [
    { name: 'Language', align: 'left' },
    { name: 'Channels', align: 'right' },
    { name: 'Playlist', align: 'left', nowrap: true }
  ])

  await file.create('./.readme/_languages.md', table)
}

async function createRegionTable() {
  logger.info('creating region table...')
  const rows = []
  await api.regions.load()
  const items = await parser.parseLogs(`${LOGS_DIR}/regions.log`)
  for (const item of items) {
    const code = file.getFilename(item.filepath)
    const region = await api.regions.find({ code: code.toUpperCase() })
    if (region) {
      rows.push({
        name: region.name,
        channels: item.count,
        playlist: `<code>https://iptv-org.github.io/iptv/${item.filepath}</code>`
      })
    }
  }

  const table = createTable(rows, [
    { name: 'Region', align: 'left' },
    { name: 'Channels', align: 'right' },
    { name: 'Playlist', align: 'left', nowrap: true }
  ])

  await file.create('./.readme/_regions.md', table)
}

async function updateReadme() {
  logger.info('updating readme.md...')
  const config = require(file.resolve(options.config))
  await file.createDir(file.dirname(config.build))
  await markdown.compile(options.config)
}

scripts/commands/readme/update.ts (new file, 24 lines)
@@ -0,0 +1,24 @@
import { CategoryTable, CountryTable, LanguageTable, RegionTable } from '../../tables'
import { Logger, Markdown } from '../../core'
import { README_DIR } from '../../constants'
import path from 'path'

async function main() {
  const logger = new Logger()

  logger.info('creating category table...')
  await new CategoryTable().make()
  logger.info('creating country table...')
  await new CountryTable().make()
  logger.info('creating language table...')
  await new LanguageTable().make()
  logger.info('creating region table...')
  await new RegionTable().make()

  logger.info('updating readme.md...')
  const configPath = path.join(README_DIR, 'config.json')
  const readme = new Markdown(configPath)
  readme.compile()
}

main()

@@ -1,106 +0,0 @@
const { api } = require('../../core')
const { Octokit } = require('@octokit/core')
const { paginateRest } = require('@octokit/plugin-paginate-rest')
const CustomOctokit = Octokit.plugin(paginateRest)
const _ = require('lodash')

const octokit = new CustomOctokit()

const DATA_DIR = process.env.DATA_DIR || './tmp/data'
const OWNER = 'iptv-org'
const REPO = 'iptv'

async function main() {
  try {
    await api.channels.load()
    let channels = await api.channels.all()
    channels = _.keyBy(channels, 'id')

    await api.blocklist.load()
    let blocklist = await api.blocklist.all()
    blocklist = _.keyBy(blocklist, 'channel')

    await api.streams.load()
    let streams = await api.streams.all()
    streams = _.keyBy(streams, 'channel')

    const channelRequests = await loadChannelRequests()
    const buffer = {}
    const report = channelRequests.map(r => {
      let result = {
        issueNumber: r.issue.number,
        channelId: r.channel.id || undefined,
        status: undefined
      }

      if (!r.channel || !r.channel.id) result.status = 'error'
      else if (blocklist[r.channel.id]) result.status = 'blocked'
      else if (!channels[r.channel.id]) result.status = 'invalid_id'
      else if (streams[r.channel.id]) result.status = 'fullfilled'
      else if (buffer[r.channel.id] && !r.channel.url) result.status = 'duplicate'
      else result.status = 'pending'

      buffer[r.channel.id] = true

      return result
    })
    console.table(report)
  } catch (err) {
    console.log(err.message)
  }
}

main()

async function loadChannelRequests() {
  const issues = await fetchIssues('channel request')

  return issues.map(parseIssue)
}

async function fetchIssues(labels) {
  const issues = await octokit.paginate('GET /repos/{owner}/{repo}/issues', {
    owner: OWNER,
    repo: REPO,
    per_page: 100,
    labels,
    direction: 'asc',
    headers: {
      'X-GitHub-Api-Version': '2022-11-28'
    }
  })

  return issues
}

function parseIssue(issue) {
  const buffer = {}
  const channel = {}
  const fields = {
    'Channel ID (required)': 'id',
    'Channel ID': 'id',
    'Stream URL (optional)': 'url',
    'Stream URL': 'url',
    'Notes (optional)': 'notes',
    Notes: 'notes'
  }

  const matches = issue.body.match(/### ([^\r\n]+)\s+([^\r\n]+)/g)

  if (!matches) return { issue, channel: null }

  matches.forEach(item => {
    const [, fieldLabel, value] = item.match(/### ([^\r\n]+)\s+([^\r\n]+)/)
    const field = fields[fieldLabel]

    if (!field) return

    buffer[field] = value === '_No response_' ? undefined : value.trim()
  })

  for (let field in buffer) {
    channel[field] = buffer[field]
  }

  return { issue, channel }
}

scripts/commands/report/create.ts (new file, 53 lines)
@@ -0,0 +1,53 @@
import { DATA_DIR } from '../../constants'
import { Collection, Dictionary, IssueLoader, Storage } from '../../core'
import { Blocked, Channel, Stream } from '../../models'

async function main() {
  const loader = new IssueLoader()

  const storage = new Storage(DATA_DIR)

  const channelsContent = await storage.json('channels.json')
  const groupedChannels = new Collection(channelsContent)
    .map(data => new Channel(data))
    .groupBy((channel: Channel) => channel.id)

  const streamsContent = await storage.json('streams.json')
  const groupedStreams = new Collection(streamsContent)
    .map(data => new Stream(data))
    .groupBy((stream: Stream) => stream.url)

  const blocklistContent = await storage.json('blocklist.json')
  const groupedBlocklist = new Collection(blocklistContent)
    .map(data => new Blocked(data))
    .groupBy((blocked: Blocked) => blocked.channel)

  const issues = await loader.load({ labels: ['streams:add'] })

  const buffer = new Dictionary()
  const report = issues.map(data => {
    const channelId = data.get('channel_id') || undefined
    const streamUrl = data.get('stream_url') || undefined

    const result = new Dictionary({
      issueNumber: data.get('issue_number'),
      channelId,
      status: undefined
    })

    if (!channelId || !streamUrl) result.set('status', 'error')
    else if (groupedBlocklist.has(channelId)) result.set('status', 'blocked')
    else if (groupedChannels.missing(channelId)) result.set('status', 'invalid_id')
    else if (groupedStreams.has(streamUrl)) result.set('status', 'fullfilled')
    else if (buffer.has(streamUrl)) result.set('status', 'duplicate')
    else result.set('status', 'pending')

    buffer.set(streamUrl, true)

    return result.data()
  })

  console.table(report.all())
}

main()

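Each row passed to console.table above is the plain object behind a Dictionary; two possible rows might look like this (the issue numbers and ids are made up):

// { issueNumber: 1234, channelId: 'Example.us', status: 'pending' }
// { issueNumber: 1235, channelId: undefined, status: 'error' }
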
scripts/constants.ts (new file, 11 lines)
@@ -0,0 +1,11 @@
export const ROOT_DIR = process.env.ROOT_DIR || './'
export const STREAMS_DIR = process.env.STREAMS_DIR || './streams'
export const PUBLIC_DIR = process.env.PUBLIC_DIR || './.gh-pages'
export const README_DIR = process.env.README_DIR || './.readme'
export const API_DIR = process.env.API_DIR || './.api'
export const DATA_DIR = process.env.DATA_DIR || './temp/data'
export const LOGS_DIR = process.env.LOGS_DIR || './temp/logs'
export const DB_DIR = process.env.DB_DIR || './temp/database'
export const TESTING = process.env.NODE_ENV === 'test' ? true : false
export const OWNER = 'iptv-org'
export const REPO = 'iptv'

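A minimal sketch of how these constants behave when no environment overrides are set (the relative import path below assumes a module inside scripts/):

import { DATA_DIR, DB_DIR, TESTING } from './constants'

// With no environment variables exported, the defaults above apply:
console.log(DATA_DIR) // './temp/data'
console.log(DB_DIR)   // './temp/database'
console.log(TESTING)  // false unless NODE_ENV === 'test'

// Exporting e.g. DATA_DIR=/tmp/data before running a command changes the resolved value accordingly.
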
@@ -1,41 +0,0 @@
const _ = require('lodash')
const file = require('./file')

const DATA_DIR = process.env.DATA_DIR || './scripts/tmp/data'

class API {
  constructor(filepath) {
    this.filepath = file.resolve(filepath)
  }

  async load() {
    const data = await file.read(this.filepath)
    this.collection = JSON.parse(data)
  }

  find(query) {
    return _.find(this.collection, query)
  }

  filter(query) {
    return _.filter(this.collection, query)
  }

  all() {
    return this.collection
  }
}

const api = {}

api.channels = new API(`${DATA_DIR}/channels.json`)
api.streams = new API(`${DATA_DIR}/streams.json`)
api.countries = new API(`${DATA_DIR}/countries.json`)
api.guides = new API(`${DATA_DIR}/guides.json`)
api.categories = new API(`${DATA_DIR}/categories.json`)
api.languages = new API(`${DATA_DIR}/languages.json`)
api.regions = new API(`${DATA_DIR}/regions.json`)
api.blocklist = new API(`${DATA_DIR}/blocklist.json`)
api.subdivisions = new API(`${DATA_DIR}/subdivisions.json`)

module.exports = api

scripts/core/collection.ts (new file, 175 lines)
@@ -0,0 +1,175 @@
import _ from 'lodash'
import { orderBy, Order } from 'natural-orderby'
import { Dictionary } from './'

type Iteratee = (value: any, value2?: any) => void

export class Collection {
  _items: any[]

  constructor(items?: any[]) {
    this._items = Array.isArray(items) ? items : []
  }

  first(predicate?: Iteratee) {
    if (predicate) {
      return this._items.find(predicate)
    }

    return this._items[0]
  }

  last(predicate?: Iteratee) {
    if (predicate) {
      return _.findLast(this._items, predicate)
    }

    return this._items[this._items.length - 1]
  }

  find(iteratee: Iteratee): Collection {
    const found = this._items.filter(iteratee)

    return new Collection(found)
  }

  add(data: any) {
    this._items.push(data)

    return this
  }

  intersects(collection: Collection): boolean {
    return _.intersection(this._items, collection.all()).length > 0
  }

  count() {
    return this._items.length
  }

  join(separator: string) {
    return this._items.join(separator)
  }

  indexOf(value: string) {
    return this._items.indexOf(value)
  }

  push(data: any) {
    this.add(data)
  }

  uniq() {
    const items = _.uniq(this._items)

    return new Collection(items)
  }

  reduce(iteratee: Iteratee, accumulator: any) {
    const items = _.reduce(this._items, iteratee, accumulator)

    return new Collection(items)
  }

  filter(iteratee: Iteratee) {
    const items = _.filter(this._items, iteratee)

    return new Collection(items)
  }

  forEach(iteratee: Iteratee) {
    for (let item of this._items) {
      iteratee(item)
    }

    return this
  }

  remove(iteratee: Iteratee): Collection {
    const removed = _.remove(this._items, iteratee)

    return new Collection(removed)
  }

  concat(collection: Collection) {
    const items = this._items.concat(collection._items)

    return new Collection(items)
  }

  isEmpty(): boolean {
    return this._items.length === 0
  }

  notEmpty(): boolean {
    return this._items.length > 0
  }

  sort() {
    const items = this._items.sort()

    return new Collection(items)
  }

  orderBy(iteratees: Iteratee | Iteratee[], orders?: Order | Order[]) {
    const items = orderBy(this._items, iteratees, orders)

    return new Collection(items)
  }

  keyBy(iteratee: Iteratee) {
    const items = _.keyBy(this._items, iteratee)

    return new Dictionary(items)
  }

  empty() {
    return this._items.length === 0
  }

  includes(value: any) {
    if (typeof value === 'function') {
      const found = this._items.find(value)

      return !!found
    }

    return this._items.includes(value)
  }

  missing(value: any) {
    if (typeof value === 'function') {
      const found = this._items.find(value)

      return !found
    }

    return !this._items.includes(value)
  }

  uniqBy(iteratee: Iteratee) {
    const items = _.uniqBy(this._items, iteratee)

    return new Collection(items)
  }

  groupBy(iteratee: Iteratee) {
    const object = _.groupBy(this._items, iteratee)

    return new Dictionary(object)
  }

  map(iteratee: Iteratee) {
    const items = this._items.map(iteratee)

    return new Collection(items)
  }

  all() {
    return this._items
  }

  toJSON() {
    return JSON.stringify(this._items)
  }
}

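A brief usage sketch of the Collection class above; the sample items and URLs are hypothetical:

import { Collection } from './collection'

const items = [
  { channel: 'ChannelB.us', url: 'https://example.com/b.m3u8' },
  { channel: 'ChannelA.us', url: 'https://example.com/a.m3u8' }
]

const channels = new Collection(items)
  .orderBy(item => item.channel) // natural ordering via natural-orderby
  .map(item => item.channel)     // Collection(['ChannelA.us', 'ChannelB.us'])

channels.count()                 // 2
channels.first()                 // 'ChannelA.us'
channels.includes('ChannelB.us') // true
channels.toJSON()                // '["ChannelA.us","ChannelB.us"]'
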
scripts/core/database.ts (new file, 22 lines)
@@ -0,0 +1,22 @@
import Datastore from '@seald-io/nedb'
import * as path from 'path'

export class Database {
  rootDir: string

  constructor(rootDir: string) {
    this.rootDir = rootDir
  }

  async load(filepath: string) {
    const absFilepath = path.join(this.rootDir, filepath)

    return new Datastore({
      filename: path.resolve(absFilepath),
      autoload: true,
      onload: (error: Error): any => {
        if (error) console.error(error.message)
      }
    })
  }
}

@@ -1,12 +0,0 @@
const dayjs = require('dayjs')
const utc = require('dayjs/plugin/utc')

dayjs.extend(utc)

const date = {}

date.utc = d => {
  return dayjs.utc(d)
}

module.exports = date

@@ -1,82 +0,0 @@
const nedb = require('nedb-promises')
const fs = require('fs-extra')
const file = require('./file')

const DB_DIR = process.env.DB_DIR || './scripts/tmp/database'

fs.ensureDirSync(DB_DIR)

class Database {
  constructor(filepath) {
    this.filepath = filepath
  }

  load() {
    this.db = nedb.create({
      filename: file.resolve(this.filepath),
      autoload: true,
      onload: err => {
        if (err) console.error(err)
      },
      compareStrings: (a, b) => {
        a = a.replace(/\s/g, '_')
        b = b.replace(/\s/g, '_')

        return a.localeCompare(b, undefined, {
          sensitivity: 'accent',
          numeric: true
        })
      }
    })
  }

  removeIndex(field) {
    return this.db.removeIndex(field)
  }

  addIndex(options) {
    return this.db.ensureIndex(options)
  }

  compact() {
    return this.db.persistence.compactDatafile()
  }

  stopAutocompact() {
    return this.db.persistence.stopAutocompaction()
  }

  reset() {
    return file.clear(this.filepath)
  }

  count(query) {
    return this.db.count(query)
  }

  insert(doc) {
    return this.db.insert(doc)
  }

  update(query, update) {
    return this.db.update(query, update)
  }

  find(query) {
    return this.db.find(query)
  }

  all() {
    return this.find({})
  }

  remove(query, options) {
    return this.db.remove(query, options)
  }
}

const db = {}

db.streams = new Database(`${DB_DIR}/streams.db`)

module.exports = db

scripts/core/dictionary.ts (new file, 31 lines)
@@ -0,0 +1,31 @@
export class Dictionary {
  dict: any

  constructor(dict?: any) {
    this.dict = dict || {}
  }

  set(key: string, value: any) {
    this.dict[key] = value
  }

  has(key: string): boolean {
    return !!this.dict[key]
  }

  missing(key: string): boolean {
    return !this.dict[key]
  }

  get(key: string): any {
    return this.dict[key] ? this.dict[key] : undefined
  }

  keys(): string[] {
    return Object.keys(this.dict)
  }

  data() {
    return this.dict
  }
}

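A short usage sketch of the Dictionary wrapper above; the keys and values are hypothetical:

import { Dictionary } from './dictionary'

const meta = new Dictionary({ quality: '720p' })
meta.set('label', 'Geo-blocked')

meta.has('quality')    // true
meta.missing('label')  // false
meta.get('user_agent') // undefined
meta.keys()            // ['quality', 'label']
meta.data()            // { quality: '720p', label: 'Geo-blocked' }
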
@@ -1,70 +0,0 @@
const { create: createPlaylist } = require('./playlist')
const store = require('./store')
const path = require('path')
const glob = require('glob')
const fs = require('fs-extra')
const _ = require('lodash')

const file = {}

file.list = function (pattern) {
  return new Promise(resolve => {
    glob(pattern, function (err, files) {
      resolve(files)
    })
  })
}

file.getFilename = function (filepath) {
  return path.parse(filepath).name
}

file.createDir = async function (dir) {
  if (await file.exists(dir)) return

  return fs.mkdir(dir, { recursive: true }).catch(console.error)
}

file.exists = function (filepath) {
  return fs.exists(path.resolve(filepath))
}

file.read = function (filepath) {
  return fs.readFile(path.resolve(filepath), { encoding: 'utf8' }).catch(console.error)
}

file.append = function (filepath, data) {
  return fs.appendFile(path.resolve(filepath), data).catch(console.error)
}

file.create = function (filepath, data = '') {
  filepath = path.resolve(filepath)
  const dir = path.dirname(filepath)

  return file
    .createDir(dir)
    .then(() => fs.writeFile(filepath, data, { encoding: 'utf8', flag: 'w' }))
    .catch(console.error)
}

file.write = function (filepath, data = '') {
  return fs.writeFile(path.resolve(filepath), data).catch(console.error)
}

file.clear = function (filepath) {
  return file.write(filepath, '')
}

file.resolve = function (filepath) {
  return path.resolve(filepath)
}

file.dirname = function (filepath) {
  return path.dirname(filepath)
}

file.basename = function (filepath) {
  return path.basename(filepath)
}

module.exports = file

scripts/core/file.ts (new file, 31 lines)
@@ -0,0 +1,31 @@
import * as path from 'path'

export class File {
  filepath: string
  content: string

  constructor(filepath: string, content?: string) {
    this.filepath = filepath
    this.content = content || ''
  }

  getFilename() {
    return path.parse(this.filepath).name
  }

  dirname() {
    return path.dirname(this.filepath)
  }

  basename() {
    return path.basename(this.filepath)
  }

  append(data: string) {
    this.content = this.content + data
  }

  extension() {
    return this.filepath.split('.').pop()
  }
}

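A quick sketch of the File helper above against a hypothetical playlist path:

import { File } from './file'

const file = new File('streams/us_samplecity.m3u')

file.getFilename() // 'us_samplecity'
file.basename()    // 'us_samplecity.m3u'
file.dirname()     // 'streams'
file.extension()   // 'm3u'
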
@@ -1,33 +0,0 @@
const { create: createPlaylist } = require('./playlist')
const generators = require('../generators')
const logger = require('./logger')
const file = require('./file')

const PUBLIC_DIR = process.env.PUBLIC_DIR || '.gh-pages'
const LOGS_DIR = process.env.LOGS_DIR || 'scripts/tmp/logs/generators'

const generator = {}

generator.generate = async function (name, streams = []) {
  if (typeof generators[name] === 'function') {
    try {
      let output = await generators[name].bind()(streams)
      output = Array.isArray(output) ? output : [output]
      for (const type of output) {
        const playlist = createPlaylist(type.items, { public: true })
        await file.create(`${PUBLIC_DIR}/${type.filepath}`, playlist.toString())
      }
      await file.create(`${LOGS_DIR}/${name}.log`, output.map(toJSON).join('\n'))
    } catch (error) {
      logger.error(`generators/${name}.js: ${error.message}`)
    }
  }
}

module.exports = generator

function toJSON(type) {
  type.count = type.items.length
  delete type.items
  return JSON.stringify(type)
}

scripts/core/htmlTable.ts (new file, 46 lines)
@@ -0,0 +1,46 @@
type Column = {
  name: string
  nowrap?: boolean
  align?: string
}

type DataItem = string[]

export class HTMLTable {
  data: DataItem[]
  columns: Column[]

  constructor(data: DataItem[], columns: Column[]) {
    this.data = data
    this.columns = columns
  }

  toString() {
    let output = '<table>\n'

    output += ' <thead>\n <tr>'
    for (let column of this.columns) {
      output += `<th align="left">${column.name}</th>`
    }
    output += '</tr>\n </thead>\n'

    output += ' <tbody>\n'
    for (let item of this.data) {
      output += ' <tr>'
      let i = 0
      for (let prop in item) {
        const column = this.columns[i]
        let nowrap = column.nowrap ? ` nowrap` : ''
        let align = column.align ? ` align="${column.align}"` : ''
        output += `<td${align}${nowrap}>${item[prop]}</td>`
        i++
      }
      output += '</tr>\n'
    }
    output += ' </tbody>\n'

    output += '</table>'

    return output
  }
}

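A minimal usage sketch of the HTMLTable class above; the column set and row values are hypothetical:

import { HTMLTable } from './htmlTable'

const columns = [{ name: 'Category' }, { name: 'Channels', align: 'right' }]
const rows = [
  ['News', '120'],
  ['Sports', '85']
]

// toString() renders a <table> with one <th align="left"> per column and one <td> per cell,
// applying the optional align/nowrap attributes to the body cells.
console.log(new HTMLTable(rows, columns).toString())
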
@@ -1,19 +0,0 @@
const { transliterate } = require('transliteration')

const id = {}

id.generate = function (name, code) {
  if (!name || !code) return null

  name = name.replace(/ *\([^)]*\) */g, '')
  name = name.replace(/ *\[[^)]*\] */g, '')
  name = name.replace(/\+/gi, 'Plus')
  name = name.replace(/[^a-z\d]+/gi, '')
  name = name.trim()
  name = transliterate(name)
  code = code.toLowerCase()

  return `${name}.${code}`
}

module.exports = id

@@ -1,14 +0,0 @@
exports.db = require('./db')
exports.logger = require('./logger')
exports.file = require('./file')
exports.timer = require('./timer')
exports.parser = require('./parser')
exports.checker = require('./checker')
exports.generator = require('./generator')
exports.playlist = require('./playlist')
exports.store = require('./store')
exports.markdown = require('./markdown')
exports.api = require('./api')
exports.id = require('./id')
exports.m3u = require('./m3u')
exports.date = require('./date')

14
scripts/core/index.ts
Normal file
|
@ -0,0 +1,14 @@
|
|||
export * from './database'
|
||||
export * from './logger'
|
||||
export * from './playlistParser'
|
||||
export * from './numberParser'
|
||||
export * from './logParser'
|
||||
export * from './markdown'
|
||||
export * from './file'
|
||||
export * from './collection'
|
||||
export * from './dictionary'
|
||||
export * from './storage'
|
||||
export * from './url'
|
||||
export * from './issueLoader'
|
||||
export * from './issueParser'
|
||||
export * from './htmlTable'
|
46
scripts/core/issueLoader.ts
Normal file
|
@ -0,0 +1,46 @@
|
|||
import { restEndpointMethods } from '@octokit/plugin-rest-endpoint-methods'
|
||||
import { paginateRest } from '@octokit/plugin-paginate-rest'
|
||||
import { Octokit } from '@octokit/core'
|
||||
import { Collection, IssueParser } from './'
|
||||
import { TESTING, OWNER, REPO } from '../constants'
|
||||
|
||||
const CustomOctokit = Octokit.plugin(paginateRest, restEndpointMethods)
|
||||
const octokit = new CustomOctokit()
|
||||
|
||||
export class IssueLoader {
|
||||
async load({ labels }: { labels: string[] | string }) {
|
||||
labels = Array.isArray(labels) ? labels.join(',') : labels
|
||||
let issues: any[] = []
|
||||
if (TESTING) {
|
||||
switch (labels) {
|
||||
case 'streams:add':
|
||||
issues = (await import('../../tests/__data__/input/issues/streams_add')).default
|
||||
break
|
||||
case 'streams:add,approved':
|
||||
issues = (await import('../../tests/__data__/input/issues/streams_add_approved')).default
|
||||
break
|
||||
case 'streams:edit,approved':
|
||||
issues = (await import('../../tests/__data__/input/issues/streams_edit_approved')).default
|
||||
break
|
||||
case 'streams:remove,approved':
|
||||
issues = (await import('../../tests/__data__/input/issues/streams_remove_approved'))
|
||||
.default
|
||||
break
|
||||
}
|
||||
} else {
|
||||
issues = await octokit.paginate(octokit.rest.issues.listForRepo, {
|
||||
owner: OWNER,
|
||||
repo: REPO,
|
||||
per_page: 100,
|
||||
labels,
|
||||
headers: {
|
||||
'X-GitHub-Api-Version': '2022-11-28'
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
const parser = new IssueParser()
|
||||
|
||||
return new Collection(issues).map(parser.parse)
|
||||
}
|
||||
}
|
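A hedged sketch of how the loader might be called. Outside of TESTING it pages through the repository's issues via an Octokit client that, as written, is constructed without auth, so real runs are subject to GitHub's unauthenticated rate limits; the label combination below matches one of the test fixtures:

import { IssueLoader } from '../core'

async function loadApprovedAdditions() {
  const loader = new IssueLoader()

  // Labels may be passed as an array or as a comma-joined string.
  const issues = await loader.load({ labels: ['streams:add', 'approved'] })

  // Each entry is a Dictionary produced by IssueParser.parse().
  issues.forEach((issue: any) => {
    console.log(issue.get('issue_number'), issue.get('stream_url'))
  })
}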
48
scripts/core/issueParser.ts
Normal file
|
@ -0,0 +1,48 @@
|
|||
import { Dictionary } from './'
|
||||
|
||||
export class IssueParser {
|
||||
parse(issue: any): Dictionary {
|
||||
const data = new Dictionary()
|
||||
data.set('issue_number', issue.number)
|
||||
|
||||
const idDict = new Dictionary({
|
||||
'Channel ID': 'channel_id',
|
||||
'Channel ID (required)': 'channel_id',
|
||||
'Broken Link': 'stream_url',
|
||||
'Stream URL': 'stream_url',
|
||||
'Stream URL (optional)': 'stream_url',
|
||||
'Stream URL (required)': 'stream_url',
|
||||
Label: 'label',
|
||||
Quality: 'quality',
|
||||
'Channel Name': 'channel_name',
|
||||
'HTTP User-Agent': 'user_agent',
|
||||
'HTTP Referrer': 'http_referrer',
|
||||
Reason: 'reason',
|
||||
'What happened to the stream?': 'reason',
|
||||
'Possible Replacement (optional)': 'possible_replacement',
|
||||
Notes: 'notes',
|
||||
'Notes (optional)': 'notes'
|
||||
})
|
||||
|
||||
const fields = issue.body.split('###')
|
||||
|
||||
if (!fields.length) return data
|
||||
|
||||
fields.forEach((field: string) => {
|
||||
let [_label, , _value] = field.split(/\r?\n/)
|
||||
_label = _label ? _label.trim() : ''
|
||||
_value = _value ? _value.trim() : ''
|
||||
|
||||
if (!_label || !_value) return data
|
||||
|
||||
const id: string = idDict.get(_label)
|
||||
const value: string = _value === '_No response_' || _value === 'None' ? '' : _value
|
||||
|
||||
if (!id) return
|
||||
|
||||
data.set(id, value)
|
||||
})
|
||||
|
||||
return data
|
||||
}
|
||||
}
|
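The parser assumes GitHub's issue-form layout: each field starts with a '###' heading, followed by a blank line and then the value. A small worked example (the issue number and values are invented):

import { IssueParser } from '../core'

const issue = {
  number: 12345, // hypothetical issue number
  body: '### Channel ID\n\nExampleTV.us\n\n### Stream URL\n\nhttps://example.com/stream.m3u8\n'
}

const data = new IssueParser().parse(issue)

data.get('issue_number') // 12345
data.get('channel_id')   // 'ExampleTV.us'
data.get('stream_url')   // 'https://example.com/stream.m3u8'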
13
scripts/core/logParser.ts
Normal file
|
@ -0,0 +1,13 @@
|
|||
export type LogItem = {
|
||||
filepath: string
|
||||
count: number
|
||||
}
|
||||
|
||||
export class LogParser {
|
||||
parse(content: string): any[] {
|
||||
if (!content) return []
|
||||
const lines = content.split('\n')
|
||||
|
||||
return lines.map(line => (line ? JSON.parse(line) : null)).filter(l => l)
|
||||
}
|
||||
}
|
|
@ -1,13 +0,0 @@
|
|||
const { Signale } = require('signale')
|
||||
|
||||
const options = {}
|
||||
|
||||
const logger = new Signale(options)
|
||||
|
||||
logger.config({
|
||||
displayLabel: false,
|
||||
displayScope: false,
|
||||
displayBadge: false
|
||||
})
|
||||
|
||||
module.exports = logger
|
9
scripts/core/logger.ts
Normal file
|
@ -0,0 +1,9 @@
|
|||
import signale from 'signale'
|
||||
|
||||
const { Signale } = signale
|
||||
|
||||
export class Logger extends Signale {
|
||||
constructor(options?: any) {
|
||||
super(options)
|
||||
}
|
||||
}
|
|
@ -1,34 +0,0 @@
|
|||
const m3u = {}
|
||||
|
||||
m3u.create = function (links = [], header = {}) {
|
||||
let output = `#EXTM3U`
|
||||
for (const attr in header) {
|
||||
const value = header[attr]
|
||||
output += ` ${attr}="${value}"`
|
||||
}
|
||||
output += `\n`
|
||||
|
||||
for (const link of links) {
|
||||
output += `#EXTINF:-1`
|
||||
for (const name in link.attrs) {
|
||||
const value = link.attrs[name]
|
||||
if (value !== undefined) {
|
||||
output += ` ${name}="${value}"`
|
||||
}
|
||||
}
|
||||
output += `,${link.title}\n`
|
||||
|
||||
for (const name in link.vlcOpts) {
|
||||
const value = link.vlcOpts[name]
|
||||
if (value !== undefined) {
|
||||
output += `#EXTVLCOPT:${name}=${value}\n`
|
||||
}
|
||||
}
|
||||
|
||||
output += `${link.url}\n`
|
||||
}
|
||||
|
||||
return output
|
||||
}
|
||||
|
||||
module.exports = m3u
|
|
@ -1,10 +0,0 @@
|
|||
const markdownInclude = require('markdown-include')
|
||||
const file = require('./file')
|
||||
|
||||
const markdown = {}
|
||||
|
||||
markdown.compile = function (filepath) {
|
||||
markdownInclude.compileFiles(file.resolve(filepath))
|
||||
}
|
||||
|
||||
module.exports = markdown
|
13
scripts/core/markdown.ts
Normal file
|
@ -0,0 +1,13 @@
|
|||
import markdownInclude from 'markdown-include'
|
||||
|
||||
export class Markdown {
|
||||
filepath: string
|
||||
|
||||
constructor(filepath: string) {
|
||||
this.filepath = filepath
|
||||
}
|
||||
|
||||
compile() {
|
||||
markdownInclude.compileFiles(this.filepath)
|
||||
}
|
||||
}
|
10
scripts/core/numberParser.ts
Normal file
|
@ -0,0 +1,10 @@
|
|||
export default class NumberParser {
|
||||
async parse(number: string) {
|
||||
const parsed = parseInt(number)
|
||||
if (isNaN(parsed)) {
|
||||
throw new Error('numberParser:parse() Input value is not a number')
|
||||
}
|
||||
|
||||
return parsed
|
||||
}
|
||||
}
|
|
@ -1,30 +0,0 @@
|
|||
const ipp = require('iptv-playlist-parser')
|
||||
const logger = require('./logger')
|
||||
const file = require('./file')
|
||||
|
||||
const parser = {}
|
||||
|
||||
parser.parsePlaylist = async function (filepath) {
|
||||
const content = await file.read(filepath)
|
||||
|
||||
return ipp.parse(content)
|
||||
}
|
||||
|
||||
parser.parseLogs = async function (filepath) {
|
||||
const content = await file.read(filepath)
|
||||
if (!content) return []
|
||||
const lines = content.split('\n')
|
||||
|
||||
return lines.map(line => (line ? JSON.parse(line) : null)).filter(l => l)
|
||||
}
|
||||
|
||||
parser.parseNumber = function (string) {
|
||||
const parsed = parseInt(string)
|
||||
if (isNaN(parsed)) {
|
||||
throw new Error('scripts/core/parser.js:parseNumber() Input value is not a number')
|
||||
}
|
||||
|
||||
return parsed
|
||||
}
|
||||
|
||||
module.exports = parser
|
|
@ -1,53 +0,0 @@
|
|||
const store = require('./store')
|
||||
const m3u = require('./m3u')
|
||||
const _ = require('lodash')
|
||||
|
||||
const playlist = {}
|
||||
|
||||
class Playlist {
|
||||
constructor(items = [], options = {}) {
|
||||
this.header = {}
|
||||
|
||||
this.links = []
|
||||
for (const item of items) {
|
||||
const stream = store.create(item)
|
||||
|
||||
let attrs
|
||||
if (options.public) {
|
||||
attrs = {
|
||||
'tvg-id': stream.get('tvg_id'),
|
||||
'tvg-logo': stream.get('tvg_logo'),
|
||||
'group-title': stream.get('group_title'),
|
||||
'user-agent': stream.get('user_agent') || undefined
|
||||
}
|
||||
} else {
|
||||
attrs = {
|
||||
'tvg-id': stream.get('tvg_id'),
|
||||
'user-agent': stream.get('user_agent') || undefined
|
||||
}
|
||||
}
|
||||
|
||||
const vlcOpts = {
|
||||
'http-referrer': stream.get('http_referrer') || undefined,
|
||||
'http-user-agent': stream.get('user_agent') || undefined
|
||||
}
|
||||
|
||||
this.links.push({
|
||||
url: stream.get('url'),
|
||||
title: stream.get('title'),
|
||||
attrs,
|
||||
vlcOpts
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
toString() {
|
||||
return m3u.create(this.links, this.header)
|
||||
}
|
||||
}
|
||||
|
||||
playlist.create = function (items, options) {
|
||||
return new Playlist(items, options)
|
||||
}
|
||||
|
||||
module.exports = playlist
|
45
scripts/core/playlistParser.ts
Normal file
|
@ -0,0 +1,45 @@
|
|||
import parser from 'iptv-playlist-parser'
|
||||
import { Playlist, Stream } from '../models'
|
||||
import { Collection, Storage } from './'
|
||||
|
||||
export class PlaylistParser {
|
||||
storage: Storage
|
||||
|
||||
constructor({ storage }: { storage: Storage }) {
|
||||
this.storage = storage
|
||||
}
|
||||
|
||||
async parse(filepath: string): Promise<Playlist> {
|
||||
const streams = new Collection()
|
||||
|
||||
const content = await this.storage.read(filepath)
|
||||
const parsed: parser.Playlist = parser.parse(content)
|
||||
|
||||
parsed.items.forEach((item: parser.PlaylistItem) => {
|
||||
const { name, label, quality } = parseTitle(item.name)
|
||||
const stream = new Stream({
|
||||
channel: item.tvg.id,
|
||||
name,
|
||||
label,
|
||||
quality,
|
||||
filepath,
|
||||
line: item.line,
|
||||
url: item.url,
|
||||
httpReferrer: item.http.referrer,
|
||||
userAgent: item.http['user-agent']
|
||||
})
|
||||
|
||||
streams.add(stream)
|
||||
})
|
||||
|
||||
return new Playlist(streams)
|
||||
}
|
||||
}
|
||||
|
||||
function parseTitle(title: string): { name: string; label: string; quality: string } {
|
||||
const [, label] = title.match(/ \[(.*)\]$/) || [null, '']
|
||||
const [, quality] = title.match(/ \(([0-9]+p)\)/) || [null, '']
|
||||
const name = title.replace(` (${quality})`, '').replace(` [${label}]`, '')
|
||||
|
||||
return { name, label, quality }
|
||||
}
|
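A short sketch of the parser in use; the root directory and playlist file below are assumptions. The comment spells out what the private parseTitle() helper extracts from a typical entry title:

import { Storage, PlaylistParser } from '../core'

async function example() {
  const parser = new PlaylistParser({ storage: new Storage('streams') })
  const playlist = await parser.parse('us.m3u') // hypothetical file under ./streams

  // For an entry titled 'Example TV (720p) [Geo-blocked]', parseTitle() yields
  // name 'Example TV', quality '720p' and label 'Geo-blocked'.
  console.log(playlist.streams.count())
}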
82
scripts/core/storage.ts
Normal file
|
@ -0,0 +1,82 @@
|
|||
import { File, Collection } from './'
|
||||
import * as path from 'path'
|
||||
import fs from 'fs-extra'
|
||||
import { glob } from 'glob'
|
||||
|
||||
export class Storage {
|
||||
rootDir: string
|
||||
|
||||
constructor(rootDir?: string) {
|
||||
this.rootDir = rootDir || './'
|
||||
}
|
||||
|
||||
list(pattern: string): Promise<string[]> {
|
||||
return glob(pattern, {
|
||||
cwd: this.rootDir
|
||||
})
|
||||
}
|
||||
|
||||
async createDir(dir: string): Promise<void> {
|
||||
if (await fs.exists(dir)) return
|
||||
|
||||
await fs.mkdir(dir, { recursive: true }).catch(console.error)
|
||||
}
|
||||
|
||||
async load(filepath: string): Promise<any> {
|
||||
return this.read(filepath)
|
||||
}
|
||||
|
||||
async read(filepath: string): Promise<any> {
|
||||
const absFilepath = path.join(this.rootDir, filepath)
|
||||
|
||||
return await fs.readFile(absFilepath, { encoding: 'utf8' })
|
||||
}
|
||||
|
||||
async json(filepath: string): Promise<any> {
|
||||
const absFilepath = path.join(this.rootDir, filepath)
|
||||
const content = await fs.readFile(absFilepath, { encoding: 'utf8' })
|
||||
|
||||
return JSON.parse(content)
|
||||
}
|
||||
|
||||
async exists(filepath: string): Promise<boolean> {
|
||||
const absFilepath = path.join(this.rootDir, filepath)
|
||||
|
||||
return await fs.exists(absFilepath)
|
||||
}
|
||||
|
||||
async write(filepath: string, data: string = ''): Promise<void> {
|
||||
const absFilepath = path.join(this.rootDir, filepath)
|
||||
const dir = path.dirname(absFilepath)
|
||||
|
||||
await this.createDir(dir)
|
||||
await fs.writeFile(absFilepath, data, { encoding: 'utf8', flag: 'w' })
|
||||
}
|
||||
|
||||
async append(filepath: string, data: string = ''): Promise<void> {
|
||||
const absFilepath = path.join(this.rootDir, filepath)
|
||||
|
||||
await fs.appendFile(absFilepath, data, { encoding: 'utf8', flag: 'w' })
|
||||
}
|
||||
|
||||
async clear(filepath: string): Promise<void> {
|
||||
await this.write(filepath)
|
||||
}
|
||||
|
||||
async createStream(filepath: string): Promise<NodeJS.WriteStream> {
|
||||
const absFilepath = path.join(this.rootDir, filepath)
|
||||
const dir = path.dirname(absFilepath)
|
||||
|
||||
await this.createDir(dir)
|
||||
|
||||
return fs.createWriteStream(absFilepath) as unknown as NodeJS.WriteStream
|
||||
}
|
||||
|
||||
async save(filepath: string, content: string): Promise<void> {
|
||||
await this.write(filepath, content)
|
||||
}
|
||||
|
||||
async saveFile(file: File): Promise<void> {
|
||||
await this.write(file.filepath, file.content)
|
||||
}
|
||||
}
|
|
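A short sketch of the Storage API with an assumed root directory. One thing worth noting when reading the code above: append() passes flag: 'w' to fs.appendFile, which overrides the default 'a' flag and therefore appears to overwrite the file rather than append to it.

import { Storage } from '../core'

async function example() {
  const storage = new Storage('.gh-pages') // the root dir is an assumption for this sketch

  await storage.save('categories/news.m3u', '#EXTM3U\n') // creates parent dirs as needed
  const exists = await storage.exists('categories/news.m3u') // true
  const content = await storage.read('categories/news.m3u')

  const files = await storage.list('categories/*.m3u') // glob pattern relative to the root dir
  console.log(exists, content.length, files)
}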
@ -1,56 +0,0 @@
|
|||
const _ = require('lodash')
|
||||
const logger = require('./logger')
|
||||
const setters = require('../store/setters')
|
||||
const getters = require('../store/getters')
|
||||
|
||||
module.exports = {
|
||||
create(state = {}) {
|
||||
return {
|
||||
state,
|
||||
changed: false,
|
||||
set: function (prop, value) {
|
||||
const prevState = JSON.stringify(this.state)
|
||||
|
||||
const setter = setters[prop]
|
||||
if (typeof setter === 'function') {
|
||||
try {
|
||||
this.state[prop] = setter.bind()(value)
|
||||
} catch (error) {
|
||||
logger.error(`store/setters/${prop}.js: ${error.message}`)
|
||||
}
|
||||
} else if (typeof value === 'object') {
|
||||
this.state[prop] = value[prop]
|
||||
} else {
|
||||
this.state[prop] = value
|
||||
}
|
||||
|
||||
const newState = JSON.stringify(this.state)
|
||||
if (prevState !== newState) {
|
||||
this.changed = true
|
||||
}
|
||||
|
||||
return this
|
||||
},
|
||||
get: function (prop) {
|
||||
const getter = getters[prop]
|
||||
if (typeof getter === 'function') {
|
||||
try {
|
||||
return getter.bind(this.state)()
|
||||
} catch (error) {
|
||||
logger.error(`store/getters/${prop}.js: ${error.message}`)
|
||||
}
|
||||
} else {
|
||||
return prop.split('.').reduce((o, i) => (o ? o[i] : undefined), this.state)
|
||||
}
|
||||
},
|
||||
has: function (prop) {
|
||||
const value = this.get(prop)
|
||||
|
||||
return !_.isEmpty(value)
|
||||
},
|
||||
data: function () {
|
||||
return this.state
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,32 +0,0 @@
|
|||
const table = {}
|
||||
|
||||
table.create = function (data, cols) {
|
||||
let output = '<table>\n'
|
||||
|
||||
output += ' <thead>\n <tr>'
|
||||
for (let column of cols) {
|
||||
output += `<th align="left">${column.name}</th>`
|
||||
}
|
||||
output += '</tr>\n </thead>\n'
|
||||
|
||||
output += ' <tbody>\n'
|
||||
for (let item of data) {
|
||||
output += ' <tr>'
|
||||
let i = 0
|
||||
for (let prop in item) {
|
||||
const column = cols[i]
|
||||
let nowrap = column.nowrap ? ` nowrap` : ''
|
||||
let align = column.align ? ` align="${column.align}"` : ''
|
||||
output += `<td${align}${nowrap}>${item[prop]}</td>`
|
||||
i++
|
||||
}
|
||||
output += '</tr>\n'
|
||||
}
|
||||
output += ' </tbody>\n'
|
||||
|
||||
output += '</table>'
|
||||
|
||||
return output
|
||||
}
|
||||
|
||||
module.exports = table
|
|
@ -1,29 +0,0 @@
|
|||
const { performance } = require('perf_hooks')
|
||||
const dayjs = require('dayjs')
|
||||
const duration = require('dayjs/plugin/duration')
|
||||
const relativeTime = require('dayjs/plugin/relativeTime')
|
||||
|
||||
dayjs.extend(relativeTime)
|
||||
dayjs.extend(duration)
|
||||
|
||||
const timer = {}
|
||||
|
||||
let t0 = 0
|
||||
|
||||
timer.start = function () {
|
||||
t0 = performance.now()
|
||||
}
|
||||
|
||||
timer.format = function (f) {
|
||||
let t1 = performance.now()
|
||||
|
||||
return dayjs.duration(t1 - t0).format(f)
|
||||
}
|
||||
|
||||
timer.humanize = function (suffix = true) {
|
||||
let t1 = performance.now()
|
||||
|
||||
return dayjs.duration(t1 - t0).humanize(suffix)
|
||||
}
|
||||
|
||||
module.exports = timer
|
|
@ -1,11 +0,0 @@
|
|||
const normalize = require('normalize-url')
|
||||
|
||||
const url = {}
|
||||
|
||||
url.normalize = function (string) {
|
||||
const normalized = normalize(string, { stripWWW: false })
|
||||
|
||||
return decodeURIComponent(normalized).replace(/\s/g, '+')
|
||||
}
|
||||
|
||||
module.exports = url
|
20
scripts/core/url.ts
Normal file
|
@ -0,0 +1,20 @@
|
|||
import normalizeUrl from 'normalize-url'
|
||||
|
||||
export class URL {
|
||||
url: string
|
||||
|
||||
constructor(url: string) {
|
||||
this.url = url
|
||||
}
|
||||
|
||||
normalize(): URL {
|
||||
const normalized = normalizeUrl(this.url, { stripWWW: false })
|
||||
this.url = decodeURIComponent(normalized).replace(/\s/g, '+')
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
toString(): string {
|
||||
return this.url
|
||||
}
|
||||
}
|
|
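A minimal sketch of the URL wrapper; the address is a placeholder. normalize() mutates the wrapped value and returns the instance, so calls chain:

import { URL } from '../core'

const normalized = new URL('https://example.com/playlist.m3u8?b=2&a=1').normalize().toString()
console.log(normalized)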
@ -1,18 +0,0 @@
|
|||
const api = require('../core/api')
|
||||
const _ = require('lodash')
|
||||
|
||||
module.exports = async function (streams = []) {
|
||||
await api.categories.load()
|
||||
const categories = await api.categories.all()
|
||||
|
||||
const output = []
|
||||
for (const category of categories) {
|
||||
let items = _.filter(streams, { categories: [{ id: category.id }] })
|
||||
output.push({ filepath: `categories/${category.id}.m3u`, items })
|
||||
}
|
||||
|
||||
let items = _.filter(streams, stream => !stream.categories.length)
|
||||
output.push({ filepath: 'categories/undefined.m3u', items })
|
||||
|
||||
return output
|
||||
}
|
55
scripts/generators/categoriesGenerator.ts
Normal file
|
@ -0,0 +1,55 @@
|
|||
import { Generator } from './generator'
|
||||
import { Collection, Storage, Logger } from '../core'
|
||||
import { Stream, Category, Playlist } from '../models'
|
||||
import { PUBLIC_DIR } from '../constants'
|
||||
|
||||
type CategoriesGeneratorProps = {
|
||||
streams: Collection
|
||||
categories: Collection
|
||||
logger: Logger
|
||||
}
|
||||
|
||||
export class CategoriesGenerator implements Generator {
|
||||
streams: Collection
|
||||
categories: Collection
|
||||
storage: Storage
|
||||
logger: Logger
|
||||
|
||||
constructor({ streams, categories, logger }: CategoriesGeneratorProps) {
|
||||
this.streams = streams
|
||||
this.categories = categories
|
||||
this.storage = new Storage(PUBLIC_DIR)
|
||||
this.logger = logger
|
||||
}
|
||||
|
||||
async generate() {
|
||||
const streams = this.streams.orderBy([(stream: Stream) => stream.getTitle()])
|
||||
|
||||
this.categories.forEach(async (category: Category) => {
|
||||
let categoryStreams = streams
|
||||
.filter((stream: Stream) => stream.hasCategory(category))
|
||||
.map((stream: Stream) => {
|
||||
const groupTitle = stream.categories
|
||||
? stream.categories
|
||||
.map((category: Category) => category.name)
|
||||
.sort()
|
||||
.join(';')
|
||||
: ''
|
||||
stream.groupTitle = groupTitle
|
||||
|
||||
return stream
|
||||
})
|
||||
|
||||
const playlist = new Playlist(categoryStreams, { public: true })
|
||||
const filepath = `categories/${category.id}.m3u`
|
||||
await this.storage.save(filepath, playlist.toString())
|
||||
this.logger.info(JSON.stringify({ filepath, count: playlist.streams.count() }))
|
||||
})
|
||||
|
||||
const undefinedStreams = streams.filter((stream: Stream) => stream.noCategories())
|
||||
const playlist = new Playlist(undefinedStreams, { public: true })
|
||||
const filepath = `categories/undefined.m3u`
|
||||
await this.storage.save(filepath, playlist.toString())
|
||||
this.logger.info(JSON.stringify({ filepath, count: playlist.streams.count() }))
|
||||
}
|
||||
}
|
|
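A hedged sketch of wiring this generator up; loading the data is out of scope here, so streams and categories are assumed to already hold Stream and Category models:

import { CategoriesGenerator } from './categoriesGenerator'
import { Collection, Logger } from '../core'

async function example(streams: Collection, categories: Collection) {
  const logger = new Logger()

  // Writes categories/<id>.m3u for each category plus categories/undefined.m3u,
  // logging { filepath, count } for every playlist saved.
  await new CategoriesGenerator({ streams, categories, logger }).generate()
}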
@ -1,53 +0,0 @@
|
|||
const api = require('../core/api')
|
||||
const _ = require('lodash')
|
||||
|
||||
module.exports = async function (streams = []) {
|
||||
streams = _.filter(streams, stream => stream.is_nsfw === false)
|
||||
|
||||
await api.countries.load()
|
||||
const countries = await api.countries.all()
|
||||
await api.regions.load()
|
||||
let regions = await api.regions.all()
|
||||
regions = regions.filter(r => r.code !== 'INT')
|
||||
await api.subdivisions.load()
|
||||
const subdivisions = await api.subdivisions.all()
|
||||
|
||||
let output = []
|
||||
for (const country of countries) {
|
||||
let countryRegionCodes = _.filter(regions, { countries: [country.code] }).map(
|
||||
r => `r/${r.code}`
|
||||
)
|
||||
const countrySubdivisions = _.filter(subdivisions, { country: country.code })
|
||||
const countryAreaCodes = countryRegionCodes.concat(countrySubdivisions.map(s => `s/${s.code}`))
|
||||
countryAreaCodes.push(`c/${country.code}`)
|
||||
|
||||
let items = _.filter(streams, stream => {
|
||||
return _.intersection(stream.broadcast_area, countryAreaCodes).length
|
||||
})
|
||||
|
||||
output.push({ filepath: `countries/${country.code.toLowerCase()}.m3u`, items })
|
||||
|
||||
for (let subdivision of countrySubdivisions) {
|
||||
let subdivisionItems = _.filter(streams, stream => {
|
||||
return stream.broadcast_area.includes(`s/${subdivision.code}`)
|
||||
})
|
||||
|
||||
if (subdivisionItems.length) {
|
||||
output.push({
|
||||
filepath: `subdivisions/${subdivision.code.toLowerCase()}.m3u`,
|
||||
items: subdivisionItems
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let intItems = _.filter(streams, stream => stream.broadcast_area.includes('r/INT'))
|
||||
output.push({
|
||||
filepath: `countries/int.m3u`,
|
||||
items: intItems
|
||||
})
|
||||
|
||||
output = output.filter(f => f.items.length > 0)
|
||||
|
||||
return output
|
||||
}
|
85
scripts/generators/countriesGenerator.ts
Normal file
|
@ -0,0 +1,85 @@
|
|||
import { Generator } from './generator'
|
||||
import { Collection, Storage, Logger } from '../core'
|
||||
import { Country, Region, Subdivision, Stream, Playlist } from '../models'
|
||||
import { PUBLIC_DIR } from '../constants'
|
||||
|
||||
type CountriesGeneratorProps = {
|
||||
streams: Collection
|
||||
regions: Collection
|
||||
subdivisions: Collection
|
||||
countries: Collection
|
||||
logger: Logger
|
||||
}
|
||||
|
||||
export class CountriesGenerator implements Generator {
|
||||
streams: Collection
|
||||
countries: Collection
|
||||
regions: Collection
|
||||
subdivisions: Collection
|
||||
storage: Storage
|
||||
logger: Logger
|
||||
|
||||
constructor({ streams, countries, regions, subdivisions, logger }: CountriesGeneratorProps) {
|
||||
this.streams = streams
|
||||
this.countries = countries
|
||||
this.regions = regions
|
||||
this.subdivisions = subdivisions
|
||||
this.storage = new Storage(PUBLIC_DIR)
|
||||
this.logger = logger
|
||||
}
|
||||
|
||||
async generate(): Promise<void> {
|
||||
let streams = this.streams
|
||||
.orderBy([stream => stream.getTitle()])
|
||||
.filter((stream: Stream) => stream.isSFW())
|
||||
let regions = this.regions.filter((region: Region) => region.code !== 'INT')
|
||||
|
||||
this.countries.forEach(async (country: Country) => {
|
||||
const countrySubdivisions = this.subdivisions.filter(
|
||||
(subdivision: Subdivision) => subdivision.country === country.code
|
||||
)
|
||||
|
||||
const countrySubdivisionsCodes = countrySubdivisions.map(
|
||||
(subdivision: Subdivision) => `s/${subdivision.code}`
|
||||
)
|
||||
|
||||
const countryAreaCodes = regions
|
||||
.filter((region: Region) => region.countries.includes(country.code))
|
||||
.map((region: Region) => `r/${region.code}`)
|
||||
.concat(countrySubdivisionsCodes)
|
||||
.add(`c/${country.code}`)
|
||||
|
||||
const countryStreams = streams.filter(stream =>
|
||||
stream.broadcastArea.intersects(countryAreaCodes)
|
||||
)
|
||||
|
||||
if (countryStreams.isEmpty()) return
|
||||
|
||||
const playlist = new Playlist(countryStreams, { public: true })
|
||||
const filepath = `countries/${country.code.toLowerCase()}.m3u`
|
||||
await this.storage.save(filepath, playlist.toString())
|
||||
this.logger.info(JSON.stringify({ filepath, count: playlist.streams.count() }))
|
||||
|
||||
countrySubdivisions.forEach(async (subdivision: Subdivision) => {
|
||||
const subdivisionStreams = streams.filter(stream =>
|
||||
stream.broadcastArea.includes(`s/${subdivision.code}`)
|
||||
)
|
||||
|
||||
if (subdivisionStreams.isEmpty()) return
|
||||
|
||||
const playlist = new Playlist(subdivisionStreams, { public: true })
|
||||
const filepath = `subdivisions/${subdivision.code.toLowerCase()}.m3u`
|
||||
await this.storage.save(filepath, playlist.toString())
|
||||
this.logger.info(JSON.stringify({ filepath, count: playlist.streams.count() }))
|
||||
})
|
||||
})
|
||||
|
||||
const internationalStreams = streams.filter(stream => stream.isInternational())
|
||||
if (internationalStreams.notEmpty()) {
|
||||
const playlist = new Playlist(internationalStreams, { public: true })
|
||||
const filepath = `countries/int.m3u`
|
||||
await this.storage.save(filepath, playlist.toString())
|
||||
this.logger.info(JSON.stringify({ filepath, count: playlist.streams.count() }))
|
||||
}
|
||||
}
|
||||
}
|
3
scripts/generators/generator.ts
Normal file
|
@ -0,0 +1,3 @@
|
|||
export interface Generator {
|
||||
generate(): Promise<void>
|
||||
}
|
|
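Every generator in this commit implements this one-method contract, so adding a new playlist type only requires a class with an async generate(). A bare-bones sketch (the class name and its construction are hypothetical):

import { Generator } from './generator'
import { Collection, Storage, Logger } from '../core'
import { PUBLIC_DIR } from '../constants'

export class ExampleGenerator implements Generator {
  streams: Collection
  storage: Storage
  logger: Logger

  constructor({ streams, logger }: { streams: Collection; logger: Logger }) {
    this.streams = streams
    this.storage = new Storage(PUBLIC_DIR)
    this.logger = logger
  }

  async generate(): Promise<void> {
    // Build a Playlist from this.streams, save it via this.storage,
    // and log { filepath, count } like the other generators do.
  }
}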
@ -1,10 +0,0 @@
|
|||
exports.categories = require('./categories')
|
||||
exports.countries = require('./countries')
|
||||
exports.languages = require('./languages')
|
||||
exports.regions = require('./regions')
|
||||
exports.index_m3u = require('./index_m3u')
|
||||
exports.index_nsfw_m3u = require('./index_nsfw_m3u')
|
||||
exports.index_category_m3u = require('./index_category_m3u')
|
||||
exports.index_country_m3u = require('./index_country_m3u')
|
||||
exports.index_language_m3u = require('./index_language_m3u')
|
||||
exports.index_region_m3u = require('./index_region_m3u')
|
10
scripts/generators/index.ts
Normal file
|
@ -0,0 +1,10 @@
|
|||
export * from './categoriesGenerator'
|
||||
export * from './countriesGenerator'
|
||||
export * from './languagesGenerator'
|
||||
export * from './regionsGenerator'
|
||||
export * from './indexGenerator'
|
||||
export * from './indexNsfwGenerator'
|
||||
export * from './indexCategoryGenerator'
|
||||
export * from './indexCountryGenerator'
|
||||
export * from './indexLanguageGenerator'
|
||||
export * from './indexRegionGenerator'
|
53
scripts/generators/indexCategoryGenerator.ts
Normal file
|
@ -0,0 +1,53 @@
|
|||
import { Generator } from './generator'
|
||||
import { Collection, Storage, Logger } from '../core'
|
||||
import { Stream, Playlist, Category } from '../models'
|
||||
import { PUBLIC_DIR } from '../constants'
|
||||
|
||||
type IndexCategoryGeneratorProps = {
|
||||
streams: Collection
|
||||
logger: Logger
|
||||
}
|
||||
|
||||
export class IndexCategoryGenerator implements Generator {
|
||||
streams: Collection
|
||||
storage: Storage
|
||||
logger: Logger
|
||||
|
||||
constructor({ streams, logger }: IndexCategoryGeneratorProps) {
|
||||
this.streams = streams
|
||||
this.storage = new Storage(PUBLIC_DIR)
|
||||
this.logger = logger
|
||||
}
|
||||
|
||||
async generate(): Promise<void> {
|
||||
const streams = this.streams
|
||||
.orderBy(stream => stream.getTitle())
|
||||
.filter(stream => stream.isSFW())
|
||||
|
||||
let groupedStreams = new Collection()
|
||||
streams.forEach((stream: Stream) => {
|
||||
if (stream.noCategories()) {
|
||||
const streamClone = stream.clone()
|
||||
streamClone.groupTitle = 'Undefined'
|
||||
groupedStreams.add(streamClone)
|
||||
return
|
||||
}
|
||||
|
||||
stream.categories.forEach((category: Category) => {
|
||||
const streamClone = stream.clone()
|
||||
streamClone.groupTitle = category.name
|
||||
groupedStreams.push(streamClone)
|
||||
})
|
||||
})
|
||||
|
||||
groupedStreams = groupedStreams.orderBy(stream => {
|
||||
if (stream.groupTitle === 'Undefined') return 'ZZ'
|
||||
return stream.groupTitle
|
||||
})
|
||||
|
||||
const playlist = new Playlist(groupedStreams, { public: true })
|
||||
const filepath = 'index.category.m3u'
|
||||
await this.storage.save(filepath, playlist.toString())
|
||||
this.logger.info(JSON.stringify({ filepath, count: playlist.streams.count() }))
|
||||
}
|
||||
}
|
104
scripts/generators/indexCountryGenerator.ts
Normal file
|
@ -0,0 +1,104 @@
|
|||
import { Generator } from './generator'
|
||||
import { Collection, Storage, Logger } from '../core'
|
||||
import { Stream, Playlist, Country, Subdivision, Region } from '../models'
|
||||
import { PUBLIC_DIR } from '../constants'
|
||||
|
||||
type IndexCountryGeneratorProps = {
|
||||
streams: Collection
|
||||
regions: Collection
|
||||
countries: Collection
|
||||
subdivisions: Collection
|
||||
logger: Logger
|
||||
}
|
||||
|
||||
export class IndexCountryGenerator implements Generator {
|
||||
streams: Collection
|
||||
countries: Collection
|
||||
regions: Collection
|
||||
subdivisions: Collection
|
||||
storage: Storage
|
||||
logger: Logger
|
||||
|
||||
constructor({ streams, regions, countries, subdivisions, logger }: IndexCountryGeneratorProps) {
|
||||
this.streams = streams
|
||||
this.countries = countries
|
||||
this.regions = regions
|
||||
this.subdivisions = subdivisions
|
||||
this.storage = new Storage(PUBLIC_DIR)
|
||||
this.logger = logger
|
||||
}
|
||||
|
||||
async generate(): Promise<void> {
|
||||
let groupedStreams = new Collection()
|
||||
|
||||
this.streams
|
||||
.orderBy(stream => stream.getTitle())
|
||||
.filter(stream => stream.isSFW())
|
||||
.forEach(stream => {
|
||||
if (stream.noBroadcastArea()) {
|
||||
const streamClone = stream.clone()
|
||||
streamClone.groupTitle = 'Undefined'
|
||||
groupedStreams.add(streamClone)
|
||||
return
|
||||
}
|
||||
|
||||
if (stream.isInternational()) {
|
||||
const streamClone = stream.clone()
|
||||
streamClone.groupTitle = 'International'
|
||||
groupedStreams.add(streamClone)
|
||||
}
|
||||
|
||||
this.getStreamBroadcastCountries(stream).forEach((country: Country) => {
|
||||
const streamClone = stream.clone()
|
||||
streamClone.groupTitle = country.name
|
||||
groupedStreams.add(streamClone)
|
||||
})
|
||||
})
|
||||
|
||||
groupedStreams = groupedStreams.orderBy((stream: Stream) => {
|
||||
if (stream.groupTitle === 'International') return 'ZZ'
|
||||
if (stream.groupTitle === 'Undefined') return 'ZZZ'
|
||||
|
||||
return stream.groupTitle
|
||||
})
|
||||
|
||||
const playlist = new Playlist(groupedStreams, { public: true })
|
||||
const filepath = 'index.country.m3u'
|
||||
await this.storage.save(filepath, playlist.toString())
|
||||
this.logger.info(JSON.stringify({ filepath, count: playlist.streams.count() }))
|
||||
}
|
||||
|
||||
getStreamBroadcastCountries(stream: Stream) {
|
||||
const groupedRegions = this.regions.keyBy((region: Region) => region.code)
|
||||
const groupedCountries = this.countries.keyBy((country: Country) => country.code)
|
||||
const groupedSubdivisions = this.subdivisions.keyBy(
|
||||
(subdivision: Subdivision) => subdivision.code
|
||||
)
|
||||
|
||||
let broadcastCountries = new Collection()
|
||||
|
||||
stream.broadcastArea.forEach(broadcastAreaCode => {
|
||||
const [type, code] = broadcastAreaCode.split('/')
|
||||
switch (type) {
|
||||
case 'c':
|
||||
broadcastCountries.add(code)
|
||||
break
|
||||
case 'r':
|
||||
if (code !== 'INT' && groupedRegions.has(code)) {
|
||||
broadcastCountries = broadcastCountries.concat(groupedRegions.get(code).countries)
|
||||
}
|
||||
break
|
||||
case 's':
|
||||
if (groupedSubdivisions.has(code)) {
|
||||
broadcastCountries.add(groupedSubdivisions.get(code).country)
|
||||
}
|
||||
break
|
||||
}
|
||||
})
|
||||
|
||||
return broadcastCountries
|
||||
.uniq()
|
||||
.map(code => groupedCountries.get(code))
|
||||
.filter(Boolean)
|
||||
}
|
||||
}
|
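The broadcast-area expansion in getStreamBroadcastCountries() is the subtle part: 'c/' codes map straight to a country, 'r/' codes other than INT expand to every country of the region, and 's/' codes resolve through the subdivision's parent country. A worked illustration with invented data:

// Assuming the API data defines a region 'EUR' covering ['FR', 'DE', ...] and a
// subdivision 'US-CA' whose country is 'US', a stream with
//   broadcastArea = ['c/FR', 'r/EUR', 's/US-CA']
// ends up grouped under FR, DE, ... and US (duplicates removed by uniq());
// 'r/INT' entries are skipped here and handled by the 'International' group above.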
32
scripts/generators/indexGenerator.ts
Normal file
|
@ -0,0 +1,32 @@
|
|||
import { Collection, Logger, Storage } from '../core'
|
||||
import { Stream, Playlist } from '../models'
|
||||
import { Generator } from './generator'
|
||||
import { PUBLIC_DIR } from '../constants'
|
||||
|
||||
type IndexGeneratorProps = {
|
||||
streams: Collection
|
||||
logger: Logger
|
||||
}
|
||||
|
||||
export class IndexGenerator implements Generator {
|
||||
streams: Collection
|
||||
storage: Storage
|
||||
logger: Logger
|
||||
|
||||
constructor({ streams, logger }: IndexGeneratorProps) {
|
||||
this.streams = streams
|
||||
this.storage = new Storage(PUBLIC_DIR)
|
||||
this.logger = logger
|
||||
}
|
||||
|
||||
async generate(): Promise<void> {
|
||||
const sfwStreams = this.streams
|
||||
.orderBy(stream => stream.getTitle())
|
||||
.filter((stream: Stream) => stream.isSFW())
|
||||
|
||||
const playlist = new Playlist(sfwStreams, { public: true })
|
||||
const filepath = 'index.m3u'
|
||||
await this.storage.save(filepath, playlist.toString())
|
||||
this.logger.info(JSON.stringify({ filepath, count: playlist.streams.count() }))
|
||||
}
|
||||
}
|
52
scripts/generators/indexLanguageGenerator.ts
Normal file
|
@ -0,0 +1,52 @@
|
|||
import { Generator } from './generator'
|
||||
import { Collection, Storage, Logger } from '../core'
|
||||
import { Stream, Playlist, Language } from '../models'
|
||||
import { PUBLIC_DIR } from '../constants'
|
||||
|
||||
type IndexLanguageGeneratorProps = {
|
||||
streams: Collection
|
||||
logger: Logger
|
||||
}
|
||||
|
||||
export class IndexLanguageGenerator implements Generator {
|
||||
streams: Collection
|
||||
storage: Storage
|
||||
logger: Logger
|
||||
|
||||
constructor({ streams, logger }: IndexLanguageGeneratorProps) {
|
||||
this.streams = streams
|
||||
this.storage = new Storage(PUBLIC_DIR)
|
||||
this.logger = logger
|
||||
}
|
||||
|
||||
async generate(): Promise<void> {
|
||||
let groupedStreams = new Collection()
|
||||
this.streams
|
||||
.orderBy(stream => stream.getTitle())
|
||||
.filter(stream => stream.isSFW())
|
||||
.forEach(stream => {
|
||||
if (stream.noLanguages()) {
|
||||
const streamClone = stream.clone()
|
||||
streamClone.groupTitle = 'Undefined'
|
||||
groupedStreams.add(streamClone)
|
||||
return
|
||||
}
|
||||
|
||||
stream.languages.forEach((language: Language) => {
|
||||
const streamClone = stream.clone()
|
||||
streamClone.groupTitle = language.name
|
||||
groupedStreams.add(streamClone)
|
||||
})
|
||||
})
|
||||
|
||||
groupedStreams = groupedStreams.orderBy((stream: Stream) => {
|
||||
if (stream.groupTitle === 'Undefined') return 'ZZ'
|
||||
return stream.groupTitle
|
||||
})
|
||||
|
||||
const playlist = new Playlist(groupedStreams, { public: true })
|
||||
const filepath = 'index.language.m3u'
|
||||
await this.storage.save(filepath, playlist.toString())
|
||||
this.logger.info(JSON.stringify({ filepath, count: playlist.streams.count() }))
|
||||
}
|
||||
}
|
30
scripts/generators/indexNsfwGenerator.ts
Normal file
|
@ -0,0 +1,30 @@
|
|||
import { Collection, Logger, Storage } from '../core'
|
||||
import { Stream, Playlist } from '../models'
|
||||
import { Generator } from './generator'
|
||||
import { PUBLIC_DIR } from '../constants'
|
||||
|
||||
type IndexNsfwGeneratorProps = {
|
||||
streams: Collection
|
||||
logger: Logger
|
||||
}
|
||||
|
||||
export class IndexNsfwGenerator implements Generator {
|
||||
streams: Collection
|
||||
storage: Storage
|
||||
logger: Logger
|
||||
|
||||
constructor({ streams, logger }: IndexNsfwGeneratorProps) {
|
||||
this.streams = streams
|
||||
this.storage = new Storage(PUBLIC_DIR)
|
||||
this.logger = logger
|
||||
}
|
||||
|
||||
async generate(): Promise<void> {
|
||||
const allStreams = this.streams.orderBy((stream: Stream) => stream.getTitle())
|
||||
|
||||
const playlist = new Playlist(allStreams, { public: true })
|
||||
const filepath = 'index.nsfw.m3u'
|
||||
await this.storage.save(filepath, playlist.toString())
|
||||
this.logger.info(JSON.stringify({ filepath, count: playlist.streams.count() }))
|
||||
}
|
||||
}
|
83
scripts/generators/indexRegionGenerator.ts
Normal file
|
@ -0,0 +1,83 @@
|
|||
import { Generator } from './generator'
|
||||
import { Collection, Storage, Logger } from '../core'
|
||||
import { Stream, Playlist, Region } from '../models'
|
||||
import { PUBLIC_DIR } from '../constants'
|
||||
|
||||
type IndexRegionGeneratorProps = {
|
||||
streams: Collection
|
||||
regions: Collection
|
||||
logger: Logger
|
||||
}
|
||||
|
||||
export class IndexRegionGenerator implements Generator {
|
||||
streams: Collection
|
||||
regions: Collection
|
||||
storage: Storage
|
||||
logger: Logger
|
||||
|
||||
constructor({ streams, regions, logger }: IndexRegionGeneratorProps) {
|
||||
this.streams = streams
|
||||
this.regions = regions
|
||||
this.storage = new Storage(PUBLIC_DIR)
|
||||
this.logger = logger
|
||||
}
|
||||
|
||||
async generate(): Promise<void> {
|
||||
let groupedStreams = new Collection()
|
||||
this.streams
|
||||
.orderBy((stream: Stream) => stream.getTitle())
|
||||
.filter((stream: Stream) => stream.isSFW())
|
||||
.forEach((stream: Stream) => {
|
||||
if (stream.noBroadcastArea()) {
|
||||
const streamClone = stream.clone()
|
||||
streamClone.groupTitle = 'Undefined'
|
||||
groupedStreams.push(streamClone)
|
||||
return
|
||||
}
|
||||
|
||||
this.getStreamRegions(stream).forEach((region: Region) => {
|
||||
const streamClone = stream.clone()
|
||||
streamClone.groupTitle = region.name
|
||||
groupedStreams.push(streamClone)
|
||||
})
|
||||
})
|
||||
|
||||
groupedStreams = groupedStreams.orderBy((stream: Stream) => {
|
||||
if (stream.groupTitle === 'Undefined') return 'ZZ'
|
||||
return stream.groupTitle
|
||||
})
|
||||
|
||||
const playlist = new Playlist(groupedStreams, { public: true })
|
||||
const filepath = 'index.region.m3u'
|
||||
await this.storage.save(filepath, playlist.toString())
|
||||
this.logger.info(JSON.stringify({ filepath, count: playlist.streams.count() }))
|
||||
}
|
||||
|
||||
getStreamRegions(stream: Stream) {
|
||||
let streamRegions = new Collection()
|
||||
stream.broadcastArea.forEach(broadcastAreaCode => {
|
||||
const [type, code] = broadcastAreaCode.split('/')
|
||||
switch (type) {
|
||||
case 'r':
|
||||
const groupedRegions = this.regions.keyBy((region: Region) => region.code)
|
||||
streamRegions.add(groupedRegions.get(code))
|
||||
break
|
||||
case 's':
|
||||
const [countryCode] = code.split('-')
|
||||
const subdivisionRegions = this.regions.filter((region: Region) =>
|
||||
region.countries.includes(countryCode)
|
||||
)
|
||||
streamRegions = streamRegions.concat(subdivisionRegions)
|
||||
break
|
||||
case 'c':
|
||||
const countryRegions = this.regions.filter((region: Region) =>
|
||||
region.countries.includes(code)
|
||||
)
|
||||
streamRegions = streamRegions.concat(countryRegions)
|
||||
break
|
||||
}
|
||||
})
|
||||
|
||||
return streamRegions
|
||||
}
|
||||
}
|
|
@ -1,32 +0,0 @@
|
|||
const _ = require('lodash')
|
||||
|
||||
module.exports = async function (streams = []) {
|
||||
streams = _.filter(streams, stream => stream.is_nsfw === false)
|
||||
|
||||
let items = []
|
||||
streams.forEach(stream => {
|
||||
if (!stream.categories.length) {
|
||||
const item = _.cloneDeep(stream)
|
||||
item.group_title = 'Undefined'
|
||||
items.push(item)
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
stream.categories
|
||||
.filter(c => c)
|
||||
.forEach(category => {
|
||||
const item = _.cloneDeep(stream)
|
||||
item.group_title = category.name
|
||||
items.push(item)
|
||||
})
|
||||
})
|
||||
|
||||
items = _.sortBy(items, item => {
|
||||
if (item.group_title === 'Undefined') return ''
|
||||
|
||||
return item.group_title
|
||||
})
|
||||
|
||||
return { filepath: 'index.category.m3u', items }
|
||||
}
|
|
@ -1,80 +0,0 @@
|
|||
const api = require('../core/api')
|
||||
const _ = require('lodash')
|
||||
|
||||
module.exports = async function (streams = []) {
|
||||
streams = _.filter(streams, stream => stream.is_nsfw === false)
|
||||
|
||||
await api.regions.load()
|
||||
let regions = await api.regions.all()
|
||||
regions = _.keyBy(regions, 'code')
|
||||
|
||||
await api.countries.load()
|
||||
let countries = await api.countries.all()
|
||||
countries = _.keyBy(countries, 'code')
|
||||
|
||||
await api.subdivisions.load()
|
||||
let subdivisions = await api.subdivisions.all()
|
||||
subdivisions = _.keyBy(subdivisions, 'code')
|
||||
|
||||
let items = []
|
||||
streams.forEach(stream => {
|
||||
if (!stream.broadcast_area.length) {
|
||||
const item = _.cloneDeep(stream)
|
||||
item.group_title = 'Undefined'
|
||||
items.push(item)
|
||||
return
|
||||
}
|
||||
|
||||
if (stream.broadcast_area.includes('r/INT')) {
|
||||
const item = _.cloneDeep(stream)
|
||||
item.group_title = 'International'
|
||||
items.push(item)
|
||||
}
|
||||
|
||||
const broadcastCountries = getBroadcastCountries(stream, { countries, regions, subdivisions })
|
||||
broadcastCountries.forEach(country => {
|
||||
const item = _.cloneDeep(stream)
|
||||
item.group_title = country.name
|
||||
items.push(item)
|
||||
})
|
||||
})
|
||||
|
||||
items = sortByGroupTitle(items)
|
||||
|
||||
return { filepath: 'index.country.m3u', items }
|
||||
}
|
||||
|
||||
function getBroadcastCountries(stream, { countries, regions, subdivisions }) {
|
||||
let codes = stream.broadcast_area.reduce((acc, item) => {
|
||||
const [type, code] = item.split('/')
|
||||
switch (type) {
|
||||
case 'c':
|
||||
acc.push(code)
|
||||
break
|
||||
case 'r':
|
||||
if (code !== 'INT' && regions[code]) {
|
||||
acc = acc.concat(regions[code].countries)
|
||||
}
|
||||
break
|
||||
case 's':
|
||||
if (subdivisions[code]) {
|
||||
acc.push(subdivisions[code].country)
|
||||
}
|
||||
break
|
||||
}
|
||||
return acc
|
||||
}, [])
|
||||
|
||||
codes = _.uniq(codes)
|
||||
|
||||
return codes.map(code => countries[code]).filter(c => c)
|
||||
}
|
||||
|
||||
function sortByGroupTitle(items) {
|
||||
return _.sortBy(items, item => {
|
||||
if (item.group_title === 'International') return '[' // ASCII character 91
|
||||
if (item.group_title === 'Undefined') return ']' // ASCII character 93
|
||||
|
||||
return item.group_title
|
||||
})
|
||||
}
|
|
@ -1,29 +0,0 @@
|
|||
const _ = require('lodash')
|
||||
|
||||
module.exports = async function (streams = []) {
|
||||
streams = _.filter(streams, stream => stream.is_nsfw === false)
|
||||
|
||||
let items = []
|
||||
streams.forEach(stream => {
|
||||
if (!stream.languages.length) {
|
||||
const item = _.cloneDeep(stream)
|
||||
item.group_title = 'Undefined'
|
||||
items.push(item)
|
||||
return
|
||||
}
|
||||
|
||||
stream.languages.forEach(language => {
|
||||
const item = _.cloneDeep(stream)
|
||||
item.group_title = language.name
|
||||
items.push(item)
|
||||
})
|
||||
})
|
||||
|
||||
items = _.sortBy(items, i => {
|
||||
if (i.group_title === 'Undefined') return ''
|
||||
|
||||
return i.group_title
|
||||
})
|
||||
|
||||
return { filepath: 'index.language.m3u', items }
|
||||
}
|
|
@ -1,7 +0,0 @@
|
|||
const api = require('../core/api')
|
||||
const _ = require('lodash')
|
||||
|
||||
module.exports = async function (streams = []) {
|
||||
streams = _.filter(streams, stream => stream.is_nsfw === false)
|
||||
return { filepath: 'index.m3u', items: streams }
|
||||
}
|
|
@ -1,6 +0,0 @@
|
|||
const api = require('../core/api')
|
||||
const _ = require('lodash')
|
||||
|
||||
module.exports = async function (streams = []) {
|
||||
return { filepath: 'index.nsfw.m3u', items: streams }
|
||||
}
|
|
@ -1,57 +0,0 @@
|
|||
const api = require('../core/api')
|
||||
const _ = require('lodash')
|
||||
|
||||
module.exports = async function (streams = []) {
|
||||
streams = _.filter(streams, stream => stream.is_nsfw === false)
|
||||
|
||||
await api.regions.load()
|
||||
let regions = await api.regions.all()
|
||||
regions = _.keyBy(regions, 'code')
|
||||
|
||||
let items = []
|
||||
streams.forEach(stream => {
|
||||
if (!stream.broadcast_area.length) {
|
||||
const item = _.cloneDeep(stream)
|
||||
item.group_title = 'Undefined'
|
||||
items.push(item)
|
||||
return
|
||||
}
|
||||
|
||||
getChannelRegions(stream, { regions }).forEach(region => {
|
||||
const item = _.cloneDeep(stream)
|
||||
item.group_title = region.name
|
||||
items.push(item)
|
||||
})
|
||||
})
|
||||
|
||||
items = _.sortBy(items, i => {
|
||||
if (i.group_title === 'Undefined') return ''
|
||||
|
||||
return i.group_title
|
||||
})
|
||||
|
||||
return { filepath: 'index.region.m3u', items }
|
||||
}
|
||||
|
||||
function getChannelRegions(stream, { regions }) {
|
||||
return stream.broadcast_area
|
||||
.reduce((acc, item) => {
|
||||
const [type, code] = item.split('/')
|
||||
switch (type) {
|
||||
case 'r':
|
||||
acc.push(regions[code])
|
||||
break
|
||||
case 's':
|
||||
const [c] = code.split('-')
|
||||
const r1 = _.filter(regions, { countries: [c] })
|
||||
acc = acc.concat(r1)
|
||||
break
|
||||
case 'c':
|
||||
const r2 = _.filter(regions, { countries: [code] })
|
||||
acc = acc.concat(r2)
|
||||
break
|
||||
}
|
||||
return acc
|
||||
}, [])
|
||||
.filter(i => i)
|
||||
}
|
|
@ -1,25 +0,0 @@
|
|||
const _ = require('lodash')
|
||||
|
||||
module.exports = async function (streams = []) {
|
||||
streams = _.filter(streams, stream => stream.is_nsfw === false)
|
||||
|
||||
let languages = []
|
||||
streams.forEach(stream => {
|
||||
languages = languages.concat(stream.languages)
|
||||
})
|
||||
languages = _.uniqBy(languages, 'code')
|
||||
languages = _.sortBy(languages, 'name')
|
||||
|
||||
const output = []
|
||||
for (const language of languages) {
|
||||
let items = _.filter(streams, { languages: [{ code: language.code }] })
|
||||
if (items.length) {
|
||||
output.push({ filepath: `languages/${language.code}.m3u`, items })
|
||||
}
|
||||
}
|
||||
|
||||
let items = _.filter(streams, stream => !stream.languages.length)
|
||||
output.push({ filepath: 'languages/undefined.m3u', items })
|
||||
|
||||
return output
|
||||
}
|
50
scripts/generators/languagesGenerator.ts
Normal file
|
@ -0,0 +1,50 @@
|
|||
import { Generator } from './generator'
|
||||
import { Collection, Storage, Logger } from '../core'
|
||||
import { Playlist, Language, Stream } from '../models'
|
||||
import { PUBLIC_DIR } from '../constants'
|
||||
|
||||
type LanguagesGeneratorProps = { streams: Collection; logger: Logger }
|
||||
|
||||
export class LanguagesGenerator implements Generator {
|
||||
streams: Collection
|
||||
storage: Storage
|
||||
logger: Logger
|
||||
|
||||
constructor({ streams, logger }: LanguagesGeneratorProps) {
|
||||
this.streams = streams
|
||||
this.storage = new Storage(PUBLIC_DIR)
|
||||
this.logger = logger
|
||||
}
|
||||
|
||||
async generate(): Promise<void> {
|
||||
let streams = this.streams.orderBy(stream => stream.getTitle()).filter(stream => stream.isSFW())
|
||||
|
||||
let languages = new Collection()
|
||||
streams.forEach((stream: Stream) => {
|
||||
languages = languages.concat(stream.languages)
|
||||
})
|
||||
|
||||
languages
|
||||
.uniqBy((language: Language) => language.code)
|
||||
.orderBy((language: Language) => language.name)
|
||||
.forEach(async (language: Language) => {
|
||||
const languageStreams = streams.filter(stream => stream.hasLanguage(language))
|
||||
|
||||
if (languageStreams.isEmpty()) return
|
||||
|
||||
const playlist = new Playlist(languageStreams, { public: true })
|
||||
const filepath = `languages/${language.code}.m3u`
|
||||
await this.storage.save(filepath, playlist.toString())
|
||||
this.logger.info(JSON.stringify({ filepath, count: playlist.streams.count() }))
|
||||
})
|
||||
|
||||
const undefinedStreams = streams.filter(stream => stream.noLanguages())
|
||||
|
||||
if (undefinedStreams.isEmpty()) return
|
||||
|
||||
const playlist = new Playlist(undefinedStreams, { public: true })
|
||||
const filepath = 'languages/undefined.m3u'
|
||||
await this.storage.save(filepath, playlist.toString())
|
||||
this.logger.info(JSON.stringify({ filepath, count: playlist.streams.count() }))
|
||||
}
|
||||
}
|
|
@ -1,33 +0,0 @@
|
|||
const api = require('../core/api')
|
||||
const _ = require('lodash')
|
||||
|
||||
module.exports = async function (streams = []) {
|
||||
streams = _.filter(streams, stream => stream.is_nsfw === false)
|
||||
|
||||
await api.regions.load()
|
||||
const regions = await api.regions.all()
|
||||
|
||||
await api.subdivisions.load()
|
||||
const subdivisions = await api.subdivisions.all()
|
||||
|
||||
const output = []
|
||||
for (const region of regions) {
|
||||
if (region.code === 'INT') continue
|
||||
|
||||
const regionCountries = region.countries
|
||||
let areaCodes = regionCountries.map(code => `c/${code}`)
|
||||
|
||||
const regionSubdivisions = _.filter(
|
||||
subdivisions,
|
||||
s => regionCountries.indexOf(s.country) > -1
|
||||
).map(s => `s/${s.code}`)
|
||||
areaCodes = areaCodes.concat(regionSubdivisions)
|
||||
|
||||
areaCodes.push(`r/${region.code}`)
|
||||
|
||||
let items = _.filter(streams, stream => _.intersection(stream.broadcast_area, areaCodes).length)
|
||||
output.push({ filepath: `regions/${region.code.toLowerCase()}.m3u`, items })
|
||||
}
|
||||
|
||||
return output
|
||||
}
|
51
scripts/generators/regionsGenerator.ts
Normal file
|
@ -0,0 +1,51 @@
|
|||
import { Generator } from './generator'
|
||||
import { Collection, Storage, Logger } from '../core'
|
||||
import { Playlist, Subdivision, Region } from '../models'
|
||||
import { PUBLIC_DIR } from '../constants'
|
||||
|
||||
type RegionsGeneratorProps = {
|
||||
streams: Collection
|
||||
regions: Collection
|
||||
subdivisions: Collection
|
||||
logger: Logger
|
||||
}
|
||||
|
||||
export class RegionsGenerator implements Generator {
|
||||
streams: Collection
|
||||
regions: Collection
|
||||
subdivisions: Collection
|
||||
storage: Storage
|
||||
logger: Logger
|
||||
|
||||
constructor({ streams, regions, subdivisions, logger }: RegionsGeneratorProps) {
|
||||
this.streams = streams
|
||||
this.regions = regions
|
||||
this.subdivisions = subdivisions
|
||||
this.storage = new Storage(PUBLIC_DIR)
|
||||
this.logger = logger
|
||||
}
|
||||
|
||||
async generate(): Promise<void> {
|
||||
let streams = this.streams.orderBy(stream => stream.getTitle()).filter(stream => stream.isSFW())
|
||||
|
||||
this.regions.forEach(async (region: Region) => {
|
||||
if (region.code === 'INT') return
|
||||
|
||||
const regionSubdivisionsCodes = this.subdivisions
|
||||
.filter((subdivision: Subdivision) => region.countries.indexOf(subdivision.country) > -1)
|
||||
.map((subdivision: Subdivision) => `s/${subdivision.code}`)
|
||||
|
||||
const regionCodes = region.countries
|
||||
.map((code: string) => `c/${code}`)
|
||||
.concat(regionSubdivisionsCodes)
|
||||
.add(`r/${region.code}`)
|
||||
|
||||
const regionStreams = streams.filter(stream => stream.broadcastArea.intersects(regionCodes))
|
||||
|
||||
const playlist = new Playlist(regionStreams, { public: true })
|
||||
const filepath = `regions/${region.code.toLowerCase()}.m3u`
|
||||
await this.storage.save(filepath, playlist.toString())
|
||||
this.logger.info(JSON.stringify({ filepath, count: playlist.streams.count() }))
|
||||
})
|
||||
}
|
||||
}
|
14
scripts/models/blocked.ts
Normal file
|
@ -0,0 +1,14 @@
|
|||
type BlockedProps = {
|
||||
channel: string
|
||||
ref: string
|
||||
}
|
||||
|
||||
export class Blocked {
|
||||
channel: string
|
||||
ref: string
|
||||
|
||||
constructor({ ref, channel }: BlockedProps) {
|
||||
this.channel = channel
|
||||
this.ref = ref
|
||||
}
|
||||
}
|
14
scripts/models/category.ts
Normal file
|
@ -0,0 +1,14 @@
|
|||
type CategoryProps = {
|
||||
id: string
|
||||
name: string
|
||||
}
|
||||
|
||||
export class Category {
|
||||
id: string
|
||||
name: string
|
||||
|
||||
constructor({ id, name }: CategoryProps) {
|
||||
this.id = id
|
||||
this.name = name
|
||||
}
|
||||
}
|
79
scripts/models/channel.ts
Normal file
|
@ -0,0 +1,79 @@
|
|||
import { Collection } from '../core'
|
||||
|
||||
type ChannelProps = {
|
||||
id: string
|
||||
name: string
|
||||
alt_names: string[]
|
||||
network: string
|
||||
owners: string[]
|
||||
country: string
|
||||
subdivision: string
|
||||
city: string
|
||||
broadcast_area: string[]
|
||||
languages: string[]
|
||||
categories: string[]
|
||||
is_nsfw: boolean
|
||||
launched: string
|
||||
closed: string
|
||||
replaced_by: string
|
||||
website: string
|
||||
logo: string
|
||||
}
|
||||
|
||||
export class Channel {
|
||||
id: string
|
||||
name: string
|
||||
altNames: Collection
|
||||
network: string
|
||||
owners: Collection
|
||||
country: string
|
||||
subdivision: string
|
||||
city: string
|
||||
broadcastArea: Collection
|
||||
languages: Collection
|
||||
categories: Collection
|
||||
isNSFW: boolean
|
||||
launched: string
|
||||
closed: string
|
||||
replacedBy: string
|
||||
website: string
|
||||
logo: string
|
||||
|
||||
constructor({
|
||||
id,
|
||||
name,
|
||||
alt_names,
|
||||
network,
|
||||
owners,
|
||||
country,
|
||||
subdivision,
|
||||
city,
|
||||
broadcast_area,
|
||||
languages,
|
||||
categories,
|
||||
is_nsfw,
|
||||
launched,
|
||||
closed,
|
||||
replaced_by,
|
||||
website,
|
||||
logo
|
||||
}: ChannelProps) {
|
||||
this.id = id
|
||||
this.name = name
|
||||
this.altNames = new Collection(alt_names)
|
||||
this.network = network
|
||||
this.owners = new Collection(owners)
|
||||
this.country = country
|
||||
this.subdivision = subdivision
|
||||
this.city = city
|
||||
this.broadcastArea = new Collection(broadcast_area)
|
||||
this.languages = new Collection(languages)
|
||||
this.categories = new Collection(categories)
|
||||
this.isNSFW = is_nsfw
|
||||
this.launched = launched
|
||||
this.closed = closed
|
||||
this.replacedBy = replaced_by
|
||||
this.website = website
|
||||
this.logo = logo
|
||||
}
|
||||
}
|
20
scripts/models/country.ts
Normal file
|
@ -0,0 +1,20 @@
|
|||
type CountryProps = {
|
||||
code: string
|
||||
name: string
|
||||
languages: string[]
|
||||
flag: string
|
||||
}
|
||||
|
||||
export class Country {
|
||||
code: string
|
||||
name: string
|
||||
languages: string[]
|
||||
flag: string
|
||||
|
||||
constructor({ code, name, languages, flag }: CountryProps) {
|
||||
this.code = code
|
||||
this.name = name
|
||||
this.languages = languages
|
||||
this.flag = flag
|
||||
}
|
||||
}
|
9
scripts/models/index.ts
Normal file
|
@ -0,0 +1,9 @@
|
|||
export * from './playlist'
|
||||
export * from './blocked'
|
||||
export * from './stream'
|
||||
export * from './category'
|
||||
export * from './channel'
|
||||
export * from './language'
|
||||
export * from './country'
|
||||
export * from './region'
|
||||
export * from './subdivision'
|
14
scripts/models/language.ts
Normal file
|
@ -0,0 +1,14 @@
|
|||
type LanguageProps = {
|
||||
code: string
|
||||
name: string
|
||||
}
|
||||
|
||||
export class Language {
|
||||
code: string
|
||||
name: string
|
||||
|
||||
constructor({ code, name }: LanguageProps) {
|
||||
this.code = code
|
||||
this.name = name
|
||||
}
|
||||
}
|
28
scripts/models/playlist.ts
Normal file
|
@ -0,0 +1,28 @@
|
|||
import { Collection } from '../core'
|
||||
import { Stream } from '../models'
|
||||
|
||||
type PlaylistOptions = {
|
||||
public: boolean
|
||||
}
|
||||
|
||||
export class Playlist {
|
||||
streams: Collection
|
||||
options: {
|
||||
public: boolean
|
||||
}
|
||||
|
||||
constructor(streams: Collection, options?: PlaylistOptions) {
|
||||
this.streams = streams
|
||||
this.options = options || { public: false }
|
||||
}
|
||||
|
||||
toString() {
|
||||
let output = `#EXTM3U\n`
|
||||
|
||||
this.streams.forEach((stream: Stream) => {
|
||||
output += stream.toString(this.options) + `\n`
|
||||
})
|
||||
|
||||
return output
|
||||
}
|
||||
}
|
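A rough sketch of how these models compose (the stream values are invented and the import paths are illustrative; Stream.toString() is defined in stream.ts further down in this diff):

import { Collection } from './scripts/core'
import { Playlist, Stream } from './scripts/models'

// Illustrative stream; filepath/line would normally come from the playlist parser
const stream = new Stream({
  name: 'Example TV',
  url: 'https://example.com/stream.m3u8',
  filepath: 'streams/us.m3u',
  line: 2,
  channel: 'Example.us'
})

const playlist = new Playlist(new Collection([stream]), { public: false })
console.log(playlist.toString()) // "#EXTM3U\n" followed by one entry per stream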
19
scripts/models/region.ts
Normal file

@@ -0,0 +1,19 @@
import { Collection } from '../core'

type RegionProps = {
  code: string
  name: string
  countries: string[]
}

export class Region {
  code: string
  name: string
  countries: Collection

  constructor({ code, name, countries }: RegionProps) {
    this.code = code
    this.name = name
    this.countries = new Collection(countries)
  }
}
177
scripts/models/stream.ts
Normal file

@@ -0,0 +1,177 @@
import { URL, Collection } from '../core'
import { Category, Language } from './index'

type StreamProps = {
  name: string
  url: string
  filepath: string
  line: number
  channel?: string
  httpReferrer?: string
  label?: string
  quality?: string
  userAgent?: string
}

export class Stream {
  channel: string
  filepath: string
  line: number
  httpReferrer: string
  label: string
  name: string
  quality: string
  url: string
  userAgent: string
  logo: string
  broadcastArea: Collection
  categories: Collection
  languages: Collection
  isNSFW: boolean
  groupTitle: string

  constructor({
    channel,
    filepath,
    line,
    httpReferrer,
    label,
    name,
    quality,
    url,
    userAgent
  }: StreamProps) {
    this.channel = channel || ''
    this.filepath = filepath
    this.line = line
    this.httpReferrer = httpReferrer || ''
    this.label = label || ''
    this.name = name
    this.quality = quality || ''
    this.url = url
    this.userAgent = userAgent || ''
    this.logo = ''
    this.broadcastArea = new Collection()
    this.categories = new Collection()
    this.languages = new Collection()
    this.isNSFW = false
    this.groupTitle = 'Undefined'
  }

  normalizeURL() {
    const url = new URL(this.url)

    this.url = url.normalize().toString()
  }

  clone(): Stream {
    return Object.assign(Object.create(Object.getPrototypeOf(this)), this)
  }

  hasName(): boolean {
    return !!this.name
  }

  noName(): boolean {
    return !this.name
  }

  hasChannel() {
    return !!this.channel
  }

  hasCategories(): boolean {
    return this.categories.notEmpty()
  }

  noCategories(): boolean {
    return this.categories.empty()
  }

  hasCategory(category: Category): boolean {
    return this.categories.includes((_category: Category) => _category.id === category.id)
  }

  noLanguages(): boolean {
    return this.languages.empty()
  }

  hasLanguage(language: Language): boolean {
    return this.languages.includes((_language: Language) => _language.code === language.code)
  }

  noBroadcastArea(): boolean {
    return this.broadcastArea.empty()
  }

  isInternational(): boolean {
    return this.broadcastArea.includes('r/INT')
  }

  isSFW(): boolean {
    return this.isNSFW === false
  }

  getTitle(): string {
    let title = `${this.name}`

    if (this.quality) {
      title += ` (${this.quality})`
    }

    if (this.label) {
      title += ` [${this.label}]`
    }

    return title
  }

  data() {
    return {
      channel: this.channel,
      filepath: this.filepath,
      httpReferrer: this.httpReferrer,
      label: this.label,
      name: this.name,
      quality: this.quality,
      url: this.url,
      userAgent: this.userAgent,
      line: this.line
    }
  }

  toJSON() {
    return {
      channel: this.channel,
      url: this.url,
      http_referrer: this.httpReferrer || null,
      user_agent: this.userAgent || null
    }
  }

  toString(options: { public: boolean }) {
    let output = `#EXTINF:-1 tvg-id="${this.channel}"`

    if (options.public) {
      output += ` tvg-logo="${this.logo}" group-title="${this.groupTitle}"`
    }

    if (this.userAgent) {
      output += ` user-agent="${this.userAgent}"`
    }

    output += `,${this.getTitle()}`

    if (this.httpReferrer) {
      output += `\n#EXTVLCOPT:http-referrer=${this.httpReferrer}`
    }

    if (this.userAgent) {
      output += `\n#EXTVLCOPT:http-user-agent=${this.userAgent}`
    }

    output += `\n${this.url}`

    return output
  }
}
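A small sketch of what Stream.toString() produces for a private (non-public) playlist entry; the values below are invented for illustration and the import path is assumed:

import { Stream } from './scripts/models'

const stream = new Stream({
  name: 'Example News',
  url: 'https://example.com/live/index.m3u8',
  filepath: 'streams/us.m3u',
  line: 2,
  channel: 'ExampleNews.us',
  quality: '1080p',
  httpReferrer: 'https://example.com/',
  userAgent: 'Mozilla/5.0'
})

console.log(stream.toString({ public: false }))
// #EXTINF:-1 tvg-id="ExampleNews.us" user-agent="Mozilla/5.0",Example News (1080p)
// #EXTVLCOPT:http-referrer=https://example.com/
// #EXTVLCOPT:http-user-agent=Mozilla/5.0
// https://example.com/live/index.m3u8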
17
scripts/models/subdivision.ts
Normal file

@@ -0,0 +1,17 @@
type SubdivisionProps = {
  code: string
  name: string
  country: string
}

export class Subdivision {
  code: string
  name: string
  country: string

  constructor({ code, name, country }: SubdivisionProps) {
    this.code = code
    this.name = name
    this.country = country
  }
}
@@ -1,13 +0,0 @@
module.exports = function () {
  if (this.group_title) return this.group_title

  if (this.categories.length) {
    return this.categories
      .filter(c => c)
      .map(category => category.name)
      .sort()
      .join(';')
  }

  return 'Undefined'
}

@@ -1,5 +0,0 @@
exports.group_title = require('./group_title')
exports.tvg_id = require('./tvg_id')
exports.tvg_logo = require('./tvg_logo')
exports.tvg_country = require('./tvg_country')
exports.tvg_language = require('./tvg_language')

@@ -1,16 +0,0 @@
module.exports = function () {
  if (this.tvg_country) return this.tvg_country

  if (this.broadcast_area.length) {
    return this.broadcast_area
      .map(item => {
        const [_, code] = item.split('/')
        return code
      })
      .filter(i => i)
      .sort()
      .join(';')
  }

  return ''
}

@@ -1,3 +0,0 @@
module.exports = function () {
  return this.channel || ''
}

@@ -1,13 +0,0 @@
module.exports = function () {
  if (this.tvg_language) return this.tvg_language

  if (this.languages.length) {
    return this.languages
      .map(language => (language ? language.name : null))
      .filter(l => l)
      .sort()
      .join(';')
  }

  return ''
}

@@ -1,5 +0,0 @@
module.exports = function () {
  if (this.tvg_logo) return this.tvg_logo

  return this.logo || ''
}

@@ -1,3 +0,0 @@
module.exports = function (channel) {
  return channel || null
}

@@ -1,3 +0,0 @@
module.exports = function (http_referrer) {
  return http_referrer || null
}

@@ -1,3 +0,0 @@
exports.http_referrer = require('./http_referrer')
exports.user_agent = require('./user_agent')
exports.channel = require('./channel')

@@ -1,3 +0,0 @@
module.exports = function (user_agent) {
  return user_agent || null
}
52
scripts/tables/categoryTable.ts
Normal file

@@ -0,0 +1,52 @@
import { Storage, HTMLTable, Collection, LogParser, LogItem, File } from '../core'
import { Category } from '../models'
import { DATA_DIR, LOGS_DIR, README_DIR } from '../constants'
import { Table } from './table'

export class CategoryTable implements Table {
  constructor() {}

  async make() {
    const dataStorage = new Storage(DATA_DIR)
    const categoriesContent = await dataStorage.json('categories.json')
    const categories = new Collection(categoriesContent).map(data => new Category(data))

    const parser = new LogParser()
    const logsStorage = new Storage(LOGS_DIR)
    const generatorsLog = await logsStorage.read('generators.log')

    let data = new Collection()
    parser
      .parse(generatorsLog)
      .filter((logItem: LogItem) => logItem.filepath.includes('categories/'))
      .forEach((logItem: LogItem) => {
        const file = new File(logItem.filepath)
        const categoryId = file.getFilename()
        const category: Category = categories.first(
          (category: Category) => category.id === categoryId
        )
        data.add([
          category ? category.name : 'ZZ',
          category ? category.name : 'Undefined',
          logItem.count,
          `<code>https://iptv-org.github.io/iptv/${logItem.filepath}</code>`
        ])
      })

    data = data
      .orderBy(item => item[0])
      .map(item => {
        item.shift()
        return item
      })

    const table = new HTMLTable(data.all(), [
      { name: 'Category' },
      { name: 'Channels', align: 'right' },
      { name: 'Playlist', nowrap: true }
    ])

    const readmeStorage = new Storage(README_DIR)
    await readmeStorage.save('_categories.md', table.toString())
  }
}
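For context, a table class like this would presumably be driven from a README-update command; a hypothetical runner (not part of this diff, names purely illustrative) might look like:

import { CategoryTable } from './scripts/tables'

async function main() {
  // Each table reads the downloaded JSON data plus the generator log,
  // then writes a markdown fragment (e.g. _categories.md) into README_DIR
  // for later inclusion into the README.
  await new CategoryTable().make()
}

main()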
81
scripts/tables/countryTable.ts
Normal file

@@ -0,0 +1,81 @@
import { Storage, HTMLTable, Collection, LogParser, LogItem, File } from '../core'
import { Country, Subdivision } from '../models'
import { DATA_DIR, LOGS_DIR, README_DIR } from '../constants'
import { Table } from './table'

export class CountryTable implements Table {
  constructor() {}

  async make() {
    const dataStorage = new Storage(DATA_DIR)

    const countriesContent = await dataStorage.json('countries.json')
    const countries = new Collection(countriesContent).map(data => new Country(data))

    const subdivisionsContent = await dataStorage.json('subdivisions.json')
    const subdivisions = new Collection(subdivisionsContent).map(data => new Subdivision(data))

    const parser = new LogParser()
    const logsStorage = new Storage(LOGS_DIR)
    const generatorsLog = await logsStorage.read('generators.log')

    let data = new Collection()
    parser
      .parse(generatorsLog)
      .filter(
        (logItem: LogItem) =>
          logItem.filepath.includes('countries/') || logItem.filepath.includes('subdivisions/')
      )
      .forEach((logItem: LogItem) => {
        const file = new File(logItem.filepath)
        const code = file.getFilename().toUpperCase()
        const [countryCode, subdivisionCode] = code.split('-') || ['', '']

        if (subdivisionCode) {
          const subdivision = subdivisions.first(
            (subdivision: Subdivision) => subdivision.code === code
          )
          const country = countries.first(
            (country: Country) => country.code === subdivision.country
          )
          data.add([
            `${country.name}/${subdivision.name}`,
            ` ${subdivision.name}`,
            logItem.count,
            `<code>https://iptv-org.github.io/iptv/${logItem.filepath}</code>`
          ])
        } else if (countryCode === 'INT') {
          data.add([
            'ZZ',
            `🌍 International`,
            logItem.count,
            `<code>https://iptv-org.github.io/iptv/${logItem.filepath}</code>`
          ])
        } else {
          const country = countries.first((country: Country) => country.code === countryCode)
          data.add([
            country.name,
            `${country.flag} ${country.name}`,
            logItem.count,
            `<code>https://iptv-org.github.io/iptv/${logItem.filepath}</code>`
          ])
        }
      })

    data = data
      .orderBy(item => item[0])
      .map(item => {
        item.shift()
        return item
      })

    const table = new HTMLTable(data.all(), [
      { name: 'Country' },
      { name: 'Channels', align: 'right' },
      { name: 'Playlist', nowrap: true }
    ])

    const readmeStorage = new Storage(README_DIR)
    await readmeStorage.save('_countries.md', table.toString())
  }
}
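The country/subdivision branching above hinges on the generated playlist filename: a plain country file is presumably named after the country code alone, while a subdivision file carries the full ISO 3166-2 style code. A tiny illustration (values made up):

const code = 'US-CA' // from file.getFilename().toUpperCase()
const [countryCode, subdivisionCode] = code.split('-')
// countryCode === 'US', subdivisionCode === 'CA' -> subdivision branch
// For a plain 'US' filename, subdivisionCode is undefined and the country branch runs.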
4
scripts/tables/index.ts
Normal file

@@ -0,0 +1,4 @@
export * from './categoryTable'
export * from './countryTable'
export * from './languageTable'
export * from './regionTable'
53
scripts/tables/languageTable.ts
Normal file

@@ -0,0 +1,53 @@
import { Storage, HTMLTable, Collection, LogParser, LogItem, File } from '../core'
import { Language } from '../models'
import { DATA_DIR, LOGS_DIR, README_DIR } from '../constants'
import { Table } from './table'

export class LanguageTable implements Table {
  constructor() {}

  async make() {
    const dataStorage = new Storage(DATA_DIR)
    const languagesContent = await dataStorage.json('languages.json')
    const languages = new Collection(languagesContent).map(data => new Language(data))

    const parser = new LogParser()
    const logsStorage = new Storage(LOGS_DIR)
    const generatorsLog = await logsStorage.read('generators.log')

    let data = new Collection()
    parser
      .parse(generatorsLog)
      .filter((logItem: LogItem) => logItem.filepath.includes('languages/'))
      .forEach((logItem: LogItem) => {
        const file = new File(logItem.filepath)
        const languageCode = file.getFilename()
        const language: Language = languages.first(
          (language: Language) => language.code === languageCode
        )

        data.add([
          language ? language.name : 'ZZ',
          language ? language.name : 'Undefined',
          logItem.count,
          `<code>https://iptv-org.github.io/iptv/${logItem.filepath}</code>`
        ])
      })

    data = data
      .orderBy(item => item[0])
      .map(item => {
        item.shift()
        return item
      })

    const table = new HTMLTable(data.all(), [
      { name: 'Language', align: 'left' },
      { name: 'Channels', align: 'right' },
      { name: 'Playlist', align: 'left', nowrap: true }
    ])

    const readmeStorage = new Storage(README_DIR)
    await readmeStorage.save('_languages.md', table.toString())
  }
}
47
scripts/tables/regionTable.ts
Normal file

@@ -0,0 +1,47 @@
import { Storage, HTMLTable, Collection, LogParser, LogItem, File } from '../core'
import { Region } from '../models'
import { DATA_DIR, LOGS_DIR, README_DIR } from '../constants'
import { Table } from './table'

export class RegionTable implements Table {
  constructor() {}

  async make() {
    const dataStorage = new Storage(DATA_DIR)
    const regionsContent = await dataStorage.json('regions.json')
    const regions = new Collection(regionsContent).map(data => new Region(data))

    const parser = new LogParser()
    const logsStorage = new Storage(LOGS_DIR)
    const generatorsLog = await logsStorage.read('generators.log')

    let data = new Collection()
    parser
      .parse(generatorsLog)
      .filter((logItem: LogItem) => logItem.filepath.includes('regions/'))
      .forEach((logItem: LogItem) => {
        const file = new File(logItem.filepath)
        const regionCode = file.getFilename().toUpperCase()
        const region: Region = regions.first((region: Region) => region.code === regionCode)

        if (region) {
          data.add([
            region.name,
            logItem.count,
            `<code>https://iptv-org.github.io/iptv/${logItem.filepath}</code>`
          ])
        }
      })

    data = data.orderBy(item => item[0])

    const table = new HTMLTable(data.all(), [
      { name: 'Region', align: 'left' },
      { name: 'Channels', align: 'right' },
      { name: 'Playlist', align: 'left', nowrap: true }
    ])

    const readmeStorage = new Storage(README_DIR)
    await readmeStorage.save('_regions.md', table.toString())
  }
}
3
scripts/tables/table.ts
Normal file

@@ -0,0 +1,3 @@
export interface Table {
  make(): void
}
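Any new table generator only needs to satisfy this interface; a minimal sketch of an additional implementation (purely illustrative, not part of the commit):

import { Table } from './scripts/tables/table'

export class NoopTable implements Table {
  // make() may be async; the interface only requires that the method exists
  async make(): Promise<void> {
    // read data, build an HTMLTable, save a markdown fragment
  }
}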
2
scripts/tmp/.gitignore
vendored

@@ -1,2 +0,0 @@
*
!.gitignore
1
scripts/types/markdown-include.d.ts
vendored
Normal file

@@ -0,0 +1 @@
declare module 'markdown-include'