import express from 'express';
import cors from 'cors';
import fs from 'fs/promises';
import path from 'path';
import { fileURLToPath } from 'url';
import AsyncLock from 'async-lock';

import { getDebridServices } from './src/debrids.js';
import { isVideo, base64Encode, base64Decode, extractInfoHash } from './src/util.js';
import { ERROR } from './src/const.js';
import { fetchRSSFeeds as fetchIPTFeeds } from './src/iptorrents.js';
import { fetchRSSFeeds as fetchTDayFeeds } from './src/tday.js';
import { fetchRSSFeeds as fetchTorrentingFeeds } from './src/torrenting.js';

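// Resolve this module's directory (ESM has no __dirname built in) and create a
// shared lock used to serialise access to the per-year JSON files on disk.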
const __dirname = path.dirname(fileURLToPath(import.meta.url));
const app = express();
const lock = new AsyncLock();

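// Open CORS so Stremio clients on any origin can reach the addon. Note that
// browsers ignore Allow-Credentials when the allowed origin is the '*' wildcard.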
app.use(cors({
    origin: '*',
    methods: ['GET', 'POST', 'OPTIONS'],
    allowedHeaders: ['Content-Type', 'Authorization'],
    credentials: true
}));

app.use(express.static(path.join(__dirname, 'public')));
app.options('*', cors());

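/**
 * Fetch movie metadata (title, release date, etc.) from Stremio's public
 * Cinemeta service. Returns null on any failure so callers can degrade
 * gracefully instead of throwing.
 */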
async function getCinemetaMetadata(imdbId) {
    try {
        console.log(`\n🎬 Fetching Cinemeta data for ${imdbId}`);
        const response = await fetch(`https://v3-cinemeta.strem.io/meta/movie/${imdbId}.json`);
        if (!response.ok) throw new Error('Failed to fetch from Cinemeta');
        const data = await response.json();
        console.log('✅ Found:', data.meta.name);
        return data;
    } catch (error) {
        console.error('❌ Cinemeta error:', error);
        return null;
    }
}

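/**
 * Read the cached entry for one movie from movies/<year>.json, taking the
 * per-year lock so reads don't interleave with writes from mergeAndSaveStreams.
 * Returns the movie record, or null/undefined if the file or entry is missing.
 */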
async function readMovieData(imdbId, year) {
    const lockKey = `year-${year}`;
    const yearFile = path.join(__dirname, 'movies', `${year}.json`);

    try {
        return await lock.acquire(lockKey, async () => {
            console.log(`\n📖 Reading data for year ${year}`);
            const content = await fs.readFile(yearFile, 'utf8');
            const movies = JSON.parse(content);
            const movie = movies.find(m => m.imdbId === imdbId);
            if (movie) {
                console.log(`✅ Found movie: ${movie.originalTitle}`);
                console.log(`Found ${movie.streams.length} streams`);
            }
            return movie;
        });
    } catch (error) {
        if (error.name === 'AsyncLockTimeout') {
            console.error(`❌ Lock timeout reading year ${year}`);
            return null;
        }
        if (error.code !== 'ENOENT') {
            console.error(`❌ Error reading movie data:`, error);
        }
        return null;
    }
}

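/**
 * Query all three RSS providers in parallel for an IMDB ID. Each provider
 * failure is swallowed (logged and replaced by an empty list) so one bad
 * tracker can't sink the whole request. Results are deduplicated by info hash.
 */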
async function getAllStreams(imdbId) {
    try {
        console.log('\n🔍 Fetching all available streams');
        console.log('Fetching from RSS feeds for IMDB ID:', imdbId);

        const startTime = Date.now();
        const [iptStreams, tdayStreams, torrentingStreams] = await Promise.all([
            fetchIPTFeeds(imdbId).catch(err => {
                console.error('IPTorrents fetch failed:', err);
                return [];
            }),
            fetchTDayFeeds(imdbId).catch(err => {
                console.error('TorrentDay fetch failed:', err);
                return [];
            }),
            fetchTorrentingFeeds(imdbId).catch(err => {
                console.error('Torrenting fetch failed:', err);
                return [];
            })
        ]);

        console.log(`\nStream fetch results (${Date.now() - startTime}ms):`);
        console.log('IPTorrents:', iptStreams.length, 'streams');
        console.log('TorrentDay:', tdayStreams.length, 'streams');
        console.log('Torrenting:', torrentingStreams.length, 'streams');

        const allStreams = [...iptStreams, ...tdayStreams, ...torrentingStreams];
        console.log('\nPre-deduplication total:', allStreams.length, 'streams');

        // Deduplicate by info hash; streams without a parsable hash are dropped
        // rather than silently collapsing onto the same undefined map key.
        const uniqueStreams = Array.from(
            new Map(
                allStreams
                    .filter(Boolean)
                    .map(stream => [extractInfoHash(stream.magnetLink), stream])
                    .filter(([hash]) => hash)
            ).values()
        );

        console.log('Post-deduplication total:', uniqueStreams.length, 'streams');

        if (uniqueStreams.length > 0) {
            console.log('\nSample stream data:', {
                magnetLink: uniqueStreams[0].magnetLink.substring(0, 100) + '...',
                filename: uniqueStreams[0].filename,
                quality: uniqueStreams[0].quality,
                size: uniqueStreams[0].size,
                source: uniqueStreams[0].source
            });
        }

        return uniqueStreams;
    } catch (error) {
        console.error('❌ Error fetching streams:', error);
        return [];
    }
}

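/**
 * Ask a single debrid service which of the given info hashes it already has
 * cached. Returns a map of hash -> cache info (with at least `cached` and
 * `service` fields), or an empty object on failure.
 */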
async function checkCacheStatuses(service, hashes) {
    if (!hashes?.length) {
        console.log('No hashes to check');
        return {};
    }

    try {
        console.log(`\n🔍 Checking cache status for ${hashes.length} hashes with ${service.constructor.name}`);
        console.log('Sample hashes:', hashes.slice(0, 3));

        const startTime = Date.now();
        const results = await service.checkCacheStatuses(hashes);
        console.log(`Cache check completed in ${Date.now() - startTime}ms`);

        const cachedCount = Object.values(results).filter(r => r.cached).length;
        console.log(`Cache check results: ${cachedCount} cached out of ${hashes.length} total`);

        const sampleHash = hashes[0];
        if (sampleHash && results[sampleHash]) {
            console.log('Sample cache result:', {
                hash: sampleHash,
                result: results[sampleHash]
            });
        }

        return results;
    } catch (error) {
        console.error('❌ Cache check error:', error);
        return {};
    }
}

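/**
 * Merge newly discovered streams into movies/<year>.json under the per-year
 * lock, deduplicating against existing entries by info hash. The file is
 * written atomically (temp file + rename). Always returns a stream list, even
 * on failure, so the caller can keep serving what it already had.
 */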
async function mergeAndSaveStreams(existingStreams = [], newStreams = [], imdbId, year, movieTitle = '') {
    const lockKey = `year-${year}`;

    try {
        return await lock.acquire(lockKey, async () => {
            if (!newStreams.length) {
                console.log('No new streams to merge');
                return existingStreams;
            }

            console.log(`\n🔄 Merging streams for ${movieTitle}`);
            console.log('Existing streams:', existingStreams.length);
            console.log('New streams:', newStreams.length);

            const existingHashes = new Set(
                existingStreams
                    .map(stream => extractInfoHash(stream.magnetLink))
                    .filter(Boolean)
            );

            const uniqueNewStreams = newStreams.filter(stream => {
                const hash = extractInfoHash(stream.magnetLink);
                return hash && !existingHashes.has(hash);
            });

            if (!uniqueNewStreams.length) {
                console.log('No unique new streams found');
                return existingStreams;
            }

            console.log(`Found ${uniqueNewStreams.length} new unique streams`);

            const mergedStreams = [...existingStreams, ...uniqueNewStreams];
            const yearFile = path.join(__dirname, 'movies', `${year}.json`);

            let movies = [];
            try {
                const content = await fs.readFile(yearFile, 'utf8');
                movies = JSON.parse(content);
                console.log(`Read existing ${year}.json with ${movies.length} movies`);
            } catch (error) {
                console.log(`Creating new ${year}.json file`);
            }

            const movieIndex = movies.findIndex(m => m.imdbId === imdbId);
            if (movieIndex >= 0) {
                console.log('Updating existing movie entry');
                movies[movieIndex].streams = mergedStreams;
                movies[movieIndex].lastUpdated = new Date().toISOString();
            } else {
                console.log('Adding new movie entry');
                movies.push({
                    imdbId,
                    streams: mergedStreams,
                    originalTitle: movieTitle,
                    addedAt: new Date().toISOString(),
                    lastUpdated: new Date().toISOString()
                });
            }

            await fs.mkdir(path.join(__dirname, 'movies'), { recursive: true });

            // Write to a temp file and rename, so readers never see a half-written JSON file.
            const tempFile = `${yearFile}.tmp`;
            await fs.writeFile(tempFile, JSON.stringify(movies, null, 2));
            await fs.rename(tempFile, yearFile);

            console.log(`✅ Added ${uniqueNewStreams.length} new streams to ${year}.json`);
            return mergedStreams;
        });
    } catch (error) {
        if (error.name === 'AsyncLockTimeout') {
            console.error(`❌ Lock timeout for year ${year}, skipping save`);
            return existingStreams;
        }
        console.error('❌ Error merging and saving streams:', error);
        return existingStreams;
    }
}

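// Stremio addon manifest: declares that this addon serves movie streams only.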
app.get('/:apiKeys/manifest.json', (req, res) => {
    const manifest = {
        id: 'org.multirss',
        version: '1.0.0',
        name: 'Multi RSS',
        description: 'Stream movies via Debrid services',
        resources: ['stream'],
        types: ['movie'],
        catalogs: []
    };
    res.json(manifest);
});

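// Main stream handler. Flow: resolve metadata, load cached streams for the
// movie's release year, verify which are cached on the user's debrid services,
// fall back to a fresh RSS fetch if nothing usable is cached, and otherwise
// refresh the on-disk cache in the background.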
app.get('/:apiKeys/stream/:type/:id.json', async (req, res) => {
    const { apiKeys, type, id } = req.params;

    try {
        console.log('\n📡 Stream request received:', { type, id });
        console.log('API Keys:', apiKeys);

        const debridServices = getDebridServices(apiKeys);
        if (!debridServices.length) {
            throw new Error('No valid debrid service configured');
        }

        const metadata = await getCinemetaMetadata(id);
        if (!metadata?.meta) return res.json({ streams: [] });

        const year = new Date(metadata.meta.released).getFullYear();
        console.log('Movie year:', year);

        const movieData = await readMovieData(id, year);
        const localStreams = movieData?.streams || [];
        console.log(`Found ${localStreams.length} streams in cache`);

        let processedStreams = [];

        if (localStreams.length > 0) {
            console.log('\n💾 Processing cached streams');
            const hashes = localStreams.map(stream => extractInfoHash(stream.magnetLink)).filter(Boolean);
            console.log(`Checking ${hashes.length} hashes for cached streams`);

            const cacheResults = {};
            for (const service of debridServices) {
                console.log(`\nChecking cache with ${service.constructor.name}`);
                const results = await checkCacheStatuses(service, hashes);
                Object.entries(results).forEach(([hash, info]) => {
                    if (info.cached) cacheResults[hash] = info;
                });
            }

            console.log(`Found ${Object.keys(cacheResults).length} cached streams`);

            processedStreams = localStreams
                .map(stream => {
                    const hash = extractInfoHash(stream.magnetLink);
                    const cacheInfo = cacheResults[hash];
                    if (!cacheInfo?.cached) return null;

                    // Fall back to parsing quality/size out of the website title
                    // when the feed didn't provide them as separate fields.
                    const quality = stream.quality || stream.websiteTitle?.match(/\d{3,4}p|4k|HDTS|CAM/i)?.[0] || '';
                    const size = stream.size || stream.websiteTitle?.match(/\d+(\.\d+)?\s*(GB|MB)/i)?.[0] || '';

                    return {
                        name: ['🧲', quality, size, `⚡️ ${cacheInfo.service}`, `[${stream.source}]`]
                            .filter(Boolean)
                            .join(' | '),
                        title: stream.filename,
                        url: `${req.protocol}://${req.get('host')}/${apiKeys}/${base64Encode(stream.magnetLink)}`,
                        service: cacheInfo.service
                    };
                })
                .filter(Boolean);
        }

        if (processedStreams.length === 0) {
            console.log('\n🔄 No cached streams available, fetching new streams...');
            const newStreams = await getAllStreams(id);

            if (newStreams.length > 0) {
                await mergeAndSaveStreams([], newStreams, id, year, metadata.meta.name);

                const hashes = newStreams.map(stream => extractInfoHash(stream.magnetLink)).filter(Boolean);
                console.log(`Checking ${hashes.length} hashes for new streams`);

                const cacheResults = {};
                for (const service of debridServices) {
                    console.log(`\nChecking cache with ${service.constructor.name}`);
                    const results = await checkCacheStatuses(service, hashes);
                    Object.entries(results).forEach(([hash, info]) => {
                        if (info.cached) cacheResults[hash] = info;
                    });
                }

                processedStreams = newStreams
                    .map(stream => {
                        const hash = extractInfoHash(stream.magnetLink);
                        const cacheInfo = cacheResults[hash];
                        if (!cacheInfo?.cached) return null;

                        return {
                            name: ['🧲', stream.quality, stream.size, `⚡️ ${cacheInfo.service}`, `[${stream.source}]`]
                                .filter(Boolean)
                                .join(' | '),
                            title: stream.filename,
                            url: `${req.protocol}://${req.get('host')}/${apiKeys}/${base64Encode(stream.magnetLink)}`,
                            service: cacheInfo.service
                        };
                    })
                    .filter(Boolean);
            }
        } else {
            console.log('\n🔄 Starting background stream update');
            getAllStreams(id).then(async newStreams => {
                if (newStreams.length > 0) {
                    console.log(`Found ${newStreams.length} new streams`);
                    await mergeAndSaveStreams(localStreams, newStreams, id, year, metadata.meta.name);
                }
            }).catch(error => {
                console.error('Background update error:', error);
            });
        }

        // Sort best quality first; "4k" counts as 2160p, unknown qualities sort last.
        processedStreams.sort((a, b) => {
            const getQuality = name => {
                const quality = name.match(/\d{3,4}p|4k/i)?.[0]?.toLowerCase();
                if (quality === '4k') return 2160;
                return parseInt(quality) || 0;
            };
            return getQuality(b.name) - getQuality(a.name);
        });

        console.log(`\n✅ Sending ${processedStreams.length} streams`);
        if (processedStreams.length > 0) {
            console.log('Sample processed stream:', {
                name: processedStreams[0].name,
                title: processedStreams[0].title,
                service: processedStreams[0].service
            });
        }

        res.json({ streams: processedStreams });
    } catch (error) {
        console.error('❌ Error processing streams:', error);
        res.json({ streams: [] });
    }
});

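// Playback endpoint. :magnetLink is the base64-encoded magnet URI embedded in
// the stream URLs built above; decode it, resolve it through the first debrid
// service that succeeds, and redirect the player to the resulting stream URL.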
app.get('/:apiKeys/:magnetLink', async (req, res) => {
    const { apiKeys, magnetLink } = req.params;

    try {
        const debridServices = getDebridServices(apiKeys);
        if (!debridServices.length) {
            throw new Error('No valid debrid service configured');
        }

        console.log('\n🧲 Processing magnet request');
        const decodedMagnet = base64Decode(magnetLink);
        console.log('Decoded magnet link:', decodedMagnet.substring(0, 100) + '...');

        // Try each configured service in order; fall through to the next on failure.
        for (const service of debridServices) {
            try {
                console.log(`\nTrying ${service.constructor.name}`);
                const streamUrl = await service.getStreamUrl(decodedMagnet);
                console.log('Stream URL generated:', streamUrl.substring(0, 100) + '...');
                return res.redirect(streamUrl);
            } catch (error) {
                console.error(`Service ${service.constructor.name} failed:`, error);
            }
        }

        throw new Error('All debrid services failed');
    } catch (error) {
        console.error('❌ Error processing magnet:', error);
        res.status(500).json({ error: 'Failed to process magnet', details: error.message });
    }
});

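// Last-resort error handler for anything the route handlers didn't catch.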
app.use((err, req, res, next) => {
    console.error('\n❌ Unhandled error:', err);
    res.status(500).json({ error: 'Internal server error', details: err.message });
});

const port = process.env.PORT || 9518;
app.listen(port, () => console.log(`\n🚀 Addon running at http://localhost:${port}`));