import express from 'express';
import cors from 'cors';
import fs from 'fs/promises';
import path from 'path';
import { fileURLToPath } from 'url';
import AsyncLock from 'async-lock';

import { getDebridServices } from './src/debrids.js';
import { isVideo, base64Encode, base64Decode, extractInfoHash } from './src/util.js';
import { ERROR } from './src/const.js';
import { fetchRSSFeeds as fetchIPTFeeds } from './src/iptorrents.js';
import { fetchRSSFeeds as fetchTDayFeeds } from './src/tday.js';
import { fetchRSSFeeds as fetchTorrentingFeeds } from './src/torrenting.js';
import { searchTorrents as searchYBTTorrents } from './src/yourbittorrent.js';
import { searchTorrents as searchEZTVTorrents } from './src/eztv.js';
import { searchTorrents as search1337xTorrents } from './src/1337x.js';

const __dirname = path.dirname(fileURLToPath(import.meta.url));
const app = express();
const lock = new AsyncLock();

app.use(cors({
    origin: '*',
    methods: ['GET', 'POST', 'OPTIONS'],
    allowedHeaders: ['Content-Type', 'Authorization'],
    credentials: true
}));
app.use(express.static(path.join(__dirname, 'public')));
app.options('*', cors());

// Configure endpoint that redirects to index.html
app.get('/configure', (req, res) => {
    res.redirect('/');
});

// Basic manifest endpoint (no API key supplied yet, so configuration is required)
app.get('/manifest.json', (req, res) => {
    res.json({
        id: 'org.community.premiumize',
        version: '1.5.0',
        name: 'premiumize',
        logo: 'https://dl.strem.io/addon-logo.png',
        description: 'Stream movies and series via premiumize',
        resources: ['stream'],
        types: ['movie', 'series'],
        catalogs: [],
        behaviorHints: { configurable: true, configurationRequired: true },
        idPrefixes: ['tt']
    });
});

// Keyed manifest endpoint, served once the user has supplied API keys
app.get('/:apiKeys/manifest.json', (req, res) => {
    res.json({
        id: 'org.community.premiumize',
        version: '1.5.0',
        name: 'premiumize',
        logo: 'https://dl.strem.io/addon-logo.png',
        description: 'Stream movies and series via premiumize',
        resources: ['stream'],
        types: ['movie', 'series'],
        catalogs: [],
        idPrefixes: ['tt']
    });
});
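// Example install flow (hypothetical key shown): a Stremio client loads
//   http://localhost:9518/<premiumizeApiKey>/manifest.json
// and then calls the keyed /stream/... route below with the same key prefix.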
// Convert a human-readable size string to megabytes for sorting
function parseSize(sizeStr) {
    if (!sizeStr) return 0;
    const match = sizeStr.match(/(\d+(\.\d+)?)\s*(KB|MB|GB|TB)/i);
    if (!match) return 0;
    const size = parseFloat(match[1]);
    switch (match[3].toUpperCase()) {
        case 'TB': return size * 1024 * 1024;
        case 'GB': return size * 1024;
        case 'MB': return size;
        case 'KB': return size / 1024; // keep consistent with the [KMGT]B sort regex used below
        default: return 0;
    }
}

// Map a release name to a numeric quality rank (higher is better)
function getQualityValue(name) {
    const quality = name.match(/\b(4k|2160p|1080p|720p|480p)\b/i)?.[1]?.toLowerCase();
    switch (quality) {
        case '4k':
        case '2160p': return 4;
        case '1080p': return 3;
        case '720p': return 2;
        case '480p': return 1;
        default: return 0;
    }
}

async function getCinemetaMetadata(type, id) {
    try {
        const cleanId = id.split(':')[0];
        console.log(`\nšŸŽ¬ Fetching Cinemeta data for ${type} ${cleanId}`);
        const response = await fetch(`https://v3-cinemeta.strem.io/meta/${type}/${cleanId}.json`);
        if (!response.ok) throw new Error('Failed to fetch from Cinemeta');
        const data = await response.json();
        console.log('āœ… Found:', data.meta.name);
        return data;
    } catch (error) {
        console.error('āŒ Cinemeta error:', error);
        return null;
    }
}

// Read the cached entry for one title from {movies|series}/<year>.json,
// serialized behind a per-type/year lock to avoid racing concurrent writes
async function readData(type, id, year) {
    const lockKey = `${type}-${year}`;
    const folder = type === 'movie' ? 'movies' : 'series';
    const yearFile = path.join(__dirname, folder, `${year}.json`);
    try {
        return await lock.acquire(lockKey, async () => {
            console.log(`\nšŸ“‚ Reading data for ${type} year ${year}`);
            const content = await fs.readFile(yearFile, 'utf8');
            const items = JSON.parse(content);
            const item = items.find(m => m.id === id);
            if (item) {
                console.log(`āœ… Found ${type}: ${item.originalTitle}`);
                if (type === 'movie') {
                    console.log(`Found ${item.streams.length} streams`);
                } else {
                    const episodeCount = Object.keys(item.episodes || {}).length;
                    console.log(`Found ${episodeCount} episodes with streams`);
                }
            }
            return item;
        });
    } catch (error) {
        if (error.name === 'AsyncLockTimeout') {
            console.error(`āŒ Lock timeout reading year ${year}`);
            return null;
        }
        if (error.code !== 'ENOENT') {
            console.error(`āŒ Error reading data:`, error);
        }
        return null;
    }
}
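// checkCacheStatuses() below expects each debrid service to report per-hash
// availability. Assumed result shape, inferred from usage (illustrative only):
//   { "<40-char infohash>": { cached: true, service: "Premiumize" }, ... }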
async function checkCacheStatuses(service, hashes, streams) {
    if (!hashes?.length) {
        console.log('No hashes to check');
        return {};
    }
    try {
        console.log(`\nšŸ” Checking cache status for ${hashes.length} hashes with ${service.constructor.name}`);
        console.log('Sample hashes:', hashes.slice(0, 3));
        const startTime = Date.now();
        const results = await service.checkCacheStatuses(hashes);
        console.log(`Cache check completed in ${Date.now() - startTime}ms`);
        const cachedCount = Object.values(results).filter(r => r.cached).length;
        console.log(`Cache check results: ${cachedCount} cached out of ${hashes.length} total`);

        // Opportunistically push one uncached magnet to the service so it is
        // available on future requests
        if (streams && streams.length > 0) {
            const uncachedStream = streams.find(stream => {
                const hash = extractInfoHash(stream.magnetLink);
                return hash && !results[hash]?.cached;
            });
            if (uncachedStream) {
                console.log(`\nšŸ”„ Adding uncached magnet to ${service.constructor.name} for future availability`);
                try {
                    if (service.constructor.name === 'Premiumize') {
                        // Use transfer/create for Premiumize
                        const body = new FormData();
                        body.append('src', uncachedStream.magnetLink);
                        await service.makeRequest('POST', '/transfer/create', { body });
                        console.log('Transfer created in Premiumize');
                    } else {
                        // For other services, use the existing getStreamUrl
                        await service.getStreamUrl(uncachedStream.magnetLink).catch(err => {
                            console.log('Background caching initiated');
                        });
                    }
                } catch (error) {
                    console.log('Background caching attempt made');
                }
            }
        }
        return results;
    } catch (error) {
        console.error('āŒ Cache check error:', error);
        return {};
    }
}

// Query every indexer in parallel and deduplicate the results by info hash
async function getAllStreams(type, id, season, episode) {
    try {
        console.log('\nšŸ”„ Fetching all available streams');
        const cleanId = id.split(':')[0];
        const metadata = await getCinemetaMetadata(type, cleanId).catch(err => {
            console.log('Continuing without Cinemeta metadata');
            return null;
        });

        let searchQuery;
        if (type === 'series') {
            const showTitle = metadata?.meta?.name || cleanId;
            searchQuery = `${showTitle} S${season.toString().padStart(2, '0')}E${episode.toString().padStart(2, '0')}`;
        } else {
            searchQuery = metadata?.meta?.name || cleanId;
        }
        console.log('Search query:', searchQuery);

        const startTime = Date.now();
        const [iptStreams, tdayStreams, torrentingStreams, ybtStreams, eztvStreams, l337xStreams] = await Promise.all([
            fetchIPTFeeds(searchQuery, type).catch(err => {
                console.error('IPTorrents fetch failed:', err);
                return [];
            }),
            fetchTDayFeeds(searchQuery, type).catch(err => {
                console.error('TorrentDay fetch failed:', err);
                return [];
            }),
            fetchTorrentingFeeds(searchQuery, type).catch(err => {
                console.error('Torrenting fetch failed:', err);
                return [];
            }),
            searchYBTTorrents(searchQuery, type).catch(err => {
                console.error('YourBittorrent fetch failed:', err);
                return [];
            }),
            type === 'series'
                ? searchEZTVTorrents(searchQuery, type).catch(err => {
                    console.error('EZTV fetch failed:', err);
                    return [];
                })
                : Promise.resolve([]),
            search1337xTorrents(searchQuery, type).catch(err => {
                console.error('1337x fetch failed:', err);
                return [];
            })
        ]);

        console.log(`\nStream fetch results (${Date.now() - startTime}ms):`);
        console.log('IPTorrents:', iptStreams.length, 'streams');
        console.log('TorrentDay:', tdayStreams.length, 'streams');
        console.log('Torrenting:', torrentingStreams.length, 'streams');
        console.log('YourBittorrent:', ybtStreams.length, 'streams');
        console.log('EZTV:', eztvStreams.length, 'streams');
        console.log('1337x:', l337xStreams.length, 'streams');

        const allStreams = [
            ...iptStreams, ...tdayStreams, ...torrentingStreams,
            ...ybtStreams, ...eztvStreams, ...l337xStreams
        ];
        console.log('\nPre-deduplication total:', allStreams.length, 'streams');

        // Keying a Map by info hash keeps one stream per unique torrent
        const uniqueStreams = Array.from(
            new Map(
                allStreams
                    .filter(stream => stream && stream.magnetLink)
                    .map(stream => {
                        const hash = extractInfoHash(stream.magnetLink);
                        if (!hash) return null;
                        return [hash, stream];
                    })
                    .filter(Boolean)
            ).values()
        );
        console.log('Post-deduplication total:', uniqueStreams.length, 'streams');
        return uniqueStreams;
    } catch (error) {
        console.error('āŒ Error fetching streams:', error);
        return [];
    }
}
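// Illustrative (entirely hypothetical) magnet for the dedup step above: the
// 40-char btih value is what extractInfoHash() is expected to return as the Map key.
//   magnet:?xt=urn:btih:0123456789abcdef0123456789abcdef01234567&dn=Example.1080p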
async function mergeAndSaveStreams(type, existingStreams = [], newStreams = [], id, year, title = '', season = null, episode = null) {
    // Skip the merge entirely once a title already has 100+ stored streams
    if (existingStreams.length >= 100) {
        console.log(`\nšŸ“ Skipping merge - already have ${existingStreams.length} streams in database`);
        return existingStreams;
    }
    const lockKey = `${type}-${year}`;
    const folder = type === 'movie' ? 'movies' : 'series';
    const cleanId = id.split(':')[0];
    try {
        return await lock.acquire(lockKey, async () => {
            if (!newStreams.length) {
                console.log('No new streams to merge');
                return existingStreams;
            }
            console.log(`\nšŸ”„ Merging streams for ${title}`);
            console.log('Existing streams:', existingStreams.length);
            console.log('New streams:', newStreams.length);

            const existingHashes = new Set(
                existingStreams
                    .filter(stream => stream && stream.magnetLink)
                    .map(stream => extractInfoHash(stream.magnetLink))
                    .filter(Boolean)
            );
            const uniqueNewStreams = newStreams
                .filter(stream => stream && stream.magnetLink)
                .filter(stream => {
                    const hash = extractInfoHash(stream.magnetLink);
                    return hash && !existingHashes.has(hash);
                });
            if (!uniqueNewStreams.length) {
                console.log('No unique new streams found');
                return existingStreams;
            }

            const mergedStreams = [...existingStreams, ...uniqueNewStreams];
            const yearFile = path.join(__dirname, folder, `${year}.json`);
            let items = [];
            try {
                const content = await fs.readFile(yearFile, 'utf8');
                items = JSON.parse(content);
                console.log(`Read existing ${year}.json with ${items.length} items`);
            } catch (error) {
                console.log(`Creating new ${year}.json file`);
            }

            const itemIndex = items.findIndex(m => m.id === cleanId);
            if (itemIndex >= 0) {
                console.log('Updating existing entry');
                if (type === 'series') {
                    items[itemIndex].episodes = items[itemIndex].episodes || {};
                    items[itemIndex].episodes[`${season}x${episode}`] = {
                        streams: mergedStreams,
                        lastUpdated: new Date().toISOString()
                    };
                } else {
                    items[itemIndex].streams = mergedStreams;
                    items[itemIndex].lastUpdated = new Date().toISOString();
                }
            } else {
                console.log('Adding new entry');
                const newItem = {
                    id: cleanId,
                    originalTitle: title,
                    addedAt: new Date().toISOString(),
                    lastUpdated: new Date().toISOString()
                };
                if (type === 'series') {
                    newItem.episodes = {
                        [`${season}x${episode}`]: {
                            streams: mergedStreams,
                            lastUpdated: new Date().toISOString()
                        }
                    };
                } else {
                    newItem.streams = mergedStreams;
                }
                items.push(newItem);
            }

            await fs.mkdir(path.join(__dirname, folder), { recursive: true });
            // Write to a temporary file, then rename, so the JSON is replaced atomically
            const tempFile = `${yearFile}.tmp`;
            await fs.writeFile(tempFile, JSON.stringify(items, null, 2));
            await fs.rename(tempFile, yearFile);
            console.log(`āœ… Added ${uniqueNewStreams.length} new streams to ${year}.json`);
            return mergedStreams;
        });
    } catch (error) {
        if (error.name === 'AsyncLockTimeout') {
            console.error(`āŒ Lock timeout for year ${year}, skipping save`);
            return existingStreams;
        }
        console.error('āŒ Error merging and saving streams:', error);
        return existingStreams;
    }
}
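// Illustrative shape of a {movies|series}/<year>.json entry written above
// (field names taken from the code; values are made up):
//   { "id": "tt0000000", "originalTitle": "Example Show",
//     "addedAt": "2024-01-01T00:00:00.000Z", "lastUpdated": "2024-01-01T00:00:00.000Z",
//     "episodes": { "1x2": { "streams": [ ... ], "lastUpdated": "..." } } }
// Movie entries carry a top-level "streams" array instead of "episodes".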
// Main stream endpoint: serve cached streams when possible, fetch fresh ones otherwise
app.get('/:apiKeys/stream/:type/:id/:extra?.json', async (req, res) => {
    const { apiKeys, type, id } = req.params;
    try {
        console.log('\nšŸ“” Stream request received:', { type, id });
        console.log('API Keys:', apiKeys);
        const debridServices = getDebridServices(apiKeys);
        if (!debridServices.length) {
            throw new Error('No valid debrid service configured');
        }

        let realId = id;
        let season, episode;
        if (type === 'series') {
            const parts = id.split(':');
            if (parts.length === 3) {
                [realId, season, episode] = parts;
                season = parseInt(season);
                episode = parseInt(episode);
                console.log('Series request:', { realId, season, episode });
            } else {
                throw new Error('Invalid series ID format');
            }
        }

        let year;
        let metadata;
        try {
            metadata = await getCinemetaMetadata(type, realId);
            year = metadata?.meta
                ? new Date(metadata.meta.released).getFullYear()
                : new Date().getFullYear();
        } catch (error) {
            console.log('Metadata fetch failed, using current year as fallback');
            year = new Date().getFullYear();
        }
        console.log('Year:', year);

        const itemData = await readData(type, realId, year);
        let localStreams = [];
        if (type === 'series') {
            localStreams = itemData?.episodes?.[`${season}x${episode}`]?.streams || [];
        } else {
            localStreams = itemData?.streams || [];
        }
        console.log(`Found ${localStreams.length} streams in cache`);

        if (localStreams.length > 0) {
            console.log('\nšŸ” Processing cached streams');
            const hashes = localStreams
                .filter(stream => stream && stream.magnetLink)
                .map(stream => extractInfoHash(stream.magnetLink))
                .filter(Boolean);
            console.log(`Checking ${hashes.length} hashes for cached streams`);

            const cacheResults = {};
            for (const service of debridServices) {
                console.log(`\nChecking cache with ${service.constructor.name}`);
                const results = await checkCacheStatuses(service, hashes, localStreams);
                Object.entries(results).forEach(([hash, info]) => {
                    if (info.cached) cacheResults[hash] = info;
                });
            }
            console.log(`Found ${Object.keys(cacheResults).length} cached streams`);

            const processedStreams = localStreams
                .filter(stream => stream && stream.magnetLink)
                .map(stream => {
                    const hash = extractInfoHash(stream.magnetLink);
                    if (!hash) return null;
                    const cacheInfo = cacheResults[hash];
                    if (!cacheInfo?.cached) return null;
                    const quality = stream.quality || stream.websiteTitle?.match(/\d{3,4}p|4k|HDTS|CAM/i)?.[0] || '';
                    const size = stream.size || stream.websiteTitle?.match(/\d+(\.\d+)?\s*(GB|MB)/i)?.[0] || '';
                    return {
                        name: ['šŸ§²', quality, size, `āš”ļø ${cacheInfo.service}`, `[${stream.source}]`]
                            .filter(Boolean)
                            .join(' | '),
                        title: stream.filename || stream.websiteTitle,
                        url: `${req.protocol}://${req.get('host')}/${apiKeys}/${base64Encode(stream.magnetLink)}`,
                        service: cacheInfo.service
                    };
                })
                .filter(Boolean);

            // Sort by quality first, then by size (both parsed from the display name)
            processedStreams.sort((a, b) => {
                const qualityDiff = getQualityValue(b.name) - getQualityValue(a.name);
                if (qualityDiff !== 0) return qualityDiff;
                const sizeA = parseSize(a.name.match(/\|\s*([\d.]+\s*[KMGT]B)/i)?.[1]);
                const sizeB = parseSize(b.name.match(/\|\s*([\d.]+\s*[KMGT]B)/i)?.[1]);
                return sizeB - sizeA;
            });

            console.log(`\nāœ… Sending ${processedStreams.length} cached streams`);
            if (processedStreams.length > 0) {
                console.log('Top 3 streams:');
                processedStreams.slice(0, 3).forEach((stream, index) => {
                    console.log(`${index + 1}. ${stream.name}`);
                });
            }
            res.json({ streams: processedStreams });
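            // The response above has already been sent; everything past this
            // point runs fire-and-forget to top up the on-disk stream list.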
            if (hashes.length < 100) {
                console.log('\nšŸ”„ Starting background stream update (less than 100 hashes in database)');
                getAllStreams(type, id, season, episode).then(async newStreams => {
                    if (newStreams.length > 0) {
                        console.log(`Found ${newStreams.length} new streams in background update`);
                        await mergeAndSaveStreams(
                            type, localStreams, newStreams, id, year,
                            metadata?.meta?.name || id, season, episode
                        );
                    }
                }).catch(error => {
                    console.error('Background update error:', error);
                });
            } else {
                console.log('\nšŸ“ Skipping background update - already have 100+ hashes');
            }
        } else {
            console.log('\nšŸ”„ No cached streams available, fetching new streams...');
            const newStreams = await getAllStreams(type, id, season, episode);
            if (newStreams.length > 0) {
                await mergeAndSaveStreams(
                    type, [], newStreams, id, year,
                    metadata?.meta?.name || id, season, episode
                );

                const hashes = newStreams
                    .filter(stream => stream && stream.magnetLink)
                    .map(stream => extractInfoHash(stream.magnetLink))
                    .filter(Boolean);
                console.log(`Checking ${hashes.length} hashes for new streams`);

                const cacheResults = {};
                for (const service of debridServices) {
                    console.log(`\nChecking cache with ${service.constructor.name}`);
                    const results = await checkCacheStatuses(service, hashes, newStreams);
                    Object.entries(results).forEach(([hash, info]) => {
                        if (info.cached) cacheResults[hash] = info;
                    });
                }

                const processedStreams = newStreams
                    .filter(stream => stream && stream.magnetLink)
                    .map(stream => {
                        const hash = extractInfoHash(stream.magnetLink);
                        if (!hash) return null;
                        const cacheInfo = cacheResults[hash];
                        if (!cacheInfo?.cached) return null;
                        return {
                            name: ['šŸ§²', stream.quality, stream.size, `āš”ļø ${cacheInfo.service}`, `[${stream.source}]`]
                                .filter(Boolean)
                                .join(' | '),
                            title: stream.filename || stream.websiteTitle,
                            url: `${req.protocol}://${req.get('host')}/${apiKeys}/${base64Encode(stream.magnetLink)}`,
                            service: cacheInfo.service
                        };
                    })
                    .filter(Boolean);

                // Same ordering as the cached branch: quality first, then size
                processedStreams.sort((a, b) => {
                    const qualityDiff = getQualityValue(b.name) - getQualityValue(a.name);
                    if (qualityDiff !== 0) return qualityDiff;
                    const sizeA = parseSize(a.name.match(/\|\s*([\d.]+\s*[KMGT]B)/i)?.[1]);
                    const sizeB = parseSize(b.name.match(/\|\s*([\d.]+\s*[KMGT]B)/i)?.[1]);
                    return sizeB - sizeA;
                });

                console.log(`\nāœ… Sending ${processedStreams.length} fresh streams`);
                if (processedStreams.length > 0) {
                    console.log('Top 3 streams:');
                    processedStreams.slice(0, 3).forEach((stream, index) => {
                        console.log(`${index + 1}. ${stream.name}`);
                    });
                }
                res.json({ streams: processedStreams });
            } else {
                console.log('No streams found');
                res.json({ streams: [] });
            }
        }
    } catch (error) {
        console.error('āŒ Error processing streams:', error);
        res.json({ streams: [] });
    }
});
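// Each stream's `url` points back at this server, e.g. (illustrative values):
//   http://localhost:9518/<apiKeys>/bWFnbmV0Oj94dD11cm46YnRpaDou...
// The base64 path segment decodes to the magnet link resolved by the route below.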
// Resolve a base64-encoded magnet link to a direct stream URL via the first
// debrid service that succeeds
app.get('/:apiKeys/:magnetLink', async (req, res) => {
    const { apiKeys, magnetLink } = req.params;
    try {
        const debridServices = getDebridServices(apiKeys);
        if (!debridServices.length) {
            throw new Error('No valid debrid service configured');
        }
        console.log('\nšŸ§² Processing magnet request');
        const decodedMagnet = base64Decode(magnetLink);
        if (!decodedMagnet) {
            throw new Error('Invalid magnet link');
        }
        console.log('Decoded magnet link:', decodedMagnet.substring(0, 100) + '...');

        for (const service of debridServices) {
            try {
                console.log(`\nTrying ${service.constructor.name}`);
                const streamUrl = await service.getStreamUrl(decodedMagnet);
                if (!streamUrl) {
                    console.error(`No stream URL returned from ${service.constructor.name}`);
                    continue;
                }
                console.log('Stream URL generated:', streamUrl.substring(0, 100) + '...');
                return res.redirect(streamUrl);
            } catch (error) {
                console.error(`Service ${service.constructor.name} failed:`, error);
                continue;
            }
        }
        throw new Error('All debrid services failed');
    } catch (error) {
        console.error('āŒ Error processing magnet:', error);
        res.status(500).json({ error: 'Failed to process magnet', details: error.message });
    }
});

// Last-resort error handler for anything the route handlers did not catch
app.use((err, req, res, next) => {
    console.error('\nāŒ Unhandled error:', err);
    res.status(500).json({ error: 'Internal server error', details: err.message });
});

const port = process.env.PORT || 9518;
app.listen(port, () => console.log(`\nšŸš€ Addon running at http://localhost:${port}`));

export default app;
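// Quick smoke test (hypothetical IMDb ids; series ids use the tt:season:episode form
// parsed in the stream route above):
//   curl http://localhost:9518/<apiKeys>/stream/movie/tt0000001.json
//   curl http://localhost:9518/<apiKeys>/stream/series/tt0000001:1:2.json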