// NOTE(review): the lines below are residue from a web file-viewer scrape
// (file header, uploader, commit info) and are not JavaScript; preserved
// here as a comment so the module parses.
//   calculus / src / debrids.js
//   no1b4me's picture — "Upload 12 files" — 8740315 verified
//   raw · history · blame · 11 kB
import { ERROR } from './const.js';
/**
 * Common base for debrid-service clients: stores the API token with the
 * service prefix ("dl=", "pr=", ...) stripped off.
 */
class BaseDebrid {
  // Bare API token, prefix marker removed.
  #apiKey;

  /**
   * @param {string} apiKey - Key string, optionally prefixed "<prefix>=".
   * @param {string} prefix - Service prefix to strip (e.g. 'dl', 'pr').
   */
  constructor(apiKey, prefix) {
    const marker = `${prefix}=`;
    this.#apiKey = apiKey.replace(marker, '');
  }

  /** @returns {string} the bare API token. */
  getKey() {
    return this.#apiKey;
  }
}
/**
 * Client for the debrid-link.com v2 API.
 * Keys are supplied as "dl=<token>".
 */
class DebridLink extends BaseDebrid {
  /** @param {string} apiKey - Raw key in "dl=<token>" form. */
  constructor(apiKey) {
    super(apiKey, 'dl');
  }

  /**
   * @param {string} apiKey
   * @returns {boolean} true when the key carries this service's prefix.
   */
  static canHandle(apiKey) {
    return apiKey.startsWith('dl=');
  }

  /**
   * Perform an authenticated request against the debrid-link.com v2 API.
   *
   * @param {string} method - HTTP verb ('GET' | 'POST').
   * @param {string} path - API path, e.g. '/seedbox/cached'.
   * @param {object} [opts] - `query` (object serialized into the query
   *   string), `headers` (merged over the defaults), plus any other fetch
   *   options such as `body`.
   * @returns {Promise<*>} the `value` field of a successful API response.
   * @throws {Error} ERROR.INVALID_API_KEY, ERROR.NOT_PREMIUM, or a generic
   *   API error for unsuccessful responses.
   */
  async #request(method, path, opts = {}) {
    try {
      // BUGFIX: the original built the headers object and then spread
      // `...opts` AFTER it, so a caller-supplied `opts.headers` replaced the
      // entire merged header set (dropping Authorization/Accept). Pull the
      // custom `query` option and caller headers out first, and spread the
      // remaining fetch options BEFORE the headers we construct.
      const { query = {}, headers: callerHeaders = {}, ...fetchOpts } = opts;
      const queryString = new URLSearchParams(query).toString();
      const url = `https://debrid-link.com/api/v2${path}${queryString ? '?' + queryString : ''}`;
      const requestOpts = {
        ...fetchOpts,
        method,
        headers: {
          'User-Agent': 'Stremio',
          'Accept': 'application/json',
          'Authorization': `Bearer ${this.getKey()}`,
          // JSON body is only ever sent on POST.
          ...(method === 'POST' && {
            'Content-Type': 'application/json'
          }),
          // Caller headers win over defaults, as the original intended.
          ...callerHeaders
        }
      };
      console.log('\n🔷 DebridLink Request:', method, path);
      if (requestOpts.body) console.log('Request Body:', requestOpts.body);
      console.log('Request URL:', url);
      console.log('Request Headers:', requestOpts.headers);
      const startTime = Date.now();
      const res = await fetch(url, requestOpts);
      console.log(`Response Time: ${Date.now() - startTime}ms`);
      console.log('Response Status:', res.status);
      const data = await res.json();
      console.log('Response Data:', data);
      if (!data.success) {
        // Map the service's error codes onto the addon's error constants.
        switch (data.error) {
          case 'badToken':
            throw new Error(ERROR.INVALID_API_KEY);
          case 'maxLink':
          case 'maxLinkHost':
          case 'maxData':
          case 'maxDataHost':
          case 'maxTorrent':
          case 'torrentTooBig':
          case 'freeServerOverload':
            throw new Error(ERROR.NOT_PREMIUM);
          default:
            throw new Error(`API Error: ${JSON.stringify(data)}`);
        }
      }
      return data.value;
    } catch (error) {
      console.error('❌ Request failed:', error);
      throw error;
    }
  }

  /**
   * Check which of the given info-hashes are cached on DebridLink.
   *
   * @param {string[]} hashes - Torrent info-hashes.
   * @returns {Promise<object>} map of hash -> { cached, files, fileCount,
   *   service }. Returns {} (never throws) on any failure so other debrid
   *   services can still be consulted.
   */
  async checkCacheStatuses(hashes) {
    try {
      console.log(`\n📡 DebridLink: Batch checking ${hashes.length} hashes`);
      console.log('Sample hashes being checked:', hashes.slice(0, 3));
      const response = await this.#request('GET', '/seedbox/cached', {
        query: { url: hashes.join(',') }
      });
      console.log('Raw cache check response:', response);
      const results = {};
      for (const hash of hashes) {
        const cacheInfo = response[hash];
        results[hash] = {
          cached: !!cacheInfo,
          files: cacheInfo?.files || [],
          fileCount: cacheInfo?.files?.length || 0,
          service: 'DebridLink'
        };
      }
      const cachedCount = Object.values(results).filter(r => r.cached).length;
      console.log(`DebridLink found ${cachedCount} cached torrents out of ${hashes.length}`);
      return results;
    } catch (error) {
      if (error.message === ERROR.INVALID_API_KEY) {
        console.error('❌ Invalid DebridLink API key');
        return {};
      }
      console.error('Cache check failed:', error);
      return {};
    }
  }

  /**
   * Add a magnet to the seedbox and return a direct URL for the largest
   * video file.
   *
   * @param {string} magnetLink - Magnet URI.
   * @returns {Promise<string>} download URL of the selected video file.
   * @throws {Error} when the torrent contains no recognizable video file
   *   or the API call fails.
   */
  async getStreamUrl(magnetLink) {
    try {
      console.log('\n📥 Using DebridLink to process magnet:', magnetLink.substring(0, 100) + '...');
      const data = await this.#request('POST', '/seedbox/add', {
        body: JSON.stringify({
          url: magnetLink,
          async: true
        })
      });
      console.log('Seedbox add response:', data);
      // BUGFIX: with `async: true` the add response may not carry a file
      // list yet; guard against a missing `files` array instead of crashing
      // with a TypeError.
      const files = Array.isArray(data.files) ? data.files : [];
      const videoFiles = files
        .filter(file => /\.(mp4|mkv|avi|mov|webm)$/i.test(file.name))
        .sort((a, b) => b.size - a.size);
      if (!videoFiles.length) {
        console.error('No video files found in torrent');
        throw new Error('No video files found');
      }
      console.log('Selected video file:', videoFiles[0].name);
      return videoFiles[0].downloadUrl;
    } catch (error) {
      console.error('❌ Failed to get stream URL:', error);
      throw error;
    }
  }
}
/**
 * Client for the premiumize.me API.
 * Keys are supplied as "pr=<token>".
 */
class Premiumize extends BaseDebrid {
  #apiUrl = 'https://www.premiumize.me/api';
  // Premiumize caps cache-check requests at ~100 items per call.
  #batchSize = 99;

  /** @param {string} apiKey - Raw key in "pr=<token>" form. */
  constructor(apiKey) {
    super(apiKey, 'pr');
  }

  /**
   * @param {string} apiKey
   * @returns {boolean} true when the key carries this service's prefix.
   */
  static canHandle(apiKey) {
    return apiKey.startsWith('pr=');
  }

  /**
   * Perform an authenticated API request with up to 3 attempts and a 30 s
   * per-attempt timeout.
   *
   * @param {string} method - HTTP verb ('GET' | 'POST').
   * @param {string} path - API path (may already contain a query string).
   * @param {object} [opts] - Extra fetch options; a FormData `body` gets
   *   the API key appended for POST requests.
   * @returns {Promise<object>} parsed JSON response on success.
   * @throws {Error} ERROR.INVALID_API_KEY immediately on a bad key, or the
   *   last error after exhausting all retries.
   */
  async makeRequest(method, path, opts = {}) {
    const retries = 3;
    let lastError;
    // BUGFIX: attach the API key to the FormData body ONCE, up front. The
    // original appended it inside the retry loop, so every retry added a
    // duplicate 'apikey' field to the same body.
    if (method === 'POST' && opts.body instanceof FormData && !opts.body.has('apikey')) {
      opts.body.append('apikey', this.getKey());
    }
    const url = `${this.#apiUrl}${path}`;
    // For GET requests the key travels in the query string instead.
    const finalUrl = method === 'GET'
      ? `${url}${url.includes('?') ? '&' : '?'}apikey=${this.getKey()}`
      : url;
    for (let i = 0; i < retries; i++) {
      const controller = new AbortController();
      const timeout = setTimeout(() => controller.abort(), 30000);
      try {
        console.log(`\n🔷 Premiumize Request (Attempt ${i + 1}/${retries}):`, method, path);
        if (opts.body) console.log('Request Body:', opts.body);
        const startTime = Date.now();
        const response = await fetch(finalUrl, {
          ...opts,
          method,
          signal: controller.signal
        });
        console.log(`Response Time: ${Date.now() - startTime}ms`);
        console.log('Response Status:', response.status);
        const data = await response.json();
        console.log('Response Data:', data);
        if (data.status === 'error') {
          if (data.message === 'Invalid API key.') {
            throw new Error(ERROR.INVALID_API_KEY);
          }
          throw new Error(`API Error: ${data.message}`);
        }
        return data;
      } catch (error) {
        console.log(`Attempt ${i + 1} failed:`, error.message);
        lastError = error;
        // A bad key can never succeed on retry — fail fast.
        if (error.message === ERROR.INVALID_API_KEY) throw error;
        if (i < retries - 1) {
          console.log('Retrying after 2 seconds...');
          await new Promise(r => setTimeout(r, 2000));
        }
      } finally {
        // BUGFIX: the original only cleared the timer on the success path;
        // when fetch()/json() threw, the 30 s abort timer leaked (and could
        // keep the process alive).
        clearTimeout(timeout);
      }
    }
    throw lastError;
  }

  /**
   * Check which of the given info-hashes are cached on Premiumize,
   * batching requests to respect the per-call item limit.
   *
   * @param {string[]} hashes - Torrent info-hashes.
   * @returns {Promise<object>} map of hash -> { cached, files, fileCount,
   *   service }. Returns {} (never throws) on failure.
   */
  async checkCacheStatuses(hashes) {
    try {
      console.log(`\n📡 Premiumize: Batch checking ${hashes.length} hashes`);
      console.log('Processing in batches of', this.#batchSize);
      const results = {};
      const batches = [];
      // Split hashes into batches
      for (let i = 0; i < hashes.length; i += this.#batchSize) {
        batches.push(hashes.slice(i, i + this.#batchSize));
      }
      console.log(`Split into ${batches.length} batches`);
      // Process each batch
      for (let i = 0; i < batches.length; i++) {
        const batch = batches[i];
        console.log(`\nProcessing batch ${i + 1}/${batches.length} (${batch.length} hashes)`);
        const params = new URLSearchParams();
        batch.forEach(hash => params.append('items[]', hash));
        const data = await this.makeRequest('GET', `/cache/check?${params}`);
        // The response array is positional: entry N answers for batch[N].
        batch.forEach((hash, index) => {
          results[hash] = {
            cached: data.response[index],
            files: [],
            fileCount: 0,
            service: 'Premiumize'
          };
        });
        // Add a small delay between batches to avoid rate limiting
        if (i < batches.length - 1) {
          await new Promise(resolve => setTimeout(resolve, 500));
        }
      }
      const cachedCount = Object.values(results).filter(r => r.cached).length;
      console.log(`\nPremiumize found ${cachedCount} cached torrents out of ${hashes.length}`);
      return results;
    } catch (error) {
      console.error('Cache check failed:', error);
      return {};
    }
  }

  /**
   * Resolve a magnet link to a direct URL for the largest video file.
   *
   * @param {string} magnetLink - Magnet URI.
   * @returns {Promise<string>} direct-download link of the selected file.
   * @throws {Error} when the torrent contains no recognizable video file
   *   or the API call fails.
   */
  async getStreamUrl(magnetLink) {
    try {
      console.log('\n📥 Using Premiumize to process magnet:', magnetLink.substring(0, 100) + '...');
      const body = new FormData();
      body.append('src', magnetLink);
      const data = await this.makeRequest('POST', '/transfer/directdl', {
        body
      });
      // BUGFIX: guard against a missing `content` array instead of crashing
      // with a TypeError when the transfer is not ready.
      const content = Array.isArray(data.content) ? data.content : [];
      const videoFiles = content
        .filter(file => /\.(mp4|mkv|avi|mov|webm)$/i.test(file.path))
        .sort((a, b) => b.size - a.size);
      if (!videoFiles.length) {
        console.error('No video files found in torrent');
        throw new Error('No video files found');
      }
      console.log('Selected video file:', videoFiles[0].path);
      return videoFiles[0].link;
    } catch (error) {
      console.error('❌ Failed to get stream URL:', error);
      throw error;
    }
  }
}
/**
 * Build debrid-service clients from a comma-separated key string,
 * e.g. "dl=<token>,pr=<token>".
 *
 * @param {string} apiKeys - Comma-separated, prefix-tagged API keys.
 * @returns {Array<DebridLink|Premiumize>} one client per recognized key;
 *   unrecognized or empty entries are skipped.
 */
export function getDebridServices(apiKeys) {
  // SECURITY FIX: the original logged the raw key string — API keys are
  // credentials and must not end up in logs.
  console.log('\n🔐 Initializing debrid services');
  const services = [];
  for (const entry of apiKeys.split(',')) {
    // BUGFIX: trim each entry so "dl=a, pr=b" (space after the comma)
    // still matches the prefix checks; skip empties from trailing commas.
    const key = entry.trim();
    if (!key) continue;
    if (DebridLink.canHandle(key)) {
      console.log('Adding DebridLink service');
      services.push(new DebridLink(key));
    } else if (Premiumize.canHandle(key)) {
      console.log('Adding Premiumize service');
      services.push(new Premiumize(key));
    } else {
      // Log only the prefix portion, never the token itself.
      console.log('Unknown service key format:', key.slice(0, 3) + '…');
    }
  }
  console.log(`Initialized ${services.length} debrid services`);
  return services;
}
export { DebridLink, Premiumize };