Create app.py
app.py ADDED
@@ -0,0 +1,64 @@
from fastapi import FastAPI, HTTPException
from pydantic import BaseModel
from typing import List, Tuple
import requests
from bs4 import BeautifulSoup

app = FastAPI()

class SearchQuery(BaseModel):
    query: str

class LinkResults(BaseModel):
    four_links_click: List[str]
    direct4_links: List[str]
    magnet_links: List[str]

def scrape_links(query: str) -> List[str]:
    """Search the site and return the URLs of the matching result pages."""
    try:
        # Pass the search term via params so it is URL-encoded correctly.
        response = requests.get("https://www.full4movies.fyi/", params={"s": query}, timeout=30)

        if response.status_code == 200:
            soup = BeautifulSoup(response.text, 'html.parser')
            # Each search result is an <h2> title wrapping the link to its page.
            elements = soup.find_all('h2', class_='blog-entry-title entry-title')
            return [element.find('a')['href'] for element in elements if element.find('a')]
        else:
            raise HTTPException(status_code=response.status_code, detail="Failed to retrieve the webpage.")
    except HTTPException:
        # Re-raise HTTP errors as-is instead of collapsing them into a 500.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))

def find_links(url: str) -> Tuple[List[str], List[str], List[str]]:
    """Fetch a result page and collect its 4links.click, direct4.link, and magnet links."""
    try:
        response = requests.get(url, timeout=30)
        if response.status_code == 200:
            soup = BeautifulSoup(response.text, 'html.parser')
            links = soup.find_all('a', href=True)

            four_links_click = [link['href'] for link in links if '4links.click' in link['href']]
            direct4_links = [link['href'] for link in links if 'direct4.link' in link['href']]
            magnet_links = [link['href'] for link in links if link['href'].startswith('magnet:')]

            return four_links_click, direct4_links, magnet_links
        else:
            raise HTTPException(status_code=response.status_code, detail="Failed to retrieve the selected webpage.")
    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"An error occurred while scraping the selected link: {str(e)}")

@app.post("/search", response_model=List[str])
def search_links(payload: SearchQuery):
    links = scrape_links(payload.query)
    if not links:
        raise HTTPException(status_code=404, detail="No links found.")
    return links

@app.get("/details", response_model=LinkResults)
def get_details(url: str):
    four_links_click, direct4_links, magnet_links = find_links(url)

    return LinkResults(
        four_links_click=four_links_click,
        direct4_links=direct4_links,
        magnet_links=magnet_links
    )
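
For reference, a minimal client sketch for exercising the two endpoints follows. It assumes the file above is saved as app.py, served locally with "uvicorn app:app", and reachable at http://127.0.0.1:8000; the base URL and the sample query are illustrative placeholders, not part of the committed file.

# Minimal client sketch (assumes the API above is running locally via "uvicorn app:app"
# and listening on http://127.0.0.1:8000; the query string is a placeholder).
import requests

BASE_URL = "http://127.0.0.1:8000"  # assumed local address

# POST /search with a JSON body matching the SearchQuery model.
search = requests.post(f"{BASE_URL}/search", json={"query": "example"}, timeout=30)
search.raise_for_status()
result_pages = search.json()
print("Result pages:", result_pages)

# GET /details for the first result, passing its URL as a query parameter.
if result_pages:
    details = requests.get(f"{BASE_URL}/details", params={"url": result_pages[0]}, timeout=30)
    details.raise_for_status()
    print("Collected links:", details.json())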