sekerlipencere committed
Commit
44e4bcd
1 Parent(s): 4bd6a95

Create iceriktoplama.py

Files changed (1)
  1. iceriktoplama.py +78 -0
iceriktoplama.py ADDED
@@ -0,0 +1,78 @@
import httpx
from bs4 import BeautifulSoup
import json
from concurrent.futures import ThreadPoolExecutor, as_completed
from tqdm import tqdm
import multiprocessing
import time
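
# Third-party requirements: pip install httpx beautifulsoup4 tqdm
# (BeautifulSoup is imported as bs4, provided by the beautifulsoup4 package)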

# Path to the txt file that contains the URLs
file_path = '/content/url.txt'

# Read the URLs from the txt file
with open(file_path, 'r') as file:
    url_list = [line.strip() for line in file if line.strip()]

# Attribution string embedded in every record (the 'atıf' field); kept in
# Turkish since it is part of the dataset content
disclaimer = "'zynp_msgdata' veri seti 'sekerlipencere' tarafından hazırlanmıştır."
cpu_count = multiprocessing.cpu_count()
max_workers = cpu_count * 300  # 300 threads per CPU core: the work is I/O-bound, so threads mostly wait on the network

def fetch_content(url):
    try:
        with httpx.Client(timeout=5) as client:
            response = client.get(url)
            response.raise_for_status()
            return url, response.text
    except Exception as e:
        print(f"Failed to fetch URL: {url} - Error: {e}")
        return url, None
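
# Note: each call above opens and closes a fresh httpx.Client, i.e. a new
# connection per URL. httpx.Client is thread-safe, so a single shared
# module-level client would let workers reuse connections, e.g. a
# hypothetical `client = httpx.Client(timeout=5)` defined once at import
# time and used as `client.get(url)` inside fetch_content.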

def process_content(url_content):
    url, webpage = url_content
    if webpage is None:
        return None

    try:
        soup = BeautifulSoup(webpage, 'html.parser')
        soru_div = soup.find('div', class_='p-title')
        if soru_div:
            soru = soru_div.get_text(strip=True)
            ayrintili_soru_div = soup.find('div', class_='bbWrapper')
            ayrintili_soru = ayrintili_soru_div.get_text(strip=True) if ayrintili_soru_div else ''
            cevap_divs = soup.find_all('div', class_='bbWrapper')[1:]
            cevaplar = [cevap.get_text(strip=True) for cevap in cevap_divs]
            return {
                'soru': soru,
                'url': url,
                'ayrintili_soru': ayrintili_soru,
                'cevaplar': cevaplar,
                'atıf': disclaimer
            }
    except Exception as e:
        print(f"Error while processing content: {url} - Error: {e}")
        return None
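
# The selectors above assume a XenForo-style forum page: 'p-title' wraps
# the thread title and 'bbWrapper' wraps each post body (the first one is
# the opening post, the remainder are replies). Other forum engines use
# different class names and would need different selectors.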

def write_to_json(result):
    if result:
        # Append one JSON object per line (JSON Lines); pretty-printing with
        # indent would spread each object over several lines and break
        # line-based parsing of sonuc.json
        with open('sonuc.json', 'a', encoding='utf-8') as f:
            json.dump(result, f, ensure_ascii=False)
            f.write('\n')
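
# A minimal sketch for loading the results back, assuming the JSON Lines
# layout written above:
#     with open('sonuc.json', encoding='utf-8') as f:
#         records = [json.loads(line) for line in f if line.strip()]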

def main():
    batch_size = 500  # Number of requests submitted to the pool per batch
    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        for i in range(0, len(url_list), batch_size):
            batch_urls = url_list[i:i+batch_size]
            futures = [executor.submit(fetch_content, url) for url in batch_urls]
            with tqdm(total=len(futures), desc="Processing batch", unit="URL") as pbar:
                for future in as_completed(futures):
                    url_content = future.result()
                    result = process_content(url_content)
                    write_to_json(result)
                    pbar.update(1)

if __name__ == "__main__":
    start_time = time.time()
    main()
    end_time = time.time()
    print(f"Total time: {end_time - start_time:.2f} seconds")
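
Usage sketch: the script expects /content/url.txt to hold one forum-thread URL per line; the URLs below are hypothetical placeholders, not real targets.

    https://example-forum.com/threads/ornek-konu.12345/
    https://example-forum.com/threads/baska-konu.67890/

Running `python iceriktoplama.py` then appends one JSON record per successfully scraped thread to sonuc.json and prints the total runtime.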