ChandimaPrabath committed on
Commit f54c5d1 · 1 Parent(s): ab26803
Files changed (2)
  1. app.py +15 -12
  2. hf_scrapper.py +20 -7
app.py CHANGED
@@ -7,8 +7,6 @@ from hf_scrapper import download_film, download_episode, get_system_proxies, get
 from indexer import indexer
 from tvdb import fetch_and_cache_json
 import re
-import psutil
-import shutil
 
 app = Flask(__name__)
 
@@ -112,6 +110,9 @@ def bytes_to_human_readable(num, suffix="B"):
         num /= 1024.0
     return f"{num:.1f} Y{suffix}"
 
+def encode_episodeid(title,season,episode):
+    f"{title}_{season}_{episode}"
+
 def get_all_tv_shows(indexed_cache):
     """Get all TV shows from the indexed cache structure JSON file."""
     tv_shows = {}
@@ -195,6 +196,7 @@ def get_tv_show_api():
     title = request.args.get('title')
     season = request.args.get('season')
     episode = request.args.get('episode')
+
     if not title or not season or not episode:
         return jsonify({"error": "Title, season, and episode parameters are required"}), 400
 
@@ -203,12 +205,13 @@ def get_tv_show_api():
         tv_store_data = json.load(json_file)
 
     # Check if the episode is already cached
-    episode_key = f"{title}_S{season}_E{episode}"
-    if episode_key in tv_store_data:
-        cache_path = tv_store_data[episode_key]
-        if os.path.exists(cache_path):
-            return send_from_directory(os.path.dirname(cache_path), os.path.basename(cache_path))
-
+    if title in tv_store_data and season in tv_store_data[title]:
+        for ep in tv_store_data[title][season]:
+            if episode in ep:
+                cache_path = tv_store_data[title][season][ep]
+                if os.path.exists(cache_path):
+                    return send_from_directory(os.path.dirname(cache_path), os.path.basename(cache_path))
+
     tv_path = find_tv_path(file_structure, title)
 
     if not tv_path:
@@ -220,9 +223,9 @@ def get_tv_show_api():
     for sub_directory in directory['contents']:
         if sub_directory['type'] == 'directory' and title.lower() in sub_directory['path'].lower():
             for season_dir in sub_directory['contents']:
-                if season_dir['type'] == 'directory':
+                if season_dir['type'] == 'directory' and season in season_dir['path']:
                     for episode_file in season_dir['contents']:
-                        if episode_file['type'] == 'file':
+                        if episode_file['type'] == 'file' and episode in episode_file['path']:
                             episode_path = episode_file['path']
                             break
 
@@ -232,7 +235,7 @@ def get_tv_show_api():
     cache_path = os.path.join(CACHE_DIR, episode_path)
     file_url = f"https://huggingface.co/{REPO}/resolve/main/{episode_path}"
     proxies = get_system_proxies()
-    episode_id = f"{title}_S{season}_E{episode}"
+    episode_id = encode_episodeid(title,season,episode)
 
     # Start the download in a separate thread if not already downloading
     if episode_id not in download_threads or not download_threads[episode_id].is_alive():
@@ -266,7 +269,7 @@ def get_episode_id_api():
     episode = request.args.get('episode')
     if not title or not season or not episode:
         return jsonify({"error": "Title, season, and episode parameters are required"}), 400
-    episode_id = f"{title}_S{season}_E{episode}"
+    episode_id = encode_episodeid(title,season,episode)
     return jsonify({"episode_id": episode_id})
 
 @app.route('/api/cache/size', methods=['GET'])
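
Note on the new app.py logic: as committed, encode_episodeid evaluates the f-string but has no return statement, so callers receive None, and the cache lookup matches the requested season key exactly while matching the episode by substring against the stored file name. A minimal sketch of the presumably intended behaviour is shown below; find_cached_episode is a hypothetical helper added here for illustration, and it assumes the nested tv_store.json layout written by update_tv_store_json in hf_scrapper.py.

import os

def encode_episodeid(title, season, episode):
    # The committed version omits this return, so it returns None;
    # the intended result is presumably the joined identifier string.
    return f"{title}_{season}_{episode}"

def find_cached_episode(tv_store_data, title, season, episode):
    # tv_store_data is assumed to be the nested dict loaded from tv_store.json:
    # {title: {"Season 1": {"<episode file name>": "<cache path>"}}}
    if title in tv_store_data and season in tv_store_data[title]:
        for ep_name, cache_path in tv_store_data[title][season].items():
            # Match the episode query as a substring of the stored file name,
            # mirroring the `if episode in ep` check in get_tv_show_api.
            if episode in ep_name and os.path.exists(cache_path):
                return cache_path
    return None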
hf_scrapper.py CHANGED
@@ -168,24 +168,37 @@ def download_episode(file_url, token, cache_path, proxies, episode_id, title, ch
 
 def update_tv_store_json(title, cache_path):
     """
-    Updates the tv store JSON with the new file.
+    Updates the TV store JSON with the new file, organizing by title, season, and episode.
 
     Args:
-        title (str): The title of the film.
+        title (str): The title of the TV show.
         cache_path (str): The local path where the file is saved.
     """
     TV_STORE_JSON_PATH = os.path.join(CACHE_DIR, "tv_store.json")
 
-    film_store_data = {}
+    tv_store_data = {}
     if os.path.exists(TV_STORE_JSON_PATH):
         with open(TV_STORE_JSON_PATH, 'r') as json_file:
-            film_store_data = json.load(json_file)
+            tv_store_data = json.load(json_file)
 
-    film_store_data[title] = cache_path
+    # Extract season and episode information from the cache_path
+    season_part = os.path.basename(os.path.dirname(cache_path))  # Extracts 'Season 1'
+    episode_part = os.path.basename(cache_path)  # Extracts 'Grand Blue Dreaming - S01E01 - Deep Blue HDTV-720p.mp4'
+
+    # Create the structure if not already present
+    if title not in tv_store_data:
+        tv_store_data[title] = {}
+
+    if season_part not in tv_store_data[title]:
+        tv_store_data[title][season_part] = {}
+
+    # Assuming episode_part is unique for each episode within a season
+    tv_store_data[title][season_part][episode_part] = cache_path
 
     with open(TV_STORE_JSON_PATH, 'w') as json_file:
-        json.dump(film_store_data, json_file, indent=2)
-        print(f'TV store updated with {title}.')
+        json.dump(tv_store_data, json_file, indent=2)
+
+    print(f'TV store updated with {title}, {season_part}, {episode_part}.')
 
 ###############################################################################
 def get_file_structure(repo, token, path="", proxies=None):
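
For illustration, the keys update_tv_store_json now derives from cache_path and the resulting tv_store.json shape. The show name, season folder, and episode file name below are the examples from the code comments; the cache path itself is a made-up value, not taken from the repository.

import os

# Hypothetical cache path, following the 'Season N/<episode file>' layout assumed above.
cache_path = "cache/tv/Grand Blue Dreaming/Season 1/Grand Blue Dreaming - S01E01 - Deep Blue HDTV-720p.mp4"

season_part = os.path.basename(os.path.dirname(cache_path))  # "Season 1"
episode_part = os.path.basename(cache_path)                  # "Grand Blue Dreaming - S01E01 - Deep Blue HDTV-720p.mp4"

# tv_store.json then nests title -> season folder -> episode file name -> cache path:
tv_store_data = {
    "Grand Blue Dreaming": {
        "Season 1": {
            "Grand Blue Dreaming - S01E01 - Deep Blue HDTV-720p.mp4": cache_path,
        },
    },
}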