ChandimaPrabath committed
Commit 61f8970 · 1 Parent(s): b7e3063

re-register if the load balancer didn't fetch a report for 1 minute

Files changed (1)
  1. Instance.py +33 -8
Instance.py CHANGED
@@ -10,13 +10,14 @@ from indexer import indexer
 import re
 from tvdb import fetch_and_cache_json
 import logging
+from threading import Event

 CACHE_DIR = os.getenv("CACHE_DIR")

 download_progress = {}

 class Instance:
-    def __init__(self, id, url, cache_dir, index_file, token, repo, load_balancer_api,max_retries=20, initial_delay=1):
+    def __init__(self, id, url, cache_dir, index_file, token, repo, load_balancer_api, max_retries=20, initial_delay=1):
         self.version = "0.2.4 V Alpha"
         self.id = id
         self.url = url
@@ -31,6 +32,8 @@ class Instance:
         self.load_balancer_api = load_balancer_api
         self.max_retries = max_retries
         self.initial_delay = initial_delay
+        self.last_report_time = time.time()  # Initialize the last report time
+        self.re_register_event = Event()

         # Ensure CACHE_DIR exists
         if not os.path.exists(self.CACHE_DIR):
@@ -51,19 +54,26 @@ class Instance:
         with open(self.INDEX_FILE, 'r') as f:
             self.file_structure = json.load(f)

-        # Start prefetching metadata
-        thread = Thread(target=self.start_prefetching)
+        # Start prefetching metadata and monitoring registration
         self.register_to_load_balancer()
-        thread.daemon = True
-        thread.start()
+        prefetch_thread = Thread(target=self.start_prefetching)
+        registration_thread = Thread(target=self.monitor_registration)
+
+        prefetch_thread.daemon = True
+        registration_thread.daemon = True
+
+        prefetch_thread.start()
+        registration_thread.start()

     def compile_report(self):
-        film_store_path = os.path.join(CACHE_DIR, "film_store.json")
-        tv_store_path = os.path.join(CACHE_DIR, "tv_store.json")
+        self.last_report_time = time.time()  # Update the last report time
+
+        film_store_path = os.path.join(self.CACHE_DIR, "film_store.json")
+        tv_store_path = os.path.join(self.CACHE_DIR, "tv_store.json")
         cache_size = self.get_cache_size()

         report = {
-            "instance_id":self.id,
+            "instance_id": self.id,
             "instance_url": self.url,
             "film_store": self.read_json(film_store_path),
             "tv_store": self.read_json(tv_store_path),
@@ -71,6 +81,21 @@ class Instance:
         }
         return report

+    def register_to_load_balancer(self):
+        result = self.load_balancer_api.register_instance(self.id, self.url)
+        if result is not None:
+            logging.info(f'Registered instance {self.id} to load balancer.')
+        else:
+            logging.error(f'Failed to register instance {self.id} to load balancer.')
+
+    def monitor_registration(self):
+        while True:
+            if time.time() - self.last_report_time > 60:  # Check if 1 minute has passed
+                logging.info('1 minute passed since last report. Re-registering...')
+                self.register_to_load_balancer()
+                self.last_report_time = time.time()  # Reset the last report time
+            time.sleep(30)  # Check every 30 seconds
+
     def get_cache_size(self):
         total_size = 0
         for dirpath, dirnames, filenames in os.walk(CACHE_DIR):
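
For reference, a minimal self-contained sketch of the watchdog behaviour this commit introduces: compile_report() refreshes last_report_time whenever the load balancer fetches a report, and a daemon thread re-registers the instance once more than 60 seconds pass without a fetch. The FakeLoadBalancerAPI stub, the WatchdogDemo class name, and the demo timings at the bottom are hypothetical stand-ins used only so the loop can run in isolation; the 60-second threshold and 30-second poll interval are the values from the diff above.

import logging
import time
from threading import Thread

logging.basicConfig(level=logging.INFO)

class FakeLoadBalancerAPI:
    """Hypothetical stand-in for the real load_balancer_api client."""
    def register_instance(self, instance_id, url):
        logging.info(f"(stub) register_instance({instance_id}, {url})")
        return {"status": "ok"}  # any non-None result counts as success

class WatchdogDemo:
    def __init__(self, instance_id, url, load_balancer_api):
        self.id = instance_id
        self.url = url
        self.load_balancer_api = load_balancer_api
        self.last_report_time = time.time()

    def compile_report(self):
        # Called when the load balancer fetches a report; refreshes the timer.
        self.last_report_time = time.time()

    def register_to_load_balancer(self):
        result = self.load_balancer_api.register_instance(self.id, self.url)
        if result is not None:
            logging.info(f"Registered instance {self.id} to load balancer.")
        else:
            logging.error(f"Failed to register instance {self.id} to load balancer.")

    def monitor_registration(self):
        while True:
            if time.time() - self.last_report_time > 60:  # 1 minute without a fetch
                logging.info("1 minute passed since last report. Re-registering...")
                self.register_to_load_balancer()
                self.last_report_time = time.time()
            time.sleep(30)  # poll every 30 seconds

if __name__ == "__main__":
    demo = WatchdogDemo("instance-1", "http://localhost:7860", FakeLoadBalancerAPI())
    Thread(target=demo.monitor_registration, daemon=True).start()
    demo.compile_report()  # simulate one report fetch
    time.sleep(150)        # no further fetches, so the watchdog re-registers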