ianeksdi committed on
Commit
35e3337
·
verified ·
1 Parent(s): cc840fb

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +45 -25
app.py CHANGED
@@ -1,45 +1,65 @@
1
- from smolagents import CodeAgent, HfApiModel
 
 
 
2
  import yaml
3
  from tools.final_answer import FinalAnswerTool
 
4
  from Gradio_UI import GradioUI
5
 
6
- # Updated system prompt: provide only final, direct advice with no chain-of-thought or code.
7
- system_prompt = (
8
- "You are a health and lifestyle advisor specializing in the early detection and prevention of hypertension. "
9
- "Diagnostic criteria: Normal BP is < 120/80 mmHg, Borderline BP is 120-139/80-89 mmHg, and Hypertension is > 140/90 mmHg. "
10
- "Based solely on the user's details, provide only the final, direct, and concise lifestyle tips. "
11
- "Do NOT include any internal reasoning, chain-of-thought, or any code snippets in your output. "
12
- "Only output the final advice as plain text. For example, if the user mentions alcohol consumption, simply say: "
13
- "'Reduce alcohol intake, as it can raise blood pressure.'"
14
- )
 
15
 
16
- # Use only the final_answer tool.
17
- final_answer = FinalAnswerTool()
 
 
 
 
 
 
 
 
 
 
 
 
18
 
19
- # Set up your model.
 
20
  model = HfApiModel(
21
- max_tokens=2096,
22
- temperature=0.5,
23
- model_id='deepseek-ai/DeepSeek-R1-Distill-Qwen-32B', # Adjust if needed.
24
- custom_role_conversions=None,
25
  )
26
 
27
- # Load prompt templates from the YAML file.
 
 
 
28
  with open("prompts.yaml", 'r') as stream:
29
  prompt_templates = yaml.safe_load(stream)
30
-
31
- # Initialize the CodeAgent with the updated system prompt.
32
  agent = CodeAgent(
33
  model=model,
34
- tools=[final_answer],
35
  max_steps=6,
36
  verbosity_level=1,
37
  grammar=None,
38
  planning_interval=None,
39
- name="Hypertension Prevention Advisor",
40
- description=system_prompt,
41
  prompt_templates=prompt_templates
42
  )
43
 
44
- # Launch the Gradio UI.
45
- GradioUI(agent).launch()
 
1
+ from smolagents import CodeAgent, DuckDuckGoSearchTool, HfApiModel, load_tool, tool
2
+ import datetime
3
+ import requests
4
+ import pytz
5
  import yaml
6
  from tools.final_answer import FinalAnswerTool
7
+
8
  from Gradio_UI import GradioUI
9
 
10
# Template tool: replace the body with something useful. Amaze us with your creativity!
@tool
def my_custom_tool(arg1: str, arg2: int) -> str:  # return type annotation is required by @tool
    # The docstring below is parsed by the agent framework to build the tool
    # description, so keep the summary + Args layout when you customize it.
    """A tool that does nothing yet
    Args:
        arg1: the first argument
        arg2: the second argument
    """
    return "What magic will you build ?"
20
 
21
@tool
def get_current_time_in_timezone(timezone: str) -> str:
    # Docstring is parsed by the agent framework for the tool spec — keep layout.
    """A tool that fetches the current local time in a specified timezone.
    Args:
        timezone: A string representing a valid timezone (e.g., 'America/New_York').
    """
    try:
        # Resolve the IANA timezone name, then format "now" in that zone.
        zone = pytz.timezone(timezone)
        stamp = datetime.datetime.now(zone).strftime("%Y-%m-%d %H:%M:%S")
    except Exception as e:
        # pytz raises UnknownTimeZoneError for bad names; report it as text
        # so the agent can surface the problem instead of crashing.
        return f"Error fetching time for timezone '{timezone}': {str(e)}"
    return f"The current local time in {timezone} is: {stamp}"
35
 
36
+
37
# The final_answer tool is how a smolagents agent terminates a run and
# returns its result; it must always be present in the tools list below.
final_answer = FinalAnswerTool()

# Hugging Face Inference API backend for the agent.
model = HfApiModel(
    max_tokens=2096,  # NOTE(review): 2096 looks like a typo for 2048 — confirm intent
    temperature=0.5,
    model_id='Qwen/Qwen2.5-Coder-32B-Instruct',
    custom_role_conversions=None,
)
44
 
45
+
46
# Import tool from Hub
# NOTE(review): image_generation_tool is loaded here but never passed to the
# agent's `tools` list — confirm whether it should be registered, otherwise
# this download (with trust_remote_code=True) is dead weight.
image_generation_tool = load_tool("agents-course/text-to-image", trust_remote_code=True)
48
+
49
# Load the prompt templates that drive the agent's system/planning prompts.
# Explicit UTF-8 avoids platform-dependent default encodings mangling the YAML;
# yaml.safe_load (not yaml.load) keeps arbitrary-object construction disabled.
with open("prompts.yaml", "r", encoding="utf-8") as stream:
    prompt_templates = yaml.safe_load(stream)
51
+
 
52
# Assemble the code-writing agent from the model, tools, and prompt templates
# defined above.
agent = CodeAgent(
    model=model,
    tools=[final_answer],  # add your tools here (don't remove final_answer)
    max_steps=6,           # hard cap on reasoning/tool-call iterations per run
    verbosity_level=1,
    grammar=None,
    planning_interval=None,  # None disables periodic planning steps
    name=None,
    description=None,
    prompt_templates=prompt_templates
)
63
 
64
+
65
+ GradioUI(agent).launch()