PrabhakarVenkat
committed on
Upload 21 files
Browse files- stock_analysis/.env +5 -0
- stock_analysis/README.md +3 -0
- stock_analysis/__pycache__/stock_analysis_agents.cpython-310.pyc +0 -0
- stock_analysis/__pycache__/stock_analysis_tasks.cpython-310.pyc +0 -0
- stock_analysis/content.txt +5 -0
- stock_analysis/main.py +62 -0
- stock_analysis/poetry.lock +0 -0
- stock_analysis/pyproject.toml +35 -0
- stock_analysis/stock_analysis_agents.py +77 -0
- stock_analysis/stock_analysis_tasks.py +97 -0
- stock_analysis/tools/__init__.py +0 -0
- stock_analysis/tools/__pycache__/__init__.cpython-310.pyc +0 -0
- stock_analysis/tools/__pycache__/browser_tools.cpython-310.pyc +0 -0
- stock_analysis/tools/__pycache__/calculator_tools.cpython-310.pyc +0 -0
- stock_analysis/tools/__pycache__/search_tools.cpython-310.pyc +0 -0
- stock_analysis/tools/__pycache__/sec_tools.cpython-310.pyc +0 -0
- stock_analysis/tools/browser_tools.py +38 -0
- stock_analysis/tools/calculator_tools.py +13 -0
- stock_analysis/tools/search_tools.py +57 -0
- stock_analysis/tools/sec_tools.py +112 -0
- tree +1 -0
stock_analysis/.env
ADDED
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
SERPER_API_KEY="<REDACTED — a real key was committed here; rotate it and load from a secret store, never commit .env>"
|
2 |
+
BROWSERLESS_API_KEY=NA
|
3 |
+
SEC_API_API_KEY="<REDACTED — a real key was committed here; rotate it and load from a secret store, never commit .env>"
|
4 |
+
OPENAI_API_KEY=NA
|
5 |
+
GROQ_API_KEY="<REDACTED — a real key was committed here; rotate it and load from a secret store, never commit .env>"
|
stock_analysis/README.md
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
# Stock Analysis
|
2 |
+
|
3 |
+
This project is an example using the CrewAI framework to automate the process of analyzing a stock. CrewAI orchestrates autonomous AI agents, enabling them to collaborate and execute complex tasks efficiently.
|
stock_analysis/__pycache__/stock_analysis_agents.cpython-310.pyc
ADDED
Binary file (2.75 kB). View file
|
|
stock_analysis/__pycache__/stock_analysis_tasks.cpython-310.pyc
ADDED
Binary file (4.22 kB). View file
|
|
stock_analysis/content.txt
ADDED
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
SERPER_API_KEY=KEY
|
2 |
+
BROWSERLESS_API_KEY=KEY
|
3 |
+
SEC_API_API_KEY=KEY
|
4 |
+
OPENAI_API_KEY=NA
|
5 |
+
GROQ_API_KEY=KEY
|
stock_analysis/main.py
ADDED
@@ -0,0 +1,62 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from crewai import Crew
|
2 |
+
from textwrap import dedent
|
3 |
+
|
4 |
+
from stock_analysis_agents import StockAnalysisAgents
|
5 |
+
from stock_analysis_tasks import StockAnalysisTasks
|
6 |
+
|
7 |
+
from dotenv import load_dotenv
|
8 |
+
load_dotenv()
|
9 |
+
|
10 |
+
|
11 |
+
class FinancialCrew:
    """Orchestrates the stock-analysis agents and tasks for one company."""

    def __init__(self, company):
        # Company name/ticker the customer asked to analyze.
        self.company = company

    def run(self):
        """Assemble the crew, run it, and return the final report."""
        agent_factory = StockAnalysisAgents()
        task_factory = StockAnalysisTasks()

        researcher = agent_factory.research_analyst()
        analyst = agent_factory.financial_analyst()
        advisor = agent_factory.investment_advisor()

        # Tasks run in listed order: research feeds the financial and
        # filings analyses, which the advisor synthesizes last.
        crew = Crew(
            agents=[researcher, analyst, advisor],
            tasks=[
                task_factory.research(researcher, self.company),
                task_factory.financial_analysis(analyst),
                task_factory.filings_analysis(analyst),
                task_factory.recommend(advisor),
            ],
            verbose=True,
        )
        return crew.kickoff()
|
46 |
+
|
47 |
+
if __name__ == "__main__":
    print("## Welcome to Financial Analysis Crew")
    print('-------------------------------')
    # dedent keeps the multi-line prompt readable in source; strip()
    # removes the stray newline/whitespace users type with the answer.
    company = input(
        dedent("""
      What is the company you want to analyze?
    """)).strip()

    financial_crew = FinancialCrew(company)
    result = financial_crew.run()
    print("\n\n########################")
    print("## Here is the Report")
    print("########################\n")
    print(result)
    # str() guards against crew.kickoff() returning a result object rather
    # than plain text; explicit utf-8 avoids platform-default encoding
    # errors when the report contains non-ASCII characters.
    with open("report.txt", "w", encoding="utf-8") as f:
        f.write(str(result))
|
stock_analysis/poetry.lock
ADDED
The diff for this file is too large to render.
See raw diff
|
|
stock_analysis/pyproject.toml
ADDED
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
[tool.poetry]
|
2 |
+
name = "stock-analysis-crew"
|
3 |
+
version = "0.1.0"
|
4 |
+
description = ""
|
5 |
+
authors = ["Your Name <[email protected]>"]
|
6 |
+
|
7 |
+
[tool.poetry.dependencies]
|
8 |
+
python = ">=3.10.0,<3.12"
|
9 |
+
crewai = "^0.11.0"
|
10 |
+
unstructured = '==0.10.25'
|
11 |
+
pyowm = '3.3.0'
|
12 |
+
tools = "^0.1.9"
|
13 |
+
wikipedia = "1.4.0"
|
14 |
+
yfinance = "0.2.35"
|
15 |
+
sec-api = "1.0.17"
|
16 |
+
tiktoken = "0.5.2"
|
17 |
+
faiss-cpu = "1.7.4"
|
18 |
+
python-dotenv = "1.0.0"
|
19 |
+
langchain-community = "0.0.10"
|
20 |
+
langchain-core = "0.1.8"
|
21 |
+
openai = "1.7.1"
|
22 |
+
|
23 |
+
[tool.pyright]
|
24 |
+
# https://github.com/microsoft/pyright/blob/main/docs/configuration.md
|
25 |
+
useLibraryCodeForTypes = true
|
26 |
+
exclude = [".cache"]
|
27 |
+
|
28 |
+
[tool.ruff]
|
29 |
+
# https://beta.ruff.rs/docs/configuration/
|
30 |
+
select = ['E', 'W', 'F', 'I', 'B', 'C4', 'ARG', 'SIM']
|
31 |
+
ignore = ['W291', 'W292', 'W293']
|
32 |
+
|
33 |
+
[build-system]
|
34 |
+
requires = ["poetry-core>=1.0.0"]
|
35 |
+
build-backend = "poetry.core.masonry.api"
|
stock_analysis/stock_analysis_agents.py
ADDED
@@ -0,0 +1,77 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from crewai import Agent
|
2 |
+
from tools.browser_tools import BrowserTools
|
3 |
+
from tools.calculator_tools import CalculatorTools
|
4 |
+
from tools.search_tools import SearchTools
|
5 |
+
from tools.sec_tools import SECTools
|
6 |
+
# from langchain.llms.ollama import Ollama
|
7 |
+
from langchain_groq import ChatGroq
|
8 |
+
from dotenv import load_dotenv
|
9 |
+
import os
|
10 |
+
load_dotenv()
|
11 |
+
from langchain.tools.yahoo_finance_news import YahooFinanceNewsTool
|
12 |
+
llm = ChatGroq(temperature=0, groq_api_key=os.getenv("GROQ_API_KEY"), model_name="mixtral-8x7b-32768")
|
13 |
+
# llm = Ollama(model="crew-mistral")
|
14 |
+
|
15 |
+
class StockAnalysisAgents():
    """Factory for the three agents that make up the stock-analysis crew.

    Every agent shares the module-level Groq LLM (`llm`) and is capped at
    30 reasoning iterations to bound runtime and token cost.
    """

    def financial_analyst(self):
        """Agent that analyzes financial metrics and SEC filings."""
        return Agent(
            role='The Best Financial Analyst',
            goal="""Impress all customers with your financial data
            and market trends analysis""",
            backstory="""The most seasoned financial analyst with
            lots of expertise in stock market analysis and investment
            strategies that is working for a super important customer.""",
            verbose=True,
            tools=[
                BrowserTools.scrape_and_summarize_website,
                SearchTools.search_internet,
                CalculatorTools.calculate,
                SECTools.search_10q,
                SECTools.search_10k
            ],
            llm=llm,
            max_iter=30
        )

    def research_analyst(self):
        """Agent that gathers news, announcements and market sentiment."""
        return Agent(
            role='Staff Research Analyst',
            # Grammar fixed from "Being the best at gather, interpret data
            # and amaze your customer with it" — cleaner prompt for the LLM.
            goal="""Being the best at gathering and interpreting data and
            amazing your customer with it""",
            backstory="""Known as the BEST research analyst, you're
            skilled in sifting through news, company announcements,
            and market sentiments. Now you're working on a super
            important customer""",
            verbose=True,
            tools=[
                BrowserTools.scrape_and_summarize_website,
                SearchTools.search_internet,
                SearchTools.search_news,
                YahooFinanceNewsTool(),
                SECTools.search_10q,
                SECTools.search_10k
            ],
            llm=llm,
            max_iter=30
        )

    def investment_advisor(self):
        """Agent that synthesizes the analyses into a recommendation."""
        return Agent(
            role='Private Investment Advisor',
            # Typo fixed: "completer" -> "complete".
            goal="""Impress your customers with full analyses over stocks
            and complete investment recommendations""",
            backstory="""You're the most experienced investment advisor
            and you combine various analytical insights to formulate
            strategic investment advice. You are now working for
            a super important customer you need to impress.""",
            verbose=True,
            tools=[
                BrowserTools.scrape_and_summarize_website,
                SearchTools.search_internet,
                SearchTools.search_news,
                CalculatorTools.calculate,
                YahooFinanceNewsTool()
            ],
            llm=llm,
            max_iter=30
        )
|
stock_analysis/stock_analysis_tasks.py
ADDED
@@ -0,0 +1,97 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from crewai import Task
|
2 |
+
from textwrap import dedent
|
3 |
+
|
4 |
+
class StockAnalysisTasks():
    """Factory for the crewai Tasks executed by the stock-analysis crew.

    Each task shares the same generic expected_output and appends the
    motivational "commission" line from __tip_section() to its prompt.
    """

    def research(self, agent, company):
        """News/sentiment research task for the selected company.

        The only task that receives the company name directly; downstream
        tasks rely on the ticker this one is instructed to return.
        """
        return Task(description=dedent(f"""
                Collect and summarize recent news articles, press
                releases, and market analyses related to the stock and
                its industry.
                Pay special attention to any significant events, market
                sentiments, and analysts' opinions. Also include upcoming
                events like earnings and others.

                Your final answer MUST be a report that includes a
                comprehensive summary of the latest news, any notable
                shifts in market sentiment, and potential impacts on
                the stock.
                Also make sure to return the stock ticker.

                {self.__tip_section()}

                Make sure to use the most recent data as possible.

                Selected company by the customer: {company}
            """),
            expected_output="A detailed report covering the specified analysis points",
            agent=agent
        )

    def financial_analysis(self, agent):
        """Financial-health analysis task (ratios, trends, peer comparison)."""
        return Task(description=dedent(f"""
                Conduct a thorough analysis of the stock's financial
                health and market performance.
                This includes examining key financial metrics such as
                P/E ratio, EPS growth, revenue trends, and
                debt-to-equity ratio.
                Also, analyze the stock's performance in comparison
                to its industry peers and overall market trends.

                Your final report MUST expand on the summary provided
                but now including a clear assessment of the stock's
                financial standing, its strengths and weaknesses,
                and how it fares against its competitors in the current
                market scenario.{self.__tip_section()}

                Make sure to use the most recent data possible.
            """),
            expected_output="A detailed report covering the specified analysis points",
            agent=agent
        )

    def filings_analysis(self, agent):
        """10-Q/10-K EDGAR filings analysis task."""
        return Task(description=dedent(f"""
                Analyze the latest 10-Q and 10-K filings from EDGAR for
                the stock in question.
                Focus on key sections like Management's Discussion and
                Analysis, financial statements, insider trading activity,
                and any disclosed risks.
                Extract relevant data and insights that could influence
                the stock's future performance.

                Your final answer must be an expanded report that now
                also highlights significant findings from these filings,
                including any red flags or positive indicators for
                your customer.
                {self.__tip_section()}
            """),
            expected_output="A detailed report covering the specified analysis points",
            agent=agent
        )

    def recommend(self, agent):
        """Final synthesis task producing the investment recommendation."""
        return Task(description=dedent(f"""
                Review and synthesize the analyses provided by the
                Financial Analyst and the Research Analyst.
                Combine these insights to form a comprehensive
                investment recommendation.

                You MUST Consider all aspects, including financial
                health, market sentiment, and qualitative data from
                EDGAR filings.

                Make sure to include a section that shows insider
                trading activity, and upcoming events like earnings.

                Your final answer MUST be a recommendation for your
                customer. It should be a full super detailed report, providing a
                clear investment stance and strategy with supporting evidence.
                Make it pretty and well formatted for your customer.
                {self.__tip_section()}
            """),
            expected_output="A detailed report covering the specified analysis points",
            agent=agent
        )

    def __tip_section(self):
        # Shared prompt suffix; a common prompt-engineering nudge.
        return "If you do your BEST WORK, I'll give you a $10,000 commission!"
|
stock_analysis/tools/__init__.py
ADDED
File without changes
|
stock_analysis/tools/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (154 Bytes). View file
|
|
stock_analysis/tools/__pycache__/browser_tools.cpython-310.pyc
ADDED
Binary file (2.08 kB). View file
|
|
stock_analysis/tools/__pycache__/calculator_tools.cpython-310.pyc
ADDED
Binary file (765 Bytes). View file
|
|
stock_analysis/tools/__pycache__/search_tools.cpython-310.pyc
ADDED
Binary file (1.6 kB). View file
|
|
stock_analysis/tools/__pycache__/sec_tools.cpython-310.pyc
ADDED
Binary file (4.03 kB). View file
|
|
stock_analysis/tools/browser_tools.py
ADDED
@@ -0,0 +1,38 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import json
|
2 |
+
import os
|
3 |
+
|
4 |
+
import requests
|
5 |
+
from crewai import Agent, Task
|
6 |
+
from langchain.tools import tool
|
7 |
+
from unstructured.partition.html import partition_html
|
8 |
+
|
9 |
+
|
10 |
+
class BrowserTools():
    """LLM tool that fetches a web page and summarizes it chunk by chunk."""

    @tool("Scrape website content")
    def scrape_and_summarize_website(website):
        """Useful to scrape and summarize a website content"""
        # Browserless renders the page (including JS) and returns its HTML.
        url = f"https://chrome.browserless.io/content?token={os.environ['BROWSERLESS_API_KEY']}"
        payload = json.dumps({"url": website})
        headers = {'cache-control': 'no-cache', 'content-type': 'application/json'}
        # Explicit timeout: without it a hung render stalls the whole crew run.
        response = requests.post(url, headers=headers, data=payload, timeout=60)
        elements = partition_html(text=response.text)
        content = "\n\n".join([str(el) for el in elements])
        # 8000-character chunks keep each summarization call within the
        # model's context window.
        content = [content[i:i + 8000] for i in range(0, len(content), 8000)]
        summaries = []
        for chunk in content:
            # One throwaway summarizer agent per chunk.
            agent = Agent(
                role='Principal Researcher',
                goal='Do amazing research and summaries based on the content you are working with',
                backstory="You're a Principal Researcher at a big company and you need to do research about a given topic.",
                allow_delegation=False)
            task = Task(
                agent=agent,
                description=f'Analyze and summarize the content below, make sure to include the most relevant information in the summary, return only the summary nothing else.\n\nCONTENT\n----------\n{chunk}'
            )
            summaries.append(task.execute())
        return "\n\n".join(summaries)
|
stock_analysis/tools/calculator_tools.py
ADDED
@@ -0,0 +1,13 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from langchain.tools import tool
|
2 |
+
|
3 |
+
|
4 |
+
class CalculatorTools():
    """Arithmetic tool exposed to the agents."""

    @tool("Make a calculation")
    def calculate(operation):
        """Useful to perform any mathematical calculations,
        like sum, minus, multiplication, division, etc.
        The input to this tool should be a mathematical
        expression, a couple examples are `200*7` or `5000/2*10`
        """
        # SECURITY: the original used eval() on model-generated text, which
        # is arbitrary code execution. Parse with ast instead and walk the
        # tree, allowing only numeric literals and arithmetic operators.
        import ast
        import operator as op

        allowed = {
            ast.Add: op.add, ast.Sub: op.sub, ast.Mult: op.mul,
            ast.Div: op.truediv, ast.FloorDiv: op.floordiv,
            ast.Mod: op.mod, ast.Pow: op.pow,
            ast.USub: op.neg, ast.UAdd: op.pos,
        }

        def _eval(node):
            # Recursively evaluate only whitelisted node types.
            if isinstance(node, ast.Expression):
                return _eval(node.body)
            if isinstance(node, ast.Constant) and isinstance(node.value, (int, float)):
                return node.value
            if isinstance(node, ast.BinOp) and type(node.op) in allowed:
                return allowed[type(node.op)](_eval(node.left), _eval(node.right))
            if isinstance(node, ast.UnaryOp) and type(node.op) in allowed:
                return allowed[type(node.op)](_eval(node.operand))
            raise ValueError(f"Unsupported expression: {operation!r}")

        return _eval(ast.parse(str(operation), mode="eval"))
|
stock_analysis/tools/search_tools.py
ADDED
@@ -0,0 +1,57 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import json
|
2 |
+
import os
|
3 |
+
|
4 |
+
import requests
|
5 |
+
from langchain.tools import tool
|
6 |
+
|
7 |
+
|
8 |
+
class SearchTools():
    """Serper.dev-backed web and news search tools for the agents."""

    @tool("Search the internet")
    def search_internet(query):
        """Useful to search the internet
        about a given topic and return relevant results"""
        return SearchTools.__serper_search("https://google.serper.dev/search", "organic", query)

    @tool("Search news on the internet")
    def search_news(query):
        """Useful to search news about a company, stock or any other
        topic and return relevant results"""
        return SearchTools.__serper_search("https://google.serper.dev/news", "news", query)

    def __serper_search(url, result_key, query):
        # Shared POST + result formatting for both tools; the endpoints only
        # differ in URL and in the JSON key holding the results.
        top_result_to_return = 4
        payload = json.dumps({"q": query})
        headers = {
            'X-API-KEY': os.environ['SERPER_API_KEY'],
            'content-type': 'application/json'
        }
        # Timeout so a slow API call cannot stall the crew run.
        response = requests.post(url, headers=headers, data=payload, timeout=30)
        results = response.json()[result_key]
        string = []
        for result in results[:top_result_to_return]:
            try:
                string.append('\n'.join([
                    f"Title: {result['title']}", f"Link: {result['link']}",
                    f"Snippet: {result['snippet']}", "\n-----------------"
                ]))
            except KeyError:
                # Skip results missing any expected field (the original's
                # bare `next` was a no-op; `continue` is the intent).
                continue

        return '\n'.join(string)
|
stock_analysis/tools/sec_tools.py
ADDED
@@ -0,0 +1,112 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os
|
2 |
+
|
3 |
+
import requests
|
4 |
+
|
5 |
+
from langchain.tools import tool
|
6 |
+
from langchain.text_splitter import CharacterTextSplitter
|
7 |
+
from langchain.embeddings import OpenAIEmbeddings
|
8 |
+
from langchain_community.vectorstores import FAISS
|
9 |
+
|
10 |
+
from sec_api import QueryApi
|
11 |
+
from unstructured.partition.html import partition_html
|
12 |
+
|
13 |
+
class SECTools():
    """Tools that answer questions from the latest SEC filings (sec-api.io)."""

    @tool("Search 10-Q form")
    def search_10q(data):
        """
        Useful to search information from the latest 10-Q form for a
        given stock.
        The input to this tool should be a pipe (|) separated text of
        length two, representing the stock ticker you are interested and what
        question you have from it.
        For example, `AAPL|what was last quarter's revenue`.
        """
        return SECTools.__search_filing("10-Q", data)

    @tool("Search 10-K form")
    def search_10k(data):
        """
        Useful to search information from the latest 10-K form for a
        given stock.
        The input to this tool should be a pipe (|) separated text of
        length two, representing the stock ticker you are interested, what
        question you have from it.
        For example, `AAPL|what was last year's revenue`.
        """
        return SECTools.__search_filing("10-K", data)

    def __search_filing(form_type, data):
        # Shared lookup for both tools: fetch the most recent filing of
        # `form_type` for the ticker, then answer the question via
        # embedding search over the filing's HTML.
        # maxsplit=1 so a question that itself contains '|' still parses.
        stock, ask = data.split("|", 1)
        queryApi = QueryApi(api_key=os.environ['SEC_API_API_KEY'])
        query = {
            "query": {
                "query_string": {
                    "query": f"ticker:{stock} AND formType:\"{form_type}\""
                }
            },
            "from": "0",
            "size": "1",
            "sort": [{"filedAt": {"order": "desc"}}]
        }

        fillings = queryApi.get_filings(query)['filings']
        if len(fillings) == 0:
            return "Sorry, I couldn't find any filling for this stock, check if the ticker is correct."
        link = fillings[0]['linkToFilingDetails']
        return SECTools.__embedding_search(link, ask)

    def __embedding_search(url, ask):
        # Download the filing, chunk it, embed with OpenAI, and return the
        # most relevant chunks joined as the answer context.
        text = SECTools.__download_form_html(url)
        elements = partition_html(text=text)
        content = "\n".join([str(el) for el in elements])
        text_splitter = CharacterTextSplitter(
            separator="\n",
            chunk_size=1000,
            chunk_overlap=150,
            length_function=len,
            is_separator_regex=False,
        )
        docs = text_splitter.create_documents([content])
        retriever = FAISS.from_documents(
            docs, OpenAIEmbeddings()
        ).as_retriever()
        answers = retriever.get_relevant_documents(ask, top_k=4)
        answers = "\n\n".join([a.page_content for a in answers])
        return answers

    def __download_form_html(url):
        # SEC EDGAR rejects requests that don't look like a browser, hence
        # the full set of browser-like headers.
        headers = {
            'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7',
            'Accept-Encoding': 'gzip, deflate, br',
            'Accept-Language': 'en-US,en;q=0.9,pt-BR;q=0.8,pt;q=0.7',
            'Cache-Control': 'max-age=0',
            'Dnt': '1',
            'Sec-Ch-Ua': '"Not_A Brand";v="8", "Chromium";v="120"',
            'Sec-Ch-Ua-Mobile': '?0',
            'Sec-Ch-Ua-Platform': '"macOS"',
            'Sec-Fetch-Dest': 'document',
            'Sec-Fetch-Mode': 'navigate',
            'Sec-Fetch-Site': 'none',
            'Sec-Fetch-User': '?1',
            'Upgrade-Insecure-Requests': '1',
            'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36'
        }

        # Timeout so a slow EDGAR response cannot hang the tool call.
        response = requests.get(url, headers=headers, timeout=60)
        return response.text
|
tree
ADDED
@@ -0,0 +1 @@
|
|
|
|
|
1 |
+
|