Edit for Beneficiar
- generation.py +77 -52
- interface.py +27 -122
generation.py CHANGED

@@ -1,42 +1,31 @@
 import json
 from enum import Enum
 from openai import OpenAI
+from anthropic import Anthropic
 import google.generativeai as genai
 from llama_index.core.llms import ChatMessage
 from prompts import LEGAL_POSITION_PROMPT, SYSTEM_PROMPT
+from config import anthropic_api_key


 class GenerationProvider(str, Enum):
     OPENAI = "openai"
-    GEMINI = "gemini"
+    # GEMINI = "gemini"
+    ANTHROPIC = "anthropic"


 class GenerationModelName(str, Enum):
     # OpenAI models
-
+    GPT_MODEL_8 = "ft:gpt-4o-mini-2024-07-18:personal:legal-position-1500:Aaiu4WZd"
+    GPT_MODEL_9 = "ft:gpt-4o-mini-2024-07-18:personal:legal-position-1700:AbNt5I2x"
+
     # Gemini models
-    GEMINI_FLASH = "gemini-1.5-flash"
-
-
-    #
-
-
-    "json_schema": {
-        "name": "lp_schema",
-        "schema": {
-            "type": "object",
-            "properties": {
-                "title": {"type": "string", "description": "Title of the legal position"},
-                "text": {"type": "string", "description": "Text of the legal position"},
-                "proceeding": {"type": "string", "description": "Type of court proceedings"},
-                "category": {"type": "string", "description": "Category of the legal position"},
-            },
-            "required": ["title", "text", "proceeding", "category"],
-            "additionalProperties": False
-        },
-        "strict": True
-    }
-}
+    # GEMINI_FLASH = "gemini-1.5-flash"
+
+    # Anthropic models
+    # CLAUDE_3_HAIKU = "claude-3-haiku-20240307"
+    ANTHROPIC_MODEL_4 = "claude-3-5-sonnet-latest"
+    # CLAUDE_3_OPUS = "claude-3-opus-20240229"


 def generate_legal_position(court_decision_text: str, comment_input: str, provider: str, model_name: str) -> dict:

@@ -67,34 +56,65 @@ def generate_legal_position(court_decision_text: str, comment_input: str, provid
             )
             parsed_response = json.loads(response.choices[0].message.content)

-
-
-
-
-
-
-
-
-
+        # elif provider == GenerationProvider.GEMINI.value:
+        #     generation_config = {
+        #         "temperature": 0,
+        #         "max_output_tokens": 8192,
+        #         "response_mime_type": "application/json",
+        #     }
+        #
+        #     model = genai.GenerativeModel(
+        #         model_name=model_name,
+        #         generation_config=generation_config,
+        #     )
+        #
+        #     chat = model.start_chat(history=[])
+        #     response = chat.send_message(
+        #         f"{SYSTEM_PROMPT}\n\n{content}",
+        #     )
+        #     parsed_response = json.loads(response.text)
+
+        elif provider == GenerationProvider.ANTHROPIC.value:
+            client = Anthropic(api_key=anthropic_api_key)
+
+            json_instruction = """
+            Будь ласка, надай відповідь у форматі JSON з наступними полями:
+            {
+                "title": "заголовок правової позиції",
+                "text": "текст правової позиції",
+                "proceeding": "тип судочинства",
+                "category": "категорія справи"
             }
+            """

-
-
-
-
-
-
-
-
+            response = client.messages.create(
+                model=model_name,
+                max_tokens=4096,
+                temperature=0,
+                messages=[
+                    {
+                        "role": "assistant",
+                        "content": "Ти - кваліфікований юрист-аналітик."
+                    },
+                    {
+                        "role": "user",
+                        "content": f"{SYSTEM_PROMPT}\n{json_instruction}\n{content}"
+                    }
+                ]
             )
-            parsed_response = json.loads(response.text)
-
-            # Та сама перевірка для Gemini
-            if 'text_lp' in parsed_response and 'text' not in parsed_response:
-                parsed_response['text'] = parsed_response.pop('text_lp')
+            parsed_response = json.loads(response.content[0].text)

         else:
-
+            return {
+                "title": "Error",
+                "text": f"Unsupported provider: {provider}",
+                "proceeding": "Error",
+                "category": "Error"
+            }
+
+        # Перевірка та конвертація полів для всіх провайдерів
+        if 'text_lp' in parsed_response and 'text' not in parsed_response:
+            parsed_response['text'] = parsed_response.pop('text_lp')

         # Валідація результату
         required_fields = ["title", "text", "proceeding", "category"]

@@ -102,7 +122,12 @@ def generate_legal_position(court_decision_text: str, comment_input: str, provid
             return parsed_response

         missing_fields = [field for field in required_fields if field not in parsed_response]
-
+        return {
+            "title": parsed_response.get('title', 'Error'),
+            "text": f"Missing required fields: {', '.join(missing_fields)}",
+            "proceeding": parsed_response.get('proceeding', 'Error'),
+            "category": parsed_response.get('category', 'Error')
+        }

     except json.JSONDecodeError as e:
         return {

@@ -113,8 +138,8 @@ def generate_legal_position(court_decision_text: str, comment_input: str, provid
         }
     except Exception as e:
         return {
-            "title":
-            "text":
-            "proceeding":
-            "category":
+            "title": "Error",
+            "text": f"Unexpected error: {str(e)}",
+            "proceeding": "Error",
+            "category": "Error"
         }
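
For reference, a minimal, hypothetical sketch of how the new Anthropic branch in generation.py can be exercised after this change. The function and enum members come from the file above; the placeholder decision text, the keyword-style call, and the print statement are illustrative assumptions, not part of the commit:

    from generation import (
        GenerationModelName,
        GenerationProvider,
        generate_legal_position,
    )

    # In the app the decision text comes from extract_court_decision_text(url);
    # here it is just a placeholder string.
    decision_text = "Текст судового рішення ..."

    result = generate_legal_position(
        court_decision_text=decision_text,
        comment_input="",  # interface.py now passes an empty comment
        provider=GenerationProvider.ANTHROPIC.value,             # "anthropic"
        model_name=GenerationModelName.ANTHROPIC_MODEL_4.value,  # "claude-3-5-sonnet-latest"
    )

    # On success the dict carries title/text/proceeding/category;
    # on failure the same keys are filled with "Error" placeholders.
    print(result["title"], result["proceeding"], result["category"])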
interface.py CHANGED

@@ -1,171 +1,76 @@
 import gradio as gr
 from typing import List
 import json
-from enum import Enum

-from analysis import ModelProvider, ModelName, PrecedentAnalysisWorkflow
 from generation import GenerationProvider, GenerationModelName, generate_legal_position
-from utils import extract_court_decision_text
-from search import search_with_ai_action
+from utils import extract_court_decision_text


 def create_gradio_interface():
     def update_generation_model_choices(provider):
         if provider == GenerationProvider.OPENAI.value:
             return gr.Dropdown(choices=[m.value for m in GenerationModelName if m.value.startswith("ft")])
-
-
+        # elif provider == GenerationProvider.GEMINI.value:
+        #     return gr.Dropdown(choices=[m.value for m in GenerationModelName if m.value.startswith("gemini")])
+        elif provider == GenerationProvider.ANTHROPIC.value:
+            return gr.Dropdown(choices=[m.value for m in GenerationModelName if m.value.startswith("claude")])

-    def
-
-
-
-        return
+    def get_model_name_by_value(model_value: str) -> str:
+        try:
+            # Знаходимо відповідну модель в енумі за значенням
+            model = next(model for model in GenerationModelName if model.value == model_value)
+            return model.name
+        except StopIteration:
+            return model_value

-    async def generate_position_action(url, provider, model_name
+    async def generate_position_action(url, provider, model_name):
         try:
             court_decision_text = extract_court_decision_text(url)
-            legal_position_json = generate_legal_position(court_decision_text,
+            legal_position_json = generate_legal_position(court_decision_text, "", provider, model_name)
+            display_model_name = get_model_name_by_value(model_name)
             position_output_content = (
-                f"**Короткий зміст позиції суду за введеним рішенням (модель: {
+                f"**Короткий зміст позиції суду за введеним рішенням (модель: {display_model_name}):**\n"
                 f"*{legal_position_json['title']}*: \n"
                 f"{legal_position_json['text']} "
                 f"**Категорія:** \n{legal_position_json['category']} "
                 f"({legal_position_json['proceeding']})\n\n"
             )
-            return position_output_content
-        except Exception as e:
-            return f"Error during position generation: {str(e)}", None
-
-    async def analyze_action(legal_position_json, question, nodes, provider, model_name):
-        try:
-            workflow = PrecedentAnalysisWorkflow(
-                provider=ModelProvider(provider),
-                model_name=ModelName(model_name)
-            )
-
-            query = (
-                f"{legal_position_json['title']}: "
-                f"{legal_position_json['text']}: "
-                f"{legal_position_json['proceeding']}: "
-                f"{legal_position_json['category']}"
-            )
-
-            response_text = await workflow.run(
-                query=query,
-                question=question,
-                nodes=nodes
-            )
-
-            output = f"**Аналіз ШІ (модель: {model_name}):**\n{response_text}\n\n"
-            output += "**Наявні в базі Правові Позицій Верховного Суду:**\n\n"
-
-            analysis_lines = response_text.split('\n')
-            for line in analysis_lines:
-                if line.startswith('* ['):
-                    index = line[3:line.index(']')]
-                    node = nodes[int(index) - 1]
-                    source_node = node.node
-
-                    source_title = source_node.metadata.get('title', 'Невідомий заголовок')
-                    source_text_lp = node.text
-                    doc_ids = source_node.metadata.get('doc_id')
-                    lp_id = source_node.metadata.get('lp_id')
-
-                    links = get_links_html(doc_ids)
-                    links_lp = get_links_html_lp(lp_id)
-
-                    output += f"[{index}]: *{source_title}* | {source_text_lp} | {links_lp} | {links}\n\n"
-
-            return output
-
+            return position_output_content
         except Exception as e:
-            return f"Error during
+            return f"Error during position generation: {str(e)}"

     with gr.Blocks() as app:
-        gr.Markdown("#
+        gr.Markdown("# Генератор короткого змісту судового рішення")

-
-        comment_input = gr.Textbox(label="Коментар до формування короткого змісту судового рішення:")
-        url_input = gr.Textbox(label="URL судового рішення:")
-        question_input = gr.Textbox(label="Уточнююче питання для аналізу:")
+        url_input = gr.Textbox(label="URL судового рішення:")

         with gr.Row():
-            # Провайдер для генерування
             generation_provider_dropdown = gr.Dropdown(
                 choices=[p.value for p in GenerationProvider],
-                value=GenerationProvider.
+                value=GenerationProvider.OPENAI.value,
                 label="Провайдер AI для генерування",
             )
             generation_model_dropdown = gr.Dropdown(
-                choices=[m.value for m in GenerationModelName if m.value.startswith("
-                value=GenerationModelName.
+                choices=[m.value for m in GenerationModelName if m.value.startswith("ft")],
+                value=GenerationModelName.GPT4_MODEL_9.value,
                 label="Модель для генерування",
             )

-
-        # Провайдер для аналізу
-        analysis_provider_dropdown = gr.Dropdown(
-            choices=[p.value for p in ModelProvider],
-            value=ModelProvider.OPENAI.value,
-            label="Провайдер AI для аналізу",
-        )
-        analysis_model_dropdown = gr.Dropdown(
-            choices=[m.value for m in ModelName if m.value.startswith("gpt")],
-            value=ModelName.GPT4o_MINI.value,
-            label="Модель для аналізу",
-        )
-
-        with gr.Row():
-            generate_position_button = gr.Button("Генерувати короткий зміст позиції суду")
-            search_with_ai_button = gr.Button("Пошук", interactive=False)
-            analyze_button = gr.Button("Аналіз", interactive=False)
-
+        generate_position_button = gr.Button("Генерувати короткий зміст позиції суду")
         position_output = gr.Markdown(label="Короткий зміст позиції суду за введеним рішенням")
-        search_output = gr.Markdown(label="Результат пошуку")
-        analysis_output = gr.Markdown(label="Результат аналізу")
-
-        state_lp_json = gr.State()
-        state_nodes = gr.State()

         # Підключення функцій до кнопок та подій
         generate_position_button.click(
             fn=generate_position_action,
-            inputs=[url_input, generation_provider_dropdown, generation_model_dropdown
-            outputs=
-        ).then(
-            fn=lambda: gr.update(interactive=True),
-            inputs=None,
-            outputs=search_with_ai_button
-        )
-
-        search_with_ai_button.click(
-            fn=search_with_ai_action,
-            inputs=state_lp_json,
-            outputs=[search_output, state_nodes]
-        ).then(
-            fn=lambda: gr.update(interactive=True),
-            inputs=None,
-            outputs=analyze_button
+            inputs=[url_input, generation_provider_dropdown, generation_model_dropdown],
+            outputs=position_output
         )

-
-            fn=analyze_action,
-            inputs=[state_lp_json, question_input, state_nodes, analysis_provider_dropdown, analysis_model_dropdown],
-            outputs=analysis_output
-        )
-
-        # Оновлення списків моделей при зміні провайдера
+        # Оновлення списку моделей при зміні провайдера
         generation_provider_dropdown.change(
             fn=update_generation_model_choices,
             inputs=generation_provider_dropdown,
             outputs=generation_model_dropdown
         )

-        analysis_provider_dropdown.change(
-            fn=update_analysis_model_choices,
-            inputs=analysis_provider_dropdown,
-            outputs=analysis_model_dropdown
-        )
-
     return app
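
A minimal sketch of serving the simplified interface. create_gradio_interface comes from interface.py above, while the app.py wrapper and the launch() arguments are assumptions about how the Space might be started, not something this commit defines:

    # app.py -- hypothetical entry point for the Space
    from interface import create_gradio_interface

    # Build the gr.Blocks app assembled in create_gradio_interface().
    app = create_gradio_interface()

    if __name__ == "__main__":
        # launch() starts the Gradio server; the host/port values here are illustrative.
        app.launch(server_name="0.0.0.0", server_port=7860)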