Update README.md
Browse files
README.md
CHANGED
@@ -1,14 +1,36 @@
|
|
1 |
---
|
2 |
-
# For reference on model card metadata, see the spec: https://github.com/huggingface/hub-docs/blob/main/modelcard.md?plain=1
|
3 |
-
# Doc / guide: https://huggingface.co/docs/hub/model-cards
|
4 |
-
|
5 |
widget:
|
6 |
-
- text:
|
7 |
-
|
8 |
-
|
9 |
-
|
10 |
-
|
11 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
12 |
---
|
13 |
|
14 |
|
@@ -37,18 +59,22 @@ Addressing the power of LLMs in fine-tuned downstream tasks. Implemented as a person
|
|
37 |
### How to use
|
38 |
|
39 |
# Load model directly
|
|
|
40 |
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
|
41 |
|
42 |
tokenizer = AutoTokenizer.from_pretrained("SwastikM/bart-large-nl2sql")
|
43 |
|
44 |
model = AutoModelForSeq2SeqLM.from_pretrained("SwastikM/bart-large-nl2sql")
|
45 |
|
46 |
-
query_question_with_context = "sql_prompt: Which economic diversification efforts in
|
|
|
|
|
|
|
47 |
|
48 |
sql = model.generate(text)
|
49 |
|
50 |
print(sql)
|
51 |
-
|
52 |
|
53 |
|
54 |
## Training Details
|
@@ -57,8 +83,6 @@ print(sql)
|
|
57 |
|
58 |
[gretelai/synthetic_text_to_sql](https://huggingface.co/datasets/gretelai/synthetic_text_to_sql)
|
59 |
|
60 |
-
[More Information Needed]
|
61 |
-
|
62 |
### Training Procedure
|
63 |
|
64 |
HuggingFace Accelerate with Training Loop.
|
@@ -89,7 +113,7 @@ HuggingFace Accelerate with Training Loop.
|
|
89 |
|
90 |
#### Hardware
|
91 |
|
92 |
-
|
93 |
|
94 |
|
95 |
## Citation
|
@@ -132,4 +156,4 @@ HuggingFace Accelerate with Training Loop.
|
|
132 |
|
133 |
## Model Card Authors
|
134 |
|
135 |
-
Swastik Maiti
|
|
|
1 |
---
|
|
|
|
|
|
|
2 |
widget:
|
3 |
+
- text: >-
|
4 |
+
sql_prompt: What is the monthly voice usage for each customer in the Mumbai
|
5 |
+
region? sql_context: CREATE TABLE customers (customer_id INT, name
|
6 |
+
VARCHAR(50), voice_usage_minutes FLOAT, region VARCHAR(50)); INSERT INTO
|
7 |
+
customers (customer_id, name, voice_usage_minutes, region) VALUES (1, 'Aarav
|
8 |
+
Patel', 500, 'Mumbai'), (2, 'Priya Shah', 700, 'Mumbai');
|
9 |
+
example_title: Example1
|
10 |
+
- text: >-
|
11 |
+
sql_prompt: How many wheelchair accessible vehicles are there in the 'Train'
|
12 |
+
mode of transport? sql_context: CREATE TABLE Vehicles(vehicle_id INT,
|
13 |
+
vehicle_type VARCHAR(20), mode_of_transport VARCHAR(20),
|
14 |
+
is_wheelchair_accessible BOOLEAN); INSERT INTO Vehicles(vehicle_id,
|
15 |
+
vehicle_type, mode_of_transport, is_wheelchair_accessible) VALUES (1,
|
16 |
+
'Train_Car', 'Train', TRUE), (2, 'Train_Engine', 'Train', FALSE), (3, 'Bus',
|
17 |
+
'Bus', TRUE);
|
18 |
+
example_title: Example2
|
19 |
+
- text: >-
|
20 |
+
sql_prompt: Which economic diversification efforts in the 'diversification'
|
21 |
+
table have a higher budget than the average budget for all economic
|
22 |
+
diversification efforts in the 'budget' table? sql_context: CREATE TABLE
|
23 |
+
diversification (id INT, effort VARCHAR(50), budget FLOAT); CREATE TABLE
|
24 |
+
budget (diversification_id INT, diversification_effort VARCHAR(50), amount
|
25 |
+
FLOAT);
|
26 |
+
example_title: Example3
|
27 |
+
language:
|
28 |
+
- en
|
29 |
+
datasets:
|
30 |
+
- gretelai/synthetic_text_to_sql
|
31 |
+
metrics:
|
32 |
+
- rouge
|
33 |
+
library_name: transformers
|
34 |
---
|
35 |
|
36 |
|
|
|
59 |
### How to use
|
60 |
|
61 |
# Load model directly
|
62 |
+
```python
|
63 |
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
|
64 |
|
65 |
tokenizer = AutoTokenizer.from_pretrained("SwastikM/bart-large-nl2sql")
|
66 |
|
67 |
model = AutoModelForSeq2SeqLM.from_pretrained("SwastikM/bart-large-nl2sql")
|
68 |
|
69 |
+
query_question_with_context = """sql_prompt: Which economic diversification efforts in
|
70 |
+
the 'diversification' table have a higher budget than the average budget for all economic diversification efforts in the 'budget' table?
|
71 |
+
sql_context: CREATE TABLE diversification (id INT, effort VARCHAR(50), budget FLOAT); CREATE TABLE
|
72 |
+
budget (diversification_id INT, diversification_effort VARCHAR(50), amount FLOAT);"""
|
73 |
|
74 |
sql = model.generate(**tokenizer(query_question_with_context, return_tensors="pt"))
|
75 |
|
76 |
print(tokenizer.decode(sql[0], skip_special_tokens=True))
|
77 |
+
```
|
78 |
|
79 |
|
80 |
## Training Details
|
|
|
83 |
|
84 |
[gretelai/synthetic_text_to_sql](https://huggingface.co/datasets/gretelai/synthetic_text_to_sql)
|
85 |
|
|
|
|
|
86 |
### Training Procedure
|
87 |
|
88 |
HuggingFace Accelerate with Training Loop.
|
|
|
113 |
|
114 |
#### Hardware
|
115 |
|
116 |
+
- **GPU:** P100
|
117 |
|
118 |
|
119 |
## Citation
|
|
|
156 |
|
157 |
## Model Card Authors
|
158 |
|
159 |
+
Swastik Maiti
|