bol20162021
committed on
Commit
•
65039e8
1
Parent(s):
05c2f5f
Update README.md
Browse files
README.md
CHANGED
@@ -71,5 +71,46 @@ import torch
|
|
71 |
from transformers import AutoTokenizer, AutoModelForCausalLM
|
72 |
tokenizer = AutoTokenizer.from_pretrained("sambanovasystems/nova-nsql-Llama-2-70B")
|
73 |
model = AutoModelForCausalLM.from_pretrained("sambanovasystems/nova-nsql-Llama-2-70B", torch_dtype=torch.bfloat16)
|
74 |
-
text = "CREATE TABLE stadium (
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
75 |
```
|
|
|
71 |
from transformers import AutoTokenizer, AutoModelForCausalLM
|
72 |
tokenizer = AutoTokenizer.from_pretrained("sambanovasystems/nova-nsql-Llama-2-70B")
|
73 |
model = AutoModelForCausalLM.from_pretrained("sambanovasystems/nova-nsql-Llama-2-70B", torch_dtype=torch.bfloat16)
|
74 |
+
text = "CREATE TABLE stadium (
|
75 |
+
stadium_id number,
|
76 |
+
location text,
|
77 |
+
name text,
|
78 |
+
capacity number,
|
79 |
+
highest number,
|
80 |
+
lowest number,
|
81 |
+
average number
|
82 |
+
)
|
83 |
+
|
84 |
+
CREATE TABLE singer (
|
85 |
+
singer_id number,
|
86 |
+
name text,
|
87 |
+
country text,
|
88 |
+
song_name text,
|
89 |
+
song_release_year text,
|
90 |
+
age number,
|
91 |
+
is_male others
|
92 |
+
)
|
93 |
+
|
94 |
+
CREATE TABLE concert (
|
95 |
+
concert_id number,
|
96 |
+
concert_name text,
|
97 |
+
theme text,
|
98 |
+
stadium_id text,
|
99 |
+
year text
|
100 |
+
)
|
101 |
+
|
102 |
+
CREATE TABLE singer_in_concert (
|
103 |
+
concert_id number,
|
104 |
+
singer_id text
|
105 |
+
)
|
106 |
+
|
107 |
+
|
108 |
+
-- Using valid SQLite, answer the following questions for the tables provided above.
|
109 |
+
|
110 |
+
-- What is the average, minimum, and maximum age of all singers from France?
|
111 |
+
SELECT"
|
112 |
+
input_ids = tokenizer(text, return_tensors="pt").input_ids
|
113 |
+
|
114 |
+
generated_ids = model.generate(input_ids, max_length=500)
|
115 |
+
print(tokenizer.decode(generated_ids[0], skip_special_tokens=True))
|
116 |
```
|