Update app.py
app.py CHANGED
@@ -10,6 +10,17 @@ headers = {
     "Content-Type": "application/json"
 }
 
+
+import { HfInference } from '@huggingface/inference'
+
+hf = new HfInference(API_URL, API_KEY)
+prompt = 'Write instructions to teach anyone to write a discharge plan. List the entities, features and relationships to CCDA and FHIR objects in boldface.'
+stream = hf.textGenerationStream({ inputs: prompt })
+for await (const r of stream) {
+    st.markdown(r.token.text)
+}
+
+
 def query(payload):
     response = requests.post(API_URL, headers=headers, json=payload)
     st.markdown(response.json())
@@ -19,7 +30,7 @@ def get_output(prompt):
     return query({"inputs": prompt})
 
 def main():
-    st.title("Medical
+    st.title("Medical Llama Test Bench with Inference Endpoints Llama 7B")
     example_input = st.text_input("Enter your example text:")
 
     if st.button("Summarize with Variation 1"):
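
The block added in the first hunk is written against the JavaScript '@huggingface/inference' client (import { ... } from, for await), so it will not execute as-is inside the Python app.py. A minimal Python sketch of the same streaming idea, assuming the huggingface_hub package is available and that API_URL and API_KEY are the values already defined at the top of app.py, could look like this:

from huggingface_hub import InferenceClient
import streamlit as st

# Sketch only: stream tokens from the Inference Endpoint and render them as
# they arrive. API_URL and API_KEY are assumed to be defined earlier in app.py.
client = InferenceClient(model=API_URL, token=API_KEY)

prompt = (
    "Write instructions to teach anyone to write a discharge plan. "
    "List the entities, features and relationships to CCDA and FHIR objects in boldface."
)

# With stream=True, text_generation yields incremental text chunks.
for chunk in client.text_generation(prompt, stream=True):
    st.markdown(chunk)

Calling st.markdown per chunk writes each piece on its own line; accumulating the chunks in a string and rewriting a single st.empty() placeholder gives a smoother streaming display.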