File size: 2,177 Bytes
aebf357
204c916
aebf357
 
 
 
 
0dc1562
69f6ec4
aebf357
 
69f6ec4
aebf357
 
02ac1ae
aebf357
 
 
02ac1ae
 
aebf357
 
 
e732bbf
 
 
 
aebf357
 
 
 
 
204c916
 
aebf357
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
812370d
aebf357
 
 
927b6fc
aebf357
 
 
 
 
 
 
 
927b6fc
aebf357
835cde3
 
 
 
 
aebf357
204c916
aebf357
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
import os
import re
from datetime import date

import gradio as gr
from peft import PeftModel
from transformers import AutoTokenizer, AutoModelForCausalLM


# Hugging Face access token for the gated Llama-2 checkpoint.
# SECURITY: a real token was previously hard-coded here and committed to
# source — any such leaked token must be revoked on huggingface.co.
# Read it from the environment instead.
access_token = os.environ.get("HF_TOKEN", "")

# Load the Llama-2-7B chat base model, sharding across available devices
# ("auto") and spilling overflow weights to disk under offload/.
base_model = AutoModelForCausalLM.from_pretrained(
    'meta-llama/Llama-2-7b-chat-hf',
    token=access_token,
    trust_remote_code=True,
    device_map="auto",
    offload_folder="offload/"
)
# Stack the FinGPT Dow-30 forecaster LoRA adapter on top of the base weights.
model = PeftModel.from_pretrained(
    base_model,
    'FinGPT/fingpt-forecaster_dow30_llama2-7b_lora',
    offload_folder="offload/"
)
model = model.eval()  # inference mode (disables dropout)

# Tokenizer matching the base checkpoint.
tokenizer = AutoTokenizer.from_pretrained(
    'meta-llama/Llama-2-7b-chat-hf',
    token=access_token
)


def construct_prompt(ticker, date, n_weeks):
    """Return the prompt string "<ticker>, <date>, <n_weeks>".

    NOTE: the *date* parameter shadows the ``datetime.date`` import
    inside this function body.
    """
    return f"{ticker}, {date}, {n_weeks}"


def get_curday():
    """Return today's date formatted as "YYYY-MM-DD"."""
    today = date.today()
    return f"{today:%Y-%m-%d}"


def predict(ticker, date, n_weeks):
    """Gradio handler: build and echo the forecasting prompt.

    Model inference is currently disabled — the original generation
    pipeline (tokenize the prompt, ``model.generate`` with a 4096-token
    limit, decode, then strip everything up to the ``[/INST]`` marker)
    was commented out, so the raw prompt string is returned as the
    response instead.

    NOTE: *date* shadows the ``datetime.date`` import here.
    """
    # TODO(review): re-enable the generate/decode pipeline once the
    # deployment has the resources to run it.
    return construct_prompt(ticker, date, n_weeks)


# Wire the predict handler into a three-input Gradio form and serve it.
demo = gr.Interface(
    predict,
    inputs=[
        gr.Textbox(
            label="Ticker",
            value="AAPL",
            # Fixed typo: "Companys" -> "Companies".
            info="Companies from Dow-30 are recommended"
        ),
        gr.Textbox(
            label="Date",
            # Passing the callable (not its result) makes Gradio
            # re-evaluate the default on every page load.
            value=get_curday,
            info="Date from which the prediction is made, use format yyyy-mm-dd"
        ),
        gr.Slider(
            minimum=1,
            maximum=4,
            value=3,
            step=1,
            label="n_weeks",
            info="Information of the past n weeks will be utilized, choose between 1 and 4"
        )
    ],
    outputs=[
        gr.Textbox(
            label="Response"
        )
    ]
)

demo.launch()