Spaces:
Paused
Paused
TharunSivamani
committed on
Commit
•
f3f33cf
1
Parent(s):
5b30cb7
updated
Browse files
app.py
CHANGED
@@ -6,11 +6,10 @@ DEVICE = "cuda" if torch.cuda.is_available() else "cpu"
|
|
6 |
model = GPTLanguageModel().to(DEVICE)
|
7 |
model.load_state_dict(torch.load("mini-gpt.pth",map_location=DEVICE), strict=False)
|
8 |
model.eval()
|
9 |
-
answer = decode(model.generate(context, max_new_tokens=500)[0].tolist())
|
10 |
|
11 |
def display(text,number):
|
12 |
-
|
13 |
-
return
|
14 |
|
15 |
input_box = gr.Textbox(label="Story Lines",value="Once Upon a Time")
|
16 |
input_slider = gr.Slider(minimum=200, maximum=500, label="Select the maxium number of tokens/words:",step=100)
|
|
|
6 |
model = GPTLanguageModel().to(DEVICE)
|
7 |
model.load_state_dict(torch.load("mini-gpt.pth",map_location=DEVICE), strict=False)
|
8 |
model.eval()
|
|
|
9 |
|
10 |
def display(text,number):
|
11 |
+
answer = decode(model.generate(context, max_new_tokens=number)[0].tolist())
|
12 |
+
return answer
|
13 |
|
14 |
input_box = gr.Textbox(label="Story Lines",value="Once Upon a Time")
|
15 |
input_slider = gr.Slider(minimum=200, maximum=500, label="Select the maxium number of tokens/words:",step=100)
|