Spaces:
Running
on
A10G
Running
on
A10G
artificialguybr
committed on
Commit
•
1e1d43c
1
Parent(s):
e8c9f8e
Update app.py
Browse files
app.py
CHANGED
@@ -2,8 +2,7 @@ import os
|
|
2 |
import gradio as gr
|
3 |
import mdtex2html
|
4 |
import torch
|
5 |
-
from transformers
|
6 |
-
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
|
7 |
|
8 |
# Initialize model and tokenizer
|
9 |
model_name_or_path = "teknium/OpenHermes-2-Mistral-7B"
|
@@ -15,6 +14,8 @@ model = AutoModelForCausalLM.from_pretrained(model_name_or_path,
|
|
15 |
revision="main")
|
16 |
|
17 |
tokenizer = AutoTokenizer.from_pretrained(model_name_or_path, use_fast=True)
|
|
|
|
|
18 |
# Postprocess function
|
19 |
def postprocess(self, y):
|
20 |
if y is None:
|
@@ -126,4 +127,4 @@ def _launch_demo(args, model, tokenizer, config):
|
|
126 |
|
127 |
# Main execution
|
128 |
if __name__ == "__main__":
|
129 |
-
_launch_demo(None, model, tokenizer)
|
|
|
2 |
import gradio as gr
|
3 |
import mdtex2html
|
4 |
import torch
|
5 |
+
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline, MistralConfig
|
|
|
6 |
|
7 |
# Initialize model and tokenizer
|
8 |
model_name_or_path = "teknium/OpenHermes-2-Mistral-7B"
|
|
|
14 |
revision="main")
|
15 |
|
16 |
tokenizer = AutoTokenizer.from_pretrained(model_name_or_path, use_fast=True)
|
17 |
+
config = MistralConfig()
|
18 |
+
|
19 |
# Postprocess function
|
20 |
def postprocess(self, y):
|
21 |
if y is None:
|
|
|
127 |
|
128 |
# Main execution
|
129 |
if __name__ == "__main__":
|
130 |
+
_launch_demo(None, model, tokenizer, config)
|