BTLM-3B-8k-chat / app.py
import gradio as gr
# Wrapper function that loads the model and runs inference on the input text
def model_wrapper(text):
    try:
        # Load the model from the Hugging Face Hub (served via the Inference API)
        model = gr.load("models/cerebras/btlm-3b-8k-chat")
        # Run the model on the input text and return its output
        return model(text)
    except Exception as e:
        # Report the error to the user instead of crashing the app
        return f"An error occurred: {str(e)}"
# Setup Gradio interface
iface = gr.Interface(
    fn=model_wrapper,  # function to expose
    inputs="text",     # input component type
    outputs="text"     # output component type
)
# Launch the Gradio app
iface.launch()
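
Note that model_wrapper calls gr.load on every request, so the model interface is reloaded each time. A minimal alternative sketch (same Gradio pattern as above, just restructured; not the Space's committed code) loads it once at startup and reuses it:

import gradio as gr

# Load the hosted model once at startup and reuse it across requests
model = gr.load("models/cerebras/btlm-3b-8k-chat")

def model_wrapper(text):
    try:
        # Reuse the already-loaded interface as a function
        return model(text)
    except Exception as e:
        return f"An error occurred: {str(e)}"

gr.Interface(fn=model_wrapper, inputs="text", outputs="text").launch()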