File size: 2,104 Bytes
02a67dd
 
0240a50
02a67dd
 
 
 
0240a50
f4c7831
02a67dd
 
 
 
df10d8f
02a67dd
 
0240a50
02a67dd
9526772
 
0240a50
02a67dd
 
d720083
0240a50
02a67dd
 
0240a50
02a67dd
 
0240a50
02a67dd
 
 
0240a50
02a67dd
 
0240a50
02a67dd
 
 
 
0240a50
02a67dd
 
 
 
 
 
0240a50
02a67dd
0240a50
02a67dd
f4c7831
 
00f8b37
f4c7831
00f8b37
f4c7831
00f8b37
 
 
02a67dd
00f8b37
 
 
 
 
 
f4c7831
 
00f8b37
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
# -*- coding: utf-8 -*-
"""app.ipynb

Automatically generated by Colab.

Original file is located at
    https://colab.research.google.com/drive/1qIFntwH-_zF7GkQbgjKoXMXnQpZ4HVse
"""
"""
import gradio as gr
from transformers import AutoTokenizer,  AutoModelForSequenceClassification

# Load the base model
base_model_name = "Preetham04/sentiment-analysis"
tokenizer = AutoTokenizer.from_pretrained(base_model_name)
model =  AutoModelForSequenceClassification.from_pretrained(base_model_name)

# Load the adapter configuration and model files
adapter_config_path = "config.json"
adapter_model_path = "model.safetensors"

# Load the adapter into the model
adapter_name = "custom_adapter"  # Define your adapter name
model.load_adapter(config_path=adapter_config_path, adapter_path=adapter_model_path, adapter_name=adapter_name)

# Activate the adapter
model.set_active_adapters(adapter_name)

st.title("🤖 Chatbot with Adapter-Enhanced Model")
st.write("Interact with your custom adapter-enhanced model. Type a message and get responses!")

# Initialize or retrieve the chat history
if 'history' not in st.session_state:
    st.session_state['history'] = []

# Initialize Gradio
chatbot = Gradio(model=model, tokenizer=tokenizer)

# Define responses for greetings
@chatbot.on_event("welcome")
def welcome_handler(payload):
    return "Welcome! Type a message and get responses from the chatbot."

# Define responses for user messages
@chatbot.on_message
def message_handler(payload):
    user_input = payload["message"]
    response = chatbot.generate_response(user_input)
    return response

# Run Gradio
if __name__ == "__main__":
    chatbot.run()
"""
import gradio as gr
from transformers import pipeline

# Build a sentiment classifier once at import time.
# NOTE(review): rebinding `pipeline` shadows the `transformers.pipeline` factory
# imported above — a rename (e.g. `classifier`) would be clearer, but `predict`
# below reads this module-level name, so the rename must be done in both places.
pipeline = pipeline(task="text-classification", model="Preetham04/sentiment-analysis")

def predict(input_img):
    """Classify the sentiment of ``input_img`` (a text string).

    Returns a single human-readable string ("LABEL (score%)") for the
    highest-scoring prediction. The original returned a 2-tuple
    ``(input_img, scores_dict)``, but the ``gr.Interface`` below declares
    only one output component (``outputs="text"``), so Gradio would fail
    with "too many output values" — a single return value fixes that.
    """
    # `pipeline` is the module-level text-classification pipeline; it yields
    # a list of {"label": ..., "score": ...} dicts.
    predictions = pipeline(input_img)
    best = max(predictions, key=lambda p: p["score"])
    return f'{best["label"]} ({best["score"]:.2%})'

# Wire the classifier into a minimal web UI: one textbox in, one text field out.
_INTERFACE_CONFIG = {
    "fn": predict,
    "inputs": "textbox",
    "outputs": "text",
    "title": "Sentiment- good or bad?",
}

gradio_app = gr.Interface(**_INTERFACE_CONFIG)

# Start the local Gradio server only when run as a script (not on import).
if __name__ == "__main__":
    gradio_app.launch()