acecalisto3 committed on
Commit
383bc68
1 Parent(s): 5875237

Update app.py

Files changed (1)
  1. app.py +30 -11
app.py CHANGED
@@ -1,13 +1,12 @@
+import tensorflow
+import torch
 import gradio as gr
-from transformers import pipeline
-from transformers import AutoModelForCausalLM
+from transformers import pipeline, AutoModelForCausalLM, AutoTokenizer
 
-
-
-# Load the NLP pipeline for text classification
+# --- Load the NLP pipeline for text classification ---
 classifier = pipeline("text-classification")
 
-# Define the function to generate mini-apps based on user input
+# --- Define the function to generate mini-apps based on user input ---
 def generate_mini_apps(theme):
     # Use the NLP pipeline to classify the input theme
     classification = classifier(theme)
@@ -31,18 +30,38 @@ def generate_mini_apps(theme):
             'Mood Tracker',
             'Sleep Tracker'
         ]
+    else:
+        mini_apps = ["No matching mini-apps found. Try a different theme."]
 
-    # Return the generated mini-apps
+    # Return the generated mini-apps
     return mini_apps
 
-# Create the Gradio interface
+# --- Load the model and tokenizer from the provided files ---
+model = AutoModelForCausalLM.from_pretrained("./", trust_remote_code=True)  # Load from the current directory
+tokenizer = AutoTokenizer.from_pretrained("./")
+
+# --- Define a function to generate text using the model ---
+def generate_text(input_text):
+    inputs = tokenizer(input_text, return_tensors="pt")
+    output = model.generate(**inputs, max_length=50, num_return_sequences=1)
+    return tokenizer.decode(output[0], skip_special_tokens=True)
+
+# --- Create the Gradio interface ---
 demo = gr.Interface(
     fn=generate_mini_apps,
     inputs=gr.Textbox(label="Enter a theme for your life"),
     outputs=gr.Textbox(label="Generated Mini-Apps"),
     title="AI4ME: Personalized AI Tools",
-    description="Enter a theme for your life and we'll generate a set of AI-powered mini-apps tailored to your specific needs."
+    description="Enter your hobby/interest/job and we'll generate a set of AI-powered mini-apps tailored to your specific needs."
 )
 
-# Launch the Gradio app
-demo.launch()
+# --- Add a text generation tab ---
+with gr.Blocks() as demo_text:
+    gr.Markdown("## Text Generation")
+    input_text = gr.Textbox(label="Enter your text")
+    output_text = gr.Textbox(label="Generated Text")
+    input_text.submit(generate_text, inputs=input_text, outputs=output_text)
+
+# --- Launch the Gradio app ---
+demo.launch(share=True)  # Share the app publicly
+demo_text.launch(share=True)
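
A minimal sketch, not part of the commit: because the first launch() call blocks the script by default, demo_text.launch(share=True) only runs after the first server exits. Assuming the demo and demo_text objects defined in the new app.py above, both UIs could be served from a single launch with gr.TabbedInterface:

# Sketch only (not in the commit): serve both UIs from one app.
# Assumes `demo` (mini-app generator) and `demo_text` (text generation)
# are the Gradio objects defined in app.py above.
import gradio as gr

tabbed = gr.TabbedInterface(
    [demo, demo_text],                           # each entry becomes a tab
    tab_names=["Mini-Apps", "Text Generation"],
    title="AI4ME: Personalized AI Tools",
)

tabbed.launch()  # a single launch serves both tabs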