jarif committed
Commit fa2b247
1 Parent(s): 4961115

Update app.py

Files changed (1): app.py (+12 -12)
app.py CHANGED
@@ -1,24 +1,24 @@
  import torch
  from fastai.text.all import *
- from blurr.text.data.all import *
- from blurr.text.modeling.all import *  # Import only needed functions
- from transformers import T5Tokenizer, T5ForConditionalGeneration  # Use T5 specifically
+ from blurr.data.all import *
+ from blurr.modeling.all import *
+ from transformers import BartForConditionalGeneration
 
  # Load the pre-trained model and tokenizer (adjust for Bart if needed)
  pretrained_model_name = "facebook/bart-large-cnn"  # Or "facebook/bart-base"
- hf_tokenizer = T5Tokenizer.from_pretrained(pretrained_model_name)
+ hf_tokenizer = BartTokenizer.from_pretrained(pretrained_model_name)
 
  def summarize(article):
-     # Define your data transformation pipeline here, if applicable
-     # ...
+     # Define your data transformation pipeline here, if applicable
+     # ...
 
-     # Load the exported model
-     learn = load_learner('article_highlights.pkl')
+     # Load the exported model
+     learn = load_learner('article_highlights.pkl')
 
-     # Generate the summary
-     summary = learn.predict(article)[0]['highlights']
+     # Generate the summary
+     summary = learn.blurr_generate(article)[0]
 
-     return summary
+     return summary
 
  # Create the Gradio interface
  iface = gr.Interface(
@@ -31,4 +31,4 @@ iface = gr.Interface(
  )
 
  # Launch the Gradio interface
- iface.launch()
+ iface.launch()
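Note that the changed hunks reference BartTokenizer and gr.Interface without showing where gradio or BartTokenizer are imported, and lines 25-30 of the file (the middle of the gr.Interface(...) call) are outside the diff. A minimal runnable sketch of app.py after this commit, assuming those imports sit at the top of the file and a plain Textbox-in / Textbox-out interface (the Interface arguments, labels, and title below are assumptions, not the committed values), could look like this:

```python
# Sketch of app.py after commit fa2b247 (not the verbatim file).
# Assumed: `import gradio as gr`, the BartTokenizer import, and the
# gr.Interface(...) arguments, none of which appear in the changed hunks.
import torch
import gradio as gr
from fastai.text.all import *
from blurr.data.all import *
from blurr.modeling.all import *
from transformers import BartTokenizer, BartForConditionalGeneration

# Tokenizer for the BART summarization checkpoint
pretrained_model_name = "facebook/bart-large-cnn"  # or "facebook/bart-base"
hf_tokenizer = BartTokenizer.from_pretrained(pretrained_model_name)

def summarize(article):
    # Load the exported fastai/blurr Learner (per call, as in the committed code)
    learn = load_learner('article_highlights.pkl')
    # blurr patches Learner with blurr_generate() for seq2seq text generation;
    # it returns a list of generated strings, so take the first one
    summary = learn.blurr_generate(article)[0]
    return summary

# Gradio interface: the exact arguments are not visible in the diff,
# so this Textbox-based layout is an assumption
iface = gr.Interface(
    fn=summarize,
    inputs=gr.Textbox(lines=10, label="Article"),
    outputs=gr.Textbox(label="Summary"),
    title="Article Summarizer",
)

# Launch the Gradio interface
iface.launch()
```

Calling load_learner() inside summarize() matches the committed code but re-reads article_highlights.pkl on every request; hoisting it to module level would load the model once at startup.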