from multilingual_translation import text_to_text_generation
from utils import lang_ids, data_scraping
import gradio as gr

# Human-readable language names (the keys of lang_ids) shown in the UI, and the
# list of available translation models.
lang_list = list(lang_ids.keys())
model_list = data_scraping()

def multilingual_translate(prompt: str, model_id: str, target_lang: str):
    # Map the human-readable language name (e.g. "Turkish") to the language
    # code expected by the translation backend, then run generation on CPU.
    return text_to_text_generation(
        prompt=prompt,
        model_id=model_id,
        device="cpu",
        target_lang=lang_ids[target_lang],
    )


inputs = [
    gr.Textbox(lines=4, value="Hello world!", label="Input Text"),
    gr.Dropdown(model_list, value="facebook/m2m100_418M", label="Model"),
    gr.Dropdown(lang_list, value="Turkish", label="Target Language"),
]

output = gr.Textbox(label="Output Text")

examples = [
    [
        "Hello world!",
        "facebook/m2m100_418M",
        "Turkish",
    ],
    [
        "Omar ve Merve çok iyi arkadaşlar.",
        "facebook/m2m100_418M",
        "Spanish",
    ],
    [
        "Hugging Face is a great company.",
        "facebook/m2m100_418M",
        "French",
    ]
]

title = "Beyond English-Centric Multilingual Machine Translation"

description = "M2M100 is a multilingual encoder-decoder (seq-to-seq) model trained for Many-to-Many multilingual translation. It was introduced in this [paper](https://arxiv.org/abs/2010.11125) and first released in [this](https://github.com/pytorch/fairseq/tree/master/examples/m2m_100) repository."

app = gr.Interface(
    fn=multilingual_translate,
    inputs=inputs,
    outputs=output,
    examples=examples,
    title=title,
    description=description,
    cache_examples=True
)
app.queue()
app.launch(debug=True)