aliabd (HF staff) committed on
Commit
380ea53
1 Parent(s): 6af8ce4

Upload folder using huggingface_hub

Files changed (4)
  1. README.md +1 -1
  2. messages_testcase.py +15 -0
  3. run.ipynb +1 -1
  4. run.py +1 -1
README.md CHANGED
@@ -5,7 +5,7 @@ emoji: 🔥
  colorFrom: indigo
  colorTo: indigo
  sdk: gradio
- sdk_version: 4.37.2
+ sdk_version: 4.38.0
  app_file: run.py
  pinned: false
  hf_oauth: true
messages_testcase.py ADDED
@@ -0,0 +1,15 @@
+ import time
+ import gradio as gr
+
+
+ def slow_echo(message, history):
+     for i in range(len(message)):
+         time.sleep(0.05)
+         yield "You typed: " + message[: i + 1]
+
+
+
+ demo = gr.ChatInterface(slow_echo, type="messages")
+
+ if __name__ == "__main__":
+     demo.launch()
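Note: with type="messages", the history argument is delivered as OpenAI-style dicts ({"role": ..., "content": ...}) rather than [user, bot] pairs. A minimal sketch of a handler that reads that format, assuming Gradio >= 4.38 as pinned above; the turn-counting prefix is illustrative and not part of this repo:

import time
import gradio as gr


def slow_echo_messages(message, history):
    # With type="messages", each history entry is expected to be a dict
    # such as {"role": "user", "content": "..."}.
    user_turns = sum(1 for m in history if m.get("role") == "user")
    for i in range(len(message)):
        time.sleep(0.05)
        yield f"(turn {user_turns + 1}) You typed: " + message[: i + 1]


demo = gr.ChatInterface(slow_echo_messages, type="messages")

if __name__ == "__main__":
    demo.launch()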
run.ipynb CHANGED
@@ -1 +1 @@
- {"cells": [{"cell_type": "markdown", "id": "302934307671667531413257853548643485645", "metadata": {}, "source": ["# Gradio Demo: chatinterface_streaming_echo"]}, {"cell_type": "code", "execution_count": null, "id": "272996653310673477252411125948039410165", "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": "288918539441861185822528903084949547379", "metadata": {}, "outputs": [], "source": ["import time\n", "import gradio as gr\n", "\n", "\n", "def slow_echo(message, history):\n", " for i in range(len(message)):\n", " time.sleep(0.05)\n", " yield \"You typed: \" + message[: i + 1]\n", "\n", "\n", "demo = gr.ChatInterface(slow_echo).queue()\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5}
+ {"cells": [{"cell_type": "markdown", "id": "302934307671667531413257853548643485645", "metadata": {}, "source": ["# Gradio Demo: chatinterface_streaming_echo"]}, {"cell_type": "code", "execution_count": null, "id": "272996653310673477252411125948039410165", "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": "288918539441861185822528903084949547379", "metadata": {}, "outputs": [], "source": ["# Downloading files from the demo repo\n", "import os\n", "!wget -q https://github.com/gradio-app/gradio/raw/main/demo/chatinterface_streaming_echo/messages_testcase.py"]}, {"cell_type": "code", "execution_count": null, "id": "44380577570523278879349135829904343037", "metadata": {}, "outputs": [], "source": ["import time\n", "import gradio as gr\n", "\n", "\n", "def slow_echo(message, history):\n", " for i in range(len(message)):\n", " time.sleep(0.05)\n", " yield \"You typed: \" + message[: i + 1]\n", "\n", "\n", "demo = gr.ChatInterface(slow_echo)\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5}
run.py CHANGED
@@ -8,7 +8,7 @@ def slow_echo(message, history):
          yield "You typed: " + message[: i + 1]


- demo = gr.ChatInterface(slow_echo).queue()
+ demo = gr.ChatInterface(slow_echo)

  if __name__ == "__main__":
      demo.launch()
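Note: dropping .queue() relies on the queue being enabled by default in Gradio 4.x, so the streaming generator keeps working without an explicit call. queue() remains available purely for configuration; a minimal sketch, where the default_concurrency_limit value is an illustrative assumption rather than anything set in this repo:

import time
import gradio as gr


def slow_echo(message, history):
    for i in range(len(message)):
        time.sleep(0.05)
        yield "You typed: " + message[: i + 1]


demo = gr.ChatInterface(slow_echo)

# Optional: tune the already-enabled queue, e.g. cap concurrent requests
# per event handler (parameter name as in Gradio 4.x).
demo.queue(default_concurrency_limit=2)

if __name__ == "__main__":
    demo.launch()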