Skip to content

Commit

Permalink
remove static while pending behaviour (#7410)
Browse files Browse the repository at this point in the history
* remove static while pending behaviour

* add changeset

* fix notebooks

* add changeset

* cleanup unused code + fix test

* fix notebooks

* oops

* re-add check

---------

Co-authored-by: gradio-pr-bot <gradio-pr-bot@users.noreply.github.com>
  • Loading branch information
pngwn and gradio-pr-bot committed Feb 14, 2024
1 parent 32b317f commit c2dfc59
Show file tree
Hide file tree
Showing 8 changed files with 97 additions and 74 deletions.
6 changes: 6 additions & 0 deletions .changeset/crazy-ghosts-run.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
---
"@gradio/app": patch
"gradio": patch
---

fix:remove static while pending behaviour
2 changes: 1 addition & 1 deletion demo/chatinterface_streaming_echo/run.ipynb
Original file line number Diff line number Diff line change
@@ -1 +1 @@
{"cells": [{"cell_type": "markdown", "id": "302934307671667531413257853548643485645", "metadata": {}, "source": ["# Gradio Demo: chatinterface_streaming_echo"]}, {"cell_type": "code", "execution_count": null, "id": "272996653310673477252411125948039410165", "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": "288918539441861185822528903084949547379", "metadata": {}, "outputs": [], "source": ["import time\n", "import gradio as gr\n", "\n", "def slow_echo(message, history):\n", " for i in range(len(message)):\n", " time.sleep(0.05)\n", " yield \"You typed: \" + message[: i+1]\n", "\n", "demo = gr.ChatInterface(slow_echo).queue()\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5}
{"cells": [{"cell_type": "markdown", "id": "302934307671667531413257853548643485645", "metadata": {}, "source": ["# Gradio Demo: chatinterface_streaming_echo"]}, {"cell_type": "code", "execution_count": null, "id": "272996653310673477252411125948039410165", "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": "288918539441861185822528903084949547379", "metadata": {}, "outputs": [], "source": ["import time\n", "import gradio as gr\n", "\n", "\n", "def slow_echo(message, history):\n", " for i in range(len(message)):\n", " time.sleep(0.05)\n", " yield \"You typed: \" + message[: i + 1]\n", "\n", "\n", "demo = gr.ChatInterface(slow_echo).queue()\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5}
4 changes: 3 additions & 1 deletion demo/chatinterface_streaming_echo/run.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,12 @@
import time
import gradio as gr


def slow_echo(message, history):
for i in range(len(message)):
time.sleep(0.05)
yield "You typed: " + message[: i+1]
yield "You typed: " + message[: i + 1]


demo = gr.ChatInterface(slow_echo).queue()

Expand Down
1 change: 1 addition & 0 deletions demo/test_chatinterface_streaming_echo/run.ipynb
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
{"cells": [{"cell_type": "markdown", "id": "302934307671667531413257853548643485645", "metadata": {}, "source": ["# Gradio Demo: test_chatinterface_streaming_echo"]}, {"cell_type": "code", "execution_count": null, "id": "272996653310673477252411125948039410165", "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": "288918539441861185822528903084949547379", "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "\n", "runs = 0\n", "\n", "\n", "def slow_echo(message, history):\n", " global runs # i didn't want to add state or anything to this demo\n", " runs = runs + 1\n", " for i in range(len(message)):\n", " yield f\"Run {runs} - You typed: \" + message[: i + 1]\n", "\n", "\n", "demo = gr.ChatInterface(slow_echo).queue()\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5}
16 changes: 16 additions & 0 deletions demo/test_chatinterface_streaming_echo/run.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
import gradio as gr

runs = 0


def slow_echo(message, history):
    """Stream the user's message back one character at a time.

    Each invocation bumps a module-level counter so the e2e test can tell
    successive runs apart (e.g. after Retry/Undo).
    """
    global runs  # module-level counter: simplest way to track runs in this demo
    runs += 1
    for end in range(1, len(message) + 1):
        yield f"Run {runs} - You typed: " + message[:end]


# Wrap the streaming generator in a ChatInterface; .queue() enables the
# queueing needed for generator (streaming) responses.
demo = gr.ChatInterface(slow_echo).queue()

# Launch the local server only when executed as a script, not when imported
# (e.g. by the notebook generator).
if __name__ == "__main__":
    demo.launch()
32 changes: 3 additions & 29 deletions js/app/src/Blocks.svelte
Original file line number Diff line number Diff line change
Expand Up @@ -343,11 +343,7 @@
rootNode = rootNode;
}, 50);
async function handle_update(
data: any,
fn_index: number,
outputs_set_to_non_interactive: number[]
): Promise<void> {
async function handle_update(data: any, fn_index: number): Promise<void> {
const outputs = dependencies[fn_index].outputs;
data?.forEach((value: any, i: number) => {
Expand All @@ -369,9 +365,6 @@
continue;
} else {
output.props[update_key] = update_value;
if (update_key == "interactive" && !update_value) {
outputs_set_to_non_interactive.push(outputs[i]);
}
}
}
} else {
Expand Down Expand Up @@ -485,7 +478,7 @@
payload.data = v;
make_prediction(payload);
} else {
handle_update(v, dep_index, []);
handle_update(v, dep_index);
}
});
} else {
Expand All @@ -505,8 +498,6 @@
}
function make_prediction(payload: Payload): void {
const pending_outputs: number[] = [];
let outputs_set_to_non_interactive: number[] = [];
const submission = app
.submit(
payload.fn_index,
Expand All @@ -520,27 +511,10 @@
make_prediction(dep.final_event);
}
dep.pending_request = false;
handle_update(data, fn_index, outputs_set_to_non_interactive);
handle_update(data, fn_index);
})
.on("status", ({ fn_index, ...status }) => {
tick().then(() => {
const outputs = dependencies[fn_index].outputs;
outputs.forEach((id) => {
if (
instance_map[id].props.interactive &&
status.stage === "pending" &&
!["focus", "key_up"].includes(dep.targets[0][1])
) {
pending_outputs.push(id);
instance_map[id].props.interactive = false;
} else if (
["complete", "error"].includes(status.stage) &&
pending_outputs.includes(id) &&
!outputs_set_to_non_interactive.includes(id)
) {
instance_map[id].props.interactive = true;
}
});
//@ts-ignore
loading_status.update({
...status,
Expand Down
43 changes: 0 additions & 43 deletions js/app/test/chatinterface_streaming_echo.spec.ts

This file was deleted.

67 changes: 67 additions & 0 deletions js/app/test/test_chatinterface_streaming_echo.spec.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,67 @@
import { test, expect } from "@gradio/tootils";

test("chatinterface works with streaming functions and all buttons behave as expected", async ({
	page
}) => {
	// Locators for the ChatInterface controls under test.
	const submit_button = page.getByRole("button", { name: "Submit" });
	const retry_button = page.getByRole("button", { name: "🔄 Retry" });
	const undo_button = page.getByRole("button", { name: "↩️ Undo" });
	const clear_button = page.getByRole("button", { name: "🗑️ Clear" });
	const textbox = page.getByPlaceholder("Type a message...");

	// Run 1: submitting streams the echoed message and clears the input.
	await textbox.fill("hello");
	await submit_button.click();

	await expect(textbox).toHaveValue("");
	const expected_text_el_0 = page.locator(".bot p", {
		hasText: "Run 1 - You typed: hello"
	});
	await expect(expected_text_el_0).toBeVisible();
	await expect
		.poll(async () => page.locator(".bot.message").count(), { timeout: 2000 })
		.toBe(1);

	// Run 2: a second submission appends a second bot message.
	await textbox.fill("hi");
	await submit_button.click();
	await expect(textbox).toHaveValue("");
	const expected_text_el_1 = page.locator(".bot p", {
		hasText: "Run 2 - You typed: hi"
	});
	await expect(expected_text_el_1).toBeVisible();
	await expect
		.poll(async () => page.locator(".bot.message").count(), { timeout: 2000 })
		.toBe(2);

	// Undo removes the last exchange and restores its text to the input.
	await undo_button.click();
	await expect
		.poll(async () => page.locator(".message.bot").count(), { timeout: 5000 })
		.toBe(1);
	await expect(textbox).toHaveValue("hi");

	// Retry re-runs the previous prompt ("hello") as a new run (Run 3).
	await retry_button.click();
	const expected_text_el_2 = page.locator(".bot p", {
		hasText: "Run 3 - You typed: hello"
	});
	// NOTE: was `expect(...)` without `await` — web-first assertions return a
	// Promise and must be awaited, otherwise the check is a floating promise.
	await expect(textbox).toHaveValue("");
	await expect(expected_text_el_2).toBeVisible();

	await expect
		.poll(async () => page.locator(".message.bot").count(), { timeout: 5000 })
		.toBe(1);

	// Run 4: submitting again after retry still works.
	await textbox.fill("hi");
	await submit_button.click();
	await expect(textbox).toHaveValue("");
	const expected_text_el_3 = page.locator(".bot p", {
		hasText: "Run 4 - You typed: hi"
	});
	await expect(expected_text_el_3).toBeVisible();
	await expect
		.poll(async () => page.locator(".bot.message").count(), { timeout: 2000 })
		.toBe(2);

	// Clear empties the chat history entirely.
	await clear_button.click();
	await expect
		.poll(async () => page.locator(".bot.message").count(), { timeout: 5000 })
		.toBe(0);
});

0 comments on commit c2dfc59

Please sign in to comment.