From e99773bf6944651662c95156c6c18079bd103daf Mon Sep 17 00:00:00 2001
From: Joe Cheng
Date: Mon, 16 Dec 2024 16:29:58 -0800
Subject: [PATCH] Update deps, remove elmer from example (for now)

---
 DESCRIPTION        |  6 ++----
 R/chat.R           | 32 +++++++++++++++++++++++++-------
 man/chat_append.Rd | 25 +++++++++++++++++++++----
 3 files changed, 48 insertions(+), 15 deletions(-)

diff --git a/DESCRIPTION b/DESCRIPTION
index 6bc5712..9c6d3e0 100644
--- a/DESCRIPTION
+++ b/DESCRIPTION
@@ -16,13 +16,11 @@ Imports:
     bslib,
     coro,
     htmltools,
+    promises (>= 1.3.2),
     rlang,
-    shiny (> 1.9.1)
+    shiny (>= 1.10.0)
 Suggests:
-    promises,
     testthat (>= 3.0.0)
-Remotes:
-    rstudio/shiny
 Encoding: UTF-8
 Roxygen: list(markdown = TRUE)
 RoxygenNote: 7.3.2
diff --git a/R/chat.R b/R/chat.R
index 8752288..068a93d 100644
--- a/R/chat.R
+++ b/R/chat.R
@@ -167,24 +167,42 @@ chat_ui <- function(
 #'
 #' @examplesIf interactive()
 #' library(shiny)
+#' library(coro)
 #' library(bslib)
-#' library(elmer)
 #' library(shinychat)
-#'
+#'
+#' # Dumbest chatbot in the world: ignores user input and chooses
+#' # a random, vague response. For a chatbot, try {elmer}.
+#' fake_chatbot <- async_generator(function(input) {
+#'   responses <- c(
+#'     "What does that suggest to you?",
+#'     "I see.",
+#'     "I'm not sure I understand you fully.",
+#'     "What do you think?",
+#'     "Can you elaborate on that?",
+#'     "Interesting question! Let's examine thi... **See more**"
+#'   )
+#'
+#'   await(async_sleep(1))
+#'   for (chunk in strsplit(sample(responses, 1), "")[[1]]) {
+#'     yield(chunk)
+#'     await(async_sleep(0.02))
+#'   }
+#' })
+#'
 #' ui <- page_fillable(
 #'   chat_ui("chat", fill = TRUE)
 #' )
-#'
-#' server <- function(input, output, session) {
-#'   chat <- chat_openai(model = "gpt-4o")
 #'
+#' server <- function(input, output, session) {
 #'   observeEvent(input$chat_user_input, {
-#'     response <- chat$stream_async(input$chat_user_input)
+#'     response <- fake_chatbot(input$chat_user_input)
 #'     chat_append("chat", response)
 #'   })
 #' }
-#'
+#'
 #' shinyApp(ui, server)
+#'
 #' @export
 chat_append <- function(id, response, role = c("assistant", "user"), session = getDefaultReactiveDomain()) {
   role <- match.arg(role)
diff --git a/man/chat_append.Rd b/man/chat_append.Rd
index fc8c941..50e5e75 100644
--- a/man/chat_append.Rd
+++ b/man/chat_append.Rd
@@ -54,19 +54,36 @@ promise returned by \code{chat_append}.
 \examples{
 \dontshow{if (interactive()) (if (getRversion() >= "3.4") withAutoprint else force)(\{ # examplesIf}
 library(shiny)
+library(coro)
 library(bslib)
-library(elmer)
 library(shinychat)
 
+# Dumbest chatbot in the world: ignores user input and chooses
+# a random, vague response. For a chatbot, try {elmer}.
+fake_chatbot <- async_generator(function(input) {
+  responses <- c(
+    "What does that suggest to you?",
+    "I see.",
+    "I'm not sure I understand you fully.",
+    "What do you think?",
+    "Can you elaborate on that?",
+    "Interesting question! Let's examine thi... **See more**"
+  )
+
+  await(async_sleep(1))
+  for (chunk in strsplit(sample(responses, 1), "")[[1]]) {
+    yield(chunk)
+    await(async_sleep(0.02))
+  }
+})
+
 ui <- page_fillable(
   chat_ui("chat", fill = TRUE)
 )
 
 server <- function(input, output, session) {
-  chat <- chat_openai(model = "gpt-4o")
-
   observeEvent(input$chat_user_input, {
-    response <- chat$stream_async(input$chat_user_input)
+    response <- fake_chatbot(input$chat_user_input)
     chat_append("chat", response)
   })
 }