Skip to content

Commit

Permalink
Merge pull request #9 from shaynweidner/main
Browse files Browse the repository at this point in the history
This is awesome!
I will do some minor code accommodation.
Thanks for your contribution!!
  • Loading branch information
jcrodriguez1989 authored Mar 3, 2023
2 parents 7dda9c5 + ba676ea commit 8471686
Show file tree
Hide file tree
Showing 14 changed files with 74 additions and 27 deletions.
4 changes: 2 additions & 2 deletions DESCRIPTION
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
Package: chatgpt
Type: Package
Title: Interface to 'ChatGPT' from R
Version: 0.1.5
Version: 0.2.0
Authors@R: c(
person(
given = "Juan Cruz", family = "Rodriguez", role = c("aut", "cre"),
Expand All @@ -16,7 +16,7 @@ URL: https://github.com/jcrodriguez1989/chatgpt
BugReports: https://github.com/jcrodriguez1989/chatgpt/issues
Encoding: UTF-8
LazyData: true
RoxygenNote: 7.2.0
RoxygenNote: 7.2.3
Imports:
httr,
jsonlite,
Expand Down
5 changes: 1 addition & 4 deletions R/addins.R
Original file line number Diff line number Diff line change
Expand Up @@ -70,10 +70,7 @@ run_addin_ask_chatgpt <- function() {
))
server <- function(input, output, session) {
observeEvent(input$ask_button, {
chatgpt_reply <- trimws(sapply(
getFromNamespace("gpt_get_completions", "chatgpt")(input$question)$choices,
function(x) x$text
))
chatgpt_reply <- ask_chatgpt(input$question)
if (as.logical(Sys.getenv("OPENAI_VERBOSE", TRUE))) {
cat(paste0("\n*** ChatGPT output:\n\n", chatgpt_reply, "\n"))
}
Expand Down
2 changes: 1 addition & 1 deletion R/ask_chatgpt.R
Original file line number Diff line number Diff line change
Expand Up @@ -12,5 +12,5 @@
#' @export
#'
ask_chatgpt <- function(question) {
trimws(sapply(gpt_get_completions(question)$choices, function(x) x$text))
parse_response(gpt_get_completions(question))
}
2 changes: 1 addition & 1 deletion R/comment_code.R
Original file line number Diff line number Diff line change
Expand Up @@ -13,5 +13,5 @@
#'
comment_code <- function(code) {
prompt <- paste0('Add inline comments to the following R code: "', code, '"')
trimws(sapply(gpt_get_completions(prompt)$choices, function(x) x$text))
parse_response(gpt_get_completions(prompt))
}
2 changes: 1 addition & 1 deletion R/create_unit_tests.R
Original file line number Diff line number Diff line change
Expand Up @@ -17,5 +17,5 @@ create_unit_tests <- function(code) {
prompt <- paste0(
'Create a full testthat file, with test cases for the following R code: "', code, '"'
)
trimws(sapply(gpt_get_completions(prompt)$choices, function(x) x$text))
parse_response(gpt_get_completions(prompt))
}
2 changes: 1 addition & 1 deletion R/create_variable_name.R
Original file line number Diff line number Diff line change
Expand Up @@ -13,5 +13,5 @@
#'
create_variable_name <- function(code) {
prompt <- paste0('Give a good variable name to the result of the following R code: "', code, '"')
trimws(sapply(gpt_get_completions(prompt)$choices, function(x) x$text))
parse_response(gpt_get_completions(prompt))
}
2 changes: 1 addition & 1 deletion R/document_code.R
Original file line number Diff line number Diff line change
Expand Up @@ -13,5 +13,5 @@
#'
document_code <- function(code) {
prompt <- paste0('Document, in roxygen2 format, this R function: "', code, '"')
trimws(sapply(gpt_get_completions(prompt)$choices, function(x) x$text))
parse_response(gpt_get_completions(prompt))
}
2 changes: 1 addition & 1 deletion R/explain_code.R
Original file line number Diff line number Diff line change
Expand Up @@ -13,5 +13,5 @@
#'
explain_code <- function(code) {
prompt <- paste0('Explain the following R code: "', code, '"')
trimws(sapply(gpt_get_completions(prompt)$choices, function(x) x$text))
parse_response(gpt_get_completions(prompt))
}
2 changes: 1 addition & 1 deletion R/find_issues_in_code.R
Original file line number Diff line number Diff line change
Expand Up @@ -13,5 +13,5 @@
#'
find_issues_in_code <- function(code) {
prompt <- paste0('Find issues or bugs in the following R code: "', code, '"')
trimws(sapply(gpt_get_completions(prompt)$choices, function(x) x$text))
parse_response(gpt_get_completions(prompt))
}
38 changes: 26 additions & 12 deletions R/gpt_get_completions.R
Original file line number Diff line number Diff line change
Expand Up @@ -11,21 +11,35 @@ gpt_get_completions <- function(prompt, openai_api_key = Sys.getenv("OPENAI_API_
stop("`OPENAI_API_KEY` not provided.")
}
# See https://beta.openai.com/docs/api-reference/completions/create
model <- Sys.getenv("OPENAI_MODEL", "text-davinci-003")
params <- list(
model = Sys.getenv("OPENAI_MODEL", "text-davinci-003"),
max_tokens = as.numeric(Sys.getenv("OPENAI_MAX_TOKENS", 256)),
temperature = as.numeric(Sys.getenv("OPENAI_TEMPERATURE", 0.7)),
top_p = as.numeric(Sys.getenv("OPENAI_TOP_P", 1)),
frequency_penalty = as.numeric(Sys.getenv("OPENAI_FREQUENCY_PENALTY", 0)),
presence_penalty = as.numeric(Sys.getenv("OPENAI_PRESENCE_PENALTY", 0))
model = model,
max_tokens = as.numeric(Sys.getenv("OPENAI_MAX_TOKENS",256)),
temperature = as.numeric(Sys.getenv("OPENAI_TEMPERATURE",0.7)),
top_p = as.numeric(Sys.getenv("OPENAI_TOP_P",1)),
frequency_penalty = as.numeric(Sys.getenv("OPENAI_FREQUENCY_PENALTY",0)),
presence_penalty = as.numeric(Sys.getenv("OPENAI_PRESENCE_PENALTY",0))
)
if (as.logical(Sys.getenv("OPENAI_VERBOSE", TRUE))) {
cat(paste0("\n*** ChatGPT input:\n\n", prompt, "\n"))
}
content(POST(
"https://api.openai.com/v1/completions",
add_headers("Authorization" = paste("Bearer", openai_api_key)),
content_type_json(),
body = toJSON(c(params, list(prompt = prompt)), auto_unbox = TRUE)
))
if (model == "gpt-3.5-turbo") {
messages = list(
list(role = "system", content = "You are a helpful assistant."),
list(role = "user", content = prompt)
)
content(POST(
"https://api.openai.com/v1/chat/completions",
add_headers("Authorization" = paste("Bearer", openai_api_key)),
content_type_json(),
body = toJSON(c(params, list(messages = messages)), auto_unbox = TRUE)
))
} else {
content(POST(
"https://api.openai.com/v1/completions",
add_headers("Authorization" = paste("Bearer", openai_api_key)),
content_type_json(),
body = toJSON(c(params, list(prompt = prompt)), auto_unbox = TRUE)
))
}
}
2 changes: 1 addition & 1 deletion R/optimize_code.R
Original file line number Diff line number Diff line change
Expand Up @@ -13,5 +13,5 @@
#'
optimize_code <- function(code) {
prompt <- paste0('Optimize the following R code: "', code, '"')
trimws(sapply(gpt_get_completions(prompt)$choices, function(x) x$text))
parse_response(gpt_get_completions(prompt))
}
17 changes: 17 additions & 0 deletions R/parse_response.R
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
#' Parse OpenAI API Response
#'
#' Takes the raw response from the OpenAI API and extracts the text content
#' from it. Dispatch is driven by the `OPENAI_MODEL` environment variable
#' (defaulting to `"text-davinci-003"`): the `"gpt-3.5-turbo"` chat endpoint
#' nests text under `choices[[i]]$message$content`, while the legacy
#' completions endpoint exposes it as `choices[[i]]$text`.
#'
#' @param raw_response The raw response object returned by the OpenAI API.
#'   Must contain a `choices` list.
#' @return Returns a character vector containing the trimmed text content of
#'   each choice; `character(0)` when `choices` is empty.
#'
parse_response <- function(raw_response) {
  # vapply (not sapply) guarantees a character result even for zero choices,
  # keeping the documented return type stable.
  if (Sys.getenv("OPENAI_MODEL", "text-davinci-003") == "gpt-3.5-turbo") {
    out <- vapply(raw_response$choices, function(x) x$message$content, character(1))
  } else {
    out <- vapply(raw_response$choices, function(x) x$text, character(1))
  }
  trimws(out)
}
2 changes: 1 addition & 1 deletion R/refactor_code.R
Original file line number Diff line number Diff line change
Expand Up @@ -13,5 +13,5 @@
#'
refactor_code <- function(code) {
prompt <- paste0('Refactor the following R code, returning valid R code: "', code, '"')
trimws(sapply(gpt_get_completions(prompt)$choices, function(x) x$text))
parse_response(gpt_get_completions(prompt))
}
19 changes: 19 additions & 0 deletions man/parse_response.Rd

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

0 comments on commit 8471686

Please sign in to comment.