Skip to content

Commit

Permalink
Tests for new llama3.2 customized prompt
Browse files Browse the repository at this point in the history
  • Loading branch information
edgararuiz committed Sep 29, 2024
1 parent dc6095b commit dd8fdf0
Show file tree
Hide file tree
Showing 3 changed files with 38 additions and 25 deletions.
2 changes: 1 addition & 1 deletion R/m-defaults.R
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ m_defaults_set <- function(...) {
}
model <- defaults[["model"]]
split_model <- strsplit(model, "\\:")[[1]]
if (length(split_model > 1)) {
if (length(split_model) > 1) {
sub_model <- split_model[[1]]
} else {
sub_model <- NULL
Expand Down
48 changes: 24 additions & 24 deletions tests/testthat/_snaps/zzz-cache.md
Original file line number Diff line number Diff line change
Expand Up @@ -3,30 +3,30 @@
Code
fs::dir_ls("_mall_cache", recurse = TRUE)
Output
_mall_cache/0b
_mall_cache/0b/0b18bbcac64aff6c9121b7b6d08e6458.json
_mall_cache/1a
_mall_cache/1a/1a1434659a6539a64dafc4dc1adf5503.json
_mall_cache/1c
_mall_cache/1c/1c74876f58e4060900dc2c3711777cc7.json
_mall_cache/32
_mall_cache/32/32f10622095aad218080f2ec26382b2a.json
_mall_cache/3b
_mall_cache/3b/3b3111d4036392f8e32a6dc4457d515e.json
_mall_cache/00
_mall_cache/00/004088f786ed0f6a3abc08f2aa55ae2b.json
_mall_cache/14
_mall_cache/14/14afc26cb4f76497b80b5552b2b1e217.json
_mall_cache/18
_mall_cache/18/18560280fe5b5a85f2d66fa2dc89aa00.json
_mall_cache/29
_mall_cache/29/296f3116c07dab7f3ecb4a71776e3b64.json
_mall_cache/2c
_mall_cache/2c/2cbb57fd4a7e7178c489d068db063433.json
_mall_cache/42
_mall_cache/42/425e0dc8e9dcadd3482b98fdfa127f30.json
_mall_cache/83
_mall_cache/83/837cf64a31cf9d16de22d95feaafd72b.json
_mall_cache/84
_mall_cache/84/84609d770fdd4eb65d2f232e0c93f15c.json
_mall_cache/a5
_mall_cache/a5/a5ae06127c321290a08d42be969db936.json
_mall_cache/bf
_mall_cache/bf/bf5790d2673fe4e32e382f282ae2a095.json
_mall_cache/ca
_mall_cache/ca/cac76c2359dbefe94ff9007c014996bf.json
_mall_cache/db
_mall_cache/db/db9fde654fc2c2066010bba4733dcc87.json
_mall_cache/ff
_mall_cache/ff/ffea2bb0bc69b4de643fadd6e1d9b0fb.json
_mall_cache/44
_mall_cache/44/44fd00c39a9697e24e93943ef5f2ad1b.json
_mall_cache/57
_mall_cache/57/5702ff773afb880c746037a5d8254019.json
_mall_cache/65
_mall_cache/65/65c76a53ebea14a6695adf433fb2faa6.json
_mall_cache/98
_mall_cache/98/98a43dc690b06455d6b0a5046db31d84.json
_mall_cache/9c
_mall_cache/9c/9c4ed89921994aa00c712bada91ef941.json
_mall_cache/b0
_mall_cache/b0/b02d0fab954e183a98787fa897b47d59.json
_mall_cache/b7
_mall_cache/b7/b7c613386c94b2500b2b733632fedd1a.json

13 changes: 13 additions & 0 deletions tests/testthat/test-m-backend-prompt.R
Original file line number Diff line number Diff line change
Expand Up @@ -12,3 +12,16 @@ test_that("Prompt handles list()", {
list(list(role = "user", content = test_text))
)
})

test_that("Prompt output is customized for llama3.2", {
  # llama3.2 gets a model-specific prompt, so the extract/classify prompts
  # it produces must differ from those produced for another model (llama1).
  # NOTE: previously this test shared the description "Prompt handles list()"
  # with the test above, which made testthat failure reports ambiguous.
  backend <- llm_use("ollama", "llama3.2:latest", .silent = TRUE)
  x <- m_backend_prompt(backend)
  x_extract <- x$extract(labels = c("a", "b"))
  x_classify <- x$classify(labels = c("a" ~ 1, "b" ~ 2))
  # Same prompt requests against a non-llama3.2 model for comparison.
  backend <- llm_use("ollama", "llama1", .silent = TRUE)
  y <- m_backend_prompt(backend)
  y_extract <- y$extract(labels = c("a", "b"))
  y_classify <- y$classify(labels = c("a" ~ 1, "b" ~ 2))
  # The customized prompts must not match the generic ones.
  expect_false(x_extract[[1]]$content == y_extract[[1]]$content)
  expect_false(x_classify[[1]]$content == y_classify[[1]]$content)
})

0 comments on commit dd8fdf0

Please sign in to comment.