Merge pull request #190 from rhatdan/test
Turn on testing for --nocontainer flag
ericcurtin authored Sep 25, 2024
2 parents f3dbd46 + 267684e commit 07555d5
Showing 6 changed files with 32 additions and 7 deletions.
1 change: 1 addition & 0 deletions .github/workflows/ci.yml
@@ -10,6 +10,7 @@ jobs:
       run: |
         sudo apt-get update
         sudo apt-get install bats bash
+        pip install omlmd
     - name: run test
       run: make test
 
10 changes: 7 additions & 3 deletions ramalama/cli.py
@@ -323,7 +323,7 @@ def run_parser(subparsers):
     parser = subparsers.add_parser("run", help="run specified AI Model as a chatbot")
     parser.add_argument("--prompt", dest="prompt", action="store_true", help="modify chatbot prompt")
     parser.add_argument(
-        "-n", "--name", dest="name", default=_name(), help="name of container in which the Model will be run"
+        "-n", "--name", dest="name", help="name of container in which the Model will be run"
     )
     parser.add_argument("MODEL") # positional argument
     parser.add_argument("ARGS", nargs="*", help="additional options to pass to the AI Model")
@@ -341,7 +341,7 @@ def serve_parser(subparsers):
         "-d", "--detach", action="store_true", default=True, dest="detach", help="run the container in detached mode"
     )
     parser.add_argument(
-        "-n", "--name", dest="name", default=_name(), help="name of container in which the Model will be run"
+        "-n", "--name", dest="name", help="name of container in which the Model will be run"
     )
     parser.add_argument("-p", "--port", default="8080", help="port for AI Model server to listen on")
     parser.add_argument(
@@ -354,6 +354,8 @@ def serve_parser(subparsers):
 
 
 def serve_cli(args):
+    if args.nocontainer:
+        args.detach = False
     model = New(args.MODEL)
     model.serve(args)
 
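
Editor's note: forcing detach off under --nocontainer keeps the flag combination coherent. serve defaults --detach to True, but detaching is a container-engine feature, so without a container there is no engine process to hand off to and the server has to run in the foreground.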
@@ -456,7 +458,7 @@ def run_container(args):
     if conman == "":
         return False
 
-    if hasattr(args, "name"):
+    if hasattr(args, "name") and args.name:
         name = args.name
     else:
         name = _name()
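
Editor's note: together with the two parser changes above, this is the heart of the fix. An argparse default is evaluated once, when the parser is built, so default=_name() baked a single generated name into the parser and made args.name always truthy; dropping the default and falling back at the point of use restores "user did not pass --name" as a detectable state. A minimal standalone sketch of the pattern (this _name() is a stand-in, not RamaLama's implementation):

import argparse
import random
import string

def _name():
    # Stand-in for RamaLama's container-name generator.
    return "ramalama_" + "".join(random.choices(string.ascii_lowercase, k=8))

parser = argparse.ArgumentParser()
# No default: args.name stays None unless -n/--name is passed.
parser.add_argument("-n", "--name", dest="name")

args = parser.parse_args([])
# Defer generation to the use site, as run_container now does.
name = args.name if args.name else _name()
print(name)  # a fresh name per invocation, and "unset" remains detectable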
@@ -509,6 +511,8 @@ def run_container(args):
 
 def dry_run(args):
     for arg in args:
+        if not arg:
+            continue
         if " " in arg:
             print('"%s"' % arg, end=" ")
         else:
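
Editor's note: with the --name default gone, the argument vector echoed by --dryrun can now contain None or empty entries, and this guard drops them instead of printing a literal None. A quick standalone sketch of the filtered echo, using an invented argument list:

def dry_run(args):
    for arg in args:
        if not arg:
            continue  # skip None and empty strings
        if " " in arg:
            print('"%s"' % arg, end=" ")
        else:
            print(arg, end=" ")

dry_run(["podman", "run", None, "", "--label", "RAMALAMA container"])
# prints: podman run --label "RAMALAMA container"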
12 changes: 8 additions & 4 deletions ramalama/oci.py
@@ -91,9 +91,13 @@ def pull(self, args):
     def get_symlink_path(self, args):
         registry, reference = self.model.split("/", 1)
         reference_dir = reference.replace(":", "/")
-        directory = f"{args.store}/models/oci/{registry}/{reference_dir}"
-        ggufs = [file for file in os.listdir(directory) if file.endswith(".gguf")]
+        path = f"{args.store}/models/oci/{registry}/{reference_dir}"
+
+        if os.path.isfile(path):
+            return path
+
+        ggufs = [file for file in os.listdir(path) if file.endswith(".gguf")]
         if len(ggufs) != 1:
-            raise KeyError(f"unable to identify .gguf file in: {directory}")
+            raise KeyError(f"unable to identify .gguf file in: {path}")
 
-        return f"{directory}/{ggufs[0]}"
+        return f"{path}/{ggufs[0]}"
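
Editor's note: the rework makes the OCI store lookup handle both layouts. If the computed path is already a regular file it is returned directly; only otherwise is it treated as a directory that must contain exactly one .gguf file. The same resolution logic as a self-contained sketch (the function name is illustrative, not RamaLama's API):

import os

def resolve_gguf(path):
    # Store entry may already be a regular file; use it directly.
    if os.path.isfile(path):
        return path

    # Otherwise expect a directory holding exactly one GGUF model.
    ggufs = [f for f in os.listdir(path) if f.endswith(".gguf")]
    if len(ggufs) != 1:
        raise KeyError(f"unable to identify .gguf file in: {path}")
    return f"{path}/{ggufs[0]}"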
2 changes: 2 additions & 0 deletions test/system/030-run.bats
@@ -3,6 +3,8 @@
 load helpers
 
 @test "ramalama --dryrun run basic output" {
+    skip_if_nocontainer
+
     model=m_$(safename)
     image=m_$(safename)
 
8 changes: 8 additions & 0 deletions test/system/040-serve.bats
@@ -5,6 +5,8 @@ load helpers
 verify_begin="podman run --rm -i --label \"RAMALAMA container\" --security-opt=label=disable -e RAMALAMA_TRANSPORT --name"
 
 @test "ramalama --dryrun serve basic output" {
+    skip_if_nocontainer
+
     model=m_$(safename)
 
     run_ramalama --dryrun serve ${model}
@@ -24,6 +26,8 @@ verify_begin="podman run --rm -i --label \"RAMALAMA container\" --security-opt=l
 }
 
 @test "ramalama --detach serve" {
+    skip_if_nocontainer
+
     model=m_$(safename)
 
     run_ramalama --dryrun serve --detach ${model}
@@ -37,6 +41,8 @@ verify_begin="podman run --rm -i --label \"RAMALAMA container\" --security-opt=l
 
 @test "ramalama serve and stop" {
     skip "FIXME does not work in CI/CD system"
+    skip_if_nocontainer
+
     model=ollama://tiny-llm:latest
     container1=c_$(safename)
     container2=c_$(safename)
@@ -67,6 +73,8 @@ verify_begin="podman run --rm -i --label \"RAMALAMA container\" --security-opt=l
 
 @test "ramalama --detach serve and stop all" {
     skip "FIXME does not work in CI/CD system"
+    skip_if_nocontainer
+
     model=ollama://tiny-llm:latest
     container=c_$(safename)
     port1=8100
6 changes: 6 additions & 0 deletions test/system/helpers.bash
@@ -911,5 +911,11 @@ function make_random_file() {
     dd if=/dev/urandom of="$1" bs=1 count=${2:-$((${RANDOM} % 8192 + 1024))} status=none
 }
 
+function skip_if_nocontainer() {
+    if [[ "${_RAMALAMA_TEST_OPTS}" == "--nocontainer" ]]; then
+        skip "Not supported with --nocontainer"
+    fi
+}
+
 # END miscellaneous tools
 ###############################################################################
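
Editor's note: this helper reduces each container-only test to a one-line guard. Judging from the comparison, the test environment is expected to export _RAMALAMA_TEST_OPTS=--nocontainer when exercising the --nocontainer path (the wiring that sets this variable is not part of this diff), at which point every guarded bats test skips itself cleanly instead of failing against a missing container engine.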
