Commit

Fix #57 - Make more stuff optional to load it as an r2 plugin again
trufae authored Sep 23, 2024
1 parent 53713b7 commit 4b15e0e
Showing 8 changed files with 56 additions and 22 deletions.
7 changes: 6 additions & 1 deletion main.py
@@ -7,6 +7,7 @@
 import argparse
 
 from r2ai.main import main as r2ai_main
+from r2ai.main import register_r2plugin
 
 def is_valid_file(parser, arg):
     if not os.path.isfile(arg):
@@ -55,4 +56,8 @@ def main():
     r2ai_main(args, args.command, runrepl)
 
 if __name__ == "__main__":
-    main()
+    try:
+        import r2lang # pylint: disable=import-error
+        register_r2plugin()
+    except ImportError:
+        main()
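
This entry-point guard is what makes the script dual-mode: when radare2 loads it through its Python bindings, the r2lang module is importable, so the script registers itself as a core plugin; run from a shell, the import fails and the ordinary CLI starts. Restated as a sketch with the two paths spelled out (helper names as in the diff):

    if __name__ == "__main__":
        try:
            import r2lang  # only importable inside an r2 Python host
            register_r2plugin()  # hand control to radare2 as a core plugin
        except ImportError:
            main()  # plain command-line run

To exercise the plugin path, the script has to be loaded from r2 itself (e.g. r2 -i main.py /bin/ls, assuming the lang-python bindings are installed); python main.py always takes the ImportError branch.
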
27 changes: 19 additions & 8 deletions r2ai/auto.py
@@ -3,19 +3,30 @@
 import sys
 import re
 import os
-import boto3
 
+have_bedrock = True
+
+try:
+    import boto3
+    from .backend.bedrock import (
+        BEDROCK_TOOLS_CONFIG, build_messages_for_bedrock, extract_bedrock_tool_calls,
+        process_bedrock_tool_calls, print_bedrock_response
+    )
+except Exception:
+    have_bedrock = False
+
 from llama_cpp import Llama
 from llama_cpp.llama_tokenizer import LlamaHFTokenizer
 from transformers import AutoTokenizer
-from anthropic import Anthropic
 
+have_anthropic = True
+try:
+    from anthropic import Anthropic
+    from .anthropic import construct_tool_use_system_prompt, extract_claude_tool_calls
+except Exception:
+    have_anthropic = False
+
 from . import index
-from .anthropic import construct_tool_use_system_prompt, extract_claude_tool_calls
-from .backend.bedrock import (
-    BEDROCK_TOOLS_CONFIG, build_messages_for_bedrock, extract_bedrock_tool_calls,
-    process_bedrock_tool_calls, print_bedrock_response
-)
 from .pipe import have_rlang, r2lang, get_r2_inst
 
 ANSI_REGEX = re.compile(r'\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])')
@@ -443,4 +454,4 @@ def auto_chat_llama(interpreter):
     "llama": {
         "default": auto_chat_llama
     }
-}
\ No newline at end of file
+}
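
Both auto.py blocks follow the optional-dependency idiom this commit is built around: attempt the import at module load time, record the result in a module-level flag, and let call sites degrade gracefully instead of crashing the whole plugin. A generic sketch of the idiom (foo and use_foo are illustrative names, not part of this codebase):

    have_foo = True
    try:
        import foo  # optional heavy dependency
    except Exception:  # broader than ImportError: init-time failures also disable it
        have_foo = False

    def use_foo(data):
        if not have_foo:
            raise RuntimeError("foo is not installed: pip install foo")
        return foo.process(data)

Catching Exception rather than ImportError looks deliberate: a dependency that installs but fails to initialize should also leave the plugin loadable.
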
6 changes: 5 additions & 1 deletion r2ai/index.py
@@ -5,7 +5,11 @@
 import requests
 import json
 import traceback
-import chromadb
+have_chrome = True
+try:
+    import chromadb
+except Exception:
+    have_chrome = False
 from unidecode import unidecode
 import sys
 from r2ai import LOGGER
15 changes: 12 additions & 3 deletions r2ai/interpreter.py
@@ -5,8 +5,13 @@
 import traceback
 import json
 import platform
+have_local = True
 import getpass
-import tokentrim
 
+try:
+    import tokentrim
+except Exception:
+    have_local = False
+
 from rich.rule import Rule
 from signal import signal, SIGINT
@@ -952,7 +957,7 @@ def respond(self):
            self.messages.append({"role": "assistant", "content": response})
            print(response)
            self.logger.warn("For a better experience install openai python")
-           self.load.warn("pip install -U openai")
+           self.logger.warn("pip install -U openai")
            self.logger.warn("export OPENAI_API_KEY=...")
            return
 
@@ -991,7 +996,11 @@ def respond(self):
            return
 
        elif self.model.startswith("bedrock:"):
-           import boto3
+           try:
+               import boto3
+           except Exception:
+               self.logger.error("Cannot import boto3. No bedrock for now")
+               return
            bedrock_model = self.model.split(":")[1] + ":0"
            self.bedrock_client = boto3.client("bedrock-runtime")
            request = {
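
Unlike the module-level guards above, interpreter.py defers the boto3 import to the one branch that needs it, so a missing dependency only disables the bedrock: backend at the moment it is selected. A sketch of that lazy-import shape as a standalone helper (get_bedrock_client is a hypothetical name; boto3.client("bedrock-runtime") is the same call the diff uses):

    def get_bedrock_client(logger):
        try:
            import boto3  # deferred until a bedrock: model is actually requested
        except Exception:
            logger.error("Cannot import boto3. No bedrock for now")
            return None
        return boto3.client("bedrock-runtime")
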
16 changes: 10 additions & 6 deletions r2ai/main.py
@@ -23,12 +23,12 @@
 within_r2 = True
 
 def r2ai_rlang_plugin(unused_but_required_argument):
-    global ai
+    ai = r2ai_singleton()
     def _call(s):
         if not s.startswith("r2ai"):
             return False
         try:
-            run_rcfile_once()
+            run_rcfile_once(ai)
             if len(s) == 4:
                 builtins.print(help_message)
             else:
@@ -47,7 +47,7 @@ def _call(s):
     }
 
 # TODO: see repl.run_script as replacement
-def run_rcfile():
+def run_rcfile(ai):
     try:
         lines = slurp(R2AI_RCFILE)
 
@@ -61,13 +61,17 @@
     if ai is None:
         ai = r2ai_singleton()
 
-def run_rcfile_once():
+def run_rcfile_once(ai):
     global RCFILE_LOADED
     if not RCFILE_LOADED:
-        run_rcfile()
+        run_rcfile(ai)
         RCFILE_LOADED = True
 
 
+def register_r2plugin():
+    import r2lang
+    r2lang.plugin("core", r2ai_rlang_plugin)
+
 def main(args, commands, dorepl=True):
     global within_r2
 
@@ -133,7 +137,7 @@ def main():
     # print("[R2AI] Please: r2pm -ci rlang-python")
     # sys.exit(0)
 
-    # run_rcfile()
+    # run_rcfile(ai)
     # if len(sys.argv) > 1:
     #     for arg in sys.argv[1:]:
     #         if arg.endswith(".py"):
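
register_r2plugin is the piece that hooks all of this into radare2: r2lang.plugin("core", ...) hands r2 a constructor that returns the plugin's metadata plus a command callback. Judging from the _call handler and the closing brace visible in the first hunk, the constructor has roughly this shape (the metadata values below are illustrative, not copied from the file):

    def r2ai_rlang_plugin(unused_but_required_argument):
        ai = r2ai_singleton()
        def _call(s):
            if not s.startswith("r2ai"):
                return False  # not our command; let other core plugins try
            # ... dispatch s to the r2ai command handler ...
            return True
        return {
            "name": "r2ai",
            "licence": "MIT",
            "desc": "run language models inside r2",
            "call": _call,
        }
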
2 changes: 1 addition & 1 deletion r2ai/pipe.py
@@ -41,7 +41,7 @@ def cmd(self, x):
            r2lang = FakeLang(r2pipe.open())
            r2lang.cmd("?V") # r2pipe throws only here
        else:
-           raise Error("must spawn")
+           raise Exception("must spawn")
    except Exception:
        try:
            have_rlang = False
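
The pipe.py change is a real fix, not just lint: Error is not a Python builtin, so the old line raised a NameError rather than the intended exception. It went unnoticed because NameError is itself a subclass of Exception, so the enclosing except Exception: caught it and fell through to the fallback path anyway.
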
2 changes: 1 addition & 1 deletion r2ai/repl.py
@@ -205,7 +205,7 @@ def runline(ai, usertext):
        except Exception:
            traceback.print_exc()
    if usertext.startswith("-VV"):
-       from ui.app import R2AIApp
+       from ui.app import R2AIApp # pylint: disable=import-error
        R2AIApp().run()
        return
    if usertext.startswith("?V") or usertext.startswith("-v"):
3 changes: 2 additions & 1 deletion r2ai/tab.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import os
import sys
from .const import R2AI_HISTFILE, R2AI_HOMEDIR, R2AI_RCFILE, R2AI_USERDIR
from .models import models

Expand Down Expand Up @@ -67,7 +68,7 @@ def complete(self, text, state):

def display_matches(self, substitution, matches, longest_match_length):
line_buffer = readline.get_line_buffer()
columns = environ.get("COLUMNS", 80)
columns = os.environ.get("COLUMNS", 80)
print()
tpl = "{:<" + str(int(max(map(len, matches)) * 1.2)) + "}"
buffer = ""
Expand Down
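
One caveat survives the tab.py fix: os.environ values are always strings, so columns is text like "80" when COLUMNS is set but the int 80 when the default kicks in. Any arithmetic on it would need a cast; a defensive variant (not part of this commit) would be:

    columns = int(os.environ.get("COLUMNS", 80))  # normalize to int either way
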
