diff --git a/buildSrc/src/main/kotlin/school/ai/AssistantManager.kt b/buildSrc/src/main/kotlin/school/ai/AssistantManager.kt index 16a802d..c30b27b 100755 --- a/buildSrc/src/main/kotlin/school/ai/AssistantManager.kt +++ b/buildSrc/src/main/kotlin/school/ai/AssistantManager.kt @@ -16,8 +16,19 @@ import java.io.File import java.time.Duration import java.util.* import kotlin.coroutines.resume +import org.slf4j.LoggerFactory + object AssistantManager { + val logger = LoggerFactory.getLogger(AssistantPlugin::class.java) + + val Project.apiKey: String + get() = Properties().apply { + "$projectDir/private.properties" + .let(::File) + .inputStream() + .use(::load) + }["OPENAI_API_KEY"] as String val userName = System.getProperty("user.name") val assistantName = "E-3PO" @@ -72,9 +83,4 @@ object AssistantManager { }) } } - - val Project.apiKey: String - get() = Properties().apply { - "$projectDir/private.properties".let(::File).inputStream().use(::load) - }["OPENAI_API_KEY"] as String } diff --git a/buildSrc/src/main/kotlin/school/ai/AssistantPlugin.kt b/buildSrc/src/main/kotlin/school/ai/AssistantPlugin.kt index 92bbea3..7621a53 100755 --- a/buildSrc/src/main/kotlin/school/ai/AssistantPlugin.kt +++ b/buildSrc/src/main/kotlin/school/ai/AssistantPlugin.kt @@ -16,113 +16,73 @@ class AssistantPlugin : Plugin { override fun apply(project: Project) { project.run { - task("helloOllamaMistral") { + task("displayAIPrompt") { group = "school-ai" - description = "Display the ollama mistral chatgpt prompt request." - doFirst { - createOllamaChatModel(model = "mistral") - .run { generate(userMessage).let(::println) } - } + description = "Display on console AI prompt assistant" + doFirst { userMessage.let(::println) } } - task("helloOllamaStreamMistral") { + task("displayOpenAIKey") { group = "school-ai" - description = "Display the ollama mistral chatgpt stream prompt request." 
- doFirst { - runBlocking { - createOllamaStreamingChatModel("mistral").run { - when (val answer = generateStreamingResponse(this, userMessage)) { - is Right -> - "Complete response received: \n${answer.value.content().text()}".run(::println) - - is Left -> - "Error during response generation: \n${answer.value}".run(::println) - } - } - } - } + description = "Display the open ai api keys stored in private.properties" + doFirst { "apiKey : ${project.apiKey}".let(::println) } } - - - - task("helloOllamaPhi") { + task("helloOpenAi") { group = "school-ai" - description = "Display the ollama phi3.5 chatgpt prompt request." + description = "Display the open ai chatgpt hello prompt request." doFirst { - createOllamaChatModel(model = "phi3.5:latest") - .run { generate(userMessage).let(::println) } + OpenAiChatModel + .withApiKey(apiKey) + .generate("Say 'Hello World'") + .run(::println) } } - task("helloOllamaStreamPhi") { - group = "school-ai" - description = "Display the ollama phi3.5 chatgpt stream prompt request." - doFirst { - runBlocking { - createOllamaStreamingChatModel("phi3.5:latest").run { - when (val answer = generateStreamingResponse(this, userMessage)) { - is Right -> - "Complete response received: \n${answer.value.content().text()}".run(::println) - is Left -> - "Error during response generation: \n${answer.value}".run(::println) - } - } + // Generic function for chat model tasks + fun createChatTask(taskName: String, model: String) { + task(taskName) { + group = "school-ai" + description = "Display the Ollama $model chatgpt prompt request." + doFirst { + createOllamaChatModel(model = model) + .run { generate(userMessage).let(::println) } } } } - task("helloOllamaSmollM") { - group = "school-ai" - description = "Display the ollama mistral chatgpt prompt request." 
- doFirst { - createOllamaChatModel() - .run { generate(userMessage).let(::println) } - } - } - - task("helloOllamaStreamSmollM") { - group = "school-ai" - description = "Display the ollama mistral chatgpt stream prompt request." - doFirst { - runBlocking { - createOllamaStreamingChatModel().run { - when (val answer = generateStreamingResponse(this, userMessage)) { - is Right -> - "Complete response received: \n${answer.value.content().text()}".run(::println) - - is Left -> - "Error during response generation: \n${answer.value}".run(::println) + // Generic function for streaming chat model tasks + fun createStreamingChatTask(taskName: String, model: String) { + task(taskName) { + group = "school-ai" + description = "Display the Ollama $model chatgpt stream prompt request." + doFirst { + runBlocking { + createOllamaStreamingChatModel(model).run { + when (val answer = generateStreamingResponse(this, userMessage)) { + is Right -> "Complete response received:\n${ + answer.value.content().text() + }".run(::println) + + is Left -> "Error during response generation:\n${answer.value}".run(::println) + } } } } } } - - task("displayAIPrompt") { - group = "school-ai" - description = "Dislpay on console AI prompt assistant" - doFirst { userMessage.let(::println) } - } - - task("displayOpenAIKey") { - group = "school-ai" - description = "Display the open ai api keys stored in private.properties" - doFirst { "apiKey : ${project.apiKey}".let(::println) } - } - - task("helloOpenAi") { - group = "school-ai" - description = "Display the open ai chatgpt hello prompt request." 
- doFirst { - OpenAiChatModel - .withApiKey(apiKey) - .generate("Say 'Hello World'") - .run(::println) - } + // Creating tasks for each model + mapOf( + "llama3.1:latest" to "Llama3", + "mistral" to "Mistral", + "phi3.5:latest" to "Phi", + "smollm:135m" to "SmollM", + ).forEach { model -> + createChatTask("helloOllama${model.value}", model.key) + createStreamingChatTask("helloOllamaStream${model.value}", model.key) } } } -} +} \ No newline at end of file