From bae602e704a69599adc505868f099324219e5c23 Mon Sep 17 00:00:00 2001
From: Rahmani mohamed rami <123117552+Ramyrahmeni@users.noreply.github.com>
Date: Fri, 7 Jun 2024 20:03:04 +0100
Subject: [PATCH] adding the model

---
NOTE(review): this patch embeds a hardcoded Hugging Face access token
(`hf_...` in the AutoTokenizer.from_pretrained context line below). That
credential is now public and must be revoked/rotated; the code should read it
from an environment variable or st.secrets instead. Also note the patch
disables the answer path: it comments out the prompt-formatting/return body of
ask() with a triple-quoted string and comments out st.text(answer), so the app
no longer displays a response. Presumably this is a temporary debugging state
— confirm before merging.

 app.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/app.py b/app.py
index 7862cf8..e4f9e69 100644
--- a/app.py
+++ b/app.py
@@ -194,7 +194,7 @@ def ask(query, model, embedding_model, embeddings, pages_and_chunks, tokenizer,
     print(f"Context items: {context_items}")
 
     # Format the prompt with context items
-    print("Formatting the prompt")
+    '''print("Formatting the prompt")
     prompt = prompt_formatter(query=query, context_items=context_items, tokenizer=tokenizer)
     print(f"Prompt: {prompt}")
 
@@ -223,7 +223,7 @@ def ask(query, model, embedding_model, embeddings, pages_and_chunks, tokenizer,
         return output_text
 
     print("Returning answer with context items")
-    return output_text, context_items
+    return output_text, context_items'''
 
 
 with st.sidebar:
@@ -295,12 +295,12 @@ def main():
         )
         print(model)
         tokenizer = AutoTokenizer.from_pretrained("microsoft/Phi-3-mini-4k-instruct",token='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')
-        answer, context_items =ask(query,model,embedding_model,embeddings,pages_and_chunks,tokenizer,
+        ask(query,model,embedding_model,embeddings,pages_and_chunks,tokenizer,
             temperature=0.7,
             max_new_tokens=512,
             format_answer_text=True,
             return_answer_only=True)
-        st.text(answer)
+        #st.text(answer)
 
 if __name__ == "__main__":
     main()