From 733241554ff8491d8ee51e8c4f9426361ea6aab2 Mon Sep 17 00:00:00 2001
From: Andrew Ridgway
Date: Wed, 17 Sep 2025 10:55:17 +1000
Subject: [PATCH] flip some prompting around

---
 src/ai_generators/ollama_md_generator.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/ai_generators/ollama_md_generator.py b/src/ai_generators/ollama_md_generator.py
index 18a5a8d..5c5dd0b 100644
--- a/src/ai_generators/ollama_md_generator.py
+++ b/src/ai_generators/ollama_md_generator.py
@@ -134,7 +134,7 @@ class OllamaGenerator:
         {self.content}
         """
         try:
-            query_embed = self.ollama_client.embed(model=self.embed_model, input=prompt_system)['embeddings']
+            query_embed = self.ollama_client.embed(model=self.embed_model, input=prompt_human)['embeddings']
             collection = self.load_to_vector_db()
             collection_query = collection.query(query_embeddings=query_embed, n_results=100)
             print("Showing pertinent info from drafts used in final edited edition")