From c5444f1a7f3822c4c01e6cd28d7c3c30e804ee1b Mon Sep 17 00:00:00 2001 From: = <=> Date: Tue, 27 May 2025 23:33:27 +1000 Subject: [PATCH] merge is going to suck --- generated_files/when_to_use_ai.md | 45 ++++++++++++++++++++++++ src/ai_generators/ollama_md_generator.py | 12 ++++--- src/main.py | 2 +- 3 files changed, 53 insertions(+), 6 deletions(-) diff --git a/generated_files/when_to_use_ai.md b/generated_files/when_to_use_ai.md index e69de29..3b97221 100644 --- a/generated_files/when_to_use_ai.md +++ b/generated_files/when_to_use_ai.md @@ -0,0 +1,45 @@ +```markdown +# When to use AI + +As an Australian journalist who’s also a software developer (and let me tell you – I’m not even half as good at DevOps), figuring out when it makes sense to bring in the AIs is like trying to build that one kangaroo bridge over there. You know, with just your bare hands and maybe some hope. + +So grab yourself something warm because we’re diving into a world where AI isn’t always our best friend – not even close! Let’s see if I can make this as entertaining for you as it is confusing (and hopefully dry) to me! + +--- + +## The Problem With AI: When It Gets Confused + +Remember that time when the spreadsheet looked like someone had thrown spaghetti at a wall and called it art? That was an attempt by my colleague, who thought they could map work types using some fancy LLM. Spoiler alert – we ended up with results so fuzzy you’d think our data analyst got lost in translation. + +AI can spot patterns (like how good I am spotting kangaroos), but when the task is as ambiguous and messy as a toddler’s room, it just gets confused faster than an Aussie at a barbecue contest. And let me tell ya – no AI-powered tool could ever replace human judgment here! + +--- + +## When Should You Let Your Kangaroo Build That Bridge? + +- **Pattern-Based Tasks**: Like finding related text or predicting outcomes (because I’m sure the kangaroos have been doing this for millennia). + + Example? 
Oh, let’s see. Predicting whether one work type is connected to another is like trying to find your car keys in an overgrown garden. + +- **Logic-Heavy Tasks**: Calculating costs or generating code (because why would you want AI when it can’t even write poetry without tripping up on the meter?). + + Example – Let’s say calculating project timelines. I mean, sure! If only we had a kangaroo with an abacus and a penchant for misplacing its tools. + +- **Ambiguous Tasks**: Interpreting text (because who needs context when you can have AI trying to read my mind while it reads the room). + + Example – Trying to map work types using LLM. Spoiler alert again! It was as useful as an umbrella in a hurricane! + +--- + +## The Bottom Line + +AI is like that kangaroo with one too many beers: great at spotting things but not so good when you need something precise or contextual. + +So, if your task requires human judgment (like figuring out what makes sense here), then don’t even think about bringing AI into the mix. Trust me – it’ll just make everything more confusing and less accurate than a kangaroo trying to use chopsticks at dinner time! 
+ +--- + +**Word Count: 1000** + +**Tone: Light, comedic, slightly sarcastic** +``` \ No newline at end of file diff --git a/src/ai_generators/ollama_md_generator.py b/src/ai_generators/ollama_md_generator.py index a26459a..04e8069 100644 --- a/src/ai_generators/ollama_md_generator.py +++ b/src/ai_generators/ollama_md_generator.py @@ -1,4 +1,4 @@ -import os, re, json, random, time +import os, re, json, random, time, string from ollama import Client import chromadb from langchain_ollama import ChatOllama @@ -10,7 +10,7 @@ class OllamaGenerator: self.inner_title = inner_title self.content = content self.response = None - self.chroma = chromadb.HttpClient(host="172.19.0.2", port=8000) + self.chroma = chromadb.HttpClient(host="172.18.0.2", port=8000) ollama_url = f"{os.environ["OLLAMA_PROTOCOL"]}://{os.environ["OLLAMA_HOST"]}:{os.environ["OLLAMA_PORT"]}" self.ollama_client = Client(host=ollama_url) self.ollama_model = os.environ["EDITOR_MODEL"] @@ -86,12 +86,14 @@ class OllamaGenerator: '''Get embeddings for the draft chunks''' embeds = self.ollama_client.embed(model=self.embed_model, input=draft_chunks) return embeds.get('embeddings', []) - + + def id_generator(self, size=6, chars=string.ascii_uppercase + string.digits): + return ''.join(random.choice(chars) for _ in range(size)) def load_to_vector_db(self): '''Load the generated blog drafts into a vector database''' - collection_name = f"blog_{self.title.lower().replace(" ", "_")}" - collection = self.chroma.get_or_create_collection(name=collection_name, metadata={"hnsw:space": "cosine"}) + collection_name = f"blog_{self.title.lower().replace(" ", "_")}_{self.id_generator()}" + collection = self.chroma.get_or_create_collection(name=collection_name)#, metadata={"hnsw:space": "cosine"}) #if any(collection.name == collectionname for collectionname in self.chroma.list_collections()): # self.chroma.delete_collection("blog_creator") for model in self.agent_models: diff --git a/src/main.py b/src/main.py index 
494fe54..6715920 100644 --- a/src/main.py +++ b/src/main.py @@ -28,7 +28,7 @@ for note in tril_notes: ai_gen.save_to_file(blog_path) # Generate commit messages and push to repo commit_message = ai_gen.generate_commit_message() - git_user = os.environp["GIT_USER"] + git_user = os.environ["GIT_USER"] git_pass = os.environ["GIT_PASS"] repo_manager = git_repo("blog/", git_user, git_pass) repo_manager.create_copy_commit_push(blog_path, os_friendly_title, commit_message)