Compare commits

..

51 Commits

Author SHA1 Message Date
bce439921f Generate tags around context
All checks were successful
Create Blog Article if new notes exist / prepare_blog_drafts_and_push (push) Successful in 27m22s
2025-06-16 10:35:21 +10:00
2de2d0fe3a Merge pull request 'prompt enhancement' (#16) from prompt_fix into master
All checks were successful
Create Blog Article if new notes exist / prepare_blog_drafts_and_push (push) Successful in 11m44s
Reviewed-on: #16
2025-06-06 12:04:44 +10:00
cf795bbc35 prompt enhancement 2025-06-06 12:04:19 +10:00
a6ed20451a Merge pull request 'pipeline_creation' (#15) from pipeline_creation into master
All checks were successful
Create Blog Article if new notes exist / prepare_blog_drafts_and_push (push) Successful in 25m13s
Reviewed-on: #15
2025-06-05 09:22:50 +10:00
7fd32b3024 improve notification prompt 2025-06-05 09:22:28 +10:00
a88d233c6b remove tail and improve notification prompt 2025-06-05 09:22:19 +10:00
2abc39e3ac Merge pull request 'pipeline_creation' (#14) from pipeline_creation into master
All checks were successful
Create Blog Article if new notes exist / prepare_blog_drafts_and_push (push) Successful in 30m33s
Reviewed-on: #14
2025-06-05 08:43:23 +10:00
f430998137 typo 2025-06-05 08:42:45 +10:00
8dceb79d91 remove repo reference 2025-06-05 08:41:32 +10:00
6c5b0f778d remove trailing slash 2025-06-05 08:41:32 +10:00
37ed8fd0f9 fix git for pipeline" 2025-06-05 08:40:59 +10:00
0594ea54aa remove repo reference
Some checks failed
Create Blog Article if new notes exist / prepare_blog_drafts_and_push (push) Failing after 27m5s
2025-06-05 01:02:42 +10:00
60f7473297 remove trailing slash
Some checks failed
Create Blog Article if new notes exist / prepare_blog_drafts_and_push (push) Has been cancelled
2025-06-05 01:00:58 +10:00
ec69e8e4f7 Merge pull request 'do it right' (#13) from pipeline_creation into master
Some checks failed
Create Blog Article if new notes exist / prepare_blog_drafts_and_push (push) Failing after 8m58s
Reviewed-on: #13
2025-06-05 00:47:05 +10:00
62b1175aeb do it right 2025-06-05 00:46:45 +10:00
41f804a1eb Merge pull request 'pipeline_creation' (#12) from pipeline_creation into master
Some checks failed
Create Blog Article if new notes exist / prepare_blog_drafts_and_push (push) Has been cancelled
Reviewed-on: #12
2025-06-05 00:45:53 +10:00
f50d076164 cleanup 2025-06-05 00:45:39 +10:00
fc4f9c5053 dealing with pipeline weirdness 2025-06-05 00:44:57 +10:00
e3262cd366 Merge pull request 'weird trailing newline"' (#11) from pipeline_creation into master
Some checks failed
Create Blog Article if new notes exist / prepare_blog_drafts_and_push (push) Failing after 10m26s
Reviewed-on: #11
2025-06-05 00:12:04 +10:00
341f3d8623 weird trailing newline"
"
2025-06-05 00:11:44 +10:00
e2c29204fa Merge pull request 'pipeline_creation' (#10) from pipeline_creation into master
Some checks failed
Create Blog Article if new notes exist / prepare_blog_drafts_and_push (push) Has been cancelled
Reviewed-on: #10
2025-06-04 23:48:10 +10:00
f0e6a0cb52 load_dotenv work different? 2025-06-04 23:47:53 +10:00
7f0b0376d1 load_dotenv work different? 2025-06-04 23:47:05 +10:00
44b5ea6a68 Merge pull request 'load_dotenv work different?' (#9) from pipeline_creation into master
Some checks failed
Create Blog Article if new notes exist / prepare_blog_drafts_and_push (push) Failing after 9m17s
Reviewed-on: #9
2025-06-04 22:55:26 +10:00
a49457094d load_dotenv work different? 2025-06-04 22:54:09 +10:00
9296fda390 Merge pull request 'tail the .env so we can see it in pipelin' (#8) from pipeline_creation into master
Some checks failed
Create Blog Article if new notes exist / prepare_blog_drafts_and_push (push) Failing after 9m5s
Reviewed-on: #8
2025-06-04 22:44:14 +10:00
bb0d9090f3 tail the .env so we can see it in pipelin 2025-06-04 22:43:53 +10:00
703a2384e7 Merge pull request 'sigh stray U' (#7) from pipeline_creation into master
Some checks failed
Create Blog Article if new notes exist / prepare_blog_drafts_and_push (push) Failing after 9m2s
Reviewed-on: #7
2025-06-04 22:30:36 +10:00
4b3f00c325 Merge branch 'master' into pipeline_creation 2025-06-04 22:29:42 +10:00
38dfe404d1 sigh stray U 2025-06-04 22:29:12 +10:00
347ac63f86 Merge pull request 'helps to install virtualenv' (#6) from pipeline_creation into master
Some checks failed
Create Blog Article if new notes exist / prepare_blog_drafts_and_push (push) Failing after 8m59s
Reviewed-on: #6
2025-06-04 22:17:46 +10:00
506758f67d helps to install virtualenv 2025-06-04 22:17:11 +10:00
f0572ba9fb Merge pull request 'pipeline_creation' (#5) from pipeline_creation into master
Some checks failed
Create Blog Article if new notes exist / prepare_blog_drafts_and_push (push) Failing after 7m5s
Reviewed-on: #5
2025-06-04 22:08:28 +10:00
4686f3fae0 y in right place 2025-06-04 22:07:47 +10:00
ea1c8cfb13 add y to apt call 2025-06-04 22:07:15 +10:00
9ca7578d28 Merge pull request 'pipeline_creation' (#4) from pipeline_creation into master
Some checks failed
Create Blog Article if new notes exist / prepare_blog_drafts_and_push (push) Failing after 4m28s
Reviewed-on: #4
2025-06-04 22:02:00 +10:00
64b466c4ac load dotenv in main.py 2025-06-04 22:01:15 +10:00
49174de9ff correct pipeline titles 2025-06-04 21:59:33 +10:00
59f9f01c69 first cut at pipeline 2025-06-04 21:48:59 +10:00
a7eae4b09f Merge pull request 'matrix_notifications' (#3) from matrix_notifications into master
Reviewed-on: #3
2025-06-04 21:34:12 +10:00
c466b04a25 matrix notifications and config driven chroma 2025-06-04 21:32:51 +10:00
431e5c63aa first pass at docker run 2025-06-04 16:56:08 +10:00
6e117e3ce9 language cleanup for integration testing 2025-06-02 12:32:21 +10:00
9a9228bc07 Merge pull request 'repo_work_fix' (#2) from repo_work_fix into master
Reviewed-on: #2
2025-05-30 17:47:31 +10:00
2dd371408f trying for the hard fix 2025-05-30 17:25:13 +10:00
0005ad1fd3 hard reset for the repo work 2025-05-30 17:20:58 +10:00
446978704d further directory cleanup 2025-01-24 04:51:50 +00:00
f24bd5b361 cleanup directory 2025-01-24 04:44:23 +00:00
4d5c27cfaa clean up 2025-01-24 04:42:04 +00:00
d45f0be314 env set up for remote 2025-01-24 04:41:14 +00:00
e1a24aff20 get rid of think tags 2025-01-24 02:17:05 +00:00
7 changed files with 173 additions and 16 deletions

View File

@@ -0,0 +1,56 @@
# Blog-draft pipeline: on a daily schedule (or any push to master) it builds a
# Python virtualenv, writes runtime configuration to .env from repo
# vars/secrets, then runs the generator, which commits drafts and notifies.
name: Create Blog Article if new notes exist
on:
  schedule:
    - cron: "15 3 * * *" # daily at 03:15 (runner timezone)
  push:
    branches:
      - master
jobs:
  prepare_blog_drafts_and_push:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
      - name: Install dependencies
        shell: bash
        run: |
          apt update && apt upgrade -y
          apt install rustc cargo python-is-python3 pip python3-venv python3-virtualenv libmagic-dev git -y
          virtualenv .venv
          source .venv/bin/activate
          pip install --upgrade pip
          pip install -r requirements.txt
          # Identity and push behaviour for the commits the generator creates.
          git config --global user.name "Blog Creator"
          git config --global user.email "ridgway.infrastructure@gmail.com"
          git config --global push.autoSetupRemote true
      - name: Create .env
        shell: bash
        run: |
          # First line truncates any stale .env; the rest append.
          # TRILIUM_HOST is quoted like every other value so load_dotenv
          # parses all entries consistently.
          echo "TRILIUM_HOST='${{ vars.TRILIUM_HOST }}'" > .env
          echo "TRILIUM_PORT='${{ vars.TRILIUM_PORT }}'" >> .env
          echo "TRILIUM_PROTOCOL='${{ vars.TRILIUM_PROTOCOL }}'" >> .env
          echo "TRILIUM_PASS='${{ secrets.TRILIUM_PASS }}'" >> .env
          echo "TRILIUM_TOKEN='${{ secrets.TRILIUM_TOKEN }}'" >> .env
          echo "OLLAMA_PROTOCOL='${{ vars.OLLAMA_PROTOCOL }}'" >> .env
          echo "OLLAMA_HOST='${{ vars.OLLAMA_HOST }}'" >> .env
          echo "OLLAMA_PORT='${{ vars.OLLAMA_PORT }}'" >> .env
          echo "EMBEDDING_MODEL='${{ vars.EMBEDDING_MODEL }}'" >> .env
          echo "EDITOR_MODEL='${{ vars.EDITOR_MODEL }}'" >> .env
          # The creator models are assembled into a JSON list the app parses.
          export PURE='["${{ vars.CONTENT_CREATOR_MODELS_1 }}", "${{ vars.CONTENT_CREATOR_MODELS_2 }}", "${{ vars.CONTENT_CREATOR_MODELS_3 }}", "${{ vars.CONTENT_CREATOR_MODELS_4 }}"]'
          echo "CONTENT_CREATOR_MODELS='$PURE'" >> .env
          echo "GIT_PROTOCOL='${{ vars.GIT_PROTOCOL }}'" >> .env
          echo "GIT_REMOTE='${{ vars.GIT_REMOTE }}'" >> .env
          echo "GIT_USER='${{ vars.GIT_USER }}'" >> .env
          echo "GIT_PASS='${{ secrets.GIT_PASS }}'" >> .env
          echo "N8N_SECRET='${{ secrets.N8N_SECRET }}'" >> .env
          echo "N8N_WEBHOOK_URL='${{ vars.N8N_WEBHOOK_URL }}'" >> .env
          echo "CHROMA_HOST='${{ vars.CHROMA_HOST }}'" >> .env
          echo "CHROMA_PORT='${{ vars.CHROMA_PORT }}'" >> .env
      - name: Create Blogs
        shell: bash
        run: |
          source .venv/bin/activate
          python src/main.py

1
.gitignore vendored
View File

@ -7,3 +7,4 @@ __pycache__
pyproject.toml pyproject.toml
.ropeproject .ropeproject
generated_files/* generated_files/*
pyright*

View File

@ -4,3 +4,5 @@ gitpython
PyGithub PyGithub
chromadb chromadb
langchain-ollama langchain-ollama
PyJWT
dotenv

View File

@ -11,7 +11,13 @@ class OllamaGenerator:
self.inner_title = inner_title self.inner_title = inner_title
self.content = content self.content = content
self.response = None self.response = None
self.chroma = chromadb.HttpClient(host="172.18.0.2", port=8000) print("In Class")
print(os.environ["CONTENT_CREATOR_MODELS"])
try:
chroma_port = int(os.environ['CHROMA_PORT'])
except ValueError as e:
raise Exception(f"CHROMA_PORT is not an integer: {e}")
self.chroma = chromadb.HttpClient(host=os.environ['CHROMA_HOST'], port=chroma_port)
ollama_url = f"{os.environ["OLLAMA_PROTOCOL"]}://{os.environ["OLLAMA_HOST"]}:{os.environ["OLLAMA_PORT"]}" ollama_url = f"{os.environ["OLLAMA_PROTOCOL"]}://{os.environ["OLLAMA_HOST"]}:{os.environ["OLLAMA_PORT"]}"
self.ollama_client = Client(host=ollama_url) self.ollama_client = Client(host=ollama_url)
self.ollama_model = os.environ["EDITOR_MODEL"] self.ollama_model = os.environ["EDITOR_MODEL"]
@ -20,14 +26,14 @@ class OllamaGenerator:
self.llm = ChatOllama(model=self.ollama_model, temperature=0.6, top_p=0.5) #This is the level head in the room self.llm = ChatOllama(model=self.ollama_model, temperature=0.6, top_p=0.5) #This is the level head in the room
self.prompt_inject = f""" self.prompt_inject = f"""
You are a journalist, Software Developer and DevOps expert You are a journalist, Software Developer and DevOps expert
writing a 1000 word draft blog for other tech enthusiasts. writing a 3000 word draft blog article for other tech enthusiasts.
You like to use almost no code examples and prefer to talk You like to use almost no code examples and prefer to talk
in a light comedic tone. You are also Australian in a light comedic tone. You are also Australian
As this person write this blog as a markdown document. As this person write this blog as a markdown document.
The title for the blog is {self.inner_title}. The title for the blog is {self.inner_title}.
Do not output the title in the markdown. Do not output the title in the markdown.
The basis for the content of the blog is: The basis for the content of the blog is:
{self.content} <blog>{self.content}</blog>
""" """
def split_into_chunks(self, text, chunk_size=100): def split_into_chunks(self, text, chunk_size=100):
@ -116,14 +122,15 @@ class OllamaGenerator:
prompt_system = f""" prompt_system = f"""
You are an editor taking information from {len(self.agent_models)} Software You are an editor taking information from {len(self.agent_models)} Software
Developers and Data experts Developers and Data experts
writing a 3000 word blog for other tech enthusiasts. writing a 3000 word blog article. You like when they use almost no code examples.
You like when they use almost no code examples and the You are also Australian. The content may have light comedic elements,
voice is in a light comedic tone. You are also Australian you are more professional and will attempt to tone these down
As this person produce and an amalgamtion of this blog as a markdown document. As this person produce the final version of this blog as a markdown document
keeping in mind the context provided by the previous drafts.
The title for the blog is {self.inner_title}. The title for the blog is {self.inner_title}.
Do not output the title in the markdown. Avoid repeated sentences Do not output the title in the markdown. Avoid repeated sentences
The basis for the content of the blog is: The basis for the content of the blog is:
{self.content} <blog>{self.content}</blog>
""" """
try: try:
query_embed = self.ollama_client.embed(model=self.embed_model, input=prompt_system)['embeddings'] query_embed = self.ollama_client.embed(model=self.embed_model, input=prompt_system)['embeddings']
@ -132,7 +139,9 @@ class OllamaGenerator:
print("Showing pertinent info from drafts used in final edited edition") print("Showing pertinent info from drafts used in final edited edition")
pertinent_draft_info = '\n\n'.join(collection.query(query_embeddings=query_embed, n_results=100)['documents'][0]) pertinent_draft_info = '\n\n'.join(collection.query(query_embeddings=query_embed, n_results=100)['documents'][0])
#print(pertinent_draft_info) #print(pertinent_draft_info)
prompt_human = f"Generate the final document using this information from the drafts: {pertinent_draft_info} - ONLY OUTPUT THE MARKDOWN" prompt_human = f"""Generate the final, 3000 word, draft of the blog using this information from the drafts: <context>{pertinent_draft_info}</context>
- Only output in markdown, do not wrap in markdown tags, Only provide the draft not a commentary on the drafts in the context
"""
print("Generating final document") print("Generating final document")
messages = [("system", prompt_system), ("human", prompt_human),] messages = [("system", prompt_system), ("human", prompt_human),]
self.response = self.llm.invoke(messages).text() self.response = self.llm.invoke(messages).text()
@ -154,9 +163,7 @@ class OllamaGenerator:
with open(filename, "w") as f: with open(filename, "w") as f:
f.write(self.generate_markdown()) f.write(self.generate_markdown())
def generate_commit_message(self): def generate_system_message(self, prompt_system, prompt_human):
prompt_system = "You are a blog creator commiting a piece of content to a central git repo"
prompt_human = f"Generate a 5 word git commit message describing {self.response}"
messages = [("system", prompt_system), ("human", prompt_human),] messages = [("system", prompt_system), ("human", prompt_human),]
commit_message = self.llm.invoke(messages).text() ai_message = self.llm.invoke(messages).text()
return commit_message return ai_message

View File

@ -1,7 +1,13 @@
import ai_generators.ollama_md_generator as omg import ai_generators.ollama_md_generator as omg
import trilium.notes as tn import trilium.notes as tn
import repo_management.repo_manager as git_repo import repo_management.repo_manager as git_repo
from notifications.n8n import N8NWebhookJwt
import string,os import string,os
from datetime import datetime
from dotenv import load_dotenv
load_dotenv()
print(os.environ["CONTENT_CREATOR_MODELS"])
tril = tn.TrilumNotes() tril = tn.TrilumNotes()
@ -24,11 +30,51 @@ for note in tril_notes:
ai_gen = omg.OllamaGenerator(os_friendly_title, ai_gen = omg.OllamaGenerator(os_friendly_title,
tril_notes[note]['content'], tril_notes[note]['content'],
tril_notes[note]['title']) tril_notes[note]['title'])
blog_path = f"/blog_creator/generated_files/{os_friendly_title}.md" blog_path = f"generated_files/{os_friendly_title}.md"
ai_gen.save_to_file(blog_path) ai_gen.save_to_file(blog_path)
# Generate commit messages and push to repo # Generate commit messages and push to repo
commit_message = ai_gen.generate_commit_message() print("Generating Commit Message")
git_sytem_prompt = "You are a blog creator commiting a piece of content to a central git repo"
git_human_prompt = f"Generate a 5 word git commit message describing {ai_gen.response}. ONLY OUTPUT THE RESPONSE"
commit_message = ai_gen.generate_system_message(git_sytem_prompt, git_human_prompt)
git_user = os.environ["GIT_USER"] git_user = os.environ["GIT_USER"]
git_pass = os.environ["GIT_PASS"] git_pass = os.environ["GIT_PASS"]
repo_manager = git_repo.GitRepository("blog/", git_user, git_pass) repo_manager = git_repo.GitRepository("blog/", git_user, git_pass)
print("Pushing to Repo")
repo_manager.create_copy_commit_push(blog_path, os_friendly_title, commit_message) repo_manager.create_copy_commit_push(blog_path, os_friendly_title, commit_message)
# Generate notification for Matrix
print("Generating Notification Message")
git_branch_url = f'https://git.aridgwayweb.com/armistace/blog/src/branch/{os_friendly_title}/src/content/{os_friendly_title}.md'
n8n_system_prompt = f"You are a blog creator notifiying the final editor of the final creation of blog available at {git_branch_url}"
n8n_prompt_human = f"""
Generate an informal 100 word
summary describing {ai_gen.response}.
Don't address it or use names. ONLY OUTPUT THE RESPONSE.
ONLY OUTPUT IN PLAINTEXT STRIP ALL MARKDOWN
"""
notification_message = ai_gen.generate_system_message(n8n_system_prompt, n8n_prompt_human)
secret_key = os.environ['N8N_SECRET']
webhook_url = os.environ['N8N_WEBHOOK_URL']
notification_string = f"""
<h2>{tril_notes[note]['title']}</h2>
<h3>Summary</h3>
<p>{notification_message}</p>
<h3>Branch</h3>
<p>{os_friendly_title}</p>
<p><a href="{git_branch_url}">Link to Branch</a></p>
"""
payload = {
"message": f"{notification_string}",
"timestamp": datetime.now().isoformat()
}
webhook_client = N8NWebhookJwt(secret_key, webhook_url)
print("Notifying")
n8n_result = webhook_client.send_webhook(payload)
print(f"N8N response: {n8n_result['status']}")

View File

45
src/notifications/n8n.py Normal file
View File

@@ -0,0 +1,45 @@
from datetime import datetime, timedelta, timezone
from typing import Dict, Optional

import jwt
import requests
class N8NWebhookJwt:
    """POST JSON payloads to an n8n webhook, authenticated with an HS256 JWT.

    The JWT is sent as a Bearer token in the ``Authorization`` header; its
    claims mirror the payload plus an ``exp`` expiry claim.
    """

    def __init__(self, secret_key: str, webhook_url: str):
        self.secret_key = secret_key
        self.webhook_url = webhook_url
        # Expiry is fixed one hour after construction: every token minted by
        # this instance shares it, so an instance older than an hour issues
        # already-expired tokens. TODO(review): confirm instances are
        # short-lived (created per notification).
        self.token_expiration = datetime.now(timezone.utc) + timedelta(hours=1)

    def _generate_jwt_token(self, payload: Dict) -> str:
        """Return an HS256 JWT whose claims are *payload* plus ``exp``.

        The caller's dict is copied before the ``exp`` claim is added, so the
        payload later POSTed as the request body is not mutated (previously
        the injected ``exp`` leaked into the webhook body).
        """
        claims = dict(payload)  # copy: keep "exp" out of the POST body
        claims["exp"] = self.token_expiration.timestamp()
        encoded_jwt = jwt.encode(
            claims,
            self.secret_key,
            algorithm="HS256",
        )
        return encoded_jwt

    def send_webhook(self, payload: Dict) -> Dict:
        """Send *payload* as JSON to the webhook with JWT authentication.

        Returns ``{"status": "success", "response": <decoded JSON>}`` on
        HTTP 200, otherwise ``{"status": "error", "response": <status code>,
        "message": <body text>}``. Note: a 200 response whose body is not
        valid JSON will raise from ``response.json()``.
        """
        token = self._generate_jwt_token(payload)
        headers = {
            "Authorization": f"Bearer {token}",
            "Content-Type": "application/json",
        }
        response = requests.post(
            self.webhook_url,
            json=payload,
            headers=headers,
        )
        if response.status_code == 200:
            return {"status": "success", "response": response.json()}
        return {
            "status": "error",
            "response": response.status_code,
            "message": response.text,
        }