integrating agentic chroma

parent c3c4445d33
commit 1630df04e6
@@ -1,5 +1,4 @@
 networks:
-<<<<<<< HEAD
   net:
     driver: bridge
 
@@ -43,54 +42,3 @@ services:
 volumes:
   chroma-data:
     driver: local
-=======
-  net:
-    driver: bridge
-
-services:
-  blog_creator:
-    build:
-      context: .
-      dockerfile: Dockerfile
-    container_name: blog_creator
-    env_file:
-      - .env
-    volumes:
-      - ./generated_files/:/blog_creator/generated_files
-
-  chroma:
-    image: chromadb/chroma
-    volumes:
-      # Be aware that indexed data are located in "/chroma/chroma/"
-      # Default configuration for persist_directory in chromadb/config.py
-      # Read more about deployments: https://docs.trychroma.com/deployment
-      - chroma-data:/chroma/chroma
-    command: "--workers 1 --host 0.0.0.0 --port 8000 --proxy-headers --log-config chromadb/log_config.yml --timeout-keep-alive 30"
-    environment:
-      - IS_PERSISTENT=TRUE
-      - CHROMA_SERVER_AUTHN_PROVIDER=${CHROMA_SERVER_AUTHN_PROVIDER}
-      - CHROMA_SERVER_AUTHN_CREDENTIALS_FILE=${CHROMA_SERVER_AUTHN_CREDENTIALS_FILE}
-      - CHROMA_SERVER_AUTHN_CREDENTIALS=${CHROMA_SERVER_AUTHN_CREDENTIALS}
-      - CHROMA_AUTH_TOKEN_TRANSPORT_HEADER=${CHROMA_AUTH_TOKEN_TRANSPORT_HEADER}
-      - PERSIST_DIRECTORY=${PERSIST_DIRECTORY:-/chroma/chroma}
-      - CHROMA_OTEL_EXPORTER_ENDPOINT=${CHROMA_OTEL_EXPORTER_ENDPOINT}
-      - CHROMA_OTEL_EXPORTER_HEADERS=${CHROMA_OTEL_EXPORTER_HEADERS}
-      - CHROMA_OTEL_SERVICE_NAME=${CHROMA_OTEL_SERVICE_NAME}
-      - CHROMA_OTEL_GRANULARITY=${CHROMA_OTEL_GRANULARITY}
-      - CHROMA_SERVER_NOFILE=${CHROMA_SERVER_NOFILE}
-    restart: unless-stopped # possible values are: "no", "always", "on-failure", "unless-stopped"
-    ports:
-      - "8001:8000"
-    healthcheck:
-      # Adjust below to match your container port
-      test: [ "CMD", "curl", "-f", "http://localhost:8000/api/v2/heartbeat" ]
-      interval: 30s
-      timeout: 10s
-      retries: 3
-    networks:
-      - net
-
-volumes:
-  chroma-data:
-    driver: local
->>>>>>> d35a456 (set up chroma)
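The resolution above keeps a single Chroma service that publishes container port 8000 on host port 8001 and health-checks /api/v2/heartbeat. A minimal connectivity sketch from the client side, assuming the chromadb Python package and that the CHROMA_SERVER_AUTHN_* variables stay unset; the host and collection name are illustrative:

import chromadb

# Port follows the "8001:8000" mapping in the compose file above
client = chromadb.HttpClient(host="localhost", port=8001)
print(client.heartbeat())  # returns a timestamp when the server is reachable
collection = client.get_or_create_collection("blog_notes")  # hypothetical name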
@@ -126,7 +126,6 @@ class OllamaGenerator:
         {self.content}
         """
         try:
-<<<<<<< HEAD
             query_embed = self.ollama_client.embed(model=self.embed_model, input=prompt_system)['embeddings']
             collection = self.load_to_vector_db()
             collection_query = collection.query(query_embeddings=query_embed, n_results=100)
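The surviving branch of this hunk embeds the system prompt with Ollama and pulls the 100 nearest chunks from the collection. A standalone sketch of that retrieval path, assuming local Ollama and Chroma servers; the embedding model and collection names are illustrative:

import ollama
import chromadb

client = chromadb.HttpClient(host="localhost", port=8001)
collection = client.get_or_create_collection("blog_notes")  # hypothetical name
# Mirrors self.ollama_client.embed(...) in the hunk above
query_embed = ollama.embed(model="mxbai-embed-large", input="system prompt text")["embeddings"]
results = collection.query(query_embeddings=query_embed, n_results=100)
print(results["documents"])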
@@ -147,24 +146,6 @@ class OllamaGenerator:
             #print ("Markdown Generated")
             #print (self.response)
             return self.response#['message']['content']
-=======
-            self.response = self.ollama_client.chat(model=self.ollama_model,
-                                                     messages=[
-                                                         {
-                                                             'role': 'user',
-                                                             'content': f'{prompt}',
-                                                         },
-                                                     ])
-
-            # the deepseek model returns <think> this removes those tabs from the output
-<<<<<<< HEAD
-            return re.sub(r"<think|.\n\r+?|([^;]*)\/think>",'',self.response['message']['content'])
->>>>>>> e1a24af (get rid of think tags)
-=======
-            # return re.sub(r"<think|.\n\r+?|([^;]*)\/think>",'',self.response['message']['content'])
-            return self.response['message']['content']
-
->>>>>>> d45f0be (env set up for remote)
 
         except Exception as e:
             raise Exception(f"Failed to generate markdown: {e}")
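The branch removed here stripped deepseek-style <think> blocks with the regex shown above before this commit settled on returning the raw message content. For comparison, a more conventional pattern for that cleanup, a sketch rather than what this repo ships:

import re

def strip_think_tags(text: str) -> str:
    # Drop <think>...</think> reasoning blocks; DOTALL lets "." span newlines
    return re.sub(r"<think>.*?</think>", "", text, flags=re.DOTALL).strip()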
@@ -173,12 +154,9 @@ class OllamaGenerator:
         with open(filename, "w") as f:
             f.write(self.generate_markdown())
 
-<<<<<<< HEAD
     def generate_commit_message(self):
         prompt_system = "You are a blog creator commiting a piece of content to a central git repo"
         prompt_human = f"Generate a 5 word git commit message describing {self.response}"
         messages = [("system", prompt_system), ("human", prompt_human),]
         commit_message = self.llm.invoke(messages).text()
         return commit_message
-=======
->>>>>>> e1a24af (get rid of think tags)
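generate_commit_message survives the resolution; the (role, content) tuples and the .text() call on the reply suggest self.llm is a LangChain chat model, though the diff never shows its construction. A self-contained sketch under that assumption, reusing the openthinker:7b model name from the removed src/main.py branch below:

from langchain_ollama import ChatOllama  # assumed dependency, not shown in the diff

llm = ChatOllama(model="openthinker:7b")
messages = [
    ("system", "You are a blog creator committing a piece of content to a central git repo"),
    ("human", "Generate a 5 word git commit message describing <generated post>"),
]
print(llm.invoke(messages).text())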
src/main.py (10 changes)

@@ -19,15 +19,8 @@ for note in tril_notes:
     print(tril_notes[note]['title'])
     # print(tril_notes[note]['content'])
     print("Generating Document")
-<<<<<<< HEAD
-
-=======
-    ai_gen = omg.OllamaGenerator(tril_notes[note]['title'],
-                                 tril_notes[note]['content'],
-                                 "openthinker:7b")
->>>>>>> d35a456 (set up chroma)
     os_friendly_title = convert_to_lowercase_with_underscores(tril_notes[note]['title'])
-<<<<<<< HEAD
     ai_gen = omg.OllamaGenerator(os_friendly_title,
                                  tril_notes[note]['content'],
                                  tril_notes[note]['title'])
@@ -39,6 +32,3 @@ for note in tril_notes:
     git_pass = os.environ["GIT_PASS"]
     repo_manager = git_repo.GitRepository("blog/", git_user, git_pass)
     repo_manager.create_copy_commit_push(blog_path, os_friendly_title, commit_message)
-=======
-    ai_gen.save_to_file(f"./generated_files/{os_friendly_title}.md")
->>>>>>> d45f0be (env set up for remote)