app factory and more
All checks were successful
Gitea Docker Redeploy / Redploy-App-on-self-via-SSH (push) Successful in 3m9s
This commit is contained in: parent 691019d743, commit e9c1d1815f
.gitignore (vendored): 6 lines changed
@@ -1,6 +1,10 @@
 backend/__pycache__/
 ollama
-deployment/ollama
 *.env
 deployment/*.env
+deployment/ollama
+
+# neo4j
+deployment/data
+deployment/logs
backend/app.py: 952 lines changed
File diff suppressed because it is too large; only the hunks below were loaded.
@@ -87,26 +87,11 @@ def train_text(bot_id, text):
 
 
 
-#TODO add history
 
 def ask_bot(question, bot_id):
-    bot = Chatbot.get(id=bot_id)
-    llm = Ollama(
-        model=bot.llm_model,
-        base_url=ollama_url
-    )
-    query = bot.system_prompt + " " + question
-    for chunk in llm.stream(query):
-        yield chunk
-
-
-
-#connections.get_connection()
-#if es.indices.exists(index="index"):
-
-def ask_bot2(question, bot_id):
     """
-    Asks a chatbot
+    Asks a chatbot using RAG resources
     """
 
     bot = Chatbot.get(id=bot_id)
@@ -159,7 +144,6 @@ def ask_bot2(question, bot_id):
            xs.append([score, dict(doc)])
        return xs
 
-
    return {
        "answer_generator": gen_func,
        "get_score_docs": get_score_docs
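A rough caller-side sketch of the dict returned at the end of this hunk. The key names are taken from the diff; the calling conventions are assumptions, since the full function body is collapsed above:

# Hypothetical caller of ask_bot (calling conventions assumed, not shown in the diff)
result = ask_bot("What do the uploaded documents say?", bot_id="<bot-id>")

for chunk in result["answer_generator"]():        # assumed to be a generator function yielding text chunks
    print(chunk, end="", flush=True)

for score, doc in result["get_score_docs"]():     # the [score, doc] pairs built in this hunk
    print(score, doc)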
backend/lib/knowledge.py (new file): 38 lines
@@ -0,0 +1,38 @@
+from neo4j import GraphDatabase
+
+"""
+pwd = "neo4j2"
+proto = "bolt"
+host = "192.168.99.101"
+
+driver = GraphDatabase.driver("%s://%s:7687" % (proto, host), auth=("neo4j", pwd), encrypted=False)
+
+def add_friend(tx, name, friend_name):
+    tx.run("MERGE (a:Person {name: $name}) "
+           "MERGE (a)-[:KNOWS]->(friend:Person {name: $friend_name})",
+           name=name, friend_name=friend_name)
+
+def print_friends(tx, name):
+    for record in tx.run("MATCH (a:Person)-[:KNOWS]->(friend) WHERE a.name = $name "
+                         "RETURN friend.name ORDER BY friend.name", name=name):
+        print(record["friend.name"])
+
+with driver.session() as session:
+    session.write_transaction(add_friend, "Arthur", "Guinevere")
+    session.write_transaction(add_friend, "Arthur", "Lancelot")
+    session.write_transaction(add_friend, "Arthur", "Merlin")
+    session.read_transaction(print_friends, "Arthur")
+
+driver.close()
+"""
backend/lib/logging.py (new file): 68 lines
@@ -0,0 +1,68 @@
+import logging
+import os
+from datetime import datetime, date
+from lib.models import LogEntry
+
+
+class ElasticsearchLogHandler(logging.Handler):
+
+    def __init__(self, level):
+        logging.Handler.__init__(self=self)
+        #super().__init__(self=self)
+        self.setLevel(level)
+
+    def emit(self, record):
+
+        #print(str(record.__dict__), flush=True)
+
+        #{'name': 'werkzeug',
+        # 'msg': '192.168.64.1 - - [07/Sep/2024 11:43:23] "%s" %s %s',
+        # 'args': ('GET /socket.io/?EIO=4&transport=websocket&sid=MtyTmZQs5IA6DnvhAAAA HTTP/1.1', '200', '-'),
+        # 'levelname': 'INFO',
+        # 'levelno': 20,
+        # 'pathname': '/usr/local/lib/python3.12/dist-packages/werkzeug/_internal.py',
+        # 'filename': '_internal.py',
+        # 'module': '_internal',
+        # 'exc_info': None,
+        # 'exc_text': None,
+        # 'stack_info': None,
+        # 'lineno': 97,
+        # 'funcName': '_log',
+        # 'created': 1725709403.1972203,
+        # 'msecs': 197.0,
+        # 'relativeCreated': 37105.026721954346,
+        # 'thread': 133472930760384,
+        # 'threadName': 'Thread-15 (process_request_thread)',
+        # 'processName': 'MainProcess',
+        # 'process': 26,
+        # 'taskName': None}
+
+        entry = LogEntry(
+            message = record.msg,
+            level = record.levelname, #record.levelno,
+            creation_time = datetime.now(),
+
+            name = record.name,
+            pathname = record.pathname,
+            filename = record.filename,
+            module = record.module,
+            lineno = record.lineno,
+            funcName = record.funcName,
+            threadName = record.threadName,
+            processName = record.processName
+        )
+        entry.save()
+
+
+def get_log_level(default=logging.WARN):
+    LOG_LEVEL = os.getenv("LOG_LEVEL")
+    if LOG_LEVEL:
+        return eval("logging." + LOG_LEVEL)
+    return default
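The wiring of this handler lives in backend/app.py, whose diff is collapsed above, so the following is only a minimal sketch of how it might be attached, assuming the elasticsearch_dsl connection used by LogEntry is configured elsewhere:

# Hypothetical wiring (not shown in the collapsed app.py diff)
import logging
from lib.logging import ElasticsearchLogHandler, get_log_level

level = get_log_level()                       # reads LOG_LEVEL from the environment, defaults to WARN
es_handler = ElasticsearchLogHandler(level)

logging.getLogger().addHandler(es_handler)             # application-wide logging
logging.getLogger("werkzeug").addHandler(es_handler)   # the HTTP access log seen in the commented record dump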
backend/lib/models.py

@@ -1,7 +1,6 @@
 import os
 from elasticsearch_dsl import Document, InnerDoc, Nested, Date, Integer, Keyword, Float, Long, Text, connections, Object, Boolean
 
-
 class User(Document):
     creation_date = Date()
     email = Keyword()
@@ -29,7 +28,6 @@ class User(Document):
         return super(User, self).save(**kwargs)
 
 
-
 class Chatbot(Document):
     creation_date = Date()
     changed_date = Date()
@@ -63,11 +61,6 @@ class Chatbot(Document):
         return super(Chatbot, self).save(**kwargs)
 
 
-
-
-
-
-
 class Text(Document):
     creation_date = Date()
     creator_id = Keyword()
@@ -84,62 +77,101 @@ class Text(Document):
         return super(Text, self).save(**kwargs)
 
 
-#======= Query Log ===========
-
-class Sources(InnerDoc):
-    score = Float()
-    #sourceFileId = Text()
-    sourceType = Text()
-    tags = Text()
-
-    #new fields
-    sourceFileId = Keyword()
-    filename = Keyword()
-    url = Keyword()
-    txt_id = Keyword()
-    page = Integer()
-
-
-class QueryLog(Document):
-    answer = Text()
-    question = Text()
-
-    chatbotid = Keyword()
-    durasecs = Float()
-    #inCt = Float()
-    inToks = Long()
-    llm = Text()
-    #outCt = Float()
-    outToks = Long()
-
-    #queryid = Keyword()
-    #rating = Long()
-    #reason = Text()
-    #reasontags = Text()
-    session = Keyword()
-
-    sources = Object(Sources)
-    temperature = Float()
-    #totalCt = Float()
-
-    timest = Date() #timestamp
-    date = Date() #iso date
+class Question(Document):
+    question = Text(index=False, required=True)
+    md5 = Keyword()
 
     class Index:
-        name = 'query_log'
+        name = 'question'
         settings = {
             "number_of_shards": 1,
         }
 
     def save(self, ** kwargs):
-        return super(QueryLog, self).save(**kwargs)
+        return super(Question, self).save(**kwargs)
+
+
+class Answer(Document):
+    question_id = Keyword()
+    answer = Text(index=False, required=True)
+    md5 = Keyword()
+
+    class Index:
+        name = 'answer'
+        settings = {
+            "number_of_shards": 1,
+        }
+
+    def save(self, ** kwargs):
+        return super(Answer, self).save(**kwargs)
+
+
+class LogEntry(Document):
+    message = Text(index=False, required=True)
+    level = Keyword() #Integer(required=True)
+    creation_time = Date()
+
+    name = Keyword()
+    # 'args': ('GET /socket.io/?EIO=4&transport=websocket&sid=MtyTmZQs5IA6DnvhAAAA HTTP/1.1', '200', '-'),
+
+    pathname = Keyword()
+    # 'pathname': '/usr/local/lib/python3.12/dist-packages/werkzeug/_internal.py',
+
+    filename = Keyword()
+    # 'filename': '_internal.py',
+
+    module = Keyword()
+    # 'module': '_internal',
+
+    lineno = Integer(required=True)
+    # 'lineno': 97,
+
+    funcName = Keyword()
+    # 'funcName': '_log',
+
+    # 'created': 1725709403.1972203,
+    # 'msecs': 197.0,
+
+    threadName = Keyword()
+    # 'threadName': 'Thread-15 (process_request_thread)',
+
+    processName = Keyword()
+    # 'processName': 'MainProcess',
+
+
+    class Index:
+        name = 'logentry'
+        settings = {
+            "number_of_shards": 1,
+        }
+
+    def save(self, ** kwargs):
+        return super(LogEntry, self).save(**kwargs)
+
+
+#======= Query Log ===========
+
+#class Sources(InnerDoc):
+    #score = Float()
+    #tags = Text()
+    #filename = Keyword()
+    #page = Integer()
+
+#----------------------------------------------
 
 def init_indicies():
-    # create the mappings in elasticsearch
-    for Index in [QueryLog, Chatbot, User, Text]:
+    """
+    Create the mappings in elasticsearch
+    """
+    for Index in [LogEntry, Question, Answer, Chatbot, User, Text]:
         Index.init()
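How these new documents are initialised and written is not visible in the collapsed backend/app.py diff. A minimal sketch against the classes above, assuming a default elasticsearch_dsl connection; the host and the md5 helper are illustrative:

# Hypothetical usage of the new Question/Answer documents (assumed call site)
import hashlib
from elasticsearch_dsl import connections
from lib.models import init_indicies, Question, Answer

connections.create_connection(hosts=["http://elasticsearch:9200"])   # assumed host/port for the compose service
init_indicies()   # creates the logentry, question, answer, chatbot, user and text mappings

q_text = "What is RAG?"
q = Question(question=q_text, md5=hashlib.md5(q_text.encode()).hexdigest())
q.save()

a_text = "Retrieval-augmented generation."
a = Answer(question_id=q.meta.id,
           answer=a_text,
           md5=hashlib.md5(a_text.encode()).hexdigest())
a.save()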
@@ -32,6 +32,31 @@ services:
       - MINIO_DEFAULT_BUCKETS=defaultbucket
     command: server --console-address ":29001" /data
 
+  neo4j:
+    container_name: ${APP_PREFIX}_neo4j
+    image: neo4j
+    #image: neo4j:3.5
+    #image: neo4j:4.1
+    restart: unless-stopped
+    ports:
+      - 7474:7474
+      - 7687:7687
+    volumes:
+      - ./conf:/conf
+      - ./data:/data
+      - ./import:/import
+      - ./logs:/logs
+      - ./plugins:/plugins
+    environment:
+      - NEO4J_AUTH=neo4j/your_password
+
+      # Raise memory limits
+      - NEO4J_server_memory_pagecache_size=512M
+      - NEO4J_server_memory_heap_max__size=512M
+
+      - dbms.usage_report.enabled=false
+
+
   elasticsearch:
     container_name: ${APP_PREFIX}_elasticsearch
     image: docker.elastic.co/elasticsearch/elasticsearch:8.11.0
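The backend can reach this new service by its compose service name. A minimal connectivity sketch, mirroring the commented example in backend/lib/knowledge.py; the service name and credentials come from this hunk, everything else is assumed:

# Hypothetical connectivity check against the neo4j service added above
from neo4j import GraphDatabase

driver = GraphDatabase.driver("bolt://neo4j:7687", auth=("neo4j", "your_password"))
with driver.session() as session:
    print(session.run("RETURN 1 AS ok").single()["ok"])   # expect 1 if the service is reachable
driver.close()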
@@ -64,6 +89,10 @@ services:
     restart: always
     ports:
       - "11434:11434"
+    environment:
+      - OLLAMA_NUM_PARALLEL=4
+      - OLLAMA_MAX_LOADED_MODELS=4
+
     volumes:
       - ..:/code
       - ../ollama/ollama:/root/.ollama
@@ -75,6 +104,12 @@ services:
       - /dev/dri
+
+
+
+
+
+
 
 
 #ollama-webui:
 # container_name: ${APP_PREFIX}_ollama-webui
 # image: ghcr.io/ollama-webui/ollama-webui:main
@@ -5,6 +5,7 @@ APP_PREFIX=creative_bots
 DEFAULT_USERS=[["user@mail.net", "12345", "user"], ["admin@mail.net", "12345", "admin"]]
 
 #JWT encryption secret:
-SECRET=1234
+SECRET=23A344F670E
 
-
+#WARN INFO FATAL
+LOG_LEVEL=WARN