app factory and more
All checks were successful
Gitea Docker Redeploy / Redploy-App-on-self-via-SSH (push) Successful in 3m9s

This commit is contained in:
Tobias Weise 2024-09-09 16:18:32 +02:00
parent 691019d743
commit e9c1d1815f
8 changed files with 747 additions and 517 deletions

6
.gitignore vendored
View File

@ -1,6 +1,10 @@
backend/__pycache__/ backend/__pycache__/
ollama ollama
deployment/ollama
*.env *.env
deployment/*.env deployment/*.env
deployment/ollama
# neo4j
deployment/data
deployment/logs

View File

@ -1,20 +1,14 @@
""" __version__ = "0.4.0"
OpenAPI access via http://localhost:5000/openapi/ on local docker-compose deployment
"""
#------std lib modules:------- #------std lib modules:-------
import os, sys, json, time import os, sys, json, time
import os.path import os.path
from typing import Any, Tuple, List, Dict, Any, Callable, Optional, Union from typing import Any, Tuple, List, Dict, Any, Callable, Optional, Union
from datetime import datetime, date from datetime import datetime, date
import logging
from functools import wraps from functools import wraps
#-------ext libs-------------- #-------ext libs--------------
#from elasticsearch import NotFoundError, Elasticsearch # for normal read/write without vectors
#from elasticsearch_dsl import A, Document, Date, Integer, Keyword, Float, Long, Text, connections
from elasticsearch_dsl import connections from elasticsearch_dsl import connections
from pydantic import BaseModel, Field from pydantic import BaseModel, Field
@ -24,18 +18,20 @@ import jwt as pyjwt
from flask import Flask, send_from_directory, send_file, Response, request, jsonify from flask import Flask, send_from_directory, send_file, Response, request, jsonify
from flask_openapi3 import Info, Tag, OpenAPI, Server #FileStorage from flask_openapi3 import Info, Tag, OpenAPI, Server #FileStorage
from flask_socketio import SocketIO, join_room, leave_room, rooms, send from flask_socketio import SocketIO, join_room, leave_room, rooms, send
#from werkzeug.utils import secure_filename from werkzeug.utils import secure_filename
import asyncio import asyncio
import logging
#----------home grown-------------- #----------home grown--------------
from lib.funcs import group_by from lib.funcs import group_by
from lib.elastictools import get_by_id, wait_for_elasticsearch from lib.elastictools import get_by_id, wait_for_elasticsearch
from lib.models import init_indicies, Chatbot, User, Text from lib.models import init_indicies, Chatbot, User, Text, Question, Answer
from lib.chatbot import ask_bot, ask_bot2, train_text, download_llm from lib.chatbot import ask_bot, train_text, download_llm
from lib.speech import text_to_speech from lib.speech import text_to_speech
from lib.mail import send_mail from lib.mail import send_mail
from lib.user import hash_password, create_user, create_default_users from lib.user import hash_password, create_user, create_default_users
from lib.logging import ElasticsearchLogHandler, get_log_level
BOT_ROOT_PATH = os.getenv("BOT_ROOT_PATH") BOT_ROOT_PATH = os.getenv("BOT_ROOT_PATH")
@ -51,53 +47,26 @@ jwt_secret = os.getenv("SECRET")
assert jwt_secret assert jwt_secret
# JWT Bearer Sample
jwt = {
"type": "http",
"scheme": "bearer",
"bearerFormat": "JWT"
}
security_schemes = {"jwt": jwt}
security = [{"jwt": []}]
class MeasureTime:
def get_module_versions(): def __init__(self, name, logger=logging.getLogger()):
with open("requirements.txt", "r") as f: self.__start = datetime.now().timestamp()
modules = {line.split("#")[0] for line in f.read().split("\n") if line.split("#")[0] != ""} self.__logger = logger
with open("current_requirements.txt", "r") as f: self.__name = name
d = {k: v for (k, v) in [line.split("#")[0].split("==") for line in f.read().split("\n") if line.split("#")[0] != ""]}
return {k: v for k, v in d.items() if k in modules} def __enter__(self):
return self
def __exit__(self, type, value, traceback):
duration = round(datetime.now().timestamp() - self.__start, 2)
self.__logger.info(f"{self.__name} duration: {duration} seconds")
print(f"{self.__name} duration: {duration} seconds", flush=True)
return False
import multiprocessing def uses_jwt(logger=None, required=True):
cpus = multiprocessing.cpu_count()
info = Info(
title="CreativeBots-API",
version="1.0.0",
summary="The REST-API to manage bots, users and more!",
description="CPUs: " + str(cpus) + "<br>" + json.dumps(get_module_versions(), indent=4).replace("\n", "<br>")
)
servers = [
Server(url=BOT_ROOT_PATH )
]
class NotFoundResponse(BaseModel):
code: int = Field(-1, description="Status Code")
message: str = Field("Resource not found!", description="Exception Information")
app = OpenAPI(
__name__,
info=info,
servers=servers,
responses={404: NotFoundResponse},
security_schemes=security_schemes
)
def uses_jwt(required=True):
""" """
Wraps routes in a jwt-required logic and passes decoded jwt and user from elasticsearch to the route as keyword Wraps routes in a jwt-required logic and passes decoded jwt and user from elasticsearch to the route as keyword
""" """
@ -108,6 +77,11 @@ def uses_jwt(required=True):
def non_param_deco(f): def non_param_deco(f):
@wraps(f) @wraps(f)
def decorated_route(*args, **kwargs): def decorated_route(*args, **kwargs):
#app.logger.info(f"Request from IP: '{request.remote_addr}'")
#request.environ.get('HTTP_X_REAL_IP', request.remote_addr)
token = None token = None
if "Authorization" in request.headers: if "Authorization" in request.headers:
token = request.headers["Authorization"].split(" ")[1] token = request.headers["Authorization"].split(" ")[1]
@ -156,6 +130,43 @@ def uses_jwt(required=True):
def create_app():
# JWT Bearer Sample
jwt = {
"type": "http",
"scheme": "bearer",
"bearerFormat": "JWT"
}
security_schemes = {"jwt": jwt}
security = [{"jwt": []}]
info = Info(
title="CreativeBots-API",
version=__version__,
summary="The REST-API to manage bots, users and more!",
description=""
)
servers = [
Server(url=BOT_ROOT_PATH )
]
class NotFoundResponse(BaseModel):
code: int = Field(-1, description="Status Code")
message: str = Field("Resource not found!", description="Exception Information")
app = OpenAPI(
__name__,
info=info,
servers=servers,
responses={404: NotFoundResponse},
security_schemes=security_schemes
)
logger = logging.getLogger()
socket = SocketIO(app, cors_allowed_origins="*") socket = SocketIO(app, cors_allowed_origins="*")
@socket.on('connect') @socket.on('connect')
@ -184,6 +195,8 @@ def handle_message(message):
SocketMessage.model_validate(message) SocketMessage.model_validate(message)
logger.info("Starting stream")
#try: #try:
room = message["room"] room = message["room"]
@ -191,8 +204,18 @@ def handle_message(message):
system_prompt = message["system_prompt"] system_prompt = message["system_prompt"]
bot_id = message["bot_id"] bot_id = message["bot_id"]
start = datetime.now().timestamp()
d = ask_bot2(system_prompt + " " + question, bot_id) q = Question()
q.text = question
q.md5 = hash_password(question)
q.save()
#start = datetime.now().timestamp()
with MeasureTime("Streaming + Scoring docs -total", logger) as timer:
d = ask_bot(system_prompt + " " + question, bot_id)
def get_scores(*args): def get_scores(*args):
score_docs = d["get_score_docs"]() score_docs = d["get_score_docs"]()
@ -201,10 +224,22 @@ def handle_message(message):
def do_streaming(*args): def do_streaming(*args):
start_stream = datetime.now().timestamp() start_stream = datetime.now().timestamp()
answer = ""
for chunk in d["answer_generator"](): for chunk in d["answer_generator"]():
socket.emit('backend token', {'data': chunk, "done": False}, to=room) socket.emit('backend token', {'data': chunk, "done": False}, to=room)
answer += chunk
stream_duration = round(datetime.now().timestamp() - start_stream, 2) stream_duration = round(datetime.now().timestamp() - start_stream, 2)
print("Stream duration: ", stream_duration, flush=True)
logger.info("Stream duration: " + str(stream_duration))
a = Answer(
question_id=q.meta.id,
answer=answer,
md5=hash_password(answer)
)
a.save()
async def f(): async def f():
@ -221,8 +256,11 @@ def handle_message(message):
"score_docs": score_docs "score_docs": score_docs
}, to=room) }, to=room)
duration = round(datetime.now().timestamp() - start, 2)
print("Total duration: ", duration, flush=True)
#duration = round(datetime.now().timestamp() - start, 2)
#logger.info("Total duration: " + str(duration))
@ -245,10 +283,12 @@ def login(form: LoginRequest):
""" """
Get your JWT to verify access rights Get your JWT to verify access rights
""" """
LoginRequest.model_validate(form)
if form.email is None or form.password is None: if form.email is None or form.password is None:
msg = "Invalid password!" msg = "Invalid password!"
app.logger.error(msg) logger.error(msg)
return jsonify({ return jsonify({
'status': 'error', 'status': 'error',
'message': msg 'message': msg
@ -257,7 +297,7 @@ def login(form: LoginRequest):
match get_by_id(index="user", id_field_name="email", id_value=form.email): match get_by_id(index="user", id_field_name="email", id_value=form.email):
case []: case []:
msg = "User with email '%s' doesn't exist!" % form.email msg = "User with email '%s' doesn't exist!" % form.email
app.logger.error(msg) logger.error(msg)
return jsonify({ return jsonify({
'status': 'error', 'status': 'error',
'message': msg 'message': msg
@ -266,14 +306,14 @@ def login(form: LoginRequest):
case [user]: case [user]:
if user["password_hash"] == hash_password(form.password + form.email): if user["password_hash"] == hash_password(form.password + form.email):
token = pyjwt.encode({"email": form.email}, jwt_secret, algorithm="HS256") token = pyjwt.encode({"email": form.email}, jwt_secret, algorithm="HS256")
#app.logger.info(token) #logger.info(token)
return jsonify({ return jsonify({
'status': 'success', 'status': 'success',
'jwt': token 'jwt': token
}) })
else: else:
msg = "Invalid password!" msg = "Invalid password!"
app.logger.error(msg) logger.error(msg)
return jsonify({ return jsonify({
'status': 'error', 'status': 'error',
'message': msg 'message': msg
@ -293,7 +333,7 @@ def register(form: RegisterRequest):
if form.email is None or form.password is None: if form.email is None or form.password is None:
msg = "Parameters missing!" msg = "Parameters missing!"
app.logger.error(msg) logger.error(msg)
return jsonify({ return jsonify({
'status': 'error', 'status': 'error',
'message': msg 'message': msg
@ -343,13 +383,19 @@ class GetSpeechRequest(BaseModel):
@app.post('/text2speech', summary="", tags=[], security=security) @app.post('/text2speech', summary="", tags=[], security=security)
def text2speech(form: GetSpeechRequest): def text2speech(form: GetSpeechRequest):
with MeasureTime("text2speech file", logger) as timer:
file_name = text_to_speech(form.text) file_name = text_to_speech(form.text)
logger.info("Created '%s' !" % file_name)
return jsonify({ return jsonify({
"status": "success", "status": "success",
"file": "/" + file_name "file": "/" + file_name
}) })
#============ Bot CRUD =============== #============ Bot CRUD ===============
class CreateBotRequest(BaseModel): class CreateBotRequest(BaseModel):
@ -371,12 +417,13 @@ def create_bot(form: CreateBotRequest, decoded_jwt, user):
""" """
CreateBotRequest.model_validate(form) CreateBotRequest.model_validate(form)
bot = Chatbot() bot = Chatbot(
bot.name = form.name name=form.name,
bot.visibility = form.visibility visibility=form.visibility,
bot.description = form.description description=form.description,
bot.system_prompt = form.system_prompt system_prompt=form.system_prompt,
bot.llm_model = form.llm_model llm_model=form.llm_model
)
#add meta data #add meta data
bot.creation_date = datetime.now() bot.creation_date = datetime.now()
@ -443,7 +490,6 @@ def get_bots(query: GetBotRequest, decoded_jwt, user):
class UpdateBotRequest(BaseModel): class UpdateBotRequest(BaseModel):
id: str = Field(None, description="The bot's id") id: str = Field(None, description="The bot's id")
name: str = Field(None, description="The bot's name") name: str = Field(None, description="The bot's name")
@ -513,10 +559,9 @@ def query_bot(query: AskBotRequest, decoded_jwt, user):
bot_id = query.bot_id bot_id = query.bot_id
question = query.question question = query.question
start = datetime.now().timestamp() start = datetime.now().timestamp()
d = ask_bot2(system_prompt + " " + question, bot_id) d = ask_bot(system_prompt + " " + question, bot_id)
def get_scores(*args): def get_scores(*args):
score_docs = d["get_score_docs"]() score_docs = d["get_score_docs"]()
@ -530,7 +575,7 @@ def query_bot(query: AskBotRequest, decoded_jwt, user):
answer += chunk answer += chunk
stream_duration = round(datetime.now().timestamp() - start_stream, 2) stream_duration = round(datetime.now().timestamp() - start_stream, 2)
print("Stream duration: ", stream_duration, flush=True) logger.info(f"Stream duration: {stream_duration}")
return answer return answer
@ -544,9 +589,8 @@ def query_bot(query: AskBotRequest, decoded_jwt, user):
[score_docs, answer] = asyncio.run(f()) [score_docs, answer] = asyncio.run(f())
duration = round(datetime.now().timestamp() - start, 2) duration = round(datetime.now().timestamp() - start, 2)
print("Total duration: ", duration, flush=True)
app.logger.info(duration) logger.info("Total duration: " + str(duration))
return jsonify({ return jsonify({
"answer": answer, "answer": answer,
@ -587,7 +631,10 @@ def upload(form: TrainTextRequest, decoded_jwt, user):
'message': 'No data source found' 'message': 'No data source found'
}), 400 }), 400
with MeasureTime("Training on text", logger) as _:
train_text(bot_id, text) train_text(bot_id, text)
return jsonify({ return jsonify({
"status": "success" "status": "success"
}) })
@ -599,23 +646,43 @@ def index():
return send_from_directory('./public', "index.html") return send_from_directory('./public', "index.html")
@app.route("/info")
def debug_info():
    """Render deployment diagnostics as browser-friendly HTML.

    Reports the pinned versions of the declared requirements, the ollama
    concurrency environment settings, and the CPU count, as indented JSON
    with newlines converted to <br> tags.
    """
    import multiprocessing

    def _non_comment_entries(path):
        # Yield the text before any '#' comment on each line, skipping blanks.
        with open(path, "r") as fh:
            for line in fh.read().split("\n"):
                entry = line.split("#")[0]
                if entry != "":
                    yield entry

    def _pinned_module_versions():
        # Versions pinned in current_requirements.txt, restricted to the
        # modules actually declared in requirements.txt.
        declared = set(_non_comment_entries("requirements.txt"))
        pinned = dict(entry.split("==") for entry in _non_comment_entries("current_requirements.txt"))
        return {module: version for module, version in pinned.items() if module in declared}

    info = {
        "module_versions": _pinned_module_versions(),
        "OLLAMA_NUM_PARALLEL": os.getenv("OLLAMA_NUM_PARALLEL"),
        "OLLAMA_MAX_LOADED_MODELS": os.getenv("OLLAMA_MAX_LOADED_MODELS"),
        "cpus": multiprocessing.cpu_count(),
    }
    return json.dumps(info, indent=4).replace("\n", "<br>")
@app.route('/<path:path>') #generische Route (auch Unterordner) @app.route('/<path:path>') #generische Route (auch Unterordner)
def catchAll(path): def catchAll(path):
logger.info(f"Path requested: '{path}'")
return send_from_directory('./public', path) return send_from_directory('./public', path)
#import logging_loki
lvl = get_log_level()
#print(f"Log level: {lvl}", flush=True)
logger.addHandler(ElasticsearchLogHandler(level=logging.INFO))
logger.setLevel(lvl)
def create_app():
LOG_LEVEL = os.getenv("LOG_LEVEL")
if LOG_LEVEL:
logging.basicConfig(level=eval("logging." + LOG_LEVEL))
else:
logging.basicConfig(level=logging.WARN)
#TODO: implement some kind of logging mechanism
download_llm("llama3") download_llm("llama3")
connections.create_connection(hosts=elastic_url, request_timeout=60) connections.create_connection(hosts=elastic_url, request_timeout=60)
wait_for_elasticsearch() wait_for_elasticsearch()
@ -624,8 +691,9 @@ def create_app():
return app return app
if __name__ == '__main__': if __name__ == '__main__':
app = create_app() app = create_app()
app.run(debug=False, host='0.0.0.0') app.run(debug=True, host='0.0.0.0')

View File

@ -87,26 +87,11 @@ def train_text(bot_id, text):
#TODO add history
def ask_bot(question, bot_id): def ask_bot(question, bot_id):
bot = Chatbot.get(id=bot_id)
llm = Ollama(
model=bot.llm_model,
base_url=ollama_url
)
query = bot.system_prompt + " " + question
for chunk in llm.stream(query):
yield chunk
#connections.get_connection()
#if es.indices.exists(index="index"):
def ask_bot2(question, bot_id):
""" """
Asks a chatbot Asks a chatbot using RAG resources
""" """
bot = Chatbot.get(id=bot_id) bot = Chatbot.get(id=bot_id)
@ -159,7 +144,6 @@ def ask_bot2(question, bot_id):
xs.append([score, dict(doc)]) xs.append([score, dict(doc)])
return xs return xs
return { return {
"answer_generator": gen_func, "answer_generator": gen_func,
"get_score_docs": get_score_docs "get_score_docs": get_score_docs

38
backend/lib/knowledge.py Normal file
View File

@ -0,0 +1,38 @@
from neo4j import GraphDatabase
"""
pwd = "neo4j2"
proto = "bolt"
host = "192.168.99.101"
driver = GraphDatabase.driver("%s://%s:7687" % (proto, host), auth=("neo4j", pwd), encrypted=False)
def add_friend(tx, name, friend_name):
tx.run("MERGE (a:Person {name: $name}) "
"MERGE (a)-[:KNOWS]->(friend:Person {name: $friend_name})",
name=name, friend_name=friend_name)
def print_friends(tx, name):
for record in tx.run("MATCH (a:Person)-[:KNOWS]->(friend) WHERE a.name = $name "
"RETURN friend.name ORDER BY friend.name", name=name):
print(record["friend.name"])
with driver.session() as session:
session.write_transaction(add_friend, "Arthur", "Guinevere")
session.write_transaction(add_friend, "Arthur", "Lancelot")
session.write_transaction(add_friend, "Arthur", "Merlin")
session.read_transaction(print_friends, "Arthur")
driver.close()
"""

68
backend/lib/logging.py Normal file
View File

@ -0,0 +1,68 @@
import logging
import os
from datetime import datetime, date
from lib.models import LogEntry
class ElasticsearchLogHandler(logging.Handler):
    """Logging handler that persists each log record as a LogEntry document
    in Elasticsearch (via elasticsearch_dsl)."""

    def __init__(self, level):
        """Initialise the handler; only records at `level` or above are emitted."""
        super().__init__(level=level)
        # Re-entrancy guard: saving a LogEntry may itself produce log output
        # (the elasticsearch client logs its requests), which would recurse
        # back into emit() and loop forever.
        self._emitting = False

    def emit(self, record):
        """Persist one logging.LogRecord to the 'logentry' index.

        Uses record.getMessage() so %-style arguments are interpolated —
        storing record.msg alone would save the raw format string (e.g.
        werkzeug's '... "%s" %s %s') without its args.
        """
        if self._emitting:
            return
        self._emitting = True
        try:
            entry = LogEntry(
                message=record.getMessage(),
                level=record.levelname,
                creation_time=datetime.now(),
                name=record.name,
                pathname=record.pathname,
                filename=record.filename,
                module=record.module,
                lineno=record.lineno,
                funcName=record.funcName,
                threadName=record.threadName,
                processName=record.processName,
            )
            entry.save()
        except Exception:
            # A broken Elasticsearch connection must never crash the app
            # through a log call; delegate to the stdlib error hook
            # (respects logging.raiseExceptions).
            self.handleError(record)
        finally:
            self._emitting = False
def get_log_level(default=logging.WARN):
    """Resolve the desired log level from the LOG_LEVEL environment variable.

    LOG_LEVEL should name a standard logging level ("DEBUG", "INFO", "WARN",
    "ERROR", ...). When unset or unrecognised, `default` is returned.

    Uses getattr() instead of the previous eval("logging." + LOG_LEVEL):
    eval would execute arbitrary code taken from the environment, and it
    raised on any typo instead of falling back.
    """
    name = os.getenv("LOG_LEVEL")
    if not name:
        return default
    level = getattr(logging, name.strip().upper(), None)
    # The logging module also exposes functions/classes; accept only the
    # numeric level constants.
    return level if isinstance(level, int) else default

View File

@ -1,7 +1,6 @@
import os import os
from elasticsearch_dsl import Document, InnerDoc, Nested, Date, Integer, Keyword, Float, Long, Text, connections, Object, Boolean from elasticsearch_dsl import Document, InnerDoc, Nested, Date, Integer, Keyword, Float, Long, Text, connections, Object, Boolean
class User(Document): class User(Document):
creation_date = Date() creation_date = Date()
email = Keyword() email = Keyword()
@ -29,7 +28,6 @@ class User(Document):
return super(User, self).save(**kwargs) return super(User, self).save(**kwargs)
class Chatbot(Document): class Chatbot(Document):
creation_date = Date() creation_date = Date()
changed_date = Date() changed_date = Date()
@ -63,11 +61,6 @@ class Chatbot(Document):
return super(Chatbot, self).save(**kwargs) return super(Chatbot, self).save(**kwargs)
class Text(Document): class Text(Document):
creation_date = Date() creation_date = Date()
creator_id = Keyword() creator_id = Keyword()
@ -84,62 +77,101 @@ class Text(Document):
return super(Text, self).save(**kwargs) return super(Text, self).save(**kwargs)
class Question(Document):
#======= Query Log =========== question = Text(index=False, required=True)
md5 = Keyword()
class Sources(InnerDoc):
score = Float()
#sourceFileId = Text()
sourceType = Text()
tags = Text()
#new fields
sourceFileId = Keyword()
filename = Keyword()
url = Keyword()
txt_id = Keyword()
page = Integer()
class QueryLog(Document):
answer = Text()
question = Text()
chatbotid = Keyword()
durasecs = Float()
#inCt = Float()
inToks = Long()
llm = Text()
#outCt = Float()
outToks = Long()
#queryid = Keyword()
#rating = Long()
#reason = Text()
#reasontags = Text()
session = Keyword()
sources = Object(Sources)
temperature = Float()
#totalCt = Float()
timest = Date() #timestamp
date = Date() #iso date
class Index: class Index:
name = 'query_log' name = 'question'
settings = { settings = {
"number_of_shards": 1, "number_of_shards": 1,
} }
def save(self, ** kwargs): def save(self, ** kwargs):
return super(QueryLog, self).save(**kwargs) return super(Question, self).save(**kwargs)
class Answer(Document):
    """An answer document, linked to its Question via question_id."""

    question_id = Keyword()                    # meta.id of the related Question
    answer = Text(index=False, required=True)  # raw answer text (stored, not indexed)
    md5 = Keyword()                            # hash of the answer text

    class Index:
        name = 'answer'
        settings = {"number_of_shards": 1}

    def save(self, **kwargs):
        return super().save(**kwargs)
class LogEntry(Document):
    """One application log record persisted to Elasticsearch.

    Field names mirror the attributes of a stdlib logging.LogRecord.
    """

    message = Text(index=False, required=True)  # the log message text
    level = Keyword()          # level name, e.g. 'INFO' (record.levelname)
    creation_time = Date()     # timestamp assigned when the entry is built
    name = Keyword()           # logger name, e.g. 'werkzeug'
    pathname = Keyword()       # full path of the emitting source file
    filename = Keyword()       # basename of pathname, e.g. '_internal.py'
    module = Keyword()         # module name, e.g. '_internal'
    lineno = Integer(required=True)  # source line of the log call
    funcName = Keyword()       # function that issued the log call
    threadName = Keyword()     # e.g. 'Thread-15 (process_request_thread)'
    processName = Keyword()    # e.g. 'MainProcess'

    class Index:
        name = 'logentry'
        settings = {"number_of_shards": 1}

    def save(self, **kwargs):
        return super().save(**kwargs)
#======= Query Log ===========
#class Sources(InnerDoc):
#score = Float()
#tags = Text()
#filename = Keyword()
#page = Integer()
#----------------------------------------------
def init_indicies(): def init_indicies():
# create the mappings in elasticsearch """
for Index in [QueryLog, Chatbot, User, Text]: Create the mappings in elasticsearch
"""
for Index in [LogEntry, Question, Answer, Chatbot, User, Text]:
Index.init() Index.init()

View File

@ -32,6 +32,31 @@ services:
- MINIO_DEFAULT_BUCKETS=defaultbucket - MINIO_DEFAULT_BUCKETS=defaultbucket
command: server --console-address ":29001" /data command: server --console-address ":29001" /data
neo4j:
container_name: ${APP_PREFIX}_neo4j
image: neo4j
#image: neo4j:3.5
#image: neo4j:4.1
restart: unless-stopped
ports:
- 7474:7474
- 7687:7687
volumes:
- ./conf:/conf
- ./data:/data
- ./import:/import
- ./logs:/logs
- ./plugins:/plugins
environment:
- NEO4J_AUTH=neo4j/your_password
# Raise memory limits
- NEO4J_server_memory_pagecache_size=512M
- NEO4J_server_memory_heap_max__size=512M
- dbms.usage_report.enabled=false
elasticsearch: elasticsearch:
container_name: ${APP_PREFIX}_elasticsearch container_name: ${APP_PREFIX}_elasticsearch
image: docker.elastic.co/elasticsearch/elasticsearch:8.11.0 image: docker.elastic.co/elasticsearch/elasticsearch:8.11.0
@ -64,6 +89,10 @@ services:
restart: always restart: always
ports: ports:
- "11434:11434" - "11434:11434"
environment:
- OLLAMA_NUM_PARALLEL=4
- OLLAMA_MAX_LOADED_MODELS=4
volumes: volumes:
- ..:/code - ..:/code
- ../ollama/ollama:/root/.ollama - ../ollama/ollama:/root/.ollama
@ -75,6 +104,12 @@ services:
- /dev/dri - /dev/dri
#ollama-webui: #ollama-webui:
# container_name: ${APP_PREFIX}_ollama-webui # container_name: ${APP_PREFIX}_ollama-webui
# image: ghcr.io/ollama-webui/ollama-webui:main # image: ghcr.io/ollama-webui/ollama-webui:main

View File

@ -5,6 +5,7 @@ APP_PREFIX=creative_bots
DEFAULT_USERS=[["user@mail.net", "12345", "user"], ["admin@mail.net", "12345", "admin"]] DEFAULT_USERS=[["user@mail.net", "12345", "user"], ["admin@mail.net", "12345", "admin"]]
#JWT encryption secret: #JWT encryption secret:
SECRET=1234 SECRET=23A344F670E
#WARN INFO FATAL
LOG_LEVEL=WARN