
Commit 6d1dbad

Adds support for OpenAI responses and adds Open-WebUI interface
1 parent: e58279e

File tree: 2 files changed (+23, -3 lines)

docker-compose.yml

Lines changed: 18 additions & 0 deletions
@@ -38,3 +38,21 @@ services:
       - STATIC_FILE_LOCATION=/app/static
     env_file:
       - .env
+  openwebui:
+    image: ghcr.io/open-webui/open-webui:main
+    ports:
+      - '3000:8080'
+    environment:
+      - OPENAI_API_KEYS=FAKE-KEY;
+      - OPENAI_API_BASE_URLS=http://dialog:8000/openai;
+      - ENABLE_OPENAI_API=true
+    volumes:
+      - open-webui:/app/backend/data
+    depends_on:
+      db:
+        condition: service_healthy
+      dialog:
+        condition: service_started
+
+volumes:
+  open-webui:
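
With this service in place, Open-WebUI is published on host port 3000 and talks to the dialog service's OpenAI-compatible API (OPENAI_API_BASE_URLS=http://dialog:8000/openai) using the placeholder key FAKE-KEY. As a rough sketch of how any OpenAI-style client, Open-WebUI included, would call that endpoint, assuming the dialog service is also reachable from the host on port 8000 and the router below is mounted under /openai:

from openai import OpenAI

# Sketch only: the base_url, port mapping, and API key mirror the compose
# settings above and are assumptions about the local deployment, not fixed values.
client = OpenAI(
    base_url="http://localhost:8000/openai",  # inside the compose network: http://dialog:8000/openai
    api_key="FAKE-KEY",                       # placeholder key matching OPENAI_API_KEYS
)

response = client.chat.completions.create(
    model="talkd-ai",
    messages=[{"role": "user", "content": "Hello, dialog!"}],
)
print(response.choices[0].message.content)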

src/dialog/routers/openai.py

Lines changed: 5 additions & 3 deletions
@@ -31,15 +31,16 @@ async def ask_question_to_llm(message: OpenAIChat, session: Session = Depends(ge
     """
     This posts a message to the LLM and returns the response in the OpenAI format.
     """
+    logging.info(f"Received message: {message}")
     start_time = datetime.datetime.now()
     new_chat = ChatEntity(
         session_id = f"openai-{str(uuid4())}",
     )
     session.add(new_chat)
-    for message in message.messages[:-1]:
+    for _message in message.messages[:-1]:
         new_message = ChatMessages(
             session_id=new_chat.session_id,
-            message=message.message,
+            message=_message.content,
         )
         session.add(new_message)
     session.flush()
@@ -62,12 +63,13 @@ async def ask_question_to_llm(message: OpenAIChat, session: Session = Depends(ge
         ],
         created=int(datetime.datetime.now().timestamp()),
         id=f"talkdai-{str(uuid4())}",
-        model="talkdai",
+        model="talkd-ai",
         object="chat.completion",
         usage={
             "completion_tokens": None,
             "prompt_tokens": None,
             "total_tokens": None
         }
     )
+    logging.info(f"Chat completion: {chat_completion}")
     return chat_completion
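
For reference, the completion object this handler now returns (and that Open-WebUI renders) has roughly the shape below. The choices entry is an assumption inferred from the truncated hunk above; the remaining fields come directly from the constructor shown in the diff.

# Approximate shape of the chat.completion payload built above (sketch, not exact output).
{
    "id": "talkdai-<uuid4>",
    "object": "chat.completion",
    "created": 1714000000,  # int(datetime.datetime.now().timestamp())
    "model": "talkd-ai",
    "choices": [
        # assumed: a single assistant choice carrying the LLM's answer
        {"index": 0, "message": {"role": "assistant", "content": "..."}, "finish_reason": "stop"}
    ],
    "usage": {
        "completion_tokens": None,
        "prompt_tokens": None,
        "total_tokens": None,
    },
}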
