feat: meeting-room booking assistant (joint debugging)

雷雨
2025-06-04 17:45:12 +08:00
parent fb7a2d7eca
commit 46962aeb3b
7 changed files with 71 additions and 33 deletions

.env

@@ -1,2 +1,3 @@
API_KEY=sk-WK7dkfpYfaXYtOQGMOfax5DmQF5itLZ2WXvi8ReVpQfGmbCN
MODEL_BASE_URL=https://www.chataiapi.com
MODEL_API_KEY=sk-KnfrPFFnNDOCFkPkWsvRE7uJGNR0QMDCZ1Ie83ARhtOKMMWa
MODEL_BASE_URL=https://www.chataiapi.com/v1
MODEL_NAME=deepseek-r1

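The renamed variables above (MODEL_API_KEY, MODEL_BASE_URL with the /v1 suffix, MODEL_NAME) are read through python-decouple elsewhere in this commit. A minimal sketch of how they are consumed, assuming the key names shown above; the defaults are illustrative only:

from decouple import config

# Key names match the .env entries above; defaults are illustrative assumptions.
MODEL_NAME = config('MODEL_NAME', default='deepseek-r1')
MODEL_BASE_URL = config('MODEL_BASE_URL', default='https://www.chataiapi.com/v1')
MODEL_API_KEY = config('MODEL_API_KEY')  # no default, so a missing key fails fast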
requirements.txt

@@ -1,3 +1,3 @@
Django==5.2.1
Django==2.32.3
dotenv==0.9.9
requests==2.32.3
python-decouple==3.8

yj_room_agent/LLM/ai_service.py

@@ -1,10 +1,11 @@
from threading import Lock
from datetime import datetime
import requests, json
from openai_client import call_openai_api
from .openai_client import call_openai_api
from decouple import config
MODEL_NAME = ''
BASE_URL = ''
MODEL_NAME = config('MODEL_NAME', default="")
BASE_URL = config('MODEL_BASE_URL', default="")
def is_json(myjson):
@@ -15,10 +16,20 @@ def is_json(myjson):
return False
def query_room_info() -> str:
resp = requests.get('http://127.0.0.1:8000/myapi/room/')
return resp.text
def book_room(data: dict) -> str:
resp = requests.post('http://127.0.0.1:8000/myapi/room/', data)
return resp.text
def build_prompt():
"""构建增强提示词"""
# 获取可用会议室信息
room_info = {}
room_info = query_room_info()
for_mart_str = '''
{
"room_id":"11", //会议室ID
@@ -36,11 +47,13 @@ def build_prompt():
Available meeting room information:
{room_info}
Please handle the request in the following steps:
1. Parse the user's requirements (time, headcount, equipment needs, etc.)
2. Recommend suitable options from the list of available rooms; do not return JSON when recommending options
3. If the user decides to book a specific room, extract the booking details and return them as JSON only, without any extra description. Example output:
1. Parse the user's booking requirements (time, headcount, equipment needs, etc.)
2. Recommend suitable options from the list of available rooms; do not extract booking details while recommending — wait until the user confirms. Reply in normal natural language
3. If the user has decided to book a specific room (rather than still asking which room fits), extract the booking details (booking time, etc.) from the context and return only the booking information as JSON, without any extra description. Example output:
{for_mart_str}
4. For any other request, reply in natural language
4. When the user starts another booking request, do not extract booking details right away; re-check the latest state of the available rooms and recommend a suitable room based on the user's needs. Do not extract details or return JSON while recommending — wait until the user confirms. Reply in normal natural language
5. If the user needs the booking result explained, parse the returned result and give natural-language feedback; do not return JSON
6. For any other request, reply in natural language
"""
return template
@@ -96,12 +109,16 @@ dialog_manager = DialogManager()
def process_chat(user_id: str, user_input: str):
history = dialog_manager.get_history(user_id)
history = []
query_history = dialog_manager.get_history(user_id)
history.extend(query_history)
prompt = ''
if history is None or len(history) == 0:
prompt = build_prompt()
dialog_manager.add_message(user_id, 'system', prompt)
dialog_manager.add_message(user_id, 'user', user_input)
resp = call_openai_api(model=MODEL_NAME, system_prompt=prompt, user_query=user_input,
api_key='sk-KnfrPFFnNDOCFkPkWsvRE7uJGNR0QMDCZ1Ie83ARhtOKMMWa',
api_key=config('MODEL_API_KEY'),
history=history)
content = resp["choices"][0]["message"]["content"]
reasoning_content = resp["choices"][0]["message"]["reasoning_content"]
@@ -109,20 +126,27 @@ def process_chat(user_id: str, user_input: str):
if 'json' in content or is_json(content):
new_content = content.replace("json", '')
new_content = new_content.replace("`", '')
print(type(new_content))
print(new_content)
data = json.loads(new_content)
# Trigger the booking function ------
result = {}
result = book_room(data=data)
print(result)
book_promot = f'''
The result of the user's meeting room booking is as follows:
The result of the system's booking API call is as follows:
{result}
Parse the booking result and give the user feedback accordingly
Parse the booking result for the user and give natural-language feedback accordingly
'''
resp = call_openai_api(model=MODEL_NAME, system_prompt=book_promot, user_query=None,
api_key='sk-KnfrPFFnNDOCFkPkWsvRE7uJGNR0QMDCZ1Ie83ARhtOKMMWa',
history=history)
new_history = []
query_history = dialog_manager.get_history(user_id)
new_history.extend(query_history)
resp = call_openai_api(model=MODEL_NAME, user_query=book_promot,
api_key=config('MODEL_API_KEY'),
history=new_history,
system_prompt='',
)
content = resp["choices"][0]["message"]["content"]
dialog_manager.add_message(user_id, 'assistant', content)
return {'response': resp}
else:
dialog_manager.add_message(dialog_manager, 'assistant', reasoning_content)
dialog_manager.add_message(user_id, 'assistant', content)
dialog_manager.add_message(user_id, 'assistant', reasoning_content)
return {'response': resp}

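The hunks above rely on two helpers whose bodies are not shown: is_json (only its trailing return False is visible) and DialogManager. A hedged sketch of implementations consistent with the imports (threading.Lock, json) and the call sites (get_history, add_message); the actual code in the repository may differ:

import json
from threading import Lock

def is_json(myjson: str) -> bool:
    """Return True if the string parses as JSON (used to detect booking payloads)."""
    try:
        json.loads(myjson)
        return True
    except (ValueError, TypeError):
        return False

class DialogManager:
    """Per-user chat history, guarded by a lock for concurrent requests (assumed shape)."""
    def __init__(self):
        self._lock = Lock()
        self._sessions: dict[str, list[dict]] = {}

    def get_history(self, user_id: str) -> list[dict]:
        with self._lock:
            return list(self._sessions.get(user_id, []))

    def add_message(self, user_id: str, role: str, content: str) -> None:
        with self._lock:
            self._sessions.setdefault(user_id, []).append(
                {"role": role, "content": content}
            )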
yj_room_agent/LLM/openai_client.py

@@ -6,10 +6,10 @@ import os
import uuid
import logging
from typing import Dict, Optional, Any
from decouple import config
# API endpoint as a configurable variable
DEFAULT_API_ENDPOINT = "https://www.chataiapi.com/v1"
DEFAULT_API_ENDPOINT = config('MODEL_BASE_URL', default="https://www.chataiapi.com/v1")
retry = Retry(total=5, backoff_factor=1)
adapter = HTTPAdapter(max_retries=retry, pool_connections=10, pool_maxsize=100)
session = requests.Session()
@@ -28,6 +28,7 @@ def call_openai_api(
max_tokens: Optional[int] = None,
additional_params: Optional[Dict[str, Any]] = None,
history: Optional[list] = [],
assistant_query: Optional[str] = None,
) -> Dict[str, Any]:
"""
Call an OpenAI-compatible API with the given system prompt and user query.
@@ -64,8 +65,8 @@ def call_openai_api(
# Add API key to headers if provided
if api_key:
headers["Authorization"] = f"Bearer {api_key}"
elif os.environ.get("OPENAI_API_KEY"):
headers["Authorization"] = f"Bearer {os.environ.get('OPENAI_API_KEY')}"
elif config('MODEL_API_KEY'):
headers["Authorization"] = f"Bearer {config('MODEL_API_KEY')}"
messages = []
if len(history) > 0:
messages.extend(history)
@@ -73,12 +74,16 @@ def call_openai_api(
messages.append({"role": "system", "content": system_prompt})
if user_query and len(user_query) > 0:
messages.append({"role": "user", "content": user_query})
if assistant_query and len(assistant_query) > 0:
messages.append({"role": "assistant", "content": assistant_query})
else:
messages = [
{"role": "system", "content": system_prompt},
{"role": "user", "content": user_query}
]
# Prepare request payload
print(messages)
payload = {
"model": model,
"messages": messages,

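For reference, a usage sketch of call_openai_api with the new assistant_query parameter, using only parameters visible in the signature above; the module path is assumed from the yj_room_agent.LLM import seen in views.py, and the model name and queries are placeholders:

from decouple import config
from yj_room_agent.LLM.openai_client import call_openai_api  # package path assumed from the views import

history = [
    {"role": "system", "content": "You are a meeting-room booking assistant."},
    {"role": "user", "content": "Which rooms are free tomorrow at 10:00?"},
]
resp = call_openai_api(
    model=config('MODEL_NAME', default='deepseek-r1'),
    system_prompt='',                 # system turn already present in history
    user_query='Book room 11 for 6 people.',
    assistant_query=None,             # new parameter: appended as an assistant turn when provided
    api_key=config('MODEL_API_KEY'),
    history=history,
)
print(resp["choices"][0]["message"]["content"])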
settings.py

@@ -12,23 +12,20 @@ https://docs.djangoproject.com/en/5.2/ref/settings/
from pathlib import Path
from dotenv import load_dotenv
from decouple import config
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/5.2/howto/deployment/checklist/
load_dotenv()
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'django-insecure-i(fm5c2v*=vgfwmgdl^qi7iezv(xfwovbqu=+^=vm72e$gnx&l'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ["192.168.237.130",'127.0.0.1']
ALLOWED_HOSTS = ["192.168.237.130", '127.0.0.1','10.212.27.44']
# Application definition
@@ -46,7 +43,7 @@ MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
#'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',

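Since python-decouple is now imported in settings.py, the hardcoded SECRET_KEY, DEBUG, and ALLOWED_HOSTS could be moved to .env in the same way ai_service.py now reads MODEL_API_KEY. A sketch under that assumption; the DJANGO_* key names are invented for illustration and are not part of this commit:

from decouple import config, Csv

# Assumed key names; the commit itself keeps these values hardcoded in settings.py.
SECRET_KEY = config('DJANGO_SECRET_KEY', default='django-insecure-change-me')
DEBUG = config('DJANGO_DEBUG', default=False, cast=bool)
ALLOWED_HOSTS = config('DJANGO_ALLOWED_HOSTS',
                       default='127.0.0.1,192.168.237.130,10.212.27.44',
                       cast=Csv())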
urls.py

@@ -21,4 +21,5 @@ from yj_room_agent import views
urlpatterns = [
path('admin/', admin.site.urls),
path('test/', views.hello, name='hello'),
path('book_room/', views.room_chat, name='room_chat'),
]

yj_room_agent/views.py

@@ -1,5 +1,15 @@
from django.http import StreamingHttpResponse, JsonResponse, FileResponse
from yj_room_agent.LLM.ai_service import process_chat
from django.views.decorators.http import require_POST
import json
def hello(request):
return JsonResponse({'msg': 'ok'})
@require_POST
def room_chat(request):
data = json.loads(request.body)
resp = process_chat(data['user_id'], data['user_query'])
return JsonResponse(resp)
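
A quick way to exercise the new endpoint from a test script, assuming the development server is running on 127.0.0.1:8000 and the urlpatterns above belong to the project-level URLconf (so the route is /book_room/); the user_id and user_query keys come from room_chat above:

import requests

resp = requests.post(
    'http://127.0.0.1:8000/book_room/',
    json={'user_id': 'u-001',
          'user_query': 'I need a room for six people tomorrow at 10:00.'},
)
# process_chat returns {'response': <full model response>}, which JsonResponse passes through.
print(resp.json()['response'])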