mirror of
https://github.com/ClovertaTheTrilobita/SanYeCao-Nonebot.git
synced 2026-04-01 22:04:51 +00:00
commit
063d023371
3 changed files with 28 additions and 2 deletions
|
|
@ -34,4 +34,5 @@ lazy_object_proxy
|
||||||
openai
|
openai
|
||||||
typing_extensions
|
typing_extensions
|
||||||
psutil
|
psutil
|
||||||
Beautifulsoup4
|
Beautifulsoup4
|
||||||
|
aiohttp
|
||||||
|
|
@ -2,6 +2,8 @@ import openai
|
||||||
import requests
|
import requests
|
||||||
from src.clover_sqlite.models.chat import GroupChatRole
|
from src.clover_sqlite.models.chat import GroupChatRole
|
||||||
from src.configs.api_config import v3url, v3key, deepseek_url, deepseek_key
|
from src.configs.api_config import v3url, v3key, deepseek_url, deepseek_key
|
||||||
|
import aiohttp
|
||||||
|
import asyncio
|
||||||
|
|
||||||
openai.api_key = deepseek_key
|
openai.api_key = deepseek_key
|
||||||
openai.base_url = deepseek_url
|
openai.base_url = deepseek_url
|
||||||
|
|
@ -49,5 +51,28 @@ async def deepseek_chat(group_openid,content):
|
||||||
await GroupChatRole.save_chat_history(group_openid, {"role": "assistant", "content": reply_content})
|
await GroupChatRole.save_chat_history(group_openid, {"role": "assistant", "content": reply_content})
|
||||||
return reply_content
|
return reply_content
|
||||||
|
|
||||||
|
async def silicon_flow(group_openid, content):
    """Send the group's chat history to the SiliconFlow chat-completions API
    and return the assistant's reply.

    The new user message is persisted first, the full stored history is sent
    as the prompt, and the assistant reply is persisted before being returned.

    :param group_openid: identifier of the group whose chat history is used
    :param content: the new user message text
    :return: the assistant's reply text
    :raises aiohttp.ClientResponseError: on a non-2xx HTTP response
    :raises KeyError: if the API response lacks the expected "choices" shape
    """
    await GroupChatRole.save_chat_history(group_openid, {"role": "user", "content": content})
    messages = await GroupChatRole.get_chat_history(group_openid)

    url = "https://api.siliconflow.cn/v1/chat/completions"
    payload = {
        "model": "Pro/deepseek-ai/DeepSeek-V3",
        "stream": False,
        "messages": messages
    }
    # SECURITY: hardcoded bearer token committed to source control — move it
    # into src.configs.api_config (alongside deepseek_key/v3key) and rotate
    # this key. Left byte-identical here so behavior is unchanged.
    headers = {
        "Authorization": "Bearer sk-lcsbvcogybhzznggjsucscrcmveeuuksecxvdkhtrlmzjmqs",
        "Content-Type": "application/json"
    }

    async with aiohttp.ClientSession() as session:
        async with session.post(url, json=payload, headers=headers) as response:
            # Fail fast on HTTP errors instead of surfacing an opaque
            # KeyError when an error body has no "choices" key.
            response.raise_for_status()
            result = await response.json()
            reply_content = result["choices"][0]["message"]["content"]

    await GroupChatRole.save_chat_history(group_openid, {"role": "assistant", "content": reply_content})
    return reply_content
|
||||||
|
|
||||||
if __name__ == '__main__':
    # The previous smoke test called the async deepseek_chat without
    # awaiting it (printing only a coroutine object) and omitted the
    # group_openid argument its signature requires. asyncio.run drives the
    # coroutine to completion; the group id is a throwaway value for ad-hoc
    # manual testing only.
    print(asyncio.run(deepseek_chat("test_group", "你拽什么啊?")))
|
||||||
|
|
|
||||||
|
|
@ -40,7 +40,7 @@ async def handle_function(message: MessageEvent):
|
||||||
member_openid, content = message.author.id, message.get_plaintext()
|
member_openid, content = message.author.id, message.get_plaintext()
|
||||||
status = await GroupChatRole.is_on(group_openid)
|
status = await GroupChatRole.is_on(group_openid)
|
||||||
if status:
|
if status:
|
||||||
msg = await ai_chat.deepseek_chat(group_openid,content)
|
msg = await ai_chat.silicon_flow(group_openid,content)
|
||||||
await check.finish(msg)
|
await check.finish(msg)
|
||||||
else:
|
else:
|
||||||
await check.finish(message=Message(random.choice(text_list)))
|
await check.finish(message=Message(random.choice(text_list)))
|
||||||
|
|
|
||||||
Loading…
Reference in a new issue