# -*- coding: utf-8 -*-
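"""OdooBot integration with Alibaba Cloud DashScope models.

Extends ``mail.bot`` so that OdooBot can answer chat messages through
DashScope-hosted LLMs (Qwen, Baichuan, ChatGLM, Llama2). Chat commands
(#help, #enable, #disable, #clear, #qwen, #baichuan, #chatglm) switch the
per-user ``odoobot_state``; the API key is stored in the ``qwen_api_key``
system parameter and the ``odoobot_mode`` parameter selects synchronous or
queued ("streaming") responses.
"""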
import datetime
from http import HTTPStatus

import dashscope
import pytz
from bs4 import BeautifulSoup
from dashscope import Generation

from odoo import models, fields, _

LLM_MODELS = {
    'aigc': 'qwen-14b-chat',
    'qwen': 'qwen-14b-chat',
    'baichuan': 'baichuan2-7b-chat-v1',
    'chatglm': 'chatglm-6b-v2',
    'llama2': 'llama2-13b-chat-v2',
}
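# For reference, each entry above resolves to a plain DashScope call of the
# form used in MailBot.respond() below -- a minimal sketch, assuming a valid
# API key has already been set on the dashscope module:
#
#     from dashscope import Generation
#     rsp = Generation().call('qwen-14b-chat',
#                             messages=[{'role': 'user', 'content': 'ping'}],
#                             result_format='message')
#     if rsp.status_code == HTTPStatus.OK:
#         print(rsp.output.choices[0]['message']['content'])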


class ResConfigSettings(models.TransientModel):
    _inherit = ['res.config.settings']

    apikey_qwen = fields.Char(string='Qwen API Key', config_parameter='qwen_api_key')
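    # ``config_parameter`` persists the key as the ``qwen_api_key`` system
    # parameter (ir.config_parameter); MailBot.respond() reads it back from
    # there before each DashScope call.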


class ResUsers(models.Model):
    _inherit = 'res.users'

    odoobot_state = fields.Selection(selection_add=[
        ('aigc', 'AIGC'),
        ('qwen', 'Qwen'),
        ('chatglm', 'ChatGLM'),
        ('baichuan', 'Baichuan'),
        ('llama2', 'Llama2'),
    ])
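    # These values extend the stock odoobot_state selection; MailBot._get_answer
    # only routes messages to DashScope while the state is one of
    # 'aigc', 'qwen', 'chatglm' or 'baichuan'.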


class MailBot(models.AbstractModel):
    _inherit = 'mail.bot'

    def _get_answer(self, record, body, values, command):
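        """Answer a message addressed to OdooBot.

        Bodies matching one of the #commands switch the bot's mode or clear
        the channel history; any other message is forwarded to the DashScope
        model selected by the user's odoobot_state. The stock OdooBot
        behaviour is kept when no AIGC state is active or when the body
        carries notification/redirect markup.
        """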
        odoobot_state = self.env.user.odoobot_state
        odoobot_mode = self.env['ir.config_parameter'].sudo().get_param('odoobot_mode')

        if body == "#help":
            return _(
                "Type one of the following commands to activate the matching feature:<br/>"
                "#enable switch to AIGC mode<br/>"
                "#disable leave AIGC mode<br/>"
                "#clear clear the chat history<br/>"
                "#qwen Tongyi Qianwen (Qwen) model<br/>"
                "#baichuan Baichuan large language model<br/>"
                "#chatglm ChatGLM large language model<br/>"
            )
elif body == "#enable":
|
||
self.env.user.odoobot_state = 'aigc'
|
||
return "AIGC enabled"
|
||
elif body == "#disable":
|
||
self.env.user.odoobot_state = 'disabled'
|
||
return "AIGC disabled"
|
||
elif body == "#clear":
|
||
message_ids = self.env['mail.channel'].search([('id', '=', record.id)]).message_ids.ids
|
||
self.env['mail.message'].search([('id', 'in', message_ids[1:])], order='id desc').unlink()
|
||
return # "cleared all the messages"
|
||
elif body == "#qwen":
|
||
self.env.user.odoobot_state = 'qwen'
|
||
return "Qwen enabled"
|
||
elif body == "#baichuan":
|
||
self.env.user.odoobot_state = 'baichuan'
|
||
return "Baichuan enabled"
|
||
elif body == "#chatglm":
|
||
self.env.user.odoobot_state = 'chatglm'
|
||
return "ChatGLM enabled"
|
||

        # Bodies containing Odoo's notification/redirect markup are left to
        # the standard bot behaviour.
        exclusion = {'o_mail_notification', 'o_mail_redirect', 'o_channel_redirect'}
        if any(marker in body for marker in exclusion):
            return super(MailBot, self)._get_answer(record, body, values, command)

        if odoobot_state not in ['aigc', 'qwen', 'chatglm', 'baichuan']:
            return super(MailBot, self)._get_answer(record, body, values, command)

        # Conversation context: the channel's latest 32 messages, skipping the
        # one that triggered this call (offset=1), newest first.
        message_new = []
        message_ids = self.env['mail.channel'].search([('id', '=', record.id)]).message_ids.ids
        message_ids = self.env['mail.message'].search([('id', 'in', message_ids)], order='id desc', limit=32, offset=1)
        prompt, history = self.prompt(llm_model=odoobot_state, message_ids=message_ids)
        # ChatGLM receives the prompt and history as separate arguments, so the
        # system/user entries are only appended for the chat-message models.
        if odoobot_state not in ['chatglm']:
            message_new.append({'role': 'system', 'content': prompt})
        for item in history:
            message_new.append(item)
        if odoobot_state not in ['chatglm']:
            message_new.append({"role": "user", "content": body})
        if odoobot_mode == 'streaming':
            # Asynchronous mode: run the LLM call in a queued job (with_delay,
            # from the queue_job addon) and post the answer later.
            self.with_delay().respond(odoobot_state, odoobot_mode, record, message_new, body)
        else:
            return self.respond(odoobot_state, odoobot_mode, record, message_new, body)

def prompt(self, llm_model, message_ids):
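        """Build the system prompt and chat history for ``llm_model``.

        Returns a ``(prompt, history)`` pair. For the chat-style models the
        history is a flat list of ``{"role": ..., "content": ...}`` dicts;
        for ChatGLM it is a list of ``[user_text, assistant_text]`` pairs,
        seeded with the system prompt itself.
        """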
        lang = self.env.user.lang
        tz = self.env.user.tz
        local = pytz.timezone(tz)
        now = datetime.datetime.strftime(pytz.utc.localize(datetime.datetime.utcnow()).astimezone(local), "%Y-%m-%d %H:%M:%S")
        lang = self.env['res.lang'].search([('code', '=', lang)]).name
        app = self.env['ir.module.module'].search([('state', '=', 'installed'), ('application', '=', True)]).mapped('name')
        message_ids = list(reversed(message_ids))
        length = len(message_ids)
        bot_id = self.env.ref("base.partner_root")
        history = []
        prompt = f"""
        You are an assistant working for the company named {self.env.company.name};
        the current date and time is {now}, timezone {tz};
        Apps installed: {str(app)};
        answer in: {lang};
        """
        if llm_model in ['chatglm']:
            # ChatGLM receives the context as [question, answer] pairs, so the
            # system prompt is injected as the answer to an identity question.
            history.append(['What is my identity and background information, my name, etc.?', prompt])
        i = 0
        for message_id in message_ids:
            i += 1
            # Pair every non-bot message with the bot reply that follows it.
            if message_id.author_id.id != bot_id.id and i != length:
                message_user = BeautifulSoup(message_id.body, 'html.parser').get_text()
                if message_ids[i].author_id.id == bot_id.id:
                    message_assistant = BeautifulSoup(message_ids[i].body, 'html.parser').get_text()
                    if message_user and message_assistant:
                        item_user = {"role": "user", "content": message_user} if llm_model not in ['chatglm'] else message_user
                        item_assistant = {"role": "assistant", "content": message_assistant} if llm_model not in ['chatglm'] else message_assistant
                        if llm_model in ['chatglm']:
                            history.append([item_user, item_assistant])
                        else:
                            history.append(item_user)
                            history.append(item_assistant)
        return prompt, history

def respond(self, llm_model, mode, record, messages, prompt=None):
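        """Call DashScope and return (or post) the model's answer.

        ChatGLM uses the prompt/history calling convention; the other models
        use chat messages with ``result_format='message'``. In streaming
        (queued) mode the answer is posted into the channel as OdooBot,
        otherwise it is returned to the caller.
        """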
        dashscope.api_key = self.env['ir.config_parameter'].sudo().get_param('qwen_api_key')
        gen = Generation()
        content = ''
        model = LLM_MODELS[llm_model]
        if llm_model in ['chatglm']:
            # ChatGLM: prompt plus [question, answer] history.
            response = gen.call(model=model, prompt=prompt, history=messages)
            if response.status_code == HTTPStatus.OK:
                content = response.output['text']
        else:
            # Qwen, Baichuan and Llama2 share the chat-message convention.
            response = gen.call(model, messages=messages, result_format='message', enable_search=True)
            if response.status_code == HTTPStatus.OK:
                content = response.output.choices[0]['message']['content']

        if not content:
            # Surface the DashScope error details in the chat instead of failing silently.
            msg = ('Request id: %s, Status code: %s, error code: %s, error message: %s' % (
                response.request_id, response.status_code,
                response.code, response.message
            ))
            content = msg + str(response)

        if mode == 'streaming':
            # Queued mode has no caller waiting for a return value, so post the
            # answer into the channel directly, authored by OdooBot.
            odoobot_id = self.env.ref("base.partner_root")
            mod_response = self.env['mail.channel'].with_context(chatgpt=True).browse(record.id).message_post(
                body=content,
                message_type='comment',
                subtype_xmlid='mail.mt_comment',
                author_id=odoobot_id.id
            )
            return mod_response

        return content