Skip to content

Commit

Permalink
Merge pull request #459 from henrikclh/henrikclh-patch-1
Browse files Browse the repository at this point in the history
兼容LinkAI bot在slack通道发送回复中的图片以及图像生成的图片
  • Loading branch information
6vision authored Aug 24, 2024
2 parents 5692818 + 65d3151 commit 1e92ee1
Showing 1 changed file with 20 additions and 11 deletions.
31 changes: 20 additions & 11 deletions model/linkai/link_ai_bot.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,14 +35,14 @@ def reply(self, query, context=None):
return '记忆已清除'
new_query = Session.build_session_query(query, from_user_id)
context['session'] = new_query # 将 new_query 添加到 context 字典中 session
log.debug("[lINKAI] session query={}".format(new_query))
log.debug("[LINKAI] session query={}".format(new_query))

# if context.get('stream'):
# # reply in stream
# return self.reply_text_stream(query, context)

reply_content = self._chat(query, context)
log.debug("[lINKAI] new_query={}, user={}, reply_cont={}".format(new_query, from_user_id, reply_content))
log.debug("[LINKAI] new_query={}, user={}, reply_cont={}".format(new_query, from_user_id, reply_content))
return reply_content

elif context.get('type', None) == 'IMAGE_CREATE':
Expand Down Expand Up @@ -95,7 +95,7 @@ def _chat(self, query, context, retry_count=0):
headers = {"Authorization": "Bearer " + linkai_api_key}

# do http request
base_url = model_conf(const.LINKAI).get("api_base", "https://api.link-ai.chat")
base_url = model_conf(const.LINKAI).get("api_base", "https://api.link-ai.tech")
res = requests.post(url=base_url + "/v1/chat/completions", json=body, headers=headers,
timeout=180)
if res.status_code == 200:
Expand All @@ -118,11 +118,20 @@ def _chat(self, query, context, retry_count=0):
reply_content += knowledge_suffix
# image process
if response["choices"][0].get("img_urls"):
thread = threading.Thread(target=self._send_image, args=(context['channel'], context, response["choices"][0].get("img_urls")))
thread.start()
if response["choices"][0].get("text_content"):
reply_content = response["choices"][0].get("text_content")
if 'send' in type(context['channel']).__dict__: # 通道实例所属类的定义是否有send方法
thread = threading.Thread(target=self._send_image, args=(context['channel'], context, response["choices"][0].get("img_urls")))
thread.start()
if response["choices"][0].get("text_content"):
reply_content = response["choices"][0].get("text_content")
else:
reply_content = response["choices"][0].get("text_content", "") + " " + " ".join(response["choices"][0].get("img_urls")) # 图像生成时候需要合并文本和图片url
reply_content = self._process_url(reply_content)

# thread = threading.Thread(target=self._send_image, args=(context['channel'], context, response["choices"][0].get("img_urls")))
# thread.start()
# if response["choices"][0].get("text_content"):
# reply_content = response["choices"][0].get("text_content")
#reply_content = self._process_url(reply_content)
return reply_content

else:
Expand Down Expand Up @@ -244,7 +253,7 @@ async def reply_text_stream(self, query, context, retry_count=0) :
headers = {"Authorization": "Bearer " + linkai_api_key}

# do http request
base_url = model_conf(const.LINKAI).get("api_base", "https://api.link-ai.chat")
base_url = model_conf(const.LINKAI).get("api_base", "https://api.link-ai.tech")
res = requests.post(url=base_url + "/v1/chat/completions", json=body, headers=headers, stream=True,
timeout=180)
if res.status_code == 200:
Expand Down Expand Up @@ -439,7 +448,7 @@ def build_session_query(query, user_id):
'''
session = user_session.get(user_id, [])
if len(session) == 0:
system_prompt = model_conf(const.OPEN_AI).get("character_desc", "")
system_prompt = model_conf(const.LINKAI).get("character_desc", "")
system_item = {'role': 'system', 'content': system_prompt}
session.append(system_item)
user_session[user_id] = session
Expand All @@ -449,8 +458,8 @@ def build_session_query(query, user_id):

@staticmethod
def save_session(query, answer, user_id, used_tokens=0):
max_tokens = model_conf(const.OPEN_AI).get('conversation_max_tokens')
max_history_num = model_conf(const.OPEN_AI).get('max_history_num', None)
max_tokens = model_conf(const.LINKAI).get('conversation_max_tokens')
max_history_num = model_conf(const.LINKAI).get('max_history_num', None)
if not max_tokens or max_tokens > 4000:
# default value
max_tokens = 1000
Expand Down

0 comments on commit 1e92ee1

Please sign in to comment.