You can not select more than 25 topics
Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
399 lines
14 KiB
399 lines
14 KiB
# -*- coding: utf-8 -*-
|
|
import time
|
|
|
|
import httpx
|
|
import json
|
|
import anthropic
|
|
|
|
|
|
def fetch_llm_generate_task(odoo_base_url="http://localhost:8069"):
    """Fetch a pending LLM generation task from the Odoo API.

    Args:
        odoo_base_url: Base URL of the Odoo service,
            defaults to http://localhost:8069.

    Returns:
        dict or None: On success, the task payload containing:
            - id: record ID
            - final_prompt: final prompt text
            - system_prompt: system prompt text
            - user_prompt: user prompt text
        Returns None when there is no pending task, the response has an
        unexpected shape, or any request error occurs.
    """
    url = f"{odoo_base_url}/api/local-llm-generate/fetch"

    headers = {
        "Content-Type": "application/json"
    }

    # The fetch endpoint takes no parameters; an empty JSON body is expected.
    payload = {}

    try:
        print(f"[INFO] 正在请求 Odoo 接口: {url}")
        response = httpx.post(
            url,
            headers=headers,
            json=payload,
            timeout=30
        )
        response.raise_for_status()

        data = response.json()

        # {"data": null} means there is no pending generation task.
        # BUGFIX: this previously called exit(1), which terminated the whole
        # process instead of returning None as documented — the caller's
        # `if not task_data: break` could never run.
        if isinstance(data, dict) and 'data' in data and data['data'] is None:
            print("[INFO] 当前没有待生成的 LLM 任务")
            return None

        # A well-formed task payload always carries the record id
        # (plus final_prompt / system_prompt / user_prompt).
        if isinstance(data, dict) and 'id' in data:
            print(f"[SUCCESS] 获取到任务数据, record_id={data['id']}")
            return data

        print(f"[WARNING] 返回数据格式不符合预期: {data}")
        return None

    except httpx.TimeoutException:
        print("[ERROR] 请求超时")
        return None
    except httpx.ConnectError as e:
        print(f"[ERROR] 连接错误: {e}")
        return None
    except httpx.HTTPStatusError as e:
        print(f"[ERROR] HTTP 错误: {e}")
        return None
    except Exception as e:
        print(f"[ERROR] 发生未知错误: {e}")
        return None
|
|
|
|
|
|
def call_llm_generate(task_data, max_retries=3):
    """Call the local Ollama LLM to generate content.

    Args:
        task_data: dict with final_prompt, system_prompt, user_prompt
            and id (the record ID, used only for logging).
        max_retries: maximum number of attempts, default 3.

    Returns:
        str: the generated content on success; the literal "failed" after
        all retries are exhausted; or an "[ERROR] ..." message when
        task_data is None.
    """
    if task_data is None:
        error_msg = "[ERROR] task_data 为 None,无法调用 LLM"
        print(error_msg)
        return error_msg

    # Pull the prompt fields off the task payload.
    final_prompt = task_data.get('final_prompt', '')
    system_prompt = task_data.get('system_prompt', '')
    user_prompt = task_data.get('user_prompt', '')
    record_id = task_data.get('id', 'unknown')

    print(f"[INFO] 开始为 record_id={record_id} 调用本地 Ollama")

    # Ollama's OpenAI-compatible chat-completions endpoint.
    ollama_url = "http://localhost:11434/v1/chat/completions"

    # Assemble the chat messages.
    messages = []
    if system_prompt:
        messages.append({"role": "system", "content": system_prompt})
    # FIX: fall back to user_prompt when final_prompt is empty, matching
    # call_llm_generate_with_anthropic. Previously user_prompt was read but
    # never used, so a task without final_prompt sent no user message at all.
    user_content = final_prompt if final_prompt else user_prompt
    if user_content:
        messages.append({"role": "user", "content": user_content})

    # Request body; stream=False so the full completion arrives in one JSON.
    payload = {
        "model": "kimi-k2.5:cloud",
        "messages": messages,
        "stream": False
    }

    # Retry loop for transient failures.
    for attempt in range(1, max_retries + 1):
        try:
            print(f"[INFO] 第 {attempt} 次尝试调用 Ollama...")

            # Long timeout: local generation can take several minutes.
            response = httpx.post(
                ollama_url,
                json=payload,
                timeout=600
            )
            response.raise_for_status()

            result = response.json()

            # Extract the generated text from the OpenAI-style response.
            if "choices" in result and len(result["choices"]) > 0:
                generated_content = result["choices"][0].get("message", {}).get("content", "")
                if generated_content:
                    print(f"[SUCCESS] 第 {attempt} 次尝试成功,生成长度: {len(generated_content)} 字符")
                    return generated_content
                else:
                    print(f"[WARNING] 第 {attempt} 次尝试返回空内容")
            else:
                print(f"[WARNING] 第 {attempt} 次尝试返回格式异常: {result}")

        except httpx.TimeoutException:
            print(f"[ERROR] 第 {attempt} 次尝试超时")
        except httpx.ConnectError as e:
            print(f"[ERROR] 第 {attempt} 次尝试连接错误: {e}")
        except httpx.HTTPStatusError as e:
            print(f"[ERROR] 第 {attempt} 次尝试 HTTP 错误: {e}")
        except Exception as e:
            print(f"[ERROR] 第 {attempt} 次尝试发生未知错误: {e}")

        # Not the last attempt: wait briefly, then retry.
        # FIX: removed the redundant `import time` that sat inside the loop;
        # `time` is already imported at module level.
        if attempt < max_retries:
            print(f"[INFO] 等待 2 秒后重试...")
            time.sleep(2)

    # All retries exhausted.
    print(f"[FAILED] 经过 {max_retries} 次尝试后仍然失败")
    return "failed"
|
|
|
|
|
|
def call_llm_generate_with_anthropic(task_data, max_retries=3):
    """Call MiniMax's Anthropic-compatible API to generate content.

    Uses two nested retry loops: an outer loop dedicated to 529
    (server-overloaded) errors and an inner loop for ordinary errors.

    Args:
        task_data: dict with final_prompt, system_prompt, user_prompt
            and id (the record ID, used only for logging).
        max_retries: maximum attempts for non-529 errors, default 3.

    Returns:
        str: the generated content on success, or a "[FAILED] ..." /
        "[ERROR] ..." message on failure (note: unlike call_llm_generate,
        this never returns the bare literal "failed").
    """
    if task_data is None:
        error_msg = "[ERROR] task_data 为 None,无法调用 LLM"
        print(error_msg)
        return error_msg

    # Pull the prompt fields off the task payload.
    final_prompt = task_data.get('final_prompt', '')
    system_prompt = task_data.get('system_prompt', '')
    user_prompt = task_data.get('user_prompt', '')
    record_id = task_data.get('id', 'unknown')

    print(f"[INFO] 开始为 record_id={record_id} 调用 MiniMax Anthropic 接口")

    # Configure the Anthropic-compatible client.
    # SECURITY NOTE(review): API key is hard-coded in source. It should be
    # loaded from an environment variable or secrets store, and this key
    # rotated, before this file is shared or committed anywhere public.
    ANTHROPIC_API_KEY = "sk-cp-l_as8mjqPhsOIny9IFKZ8jzA92z1c0eRwchldhEf4KzQjs9cjVknV2o7VNCcvYUXsXFq7uF4aSgp2RxxmUHLXwPGKgIvzedM70_XUIXiBB3gu_UmLDQLfh4"
    ANTHROPIC_BASE_URL = "https://api.minimaxi.com/anthropic"

    client = anthropic.Anthropic(
        api_key=ANTHROPIC_API_KEY,
        base_url=ANTHROPIC_BASE_URL
    )

    # Dedicated retry policy for 529 (server overloaded) errors.
    MAX_RETRIES_529 = 30  # max number of 529 retries
    RETRY_DELAY_529 = 10  # seconds to wait between 529 retries

    total_retry = 1  # global attempt counter across both loops, for logging
    for attempt_529 in range(1, MAX_RETRIES_529 + 1):
        # Inner loop: retries for ordinary (non-529) errors.
        for attempt in range(1, max_retries + 1):
            try:
                print(f"[INFO] 第 {total_retry} 次尝试调用 MiniMax...")
                total_retry += 1

                # Build the message list.
                messages = []

                # final_prompt takes precedence; fall back to user_prompt.
                user_content = final_prompt if final_prompt else user_prompt
                if user_content:
                    messages.append({
                        "role": "user",
                        "content": [
                            {
                                "type": "text",
                                "text": user_content
                            }
                        ]
                    })

                # Stream the response: generation may run longer than 10
                # minutes, which would exceed a non-streaming request window.
                generated_content = ""
                with client.messages.stream(
                    model="MiniMax-M2.7",
                    system=system_prompt if system_prompt else "You are a helpful assistant.",
                    messages=messages,
                    max_tokens=32768
                ) as stream:
                    for text in stream.text_stream:
                        generated_content += text

                # Surface a thinking trace if present (streaming mode
                # normally does not return a thinking block).
                if hasattr(stream, 'thinking') and stream.thinking:
                    print(f"[DEBUG] 思考过程: {stream.thinking[:100]}...")

                if generated_content:
                    print(f"[SUCCESS] 第 {attempt} 次尝试成功,生成长度: {len(generated_content)} 字符")
                    return generated_content
                else:
                    print(f"[WARNING] 第 {attempt} 次尝试返回空内容")

            except Exception as e:
                # Classify 529 / overloaded errors by message text.
                error_str = str(e)
                if "529" in error_str or "overloaded" in error_str.lower():
                    print(f"[WARNING] 遇到服务器拥挤 (529) 错误: {e}")
                    # Break out of the inner loop into the 529 retry logic.
                    break
                else:
                    print(f"[ERROR] 第 {attempt} 次尝试发生错误: {e}")

            # Not the last ordinary attempt: wait, then retry.
            if attempt < max_retries:
                print(f"[INFO] 等待 2 秒后重试...")
                time.sleep(2)
        else:
            # for/else: the inner loop finished without break, i.e. no 529
            # occurred and the ordinary retries are exhausted — give up.
            error_msg = f"[FAILED] 经过 {max_retries} 次尝试后仍然失败"
            print(error_msg)
            return error_msg

        # Reaching here means the inner loop broke on a 529; do an outer
        # retry with a longer delay.
        if attempt_529 < MAX_RETRIES_529:
            print(f"[INFO] 遇到服务器拥挤 (529),第 {attempt_529}/{MAX_RETRIES_529} 次重试,等待 {RETRY_DELAY_529} 秒...")
            time.sleep(RETRY_DELAY_529)
        else:
            error_msg = f"[FAILED] 经过 {MAX_RETRIES_529} 次 529 重试后仍然失败"
            print(error_msg)
            return error_msg

    # Defensive fallback: the outer loop always returns on its final
    # iteration, so this is effectively unreachable.
    error_msg = f"[FAILED] 经过 {MAX_RETRIES_529} 次 529 重试后仍然失败"
    print(error_msg)
    return error_msg
|
|
|
|
|
|
def upload_llm_result(odoo_base_url, upload_data):
    """Upload an LLM generation result back to Odoo.

    Args:
        odoo_base_url: base URL of the Odoo service.
        upload_data: dict carrying:
            - record_id: int, the record ID
            - status: str, "success" or "failed"
            - result: str, the generated content on success
            - error_message: str, the error description on failure

    Returns:
        bool: True when the upload was acknowledged as successful,
        False on any error or unexpected response.
    """
    # Result-upload endpoint, with a JSON content type.
    endpoint = f"{odoo_base_url}/api/alpha-idea/result"
    request_headers = {"Content-Type": "application/json"}

    try:
        print(f"[INFO] 正在上传结果到 Odoo: {endpoint}")
        print(f"[INFO] 上传数据: {json.dumps(upload_data, ensure_ascii=False)}")

        # POST the payload with a 30-second timeout; raise on HTTP errors.
        resp = httpx.post(
            endpoint,
            headers=request_headers,
            json=upload_data,
            timeout=30,
        )
        resp.raise_for_status()

        body = resp.json()
        print(f"[INFO] 上传结果接口返回: {json.dumps(body, ensure_ascii=False)}")

        # The server signals success either via status == "success" or a
        # boolean success flag.
        accepted = isinstance(body, dict) and (
            body.get('status') == 'success' or body.get('success') is True
        )
        if accepted:
            print(f"[SUCCESS] 结果上传成功")
            return True

        # Anything else is treated as a (possible) failure.
        print(f"[WARNING] 结果上传可能失败: {body}")
        return False

    except httpx.TimeoutException:
        print("[ERROR] 上传结果超时")
        return False
    except httpx.ConnectError as e:
        print(f"[ERROR] 上传结果连接错误: {e}")
        return False
    except httpx.HTTPStatusError as e:
        print(f"[ERROR] 上传结果 HTTP 错误: {e}")
        return False
    except Exception as e:
        print(f"[ERROR] 上传结果发生未知错误: {e}")
        return False
|
|
|
|
|
|
if __name__ == "__main__":
    # Odoo service base URL.
    ODOO_BASE_URL = "https://quantify.erhe.top"

    print("开始获取 LLM 生成任务...")

    # Manually toggled backend selector: 1 -> local Ollama,
    # anything else -> MiniMax Anthropic-compatible API.
    llm_select = 0

    while True:
        # Fetch the next pending task; falsy means nothing left to do.
        task_data = fetch_llm_generate_task(odoo_base_url=ODOO_BASE_URL)

        if not task_data:
            break

        if llm_select == 1:
            llm_generate_result = call_llm_generate(task_data)
        else:
            llm_generate_result = call_llm_generate_with_anthropic(task_data)

        if not llm_generate_result:
            continue

        # Log a preview of the result.
        print(llm_generate_result[:100])

        # A result counts as failed when it starts with [ERROR]/[FAILED],
        # or when it is the bare "failed" sentinel from call_llm_generate.
        # BUGFIX: "failed" was previously uploaded with status 'success'
        # because only the [ERROR]/[FAILED] prefixes were checked.
        is_failed = (
            llm_generate_result == 'failed'
            or llm_generate_result.startswith('[ERROR]')
            or llm_generate_result.startswith('[FAILED]')
        )

        if not is_failed:
            print("开始上传 LLM 生成结果...")
            upload_data = {
                'record_id': task_data['id'],
                'status': 'success',
                'result': llm_generate_result
            }
        else:
            print("开始上传 LLM 生成结果失败...")
            upload_data = {
                'record_id': task_data['id'],
                'status': 'failed',
                'result': '',
                'error_message': llm_generate_result
            }

        # Report the outcome back to Odoo.
        upload_success = upload_llm_result(ODOO_BASE_URL, upload_data)
        if upload_success:
            print("[SUCCESS] 当次流程完成")
        else:
            print("[FAILED] 结果上传失败")

        # Pace the loop between tasks.
        sleep_time = 60
        print(f"等待 {sleep_time} 秒后开始下一次循环...")
        time.sleep(sleep_time)