Commit author: 666ghj

Standardize LLM clients and expose configurable base URLs.

@@ -12,7 +12,7 @@ import re @@ -12,7 +12,7 @@ import re
12 12
13 # 添加项目根目录到Python路径以导入config 13 # 添加项目根目录到Python路径以导入config
14 sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) 14 sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
15 -from config import GUIJI_QWEN3_API_KEY 15 +from config import GUIJI_QWEN3_API_KEY, GUIJI_QWEN3_BASE_URL
16 16
17 # 添加utils目录到Python路径 17 # 添加utils目录到Python路径
18 current_dir = os.path.dirname(os.path.abspath(__file__)) 18 current_dir = os.path.dirname(os.path.abspath(__file__))
@@ -30,21 +30,24 @@ class ForumHost: @@ -30,21 +30,24 @@ class ForumHost:
30 使用Qwen3-235B模型作为智能主持人 30 使用Qwen3-235B模型作为智能主持人
31 """ 31 """
32 32
33 - def __init__(self, api_key: str = None): 33 + def __init__(self, api_key: str = None, base_url: Optional[str] = None):
34 """ 34 """
35 初始化论坛主持人 35 初始化论坛主持人
36 36
37 Args: 37 Args:
38 api_key: 硅基流动API密钥,如果不提供则从配置文件读取 38 api_key: 硅基流动API密钥,如果不提供则从配置文件读取
  39 + base_url: 接口基础地址,默认使用配置文件提供的SiliconFlow地址
39 """ 40 """
40 self.api_key = api_key or GUIJI_QWEN3_API_KEY 41 self.api_key = api_key or GUIJI_QWEN3_API_KEY
41 42
42 if not self.api_key: 43 if not self.api_key:
43 raise ValueError("未找到硅基流动API密钥,请在config.py中设置GUIJI_QWEN3_API_KEY") 44 raise ValueError("未找到硅基流动API密钥,请在config.py中设置GUIJI_QWEN3_API_KEY")
44 45
  46 + self.base_url = base_url or GUIJI_QWEN3_BASE_URL
  47 +
45 self.client = OpenAI( 48 self.client = OpenAI(
46 api_key=self.api_key, 49 api_key=self.api_key,
47 - base_url="https://api.siliconflow.cn/v1" 50 + base_url=self.base_url
48 ) 51 )
49 self.model = "Qwen/Qwen3-235B-A22B-Instruct-2507" # Use larger model variant 52 self.model = "Qwen/Qwen3-235B-A22B-Instruct-2507" # Use larger model variant
50 53
@@ -72,7 +72,8 @@ class DeepSearchAgent: @@ -72,7 +72,8 @@ class DeepSearchAgent:
72 if self.config.default_llm_provider == "deepseek": 72 if self.config.default_llm_provider == "deepseek":
73 return DeepSeekLLM( 73 return DeepSeekLLM(
74 api_key=self.config.deepseek_api_key, 74 api_key=self.config.deepseek_api_key,
75 - model_name=self.config.deepseek_model 75 + model_name=self.config.deepseek_model,
  76 + base_url=self.config.deepseek_base_url
76 ) 77 )
77 elif self.config.default_llm_provider == "openai": 78 elif self.config.default_llm_provider == "openai":
78 return OpenAILLM( 79 return OpenAILLM(
@@ -82,7 +83,8 @@ class DeepSearchAgent: @@ -82,7 +83,8 @@ class DeepSearchAgent:
82 elif self.config.default_llm_provider == "kimi": 83 elif self.config.default_llm_provider == "kimi":
83 return KimiLLM( 84 return KimiLLM(
84 api_key=self.config.kimi_api_key, 85 api_key=self.config.kimi_api_key,
85 - model_name=self.config.kimi_model 86 + model_name=self.config.kimi_model,
  87 + base_url=self.config.kimi_base_url
86 ) 88 )
87 else: 89 else:
88 raise ValueError(f"不支持的LLM提供商: {self.config.default_llm_provider}") 90 raise ValueError(f"不支持的LLM提供商: {self.config.default_llm_provider}")
@@ -9,6 +9,8 @@ from openai import OpenAI @@ -9,6 +9,8 @@ from openai import OpenAI
9 from .base import BaseLLM 9 from .base import BaseLLM
10 import sys 10 import sys
11 11
  12 +DEFAULT_DEEPSEEK_BASE_URL = "https://api.deepseek.com"
  13 +
12 # 添加utils目录到Python路径 14 # 添加utils目录到Python路径
13 current_dir = os.path.dirname(os.path.abspath(__file__)) 15 current_dir = os.path.dirname(os.path.abspath(__file__))
14 root_dir = os.path.dirname(os.path.dirname(current_dir)) 16 root_dir = os.path.dirname(os.path.dirname(current_dir))
@@ -30,13 +32,14 @@ except ImportError: @@ -30,13 +32,14 @@ except ImportError:
30 class DeepSeekLLM(BaseLLM): 32 class DeepSeekLLM(BaseLLM):
31 """DeepSeek LLM实现类""" 33 """DeepSeek LLM实现类"""
32 34
33 - def __init__(self, api_key: Optional[str] = None, model_name: Optional[str] = None): 35 + def __init__(self, api_key: Optional[str] = None, model_name: Optional[str] = None, base_url: Optional[str] = None):
34 """ 36 """
35 初始化DeepSeek客户端 37 初始化DeepSeek客户端
36 38
37 Args: 39 Args:
38 api_key: DeepSeek API密钥,如果不提供则从环境变量读取 40 api_key: DeepSeek API密钥,如果不提供则从环境变量读取
39 model_name: 模型名称,默认使用deepseek-chat 41 model_name: 模型名称,默认使用deepseek-chat
  42 + base_url: DeepSeek API基础地址
40 """ 43 """
41 if api_key is None: 44 if api_key is None:
42 api_key = os.getenv("DEEPSEEK_API_KEY") 45 api_key = os.getenv("DEEPSEEK_API_KEY")
@@ -45,10 +48,12 @@ class DeepSeekLLM(BaseLLM): @@ -45,10 +48,12 @@ class DeepSeekLLM(BaseLLM):
45 48
46 super().__init__(api_key, model_name) 49 super().__init__(api_key, model_name)
47 50
  51 + self.base_url = base_url or os.getenv("DEEPSEEK_BASE_URL") or DEFAULT_DEEPSEEK_BASE_URL
  52 +
48 # 初始化OpenAI客户端,使用DeepSeek的endpoint 53 # 初始化OpenAI客户端,使用DeepSeek的endpoint
49 self.client = OpenAI( 54 self.client = OpenAI(
50 api_key=self.api_key, 55 api_key=self.api_key,
51 - base_url="https://api.deepseek.com" 56 + base_url=self.base_url
52 ) 57 )
53 58
54 self.default_model = model_name or self.get_default_model() 59 self.default_model = model_name or self.get_default_model()
@@ -110,5 +115,5 @@ class DeepSeekLLM(BaseLLM): @@ -110,5 +115,5 @@ class DeepSeekLLM(BaseLLM):
110 return { 115 return {
111 "provider": "DeepSeek", 116 "provider": "DeepSeek",
112 "model": self.default_model, 117 "model": self.default_model,
113 - "api_base": "https://api.deepseek.com" 118 + "api_base": self.base_url
114 } 119 }
@@ -10,6 +10,8 @@ from openai import OpenAI @@ -10,6 +10,8 @@ from openai import OpenAI
10 # 假设 .base 模块和 BaseLLM 类已存在 10 # 假设 .base 模块和 BaseLLM 类已存在
11 from .base import BaseLLM 11 from .base import BaseLLM
12 12
  13 +DEFAULT_KIMI_BASE_URL = "https://api.moonshot.cn/v1"
  14 +
13 # 添加utils目录到Python路径并导入重试模块 15 # 添加utils目录到Python路径并导入重试模块
14 try: 16 try:
15 current_dir = os.path.dirname(os.path.abspath(__file__)) 17 current_dir = os.path.dirname(os.path.abspath(__file__))
@@ -30,13 +32,14 @@ except ImportError: @@ -30,13 +32,14 @@ except ImportError:
30 class KimiLLM(BaseLLM): 32 class KimiLLM(BaseLLM):
31 """Kimi LLM实现类""" 33 """Kimi LLM实现类"""
32 34
33 - def __init__(self, api_key: Optional[str] = None, model_name: Optional[str] = None): 35 + def __init__(self, api_key: Optional[str] = None, model_name: Optional[str] = None, base_url: Optional[str] = None):
34 """ 36 """
35 初始化Kimi客户端 37 初始化Kimi客户端
36 38
37 Args: 39 Args:
38 api_key: Kimi API密钥,如果不提供则从环境变量读取 40 api_key: Kimi API密钥,如果不提供则从环境变量读取
39 model_name: 模型名称,默认使用kimi-k2-0711-preview 41 model_name: 模型名称,默认使用kimi-k2-0711-preview
  42 + base_url: Kimi API基础地址
40 """ 43 """
41 if api_key is None: 44 if api_key is None:
42 api_key = os.getenv("KIMI_API_KEY") 45 api_key = os.getenv("KIMI_API_KEY")
@@ -45,10 +48,12 @@ class KimiLLM(BaseLLM): @@ -45,10 +48,12 @@ class KimiLLM(BaseLLM):
45 48
46 super().__init__(api_key, model_name) 49 super().__init__(api_key, model_name)
47 50
  51 + self.base_url = base_url or os.getenv("KIMI_BASE_URL") or DEFAULT_KIMI_BASE_URL
  52 +
48 # 初始化OpenAI客户端,使用Kimi的endpoint 53 # 初始化OpenAI客户端,使用Kimi的endpoint
49 self.client = OpenAI( 54 self.client = OpenAI(
50 api_key=self.api_key, 55 api_key=self.api_key,
51 - base_url="https://api.moonshot.cn/v1" 56 + base_url=self.base_url
52 ) 57 )
53 58
54 self.default_model = model_name or self.get_default_model() 59 self.default_model = model_name or self.get_default_model()
@@ -136,7 +141,7 @@ class KimiLLM(BaseLLM): @@ -136,7 +141,7 @@ class KimiLLM(BaseLLM):
136 return { 141 return {
137 "provider": "Kimi", 142 "provider": "Kimi",
138 "model": self.default_model, 143 "model": self.default_model,
139 - "api_base": "https://api.moonshot.cn/v1", 144 + "api_base": self.base_url,
140 "max_context_length": "长文本支持(200K+ tokens)" 145 "max_context_length": "长文本支持(200K+ tokens)"
141 } 146 }
142 147
@@ -12,7 +12,7 @@ from dataclasses import dataclass @@ -12,7 +12,7 @@ from dataclasses import dataclass
12 12
13 # 添加项目根目录到Python路径以导入config 13 # 添加项目根目录到Python路径以导入config
14 sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(__file__)))) 14 sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))
15 -from config import GUIJI_QWEN3_API_KEY 15 +from config import GUIJI_QWEN3_API_KEY, GUIJI_QWEN3_BASE_URL
16 16
17 # 添加utils目录到Python路径 17 # 添加utils目录到Python路径
18 current_dir = os.path.dirname(os.path.abspath(__file__)) 18 current_dir = os.path.dirname(os.path.abspath(__file__))
@@ -38,21 +38,24 @@ class KeywordOptimizer: @@ -38,21 +38,24 @@ class KeywordOptimizer:
38 使用硅基流动的Qwen3模型将Agent生成的搜索词优化为更贴近真实舆情的关键词 38 使用硅基流动的Qwen3模型将Agent生成的搜索词优化为更贴近真实舆情的关键词
39 """ 39 """
40 40
41 - def __init__(self, api_key: str = None): 41 + def __init__(self, api_key: str = None, base_url: str = None):
42 """ 42 """
43 初始化关键词优化器 43 初始化关键词优化器
44 44
45 Args: 45 Args:
46 api_key: 硅基流动API密钥,如果不提供则从配置文件读取 46 api_key: 硅基流动API密钥,如果不提供则从配置文件读取
  47 + base_url: 接口基础地址,默认使用配置文件提供的SiliconFlow地址
47 """ 48 """
48 self.api_key = api_key or GUIJI_QWEN3_API_KEY 49 self.api_key = api_key or GUIJI_QWEN3_API_KEY
49 50
50 if not self.api_key: 51 if not self.api_key:
51 raise ValueError("未找到硅基流动API密钥,请在config.py中设置GUIJI_QWEN3_API_KEY") 52 raise ValueError("未找到硅基流动API密钥,请在config.py中设置GUIJI_QWEN3_API_KEY")
52 53
  54 + self.base_url = base_url or GUIJI_QWEN3_BASE_URL
  55 +
53 self.client = OpenAI( 56 self.client = OpenAI(
54 api_key=self.api_key, 57 api_key=self.api_key,
55 - base_url="https://api.siliconflow.cn/v1" 58 + base_url=self.base_url
56 ) 59 )
57 self.model = "Qwen/Qwen3-30B-A3B-Instruct-2507" 60 self.model = "Qwen/Qwen3-30B-A3B-Instruct-2507"
58 61
@@ -15,6 +15,9 @@ class Config: @@ -15,6 +15,9 @@ class Config:
15 deepseek_api_key: Optional[str] = None 15 deepseek_api_key: Optional[str] = None
16 openai_api_key: Optional[str] = None 16 openai_api_key: Optional[str] = None
17 kimi_api_key: Optional[str] = None 17 kimi_api_key: Optional[str] = None
  18 + deepseek_base_url: str = "https://api.deepseek.com"
  19 + openai_base_url: Optional[str] = None
  20 + kimi_base_url: str = "https://api.moonshot.cn/v1"
18 21
19 # 数据库配置 22 # 数据库配置
20 db_host: Optional[str] = None 23 db_host: Optional[str] = None
@@ -85,6 +88,10 @@ class Config: @@ -85,6 +88,10 @@ class Config:
85 return cls( 88 return cls(
86 deepseek_api_key=getattr(config_module, "DEEPSEEK_API_KEY", None), 89 deepseek_api_key=getattr(config_module, "DEEPSEEK_API_KEY", None),
87 openai_api_key=getattr(config_module, "OPENAI_API_KEY", None), 90 openai_api_key=getattr(config_module, "OPENAI_API_KEY", None),
  91 + kimi_api_key=getattr(config_module, "KIMI_API_KEY", None),
  92 + deepseek_base_url=getattr(config_module, "DEEPSEEK_BASE_URL", "https://api.deepseek.com"),
  93 + openai_base_url=getattr(config_module, "OPENAI_BASE_URL", None),
  94 + kimi_base_url=getattr(config_module, "KIMI_BASE_URL", "https://api.moonshot.cn/v1"),
88 95
89 db_host=getattr(config_module, "DB_HOST", None), 96 db_host=getattr(config_module, "DB_HOST", None),
90 db_user=getattr(config_module, "DB_USER", None), 97 db_user=getattr(config_module, "DB_USER", None),
@@ -131,6 +138,9 @@ class Config: @@ -131,6 +138,9 @@ class Config:
131 deepseek_api_key=config_dict.get("DEEPSEEK_API_KEY"), 138 deepseek_api_key=config_dict.get("DEEPSEEK_API_KEY"),
132 openai_api_key=config_dict.get("OPENAI_API_KEY"), 139 openai_api_key=config_dict.get("OPENAI_API_KEY"),
133 kimi_api_key=config_dict.get("KIMI_API_KEY"), 140 kimi_api_key=config_dict.get("KIMI_API_KEY"),
  141 + deepseek_base_url=config_dict.get("DEEPSEEK_BASE_URL", "https://api.deepseek.com"),
  142 + openai_base_url=config_dict.get("OPENAI_BASE_URL"),
  143 + kimi_base_url=config_dict.get("KIMI_BASE_URL", "https://api.moonshot.cn/v1"),
134 144
135 db_host=config_dict.get("DB_HOST"), 145 db_host=config_dict.get("DB_HOST"),
136 db_user=config_dict.get("DB_USER"), 146 db_user=config_dict.get("DB_USER"),
@@ -60,7 +60,8 @@ class DeepSearchAgent: @@ -60,7 +60,8 @@ class DeepSearchAgent:
60 if self.config.default_llm_provider == "deepseek": 60 if self.config.default_llm_provider == "deepseek":
61 return DeepSeekLLM( 61 return DeepSeekLLM(
62 api_key=self.config.deepseek_api_key, 62 api_key=self.config.deepseek_api_key,
63 - model_name=self.config.deepseek_model 63 + model_name=self.config.deepseek_model,
  64 + base_url=self.config.deepseek_base_url
64 ) 65 )
65 elif self.config.default_llm_provider == "openai": 66 elif self.config.default_llm_provider == "openai":
66 return OpenAILLM( 67 return OpenAILLM(
@@ -70,7 +71,8 @@ class DeepSearchAgent: @@ -70,7 +71,8 @@ class DeepSearchAgent:
70 elif self.config.default_llm_provider == "gemini": 71 elif self.config.default_llm_provider == "gemini":
71 return GeminiLLM( 72 return GeminiLLM(
72 api_key=self.config.gemini_api_key, 73 api_key=self.config.gemini_api_key,
73 - model_name=self.config.gemini_model 74 + model_name=self.config.gemini_model,
  75 + base_url=self.config.gemini_base_url
74 ) 76 )
75 else: 77 else:
76 raise ValueError(f"不支持的LLM提供商: {self.config.default_llm_provider}") 78 raise ValueError(f"不支持的LLM提供商: {self.config.default_llm_provider}")
@@ -9,6 +9,8 @@ from typing import Optional, Dict, Any @@ -9,6 +9,8 @@ from typing import Optional, Dict, Any
9 from openai import OpenAI 9 from openai import OpenAI
10 from .base import BaseLLM 10 from .base import BaseLLM
11 11
  12 +DEFAULT_DEEPSEEK_BASE_URL = "https://api.deepseek.com"
  13 +
12 # 添加utils目录到Python路径并导入重试模块 14 # 添加utils目录到Python路径并导入重试模块
13 try: 15 try:
14 current_dir = os.path.dirname(os.path.abspath(__file__)) 16 current_dir = os.path.dirname(os.path.abspath(__file__))
@@ -29,13 +31,14 @@ except ImportError: @@ -29,13 +31,14 @@ except ImportError:
29 class DeepSeekLLM(BaseLLM): 31 class DeepSeekLLM(BaseLLM):
30 """DeepSeek LLM实现类""" 32 """DeepSeek LLM实现类"""
31 33
32 - def __init__(self, api_key: Optional[str] = None, model_name: Optional[str] = None): 34 + def __init__(self, api_key: Optional[str] = None, model_name: Optional[str] = None, base_url: Optional[str] = None):
33 """ 35 """
34 初始化DeepSeek客户端 36 初始化DeepSeek客户端
35 37
36 Args: 38 Args:
37 api_key: DeepSeek API密钥,如果不提供则从环境变量读取 39 api_key: DeepSeek API密钥,如果不提供则从环境变量读取
38 model_name: 模型名称,默认使用deepseek-chat 40 model_name: 模型名称,默认使用deepseek-chat
  41 + base_url: DeepSeek API基础地址
39 """ 42 """
40 if api_key is None: 43 if api_key is None:
41 api_key = os.getenv("DEEPSEEK_API_KEY") 44 api_key = os.getenv("DEEPSEEK_API_KEY")
@@ -44,10 +47,12 @@ class DeepSeekLLM(BaseLLM): @@ -44,10 +47,12 @@ class DeepSeekLLM(BaseLLM):
44 47
45 super().__init__(api_key, model_name) 48 super().__init__(api_key, model_name)
46 49
  50 + self.base_url = base_url or os.getenv("DEEPSEEK_BASE_URL") or DEFAULT_DEEPSEEK_BASE_URL
  51 +
47 # 初始化OpenAI客户端,使用DeepSeek的endpoint 52 # 初始化OpenAI客户端,使用DeepSeek的endpoint
48 self.client = OpenAI( 53 self.client = OpenAI(
49 api_key=self.api_key, 54 api_key=self.api_key,
50 - base_url="https://api.deepseek.com" 55 + base_url=self.base_url
51 ) 56 )
52 57
53 self.default_model = model_name or self.get_default_model() 58 self.default_model = model_name or self.get_default_model()
@@ -109,5 +114,5 @@ class DeepSeekLLM(BaseLLM): @@ -109,5 +114,5 @@ class DeepSeekLLM(BaseLLM):
109 return { 114 return {
110 "provider": "DeepSeek", 115 "provider": "DeepSeek",
111 "model": self.default_model, 116 "model": self.default_model,
112 - "api_base": "https://api.deepseek.com" 117 + "api_base": self.base_url
113 } 118 }
@@ -9,6 +9,8 @@ from typing import Optional, Dict, Any @@ -9,6 +9,8 @@ from typing import Optional, Dict, Any
9 from openai import OpenAI 9 from openai import OpenAI
10 from .base import BaseLLM 10 from .base import BaseLLM
11 11
  12 +DEFAULT_GEMINI_BASE_URL = "https://www.chataiapi.com/v1"
  13 +
12 # 添加utils目录到Python路径并导入重试模块 14 # 添加utils目录到Python路径并导入重试模块
13 try: 15 try:
14 current_dir = os.path.dirname(os.path.abspath(__file__)) 16 current_dir = os.path.dirname(os.path.abspath(__file__))
@@ -29,13 +31,14 @@ except ImportError: @@ -29,13 +31,14 @@ except ImportError:
29 class GeminiLLM(BaseLLM): 31 class GeminiLLM(BaseLLM):
30 """Gemini LLM实现类""" 32 """Gemini LLM实现类"""
31 33
32 - def __init__(self, api_key: Optional[str] = None, model_name: Optional[str] = None): 34 + def __init__(self, api_key: Optional[str] = None, model_name: Optional[str] = None, base_url: Optional[str] = None):
33 """ 35 """
34 初始化Gemini客户端 36 初始化Gemini客户端
35 37
36 Args: 38 Args:
37 api_key: Gemini API密钥,如果不提供则从环境变量读取 39 api_key: Gemini API密钥,如果不提供则从环境变量读取
38 model_name: 模型名称,默认使用gemini-2.5-pro 40 model_name: 模型名称,默认使用gemini-2.5-pro
  41 + base_url: Gemini API基础地址
39 """ 42 """
40 if api_key is None: 43 if api_key is None:
41 api_key = os.getenv("GEMINI_API_KEY") 44 api_key = os.getenv("GEMINI_API_KEY")
@@ -44,10 +47,12 @@ class GeminiLLM(BaseLLM): @@ -44,10 +47,12 @@ class GeminiLLM(BaseLLM):
44 47
45 super().__init__(api_key, model_name) 48 super().__init__(api_key, model_name)
46 49
  50 + self.base_url = base_url or os.getenv("GEMINI_BASE_URL") or DEFAULT_GEMINI_BASE_URL
  51 +
47 # 初始化OpenAI客户端,使用Gemini的中转endpoint 52 # 初始化OpenAI客户端,使用Gemini的中转endpoint
48 self.client = OpenAI( 53 self.client = OpenAI(
49 api_key=self.api_key, 54 api_key=self.api_key,
50 - base_url="https://www.chataiapi.com/v1" 55 + base_url=self.base_url
51 ) 56 )
52 57
53 self.default_model = model_name or self.get_default_model() 58 self.default_model = model_name or self.get_default_model()
@@ -109,5 +114,5 @@ class GeminiLLM(BaseLLM): @@ -109,5 +114,5 @@ class GeminiLLM(BaseLLM):
109 return { 114 return {
110 "provider": "Gemini", 115 "provider": "Gemini",
111 "model": self.default_model, 116 "model": self.default_model,
112 - "api_base": "https://www.chataiapi.com/v1" 117 + "api_base": self.base_url
113 } 118 }
@@ -16,6 +16,9 @@ class Config: @@ -16,6 +16,9 @@ class Config:
16 openai_api_key: Optional[str] = None 16 openai_api_key: Optional[str] = None
17 gemini_api_key: Optional[str] = None 17 gemini_api_key: Optional[str] = None
18 bocha_api_key: Optional[str] = None 18 bocha_api_key: Optional[str] = None
  19 + deepseek_base_url: str = "https://api.deepseek.com"
  20 + openai_base_url: Optional[str] = None
  21 + gemini_base_url: str = "https://www.chataiapi.com/v1"
19 22
20 # 模型配置 23 # 模型配置
21 default_llm_provider: str = "deepseek" # deepseek、openai 或 gemini 24 default_llm_provider: str = "deepseek" # deepseek、openai 或 gemini
@@ -72,6 +75,9 @@ class Config: @@ -72,6 +75,9 @@ class Config:
72 deepseek_api_key=getattr(config_module, "DEEPSEEK_API_KEY", None), 75 deepseek_api_key=getattr(config_module, "DEEPSEEK_API_KEY", None),
73 openai_api_key=getattr(config_module, "OPENAI_API_KEY", None), 76 openai_api_key=getattr(config_module, "OPENAI_API_KEY", None),
74 gemini_api_key=getattr(config_module, "GEMINI_API_KEY", None), 77 gemini_api_key=getattr(config_module, "GEMINI_API_KEY", None),
  78 + deepseek_base_url=getattr(config_module, "DEEPSEEK_BASE_URL", "https://api.deepseek.com"),
  79 + openai_base_url=getattr(config_module, "OPENAI_BASE_URL", None),
  80 + gemini_base_url=getattr(config_module, "GEMINI_BASE_URL", "https://www.chataiapi.com/v1"),
75 bocha_api_key=getattr(config_module, "BOCHA_API_KEY", None), 81 bocha_api_key=getattr(config_module, "BOCHA_API_KEY", None),
76 default_llm_provider=getattr(config_module, "DEFAULT_LLM_PROVIDER", "deepseek"), 82 default_llm_provider=getattr(config_module, "DEFAULT_LLM_PROVIDER", "deepseek"),
77 deepseek_model=getattr(config_module, "DEEPSEEK_MODEL", "deepseek-chat"), 83 deepseek_model=getattr(config_module, "DEEPSEEK_MODEL", "deepseek-chat"),
@@ -100,6 +106,9 @@ class Config: @@ -100,6 +106,9 @@ class Config:
100 deepseek_api_key=config_dict.get("DEEPSEEK_API_KEY"), 106 deepseek_api_key=config_dict.get("DEEPSEEK_API_KEY"),
101 openai_api_key=config_dict.get("OPENAI_API_KEY"), 107 openai_api_key=config_dict.get("OPENAI_API_KEY"),
102 gemini_api_key=config_dict.get("GEMINI_API_KEY"), 108 gemini_api_key=config_dict.get("GEMINI_API_KEY"),
  109 + deepseek_base_url=config_dict.get("DEEPSEEK_BASE_URL", "https://api.deepseek.com"),
  110 + openai_base_url=config_dict.get("OPENAI_BASE_URL"),
  111 + gemini_base_url=config_dict.get("GEMINI_BASE_URL", "https://www.chataiapi.com/v1"),
103 bocha_api_key=config_dict.get("BOCHA_API_KEY"), 112 bocha_api_key=config_dict.get("BOCHA_API_KEY"),
104 default_llm_provider=config_dict.get("DEFAULT_LLM_PROVIDER", "deepseek"), 113 default_llm_provider=config_dict.get("DEFAULT_LLM_PROVIDER", "deepseek"),
105 deepseek_model=config_dict.get("DEEPSEEK_MODEL", "deepseek-chat"), 114 deepseek_model=config_dict.get("DEEPSEEK_MODEL", "deepseek-chat"),
@@ -60,7 +60,8 @@ class DeepSearchAgent: @@ -60,7 +60,8 @@ class DeepSearchAgent:
60 if self.config.default_llm_provider == "deepseek": 60 if self.config.default_llm_provider == "deepseek":
61 return DeepSeekLLM( 61 return DeepSeekLLM(
62 api_key=self.config.deepseek_api_key, 62 api_key=self.config.deepseek_api_key,
63 - model_name=self.config.deepseek_model 63 + model_name=self.config.deepseek_model,
  64 + base_url=self.config.deepseek_base_url
64 ) 65 )
65 elif self.config.default_llm_provider == "openai": 66 elif self.config.default_llm_provider == "openai":
66 return OpenAILLM( 67 return OpenAILLM(
@@ -9,6 +9,8 @@ from typing import Optional, Dict, Any @@ -9,6 +9,8 @@ from typing import Optional, Dict, Any
9 from openai import OpenAI 9 from openai import OpenAI
10 from .base import BaseLLM 10 from .base import BaseLLM
11 11
  12 +DEFAULT_DEEPSEEK_BASE_URL = "https://api.deepseek.com"
  13 +
12 # 添加utils目录到Python路径并导入重试模块 14 # 添加utils目录到Python路径并导入重试模块
13 try: 15 try:
14 current_dir = os.path.dirname(os.path.abspath(__file__)) 16 current_dir = os.path.dirname(os.path.abspath(__file__))
@@ -29,13 +31,14 @@ except ImportError: @@ -29,13 +31,14 @@ except ImportError:
29 class DeepSeekLLM(BaseLLM): 31 class DeepSeekLLM(BaseLLM):
30 """DeepSeek LLM实现类""" 32 """DeepSeek LLM实现类"""
31 33
32 - def __init__(self, api_key: Optional[str] = None, model_name: Optional[str] = None): 34 + def __init__(self, api_key: Optional[str] = None, model_name: Optional[str] = None, base_url: Optional[str] = None):
33 """ 35 """
34 初始化DeepSeek客户端 36 初始化DeepSeek客户端
35 37
36 Args: 38 Args:
37 api_key: DeepSeek API密钥,如果不提供则从环境变量读取 39 api_key: DeepSeek API密钥,如果不提供则从环境变量读取
38 model_name: 模型名称,默认使用deepseek-chat 40 model_name: 模型名称,默认使用deepseek-chat
  41 + base_url: DeepSeek API基础地址
39 """ 42 """
40 if api_key is None: 43 if api_key is None:
41 api_key = os.getenv("DEEPSEEK_API_KEY") 44 api_key = os.getenv("DEEPSEEK_API_KEY")
@@ -44,10 +47,12 @@ class DeepSeekLLM(BaseLLM): @@ -44,10 +47,12 @@ class DeepSeekLLM(BaseLLM):
44 47
45 super().__init__(api_key, model_name) 48 super().__init__(api_key, model_name)
46 49
  50 + self.base_url = base_url or os.getenv("DEEPSEEK_BASE_URL") or DEFAULT_DEEPSEEK_BASE_URL
  51 +
47 # 初始化OpenAI客户端,使用DeepSeek的endpoint 52 # 初始化OpenAI客户端,使用DeepSeek的endpoint
48 self.client = OpenAI( 53 self.client = OpenAI(
49 api_key=self.api_key, 54 api_key=self.api_key,
50 - base_url="https://api.deepseek.com" 55 + base_url=self.base_url
51 ) 56 )
52 57
53 self.default_model = model_name or self.get_default_model() 58 self.default_model = model_name or self.get_default_model()
@@ -109,5 +114,5 @@ class DeepSeekLLM(BaseLLM): @@ -109,5 +114,5 @@ class DeepSeekLLM(BaseLLM):
109 return { 114 return {
110 "provider": "DeepSeek", 115 "provider": "DeepSeek",
111 "model": self.default_model, 116 "model": self.default_model,
112 - "api_base": "https://api.deepseek.com" 117 + "api_base": self.base_url
113 } 118 }
@@ -15,6 +15,8 @@ class Config: @@ -15,6 +15,8 @@ class Config:
15 deepseek_api_key: Optional[str] = None 15 deepseek_api_key: Optional[str] = None
16 openai_api_key: Optional[str] = None 16 openai_api_key: Optional[str] = None
17 tavily_api_key: Optional[str] = None 17 tavily_api_key: Optional[str] = None
  18 + deepseek_base_url: str = "https://api.deepseek.com"
  19 + openai_base_url: Optional[str] = None
18 20
19 # 模型配置 21 # 模型配置
20 default_llm_provider: str = "deepseek" # deepseek 或 openai 22 default_llm_provider: str = "deepseek" # deepseek 或 openai
@@ -66,6 +68,8 @@ class Config: @@ -66,6 +68,8 @@ class Config:
66 deepseek_api_key=getattr(config_module, "DEEPSEEK_API_KEY", None), 68 deepseek_api_key=getattr(config_module, "DEEPSEEK_API_KEY", None),
67 openai_api_key=getattr(config_module, "OPENAI_API_KEY", None), 69 openai_api_key=getattr(config_module, "OPENAI_API_KEY", None),
68 tavily_api_key=getattr(config_module, "TAVILY_API_KEY", None), 70 tavily_api_key=getattr(config_module, "TAVILY_API_KEY", None),
  71 + deepseek_base_url=getattr(config_module, "DEEPSEEK_BASE_URL", "https://api.deepseek.com"),
  72 + openai_base_url=getattr(config_module, "OPENAI_BASE_URL", None),
69 default_llm_provider=getattr(config_module, "DEFAULT_LLM_PROVIDER", "deepseek"), 73 default_llm_provider=getattr(config_module, "DEFAULT_LLM_PROVIDER", "deepseek"),
70 deepseek_model=getattr(config_module, "DEEPSEEK_MODEL", "deepseek-chat"), 74 deepseek_model=getattr(config_module, "DEEPSEEK_MODEL", "deepseek-chat"),
71 openai_model=getattr(config_module, "OPENAI_MODEL", "gpt-4o-mini"), 75 openai_model=getattr(config_module, "OPENAI_MODEL", "gpt-4o-mini"),
@@ -93,6 +97,8 @@ class Config: @@ -93,6 +97,8 @@ class Config:
93 deepseek_api_key=config_dict.get("DEEPSEEK_API_KEY"), 97 deepseek_api_key=config_dict.get("DEEPSEEK_API_KEY"),
94 openai_api_key=config_dict.get("OPENAI_API_KEY"), 98 openai_api_key=config_dict.get("OPENAI_API_KEY"),
95 tavily_api_key=config_dict.get("TAVILY_API_KEY"), 99 tavily_api_key=config_dict.get("TAVILY_API_KEY"),
  100 + deepseek_base_url=config_dict.get("DEEPSEEK_BASE_URL", "https://api.deepseek.com"),
  101 + openai_base_url=config_dict.get("OPENAI_BASE_URL"),
96 default_llm_provider=config_dict.get("DEFAULT_LLM_PROVIDER", "deepseek"), 102 default_llm_provider=config_dict.get("DEFAULT_LLM_PROVIDER", "deepseek"),
97 deepseek_model=config_dict.get("DEEPSEEK_MODEL", "deepseek-chat"), 103 deepseek_model=config_dict.get("DEEPSEEK_MODEL", "deepseek-chat"),
98 openai_model=config_dict.get("OPENAI_MODEL", "gpt-4o-mini"), 104 openai_model=config_dict.get("OPENAI_MODEL", "gpt-4o-mini"),
1 <div align="center"> 1 <div align="center">
2 2
3 -<img src="static/image/logo_compressed.png" alt="Weibo Public Opinion Analysis System Logo" width="800"> 3 +<img src="static/image/logo_compressed.png" alt="Weibo Public Opinion Analysis System Logo" width="700">
  4 +
  5 +<a href="https://trendshift.io/repositories/12461" target="_blank"><img src="https://trendshift.io/api/badge/repositories/12461" alt="666ghj/Weibo_PublicOpinion_AnalysisSystem | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>
  6 +
  7 +<a href="https://leaflow.net/" target="_blank"><img src="static/image/Leaflow_logo.png" alt="666ghj/Weibo_PublicOpinion_AnalysisSystem | Leaflow" style="width: 150px;" width="150"/></a>
4 8
5 [![GitHub Stars](https://img.shields.io/github/stars/666ghj/Weibo_PublicOpinion_AnalysisSystem?style=flat-square)](https://github.com/666ghj/Weibo_PublicOpinion_AnalysisSystem/stargazers) 9 [![GitHub Stars](https://img.shields.io/github/stars/666ghj/Weibo_PublicOpinion_AnalysisSystem?style=flat-square)](https://github.com/666ghj/Weibo_PublicOpinion_AnalysisSystem/stargazers)
6 [![GitHub Watchers](https://img.shields.io/github/watchers/666ghj/Weibo_PublicOpinion_AnalysisSystem?style=flat-square)](https://github.com/666ghj/Weibo_PublicOpinion_AnalysisSystem/watchers) 10 [![GitHub Watchers](https://img.shields.io/github/watchers/666ghj/Weibo_PublicOpinion_AnalysisSystem?style=flat-square)](https://github.com/666ghj/Weibo_PublicOpinion_AnalysisSystem/watchers)
7 [![GitHub Forks](https://img.shields.io/github/forks/666ghj/Weibo_PublicOpinion_AnalysisSystem?style=flat-square)](https://github.com/666ghj/Weibo_PublicOpinion_AnalysisSystem/network) 11 [![GitHub Forks](https://img.shields.io/github/forks/666ghj/Weibo_PublicOpinion_AnalysisSystem?style=flat-square)](https://github.com/666ghj/Weibo_PublicOpinion_AnalysisSystem/network)
8 [![GitHub Issues](https://img.shields.io/github/issues/666ghj/Weibo_PublicOpinion_AnalysisSystem?style=flat-square)](https://github.com/666ghj/Weibo_PublicOpinion_AnalysisSystem/issues) 12 [![GitHub Issues](https://img.shields.io/github/issues/666ghj/Weibo_PublicOpinion_AnalysisSystem?style=flat-square)](https://github.com/666ghj/Weibo_PublicOpinion_AnalysisSystem/issues)
  13 +[![GitHub Pull Requests](https://img.shields.io/github/issues-pr/666ghj/Weibo_PublicOpinion_AnalysisSystem?style=flat-square)](https://github.com/666ghj/Weibo_PublicOpinion_AnalysisSystem/pulls)
  14 +
9 [![GitHub License](https://img.shields.io/github/license/666ghj/Weibo_PublicOpinion_AnalysisSystem?style=flat-square)](https://github.com/666ghj/Weibo_PublicOpinion_AnalysisSystem/blob/main/LICENSE) 15 [![GitHub License](https://img.shields.io/github/license/666ghj/Weibo_PublicOpinion_AnalysisSystem?style=flat-square)](https://github.com/666ghj/Weibo_PublicOpinion_AnalysisSystem/blob/main/LICENSE)
  16 +[![Version](https://img.shields.io/badge/version-v1.0.0-green.svg?style=flat-square)](https://github.com/666ghj/Weibo_PublicOpinion_AnalysisSystem)
  17 +[![Docker](https://img.shields.io/badge/Docker-Build-2496ED?style=flat-square&logo=docker&logoColor=white)](https://hub.docker.com/)
  18 +
10 19
11 [English](./README-EN.md) | [中文文档](./README.md) 20 [English](./README-EN.md) | [中文文档](./README.md)
12 21
@@ -221,7 +230,7 @@ playwright install chromium @@ -221,7 +230,7 @@ playwright install chromium
221 230
222 #### 4.1 Configure API Keys 231 #### 4.1 Configure API Keys
223 232
224 -Edit the `config.py` file and fill in your API keys (you can also choose your own models and search proxies): 233 +Edit the `config.py` file and fill in your API keys (you can also choose your own models and search proxies; please see the config file for details):
225 234
226 ```python 235 ```python
227 # MySQL Database Configuration 236 # MySQL Database Configuration
@@ -283,7 +292,9 @@ conda activate your_conda_name @@ -283,7 +292,9 @@ conda activate your_conda_name
283 python app.py 292 python app.py
284 ``` 293 ```
285 294
286 -> Note: Data crawling requires separate operation, see section 5.3 for guidance 295 +> Note 1: After a run is terminated, the Streamlit app might not shut down correctly and may still be occupying the port. If this occurs, find the process that is holding the port and kill it.
  296 +
  297 +> Note 2: Data scraping needs to be performed as a separate operation. Please refer to the instructions in section 5.3.
287 298
288 Visit http://localhost:5000 to use the complete system 299 Visit http://localhost:5000 to use the complete system
289 300
@@ -230,7 +230,7 @@ playwright install chromium @@ -230,7 +230,7 @@ playwright install chromium
230 230
231 #### 4.1 配置API密钥 231 #### 4.1 配置API密钥
232 232
233 -编辑 `config.py` 文件,填入您的API密钥(您也可以选择自己的模型、搜索代理): 233 +编辑 `config.py` 文件,填入您的API密钥(您也可以选择自己的模型、搜索代理,详情见config文件内):
234 234
235 ```python 235 ```python
236 # MySQL数据库配置 236 # MySQL数据库配置
@@ -292,7 +292,9 @@ conda activate your_conda_name @@ -292,7 +292,9 @@ conda activate your_conda_name
292 python app.py 292 python app.py
293 ``` 293 ```
294 294
295 -> 注:数据爬取需要单独操作,见5.3指引 295 +> 注1:一次运行终止后,streamlit app可能结束异常仍然占用端口,此时搜索占用端口的进程kill掉即可
  296 +
  297 +> 注2:数据爬取需要单独操作,见5.3指引
296 298
297 访问 http://localhost:5000 即可使用完整系统 299 访问 http://localhost:5000 即可使用完整系统
298 300
@@ -192,6 +192,7 @@ class ReportAgent: @@ -192,6 +192,7 @@ class ReportAgent:
192 return GeminiLLM( 192 return GeminiLLM(
193 api_key=self.config.gemini_api_key, 193 api_key=self.config.gemini_api_key,
194 model_name=self.config.gemini_model, 194 model_name=self.config.gemini_model,
  195 + base_url=self.config.gemini_base_url,
195 config=self.config # 传入配置对象以支持动态超时设置 196 config=self.config # 传入配置对象以支持动态超时设置
196 ) 197 )
197 else: 198 else:
@@ -9,6 +9,8 @@ from typing import Optional, Dict, Any @@ -9,6 +9,8 @@ from typing import Optional, Dict, Any
9 from openai import OpenAI 9 from openai import OpenAI
10 from .base import BaseLLM 10 from .base import BaseLLM
11 11
  12 +DEFAULT_GEMINI_BASE_URL = "https://www.chataiapi.com/v1"
  13 +
12 # 导入根目录的config 14 # 导入根目录的config
13 try: 15 try:
14 current_dir = os.path.dirname(os.path.abspath(__file__)) 16 current_dir = os.path.dirname(os.path.abspath(__file__))
@@ -50,13 +52,14 @@ except ImportError: @@ -50,13 +52,14 @@ except ImportError:
50 class GeminiLLM(BaseLLM): 52 class GeminiLLM(BaseLLM):
51 """Report Engine Gemini LLM实现类""" 53 """Report Engine Gemini LLM实现类"""
52 54
53 - def __init__(self, api_key: Optional[str] = None, model_name: Optional[str] = None, config=None): 55 + def __init__(self, api_key: Optional[str] = None, model_name: Optional[str] = None, base_url: Optional[str] = None, config=None):
54 """ 56 """
55 初始化Gemini客户端 57 初始化Gemini客户端
56 58
57 Args: 59 Args:
58 api_key: Gemini API密钥,如果不提供则从config或环境变量读取 60 api_key: Gemini API密钥,如果不提供则从config或环境变量读取
59 model_name: 模型名称,默认使用gemini-2.5-pro 61 model_name: 模型名称,默认使用gemini-2.5-pro
  62 + base_url: Gemini API基础地址
60 config: 配置对象,用于获取超时设置 63 config: 配置对象,用于获取超时设置
61 """ 64 """
62 if api_key is None: 65 if api_key is None:
@@ -78,6 +81,13 @@ class GeminiLLM(BaseLLM): @@ -78,6 +81,13 @@ class GeminiLLM(BaseLLM):
78 # 从配置获取超时时间,默认15分钟(适应7分钟平均生成时间) 81 # 从配置获取超时时间,默认15分钟(适应7分钟平均生成时间)
79 timeout = config.api_timeout if config and hasattr(config, 'api_timeout') else 900.0 82 timeout = config.api_timeout if config and hasattr(config, 'api_timeout') else 900.0
80 83
  84 + self.base_url = (
  85 + base_url
  86 + or (getattr(self.config, 'gemini_base_url', None) if self.config else None)
  87 + or os.getenv('GEMINI_BASE_URL')
  88 + or DEFAULT_GEMINI_BASE_URL
  89 + )
  90 +
81 # 创建针对此实例的重试配置 91 # 创建针对此实例的重试配置
82 self.retry_config = create_report_retry_config(config) 92 self.retry_config = create_report_retry_config(config)
83 93
@@ -85,7 +95,7 @@ class GeminiLLM(BaseLLM): @@ -85,7 +95,7 @@ class GeminiLLM(BaseLLM):
85 # 专门为报告生成设置长超时(15分钟),适应7分钟平均生成时间 95 # 专门为报告生成设置长超时(15分钟),适应7分钟平均生成时间
86 self.client = OpenAI( 96 self.client = OpenAI(
87 api_key=self.api_key, 97 api_key=self.api_key,
88 - base_url="https://www.chataiapi.com/v1", 98 + base_url=self.base_url,
89 timeout=timeout 99 timeout=timeout
90 ) 100 )
91 101
@@ -188,6 +198,6 @@ class GeminiLLM(BaseLLM): @@ -188,6 +198,6 @@ class GeminiLLM(BaseLLM):
188 return { 198 return {
189 "provider": "Gemini", 199 "provider": "Gemini",
190 "model": self.default_model, 200 "model": self.default_model,
191 - "api_base": "https://www.chataiapi.com/v1", 201 + "api_base": self.base_url,
192 "purpose": "Report Generation" 202 "purpose": "Report Generation"
193 } 203 }
@@ -13,6 +13,7 @@ class Config: @@ -13,6 +13,7 @@ class Config:
13 """Report Engine配置类""" 13 """Report Engine配置类"""
14 # API密钥 14 # API密钥
15 gemini_api_key: Optional[str] = None 15 gemini_api_key: Optional[str] = None
  16 + gemini_base_url: str = "https://www.chataiapi.com/v1"
16 17
17 # 模型配置 18 # 模型配置
18 default_llm_provider: str = "gemini" 19 default_llm_provider: str = "gemini"
@@ -56,6 +57,7 @@ class Config: @@ -56,6 +57,7 @@ class Config:
56 57
57 return cls( 58 return cls(
58 gemini_api_key=getattr(config_module, "GEMINI_API_KEY", None), 59 gemini_api_key=getattr(config_module, "GEMINI_API_KEY", None),
  60 + gemini_base_url=getattr(config_module, "GEMINI_BASE_URL", "https://www.chataiapi.com/v1"),
59 default_llm_provider=getattr(config_module, "DEFAULT_LLM_PROVIDER", "gemini"), 61 default_llm_provider=getattr(config_module, "DEFAULT_LLM_PROVIDER", "gemini"),
60 gemini_model=getattr(config_module, "GEMINI_MODEL", "gemini-2.5-pro"), 62 gemini_model=getattr(config_module, "GEMINI_MODEL", "gemini-2.5-pro"),
61 max_content_length=getattr(config_module, "MAX_CONTENT_LENGTH", 200000), 63 max_content_length=getattr(config_module, "MAX_CONTENT_LENGTH", 200000),
@@ -82,6 +84,7 @@ class Config: @@ -82,6 +84,7 @@ class Config:
82 84
83 return cls( 85 return cls(
84 gemini_api_key=config_dict.get("GEMINI_API_KEY"), 86 gemini_api_key=config_dict.get("GEMINI_API_KEY"),
  87 + gemini_base_url=config_dict.get("GEMINI_BASE_URL", "https://www.chataiapi.com/v1"),
85 default_llm_provider=config_dict.get("DEFAULT_LLM_PROVIDER", "gemini"), 88 default_llm_provider=config_dict.get("DEFAULT_LLM_PROVIDER", "gemini"),
86 gemini_model=config_dict.get("GEMINI_MODEL", "gemini-2.5-pro"), 89 gemini_model=config_dict.get("GEMINI_MODEL", "gemini-2.5-pro"),
87 max_content_length=int(config_dict.get("MAX_CONTENT_LENGTH", "200000")), 90 max_content_length=int(config_dict.get("MAX_CONTENT_LENGTH", "200000")),
@@ -27,7 +27,18 @@ except locale.Error: @@ -27,7 +27,18 @@ except locale.Error:
27 sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..')) 27 sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
28 28
29 from InsightEngine import DeepSearchAgent, Config 29 from InsightEngine import DeepSearchAgent, Config
30 -from config import DEEPSEEK_API_KEY, KIMI_API_KEY, DB_HOST, DB_USER, DB_PASSWORD, DB_NAME, DB_PORT, DB_CHARSET 30 +from config import (
  31 + DEEPSEEK_API_KEY,
  32 + DEEPSEEK_BASE_URL,
  33 + KIMI_API_KEY,
  34 + KIMI_BASE_URL,
  35 + DB_HOST,
  36 + DB_USER,
  37 + DB_PASSWORD,
  38 + DB_NAME,
  39 + DB_PORT,
  40 + DB_CHARSET,
  41 +)
31 42
32 43
33 def main(): 44 def main():
@@ -111,6 +122,8 @@ def main(): @@ -111,6 +122,8 @@ def main():
111 deepseek_api_key=None, 122 deepseek_api_key=None,
112 openai_api_key=None, 123 openai_api_key=None,
113 kimi_api_key=KIMI_API_KEY, # 强制使用配置文件中的Kimi Key 124 kimi_api_key=KIMI_API_KEY, # 强制使用配置文件中的Kimi Key
  125 + deepseek_base_url=DEEPSEEK_BASE_URL,
  126 + kimi_base_url=KIMI_BASE_URL,
114 db_host=db_host, 127 db_host=db_host,
115 db_user=db_user, 128 db_user=db_user,
116 db_password=db_password, 129 db_password=db_password,
@@ -27,7 +27,13 @@ except locale.Error: @@ -27,7 +27,13 @@ except locale.Error:
27 sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..')) 27 sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
28 28
29 from MediaEngine import DeepSearchAgent, Config 29 from MediaEngine import DeepSearchAgent, Config
30 -from config import DEEPSEEK_API_KEY, BOCHA_Web_Search_API_KEY, GEMINI_API_KEY 30 +from config import (
  31 + DEEPSEEK_API_KEY,
  32 + DEEPSEEK_BASE_URL,
  33 + BOCHA_Web_Search_API_KEY,
  34 + GEMINI_API_KEY,
  35 + GEMINI_BASE_URL,
  36 +)
31 37
32 38
33 def main(): 39 def main():
@@ -112,6 +118,8 @@ def main(): @@ -112,6 +118,8 @@ def main():
112 openai_api_key=None, 118 openai_api_key=None,
113 gemini_api_key=gemini_key, 119 gemini_api_key=gemini_key,
114 bocha_api_key=bocha_key, 120 bocha_api_key=bocha_key,
  121 + deepseek_base_url=DEEPSEEK_BASE_URL,
  122 + gemini_base_url=GEMINI_BASE_URL,
115 default_llm_provider=llm_provider, 123 default_llm_provider=llm_provider,
116 deepseek_model="deepseek-chat", # 保留默认值以兼容 124 deepseek_model="deepseek-chat", # 保留默认值以兼容
117 openai_model="gpt-4o-mini", # 保留默认值以兼容 125 openai_model="gpt-4o-mini", # 保留默认值以兼容
@@ -27,7 +27,7 @@ except locale.Error: @@ -27,7 +27,7 @@ except locale.Error:
27 sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..')) 27 sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
28 28
29 from QueryEngine import DeepSearchAgent, Config 29 from QueryEngine import DeepSearchAgent, Config
30 -from config import DEEPSEEK_API_KEY, TAVILY_API_KEY 30 +from config import DEEPSEEK_API_KEY, DEEPSEEK_BASE_URL, TAVILY_API_KEY
31 31
32 32
33 def main(): 33 def main():
@@ -110,6 +110,7 @@ def main(): @@ -110,6 +110,7 @@ def main():
110 deepseek_api_key=deepseek_key, 110 deepseek_api_key=deepseek_key,
111 openai_api_key=None, 111 openai_api_key=None,
112 tavily_api_key=tavily_key, 112 tavily_api_key=tavily_key,
  113 + deepseek_base_url=DEEPSEEK_BASE_URL,
113 default_llm_provider=llm_provider, 114 default_llm_provider=llm_provider,
114 deepseek_model=model_name, 115 deepseek_model=model_name,
115 openai_model="gpt-4o-mini", # 保留默认值以兼容 116 openai_model="gpt-4o-mini", # 保留默认值以兼容
@@ -4,35 +4,52 @@ Intelligence Public Opinion Analysis Platform Configuration File @@ -4,35 +4,52 @@ Intelligence Public Opinion Analysis Platform Configuration File
4 Stores database connection information and API keys 4 Stores database connection information and API keys
5 """ 5 """
6 6
  7 +# ============================== 数据库配置 ==============================
7 # MySQL Database Configuration 8 # MySQL Database Configuration
8 DB_HOST = "your_database_host" # e.g., "localhost" or "127.0.0.1" 9 DB_HOST = "your_database_host" # e.g., "localhost" or "127.0.0.1"
9 -DB_PORT = 3306 10 +DB_PORT = 3306 # e.g., 3306
10 DB_USER = "your_database_user" 11 DB_USER = "your_database_user"
11 DB_PASSWORD = "your_database_password" 12 DB_PASSWORD = "your_database_password"
12 DB_NAME = "your_database_name" 13 DB_NAME = "your_database_name"
13 DB_CHARSET = "utf8mb4" 14 DB_CHARSET = "utf8mb4"
14 -# 我们也提供云数据库资源便捷配置,日均10w+数据,目前推广阶段可免费申请,联系我们:670939375@qq.com 15 +# 我们也提供云数据库资源便捷配置,日均10w+数据,学术研究可免费申请,联系我们:670939375@qq.com
15 16
16 -# DeepSeek API Key  
17 -# 申请地址https://www.deepseek.com/  
18 -DEEPSEEK_API_KEY = "your_deepseek_api_key"  
19 17
20 -# Tavily Search API Key  
21 -# 申请地址https://www.tavily.com/  
22 -TAVILY_API_KEY = "your_tavily_api_key" 18 +# ============================== LLM配置 ==============================
  19 +# 重要提醒:建议首次运行时先使用默认的模型配置,成功跑通后再更换为自己的模型!
23 20
24 -# Kimi API Key  
25 -# 申请地址https://www.kimi.com/  
26 -KIMI_API_KEY = "your_kimi_api_key" 21 +# DeepSeek API Key (openai调用格式)
  22 +# 用于Query Agent
  23 +# 申请地址https://www.deepseek.com/
  24 +DEEPSEEK_API_KEY = "sk-xxxxxxxxxxxxxxxxx"
  25 +DEEPSEEK_BASE_URL = "https://api.deepseek.com"
27 26
28 -# Gemini API Key (via OpenAI format proxy) 27 +# Kimi API Key (openai调用格式)
  28 +# 用于Insight Agent
  29 +# 申请地址https://platform.moonshot.cn/
  30 +KIMI_API_KEY = "sk-xxxxxxxxxxxxxxxxx"
  31 +KIMI_BASE_URL = "https://api.moonshot.cn/v1"
  32 +
  33 +# Gemini API Key (openai调用格式)
  34 +# 用于Media Agent与Report Agent
29 # 这里我用了一个中转api来接入Gemini,申请地址https://api.chataiapi.com/,你也可以使用其他 35 # 这里我用了一个中转api来接入Gemini,申请地址https://api.chataiapi.com/,你也可以使用其他
30 -GEMINI_API_KEY = "your_gemini_api_key" 36 +GEMINI_API_KEY = "sk-xxxxxxxxxxxxxxxxx"
  37 +GEMINI_BASE_URL = "https://www.chataiapi.com/v1"
  38 +
  39 +# Siliconflow API Key (openai调用格式)
  40 +# 用于Forum Host与Keyword Optimizer
  41 +# 申请地址https://siliconflow.cn/
  42 +GUIJI_QWEN3_API_KEY = "sk-xxxxxxxxxxxxxxxxx"
  43 +GUIJI_QWEN3_BASE_URL = "https://api.siliconflow.cn/v1"
  44 +
  45 +# 调试阶段出于成本考虑,没有使用ChatGPT与Claude,您也可以接入自己的模型,只要符合 OpenAI 调用格式即可
  46 +
  47 +
  48 +# ============================== Web工具配置 ==============================
  49 +# Tavily Search API Key
  50 +# 申请地址https://www.tavily.com/
  51 +TAVILY_API_KEY = "tvly-xxxxxxxxxxxxxxxxx"
31 52
32 # Bocha Search API Key 53 # Bocha Search API Key
33 # 申请地址https://open.bochaai.com/ 54 # 申请地址https://open.bochaai.com/
34 -BOCHA_Web_Search_API_KEY = "your_bocha_web_search_api_key"  
35 -  
36 -# Guiji Flow API Key  
37 -# 申请地址https://siliconflow.cn/  
38 -GUIJI_QWEN3_API_KEY = "your_guiji_qwen3_api_key"  
  55 +BOCHA_Web_Search_API_KEY = "sk-xxxxxxxxxxxxxxxxx"