✨ feat(main): Add PDF knowledge base and MCP tools integration
- Create a PDF knowledge base that loads data from local files and supports vector search
- Integrate MCP tools to interact with a PostgreSQL database
- Combine knowledge bases for multi-source knowledge integration
数据库优化工程师.py
@@ -20,12 +20,77 @@ if __name__ == "__main__":
load_dotenv()

from agno.agent import Agent, RunResponse  # noqa
from agno.models.deepseek import DeepSeek
from agno.knowledge.pdf_url import PDFUrlKnowledgeBase
from agno.knowledge.pdf import PDFKnowledgeBase
from agno.vectordb.lancedb import LanceDb, SearchType
from agno.embedder.openai import OpenAIEmbedder
from agno.knowledge.combined import CombinedKnowledgeBase
from agno.tools.mcp import MCPTools
from mcp import StdioServerParameters

agent = Agent(model=DeepSeek(id="deepseek-chat"), markdown=True)

# Create a knowledge base of PDFs from URLs
# pdf_url_kb = PDFUrlKnowledgeBase(
#     urls=["https://agno-public.s3.amazonaws.com/recipes/ThaiRecipes.pdf"],
#     # Use LanceDB as the vector database and store embeddings in the `recipes` table
#     vector_db=LanceDb(
#         table_name="recipes",
#         uri="tmp/lancedb",
#         search_type=SearchType.vector,
#         embedder=OpenAIEmbedder(id="text-embedding-3-small"),
#     ),
# )

# Create a local PDF knowledge base
local_pdf_kb = PDFKnowledgeBase(
    path="D:\\Sources\\DONGJAK-TOOLS\\pdfs\\Database Fundamentals.pdf",
    vector_db=LanceDb(
        table_name="database_fundamentals",
        uri="tmp/lancedb",
        search_type=SearchType.vector,
        embedder=OpenAIEmbedder(id="text-embedding-3-small"),
    ),
)

# Get the response in a variable
# run: RunResponse = agent.run("Share a 2 sentence horror story")
# print(run.content)

# Combine knowledge bases into a single multi-source knowledge base
knowledge_base = CombinedKnowledgeBase(
    sources=[
        local_pdf_kb,
    ],
    vector_db=LanceDb(
        table_name="combined_documents",
        uri="tmp/lancedb",
        search_type=SearchType.vector,
        embedder=OpenAIEmbedder(id="text-embedding-3-small"),
    ),
)

# Load the knowledge base: comment this out after the first run, as the knowledge base is already loaded
knowledge_base.load()

# Print the response in the terminal
agent.print_response("你好")

server_params = StdioServerParameters(
    command="cmd",  # or "uvx", depending on how the server is installed
    args=[
        "/c",
        "npx",
        "-y",
        "@modelcontextprotocol/server-postgres",
        "postgresql://postgres:postgres@192.168.1.7:5432/postgres",
    ],
    env={},  # optional environment variables
)

with MCPTools(server_params=server_params) as postgres_server:
    # Use mcp_tools: give the agent both the combined knowledge base and the PostgreSQL MCP server
    agent = Agent(
        model=DeepSeek(id="deepseek-chat"),
        markdown=True,
        knowledge=knowledge_base,
        search_knowledge=True,
        show_tool_calls=True,
        tools=[postgres_server],
    )

    # Get the response in a variable
    # run: RunResponse = agent.run("Share a 2 sentence horror story")
    # print(run.content)

    # Print the response in the terminal
    agent.print_response("看下aq这个数据库", stream=True)
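A note on the MCP block: the agno MCP examples I have seen enter MCPTools as an async context manager rather than through a synchronous with-statement. If the block above fails at that point, a minimal async sketch under that assumption could look like the following; it reuses the server_params and knowledge_base defined above, the helper name query_postgres is purely illustrative, and it assumes agno exposes aprint_response as the async counterpart of print_response.

import asyncio

async def query_postgres(message: str) -> None:
    # Assumption: MCPTools is entered with `async with`, as in agno's MCP examples
    async with MCPTools(server_params=server_params) as postgres_server:
        agent = Agent(
            model=DeepSeek(id="deepseek-chat"),
            markdown=True,
            knowledge=knowledge_base,
            search_knowledge=True,
            show_tool_calls=True,
            tools=[postgres_server],
        )
        # Assumption: aprint_response is the async variant of print_response
        await agent.aprint_response(message, stream=True)

asyncio.run(query_postgres("看下aq这个数据库"))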