init commit
This commit is contained in:
84
search_server/search.py
Normal file
84
search_server/search.py
Normal file
@@ -0,0 +1,84 @@
|
||||
import asyncio
import json
import os

import httpx
from bs4 import BeautifulSoup
from mcp.server.fastmcp import FastMCP
|
||||
|
||||
mcp = FastMCP("search")

GOOGLE_SEARCH_URL = "https://google.serper.dev/search"
# SECURITY: the Serper API key was hard-coded (and is now in git history —
# rotate it). Prefer the SERPER_API_KEY environment variable; the old literal
# remains only as a backward-compatible fallback.
GOOGLE_API_KEY = os.environ.get(
    "SERPER_API_KEY", "2bc74e437bc6b48a82672b7d6ae005d0cd9f369a"
)
|
||||
|
||||
async def fetch_page_content(url: str) -> str:
    """Fetch *url* and return up to 2000 characters of its visible text.

    Prefers the text of the ``<main>`` element when present, otherwise falls
    back to the whole ``<body>``. This is best-effort enrichment: any failure
    (network error, non-2xx status, unparseable HTML, no body at all) yields
    an empty string instead of raising.

    Args:
        url: Absolute URL of the page to fetch.

    Returns:
        Plain text extracted from the page, truncated to 2000 characters,
        or "" on any failure.
    """
    headers = {
        "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/125.0.0.0 Safari/537.36",
        "Referer": url,
        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
        "Accept-Language": "en-US,en;q=0.9",
        # NOTE(review): this static cookie is sent to every fetched site —
        # presumably a session token for some target; confirm it is intended.
        "cookie": "bFC1D2a8-fB14-cF61-3F50-AF98CCbcef62",
    }
    try:
        async with httpx.AsyncClient(follow_redirects=True, timeout=10.0) as client:
            resp = await client.get(url, headers=headers)
            resp.raise_for_status()
            html = resp.text
        soup = BeautifulSoup(html, "lxml")
        root = soup.find("main") or soup.body
        if root is None:
            # Fix: the original dereferenced soup.body unconditionally and
            # relied on the catch-all below to hide the AttributeError.
            return ""
        return root.get_text(separator=" ", strip=True)[:2000]
    except Exception:
        # Deliberate swallow: page content is optional for search results.
        return ""
|
||||
|
||||
async def search_google(query: str) -> list[dict[str, str]]:
    """Query the Serper Google-search API and return the top organic hits.

    Takes the first three organic results that have both a title and a link,
    fetches their page text concurrently, and returns one dict per result
    with keys ``title``, ``link``, ``snippet`` and ``content`` (page text,
    possibly empty). Best-effort: any API or decoding error returns [].

    Args:
        query: The search query string.

    Returns:
        Up to three result dicts, or [] on failure.
    """
    headers = {"X-API-KEY": GOOGLE_API_KEY}
    async with httpx.AsyncClient() as client:
        try:
            # Let httpx serialize the payload and set Content-Type itself
            # instead of hand-rolling json.dumps + an explicit header.
            response = await client.post(
                GOOGLE_SEARCH_URL,
                headers=headers,
                json={"q": query},
                timeout=30.0,
            )
            response.raise_for_status()
            data = response.json()
        except Exception:
            # Deliberate swallow: callers treat [] as "no results".
            return []

    items = [
        item
        for item in data.get("organic", [])[:3]
        if item.get("title") and item.get("link")
    ]
    # Fetch the result pages concurrently rather than one after another;
    # fetch_page_content never raises, so gather cannot fail here.
    contents = await asyncio.gather(
        *(fetch_page_content(item["link"]) for item in items)
    )
    return [
        {
            "title": item["title"],
            "link": item["link"],
            "snippet": item.get("snippet", ""),
            "content": content,
        }
        for item, content in zip(items, contents)
    ]
|
||||
|
||||
def format_search_results(results: list[dict[str, str]]) -> str:
    """Render search results as human-readable text separated by ``---``.

    Args:
        results: Result dicts with ``title``, ``link``, ``snippet`` and
            ``content`` keys, as produced by search_google.

    Returns:
        One text section per result, or a fixed "no results" message when
        the list is empty.
    """
    if not results:
        return "No results found or unable to fetch results."
    sections = [
        f"""
Title: {entry['title']}
Link: {entry['link']}
Snippet: {entry['snippet']}
Content: {entry['content']}
"""
        for entry in results
    ]
    return "\n---\n".join(sections)
|
||||
|
||||
@mcp.tool()
async def search_web(query: str) -> str:
    """Search the web for the given query and return formatted top results.

    Use this tool when the user's input cannot be confirmed, when the web
    must be searched, or when no other tool applies. Returns a formatted
    string with the title, link, snippet, and page content of the top
    results (or a "no results" message on failure).

    Args:
        query: The search query to use for the web search (English queries
            are recommended for best results).
    """
    results = await search_google(query)
    return format_search_results(results)
|
||||
|
||||
if __name__ == "__main__":
    # Serve the MCP tools over stdio so this file can be launched as a
    # subprocess by an MCP client.
    mcp.run(transport='stdio')
|
21
search_server/test.txt
Normal file
21
search_server/test.txt
Normal file
File diff suppressed because one or more lines are too long
10
search_server/test_search_web.py
Normal file
10
search_server/test_search_web.py
Normal file
@@ -0,0 +1,10 @@
|
||||
import asyncio
|
||||
from search import search_web
|
||||
|
||||
async def test_search_web_output() -> None:
    """Manual smoke test: run a real web search and print the output.

    Requires network access and a valid Serper API key; the result is
    printed for human inspection rather than asserted.
    """
    # Query text: "What is Manchester United's latest Premier League ranking?"
    query = "曼联最新的英超排名是多少"
    result = await search_web(query)
    # Label text: "Search result output:"
    print("搜索结果输出:\n", result)
|
||||
|
||||
if __name__ == "__main__":
    # Drive the async smoke test with a fresh event loop.
    asyncio.run(test_search_web_output())
|
Reference in New Issue
Block a user