Fetching URLs with a tiny model

This commit is contained in:
Willem van den Ende 2025-04-30 13:13:48 +01:00
parent 80292ef8ea
commit f43293819f
3 changed files with 2010 additions and 0 deletions

46
agentic_search.py Normal file
View File

@ -0,0 +1,46 @@
from qwen_agent.agents import Assistant
# Define LLM
# LLM configuration: a small local Qwen model served by Ollama's
# OpenAI-compatible endpoint.
llm_cfg = {
    # Model tag as known to the local Ollama server.
    'model': 'qwen3:0.6B',
    # OpenAI-compatible API base URL of the local server.
    'model_server': 'http://localhost:11434/v1',  # api_base
    # Local server performs no auth, but the client requires a key value.
    'api_key': 'EMPTY',
    # Alternative: Alibaba Model Studio endpoint instead of a local server:
    # 'model_type': 'qwen_dashscope',
    # 'api_key': os.getenv('DASHSCOPE_API_KEY'),
    # Optional generation settings:
    # 'generate_cfg': {
    #     # Add: When the response content is `<think>this is the thought</think>this is the answer;
    #     # Do not add: When the response has been separated by reasoning_content and content.
    #     'thought_in_content': True,
    # },
}
# Define Tools
# Tool list handed to the agent: one MCP server configuration entry
# plus the built-in code interpreter.
mcp_servers = {
    # Time lookup via the `mcp-server-time` MCP server (run with uvx).
    'time': {
        'command': 'uvx',
        'args': ['mcp-server-time', '--local-timezone=Asia/Shanghai'],
    },
    # URL fetching via the `mcp-server-fetch` MCP server (run with uvx).
    "fetch": {
        "command": "uvx",
        "args": ["mcp-server-fetch"],
    },
}
tools = [
    {'mcpServers': mcp_servers},  # You can specify the MCP configuration file
    'code_interpreter',  # Built-in tools
]
# Define Agent
bot = Assistant(llm=llm_cfg, function_list=tools)
# Streaming generation
messages = [{'role': 'user', 'content': 'https://qwenlm.github.io/blog/ Introduce the latest developments of Qwen'}]
for responses in bot.run(messages=messages):
pass
print(responses)

View File

@ -5,6 +5,10 @@ description = "Add your description here"
readme = "README.md"
requires-python = ">=3.13"
dependencies = [
"mcp>=1.6.0",
"mlx>=0.25.1",
"mlx-lm>=0.24.0",
"python-dateutil>=2.9.0.post0",
"python-dotenv>=1.1.0",
"qwen-agent[code-interpreter]>=0.0.20",
]

1960
uv.lock generated

File diff suppressed because it is too large Load Diff