Valyu integrates seamlessly with LlamaIndex as a search tool, allowing you to enhance your AI agents and RAG applications with real-time web search and proprietary data sources. The integration provides LLM-ready context from multiple sources including web pages, academic journals, financial data, and more.
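To get started, install the tool package and construct the tool spec. The following is a minimal sketch, assuming the integration is published as llama-index-tools-valyu (the package name is an assumption; the ValyuToolSpec constructor arguments and to_tool_list() call mirror the examples below):

# pip install llama-index llama-index-tools-valyu  # assumed package name

from llama_index.tools.valyu import ValyuToolSpec

valyu_tool_spec = ValyuToolSpec(
    api_key="your-valyu-api-key",  # or read from the VALYU_API_KEY env var
    max_price=25.0,  # cap spend per query, as in the examples below
)

# Inspect the tools Valyu exposes to an agent; names and descriptions
# come from the tool spec itself
for tool in valyu_tool_spec.to_tool_list():
    print(tool.metadata.name)
    print(tool.metadata.description)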
The most powerful way to use Valyu is within LlamaIndex agents, where the AI can dynamically decide when and how to search:
import os
import asyncio

from llama_index.tools.valyu import ValyuToolSpec
from llama_index.core.agent.workflow import AgentWorkflow
from llama_index.llms.openai import OpenAI

# Set API keys
os.environ["VALYU_API_KEY"] = "your-valyu-api-key"
os.environ["OPENAI_API_KEY"] = "your-openai-api-key"

# Initialize components
llm = OpenAI(model="gpt-4o-mini")
valyu_tool_spec = ValyuToolSpec(
    api_key=os.environ["VALYU_API_KEY"],
    verbose=True,
    max_price=25.0,
)

# Create agent workflow with Valyu search capability
agent = AgentWorkflow.from_tools_or_functions(
    tools_or_functions=valyu_tool_spec.to_tool_list(),
    llm=llm,
    system_prompt=(
        "You are a helpful research assistant with access to real-time web "
        "search and academic databases through Valyu."
    ),
)

# Use the agent
async def main():
    response = await agent.run(
        user_msg="What are the key factors driving recent stock market volatility, "
        "and how do macroeconomic indicators influence equity prices across different sectors?"
    )
    print(response)

# Run the async function
asyncio.run(main())
Use Valyu in complex multi-agent systems with the new AgentWorkflow:
import os
import asyncio

from llama_index.tools.valyu import ValyuToolSpec
from llama_index.core.agent.workflow import AgentWorkflow, FunctionAgent
from llama_index.llms.openai import OpenAI

# Create specialized research agent
research_llm = OpenAI(model="gpt-4o-mini", temperature=0.1)
research_tool = ValyuToolSpec(
    api_key=os.environ["VALYU_API_KEY"],
    max_price=20.0,
)
research_agent = FunctionAgent(
    name="ResearchAgent",
    description="Specialist in finding and analyzing academic and scientific sources",
    tools=research_tool.to_tool_list(),
    llm=research_llm,
    system_prompt="You are a research specialist. Use Valyu to find authoritative sources and provide well-cited answers.",
)

# Create analysis agent
analysis_llm = OpenAI(model="gpt-4o-mini", temperature=0.3)
analysis_tool = ValyuToolSpec(
    api_key=os.environ["VALYU_API_KEY"],
    max_price=30.0,
)
analysis_agent = FunctionAgent(
    name="AnalysisAgent",
    description="Specialist in analyzing data and providing insights",
    tools=analysis_tool.to_tool_list(),
    llm=analysis_llm,
    system_prompt="You are an analyst. Use current data to provide insights and recommendations.",
)

# Create multi-agent workflow (ResearchAgent acts as the root agent)
workflow = AgentWorkflow(
    agents=[research_agent, analysis_agent],
    root_agent="ResearchAgent",
)

# Coordinate agents for complex queries
async def main():
    research_response = await research_agent.run(
        user_msg="Find recent papers on transformer architecture improvements"
    )
    analysis_response = await analysis_agent.run(
        user_msg="Analyze market trends in AI chip demand"
    )
    print("Research Results:", research_response)
    print("Analysis Results:", analysis_response)

asyncio.run(main())
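The workflow built above can also coordinate the two agents itself rather than having you call each one directly. A minimal usage sketch, assuming AgentWorkflow.run accepts a user_msg just like the single-agent examples in this guide (the query and the handoff behavior are illustrative):

# Hedged sketch: run the combined workflow; the root agent decides whether
# to answer itself or hand off to the other specialist
async def run_workflow():
    response = await workflow.run(
        user_msg="Find recent research on AI chip architectures and analyze "
        "what it implies for market demand"
    )
    print(response)

asyncio.run(run_workflow())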
You can also build domain-focused agents by pairing Valyu with a tailored system prompt. For example, a financial research agent:

import os
import asyncio

from llama_index.tools.valyu import ValyuToolSpec
from llama_index.core.agent.workflow import AgentWorkflow
from llama_index.llms.openai import OpenAI

# Create financial research agent
financial_llm = OpenAI(model="gpt-4o-mini")
valyu_tool = ValyuToolSpec(
    api_key=os.environ["VALYU_API_KEY"],
    max_price=25.0,
)
financial_agent = AgentWorkflow.from_tools_or_functions(
    tools_or_functions=valyu_tool.to_tool_list(),
    llm=financial_llm,
    system_prompt="""You are a financial research assistant. Use Valyu to search for:
    - Real-time market data and news
    - Academic research on financial models
    - Economic indicators and analysis
    Always cite your sources and provide context about data recency.""",
)

# Query financial markets
async def main():
    response = await financial_agent.run(
        user_msg="What are the latest developments in cryptocurrency regulation "
        "and their impact on institutional adoption?"
    )
    print(response)

asyncio.run(main())
Agent quality depends heavily on the system prompt. The example below shows how to guide the model in choosing search parameters and citing its sources:

from llama_index.core.agent.workflow import AgentWorkflow

# Optimize agent behavior with a well-structured system message.
# Note: use async/await with AgentWorkflow in your application code for
# better performance.
system_message = """You are an AI research assistant with access to Valyu search.

SEARCH GUIDELINES:
- Use search_type="proprietary" for academic/scientific queries
- Use search_type="web" for current events and news
- Use search_type="all" for comprehensive research
- Set a higher relevance_threshold (0.6+) for precise results
- Always cite sources from search results

RESPONSE FORMAT:
- Provide direct answers based on search results
- Include source citations with URLs when available
- Mention publication dates for time-sensitive information
- Indicate if information might be outdated"""

# Reuses the `valyu_tool` and `llm` objects defined in the earlier examples
agent = AgentWorkflow.from_tools_or_functions(
    tools_or_functions=valyu_tool.to_tool_list(),
    llm=llm,
    system_prompt=system_message,
)
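As in the earlier examples, the configured agent is run asynchronously. A brief usage sketch (the query is illustrative):

import asyncio

# Illustrative query; any research question works here
async def main():
    response = await agent.run(
        user_msg="Summarize recent peer-reviewed work on retrieval-augmented "
        "generation and note how current the sources are"
    )
    print(response)

asyncio.run(main())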