Overview

Valyu provides seamless integration with the Anthropic API through tool use, enabling Claude to access proprietary datasets, real-time web search, academic sources, and financial data. This allows your AI applications to deliver more informed, up-to-date responses without changing your core Anthropic workflow.

Installation

Install the required packages:

pip install anthropic requests
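
The async examples later in this guide also use aiohttp, so install it as well if you plan to run them:

pip install aiohttp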

You’ll also need to set your API keys:

export ANTHROPIC_API_KEY="your-anthropic-api-key"
export VALYU_API_KEY="your-valyu-api-key"

Free Credits

Get your API key with $10 credit from the Valyu Platform.

Basic Integration

Tool Definition

First, define the Valyu search tool for Claude to use:

import anthropic
import requests
import json
import os
from typing import Literal, Optional

# Initialize Anthropic client
client = anthropic.Anthropic()

def valyu_search(
    query: str,
    search_type: Literal["all", "web", "proprietary"] = "all",
    max_num_results: int = 5,
    relevance_threshold: float = 0.5,
    max_price: float = 20.0,
    category: Optional[str] = None
) -> str:
    """
    Search for information using Valyu's comprehensive knowledge base.

    Args:
        query: Natural language search query
        search_type: Type of search - "all", "web", or "proprietary"
        max_num_results: Number of results to return (1-20)
        relevance_threshold: Minimum relevance score (0.0-1.0)
        max_price: Maximum cost in dollars
        category: Natural language category to guide search

    Returns:
        JSON string with search results
    """
    url = "https://api.valyu.network/v1/deepsearch"

    payload = {
        "query": query,
        "search_type": search_type,
        "max_num_results": max_num_results,
        "relevance_threshold": relevance_threshold,
        "max_price": max_price,
        "is_tool_call": True
    }

    if category:
        payload["category"] = category

    headers = {
        "Authorization": f"Bearer {os.environ['VALYU_API_KEY']}",
        "Content-Type": "application/json"
    }

    try:
        response = requests.post(url, json=payload, headers=headers)
        response.raise_for_status()
        return json.dumps(response.json(), indent=2)
    except Exception as e:
        return f"Search error: {str(e)}"

# Define the tool schema for Claude
valyu_tool = {
    "name": "valyu_search",
    "description": "Search for real-time information, academic papers, and comprehensive knowledge using Valyu's database",
    "input_schema": {
        "type": "object",
        "properties": {
            "query": {
                "type": "string",
                "description": "Natural language search query"
            },
            "search_type": {
                "type": "string",
                "enum": ["all", "web", "proprietary"],
                "description": "Type of search: 'all' for comprehensive, 'web' for current events, 'proprietary' for academic"
            },
            "max_num_results": {
                "type": "integer",
                "minimum": 1,
                "maximum": 20,
                "description": "Number of results to return"
            },
            "relevance_threshold": {
                "type": "number",
                "minimum": 0.0,
                "maximum": 1.0,
                "description": "Minimum relevance score for results"
            },
            "max_price": {
                "type": "number",
                "description": "Maximum cost in dollars for this search"
            },
            "category": {
                "type": "string",
                "description": "Natural language category to guide search context"
            }
        },
        "required": ["query"]
    }
}

Basic Usage

Use the tool with Claude’s tool use feature:

def chat_with_search(user_message: str):
    messages = [
        {
            "role": "user",
            "content": user_message
        }
    ]

    # Initial completion with tool use
    response = client.messages.create(
        model="claude-sonnet-4-20250514",
        max_tokens=1024,
        system="You are a helpful assistant with access to real-time search. Use the valyu_search tool to find current information when needed.",
        messages=messages,
        tools=[valyu_tool]
    )

    # Check if Claude wants to use a tool
    if response.stop_reason == "tool_use":
        # Add Claude's response to messages
        messages.append({
            "role": "assistant",
            "content": response.content
        })

        # Process each tool use
        tool_results = []
        for content_block in response.content:
            if content_block.type == "tool_use":
                # Call the function
                search_results = valyu_search(**content_block.input)

                tool_results.append({
                    "type": "tool_result",
                    "tool_use_id": content_block.id,
                    "content": search_results
                })

        # Add tool results to messages
        messages.append({
            "role": "user",
            "content": tool_results
        })

        # Get final response with search results
        final_response = client.messages.create(
            model="claude-sonnet-4-20250514",
            max_tokens=1024,
            messages=messages,
            tools=[valyu_tool]
        )

        return final_response.content[0].text
    else:
        return response.content[0].text

# Example usage
result = chat_with_search("What are the latest developments in quantum computing?")
print(result)

Advanced Patterns

Streaming with Tool Use

Handle streaming responses with tool use:

def stream_chat_with_search(user_message: str):
    messages = [
        {
            "role": "user",
            "content": user_message
        }
    ]

    # Stream the initial response
    with client.messages.stream(
        model="claude-sonnet-4-20250514",
        max_tokens=1024,
        system="You are a helpful assistant with access to real-time search.",
        messages=messages,
        tools=[valyu_tool]
    ) as stream:
        tool_uses = []

        for text in stream.text_stream:
            print(text, end="", flush=True)

        # Get the final message
        message = stream.get_final_message()

        # Check for tool use
        for content_block in message.content:
            if content_block.type == "tool_use":
                tool_uses.append(content_block)

        if tool_uses:
            # Add Claude's response to messages
            messages.append({
                "role": "assistant",
                "content": message.content
            })

            # Process tool uses
            tool_results = []
            for tool_use in tool_uses:
                search_results = valyu_search(**tool_use.input)
                tool_results.append({
                    "type": "tool_result",
                    "tool_use_id": tool_use.id,
                    "content": search_results
                })

            # Add tool results
            messages.append({
                "role": "user",
                "content": tool_results
            })

            # Stream final response
            print("\n\nBased on the search results:\n")
            with client.messages.stream(
                model="claude-sonnet-4-20250514",
                max_tokens=1024,
                messages=messages,
                tools=[valyu_tool]
            ) as final_stream:
                for text in final_stream.text_stream:
                    print(text, end="", flush=True)

# Example usage
stream_chat_with_search("What are the latest AI safety research developments?")

Multi-Turn Conversations

Maintain context across multiple exchanges:

class ConversationWithSearch:
    def __init__(self):
        self.messages = []
        self.system_prompt = "You are a helpful research assistant with access to real-time search. Use the valyu_search tool when you need current information or specific data."

    def add_user_message(self, content: str):
        self.messages.append({
            "role": "user",
            "content": content
        })

    def get_response(self):
        response = client.messages.create(
            model="claude-sonnet-4-20250514",
            max_tokens=1024,
            system=self.system_prompt,
            messages=self.messages,
            tools=[valyu_tool]
        )

        # Add Claude's response
        self.messages.append({
            "role": "assistant",
            "content": response.content
        })

        # Handle tool use
        if response.stop_reason == "tool_use":
            tool_results = []
            for content_block in response.content:
                if content_block.type == "tool_use":
                    search_results = valyu_search(**content_block.input)
                    tool_results.append({
                        "type": "tool_result",
                        "tool_use_id": content_block.id,
                        "content": search_results
                    })

            # Add tool results
            self.messages.append({
                "role": "user",
                "content": tool_results
            })

            # Get final response
            final_response = client.messages.create(
                model="claude-sonnet-4-20250514",
                max_tokens=1024,
                messages=self.messages,
                tools=[valyu_tool]
            )

            # Add final response
            self.messages.append({
                "role": "assistant",
                "content": final_response.content
            })

            return final_response.content[0].text
        else:
            return response.content[0].text

# Example usage
conversation = ConversationWithSearch()
conversation.add_user_message("What are the latest developments in renewable energy?")
response1 = conversation.get_response()
print(response1)

conversation.add_user_message("How do these developments compare to last year's progress?")
response2 = conversation.get_response()
print(response2)

Specialized Use Cases

Financial Analysis Assistant

def financial_analysis_claude(query: str):
    system_prompt = """You are a financial analyst with access to real-time market data and academic research.
    Use valyu_search with search_type='web' for current market news and
    search_type='proprietary' for academic financial research. Always provide data-driven insights."""

    response = client.messages.create(
        model="claude-sonnet-4-20250514",
        max_tokens=1024,
        system=system_prompt,
        messages=[{"role": "user", "content": query}],
        tools=[valyu_tool]
    )

    return process_claude_response_with_tools(response)

# Example
analysis = financial_analysis_claude("Analyze the recent news and the historical prices of Microsoft stock")

Academic Research Assistant

def academic_research_claude(research_question: str):
    # Custom tool for academic searches
    academic_tool = {
        "name": "valyu_search",
        "description": "Search academic databases for research papers and scholarly articles",
        "input_schema": {
            "type": "object",
            "properties": {
                "query": {"type": "string"},
                "search_type": {"type": "string", "enum": ["proprietary"]},
                "max_num_results": {"type": "integer", "minimum": 5, "maximum": 15},
                "relevance_threshold": {"type": "number", "minimum": 0.6},
                "category": {"type": "string"}
            },
            "required": ["query"]
        }
    }

    system_prompt = """You are an academic research assistant. Focus on peer-reviewed sources and provide proper citations.
    Use the search tool to find relevant academic papers and synthesize the findings."""

    response = client.messages.create(
        model="claude-sonnet-4-20250514",
        max_tokens=1024,
        system=system_prompt,
        messages=[{"role": "user", "content": research_question}],
        tools=[academic_tool]
    )

    return process_claude_response_with_tools(response)

# Example
research = academic_research_claude("What are the latest findings on CRISPR gene editing safety?")

News Analysis Assistant

def news_analysis_claude(topic: str):
    system_prompt = """You are a news analyst with access to real-time information.
    Use valyu_search with search_type='web' to find current news and provide balanced analysis."""

    response = client.messages.create(
        model="claude-sonnet-4-20250514",
        max_tokens=1024,
        system=system_prompt,
        messages=[{
            "role": "user",
            "content": f"Provide a comprehensive analysis of recent developments regarding: {topic}"
        }],
        tools=[valyu_tool]
    )

    return process_claude_response_with_tools(response)

# Example
news_analysis = news_analysis_claude("artificial intelligence regulation in the European Union")

Best Practices

1. Error Handling and Fallbacks

def robust_claude_search(user_message: str):
    try:
        return chat_with_search(user_message)
    except requests.RequestException as e:
        print(f"Search API error: {e}")
        # Fallback to standard Claude without search
        response = client.messages.create(
            model="claude-sonnet-4-20250514",
            max_tokens=1024,
            system="You are a helpful assistant.",
            messages=[{"role": "user", "content": user_message}]
        )
        return response.content[0].text
    except Exception as e:
        print(f"Unexpected error: {e}")
        return "I apologize, but I encountered an error processing your request."

2. Cost Management

def cost_controlled_claude_search(query: str, max_budget: float = 10.0):
    # Adjust search parameters based on budget
    if max_budget < 5.0:
        search_params = {
            "max_num_results": 3,
            "search_type": "web",
            "max_price": max_budget
        }
    elif max_budget < 15.0:
        search_params = {
            "max_num_results": 5,
            "search_type": "all",
            "max_price": max_budget
        }
    else:
        search_params = {
            "max_num_results": 10,
            "search_type": "all",
            "max_price": max_budget,
            "relevance_threshold": 0.7
        }

    # Create custom tool with budget constraints
    budget_tool = {
        "name": "valyu_search",
        "description": f"Search with budget limit of ${max_budget}",
        "input_schema": {
            "type": "object",
            "properties": {
                "query": {"type": "string"},
                "search_type": {"type": "string", "enum": [search_params["search_type"]]},
                "max_num_results": {"type": "integer", "maximum": search_params["max_num_results"]},
                "max_price": {"type": "number", "maximum": search_params["max_price"]}
            },
            "required": ["query"]
        }
    }

    response = client.messages.create(
        model="claude-sonnet-4-20250514",
        max_tokens=1024,
        system=f"You have a search budget of ${max_budget}. Use searches efficiently.",
        messages=[{"role": "user", "content": query}],
        tools=[budget_tool]
    )

    return process_claude_response_with_tools(response)
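
A usage example, following the same pattern as the other assistants (the query and budget values are illustrative):

# Example
summary = cost_controlled_claude_search(
    "Summarize this week's semiconductor market news",
    max_budget=5.0
)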

3. Async Support

import asyncio
import aiohttp
from anthropic import AsyncAnthropic

async_client = AsyncAnthropic()

async def async_valyu_search(query: str, **kwargs):
    """Async version of Valyu search"""
    url = "https://api.valyu.network/v1/deepsearch"

    payload = {
        "query": query,
        "is_tool_call": True,
        **kwargs
    }

    headers = {
        "Authorization": f"Bearer {os.environ['VALYU_API_KEY']}",
        "Content-Type": "application/json"
    }

    async with aiohttp.ClientSession() as session:
        async with session.post(url, json=payload, headers=headers) as response:
            result = await response.json()
            return json.dumps(result, indent=2)

async def async_chat_with_search(user_message: str):
    """Async chat with search capabilities"""
    response = await async_client.messages.create(
        model="claude-sonnet-4-20250514",
        max_tokens=1024,
        system="You are a helpful assistant with access to real-time search.",
        messages=[{"role": "user", "content": user_message}],
        tools=[valyu_tool]
    )

    # Process tool use asynchronously
    if response.stop_reason == "tool_use":
        tool_results = []
        for content_block in response.content:
            if content_block.type == "tool_use":
                search_results = await async_valyu_search(**content_block.input)
                tool_results.append({
                    "type": "tool_result",
                    "tool_use_id": content_block.id,
                    "content": search_results
                })

        # Continue conversation with results
        messages = [
            {"role": "user", "content": user_message},
            {"role": "assistant", "content": response.content},
            {"role": "user", "content": tool_results}
        ]

        final_response = await async_client.messages.create(
            model="claude-sonnet-4-20250514",
            max_tokens=1024,
            messages=messages,
            tools=[valyu_tool]
        )

        return final_response.content[0].text
    else:
        return response.content[0].text

# Example usage
async def main():
    result = await async_chat_with_search("What are the latest AI developments?")
    print(result)

# asyncio.run(main())

Helper Functions

Response Processing

def process_claude_response_with_tools(response):
    """Helper function to process Claude responses with tool use"""
    if response.stop_reason == "tool_use":
        # Extract text content
        text_content = ""
        tool_uses = []

        for content_block in response.content:
            if content_block.type == "text":
                text_content += content_block.text
            elif content_block.type == "tool_use":
                tool_uses.append(content_block)

        # Process tool uses
        tool_results = []
        for tool_use in tool_uses:
            if tool_use.name == "valyu_search":
                search_results = valyu_search(**tool_use.input)
                tool_results.append({
                    "tool_use_id": tool_use.id,
                    "results": search_results
                })

        return {
            "text": text_content,
            "tool_results": tool_results,
            "needs_continuation": True
        }
    else:
        return {
            "text": response.content[0].text,
            "tool_results": [],
            "needs_continuation": False
        }
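
The specialized assistants above return this dictionary directly. When needs_continuation is True, the tool results still need to be sent back to Claude to produce the final answer. Below is a minimal sketch of that follow-up step; it assumes you still have the original messages list and the response object on hand, and the helper name continue_after_tools is ours, not part of either API:

def continue_after_tools(messages, response, processed):
    """Send tool results back to Claude and return the final text.

    `processed` is the dict returned by process_claude_response_with_tools.
    """
    if not processed["needs_continuation"]:
        return processed["text"]

    # Echo Claude's tool-use turn, then supply the matching tool results
    messages = messages + [{"role": "assistant", "content": response.content}]
    messages.append({
        "role": "user",
        "content": [
            {
                "type": "tool_result",
                "tool_use_id": item["tool_use_id"],
                "content": item["results"]
            }
            for item in processed["tool_results"]
        ]
    })

    final_response = client.messages.create(
        model="claude-sonnet-4-20250514",
        max_tokens=1024,
        messages=messages,
        tools=[valyu_tool]
    )
    return final_response.content[0].text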

API Reference

Tool Parameters

The valyu_search tool supports all v2 API parameters:

  • query (required): Natural language search query
  • search_type: "all", "web", or "proprietary" (default: "all")
  • max_num_results: 1-20 results (default: 5)
  • relevance_threshold: 0.0-1.0 relevance filter (default: 0.5)
  • max_price: Maximum cost in dollars (default: 20.0)
  • category: Natural language context guide (optional)
  • included_sources: List of specific datasets/URLs (optional)
  • start_date/end_date: Time filtering (YYYY-MM-DD format, optional); both are shown in the example payload below
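
The included_sources and date filters are not wired into the valyu_search helper above, but they can be added to the request payload alongside the other fields. A minimal sketch, where the dataset identifier and dates are purely illustrative:

payload = {
    "query": "transformer models for time-series forecasting",
    "search_type": "proprietary",
    "max_num_results": 5,
    "relevance_threshold": 0.6,
    "max_price": 20.0,
    "is_tool_call": True,
    # Optional filters from the parameter list above
    "included_sources": ["valyu/valyu-arxiv"],  # illustrative dataset identifier
    "start_date": "2024-01-01",                 # YYYY-MM-DD
    "end_date": "2024-12-31"
}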

Response Format

Search results are returned as JSON with the following structure:

{
  "results": [
    {
      "title": "Result title",
      "content": "Result content/snippet",
      "url": "Source URL",
      "relevance_score": 0.85,
      "source_type": "web|academic|financial",
      "published_date": "2024-01-15"
    }
  ],
  "total_results": 5,
  "search_metadata": {
    "query": "original query",
    "search_type": "all",
    "cost": 2.5
  }
}
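
Because the tool result is passed back to Claude verbatim, it can help to trim each result to the fields the model actually needs before returning it. A minimal sketch against the structure above, reusing the json import from earlier (the helper name and character cap are our own choices):

def condense_results(raw_json: str, max_chars: int = 1500) -> str:
    """Keep only the key fields and cap content length to save tokens."""
    data = json.loads(raw_json)
    condensed = [
        {
            "title": r.get("title"),
            "url": r.get("url"),
            "relevance_score": r.get("relevance_score"),
            "content": (r.get("content") or "")[:max_chars]
        }
        for r in data.get("results", [])
    ]
    return json.dumps(condensed, indent=2)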

Additional Resources