Google Gemini Integration
Enhance Gemini with real-time search and academic context
Overview
Valyu provides seamless integration with the Google Gemini API through function calling, enabling your Gemini models to access proprietary data sources, real-time web search, academic data sources, and financial data. This integration allows your AI applications to provide more informed and up-to-date responses without changing your core Gemini workflow.
Installation
Install the required packages:
pip install google-generativeai requests
You’ll also need to set your API keys:
export GOOGLE_API_KEY="your-google-api-key"
export VALYU_API_KEY="your-valyu-api-key"
Free Credits
Get your API key with $10 credit from the Valyu Platform.
Basic Integration
Function Definition
First, define the Valyu search function for Gemini to use:
import json
import os
from typing import Literal, Optional

import google.generativeai as genai
import requests
# Configure the Gemini SDK with the API key from the environment
# (raises KeyError if GOOGLE_API_KEY is not set).
genai.configure(api_key=os.environ['GOOGLE_API_KEY'])
def valyu_search(
    query: str,
    search_type: Literal["all", "web", "proprietary"] = "all",
    max_num_results: int = 5,
    relevance_threshold: float = 0.5,
    max_price: float = 20.0,
    category: Optional[str] = None
) -> str:
    """
    Search for information using Valyu's comprehensive knowledge base.

    Args:
        query: Natural language search query
        search_type: Type of search - "all", "web", or "proprietary"
        max_num_results: Number of results to return (1-20)
        relevance_threshold: Minimum relevance score (0.0-1.0)
        max_price: Maximum cost in dollars
        category: Natural language category to guide search (optional)

    Returns:
        JSON string with search results, or a "Search error: ..." string
        if the HTTP request fails or the response is not valid JSON.

    Raises:
        KeyError: If the VALYU_API_KEY environment variable is not set.
    """
    url = "https://api.valyu.network/v1/deepsearch"
    payload = {
        "query": query,
        "search_type": search_type,
        "max_num_results": max_num_results,
        "relevance_threshold": relevance_threshold,
        "max_price": max_price,
        # Marks this request as originating from an LLM tool call.
        "is_tool_call": True
    }
    if category:
        payload["category"] = category
    headers = {
        "Authorization": f"Bearer {os.environ['VALYU_API_KEY']}",
        "Content-Type": "application/json"
    }
    try:
        # Timeout keeps a slow/unreachable search API from hanging the
        # model's tool call indefinitely.
        response = requests.post(url, json=payload, headers=headers, timeout=30)
        response.raise_for_status()
        return json.dumps(response.json(), indent=2)
    except (requests.RequestException, ValueError) as e:
        # RequestException covers connection/HTTP errors; ValueError covers
        # a response body that is not valid JSON. Errors are returned as a
        # string so the model can relay them instead of crashing the chat.
        return f"Search error: {str(e)}"
# Define the function declaration for Gemini.
# The schema mirrors the valyu_search() Python signature so that the arguments
# Gemini produces for a tool call can be passed straight through as kwargs.
valyu_function_declaration = genai.protos.FunctionDeclaration(
    name="valyu_search",
    description="Search for real-time information, academic papers, and comprehensive knowledge using Valyu's database",
    parameters=genai.protos.Schema(
        type=genai.protos.Type.OBJECT,
        properties={
            "query": genai.protos.Schema(
                type=genai.protos.Type.STRING,
                description="Natural language search query"
            ),
            "search_type": genai.protos.Schema(
                type=genai.protos.Type.STRING,
                enum=["all", "web", "proprietary"],
                description="Type of search: 'all' for comprehensive, 'web' for current events, 'proprietary' for academic"
            ),
            "max_num_results": genai.protos.Schema(
                type=genai.protos.Type.INTEGER,
                description="Number of results to return (1-20)"
            ),
            "relevance_threshold": genai.protos.Schema(
                type=genai.protos.Type.NUMBER,
                description="Minimum relevance score for results (0.0-1.0)"
            ),
            "max_price": genai.protos.Schema(
                type=genai.protos.Type.NUMBER,
                description="Maximum cost in dollars for this search"
            ),
            "category": genai.protos.Schema(
                type=genai.protos.Type.STRING,
                description="Natural language category to guide search context"
            )
        },
        # Only `query` is mandatory; every other field falls back to the
        # defaults declared on valyu_search().
        required=["query"]
    )
)

# Create the tool wrapper that exposes the declaration to the model.
valyu_tool = genai.protos.Tool(
    function_declarations=[valyu_function_declaration]
)

# Initialize the model with the Valyu search tool attached.
model = genai.GenerativeModel(
    model_name="gemini-2.0-flash-exp",
    tools=[valyu_tool]
)
Basic Usage
Use the function with Gemini’s function calling:
def chat_with_search(user_message: str):
    """Send one message to Gemini, executing a valyu_search tool call if made."""
    session = model.start_chat()

    # Prime the model with instructions plus the user's query.
    reply = session.send_message(
        f"You are a helpful assistant with access to real-time search. "
        f"Use the valyu_search function to find current information when needed. "
        f"User query: {user_message}"
    )

    # Look for a valyu_search tool call among the reply parts.
    parts = reply.candidates[0].content.parts
    if parts:
        for reply_part in parts:
            call = getattr(reply_part, 'function_call', None)
            if not call or call.name != "valyu_search":
                continue
            # Convert the proto argument map into plain keyword arguments.
            kwargs = {key: value for key, value in call.args.items()}
            tool_output = valyu_search(**kwargs)
            # Hand the tool output back so Gemini can compose a final answer.
            tool_part = genai.protos.Part(
                function_response=genai.protos.FunctionResponse(
                    name="valyu_search",
                    response={"result": tool_output}
                )
            )
            return session.send_message(tool_part).text

    # No tool call was made; return the model's direct answer.
    return reply.text

# Example usage
result = chat_with_search("What are the latest developments in quantum computing?")
print(result)
Advanced Patterns
Streaming with Function Calls
Handle streaming responses with function calling:
def stream_chat_with_search(user_message: str):
    """Stream a Gemini reply, executing valyu_search tool calls as they appear."""
    session = model.start_chat()

    # Send message and stream the response chunks.
    stream = session.send_message(
        f"You are a helpful assistant with access to real-time search. "
        f"Use the valyu_search function when needed. User query: {user_message}",
        stream=True
    )

    pending_calls = []
    streamed_text = ""

    # Fully consume the stream first: print text as it arrives and collect
    # any tool calls for processing afterwards.
    for piece in stream:
        chunk_parts = piece.candidates[0].content.parts
        if not chunk_parts:
            continue
        for part in chunk_parts:
            if getattr(part, 'text', None):
                streamed_text += part.text
                print(part.text, end="", flush=True)
            elif getattr(part, 'function_call', None):
                pending_calls.append(part.function_call)

    if not pending_calls:
        return

    print("\n\nSearching for information...")
    for call in pending_calls:
        if call.name != "valyu_search":
            continue
        # Execute the search with the model-supplied arguments.
        kwargs = {key: value for key, value in call.args.items()}
        tool_output = valyu_search(**kwargs)
        tool_part = genai.protos.Part(
            function_response=genai.protos.FunctionResponse(
                name="valyu_search",
                response={"result": tool_output}
            )
        )
        # Stream the model's final, search-grounded answer.
        print("\nBased on the search results:\n")
        for piece in session.send_message(tool_part, stream=True):
            chunk_parts = piece.candidates[0].content.parts
            if not chunk_parts:
                continue
            for part in chunk_parts:
                if getattr(part, 'text', None):
                    print(part.text, end="", flush=True)

# Example usage
stream_chat_with_search("What are the latest AI safety research developments?")
Multi-Turn Conversations
Maintain context across multiple exchanges:
class GeminiConversationWithSearch:
def __init__(self):
self.chat = model.start_chat()
self.system_prompt = "You are a helpful research assistant with access to real-time search. Use the valyu_search function when you need current information or specific data."
def send_message(self, user_message: str):
# Combine system prompt with user message
full_message = f"{self.system_prompt}\n\nUser: {user_message}"
response = self.chat.send_message(full_message)
# Check for function calls
if response.candidates[0].content.parts:
for part in response.candidates[0].content.parts:
if hasattr(part, 'function_call') and part.function_call:
function_call = part.function_call
if function_call.name == "valyu_search":
# Extract and execute function
function_args = {}
for key, value in function_call.args.items():
function_args[key] = value
search_results = valyu_search(**function_args)
# Send function response
function_response = genai.protos.Part(
function_response=genai.protos.FunctionResponse(
name="valyu_search",
response={"result": search_results}
)
)
# Get final response
final_response = self.chat.send_message(function_response)
return final_response.text
return response.text
# Example usage
conversation = GeminiConversationWithSearch()
response1 = conversation.send_message("What are the latest developments in renewable energy?")
print(response1)
response2 = conversation.send_message("How do these developments compare to last year's progress?")
print(response2)
Batch Processing
Process multiple queries efficiently:
def batch_search_analysis(queries: list[str]):
    """Process multiple search queries in batch, one fresh chat per query."""
    summaries = []
    for question in queries:
        session = model.start_chat()
        reply = session.send_message(
            f"Analyze this query using search if needed: {question}"
        )

        # Track whether a valyu_search call was found and resolved.
        search_handled = False
        parts = reply.candidates[0].content.parts
        if parts:
            for part in parts:
                call = getattr(part, 'function_call', None)
                if not call or call.name != "valyu_search":
                    continue
                kwargs = {key: value for key, value in call.args.items()}
                tool_output = valyu_search(**kwargs)
                tool_part = genai.protos.Part(
                    function_response=genai.protos.FunctionResponse(
                        name="valyu_search",
                        response={"result": tool_output}
                    )
                )
                final_reply = session.send_message(tool_part)
                summaries.append({
                    "query": question,
                    "response": final_reply.text,
                    "search_used": True
                })
                search_handled = True
                # Only the first valyu_search call per query is processed.
                break

        if not search_handled:
            summaries.append({
                "query": question,
                "response": reply.text,
                "search_used": False
            })
    return summaries

# Example usage
queries = [
    "Latest AI developments",
    "Current stock market trends",
    "Recent climate change research"
]
results = batch_search_analysis(queries)
for result in results:
    print(f"Query: {result['query']}")
    print(f"Used search: {result['search_used']}")
    print(f"Response: {result['response'][:200]}...")
    print("-" * 50)
Specialized Use Cases
Financial Analysis Assistant
def financial_analysis_gemini(query: str):
    """Run a financial-analysis query through a finance-specialised Gemini model."""
    # Dedicated model instance with a finance-focused system instruction.
    analyst_model = genai.GenerativeModel(
        model_name="gemini-2.0-flash-exp",
        tools=[valyu_tool],
        system_instruction="""You are a financial analyst with access to real-time market data and academic research.
Use valyu_search with search_type='web' for current market news and
search_type='proprietary' for academic financial research. Always provide data-driven insights."""
    )
    session = analyst_model.start_chat()
    reply = session.send_message(query)
    # Delegate tool-call resolution to the shared helper.
    return process_gemini_response_with_functions(session, reply)

# Example
analysis = financial_analysis_gemini("Analyze the recent news and the historical prices of Microsoft stock")
Academic Research Assistant
def academic_research_gemini(research_question: str):
    """Answer a research question using an academic-only Valyu search tool."""
    # Create an academic-focused function declaration: same tool name, but the
    # schema restricts search_type to "proprietary" (academic sources only).
    academic_function = genai.protos.FunctionDeclaration(
        name="valyu_search",
        description="Search academic databases for research papers and scholarly articles",
        parameters=genai.protos.Schema(
            type=genai.protos.Type.OBJECT,
            properties={
                "query": genai.protos.Schema(type=genai.protos.Type.STRING),
                "search_type": genai.protos.Schema(
                    type=genai.protos.Type.STRING,
                    enum=["proprietary"]
                ),
                "max_num_results": genai.protos.Schema(
                    type=genai.protos.Type.INTEGER,
                    description="Number of results (5-15 for academic research)"
                ),
                "relevance_threshold": genai.protos.Schema(
                    type=genai.protos.Type.NUMBER,
                    description="Minimum relevance score (0.6+ for academic quality)"
                ),
                "category": genai.protos.Schema(type=genai.protos.Type.STRING)
            },
            required=["query"]
        )
    )
    academic_tool = genai.protos.Tool(function_declarations=[academic_function])
    # Model instance wired to the restricted tool and an academic persona.
    academic_model = genai.GenerativeModel(
        model_name="gemini-2.0-flash-exp",
        tools=[academic_tool],
        system_instruction="""You are an academic research assistant. Focus on peer-reviewed sources and provide proper citations.
Use the search tool to find relevant academic papers and synthesize the findings."""
    )
    chat = academic_model.start_chat()
    response = chat.send_message(research_question)
    # Resolve any tool call via the shared helper and return the final text.
    return process_gemini_response_with_functions(chat, response)

# Example
research = academic_research_gemini("What are the latest findings on CRISPR gene editing safety?")
News Analysis Assistant
def news_analysis_gemini(topic: str):
    """Analyse current news on a topic with a web-search-enabled Gemini model."""
    reporter_model = genai.GenerativeModel(
        model_name="gemini-2.0-flash-exp",
        tools=[valyu_tool],
        system_instruction="""You are a news analyst with access to real-time information.
Use valyu_search with search_type='web' to find current news and provide balanced analysis."""
    )
    session = reporter_model.start_chat()
    reply = session.send_message(
        f"Provide a comprehensive analysis of recent developments regarding: {topic}"
    )
    # Shared helper executes any tool call and returns the final text.
    return process_gemini_response_with_functions(session, reply)

# Example
news_analysis = news_analysis_gemini("artificial intelligence regulation in the European Union")
Best Practices
1. Error Handling and Fallbacks
def robust_gemini_search(user_message: str):
    """Wrapper around chat_with_search that degrades gracefully on errors."""
    try:
        return chat_with_search(user_message)
    except requests.RequestException as err:
        # NOTE(review): valyu_search catches its own request errors and returns
        # an error string, so this branch looks reachable only if requests
        # raises outside that function — confirm whether it fires in practice.
        print(f"Search API error: {err}")
        # Fallback to standard Gemini without the search tool attached.
        fallback_model = genai.GenerativeModel("gemini-2.0-flash-exp")
        return fallback_model.generate_content(user_message).text
    except Exception as err:
        print(f"Unexpected error: {err}")
        return "I apologize, but I encountered an error processing your request."
2. Cost Management
def cost_controlled_gemini_search(query: str, max_budget: float = 10.0):
    """Answer a query with search behaviour scaled to a dollar budget.

    The budget only shapes the system instruction handed to Gemini; the
    constraints are advisory rather than enforced by the API client.
    """
    # Tiered constraints; dict insertion order is preserved and the dict's
    # repr appears verbatim in the system instruction below.
    if max_budget < 5.0:
        search_config = {"max_num_results": 3, "search_type": "web", "max_price": max_budget}
    elif max_budget < 15.0:
        search_config = {"max_num_results": 5, "search_type": "all", "max_price": max_budget}
    else:
        search_config = {
            "max_num_results": 10,
            "search_type": "all",
            "max_price": max_budget,
            "relevance_threshold": 0.7
        }

    # Create budget-aware model.
    budget_model = genai.GenerativeModel(
        model_name="gemini-2.0-flash-exp",
        tools=[valyu_tool],
        system_instruction=f"You have a search budget of ${max_budget}. Use searches efficiently with these constraints: {search_config}"
    )
    session = budget_model.start_chat()
    reply = session.send_message(query)
    return process_gemini_response_with_functions(session, reply)
Helper Functions
Response Processing
def process_gemini_response_with_functions(chat, response):
    """Resolve a valyu_search call in `response`, returning the final text.

    If the response contains a valyu_search function call, execute it, feed
    the result back through `chat`, and return Gemini's follow-up text.
    Otherwise return the response's own text unchanged.
    """
    parts = response.candidates[0].content.parts
    if parts:
        for part in parts:
            call = getattr(part, 'function_call', None)
            if not call or call.name != "valyu_search":
                continue
            # Convert the proto argument map into plain keyword arguments.
            kwargs = {key: value for key, value in call.args.items()}
            tool_output = valyu_search(**kwargs)
            # Send the tool output back for the final, grounded answer.
            tool_part = genai.protos.Part(
                function_response=genai.protos.FunctionResponse(
                    name="valyu_search",
                    response={"result": tool_output}
                )
            )
            return chat.send_message(tool_part).text
    return response.text
def extract_function_calls(response):
    """Return every function_call part found in a Gemini response."""
    parts = response.candidates[0].content.parts
    if not parts:
        return []
    # Keep only parts that carry a truthy function_call attribute.
    return [part.function_call
            for part in parts
            if getattr(part, 'function_call', None)]
API Reference
Function Parameters
The `valyu_search` function supports all v2 API parameters:

- `query` (required): Natural language search query
- `search_type`: `"all"`, `"web"`, or `"proprietary"` (default: `"all"`)
- `max_num_results`: 1-20 results (default: 5)
- `relevance_threshold`: 0.0-1.0 relevance filter (default: 0.5)
- `max_price`: Maximum cost in dollars (default: 20.0)
- `category`: Natural language context guide (optional)
- `included_sources`: List of specific datasets/URLs (optional)
- `start_date` / `end_date`: Time filtering (YYYY-MM-DD format, optional)
Gemini Models
Available Gemini models:

- `gemini-2.0-flash-exp`: Latest experimental model with enhanced capabilities
- `gemini-2.0-flash-thinking-exp`: Model with enhanced reasoning capabilities
- `gemini-1.5-pro`: Production-ready model for complex tasks
- `gemini-1.5-flash`: Fast model for quick responses
Response Format
Search results are returned as JSON with the following structure:
{
"results": [
{
"title": "Result title",
"content": "Result content/snippet",
"url": "Source URL",
"relevance_score": 0.85,
"source_type": "web|academic|financial",
"published_date": "2024-01-15"
}
],
"total_results": 5,
"search_metadata": {
"query": "original query",
"search_type": "all",
"cost": 2.5
}
}
Additional Resources
- Overview
- Installation
- Basic Integration
- Function Definition
- Basic Usage
- Advanced Patterns
- Streaming with Function Calls
- Multi-Turn Conversations
- Batch Processing
- Specialized Use Cases
- Financial Analysis Assistant
- Academic Research Assistant
- News Analysis Assistant
- Best Practices
- 1. Error Handling and Fallbacks
- 2. Cost Management
- Helper Functions
- Response Processing
- API Reference
- Function Parameters
- Gemini Models
- Response Format
- Additional Resources