Gemini manual function calling

from logging import basicConfig, ERROR
from google.genai import Client
from google.genai.types import AutomaticFunctionCallingConfig, Content, FunctionResponse, \
    GenerateContentConfig, Part
from requests import get

basicConfig(level=ERROR)

def get_weather(latitude: str, longitude: str) -> str:
    # query the Open-Meteo forecast API for the current temperature at the given coordinates
    response = get(f'https://api.open-meteo.com/v1/forecast?latitude={latitude}&longitude={longitude}&current=temperature_2m')
    data = response.json()
    temperature = data['current']['temperature_2m']
    return str(temperature)

# process the given function calls
def process_function_call(function_calls):
    # we can have more than one function call in a single response
    for function_call in function_calls:
        # if we don't know the function, we raise an error
        if function_call.name != 'get_weather':
            raise Exception(f'Unknown function call: {function_call.name}')
        # calling our weather API
        temperature = get_weather(**function_call.args)
        # building the object representing the call result (this one was hard to figure out!)
        function_response = FunctionResponse(name='get_weather', response={'result': temperature})
        part = Part(function_response=function_response)
        content = Content(parts=[part], role='function')
        # adding the result of the call to the chat history
        chat_history.append(content)

# Gemini client
client = Client()

# we are keeping track of the chat history ourselves (and not via a chat "session" from Google's library)
chat_history = []
response = None

while True:
    # if we got a function call
    if response and response.function_calls:
        process_function_call(response.function_calls)
    # else it's just a regular prompt, ask the user for input
    else:
        user_input = input('You: ').strip()
        # building the object representing the user prompt
        part = Part(text=user_input)
        content = Content(role='user', parts=[part])
        # adding the user prompt to the chat history
        chat_history.append(content)
    # calling Gemini, passing the current chat history and registering our weather function
    # as a tool (with automatic function calling disabled)
    response = client.models.generate_content(
        model='gemini-2.0-flash',
        contents=chat_history,
        config=GenerateContentConfig(
            tools=[get_weather],
            automatic_function_calling=AutomaticFunctionCallingConfig(disable=True),
        )
    )
    # getting the Gemini response content
    response_content = response.candidates[0].content
    # adding the Gemini response to the chat history, this time we don't need to build the object ourselves
    chat_history.append(response_content)
    # getting the Gemini reply text out of the response object
    response_text = response_content.parts[0].text if response_content.parts else None
    # printing the Gemini response (if any)
    if response_text:
        print('Gemini: ', response_text)

Running it:
You: hey what can you do for me
Gemini: I can fetch weather information for a specific latitude and longitude. Just let me know the coordinates!
You: whats the weather in 43,-75 right now
Gemini: The weather is -8.3 degrees.
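
For comparison, here is a minimal sketch (not part of the program above) of the same question with automatic function calling left enabled, which is the SDK's default when a plain Python function is passed as a tool. It reuses the get_weather function defined above and skips the manual history handling:

from google.genai import Client
from google.genai.types import GenerateContentConfig

client = Client()

# with automatic function calling enabled (the default), the SDK calls get_weather
# itself and returns the final text answer from a single generate_content call
response = client.models.generate_content(
    model='gemini-2.0-flash',
    contents='whats the weather in 43,-75 right now',
    config=GenerateContentConfig(tools=[get_weather]),
)
print(response.text)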
Using the google-genai package at version 1.2.0.
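
To double-check which version you have installed (newer releases may tweak the types API), one quick way using only the standard library:

from importlib.metadata import version

print(version('google-genai'))  # expecting something like 1.2.0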