Google Gen AI 5-Day Intensive: Day Four – Part 2 (4/5)

Codelab #2 – Use Google Search In Generation

This is the first assigned codelab on day four of the intensive. Download it here from GitHub to run locally, or run it in this Kaggle notebook.
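If you run it locally, you'll need the google-genai SDK installed (for example with pip install -U google-genai) and a Gemini API key exposed through the GOOGLE_API_KEY environment variable, since the client below reads it from there. A minimal setup sketch, with a placeholder key:

import os

os.environ["GOOGLE_API_KEY"] = "your-api-key"  # placeholder; use your own key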

"""Use Google Search in Generation

Google Gen AI 5-Day Intensive Course
Host: Kaggle

Day: 4

Codelab: https://www.kaggle.com/code/markishere/day-4-google-search-grounding
"""

import io
import os
from pprint import pprint

from google import genai
from google.api_core import retry
from google.genai import types
from IPython.display import HTML, Image, Markdown, display

client = genai.Client(api_key=os.environ["GOOGLE_API_KEY"])

# Define a retry policy. The model might make multiple consecutive calls automatically
# for a complex query; this ensures the client retries if it hits quota limits.
is_retriable = lambda e: (
    isinstance(e, genai.errors.APIError) and e.code in {429, 503}
)

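# Wrap generate_content with the retry policy, but only once, even if this
# cell is re-run (retry.Retry marks the wrapper with __wrapped__).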
if not hasattr(genai.models.Models.generate_content, "__wrapped__"):
    genai.models.Models.generate_content = retry.Retry(predicate=is_retriable)(
        genai.models.Models.generate_content
    )

# To enable search grounding, add the 'google_search' tool to the
# `GenerateContentConfig` passed to `generate_content`.

# Ask for information without search grounding
response = client.models.generate_content(
    model="gemini-2.0-flash",
    contents="When and where is Billie Eilish's next concert?",
)
Markdown(response.text)
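# Without grounding, the model can only answer from its training data, so the
# reply to a time-sensitive question like this may be out of date or vague.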

# And now rerun the same query with search grounding enabled.
config_with_search = types.GenerateContentConfig(
    tools=[types.Tool(google_search=types.GoogleSearch())]
)


def query_with_grounding():
    response = client.models.generate_content(
        model="gemini-2.0-flash",
        contents="When and where is Billie Eilish's next concert?",
        config=config_with_search,
    )
    
    # Return the first candidate so its grounding_metadata and content are easy to access.
    return response.candidates[0]


rc = query_with_grounding()
Markdown(rc.content.parts[0].text)


# Response metadata
# Get links to search suggestions, supporting documents and information
# on how they were used.
while (
    not rc.grounding_metadata.grounding_supports
    or not rc.grounding_metadata.grounding_chunks
):
    # If incomplete grounding data was returned, retry.
    rc = query_with_grounding()

chunks = rc.grounding_metadata.grounding_chunks
for chunk in chunks:
    print(f"{chunk.web.title}: {chunk.web.url}")

HTML(rc.grounding_metadata.search_entry_point.rendered_content)
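# rendered_content above is a ready-made HTML/CSS snippet for the Google Search
# Suggestions entry point, intended to be shown alongside grounded answers.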

supports = rc.grounding_metadata.grounding_supports
for support in supports:
    pprint(support.to_json_dict())

markdown_buffer = io.StringIO()

# Print the text with footnote markers.
markdown_buffer.write("Supported text:\n\n")
for support in supports:
    markdown_buffer.write(" * ")
    markdown_buffer.write(
        rc.content.parts[0].text[
            support.segment.start_index : support.segment.end_index
        ]
    )

    for i in support.grounding_chunk_indices:
        markdown_buffer.write(f"<sup>[{i + 1}]</sup>")

    markdown_buffer.write("\n\n")

# Print footnotes.
markdown_buffer.write("Citations:\n\n")
for i, chunk in enumerate(chunks, start=1):
    markdown_buffer.write(f"{i}. [{chunk.web.title}]({chunk.web.url})\n")

Markdown(markdown_buffer.getvalue())


# Search with tools
# Use Google search grounding and code generation tools
def show_response(response):
    for p in response.candidates[0].content.parts:
        if p.text:
            display(Markdown(p.text))
        elif p.inline_data:
            display(Image(p.inline_data.data))
        else:
            print(p.to_json_dict())

        display(Markdown('----'))


config_with_search = types.GenerateContentConfig(
    tools=[types.Tool(google_search=types.GoogleSearch())],
    temperature=0.0
)
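
# Use a chat session so the follow-up plotting request can build on this
# conversation's history.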

chat = client.chats.create(model='gemini-2.0-flash')

response = chat.send_message(
    message="What were the medal tallies, by top-10 countries, for the 2024 Olympics?",
    config=config_with_search
)

show_response(response)

config_with_code = types.GenerateContentConfig(
    tools=[types.Tool(code_execution=types.ToolCodeExecution())],
    temperature=0.0
)
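
# This turn reuses the medal data already in the chat history and asks the model
# to write and run plotting code; the returned parts typically include the
# generated code (executable_code), its output (code_execution_result), and the
# chart as inline image data.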

response = chat.send_message(
    message="Now plot this as a Seaborn chart. Break out the medals too.",
    config=config_with_code
)

show_response(response)
