Skip to content

Commit 6a993e4

Browse files
author
giulio-leone
committed
Python: fix(python/google): preserve thought_signature in Gemini function call parts
- Preserve thought_signature in Gemini function call parts for both Google AI and Vertex AI connectors (serialization and deserialization)
- Use getattr() for defensive access to thought_signature on Part objects, ensuring compatibility with older google-genai SDK versions
- Add deserialization-path tests for _create_chat_message_content and _create_streaming_chat_message_content in both GoogleAI and VertexAI
- Add getattr guard test simulating missing attribute on older SDKs

Closes #13480
1 parent 86d7a49 commit 6a993e4

File tree

8 files changed

+471
-14
lines changed

8 files changed

+471
-14
lines changed

python/semantic_kernel/connectors/ai/google/google_ai/services/google_ai_chat_completion.py

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -303,13 +303,18 @@ def _create_chat_message_content(
303303
if part.text:
304304
items.append(TextContent(text=part.text, inner_content=response, metadata=response_metadata))
305305
elif part.function_call:
306+
fc_metadata: dict[str, Any] = {}
307+
thought_sig = getattr(part, "thought_signature", None)
308+
if thought_sig:
309+
fc_metadata["thought_signature"] = thought_sig
306310
items.append(
307311
FunctionCallContent(
308312
id=f"{part.function_call.name}_{idx!s}",
309313
name=format_gemini_function_name_to_kernel_function_fully_qualified_name(
310314
part.function_call.name # type: ignore[arg-type]
311315
),
312316
arguments={k: v for k, v in part.function_call.args.items()}, # type: ignore
317+
metadata=fc_metadata if fc_metadata else None,
313318
)
314319
)
315320

@@ -360,13 +365,18 @@ def _create_streaming_chat_message_content(
360365
)
361366
)
362367
elif part.function_call:
368+
fc_metadata: dict[str, Any] = {}
369+
thought_sig = getattr(part, "thought_signature", None)
370+
if thought_sig:
371+
fc_metadata["thought_signature"] = thought_sig
363372
items.append(
364373
FunctionCallContent(
365374
id=f"{part.function_call.name}_{idx!s}",
366375
name=format_gemini_function_name_to_kernel_function_fully_qualified_name(
367376
part.function_call.name # type: ignore[arg-type]
368377
),
369378
arguments={k: v for k, v in part.function_call.args.items()}, # type: ignore
379+
metadata=fc_metadata if fc_metadata else None,
370380
)
371381
)
372382

python/semantic_kernel/connectors/ai/google/google_ai/services/utils.py

Lines changed: 17 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -91,12 +91,24 @@ def format_assistant_message(message: ChatMessageContent) -> list[Part]:
9191
if item.text:
9292
parts.append(Part.from_text(text=item.text))
9393
elif isinstance(item, FunctionCallContent):
94-
parts.append(
95-
Part.from_function_call(
96-
name=item.name, # type: ignore[arg-type]
97-
args=json.loads(item.arguments) if isinstance(item.arguments, str) else item.arguments, # type: ignore[arg-type]
94+
thought_signature = item.metadata.get("thought_signature") if item.metadata else None
95+
if thought_signature:
96+
parts.append(
97+
Part(
98+
function_call={
99+
"name": item.name, # type: ignore[arg-type]
100+
"args": json.loads(item.arguments) if isinstance(item.arguments, str) else item.arguments,
101+
},
102+
thought_signature=thought_signature,
103+
)
104+
)
105+
else:
106+
parts.append(
107+
Part.from_function_call(
108+
name=item.name, # type: ignore[arg-type]
109+
args=json.loads(item.arguments) if isinstance(item.arguments, str) else item.arguments, # type: ignore[arg-type]
110+
)
98111
)
99-
)
100112
elif isinstance(item, ImageContent):
101113
parts.append(_create_image_part(item))
102114
else:

python/semantic_kernel/connectors/ai/google/vertex_ai/services/utils.py

Lines changed: 11 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22

33
import json
44
import logging
5-
from typing import TYPE_CHECKING
5+
from typing import TYPE_CHECKING, Any
66

77
from google.cloud.aiplatform_v1beta1.types.content import Candidate
88
from vertexai.generative_models import FunctionDeclaration, Part, Tool, ToolConfig
@@ -89,14 +89,16 @@ def format_assistant_message(message: ChatMessageContent) -> list[Part]:
8989
if item.text:
9090
parts.append(Part.from_text(item.text))
9191
elif isinstance(item, FunctionCallContent):
92-
parts.append(
93-
Part.from_dict({
94-
"function_call": {
95-
"name": item.name,
96-
"args": json.loads(item.arguments) if isinstance(item.arguments, str) else item.arguments,
97-
}
98-
})
99-
)
92+
part_dict: dict[str, Any] = {
93+
"function_call": {
94+
"name": item.name, # type: ignore[arg-type]
95+
"args": json.loads(item.arguments) if isinstance(item.arguments, str) else item.arguments,
96+
}
97+
}
98+
thought_signature = item.metadata.get("thought_signature") if item.metadata else None
99+
if thought_signature:
100+
part_dict["thought_signature"] = thought_signature
101+
parts.append(Part.from_dict(part_dict))
100102
elif isinstance(item, ImageContent):
101103
parts.append(_create_image_part(item))
102104
else:

python/semantic_kernel/connectors/ai/google/vertex_ai/services/vertex_ai_chat_completion.py

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -252,13 +252,18 @@ def _create_chat_message_content(self, response: GenerationResponse, candidate:
252252
if "text" in part_dict:
253253
items.append(TextContent(text=part.text, inner_content=response, metadata=response_metadata))
254254
elif "function_call" in part_dict:
255+
fc_metadata: dict[str, Any] = {}
256+
thought_sig = part_dict.get("thought_signature")
257+
if thought_sig:
258+
fc_metadata["thought_signature"] = thought_sig
255259
items.append(
256260
FunctionCallContent(
257261
id=f"{part.function_call.name}_{idx!s}",
258262
name=format_gemini_function_name_to_kernel_function_fully_qualified_name(
259263
part.function_call.name
260264
),
261265
arguments={k: v for k, v in part.function_call.args.items()},
266+
metadata=fc_metadata if fc_metadata else None,
262267
)
263268
)
264269

@@ -309,13 +314,18 @@ def _create_streaming_chat_message_content(
309314
)
310315
)
311316
elif "function_call" in part_dict:
317+
fc_metadata_s: dict[str, Any] = {}
318+
thought_sig_s = part_dict.get("thought_signature")
319+
if thought_sig_s:
320+
fc_metadata_s["thought_signature"] = thought_sig_s
312321
items.append(
313322
FunctionCallContent(
314323
id=f"{part.function_call.name}_{idx!s}",
315324
name=format_gemini_function_name_to_kernel_function_fully_qualified_name(
316325
part.function_call.name
317326
),
318327
arguments={k: v for k, v in part.function_call.args.items()},
328+
metadata=fc_metadata_s if fc_metadata_s else None,
319329
)
320330
)
321331

python/tests/unit/connectors/ai/google/google_ai/services/test_google_ai_chat_completion.py

Lines changed: 190 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -440,3 +440,193 @@ def test_google_ai_chat_completion_parse_chat_history_correctly(google_ai_unit_t
440440
assert parsed_chat_history[0].parts[0].text == "test_user_message"
441441
assert parsed_chat_history[1].role == "model"
442442
assert parsed_chat_history[1].parts[0].text == "test_assistant_message"
443+
444+
445+
# region deserialization (Part → FunctionCallContent round-trip)
446+
447+
448+
def test_create_chat_message_content_with_thought_signature(google_ai_unit_test_env) -> None:
449+
"""Test that thought_signature from a Part is deserialized into FunctionCallContent.metadata."""
450+
from unittest.mock import MagicMock
451+
452+
from google.genai.types import (
453+
Candidate,
454+
Content,
455+
FinishReason as GFinishReason,
456+
GenerateContentResponse,
457+
GenerateContentResponseUsageMetadata,
458+
Part,
459+
)
460+
461+
from semantic_kernel.contents.function_call_content import FunctionCallContent
462+
463+
thought_sig_value = b"test-thought-sig-bytes"
464+
part = Part.from_function_call(name="test_function", args={"key": "value"})
465+
part.thought_signature = thought_sig_value
466+
467+
candidate = Candidate()
468+
candidate.index = 0
469+
candidate.content = Content(role="user", parts=[part])
470+
candidate.finish_reason = GFinishReason.STOP
471+
472+
response = GenerateContentResponse()
473+
response.candidates = [candidate]
474+
response.usage_metadata = GenerateContentResponseUsageMetadata(
475+
prompt_token_count=0, cached_content_token_count=0, candidates_token_count=0, total_token_count=0
476+
)
477+
478+
completion = GoogleAIChatCompletion()
479+
result = completion._create_chat_message_content(response, candidate)
480+
481+
fc_items = [item for item in result.items if isinstance(item, FunctionCallContent)]
482+
assert len(fc_items) == 1
483+
assert fc_items[0].metadata is not None
484+
assert fc_items[0].metadata["thought_signature"] == thought_sig_value
485+
486+
487+
def test_create_chat_message_content_without_thought_signature(google_ai_unit_test_env) -> None:
488+
"""Test that FunctionCallContent works when Part has no thought_signature."""
489+
from google.genai.types import (
490+
Candidate,
491+
Content,
492+
FinishReason as GFinishReason,
493+
GenerateContentResponse,
494+
GenerateContentResponseUsageMetadata,
495+
Part,
496+
)
497+
498+
from semantic_kernel.contents.function_call_content import FunctionCallContent
499+
500+
part = Part.from_function_call(name="test_function", args={"key": "value"})
501+
502+
candidate = Candidate()
503+
candidate.index = 0
504+
candidate.content = Content(role="user", parts=[part])
505+
candidate.finish_reason = GFinishReason.STOP
506+
507+
response = GenerateContentResponse()
508+
response.candidates = [candidate]
509+
response.usage_metadata = GenerateContentResponseUsageMetadata(
510+
prompt_token_count=0, cached_content_token_count=0, candidates_token_count=0, total_token_count=0
511+
)
512+
513+
completion = GoogleAIChatCompletion()
514+
result = completion._create_chat_message_content(response, candidate)
515+
516+
fc_items = [item for item in result.items if isinstance(item, FunctionCallContent)]
517+
assert len(fc_items) == 1
518+
assert fc_items[0].metadata is None
519+
520+
521+
def test_create_streaming_chat_message_content_with_thought_signature(google_ai_unit_test_env) -> None:
522+
"""Test that thought_signature from a Part is deserialized in streaming path."""
523+
from google.genai.types import (
524+
Candidate,
525+
Content,
526+
FinishReason as GFinishReason,
527+
GenerateContentResponse,
528+
GenerateContentResponseUsageMetadata,
529+
Part,
530+
)
531+
532+
from semantic_kernel.contents.function_call_content import FunctionCallContent
533+
534+
thought_sig_value = b"streaming-thought-sig"
535+
part = Part.from_function_call(name="stream_func", args={"a": "b"})
536+
part.thought_signature = thought_sig_value
537+
538+
candidate = Candidate()
539+
candidate.index = 0
540+
candidate.content = Content(role="user", parts=[part])
541+
candidate.finish_reason = GFinishReason.STOP
542+
543+
chunk = GenerateContentResponse()
544+
chunk.candidates = [candidate]
545+
chunk.usage_metadata = GenerateContentResponseUsageMetadata(
546+
prompt_token_count=0, cached_content_token_count=0, candidates_token_count=0, total_token_count=0
547+
)
548+
549+
completion = GoogleAIChatCompletion()
550+
result = completion._create_streaming_chat_message_content(chunk, candidate)
551+
552+
fc_items = [item for item in result.items if isinstance(item, FunctionCallContent)]
553+
assert len(fc_items) == 1
554+
assert fc_items[0].metadata is not None
555+
assert fc_items[0].metadata["thought_signature"] == thought_sig_value
556+
557+
558+
def test_create_streaming_chat_message_content_without_thought_signature(google_ai_unit_test_env) -> None:
559+
"""Test that streaming FunctionCallContent works when Part lacks thought_signature."""
560+
from google.genai.types import (
561+
Candidate,
562+
Content,
563+
FinishReason as GFinishReason,
564+
GenerateContentResponse,
565+
GenerateContentResponseUsageMetadata,
566+
Part,
567+
)
568+
569+
from semantic_kernel.contents.function_call_content import FunctionCallContent
570+
571+
part = Part.from_function_call(name="stream_func", args={"a": "b"})
572+
573+
candidate = Candidate()
574+
candidate.index = 0
575+
candidate.content = Content(role="user", parts=[part])
576+
candidate.finish_reason = GFinishReason.STOP
577+
578+
chunk = GenerateContentResponse()
579+
chunk.candidates = [candidate]
580+
chunk.usage_metadata = GenerateContentResponseUsageMetadata(
581+
prompt_token_count=0, cached_content_token_count=0, candidates_token_count=0, total_token_count=0
582+
)
583+
584+
completion = GoogleAIChatCompletion()
585+
result = completion._create_streaming_chat_message_content(chunk, candidate)
586+
587+
fc_items = [item for item in result.items if isinstance(item, FunctionCallContent)]
588+
assert len(fc_items) == 1
589+
assert fc_items[0].metadata is None
590+
591+
592+
def test_create_chat_message_content_getattr_guard_on_missing_attribute(google_ai_unit_test_env) -> None:
593+
"""Test that getattr guard handles SDK versions where thought_signature doesn't exist on Part."""
594+
from unittest.mock import MagicMock, PropertyMock
595+
596+
from google.genai.types import (
597+
Candidate,
598+
Content,
599+
FinishReason as GFinishReason,
600+
GenerateContentResponse,
601+
GenerateContentResponseUsageMetadata,
602+
)
603+
604+
from semantic_kernel.contents.function_call_content import FunctionCallContent
605+
606+
# Create a mock Part that lacks 'thought_signature' attribute entirely
607+
mock_part = MagicMock()
608+
mock_part.text = None
609+
mock_part.function_call.name = "test_func"
610+
mock_part.function_call.args = {"x": "y"}
611+
del mock_part.thought_signature # simulate older SDK without the field
612+
613+
candidate = Candidate()
614+
candidate.index = 0
615+
candidate.content = Content(role="user", parts=[mock_part])
616+
candidate.finish_reason = GFinishReason.STOP
617+
618+
response = GenerateContentResponse()
619+
response.candidates = [candidate]
620+
response.usage_metadata = GenerateContentResponseUsageMetadata(
621+
prompt_token_count=0, cached_content_token_count=0, candidates_token_count=0, total_token_count=0
622+
)
623+
624+
completion = GoogleAIChatCompletion()
625+
result = completion._create_chat_message_content(response, candidate)
626+
627+
fc_items = [item for item in result.items if isinstance(item, FunctionCallContent)]
628+
assert len(fc_items) == 1
629+
assert fc_items[0].metadata is None
630+
631+
632+
# endregion deserialization

python/tests/unit/connectors/ai/google/google_ai/services/test_google_ai_utils.py

Lines changed: 44 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -113,3 +113,47 @@ def test_format_assistant_message_with_unsupported_items() -> None:
113113

114114
with pytest.raises(ServiceInvalidRequestError):
115115
format_assistant_message(assistant_message)
116+
117+
118+
def test_format_assistant_message_with_thought_signature() -> None:
119+
"""Test that thought_signature is preserved in function call parts."""
120+
import base64
121+
122+
thought_sig = base64.b64encode(b"test_thought_signature_data")
123+
assistant_message = ChatMessageContent(
124+
role=AuthorRole.ASSISTANT,
125+
items=[
126+
FunctionCallContent(
127+
name="test_function",
128+
arguments={"arg1": "value1"},
129+
metadata={"thought_signature": thought_sig},
130+
),
131+
],
132+
)
133+
134+
formatted = format_assistant_message(assistant_message)
135+
assert len(formatted) == 1
136+
assert isinstance(formatted[0], Part)
137+
assert formatted[0].function_call.name == "test_function"
138+
assert formatted[0].function_call.args == {"arg1": "value1"}
139+
assert formatted[0].thought_signature == thought_sig
140+
141+
142+
def test_format_assistant_message_without_thought_signature() -> None:
143+
"""Test that function calls without thought_signature still work."""
144+
assistant_message = ChatMessageContent(
145+
role=AuthorRole.ASSISTANT,
146+
items=[
147+
FunctionCallContent(
148+
name="test_function",
149+
arguments={"arg1": "value1"},
150+
),
151+
],
152+
)
153+
154+
formatted = format_assistant_message(assistant_message)
155+
assert len(formatted) == 1
156+
assert isinstance(formatted[0], Part)
157+
assert formatted[0].function_call.name == "test_function"
158+
assert formatted[0].function_call.args == {"arg1": "value1"}
159+
assert not getattr(formatted[0], "thought_signature", None)

0 commit comments

Comments
 (0)