2222from pydantic import BaseModel
2323
2424from haystack import component , default_from_dict , default_to_dict , logging
25- from haystack .components .generators .utils import _convert_streaming_chunks_to_chat_message
25+ from haystack .components .generators .utils import _convert_streaming_chunks_to_chat_message , _serialize_object
2626from haystack .dataclasses import (
2727 AsyncStreamingCallbackT ,
2828 ChatMessage ,
@@ -563,16 +563,17 @@ def _convert_chat_completion_to_chat_message(
563563 _arguments = arguments_str ,
564564 )
565565
566- chat_message = ChatMessage .from_assistant (
567- text = text ,
568- tool_calls = tool_calls ,
569- meta = {
570- "model" : completion .model ,
571- "index" : choice .index ,
572- "finish_reason" : choice .finish_reason ,
573- "usage" : _serialize_usage (completion .usage ),
574- },
575- )
566+ logprobs = _serialize_object (choice .logprobs ) if choice .logprobs else None
567+ meta = {
568+ "model" : completion .model ,
569+ "index" : choice .index ,
570+ "finish_reason" : choice .finish_reason ,
571+ "usage" : _serialize_object (completion .usage ),
572+ }
573+ if logprobs :
574+ meta ["logprobs" ] = logprobs
575+
576+ chat_message = ChatMessage .from_assistant (text = text , tool_calls = tool_calls , meta = meta )
576577
577578 return chat_message
578579
@@ -610,7 +611,7 @@ def _convert_chat_completion_chunk_to_streaming_chunk(
610611 meta = {
611612 "model" : chunk .model ,
612613 "received_at" : datetime .now ().isoformat (),
613- "usage" : _serialize_usage (chunk .usage ),
614+ "usage" : _serialize_object (chunk .usage ),
614615 },
615616 )
616617
@@ -643,7 +644,7 @@ def _convert_chat_completion_chunk_to_streaming_chunk(
643644 "tool_calls" : choice .delta .tool_calls ,
644645 "finish_reason" : choice .finish_reason ,
645646 "received_at" : datetime .now ().isoformat (),
646- "usage" : _serialize_usage (chunk .usage ),
647+ "usage" : _serialize_object (chunk .usage ),
647648 },
648649 )
649650 return chunk_message
@@ -658,6 +659,23 @@ def _convert_chat_completion_chunk_to_streaming_chunk(
658659 # NOTE: We may need to revisit this if OpenAI allows planning/thinking content before tool calls like
659660 # Anthropic Claude
660661 resolved_index = 0
662+
663+ # Initialize meta dictionary
664+ meta = {
665+ "model" : chunk .model ,
666+ "index" : choice .index ,
667+ "tool_calls" : choice .delta .tool_calls ,
668+ "finish_reason" : choice .finish_reason ,
669+ "received_at" : datetime .now ().isoformat (),
670+ "usage" : _serialize_object (chunk .usage ),
671+ }
672+
673+ # check if logprobs are present
674+ # logprobs are returned only for text content
675+ logprobs = _serialize_object (choice .logprobs ) if choice .logprobs else None
676+ if logprobs :
677+ meta ["logprobs" ] = logprobs
678+
661679 chunk_message = StreamingChunk (
662680 content = choice .delta .content or "" ,
663681 component_info = component_info ,
@@ -666,27 +684,6 @@ def _convert_chat_completion_chunk_to_streaming_chunk(
666684 # and previous_chunks is length 1 then this is the start of text content.
667685 start = len (previous_chunks ) == 1 ,
668686 finish_reason = finish_reason_mapping .get (choice .finish_reason ) if choice .finish_reason else None ,
669- meta = {
670- "model" : chunk .model ,
671- "index" : choice .index ,
672- "tool_calls" : choice .delta .tool_calls ,
673- "finish_reason" : choice .finish_reason ,
674- "received_at" : datetime .now ().isoformat (),
675- "usage" : _serialize_usage (chunk .usage ),
676- },
687+ meta = meta ,
677688 )
678689 return chunk_message
679-
680-
681- def _serialize_usage (usage ):
682- """Convert OpenAI usage object to serializable dict recursively"""
683- if hasattr (usage , "model_dump" ):
684- return usage .model_dump ()
685- elif hasattr (usage , "__dict__" ):
686- return {k : _serialize_usage (v ) for k , v in usage .__dict__ .items () if not k .startswith ("_" )}
687- elif isinstance (usage , dict ):
688- return {k : _serialize_usage (v ) for k , v in usage .items ()}
689- elif isinstance (usage , list ):
690- return [_serialize_usage (item ) for item in usage ]
691- else :
692- return usage