OpenAI - update semantic conventions to the latest version, write logs based events instead of span events (#2925)

This commit is contained in:
Liudmila Molkova 2024-10-30 14:07:24 -07:00 committed by GitHub
parent 54c7ee80ab
commit 07c3324a3b
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
19 changed files with 2812 additions and 330 deletions

View File

@ -0,0 +1,15 @@
# Changelog
All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## Unreleased
- Update OpenAI instrumentation to Semantic Conventions v1.28.0: add new attributes
and switch prompts and completions to log-based events.
([#2925](https://github.com/open-telemetry/opentelemetry-python-contrib/pull/2925))
- Initial OpenAI instrumentation
([#2759](https://github.com/open-telemetry/opentelemetry-python-contrib/pull/2759))

View File

@ -1,12 +1,12 @@
OpenTelemetry OpenAI Instrumentation
====================================
====================================
|pypi|
.. |pypi| image:: https://badge.fury.io/py/opentelemetry-instrumentation-openai-v2.svg
:target: https://pypi.org/project/opentelemetry-instrumentation-openai-v2/
Instrumentation with OpenAI that supports the openai library and is
Instrumentation with OpenAI that supports the OpenAI library and is
specified to trace_integration using 'OpenAI'.

View File

@ -24,7 +24,7 @@ Usage
.. code:: python
from openai import OpenAI
from opentelemetry.instrumentation.openai import OpenAIInstrumentor
from opentelemetry.instrumentation.openai_v2 import OpenAIInstrumentor
OpenAIInstrumentor().instrument()
@ -44,8 +44,10 @@ from typing import Collection
from wrapt import wrap_function_wrapper
from opentelemetry._events import get_event_logger
from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
from opentelemetry.instrumentation.openai_v2.package import _instruments
from opentelemetry.instrumentation.openai_v2.utils import is_content_enabled
from opentelemetry.instrumentation.utils import unwrap
from opentelemetry.semconv.schemas import Schemas
from opentelemetry.trace import get_tracer
@ -64,15 +66,25 @@ class OpenAIInstrumentor(BaseInstrumentor):
__name__,
"",
tracer_provider,
schema_url=Schemas.V1_27_0.value,
schema_url=Schemas.V1_28_0.value,
)
event_logger_provider = kwargs.get("event_logger_provider")
event_logger = get_event_logger(
__name__,
"",
schema_url=Schemas.V1_28_0.value,
event_logger_provider=event_logger_provider,
)
wrap_function_wrapper(
module="openai.resources.chat.completions",
name="Completions.create",
wrapper=chat_completions_create(tracer),
wrapper=chat_completions_create(
tracer, event_logger, is_content_enabled()
),
)
def _uninstrument(self, **kwargs):
import openai
import openai # pylint: disable=import-outside-toplevel
unwrap(openai.resources.chat.completions.Completions, "create")

View File

@ -13,8 +13,11 @@
# limitations under the License.
import json
from typing import Optional
from openai import Stream
from opentelemetry._events import Event, EventLogger
from opentelemetry.semconv._incubating.attributes import (
gen_ai_attributes as GenAIAttributes,
)
@ -25,97 +28,73 @@ from opentelemetry.trace import Span, SpanKind, Tracer
from opentelemetry.trace.status import Status, StatusCode
from .utils import (
extract_content,
extract_tools_prompt,
choice_to_event,
get_llm_request_attributes,
is_streaming,
set_event_completion,
set_event_prompt,
message_to_event,
set_span_attribute,
silently_fail,
)
def chat_completions_create(tracer: Tracer):
def chat_completions_create(
tracer: Tracer, event_logger: EventLogger, capture_content: bool
):
"""Wrap the `create` method of the `ChatCompletion` class to trace it."""
def traced_method(wrapped, instance, args, kwargs):
llm_prompts = []
span_attributes = {**get_llm_request_attributes(kwargs, instance)}
for item in kwargs.get("messages", []):
tools_prompt = extract_tools_prompt(item)
llm_prompts.append(tools_prompt if tools_prompt else item)
span_attributes = {**get_llm_request_attributes(kwargs)}
span_name = f"{span_attributes[GenAIAttributes.GEN_AI_OPERATION_NAME]} {span_attributes[GenAIAttributes.GEN_AI_REQUEST_MODEL]}"
with tracer.start_as_current_span(
name=span_name,
kind=SpanKind.CLIENT,
attributes=span_attributes,
end_on_exit=False,
) as span:
if span.is_recording():
for message in kwargs.get("messages", []):
event_logger.emit(
message_to_event(message, capture_content)
)
span = tracer.start_span(
name=span_name, kind=SpanKind.CLIENT, attributes=span_attributes
)
if span.is_recording():
_set_input_attributes(span, span_attributes)
set_event_prompt(span, json.dumps(llm_prompts))
try:
result = wrapped(*args, **kwargs)
if is_streaming(kwargs):
return StreamWrapper(
result, span, event_logger, capture_content
)
try:
result = wrapped(*args, **kwargs)
if is_streaming(kwargs):
return StreamWrapper(
result,
span,
function_call=kwargs.get("functions") is not None,
tool_calls=kwargs.get("tools") is not None,
)
else:
if span.is_recording():
_set_response_attributes(span, result)
_set_response_attributes(
span, result, event_logger, capture_content
)
span.end()
return result
except Exception as error:
span.set_status(Status(StatusCode.ERROR, str(error)))
if span.is_recording():
span.set_attribute(
ErrorAttributes.ERROR_TYPE, type(error).__qualname__
)
span.end()
raise
except Exception as error:
span.set_status(Status(StatusCode.ERROR, str(error)))
if span.is_recording():
span.set_attribute(
ErrorAttributes.ERROR_TYPE, type(error).__qualname__
)
span.end()
raise
return traced_method
@silently_fail
def _set_input_attributes(span, attributes):
for field, value in attributes.items():
set_span_attribute(span, field, value)
@silently_fail
def _set_response_attributes(span, result):
def _set_response_attributes(
span, result, event_logger: EventLogger, capture_content: bool
):
set_span_attribute(
span, GenAIAttributes.GEN_AI_RESPONSE_MODEL, result.model
)
if getattr(result, "choices", None):
choices = result.choices
responses = [
{
"role": (
choice.message.role
if choice.message and choice.message.role
else "assistant"
),
"content": extract_content(choice),
**(
{
"content_filter_results": choice[
"content_filter_results"
]
}
if "content_filter_results" in choice
else {}
),
}
for choice in choices
]
for choice in choices:
event_logger.emit(choice_to_event(choice, capture_content))
finish_reasons = []
for choice in choices:
finish_reasons.append(choice.finish_reason or "error")
@ -125,11 +104,17 @@ def _set_response_attributes(span, result):
GenAIAttributes.GEN_AI_RESPONSE_FINISH_REASONS,
finish_reasons,
)
set_event_completion(span, responses)
if getattr(result, "id", None):
set_span_attribute(span, GenAIAttributes.GEN_AI_RESPONSE_ID, result.id)
if getattr(result, "service_tier", None):
set_span_attribute(
span,
GenAIAttributes.GEN_AI_OPENAI_REQUEST_SERVICE_TIER,
result.service_tier,
)
# Get the usage
if getattr(result, "usage", None):
set_span_attribute(
@ -144,27 +129,65 @@ def _set_response_attributes(span, result):
)
class ToolCallBuffer:
    """Accumulates the streamed fragments of a single tool call.

    Streaming responses deliver a tool call's arguments piecewise; the
    call's identity (index, id, function name) is recorded once and the
    argument fragments are collected so they can be joined at the end.
    """

    def __init__(self, index, tool_call_id, function_name):
        self.index = index
        self.tool_call_id = tool_call_id
        self.function_name = function_name
        self.arguments = list()

    def append_arguments(self, arguments):
        """Record one streamed argument fragment."""
        self.arguments += [arguments]
class ChoiceBuffer:
    """Accumulates one streamed choice: finish reason, text, tool calls."""

    def __init__(self, index):
        self.index = index
        self.finish_reason = None
        self.text_content = []
        self.tool_calls_buffers = []

    def append_text_content(self, content):
        """Record one streamed text fragment for this choice."""
        self.text_content.append(content)

    def append_tool_call(self, tool_call):
        """Route a streamed tool-call delta into its per-index buffer."""
        idx = tool_call.index
        buffers = self.tool_calls_buffers
        # Pad with placeholders until a slot exists for this index.
        while len(buffers) <= idx:
            buffers.append(None)

        if buffers[idx] is None:
            buffers[idx] = ToolCallBuffer(
                idx, tool_call.id, tool_call.function.name
            )
        buffers[idx].append_arguments(tool_call.function.arguments)
class StreamWrapper:
span: Span
response_id: str = ""
response_model: str = ""
response_id: Optional[str] = None
response_model: Optional[str] = None
service_tier: Optional[str] = None
finish_reasons: list = []
prompt_tokens: Optional[int] = 0
completion_tokens: Optional[int] = 0
def __init__(
self,
stream,
span,
prompt_tokens=0,
function_call=False,
tool_calls=False,
stream: Stream,
span: Span,
event_logger: EventLogger,
capture_content: bool,
):
self.stream = stream
self.span = span
self.prompt_tokens = prompt_tokens
self.function_call = function_call
self.tool_calls = tool_calls
self.result_content = []
self.completion_tokens = 0
self.choice_buffers = []
self._span_started = False
self.capture_content = capture_content
self.event_logger = event_logger
self.setup()
def setup(self):
@ -197,16 +220,62 @@ class StreamWrapper:
GenAIAttributes.GEN_AI_USAGE_OUTPUT_TOKENS,
self.completion_tokens,
)
set_event_completion(
set_span_attribute(
self.span,
[
{
"role": "assistant",
"content": "".join(self.result_content),
}
],
GenAIAttributes.GEN_AI_OPENAI_RESPONSE_SERVICE_TIER,
self.service_tier,
)
set_span_attribute(
self.span,
GenAIAttributes.GEN_AI_RESPONSE_FINISH_REASONS,
self.finish_reasons,
)
for idx, choice in enumerate(self.choice_buffers):
message = {"role": "assistant"}
if self.capture_content and choice.text_content:
message["content"] = "".join(choice.text_content)
if choice.tool_calls_buffers:
tool_calls = []
for tool_call in choice.tool_calls_buffers:
function = {"name": tool_call.function_name}
if self.capture_content:
function["arguments"] = "".join(
tool_call.arguments
)
tool_call_dict = {
"id": tool_call.tool_call_id,
"type": "function",
"function": function,
}
tool_calls.append(tool_call_dict)
message["tool_calls"] = tool_calls
body = {
"index": idx,
"finish_reason": choice.finish_reason or "error",
"message": message,
}
event_attributes = {
GenAIAttributes.GEN_AI_SYSTEM: GenAIAttributes.GenAiSystemValues.OPENAI.value
}
# this span is not current, so we need to manually set the context on event
span_ctx = self.span.get_span_context()
self.event_logger.emit(
Event(
name="gen_ai.choice",
attributes=event_attributes,
body=body,
trace_id=span_ctx.trace_id,
span_id=span_ctx.span_id,
trace_flags=span_ctx.trace_flags,
)
)
self.span.end()
self._span_started = False
@ -225,6 +294,10 @@ class StreamWrapper:
self.cleanup()
return False # Propagate the exception
def close(self):
    """Close the underlying response stream, then finalize instrumentation.

    cleanup() ends the span and emits any buffered events exactly once.
    """
    self.stream.close()
    self.cleanup()
def __iter__(self):
    """Return self so the wrapper can be iterated like the raw stream."""
    return self
@ -258,50 +331,41 @@ class StreamWrapper:
if getattr(chunk, "id", None):
self.response_id = chunk.id
def set_response_service_tier(self, chunk):
    """Capture the service tier from the first chunk that reports one."""
    # Keep the first observed value; later chunks must not overwrite it.
    if self.service_tier:
        return
    if getattr(chunk, "service_tier", None):
        self.service_tier = chunk.service_tier
def build_streaming_response(self, chunk):
if getattr(chunk, "choices", None) is None:
return
choices = chunk.choices
content = []
if not self.function_call and not self.tool_calls:
for choice in choices:
if choice.delta and choice.delta.content is not None:
content = [choice.delta.content]
elif self.function_call:
for choice in choices:
if (
choice.delta
and choice.delta.function_call is not None
and choice.delta.function_call.arguments is not None
):
content = [choice.delta.function_call.arguments]
elif self.tool_calls:
for choice in choices:
if choice.delta and choice.delta.tool_calls is not None:
toolcalls = choice.delta.tool_calls
content = []
for tool_call in toolcalls:
if (
tool_call
and tool_call.function is not None
and tool_call.function.arguments is not None
):
content.append(tool_call.function.arguments)
finish_reasons = []
for choice in choices:
finish_reasons.append(choice.finish_reason or "error")
if not choice.delta:
continue
set_span_attribute(
self.span,
GenAIAttributes.GEN_AI_RESPONSE_FINISH_REASONS,
finish_reasons,
)
if content:
self.result_content.append(content[0])
# make sure we have enough choice buffers
for idx in range(len(self.choice_buffers), choice.index + 1):
self.choice_buffers.append(ChoiceBuffer(idx))
if choice.finish_reason:
self.choice_buffers[
choice.index
].finish_reason = choice.finish_reason
if choice.delta.content is not None:
self.choice_buffers[choice.index].append_text_content(
choice.delta.content
)
if choice.delta.tool_calls is not None:
for tool_call in choice.delta.tool_calls:
self.choice_buffers[choice.index].append_tool_call(
tool_call
)
def set_usage(self, chunk):
if getattr(chunk, "usage", None):
@ -311,5 +375,6 @@ class StreamWrapper:
def process_chunk(self, chunk):
    """Fold one streamed chunk into the wrapper's accumulated state.

    Each helper is tolerant of the field being absent from the chunk,
    so every chunk is passed through the full pipeline.
    """
    self.set_response_id(chunk)
    self.set_response_model(chunk)
    self.set_response_service_tier(chunk)
    self.build_streaming_response(chunk)
    self.set_usage(chunk)

View File

@ -12,100 +12,148 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import logging
from os import environ
from typing import Optional, Union
from urllib.parse import urlparse
from httpx import URL
from openai import NOT_GIVEN
from opentelemetry._events import Event
from opentelemetry.semconv._incubating.attributes import (
gen_ai_attributes as GenAIAttributes,
)
from opentelemetry.semconv._incubating.attributes import (
server_attributes as ServerAttributes,
)
OTEL_INSTRUMENTATION_OPENAI_CAPTURE_MESSAGE_CONTENT = (
"OTEL_INSTRUMENTATION_OPENAI_CAPTURE_MESSAGE_CONTENT"
)
def silently_fail(func):
"""
A decorator that catches exceptions thrown by the decorated function and logs them as warnings.
"""
def is_content_enabled() -> bool:
capture_content = environ.get(
OTEL_INSTRUMENTATION_OPENAI_CAPTURE_MESSAGE_CONTENT, "false"
)
logger = logging.getLogger(func.__module__)
def wrapper(*args, **kwargs):
try:
return func(*args, **kwargs)
except Exception as exception:
logger.warning(
"Failed to execute %s, error: %s",
func.__name__,
str(exception),
)
return wrapper
return capture_content.lower() == "true"
def extract_content(choice):
if getattr(choice, "message", None) is None:
return ""
# Check if choice.message exists and has a content attribute
message = choice.message
if getattr(message, "content", None):
return choice.message.content
# Check if choice.message has tool_calls and extract information accordingly
elif getattr(message, "tool_calls", None):
result = [
{
"id": tool_call.id,
"type": tool_call.type,
"function": {
"name": tool_call.function.name,
"arguments": tool_call.function.arguments,
},
}
for tool_call in choice.message.tool_calls
]
return result
# Check if choice.message has a function_call and extract information accordingly
elif getattr(message, "function_call", None):
return {
"name": choice.message.function_call.name,
"arguments": choice.message.function_call.arguments,
}
# Return an empty string if none of the above conditions are met
else:
return ""
def extract_tools_prompt(item):
tool_calls = getattr(item, "tool_calls", None)
def extract_tool_calls(item, capture_content):
tool_calls = get_property_value(item, "tool_calls")
if tool_calls is None:
return
return None
calls = []
for tool_call in tool_calls:
tool_call_dict = {
"id": getattr(tool_call, "id", ""),
"type": getattr(tool_call, "type", ""),
}
tool_call_dict = {}
call_id = get_property_value(tool_call, "id")
if call_id:
tool_call_dict["id"] = call_id
tool_type = get_property_value(tool_call, "type")
if tool_type:
tool_call_dict["type"] = tool_type
func = get_property_value(tool_call, "function")
if func:
tool_call_dict["function"] = {}
name = get_property_value(func, "name")
if name:
tool_call_dict["function"]["name"] = name
arguments = get_property_value(func, "arguments")
if capture_content and arguments:
if isinstance(arguments, str):
arguments = arguments.replace("\n", "")
tool_call_dict["function"]["arguments"] = arguments
if hasattr(tool_call, "function"):
tool_call_dict["function"] = {
"name": getattr(tool_call.function, "name", ""),
"arguments": getattr(tool_call.function, "arguments", ""),
}
calls.append(tool_call_dict)
return calls
def set_event_prompt(span, prompt):
span.add_event(
name="gen_ai.content.prompt",
attributes={
GenAIAttributes.GEN_AI_PROMPT: prompt,
},
def set_server_address_and_port(client_instance, attributes):
    """Populate server.address / server.port attributes from the client's base URL.

    Handles both httpx.URL objects and plain-string base URLs. The port is
    only recorded when it is a positive value other than the HTTPS
    default (443).
    """
    base_url = getattr(
        getattr(client_instance, "_client", None), "base_url", None
    )
    if not base_url:
        return

    port = -1
    if isinstance(base_url, URL):
        attributes[ServerAttributes.SERVER_ADDRESS] = base_url.host
        port = base_url.port
    elif isinstance(base_url, str):
        parsed = urlparse(base_url)
        attributes[ServerAttributes.SERVER_ADDRESS] = parsed.hostname
        port = parsed.port

    # Skip None/zero/negative ports and the implicit HTTPS default.
    if port and port > 0 and port != 443:
        attributes[ServerAttributes.SERVER_PORT] = port
def get_property_value(obj, property_name):
    """Read *property_name* from a dict key or an object attribute.

    Returns None when the key/attribute is absent, so callers can treat
    dict-shaped and object-shaped messages uniformly.
    """
    if isinstance(obj, dict):
        value = obj.get(property_name, None)
    else:
        value = getattr(obj, property_name, None)
    return value
def message_to_event(message, capture_content):
    """Convert a request chat message into a ``gen_ai.{role}.message`` log event.

    Message content is only attached to the event body when
    *capture_content* is enabled. Assistant messages additionally carry
    their tool calls; tool messages carry the tool_call_id they answer.
    """
    attributes = {
        GenAIAttributes.GEN_AI_SYSTEM: GenAIAttributes.GenAiSystemValues.OPENAI.value
    }
    role = get_property_value(message, "role")
    content = get_property_value(message, "content")

    body = {}
    if capture_content and content:
        body["content"] = content
    if role == "assistant":
        tool_calls = extract_tool_calls(message, capture_content)
        if tool_calls:
            # Merge rather than replace the body: previously the whole dict
            # was overwritten here, silently dropping captured "content"
            # when an assistant message had both content and tool calls.
            body["tool_calls"] = tool_calls
    elif role == "tool":
        tool_call_id = get_property_value(message, "tool_call_id")
        if tool_call_id:
            body["id"] = tool_call_id

    return Event(
        name=f"gen_ai.{role}.message",
        attributes=attributes,
        body=body if body else None,
    )
def choice_to_event(choice, capture_content):
    """Convert a response choice into a ``gen_ai.choice`` log event.

    The event body always carries the choice index and finish reason;
    the message (role, tool calls, and — when capture is enabled —
    content) is attached only when the choice has one.
    """
    attributes = {
        GenAIAttributes.GEN_AI_SYSTEM: GenAIAttributes.GenAiSystemValues.OPENAI.value
    }

    body = {
        "index": choice.index,
        "finish_reason": choice.finish_reason or "error",
    }

    if choice.message:
        # Inside this branch the message is known to be truthy, so the
        # role check reduces to "role or None" (empty roles become None).
        message = {"role": choice.message.role or None}
        tool_calls = extract_tool_calls(choice.message, capture_content)
        if tool_calls:
            message["tool_calls"] = tool_calls
        content = get_property_value(choice.message, "content")
        if capture_content and content:
            message["content"] = content
        body["message"] = message

    return Event(
        name="gen_ai.choice",
        attributes=attributes,
        body=body,
    )
@ -114,15 +162,6 @@ def set_span_attributes(span, attributes: dict):
set_span_attribute(span, field, value)
def set_event_completion(span, result_content):
span.add_event(
name="gen_ai.content.completion",
attributes={
GenAIAttributes.GEN_AI_COMPLETION: json.dumps(result_content),
},
)
def set_span_attribute(span, name, value):
if non_numerical_value_is_set(value) is False:
return
@ -140,14 +179,13 @@ def non_numerical_value_is_set(value: Optional[Union[bool, str]]):
def get_llm_request_attributes(
kwargs,
client_instance,
operation_name=GenAIAttributes.GenAiOperationNameValues.CHAT.value,
):
attributes = {
GenAIAttributes.GEN_AI_OPERATION_NAME: operation_name,
GenAIAttributes.GEN_AI_SYSTEM: GenAIAttributes.GenAiSystemValues.OPENAI.value,
GenAIAttributes.GEN_AI_REQUEST_MODEL: kwargs.get(
"model", "gpt-3.5-turbo"
),
GenAIAttributes.GEN_AI_REQUEST_MODEL: kwargs.get("model"),
GenAIAttributes.GEN_AI_REQUEST_TEMPERATURE: kwargs.get("temperature"),
GenAIAttributes.GEN_AI_REQUEST_TOP_P: kwargs.get("p")
or kwargs.get("top_p"),
@ -158,7 +196,17 @@ def get_llm_request_attributes(
GenAIAttributes.GEN_AI_REQUEST_FREQUENCY_PENALTY: kwargs.get(
"frequency_penalty"
),
GenAIAttributes.GEN_AI_OPENAI_REQUEST_RESPONSE_FORMAT: kwargs.get(
"response_format"
),
GenAIAttributes.GEN_AI_OPENAI_REQUEST_SEED: kwargs.get("seed"),
}
set_server_address_and_port(client_instance, attributes)
service_tier = kwargs.get("service_tier")
attributes[GenAIAttributes.GEN_AI_OPENAI_RESPONSE_SERVICE_TIER] = (
service_tier if service_tier != "auto" else None
)
# filter out None values
return {k: v for k, v in attributes.items() if v is not None}

View File

@ -1,4 +1,4 @@
openai==1.26.0
openai==1.26.0
pydantic==2.8.2
Deprecated==1.2.14
importlib-metadata==6.11.0

View File

@ -0,0 +1,75 @@
interactions:
- request:
body: '{"messages": [{"role": "user", "content": "Say this is a test"}], "model":
"this-model-does-not-exist"}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate
connection:
- keep-alive
content-length:
- '103'
content-type:
- application/json
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.52.2
x-stainless-arch:
- other:amd64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- Windows
x-stainless-package-version:
- 1.52.2
x-stainless-retry-count:
- '2'
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.12.7
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: "{\n \"error\": {\n \"message\": \"The model `this-model-does-not-exist`
does not exist or you do not have access to it.\",\n \"type\": \"invalid_request_error\",\n
\ \"param\": null,\n \"code\": \"model_not_found\"\n }\n}\n"
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 8d88b04c2c5db9eb-SEA
Connection:
- keep-alive
Content-Type:
- application/json; charset=utf-8
Date:
- Sat, 26 Oct 2024 07:21:17 GMT
Server:
- cloudflare
Set-Cookie: test_set_cookie
Transfer-Encoding:
- chunked
X-Content-Type-Options:
- nosniff
alt-svc:
- h3=":443"; ma=86400
content-length:
- '231'
openai-organization: test_organization
strict-transport-security:
- max-age=31536000; includeSubDomains; preload
vary:
- Origin
x-request-id:
- req_8529656422f0360e8bcba8c2b8fe34e9
status:
code: 404
message: Not Found
version: 1

View File

@ -0,0 +1,99 @@
interactions:
- request:
body: '{"messages": [{"role": "user", "content": "Say this is a test"}], "model":
"gpt-4o-mini", "max_tokens": 50, "seed": 42, "service_tier": "auto", "stream":
false, "temperature": 0.5}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate
connection:
- keep-alive
content-length:
- '180'
content-type:
- application/json
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.52.2
x-stainless-arch:
- other:amd64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- Windows
x-stainless-package-version:
- 1.52.2
x-stainless-retry-count:
- '0'
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.12.7
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: "{\n \"id\": \"chatcmpl-AMTlCEj20ZcsgWKZt8EizFMDItWNf\",\n \"object\":
\"chat.completion\",\n \"created\": 1729920978,\n \"model\": \"gpt-4o-mini-2024-07-18\",\n
\ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\":
\"assistant\",\n \"content\": \"This is a test. How can I assist you
further?\",\n \"refusal\": null\n },\n \"logprobs\": null,\n
\ \"finish_reason\": \"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\":
12,\n \"completion_tokens\": 12,\n \"total_tokens\": 24,\n \"prompt_tokens_details\":
{\n \"cached_tokens\": 0\n },\n \"completion_tokens_details\":
{\n \"reasoning_tokens\": 0\n }\n },\n \"service_tier\": \"default\",\n
\ \"system_fingerprint\": \"fp_f59a81427f\"\n}\n"
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 8d881682197c7571-SEA
Connection:
- keep-alive
Content-Type:
- application/json
Date:
- Sat, 26 Oct 2024 05:36:18 GMT
Server:
- cloudflare
Set-Cookie: test_set_cookie
Transfer-Encoding:
- chunked
X-Content-Type-Options:
- nosniff
access-control-expose-headers:
- X-Request-ID
alt-svc:
- h3=":443"; ma=86400
content-length:
- '697'
openai-organization: test_organization
openai-processing-ms:
- '275'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=31536000; includeSubDomains; preload
x-ratelimit-limit-requests:
- '200'
x-ratelimit-limit-tokens:
- '60000'
x-ratelimit-remaining-requests:
- '195'
x-ratelimit-remaining-tokens:
- '59944'
x-ratelimit-reset-requests:
- 32m17.492s
x-ratelimit-reset-tokens:
- 56ms
x-request-id:
- req_181075e8f861d6685fe1ae5d4bfc9b25
status:
code: 200
message: OK
version: 1

View File

@ -0,0 +1,100 @@
interactions:
- request:
body: '{"messages": [{"role": "user", "content": "Say this is a test"}], "model":
"gpt-4o-mini", "n": 2, "stream": false}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate
connection:
- keep-alive
content-length:
- '114'
content-type:
- application/json
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.52.2
x-stainless-arch:
- other:amd64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- Windows
x-stainless-package-version:
- 1.52.2
x-stainless-retry-count:
- '0'
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.12.7
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: "{\n \"id\": \"chatcmpl-AMT4dWD9gi2PyDcXBK10harskeREO\",\n \"object\":
\"chat.completion\",\n \"created\": 1729918339,\n \"model\": \"gpt-4o-mini-2024-07-18\",\n
\ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\":
\"assistant\",\n \"content\": \"This is a test.\",\n \"refusal\":
null\n },\n \"logprobs\": null,\n \"finish_reason\": \"stop\"\n
\ },\n {\n \"index\": 1,\n \"message\": {\n \"role\":
\"assistant\",\n \"content\": \"This is a test. How can I assist you
further?\",\n \"refusal\": null\n },\n \"logprobs\": null,\n
\ \"finish_reason\": \"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\":
12,\n \"completion_tokens\": 17,\n \"total_tokens\": 29,\n \"prompt_tokens_details\":
{\n \"cached_tokens\": 0\n },\n \"completion_tokens_details\":
{\n \"reasoning_tokens\": 0\n }\n },\n \"system_fingerprint\": \"fp_f59a81427f\"\n}\n"
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 8d87d6120a899357-SEA
Connection:
- keep-alive
Content-Type:
- application/json
Date:
- Sat, 26 Oct 2024 04:52:19 GMT
Server:
- cloudflare
Set-Cookie: test_set_cookie
Transfer-Encoding:
- chunked
X-Content-Type-Options:
- nosniff
access-control-expose-headers:
- X-Request-ID
alt-svc:
- h3=":443"; ma=86400
content-length:
- '872'
openai-organization: test_organization
openai-processing-ms:
- '664'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=31536000; includeSubDomains; preload
x-ratelimit-limit-requests:
- '200'
x-ratelimit-limit-tokens:
- '60000'
x-ratelimit-remaining-requests:
- '199'
x-ratelimit-remaining-tokens:
- '59962'
x-ratelimit-reset-requests:
- 7m12s
x-ratelimit-reset-tokens:
- 38ms
x-request-id:
- req_d8b7a2507994f22fe0d4511c7e0a3bdc
status:
code: 200
message: OK
version: 1

View File

@ -0,0 +1,491 @@
interactions:
- request:
body: '{"messages": [{"role": "system", "content": "You''re a helpful assistant."},
{"role": "user", "content": "What''s the weather in Seattle and San Francisco
today?"}], "model": "gpt-4o-mini", "n": 2, "stream": true, "stream_options":
{"include_usage": true}}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate
connection:
- keep-alive
content-length:
- '254'
content-type:
- application/json
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.52.2
x-stainless-arch:
- other:amd64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- Windows
x-stainless-package-version:
- 1.52.2
x-stainless-retry-count:
- '0'
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.12.7
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: 'data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":0,"delta":{"role":"assistant","content":"","refusal":null},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":0,"delta":{"content":"I''m"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"role":"assistant","content":"","refusal":null},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"I''m"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":0,"delta":{"content":"
unable"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"
sorry"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":0,"delta":{"content":"
to"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":","},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":0,"delta":{"content":"
provide"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"
but"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":0,"delta":{"content":"
real"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"
I"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":0,"delta":{"content":"-time"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"
can''t"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":0,"delta":{"content":"
weather"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"
provide"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":0,"delta":{"content":"
updates"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"
real"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"-time"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":0,"delta":{"content":"
However"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"
weather"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":0,"delta":{"content":","},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"
updates"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":0,"delta":{"content":"
you"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"
as"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":0,"delta":{"content":"
can"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"
my"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":0,"delta":{"content":"
easily"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"
training"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":0,"delta":{"content":"
check"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"
data"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":0,"delta":{"content":"
the"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"
goes"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":0,"delta":{"content":"
current"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"
up"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":0,"delta":{"content":"
weather"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":0,"delta":{"content":"
in"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":0,"delta":{"content":"
Seattle"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"
to"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"
October"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"
"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":0,"delta":{"content":"
and"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":0,"delta":{"content":"
San"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":0,"delta":{"content":"
Francisco"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"202"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"1"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"
and"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":0,"delta":{"content":"
using"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"
I"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":0,"delta":{"content":"
weather"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":0,"delta":{"content":"
websites"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":0,"delta":{"content":"
like"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":0,"delta":{"content":"
the"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"
don''t"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"
have"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"
access"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"
to"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":0,"delta":{"content":"
Weather"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"
live"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":0,"delta":{"content":"
Channel"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":0,"delta":{"content":","},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"
data"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"."},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":0,"delta":{"content":"
Acc"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":0,"delta":{"content":"u"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":0,"delta":{"content":"Weather"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"
I"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"
recommend"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"
checking"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":0,"delta":{"content":","},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"
a"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"
reliable"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"
weather"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"
website"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"
or"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"
using"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"
a"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":0,"delta":{"content":"
or"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":0,"delta":{"content":"
a"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":0,"delta":{"content":"
weather"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":0,"delta":{"content":"
app"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":0,"delta":{"content":"
on"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":0,"delta":{"content":"
your"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":0,"delta":{"content":"
smartphone"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"
weather"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"
app"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"
for"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"
the"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"
most"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"
current"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"
information"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"
on"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"
the"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"
weather"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"
in"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"
Seattle"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"
and"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"
San"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"
Francisco"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{"content":"."},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":1,"delta":{},"logprobs":null,"finish_reason":"stop"}],"usage":null}
data: {"id":"chatcmpl-AMUBWEnUmLx8XIYS8oNiZopa2WpJf","object":"chat.completion.chunk","created":1729922610,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[],"usage":{"prompt_tokens":26,"completion_tokens":100,"total_tokens":126,"prompt_tokens_details":{"cached_tokens":0},"completion_tokens_details":{"reasoning_tokens":0}}}
data: [DONE]
'
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 8d883e57eb7227f9-SEA
Connection:
- keep-alive
Content-Type:
- text/event-stream; charset=utf-8
Date:
- Sat, 26 Oct 2024 06:03:30 GMT
Server:
- cloudflare
Set-Cookie: test_set_cookie
Transfer-Encoding:
- chunked
X-Content-Type-Options:
- nosniff
access-control-expose-headers:
- X-Request-ID
alt-svc:
- h3=":443"; ma=86400
openai-organization: test_organization
openai-processing-ms:
- '216'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=31536000; includeSubDomains; preload
x-ratelimit-limit-requests:
- '200'
x-ratelimit-limit-tokens:
- '60000'
x-ratelimit-remaining-requests:
- '194'
x-ratelimit-remaining-tokens:
- '59945'
x-ratelimit-reset-requests:
- 41m5.83s
x-ratelimit-reset-tokens:
- 55ms
x-request-id:
- req_2cafe46d3edfcc0f61e4885c27cca46e
status:
code: 200
message: OK
version: 1

View File

@ -0,0 +1,155 @@
interactions:
- request:
body: '{"messages": [{"role": "system", "content": "You''re a helpful assistant."},
{"role": "user", "content": "What''s the weather in Seattle and San Francisco
today?"}], "model": "gpt-4o-mini", "parallel_tool_calls": true, "stream": true,
"stream_options": {"include_usage": true}, "tool_choice": "auto", "tools": [{"type":
"function", "function": {"name": "get_current_weather", "description": "Get
the current weather in a given location", "parameters": {"type": "object", "properties":
{"location": {"type": "string", "description": "The city and state, e.g. Boston,
MA"}}, "required": ["location"], "additionalProperties": false}}}]}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate
connection:
- keep-alive
content-length:
- '631'
content-type:
- application/json
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.52.2
x-stainless-arch:
- other:amd64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- Windows
x-stainless-package-version:
- 1.52.2
x-stainless-retry-count:
- '0'
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.12.7
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: 'data: {"id":"chatcmpl-AMUs4TXmQG6Rbn1KXpwESVDmy6eoL","object":"chat.completion.chunk","created":1729925248,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","usage":null,"choices":[{"index":0,"delta":{"role":"assistant","content":null},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-AMUs4TXmQG6Rbn1KXpwESVDmy6eoL","object":"chat.completion.chunk","created":1729925248,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"id":"call_yy3bBunEUS0C3HGZECG9EGCj","type":"function","function":{"name":"get_current_weather","arguments":""}}]},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-AMUs4TXmQG6Rbn1KXpwESVDmy6eoL","object":"chat.completion.chunk","created":1729925248,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"{\"lo"}}]},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-AMUs4TXmQG6Rbn1KXpwESVDmy6eoL","object":"chat.completion.chunk","created":1729925248,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"catio"}}]},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-AMUs4TXmQG6Rbn1KXpwESVDmy6eoL","object":"chat.completion.chunk","created":1729925248,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"n\":
\"S"}}]},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-AMUs4TXmQG6Rbn1KXpwESVDmy6eoL","object":"chat.completion.chunk","created":1729925248,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"eatt"}}]},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-AMUs4TXmQG6Rbn1KXpwESVDmy6eoL","object":"chat.completion.chunk","created":1729925248,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"le,
W"}}]},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-AMUs4TXmQG6Rbn1KXpwESVDmy6eoL","object":"chat.completion.chunk","created":1729925248,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"A\"}"}}]},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-AMUs4TXmQG6Rbn1KXpwESVDmy6eoL","object":"chat.completion.chunk","created":1729925248,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"id":"call_ug3IE7qMdaP4tBJwJHacc7GO","type":"function","function":{"name":"get_current_weather","arguments":""}}]},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-AMUs4TXmQG6Rbn1KXpwESVDmy6eoL","object":"chat.completion.chunk","created":1729925248,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"{\"lo"}}]},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-AMUs4TXmQG6Rbn1KXpwESVDmy6eoL","object":"chat.completion.chunk","created":1729925248,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"catio"}}]},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-AMUs4TXmQG6Rbn1KXpwESVDmy6eoL","object":"chat.completion.chunk","created":1729925248,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"n\":
\"S"}}]},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-AMUs4TXmQG6Rbn1KXpwESVDmy6eoL","object":"chat.completion.chunk","created":1729925248,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"an
F"}}]},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-AMUs4TXmQG6Rbn1KXpwESVDmy6eoL","object":"chat.completion.chunk","created":1729925248,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"ranci"}}]},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-AMUs4TXmQG6Rbn1KXpwESVDmy6eoL","object":"chat.completion.chunk","created":1729925248,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"sco,
C"}}]},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-AMUs4TXmQG6Rbn1KXpwESVDmy6eoL","object":"chat.completion.chunk","created":1729925248,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"A\"}"}}]},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-AMUs4TXmQG6Rbn1KXpwESVDmy6eoL","object":"chat.completion.chunk","created":1729925248,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"tool_calls"}],"usage":null}
data: {"id":"chatcmpl-AMUs4TXmQG6Rbn1KXpwESVDmy6eoL","object":"chat.completion.chunk","created":1729925248,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[],"usage":{"prompt_tokens":75,"completion_tokens":51,"total_tokens":126,"prompt_tokens_details":{"cached_tokens":0},"completion_tokens_details":{"reasoning_tokens":0}}}
data: [DONE]
'
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 8d887ec3b9047639-SEA
Connection:
- keep-alive
Content-Type:
- text/event-stream; charset=utf-8
Date:
- Sat, 26 Oct 2024 06:47:29 GMT
Server:
- cloudflare
Set-Cookie: test_set_cookie
Transfer-Encoding:
- chunked
X-Content-Type-Options:
- nosniff
access-control-expose-headers:
- X-Request-ID
alt-svc:
- h3=":443"; ma=86400
openai-organization: test_organization
openai-processing-ms:
- '723'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=31536000; includeSubDomains; preload
x-ratelimit-limit-requests:
- '200'
x-ratelimit-limit-tokens:
- '60000'
x-ratelimit-remaining-requests:
- '197'
x-ratelimit-remaining-tokens:
- '59961'
x-ratelimit-reset-requests:
- 18m43.156s
x-ratelimit-reset-tokens:
- 39ms
x-request-id:
- req_81b3cd6dbe38ffddcdca90c699f71f84
status:
code: 200
message: OK
version: 1

View File

@ -0,0 +1,155 @@
interactions:
- request:
body: '{"messages": [{"role": "system", "content": "You''re a helpful assistant."},
{"role": "user", "content": "What''s the weather in Seattle and San Francisco
today?"}], "model": "gpt-4o-mini", "parallel_tool_calls": true, "stream": true,
"stream_options": {"include_usage": true}, "tool_choice": "auto", "tools": [{"type":
"function", "function": {"name": "get_current_weather", "description": "Get
the current weather in a given location", "parameters": {"type": "object", "properties":
{"location": {"type": "string", "description": "The city and state, e.g. Boston,
MA"}}, "required": ["location"], "additionalProperties": false}}}]}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate
connection:
- keep-alive
content-length:
- '631'
content-type:
- application/json
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.52.2
x-stainless-arch:
- other:amd64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- Windows
x-stainless-package-version:
- 1.52.2
x-stainless-retry-count:
- '0'
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.12.7
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: 'data: {"id":"chatcmpl-AMUs4TXmQG6Rbn1KXpwESVDmy6eoL","object":"chat.completion.chunk","created":1729925248,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","usage":null,"choices":[{"index":0,"delta":{"role":"assistant","content":null},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-AMUs4TXmQG6Rbn1KXpwESVDmy6eoL","object":"chat.completion.chunk","created":1729925248,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"id":"call_yy3bBunEUS0C3HGZECG9EGCj","type":"function","function":{"name":"get_current_weather","arguments":""}}]},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-AMUs4TXmQG6Rbn1KXpwESVDmy6eoL","object":"chat.completion.chunk","created":1729925248,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"{\"lo"}}]},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-AMUs4TXmQG6Rbn1KXpwESVDmy6eoL","object":"chat.completion.chunk","created":1729925248,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"catio"}}]},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-AMUs4TXmQG6Rbn1KXpwESVDmy6eoL","object":"chat.completion.chunk","created":1729925248,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"n\":
\"S"}}]},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-AMUs4TXmQG6Rbn1KXpwESVDmy6eoL","object":"chat.completion.chunk","created":1729925248,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"eatt"}}]},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-AMUs4TXmQG6Rbn1KXpwESVDmy6eoL","object":"chat.completion.chunk","created":1729925248,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"le,
W"}}]},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-AMUs4TXmQG6Rbn1KXpwESVDmy6eoL","object":"chat.completion.chunk","created":1729925248,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"A\"}"}}]},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-AMUs4TXmQG6Rbn1KXpwESVDmy6eoL","object":"chat.completion.chunk","created":1729925248,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"id":"call_ug3IE7qMdaP4tBJwJHacc7GO","type":"function","function":{"name":"get_current_weather","arguments":""}}]},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-AMUs4TXmQG6Rbn1KXpwESVDmy6eoL","object":"chat.completion.chunk","created":1729925248,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"{\"lo"}}]},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-AMUs4TXmQG6Rbn1KXpwESVDmy6eoL","object":"chat.completion.chunk","created":1729925248,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"catio"}}]},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-AMUs4TXmQG6Rbn1KXpwESVDmy6eoL","object":"chat.completion.chunk","created":1729925248,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"n\":
\"S"}}]},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-AMUs4TXmQG6Rbn1KXpwESVDmy6eoL","object":"chat.completion.chunk","created":1729925248,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"an
F"}}]},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-AMUs4TXmQG6Rbn1KXpwESVDmy6eoL","object":"chat.completion.chunk","created":1729925248,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"ranci"}}]},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-AMUs4TXmQG6Rbn1KXpwESVDmy6eoL","object":"chat.completion.chunk","created":1729925248,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"sco,
C"}}]},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-AMUs4TXmQG6Rbn1KXpwESVDmy6eoL","object":"chat.completion.chunk","created":1729925248,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"A\"}"}}]},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-AMUs4TXmQG6Rbn1KXpwESVDmy6eoL","object":"chat.completion.chunk","created":1729925248,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"tool_calls"}],"usage":null}
data: {"id":"chatcmpl-AMUs4TXmQG6Rbn1KXpwESVDmy6eoL","object":"chat.completion.chunk","created":1729925248,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_f59a81427f","choices":[],"usage":{"prompt_tokens":75,"completion_tokens":51,"total_tokens":126,"prompt_tokens_details":{"cached_tokens":0},"completion_tokens_details":{"reasoning_tokens":0}}}
data: [DONE]
'
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 8d887ec3b9047639-SEA
Connection:
- keep-alive
Content-Type:
- text/event-stream; charset=utf-8
Date:
- Sat, 26 Oct 2024 06:47:29 GMT
Server:
- cloudflare
Set-Cookie: test_set_cookie
Transfer-Encoding:
- chunked
X-Content-Type-Options:
- nosniff
access-control-expose-headers:
- X-Request-ID
alt-svc:
- h3=":443"; ma=86400
openai-organization: test_organization
openai-processing-ms:
- '723'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=31536000; includeSubDomains; preload
x-ratelimit-limit-requests:
- '200'
x-ratelimit-limit-tokens:
- '60000'
x-ratelimit-remaining-requests:
- '197'
x-ratelimit-remaining-tokens:
- '59961'
x-ratelimit-reset-requests:
- 18m43.156s
x-ratelimit-reset-tokens:
- 39ms
x-request-id:
- req_81b3cd6dbe38ffddcdca90c699f71f84
status:
code: 200
message: OK
version: 1

View File

@ -0,0 +1,117 @@
interactions:
- request:
body: '{"messages": [{"role": "user", "content": "Say this is a test"}], "model":
"gpt-4", "stream": true, "stream_options": {"include_usage": true}}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate
connection:
- keep-alive
content-length:
- '142'
content-type:
- application/json
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.52.2
x-stainless-arch:
- other:amd64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- Windows
x-stainless-package-version:
- 1.52.2
x-stainless-retry-count:
- '0'
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.12.7
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: 'data: {"id":"chatcmpl-AMVXI9sDOpHDCsGW99OJ6fYqHSu0J","object":"chat.completion.chunk","created":1729927804,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"","refusal":null},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMVXI9sDOpHDCsGW99OJ6fYqHSu0J","object":"chat.completion.chunk","created":1729927804,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"This"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMVXI9sDOpHDCsGW99OJ6fYqHSu0J","object":"chat.completion.chunk","created":1729927804,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
is"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMVXI9sDOpHDCsGW99OJ6fYqHSu0J","object":"chat.completion.chunk","created":1729927804,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
a"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMVXI9sDOpHDCsGW99OJ6fYqHSu0J","object":"chat.completion.chunk","created":1729927804,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
test"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMVXI9sDOpHDCsGW99OJ6fYqHSu0J","object":"chat.completion.chunk","created":1729927804,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-AMVXI9sDOpHDCsGW99OJ6fYqHSu0J","object":"chat.completion.chunk","created":1729927804,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}],"usage":null}
data: {"id":"chatcmpl-AMVXI9sDOpHDCsGW99OJ6fYqHSu0J","object":"chat.completion.chunk","created":1729927804,"model":"gpt-4-0613","system_fingerprint":null,"choices":[],"usage":{"prompt_tokens":12,"completion_tokens":5,"total_tokens":17,"prompt_tokens_details":{"cached_tokens":0},"completion_tokens_details":{"reasoning_tokens":0}}}
data: [DONE]
'
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 8d88bd2c0d9e308c-SEA
Connection:
- keep-alive
Content-Type:
- text/event-stream; charset=utf-8
Date:
- Sat, 26 Oct 2024 07:30:05 GMT
Server:
- cloudflare
Set-Cookie: test_set_cookie
Transfer-Encoding:
- chunked
X-Content-Type-Options:
- nosniff
access-control-expose-headers:
- X-Request-ID
alt-svc:
- h3=":443"; ma=86400
openai-organization: test_organization
openai-processing-ms:
- '176'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=31536000; includeSubDomains; preload
x-ratelimit-limit-requests:
- '200'
x-ratelimit-limit-tokens:
- '10000'
x-ratelimit-remaining-requests:
- '199'
x-ratelimit-remaining-tokens:
- '9978'
x-ratelimit-reset-requests:
- 7m12s
x-ratelimit-reset-tokens:
- 132ms
x-request-id:
- req_6ff271d00318adcd2408d8dcd5f93ec7
status:
code: 200
message: OK
version: 1

View File

@ -0,0 +1,215 @@
interactions:
- request:
body: '{"messages": [{"role": "system", "content": "You''re a helpful assistant."},
{"role": "user", "content": "What''s the weather in Seattle and San Francisco
today?"}], "model": "gpt-4o-mini", "parallel_tool_calls": true, "tool_choice":
"auto", "tools": [{"type": "function", "function": {"name": "get_current_weather",
"description": "Get the current weather in a given location", "parameters":
{"type": "object", "properties": {"location": {"type": "string", "description":
"The city and state, e.g. Boston, MA"}}, "required": ["location"], "additionalProperties":
false}}}]}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate
connection:
- keep-alive
content-length:
- '572'
content-type:
- application/json
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.52.2
x-stainless-arch:
- other:amd64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- Windows
x-stainless-package-version:
- 1.52.2
x-stainless-retry-count:
- '0'
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.12.7
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: "{\n \"id\": \"chatcmpl-AMTFkpz8qllCCkh1A8xzoe14DwAQN\",\n \"object\":
\"chat.completion\",\n \"created\": 1729919028,\n \"model\": \"gpt-4o-mini-2024-07-18\",\n
\ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\":
\"assistant\",\n \"content\": null,\n \"tool_calls\": [\n {\n
\ \"id\": \"call_YBUn0S5ErhjkfnjFbWWJYIvR\",\n \"type\":
\"function\",\n \"function\": {\n \"name\": \"get_current_weather\",\n
\ \"arguments\": \"{\\\"location\\\": \\\"Seattle, WA\\\"}\"\n
\ }\n },\n {\n \"id\": \"call_Ail5xuGFVRk2wUPidQWDYytn\",\n
\ \"type\": \"function\",\n \"function\": {\n \"name\":
\"get_current_weather\",\n \"arguments\": \"{\\\"location\\\":
\\\"San Francisco, CA\\\"}\"\n }\n }\n ],\n \"refusal\":
null\n },\n \"logprobs\": null,\n \"finish_reason\": \"tool_calls\"\n
\ }\n ],\n \"usage\": {\n \"prompt_tokens\": 75,\n \"completion_tokens\":
51,\n \"total_tokens\": 126,\n \"prompt_tokens_details\": {\n \"cached_tokens\":
0\n },\n \"completion_tokens_details\": {\n \"reasoning_tokens\":
0\n }\n },\n \"system_fingerprint\": \"fp_f59a81427f\"\n}\n"
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 8d87e6e7ba75dee2-SEA
Connection:
- keep-alive
Content-Type:
- application/json
Date:
- Sat, 26 Oct 2024 05:03:49 GMT
Server:
- cloudflare
Set-Cookie: test_set_cookie
Transfer-Encoding:
- chunked
X-Content-Type-Options:
- nosniff
access-control-expose-headers:
- X-Request-ID
alt-svc:
- h3=":443"; ma=86400
content-length:
- '1180'
openai-organization: test_organization
openai-processing-ms:
- '649'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=31536000; includeSubDomains; preload
x-ratelimit-limit-requests:
- '200'
x-ratelimit-limit-tokens:
- '60000'
x-ratelimit-remaining-requests:
- '198'
x-ratelimit-remaining-tokens:
- '59961'
x-ratelimit-reset-requests:
- 14m23.304s
x-ratelimit-reset-tokens:
- 39ms
x-request-id:
- req_b2f085c19f19c32c8aa891887e228402
status:
code: 200
message: OK
- request:
body: '{"messages": [{"role": "system", "content": "You''re a helpful assistant."},
{"role": "user", "content": "What''s the weather in Seattle and San Francisco
today?"}, {"role": "assistant", "tool_calls": [{"id": "call_YBUn0S5ErhjkfnjFbWWJYIvR",
"function": {"arguments": "{\"location\": \"Seattle, WA\"}", "name": "get_current_weather"},
"type": "function"}, {"id": "call_Ail5xuGFVRk2wUPidQWDYytn", "function": {"arguments":
"{\"location\": \"San Francisco, CA\"}", "name": "get_current_weather"}, "type":
"function"}]}, {"role": "tool", "content": "50 degrees and raining", "tool_call_id":
"call_YBUn0S5ErhjkfnjFbWWJYIvR"}, {"role": "tool", "content": "70 degrees and
sunny", "tool_call_id": "call_Ail5xuGFVRk2wUPidQWDYytn"}], "model": "gpt-4o-mini"}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate
connection:
- keep-alive
content-length:
- '746'
content-type:
- application/json
cookie:
- test_set_cookie
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.52.2
x-stainless-arch:
- other:amd64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- Windows
x-stainless-package-version:
- 1.52.2
x-stainless-retry-count:
- '0'
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.12.7
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: "{\n \"id\": \"chatcmpl-AMTFlFwfqawbaH57GE3mXL8O17xo1\",\n \"object\":
\"chat.completion\",\n \"created\": 1729919029,\n \"model\": \"gpt-4o-mini-2024-07-18\",\n
\ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\":
\"assistant\",\n \"content\": \"Today, the weather in Seattle is 50
degrees and raining, while in San Francisco, it is 70 degrees and sunny.\",\n
\ \"refusal\": null\n },\n \"logprobs\": null,\n \"finish_reason\":
\"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\": 99,\n \"completion_tokens\":
26,\n \"total_tokens\": 125,\n \"prompt_tokens_details\": {\n \"cached_tokens\":
0\n },\n \"completion_tokens_details\": {\n \"reasoning_tokens\":
0\n }\n },\n \"system_fingerprint\": \"fp_f59a81427f\"\n}\n"
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 8d87e6ec8ddedee2-SEA
Connection:
- keep-alive
Content-Type:
- application/json
Date:
- Sat, 26 Oct 2024 05:03:49 GMT
Server:
- cloudflare
Set-Cookie: test_set_cookie
Transfer-Encoding:
- chunked
X-Content-Type-Options:
- nosniff
access-control-expose-headers:
- X-Request-ID
alt-svc:
- h3=":443"; ma=86400
content-length:
- '732'
openai-organization: test_organization
openai-processing-ms:
- '537'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=31536000; includeSubDomains; preload
x-ratelimit-limit-requests:
- '200'
x-ratelimit-limit-tokens:
- '60000'
x-ratelimit-remaining-requests:
- '197'
x-ratelimit-remaining-tokens:
- '59948'
x-ratelimit-reset-requests:
- 21m34.534s
x-ratelimit-reset-tokens:
- 52ms
x-request-id:
- req_ca78782b171f5cd60841ba443de92731
status:
code: 200
message: OK
version: 1

View File

@ -0,0 +1,215 @@
interactions:
- request:
body: '{"messages": [{"role": "system", "content": "You''re a helpful assistant."},
{"role": "user", "content": "What''s the weather in Seattle and San Francisco
today?"}], "model": "gpt-4o-mini", "parallel_tool_calls": true, "tool_choice":
"auto", "tools": [{"type": "function", "function": {"name": "get_current_weather",
"description": "Get the current weather in a given location", "parameters":
{"type": "object", "properties": {"location": {"type": "string", "description":
"The city and state, e.g. Boston, MA"}}, "required": ["location"], "additionalProperties":
false}}}]}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate
connection:
- keep-alive
content-length:
- '572'
content-type:
- application/json
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.52.2
x-stainless-arch:
- other:amd64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- Windows
x-stainless-package-version:
- 1.52.2
x-stainless-retry-count:
- '0'
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.12.7
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: "{\n \"id\": \"chatcmpl-AMTFkpz8qllCCkh1A8xzoe14DwAQN\",\n \"object\":
\"chat.completion\",\n \"created\": 1729919028,\n \"model\": \"gpt-4o-mini-2024-07-18\",\n
\ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\":
\"assistant\",\n \"content\": null,\n \"tool_calls\": [\n {\n
\ \"id\": \"call_YBUn0S5ErhjkfnjFbWWJYIvR\",\n \"type\":
\"function\",\n \"function\": {\n \"name\": \"get_current_weather\",\n
\ \"arguments\": \"{\\\"location\\\": \\\"Seattle, WA\\\"}\"\n
\ }\n },\n {\n \"id\": \"call_Ail5xuGFVRk2wUPidQWDYytn\",\n
\ \"type\": \"function\",\n \"function\": {\n \"name\":
\"get_current_weather\",\n \"arguments\": \"{\\\"location\\\":
\\\"San Francisco, CA\\\"}\"\n }\n }\n ],\n \"refusal\":
null\n },\n \"logprobs\": null,\n \"finish_reason\": \"tool_calls\"\n
\ }\n ],\n \"usage\": {\n \"prompt_tokens\": 75,\n \"completion_tokens\":
51,\n \"total_tokens\": 126,\n \"prompt_tokens_details\": {\n \"cached_tokens\":
0\n },\n \"completion_tokens_details\": {\n \"reasoning_tokens\":
0\n }\n },\n \"system_fingerprint\": \"fp_f59a81427f\"\n}\n"
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 8d87e6e7ba75dee2-SEA
Connection:
- keep-alive
Content-Type:
- application/json
Date:
- Sat, 26 Oct 2024 05:03:49 GMT
Server:
- cloudflare
Set-Cookie: test_set_cookie
Transfer-Encoding:
- chunked
X-Content-Type-Options:
- nosniff
access-control-expose-headers:
- X-Request-ID
alt-svc:
- h3=":443"; ma=86400
content-length:
- '1180'
openai-organization: test_organization
openai-processing-ms:
- '649'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=31536000; includeSubDomains; preload
x-ratelimit-limit-requests:
- '200'
x-ratelimit-limit-tokens:
- '60000'
x-ratelimit-remaining-requests:
- '198'
x-ratelimit-remaining-tokens:
- '59961'
x-ratelimit-reset-requests:
- 14m23.304s
x-ratelimit-reset-tokens:
- 39ms
x-request-id:
- req_b2f085c19f19c32c8aa891887e228402
status:
code: 200
message: OK
- request:
body: '{"messages": [{"role": "system", "content": "You''re a helpful assistant."},
{"role": "user", "content": "What''s the weather in Seattle and San Francisco
today?"}, {"role": "assistant", "tool_calls": [{"id": "call_YBUn0S5ErhjkfnjFbWWJYIvR",
"function": {"arguments": "{\"location\": \"Seattle, WA\"}", "name": "get_current_weather"},
"type": "function"}, {"id": "call_Ail5xuGFVRk2wUPidQWDYytn", "function": {"arguments":
"{\"location\": \"San Francisco, CA\"}", "name": "get_current_weather"}, "type":
"function"}]}, {"role": "tool", "content": "50 degrees and raining", "tool_call_id":
"call_YBUn0S5ErhjkfnjFbWWJYIvR"}, {"role": "tool", "content": "70 degrees and
sunny", "tool_call_id": "call_Ail5xuGFVRk2wUPidQWDYytn"}], "model": "gpt-4o-mini"}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate
connection:
- keep-alive
content-length:
- '746'
content-type:
- application/json
cookie:
- test_set_cookie
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.52.2
x-stainless-arch:
- other:amd64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- Windows
x-stainless-package-version:
- 1.52.2
x-stainless-retry-count:
- '0'
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.12.7
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: "{\n \"id\": \"chatcmpl-AMTFlFwfqawbaH57GE3mXL8O17xo1\",\n \"object\":
\"chat.completion\",\n \"created\": 1729919029,\n \"model\": \"gpt-4o-mini-2024-07-18\",\n
\ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\":
\"assistant\",\n \"content\": \"Today, the weather in Seattle is 50
degrees and raining, while in San Francisco, it is 70 degrees and sunny.\",\n
\ \"refusal\": null\n },\n \"logprobs\": null,\n \"finish_reason\":
\"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\": 99,\n \"completion_tokens\":
26,\n \"total_tokens\": 125,\n \"prompt_tokens_details\": {\n \"cached_tokens\":
0\n },\n \"completion_tokens_details\": {\n \"reasoning_tokens\":
0\n }\n },\n \"system_fingerprint\": \"fp_f59a81427f\"\n}\n"
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 8d87e6ec8ddedee2-SEA
Connection:
- keep-alive
Content-Type:
- application/json
Date:
- Sat, 26 Oct 2024 05:03:49 GMT
Server:
- cloudflare
Set-Cookie: test_set_cookie
Transfer-Encoding:
- chunked
X-Content-Type-Options:
- nosniff
access-control-expose-headers:
- X-Request-ID
alt-svc:
- h3=":443"; ma=86400
content-length:
- '732'
openai-organization: test_organization
openai-processing-ms:
- '537'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=31536000; includeSubDomains; preload
x-ratelimit-limit-requests:
- '200'
x-ratelimit-limit-tokens:
- '60000'
x-ratelimit-remaining-requests:
- '197'
x-ratelimit-remaining-tokens:
- '59948'
x-ratelimit-reset-requests:
- 21m34.534s
x-ratelimit-reset-tokens:
- 52ms
x-request-id:
- req_ca78782b171f5cd60841ba443de92731
status:
code: 200
message: OK
version: 1

View File

@ -5,8 +5,16 @@ import os
import pytest
from openai import OpenAI
from opentelemetry import trace
from opentelemetry.instrumentation.openai_v2 import OpenAIInstrumentor
from opentelemetry.instrumentation.openai_v2.utils import (
OTEL_INSTRUMENTATION_OPENAI_CAPTURE_MESSAGE_CONTENT,
)
from opentelemetry.sdk._events import EventLoggerProvider
from opentelemetry.sdk._logs import LoggerProvider
from opentelemetry.sdk._logs.export import (
InMemoryLogExporter,
SimpleLogRecordProcessor,
)
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import SimpleSpanProcessor
from opentelemetry.sdk.trace.export.in_memory_span_exporter import (
@ -14,21 +22,32 @@ from opentelemetry.sdk.trace.export.in_memory_span_exporter import (
)
@pytest.fixture(scope="session")
def exporter():
@pytest.fixture(scope="function", name="span_exporter")
def fixture_span_exporter():
exporter = InMemorySpanExporter()
processor = SimpleSpanProcessor(exporter)
yield exporter
@pytest.fixture(scope="function", name="log_exporter")
def fixture_log_exporter():
exporter = InMemoryLogExporter()
yield exporter
@pytest.fixture(scope="function", name="tracer_provider")
def fixture_tracer_provider(span_exporter):
provider = TracerProvider()
provider.add_span_processor(processor)
trace.set_tracer_provider(provider)
return exporter
provider.add_span_processor(SimpleSpanProcessor(span_exporter))
return provider
@pytest.fixture(autouse=True)
def clear_exporter(exporter):
exporter.clear()
@pytest.fixture(scope="function", name="event_logger_provider")
def fixture_event_logger_provider(log_exporter):
provider = LoggerProvider()
provider.add_log_record_processor(SimpleLogRecordProcessor(log_exporter))
event_logger_provider = EventLoggerProvider(provider)
return event_logger_provider
@pytest.fixture(autouse=True)
@ -51,14 +70,32 @@ def vcr_config():
}
@pytest.fixture(scope="session", autouse=True)
def instrument():
OpenAIInstrumentor().instrument()
@pytest.fixture(scope="function")
def instrument_no_content(tracer_provider, event_logger_provider):
instrumentor = OpenAIInstrumentor()
instrumentor.instrument(
tracer_provider=tracer_provider,
event_logger_provider=event_logger_provider,
)
yield instrumentor
instrumentor.uninstrument()
@pytest.fixture(scope="session", autouse=True)
def uninstrument():
OpenAIInstrumentor().uninstrument()
@pytest.fixture(scope="function")
def instrument_with_content(tracer_provider, event_logger_provider):
os.environ.update(
{OTEL_INSTRUMENTATION_OPENAI_CAPTURE_MESSAGE_CONTENT: "True"}
)
instrumentor = OpenAIInstrumentor()
instrumentor.instrument(
tracer_provider=tracer_provider,
event_logger_provider=event_logger_provider,
)
yield instrumentor
os.environ.pop(OTEL_INSTRUMENTATION_OPENAI_CAPTURE_MESSAGE_CONTENT, None)
instrumentor.uninstrument()
def scrub_response_headers(response):

View File

@ -1,82 +1,390 @@
import json
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=too-many-locals
from typing import Optional
import pytest
from openai import OpenAI
from openai.resources.chat.completions import ChatCompletion
from opentelemetry.sdk.trace import ReadableSpan
from opentelemetry.semconv._incubating.attributes import (
error_attributes as ErrorAttributes,
)
from opentelemetry.semconv._incubating.attributes import (
event_attributes as EventAttributes,
)
from opentelemetry.semconv._incubating.attributes import (
gen_ai_attributes as GenAIAttributes,
)
from opentelemetry.semconv._incubating.attributes import (
server_attributes as ServerAttributes,
)
@pytest.mark.vcr()
def test_chat_completion(exporter, openai_client):
llm_model_value = "gpt-4"
def test_chat_completion_with_content(
span_exporter, log_exporter, openai_client, instrument_with_content
):
llm_model_value = "gpt-4o-mini"
messages_value = [{"role": "user", "content": "Say this is a test"}]
kwargs = {
"model": llm_model_value,
"messages": messages_value,
"stream": False,
response = openai_client.chat.completions.create(
messages=messages_value, model=llm_model_value, stream=False
)
spans = span_exporter.get_finished_spans()
assert_completion_attributes(spans[0], llm_model_value, response)
logs = log_exporter.get_finished_logs()
assert len(logs) == 2
user_message = {"content": messages_value[0]["content"]}
assert_message_in_logs(
logs[0], "gen_ai.user.message", user_message, spans[0]
)
choice_event = {
"index": 0,
"finish_reason": "stop",
"message": {
"role": "assistant",
"content": response.choices[0].message.content,
},
}
assert_message_in_logs(logs[1], "gen_ai.choice", choice_event, spans[0])
response = openai_client.chat.completions.create(**kwargs)
spans = exporter.get_finished_spans()
chat_completion_span = spans[0]
# assert that the span name is correct
assert chat_completion_span.name == f"chat {llm_model_value}"
attributes = chat_completion_span.attributes
operation_name = attributes[GenAIAttributes.GEN_AI_OPERATION_NAME]
system = attributes[GenAIAttributes.GEN_AI_SYSTEM]
request_model = attributes[GenAIAttributes.GEN_AI_REQUEST_MODEL]
response_model = attributes[GenAIAttributes.GEN_AI_RESPONSE_MODEL]
response_id = attributes[GenAIAttributes.GEN_AI_RESPONSE_ID]
input_tokens = attributes[GenAIAttributes.GEN_AI_USAGE_INPUT_TOKENS]
output_tokens = attributes[GenAIAttributes.GEN_AI_USAGE_OUTPUT_TOKENS]
# assert that the attributes are correct
assert (
operation_name == GenAIAttributes.GenAiOperationNameValues.CHAT.value
)
assert system == GenAIAttributes.GenAiSystemValues.OPENAI.value
assert request_model == llm_model_value
assert response_model == response.model
assert response_id == response.id
assert input_tokens == response.usage.prompt_tokens
assert output_tokens == response.usage.completion_tokens
def test_chat_completion_bad_endpoint(span_exporter, instrument_no_content):
llm_model_value = "gpt-4o-mini"
messages_value = [{"role": "user", "content": "Say this is a test"}]
events = chat_completion_span.events
client = OpenAI(base_url="http://localhost:4242")
# assert that the prompt and completion events are present
prompt_event = list(
filter(
lambda event: event.name == "gen_ai.content.prompt",
events,
exception = None
try:
client.chat.completions.create(
messages=messages_value,
model=llm_model_value,
timeout=0.1,
)
assert False, "Expected an exception"
except Exception as ex: # pylint: disable=broad-exception-caught
exception = ex
spans = span_exporter.get_finished_spans()
assert_all_attributes(
spans[0], llm_model_value, server_address="localhost"
)
completion_event = list(
filter(
lambda event: event.name == "gen_ai.content.completion",
events,
)
)
assert prompt_event
assert completion_event
# assert that the prompt and completion events have the correct attributes
assert prompt_event[0].attributes[
GenAIAttributes.GEN_AI_PROMPT
] == json.dumps(messages_value)
assert 4242 == spans[0].attributes[ServerAttributes.SERVER_PORT]
assert (
json.loads(
completion_event[0].attributes[GenAIAttributes.GEN_AI_COMPLETION]
)[0]["content"]
== response.choices[0].message.content
type(exception).__qualname__
== spans[0].attributes[ErrorAttributes.ERROR_TYPE]
)
@pytest.mark.vcr()
def test_chat_completion_streaming(exporter, openai_client):
def test_chat_completion_404(
span_exporter, openai_client, instrument_no_content
):
llm_model_value = "this-model-does-not-exist"
messages_value = [{"role": "user", "content": "Say this is a test"}]
exception = None
try:
openai_client.chat.completions.create(
messages=messages_value,
model=llm_model_value,
timeout=0.1,
)
assert False, "Expected an exception"
except Exception as ex: # pylint: disable=broad-exception-caught
exception = ex
spans = span_exporter.get_finished_spans()
assert_all_attributes(spans[0], llm_model_value)
assert (
type(exception).__qualname__
== spans[0].attributes[ErrorAttributes.ERROR_TYPE]
)
@pytest.mark.vcr()
def test_chat_completion_extra_params(
span_exporter, openai_client, instrument_no_content
):
llm_model_value = "gpt-4o-mini"
messages_value = [{"role": "user", "content": "Say this is a test"}]
response = openai_client.chat.completions.create(
messages=messages_value,
model=llm_model_value,
seed=42,
temperature=0.5,
max_tokens=50,
stream=False,
extra_body={"service_tier": "default"},
)
spans = span_exporter.get_finished_spans()
assert_completion_attributes(spans[0], llm_model_value, response)
assert (
spans[0].attributes[GenAIAttributes.GEN_AI_OPENAI_REQUEST_SEED] == 42
)
assert (
spans[0].attributes[GenAIAttributes.GEN_AI_REQUEST_TEMPERATURE] == 0.5
)
assert spans[0].attributes[GenAIAttributes.GEN_AI_REQUEST_MAX_TOKENS] == 50
assert (
spans[0].attributes[GenAIAttributes.GEN_AI_OPENAI_REQUEST_SERVICE_TIER]
== "default"
)
@pytest.mark.vcr()
def test_chat_completion_multiple_choices(
span_exporter, log_exporter, openai_client, instrument_with_content
):
llm_model_value = "gpt-4o-mini"
messages_value = [{"role": "user", "content": "Say this is a test"}]
response = openai_client.chat.completions.create(
messages=messages_value, model=llm_model_value, n=2, stream=False
)
spans = span_exporter.get_finished_spans()
assert_completion_attributes(spans[0], llm_model_value, response)
logs = log_exporter.get_finished_logs()
assert len(logs) == 3 # 1 user message + 2 choice messages
user_message = {"content": messages_value[0]["content"]}
assert_message_in_logs(
logs[0], "gen_ai.user.message", user_message, spans[0]
)
choice_event_0 = {
"index": 0,
"finish_reason": "stop",
"message": {
"role": "assistant",
"content": response.choices[0].message.content,
},
}
assert_message_in_logs(logs[1], "gen_ai.choice", choice_event_0, spans[0])
choice_event_1 = {
"index": 1,
"finish_reason": "stop",
"message": {
"role": "assistant",
"content": response.choices[1].message.content,
},
}
assert_message_in_logs(logs[2], "gen_ai.choice", choice_event_1, spans[0])
@pytest.mark.vcr()
def test_chat_completion_tool_calls_with_content(
span_exporter, log_exporter, openai_client, instrument_with_content
):
chat_completion_tool_call(span_exporter, log_exporter, openai_client, True)
@pytest.mark.vcr()
def test_chat_completion_tool_calls_no_content(
span_exporter, log_exporter, openai_client, instrument_no_content
):
chat_completion_tool_call(
span_exporter, log_exporter, openai_client, False
)
def chat_completion_tool_call(
span_exporter, log_exporter, openai_client, expect_content
):
llm_model_value = "gpt-4o-mini"
messages_value = [
{"role": "system", "content": "You're a helpful assistant."},
{
"role": "user",
"content": "What's the weather in Seattle and San Francisco today?",
},
]
response_0 = openai_client.chat.completions.create(
messages=messages_value,
model=llm_model_value,
tool_choice="auto",
tools=[get_current_weather_tool_definition()],
)
# sanity check
assert "tool_calls" in response_0.choices[0].finish_reason
# final request
messages_value.append(
{
"role": "assistant",
"tool_calls": response_0.choices[0].message.to_dict()[
"tool_calls"
],
}
)
tool_call_result_0 = {
"role": "tool",
"content": "50 degrees and raining",
"tool_call_id": response_0.choices[0].message.tool_calls[0].id,
}
tool_call_result_1 = {
"role": "tool",
"content": "70 degrees and sunny",
"tool_call_id": response_0.choices[0].message.tool_calls[1].id,
}
messages_value.append(tool_call_result_0)
messages_value.append(tool_call_result_1)
response_1 = openai_client.chat.completions.create(
messages=messages_value, model=llm_model_value
)
# sanity check
assert "stop" in response_1.choices[0].finish_reason
# validate both calls
spans = span_exporter.get_finished_spans()
assert len(spans) == 2
assert_completion_attributes(spans[0], llm_model_value, response_0)
assert_completion_attributes(spans[1], llm_model_value, response_1)
logs = log_exporter.get_finished_logs()
assert len(logs) == 9 # 3 logs for first completion, 6 for second
# call one
system_message = (
{"content": messages_value[0]["content"]} if expect_content else None
)
assert_message_in_logs(
logs[0], "gen_ai.system.message", system_message, spans[0]
)
user_message = (
{"content": messages_value[1]["content"]} if expect_content else None
)
assert_message_in_logs(
logs[1], "gen_ai.user.message", user_message, spans[0]
)
function_call_0 = {"name": "get_current_weather"}
function_call_1 = {"name": "get_current_weather"}
if expect_content:
function_call_0["arguments"] = (
response_0.choices[0]
.message.tool_calls[0]
.function.arguments.replace("\n", "")
)
function_call_1["arguments"] = (
response_0.choices[0]
.message.tool_calls[1]
.function.arguments.replace("\n", "")
)
choice_event = {
"index": 0,
"finish_reason": "tool_calls",
"message": {
"role": "assistant",
"tool_calls": [
{
"id": response_0.choices[0].message.tool_calls[0].id,
"type": "function",
"function": function_call_0,
},
{
"id": response_0.choices[0].message.tool_calls[1].id,
"type": "function",
"function": function_call_1,
},
],
},
}
assert_message_in_logs(logs[2], "gen_ai.choice", choice_event, spans[0])
# call two
system_message = (
{"content": messages_value[0]["content"]} if expect_content else None
)
assert_message_in_logs(
logs[3], "gen_ai.system.message", system_message, spans[1]
)
user_message = (
{"content": messages_value[1]["content"]} if expect_content else None
)
assert_message_in_logs(
logs[4], "gen_ai.user.message", user_message, spans[1]
)
assistant_tool_call = {"tool_calls": messages_value[2]["tool_calls"]}
if not expect_content:
assistant_tool_call["tool_calls"][0]["function"]["arguments"] = None
assistant_tool_call["tool_calls"][1]["function"]["arguments"] = None
assert_message_in_logs(
logs[5], "gen_ai.assistant.message", assistant_tool_call, spans[1]
)
tool_message_0 = {
"id": tool_call_result_0["tool_call_id"],
"content": tool_call_result_0["content"] if expect_content else None,
}
assert_message_in_logs(
logs[6], "gen_ai.tool.message", tool_message_0, spans[1]
)
tool_message_1 = {
"id": tool_call_result_1["tool_call_id"],
"content": tool_call_result_1["content"] if expect_content else None,
}
assert_message_in_logs(
logs[7], "gen_ai.tool.message", tool_message_1, spans[1]
)
message = {
"role": "assistant",
"content": response_1.choices[0].message.content
if expect_content
else None,
}
choice = {
"index": 0,
"finish_reason": "stop",
"message": message,
}
assert_message_in_logs(logs[8], "gen_ai.choice", choice, spans[1])
@pytest.mark.vcr()
def test_chat_completion_streaming(
span_exporter, log_exporter, openai_client, instrument_with_content
):
llm_model_value = "gpt-4"
messages_value = [{"role": "user", "content": "Say this is a test"}]
@ -102,56 +410,428 @@ def test_chat_completion_streaming(exporter, openai_client):
response_stream_model = chunk.model
response_stream_id = chunk.id
spans = exporter.get_finished_spans()
streaming_span = spans[0]
spans = span_exporter.get_finished_spans()
assert_all_attributes(
spans[0],
llm_model_value,
response_stream_id,
response_stream_model,
response_stream_usage.prompt_tokens,
response_stream_usage.completion_tokens,
)
assert streaming_span.name == f"chat {llm_model_value}"
attributes = streaming_span.attributes
logs = log_exporter.get_finished_logs()
assert len(logs) == 2
operation_name = attributes[GenAIAttributes.GEN_AI_OPERATION_NAME]
system = attributes[GenAIAttributes.GEN_AI_SYSTEM]
request_model = attributes[GenAIAttributes.GEN_AI_REQUEST_MODEL]
response_model = attributes[GenAIAttributes.GEN_AI_RESPONSE_MODEL]
response_id = attributes[GenAIAttributes.GEN_AI_RESPONSE_ID]
input_tokens = attributes[GenAIAttributes.GEN_AI_USAGE_INPUT_TOKENS]
output_tokens = attributes[GenAIAttributes.GEN_AI_USAGE_OUTPUT_TOKENS]
user_message = {"content": "Say this is a test"}
assert_message_in_logs(
logs[0], "gen_ai.user.message", user_message, spans[0]
)
choice_event = {
"index": 0,
"finish_reason": "stop",
"message": {"role": "assistant", "content": response_stream_result},
}
assert_message_in_logs(logs[1], "gen_ai.choice", choice_event, spans[0])
@pytest.mark.vcr()
def test_chat_completion_streaming_not_complete(
span_exporter, log_exporter, openai_client, instrument_with_content
):
llm_model_value = "gpt-4"
messages_value = [{"role": "user", "content": "Say this is a test"}]
kwargs = {
"model": llm_model_value,
"messages": messages_value,
"stream": True,
}
response_stream_model = None
response_stream_id = None
response_stream_result = ""
response = openai_client.chat.completions.create(**kwargs)
for idx, chunk in enumerate(response):
if chunk.choices:
response_stream_result += chunk.choices[0].delta.content or ""
if idx == 1:
# fake a stop
break
if chunk.model:
response_stream_model = chunk.model
if chunk.id:
response_stream_id = chunk.id
response.close()
spans = span_exporter.get_finished_spans()
assert_all_attributes(
spans[0], llm_model_value, response_stream_id, response_stream_model
)
logs = log_exporter.get_finished_logs()
assert len(logs) == 2
user_message = {"content": "Say this is a test"}
assert_message_in_logs(
logs[0], "gen_ai.user.message", user_message, spans[0]
)
choice_event = {
"index": 0,
"finish_reason": "error",
"message": {"role": "assistant", "content": response_stream_result},
}
assert_message_in_logs(logs[1], "gen_ai.choice", choice_event, spans[0])
@pytest.mark.vcr()
def test_chat_completion_multiple_choices_streaming(
span_exporter, log_exporter, openai_client, instrument_with_content
):
llm_model_value = "gpt-4o-mini"
messages_value = [
{"role": "system", "content": "You're a helpful assistant."},
{
"role": "user",
"content": "What's the weather in Seattle and San Francisco today?",
},
]
response_0 = openai_client.chat.completions.create(
messages=messages_value,
model=llm_model_value,
n=2,
stream=True,
stream_options={"include_usage": True},
)
# two strings for each choice
response_stream_result = ["", ""]
finish_reasons = ["", ""]
for chunk in response_0:
if chunk.choices:
for choice in chunk.choices:
response_stream_result[choice.index] += (
choice.delta.content or ""
)
if choice.finish_reason:
finish_reasons[choice.index] = choice.finish_reason
# get the last chunk
if getattr(chunk, "usage", None):
response_stream_usage = chunk.usage
response_stream_model = chunk.model
response_stream_id = chunk.id
# sanity check
assert "stop" == finish_reasons[0]
spans = span_exporter.get_finished_spans()
assert_all_attributes(
spans[0],
llm_model_value,
response_stream_id,
response_stream_model,
response_stream_usage.prompt_tokens,
response_stream_usage.completion_tokens,
)
logs = log_exporter.get_finished_logs()
assert len(logs) == 4
system_message = {"content": messages_value[0]["content"]}
assert_message_in_logs(
logs[0], "gen_ai.system.message", system_message, spans[0]
)
user_message = {
"content": "What's the weather in Seattle and San Francisco today?"
}
assert_message_in_logs(
logs[1], "gen_ai.user.message", user_message, spans[0]
)
choice_event_0 = {
"index": 0,
"finish_reason": "stop",
"message": {
"role": "assistant",
"content": "".join(response_stream_result[0]),
},
}
assert_message_in_logs(logs[2], "gen_ai.choice", choice_event_0, spans[0])
choice_event_1 = {
"index": 1,
"finish_reason": "stop",
"message": {
"role": "assistant",
"content": "".join(response_stream_result[1]),
},
}
assert_message_in_logs(logs[3], "gen_ai.choice", choice_event_1, spans[0])
@pytest.mark.vcr()
def test_chat_completion_multiple_tools_streaming_with_content(
span_exporter, log_exporter, openai_client, instrument_with_content
):
chat_completion_multiple_tools_streaming(
span_exporter, log_exporter, openai_client, True
)
@pytest.mark.vcr()
def test_chat_completion_multiple_tools_streaming_no_content(
span_exporter, log_exporter, openai_client, instrument_no_content
):
chat_completion_multiple_tools_streaming(
span_exporter, log_exporter, openai_client, False
)
def chat_completion_multiple_tools_streaming(
span_exporter, log_exporter, openai_client, expect_content
):
llm_model_value = "gpt-4o-mini"
messages_value = [
{"role": "system", "content": "You're a helpful assistant."},
{
"role": "user",
"content": "What's the weather in Seattle and San Francisco today?",
},
]
response = openai_client.chat.completions.create(
messages=messages_value,
model=llm_model_value,
tool_choice="auto",
tools=[get_current_weather_tool_definition()],
stream=True,
stream_options={"include_usage": True},
)
finish_reason = None
# two tools
tool_names = ["", ""]
tool_call_ids = ["", ""]
tool_args = ["", ""]
for chunk in response:
if chunk.choices:
if chunk.choices[0].finish_reason:
finish_reason = chunk.choices[0].finish_reason
for tool_call in chunk.choices[0].delta.tool_calls or []:
t_idx = tool_call.index
if tool_call.id:
tool_call_ids[t_idx] = tool_call.id
if tool_call.function:
if tool_call.function.arguments:
tool_args[t_idx] += tool_call.function.arguments
if tool_call.function.name:
tool_names[t_idx] = tool_call.function.name
# get the last chunk
if getattr(chunk, "usage", None):
response_stream_usage = chunk.usage
response_stream_model = chunk.model
response_stream_id = chunk.id
# sanity check
assert "tool_calls" == finish_reason
spans = span_exporter.get_finished_spans()
assert_all_attributes(
spans[0],
llm_model_value,
response_stream_id,
response_stream_model,
response_stream_usage.prompt_tokens,
response_stream_usage.completion_tokens,
)
logs = log_exporter.get_finished_logs()
assert len(logs) == 3
system_message = (
{"content": messages_value[0]["content"]} if expect_content else None
)
assert_message_in_logs(
logs[0], "gen_ai.system.message", system_message, spans[0]
)
user_message = (
{"content": "What's the weather in Seattle and San Francisco today?"}
if expect_content
else None
)
assert_message_in_logs(
logs[1], "gen_ai.user.message", user_message, spans[0]
)
choice_event = {
"index": 0,
"finish_reason": "tool_calls",
"message": {
"role": "assistant",
"tool_calls": [
{
"id": tool_call_ids[0],
"type": "function",
"function": {
"name": tool_names[0],
"arguments": tool_args[0].replace("\n", "")
if expect_content
else None,
},
},
{
"id": tool_call_ids[1],
"type": "function",
"function": {
"name": tool_names[1],
"arguments": tool_args[1].replace("\n", "")
if expect_content
else None,
},
},
],
},
}
assert_message_in_logs(logs[2], "gen_ai.choice", choice_event, spans[0])
def assert_message_in_logs(log, event_name, expected_content, parent_span):
assert log.log_record.attributes[EventAttributes.EVENT_NAME] == event_name
assert (
operation_name == GenAIAttributes.GenAiOperationNameValues.CHAT.value
log.log_record.attributes[GenAIAttributes.GEN_AI_SYSTEM]
== GenAIAttributes.GenAiSystemValues.OPENAI.value
)
assert system == GenAIAttributes.GenAiSystemValues.OPENAI.value
assert request_model == llm_model_value
assert response_model == response_stream_model
assert response_id == response_stream_id
assert input_tokens == response_stream_usage.prompt_tokens
assert output_tokens == response_stream_usage.completion_tokens
events = streaming_span.events
# assert that the prompt and completion events are present
prompt_event = list(
filter(
lambda event: event.name == "gen_ai.content.prompt",
events,
)
)
completion_event = list(
filter(
lambda event: event.name == "gen_ai.content.completion",
events,
if not expected_content:
assert not log.log_record.body
else:
assert dict(log.log_record.body) == remove_none_values(
expected_content
)
assert_log_parent(log, parent_span)
def remove_none_values(body):
result = {}
for key, value in body.items():
if value is None:
continue
if isinstance(value, dict):
result[key] = remove_none_values(value)
elif isinstance(value, list):
result[key] = [remove_none_values(i) for i in value]
else:
result[key] = value
return result
def assert_completion_attributes(
span: ReadableSpan,
request_model: str,
response: ChatCompletion,
operation_name: str = "chat",
server_address: str = "api.openai.com",
):
return assert_all_attributes(
span,
request_model,
response.id,
response.model,
response.usage.prompt_tokens,
response.usage.completion_tokens,
operation_name,
server_address,
)
assert prompt_event
assert completion_event
# assert that the prompt and completion events have the correct attributes
assert prompt_event[0].attributes[
GenAIAttributes.GEN_AI_PROMPT
] == json.dumps(messages_value)
def assert_all_attributes(
span: ReadableSpan,
request_model: str,
response_id: str = None,
response_model: str = None,
input_tokens: Optional[int] = None,
output_tokens: Optional[int] = None,
operation_name: str = "chat",
server_address: str = "api.openai.com",
):
assert span.name == f"{operation_name} {request_model}"
assert (
json.loads(
completion_event[0].attributes[GenAIAttributes.GEN_AI_COMPLETION]
)[0]["content"]
== response_stream_result
operation_name
== span.attributes[GenAIAttributes.GEN_AI_OPERATION_NAME]
)
assert (
GenAIAttributes.GenAiSystemValues.OPENAI.value
== span.attributes[GenAIAttributes.GEN_AI_SYSTEM]
)
assert (
request_model == span.attributes[GenAIAttributes.GEN_AI_REQUEST_MODEL]
)
if response_model:
assert (
response_model
== span.attributes[GenAIAttributes.GEN_AI_RESPONSE_MODEL]
)
else:
assert GenAIAttributes.GEN_AI_RESPONSE_MODEL not in span.attributes
if response_id:
assert (
response_id == span.attributes[GenAIAttributes.GEN_AI_RESPONSE_ID]
)
else:
assert GenAIAttributes.GEN_AI_RESPONSE_ID not in span.attributes
if input_tokens:
assert (
input_tokens
== span.attributes[GenAIAttributes.GEN_AI_USAGE_INPUT_TOKENS]
)
else:
assert GenAIAttributes.GEN_AI_USAGE_INPUT_TOKENS not in span.attributes
if output_tokens:
assert (
output_tokens
== span.attributes[GenAIAttributes.GEN_AI_USAGE_OUTPUT_TOKENS]
)
else:
assert (
GenAIAttributes.GEN_AI_USAGE_OUTPUT_TOKENS not in span.attributes
)
assert server_address == span.attributes[ServerAttributes.SERVER_ADDRESS]
def assert_log_parent(log, span):
assert log.log_record.trace_id == span.get_span_context().trace_id
assert log.log_record.span_id == span.get_span_context().span_id
assert log.log_record.trace_flags == span.get_span_context().trace_flags
def get_current_weather_tool_definition():
return {
"type": "function",
"function": {
"name": "get_current_weather",
"description": "Get the current weather in a given location",
"parameters": {
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "The city and state, e.g. Boston, MA",
},
},
"required": ["location"],
"additionalProperties": False,
},
},
}

View File

@ -910,6 +910,9 @@ commands =
test-instrumentation-mysqlclient: pytest {toxinidir}/instrumentation/opentelemetry-instrumentation-mysqlclient/tests {posargs}
lint-instrumentation-mysqlclient: sh -c "cd instrumentation && pylint --rcfile ../.pylintrc opentelemetry-instrumentation-mysqlclient"
test-instrumentation-openai-v2: pytest {toxinidir}/instrumentation/opentelemetry-instrumentation-openai-v2/tests {posargs}
lint-instrumentation-openai-v2: sh -c "cd instrumentation && pylint --rcfile ../.pylintrc opentelemetry-instrumentation-openai-v2"
test-instrumentation-sio-pika: pytest {toxinidir}/instrumentation/opentelemetry-instrumentation-pika/tests {posargs}
lint-instrumentation-sio-pika: sh -c "cd instrumentation && pylint --rcfile ../.pylintrc opentelemetry-instrumentation-pika"