From cc6bb0cd245d137963f8260b2c929bbd7ca58e76 Mon Sep 17 00:00:00 2001 From: jsonbailey Date: Thu, 14 May 2026 08:45:32 -0500 Subject: [PATCH 1/2] fix: Use tokens= instead of usage= on LDAIMetrics constructors - bedrock_example.py and langgraph_agent_example.py both passed usage= to LDAIMetrics(...) constructors, which raises TypeError on Python AI SDK 0.20+. The dataclass field is tokens. Refs AIC-2383 Co-Authored-By: Claude Opus 4.7 (1M context) --- getting_started/bedrock/converse/bedrock_example.py | 2 +- .../langgraph/react_agent/langgraph_agent_example.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/getting_started/bedrock/converse/bedrock_example.py b/getting_started/bedrock/converse/bedrock_example.py index 1fded50..85d8ba9 100755 --- a/getting_started/bedrock/converse/bedrock_example.py +++ b/getting_started/bedrock/converse/bedrock_example.py @@ -34,7 +34,7 @@ def get_bedrock_metrics(response): duration_ms = response.get("metrics", {}).get("latencyMs") - return LDAIMetrics(success=success, usage=usage, duration_ms=duration_ms) + return LDAIMetrics(success=success, tokens=usage, duration_ms=duration_ms) # Set sdk_key to your LaunchDarkly SDK key. 
sdk_key = os.getenv('LAUNCHDARKLY_SDK_KEY') diff --git a/getting_started/langgraph/react_agent/langgraph_agent_example.py b/getting_started/langgraph/react_agent/langgraph_agent_example.py index 0244865..ab299e9 100644 --- a/getting_started/langgraph/react_agent/langgraph_agent_example.py +++ b/getting_started/langgraph/react_agent/langgraph_agent_example.py @@ -33,7 +33,7 @@ def map_provider_to_langchain(provider_name): def get_langgraph_metrics(response): """Extract aggregated metrics from a LangGraph agent response.""" messages = response.get("messages", []) - return LDAIMetrics(success=True, usage=sum_token_usage_from_messages(messages)) + return LDAIMetrics(success=True, tokens=sum_token_usage_from_messages(messages)) def get_weather(city: str) -> str: """Get the weather for a given city.""" From 067ae712baeb320d275343d7c089375fc14349a6 Mon Sep 17 00:00:00 2001 From: jsonbailey Date: Thu, 14 May 2026 11:18:05 -0500 Subject: [PATCH 2/2] fix: Pin pyproject.toml to launchdarkly-server-sdk-ai>=0.20.0 The tokens= constructor kwarg requires SDK 0.20+; 0.19 still uses usage=. Bump the pyproject.toml pins across all 10 example projects so users land on a compatible SDK. 
Refs AIC-2383 Co-Authored-By: Claude Opus 4.7 (1M context) --- features/create_agent/pyproject.toml | 2 +- features/create_agent_graph/pyproject.toml | 2 +- features/create_judge/pyproject.toml | 2 +- features/create_model/pyproject.toml | 2 +- getting_started/bedrock/converse/pyproject.toml | 2 +- getting_started/gemini/generate_content/pyproject.toml | 2 +- getting_started/langchain/invoke/pyproject.toml | 2 +- getting_started/langgraph/react_agent/pyproject.toml | 2 +- getting_started/langgraph/state_graph/pyproject.toml | 2 +- getting_started/openai/chat_completions/pyproject.toml | 2 +- 10 files changed, 10 insertions(+), 10 deletions(-) diff --git a/features/create_agent/pyproject.toml b/features/create_agent/pyproject.toml index 9bc2c9e..b0c8de1 100644 --- a/features/create_agent/pyproject.toml +++ b/features/create_agent/pyproject.toml @@ -13,7 +13,7 @@ agent = "create_agent_example:main" [tool.poetry.dependencies] python = "^3.10" python-dotenv = ">=1.0.0" -launchdarkly-server-sdk-ai = ">=0.19.0" +launchdarkly-server-sdk-ai = ">=0.20.0" launchdarkly-observability = ">=0.1.0" launchdarkly-server-sdk-ai-openai = {version = ">=0.5.0", extras = ["agents"]} launchdarkly-server-sdk-ai-langchain = ">=0.6.0" diff --git a/features/create_agent_graph/pyproject.toml b/features/create_agent_graph/pyproject.toml index febcd58..97d916e 100644 --- a/features/create_agent_graph/pyproject.toml +++ b/features/create_agent_graph/pyproject.toml @@ -13,7 +13,7 @@ agent-graph = "create_agent_graph_example:main" [tool.poetry.dependencies] python = "^3.10" python-dotenv = ">=1.0.0" -launchdarkly-server-sdk-ai = ">=0.19.0" +launchdarkly-server-sdk-ai = ">=0.20.0" launchdarkly-observability = ">=0.1.0" launchdarkly-server-sdk-ai-openai = {version = ">=0.5.0", extras = ["agents"]} launchdarkly-server-sdk-ai-langchain = {version = ">=0.6.0", extras = ["graph"]} diff --git a/features/create_judge/pyproject.toml b/features/create_judge/pyproject.toml index 421f082..f7d58c0 100644 --- 
a/features/create_judge/pyproject.toml +++ b/features/create_judge/pyproject.toml @@ -13,7 +13,7 @@ judge = "create_judge_example:main" [tool.poetry.dependencies] python = "^3.10" python-dotenv = ">=1.0.0" -launchdarkly-server-sdk-ai = ">=0.19.0" +launchdarkly-server-sdk-ai = ">=0.20.0" launchdarkly-observability = ">=0.1.0" launchdarkly-server-sdk-ai-openai = ">=0.5.0" launchdarkly-server-sdk-ai-langchain = ">=0.6.0" diff --git a/features/create_model/pyproject.toml b/features/create_model/pyproject.toml index 7003705..8f2a2a3 100644 --- a/features/create_model/pyproject.toml +++ b/features/create_model/pyproject.toml @@ -13,7 +13,7 @@ model = "create_model_example:main" [tool.poetry.dependencies] python = "^3.10" python-dotenv = ">=1.0.0" -launchdarkly-server-sdk-ai = ">=0.19.0" +launchdarkly-server-sdk-ai = ">=0.20.0" launchdarkly-observability = ">=0.1.0" launchdarkly-server-sdk-ai-openai = ">=0.5.0" launchdarkly-server-sdk-ai-langchain = ">=0.6.0" diff --git a/getting_started/bedrock/converse/pyproject.toml b/getting_started/bedrock/converse/pyproject.toml index 4341134..8e7a559 100644 --- a/getting_started/bedrock/converse/pyproject.toml +++ b/getting_started/bedrock/converse/pyproject.toml @@ -13,7 +13,7 @@ bedrock = "bedrock_example:main" [tool.poetry.dependencies] python = "^3.10" python-dotenv = ">=1.0.0" -launchdarkly-server-sdk-ai = ">=0.19.0" +launchdarkly-server-sdk-ai = ">=0.20.0" launchdarkly-observability = ">=0.1.0" boto3 = ">=0.2.0" diff --git a/getting_started/gemini/generate_content/pyproject.toml b/getting_started/gemini/generate_content/pyproject.toml index 3082b27..d0ab63f 100644 --- a/getting_started/gemini/generate_content/pyproject.toml +++ b/getting_started/gemini/generate_content/pyproject.toml @@ -13,7 +13,7 @@ gemini = "gemini_example:main" [tool.poetry.dependencies] python = "^3.10" python-dotenv = ">=1.0.0" -launchdarkly-server-sdk-ai = ">=0.19.0" +launchdarkly-server-sdk-ai = ">=0.20.0" launchdarkly-observability = ">=0.1.0" 
google-genai = "^1.30.0" diff --git a/getting_started/langchain/invoke/pyproject.toml b/getting_started/langchain/invoke/pyproject.toml index cfcf97f..8e16823 100644 --- a/getting_started/langchain/invoke/pyproject.toml +++ b/getting_started/langchain/invoke/pyproject.toml @@ -13,7 +13,7 @@ langchain = "langchain_example:main" [tool.poetry.dependencies] python = "^3.10" python-dotenv = ">=1.0.0" -launchdarkly-server-sdk-ai = ">=0.19.0" +launchdarkly-server-sdk-ai = ">=0.20.0" launchdarkly-observability = ">=0.1.0" launchdarkly-server-sdk-ai-langchain = ">=0.6.0" langchain = "^1.0.0" diff --git a/getting_started/langgraph/react_agent/pyproject.toml b/getting_started/langgraph/react_agent/pyproject.toml index febeb78..df7bda5 100644 --- a/getting_started/langgraph/react_agent/pyproject.toml +++ b/getting_started/langgraph/react_agent/pyproject.toml @@ -13,7 +13,7 @@ agent = "langgraph_agent_example:main" [tool.poetry.dependencies] python = "^3.10" python-dotenv = ">=1.0.0" -launchdarkly-server-sdk-ai = ">=0.19.0" +launchdarkly-server-sdk-ai = ">=0.20.0" launchdarkly-observability = ">=0.1.0" launchdarkly-server-sdk-ai-langchain = ">=0.6.0" langchain = "^1.0.0" diff --git a/getting_started/langgraph/state_graph/pyproject.toml b/getting_started/langgraph/state_graph/pyproject.toml index 6ccf126..18f47b6 100644 --- a/getting_started/langgraph/state_graph/pyproject.toml +++ b/getting_started/langgraph/state_graph/pyproject.toml @@ -13,7 +13,7 @@ agent-graph = "langgraph_multi_agent_example:main" [tool.poetry.dependencies] python = "^3.10" python-dotenv = ">=1.0.0" -launchdarkly-server-sdk-ai = ">=0.19.0" +launchdarkly-server-sdk-ai = ">=0.20.0" launchdarkly-observability = ">=0.1.0" launchdarkly-server-sdk-ai-langchain = ">=0.6.0" langchain = "^1.0.0" diff --git a/getting_started/openai/chat_completions/pyproject.toml b/getting_started/openai/chat_completions/pyproject.toml index 92f33f2..6e3d86a 100644 --- a/getting_started/openai/chat_completions/pyproject.toml +++ 
b/getting_started/openai/chat_completions/pyproject.toml @@ -13,7 +13,7 @@ openai = "openai_example:main" [tool.poetry.dependencies] python = "^3.10" python-dotenv = ">=1.0.0" -launchdarkly-server-sdk-ai = ">=0.19.0" +launchdarkly-server-sdk-ai = ">=0.20.0" launchdarkly-observability = ">=0.1.0" launchdarkly-server-sdk-ai-openai = ">=0.5.0" openai = ">=1.0.0"