diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index 7da3bd4caf..58f8a4601d 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,3 +1,3 @@
 {
-  ".": "1.59.7"
+  ".": "1.59.8"
 }
\ No newline at end of file
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 08674b4a36..9f301cedff 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,29 @@
 # Changelog
 
+## 1.59.8 (2025-01-17)
+
+Full Changelog: [v1.59.7...v1.59.8](https://github.com/openai/openai-python/compare/v1.59.7...v1.59.8)
+
+### Bug Fixes
+
+* streaming ([c16f58e](https://github.com/openai/openai-python/commit/c16f58ead0bc85055b164182689ba74b7e939dfa))
+* **structured outputs:** avoid parsing empty content ([#2023](https://github.com/openai/openai-python/issues/2023)) ([6d3513c](https://github.com/openai/openai-python/commit/6d3513c86f6e5800f8f73a45e089b7a205327121))
+* **structured outputs:** correct schema coercion for inline ref expansion ([#2025](https://github.com/openai/openai-python/issues/2025)) ([2f4f0b3](https://github.com/openai/openai-python/commit/2f4f0b374207f162060c328b71ec995049dc42e8))
+* **types:** correct type for vector store chunking strategy ([#2017](https://github.com/openai/openai-python/issues/2017)) ([e389279](https://github.com/openai/openai-python/commit/e38927950a5cdad99065853fe7b72aad6bb322e9))
+
+
+### Chores
+
+* **examples:** update realtime model ([f26746c](https://github.com/openai/openai-python/commit/f26746cbcd893d66cf8a3fd68a7c3779dc8c833c)), closes [#2020](https://github.com/openai/openai-python/issues/2020)
+* **internal:** bump pyright dependency ([#2021](https://github.com/openai/openai-python/issues/2021)) ([0a9a0f5](https://github.com/openai/openai-python/commit/0a9a0f5d8b9d5457643798287f893305006dd518))
+* **internal:** streaming refactors ([#2012](https://github.com/openai/openai-python/issues/2012)) ([d76a748](https://github.com/openai/openai-python/commit/d76a748f606743407f94dfc26758095560e2082a))
+* **internal:** update deps ([#2015](https://github.com/openai/openai-python/issues/2015)) ([514e0e4](https://github.com/openai/openai-python/commit/514e0e415f87ab4510262d29ed6125384e017b84))
+
+
+### Documentation
+
+* **examples/azure:** example script with realtime API ([#1967](https://github.com/openai/openai-python/issues/1967)) ([84f2f9c](https://github.com/openai/openai-python/commit/84f2f9c0439229a7db7136fe78419292d34d1f81))
+
 ## 1.59.7 (2025-01-13)
 
 Full Changelog: [v1.59.6...v1.59.7](https://github.com/openai/openai-python/compare/v1.59.6...v1.59.7)
diff --git a/README.md b/README.md
index ad1c9afd10..ec556bd27a 100644
--- a/README.md
+++ b/README.md
@@ -275,7 +275,7 @@ from openai import AsyncOpenAI
 async def main():
     client = AsyncOpenAI()
 
-    async with client.beta.realtime.connect(model="gpt-4o-realtime-preview-2024-10-01") as connection:
+    async with client.beta.realtime.connect(model="gpt-4o-realtime-preview") as connection:
         await connection.session.update(session={'modalities': ['text']})
 
         await connection.conversation.item.create(
@@ -309,7 +309,7 @@ Whenever an error occurs, the Realtime API will send an [`error` event](https://
 ```py
 client = AsyncOpenAI()
 
-async with client.beta.realtime.connect(model="gpt-4o-realtime-preview-2024-10-01") as connection:
+async with client.beta.realtime.connect(model="gpt-4o-realtime-preview") as connection:
     ...
     async for event in connection:
         if event.type == 'error':
diff --git a/api.md b/api.md
index ace93e0559..1edd3f6589 100644
--- a/api.md
+++ b/api.md
@@ -314,7 +314,7 @@ from openai.types.beta import (
     OtherFileChunkingStrategyObject,
     StaticFileChunkingStrategy,
     StaticFileChunkingStrategyObject,
-    StaticFileChunkingStrategyParam,
+    StaticFileChunkingStrategyObjectParam,
     VectorStore,
     VectorStoreDeleted,
 )
diff --git a/examples/realtime/azure_realtime.py b/examples/realtime/azure_realtime.py
new file mode 100644
index 0000000000..de88d47052
--- /dev/null
+++ b/examples/realtime/azure_realtime.py
@@ -0,0 +1,57 @@
+import os
+import asyncio
+
+from azure.identity.aio import DefaultAzureCredential, get_bearer_token_provider
+
+from openai import AsyncAzureOpenAI
+
+# Azure OpenAI Realtime Docs
+
+# How-to: https://learn.microsoft.com/azure/ai-services/openai/how-to/realtime-audio
+# Supported models and API versions: https://learn.microsoft.com/azure/ai-services/openai/how-to/realtime-audio#supported-models
+# Entra ID auth: https://learn.microsoft.com/azure/ai-services/openai/how-to/managed-identity
+
+
+async def main() -> None:
+    """The following example demonstrates how to configure Azure OpenAI to use the Realtime API.
+    For an audio example, see push_to_talk_app.py and update the client and model parameter accordingly.
+
+    When prompted for user input, type a message and hit enter to send it to the model.
+    Enter "q" to quit the conversation.
+    """
+
+    credential = DefaultAzureCredential()
+    client = AsyncAzureOpenAI(
+        azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],
+        azure_ad_token_provider=get_bearer_token_provider(credential, "https://cognitiveservices.azure.com/.default"),
+        api_version="2024-10-01-preview",
+    )
+    async with client.beta.realtime.connect(
+        model="gpt-4o-realtime-preview",  # deployment name for your model
+    ) as connection:
+        await connection.session.update(session={"modalities": ["text"]})  # type: ignore
+        while True:
+            user_input = input("Enter a message: ")
+            if user_input == "q":
+                break
+
+            await connection.conversation.item.create(
+                item={
+                    "type": "message",
+                    "role": "user",
+                    "content": [{"type": "input_text", "text": user_input}],
+                }
+            )
+            await connection.response.create()
+            async for event in connection:
+                if event.type == "response.text.delta":
+                    print(event.delta, flush=True, end="")
+                elif event.type == "response.text.done":
+                    print()
+                elif event.type == "response.done":
+                    break
+
+    await credential.close()
+
+
+asyncio.run(main())
diff --git a/examples/realtime/push_to_talk_app.py b/examples/realtime/push_to_talk_app.py
index d46945a8ed..8dc303a83a 100755
--- a/examples/realtime/push_to_talk_app.py
+++ b/examples/realtime/push_to_talk_app.py
@@ -152,7 +152,7 @@ async def on_mount(self) -> None:
         self.run_worker(self.send_mic_audio())
 
     async def handle_realtime_connection(self) -> None:
-        async with self.client.beta.realtime.connect(model="gpt-4o-realtime-preview-2024-10-01") as conn:
+        async with self.client.beta.realtime.connect(model="gpt-4o-realtime-preview") as conn:
             self.connection = conn
             self.connected.set()
 
diff --git a/mypy.ini b/mypy.ini
index 1ea1fe909d..660f1a086e 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -44,7 +44,7 @@ cache_fine_grained = True
 # ```
 # Changing this codegen to make mypy happy would increase complexity
 # and would not be worth it.
-disable_error_code = func-returns-value
+disable_error_code = func-returns-value,overload-cannot-match
 
 # https://github.com/python/mypy/issues/12162
 [mypy.overrides]
diff --git a/pyproject.toml b/pyproject.toml
index e769f4a95f..a75d24e1eb 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "openai"
-version = "1.59.7"
+version = "1.59.8"
 description = "The official Python library for the openai API"
 dynamic = ["readme"]
 license = "Apache-2.0"
diff --git a/requirements-dev.lock b/requirements-dev.lock
index 15ecbf081a..ef26591f12 100644
--- a/requirements-dev.lock
+++ b/requirements-dev.lock
@@ -83,7 +83,7 @@ msal==1.31.0
     # via msal-extensions
 msal-extensions==1.2.0
     # via azure-identity
-mypy==1.13.0
+mypy==1.14.1
 mypy-extensions==1.0.0
     # via black
     # via mypy
@@ -124,7 +124,7 @@ pygments==2.18.0
     # via rich
 pyjwt==2.8.0
     # via msal
-pyright==1.1.390
+pyright==1.1.392.post0
 pytest==8.3.3
     # via pytest-asyncio
 pytest-asyncio==0.24.0
diff --git a/src/openai/_legacy_response.py b/src/openai/_legacy_response.py
index 7a14f27adb..25680049dc 100644
--- a/src/openai/_legacy_response.py
+++ b/src/openai/_legacy_response.py
@@ -269,7 +269,9 @@ def _parse(self, *, to: type[_T] | None = None) -> R | _T:
         if origin == LegacyAPIResponse:
             raise RuntimeError("Unexpected state - cast_to is `APIResponse`")
 
-        if inspect.isclass(origin) and issubclass(origin, httpx.Response):
+        if inspect.isclass(
+            origin  # pyright: ignore[reportUnknownArgumentType]
+        ) and issubclass(origin, httpx.Response):
             # Because of the invariance of our ResponseT TypeVar, users can subclass httpx.Response
             # and pass that class to our request functions. We cannot change the variance to be either
             # covariant or contravariant as that makes our usage of ResponseT illegal. We could construct
@@ -279,7 +281,13 @@ def _parse(self, *, to: type[_T] | None = None) -> R | _T:
                 raise ValueError(f"Subclasses of httpx.Response cannot be passed to `cast_to`")
             return cast(R, response)
 
-        if inspect.isclass(origin) and not issubclass(origin, BaseModel) and issubclass(origin, pydantic.BaseModel):
+        if (
+            inspect.isclass(
+                origin  # pyright: ignore[reportUnknownArgumentType]
+            )
+            and not issubclass(origin, BaseModel)
+            and issubclass(origin, pydantic.BaseModel)
+        ):
             raise TypeError("Pydantic models must subclass our base model type, e.g. `from openai import BaseModel`")
 
         if (
diff --git a/src/openai/_response.py b/src/openai/_response.py
index 1527446585..36c7ea1281 100644
--- a/src/openai/_response.py
+++ b/src/openai/_response.py
@@ -214,7 +214,13 @@ def _parse(self, *, to: type[_T] | None = None) -> R | _T:
                 raise ValueError(f"Subclasses of httpx.Response cannot be passed to `cast_to`")
             return cast(R, response)
 
-        if inspect.isclass(origin) and not issubclass(origin, BaseModel) and issubclass(origin, pydantic.BaseModel):
+        if (
+            inspect.isclass(
+                origin  # pyright: ignore[reportUnknownArgumentType]
+            )
+            and not issubclass(origin, BaseModel)
+            and issubclass(origin, pydantic.BaseModel)
+        ):
             raise TypeError("Pydantic models must subclass our base model type, e.g. `from openai import BaseModel`")
 
         if (
diff --git a/src/openai/_version.py b/src/openai/_version.py
index 656d17ff63..d6f55997e7 100644
--- a/src/openai/_version.py
+++ b/src/openai/_version.py
@@ -1,4 +1,4 @@
 # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
 
 __title__ = "openai"
-__version__ = "1.59.7"  # x-release-please-version
+__version__ = "1.59.8"  # x-release-please-version
diff --git a/src/openai/lib/_parsing/_completions.py b/src/openai/lib/_parsing/_completions.py
index f1fa9f2b55..33c4ccb946 100644
--- a/src/openai/lib/_parsing/_completions.py
+++ b/src/openai/lib/_parsing/_completions.py
@@ -157,7 +157,7 @@ def maybe_parse_content(
     response_format: type[ResponseFormatT] | ResponseFormatParam | NotGiven,
     message: ChatCompletionMessage | ParsedChatCompletionMessage[object],
 ) -> ResponseFormatT | None:
-    if has_rich_response_format(response_format) and message.content is not None and not message.refusal:
+    if has_rich_response_format(response_format) and message.content and not message.refusal:
         return _parse_content(response_format, message.content)
 
     return None
diff --git a/src/openai/lib/_pydantic.py b/src/openai/lib/_pydantic.py
index 4e8bc772be..c2d73e5fc6 100644
--- a/src/openai/lib/_pydantic.py
+++ b/src/openai/lib/_pydantic.py
@@ -108,6 +108,9 @@ def _ensure_strict_json_schema(
         # properties from the json schema take priority over the ones on the `$ref`
         json_schema.update({**resolved, **json_schema})
         json_schema.pop("$ref")
+        # Since the schema expanded from `$ref` might not have `additionalProperties: false` applied,
+        # we call `_ensure_strict_json_schema` again to fix the inlined schema and ensure it's valid.
+        return _ensure_strict_json_schema(json_schema, path=path, root=root)
 
     return json_schema
 
diff --git a/src/openai/types/beta/__init__.py b/src/openai/types/beta/__init__.py
index 7f76fed0cd..b9ea792bfa 100644
--- a/src/openai/types/beta/__init__.py
+++ b/src/openai/types/beta/__init__.py
@@ -43,3 +43,6 @@
 from .assistant_response_format_option_param import (
     AssistantResponseFormatOptionParam as AssistantResponseFormatOptionParam,
 )
+from .static_file_chunking_strategy_object_param import (
+    StaticFileChunkingStrategyObjectParam as StaticFileChunkingStrategyObjectParam,
+)
diff --git a/src/openai/types/beta/file_chunking_strategy_param.py b/src/openai/types/beta/file_chunking_strategy_param.py
index 46383358e5..25d94286d8 100644
--- a/src/openai/types/beta/file_chunking_strategy_param.py
+++ b/src/openai/types/beta/file_chunking_strategy_param.py
@@ -6,8 +6,8 @@
 from typing_extensions import TypeAlias
 
 from .auto_file_chunking_strategy_param import AutoFileChunkingStrategyParam
-from .static_file_chunking_strategy_param import StaticFileChunkingStrategyParam
+from .static_file_chunking_strategy_object_param import StaticFileChunkingStrategyObjectParam
 
 __all__ = ["FileChunkingStrategyParam"]
 
-FileChunkingStrategyParam: TypeAlias = Union[AutoFileChunkingStrategyParam, StaticFileChunkingStrategyParam]
+FileChunkingStrategyParam: TypeAlias = Union[AutoFileChunkingStrategyParam, StaticFileChunkingStrategyObjectParam]
diff --git a/src/openai/types/beta/static_file_chunking_strategy_object_param.py b/src/openai/types/beta/static_file_chunking_strategy_object_param.py
new file mode 100644
index 0000000000..0cdf35c0df
--- /dev/null
+++ b/src/openai/types/beta/static_file_chunking_strategy_object_param.py
@@ -0,0 +1,16 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from __future__ import annotations
+
+from typing_extensions import Literal, Required, TypedDict
+
+from .static_file_chunking_strategy_param import StaticFileChunkingStrategyParam
+
+__all__ = ["StaticFileChunkingStrategyObjectParam"]
+
+
+class StaticFileChunkingStrategyObjectParam(TypedDict, total=False):
+    static: Required[StaticFileChunkingStrategyParam]
+
+    type: Required[Literal["static"]]
+    """Always `static`."""
diff --git a/tests/lib/test_pydantic.py b/tests/lib/test_pydantic.py
index 99b9e96d21..7e128b70c0 100644
--- a/tests/lib/test_pydantic.py
+++ b/tests/lib/test_pydantic.py
@@ -7,6 +7,7 @@
 
 import openai
 from openai._compat import PYDANTIC_V2
+from openai.lib._pydantic import to_strict_json_schema
 
 from .schema_types.query import Query
 
@@ -235,3 +236,176 @@ def test_enums() -> None:
                 },
             }
         )
+
+
+class Star(BaseModel):
+    name: str = Field(description="The name of the star.")
+
+
+class Galaxy(BaseModel):
+    name: str = Field(description="The name of the galaxy.")
+    largest_star: Star = Field(description="The largest star in the galaxy.")
+
+
+class Universe(BaseModel):
+    name: str = Field(description="The name of the universe.")
+    galaxy: Galaxy = Field(description="A galaxy in the universe.")
+
+
+def test_nested_inline_ref_expansion() -> None:
+    if PYDANTIC_V2:
+        assert to_strict_json_schema(Universe) == snapshot(
+            {
+                "title": "Universe",
+                "type": "object",
+                "$defs": {
+                    "Star": {
+                        "title": "Star",
+                        "type": "object",
+                        "properties": {
+                            "name": {
+                                "type": "string",
+                                "title": "Name",
+                                "description": "The name of the star.",
+                            }
+                        },
+                        "required": ["name"],
+                        "additionalProperties": False,
+                    },
+                    "Galaxy": {
+                        "title": "Galaxy",
+                        "type": "object",
+                        "properties": {
+                            "name": {
+                                "type": "string",
+                                "title": "Name",
+                                "description": "The name of the galaxy.",
+                            },
+                            "largest_star": {
+                                "title": "Star",
+                                "type": "object",
+                                "properties": {
+                                    "name": {
+                                        "type": "string",
+                                        "title": "Name",
+                                        "description": "The name of the star.",
+                                    }
+                                },
+                                "required": ["name"],
+                                "description": "The largest star in the galaxy.",
+                                "additionalProperties": False,
+                            },
+                        },
+                        "required": ["name", "largest_star"],
+                        "additionalProperties": False,
+                    },
+                },
+                "properties": {
+                    "name": {
+                        "type": "string",
+                        "title": "Name",
+                        "description": "The name of the universe.",
+                    },
+                    "galaxy": {
+                        "title": "Galaxy",
+                        "type": "object",
+                        "properties": {
+                            "name": {
+                                "type": "string",
+                                "title": "Name",
+                                "description": "The name of the galaxy.",
+                            },
+                            "largest_star": {
+                                "title": "Star",
+                                "type": "object",
+                                "properties": {
+                                    "name": {
+                                        "type": "string",
+                                        "title": "Name",
+                                        "description": "The name of the star.",
+                                    }
+                                },
+                                "required": ["name"],
+                                "description": "The largest star in the galaxy.",
+                                "additionalProperties": False,
+                            },
+                        },
+                        "required": ["name", "largest_star"],
+                        "description": "A galaxy in the universe.",
+                        "additionalProperties": False,
+                    },
+                },
+                "required": ["name", "galaxy"],
+                "additionalProperties": False,
+            }
+        )
+    else:
+        assert to_strict_json_schema(Universe) == snapshot(
+            {
+                "title": "Universe",
+                "type": "object",
+                "definitions": {
+                    "Star": {
+                        "title": "Star",
+                        "type": "object",
+                        "properties": {
+                            "name": {"title": "Name", "description": "The name of the star.", "type": "string"}
+                        },
+                        "required": ["name"],
+                        "additionalProperties": False,
+                    },
+                    "Galaxy": {
+                        "title": "Galaxy",
+                        "type": "object",
+                        "properties": {
+                            "name": {"title": "Name", "description": "The name of the galaxy.", "type": "string"},
+                            "largest_star": {
+                                "title": "Largest Star",
+                                "description": "The largest star in the galaxy.",
+                                "type": "object",
+                                "properties": {
+                                    "name": {"title": "Name", "description": "The name of the star.", "type": "string"}
+                                },
+                                "required": ["name"],
+                                "additionalProperties": False,
+                            },
+                        },
+                        "required": ["name", "largest_star"],
+                        "additionalProperties": False,
+                    },
+                },
+                "properties": {
+                    "name": {
+                        "title": "Name",
+                        "description": "The name of the universe.",
+                        "type": "string",
+                    },
+                    "galaxy": {
+                        "title": "Galaxy",
+                        "description": "A galaxy in the universe.",
+                        "type": "object",
+                        "properties": {
+                            "name": {
+                                "title": "Name",
+                                "description": "The name of the galaxy.",
+                                "type": "string",
+                            },
+                            "largest_star": {
+                                "title": "Largest Star",
+                                "description": "The largest star in the galaxy.",
+                                "type": "object",
+                                "properties": {
+                                    "name": {"title": "Name", "description": "The name of the star.", "type": "string"}
+                                },
+                                "required": ["name"],
+                                "additionalProperties": False,
+                            },
+                        },
+                        "required": ["name", "largest_star"],
+                        "additionalProperties": False,
+                    },
+                },
+                "required": ["name", "galaxy"],
+                "additionalProperties": False,
+            }
+        )