Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .release-please-manifest.json
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
{
".": "0.1.0-alpha.42"
".": "0.1.0"
}
4 changes: 2 additions & 2 deletions .stats.yml
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
configured_endpoints: 32
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/mixedbread%2Fmixedbread-82c2c1c322149cd73b2e8e45f475919b941752a89e74464ccecd1aee9352e9be.yml
openapi_spec_hash: dbd7616a32c90fd25b32994830fb12f6
config_hash: 20a40be2c85d83a4bb34aee53cefd856
openapi_spec_hash: a47fe4cb39ee0cb74ee5888de2f0a5e1
config_hash: f87f729f63f3b34364d1c144753b920d
14 changes: 14 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,19 @@
# Changelog

## 0.1.0 (2025-04-04)

Full Changelog: [v0.1.0-alpha.42...v0.1.0](https://github.com/mixedbread-ai/mixedbread-python/compare/v0.1.0-alpha.42...v0.1.0)

### Features

* **api:** update via SDK Studio ([#189](https://github.com/mixedbread-ai/mixedbread-python/issues/189)) ([7d2c3ea](https://github.com/mixedbread-ai/mixedbread-python/commit/7d2c3eaf947540a650971b52c6aeb907c35f330b))
* **api:** update via SDK Studio ([#191](https://github.com/mixedbread-ai/mixedbread-python/issues/191)) ([ecc7b0c](https://github.com/mixedbread-ai/mixedbread-python/commit/ecc7b0ca2ef2ced5c03615aac32a3cc6e79ac162))


### Chores

* **internal:** remove trailing character ([#192](https://github.com/mixedbread-ai/mixedbread-python/issues/192)) ([1d85db1](https://github.com/mixedbread-ai/mixedbread-python/commit/1d85db180e2c005f90348abd96622c7bb51cd39b))

## 0.1.0-alpha.42 (2025-04-03)

Full Changelog: [v0.1.0-alpha.41...v0.1.0-alpha.42](https://github.com/mixedbread-ai/mixedbread-python/compare/v0.1.0-alpha.41...v0.1.0-alpha.42)
Expand Down
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ The REST API documentation can be found on [mixedbread.com](https://mixedbread.c

```sh
# install from PyPI
pip install --pre mixedbread
pip install mixedbread
```

## Usage
Expand Down
2 changes: 1 addition & 1 deletion api.md
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
# Shared Types

```python
from mixedbread.types import SearchFilter, SearchFilterCondition
from mixedbread.types import SearchFilter, SearchFilterCondition, Usage
```

# Mixedbread
Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[project]
name = "mixedbread"
version = "0.1.0-alpha.42"
version = "0.1.0"
description = "The official Python library for the Mixedbread API"
dynamic = ["readme"]
license = "Apache-2.0"
Expand Down
4 changes: 2 additions & 2 deletions src/mixedbread/_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -242,7 +242,7 @@ def embed(
self,
*,
model: str,
input: List[str],
input: Union[str, List[str]],
dimensions: Optional[int] | NotGiven = NOT_GIVEN,
prompt: Optional[str] | NotGiven = NOT_GIVEN,
normalized: bool | NotGiven = NOT_GIVEN,
Expand Down Expand Up @@ -599,7 +599,7 @@ async def embed(
self,
*,
model: str,
input: List[str],
input: Union[str, List[str]],
dimensions: Optional[int] | NotGiven = NOT_GIVEN,
prompt: Optional[str] | NotGiven = NOT_GIVEN,
normalized: bool | NotGiven = NOT_GIVEN,
Expand Down
2 changes: 1 addition & 1 deletion src/mixedbread/_version.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

__title__ = "mixedbread"
__version__ = "0.1.0-alpha.42" # x-release-please-version
__version__ = "0.1.0" # x-release-please-version
4 changes: 2 additions & 2 deletions src/mixedbread/resources/embeddings.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@ def create(
self,
*,
model: str,
input: List[str],
input: Union[str, List[str]],
dimensions: Optional[int] | NotGiven = NOT_GIVEN,
prompt: Optional[str] | NotGiven = NOT_GIVEN,
normalized: bool | NotGiven = NOT_GIVEN,
Expand Down Expand Up @@ -141,7 +141,7 @@ async def create(
self,
*,
model: str,
input: List[str],
input: Union[str, List[str]],
dimensions: Optional[int] | NotGiven = NOT_GIVEN,
prompt: Optional[str] | NotGiven = NOT_GIVEN,
normalized: bool | NotGiven = NOT_GIVEN,
Expand Down
2 changes: 1 addition & 1 deletion src/mixedbread/types/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

from __future__ import annotations

from .shared import SearchFilter as SearchFilter, SearchFilterCondition as SearchFilterCondition
from .shared import Usage as Usage, SearchFilter as SearchFilter, SearchFilterCondition as SearchFilterCondition
from .embedding import Embedding as Embedding
from .file_counts import FileCounts as FileCounts
from .file_object import FileObject as FileObject
Expand Down
2 changes: 1 addition & 1 deletion src/mixedbread/types/client_embed_params.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ class ClientEmbedParams(TypedDict, total=False):
model: Required[str]
"""The model to use for creating embeddings."""

input: Required[List[str]]
input: Required[Union[str, List[str]]]
"""The input to create embeddings for."""

dimensions: Optional[int]
Expand Down
2 changes: 1 addition & 1 deletion src/mixedbread/types/embedding_create_params.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ class EmbeddingCreateParams(TypedDict, total=False):
model: Required[str]
"""The model to use for creating embeddings."""

input: Required[List[str]]
input: Required[Union[str, List[str]]]
"""The input to create embeddings for."""

dimensions: Optional[int]
Expand Down
14 changes: 2 additions & 12 deletions src/mixedbread/types/embedding_create_response.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,20 +5,10 @@

from .._models import BaseModel
from .embedding import Embedding
from .shared.usage import Usage
from .multi_encoding_embedding import MultiEncodingEmbedding

__all__ = ["EmbeddingCreateResponse", "Usage"]


class Usage(BaseModel):
prompt_tokens: int
"""The number of tokens used for the prompt"""

total_tokens: int
"""The total number of tokens used"""

completion_tokens: Optional[int] = None
"""The number of tokens used for the completion"""
__all__ = ["EmbeddingCreateResponse"]


class EmbeddingCreateResponse(BaseModel):
Expand Down
14 changes: 2 additions & 12 deletions src/mixedbread/types/rerank_response.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,19 +4,9 @@
from typing_extensions import Literal

from .._models import BaseModel
from .shared.usage import Usage

__all__ = ["RerankResponse", "Usage", "Data"]


class Usage(BaseModel):
prompt_tokens: int
"""The number of tokens used for the prompt"""

total_tokens: int
"""The total number of tokens used"""

completion_tokens: Optional[int] = None
"""The number of tokens used for the completion"""
__all__ = ["RerankResponse", "Data"]


class Data(BaseModel):
Expand Down
1 change: 1 addition & 0 deletions src/mixedbread/types/shared/__init__.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from .usage import Usage as Usage
from .search_filter import SearchFilter as SearchFilter
from .search_filter_condition import SearchFilterCondition as SearchFilterCondition
18 changes: 18 additions & 0 deletions src/mixedbread/types/shared/usage.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from typing import Optional

from ..._models import BaseModel

__all__ = ["Usage"]


class Usage(BaseModel):
    """Token-usage accounting returned by the API for a single request.

    Shared across response types (e.g. embedding creation and reranking
    responses import this model), replacing the per-module ``Usage``
    classes that previously duplicated these fields.
    """

    prompt_tokens: int
    """The number of tokens used for the prompt"""

    total_tokens: int
    """The total number of tokens used"""

    # Optional because not every endpoint produces a completion phase;
    # defaults to None when the API omits the field.
    completion_tokens: Optional[int] = None
    """The number of tokens used for the completion"""
40 changes: 20 additions & 20 deletions tests/api_resources/test_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,15 +25,15 @@ class TestClient:
def test_method_embed(self, client: Mixedbread) -> None:
client_ = client.embed(
model="mixedbread-ai/mxbai-embed-large-v1",
input=["string"],
input="x",
)
assert_matches_type(EmbeddingCreateResponse, client_, path=["response"])

@parametrize
def test_method_embed_with_all_params(self, client: Mixedbread) -> None:
client_ = client.embed(
model="mixedbread-ai/mxbai-embed-large-v1",
input=["string"],
input="x",
dimensions=768,
prompt="Provide a detailed summary of the following text.",
normalized=True,
Expand All @@ -45,7 +45,7 @@ def test_method_embed_with_all_params(self, client: Mixedbread) -> None:
def test_raw_response_embed(self, client: Mixedbread) -> None:
response = client.with_raw_response.embed(
model="mixedbread-ai/mxbai-embed-large-v1",
input=["string"],
input="x",
)

assert response.is_closed is True
Expand All @@ -57,7 +57,7 @@ def test_raw_response_embed(self, client: Mixedbread) -> None:
def test_streaming_response_embed(self, client: Mixedbread) -> None:
with client.with_streaming_response.embed(
model="mixedbread-ai/mxbai-embed-large-v1",
input=["string"],
input="x",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
Expand Down Expand Up @@ -95,18 +95,18 @@ def test_streaming_response_info(self, client: Mixedbread) -> None:
@parametrize
def test_method_rerank(self, client: Mixedbread) -> None:
client_ = client.rerank(
query="What is mixedbread ai?",
query="What are the key features of the Mixedbread embedding model?",
input=["Document 1", "Document 2"],
)
assert_matches_type(RerankResponse, client_, path=["response"])

@parametrize
def test_method_rerank_with_all_params(self, client: Mixedbread) -> None:
client_ = client.rerank(
model="x",
query="What is mixedbread ai?",
model="mixedbread-ai/mxbai-rerank-large-v2",
query="What are the key features of the Mixedbread embedding model?",
input=["Document 1", "Document 2"],
rank_fields=["field1", "field2"],
rank_fields=["content", "title"],
top_k=10,
return_input=False,
)
Expand All @@ -115,7 +115,7 @@ def test_method_rerank_with_all_params(self, client: Mixedbread) -> None:
@parametrize
def test_raw_response_rerank(self, client: Mixedbread) -> None:
response = client.with_raw_response.rerank(
query="What is mixedbread ai?",
query="What are the key features of the Mixedbread embedding model?",
input=["Document 1", "Document 2"],
)

Expand All @@ -127,7 +127,7 @@ def test_raw_response_rerank(self, client: Mixedbread) -> None:
@parametrize
def test_streaming_response_rerank(self, client: Mixedbread) -> None:
with client.with_streaming_response.rerank(
query="What is mixedbread ai?",
query="What are the key features of the Mixedbread embedding model?",
input=["Document 1", "Document 2"],
) as response:
assert not response.is_closed
Expand All @@ -146,15 +146,15 @@ class TestAsyncClient:
async def test_method_embed(self, async_client: AsyncMixedbread) -> None:
client = await async_client.embed(
model="mixedbread-ai/mxbai-embed-large-v1",
input=["string"],
input="x",
)
assert_matches_type(EmbeddingCreateResponse, client, path=["response"])

@parametrize
async def test_method_embed_with_all_params(self, async_client: AsyncMixedbread) -> None:
client = await async_client.embed(
model="mixedbread-ai/mxbai-embed-large-v1",
input=["string"],
input="x",
dimensions=768,
prompt="Provide a detailed summary of the following text.",
normalized=True,
Expand All @@ -166,7 +166,7 @@ async def test_method_embed_with_all_params(self, async_client: AsyncMixedbread)
async def test_raw_response_embed(self, async_client: AsyncMixedbread) -> None:
response = await async_client.with_raw_response.embed(
model="mixedbread-ai/mxbai-embed-large-v1",
input=["string"],
input="x",
)

assert response.is_closed is True
Expand All @@ -178,7 +178,7 @@ async def test_raw_response_embed(self, async_client: AsyncMixedbread) -> None:
async def test_streaming_response_embed(self, async_client: AsyncMixedbread) -> None:
async with async_client.with_streaming_response.embed(
model="mixedbread-ai/mxbai-embed-large-v1",
input=["string"],
input="x",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
Expand Down Expand Up @@ -216,18 +216,18 @@ async def test_streaming_response_info(self, async_client: AsyncMixedbread) -> N
@parametrize
async def test_method_rerank(self, async_client: AsyncMixedbread) -> None:
client = await async_client.rerank(
query="What is mixedbread ai?",
query="What are the key features of the Mixedbread embedding model?",
input=["Document 1", "Document 2"],
)
assert_matches_type(RerankResponse, client, path=["response"])

@parametrize
async def test_method_rerank_with_all_params(self, async_client: AsyncMixedbread) -> None:
client = await async_client.rerank(
model="x",
query="What is mixedbread ai?",
model="mixedbread-ai/mxbai-rerank-large-v2",
query="What are the key features of the Mixedbread embedding model?",
input=["Document 1", "Document 2"],
rank_fields=["field1", "field2"],
rank_fields=["content", "title"],
top_k=10,
return_input=False,
)
Expand All @@ -236,7 +236,7 @@ async def test_method_rerank_with_all_params(self, async_client: AsyncMixedbread
@parametrize
async def test_raw_response_rerank(self, async_client: AsyncMixedbread) -> None:
response = await async_client.with_raw_response.rerank(
query="What is mixedbread ai?",
query="What are the key features of the Mixedbread embedding model?",
input=["Document 1", "Document 2"],
)

Expand All @@ -248,7 +248,7 @@ async def test_raw_response_rerank(self, async_client: AsyncMixedbread) -> None:
@parametrize
async def test_streaming_response_rerank(self, async_client: AsyncMixedbread) -> None:
async with async_client.with_streaming_response.rerank(
query="What is mixedbread ai?",
query="What are the key features of the Mixedbread embedding model?",
input=["Document 1", "Document 2"],
) as response:
assert not response.is_closed
Expand Down
Loading
Loading