2 changes: 1 addition & 1 deletion .release-please-manifest.json
@@ -1,3 +1,3 @@
{
".": "0.3.0-alpha.5"
".": "0.3.0-alpha.6"
}
8 changes: 4 additions & 4 deletions .stats.yml
@@ -1,4 +1,4 @@
configured_endpoints: 115
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/llamastack%2Fllama-stack-client-8a12a05ba6892999ac506f69d5cbbc7218f28ee1a11bf8e0e548c603435bb643.yml
openapi_spec_hash: 871ce212a98bdad4a44ec7fbf58d9fcb
config_hash: 4c1ba9dc45c31189cd1b039d003a3544
configured_endpoints: 111
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/llamastack%2Fllama-stack-client-15a929a0b71de779accc56bd09d1e5f580e216affdb408cf9890bc7a37847e9e.yml
openapi_spec_hash: 5db9f7c7e80427cfa0298cbb01689559
config_hash: 06758df5c4f261f9c97eafcef7e0028f
13 changes: 13 additions & 0 deletions CHANGELOG.md
@@ -1,5 +1,18 @@
# Changelog

## 0.3.0-alpha.6 (2025-10-13)

Full Changelog: [v0.3.0-alpha.5...v0.3.0-alpha.6](https://github.com/llamastack/llama-stack-client-python/compare/v0.3.0-alpha.5...v0.3.0-alpha.6)

### Features

* **api:** updates to vector_store, etc. ([6c796ca](https://github.com/llamastack/llama-stack-client-python/commit/6c796ca49ee5e38c2951216558151fe349a303bf))


### Chores

* **internal:** detect missing future annotations with ruff ([e497f03](https://github.com/llamastack/llama-stack-client-python/commit/e497f0391aad3e75dceee288e299e0f74bd7f294))

## 0.3.0-alpha.5 (2025-10-10)

Full Changelog: [v0.3.0-alpha.4...v0.3.0-alpha.5](https://github.com/llamastack/llama-stack-client-python/compare/v0.3.0-alpha.4...v0.3.0-alpha.5)
20 changes: 0 additions & 20 deletions api.md
@@ -234,26 +234,6 @@ Methods:
- <code title="post /v1/vector-io/insert">client.vector_io.<a href="./src/llama_stack_client/resources/vector_io.py">insert</a>(\*\*<a href="src/llama_stack_client/types/vector_io_insert_params.py">params</a>) -> None</code>
- <code title="post /v1/vector-io/query">client.vector_io.<a href="./src/llama_stack_client/resources/vector_io.py">query</a>(\*\*<a href="src/llama_stack_client/types/vector_io_query_params.py">params</a>) -> <a href="./src/llama_stack_client/types/query_chunks_response.py">QueryChunksResponse</a></code>

# VectorDBs

Types:

```python
from llama_stack_client.types import (
ListVectorDBsResponse,
VectorDBRetrieveResponse,
VectorDBListResponse,
VectorDBRegisterResponse,
)
```

Methods:

- <code title="get /v1/vector-dbs/{vector_db_id}">client.vector_dbs.<a href="./src/llama_stack_client/resources/vector_dbs.py">retrieve</a>(vector_db_id) -> <a href="./src/llama_stack_client/types/vector_db_retrieve_response.py">VectorDBRetrieveResponse</a></code>
- <code title="get /v1/vector-dbs">client.vector_dbs.<a href="./src/llama_stack_client/resources/vector_dbs.py">list</a>() -> <a href="./src/llama_stack_client/types/vector_db_list_response.py">VectorDBListResponse</a></code>
- <code title="post /v1/vector-dbs">client.vector_dbs.<a href="./src/llama_stack_client/resources/vector_dbs.py">register</a>(\*\*<a href="src/llama_stack_client/types/vector_db_register_params.py">params</a>) -> <a href="./src/llama_stack_client/types/vector_db_register_response.py">VectorDBRegisterResponse</a></code>
- <code title="delete /v1/vector-dbs/{vector_db_id}">client.vector_dbs.<a href="./src/llama_stack_client/resources/vector_dbs.py">unregister</a>(vector_db_id) -> None</code>

# VectorStores

Types:
6 changes: 5 additions & 1 deletion pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "llama_stack_client"
version = "0.3.0-alpha.5"
version = "0.3.0-alpha.6"
description = "The official Python library for the llama-stack-client API"
dynamic = ["readme"]
license = "MIT"
@@ -194,6 +194,8 @@ select = [
"B",
# remove unused imports
"F401",
# check for missing future annotations
"FA102",
# bare except statements
"E722",
# unused arguments
@@ -216,6 +218,8 @@ unfixable = [
"T203",
]

extend-safe-fixes = ["FA102"]

[tool.ruff.lint.flake8-tidy-imports.banned-api]
"functools.lru_cache".msg = "This function does not retain type information for the wrapped function's arguments; The `lru_cache` function from `_utils` should be used instead"

38 changes: 0 additions & 38 deletions src/llama_stack_client/_client.py
@@ -51,7 +51,6 @@
benchmarks,
embeddings,
toolgroups,
vector_dbs,
completions,
moderations,
tool_runtime,
@@ -75,7 +74,6 @@
from .resources.benchmarks import BenchmarksResource, AsyncBenchmarksResource
from .resources.embeddings import EmbeddingsResource, AsyncEmbeddingsResource
from .resources.toolgroups import ToolgroupsResource, AsyncToolgroupsResource
from .resources.vector_dbs import VectorDBsResource, AsyncVectorDBsResource
from .resources.alpha.alpha import AlphaResource, AsyncAlphaResource
from .resources.completions import CompletionsResource, AsyncCompletionsResource
from .resources.moderations import ModerationsResource, AsyncModerationsResource
@@ -225,12 +223,6 @@ def vector_io(self) -> VectorIoResource:

return VectorIoResource(self)

@cached_property
def vector_dbs(self) -> VectorDBsResource:
from .resources.vector_dbs import VectorDBsResource

return VectorDBsResource(self)

@cached_property
def vector_stores(self) -> VectorStoresResource:
from .resources.vector_stores import VectorStoresResource
@@ -553,12 +545,6 @@ def vector_io(self) -> AsyncVectorIoResource:

return AsyncVectorIoResource(self)

@cached_property
def vector_dbs(self) -> AsyncVectorDBsResource:
from .resources.vector_dbs import AsyncVectorDBsResource

return AsyncVectorDBsResource(self)

@cached_property
def vector_stores(self) -> AsyncVectorStoresResource:
from .resources.vector_stores import AsyncVectorStoresResource
@@ -830,12 +816,6 @@ def vector_io(self) -> vector_io.VectorIoResourceWithRawResponse:

return VectorIoResourceWithRawResponse(self._client.vector_io)

@cached_property
def vector_dbs(self) -> vector_dbs.VectorDBsResourceWithRawResponse:
from .resources.vector_dbs import VectorDBsResourceWithRawResponse

return VectorDBsResourceWithRawResponse(self._client.vector_dbs)

@cached_property
def vector_stores(self) -> vector_stores.VectorStoresResourceWithRawResponse:
from .resources.vector_stores import VectorStoresResourceWithRawResponse
@@ -993,12 +973,6 @@ def vector_io(self) -> vector_io.AsyncVectorIoResourceWithRawResponse:

return AsyncVectorIoResourceWithRawResponse(self._client.vector_io)

@cached_property
def vector_dbs(self) -> vector_dbs.AsyncVectorDBsResourceWithRawResponse:
from .resources.vector_dbs import AsyncVectorDBsResourceWithRawResponse

return AsyncVectorDBsResourceWithRawResponse(self._client.vector_dbs)

@cached_property
def vector_stores(self) -> vector_stores.AsyncVectorStoresResourceWithRawResponse:
from .resources.vector_stores import AsyncVectorStoresResourceWithRawResponse
@@ -1158,12 +1132,6 @@ def vector_io(self) -> vector_io.VectorIoResourceWithStreamingResponse:

return VectorIoResourceWithStreamingResponse(self._client.vector_io)

@cached_property
def vector_dbs(self) -> vector_dbs.VectorDBsResourceWithStreamingResponse:
from .resources.vector_dbs import VectorDBsResourceWithStreamingResponse

return VectorDBsResourceWithStreamingResponse(self._client.vector_dbs)

@cached_property
def vector_stores(self) -> vector_stores.VectorStoresResourceWithStreamingResponse:
from .resources.vector_stores import VectorStoresResourceWithStreamingResponse
@@ -1323,12 +1291,6 @@ def vector_io(self) -> vector_io.AsyncVectorIoResourceWithStreamingResponse:

return AsyncVectorIoResourceWithStreamingResponse(self._client.vector_io)

@cached_property
def vector_dbs(self) -> vector_dbs.AsyncVectorDBsResourceWithStreamingResponse:
from .resources.vector_dbs import AsyncVectorDBsResourceWithStreamingResponse

return AsyncVectorDBsResourceWithStreamingResponse(self._client.vector_dbs)

@cached_property
def vector_stores(self) -> vector_stores.AsyncVectorStoresResourceWithStreamingResponse:
from .resources.vector_stores import AsyncVectorStoresResourceWithStreamingResponse
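
The removed blocks above take the whole `vector_dbs` accessor family (sync, async, raw-response, and streaming-response variants) off the client, so code written against the old surface now fails at attribute lookup. A rough before/after sketch; the surviving `vector_stores` call is an assumption based on the retained `VectorStores` section of `api.md`, not something this diff shows:

```python
from llama_stack_client import LlamaStackClient

client = LlamaStackClient(base_url="http://localhost:8321")  # URL is illustrative

# Removed in 0.3.0-alpha.6 -- these attributes no longer exist on the client:
# client.vector_dbs.list()
# client.vector_dbs.register(vector_db_id="my-db", embedding_model="...")

# The vector_stores resource is still exported; the exact method name below is
# an assumption about its API, not taken from this PR.
stores = client.vector_stores.list()
```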
14 changes: 0 additions & 14 deletions src/llama_stack_client/resources/__init__.py
@@ -144,14 +144,6 @@
ToolgroupsResourceWithStreamingResponse,
AsyncToolgroupsResourceWithStreamingResponse,
)
from .vector_dbs import (
VectorDBsResource,
AsyncVectorDBsResource,
VectorDBsResourceWithRawResponse,
AsyncVectorDBsResourceWithRawResponse,
VectorDBsResourceWithStreamingResponse,
AsyncVectorDBsResourceWithStreamingResponse,
)
from .completions import (
CompletionsResource,
AsyncCompletionsResource,
@@ -276,12 +268,6 @@
"AsyncVectorIoResourceWithRawResponse",
"VectorIoResourceWithStreamingResponse",
"AsyncVectorIoResourceWithStreamingResponse",
"VectorDBsResource",
"AsyncVectorDBsResource",
"VectorDBsResourceWithRawResponse",
"AsyncVectorDBsResourceWithRawResponse",
"VectorDBsResourceWithStreamingResponse",
"AsyncVectorDBsResourceWithStreamingResponse",
"VectorStoresResource",
"AsyncVectorStoresResource",
"VectorStoresResourceWithRawResponse",
20 changes: 0 additions & 20 deletions src/llama_stack_client/resources/completions.py
@@ -54,13 +54,11 @@ def create(
best_of: int | Omit = omit,
echo: bool | Omit = omit,
frequency_penalty: float | Omit = omit,
guided_choice: SequenceNotStr[str] | Omit = omit,
logit_bias: Dict[str, float] | Omit = omit,
logprobs: bool | Omit = omit,
max_tokens: int | Omit = omit,
n: int | Omit = omit,
presence_penalty: float | Omit = omit,
prompt_logprobs: int | Omit = omit,
seed: int | Omit = omit,
stop: Union[str, SequenceNotStr[str]] | Omit = omit,
stream: Literal[False] | Omit = omit,
@@ -139,13 +137,11 @@ def create(
best_of: int | Omit = omit,
echo: bool | Omit = omit,
frequency_penalty: float | Omit = omit,
guided_choice: SequenceNotStr[str] | Omit = omit,
logit_bias: Dict[str, float] | Omit = omit,
logprobs: bool | Omit = omit,
max_tokens: int | Omit = omit,
n: int | Omit = omit,
presence_penalty: float | Omit = omit,
prompt_logprobs: int | Omit = omit,
seed: int | Omit = omit,
stop: Union[str, SequenceNotStr[str]] | Omit = omit,
stream_options: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit,
@@ -223,13 +219,11 @@ def create(
best_of: int | Omit = omit,
echo: bool | Omit = omit,
frequency_penalty: float | Omit = omit,
guided_choice: SequenceNotStr[str] | Omit = omit,
logit_bias: Dict[str, float] | Omit = omit,
logprobs: bool | Omit = omit,
max_tokens: int | Omit = omit,
n: int | Omit = omit,
presence_penalty: float | Omit = omit,
prompt_logprobs: int | Omit = omit,
seed: int | Omit = omit,
stop: Union[str, SequenceNotStr[str]] | Omit = omit,
stream_options: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit,
@@ -306,13 +300,11 @@ def create(
best_of: int | Omit = omit,
echo: bool | Omit = omit,
frequency_penalty: float | Omit = omit,
guided_choice: SequenceNotStr[str] | Omit = omit,
logit_bias: Dict[str, float] | Omit = omit,
logprobs: bool | Omit = omit,
max_tokens: int | Omit = omit,
n: int | Omit = omit,
presence_penalty: float | Omit = omit,
prompt_logprobs: int | Omit = omit,
seed: int | Omit = omit,
stop: Union[str, SequenceNotStr[str]] | Omit = omit,
stream: Literal[False] | Literal[True] | Omit = omit,
@@ -337,13 +329,11 @@ def create(
"best_of": best_of,
"echo": echo,
"frequency_penalty": frequency_penalty,
"guided_choice": guided_choice,
"logit_bias": logit_bias,
"logprobs": logprobs,
"max_tokens": max_tokens,
"n": n,
"presence_penalty": presence_penalty,
"prompt_logprobs": prompt_logprobs,
"seed": seed,
"stop": stop,
"stream": stream,
@@ -395,13 +385,11 @@ async def create(
best_of: int | Omit = omit,
echo: bool | Omit = omit,
frequency_penalty: float | Omit = omit,
guided_choice: SequenceNotStr[str] | Omit = omit,
logit_bias: Dict[str, float] | Omit = omit,
logprobs: bool | Omit = omit,
max_tokens: int | Omit = omit,
n: int | Omit = omit,
presence_penalty: float | Omit = omit,
prompt_logprobs: int | Omit = omit,
seed: int | Omit = omit,
stop: Union[str, SequenceNotStr[str]] | Omit = omit,
stream: Literal[False] | Omit = omit,
@@ -480,13 +468,11 @@ async def create(
best_of: int | Omit = omit,
echo: bool | Omit = omit,
frequency_penalty: float | Omit = omit,
guided_choice: SequenceNotStr[str] | Omit = omit,
logit_bias: Dict[str, float] | Omit = omit,
logprobs: bool | Omit = omit,
max_tokens: int | Omit = omit,
n: int | Omit = omit,
presence_penalty: float | Omit = omit,
prompt_logprobs: int | Omit = omit,
seed: int | Omit = omit,
stop: Union[str, SequenceNotStr[str]] | Omit = omit,
stream_options: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit,
@@ -564,13 +550,11 @@ async def create(
best_of: int | Omit = omit,
echo: bool | Omit = omit,
frequency_penalty: float | Omit = omit,
guided_choice: SequenceNotStr[str] | Omit = omit,
logit_bias: Dict[str, float] | Omit = omit,
logprobs: bool | Omit = omit,
max_tokens: int | Omit = omit,
n: int | Omit = omit,
presence_penalty: float | Omit = omit,
prompt_logprobs: int | Omit = omit,
seed: int | Omit = omit,
stop: Union[str, SequenceNotStr[str]] | Omit = omit,
stream_options: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit,
@@ -647,13 +631,11 @@ async def create(
best_of: int | Omit = omit,
echo: bool | Omit = omit,
frequency_penalty: float | Omit = omit,
guided_choice: SequenceNotStr[str] | Omit = omit,
logit_bias: Dict[str, float] | Omit = omit,
logprobs: bool | Omit = omit,
max_tokens: int | Omit = omit,
n: int | Omit = omit,
presence_penalty: float | Omit = omit,
prompt_logprobs: int | Omit = omit,
seed: int | Omit = omit,
stop: Union[str, SequenceNotStr[str]] | Omit = omit,
stream: Literal[False] | Literal[True] | Omit = omit,
@@ -678,13 +660,11 @@ async def create(
"best_of": best_of,
"echo": echo,
"frequency_penalty": frequency_penalty,
"guided_choice": guided_choice,
"logit_bias": logit_bias,
"logprobs": logprobs,
"max_tokens": max_tokens,
"n": n,
"presence_penalty": presence_penalty,
"prompt_logprobs": prompt_logprobs,
"seed": seed,
"stop": stop,
"stream": stream,
5 changes: 2 additions & 3 deletions src/llama_stack_client/resources/safety.py
@@ -19,7 +19,6 @@
)
from .._base_client import make_request_options
from ..types.run_shield_response import RunShieldResponse
from ..types.shared_params.message import Message

__all__ = ["SafetyResource", "AsyncSafetyResource"]

@@ -47,7 +46,7 @@ def with_streaming_response(self) -> SafetyResourceWithStreamingResponse:
def run_shield(
self,
*,
messages: Iterable[Message],
messages: Iterable[safety_run_shield_params.Message],
params: Dict[str, Union[bool, float, str, Iterable[object], object, None]],
shield_id: str,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
@@ -116,7 +115,7 @@ def with_streaming_response(self) -> AsyncSafetyResourceWithStreamingResponse:
async def run_shield(
self,
*,
messages: Iterable[Message],
messages: Iterable[safety_run_shield_params.Message],
params: Dict[str, Union[bool, float, str, Iterable[object], object, None]],
shield_id: str,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
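
The substantive change in `safety.py` is just the annotation: `messages` now uses the request-scoped `safety_run_shield_params.Message` instead of the shared `Message` type, and the now-unused shared import goes away; the call shape itself is unchanged. A minimal usage sketch against that signature (the shield id and the role/content message shape are assumptions, not taken from this diff):

```python
from llama_stack_client import LlamaStackClient

client = LlamaStackClient()  # construction is illustrative

response = client.safety.run_shield(
    shield_id="llama-guard",  # illustrative shield id
    messages=[{"role": "user", "content": "How do I bake a chocolate cake?"}],
    params={},
)
print(response)  # a RunShieldResponse, per the import retained above
```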