From 37e0eb3776698e8c36ec3254950093824bfe6b00 Mon Sep 17 00:00:00 2001 From: Jen Hamon Date: Tue, 3 Jun 2025 11:14:42 -0400 Subject: [PATCH 1/8] Implement docs with sphinx --- .github/actions/build-docs/action.yml | 9 +- .../actions/build-docs/fix-relative-links.py | 66 -- .github/workflows/build-and-publish-docs.yaml | 2 +- .gitignore | 1 + pinecone/__init__.py | 2 - pinecone/__init__.pyi | 2 +- pinecone/db_data/__init__.py | 27 +- pinecone/db_data/import_error.py | 62 - pinecone/db_data/index.py | 15 +- pinecone/db_data/index_asyncio.py | 5 +- pinecone/db_data/index_asyncio_interface.py | 765 ++++++------ pinecone/db_data/interfaces.py | 554 ++++----- pinecone/deprecation_warnings.py | 27 +- pinecone/grpc/__init__.py | 4 +- pinecone/grpc/index_grpc.py | 64 +- pinecone/grpc/pinecone.py | 119 +- pinecone/inference/inference.py | 233 ++-- pinecone/inference/models/embedding_list.py | 3 + pinecone/legacy_pinecone_interface.py | 842 ++++++------- pinecone/pinecone.py | 146 +++ pinecone/pinecone_asyncio.py | 61 +- pinecone/pinecone_interface_asyncio.py | 1054 +++++++++-------- pinecone/utils/lazy_imports.py | 13 + pinecone/utils/version.py | 1 + poetry.lock | 251 +++- pyproject.toml | 4 +- sphinx/asyncio.rst | 107 ++ sphinx/conf.py | 31 + sphinx/grpc.rst | 82 ++ sphinx/index.rst | 247 ++++ sphinx/rest.rst | 126 ++ 31 files changed, 2845 insertions(+), 2080 deletions(-) delete mode 100644 .github/actions/build-docs/fix-relative-links.py delete mode 100644 pinecone/db_data/import_error.py create mode 100644 sphinx/asyncio.rst create mode 100644 sphinx/conf.py create mode 100644 sphinx/grpc.rst create mode 100644 sphinx/index.rst create mode 100644 sphinx/rest.rst diff --git a/.github/actions/build-docs/action.yml b/.github/actions/build-docs/action.yml index 4bb280f9..b47bdd5c 100644 --- a/.github/actions/build-docs/action.yml +++ b/.github/actions/build-docs/action.yml @@ -17,11 +17,4 @@ runs: - name: Build html documentation shell: bash run: | - poetry run pdoc 
pinecone '!pinecone.core' '!pinecone.utils' --favicon ./favicon-32x32.png --docformat google -o ./pdoc - - - name: Fix relative links - shell: bash - run: | - poetry run python3 ./.github/actions/build-docs/fix-relative-links.py ./pdoc ./pdoc - env: - BASE_URL: "https://github.com/pinecone-io/pinecone-python-client/blob/main/" + poetry run sphinx-build -b html sphinx docsbuild diff --git a/.github/actions/build-docs/fix-relative-links.py b/.github/actions/build-docs/fix-relative-links.py deleted file mode 100644 index 3ed29d46..00000000 --- a/.github/actions/build-docs/fix-relative-links.py +++ /dev/null @@ -1,66 +0,0 @@ -import os -import sys -from bs4 import BeautifulSoup -from urllib.parse import urljoin - -# Base URL to prepend to relative links -BASE_URL = os.environ.get( - "BASE_URL", "https://github.com/pinecone-io/pinecone-python-client/blob/main/" -) - - -def replace_relative_links(html): - soup = BeautifulSoup(html, "html.parser") - - # Find all anchor tags with an href attribute - for a in soup.find_all("a", href=True): - href = a["href"] - # Skip if the link is already absolute or an anchor link - if href.startswith(("http://", "https://", "#")): - continue - - # Skip if the link is not a markdown file - if not href.endswith(".md"): - continue - - # Replace the relative link with an absolute URL - new_href = urljoin(BASE_URL, href) - print(f"{href} => {new_href}") - a["href"] = new_href - return str(soup) - - -if __name__ == "__main__": - if len(sys.argv) < 2: - print("Usage: python fix-relative-links.py input-dir [output-dir]") - sys.exit(1) - - input_dir = sys.argv[1] - output_dir = sys.argv[2] if len(sys.argv) > 2 else None - - # Recursively process all html files in the input directory - for root, dirs, files in os.walk(input_dir): - for file in files: - if not file.endswith(".html"): - continue - - print(f"Processing {file}") - input_path = os.path.join(root, file) - - with open(input_path, "r", encoding="utf-8") as f: - html = f.read() - - 
updated_html = replace_relative_links(html) - - if output_dir: - # Get the relative path from input_dir to maintain folder structure - rel_path = os.path.relpath(input_path, input_dir) - output_path = os.path.join(output_dir, rel_path) - - # Create the necessary subdirectories - os.makedirs(os.path.dirname(output_path), exist_ok=True) - - with open(output_path, "w", encoding="utf-8") as f: - f.write(updated_html) - else: - print(updated_html) diff --git a/.github/workflows/build-and-publish-docs.yaml b/.github/workflows/build-and-publish-docs.yaml index 99e08ccb..5110a34c 100644 --- a/.github/workflows/build-and-publish-docs.yaml +++ b/.github/workflows/build-and-publish-docs.yaml @@ -24,7 +24,7 @@ jobs: env: SSH_DEPLOY_KEY: ${{ secrets.SSH_DEPLOY_KEY }} with: - source-directory: pdoc + source-directory: docsbuild destination-github-username: pinecone-io destination-repository-name: sdk-docs user-email: clients@pinecone.io diff --git a/.gitignore b/.gitignore index 189e5964..343aa43f 100644 --- a/.gitignore +++ b/.gitignore @@ -144,6 +144,7 @@ venv.bak/ pdoc/* !pdoc/pinecone-python-client-fork.png !pdoc/favicon-32x32.png +docsbuild # mypy .mypy_cache/ diff --git a/pinecone/__init__.py b/pinecone/__init__.py index da726366..2e55fe84 100644 --- a/pinecone/__init__.py +++ b/pinecone/__init__.py @@ -71,8 +71,6 @@ "pinecone.db_data.errors", "SparseValuesDictionaryExpectedError", ), - "Index": ("pinecone.db_data.import_error", "Index"), - "Inference": ("pinecone.db_data.import_error", "Inference"), } _db_control_lazy_imports = { diff --git a/pinecone/__init__.pyi b/pinecone/__init__.pyi index da6cc5ae..06de82ed 100644 --- a/pinecone/__init__.pyi +++ b/pinecone/__init__.pyi @@ -1,7 +1,7 @@ from pinecone.config import Config from pinecone.config import ConfigBuilder from pinecone.config import PineconeConfig -from .exceptions import ( +from pinecone.exceptions import ( PineconeException, PineconeApiTypeError, PineconeApiValueError, diff --git a/pinecone/db_data/__init__.py 
b/pinecone/db_data/__init__.py index ebe6da51..2a400300 100644 --- a/pinecone/db_data/__init__.py +++ b/pinecone/db_data/__init__.py @@ -1,5 +1,5 @@ from .index import ( - Index as _Index, + Index, FetchResponse, QueryResponse, DescribeIndexStatsResponse, @@ -7,13 +7,8 @@ SparseValues, Vector, ) + from .dataclasses import * -from .import_error import ( - Index, - IndexClientInstantiationError, - Inference, - InferenceInstantiationError, -) from .index_asyncio import * from .errors import ( VectorDictionaryMissingKeysError, @@ -24,9 +19,14 @@ SparseValuesDictionaryExpectedError, MetadataDictionaryExpectedError, ) +import warnings + from .resources.sync.bulk_import import ImportErrorMode +_Index = Index # alias for backwards compatibility + + __all__ = [ "_Index", "_IndexAsyncio", @@ -34,9 +34,8 @@ "FetchResponse", "ImportErrorMode", "Index", - "IndexClientInstantiationError", + "IndexAsyncio", "Inference", - "InferenceInstantiationError", "MetadataDictionaryExpectedError", "QueryResponse", "SearchQuery", @@ -53,8 +52,6 @@ "VectorTupleLengthError", ] -import warnings - def _get_deprecated_import(name, from_module, to_module): warnings.warn( @@ -66,10 +63,10 @@ def _get_deprecated_import(name, from_module, to_module): ) # Import from the new location from pinecone.inference import ( - Inference as _Inference, # noqa: F401 - AsyncioInference as _AsyncioInference, # noqa: F401 - RerankModel, # noqa: F401 - EmbedModel, # noqa: F401 + Inference as _Inference, # noqa: F401 + AsyncioInference as _AsyncioInference, # noqa: F401 + RerankModel, # noqa: F401 + EmbedModel, # noqa: F401 ) return locals()[name] diff --git a/pinecone/db_data/import_error.py b/pinecone/db_data/import_error.py deleted file mode 100644 index d0b7d1ec..00000000 --- a/pinecone/db_data/import_error.py +++ /dev/null @@ -1,62 +0,0 @@ -class IndexClientInstantiationError(Exception): - def __init__(self, index_args, index_kwargs) -> None: - formatted_args = ", ".join(map(repr, index_args)) - formatted_kwargs 
= ", ".join(f"{key}={repr(value)}" for key, value in index_kwargs.items()) - combined_args = ", ".join([a for a in [formatted_args, formatted_kwargs] if a.strip()]) - - self.message = f"""You are attempting to access the Index client directly from the pinecone module. The Index client must be instantiated through the parent Pinecone client instance so that it can inherit shared configurations such as API key. - - INCORRECT USAGE: - ``` - import pinecone - - pc = pinecone.Pinecone(api_key='your-api-key') - index = pinecone.Index({combined_args}) - ``` - - CORRECT USAGE: - ``` - from pinecone import Pinecone - - pc = Pinecone(api_key='your-api-key') - index = pc.Index({combined_args}) - ``` - """ - super().__init__(self.message) - - -class InferenceInstantiationError(Exception): - def __init__(self) -> None: - self.message = """You are attempting to access the Inference client directly from the pinecone module. Inference functionality such as `embed` and `rerank` should only be accessed through the parent Pinecone client instance. - - INCORRECT USAGE: - ``` - import pinecone - - pinecone.Inference().embed(...) 
- ``` - - CORRECT USAGE: - ``` - from pinecone import Pinecone - - pc = Pinecone(api_key='your-api-key') - - embeddings = pc.inference.embed( - model='multilingual-e5-large', - inputs=["The quick brown fox jumps over the lazy dog.", "lorem ipsum"], - parameters={"input_type": "query", "truncate": "END"}, - ) - ``` - """ - super().__init__(self.message) - - -class Index: - def __init__(self, *args, **kwargs) -> None: - raise IndexClientInstantiationError(args, kwargs) - - -class Inference: - def __init__(self, *args, **kwargs) -> None: - raise InferenceInstantiationError() diff --git a/pinecone/db_data/index.py b/pinecone/db_data/index.py index 8878dee2..b3205eb6 100644 --- a/pinecone/db_data/index.py +++ b/pinecone/db_data/index.py @@ -524,16 +524,17 @@ def list_imports(self, **kwargs) -> Iterator["ImportModel"]: Returns a generator that yields each import operation. It automatically handles pagination tokens on your behalf so you can easily iterate over all results. The `list_imports` method accepts all of the same arguments as list_imports_paginated - ```python - for op in index.list_imports(): - print(op) - ``` + .. code-block:: python + + for op in index.list_imports(): + print(op) + You can convert the generator into a list by wrapping the generator in a call to the built-in `list` function: - ```python - operations = list(index.list_imports()) - ``` + .. code-block:: python + + operations = list(index.list_imports()) You should be cautious with this approach because it will fetch all operations at once, which could be a large number of network calls and a lot of memory to hold the results. 
diff --git a/pinecone/db_data/index_asyncio.py b/pinecone/db_data/index_asyncio.py index dee9d4e7..b185682d 100644 --- a/pinecone/db_data/index_asyncio.py +++ b/pinecone/db_data/index_asyncio.py @@ -61,7 +61,7 @@ logger = logging.getLogger(__name__) """ @private """ -__all__ = ["_IndexAsyncio"] +__all__ = ["_IndexAsyncio", "IndexAsyncio"] _OPENAPI_ENDPOINT_PARAMS = ( "_return_http_data_only", @@ -649,3 +649,6 @@ async def cancel_import(self, id: str): id (str): The id of the import operation to cancel. """ return await self.bulk_import.cancel(id=id) + + +IndexAsyncio = _IndexAsyncio diff --git a/pinecone/db_data/index_asyncio_interface.py b/pinecone/db_data/index_asyncio_interface.py index a2647d0f..0e145bff 100644 --- a/pinecone/db_data/index_asyncio_interface.py +++ b/pinecone/db_data/index_asyncio_interface.py @@ -53,112 +53,117 @@ async def upsert( The upsert operation writes vectors into a namespace. If a new value is upserted for an existing vector id, it will overwrite the previous value. - To upsert in parallel follow: https://docs.pinecone.io/docs/insert-data#sending-upserts-in-parallel + To upsert in parallel follow `this link `_. - ## Upserting dense vectors + **Upserting dense vectors** - **Note:** the dimension of each dense vector must match the dimension of the index. + .. admonition:: Note - A vector can be represented in a variety of ways. - - ```python - import asyncio - from pinecone import Pinecone, Vector - - async def main(): - pc = Pinecone() - async with pc.IndexAsyncio(host="example-dojoi3u.svc.aped-4627-b74a.pinecone.io") as idx: - # A Vector object - await idx.upsert( - namespace = 'my-namespace', - vectors = [ - Vector(id='id1', values=[0.1, 0.2, 0.3, 0.4], metadata={'metadata_key': 'metadata_value'}), - ] - ) - - # A vector tuple - await idx.upsert( - namespace = 'my-namespace', - vectors = [ - ('id1', [0.1, 0.2, 0.3, 0.4]), - ] - ) + The dimension of each dense vector must match the dimension of the index. 
- # A vector tuple with metadata - await idx.upsert( - namespace = 'my-namespace', - vectors = [ - ('id1', [0.1, 0.2, 0.3, 0.4], {'metadata_key': 'metadata_value'}), - ] - ) + A vector can be represented in a variety of ways. - # A vector dictionary - await idx.upsert( - namespace = 'my-namespace', - vectors = [ - {"id": 1, "values": [0.1, 0.2, 0.3, 0.4], "metadata": {"metadata_key": "metadata_value"}}, - ] + .. code-block:: python + + import asyncio + from pinecone import Pinecone, Vector + + async def main(): + pc = Pinecone() + async with pc.IndexAsyncio(host="example-dojoi3u.svc.aped-4627-b74a.pinecone.io") as idx: + # A Vector object + await idx.upsert( + namespace = 'my-namespace', + vectors = [ + Vector(id='id1', values=[0.1, 0.2, 0.3, 0.4], metadata={'metadata_key': 'metadata_value'}), + ] + ) + + # A vector tuple + await idx.upsert( + namespace = 'my-namespace', + vectors = [ + ('id1', [0.1, 0.2, 0.3, 0.4]), + ] + ) + + # A vector tuple with metadata + await idx.upsert( + namespace = 'my-namespace', + vectors = [ + ('id1', [0.1, 0.2, 0.3, 0.4], {'metadata_key': 'metadata_value'}), + ] + ) + + # A vector dictionary + await idx.upsert( + namespace = 'my-namespace', + vectors = [ + {"id": 1, "values": [0.1, 0.2, 0.3, 0.4], "metadata": {"metadata_key": "metadata_value"}}, + ] + + asyncio.run(main()) + + + **Upserting sparse vectors** + + .. 
code-block:: python + + import asyncio + from pinecone import Pinecone, Vector, SparseValues + + async def main(): + pc = Pinecone() + async with pc.IndexAsyncio(host="example-dojoi3u.svc.aped-4627-b74a.pinecone.io") as idx: + # A Vector object + await idx.upsert( + namespace = 'my-namespace', + vectors = [ + Vector(id='id1', sparse_values=SparseValues(indices=[1, 2], values=[0.2, 0.4])), + ] + ) + + # A dictionary + await idx.upsert( + namespace = 'my-namespace', + vectors = [ + {"id": 1, "sparse_values": {"indices": [1, 2], "values": [0.2, 0.4]}}, + ] + ) + + asyncio.run(main()) + + + **Batch upsert** - asyncio.run(main()) - ``` + If you have a large number of vectors, you can upsert them in batches. - ## Upserting sparse vectors + .. code-block:: python - ```python - import asyncio - from pinecone import Pinecone, Vector, SparseValues + import asyncio + from pinecone import Pinecone, Vector, SparseValues - async def main(): - pc = Pinecone() - async with pc.IndexAsyncio(host="example-dojoi3u.svc.aped-4627-b74a.pinecone.io") as idx: - # A Vector object - await idx.upsert( - namespace = 'my-namespace', - vectors = [ - Vector(id='id1', sparse_values=SparseValues(indices=[1, 2], values=[0.2, 0.4])), - ] - ) + async def main(): + pc = Pinecone() + async with pc.IndexAsyncio(host="example-dojoi3u.svc.aped-4627-b74a.pinecone.io") as idx: - # A dictionary await idx.upsert( namespace = 'my-namespace', vectors = [ - {"id": 1, "sparse_values": {"indices": [1, 2], "values": [0.2, 0.4]}}, - ] + {'id': 'id1', 'values': [0.1, 0.2, 0.3, 0.4]}, + {'id': 'id2', 'values': [0.2, 0.3, 0.4, 0.5]}, + {'id': 'id3', 'values': [0.3, 0.4, 0.5, 0.6]}, + {'id': 'id4', 'values': [0.4, 0.5, 0.6, 0.7]}, + {'id': 'id5', 'values': [0.5, 0.6, 0.7, 0.8]}, + # More vectors here + ], + batch_size = 50 ) - asyncio.run(main()) - ``` - - ## Batch upsert + asyncio.run(main()) - If you have a large number of vectors, you can upsert them in batches. 
- - ```python - import asyncio - from pinecone import Pinecone, Vector, SparseValues - - async def main(): - pc = Pinecone() - async with pc.IndexAsyncio(host="example-dojoi3u.svc.aped-4627-b74a.pinecone.io") as idx: - - await idx.upsert( - namespace = 'my-namespace', - vectors = [ - {'id': 'id1', 'values': [0.1, 0.2, 0.3, 0.4]}, - {'id': 'id2', 'values': [0.2, 0.3, 0.4, 0.5]}, - {'id': 'id3', 'values': [0.3, 0.4, 0.5, 0.6]}, - {'id': 'id4', 'values': [0.4, 0.5, 0.6, 0.7]}, - {'id': 'id5', 'values': [0.5, 0.6, 0.7, 0.8]}, - # More vectors here - ], - batch_size = 50 - ) - asyncio.run(main()) - ``` - - ## Visual progress bar with tqdm + **Visual progress bar with tqdm** To see a progress bar when upserting in batches, you will need to separately install the `tqdm` package. If `tqdm` is present, the client will detect and use it to display progress when `show_progress=True`. @@ -203,40 +208,39 @@ async def delete( an error if you delete from the wrong namespace. Delete can occur in the following mutual exclusive ways: + 1. Delete by ids from a single namespace 2. Delete all vectors from a single namespace by setting delete_all to True 3. Delete all vectors from a single namespace by specifying a metadata filter (note that for this option delete all must be set to False) - API reference: https://docs.pinecone.io/reference/delete_post + .. 
code-block:: python - ```python - import asyncio - from pinecone import Pinecone, Vector, SparseValues + import asyncio + from pinecone import Pinecone, Vector, SparseValues - async def main(): - pc = Pinecone() - async with pc.IndexAsyncio(host="example-dojoi3u.svc.aped-4627-b74a.pinecone.io") as idx: - # Delete specific ids - await idx.delete( - ids=['id1', 'id2'], - namespace='my_namespace' - ) + async def main(): + pc = Pinecone() + async with pc.IndexAsyncio(host="example-dojoi3u.svc.aped-4627-b74a.pinecone.io") as idx: + # Delete specific ids + await idx.delete( + ids=['id1', 'id2'], + namespace='my_namespace' + ) - # Delete everything in a namespace - await idx.delete( - delete_all=True, - namespace='my_namespace' - ) + # Delete everything in a namespace + await idx.delete( + delete_all=True, + namespace='my_namespace' + ) - # Delete by metadata filter - await idx.delete( - filter={'key': 'value'}, - namespace='my_namespace' - ) + # Delete by metadata filter + await idx.delete( + filter={'key': 'value'}, + namespace='my_namespace' + ) - asyncio.run(main()) - ``` + asyncio.run(main()) Returns: An empty dictionary if the delete operation was successful. """ @@ -250,28 +254,26 @@ async def fetch( The fetch operation looks up and returns vectors, by ID, from a single namespace. The returned vectors include the vector data and/or metadata. - API reference: https://docs.pinecone.io/reference/fetch + .. 
code-block:: python - ``` - import asyncio - from pinecone import Pinecone, Vector, SparseValues + import asyncio + from pinecone import Pinecone, Vector, SparseValues - async def main(): - pc = Pinecone() - async with pc.IndexAsyncio(host="example-dojoi3u.svc.aped-4627-b74a.pinecone.io") as idx: - # Fetch specific ids in namespace - fetched = await idx.fetch( - ids=['id1', 'id2'], - namespace='my_namespace' - ) - for vec_id in fetched.vectors: - vector = fetched.vectors[vec_id] - print(vector.id) - print(vector.metadata) - print(vector.values) + async def main(): + pc = Pinecone() + async with pc.IndexAsyncio(host="example-dojoi3u.svc.aped-4627-b74a.pinecone.io") as idx: + # Fetch specific ids in namespace + fetched = await idx.fetch( + ids=['id1', 'id2'], + namespace='my_namespace' + ) + for vec_id in fetched.vectors: + vector = fetched.vectors[vec_id] + print(vector.id) + print(vector.metadata) + print(vector.values) - asyncio.run(main()) - ``` + asyncio.run(main()) Args: ids (List[str]): The vector IDs to fetch. @@ -300,73 +302,75 @@ async def query( The Query operation searches a namespace, using a query vector. It retrieves the ids of the most similar items in a namespace, along with their similarity scores. - API reference: https://docs.pinecone.io/reference/query + **Querying with dense vectors** - ## Querying with dense vectors + .. code-block:: python - ```python - import asyncio - from pinecone import Pinecone, Vector, SparseValues + import asyncio + from pinecone import Pinecone, Vector, SparseValues - async def main(): - pc = Pinecone() - async with pc.IndexAsyncio(host="example-dojoi3u.svc.aped-4627-b74a.pinecone.io") as idx: - query_embedding = [0.1, 0.2, 0.3, ...] # An embedding that matches the index dimension + async def main(): + pc = Pinecone() + async with pc.IndexAsyncio(host="example-dojoi3u.svc.aped-4627-b74a.pinecone.io") as idx: + query_embedding = [0.1, 0.2, 0.3, ...] 
# An embedding that matches the index dimension - # Query by vector values - results = await idx.query( - vector=query_embedding, - top_k=10, - filter={'genre': {"$eq": "drama"}}, # Optionally filter by metadata - namespace='my_namespace', - include_values=False, - include_metadata=True - ) + # Query by vector values + results = await idx.query( + vector=query_embedding, + top_k=10, + filter={'genre': {"$eq": "drama"}}, # Optionally filter by metadata + namespace='my_namespace', + include_values=False, + include_metadata=True + ) - # Query using vector id (the values from this stored vector will be used to query) - results = await idx.query( - id='1', - top_k=10, - filter={"year": {"$gt": 2000}}, - namespace='my_namespace', - ) + # Query using vector id (the values from this stored vector will be used to query) + results = await idx.query( + id='1', + top_k=10, + filter={"year": {"$gt": 2000}}, + namespace='my_namespace', + ) - asyncio.run(main()) - ``` + asyncio.run(main()) - ## Query with sparse vectors - ```python - import asyncio - from pinecone import Pinecone, Vector, SparseValues + **Query with sparse vectors** - async def main(): - pc = Pinecone() - async with pc.IndexAsyncio(host="example-dojoi3u.svc.aped-4627-b74a.pinecone.io") as idx: - query_embedding = [0.1, 0.2, 0.3, ...] # An embedding that matches the index dimension + .. 
code-block:: python - # Query by vector values - results = await idx.query( - vector=query_embedding, - top_k=10, - filter={'genre': {"$eq": "drama"}}, # Optionally filter by metadata - namespace='my_namespace', - include_values=False, - include_metadata=True - ) + import asyncio + from pinecone import Pinecone, Vector, SparseValues - # Query using vector id (the values from this stored vector will be used to query) - results = await idx.query( - id='1', - top_k=10, - filter={"year": {"$gt": 2000}}, - namespace='my_namespace', - ) + async def main(): + pc = Pinecone() + async with pc.IndexAsyncio(host="example-dojoi3u.svc.aped-4627-b74a.pinecone.io") as idx: + query_embedding = [0.1, 0.2, 0.3, ...] # An embedding that matches the index dimension + + # Query by vector values + results = await idx.query( + vector=query_embedding, + top_k=10, + filter={'genre': {"$eq": "drama"}}, # Optionally filter by metadata + namespace='my_namespace', + include_values=False, + include_metadata=True + ) - asyncio.run(main()) - ``` + # Query using vector id (the values from this stored vector will be used to query) + results = await idx.query( + id='1', + top_k=10, + filter={"year": {"$gt": 2000}}, + namespace='my_namespace', + ) + + asyncio.run(main()) Examples: + + .. code-block:: python + >>> index.query(vector=[1, 2, 3], top_k=10, namespace='my_namespace') >>> index.query(id='id1', top_k=10, namespace='my_namespace') >>> index.query(vector=[1, 2, 3], top_k=10, namespace='my_namespace', filter={'key': 'value'}) @@ -430,33 +434,34 @@ async def query_namespaces( Examples: - ```python - import asyncio - from pinecone import Pinecone + .. 
code-block:: python - async def main(): - pc = Pinecone(api_key="your-api-key") - idx = pc.IndexAsyncio( - host="example-dojoi3u.svc.aped-4627-b74a.pinecone.io", - ) + import asyncio + from pinecone import Pinecone - query_vec = [0.1, 0.2, 0.3] # An embedding that matches the index dimension - combined_results = await idx.query_namespaces( - vector=query_vec, - namespaces=['ns1', 'ns2', 'ns3', 'ns4'], - top_k=10, - filter={'genre': {"$eq": "drama"}}, - include_values=True, - include_metadata=True - ) - for vec in combined_results.matches: - print(vec.id, vec.score) - print(combined_results.usage) + async def main(): + pc = Pinecone(api_key="your-api-key") + idx = pc.IndexAsyncio( + host="example-dojoi3u.svc.aped-4627-b74a.pinecone.io", + ) + + query_vec = [0.1, 0.2, 0.3] # An embedding that matches the index dimension + combined_results = await idx.query_namespaces( + vector=query_vec, + namespaces=['ns1', 'ns2', 'ns3', 'ns4'], + top_k=10, + filter={'genre': {"$eq": "drama"}}, + include_values=True, + include_metadata=True + ) + for vec in combined_results.matches: + print(vec.id, vec.score) + print(combined_results.usage) + + await idx.close() - await idx.close() + asyncio.run(main()) - asyncio.run(main()) - ``` """ pass @@ -487,46 +492,46 @@ async def update( If a set_metadata is included, the values of the fields specified in it will be added or overwrite the previous value. 
- API reference: https://docs.pinecone.io/reference/update Examples: - ```python - import asyncio - from pinecone import Pinecone, Vector, SparseValues - - async def main(): - pc = Pinecone() - async with pc.IndexAsyncio(host="example-dojoi3u.svc.aped-4627-b74a.pinecone.io") as idx: - # Update vector values - await idx.update( - id='id1', - values=[0.1, 0.2, 0.3, ...], - namespace='my_namespace' - ) - - # Update metadata - await idx.update( - id='id1', - set_metadata={'key': 'value'}, - namespace='my_namespace' - ) - - # Update sparse values - await idx.update( - id='id1', - sparse_values={'indices': [1, 2], 'values': [0.2, 0.4]}, - namespace='my_namespace' - ) - # Update sparse values with SparseValues object - await idx.update( - id='id1', - sparse_values=SparseValues(indices=[234781, 5432], values=[0.2, 0.4]), - namespace='my_namespace' - ) - - asyncio.run(main()) - ``` + .. code-block:: python + + import asyncio + from pinecone import Pinecone, Vector, SparseValues + + async def main(): + pc = Pinecone() + async with pc.IndexAsyncio(host="example-dojoi3u.svc.aped-4627-b74a.pinecone.io") as idx: + # Update vector values + await idx.update( + id='id1', + values=[0.1, 0.2, 0.3, ...], + namespace='my_namespace' + ) + + # Update metadata + await idx.update( + id='id1', + set_metadata={'key': 'value'}, + namespace='my_namespace' + ) + + # Update sparse values + await idx.update( + id='id1', + sparse_values={'indices': [1, 2], 'values': [0.2, 0.4]}, + namespace='my_namespace' + ) + + # Update sparse values with SparseValues object + await idx.update( + id='id1', + sparse_values=SparseValues(indices=[234781, 5432], values=[0.2, 0.4]), + namespace='my_namespace' + ) + + asyncio.run(main()) """ pass @@ -539,8 +544,6 @@ async def describe_index_stats( The DescribeIndexStats operation returns statistics about the index's contents. For example: The vector count per namespace and the number of dimensions. 
- API reference: https://docs.pinecone.io/reference/describe_index_stats_post - Args: filter (Dict[str, Union[str, float, int, bool, List, dict]]): If this parameter is present, the operation only returns statistics for vectors that satisfy the filter. @@ -548,17 +551,18 @@ async def describe_index_stats( Returns: DescribeIndexStatsResponse object which contains stats about the index. - ```python - import asyncio - from pinecone import Pinecone, Vector, SparseValues + .. code-block:: python + + import asyncio + from pinecone import Pinecone, Vector, SparseValues + + async def main(): + pc = Pinecone() + async with pc.IndexAsyncio(host="example-dojoi3u.svc.aped-4627-b74a.pinecone.io") as idx: + print(await idx.describe_index_stats()) - async def main(): - pc = Pinecone() - async with pc.IndexAsyncio(host="example-dojoi3u.svc.aped-4627-b74a.pinecone.io") as idx: - print(await idx.describe_index_stats()) + asyncio.run(main()) - asyncio.run(main()) - ``` """ pass @@ -638,70 +642,71 @@ async def upsert_records(self, namespace: str, records: List[Dict]): When records are upserted, Pinecone converts mapped fields into embeddings and upserts them into the specified namespacce of the index. - ```python - import asyncio - from pinecone import ( - Pinecone, - CloudProvider, - AwsRegion, - EmbedModel - IndexEmbed - ) - - async def main(): - pc = Pinecone() - async with pc.IndexAsyncio(host="example-dojoi3u.svc.aped-4627-b74a.pinecone.io") as idx: - # upsert records - await idx.upsert_records( - namespace="my-namespace", - records=[ - { - "_id": "test1", - "my_text_field": "Apple is a popular fruit known for its sweetness and crisp texture.", - }, - { - "_id": "test2", - "my_text_field": "The tech company Apple is known for its innovative products like the iPhone.", - }, - { - "_id": "test3", - "my_text_field": "Many people enjoy eating apples as a healthy snack.", - }, - { - "_id": "test4", - "my_text_field": "Apple Inc. 
has revolutionized the tech industry with its sleek designs and user-friendly interfaces.", - }, - { - "_id": "test5", - "my_text_field": "An apple a day keeps the doctor away, as the saying goes.", - }, - { - "_id": "test6", - "my_text_field": "Apple Computer Company was founded on April 1, 1976, by Steve Jobs, Steve Wozniak, and Ronald Wayne as a partnership.", - }, - ], - ) + .. code-block:: python - from pinecone import SearchQuery, SearchRerank, RerankModel - - # search for similar records - response = await idx.search_records( - namespace="my-namespace", - query=SearchQuery( - inputs={ - "text": "Apple corporation", - }, - top_k=3, - ), - rerank=SearchRerank( - model=RerankModel.Bge_Reranker_V2_M3, - rank_fields=["my_text_field"], - top_n=3, - ), - ) + import asyncio + from pinecone import ( + Pinecone, + CloudProvider, + AwsRegion, + EmbedModel + IndexEmbed + ) + + async def main(): + pc = Pinecone() + async with pc.IndexAsyncio(host="example-dojoi3u.svc.aped-4627-b74a.pinecone.io") as idx: + # upsert records + await idx.upsert_records( + namespace="my-namespace", + records=[ + { + "_id": "test1", + "my_text_field": "Apple is a popular fruit known for its sweetness and crisp texture.", + }, + { + "_id": "test2", + "my_text_field": "The tech company Apple is known for its innovative products like the iPhone.", + }, + { + "_id": "test3", + "my_text_field": "Many people enjoy eating apples as a healthy snack.", + }, + { + "_id": "test4", + "my_text_field": "Apple Inc. 
has revolutionized the tech industry with its sleek designs and user-friendly interfaces.", + }, + { + "_id": "test5", + "my_text_field": "An apple a day keeps the doctor away, as the saying goes.", + }, + { + "_id": "test6", + "my_text_field": "Apple Computer Company was founded on April 1, 1976, by Steve Jobs, Steve Wozniak, and Ronald Wayne as a partnership.", + }, + ], + ) + + from pinecone import SearchQuery, SearchRerank, RerankModel + + # search for similar records + response = await idx.search_records( + namespace="my-namespace", + query=SearchQuery( + inputs={ + "text": "Apple corporation", + }, + top_k=3, + ), + rerank=SearchRerank( + model=RerankModel.Bge_Reranker_V2_M3, + rank_fields=["my_text_field"], + top_n=3, + ), + ) + + asyncio.run(main()) - asyncio.run(main()) - ``` """ pass @@ -727,70 +732,70 @@ async def search( This operation converts a query to a vector embedding and then searches a namespace. You can optionally provide a reranking operation as part of the search. - ```python - import asyncio - from pinecone import ( - Pinecone, - CloudProvider, - AwsRegion, - EmbedModel - IndexEmbed - ) - - async def main(): - pc = Pinecone() - async with pc.IndexAsyncio(host="example-dojoi3u.svc.aped-4627-b74a.pinecone.io") as idx: - # upsert records - await idx.upsert_records( - namespace="my-namespace", - records=[ - { - "_id": "test1", - "my_text_field": "Apple is a popular fruit known for its sweetness and crisp texture.", - }, - { - "_id": "test2", - "my_text_field": "The tech company Apple is known for its innovative products like the iPhone.", - }, - { - "_id": "test3", - "my_text_field": "Many people enjoy eating apples as a healthy snack.", - }, - { - "_id": "test4", - "my_text_field": "Apple Inc. 
has revolutionized the tech industry with its sleek designs and user-friendly interfaces.", - }, - { - "_id": "test5", - "my_text_field": "An apple a day keeps the doctor away, as the saying goes.", - }, - { - "_id": "test6", - "my_text_field": "Apple Computer Company was founded on April 1, 1976, by Steve Jobs, Steve Wozniak, and Ronald Wayne as a partnership.", - }, - ], - ) + .. code-block:: python - from pinecone import SearchQuery, SearchRerank, RerankModel - - # search for similar records - response = await idx.search_records( - namespace="my-namespace", - query=SearchQuery( - inputs={ - "text": "Apple corporation", - }, - top_k=3, - ), - rerank=SearchRerank( - model=RerankModel.Bge_Reranker_V2_M3, - rank_fields=["my_text_field"], - top_n=3, - ), - ) + import asyncio + from pinecone import ( + Pinecone, + CloudProvider, + AwsRegion, + EmbedModel + IndexEmbed + ) - asyncio.run(main()) - ``` + async def main(): + pc = Pinecone() + async with pc.IndexAsyncio(host="example-dojoi3u.svc.aped-4627-b74a.pinecone.io") as idx: + # upsert records + await idx.upsert_records( + namespace="my-namespace", + records=[ + { + "_id": "test1", + "my_text_field": "Apple is a popular fruit known for its sweetness and crisp texture.", + }, + { + "_id": "test2", + "my_text_field": "The tech company Apple is known for its innovative products like the iPhone.", + }, + { + "_id": "test3", + "my_text_field": "Many people enjoy eating apples as a healthy snack.", + }, + { + "_id": "test4", + "my_text_field": "Apple Inc. 
has revolutionized the tech industry with its sleek designs and user-friendly interfaces.", + }, + { + "_id": "test5", + "my_text_field": "An apple a day keeps the doctor away, as the saying goes.", + }, + { + "_id": "test6", + "my_text_field": "Apple Computer Company was founded on April 1, 1976, by Steve Jobs, Steve Wozniak, and Ronald Wayne as a partnership.", + }, + ], + ) + + from pinecone import SearchQuery, SearchRerank, RerankModel + + # search for similar records + response = await idx.search_records( + namespace="my-namespace", + query=SearchQuery( + inputs={ + "text": "Apple corporation", + }, + top_k=3, + ), + rerank=SearchRerank( + model=RerankModel.Bge_Reranker_V2_M3, + rank_fields=["my_text_field"], + top_n=3, + ), + ) + + asyncio.run(main()) """ pass diff --git a/pinecone/db_data/interfaces.py b/pinecone/db_data/interfaces.py index cbcc84b5..be2b4e3e 100644 --- a/pinecone/db_data/interfaces.py +++ b/pinecone/db_data/interfaces.py @@ -56,100 +56,105 @@ def upsert( To upsert in parallel follow: https://docs.pinecone.io/docs/insert-data#sending-upserts-in-parallel - ## Upserting dense vectors + **Upserting dense vectors** - **Note:** the dimension of each dense vector must match the dimension of the index. + .. admonition:: Note + + The dimension of each dense vector must match the dimension of the index. A vector can be represented in a variety of ways. 
- ```python - from pinecone import Pinecone, Vector - - pc = Pinecone() - idx = pc.Index("index-name") - - # A Vector object - idx.upsert( - namespace = 'my-namespace', - vectors = [ - Vector(id='id1', values=[0.1, 0.2, 0.3, 0.4], metadata={'metadata_key': 'metadata_value'}), - ] - ) - - # A vector tuple - idx.upsert( - namespace = 'my-namespace', - vectors = [ - ('id1', [0.1, 0.2, 0.3, 0.4]), - ] - ) - - # A vector tuple with metadata - idx.upsert( - namespace = 'my-namespace', - vectors = [ - ('id1', [0.1, 0.2, 0.3, 0.4], {'metadata_key': 'metadata_value'}), - ] - ) - - # A vector dictionary - idx.upsert( - namespace = 'my-namespace', - vectors = [ - {"id": 1, "values": [0.1, 0.2, 0.3, 0.4], "metadata": {"metadata_key": "metadata_value"}}, - ] - ``` - - ## Upserting sparse vectors - - ```python - from pinecone import Pinecone, Vector, SparseValues - - pc = Pinecone() - idx = pc.Index("index-name") - - # A Vector object - idx.upsert( - namespace = 'my-namespace', - vectors = [ - Vector(id='id1', sparse_values=SparseValues(indices=[1, 2], values=[0.2, 0.4])), - ] - ) - - # A dictionary - idx.upsert( - namespace = 'my-namespace', - vectors = [ - {"id": 1, "sparse_values": {"indices": [1, 2], "values": [0.2, 0.4]}}, - ] - ) - ``` - - ## Batch upsert + .. 
code-block:: python + + from pinecone import Pinecone, Vector + + pc = Pinecone() + idx = pc.Index("index-name") + + # A Vector object + idx.upsert( + namespace = 'my-namespace', + vectors = [ + Vector(id='id1', values=[0.1, 0.2, 0.3, 0.4], metadata={'metadata_key': 'metadata_value'}), + ] + ) + + # A vector tuple + idx.upsert( + namespace = 'my-namespace', + vectors = [ + ('id1', [0.1, 0.2, 0.3, 0.4]), + ] + ) + + # A vector tuple with metadata + idx.upsert( + namespace = 'my-namespace', + vectors = [ + ('id1', [0.1, 0.2, 0.3, 0.4], {'metadata_key': 'metadata_value'}), + ] + ) + + # A vector dictionary + idx.upsert( + namespace = 'my-namespace', + vectors = [ + {"id": 1, "values": [0.1, 0.2, 0.3, 0.4], "metadata": {"metadata_key": "metadata_value"}}, + ] + + + **Upserting sparse vectors** + + .. code-block:: python + + from pinecone import Pinecone, Vector, SparseValues + + pc = Pinecone() + idx = pc.Index("index-name") + + # A Vector object + idx.upsert( + namespace = 'my-namespace', + vectors = [ + Vector(id='id1', sparse_values=SparseValues(indices=[1, 2], values=[0.2, 0.4])), + ] + ) + + # A dictionary + idx.upsert( + namespace = 'my-namespace', + vectors = [ + {"id": 1, "sparse_values": {"indices": [1, 2], "values": [0.2, 0.4]}}, + ] + ) + + + **Batch upsert** If you have a large number of vectors, you can upsert them in batches. - ```python - from pinecone import Pinecone, Vector + .. 
code-block:: python - pc = Pinecone() - idx = pc.Index("index-name") + from pinecone import Pinecone, Vector - idx.upsert( - namespace = 'my-namespace', - vectors = [ - {'id': 'id1', 'values': [0.1, 0.2, 0.3, 0.4]}, - {'id': 'id2', 'values': [0.2, 0.3, 0.4, 0.5]}, - {'id': 'id3', 'values': [0.3, 0.4, 0.5, 0.6]}, - {'id': 'id4', 'values': [0.4, 0.5, 0.6, 0.7]}, - {'id': 'id5', 'values': [0.5, 0.6, 0.7, 0.8]}, - # More vectors here - ], - batch_size = 50 - ) - ``` + pc = Pinecone() + idx = pc.Index("index-name") + + idx.upsert( + namespace = 'my-namespace', + vectors = [ + {'id': 'id1', 'values': [0.1, 0.2, 0.3, 0.4]}, + {'id': 'id2', 'values': [0.2, 0.3, 0.4, 0.5]}, + {'id': 'id3', 'values': [0.3, 0.4, 0.5, 0.6]}, + {'id': 'id4', 'values': [0.4, 0.5, 0.6, 0.7]}, + {'id': 'id5', 'values': [0.5, 0.6, 0.7, 0.8]}, + # More vectors here + ], + batch_size = 50 + ) - ## Visual progress bar with tqdm + + **Visual progress bar with tqdm** To see a progress bar when upserting in batches, you will need to separately install the `tqdm` package. If `tqdm` is present, the client will detect and use it to display progress when `show_progress=True`. @@ -186,80 +191,81 @@ def upsert_records(self, namespace: str, records: List[Dict]): When records are upserted, Pinecone converts mapped fields into embeddings and upserts them into the specified namespacce of the index. - ```python - from pinecone import ( - Pinecone, - CloudProvider, - AwsRegion, - EmbedModel - IndexEmbed - ) - - pc = Pinecone(api_key="<>") - - # Create an index for your embedding model - index_model = pc.create_index_for_model( - name="my-model-index", - cloud=CloudProvider.AWS, - region=AwsRegion.US_WEST_2, - embed=IndexEmbed( - model=EmbedModel.Multilingual_E5_Large, - field_map={"text": "my_text_field"} + .. 
code-block:: python + + from pinecone import ( + Pinecone, + CloudProvider, + AwsRegion, + EmbedModel + IndexEmbed + ) + + pc = Pinecone(api_key="<>") + + # Create an index for your embedding model + index_model = pc.create_index_for_model( + name="my-model-index", + cloud=CloudProvider.AWS, + region=AwsRegion.US_WEST_2, + embed=IndexEmbed( + model=EmbedModel.Multilingual_E5_Large, + field_map={"text": "my_text_field"} + ) ) - ) - - # Instantiate the index client - idx = pc.Index(host=index_model.host) - - # upsert records - idx.upsert_records( - namespace="my-namespace", - records=[ - { - "_id": "test1", - "my_text_field": "Apple is a popular fruit known for its sweetness and crisp texture.", - }, - { - "_id": "test2", - "my_text_field": "The tech company Apple is known for its innovative products like the iPhone.", - }, - { - "_id": "test3", - "my_text_field": "Many people enjoy eating apples as a healthy snack.", - }, - { - "_id": "test4", - "my_text_field": "Apple Inc. has revolutionized the tech industry with its sleek designs and user-friendly interfaces.", - }, - { - "_id": "test5", - "my_text_field": "An apple a day keeps the doctor away, as the saying goes.", - }, - { - "_id": "test6", - "my_text_field": "Apple Computer Company was founded on April 1, 1976, by Steve Jobs, Steve Wozniak, and Ronald Wayne as a partnership.", - }, - ], - ) - - from pinecone import SearchQuery, SearchRerank, RerankModel - - # search for similar records - response = idx.search_records( - namespace="my-namespace", - query=SearchQuery( - inputs={ - "text": "Apple corporation", - }, - top_k=3, - ), - rerank=SearchRerank( - model=RerankModel.Bge_Reranker_V2_M3, - rank_fields=["my_text_field"], - top_n=3, - ), - ) - ``` + + # Instantiate the index client + idx = pc.Index(host=index_model.host) + + # upsert records + idx.upsert_records( + namespace="my-namespace", + records=[ + { + "_id": "test1", + "my_text_field": "Apple is a popular fruit known for its sweetness and crisp 
texture.", + }, + { + "_id": "test2", + "my_text_field": "The tech company Apple is known for its innovative products like the iPhone.", + }, + { + "_id": "test3", + "my_text_field": "Many people enjoy eating apples as a healthy snack.", + }, + { + "_id": "test4", + "my_text_field": "Apple Inc. has revolutionized the tech industry with its sleek designs and user-friendly interfaces.", + }, + { + "_id": "test5", + "my_text_field": "An apple a day keeps the doctor away, as the saying goes.", + }, + { + "_id": "test6", + "my_text_field": "Apple Computer Company was founded on April 1, 1976, by Steve Jobs, Steve Wozniak, and Ronald Wayne as a partnership.", + }, + ], + ) + + from pinecone import SearchQuery, SearchRerank, RerankModel + + # search for similar records + response = idx.search_records( + namespace="my-namespace", + query=SearchQuery( + inputs={ + "text": "Apple corporation", + }, + top_k=3, + ), + rerank=SearchRerank( + model=RerankModel.Bge_Reranker_V2_M3, + rank_fields=["my_text_field"], + top_n=3, + ), + ) + """ pass @@ -285,80 +291,81 @@ def search( This operation converts a query to a vector embedding and then searches a namespace. You can optionally provide a reranking operation as part of the search. - ```python - from pinecone import ( - Pinecone, - CloudProvider, - AwsRegion, - EmbedModel - IndexEmbed - ) - - pc = Pinecone(api_key="<>") - - # Create an index for your embedding model - index_model = pc.create_index_for_model( - name="my-model-index", - cloud=CloudProvider.AWS, - region=AwsRegion.US_WEST_2, - embed=IndexEmbed( - model=EmbedModel.Multilingual_E5_Large, - field_map={"text": "my_text_field"} + .. 
code-block:: python + + from pinecone import ( + Pinecone, + CloudProvider, + AwsRegion, + EmbedModel + IndexEmbed + ) + + pc = Pinecone(api_key="<>") + + # Create an index for your embedding model + index_model = pc.create_index_for_model( + name="my-model-index", + cloud=CloudProvider.AWS, + region=AwsRegion.US_WEST_2, + embed=IndexEmbed( + model=EmbedModel.Multilingual_E5_Large, + field_map={"text": "my_text_field"} + ) ) - ) - - # Instantiate the index client - idx = pc.Index(host=index_model.host) - - # upsert records - idx.upsert_records( - namespace="my-namespace", - records=[ - { - "_id": "test1", - "my_text_field": "Apple is a popular fruit known for its sweetness and crisp texture.", - }, - { - "_id": "test2", - "my_text_field": "The tech company Apple is known for its innovative products like the iPhone.", - }, - { - "_id": "test3", - "my_text_field": "Many people enjoy eating apples as a healthy snack.", - }, - { - "_id": "test4", - "my_text_field": "Apple Inc. has revolutionized the tech industry with its sleek designs and user-friendly interfaces.", - }, - { - "_id": "test5", - "my_text_field": "An apple a day keeps the doctor away, as the saying goes.", - }, - { - "_id": "test6", - "my_text_field": "Apple Computer Company was founded on April 1, 1976, by Steve Jobs, Steve Wozniak, and Ronald Wayne as a partnership.", - }, - ], - ) - - from pinecone import SearchQuery, SearchRerank, RerankModel - - # search for similar records - response = idx.search_records( - namespace="my-namespace", - query=SearchQuery( - inputs={ - "text": "Apple corporation", - }, - top_k=3, - ), - rerank=SearchRerank( - model=RerankModel.Bge_Reranker_V2_M3, - rank_fields=["my_text_field"], - top_n=3, - ), - ) - ``` + + # Instantiate the index client + idx = pc.Index(host=index_model.host) + + # upsert records + idx.upsert_records( + namespace="my-namespace", + records=[ + { + "_id": "test1", + "my_text_field": "Apple is a popular fruit known for its sweetness and crisp 
texture.", + }, + { + "_id": "test2", + "my_text_field": "The tech company Apple is known for its innovative products like the iPhone.", + }, + { + "_id": "test3", + "my_text_field": "Many people enjoy eating apples as a healthy snack.", + }, + { + "_id": "test4", + "my_text_field": "Apple Inc. has revolutionized the tech industry with its sleek designs and user-friendly interfaces.", + }, + { + "_id": "test5", + "my_text_field": "An apple a day keeps the doctor away, as the saying goes.", + }, + { + "_id": "test6", + "my_text_field": "Apple Computer Company was founded on April 1, 1976, by Steve Jobs, Steve Wozniak, and Ronald Wayne as a partnership.", + }, + ], + ) + + from pinecone import SearchQuery, SearchRerank, RerankModel + + # search for similar records + response = idx.search_records( + namespace="my-namespace", + query=SearchQuery( + inputs={ + "text": "Apple corporation", + }, + top_k=3, + ), + rerank=SearchRerank( + model=RerankModel.Bge_Reranker_V2_M3, + rank_fields=["my_text_field"], + top_n=3, + ), + ) + """ pass @@ -404,14 +411,16 @@ def delete( an error if you delete from the wrong namespace. Delete can occur in the following mutual exclusive ways: + 1. Delete by ids from a single namespace 2. Delete all vectors from a single namespace by setting delete_all to True 3. Delete all vectors from a single namespace by specifying a metadata filter (note that for this option delete all must be set to False) - API reference: https://docs.pinecone.io/reference/delete_post - Examples: + + .. code-block:: python + >>> index.delete(ids=['id1', 'id2'], namespace='my_namespace') >>> index.delete(delete_all=True, namespace='my_namespace') >>> index.delete(filter={'key': 'value'}, namespace='my_namespace') @@ -427,9 +436,10 @@ def fetch(self, ids: List[str], namespace: Optional[str] = None, **kwargs) -> Fe The fetch operation looks up and returns vectors, by ID, from a single namespace. The returned vectors include the vector data and/or metadata. 
- API reference: https://docs.pinecone.io/reference/fetch - Examples: + + .. code-block:: python + >>> index.fetch(ids=['id1', 'id2'], namespace='my_namespace') >>> index.fetch(ids=['id1', 'id2']) @@ -460,9 +470,10 @@ def query( The Query operation searches a namespace, using a query vector. It retrieves the ids of the most similar items in a namespace, along with their similarity scores. - API reference: https://docs.pinecone.io/reference/query - Examples: + + .. code-block:: python + >>> index.query(vector=[1, 2, 3], top_k=10, namespace='my_namespace') >>> index.query(id='id1', top_k=10, namespace='my_namespace') >>> index.query(vector=[1, 2, 3], top_k=10, namespace='my_namespace', filter={'key': 'value'}) @@ -516,30 +527,31 @@ def query_namespaces( Examples: - ```python - from pinecone import Pinecone - - pc = Pinecone(api_key="your-api-key") - index = pc.Index( - host="index-name", - pool_threads=32, - connection_pool_maxsize=32 - ) - - query_vec = [0.1, 0.2, 0.3] # An embedding that matches the index dimension - combined_results = index.query_namespaces( - vector=query_vec, - namespaces=['ns1', 'ns2', 'ns3', 'ns4'], - metric="cosine", - top_k=10, - filter={'genre': {"$eq": "drama"}}, - include_values=True, - include_metadata=True - ) - for vec in combined_results.matches: - print(vec.id, vec.score) - print(combined_results.usage) - ``` + .. 
code-block:: python + + from pinecone import Pinecone + + pc = Pinecone(api_key="your-api-key") + index = pc.Index( + host="index-name", + pool_threads=32, + connection_pool_maxsize=32 + ) + + query_vec = [0.1, 0.2, 0.3] # An embedding that matches the index dimension + combined_results = index.query_namespaces( + vector=query_vec, + namespaces=['ns1', 'ns2', 'ns3', 'ns4'], + metric="cosine", + top_k=10, + filter={'genre': {"$eq": "drama"}}, + include_values=True, + include_metadata=True + ) + for vec in combined_results.matches: + print(vec.id, vec.score) + print(combined_results.usage) + Args: vector (List[float]): The query vector, must be the same length as the dimension of the index being queried. @@ -572,9 +584,10 @@ def update( If a set_metadata is included, the values of the fields specified in it will be added or overwrite the previous value. - API reference: https://docs.pinecone.io/reference/update - Examples: + + .. code-block:: python + >>> index.update(id='id1', values=[1, 2, 3], namespace='my_namespace') >>> index.update(id='id1', set_metadata={'key': 'value'}, namespace='my_namespace') >>> index.update(id='id1', values=[1, 2, 3], sparse_values={'indices': [1, 2], 'values': [0.2, 0.4]}, @@ -605,18 +618,20 @@ def describe_index_stats( The DescribeIndexStats operation returns statistics about the index's contents. For example: The vector count per namespace and the number of dimensions. - API reference: https://docs.pinecone.io/reference/describe_index_stats_post - - Examples: - >>> index.describe_index_stats() - >>> index.describe_index_stats(filter={'key': 'value'}) - Args: filter (Dict[str, Union[str, float, int, bool, List, dict]]): If this parameter is present, the operation only returns statistics for vectors that satisfy the filter. See https://www.pinecone.io/docs/metadata-filtering/.. [optional] Returns: DescribeIndexStatsResponse object which contains stats about the index. + + .. 
code-block:: python + + >>> pc = Pinecone() + >>> index = pc.Index(index_name="my-index") + >>> index.describe_index_stats() + >>> index.describe_index_stats(filter={'key': 'value'}) + """ pass @@ -637,6 +652,9 @@ def list_paginated( Consider using the `list` method to avoid having to handle pagination tokens manually. Examples: + + .. code-block:: python + >>> results = index.list_paginated(prefix='99', limit=5, namespace='my_namespace') >>> [v.id for v in results.vectors] ['99', '990', '991', '992', '993'] diff --git a/pinecone/deprecation_warnings.py b/pinecone/deprecation_warnings.py index ec458e78..a7c15a91 100644 --- a/pinecone/deprecation_warnings.py +++ b/pinecone/deprecation_warnings.py @@ -9,6 +9,7 @@ def _build_class_migration_message(method_name: str, example: str): def init(*args, **kwargs): + """@private""" example = """ import os from pinecone import Pinecone, ServerlessSpec @@ -20,8 +21,8 @@ def init(*args, **kwargs): # Now do stuff if 'my_index' not in pc.list_indexes().names(): pc.create_index( - name='my_index', - dimension=1536, + name='my_index', + dimension=1536, metric='euclidean', spec=ServerlessSpec( cloud='aws', @@ -40,9 +41,10 @@ def init(*args, **kwargs): def list_indexes(*args, **kwargs): + """@private""" example = """ from pinecone import Pinecone - + pc = Pinecone(api_key='YOUR_API_KEY') index_name = "quickstart" # or your index name @@ -54,9 +56,10 @@ def list_indexes(*args, **kwargs): def describe_index(*args, **kwargs): + """@private""" example = """ from pinecone import Pinecone - + pc = Pinecone(api_key='YOUR_API_KEY') pc.describe_index('my_index') """ @@ -64,6 +67,7 @@ def describe_index(*args, **kwargs): def create_index(*args, **kwargs): + """@private""" example = """ from pinecone import Pinecone, ServerlessSpec @@ -82,6 +86,7 @@ def create_index(*args, **kwargs): def delete_index(*args, **kwargs): + """@private""" example = """ from pinecone import Pinecone @@ -92,6 +97,7 @@ def delete_index(*args, **kwargs): def 
scale_index(*args, **kwargs): + """@private""" example = """ from pinecone import Pinecone @@ -110,9 +116,10 @@ def scale_index(*args, **kwargs): def create_collection(*args, **kwargs): + """@private""" example = """ from pinecone import Pinecone - + pc = Pinecone(api_key='YOUR_API_KEY') pc.create_collection(name='my_collection', source='my_index') """ @@ -120,6 +127,7 @@ def create_collection(*args, **kwargs): def list_collections(*args, **kwargs): + """@private""" example = """ from pinecone import Pinecone @@ -130,9 +138,10 @@ def list_collections(*args, **kwargs): def delete_collection(*args, **kwargs): + """@private""" example = """ from pinecone import Pinecone - + pc = Pinecone(api_key='YOUR_API_KEY') pc.delete_collection('my_collection') """ @@ -140,9 +149,10 @@ def delete_collection(*args, **kwargs): def describe_collection(*args, **kwargs): + """@private""" example = """ from pinecone import Pinecone - + pc = Pinecone(api_key='YOUR_API_KEY') pc.describe_collection('my_collection') """ @@ -150,9 +160,10 @@ def describe_collection(*args, **kwargs): def configure_index(*args, **kwargs): + """@private""" example = """ from pinecone import Pinecone - + pc = Pinecone(api_key='YOUR_API_KEY') pc.configure_index('my_index', replicas=2) """ diff --git a/pinecone/grpc/__init__.py b/pinecone/grpc/__init__.py index 66adb916..307b2a20 100644 --- a/pinecone/grpc/__init__.py +++ b/pinecone/grpc/__init__.py @@ -16,7 +16,7 @@ pip3 install pinecone[grpc] # Install a specific version -pip3 install "pinecone[grpc]"==3.0.0 +pip3 install "pinecone[grpc]"==7.0.2 ``` #### Installing with poetry @@ -26,7 +26,7 @@ poetry add pinecone --extras grpc # Install a specific version -poetry add pinecone==3.0.0 --extras grpc +poetry add pinecone==7.0.2 --extras grpc ``` ### Using the gRPC client diff --git a/pinecone/grpc/index_grpc.py b/pinecone/grpc/index_grpc.py index bfaf8fff..e9081e82 100644 --- a/pinecone/grpc/index_grpc.py +++ b/pinecone/grpc/index_grpc.py @@ -79,6 +79,9 @@ def 
upsert( If a new value is upserted for an existing vector id, it will overwrite the previous value. Examples: + + .. code-block:: python + >>> index.upsert([('id1', [1.0, 2.0, 3.0], {'key': 'value'}), ('id2', [1.0, 2.0, 3.0]) ], @@ -184,7 +187,7 @@ def upsert_from_dataframe( namespace: The namespace to upsert into. batch_size: The number of rows to upsert in a single batch. use_async_requests: Whether to upsert multiple requests at the same time using asynchronous request mechanism. - Set to `False` + Set to ``False`` show_progress: Whether to show a progress bar. """ try: @@ -240,18 +243,6 @@ def delete( """ The Delete operation deletes vectors from the index, from a single namespace. No error raised if the vector id does not exist. - Note: for any delete call, if namespace is not specified, the default namespace is used. - - Delete can occur in the following mutual exclusive ways: - 1. Delete by ids from a single namespace - 2. Delete all vectors from a single namespace by setting delete_all to True - 3. Delete all vectors from a single namespace by specifying a metadata filter - (note that for this option delete all must be set to False) - - Examples: - >>> index.delete(ids=['id1', 'id2'], namespace='my_namespace') - >>> index.delete(delete_all=True, namespace='my_namespace') - >>> index.delete(filter={'key': 'value'}, namespace='my_namespace', async_req=True) Args: ids (List[str]): Vector ids to delete [optional] @@ -267,6 +258,25 @@ def delete( Defaults to False. [optional] Returns: DeleteResponse (contains no data) or a PineconeGrpcFuture object if async_req is True. + + .. admonition:: Note + + For any delete call, if namespace is not specified, the default namespace is used. + + Delete can occur in the following mutual exclusive ways: + + 1. Delete by ids from a single namespace + 2. Delete all vectors from a single namespace by setting delete_all to True + 3. 
Delete all vectors from a single namespace by specifying a metadata filter + (note that for this option delete all must be set to False) + + Examples: + + .. code-block:: python + + >>> index.delete(ids=['id1', 'id2'], namespace='my_namespace') + >>> index.delete(delete_all=True, namespace='my_namespace') + >>> index.delete(filter={'key': 'value'}, namespace='my_namespace', async_req=True) """ if filter is not None: @@ -303,6 +313,9 @@ def fetch( The returned vectors include the vector data and/or metadata. Examples: + + .. code-block:: python + >>> index.fetch(ids=['id1', 'id2'], namespace='my_namespace') >>> index.fetch(ids=['id1', 'id2']) @@ -346,6 +359,9 @@ def query( It retrieves the ids of the most similar items in a namespace, along with their similarity scores. Examples: + + .. code-block:: python + >>> index.query(vector=[1, 2, 3], top_k=10, namespace='my_namespace') >>> index.query(id='id1', top_k=10, namespace='my_namespace') >>> index.query(vector=[1, 2, 3], top_k=10, namespace='my_namespace', filter={'key': 'value'}) @@ -357,11 +373,11 @@ def query( Args: vector (List[float]): The query vector. This should be the same length as the dimension of the index - being queried. Each `query()` request can contain only one of the parameters - `id` or `vector`.. [optional] + being queried. Each ``query()`` request can contain only one of the parameters + ``id`` or ``vector``.. [optional] id (str): The unique ID of the vector to be used as a query vector. - Each `query()` request can contain only one of the parameters - `vector` or `id`.. [optional] + Each ``query()`` request can contain only one of the parameters + ``vector`` or ``id``.. [optional] top_k (int): The number of results to return for each query. Must be an integer greater than 1. namespace (str): The namespace to fetch vectors from. If not specified, the default namespace is used. 
[optional] @@ -475,6 +491,9 @@ def update( the values of the fields specified in it will be added or overwrite the previous value. Examples: + + .. code-block:: python + >>> index.update(id='id1', values=[1, 2, 3], namespace='my_namespace') >>> index.update(id='id1', set_metadata={'key': 'value'}, namespace='my_namespace', async_req=True) >>> index.update(id='id1', values=[1, 2, 3], sparse_values={'indices': [1, 2], 'values': [0.2, 0.4]}, @@ -533,9 +552,12 @@ def list_paginated( It returns matching ids in a paginated form, with a pagination token to fetch the next page of results. This id list can then be passed to fetch or delete operations, depending on your use case. - Consider using the `list` method to avoid having to handle pagination tokens manually. + Consider using the ``list`` method to avoid having to handle pagination tokens manually. Examples: + + .. code-block:: python + >>> results = index.list_paginated(prefix='99', limit=5, namespace='my_namespace') >>> [v.id for v in results.vectors] ['99', '990', '991', '992', '993'] @@ -581,6 +603,9 @@ def list(self, **kwargs): behalf. Examples: + + .. code-block:: python + >>> for ids in index.list(prefix='99', limit=5, namespace='my_namespace'): >>> print(ids) ['99', '990', '991', '992', '993'] @@ -618,6 +643,9 @@ def describe_index_stats( For example: The vector count per namespace and the number of dimensions. Examples: + + .. code-block:: python + >>> index.describe_index_stats() >>> index.describe_index_stats(filter={'key': 'value'}) diff --git a/pinecone/grpc/pinecone.py b/pinecone/grpc/pinecone.py index 7c869e8f..3aeb7fe6 100644 --- a/pinecone/grpc/pinecone.py +++ b/pinecone/grpc/pinecone.py @@ -8,42 +8,44 @@ class PineconeGRPC(Pinecone): An alternative version of the Pinecone client that uses gRPC instead of HTTP for data operations. - ### Installing the gRPC client + **Installing the gRPC client** You must install extra dependencies in order to install the GRPC client. 
- #### Installing with pip + **Installing with pip** - ```bash - # Install the latest version - pip3 install pinecone[grpc] + .. code-block:: bash - # Install a specific version - pip3 install "pinecone[grpc]"==3.0.0 - ``` + # Install the latest version + pip3 install pinecone[grpc] - #### Installing with poetry + # Install a specific version + pip3 install "pinecone[grpc]" - ```bash - # Install the latest version - poetry add pinecone --extras grpc + **Installing with poetry** - # Install a specific version - poetry add pinecone==3.0.0 --extras grpc - ``` + .. code-block:: bash - ### Using the gRPC client + # Install the latest version + poetry add pinecone --extras grpc - ```python - import os - from pinecone.grpc import PineconeGRPC + # Install a specific version + poetry add pinecone --extras grpc - client = PineconeGRPC(api_key=os.environ.get("PINECONE_API_KEY")) - # From this point on, usage is identical to the HTTP client. - index = client.Index("my-index", host=os.environ("PINECONE_INDEX_HOST")) - index.query(...) - ``` + **Using the gRPC client** + + .. code-block:: python + + import os + from pinecone.grpc import PineconeGRPC + + pc = PineconeGRPC(api_key=os.environ.get("PINECONE_API_KEY")) + + # From this point on, usage is identical to the HTTP client. + index = pc.Index("my-index", host=os.environ("PINECONE_INDEX_HOST")) + index.query(...) + """ @@ -58,36 +60,34 @@ def Index(self, name: str = "", host: str = "", **kwargs): eliminate a round trip to the Pinecone control plane by specifying the host of the index. - ```python - import os - from pinecone.grpc import PineconeGRPC + .. 
code-block:: python + import os + from pinecone.grpc import PineconeGRPC - api_key = os.environ.get("PINECONE_API_KEY") - index_host = os.environ.get("PINECONE_INDEX_HOST") + api_key = os.environ.get("PINECONE_API_KEY") + index_host = os.environ.get("PINECONE_INDEX_HOST") - pc = PineconeGRPC(api_key=api_key) - index = pc.Index(host=index_host) + pc = PineconeGRPC(api_key=api_key) + index = pc.Index(host=index_host) - # Now you're ready to perform data operations - index.query(vector=[...], top_k=10) - ``` + # Now you're ready to perform data operations + index.query(vector=[...], top_k=10) To find your host url, you can use the Pinecone control plane to describe the index. The host url is returned in the response. Or, alternatively, the host is displayed in the Pinecone web console. - ```python - import os - from pinecone import Pinecone + .. code-block:: python + import os + from pinecone import Pinecone - pc = Pinecone( - api_key=os.environ.get("PINECONE_API_KEY") - ) + pc = Pinecone( + api_key=os.environ.get("PINECONE_API_KEY") + ) - host = pc.describe_index('index-name').host - ``` + host = pc.describe_index('index-name').host - ### Target an index by name (not recommended for production) + **Target an index by name (not recommended for production)** For more casual usage, such as when you are playing and exploring with Pinecone in a notebook setting, you can also target an index by name. If you use this @@ -98,25 +98,26 @@ def Index(self, name: str = "", host: str = "", **kwargs): will only incur the overhead of only one call. But this approach is not recommended for production usage. - ```python - import os - from pinecone import ServerlessSpec - from pinecone.grpc import PineconeGRPC + .. 
code-block:: python - api_key = os.environ.get("PINECONE_API_KEY") + import os + from pinecone import ServerlessSpec + from pinecone.grpc import PineconeGRPC - pc = PineconeGRPC(api_key=api_key) - pc.create_index( - name='my-index', - dimension=1536, - metric='cosine', - spec=ServerlessSpec(cloud='aws', region='us-west-2') - ) - index = pc.Index('my-index') + api_key = os.environ.get("PINECONE_API_KEY") + + pc = PineconeGRPC(api_key=api_key) + pc.create_index( + name='my-index', + dimension=1536, + metric='cosine', + spec=ServerlessSpec(cloud='aws', region='us-west-2') + ) + index = pc.Index('my-index') + + # Now you're ready to perform data operations + index.query(vector=[...], top_k=10) - # Now you're ready to perform data operations - index.query(vector=[...], top_k=10) - ``` """ if name == "" and host == "": raise ValueError("Either name or host must be specified") diff --git a/pinecone/inference/inference.py b/pinecone/inference/inference.py index 53a52aa3..48ed56d3 100644 --- a/pinecone/inference/inference.py +++ b/pinecone/inference/inference.py @@ -26,25 +26,26 @@ class Inference(PluginAware): """ - The `Inference` class configures and uses the Pinecone Inference API to generate embeddings and + The ``Inference`` class configures and uses the Pinecone Inference API to generate embeddings and rank documents. - It is generally not instantiated directly, but rather accessed through a parent `Pinecone` client + It is generally not instantiated directly, but rather accessed through a parent ``Pinecone`` client object that is responsible for managing shared configurations. - ```python - from pinecone import Pinecone + .. code-block:: python - pc = Pinecone() - embeddings = pc.inference.embed( - model="text-embedding-3-small", - inputs=["Hello, world!"], - parameters={"input_type": "passage", "truncate": "END"} - ) - ``` + from pinecone import Pinecone - :param config: A `pinecone.config.Config` object, configured and built in the Pinecone class. 
- :type config: `pinecone.config.Config`, required + pc = Pinecone() + embeddings = pc.inference.embed( + model="text-embedding-3-small", + inputs=["Hello, world!"], + parameters={"input_type": "passage", "truncate": "END"} + ) + + + :param config: A ``pinecone.config.Config`` object, configured and built in the ``Pinecone`` class. + :type config: ``pinecone.config.Config``, required """ EmbedModel = EmbedModelEnum @@ -114,26 +115,26 @@ def model(self) -> "ModelResource": Currently you can get or list models. - ```python - pc = Pinecone() + .. code-block:: python + pc = Pinecone() + + # List all models + models = pc.inference.model.list() - # List all models - models = pc.inference.model.list() + # List models, with model type filtering + models = pc.inference.model.list(type="embed") + models = pc.inference.model.list(type="rerank") - # List models, with model type filtering - models = pc.inference.model.list(type="embed") - models = pc.inference.model.list(type="rerank") + # List models, with vector type filtering + models = pc.inference.model.list(vector_type="dense") + models = pc.inference.model.list(vector_type="sparse") - # List models, with vector type filtering - models = pc.inference.model.list(vector_type="dense") - models = pc.inference.model.list(vector_type="sparse") + # List models, with both type and vector type filtering + models = pc.inference.model.list(type="rerank", vector_type="dense") - # List models, with both type and vector type filtering - models = pc.inference.model.list(type="rerank", vector_type="dense") + # Get details on a specific model + model = pc.inference.model.get("text-embedding-3-small") - # Get details on a specific model - model = pc.inference.model.get("text-embedding-3-small") - ``` """ if self._model is None: from .resources.sync.model import Model as ModelResource @@ -157,29 +158,32 @@ def embed( :param model: The model to use for generating embeddings.
:type model: str, required - :param inputs: A list of items to generate embeddings for. :type inputs: list, required - :param parameters: A dictionary of parameters to use when generating embeddings. :type parameters: dict, optional - :return: EmbeddingsList object with keys `data`, `model`, and `usage`. The `data` key contains a list of - `n` embeddings, where `n` = len(inputs) and type(n) = Embedding. Precision of returned embeddings is either - float16 or float32, with float32 being the default. `model` key is the model used to generate the embeddings. - `usage` key contains the total number of tokens used at request-time. + :return: ``EmbeddingsList`` object with keys ``data``, ``model``, and ``usage``. The ``data`` key contains a list of + ``n`` embeddings, where ``n`` = len(inputs). Precision of returned embeddings is either + float16 or float32, with float32 being the default. ``model`` key is the model used to generate the embeddings. + ``usage`` key contains the total number of tokens used at request-time. Example: - >>> inputs = ["Who created the first computer?"] - >>> outputs = pc.inference.embed(model="multilingual-e5-large", inputs=inputs, parameters={"input_type": "passage", "truncate": "END"}) - >>> print(outputs) - EmbeddingsList( - model='multilingual-e5-large', - data=[ - {'values': [0.1, ...., 0.2]}, - ], - usage={'total_tokens': 6} - ) + + .. code-block:: python + + >>> pc = Pinecone() + >>> inputs = ["Who created the first computer?"] + >>> outputs = pc.inference.embed(model="multilingual-e5-large", inputs=inputs, parameters={"input_type": "passage", "truncate": "END"}) + >>> print(outputs) + EmbeddingsList( + model='multilingual-e5-large', + data=[ + {'values': [0.1, ...., 0.2]}, + ], + usage={'total_tokens': 6} + ) + """ request_body = InferenceRequestBuilder.embed_request( model=model, inputs=inputs, parameters=parameters @@ -203,56 +207,61 @@ def rerank( :param model: The model to use for reranking. 
:type model: str, required - :param query: The query to compare with documents. :type query: str, required - :param documents: A list of documents or strings to rank. :type documents: list, required - :param rank_fields: A list of document fields to use for ranking. Defaults to ["text"]. :type rank_fields: list, optional - :param return_documents: Whether to include the documents in the response. Defaults to True. :type return_documents: bool, optional - :param top_n: How many documents to return. Defaults to len(documents). :type top_n: int, optional - :param parameters: A dictionary of parameters to use when ranking documents. :type parameters: dict, optional - :return: RerankResult object with keys `data` and `usage`. The `data` key contains a list of - `n` documents, where `n` = `top_n` and type(n) = Document. The documents are sorted in order of - relevance, with the first being the most relevant. The `index` field can be used to locate the document - relative to the list of documents specified in the request. Each document contains a `score` key - representing how close the document relates to the query. + :return: ``RerankResult`` object with keys ``data`` and ``usage``. The ``data`` key contains a list of + ``n`` documents, where ``n`` = ``top_n`` and type(n) = Document. The documents are sorted in order of + relevance, with the first being the most relevant. The ``index`` field can be used to locate the document + relative to the list of documents specified in the request. Each document contains a ``score`` key + representing how close the document relates to the query. Example: - >>> result = pc.inference.rerank( - model="bge-reranker-v2-m3", - query="Tell me about tech companies", - documents=[ - "Apple is a popular fruit known for its sweetness and crisp texture.", - "Software is still eating the world.", - "Many people enjoy eating apples as a healthy snack.", - "Acme Inc. 
has revolutionized the tech industry with its sleek designs and user-friendly interfaces.", - "An apple a day keeps the doctor away, as the saying goes.", + + .. code-block:: python + + >>> pc = Pinecone() + >>> pc.inference.rerank( + model="bge-reranker-v2-m3", + query="Tell me about tech companies", + documents=[ + "Apple is a popular fruit known for its sweetness and crisp texture.", + "Software is still eating the world.", + "Many people enjoy eating apples as a healthy snack.", + "Acme Inc. has revolutionized the tech industry with its sleek designs and user-friendly interfaces.", + "An apple a day keeps the doctor away, as the saying goes.", ], top_n=2, return_documents=True, ) - >>> print(result) - RerankResult( - model='bge-reranker-v2-m3', - data=[ - { index=3, score=0.020980744, - document={text="Acme Inc. has rev..."} }, - { index=1, score=0.00034015716, - document={text="Software is still..."} } - ], - usage={'rerank_units': 1} - ) + RerankResult( + model='bge-reranker-v2-m3', + data=[{ + index=3, + score=0.020924192, + document={ + text='Acme Inc. has revolutionized the tech industry with its sleek designs and user-friendly interfaces.' + } + },{ + index=1, + score=0.00034464317, + document={ + text='Software is still eating the world.' + } + }], + usage={'rerank_units': 1} + ) + """ rerank_request = InferenceRequestBuilder.rerank( model=model, @@ -271,34 +280,36 @@ def list_models( self, *, type: Optional[str] = None, vector_type: Optional[str] = None ) -> "ModelInfoList": """ - List all available models. + List all available models. + :param type: The type of model to list. Either "embed" or "rerank". + :type type: str, optional - ```python - pc = Pinecone() + :param vector_type: The type of vector to list. Either "dense" or "sparse". + :type vector_type: str, optional - # List all models - models = pc.inference.list_models() + :return: A list of models. 
- # List models, with model type filtering - models = pc.inference.list_models(type="embed") - models = pc.inference.list_models(type="rerank") + Example: + + .. code-block:: python - # List models, with vector type filtering - models = pc.inference.list_models(vector_type="dense") - models = pc.inference.list_models(vector_type="sparse") + pc = Pinecone() - # List models, with both type and vector type filtering - models = pc.inference.list_models(type="rerank", vector_type="dense") - ``` + # List all models + models = pc.inference.list_models() - :param type: The type of model to list. Either "embed" or "rerank". - :type type: str, optional + # List models, with model type filtering + models = pc.inference.list_models(type="embed") + models = pc.inference.list_models(type="rerank") - :param vector_type: The type of vector to list. Either "dense" or "sparse". - :type vector_type: str, optional + # List models, with vector type filtering + models = pc.inference.list_models(vector_type="dense") + models = pc.inference.list_models(vector_type="sparse") + + # List models, with both type and vector type filtering + models = pc.inference.list_models(type="rerank", vector_type="dense") - :return: A list of models. """ return self.model.list(type=type, vector_type=vector_type) @@ -307,15 +318,37 @@ def get_model(self, model_name: str) -> "ModelInfo": """ Get details on a specific model. - ```python - pc = Pinecone() - - model = pc.inference.get_model(model_name="text-embedding-3-small") - ``` - :param model_name: The name of the model to get details on. :type model_name: str, required :return: A ModelInfo object. + + .. code-block:: python + + >>> pc = Pinecone() + >>> pc.inference.get_model(model_name="pinecone-rerank-v0") + { + "model": "pinecone-rerank-v0", + "short_description": "A state of the art reranking model that out-performs competitors on widely accepted benchmarks. 
It can handle chunks up to 512 tokens (1-2 paragraphs)", + "type": "rerank", + "supported_parameters": [ + { + "parameter": "truncate", + "type": "one_of", + "value_type": "string", + "required": false, + "default": "END", + "allowed_values": [ + "END", + "NONE" + ] + } + ], + "modality": "text", + "max_sequence_length": 512, + "max_batch_size": 100, + "provider_name": "Pinecone", + "supported_metrics": [] + } """ return self.model.get(model_name=model_name) diff --git a/pinecone/inference/models/embedding_list.py b/pinecone/inference/models/embedding_list.py index f28a2f82..c54e3dab 100644 --- a/pinecone/inference/models/embedding_list.py +++ b/pinecone/inference/models/embedding_list.py @@ -8,7 +8,10 @@ class EmbeddingsList: def __init__(self, embeddings_list: OpenAPIEmbeddingsList): self.embeddings_list = embeddings_list + """ @private """ + self.current = 0 + """ @private """ def __getitem__(self, index): return self.embeddings_list.get("data")[index] diff --git a/pinecone/legacy_pinecone_interface.py b/pinecone/legacy_pinecone_interface.py index 27e893d7..7c50bdb5 100644 --- a/pinecone/legacy_pinecone_interface.py +++ b/pinecone/legacy_pinecone_interface.py @@ -43,153 +43,7 @@ def __init__( pool_threads: Optional[int] = 1, **kwargs, ): - """ - The `Pinecone` class is the main entry point for interacting with Pinecone via this Python SDK. - Instances of the `Pinecone` class are used to create, delete, and manage your indexes and collections. The class also holds inference functionality (embed, rerank) under the `inference` namespace. - Methods which create or modify index and collection resources result in network calls to https://api.pinecone.io. - - When you are ready to perform data operations on an index, you will need to instantiate an index client. Though the functionality of the index client is defined in a different - class, it is instantiated through the `Index()` method in order for configurations to be shared between the two objects. 
- - :param api_key: The API key to use for authentication. If not passed via kwarg, the API key will be read from the environment variable `PINECONE_API_KEY`. - :type api_key: str, optional - :param host: The control plane host. If unspecified, the host `api.pinecone.io` will be used. - :type host: str, optional - :param proxy_url: The URL of the proxy to use for the connection. - :type proxy_url: str, optional - :param proxy_headers: Additional headers to pass to the proxy. Use this if your proxy setup requires authentication. - :type proxy_headers: Dict[str, str], optional - :param ssl_ca_certs: The path to the SSL CA certificate bundle to use for the connection. This path should point to a file in PEM format. When not passed, the SDK will use the certificate bundle returned from `certifi.where()`. - :type ssl_ca_certs: str, optional - :param ssl_verify: SSL verification is performed by default, but can be disabled using the boolean flag when testing with Pinecone Local or troubleshooting a proxy setup. You should never run with SSL verification disabled in production. - :type ssl_verify: bool, optional - :param additional_headers: Additional headers to pass to the API. This is mainly to support internal testing at Pinecone. End users should not need to use this unless following specific instructions to do so. - :type additional_headers: Dict[str, str], optional - :param pool_threads: The number of threads to use for the ThreadPool when using methods that support the `async_req` keyword argument. The default number of threads is 5 * the number of CPUs in your execution environment. - :type pool_threads: int, optional - - ### Configuration with environment variables - - If you instantiate the Pinecone client with no arguments, it will attempt to read the API key from the environment variable `PINECONE_API_KEY`. 
- - ```python - from pinecone import Pinecone - - pc = Pinecone() - ``` - - ### Configuration with keyword arguments - - If you prefer being more explicit in your code, you can also pass the API key as a keyword argument. This is also where you will pass additional configuration options such as proxy settings if you wish to use those. - - ```python - import os - from pinecone import Pinecone - - pc = Pinecone( - api_key=os.environ.get("PINECONE_API_KEY"), - host="https://api-staging.pinecone.io" - ) - ``` - - ### Environment variables - - The Pinecone client supports the following environment variables: - - - `PINECONE_API_KEY`: The API key to use for authentication. If not passed via - kwarg, the API key will be read from the environment variable `PINECONE_API_KEY`. - - - `PINECONE_DEBUG_CURL`: When troubleshooting it can be very useful to run curl - commands against the control plane API to see exactly what data is being sent - and received without all the abstractions and transformations applied by the Python - SDK. If you set this environment variable to `true`, the Pinecone client will use - request parameters to print out an equivalent curl command that you can run yourself - or share with Pinecone support. **Be very careful with this option, as it will print out - your API key** which forms part of a required authentication header. The main use of - is to help evaluate whether a problem you are experiencing is due to the API's behavior - or the behavior of the SDK itself. - - ### Proxy configuration - - If your network setup requires you to interact with Pinecone via a proxy, you will need - to pass additional configuration using optional keyword parameters. These optional parameters - are forwarded to `urllib3`, which is the underlying library currently used by the Pinecone client to - make HTTP requests. 
You may find it helpful to refer to the - [urllib3 documentation on working with proxies](https://urllib3.readthedocs.io/en/stable/advanced-usage.html#http-and-https-proxies) - while troubleshooting these settings. - - Here is a basic example: - - ```python - from pinecone import Pinecone - - pc = Pinecone( - api_key='YOUR_API_KEY', - proxy_url='https://your-proxy.com' - ) - - pc.list_indexes() - ``` - - If your proxy requires authentication, you can pass those values in a header dictionary using the `proxy_headers` parameter. - - ```python - from pinecone import Pinecone - import urllib3 import make_headers - - pc = Pinecone( - api_key='YOUR_API_KEY', - proxy_url='https://your-proxy.com', - proxy_headers=make_headers(proxy_basic_auth='username:password') - ) - - pc.list_indexes() - ``` - - ### Using proxies with self-signed certificates - - By default the Pinecone Python client will perform SSL certificate verification - using the CA bundle maintained by Mozilla in the [certifi](https://pypi.org/project/certifi/) package. - If your proxy server is using a self-signed certificate, you will need to pass the path to the certificate - in PEM format using the `ssl_ca_certs` parameter. - - ```python - from pinecone import Pinecone - import urllib3 import make_headers - - pc = Pinecone( - api_key='YOUR_API_KEY', - proxy_url='https://your-proxy.com', - proxy_headers=make_headers(proxy_basic_auth='username:password'), - ssl_ca_certs='path/to/cert-bundle.pem' - ) - - pc.list_indexes() - ``` - - ### Disabling SSL verification - - If you would like to disable SSL verification, you can pass the `ssl_verify` - parameter with a value of `False`. We do not recommend going to production with SSL verification disabled. 
- - ```python - from pinecone import Pinecone - import urllib3 import make_headers - - pc = Pinecone( - api_key='YOUR_API_KEY', - proxy_url='https://your-proxy.com', - proxy_headers=make_headers(proxy_basic_auth='username:password'), - ssl_ca_certs='path/to/cert-bundle.pem', - ssl_verify=False - ) - - pc.list_indexes() - - ``` - """ - - pass + pass @abstractmethod def create_index( @@ -211,94 +65,96 @@ def create_index( cannot be changed once created. Allowed characters are lowercase letters, numbers, and hyphens and the name may not begin or end with hyphens. Maximum length is 45 characters. :type name: str - :param metric: Type of similarity metric used in the vector index when querying, one of `{"cosine", "dotproduct", "euclidean"}`. + :param metric: Type of similarity metric used in the vector index when querying, one of ``{"cosine", "dotproduct", "euclidean"}``. :type metric: str, optional :param spec: A dictionary containing configurations describing how the index should be deployed. For serverless indexes, specify region and cloud. For pod indexes, specify replicas, shards, pods, pod_type, metadata_config, and source_collection. - Alternatively, use the `ServerlessSpec`, `PodSpec`, or `ByocSpec` objects to specify these configurations. + Alternatively, use the ``ServerlessSpec``, ``PodSpec``, or ``ByocSpec`` objects to specify these configurations. :type spec: Dict - :param dimension: If you are creating an index with `vector_type="dense"` (which is the default), you need to specify `dimension` to indicate the size of your vectors. + :param dimension: If you are creating an index with ``vector_type="dense"`` (which is the default), you need to specify ``dimension`` to indicate the size of your vectors. This should match the dimension of the embeddings you will be inserting. For example, if you are using - OpenAI's CLIP model, you should use `dimension=1536`. 
Dimension is a required field when - creating an index with `vector_type="dense"` and should not be passed when `vector_type="sparse"`. + OpenAI's text-embedding-3-small model, you should use ``dimension=1536``. Dimension is a required field when + creating an index with ``vector_type="dense"`` and should not be passed when ``vector_type="sparse"``. :type dimension: int :type timeout: int, optional :param timeout: Specify the number of seconds to wait until index gets ready. If None, wait indefinitely; if >=0, time out after this many seconds; if -1, return immediately and do not wait. :param deletion_protection: If enabled, the index cannot be deleted. If disabled, the index can be deleted. :type deletion_protection: Optional[Literal["enabled", "disabled"]] - :param vector_type: The type of vectors to be stored in the index. One of `{"dense", "sparse"}`. :type vector_type: str, optional + :param vector_type: The type of vectors to be stored in the index. One of ``{"dense", "sparse"}``. :type vector_type: str, optional :param tags: Tags are key-value pairs you can attach to indexes to better understand, organize, and identify your resources. Some example use cases include tagging indexes with the name of the model that generated the embeddings, the date the index was created, or the purpose of the index. :type tags: Optional[Dict[str, str]] - :return: A `IndexModel` instance containing a description of the index that was created.
- - ### Creating a serverless index - - ```python - import os - from pinecone import ( - Pinecone, - ServerlessSpec, - CloudProvider, - AwsRegion, - Metric, - DeletionProtection, - VectorType - ) - - pc = Pinecone(api_key=os.environ.get("PINECONE_API_KEY")) - - pc.create_index( - name="my_index", - dimension=1536, - metric=Metric.COSINE, - spec=ServerlessSpec( - cloud=CloudProvider.AWS, - region=AwsRegion.US_WEST_2 - ), - deletion_protection=DeletionProtection.DISABLED, - vector_type=VectorType.DENSE, - tags={ - "model": "clip", - "app": "image-search", - "env": "testing" - } - ) - ``` - - ### Creating a pod index - - ```python - import os - from pinecone import ( - Pinecone, - PodSpec, - PodIndexEnvironment, - PodType, - Metric, - DeletionProtection, - VectorType - ) - - pc = Pinecone(api_key=os.environ.get("PINECONE_API_KEY")) - - pc.create_index( - name="my_index", - dimension=1536, - metric=Metric.COSINE, - spec=PodSpec( - environment=PodIndexEnvironment.US_EAST4_GCP, - pod_type=PodType.P1_X1 - ), - deletion_protection=DeletionProtection.DISABLED, - tags={ - "model": "clip", - "app": "image-search", - "env": "testing" - } - ) - ``` + :return: A ``IndexModel`` instance containing a description of the index that was created. + + Creating a serverless index + --------------------------- + + .. code-block:: python + + import os + from pinecone import ( + Pinecone, + ServerlessSpec, + CloudProvider, + AwsRegion, + Metric, + DeletionProtection, + VectorType + ) + + pc = Pinecone(api_key=os.environ.get("PINECONE_API_KEY")) + + pc.create_index( + name="my_index", + dimension=1536, + metric=Metric.COSINE, + spec=ServerlessSpec( + cloud=CloudProvider.AWS, + region=AwsRegion.US_WEST_2 + ), + deletion_protection=DeletionProtection.DISABLED, + vector_type=VectorType.DENSE, + tags={ + "model": "clip", + "app": "image-search", + "env": "testing" + } + ) + + Creating a pod index + --------------------- + + .. 
code-block:: python + + import os + from pinecone import ( + Pinecone, + PodSpec, + PodIndexEnvironment, + PodType, + Metric, + DeletionProtection, + VectorType + ) + + pc = Pinecone(api_key=os.environ.get("PINECONE_API_KEY")) + + pc.create_index( + name="my_index", + dimension=1536, + metric=Metric.COSINE, + spec=PodSpec( + environment=PodIndexEnvironment.US_EAST4_GCP, + pod_type=PodType.P1_X1 + ), + deletion_protection=DeletionProtection.DISABLED, + tags={ + "model": "clip", + "app": "image-search", + "env": "testing" + } + ) """ pass @@ -315,13 +171,13 @@ def create_index_from_backup( """ Create an index from a backup. - Call `list_backups` to get a list of backups for your project. + Call ``list_backups`` to get a list of backups for your project. :param name: The name of the index to create. :type name: str :param backup_id: The ID of the backup to restore. :type backup_id: str - :param deletion_protection: If enabled, the index cannot be deleted. If disabled, the index can be deleted. This setting can be changed with `configure_index`. + :param deletion_protection: If enabled, the index cannot be deleted. If disabled, the index can be deleted. This setting can be changed with ``configure_index``. :type deletion_protection: Optional[Literal["enabled", "disabled"]] :param tags: Tags are key-value pairs you can attach to indexes to better understand, organize, and identify your resources. Some example use cases include tagging indexes with the name of the model that generated the embeddings, the date the index was created, or the purpose of the index. :type tags: Optional[Dict[str, str]] @@ -351,15 +207,15 @@ def create_index_for_model( cannot be changed once created. Allowed characters are lowercase letters, numbers, and hyphens and the name may not begin or end with hyphens. Maximum length is 45 characters. :type name: str - :param cloud: The cloud provider to use for the index. One of `{"aws", "gcp", "azure"}`. 
+ :param cloud: The cloud provider to use for the index. One of ``{"aws", "gcp", "azure"}``. :type cloud: str - :param region: The region to use for the index. Enum objects `AwsRegion`, `GcpRegion`, and `AzureRegion` are also available to help you quickly set these parameters, but may not be up to date as new regions become available. + :param region: The region to use for the index. Enum objects ``AwsRegion``, ``GcpRegion``, and ``AzureRegion`` are also available to help you quickly set these parameters, but may not be up to date as new regions become available. :type region: str - :param embed: The embedding configuration for the index. This param accepts a dictionary or an instance of the `IndexEmbed` object. + :param embed: The embedding configuration for the index. This param accepts a dictionary or an instance of the ``IndexEmbed`` object. :type embed: Union[Dict, IndexEmbed] :param tags: Tags are key-value pairs you can attach to indexes to better understand, organize, and identify your resources. Some example use cases include tagging indexes with the name of the model that generated the embeddings, the date the index was created, or the purpose of the index. :type tags: Optional[Dict[str, str]] - :param deletion_protection: If enabled, the index cannot be deleted. If disabled, the index can be deleted. This setting can be changed with `configure_index`. + :param deletion_protection: If enabled, the index cannot be deleted. If disabled, the index can be deleted. This setting can be changed with ``configure_index``. :type deletion_protection: Optional[Literal["enabled", "disabled"]] :type timeout: Optional[int] :param timeout: Specify the number of seconds to wait until index is ready to receive data. If None, wait indefinitely; if >=0, time out after this many seconds; @@ -369,40 +225,43 @@ def create_index_for_model( This method is used to create a Serverless index that is configured for use with Pinecone's integrated inference models. 
- The resulting index can be described, listed, configured, and deleted like any other Pinecone index with the `describe_index`, `list_indexes`, `configure_index`, and `delete_index` methods. - - After the model is created, you can upsert records into the index with the `upsert_records` method, and search your records with the `search` method. - - ```python - from pinecone import ( - Pinecone, - IndexEmbed, - CloudProvider, - AwsRegion, - EmbedModel, - Metric, - ) - - pc = Pinecone() - - if not pc.has_index("book-search"): - desc = await pc.create_index_for_model( - name="book-search", - cloud=CloudProvider.AWS, - region=AwsRegion.US_EAST_1, - embed=IndexEmbed( - model=EmbedModel.Multilingual_E5_Large, - metric=Metric.COSINE, - field_map={ - "text": "description", - }, - ) + The resulting index can be described, listed, configured, and deleted like any other Pinecone index with the ``describe_index``, ``list_indexes``, ``configure_index``, and ``delete_index`` methods. + + After the model is created, you can upsert records into the index with the ``upsert_records`` method, and search your records with the ``search`` method. + + .. code-block:: python + + from pinecone import ( + Pinecone, + IndexEmbed, + CloudProvider, + AwsRegion, + EmbedModel, + Metric, ) - ``` - To see the available cloud regions, see this [Pinecone documentation](https://docs.pinecone.io/troubleshooting/available-cloud-regions) page. + pc = Pinecone() + + if not pc.has_index("book-search"): + desc = await pc.create_index_for_model( + name="book-search", + cloud=CloudProvider.AWS, + region=AwsRegion.US_EAST_1, + embed=IndexEmbed( + model=EmbedModel.Multilingual_E5_Large, + metric=Metric.COSINE, + field_map={ + "text": "description", + }, + ) + ) + + + See also: + + * See `available cloud regions `_ + * See the `Model Gallery `_ to learn about available models - See the [Model Gallery](https://docs.pinecone.io/models/overview) to learn about available models. 
""" pass @@ -423,41 +282,42 @@ def delete_index(self, name: str, timeout: Optional[int] = None): the index, but the termination is not synchronous because resources take a few moments to be released. - By default the `delete_index` method will block until polling of the `describe_index` method + By default the ``delete_index`` method will block until polling of the ``describe_index`` method shows that the delete operation has completed. If you prefer to return immediately and not - wait for the index to be deleted, you can pass `timeout=-1` to the method. + wait for the index to be deleted, you can pass ``timeout=-1`` to the method. - After the delete request is submitted, polling `describe_index` will show that the index - transitions into a `Terminating` state before eventually resulting in a 404 after it has been removed. + After the delete request is submitted, polling ``describe_index`` will show that the index + transitions into a ``Terminating`` state before eventually resulting in a 404 after it has been removed. - This operation can fail if the index is configured with `deletion_protection="enabled"`. - In this case, you will need to call `configure_index` to disable deletion protection before + This operation can fail if the index is configured with ``deletion_protection="enabled"``. + In this case, you will need to call ``configure_index`` to disable deletion protection before you can delete the index. - ```python - from pinecone import Pinecone + .. code-block:: python + + from pinecone import Pinecone + + pc = Pinecone() - pc = Pinecone() + index_name = "my_index" + desc = pc.describe_index(name=index_name) - index_name = "my_index" - desc = pc.describe_index(name=index_name) + if desc.deletion_protection == "enabled": + # If for some reason deletion protection is enabled, you will need to disable it first + # before you can delete the index. 
But use caution as this operation is not reversible + # and if somebody enabled deletion protection, they probably had a good reason. + pc.configure_index(name=index_name, deletion_protection="disabled") - if desc.deletion_protection == "enabled": - # If for some reason deletion protection is enabled, you will need to disable it first - # before you can delete the index. But use caution as this operation is not reversible - # and if somebody enabled deletion protection, they probably had a good reason. - pc.configure_index(name=index_name, deletion_protection="disabled") + pc.delete_index(name=index_name) - pc.delete_index(name=index_name) - ``` """ pass @abstractmethod def list_indexes(self) -> "IndexList": """ - :return: Returns an `IndexList` object, which is iterable and contains a - list of `IndexModel` objects. The `IndexList` also has a convenience method `names()` + :return: Returns an ``IndexList`` object, which is iterable and contains a + list of ``IndexModel`` objects. The ``IndexList`` also has a convenience method ``names()`` which returns a list of index names for situations where you just want to iterate over all index names. @@ -466,23 +326,24 @@ def list_indexes(self) -> "IndexList": The results include a description of all indexes in your project, including the index name, dimension, metric, status, and spec. - If you simply want to check whether an index exists, see the `has_index()` convenience method. + If you simply want to check whether an index exists, see the ``has_index()`` convenience method. - You can use the `list_indexes()` method to iterate over descriptions of every index in your project. + You can use the ``list_indexes()`` method to iterate over descriptions of every index in your project. - ```python - from pinecone import Pinecone + .. 
code-block:: python - pc = Pinecone() + from pinecone import Pinecone + + pc = Pinecone() + + for index in pc.list_indexes(): + print(index.name) + print(index.dimension) + print(index.metric) + print(index.status) + print(index.host) + print(index.spec) - for index in pc.list_indexes(): - print(index.name) - print(index.dimension) - print(index.metric) - print(index.status) - print(index.host) - print(index.spec) - ``` """ pass @@ -490,14 +351,14 @@ def list_indexes(self) -> "IndexList": def describe_index(self, name: str) -> "IndexModel": """ :param name: the name of the index to describe. - :return: Returns an `IndexModel` object - which gives access to properties such as the - index name, dimension, metric, host url, status, - and spec. + :return: Returns an ``IndexModel`` object + which gives access to properties such as the + index name, dimension, metric, host url, status, + and spec. Describes a Pinecone index. - ### Getting your index host url + **Getting your index host url** In a real production situation, you probably want to store the host url in an environment variable so you @@ -505,41 +366,42 @@ def describe_index(self, name: str) -> "IndexModel": every time you want to use the index. But this example shows how to get the value from the API using describe_index. 
- ```python - from pinecone import Pinecone, Index - - pc = Pinecone() - - index_name="my_index" - description = pc.describe_index(name=index_name) - print(description) - # { - # "name": "my_index", - # "metric": "cosine", - # "host": "my_index-dojoi3u.svc.aped-4627-b74a.pinecone.io", - # "spec": { - # "serverless": { - # "cloud": "aws", - # "region": "us-east-1" - # } - # }, - # "status": { - # "ready": true, - # "state": "Ready" - # }, - # "vector_type": "dense", - # "dimension": 1024, - # "deletion_protection": "enabled", - # "tags": { - # "environment": "production" - # } - # } - - print(f"Your index is hosted at {description.host}") - - index = pc.Index(host=description.host) - index.upsert(vectors=[...]) - ``` + .. code-block:: python + + from pinecone import Pinecone, Index + + pc = Pinecone() + + index_name="my_index" + description = pc.describe_index(name=index_name) + print(description) + # { + # "name": "my_index", + # "metric": "cosine", + # "host": "my_index-dojoi3u.svc.aped-4627-b74a.pinecone.io", + # "spec": { + # "serverless": { + # "cloud": "aws", + # "region": "us-east-1" + # } + # }, + # "status": { + # "ready": true, + # "state": "Ready" + # }, + # "vector_type": "dense", + # "dimension": 1024, + # "deletion_protection": "enabled", + # "tags": { + # "environment": "production" + # } + # } + + print(f"Your index is hosted at {description.host}") + + index = pc.Index(host=description.host) + index.upsert(vectors=[...]) + """ pass @@ -547,25 +409,25 @@ def describe_index(self, name: str) -> "IndexModel": def has_index(self, name: str) -> bool: """ :param name: The name of the index to check for existence. - :return: Returns `True` if the index exists, `False` otherwise. + :return: Returns ``True`` if the index exists, ``False`` otherwise. Checks if a Pinecone index exists. - ```python - from pinecone import Pinecone, ServerlessSpec + .. 
code-block:: python - pc = Pinecone() + from pinecone import Pinecone, ServerlessSpec - index_name = "my_index" - if not pc.has_index(index_name): - print("Index does not exist, creating...") - pc.create_index( - name=index_name, - dimension=768, - metric="cosine", - spec=ServerlessSpec(cloud="aws", region="us-west-2") - ) - ``` + pc = Pinecone() + + index_name = "my_index" + if not pc.has_index(index_name): + print("Index does not exist, creating...") + pc.create_index( + name=index_name, + dimension=768, + metric="cosine", + spec=ServerlessSpec(cloud="aws", region="us-west-2") + ) """ pass @@ -579,85 +441,92 @@ def configure_index( tags: Optional[Dict[str, str]] = None, ): """ - :param: name: the name of the Index - :param: replicas: the desired number of replicas, lowest value is 0. - :param: pod_type: the new pod_type for the index. To learn more about the - available pod types, please see [Understanding Indexes](https://docs.pinecone.io/docs/indexes) - :param: deletion_protection: If set to 'enabled', the index cannot be deleted. If 'disabled', the index can be deleted. - :param: tags: A dictionary of tags to apply to the index. Tags are key-value pairs that can be used to organize and manage indexes. To remove a tag, set the value to "". Tags passed to configure_index will be merged with existing tags and any with the value empty string will be removed. + :param name: the name of the Index + :type name: str, required + :param replicas: the desired number of replicas, lowest value is 0. + :type replicas: int, optional + :param pod_type: the new ``pod_type`` for the index. To learn more about the + available pod types, please see `Understanding Indexes `_. + Note that pod type is only available for pod-based indexes. + :type pod_type: str or PodType, optional + :param deletion_protection: If set to ``'enabled'``, the index cannot be deleted. If ``'disabled'``, the index can be deleted. 
+ :type deletion_protection: str or DeletionProtection, optional + :param tags: A dictionary of tags to apply to the index. Tags are key-value pairs that can be used to organize and manage indexes. To remove a tag, set the value to "". Tags passed to configure_index will be merged with existing tags and any with the value empty string will be removed. + :type tags: Dict[str, str], optional This method is used to modify an index's configuration. It can be used to: - - Scale a pod-based index horizontally using `replicas` - - Scale a pod-based index vertically using `pod_type` - - Enable or disable deletion protection using `deletion_protection` - - Add, change, or remove tags using `tags` + * Scale a pod-based index horizontally using ``replicas`` + * Scale a pod-based index vertically using ``pod_type`` + * Enable or disable deletion protection using ``deletion_protection`` + * Add, change, or remove tags using ``tags`` - ## Scaling pod-based indexes + **Scaling pod-based indexes** - To scale your pod-based index, you pass a `replicas` and/or `pod_type` param to the `configure_index` method. `pod_type` may be a string or a value from the `PodType` enum. + To scale your pod-based index, you pass a ``replicas`` and/or ``pod_type`` param to the ``configure_index`` method. ``pod_type`` may be a string or a value from the ``PodType`` enum. - ```python - from pinecone import Pinecone, PodType + .. code-block:: python - pc = Pinecone() - pc.configure_index( - name="my_index", - replicas=2, - pod_type=PodType.P1_X2 - ) - ``` + from pinecone import Pinecone, PodType - After providing these new configurations, you must call `describe_index` to see the status of the index as the changes are applied. + pc = Pinecone() + pc.configure_index( + name="my_index", + replicas=2, + pod_type=PodType.P1_X2 + ) + + After providing these new configurations, you must call ``describe_index`` to see the status of the index as the changes are applied. 
+ + **Enabling or disabling deletion protection** - ## Enabling or disabling deletion protection + To enable or disable deletion protection, pass the ``deletion_protection`` parameter to the ``configure_index`` method. When deletion protection + is enabled, the index cannot be deleted with the ``delete_index`` method. - To enable or disable deletion protection, pass the `deletion_protection` parameter to the `configure_index` method. When deletion protection - is enabled, the index cannot be deleted with the `delete_index` method. + .. code-block:: python - ```python - from pinecone import Pinecone, DeletionProtection + from pinecone import Pinecone, DeletionProtection - pc = Pinecone() + pc = Pinecone() - # Enable deletion protection - pc.configure_index( - name="my_index", - deletion_protection=DeletionProtection.ENABLED - ) + # Enable deletion protection + pc.configure_index( + name="my_index", + deletion_protection=DeletionProtection.ENABLED + ) - # Call describe_index to see the change was applied. - assert pc.describe_index("my_index").deletion_protection == "enabled" + # Call describe_index to see the change was applied. + assert pc.describe_index("my_index").deletion_protection == "enabled" - # Disable deletion protection - pc.configure_index( - name="my_index", - deletion_protection=DeletionProtection.DISABLED - ) - ``` + # Disable deletion protection + pc.configure_index( + name="my_index", + deletion_protection=DeletionProtection.DISABLED + ) - ## Adding, changing, or removing tags + **Adding, changing, or removing tags** - To add, change, or remove tags, pass the `tags` parameter to the `configure_index` method. When tags are passed using `configure_index`, + To add, change, or remove tags, pass the ``tags`` parameter to the ``configure_index`` method. When tags are passed using ``configure_index``, they are merged with any existing tags already on the index. To remove a tag, set the value of the key to an empty string. 
- ```python - from pinecone import Pinecone + .. code-block:: python + + from pinecone import Pinecone - pc = Pinecone() + pc = Pinecone() - # Add a tag - pc.configure_index(name="my_index", tags={"environment": "staging"}) + # Add a tag + pc.configure_index(name="my_index", tags={"environment": "staging"}) - # Change a tag - pc.configure_index(name="my_index", tags={"environment": "production"}) + # Change a tag + pc.configure_index(name="my_index", tags={"environment": "production"}) - # Remove a tag - pc.configure_index(name="my_index", tags={"environment": ""}) + # Remove a tag + pc.configure_index(name="my_index", tags={"environment": ""}) + + # Call describe_index to view the tags are changed + print(pc.describe_index("my_index").tags) - # Call describe_index to view the tags are changed - print(pc.describe_index("my_index").tags) - ``` """ pass @@ -666,7 +535,9 @@ def create_collection(self, name: str, source: str) -> None: """Create a collection from a pod-based index :param name: Name of the collection + :type name: str, required :param source: Name of the source index + :type source: str, required """ pass @@ -674,28 +545,29 @@ def create_collection(self, name: str, source: str) -> None: def list_collections(self) -> "CollectionList": """List all collections - ```python - from pinecone import Pinecone + .. code-block:: python + + from pinecone import Pinecone - pc = Pinecone() + pc = Pinecone() - for collection in pc.list_collections(): - print(collection.name) - print(collection.source) + for collection in pc.list_collections(): + print(collection.name) + print(collection.source) + + # You can also iterate specifically over the collection + # names with the .names() helper. + collection_name="my_collection" + for collection_name in pc.list_collections().names(): + print(collection_name) - # You can also iterate specifically over the collection - # names with the .names() helper. 
- collection_name="my_collection" - for collection_name in pc.list_collections().names(): - print(collection_name) - ``` """ pass @abstractmethod def delete_collection(self, name: str) -> None: """ - :param name: The name of the collection to delete. + :param str name: The name of the collection to delete. Deletes a collection. @@ -704,36 +576,40 @@ def delete_collection(self, name: str) -> None: This method tells Pinecone you would like to delete a collection, but it takes a few moments to complete the operation. Use the - `describe_collection()` method to confirm that the collection + ``describe_collection()`` method to confirm that the collection has been deleted. - ```python - from pinecone import Pinecone + .. code-block:: python + + from pinecone import Pinecone - pc = Pinecone() + pc = Pinecone() + + pc.delete_collection(name="my_collection") - pc.delete_collection(name="my_collection") - ``` """ pass @abstractmethod def describe_collection(self, name: str): """Describes a collection. - :param: The name of the collection + + :param str name: The name of the collection + :return: Description of the collection - ```python - from pinecone import Pinecone + .. code-block:: python - pc = Pinecone() + from pinecone import Pinecone + + pc = Pinecone() + + description = pc.describe_collection("my_collection") + print(description.name) + print(description.source) + print(description.status) + print(description.size) - description = pc.describe_collection("my_collection") - print(description.name) - print(description.source) - print(description.status) - print(description.size) - ``` """ pass @@ -746,7 +622,7 @@ def create_backup( Args: index_name (str): The name of the index to backup. backup_name (str): The name to give the backup. - description (str): Optional description of the backup. + description (str, optional): Optional description of the backup. """ pass @@ -760,12 +636,12 @@ def list_backups( ) -> "BackupList": """List backups. 
- If index_name is provided, the backups will be filtered by index. If no index_name is provided, all backups in the projectwill be returned. + If ``index_name`` is provided, the backups will be filtered by index. If no ``index_name`` is provided, all backups in the project will be returned. Args: - index_name (str): The name of the index to list backups for. - limit (int): The maximum number of backups to return. - pagination_token (str): The pagination token to use for pagination. + index_name (str, optional): The name of the index to list backups for. + limit (int, optional): The maximum number of backups to return. + pagination_token (str, optional): The pagination token to use for pagination. """ pass @@ -813,15 +689,19 @@ def Index(self, name: str = "", host: str = "", **kwargs): """ :param name: The name of the index to target. If you specify the name of the index, the client will fetch the host url from the Pinecone control plane. + :type name: str, optional :param host: The host url of the index to target. If you specify the host url, the client will use the host url directly without making any additional calls to the control plane. + :type host: str, optional :param pool_threads: The number of threads to use when making parallel requests by calling index methods with optional kwarg async_req=True, or using methods that make use of thread-based parallelism automatically such as query_namespaces(). + :type pool_threads: int, optional :param connection_pool_maxsize: The maximum number of connections to keep in the connection pool. - :return: An instance of the `Index` class. + :type connection_pool_maxsize: int, optional + :return: An instance of the ``Index`` class. Target an index for data operations. - ### Target an index by host url + **Target an index by host url** In production situations, you want to uspert or query your data as quickly as possible. 
If you know in advance the host url of your index, you can @@ -830,36 +710,36 @@ def Index(self, name: str = "", host: str = "", **kwargs): will need to make an additional call to api.pinecone.io to get the host url before any data operations can take place. - ```python - import os - from pinecone import Pinecone + .. code-block:: python + + import os + from pinecone import Pinecone - api_key = os.environ.get("PINECONE_API_KEY") - index_host = os.environ.get("PINECONE_INDEX_HOST") + api_key = os.environ.get("PINECONE_API_KEY") + index_host = os.environ.get("PINECONE_INDEX_HOST") - pc = Pinecone(api_key=api_key) - index = pc.Index(host=index_host) + pc = Pinecone(api_key=api_key) + index = pc.Index(host=index_host) - # Now you're ready to perform data operations - index.query(vector=[...], top_k=10) - ``` + # Now you're ready to perform data operations + index.query(vector=[...], top_k=10) To find your host url, you can use the describe_index method to call api.pinecone.io. The host url is returned in the response. Or, alternatively, the host is displayed in the Pinecone web console. - ```python - import os - from pinecone import Pinecone + .. code-block:: python - pc = Pinecone( - api_key=os.environ.get("PINECONE_API_KEY") - ) + import os + from pinecone import Pinecone - host = pc.describe_index('index-name').host - ``` + pc = Pinecone( + api_key=os.environ.get("PINECONE_API_KEY") + ) + + host = pc.describe_index('index-name').host - ### Target an index by name (not recommended for production) + **Target an index by name (not recommended for production)** For more casual usage, such as when you are playing and exploring with Pinecone in a notebook setting, you can also target an index by name. If you use this @@ -871,24 +751,25 @@ def Index(self, name: str = "", host: str = "", **kwargs): recommended for production usage because it introduces an unnecessary runtime dependency on api.pinecone.io. 
- ```python - import os - from pinecone import Pinecone, ServerlessSpec + .. code-block:: python + + import os + from pinecone import Pinecone, ServerlessSpec - api_key = os.environ.get("PINECONE_API_KEY") + api_key = os.environ.get("PINECONE_API_KEY") + + pc = Pinecone(api_key=api_key) + pc.create_index( + name='my_index', + dimension=1536, + metric='cosine', + spec=ServerlessSpec(cloud='aws', region='us-west-2') + ) + index = pc.Index('my_index') - pc = Pinecone(api_key=api_key) - pc.create_index( - name='my_index', - dimension=1536, - metric='cosine', - spec=ServerlessSpec(cloud='aws', region='us-west-2') - ) - index = pc.Index('my_index') + # Now you're ready to perform data operations + index.query(vector=[...], top_k=10) - # Now you're ready to perform data operations - index.query(vector=[...], top_k=10) - ``` """ pass @@ -896,8 +777,25 @@ def IndexAsyncio(self, host: str, **kwargs): """Build an asyncio-compatible Index object. :param host: The host url of the index to target. You can find this url in the Pinecone - web console or by calling describe_index method of `Pinecone` or `PineconeAsyncio`. + web console or by calling describe_index method of ``Pinecone`` or ``PineconeAsyncio``. + :type host: str, required + + :return: An instance of the ``IndexAsyncio`` class. + + .. code-block:: python + + import asyncio + import os + from pinecone import Pinecone + + async def main(): + pc = Pinecone(api_key=os.environ.get("PINECONE_API_KEY")) + async with pc.IndexAsyncio(host=os.environ.get("PINECONE_INDEX_HOST")) as index: + await index.query(vector=[...], top_k=10) + + asyncio.run(main()) + + See more docs for ``PineconeAsyncio`` `here <./asyncio.html#db-data-plane>`_. - :return: An instance of the `IndexAsyncio` class. 
""" pass diff --git a/pinecone/pinecone.py b/pinecone/pinecone.py index 792190e3..f794419b 100644 --- a/pinecone/pinecone.py +++ b/pinecone/pinecone.py @@ -61,6 +61,152 @@ def __init__( pool_threads: Optional[int] = None, **kwargs, ): + """ + The ``Pinecone`` class is the main entry point for interacting with Pinecone via this Python SDK. + Instances of the ``Pinecone`` class are used to manage and interact with Pinecone resources such as + indexes, backups, and collections. When using the SDK, calls are made on your behalf to the API + documented at `https://docs.pinecone.io `_. + + The class also holds inference functionality (embed, rerank) under the ``inference`` namespace. + + + When you are ready to perform data operations on an index, you will need to instantiate an index client. Though the functionality of the index client is defined in a different + class, it is instantiated through the ``Index()`` method in order for configurations to be shared between the two objects. + + :param api_key: The API key to use for authentication. If not passed via kwarg, the API key will be read from the environment variable ``PINECONE_API_KEY``. + :type api_key: str, optional + :param host: The control plane host. If unspecified, the host ``api.pinecone.io`` will be used. + :type host: str, optional + :param proxy_url: The URL of the proxy to use for the connection. + :type proxy_url: str, optional + :param proxy_headers: Additional headers to pass to the proxy. Use this if your proxy setup requires authentication. + :type proxy_headers: Dict[str, str], optional + :param ssl_ca_certs: The path to the SSL CA certificate bundle to use for the connection. This path should point to a file in PEM format. When not passed, the SDK will use the certificate bundle returned from ``certifi.where()``. 
+ :type ssl_ca_certs: str, optional + :param ssl_verify: SSL verification is performed by default, but can be disabled using the boolean flag when testing with Pinecone Local or troubleshooting a proxy setup. You should never run with SSL verification disabled in production. + :type ssl_verify: bool, optional + :param additional_headers: Additional headers to pass to the API. This is mainly to support internal testing at Pinecone. End users should not need to use this unless following specific instructions to do so. + :type additional_headers: Dict[str, str], optional + :param pool_threads: The number of threads to use for the ThreadPool when using methods that support the ``async_req`` keyword argument. The default number of threads is 5 * the number of CPUs in your execution environment. + :type pool_threads: int, optional + + **Configuration with environment variables** + + If you instantiate the Pinecone client with no arguments, it will attempt to read the API key from the environment variable ``PINECONE_API_KEY``. + + .. code-block:: python + + from pinecone import Pinecone + + pc = Pinecone() + + **Configuration with keyword arguments** + + If you prefer being more explicit in your code, you can also pass the API key as a keyword argument. This is also where you will pass additional configuration options such as proxy settings if you wish to use those. + + .. code-block:: python + + import os + from pinecone import Pinecone + + pc = Pinecone( + api_key=os.environ.get("PINECONE_API_KEY"), + host="https://api-staging.pinecone.io" + ) + + **Environment variables** + + The Pinecone client supports the following environment variables: + + * ``PINECONE_API_KEY``: The API key to use for authentication. If not passed via kwarg, the API key will be read from the environment variable ``PINECONE_API_KEY``. + * ``PINECONE_DEBUG_CURL``: Enable some additional debug logging representing the HTTP requests as curl commands. 
The main use of this is to run calls outside of the SDK to help evaluate whether a problem you are experiencing is due to the API's behavior or the behavior of the SDK itself.
+        * ``PINECONE_ADDITIONAL_HEADERS``: A JSON string of a dictionary of header values to attach to all requests. This is primarily used for internal testing at Pinecone.
+
+        .. warning::
+
+            Be very careful with the ``PINECONE_DEBUG_CURL`` environment variable, as it will print out your API key which forms part of a required authentication header.
+
+        **Proxy configuration**
+
+        If your network setup requires you to interact with Pinecone via a proxy, you will need
+        to pass additional configuration using optional keyword parameters. These optional parameters
+        are forwarded to ``urllib3``, which is the underlying library currently used by the Pinecone client to
+        make HTTP requests. You may find it helpful to refer to the
+        `urllib3 documentation on working with proxies `_
+        while troubleshooting these settings.
+
+        Here is a basic example:
+
+        .. code-block:: python
+
+            from pinecone import Pinecone
+
+            pc = Pinecone(
+                api_key='YOUR_API_KEY',
+                proxy_url='https://your-proxy.com'
+            )
+
+            pc.list_indexes()
+
+        If your proxy requires authentication, you can pass those values in a header dictionary using the ``proxy_headers`` parameter.
+
+        .. code-block:: python
+
+            from pinecone import Pinecone
+            from urllib3 import make_headers
+
+            pc = Pinecone(
+                api_key='YOUR_API_KEY',
+                proxy_url='https://your-proxy.com',
+                proxy_headers=make_headers(proxy_basic_auth='username:password')
+            )
+
+            pc.list_indexes()
+
+
+        **Using proxies with self-signed certificates**
+
+        By default the Pinecone Python client will perform SSL certificate verification
+        using the CA bundle maintained by Mozilla in the `certifi `_ package.
+        If your proxy server is using a self-signed certificate, you will need to pass the path to the certificate
+        in PEM format using the ``ssl_ca_certs`` parameter.
+
+        .. 
code-block:: python
+
+            from pinecone import Pinecone
+            from urllib3 import make_headers
+
+            pc = Pinecone(
+                api_key='YOUR_API_KEY',
+                proxy_url='https://your-proxy.com',
+                proxy_headers=make_headers(proxy_basic_auth='username:password'),
+                ssl_ca_certs='path/to/cert-bundle.pem'
+            )
+
+            pc.list_indexes()
+
+
+        **Disabling SSL verification**
+
+        If you would like to disable SSL verification, you can pass the ``ssl_verify``
+        parameter with a value of ``False``. We do not recommend going to production with SSL verification disabled.
+
+        .. code-block:: python
+
+            from pinecone import Pinecone
+            from urllib3 import make_headers
+
+            pc = Pinecone(
+                api_key='YOUR_API_KEY',
+                proxy_url='https://your-proxy.com',
+                proxy_headers=make_headers(proxy_basic_auth='username:password'),
+                ssl_ca_certs='path/to/cert-bundle.pem',
+                ssl_verify=False
+            )
+
+            pc.list_indexes()
+
+        """
         for deprecated_kwarg in {"config", "openapi_config", "index_api"}:
             if deprecated_kwarg in kwargs:
                 raise NotImplementedError(
diff --git a/pinecone/pinecone_asyncio.py b/pinecone/pinecone_asyncio.py
index b8bd7ad5..13bdb27a 100644
--- a/pinecone/pinecone_asyncio.py
+++ b/pinecone/pinecone_asyncio.py
@@ -44,24 +44,25 @@ class PineconeAsyncio(PineconeAsyncioDBControlInterface):
     """
-    `PineconeAsyncio` is an asyncio client for interacting with Pinecone's control plane API.
+    ``PineconeAsyncio`` is an asyncio client for interacting with Pinecone's control plane API.

     This class implements methods for managing and interacting with Pinecone resources
     such as collections and indexes.

-    To perform data operations such as inserting and querying vectors, use the `IndexAsyncio` class.
+    To perform data operations such as inserting and querying vectors, use the ``IndexAsyncio`` class.

-    ```python
-    import asyncio
-    from pinecone import Pinecone
+    .. 
code-block:: python - async def main(): - pc = Pinecone() - async with pc.IndexAsyncio(host="my-index.pinecone.io") as idx: - await idx.upsert(vectors=[(1, [1, 2, 3]), (2, [4, 5, 6])]) + import asyncio + from pinecone import Pinecone + + async def main(): + pc = Pinecone() + async with pc.IndexAsyncio(host="my-index.pinecone.io") as idx: + await idx.upsert(vectors=[(1, [1, 2, 3]), (2, [4, 5, 6])]) + + asyncio.run(main()) - asyncio.run(main()) - ``` """ def __init__( @@ -122,33 +123,33 @@ async def close(self): After close has been called, the client instance should not be used. - ```python - import asyncio - from pinecone import PineconeAsyncio + .. code-block:: python - async def main(): - pc = PineconeAsyncio() - desc = await pc.describe_index(name="my-index") - await pc.close() + import asyncio + from pinecone import PineconeAsyncio - asyncio.run(main()) - ``` + async def main(): + pc = PineconeAsyncio() + desc = await pc.describe_index(name="my-index") + await pc.close() + + asyncio.run(main()) If you are using the client as a context manager, the close method is called automatically when exiting. - ```python - import asyncio - from pinecone import PineconeAsyncio + .. code-block:: python - async def main(): - async with PineconeAsyncio() as pc: - desc = await pc.describe_index(name="my-index") + import asyncio + from pinecone import PineconeAsyncio - # No need to call close in this case because the "async with" syntax - # automatically calls close when exiting the block. - asyncio.run(main()) - ``` + async def main(): + async with PineconeAsyncio() as pc: + desc = await pc.describe_index(name="my-index") + + # No need to call close in this case because the "async with" syntax + # automatically calls close when exiting the block. 
+ asyncio.run(main()) """ await self.db._index_api.api_client.close() diff --git a/pinecone/pinecone_interface_asyncio.py b/pinecone/pinecone_interface_asyncio.py index 4b8e1cc1..843ee83a 100644 --- a/pinecone/pinecone_interface_asyncio.py +++ b/pinecone/pinecone_interface_asyncio.py @@ -50,243 +50,242 @@ def __init__( **kwargs, ): """ - The `PineconeAsyncio` class is the main entry point for interacting with Pinecone using asyncio. + The ``PineconeAsyncio`` class is the main entry point for interacting with Pinecone using asyncio. It is used to create, delete, and manage your indexes and collections. Except for needing to use - `async with` when instantiating the client and `await` when calling its methods, the functionality - provided by this class is extremely similar to the functionality of the `Pinecone` class. + ``async with`` when instantiating the client and ``await`` when calling its methods, the functionality + provided by this class is extremely similar to the functionality of the ``Pinecone`` class. - :param api_key: The API key to use for authentication. If not passed via kwarg, the API key will be read from the environment variable `PINECONE_API_KEY`. + :param api_key: The API key to use for authentication. If not passed via kwarg, the API key will be read from the environment variable ``PINECONE_API_KEY``. :type api_key: str, optional :param host: The control plane host to connect to. :type host: str, optional - :param proxy_url: The URL of the proxy to use for the connection. Default: `None` + :param proxy_url: The URL of the proxy to use for the connection. Default: ``None`` :type proxy_url: str, optional - :param proxy_headers: Additional headers to pass to the proxy. Use this if your proxy setup requires authentication. Default: `{}` + :param proxy_headers: Additional headers to pass to the proxy. Use this if your proxy setup requires authentication. 
Default: ``{}`` :type proxy_headers: Dict[str, str], optional - :param ssl_ca_certs: The path to the SSL CA certificate bundle to use for the connection. This path should point to a file in PEM format. Default: `None` + :param ssl_ca_certs: The path to the SSL CA certificate bundle to use for the connection. This path should point to a file in PEM format. Default: ``None`` :type ssl_ca_certs: str, optional - :param ssl_verify: SSL verification is performed by default, but can be disabled using the boolean flag. Default: `True` + :param ssl_verify: SSL verification is performed by default, but can be disabled using the boolean flag. Default: ``True`` :type ssl_verify: bool, optional - :param config: A `pinecone.config.Config` object. If passed, the `api_key` and `host` parameters will be ignored. + :param config: A ``pinecone.config.Config`` object. If passed, the ``api_key`` and ``host`` parameters will be ignored. :type config: pinecone.config.Config, optional - :param additional_headers: Additional headers to pass to the API. Default: `{}` + :param additional_headers: Additional headers to pass to the API. Default: ``{}`` :type additional_headers: Dict[str, str], optional - ### Managing the async context + **Managing the async context** - The `PineconeAsyncio` class relies on an underlying `aiohttp` `ClientSession` to make asynchronous HTTP requests. To ensure that the session is properly closed, you - should use the `async with` syntax when creating a `PineconeAsyncio` object. This will ensure that the session is properly closed when the context is exited. + The ``PineconeAsyncio`` class relies on an underlying ``aiohttp`` ``ClientSession`` to make asynchronous HTTP requests. To ensure that the session is properly closed, you + should use the ``async with`` syntax when creating a ``PineconeAsyncio`` object. This will ensure that the session is properly closed when the context is exited. - ```python - import asyncio - from pinecone import PineconeAsyncio + .. 
code-block:: python - async def main(): - async with PineconeAsyncio(api_key='YOUR_API_KEY') as pc: - # Do async things - index_list = await pc.list_indexes() + import asyncio + from pinecone import PineconeAsyncio + + async def main(): + async with PineconeAsyncio(api_key='YOUR_API_KEY') as pc: + # Do async things + index_list = await pc.list_indexes() - asyncio.run(main()) - ``` + asyncio.run(main()) - As an alternative, if you prefer to avoid code with a nested appearance and are willing to manage cleanup yourself, you can await the `close()` method to close the session when you are done. + As an alternative, if you prefer to avoid code with a nested appearance and are willing to manage cleanup yourself, you can await the ``close()`` method to close the session when you are done. - ```python - import asyncio - from pinecone import PineconeAsyncio + .. code-block:: python - async def main(): - pc = PineconeAsyncio(api_key='YOUR_API_KEY') + import asyncio + from pinecone import PineconeAsyncio - # Do async things - index_list = await pc.list_indexes() + async def main(): + pc = PineconeAsyncio(api_key='YOUR_API_KEY') + + # Do async things + index_list = await pc.list_indexes() - # You're responsible for calling this yourself - await pc.close() + # You're responsible for calling this yourself + await pc.close() - asyncio.run(main()) - ``` + asyncio.run(main()) Failing to do this may result in error messages appearing from the underlyling aiohttp library. - ### Configuration with environment variables + **Configuration with environment variables** - If you instantiate the Pinecone client with no arguments, it will attempt to read the API key from the environment variable `PINECONE_API_KEY`. + If you instantiate the Pinecone client with no arguments, it will attempt to read the API key from the environment variable ``PINECONE_API_KEY``. - ```python - import asyncio - from pinecone import PineconeAsyncio + .. 
code-block:: python - async def main(): - async with PineconeAsyncio() as pc: - # Do async things - index_list = await pc.list_indexes() + import asyncio + from pinecone import PineconeAsyncio + + async def main(): + async with PineconeAsyncio() as pc: + # Do async things + index_list = await pc.list_indexes() - asyncio.run(main()) - ``` + asyncio.run(main()) - ### Configuration with keyword arguments + **Configuration with keyword arguments** If you prefer being more explicit in your code, you can also pass the API as - ### Configuration with environment variables + **Configuration with environment variables** - If you instantiate the Pinecone client with no arguments, it will attempt to read the API key from the environment variable `PINECONE_API_KEY`. + If you instantiate the Pinecone client with no arguments, it will attempt to read the API key from the environment variable ``PINECONE_API_KEY``. - ```python - import asyncio - from pinecone import PineconeAsyncio + .. code-block:: python - async def main(): - async with PineconeAsyncio() as pc: - # Do async things - index_list = await pc.list_indexes() + import asyncio + from pinecone import PineconeAsyncio - asyncio.run(main()) - ``` + async def main(): + async with PineconeAsyncio() as pc: + # Do async things + index_list = await pc.list_indexes() - ### Configuration with keyword arguments + asyncio.run(main()) - If you prefer being more explicit in your code, you can also pass the API as + **Configuration with environment variables** + If you instantiate the Pinecone client with no arguments, it will attempt to read the API key from the environment variable ``PINECONE_API_KEY``. - ### Configuration with environment variables + .. code-block:: python - If you instantiate the Pinecone client with no arguments, it will attempt to read the API key from the environment variable `PINECONE_API_KEY`. 
+ import asyncio + from pinecone import PineconeAsyncio - ```python - import asyncio - from pinecone import PineconeAsyncio + async def main(): + async with PineconeAsyncio() as pc: + # Do async things + index_list = await pc.list_indexes() - async def main(): - async with PineconeAsyncio() as pc: - # Do async things - index_list = await pc.list_indexes() + asyncio.run(main()) - asyncio.run(main()) - ``` - - ### Configuration with keyword arguments + **Configuration with keyword arguments** If you prefer being more explicit in your code, you can also pass the API as a keyword argument. - ```python - import os - import asyncio - from pinecone import PineconeAsyncio + .. code-block:: python - async def main(): - async with Pinecone(api_key=os.environ.get("PINECONE_API_KEY")) as pc: - # Do async things - index_list = await pc.list_indexes() + import os + import asyncio + from pinecone import PineconeAsyncio + + async def main(): + async with Pinecone(api_key=os.environ.get("PINECONE_API_KEY")) as pc: + # Do async things + index_list = await pc.list_indexes() + + asyncio.run(main()) - asyncio.run(main()) - ``` - ### Environment variables + **Environment variables** The Pinecone client supports the following environment variables: - - `PINECONE_API_KEY`: The API key to use for authentication. If not passed via - kwarg, the API key will be read from the environment variable `PINECONE_API_KEY`. + - ``PINECONE_API_KEY``: The API key to use for authentication. If not passed via + kwarg, the API key will be read from the environment variable ``PINECONE_API_KEY``. - ### Proxy configuration + **Proxy configuration** If your network setup requires you to interact with Pinecone via a proxy, you will need to pass additional configuration using optional keyword parameters. 
These optional parameters
-    are used to configure an SSL context and passed to `aiohttp`, which is the underlying library
+    are used to configure an SSL context and passed to ``aiohttp``, which is the underlying library
     currently used by the PineconeAsyncio client to make HTTP requests.

     Here is a basic example:

-    ```python
-    import asyncio
-    from pinecone import PineconeAsyncio
+    .. code-block:: python

-    async def main():
-        async with PineconeAsyncio(
-            api_key='YOUR_API_KEY',
-            proxy_url='https://your-proxy.com'
-        ) as pc:
-            # Do async things
-            index_list = await pc.list_indexes()
+        import asyncio
+        from pinecone import PineconeAsyncio

-    asyncio.run(main())
-    ```
+        async def main():
+            async with PineconeAsyncio(
+                api_key='YOUR_API_KEY',
+                proxy_url='https://your-proxy.com'
+            ) as pc:
+                # Do async things
+                index_list = await pc.list_indexes()

-    ### Using proxies with self-signed certificates
+        asyncio.run(main())
+
+
+    **Using proxies with self-signed certificates**

     By default the Pinecone Python client will perform SSL certificate verification
-    using the CA bundle maintained by Mozilla in the [certifi](https://pypi.org/project/certifi/) package.
+    using the CA bundle maintained by Mozilla in the `certifi <https://pypi.org/project/certifi/>`_ package.
     If your proxy server is using a self-signed certificate, you will need to pass the path to the certificate
-    in PEM format using the `ssl_ca_certs` parameter.
-
-    ```python
-    import asyncio
-    from pinecone import PineconeAsyncio
-
-    async def main():
-        async with PineconeAsyncio(
-            api_key='YOUR_API_KEY',
-            proxy_url='https://your-proxy.com',
-            ssl_ca_certs='path/to/cert-bundle.pem'
-        ) as pc:
-            # Do async things
-            await pc.list_indexes()
+    in PEM format using the ``ssl_ca_certs`` parameter.
+
+    ..
code-block:: python + + import asyncio + from pinecone import PineconeAsyncio + + async def main(): + async with PineconeAsyncio( + api_key='YOUR_API_KEY', + proxy_url='https://your-proxy.com', + ssl_ca_certs='path/to/cert-bundle.pem' + ) as pc: + # Do async things + await pc.list_indexes() - asyncio.run(main()) - ``` + asyncio.run(main()) - ### Disabling SSL verification - If you would like to disable SSL verification, you can pass the `ssl_verify` - parameter with a value of `False`. We do not recommend going to production with SSL verification disabled + **Disabling SSL verification** + + If you would like to disable SSL verification, you can pass the ``ssl_verify`` + parameter with a value of ``False``. We do not recommend going to production with SSL verification disabled but there are situations where this is useful such as testing with Pinecone Local running in a docker container. - ```python - import asyncio - from pinecone import PineconeAsyncio + .. code-block:: python + + import asyncio + from pinecone import PineconeAsyncio + + async def main(): + async with PineconeAsyncio( + api_key='YOUR_API_KEY', + ssl_verify=False + ) as pc: + if not await pc.has_index('my_index'): + await pc.create_index( + name='my_index', + dimension=1536, + metric='cosine', + spec=ServerlessSpec(cloud='aws', region='us-west-2') + ) - async def main(): - async with PineconeAsyncio( - api_key='YOUR_API_KEY', - ssl_verify=False - ) as pc: - if not await pc.has_index('my_index'): - await pc.create_index( - name='my_index', - dimension=1536, - metric='cosine', - spec=ServerlessSpec(cloud='aws', region='us-west-2') - ) + asyncio.run(main()) - asyncio.run(main()) - ``` - ### Passing additional headers + **Passing additional headers** If you need to pass additional headers with each request to the Pinecone API, you can do so using the - `additional_headers` parameter. This is primarily for internal testing and end-users shouldn't need to + ``additional_headers`` parameter. 
This is primarily for internal testing and end-users shouldn't need to do this unless specifically instructed to do so. - ```python - import asyncio - from pinecone import PineconeAsyncio + .. code-block:: python - async def main(): - async with PineconeAsyncio( - api_key='YOUR_API_KEY', - host='https://api-staging.pinecone.io', - additional_headers={'X-My-Header': 'my-value'} - ) as pc: - # Do async things - await pc.list_indexes() + import asyncio + from pinecone import PineconeAsyncio + + async def main(): + async with PineconeAsyncio( + api_key='YOUR_API_KEY', + host='https://api-staging.pinecone.io', + additional_headers={'X-My-Header': 'my-value'} + ) as pc: + # Do async things + await pc.list_indexes() - asyncio.run(main()) - ``` + asyncio.run(main()) """ pass @@ -309,102 +308,103 @@ async def create_index( cannot be changed once created. Allowed characters are lowercase letters, numbers, and hyphens and the name may not begin or end with hyphens. Maximum length is 45 characters. :type name: str - :param metric: Type of similarity metric used in the vector index when querying, one of `{"cosine", "dotproduct", "euclidean"}`. + :param metric: Type of similarity metric used in the vector index when querying, one of ``{"cosine", "dotproduct", "euclidean"}``. :type metric: str, optional :param spec: A dictionary containing configurations describing how the index should be deployed. For serverless indexes, specify region and cloud. For pod indexes, specify replicas, shards, pods, pod_type, metadata_config, and source_collection. - Alternatively, use the `ServerlessSpec` or `PodSpec` objects to specify these configurations. + Alternatively, use the ``ServerlessSpec`` or ``PodSpec`` objects to specify these configurations. :type spec: Dict - :param dimension: If you are creating an index with `vector_type="dense"` (which is the default), you need to specify `dimension` to indicate the size of your vectors. 
+ :param dimension: If you are creating an index with ``vector_type="dense"`` (which is the default), you need to specify ``dimension`` to indicate the size of your vectors. This should match the dimension of the embeddings you will be inserting. For example, if you are using - OpenAI's CLIP model, you should use `dimension=1536`. Dimension is a required field when - creating an index with `vector_type="dense"` and should not be passed when `vector_type="sparse"`. + OpenAI's CLIP model, you should use ``dimension=1536``. Dimension is a required field when + creating an index with ``vector_type="dense"`` and should not be passed when ``vector_type="sparse"``. :type dimension: int :type timeout: int, optional :param timeout: Specify the number of seconds to wait until index gets ready. If None, wait indefinitely; if >=0, time out after this many seconds; if -1, return immediately and do not wait. :param deletion_protection: If enabled, the index cannot be deleted. If disabled, the index can be deleted. :type deletion_protection: Optional[Literal["enabled", "disabled"]] - :param vector_type: The type of vectors to be stored in the index. One of `{"dense", "sparse"}`. + :param vector_type: The type of vectors to be stored in the index. One of ``{"dense", "sparse"}``. :type vector_type: str, optional :param tags: Tags are key-value pairs you can attach to indexes to better understand, organize, and identify your resources. Some example use cases include tagging indexes with the name of the model that generated the embeddings, the date the index was created, or the purpose of the index. :type tags: Optional[Dict[str, str]] - :return: A `IndexModel` instance containing a description of the index that was created. 
- - ### Creating a serverless index - - ```python - import os - import asyncio - - from pinecone import ( - PineconeAsyncio, - ServerlessSpec, - CloudProvider, - AwsRegion, - Metric, - DeletionProtection, - VectorType - ) - - async def main(): - async with PineconeAsyncio(api_key=os.environ.get("PINECONE_API_KEY")) as pc: - await pc.create_index( - name="my_index", - dimension=1536, - metric=Metric.COSINE, - spec=ServerlessSpec( - cloud=CloudProvider.AWS, - region=AwsRegion.US_WEST_2 - ), - deletion_protection=DeletionProtection.DISABLED, - vector_type=VectorType.DENSE, - tags={ - "model": "clip", - "app": "image-search", - "env": "testing" - } - ) - - asyncio.run(main()) - ``` - - ### Creating a pod index - - ```python - import os - import asyncio - - from pinecone import ( - Pinecone, - PodSpec, - PodIndexEnvironment, - PodType, - Metric, - DeletionProtection, - VectorType - ) - - async def main(): - async with Pinecone(api_key=os.environ.get("PINECONE_API_KEY")) as pc: - await pc.create_index( - name="my_index", - dimension=1536, - metric=Metric.COSINE, - spec=PodSpec( - environment=PodIndexEnvironment.US_EAST4_GCP, - pod_type=PodType.P1_X1 - ), - deletion_protection=DeletionProtection.DISABLED, - tags={ - "model": "clip", - "app": "image-search", - "env": "testing" - } - ) - - asyncio.run(main()) - ``` + :return: A ``IndexModel`` instance containing a description of the index that was created. + + **Creating a serverless index** + + .. 
code-block:: python + + import os + import asyncio + + from pinecone import ( + PineconeAsyncio, + ServerlessSpec, + CloudProvider, + AwsRegion, + Metric, + DeletionProtection, + VectorType + ) + + async def main(): + async with PineconeAsyncio(api_key=os.environ.get("PINECONE_API_KEY")) as pc: + await pc.create_index( + name="my_index", + dimension=1536, + metric=Metric.COSINE, + spec=ServerlessSpec( + cloud=CloudProvider.AWS, + region=AwsRegion.US_WEST_2 + ), + deletion_protection=DeletionProtection.DISABLED, + vector_type=VectorType.DENSE, + tags={ + "model": "clip", + "app": "image-search", + "env": "testing" + } + ) + + asyncio.run(main()) + + + **Creating a pod index** + + .. code-block:: python + + import os + import asyncio + + from pinecone import ( + Pinecone, + PodSpec, + PodIndexEnvironment, + PodType, + Metric, + DeletionProtection, + VectorType + ) + + async def main(): + async with Pinecone(api_key=os.environ.get("PINECONE_API_KEY")) as pc: + await pc.create_index( + name="my_index", + dimension=1536, + metric=Metric.COSINE, + spec=PodSpec( + environment=PodIndexEnvironment.US_EAST4_GCP, + pod_type=PodType.P1_X1 + ), + deletion_protection=DeletionProtection.DISABLED, + tags={ + "model": "clip", + "app": "image-search", + "env": "testing" + } + ) + + asyncio.run(main()) """ pass @@ -424,15 +424,15 @@ async def create_index_for_model( cannot be changed once created. Allowed characters are lowercase letters, numbers, and hyphens and the name may not begin or end with hyphens. Maximum length is 45 characters. :type name: str - :param cloud: The cloud provider to use for the index. One of `{"aws", "gcp", "azure"}`. + :param cloud: The cloud provider to use for the index. One of ``{"aws", "gcp", "azure"}``. :type cloud: str - :param region: The region to use for the index. Enum objects `AwsRegion`, `GcpRegion`, and `AzureRegion` are also available to help you quickly set these parameters, but may not be up to date as new regions become available. 
+ :param region: The region to use for the index. Enum objects ``AwsRegion``, ``GcpRegion``, and ``AzureRegion`` are also available to help you quickly set these parameters, but may not be up to date as new regions become available. :type region: str - :param embed: The embedding configuration for the index. This param accepts a dictionary or an instance of the `IndexEmbed` object. + :param embed: The embedding configuration for the index. This param accepts a dictionary or an instance of the ``IndexEmbed`` object. :type embed: Union[Dict, IndexEmbed] :param tags: Tags are key-value pairs you can attach to indexes to better understand, organize, and identify your resources. Some example use cases include tagging indexes with the name of the model that generated the embeddings, the date the index was created, or the purpose of the index. :type tags: Optional[Dict[str, str]] - :param deletion_protection: If enabled, the index cannot be deleted. If disabled, the index can be deleted. This setting can be changed with `configure_index`. + :param deletion_protection: If enabled, the index cannot be deleted. If disabled, the index can be deleted. This setting can be changed with ``configure_index``. :type deletion_protection: Optional[Literal["enabled", "disabled"]] :type timeout: Optional[int] :param timeout: Specify the number of seconds to wait until index is ready to receive data. If None, wait indefinitely; if >=0, time out after this many seconds; @@ -442,44 +442,46 @@ async def create_index_for_model( This method is used to create a Serverless index that is configured for use with Pinecone's integrated inference models. - The resulting index can be described, listed, configured, and deleted like any other Pinecone index with the `describe_index`, `list_indexes`, `configure_index`, and `delete_index` methods. - - After the model is created, you can upsert records into the index with the `upsert_records` method, and search your records with the `search` method. 
-
-    ```python
-    import asyncio
-
-    from pinecone import (
-        PineconeAsyncio,
-        IndexEmbed,
-        CloudProvider,
-        AwsRegion,
-        EmbedModel,
-        Metric,
-    )
-
-    async def main():
-        async with PineconeAsyncio() as pc:
-            if not await pc.has_index("book-search"):
-                desc = await pc.create_index_for_model(
-                    name="book-search",
-                    cloud=CloudProvider.AWS,
-                    region=AwsRegion.US_EAST_1,
-                    embed=IndexEmbed(
-                        model=EmbedModel.Multilingual_E5_Large,
-                        metric=Metric.COSINE,
-                        field_map={
-                            "text": "description",
-                        },
+    The resulting index can be described, listed, configured, and deleted like any other Pinecone index with the ``describe_index``, ``list_indexes``, ``configure_index``, and ``delete_index`` methods.
+
+    After the model is created, you can upsert records into the index with the ``upsert_records`` method, and search your records with the ``search`` method.
+
+    .. code-block:: python
+
+        import asyncio
+
+        from pinecone import (
+            PineconeAsyncio,
+            IndexEmbed,
+            CloudProvider,
+            AwsRegion,
+            EmbedModel,
+            Metric,
+        )
+
+        async def main():
+            async with PineconeAsyncio() as pc:
+                if not await pc.has_index("book-search"):
+                    desc = await pc.create_index_for_model(
+                        name="book-search",
+                        cloud=CloudProvider.AWS,
+                        region=AwsRegion.US_EAST_1,
+                        embed=IndexEmbed(
+                            model=EmbedModel.Multilingual_E5_Large,
+                            metric=Metric.COSINE,
+                            field_map={
+                                "text": "description",
+                            },
+                        )
                     )
-                )

-    asyncio.run(main())
-    ```
+        asyncio.run(main())
+
+    See also:

-    To see the available cloud regions, see this [Pinecone documentation](https://docs.pinecone.io/troubleshooting/available-cloud-regions) page.
+    * See `available cloud regions <https://docs.pinecone.io/troubleshooting/available-cloud-regions>`_
+    * See the `Model Gallery <https://docs.pinecone.io/models/overview>`_ to learn about available models

-    See the [Model Gallery](https://docs.pinecone.io/models/overview) to learn about available models.
     """
     pass

@@ -496,13 +498,13 @@ def create_index_from_backup(
     """
     Create an index from a backup.

-    Call `list_backups` to get a list of backups for your project.
+ Call ``list_backups`` to get a list of backups for your project. :param name: The name of the index to create. :type name: str :param backup_id: The ID of the backup to restore. :type backup_id: str - :param deletion_protection: If enabled, the index cannot be deleted. If disabled, the index can be deleted. This setting can be changed with `configure_index`. + :param deletion_protection: If enabled, the index cannot be deleted. If disabled, the index can be deleted. This setting can be changed with ``configure_index``. :type deletion_protection: Optional[Literal["enabled", "disabled"]] :param tags: Tags are key-value pairs you can attach to indexes to better understand, organize, and identify your resources. Some example use cases include tagging indexes with the name of the model that generated the embeddings, the date the index was created, or the purpose of the index. :type tags: Optional[Dict[str, str]] @@ -530,47 +532,48 @@ async def delete_index(self, name: str, timeout: Optional[int] = None): the index, but the termination is not synchronous because resources take a few moments to be released. - By default the `delete_index` method will block until polling of the `describe_index` method + By default the ``delete_index`` method will block until polling of the ``describe_index`` method shows that the delete operation has completed. If you prefer to return immediately and not - wait for the index to be deleted, you can pass `timeout=-1` to the method. + wait for the index to be deleted, you can pass ``timeout=-1`` to the method. - After the delete request is submitted, polling `describe_index` will show that the index - transitions into a `Terminating` state before eventually resulting in a 404 after it has been removed. + After the delete request is submitted, polling ``describe_index`` will show that the index + transitions into a ``Terminating`` state before eventually resulting in a 404 after it has been removed. 
- This operation can fail if the index is configured with `deletion_protection="enabled"`. - In this case, you will need to call `configure_index` to disable deletion protection before + This operation can fail if the index is configured with ``deletion_protection="enabled"``. + In this case, you will need to call ``configure_index`` to disable deletion protection before you can delete the index. - ```python - import asyncio + .. code-block:: python + + import asyncio - from pinecone import PineconeAsyncio + from pinecone import PineconeAsyncio - async def main(): - pc = PineconeAsyncio() + async def main(): + pc = PineconeAsyncio() + + index_name = "my_index" + desc = await pc.describe_index(name=index_name) - index_name = "my_index" - desc = await pc.describe_index(name=index_name) + if desc.deletion_protection == "enabled": + # If for some reason deletion protection is enabled, you will need to disable it first + # before you can delete the index. But use caution as this operation is not reversible + # and if somebody enabled deletion protection, they probably had a good reason. + await pc.configure_index(name=index_name, deletion_protection="disabled") - if desc.deletion_protection == "enabled": - # If for some reason deletion protection is enabled, you will need to disable it first - # before you can delete the index. But use caution as this operation is not reversible - # and if somebody enabled deletion protection, they probably had a good reason. - await pc.configure_index(name=index_name, deletion_protection="disabled") + await pc.delete_index(name=index_name) + await pc.close() - await pc.delete_index(name=index_name) - await pc.close() + asyncio.run(main()) - asyncio.run(main()) - ``` """ pass @abstractmethod async def list_indexes(self) -> "IndexList": """ - :return: Returns an `IndexList` object, which is iterable and contains a - list of `IndexModel` objects. 
The `IndexList` also has a convenience method `names()` + :return: Returns an ``IndexList`` object, which is iterable and contains a + list of ``IndexModel`` objects. The ``IndexList`` also has a convenience method ``names()`` which returns a list of index names for situations where you just want to iterate over all index names. @@ -579,31 +582,32 @@ async def list_indexes(self) -> "IndexList": The results include a description of all indexes in your project, including the index name, dimension, metric, status, and spec. - If you simply want to check whether an index exists, see the `has_index()` convenience method. + If you simply want to check whether an index exists, see the ``has_index()`` convenience method. - You can use the `list_indexes()` method to iterate over descriptions of every index in your project. + You can use the ``list_indexes()`` method to iterate over descriptions of every index in your project. - ```python - import asyncio + .. code-block:: python - from pinecone import PineconeAsyncio + import asyncio - async def main(): - pc = PineconeAsyncio() + from pinecone import PineconeAsyncio - available_indexes = await pc.list_indexes() - for index in available_indexes: - print(index.name) - print(index.dimension) - print(index.metric) - print(index.status) - print(index.host) - print(index.spec) + async def main(): + pc = PineconeAsyncio() - await pc.close() + available_indexes = await pc.list_indexes() + for index in available_indexes: + print(index.name) + print(index.dimension) + print(index.metric) + print(index.status) + print(index.host) + print(index.spec) + + await pc.close() + + asyncio.run(main()) - asyncio.run(main()) - ``` """ pass @@ -611,14 +615,14 @@ async def main(): async def describe_index(self, name: str) -> "IndexModel": """ :param name: the name of the index to describe. - :return: Returns an `IndexModel` object - which gives access to properties such as the - index name, dimension, metric, host url, status, - and spec. 
+ :return: Returns an ``IndexModel`` object + which gives access to properties such as the + index name, dimension, metric, host url, status, + and spec. Describes a Pinecone index. - ### Getting your index host url + **Getting your index host url** In a real production situation, you probably want to store the host url in an environment variable so you @@ -626,46 +630,47 @@ async def describe_index(self, name: str) -> "IndexModel": every time you want to use the index. But this example shows how to get the value from the API using describe_index. - ```python - import asyncio - from pinecone import Pinecone, PineconeAsyncio, Index - - async def main(): - pc = PineconeAsyncio() - - index_name="my_index" - description = await pc.describe_index(name=index_name) - print(description) - # { - # "name": "my_index", - # "metric": "cosine", - # "host": "my_index-dojoi3u.svc.aped-4627-b74a.pinecone.io", - # "spec": { - # "serverless": { - # "cloud": "aws", - # "region": "us-east-1" - # } - # }, - # "status": { - # "ready": true, - # "state": "Ready" - # }, - # "vector_type": "dense", - # "dimension": 1024, - # "deletion_protection": "enabled", - # "tags": { - # "environment": "production" - # } - # } - - print(f"Your index is hosted at {description.host}") - await pc.close() - - async with Pinecone().IndexAsyncio(host=description.host) as idx: - await idx.upsert(vectors=[...]) - - asyncio.run(main()) - ``` + .. 
code-block:: python + + import asyncio + from pinecone import Pinecone, PineconeAsyncio, Index + + async def main(): + pc = PineconeAsyncio() + + index_name="my_index" + description = await pc.describe_index(name=index_name) + print(description) + # { + # "name": "my_index", + # "metric": "cosine", + # "host": "my_index-dojoi3u.svc.aped-4627-b74a.pinecone.io", + # "spec": { + # "serverless": { + # "cloud": "aws", + # "region": "us-east-1" + # } + # }, + # "status": { + # "ready": true, + # "state": "Ready" + # }, + # "vector_type": "dense", + # "dimension": 1024, + # "deletion_protection": "enabled", + # "tags": { + # "environment": "production" + # } + # } + + print(f"Your index is hosted at {description.host}") + await pc.close() + + async with Pinecone().IndexAsyncio(host=description.host) as idx: + await idx.upsert(vectors=[...]) + + asyncio.run(main()) + """ pass @@ -673,28 +678,28 @@ async def main(): async def has_index(self, name: str) -> bool: """ :param name: The name of the index to check for existence. - :return: Returns `True` if the index exists, `False` otherwise. + :return: Returns ``True`` if the index exists, ``False`` otherwise. Checks if a Pinecone index exists. - ```python - import asyncio - from pinecone import PineconeAsyncio, ServerlessSpec - - async def main(): - async with PineconeAsyncio() as pc: - index_name = "my_index" - if not await pc.has_index(index_name): - print("Index does not exist, creating...") - pc.create_index( - name=index_name, - dimension=768, - metric="cosine", - spec=ServerlessSpec(cloud="aws", region="us-west-2") - ) + .. 
code-block:: python
+
+        import asyncio
+        from pinecone import PineconeAsyncio, ServerlessSpec
+
+        async def main():
+            async with PineconeAsyncio() as pc:
+                index_name = "my_index"
+                if not await pc.has_index(index_name):
+                    print("Index does not exist, creating...")
+                    pc.create_index(
+                        name=index_name,
+                        dimension=768,
+                        metric="cosine",
+                        spec=ServerlessSpec(cloud="aws", region="us-west-2")
+                    )

-    asyncio.run(main())
-    ```
+        asyncio.run(main())
     """
     pass

@@ -711,95 +716,98 @@ async def configure_index(
     :param: name: the name of the Index
     :param: replicas: the desired number of replicas, lowest value is 0.
     :param: pod_type: the new pod_type for the index. To learn more about the
-        available pod types, please see [Understanding Indexes](https://docs.pinecone.io/docs/indexes)
+        available pod types, please see `Understanding Indexes <https://docs.pinecone.io/docs/indexes>`_
     :param: deletion_protection: If set to 'enabled', the index cannot be deleted. If 'disabled', the index can be deleted.
     :param: tags: A dictionary of tags to apply to the index. Tags are key-value pairs that can be used to organize and manage indexes. To remove a tag, set the value to "". Tags passed to configure_index will be merged with existing tags and any with the value empty string will be removed.

     This method is used to modify an index's configuration. It can be used to:

-    - Scale a pod-based index horizontally using `replicas`
-    - Scale a pod-based index vertically using `pod_type`
-    - Enable or disable deletion protection using `deletion_protection`
-    - Add, change, or remove tags using `tags`
+    - Scale a pod-based index horizontally using ``replicas``
+    - Scale a pod-based index vertically using ``pod_type``
+    - Enable or disable deletion protection using ``deletion_protection``
+    - Add, change, or remove tags using ``tags``
+
+    **Scaling pod-based indexes**

-    ## Scaling pod-based indexes
+    To scale your pod-based index, you pass a ``replicas`` and/or ``pod_type`` param to the ``configure_index`` method.
``pod_type`` may be a string or a value from the ``PodType`` enum. - To scale your pod-based index, you pass a `replicas` and/or `pod_type` param to the `configure_index` method. `pod_type` may be a string or a value from the `PodType` enum. + .. code-block:: python - ```python - import asyncio - from pinecone import PineconeAsyncio, PodType + import asyncio + from pinecone import PineconeAsyncio, PodType - async def main(): - async with PineconeAsyncio() as pc: - await pc.configure_index( - name="my_index", - replicas=2, - pod_type=PodType.P1_X2 - ) + async def main(): + async with PineconeAsyncio() as pc: + await pc.configure_index( + name="my_index", + replicas=2, + pod_type=PodType.P1_X2 + ) - asyncio.run(main()) - ``` + asyncio.run(main()) - After providing these new configurations, you must call `describe_index` to see the status of the index as the changes are applied. - ## Enabling or disabling deletion protection + After providing these new configurations, you must call ``describe_index`` to see the status of the index as the changes are applied. - To enable or disable deletion protection, pass the `deletion_protection` parameter to the `configure_index` method. When deletion protection - is enabled, the index cannot be deleted with the `delete_index` method. + **Enabling or disabling deletion protection** - ```python - import asyncio - from pinecone import PineconeAsyncio, DeletionProtection + To enable or disable deletion protection, pass the ``deletion_protection`` parameter to the ``configure_index`` method. When deletion protection + is enabled, the index cannot be deleted with the ``delete_index`` method. - async def main(): - async with PineconeAsyncio() as pc: - # Enable deletion protection - await pc.configure_index( - name="my_index", - deletion_protection=DeletionProtection.ENABLED - ) + .. code-block:: python - # Call describe_index to see the change was applied. 
- desc = await pc.describe_index("my_index") - assert desc.deletion_protection == "enabled" + import asyncio + from pinecone import PineconeAsyncio, DeletionProtection - # Disable deletion protection - await pc.configure_index( - name="my_index", - deletion_protection=DeletionProtection.DISABLED - ) + async def main(): + async with PineconeAsyncio() as pc: + # Enable deletion protection + await pc.configure_index( + name="my_index", + deletion_protection=DeletionProtection.ENABLED + ) - asyncio.run(main()) - ``` + # Call describe_index to see the change was applied. + desc = await pc.describe_index("my_index") + assert desc.deletion_protection == "enabled" - ## Adding, changing, or removing tags + # Disable deletion protection + await pc.configure_index( + name="my_index", + deletion_protection=DeletionProtection.DISABLED + ) - To add, change, or remove tags, pass the `tags` parameter to the `configure_index` method. When tags are passed using `configure_index`, + asyncio.run(main()) + + + **Adding, changing, or removing tags** + + To add, change, or remove tags, pass the ``tags`` parameter to the ``configure_index`` method. When tags are passed using ``configure_index``, they are merged with any existing tags already on the index. To remove a tag, set the value of the key to an empty string. - ```python - import asyncio + .. 
code-block:: python + + import asyncio + + from pinecone import PineconeAsyncio - from pinecone import PineconeAsyncio + async def main(): + async with PineconeAsyncio() as pc: + # Add a tag + await pc.configure_index(name="my_index", tags={"environment": "staging"}) - async def main(): - async with PineconeAsyncio() as pc: - # Add a tag - await pc.configure_index(name="my_index", tags={"environment": "staging"}) + # Change a tag + await pc.configure_index(name="my_index", tags={"environment": "production"}) - # Change a tag - await pc.configure_index(name="my_index", tags={"environment": "production"}) + # Remove a tag + await pc.configure_index(name="my_index", tags={"environment": ""}) - # Remove a tag - await pc.configure_index(name="my_index", tags={"environment": ""}) + # Call describe_index to verify the tags were changed + desc = await pc.describe_index("my_index") + print(desc.tags) - # Call describe_index to view the tags are changed - await pc.describe_index("my_index") - print(desc.tags) + asyncio.run(main()) - asyncio.run(main()) - ``` """ pass @@ -887,30 +895,31 @@ async def create_collection(self, name: str, source: str): async def list_collections(self) -> "CollectionList": """List all collections - ```python - import asyncio - from pinecone import PineconeAsyncio + .. code-block:: python - async def main(): - pc = PineconeAsyncio() + import asyncio + from pinecone import PineconeAsyncio - collections = await pc.list_collections() - for collection in collections: - print(collection.name) - print(collection.source) + async def main(): + pc = PineconeAsyncio() - # You can also iterate specifically over - # a list of collection names by calling - # the .names() helper. 
- collection_name = "my_collection" - collections = await pc.list_collections() - if collection_name in collections.names(): - print('Collection exists') + collections = await pc.list_collections() + for collection in collections: + print(collection.name) + print(collection.source) - await pc.close() + # You can also iterate specifically over + # a list of collection names by calling + # the .names() helper. + collection_name = "my_collection" + collections = await pc.list_collections() + if collection_name in collections.names(): + print('Collection exists') + + await pc.close() + + asyncio.run(main()) - asyncio.run(main()) - ``` """ pass @@ -920,21 +929,22 @@ async def delete_collection(self, name: str): :param: The name of the collection :return: Description of the collection - ```python - import asyncio - from pinecone import PineconeAsyncio + .. code-block:: python + + import asyncio + from pinecone import PineconeAsyncio - async def main(): - async with PineconeAsyncio() as pc: + async def main(): + async with PineconeAsyncio() as pc: - description = await pc.describe_collection("my_collection") - print(description.name) - print(description.source) - print(description.status) - print(description.size) + description = await pc.describe_collection("my_collection") + print(description.name) + print(description.source) + print(description.status) + print(description.size) + + asyncio.run(main()) - asyncio.run(main()) - ``` """ pass @@ -944,21 +954,21 @@ async def describe_collection(self, name: str): :param: The name of the collection :return: Description of the collection - ```python - import asyncio - from pinecone import PineconeAsyncio + .. 
code-block:: python + + import asyncio + from pinecone import PineconeAsyncio + async def main(): + async with PineconeAsyncio() as pc: + description = await pc.describe_collection("my_collection") + print(description.name) + print(description.source) + print(description.status) + print(description.size) - async def main(): - async with PineconeAsyncio() as pc: - description = await pc.describe_collection("my_collection") - print(description.name) - print(description.source) - print(description.status) - print(description.size) + asyncio.run(main()) - asyncio.run(main()) - ``` """ pass @@ -969,63 +979,63 @@ def IndexAsyncio(self, host, **kwargs): :param host: The host url of the index. - ```python - import os - import asyncio + .. code-block:: python - from pinecone import PineconeAsyncio + import os + import asyncio - api_key = os.environ.get("PINECONE_API_KEY") - index_host = os.environ.get("PINECONE_INDEX_HOST") + from pinecone import PineconeAsyncio - async def main(): - async with Pinecone(api_key=api_key) as pc: - async with pc.Index(host=index_host) as idx: - # Now you're ready to perform data operations - await index.query(vector=[...], top_k=10) + api_key = os.environ.get("PINECONE_API_KEY") + index_host = os.environ.get("PINECONE_INDEX_HOST") + + async def main(): + async with PineconeAsyncio(api_key=api_key) as pc: + async with pc.IndexAsyncio(host=index_host) as idx: + # Now you're ready to perform data operations + await idx.query(vector=[...], top_k=10) - asyncio.run(main()) - ``` + asyncio.run(main()) - To find your host url, you can use the `describe_index`. Or, alternatively, the + To find your host url, you can use the ``describe_index``. Or, alternatively, the host is displayed in the Pinecone web console. - ```python - import os - import asyncio + .. 
code-block:: python + + import os + import asyncio - from pinecone import PineconeAsyncio + from pinecone import PineconeAsyncio - async def main(): - async with PineconeAsyncio( - api_key=os.environ.get("PINECONE_API_KEY") - ) as pc: - host = await pc.describe_index('index-name').host + async def main(): + async with PineconeAsyncio( + api_key=os.environ.get("PINECONE_API_KEY") + ) as pc: + host = await pc.describe_index('index-name').host - asyncio.run(main()) - ``` + asyncio.run(main()) - ## Alternative setup + **Alternative setup** - Like instances of the `PineconeAsyncio` class, instances of `IndexAsyncio` have async context that + Like instances of the ``PineconeAsyncio`` class, instances of ``IndexAsyncio`` have async context that needs to be cleaned up when you are done with it in order to avoid error messages about unclosed session from aiohttp. Nesting these in code is a bit cumbersome, so if you are only planning to do data operations you - may prefer to setup the `IndexAsyncio` object via the `Pinecone` class which will avoid creating an outer async context. + may prefer to setup the ``IndexAsyncio`` object via the ``Pinecone`` class which will avoid creating an outer async context. - ```python - import os - import asyncio - from pinecone import Pinecone + .. 
code-block:: python - api_key = os.environ.get("PINECONE_API_KEY") + import os + import asyncio + from pinecone import Pinecone - async def main(): - pc = Pinecone(api_key=api_key) # sync client, so no async context to worry about + api_key = os.environ.get("PINECONE_API_KEY") - async with pc.AsyncioIndex(host='your_index_host') as idx: - # Now you're ready to perform data operations - await idx.query(vector=[...], top_k=10) + async def main(): + pc = Pinecone(api_key=api_key) # sync client, so no async context to worry about + + async with pc.IndexAsyncio(host='your_index_host') as idx: + # Now you're ready to perform data operations + await idx.query(vector=[...], top_k=10) - ``` """ pass diff --git a/pinecone/utils/lazy_imports.py index 0a55c8f4..6bb3d15b 100644 --- a/pinecone/utils/lazy_imports.py +++ b/pinecone/utils/lazy_imports.py @@ -24,6 +24,19 @@ def __init__(self, original_module, lazy_imports): self._lazy_imports = lazy_imports self._loaded_attrs = {} + @property + def __doc__(self): + return self._original_module.__doc__ + + @property + def __dict__(self): + # Get the base dictionary from the original module + base_dict = self._original_module.__dict__.copy() + # Add lazy-loaded items + for name, value in self._loaded_attrs.items(): + base_dict[name] = value + return base_dict + def __dir__(self): # Get the base directory listing from the original module base_dir = dir(self._original_module) diff --git a/pinecone/utils/version.py index 1da30cfe..f7e49a93 100644 --- a/pinecone/utils/version.py +++ b/pinecone/utils/version.py @@ -6,3 +6,4 @@ def get_version(): __version__ = get_version() +""" The version of the `pinecone` package""" diff --git a/poetry.lock index 0d4618a0..04bb99bc 100644 --- a/poetry.lock +++ b/poetry.lock @@ -99,7 +99,6 @@ files = [ [package.dependencies] aiohappyeyeballs = ">=2.3.0" aiosignal = ">=1.1.2" -async-timeout = {version = ">=4.0,<6.0", markers = 
"python_version < \"3.11\""} attrs = ">=17.3.0" frozenlist = ">=1.1.1" multidict = ">=4.5,<7.0" @@ -138,14 +137,14 @@ files = [ frozenlist = ">=1.1.0" [[package]] -name = "async-timeout" -version = "5.0.1" -description = "Timeout context manager for asyncio programs" -optional = true -python-versions = ">=3.8" +name = "alabaster" +version = "1.0.0" +description = "A light, configurable Sphinx theme" +optional = false +python-versions = ">=3.10" files = [ - {file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"}, - {file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"}, + {file = "alabaster-1.0.0-py3-none-any.whl", hash = "sha256:fc6786402dc3fcb2de3cabd5fe455a2db534b371124f1f21de8731783dec828b"}, + {file = "alabaster-1.0.0.tar.gz", hash = "sha256:c00dca57bca26fa62a6d7d0a9fcce65f3e026e9bfe33e9c538fd3fbb2144fd9e"}, ] [[package]] @@ -167,6 +166,20 @@ docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphi tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] +[[package]] +name = "babel" +version = "2.17.0" +description = "Internationalization utilities" +optional = false +python-versions = ">=3.8" +files = [ + {file = "babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2"}, + {file = "babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d"}, +] + +[package.extras] +dev = ["backports.zoneinfo", "freezegun (>=1.0,<2.0)", "jinja2 (>=3.0)", "pytest (>=6.0)", "pytest-cov", "pytz", "setuptools", "tzdata"] + [[package]] name = "beautifulsoup4" version = "4.13.3" @@ -397,19 +410,16 @@ files = [ ] [[package]] -name = "exceptiongroup" -version = "1.1.3" -description = "Backport 
of PEP 654 (exception groups)" +name = "docutils" +version = "0.21.2" +description = "Docutils -- Python Documentation Utilities" optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" files = [ - {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"}, - {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"}, + {file = "docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2"}, + {file = "docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f"}, ] -[package.extras] -test = ["pytest (>=6)"] - [[package]] name = "filelock" version = "3.15.1" @@ -650,6 +660,17 @@ files = [ {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, ] +[[package]] +name = "imagesize" +version = "1.4.1" +description = "Getting image size from png/jpeg/jpeg2000/gif file" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, + {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, +] + [[package]] name = "iniconfig" version = "2.0.0" @@ -897,9 +918,6 @@ files = [ {file = "multidict-6.1.0.tar.gz", hash = "sha256:22ae2ebf9b0c69d206c003e2f6a914ea33f0a932d4aa16f236afc049d9958f4a"}, ] -[package.dependencies] -typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.11\""} - [[package]] name = "mypy" version = "1.6.1" @@ -938,7 +956,6 @@ files = [ [package.dependencies] mypy-extensions = ">=1.0.0" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} typing-extensions = ">=4.1.0" [package.extras] @@ -1077,7 +1094,6 @@ 
files = [ [package.dependencies] numpy = [ - {version = ">=1.22.4", markers = "python_version < \"3.11\""}, {version = ">=1.23.2", markers = "python_version == \"3.11\""}, {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, ] @@ -1125,25 +1141,6 @@ files = [ numpy = {version = ">=1.26.0", markers = "python_version < \"3.13\""} types-pytz = ">=2022.1.1" -[[package]] -name = "pdoc" -version = "14.5.1" -description = "API Documentation for Python Projects" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pdoc-14.5.1-py3-none-any.whl", hash = "sha256:fda6365a06e438b43ca72235b58a2e2ecd66445fcc444313f6ebbde4b0abd94b"}, - {file = "pdoc-14.5.1.tar.gz", hash = "sha256:4ddd9c5123a79f511cedffd7231bf91a6e0bd0968610f768342ec5d00b5eefee"}, -] - -[package.dependencies] -Jinja2 = ">=2.11.0" -MarkupSafe = "*" -pygments = ">=2.12.0" - -[package.extras] -dev = ["hypothesis", "mypy", "pdoc-pyo3-sample-library (==1.0.11)", "pygments (>=2.14.0)", "pytest", "pytest-cov", "pytest-timeout", "ruff", "tox", "types-pygments"] - [[package]] name = "pinecone-plugin-assistant" version = "1.6.0" @@ -1397,17 +1394,17 @@ files = [ [[package]] name = "pygments" -version = "2.16.1" +version = "2.19.1" description = "Pygments is a syntax highlighting package written in Python." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"}, - {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"}, + {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, + {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, ] [package.extras] -plugins = ["importlib-metadata"] +windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pytest" @@ -1422,11 +1419,9 @@ files = [ [package.dependencies] colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" pluggy = ">=1.5,<2.0" -tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] @@ -1674,6 +1669,21 @@ urllib3 = ">=1.25.10,<3.0" [package.extras] tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "tomli", "tomli-w", "types-PyYAML", "types-requests"] +[[package]] +name = "roman-numerals-py" +version = "3.1.0" +description = "Manipulate well-formed Roman numerals" +optional = false +python-versions = ">=3.9" +files = [ + {file = "roman_numerals_py-3.1.0-py3-none-any.whl", hash = "sha256:9da2ad2fb670bcf24e81070ceb3be72f6c11c440d73bd579fbeca1e9f330954c"}, + {file = "roman_numerals_py-3.1.0.tar.gz", hash = "sha256:be4bf804f083a4ce001b5eb7e3c0862479d10f94c936f6c4e5f250aa5ff5bd2d"}, +] + +[package.extras] +lint = ["mypy (==1.15.0)", "pyright (==1.1.394)", "ruff (==0.9.7)"] +test = ["pytest (>=8)"] + [[package]] name = "ruff" 
version = "0.9.3" @@ -1712,6 +1722,17 @@ files = [ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] +[[package]] +name = "snowballstemmer" +version = "3.0.1" +description = "This package provides 32 stemmers for 30 languages generated from Snowball algorithms." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*" +files = [ + {file = "snowballstemmer-3.0.1-py3-none-any.whl", hash = "sha256:6cd7b3897da8d6c9ffb968a6781fa6532dce9c3618a4b127d920dab764a19064"}, + {file = "snowballstemmer-3.0.1.tar.gz", hash = "sha256:6d5eeeec8e9f84d4d56b847692bacf79bc2c8e90c7f80ca4444ff8b6f2e52895"}, +] + [[package]] name = "soupsieve" version = "2.6" @@ -1724,16 +1745,134 @@ files = [ ] [[package]] -name = "tomli" -version = "2.0.1" -description = "A lil' TOML parser" +name = "sphinx" +version = "8.2.3" +description = "Python documentation generator" optional = false -python-versions = ">=3.7" +python-versions = ">=3.11" files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, + {file = "sphinx-8.2.3-py3-none-any.whl", hash = "sha256:4405915165f13521d875a8c29c8970800a0141c14cc5416a38feca4ea5d9b9c3"}, + {file = "sphinx-8.2.3.tar.gz", hash = "sha256:398ad29dee7f63a75888314e9424d40f52ce5a6a87ae88e7071e80af296ec348"}, ] +[package.dependencies] +alabaster = ">=0.7.14" +babel = ">=2.13" +colorama = {version = ">=0.4.6", markers = "sys_platform == \"win32\""} +docutils = ">=0.20,<0.22" +imagesize = ">=1.3" +Jinja2 = ">=3.1" +packaging = ">=23.0" +Pygments = ">=2.17" +requests = ">=2.30.0" +roman-numerals-py = ">=1.0.0" +snowballstemmer = ">=2.2" +sphinxcontrib-applehelp = ">=1.0.7" +sphinxcontrib-devhelp = ">=1.0.6" +sphinxcontrib-htmlhelp = ">=2.0.6" +sphinxcontrib-jsmath = ">=1.0.1" +sphinxcontrib-qthelp = ">=1.0.6" 
+sphinxcontrib-serializinghtml = ">=1.1.9" + +[package.extras] +docs = ["sphinxcontrib-websupport"] +lint = ["betterproto (==2.0.0b6)", "mypy (==1.15.0)", "pypi-attestations (==0.0.21)", "pyright (==1.1.395)", "pytest (>=8.0)", "ruff (==0.9.9)", "sphinx-lint (>=0.9)", "types-Pillow (==10.2.0.20240822)", "types-Pygments (==2.19.0.20250219)", "types-colorama (==0.4.15.20240311)", "types-defusedxml (==0.7.0.20240218)", "types-docutils (==0.21.0.20241128)", "types-requests (==2.32.0.20241016)", "types-urllib3 (==1.26.25.14)"] +test = ["cython (>=3.0)", "defusedxml (>=0.7.1)", "pytest (>=8.0)", "pytest-xdist[psutil] (>=3.4)", "setuptools (>=70.0)", "typing_extensions (>=4.9)"] + +[[package]] +name = "sphinxcontrib-applehelp" +version = "2.0.0" +description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5"}, + {file = "sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1"}, +] + +[package.extras] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-devhelp" +version = "2.0.0" +description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2"}, + {file = "sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad"}, +] + +[package.extras] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-htmlhelp" +version = "2.1.0" +description = 
"sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8"}, + {file = "sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9"}, +] + +[package.extras] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] +test = ["html5lib", "pytest"] + +[[package]] +name = "sphinxcontrib-jsmath" +version = "1.0.1" +description = "A sphinx extension which renders display math in HTML via JavaScript" +optional = false +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, + {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, +] + +[package.extras] +test = ["flake8", "mypy", "pytest"] + +[[package]] +name = "sphinxcontrib-qthelp" +version = "2.0.0" +description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb"}, + {file = "sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab"}, +] + +[package.extras] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] +test = ["defusedxml (>=0.7.1)", "pytest"] + +[[package]] +name = "sphinxcontrib-serializinghtml" +version = "2.0.0" +description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)" +optional = false +python-versions = ">=3.9" +files = [ + {file = 
"sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331"}, + {file = "sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d"}, +] + +[package.extras] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] +test = ["pytest"] + [[package]] name = "tuna" version = "0.5.11" @@ -1989,5 +2128,5 @@ grpc = ["googleapis-common-protos", "grpcio", "grpcio", "grpcio", "lz4", "protob [metadata] lock-version = "2.0" -python-versions = "^3.9" -content-hash = "6ec34aecf5d783b447124d4fbb752bff5321c1ba70a90e55734000f8f725d85d" +python-versions = "^3.11" +content-hash = "3d1dbdf5907b0210450c3b067c8a1a345e9413d533a9d5bb8d91a910cbe9bc04" diff --git a/pyproject.toml b/pyproject.toml index 8dc54c42..c3a26fba 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -35,7 +35,7 @@ classifiers=[ repl = "scripts.repl:main" [tool.poetry.dependencies] -python = "^3.9" +python = "^3.11" typing-extensions = ">=3.7.4" urllib3 = [ { version = ">=1.26.0", python = ">=3.8,<3.12" }, @@ -85,7 +85,6 @@ pandas = [ { version = ">=1.3.5", python = ">=3.9" }, { version = ">=2.2.3", python = "^3.13" } ] -pdoc = "^14.1.0" pytest = "8.2.0" pytest-asyncio = "^0.25.2" pytest-cov = "2.10.1" @@ -102,6 +101,7 @@ beautifulsoup4 = "^4.13.3" vprof = "^0.38" tuna = "^0.5.11" python-dotenv = "^1.1.0" +sphinx = "^8.2.3" [tool.poetry.extras] grpc = ["grpcio", "googleapis-common-protos", "lz4", "protobuf", "protoc-gen-openapiv2"] diff --git a/sphinx/asyncio.rst b/sphinx/asyncio.rst new file mode 100644 index 00000000..94fc82a6 --- /dev/null +++ b/sphinx/asyncio.rst @@ -0,0 +1,107 @@ +=============== +PineconeAsyncio +=============== + +.. autoclass:: pinecone::PineconeAsyncio + +.. automethod:: pinecone::PineconeAsyncio.__init__ + +DB Control Plane +================ + +Indexes +------- + +.. automethod:: pinecone::PineconeAsyncio.create_index + +.. 
automethod:: pinecone::PineconeAsyncio.create_index_for_model + +.. automethod:: pinecone::PineconeAsyncio.create_index_from_backup + +.. automethod:: pinecone::PineconeAsyncio.list_indexes + +.. automethod:: pinecone::PineconeAsyncio.describe_index + +.. automethod:: pinecone::PineconeAsyncio.configure_index + +.. automethod:: pinecone::PineconeAsyncio.delete_index + +.. automethod:: pinecone::PineconeAsyncio.has_index + +Backups +------- + +.. automethod:: pinecone::PineconeAsyncio.create_backup + +.. automethod:: pinecone::PineconeAsyncio.list_backups + +.. automethod:: pinecone::PineconeAsyncio.describe_backup + +.. automethod:: pinecone::PineconeAsyncio.delete_backup + +Collections +----------- + +.. automethod:: pinecone::PineconeAsyncio.create_collection + +.. automethod:: pinecone::PineconeAsyncio.list_collections + +.. automethod:: pinecone::PineconeAsyncio.describe_collection + +.. automethod:: pinecone::PineconeAsyncio.delete_collection + +Restore Jobs +------------ + +.. automethod:: pinecone::PineconeAsyncio.list_restore_jobs + +.. automethod:: pinecone::PineconeAsyncio.describe_restore_job + +DB Data Plane +============= + +.. autoclass:: pinecone.db_data::IndexAsyncio + +.. automethod:: pinecone.db_data::IndexAsyncio.__init__ + +.. automethod:: pinecone.db_data::IndexAsyncio.describe_index_stats + +Vectors +------- + +.. automethod:: pinecone.db_data::IndexAsyncio.upsert + +.. automethod:: pinecone.db_data::IndexAsyncio.query + +.. automethod:: pinecone.db_data::IndexAsyncio.query_namespaces + +.. automethod:: pinecone.db_data::IndexAsyncio.delete + +.. automethod:: pinecone.db_data::IndexAsyncio.fetch + +.. automethod:: pinecone.db_data::IndexAsyncio.list + +.. automethod:: pinecone.db_data::IndexAsyncio.list_paginated + +Records +------- + +If you have created an index using integrated inference, you can use the following methods to +search and retrieve records. + +.. automethod:: pinecone.db_data::IndexAsyncio.search + +.. 
automethod:: pinecone.db_data::IndexAsyncio.search_records + + + +Inference +========= + +.. automethod:: pinecone.inference::Inference.embed + +.. automethod:: pinecone.inference::Inference.rerank + +.. automethod:: pinecone.inference::Inference.list_models + +.. automethod:: pinecone.inference::Inference.get_model diff --git a/sphinx/conf.py b/sphinx/conf.py new file mode 100644 index 00000000..9350a4c9 --- /dev/null +++ b/sphinx/conf.py @@ -0,0 +1,31 @@ +import pinecone + +project = "Pinecone Python SDK" +author = "Pinecone Systems, Inc." +version = pinecone.__version__ + +html_baseurl = "https://sdk.pinecone.io/python" + +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.viewcode", + "sphinx.ext.todo", + "sphinx.ext.napoleon", + "sphinx.ext.coverage", + "sphinx.ext.autodoc.typehints", +] + +# -- HTML Configuration ------------------------------------------------- + +html_theme = "alabaster" +html_theme_options = { + "github_user": "pinecone-io", + "github_repo": "pinecone-python-client", + "github_button": True, + "fixed_sidebar": True, + "page_width": "1140px", + "show_related": True, + # 'analytics_id': '', # TODO: add analytics id + "description": version, + "show_powered_by": False, +} diff --git a/sphinx/grpc.rst b/sphinx/grpc.rst new file mode 100644 index 00000000..2ced748d --- /dev/null +++ b/sphinx/grpc.rst @@ -0,0 +1,82 @@ +=============== +PineconeGRPC +=============== + +.. autoclass:: pinecone.grpc::PineconeGRPC + +DB Control Plane +================ + +Indexes +------- + +.. automethod:: pinecone.grpc::PineconeGRPC.create_index + +.. automethod:: pinecone.grpc::PineconeGRPC.create_index_for_model + +.. automethod:: pinecone.grpc::PineconeGRPC.create_index_from_backup + +.. automethod:: pinecone.grpc::PineconeGRPC.list_indexes + +.. automethod:: pinecone.grpc::PineconeGRPC.describe_index + +.. automethod:: pinecone.grpc::PineconeGRPC.configure_index + +.. automethod:: pinecone.grpc::PineconeGRPC.delete_index + +.. 
automethod:: pinecone.grpc::PineconeGRPC.has_index + +Backups +------- + +.. automethod:: pinecone.grpc::PineconeGRPC.create_backup + +.. automethod:: pinecone.grpc::PineconeGRPC.list_backups + +.. automethod:: pinecone.grpc::PineconeGRPC.describe_backup + +.. automethod:: pinecone.grpc::PineconeGRPC.delete_backup + +Collections +----------- + +.. automethod:: pinecone.grpc::PineconeGRPC.create_collection + +.. automethod:: pinecone.grpc::PineconeGRPC.list_collections + +.. automethod:: pinecone.grpc::PineconeGRPC.describe_collection + +.. automethod:: pinecone.grpc::PineconeGRPC.delete_collection + +Restore Jobs +------------ + +.. automethod:: pinecone.grpc::PineconeGRPC.list_restore_jobs + +.. automethod:: pinecone.grpc::PineconeGRPC.describe_restore_job + +DB Data Plane +============= + +.. autoclass:: pinecone.grpc::GRPCIndex + +.. automethod:: pinecone.grpc::GRPCIndex.__init__ + +.. automethod:: pinecone.grpc::GRPCIndex.describe_index_stats + +Vectors +------- + +.. automethod:: pinecone.grpc::GRPCIndex.upsert + +.. automethod:: pinecone.grpc::GRPCIndex.query + +.. automethod:: pinecone.grpc::GRPCIndex.query_namespaces + +.. automethod:: pinecone.grpc::GRPCIndex.delete + +.. automethod:: pinecone.grpc::GRPCIndex.fetch + +.. automethod:: pinecone.grpc::GRPCIndex.list + +.. automethod:: pinecone.grpc::GRPCIndex.list_paginated diff --git a/sphinx/index.rst b/sphinx/index.rst new file mode 100644 index 00000000..5a7becee --- /dev/null +++ b/sphinx/index.rst @@ -0,0 +1,247 @@ +.. toctree:: + :maxdepth: 5 + :hidden: + + rest + asyncio + grpc + +=================== +Pinecone Python SDK +=================== + +.. image:: https://img.shields.io/github/license/pinecone-io/pinecone-python-client?color=orange + :width: 100 + :alt: License + +The official Pinecone Python SDK. 
+ +Documentation +============= + +- `Conceptual docs and guides `_ +- `Github Source `_ + +Points of interest +=================== + +DB control plane +---------------- + +- `Pinecone <./rest.html#pinecone.Pinecone>`_ +- `PineconeAsyncio <./asyncio.html#pinecone.PineconeAsyncio>`_ +- `PineconeGRPC <./grpc.html#pinecone.PineconeGRPC>`_ + +DB data operations +------------------ +- `Index <./rest.html#db-data-plane>`_ +- `IndexAsyncio <./asyncio.html#db-data-plane>`_ +- `GRPCIndex <./grpc.html#db-data-plane>`_ + +Inference API +------------- +- `Inference <./rest.html#inference>`_ +- `InferenceAsyncio <./asyncio.html#inference>`_ + +Upgrading the SDK +================= + +.. admonition:: Note + + The official SDK package was renamed from ``pinecone-client`` to ``pinecone`` beginning in version ``5.1.0``. + Please remove ``pinecone-client`` from your project dependencies and add ``pinecone`` instead to get + the latest updates. + +For notes on changes between major versions, see `Upgrading <https://github.com/pinecone-io/pinecone-python-client/blob/main/docs/upgrading.md>`_ + +Prerequisites +============= + +* The Pinecone Python SDK is compatible with Python 3.11 and greater. It has been tested with CPython versions from 3.11 to 3.13. +* Before you can use the Pinecone SDK, you must sign up for an account and find your API key in the Pinecone console dashboard at `https://app.pinecone.io `_. + +Installation +============ + +The Pinecone Python SDK is distributed on PyPI using the package name ``pinecone``. By default the ``pinecone`` package has a minimal set of dependencies, but you can install some extras to unlock additional functionality. + +Available extras: + +* ``pinecone[asyncio]`` will add a dependency on ``aiohttp`` and enable usage of ``PineconeAsyncio``, the asyncio-enabled version of the client for use with highly asynchronous modern web frameworks such as FastAPI. 
+* ``pinecone[grpc]`` will add dependencies on ``grpcio`` and related libraries needed to make pinecone data calls such as ``upsert`` and ``query`` over `GRPC `_ for a modest performance improvement. See the guide on `tuning performance `_. + +Installing with pip +------------------- + +.. code-block:: shell + + # Install the latest version + pip3 install pinecone + + # Install the latest version, with optional dependencies + pip3 install "pinecone[asyncio,grpc]" + + +Installing with uv +------------------ + +`uv `_ is a modern package manager that runs 10-100x faster than pip and supports most pip syntax. + +.. code-block:: shell + + # Install the latest version + uv pip install pinecone + + # Install the latest version, optional dependencies + uv pip install "pinecone[asyncio,grpc]" + + +Installing with `poetry `_ +------------------------------------------------------ + +.. code-block:: shell + + # Install the latest version + poetry add pinecone + + # Install the latest version, with optional dependencies + poetry add pinecone --extras asyncio --extras grpc + + +Quickstart +========== + +Bringing your own vectors to Pinecone +------------------------------------- + +.. code-block:: python + + from pinecone import ( + Pinecone, + ServerlessSpec, + CloudProvider, + AwsRegion, + VectorType + ) + + # 1. Instantiate the Pinecone client + pc = Pinecone(api_key='YOUR_API_KEY') + + # 2. Create an index + index_config = pc.create_index( + name="index-name", + dimension=1536, + spec=ServerlessSpec( + cloud=CloudProvider.AWS, + region=AwsRegion.US_EAST_1 + ), + vector_type=VectorType.DENSE + ) + + # 3. Instantiate an Index client + idx = pc.Index(host=index_config.host) + + # 4. Upsert embeddings + idx.upsert( + vectors=[ + ("id1", [0.1, 0.2, 0.3, 0.4, ...], {"metadata_key": "value1"}), + ("id2", [0.2, 0.3, 0.4, 0.5, ...], {"metadata_key": "value2"}), + ], + namespace="example-namespace" + ) + + # 5. Query your index using an embedding + query_embedding = [...] 
# list should have length == index dimension
+    idx.query(
+        vector=query_embedding,
+        top_k=10,
+        include_metadata=True,
+        filter={"metadata_key": { "$eq": "value1" }}
+    )
+
+
+Bring your own data using Pinecone integrated inference
+-------------------------------------------------------
+
+.. code-block:: python
+
+    from pinecone import (
+        Pinecone,
+        CloudProvider,
+        AwsRegion,
+        EmbedModel,
+        IndexEmbed,
+    )
+
+    # 1. Instantiate the Pinecone client
+    pc = Pinecone(api_key="<>")
+
+    # 2. Create an index configured for use with a particular model
+    index_config = pc.create_index_for_model(
+        name="my-model-index",
+        cloud=CloudProvider.AWS,
+        region=AwsRegion.US_EAST_1,
+        embed=IndexEmbed(
+            model=EmbedModel.Multilingual_E5_Large,
+            field_map={"text": "my_text_field"}
+        )
+    )
+
+    # 3. Instantiate an Index client
+    idx = pc.Index(host=index_config.host)
+
+    # 4. Upsert records
+    idx.upsert_records(
+        namespace="my-namespace",
+        records=[
+            {
+                "_id": "test1",
+                "my_text_field": "Apple is a popular fruit known for its sweetness and crisp texture.",
+            },
+            {
+                "_id": "test2",
+                "my_text_field": "The tech company Apple is known for its innovative products like the iPhone.",
+            },
+            {
+                "_id": "test3",
+                "my_text_field": "Many people enjoy eating apples as a healthy snack.",
+            },
+            {
+                "_id": "test4",
+                "my_text_field": "Apple Inc. has revolutionized the tech industry with its sleek designs and user-friendly interfaces.",
+            },
+            {
+                "_id": "test5",
+                "my_text_field": "An apple a day keeps the doctor away, as the saying goes.",
+            },
+            {
+                "_id": "test6",
+                "my_text_field": "Apple Computer Company was founded on April 1, 1976, by Steve Jobs, Steve Wozniak, and Ronald Wayne as a partnership.",
+            },
+        ],
+    )
+
+    # 5. 
Search for similar records
+    from pinecone import SearchQuery, SearchRerank, RerankModel
+
+    response = idx.search_records(
+        namespace="my-namespace",
+        query=SearchQuery(
+            inputs={
+                "text": "Apple corporation",
+            },
+            top_k=3
+        ),
+        rerank=SearchRerank(
+            model=RerankModel.Bge_Reranker_V2_M3,
+            rank_fields=["my_text_field"],
+            top_n=3,
+        ),
+    )
+
+
+Issues & Bugs
+=============
+
+If you notice bugs or have feedback, please `file an issue <https://github.com/pinecone-io/pinecone-python-client/issues>`_.
+
+You can also get help in the `Pinecone Community Forum <https://community.pinecone.io/>`_.
diff --git a/sphinx/rest.rst b/sphinx/rest.rst
new file mode 100644
index 00000000..a300df52
--- /dev/null
+++ b/sphinx/rest.rst
@@ -0,0 +1,126 @@
+========
+Pinecone
+========
+
+.. autoclass:: pinecone::Pinecone
+
+.. automethod:: pinecone::Pinecone.__init__
+
+.. automethod:: pinecone::Pinecone.Index
+
+.. automethod:: pinecone::Pinecone.IndexAsyncio
+
+DB Control Plane
+================
+
+Indexes
+-------
+
+.. automethod:: pinecone::Pinecone.create_index
+
+.. automethod:: pinecone::Pinecone.create_index_for_model
+
+.. automethod:: pinecone::Pinecone.create_index_from_backup
+
+.. automethod:: pinecone::Pinecone.list_indexes
+
+.. automethod:: pinecone::Pinecone.describe_index
+
+.. automethod:: pinecone::Pinecone.configure_index
+
+.. automethod:: pinecone::Pinecone.delete_index
+
+.. automethod:: pinecone::Pinecone.has_index
+
+Backups
+-------
+
+.. automethod:: pinecone::Pinecone.create_backup
+
+.. automethod:: pinecone::Pinecone.list_backups
+
+.. automethod:: pinecone::Pinecone.describe_backup
+
+.. automethod:: pinecone::Pinecone.delete_backup
+
+Collections
+-----------
+
+.. automethod:: pinecone::Pinecone.create_collection
+
+.. automethod:: pinecone::Pinecone.list_collections
+
+.. automethod:: pinecone::Pinecone.describe_collection
+
+.. automethod:: pinecone::Pinecone.delete_collection
+
+Restore Jobs
+------------
+
+.. automethod:: pinecone::Pinecone.list_restore_jobs
+
+.. 
automethod:: pinecone::Pinecone.describe_restore_job + +DB Data Plane +============= + +.. autoclass:: pinecone.db_data::Index + +.. automethod:: pinecone.db_data::Index.__init__ + +.. automethod:: pinecone.db_data::Index.describe_index_stats + +Vectors +------- + +.. automethod:: pinecone.db_data::Index.upsert + +.. automethod:: pinecone.db_data::Index.query + +.. automethod:: pinecone.db_data::Index.query_namespaces + +.. automethod:: pinecone.db_data::Index.delete + +.. automethod:: pinecone.db_data::Index.fetch + +.. automethod:: pinecone.db_data::Index.list + +.. automethod:: pinecone.db_data::Index.list_paginated + + +Bulk Import +----------- + +.. automethod:: pinecone.db_data::Index.start_import + +.. automethod:: pinecone.db_data::Index.list_imports + +.. automethod:: pinecone.db_data::Index.list_imports_paginated + +.. automethod:: pinecone.db_data::Index.describe_import + +.. automethod:: pinecone.db_data::Index.cancel_import + + +Records +------- + +If you have created an index using integrated inference, you can use the following methods to +search and retrieve records. + +.. automethod:: pinecone.db_data::Index.search + +.. automethod:: pinecone.db_data::Index.search_records + + + +Inference +========= + +.. automethod:: pinecone.inference::Inference.embed + +.. automethod:: pinecone.inference::Inference.rerank + +.. automethod:: pinecone.inference::Inference.list_models + +.. 
automethod:: pinecone.inference::Inference.get_model From 19705ea86a2933709b83cdac6f84f8f50ecec654 Mon Sep 17 00:00:00 2001 From: Jen Hamon Date: Tue, 3 Jun 2025 15:31:32 -0400 Subject: [PATCH 2/8] Consolidate sphinx and docs directories --- docs/_static/custom.css | 14 +++++++++++ docs/_static/pinecone-logo.svg | 1 + {sphinx => docs}/asyncio.rst | 0 {sphinx => docs}/conf.py | 17 ++++++++++--- docs/db_control/pod-indexes.md | 2 +- docs/db_control/serverless-indexes.md | 2 +- docs/db_control/shared-index-configs.md | 2 +- docs/db_data/index-usage-byov.md | 2 +- docs/faq.md | 2 ++ {pdoc => docs}/favicon-32x32.png | Bin {sphinx => docs}/grpc.rst | 0 {sphinx => docs}/index.rst | 13 ++++++++++ docs/{ => inference}/inference-api.md | 0 {sphinx => docs}/rest.rst | 0 docs/upgrading.md | 25 ++++++++++---------- docs/working-with-indexes.rst | 10 ++++++++ pinecone/db_data/index_asyncio_interface.py | 2 +- pinecone/db_data/interfaces.py | 2 +- pinecone/grpc/pinecone.py | 6 ++--- pinecone/legacy_pinecone_interface.py | 7 +++--- 20 files changed, 80 insertions(+), 27 deletions(-) create mode 100644 docs/_static/custom.css create mode 100644 docs/_static/pinecone-logo.svg rename {sphinx => docs}/asyncio.rst (100%) rename {sphinx => docs}/conf.py (55%) rename {pdoc => docs}/favicon-32x32.png (100%) rename {sphinx => docs}/grpc.rst (100%) rename {sphinx => docs}/index.rst (96%) rename docs/{ => inference}/inference-api.md (100%) rename {sphinx => docs}/rest.rst (100%) create mode 100644 docs/working-with-indexes.rst diff --git a/docs/_static/custom.css b/docs/_static/custom.css new file mode 100644 index 00000000..32959129 --- /dev/null +++ b/docs/_static/custom.css @@ -0,0 +1,14 @@ +body, +div.body h1, +div.body h2, +div.body h3, +div.body h4, +div.body h5, +div.body h6, +div.admonition p.admonition-title { + font-family: "Inter", "Helvetica Neue", "Helvetica", "Arial", sans-serif; +} + +.blurb { + font-size: 16px; +} diff --git a/docs/_static/pinecone-logo.svg 
b/docs/_static/pinecone-logo.svg new file mode 100644 index 00000000..d1f7f03f --- /dev/null +++ b/docs/_static/pinecone-logo.svg @@ -0,0 +1 @@ + diff --git a/sphinx/asyncio.rst b/docs/asyncio.rst similarity index 100% rename from sphinx/asyncio.rst rename to docs/asyncio.rst diff --git a/sphinx/conf.py b/docs/conf.py similarity index 55% rename from sphinx/conf.py rename to docs/conf.py index 9350a4c9..d20af8b4 100644 --- a/sphinx/conf.py +++ b/docs/conf.py @@ -3,8 +3,11 @@ project = "Pinecone Python SDK" author = "Pinecone Systems, Inc." version = pinecone.__version__ +copyright = "%Y, Pinecone Systems, Inc." html_baseurl = "https://sdk.pinecone.io/python" +html_static_path = ["_static"] +html_favicon = "favicon-32x32.png" extensions = [ "sphinx.ext.autodoc", @@ -13,19 +16,27 @@ "sphinx.ext.napoleon", "sphinx.ext.coverage", "sphinx.ext.autodoc.typehints", + "myst_parser", ] # -- HTML Configuration ------------------------------------------------- html_theme = "alabaster" html_theme_options = { + "logo": "pinecone-logo.svg", + "description": "Pinecone Python SDK", "github_user": "pinecone-io", "github_repo": "pinecone-python-client", "github_button": True, "fixed_sidebar": True, "page_width": "1140px", - "show_related": True, - # 'analytics_id': '', # TODO: add analytics id - "description": version, + "sidebar_width": "300px", + "show_related": False, "show_powered_by": False, + "extra_nav_links": { + "Github Source": "https://github.com/pinecone-io/pinecone-python-client", + "Pinecone Home": "https://pinecone.io", + "Pinecone Docs": "https://docs.pinecone.io", + "Pinecone Console": "https://app.pinecone.io", + }, } diff --git a/docs/db_control/pod-indexes.md b/docs/db_control/pod-indexes.md index f09ffe25..773ef9ac 100644 --- a/docs/db_control/pod-indexes.md +++ b/docs/db_control/pod-indexes.md @@ -151,6 +151,6 @@ pc.configure_index( ) ``` -# Configuring, listing, describing, and deleting +## Configuring, listing, describing, and deleting See [shared index 
actions](shared-index-actions.md) to learn about how to manage the lifecycle of your index after it is created. diff --git a/docs/db_control/serverless-indexes.md b/docs/db_control/serverless-indexes.md index b78312a5..1c3944f3 100644 --- a/docs/db_control/serverless-indexes.md +++ b/docs/db_control/serverless-indexes.md @@ -126,6 +126,6 @@ pc.create_index( ) ``` -# Configuring, listing, describing, and deleting +## Configuring, listing, describing, and deleting See [shared index actions](shared-index-actions.md) to learn about how to manage the lifecycle of your index after it is created. diff --git a/docs/db_control/shared-index-configs.md b/docs/db_control/shared-index-configs.md index c6128bf6..553a014a 100644 --- a/docs/db_control/shared-index-configs.md +++ b/docs/db_control/shared-index-configs.md @@ -1,4 +1,4 @@ -# Optional Index configurations +# Index configuration This page covers some optional configurations that can be used with all index types. diff --git a/docs/db_data/index-usage-byov.md b/docs/db_data/index-usage-byov.md index a3cd263c..85277c4f 100644 --- a/docs/db_data/index-usage-byov.md +++ b/docs/db_data/index-usage-byov.md @@ -1,4 +1,4 @@ -# Using your Pinecone index +# Vectors ## Describe index statistics diff --git a/docs/faq.md b/docs/faq.md index 85a40081..2f89db2b 100644 --- a/docs/faq.md +++ b/docs/faq.md @@ -1,3 +1,5 @@ +# FAQ + ## How does connection pooling work in the Pinecone SDK? Before any data can be sent or received from Pinecone, your application must first establish a TCP connection with our API gateway. Establishing a TCP connection is a costly operation from a performance standpoint and so we use connection pooling to cache and reuse these connections across many different requests. 
diff --git a/pdoc/favicon-32x32.png b/docs/favicon-32x32.png similarity index 100% rename from pdoc/favicon-32x32.png rename to docs/favicon-32x32.png diff --git a/sphinx/grpc.rst b/docs/grpc.rst similarity index 100% rename from sphinx/grpc.rst rename to docs/grpc.rst diff --git a/sphinx/index.rst b/docs/index.rst similarity index 96% rename from sphinx/index.rst rename to docs/index.rst index 5a7becee..f64573b3 100644 --- a/sphinx/index.rst +++ b/docs/index.rst @@ -1,11 +1,24 @@ .. toctree:: :maxdepth: 5 :hidden: + :caption: Reference rest asyncio grpc +.. toctree:: + :maxdepth: 5 + :hidden: + :caption: Usage Info + + upgrading + FAQ + working-with-indexes + db_data/index-usage-byov.md + db_control/collections.md + inference/inference-api.md + =================== Pinecone Python SDK =================== diff --git a/docs/inference-api.md b/docs/inference/inference-api.md similarity index 100% rename from docs/inference-api.md rename to docs/inference/inference-api.md diff --git a/sphinx/rest.rst b/docs/rest.rst similarity index 100% rename from sphinx/rest.rst rename to docs/rest.rst diff --git a/docs/upgrading.md b/docs/upgrading.md index de24a91d..908719e7 100644 --- a/docs/upgrading.md +++ b/docs/upgrading.md @@ -1,9 +1,10 @@ -> [!NOTE] -> The official SDK package was renamed from `pinecone-client` to `pinecone` beginning in version 5.1.0. -> Please remove `pinecone-client` from your project dependencies and add `pinecone` instead to get -> the latest updates. +# Upgrading -# Upgrading from `6.x` to `7.x` +The official SDK package was renamed from `pinecone-client` to `pinecone` beginning in version 5.1.0. +Please remove `pinecone-client` from your project dependencies and add `pinecone` instead to get +the latest updates. + +## Upgrading from `6.x` to `7.x` There are no intentional breaking changes when moving from v6 to v7 of the SDK. The major version bump reflects the move from calling the `2025-01` to the `2025-04` version of the underlying API. 
@@ -196,16 +197,16 @@ pc.create_index( ) ``` -# Upgrading from `5.x` to `6.x` +## Upgrading from `5.x` to `6.x` -## Breaking changes in 6.x +### Breaking changes in 6.x - Dropped support for Python 3.8, which has now reached [official end of life](https://devguide.python.org/versions/). We added support for Python 3.13. - Removed the explicit dependency on [`tqdm`](https://github.com/tqdm/tqdm) which is used to provide a nice progress bar when upserting lots of data into Pinecone. If `tqdm` is available in the environment the Pinecone SDK will detect and use it but we will no longer require `tqdm` to be installed in order to run the SDK. Popular notebook platforms such as [Jupyter](https://jupyter.org/) and [Google Colab](https://colab.google/) already include `tqdm` in the environment by default so for many users this will not require any changes, but if you are running small scripts in other environments and want to continue seeing the progress bars you will need to separately install the `tqdm` package. - Removed some previously deprecated and rarely used keyword arguments (`config`, `openapi_config`, and `index_api`) to instead prefer dedicated keyword arguments for individual settings such as `api_key`, `proxy_url`, etc. These keyword arguments were primarily aimed at facilitating testing but were never documented for the end-user so we expect few people to be impacted by the change. Having multiple ways of passing in the same configuration values was adding significant amounts of complexity to argument validation, testing, and documentation that wasn't really being repaid by significant ease of use, so we've removed those options. ## Useful additions in 6.x: -## Compatibility with `asyncio` +### Compatibility with `asyncio` The v6 Python SDK introduces a new client variants, `PineconeAsyncio` and `IndexAsyncio`, which provide `async` methods for use with [asyncio](https://docs.python.org/3/library/asyncio.html). 
This should unblock those who wish to use Pinecone with modern async web frameworks such as [FastAPI](https://fastapi.tiangolo.com/), [Quart](https://quart.palletsprojects.com/en/latest/), [Sanic](https://sanic.dev/), etc. @@ -349,7 +350,7 @@ sparse_index.query( ) ``` -## Configuration UX with enums +### Configuration UX with enums Many enum objects have been added to help with the discoverability of some configuration options. Type hints in your editor will now suggest enums such as `Metric`, `AwsRegion`, `GcpRegion`, `PodType`, `EmbedModel`, `RerankModel` and more to help you quickly get going without having to go looking for documentation examples. This is a backwards compatible change and you should still be able to pass string values for fields exactly as before if you have preexisting code. @@ -396,17 +397,17 @@ pc.create_index( Both ways of working are equally valid. Some may prefer the more concise nature of passing simple string values, but others may prefer the support your editor gives you to tab complete when working with enums. -# Upgrading from `4.x` to `5.x` +## Upgrading from `4.x` to `5.x` As part of an overall move to stop exposing generated code in the package's public interface, an obscure configuration property (`openapi_config`) was removed in favor of individual configuration options such as `proxy_url`, `proxy_headers`, and `ssl_ca_certs`. All of these properties were available in v3 and v4 releases of the SDK, with deprecation notices shown to affected users. It is no longer necessary to install a separate plugin, `pinecone-plugin-inference`, to try out the [Inference API](https://docs.pinecone.io/guides/inference/understanding-inference); that plugin is now installed by default in the v5 SDK. See [usage instructions below](#inference-api). 
-# Upgrading from `3.x` to `4.x` +## Upgrading from `3.x` to `4.x` For this upgrade you are unlikely to be impacted by breaking changes unless you are using the `grpc` extras to use `PineconeGRPC` and have other dependencies in your project which place constraints on your grpc version. The `pinecone[grpc]` extras package got a breaking change to the underlying `grpcio` dependency to unlock significant performance improvements. Read full details in these [v4 Release Notes](https://github.com/pinecone-io/pinecone-python-client/releases/tag/v4.0.0). -# Upgrading to `3.x`: +## Upgrading to `3.x` Many things were changed in the v3 SDK to pave the way for Pinecone's new Serverless index offering as well as put in place a more object-oriented foundation for developing the SDK. These changes are covered in detail in the [**v3 Migration Guide**](https://canyon-quilt-082.notion.site/Pinecone-Python-SDK-v3-0-0-Migration-Guide-056d3897d7634bf7be399676a4757c7b#a21aff70b403416ba352fd30e300bce3). Serverless indexes are only available in `3.x` release versions or greater. diff --git a/docs/working-with-indexes.rst b/docs/working-with-indexes.rst new file mode 100644 index 00000000..c6eb8130 --- /dev/null +++ b/docs/working-with-indexes.rst @@ -0,0 +1,10 @@ +Indexes +======= + +.. toctree:: + :maxdepth: 2 + + db_control/serverless-indexes + db_control/pod-indexes + db_control/shared-index-actions + db_control/shared-index-configs diff --git a/pinecone/db_data/index_asyncio_interface.py b/pinecone/db_data/index_asyncio_interface.py index 0e145bff..35ad0484 100644 --- a/pinecone/db_data/index_asyncio_interface.py +++ b/pinecone/db_data/index_asyncio_interface.py @@ -242,7 +242,7 @@ async def main(): asyncio.run(main()) - Returns: An empty dictionary if the delete operation was successful. + Returns: An empty dictionary if the delete operation was successful. 
""" pass diff --git a/pinecone/db_data/interfaces.py b/pinecone/db_data/interfaces.py index be2b4e3e..5c511b92 100644 --- a/pinecone/db_data/interfaces.py +++ b/pinecone/db_data/interfaces.py @@ -426,7 +426,7 @@ def delete( >>> index.delete(filter={'key': 'value'}, namespace='my_namespace') - Returns: An empty dictionary if the delete operation was successful. + Returns: An empty dictionary if the delete operation was successful. """ pass diff --git a/pinecone/grpc/pinecone.py b/pinecone/grpc/pinecone.py index 3aeb7fe6..37202fb1 100644 --- a/pinecone/grpc/pinecone.py +++ b/pinecone/grpc/pinecone.py @@ -17,10 +17,10 @@ class PineconeGRPC(Pinecone): .. code-block:: bash # Install the latest version - pip3 install pinecone[grpc] + pip3 install "pinecone[grpc]" # Install a specific version - pip3 install "pinecone[grpc]" + pip3 install "pinecone[grpc]"==7.0.2 **Installing with poetry** @@ -30,7 +30,7 @@ class PineconeGRPC(Pinecone): poetry add pinecone --extras grpc # Install a specific version - poetry add pinecone --extras grpc + poetry add pinecone==7.0.2 --extras grpc **Using the gRPC client** diff --git a/pinecone/legacy_pinecone_interface.py b/pinecone/legacy_pinecone_interface.py index 7c50bdb5..26a84706 100644 --- a/pinecone/legacy_pinecone_interface.py +++ b/pinecone/legacy_pinecone_interface.py @@ -257,10 +257,11 @@ def create_index_for_model( ) - See also: + .. 
seealso:: - * See `available cloud regions `_ - * See the `Model Gallery `_ to learn about available models + Official docs on `available cloud regions `_ + + `Model Gallery `_ to learn about available models """ pass From 1596bc6039b6b417b91f4b285a650242a4545f49 Mon Sep 17 00:00:00 2001 From: Jen Hamon Date: Tue, 3 Jun 2025 15:33:36 -0400 Subject: [PATCH 3/8] Remove pdoc/ --- pdoc/README.md | 225 --------------------------- pdoc/pinecone-python-client-fork.png | Bin 47754 -> 0 bytes 2 files changed, 225 deletions(-) delete mode 100644 pdoc/README.md delete mode 100644 pdoc/pinecone-python-client-fork.png diff --git a/pdoc/README.md b/pdoc/README.md deleted file mode 100644 index 06dea43a..00000000 --- a/pdoc/README.md +++ /dev/null @@ -1,225 +0,0 @@ -# Pinecone Python SDK -![License](https://img.shields.io/github/license/pinecone-io/pinecone-python-client?color=orange) - -The official Pinecone Python SDK. - -## Documentation - -- [**Conceptual docs and guides**](https://docs.pinecone.io) -- [**Github Source**](https://github.com/pinecone-io/pinecone-python-client) - -## Points of interest - -**DB control plane** - -- [`Pinecone`](./pinecone/control/pinecone.html) -- [`PineconeAsyncio`](./pinecone/control/pinecone_asyncio.html) -- [`PineconeGRPC`](./pinecone/grpc/pinecone.html) - -**DB data operations** -- [`Index`](./pinecone/data/index.html) -- [`IndexAsyncio`](./pinecone/data/index_asyncio.html) -- [`IndexGRPC`](./pinecone/grpc/index_grpc.html) - -**Inference API** -- [`Inference`](./pinecone/data/features/inference/inference.html) -- [`InferenceAsyncio`](./pinecone/data/features/inference/inference_asyncio.html) - -## Upgrading the SDK - -> [!NOTE] -> The official SDK package was renamed from `pinecone-client` to `pinecone` beginning in version `5.1.0`. -> Please remove `pinecone-client` from your project dependencies and add `pinecone` instead to get -> the latest updates. 
- -For notes on changes between major versions, see [Upgrading](./docs/upgrading.md) - -## Prerequisites - -- The Pinecone Python SDK is compatible with Python 3.9 and greater. It has been tested with CPython versions from 3.9 to 3.13. -- Before you can use the Pinecone SDK, you must sign up for an account and find your API key in the Pinecone console dashboard at [https://app.pinecone.io](https://app.pinecone.io). - -## Installation - -The Pinecone Python SDK is distributed on PyPI using the package name `pinecone`. By default the `pinecone` has a minimal set of dependencies, but you can install some extras to unlock additional functionality. - -Available extras: - -- `pinecone[asyncio]` will add a dependency on `aiohttp` and enable usage of `PineconeAsyncio`, the asyncio-enabled version of the client for use with highly asynchronous modern web frameworks such as FastAPI. -- `pinecone[grpc]` will add dependencies on `grpcio` and related libraries needed to make pinecone data calls such as `upsert` and `query` over [GRPC](https://grpc.io/) for a modest performance improvement. See the guide on [tuning performance](https://docs.pinecone.io/docs/performance-tuning). - -#### Installing with pip - -```shell -# Install the latest version -pip3 install pinecone - -# Install the latest version, with optional dependencies -pip3 install "pinecone[asyncio,grpc]" -``` - -#### Installing with uv - -[uv](https://docs.astral.sh/uv/) is a modern package manager that runs 10-100x faster than pip and supports most pip syntax. 
- -```shell -# Install the latest version -uv install pinecone - -# Install the latest version, optional dependencies -uv install "pinecone[asyncio,grpc]" -``` - -#### Installing with [poetry](https://python-poetry.org/) - -```shell -# Install the latest version -poetry add pinecone - -# Install the latest version, with optional dependencies -poetry add pinecone --extras asyncio --extras grpc -``` - -# Quickstart - -## Bringing your own vectors to Pinecone - -```python -from pinecone import ( - Pinecone, - ServerlessSpec, - CloudProvider, - AwsRegion, - VectorType -) - -# 1. Instantiate the Pinecone client -pc = Pinecone(api_key='YOUR_API_KEY') - -# 2. Create an index -index_config = pc.create_index( - name="index-name", - dimension=1536, - spec=ServerlessSpec( - cloud=CloudProvider.AWS, - region=AwsRegion.US_EAST_1 - ), - vector_type=VectorType.DENSE -) - -# 3. Instantiate an Index client -idx = pc.Index(host=index_config.host) - -# 4. Upsert embeddings -idx.upsert( - vectors=[ - ("id1", [0.1, 0.2, 0.3, 0.4, ...], {"metadata_key": "value1"}), - ("id2", [0.2, 0.3, 0.4, 0.5, ...], {"metadata_key": "value2"}), - ], - namespace="example-namespace" -) - -# 5. Query your index using an embedding -query_embedding = [...] # list should have length == index dimension -idx.query( - vector=query_embedding, - top_k=10, - include_metadata=True, - filter={"metadata_key": { "$eq": "value1" }} -) -``` - -## Bring your own data using Pinecone integrated inference - -```python -from pinecone import ( - Pinecone, - CloudProvider, - AwsRegion, - EmbedModel, -) - -# 1. Instantiate the Pinecone client -pc = Pinecone(api_key="<>") - -# 2. Create an index configured for use with a particular model -index_config = pc.create_index_for_model( - name="my-model-index", - cloud=CloudProvider.AWS, - region=AwsRegion.US_EAST_1, - embed=IndexEmbed( - model=EmbedModel.Multilingual_E5_Large, - field_map={"text": "my_text_field"} - ) -) - -# 3. 
Instantiate an Index client -idx = pc.Index(host=index_config.host) - -# 4. Upsert records -idx.upsert_records( - namespace="my-namespace", - records=[ - { - "_id": "test1", - "my_text_field": "Apple is a popular fruit known for its sweetness and crisp texture.", - }, - { - "_id": "test2", - "my_text_field": "The tech company Apple is known for its innovative products like the iPhone.", - }, - { - "_id": "test3", - "my_text_field": "Many people enjoy eating apples as a healthy snack.", - }, - { - "_id": "test4", - "my_text_field": "Apple Inc. has revolutionized the tech industry with its sleek designs and user-friendly interfaces.", - }, - { - "_id": "test5", - "my_text_field": "An apple a day keeps the doctor away, as the saying goes.", - }, - { - "_id": "test6", - "my_text_field": "Apple Computer Company was founded on April 1, 1976, by Steve Jobs, Steve Wozniak, and Ronald Wayne as a partnership.", - }, - ], -) - -# 5. Search for similar records -from pinecone import SearchQuery, SearchRerank, RerankModel - -response = index.search_records( - namespace="my-namespace", - query=SearchQuery( - inputs={ - "text": "Apple corporation", - }, - top_k=3 - ), - rerank=SearchRerank( - model=RerankModel.Bge_Reranker_V2_M3, - rank_fields=["my_text_field"], - top_n=3, - ), -) -``` - -## More information on usage - -Detailed information on specific ways of using the SDK are covered in these other pages. - - -- [Serverless Indexes](./docs/db_control/serverless-indexes.md) -- [Pod Indexes](./docs/db_control/pod-indexes.md) -- [Working with vectors](./docs/db_data/index-usage-byov.md) -- [Inference API](./docs/inference-api.md) -- [FAQ](./docs/faq.md) - - -# Issues & Bugs - -If you notice bugs or have feedback, please [file an issue](https://github.com/pinecone-io/pinecone-python-client/issues). - -You can also get help in the [Pinecone Community Forum](https://community.pinecone.io/). 
diff --git a/pdoc/pinecone-python-client-fork.png b/pdoc/pinecone-python-client-fork.png deleted file mode 100644 index 6eff5d96c2c6c5fcdc3cb031a996eed5605753a2..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 47754 zcmeFZ1y@|#()XJX5-bqBae{jj+#$i;od$vh3+@gHjk`O+-61#xcXtV-ad!{$uAFC_ z_qpTlz0dgo_cGS#0o|*6t*%*f&Z_$VieN=~2~=bP+U9+M958)uij?chp&_ay8e&XLVt@S8cLFG*zK5S3+qDYf6SQz&h?A+<${sBo}kC^ zi%H+z?$fk-U&gd~->yYPO9bIX8}ZI*{euvZQ@w;~-QTX>C<9uX$&fF>X8VwbPMVZpw_PBfm z^>)kO_yBf#bf^;;M75?>W%1K}Oi{6Iw;+unE7wB34DJ|Z^-K5dKbiJX_t)Pu~jxESL&G#Bko z+t5#iMPL83!y`(Sp?mJJ1oK>clZNoebX$n<(y&fd*TNEwipd#BP`E()9c<77HffI|o0#|oCC!oB?# z`2HIUnRsuqEVaPb?tY$y*=foysHi`hA{G(%#qY~= z{tCxrR(6*TVP+>Pt?L^W$@wzVsw}iWHvOB!_X*3_7WU-eOY+C&)aX0@q3}pplcNG2 ztV zp-*F~e!@Mp=4aU0JRzY2yT30^U+A_wC(9E*wd=Bc8)myhf_t9){u>sUS~f$L!*b?| zlcu*5sz5E*9bs8-?ZM|XGl(eZZNfc3% zPY#b6hE+{vZ)6?9gDc;{eYInGjgwcu+IuMr4T<4hbBg9PLC)kX`Q2hX5UaA{d24nm zTmdU~oEM#IG;mwX8v(QGTM9_Pg#IkSo$1938z2WI`-y9tGf5CF%(vH7= zPG+451&zH3vyEnR0oyz5%OJctDhOWhTY8z#g3`<$BK|ka(+_$0i$z$cKa$KPA(zIN zo-Tyzo##)nG2C{_(%nzpTlKDwpYKk)e&%P01>pq?Ly_}FrD7O)HV0!f)fu&ZE%YXi zW$s=d>vPua#tt$qI%jW&ZYSd~)pClp6l&*y39n6E8)aj1>1b{IglipPM46Bkx=}w{ z81|H}eA3&GHMY^fLWK)nn}bX-gIn>>L6C`#Z5&u zB6;)G5nJ+dvw2QH+7FLrg_Is1w6}^CN)FmhX5fe4t4BTv5I8vk48+1tC(qjC%ONE&a4 zUf0x5`QPAHv%#Ba>ixMw-OLCQp7&!J{OWyjGpLB2VkhWiJDEa0!>oR`SssnwvPrQ3 z2e4Irx)6NFq`L}9qV;gqF`J9;URkG+JDUyXsWF}KZS(Si(zvYfvwL>aQu$8XjK8m| zGaa+uTi+62ZSz*+oBw^WTOce3MI%l*67m+7BjC&>>k3f1z+>52oMD&_7E%AXIWDd| zX*w+V0s%GuH9qV3QoW7&Ufe+K=K;(yK~AfcF9e(xq7&H?6jF*uvo$oWZV%eTHa~o9 z7UP7{)*2=^WfF^gRt1U1#zZ_9Yn=$ov|2;j0FP>H{Vz%N`ve&wZQL`mUI*w6HpxGsdfWuPh40~YrqDX~*rR^f9!itEVUhn?K zaqobq$?k!d3Gyye`@4l4SV`B6k8O7*hwu2+O&J7!sn$k*=eVh6+VzEA?w zZFjqyP9<(T+~0&=KMU^R{xUXSE?N3~+nx4)^7#4JS4yyKF49=~X`TU;?lk~w0v>&!(ST;+{Pv^g!e@<@jg!JCy3 zd^&*&wz4>|)|a))A9PvSoTWN_BzVk)+LwD5=i3SdY!=vMdUeWbIO-!h-zgg_FzwNj ze_tO@O;UxL!|rdJ`YbH??S)zr3tbtq_sS!xo*GE zI2yv_A3a!b7@sOoC{j4qvxuPVi(m;I0x@grgmC{|oZv0~K9X3zQ$$L#K{1 
zl$th6d1{h~ZhVnqvamm*RQo)b?fSUrVyyj1@exjp);uH*`-_N$koQFt+R>8LECrQX zienswhMq}Z^dqTYZ6=A(1$q!;IE7~AFM3CfVs>*n;qD;MBLrTz1H>hGn_>sc#~I(z z3|^_CsXUd3rCMv8;5kW{VID17pav*1;oMYV;<6-B3Jc6>qbaiBu#ttwVl=tLRbhTT zTE-~ddS1g3jN<3Z;`g2kQmWPd0iQlR)ZdO(w)$|%ab>kcONH{;Y0&T6|oko-0L5u;_JVcZ{u z<;Rmjw6JJ!=|c4>1gf~!7b6y0s1upNDC@Y%9SNE_wZ|D`_>l9tNcrCHGAICz^h0es`jI;p!)_QbmPqq#Lb=|gDMoqM5!b$ahVW^k?L-z{88}d z9z3pT4Ra;2nf3TN0!ouvl`}luTB~j@c5`Oxf{_0vIQyjme}M>Ev%*La5gYlA`Ka4D z*z8b9i-f-!Ysd)ke~llKQos9yyvN>sAgeIHoI#6;miC!fW7<9(x!0dD|@=V5=`e40Ji3=5T+HdC!oV&koJsUOTF6IP6g7l!O2S3;i?%q7s!*z3}!VR&2CZm_d^6_i3A$ht+5B*G|A|@NTJ`h^1wsg>_ggCta0fvW*x2=i| zkKdgtxP}w&6wQO_UR*Ke!(*~2CK~1x+JxY__xxgfbrcF3Ey=Go9_bbCP#=t8`N|qa z=mDv38{q7T2e7Z zmL`lI5K+VR%I{OF1V(P$HrnmtUO`$6DYjJ}bMh9d?QLfP^DZaoQY)ypPgtL2xnxJn zkU(c-Z#x4JQ9GMt>a7P;#ah7DD~%FG0^PlXv?kjLhgfTwWV62QduGKV}HX z;BHD$^#0C2=y~VZ|FNlWyL{u;v;UR{x%xRoaAVJ2Nc6|z&gQI#Vhz$2f@6H2dEE^DPzuS|m8D~1K$I9_8S%a*w$;4q(@F4}XsO{FeW zEm01ZuWWGK-Qz$WUDKx4`j+S6=>eJJD(>k5=V1s z$a8$c;PijGwppd+SgoH#I0dtCsaqWqMiiJ;t<}0Nu<|}(%X2{)_n5avLPC=xqVis4=-5wDY4(Vp2;na&B9Okh`-fUv zKkQO^C=so+&lze^jTHvH5J70THIvL27K<-Y4PUXlh*~Eqrn9Ksk#0bL^SbIX$k%pb zT#(miE3Qco@%tvb`AWc5)f_O@j#U#{-)>c2ceNipyrue7I?yJElI+QZ)@BI=EA+hg& zkidV&1*h@w)YB;h^?B9|9a($eI%E)^LX2+rSji!b`i&_ultIUN=^l|JDrE#SwDb&B zy`5rJolwq$V!t44ZGYjW3`&Iwst(mCr-zZ_vT=9A?2Q8(U7wkWK~}mcI^L5UO`^eUt|(BtpNY($F&;U-L?xeb;eZ|z1cQ)UNKRTJVd2nN zdoDK2QXJGGB9>?{W`l^FL>X{OW^+vyNErylv$CmnK@w{xjGt+uDc3`{MlN2{HCJbw^XY4&{K6yANxs7U&tJ2)dOfreTI_sq*|-A-O(Yxth#C#F??2 z%b^8>9gmNQame?8%9J!jDwCyV#xCe>Y!sKNcsN1&`6;#WDR~G8w$^)D;<$UStFc{OQ2@zDHUan{moeR7LgIx4mO^b-W z=xZbf69{E(ls{g~+NeQJ43+c_6?_66EL%ODcRJgIfw|Ypf$MYe^e!#^Hc+X!s4Vx; z20Zu%EbdBYc#)8X_LbYF?<{Xf78w-KdsV$TIQ98T54h*)ehhkq0Dp7>ezR|hf}Rg& z&^8~Jf)S!7%cr(h)yO^*NXj`k0vd_z`POj0_rY@tDzZLfe(yc@)?bv@3#_JR*x=Va z;bdKbDzW(^s!91m?N78uPiSqU(it>eax7r02*vwqo3;O4gzB$u>_ZK>ag?Hx;YsFtUsg7}Tv8>h5I^vn zcJ)Wj_!7+-Hqx&|SkDb+&credayM*ZLPdm?W#j`&7wjiP@=zQxIB_YejmL^Oqe$lW 
z1q_1}peU#Q+@2pROdpZDA_lgm;Dcjq9fK%|9w>0^_K>w3jWK$*`M)Lfl68t_c)r|S zj31(ZJ=+&aTIl>!T+csrh)DS14kyx#w(%TS_13Q5`iunHrD|`D2x>}rL(h}sJJJcw zxy#x1Ge2CU8A~&PPLxdOkDi7SfiPKVHLgC-Hkq9^;L=N@%uvMFp$ls994O{9COe^k zn6&G%rQ_*U8d>uMD(Kumq9ep?7P+if1l0QpM0lYIj3yNUhP7egKRw()kN$S+{9`!G zA#Mk1l^<6ip`Fl;_vo-3CYrL9dY4FEA?E|`i|d00i@DZ=YA&#zu-N6HL^6-R@`#0+ zK#5cmrIhl|FXzIV44Vt_CpQx!^x({lY$OmL4_VX3E2PojSmSF?TjO7g0QMjSZf+n8f_HzF__4azRCV`yFN^oSB?NLJu}} zb%R`z8k*rj4$GbAG(`!DN|;}yggq}wxr5LSRwbM)=f2ZJAlp~QD^3XZ`tCZPdy zU07f+7uD`gDnqMH)bQR7w2}$`sXn$|OfBA9LE2ZSVZugPrcBv?SEs$fc`o40&BRlm z*S!?;w#hj8%vo-F(qSEOajp_urT(7NgPrCDvnrUfLNq+nx;7}WI+pL=MYFfL~ zG=vw8^0By9Bgmi_c!+<*ONYm;MG_3<5PQK0W+L^y;Q=gq)#=VgA2ype%vgvZZzq~H z-ULK=dRQj}u>hRr>%NJ6EM@Ci%}#kgx%?8PHQl0OV>#xNFj7Qgnr(x>_3UuoSaAcCWH1c)i$NxslD!_T6swx6NWDk!)dcD>UO@FgOMYylCI66s`t_|&mdO4 zHY%27mnp@T>=q=S$(tABAd_v$RPoi7c%B>*L+l8Mr{e_hWiO4_gUvsBbnU+jDuv!lv(w56Ywu!Z}pM=#X5wcwEQ09>=ufd z&Nb-dVs~ZGte8rD?F=lQ{9uB8O;`LuJqq$#d}Mg#KE;OlIwORN@V4oQbLpUhzA6d{ z9_mj9n+q(4^NK0FZq?;g1d(F)#X&cex2&Q>(NBp^ju}`3iQB*D0J~+}POaM+46kht zf!q@*_x%2{=-AqCe;A`vIvbWN;mP@#Rb_hLdiTbWyZF_b%gA%%4IJ!2R$eg)@E^&cK z2xi3jy57*YK>I02bey$UkdFEXy@fY_CDu^IK zy;kmt=s#H|OOOK=yV2JcI~G~x1D&LjxiSrL#dTKGD@KAJy*{hhrwvI0rX;AQIaYk5 zY8X9K$TYZ%LvF9#PrG(%jQTz)_ze+#FRVx2rRV|tQxCZ$$xEvx zO7|u|B+Hhp4=U2`{UUCzivrDxJ-UYw$<(;4+sG!B-JESQBVDIV?UKJ731ltcQ5oHm zzZlL8C*?BgMkW>JNe=11;j^Fobc$>1wD+ zx`uwjEljok2hlv#1~z&!Vj;L{mhbs#;}Tyx#AQn~pu?o?oq}%Ad1cKwsHGm6$Vz2# z4x|nx6%te{R@8$w zR8vEOWO7~Ori*n0yDF%@A*6ltIL(IA7eH64QV1eTEI-F=f5_l>Q-wPfAsRFv|MC1{ zZ&Rk^U*!}E2wu(j+4zoLQX%5sD5GeU-!%RN2dJVgL0B4A{+_3uUDnuq&X7%}w}dQR z_L>EuX)+z!;MWdaEHn^FL(9|L`6{OuKik~8&K)t(DR&(M!nFFKTp@$akBWX?zf?es_mZUV#A|7HOqNqFBs-JkdAfY@2cJ#T_kOY~l7H+?KGFb%XbQR#$2 zO0>(Guz94hQv{5v?LYPFS0Yl$WeB@{;olnRN8oAii6lLu4rIRNo6H;QUK8OSZ9nxJ z%@FJusIJf^4&10mhkf82Y%}^jGtq@Wt1|nUQIhiGaooU)KZ^M{)L|PJtR4f=(!9O8 zBup1@_)=a~k030@b0RJ>)kY2|E`MU+5jPRUEWb2W_p8$6wl7q@YAo6*^m07I<7jAH z8>ztljz5`yD#yk>_X;k>Rw>lMhKMJdnK=M;1R<$7qw^&VZiVsy1!G(hxo}rr3FF6? 
z6oYAkt;i-&xW6&{Tl7lA9UO42lGJ=5NkiW&Fj=4g0Q!(^!y!u?UpDGB000G|{I8GGMXj&Q{a!D!;_ZZaw zH;Y8%#;l z^JRf;*j>vE6w&>?x0qwkp1T--R`C}%qOWf{;yfc{ez$2QrE2bl16*HEg;vRT3w_ZL zOQP9)lt3owL~#1u(t2NHoT)#hQ9=uLg0%aJQU;%bp2ckS%fKorv+uQq+;0VPz_!IRi=$fwHI=RJ3k9ZXKbc@9mLX9 z8kGQP21nJ-VEJ##hv?ss=7-Qs5kGXJNaYGR`;VRLnjLaj{`6aE_mXIK{+vD5VlZE+ zTC5&=b+~eLcr2J4mFC7RBz_N1f6#F1RC^^_ORHB*B;a+HBu^}~7CZEd2pASpbVSEa zu`m8eyWP8N$WYh!)1}(bo{+v;_lH*sr^4%wmwRgTTD3t^pU-g_vz1Mx_k~mVB%Ixc z8}StXMwI*?kmAPz61Q-J?G;Pver6EJeDaf`8@+72!e{*!??YD6P+#7T9T#6JK=(#T zMnc-9%u3GXF?Y%6;T6ihpaghH`MgP}k2M-S@_i&wz zS&?tiYmopbK-gXv&t0=ryYUOJ%g+0?aHSk|CXRIlQlYGPh6J6ohXQ7%z|N|TDva7h z9Pu}Q=kow=@vHO?1+zc=O&=Hs0>Afkkp5p`SWb!b>T%`sNnJl;Yz->>mo-EFW|aPi zu=$k;e83Hr6$AzT{SO-W7DS-ZU!WBH=?|n5``y!1euEZ_%_mRPnV5*+#ozh-zpx`xbjbm!+Dtq7 zd7uLP``72Rf6eCz8UW}B!eGM{3&V~kS#I9GVIYK%X@$gH0NJ_w(^1*a3QHo+!4o*3 zK(xRJk#wcSV4`$+^&jlb4#%?a3Xe>g^851@R5M)(pAXOAzaRp13N%cs1uW=&|DHKh z=o$;Y{Jv@%$B>+=7($f5&9p#FN{cjBkV@3Zxb zA-awe_AUmoNOxNv`p>HVK^Q#zst{y=&iUEs6#96(9eLPVfBIT#^k=CU zN-;2TdD-MbHgI(synjdV0xK>jfsiw!fcHgzFGDu(Kl&3l>GP@qDw*Vv$NR@J&UX}_ zo=JQ*+Lci4>Fqu*=K|0i5# zYY2FKnydXd$3I-)-)@3v0r(+&9e$rHFiihAU4fT=1S7p~uw5JYF!!I%0#1Beh~aCC zG5v{X{hv^sf8Qt#H9)2X(;(>nv(x^MF}{gE>l9N=wF?9N$6I^`pOyqLaWol3j;R0T z#4iNT5kViUdRg@Ti+%ooL;mOC{{OZiPv3LuffCKI$qeC<3Bg2A6vENHowjBln(O0L zK(vayxSgUYxm0=%e<1c_S-U5t7GzNS;vcpYbWOzf4Ty|v_}#r2boSUGSX^Kz8qQ)L z#S_3U3iw=wy$p*Kw0=SJ#g_x_d#PHflqI_z(LcEPTUvC0AET@x>gj>nZw*QSBvjtK z-CD6;tLtMJ$x6FKfjmiGB&i^`l)^(hVJD4vSd#O8e=1i6j-@GmHOaf}QFaQ!{ZNIu zVxI~r{zy;G)d*e&^#krWIu&*4cR$` zK$w*@-E5;jn83u>V;?T20oKnd)A>IB#m&L%rDgvKY&zGRZEw0w0Kg?CHLMoSH!$;t zl*;*U1yGfl_1y7t44ZpT*;Ui|o&BAUmo3gsjA}ae4|+mXn~af~hJ{|ZpB9nLFE%`M zDKegC&^d70ySNmnG2nU-ko8C|d{c5D;Ipr+v)&hcJOSbteAZmk2OItN$#@10vy&fw zfTFBbau~;`oAT-nzI3C5ae-ts`BXTc3$5w*(u<1$tdebB2k5VkuTw%#_v=yMJLj^y zz7%Zq2u~9HuPaSn2qvQ`@q=;nQi)*5xWJo$bLz3nC^F#~oiUQ8u{Ve~D*5s@{D!qp_xrz4QbaKkA!o4V{D)vy9=Iy|Fn zWZn9^n#+i2Z@j#0z@p>JOBJ7|>s24G8Je`$f8%+xh}`M6(d=%l+V$QP?WKL$q(a`~ 
zgq*qEp``lZMh}a1)8%rOmPV`|%lJ?f7XOB7gpQ6E9y`-jIsBhom>=F?Q8R81r$!>< zfXiFpw(bRb@=pQk#V)j?JM&g{pLZ^J>=NWcoLx zf+sbVN`Dx>5H$;0kzCh>2B5xOge4J6tsRtg_UlpL0`}lxLdm8F;$(s=XYA~GhWp|T zv0$NmI4%=`)9zw8t8u^N?fFJlBq@`}1-v*+xOcihK}dn3s7#h=roL9auUd&)q&F<1 zAd}lW4UnOZk1dsXf9-5X!`^N-dILbx-PtW0Q(C)kz#GDBOJsH!qspD*-g%ZHjC}A% ztIw*Q?lywIG!DH+fvlG3w6Vb#vEaZrM@5;74(|m)%6U>U!I(5^LS6RWnW5WblUt~k zp9%DWmGjK5mfhIh*5DWGEI|)Lak?*Hnu?V};oIv+VKKtUkWhXO8UtlWitV$-KchCGo;7xzx!&I;+vvaIL}ACvPn@5hsh; zTmZ1T35u&h0xgbi$?a>LTN_RFI$C{8$@;xvchxeVwR%qe?hk%az>+#eE<68D_4{WY zAZWs!f%#~gPnDnHSYKk4e!YaF$=&{ z32;12VhRA^=K=)`UYA`=!gwE*ql1Or#t$ogvqXb`veH_MP3gs2gBN09cv8?-Jp(LNL4feLs9Hw4ETw z7W8!GwZD0HdH6F`uhnx`a;w-Xm~6GpK_S)I#-jI#4oiG@)aN4*qvMssC?u30;a={H zWeN>)&y;w$m_WWee(o-u7_=3ve0p*T&~I~2PvNo@rimcdFbHaLKK=kG;7ZN63TfQ( zKbsu0<|_1xPSq=vVi#Qw3R_G`1YN%gV-xdN>T{X?+3x_5I`(pNQ6*n4XWT$o<;E5! z07b!IQ!j;xq3+G*jOh!OI2ct|Jw60pQ$taBjx_W@tWP64E{c44d3Y-l0pgOUKg>EV zFM@OuKBi!T|B$csM=LoABPkODu{1kt@g;F7F8n?K0mU%`l*{NfvfyS4wV@ZtAlQvy zlf%>Vct*Xj-tf%#fN});xr|k0KU2&|;PkCK7|TI2HFdh(N1xYWueK>-Igmm;JX=@{ zd8)bo*zC4CZ@N;+$U?K7q}Ahk)UbWy{^FqZo$dI?E0TftHmhxUMJm)&)l@R7OIx#! zeqW%HVc!iI<14k2S#t{zhr5<~zT!J(zF5oSQQi1&fYb3IMZ%1fWiTKTJz`Fb@$rCGxZV3P`sYYP zNf#>1bWUvW#)93`DVD`#B^fF{XPBsKoAV{Ili|cJIPfSIUB31ARZj6tvC>9=^e&MH z?$yq$n%4?JXziJ<#j~+wfB(fQD?X_um*W+sF^fitwVp_cND_gyW0|^{s@wDJi5nIp z)!%?-raZ^%arauS3^HMUgm20CH<8iXUl=FRwpkKM<0^f48b>W|W=w#jyB?2M#Tb8i z;1>;0X=>MkG?ierrq@tJ3}I+Jx5w6ND#@fMG!mX-y{0#$ypjVkR0R#EYegVyral0d zDQ|S>b2c%i)6FkMrciB_HZk$FGoEnfio_R`yq0%o8Fe}q_z@btq)i$Q4K-(bR1YYA zWV{m%Lf6B$DUeMOt$BP*WhAZv>Y^Bw3M(zs^^t^uTzBuScp@eDREvu2^FpvQdqQ3W zMk
    kU4mdv!`bQ>+YdZBE;uAP~?r6HYL*X1h)kq+rw=agGgMJN1kt;Yx7+ygI6f z3XT8Zy^`kkdAP;+JMqHHr;7m!OU*-j2peuH=8dtJS+7(k-cT5y86%|OolO0TSK%v< zf{n3Qox_{@0$aPY8>AFYC**V@1V^W7*LVy(tIz{V#w6wnOyZ3m4|YPv>!XDczJvW3TwH1A zAvApUD3=csc{wd55bqFMyWgXl`dnCr7Mv13?HcN8A9?;F<~w92;1s!#qZfIBM;S9$Vu)Q|-(7}ht* zBGU;yXlWT~J}^l641}` zDTYx@8u>cY<)_`3vr%CZ({Qla47@VwiBseOdB)v)XU zs~WopcN6&90^5mb5l4Wumv)kY?@Z^bXRwoH zK*43+((QCH-<|9kY@k~UULI3M#-*1X;_-|kfeE*GPS(`><89ZH^T^R_lxJfK>S#{^ z{Aga&>Ins9He`OLY5XwJnho9h-KMwUZIi(9R)i@S_eDww$OayQ30H8e4X zGH}kG6pui;C)DVv%`1Zh+x`dxcVcTeC2tWEmdzNt^is2OcCaxtu@FRag&WztbbVAJ z+TZMQs2XTudUv=~|Im%1Fy_#)0;nEr9-~C6+Z9dH%i&$O3EeW*r`ckFSoQvNu*UJ} zQNW1TLmYBOPNG_(B;qC%?2?Ev+n%^kADT;@1z>aLB_yrX^xGMjTnE z9Dd-gjg6yKg6u4o>9puPB5e*Q#LwCoN4{R!Wy*SOEKv}LU-L-?Xkv)7#b8yHBpAy2 zEE_L{Ma|9sy`IOYHepEk&V-&ns*KMP*eJBq#@@qc?0}yo)q6hBW0SGZBT`hS)0YH~dyy6U#iNHj{s*QDEBocHC5<#@eV}63#Ox7$)WkS6JjZd}_ zku;H{3|+a{9w;C~+3C)6NQ0*s)Zp&&_miG1j5~L3b|f`kqk9~Kf>t77I74JS%)s3r z1%r@EH2gqtR(eM-qH<{Dg1OC+vqCVEbQfxo2EV^+I2Ix zXK68)w+Fi_I{B+$r-#Ay&W0LH@?wj+J<7G!?3d^9Hy?K&abR{6Yk6^Bx>!=^f$sx2 zC>2b2F|(iK)oQzsMsU_<^OxaR*VlJyEEymXJK?*xNHd$iqhZH_1DAQ>eQ~yAB-CrS zZ^hciYB$mUsIS!iIE<6c4%~IbOZ5|eUcc3j$T3)6H%gOKF&x}dXJyusuO?nOblS-^ zNT#t@A(Iv2U_5O+0xQx+L(MXqcik&n;{RQY|Ghl7Q~Xi|mb@$eE^$W}7ni}&fZ=PR zRXqj~AWV>I?)eI|z5cS8TjfXFo``gTNlPWsVi0d9-7<4;gy1o{94!p-Rk7JJ-kv$$ zZP2NiavuL;(kXAj1n0`8u|n7RY;u6Yh%4D+k`bS!xQ2ur)CVByNRV)p0{7PVfkd@j&%?K@iSH}OvB3k%l*69eBaRk^2mn=@jD$O1+M zS7s22Q6=Dpy{9-W`|T_GPLIrx%I!YJ6)&&74XGNHeKU$G81#RZR-FQXHo(g;0w7br zO4Z#-!9ly|#PcCD5^Tc@f?+*{fZ&cgON~O!=sQ#*QJw`t2?p&04}h#RXz#APzT5{f zH)@1wCFxA%$2jOTah0;nM(Ajltm$I#i`vF7eF)jG(d^7}FIF!u&G6U?$(hU5M+Tjx z33)xXATOChC^b6Ry}_$qH$l>gv`0*zqo?cD8j{#e=)DY&$uSxMsU-8=^SVt%ZfzFM za?}2}U}<~qnzf$L^U(|T!53YMh!f3do+=f(1tg4Zt=G7i()KHoZkL^wW-O=OL6}tZ za_QXK*CrxRDVrogDV@7F=U?Sunz8s3PReGyW z($u$Nq5K+(a=1dP)r5^hs~ELiL6pAzyHx$s0-)E#WD+M|J~m$^GjEpxT`*;U2SzP} zh!^WF4-x>Er|0PsSYK-F`J3M0QHdWwd8f%n=H`XYo`u;)UX$i2X07!IfjsYm-XWqw 
z3M2j@0P^*4CsxXLd=2~-?|I+x@S8r(Z#tSd`(fc3JsBmX_1$XAlwMf2Av{)B*uh#t zZn@zFCTtD&c=+dchl0gu&B#{t>G=ll{9!FKlwG8vF;U`ULu zM$J=3b_pL4O$oR+PK0a@@rMf-!duU_@NAAor!x{`!N^s26sOB-`q0eFep;x}(?#Mg z>@Fu5nH(=Q_nqIY)|i%5>I7@5N&{+Zprv+cgcPe45;LnmR!#-bwKd1>iAgU_aK!cT znSl4bWY5@GG#ic6c6yUs6N-xDzgYkR_A}X8D^I(2#33VBIe~2d4BpwddqD8}&U2Jy zDlce)(JqFv4HvxSFrUm`4E_EoLfurguXz*3_kt51PsDnhqeDlCy4PkLZXoU5@N z6^i2U5ECU5Kfj}2E^Dp;kNJhxxL=W8|4ZJ#*3n|i@CulnjHTMy563l+LI#?0kdfVb zz8oFhvHJ1Qt@tnw5H}|Xiv@EH4PZ&^zB}&c1Y^<`;4j&NnqbP0m=dXJ{c2c1@N^DBz}`6s3_T9IeVEf+x+H8?TjS`b* zmXJeaGx(#AFK3F~n22%#R#OVz`|=KTw6Drf`80)gEEWI08c#5EGsCR|E{4Uy0cqN6 zQbETb1@`D4>q5%oDXA-s6)fr*vs$-I786OYv?fY_kb;d-xpD3aKoLMpOMbx%?E6@4 z+z5ZEp#Nw@0!03P+Y3Y>&`T1gcvW22@|#)vVu!w^4+{;SyjicyA4w#Y(|V&jmSjMR z%X&c?=rdCcPB{MAT;L1}`2gW(90l64@u;?$A-SEDLEK~mjXj7AJ={FkvrMUKqhI&Z zG(_y*VQ(tF#>DN3?r8^zG@>U{pnLhmK!i@YyPw^+heYZZo%m~+K3>PY+_h5~uwQ{f z#$-QBPLV56%J1w6HtH)v*_JNUsn3vL)#bcstTJGwM`5gHQ}-`SW;6s^UjoV@jis** zb9%$_s$1BmRcY~5mc3%qbQbOG?3CP?jCEVr#@S=yG?osn_jtXd5w)7j^EQ^hPfygy z1I;G($IHKTTNo0V3xctFRh;EN@jLat=5@I$t$?B+J+J?~Ngto~K)J0^)Wn#2Gj~r_sT+v)I^Sp~sHepsTFS zr`kO+(+hmr=o53?rK~FuR>wO#Tf8w*q?*SnhDhoMK7V-UbY>hsz5+Q7 z1zVprp+}aLBr-GDDa+tO>fD_Odb9eP91le2D)e)DThKNdGXfp3j>b^S_3|X+6X(iO?1Kgxn=WngUO(7C!WwY2@w zURt8@WG)hWxQN|=fZ@Qpuz#dK5|%RIN*p>E!d=j4`WY6|^jBKp`B&dapp`9!%Am6# z8yX<%e0~jc9n`8Z$w^9M*YpooW6(G&zje!CxuuYZ5N=&aQUm&N<<*IyB7C|Ue5F(i z-_?O$p}}AVLR+(3S9-nMIcEJL@iOgI-55O za%G2|GhGWkR0K2xa^DJyEB5$3Z+yF?y)M)U-Q^Q*ZB~k3Kur>qK0{(RGU>{BJ>PN`Tyt;37{V%Vp5Gf*x*28$$`4DzJAKi z03D1>S2=*P%J3E5pw-j6M_RY7#MKI6%)g`e#8Fsz&kemS2Q&c-BCmyPMw*-NZV zPl1jYbV(=&gyHXuq;jR*xYJ*IMGG65%Y_#Fa33{@@|{z=t2K`^jcW7|VKyx?J+9jW zSxz!%N?@W@e)!pxv7$ww(+C3FA(joqQq!{(=mw&x*6VqN5HJ%*UyVYmlR0GdAus3-m^TRTA8r8jHri9&@Y0?l>&X zH-ZeLpIz^h!!jYd>$c^=3mI8iVFF-EN7b+5BfG%=KUwLN-aV`cYed>+i+cBoOgKQ% z5Vp+Yz>oD_r`bpBBi^vEs~v73bc*^a0NsXHstGJL)2EOmX?|1SsvUf7)c*X16B4NR zycD`$tW<+5_vIO2)Wc!YT2Fr=WKC*F5gYbtsGid&Lj7|nQJWnc4*HT@4wVguvo6G^2)lMBp%OG 
z=mc!m4hGZ`nC@H_vGYP^yZ2dyNn~z81ULne!?M?yUbO*lRbsc_7E}k(S0I+SQ0Wd7 zYWJA}D)ht?E#;hIW)uD>hw!F5iRGgXR32q&?U2~#mu|T(@^IP$RFLmS?=P6$FqmuY z%(7qF6-O=H6W!i)w9G~LMC$T4bu);cP27BU+)%V6|E6JDLDdfSW`b}VdhW7)%H?Ks z4VFkzrxem8#XYE1jEraiFT(#D|M+)O0a!u@=|;Mt^3t&IINi5msCz#x%oIwNTQ|eV{q~a_c){SSdwSqFlCUFfk{4`}{(=lO9&KHKct|4^b-q zI@HckSHOwr|FHL#QE_G67HA+4Ah2Ho)$zW&s{0P3~!;>b8+jvUx5VpHMtlxDia_iOb+!9 zMSDT|HbznqKiD^#NDyp#mZx9k8e(XJmWp372}gQM|6w$&(b>Yjpl7hT)j|aZ=_CnA z{cKS9{}pU;MC3(BAo@^1-K16{X^<6*os_+Cj+(_%C{s5sKSSCJ%x;sI--_Z#13*!H zr`y-iY<~%y{HoTy{)5+)N^kk?Ug9jSpaVuE5zmA^zvn)O{BoJvlV%*N3xD)}E#&@W zRLCrJI7SZ0{WK1s3l=HC6;ouw1#=rVR{pTgEaugxORoN~0pg)ETh3@nlP&ab!gjeH_veX&e_G`DaTLAId?QuUjlY9*+dosX~ z4MJyzc7{9w;ZG&XfBYbdF!(zy>=;xRH9_)IAR=}`MO@_)SA?}bt*wC5Y|7kiKMKmF7npQGSB zV=oY@Ls>F0OkK@fjYO#k<*{NGLfm+AiN?f)A~0?JUUZf6eh zG%hvYnHhtlR#wAZ<4rHsgY{lU{-wwMYZTuqlB3W&jzB(Hyv)qxBFT%wBcXPvFdyPt z^35w+V4QP8qWzm+00fw*Q0SFaH567lJe;TgEn@z8`S|^lUH|~M!d+*E`xg`TKhRX) zB>?}lf(qLGcgOk5e@Br4P&s?6an`@__>UhW0X*LiScK`nf%Cn9B9H`P;M$Mu|5^0v zACHO;Apa^*af<%dP~1^KH0rGsAND(!{^G0jm@AVln*f^ZbwkOceQDk??N^(*79^9L1ph>Ms`7 z|8SFv7{EmMep&xB!1NE_{kR5%Bs&6~n!iHr{KxYckO3y*gcNoDj|=^7$74%KB~Z3v zO#urv2wdUgZ4Od^@jF^x+*!QiIzQ=@B&==GXurRO)%nQXe`QI=+ECyrL;#J9LMA~) zDE=$>4%y@GkU_|FX|hca44c6w_z^2W;pD)h8ip4Z`|@iD=YP2KAJA?Hs82A+@}X!H zSpaNmydFTp0mm#P@avRw20z6t5v71*)MsGA`SFVP?N+9*6G!OHbRf-JjnkcZSS)%R zig)x^K6VaIkC8_Y%MdA|276v5Nh~|ld$T^g^wtdhXW|)br_JLk$7$oD{SA9ixfdo1 zu_%oD(&*Lzy$Q-3^0b?kzpW$y5kE*I5MhH1tIg+$)OAVxmu~dmqSCL6lF3!vKbFdRl9!objDsc5JC7nNn=-d1~Om8zH+R zzBR~WWYMagb|VJpwXGomT{GZr0{aW;pr7yN^S|y!3v#6+C=Y;?sZ1ArDX{l%jHi^d zcvA`Xd3ZIfUlTu5x*!FpOJyG&9tv8(Q2jeS3(5-;(09r&U{u-xSacA+%)(|+j6j9q zB2|N6WW&E($;iilEc@EFz++Zo%50pQUzPnf!gJXiJJc617CNj~+rylLpD=?>zA3l5?-4jwFM_>-?F%0u{r0%-#&^sWQl|#R-?4@K0#1kj^p02Fh$ad43 zwXRBnHsZjZ+#BnMT4K<)X<*8;m;Z|V2#dsG)RjAouMTLBv1f^((~uJxhoF$C{?rNO zFn9Z@*<7I5KzWqyW_?wm1TX}19{pbx4PLl7<^Wo9MlLT=`~Yp2v6+JfwP-GYaWBy5 z4F9&J{^zu`$~VOwR%6vXH1Y5%x2q7yPB${~C@~<0e0|2W;9&7R6yF;#m1PR~OaKl% 
zCG>}y;TGj*PHSGF=&Zw^VvzDjT?Ot3^J?LQJ@>nr3dXr#)~D_t*HcN43=SXD=p;tx zrG~E_@r6b*f8m$;IF|{`__fS@^o7Un;5RwPaeCE^*jKIhE;|l6$_C%n6LRNXd}=Zu@LVXs5-_^SF<+L6`o=9U_Kse?83z?0*1vI ztO3q&Q@+25r{{1d^T!7^=%$YR+CSUcyh@V>Yc@O4JOde8&jziNV=26A%t*x|q*ye6 z#)}o9Jswc?QeTd8cN=yD7@m+vytby&pIEM7zP*5lLU<#`DE}MST!{EZXWg5(E@Kqu zIQcAXx4*&-NR|!vwbt*FU~wLm&PyWM=|Rmwb}Pmn`z=UbO|Q~~!DH14$7w9Si3^@` zS{@ltUoK3JOlbq)J<-KIfuR)+d+QX%*K9qh&9sCSM1(>5&WtoL1WUioC(qq+YvR^< z&#b*eJf7N~)J5kp2iVt|Idg=du@+f%Tby4W=rlPD$J4N3GHQ#}Hke-7GBI3k3NQE-DXCDF19d z<%GRJ#KCNF&ATwHc}!os4o_Ftwx_6Xrq%#2sgaSO7Rjsn6|fj@5nYw56V!gAkEEh5 z;qn}%`gapp&HixOyM@*@!t%?u-kWjSx+L`ITd`g3jh3vQLrLehfA8EHg8j`wFZW$s zvR<=S)*fB~1{hcxj$tu3^t-t_0x7lmJx}91J04ETNiuHJy{-!bX ziwSC<1k`1E)IL=>IXRd(x^von=7QT>(r`u%NOUcAB)c4rEiEvfSn)V0`awdQtzMr@ z6}#HjLaf|3II7;21BtReL)*-YlQpWZmFSdXwj;8{;+&WKKKCJ5Bbf@%YB*tEXyXD5 zK#gvdx;?7}5`c>wu(Y)vJjI(&ns0<{JS(kDt8SKP6(s7s*_|jP6Y#uMeJEAl7>MW5 z;l!IN&zu{)u-gNmcnI}B2(`F$d7Zy9WnQ>@hPj%#>6w0$iNIlTGm;gRNn(yUwfGWg zxHX!2rd(?_{@&^EC+_X>O3s{*_gbX!D(+sQ_~^Wy`bx)C%hSUGm&H;|s7(MO9_K2G zTe(?pops|#Wew76OjN7r=wBfG?58-?}Me|spE3D6rA*=2KSD;r-$)Ht}WV6`VtzU(% zOu5vp`^`$rxwQd=O*8Rf6?-@ls{s#B6MlVA7C={JHQ$`l*+#MDD@+2Cd@aowDe#!i zXCs8sOeBenN`RP0f22NRwehILN@s9BU1IszkMW$HS8dFASaXQWl}1z-v2h{2=l9+4 z^YyMw!)Wk!}^f4YyyMI6QAj&)qIO1Rjti}`pK8{!`>*uwbtk? 
zKrF<)L89?|r;y@`N^ZUegTaE``;=hDH0JEM2TD{_BG_S*OQSh%!XjZPiFVcxY&>DGLJ+ZagKd0_BNpa;}E zWN9_Q%-zx0nRnjNCZj3Faa;aN4fPzV@kS~3wp>U&{n`+8??6j`3k8LaX&u4FO_mD02GpAc*e7hQ<0h-nbUrr7Zn2N$;q9# z-t7U3@=>(4Jq#wTpbP;|iPkKSjHmK_Ikivf^txuHsTsW-CnAT)0g)4t zh?vg{^>+o>>zRslcCFU~scppgVL~chQZbs;++tUUOP9{e-EKTieZ(H;zp|$55CX%+ znhm!I@pA=zZ{M}tUF2U_&W>GJ>aA6W`&m3g^TER2(=J_7sD&2OyYW=EdKQaS(A^(w zg{24XS8p7PXD>%zdqhmOrt5n~D73OQRd2OF2MxyyeD>^BKtbgcD9j-a>NZ_{IN8p+ z9Wg7TUvbUwaJJCdk1g#G&+UsD-mJl4HiJ#)b^dg@KQE5UW}FL%FDm)^z<4}85<+hw z8q%obsvf=U3`VNFW~4Z>&J=ohYUv<)S6Z88YJDvMwp%%B2n@&2xVpb6*4W-pj?-(5 zXOLv{d6F*AfQPv}%0eD`XIr_k#5HX~-Rin$*D4Owi5&OdLWPbjHcnhu8ttHmt6bo7 zs2+)gV?RE5+DWzQl`nbkto2^8ZwM*4tohskY$EX$ktN3>)#5yqGX16rn$}cTZI=~WwtwW?+i!czr{oWi}T@p;6!!S{Zzlo`eSE0pjHQ{bjs&yl0xka zDVr|d3iO$2vCPX2gU?7i7CW#{#9 z`>^;zZW4FsM`gjprM3&&OI$t4XuFeu$k$3*>z+3qoJLUrkzw6y-q*jL-lt55ibMZ? zWD7<=jx^LkXIQFMtg!6m#HtIgP81WHuNZ0Kw%vP6kIS@CU9A*H35aMga+0>?hd09u zV%c7zMu#5?c-kjT=*DwPE7yiU0fZy=NG_e9&F+)c@BoaA;Pc{zis}7uDsqaF-#s+7 zj4JkqJ;}q53LnBRbo#;vDa&=v@X^k}tS(&U^AQe@^>9X4Bf2#nU)*z=O4nvp^rP+P zxz36&NK^~P?$ZUJeS2laDxVgJ$+Dwh5wF}}Zp-x(xYjUeK>=;S@g(&bL_BvAk%YZL zhm#gj?8sqlrlU%)#BY3-v&Cgw^&D!2EoMguYg{fYGG;TH=V)MT<2|HW8q&e{i#0s6 z=$)s!hn*#DPwGw4#;r3Y+R_v<3E7IxS>|^Mr8SOQE^*w(li^m4ENY}@p52N_IrD|y zd>4=8d9KSo{7@ivoazmnn2uPIg`g*#(Ioy*f((-3U%)8a3_w+ zB4^Wa>6dZy4Awoi#axAN!c6=W+tvMN&nnHz_v~LX1B1jnd;$1DvWL@Nc826BF1N`~ zH;Sqx_#48c*;L!vPbW9PiB%5c;Q9Pa0|`u7N^RH|H@bRB7RQMdZq1&*=Bl^Y!v!D! z!tx&BkwsLGgvm{nZR31vjYK&Ci1MB@M8n$qS0aD_riDb4ADBRjCRgA;PSRljo;? zU6mLD#xvdGOkmV6+YO_;iNKxQhHC;1B*rP{$$CcRdrTLFr2~Q*FryhfIS#HNRDsPQ z{fX!_wR0Ee_cv$HJgWHy%V;#Nb#Zs3qZ&ZEj`2aQoKdY@Pjnh{G?FDpqHvi3kiPiL zJM$y|!)VN~jsPe-FWUD6+mnXnea~02-Y7Z~OZP29Tc5(jY%10GsV0vLTI;!*(A8>c z;7b-}?-fWMAqx&9K;Hq$@zYJE10p~)N&j@VICtE7zBa3%nNBur|NYN;*UZ~RMKB74 zDYR#n7tjoWR<)#0Up=mv7ft!A`C88m${hPu9V~WTiIC`u$jnf}aOw`8Bb{bJ1r_!) 
zBy1=<(1q!aTs8hDQ|$FE?y}G(pd*qsZ9Y}P(WXkYe(@M{ib*X32)!uf-H|a{09r2O z85SvNI($jT=sD^2!4E+XTRoAT_Bnm^F%$K;(D2Up0O*>b+KbhzPzfbREG4V+b8&I8 z<*mJIeS$Kz;%kihRh#a`#ag$MRmFChUl0(ly|>8=5>_=g6Pjm+xNhFE%*( zvNiEJK1$My!?Wmxztt@hF*cSG^s>uT88sMmqe1OSW;0c~E8yNwzgzs(?sPG*VbBpM zHij1o{@k~m(!52dSA|h)w33a6Uw_`w4Sh+S`8{K5XAbr2@<)iNW_z+rmtk2BwEb@vzPET8!^5BoPC6~Y z_S@wQ$DD|*3?Yd^SUqEV1NgU%hOnu;}i zb07P$M=nWqGEn727+~r8cBHdvfQUshE47FpY3<2o?kFtXXP7^OAx+ zZE{c~z6$A*W7NGp7>#lL9jkeV+rVcQvWvBXRFmeXx=l<%HWPYdulkv;6Ed4Lu*EVd z1D5_>XT=J85@H~leO4{%eyC7Nx^hRD9`@S9U}6>@y?03MN`&2bfw?RNX*$rc1lZv8 zn^wgMn|qTFD_rh3T<=$CKWGF*2JZ74)dXJ7B|4rUTY-o!JFoU(9pfU~N~b)OB}}jw zcfFJnx84Uh@J}j>hqgD!HC?iYbW@y^)X!LzzC7@_i9*>bk@-?Q#^lvA-^MgnNlR8< z>99dZ&2$Y34L^^;*rEM$rUz>-0G?r91Pb1q)F27dIp$+qd5>IL-?9eGO-ZB9gUNj- zWa7*@DmK~;v&T=ji}Xh--+Zo>GwLRv(;IJ(klM}dl-MgRAK$))N(=zt^UlrPebW5V zCytt3%aichvx3W1am7b?o{nj-?9ad8oGcPX` zjcl7h{hC4{$L(YPa-tn?PJbGfIjp21yQxCW5e(w{=nHQPwic5IUO=f$O)-V$z~vH< z5WF_vwBE(IaDj`|tv3|?oYmm*g_d?&MH5gys0GxKY4OCg7jG}N??{9SgvzyRV{b2S zWW=56-7<}o^|`gnl1E~ox_L@Jvxho>`=Gxl*me2e_bs=01`iy!tFR%}+sr0-T2K43&PB7my zy_Fs%8-F8u!%QWYt@@#payZ0}B*PYS;7iWOtM1HTM_wkLpR$%c(51+ZDJ5xXyXO6# z3b}Ds)Mkq1dMHm;b5)ZLCZuw2zGP!F=5mI^3Qu$ynGfCxg@e_?xa{gu5)%`39c@*? 
z6{ZjUj=+7xaXtEVy4ThneHXs$lg)ml9VHv37)D(owO@_a0=XHoZG6-NUxz@;&z)KJzOs-MgWp-6?g-RN z;~k-iqwj{(f^k2?RD_h!r0K{V>5lIgqS-1+0}Z|>l?+5SBr!PYfL`Cj*UzgvSU$p6 z(;@$5{L$e%TF=XCdNQJk)g1tuvdTS`bXC1X(M?RAZQ5oGLbIMZWJp~Ts}x+mGKc3 zLjZV!$-J{cm@CGX)}h2o4Hjwx16wLWBfP{h`a;zYVqy#}b|kB2`9S2qdj2bL=m+2eC&QJ`CS=2CKj%<54RX@iR3l7M26NuE64wl z-09}Bbb+?^Aw-Gn$C(hz!{93`X$7*t#_qmij$z9sy_IZ5zx(s;m7K3W^EK9z0-jgO zzKKrnQX62kf!G0y$l*5mwKkryfB3SId|?&CPWe{ORhO^F>#u zv`H>&H+?U;*B1S4=5pC2Kn}6|@ixAtgM9VBo7ou<4YV&Ks*Y28tbs9nXT*>iDB(s!u@dw`bND{6vBq!_ zR`HD21G@ENKnDr`h=tP>)Jq1P+HApnLQ*+&<(^8aSOVBQ%8)7Q@g5^F@Q~Vty~DYd^uyDPKC@~}dy?{tyW8Tg zq>F((wLljb_X9M>a}GxO-VZdO^G&Cc)e|P`-E-WjVpCV3MAM!N#kU#)arT1m8~u8A ziwE-3ScaPIR~OZq&#k&NpzX;ADKT-g9uXE^8L=SaM&d!!L~Rsby8ti_1(SZG6hP2f zeK7KlB90!|io;2h-$uko!LC|i<>1q=&30>^Z5!T7KgVS)YD5DS^_ z^p-gq^)|wHGCCYgWLqax1EatoFn78&0GWkl^3)|oNLVIn+;;?2;}!q znCMNBY+iC)T2d9t^1h^~l2G0%)2<|iUAY$zFBEO!-2bfpE)Q{Xw;;q$DcgyLw-3p( zP*e5uQTUj)=HA<RjkC_ac|O5`J@?K4hPdH*qm36sDLSss-cWrWza5CWRjB>E!G9J zE_l$cc45k~q%Ao@E`-r4^K##apfyk4v(sip^`+Pgh|ow=&&&8=Hr(eb?*_7!*qqI< z1QPb5@Uzc)79XJ@T7X@1K1z-O=19Z#sQ6M)<%&8fGP#!;f83@0`uEkq@Iut!ig0YL zz%Wcz@}Phkto38H6Y984vq<<*q`n~{s$2C^59g6eZtaK3EB{xMK+3lhgRu)D>5_JAcv9InZTzWwp$LMB>G(ZX?s0Pl897`c-EJe)teqdI%x5@52_b=YfVDeLtQld006f9IvTDltb8FxW-O$CoaZ97qX_qwbs1hw& zOSWx_SvGEcWK|+ZbuiZs>+FIS0vhTzvu%MIA*`SxvO7~%UrPimw&iTeidV{0=qqJx zvDeaeH5B+@E6<&-aWF_@;nxqD6l(ci_yn zEF~RB9YjKNJD+Fiss=Awe1$c`7Urj&(y{|IrfZY|f99dTYx1*}@RurW&EdHMz+^QoC6g1aR~* zn^~B%NQ1`(nM^qnVy>l#rh)L@GSq`Si5%oKxjrjW>9W`_$#Np<#su$3sRsnrWohWo z1Oo%RC=u}#-LaAcS3JlZuRVoRV6kI2Ejw=!*~3P|&``^xD`um+6Ji>y<{ZvrEhUNE z-lQ{0tdWQYE9FNKn!%dFqxear?j zw8)?qhC3XL5qQ-hqcJ*J4$nUjVlp~Zu`Lt29D7_dS5RibzBhJ5eB|~jkeSW*Miw`2 zckN0VXAiJux}9=PFbXZ$8{SUCZ$at^R8j^bJp(XGP%3~-rSD_fz!yf*O!5b@hwn1N zCWUassT6R+K9wy4=-Zed%?x1gjJdvanc*{Svxrhb9hbem^b#N{ncR=WO8Bz(!~@P zM7cdt8B2+VnqZ$ZKAn(a~TSW-I13^|UR!P>3Gy)Rx zm=`8+*hfFi3`T$AV>0e*`JSfThQN;kTj+1ykQ@TZ+$DmJ>hfE8(_n+>Z2u!NMl{NI z@6>>{Njz-d0NVBLjIEetG{Gu==d7AOATp>Su4s*00VsR{Eqq{}la0r)kWsMMtaig8 
z3_Tb+g$C^9R-TASb-?Bf4>hNLy*ly!{AI&P*Tvo}fSFa!*`xQ&-%YJu{dqepDhAR- zW$aR9bpG+}T}Ey;bneW&I^(=G9Yy3E`XTK~wp%DGt+vTUm~lHP!>-R9HT9=Wmf5Bs zx#%3^r`aayjYXPgO9Ff%{6$j@ID@F)(P|l>%kuyVd!&io0F(Wa`9e|N>9@@Gj!vKj zqH2m-Xwi222%9(esyJy9g&2BXH^ffFelrUko%vYg$n3dul<=kwid%)!9f#C!VnUz zwCHa@{U-X!r^~mja2GLRb*kakgnYFlh69LDC%Al23DmgM5!#%dZNVE45;L(mZDca! zrHY~OZpsDO`4PhCDh)$W)(h`-2@v8|opD_vLXXDN%*4vh1* zT=VIX;sEgZBITJcO!_gWo1?uKsy>RShAlT8~ z0g<7uFGfaS!4 z(%TxQp?vCVUo1s9Ugxe_u{+sKl6;l0UWPO$knzmbkqw>-gBQ8lyw91axkajONYY>F z2%K&+cgADiS83Q*moAGTDMifJjTM=^t;7ihkKtFgt*0fSNtAFHq6-OoVUm!g8r+nZ zek->(3}`u53s=ELSaXkc@#8W{=EknDqv`y4F2@AwGgQcs(@v=|Hz&*Aj|B!#kt0mmTtfm!v3c{0$CRWkGX{bWzTkp%V>EYyLB9rBe-iAo}cK!V6lRZO!m zTfH1)qW1RKsIuCp^BcSEr4v@HUR6w11{w>{3?Xj{eC^)Ko2yZxoGv~9L;qpFmKL-= zl%A;kO-2-$Vu~D$yPQ@c05t_p4Xjw0%}v^up-Li=^b7Cz!ay$^w}surVF4WaLY)=D z$wPSyWuQ8tG&Y%d`I9$*VAD+<3Q9qzl2;7JVrY2lLS+0dmRAd?p&^ifR14M9_taFg zD)*B;l5=!wO@i!5?ri&oGD5=x+D!73j3kkYKTWJEu9R)k`e&mt3Yx5ZxA=bx@5Vtw@j z3Ng}@FLiucsE)QMyv=6i3Ar7}FlW=vx@k1y#v<{#RAc602)YfU?F@^ltA{K#k%`heO!`16T*#_{+ZIF(mTn`7N_wtNDgQNm`>e`%(A4vhgG zgI4NdcP3wR-;XJCC)q2(W59XN3ZN{NKcV%=({TTK2?M{CKfAZL@spGsPAh! 
zt<5{jo7aU9?WA4jGLZq@11-O!0%}n?c^;EO`DDwTr#@ts{mILys1&cUX|OE15(FXl zfBAW}J1>^?b#*?|~owWjcGq;r?wWholzJgVmq9lp-={aA8YZj-+zej^8?gt zlzFgdx-{FI&#>^u+S<@)(XJ^BSPau?$LM)*>(313Nm0DKyv)w(O7D%Zsi?17eop8C zf$~6kOZZSNeNs#s)yN!Q^ZR!H((8#K6vo_4H3HgTB8Z5yNV^h2REAL~=G)r6Yx@=A9(wDd!B)iynEq2b9d}C-yQgwTLwIoM9oUkP zyXqAICgZyM#cE(qcHp_u2L_%yKGF-u0kwj6!J0{6ABGC7uQN4U6_in)9k-R7uUt!M zbVlFA%%;wrA+2;+Wp5(`GMvh&6%k;RoNQR^QE;ErWRINL_wPsBSDsh#`4eg*o^2tH z>?Q+g(zqqTZODcKL*_9(_Pg2RMm=^XKS$$9^lt6Q)#70(x8}epe6EKEXT&`>l!*9o zZW*#{vS6UEEsR_$N1T_p2-awvSJbc;)aP5Q3AF2*aZ*M_iVV#JIfO?v0sD`ZRA!|b zZjKZm4N z#hwN)xX5py&wJR@LGjsa?mGY4#pfk&h0aWgxAi*QI(MPKsY|3W35@Q zEv{--E4n`PH5AX&#THpZX`xI{rH+z4yPPW++mYHfz|N3U=^V&hqZGnwN`OKj^bl6A zG)t&es@Oec?v$o?rwfpSwVr&AT=Ce=h~+ju&ovPU94Dzr=52oN03;ZT?ju(N%cq;i zL~E#4S9$WIV^UBaUY+Mdg7$UzEAwDi3Sx5N^V4UB0C|A6XdcxTI!T8QqY3E%FLI!; z;PI>X-BqFDFu-qt(LvT*TAIo-lBDf3>?-659G*0Nbc&k%Ah({@m>}Wlp2IJSe(9tz z*mDa-@Xh1k*0xCVay>K-I5f`?0C|~~SP`ICUtLkFC_g9Xa-*|Ko}PB26Akna*n=sd zcuEkCae0fzgt@eml{uQccDf3MrX{OJ>3rQcD7bso;b@tSK|XSj)H0gkM3%SYKqrsz zq}?KBp{w1^sj+s|wJ9cajxFFE6XoL#^^(sCXP^uqRkrjd2#KOUlg|AkRaEBNa4cv% zZ>w4WD_Z14P}kw#x!bu|9-c%C6RpE#3%;8HzxfR@{1Fjy1TQVua=4E{f{G<5%#4Jn zbprAU;iLmId~EuaxV2thM&No+eTgAO+nzGL#^vW3{yg22A(Jay0S~98wjR!=1%n5k z)tmkOj}aw^Y=Q7JBSe|Ae(Zy{R3n1nd`m~a`-M7~**`s|c76XSM19+PXB8U6M@?nC zu4+9ok_O04P>-mzyo%fzN{(|nJf~qJ>Syon6S5I zh1o>aQi{>==rSHThXNjodz{vv`0praMZ;J)EGHtum3wnt@2`@pO$wc4v~}VAwxzlP zaYh|Cex-$**^qt3X52T-3=RKOuGgd<={Qvo!)~Grda0;&t1MiAZ_&62(lUN23s!--Rc>CA+rRL!9>o$*x!G|&f$Tz=KniZGl9l#4nLsVu_@Mlk{qUZ?R>f?;lXcWE56gT^m zj&k`tdbm9;(uF;Vh)I*7@>~d+bjngRx>&FK(2jd->=H~=(QNY$9c^OEy)MjG5~c_t zBW1>9mYwt~BIu0bKVm6A&Fp@W?aZqVo7?+7_iVoKD$pD8RvoblKnG2AY(A!z0u=|X zfohp&rPL?Pi)aW4MXmUxgC35TcjKoID2zjtVUNHuofH#7sBEFa{DF2LiKNBDMy_V; zvAF&7r^!Hg4AmWXEGAu%VleY5!==peYPRFW?krVX0GvPO`plB+$%Ju{YHb27fpzhm zC4aPP-1=&8ZKC(6UQ2;olZ$Z9e6=T6t@Yx3caGTAn@=Ap_4wZle@CB`koKS!!qOzx z_6nKy^ZYG!#{y}BQy2MDdb-`9^QtPdQ$ExzR|sS@RJ^2nhL5@_s@&w(d+yS>87S8I 
zsu{t~lEp^$5y%E0q|F?xymU+h&TE<(UN_UsK0Ge$VtyOPHG)H$FearC3MhUE$t|{S&Yl8kT|z&i^0}nh;m;ZOq*QT0#8|f^KX?P+|9>vVpWkfD6V5BP zwR%rEh8twr9xb)z_}=)AXN#|SKq^T4V03HZXjfzKx9;+N+++fkaK5ga|0EuMU6K3_ zSCTG)4!dykm#9!l%Ra}-YWEp-jq4E+9=n}E#Vy&JTX#EON4P!-Jt&0G?;brIx;0yMiOK(5kl{1;`xuV5a9#79dA#Tr`z7u-S>H)k#EARzZ$J z7KH77a&!Qoq^&yI@8{cGK0Ki@0D9(l46pN7BTWikN>AQhX1W$VX72Y3eqd|ljCEw_ zn%%8_8Vx2W>s?w{a{d*@ahw$hUz_j)^|S}kYc#1+@jzFl?hrS5HWk+~>*!@i;GEbv z5K7aih3;OFtxWz^#nvJEmCgL6DH&JS+B%&*GzppQweIRt!B8)v6r z@Cj^wXV0K~dJ7qK73ChdMn?@K?2rU?weU$7tb`KT6D^d@gD`rI!ob9EjEo z7Yp98(1dCEiwLd6ey6L$)*)v0v@%~vD?7zOs><>`7{_iLMH|E>_+46t0&a*52f9phkQgX!3^G-3hHz&ux zZzTTwikkWzAWZ$d{fNT&GYp^WZ+bESZ8yYUbU@F*{41V4BftLq_p$d`ik?Nq&+jk( z^FP1-7W^IWzvkAT-yY)t>`i_XT4t2Lox!l!nlJzSod5L+kto3N?0pCQ>aTPH{`0#l zke0wf@qhpOZuHE}NhqNpBuC|*VrGM$VdXO$WV@F9X*`(1qFCc zU_bmml0V-8lPA@0?fG==JQ-E9BqK6bTvoaQE(7e73y6d$0p)1CUl z;|$9bX?9rIV01i3B42adXuK)dcDpBy?ws*gX|ZkS<8uz@1=JEc;I?_cIV>fLR5B|^ z%gq)CG*mNd7MIfsy`Lt%o|rMqPEVxeh)dwrKl$I_`_9Hr&j zWX3FAx8{BHwcE6N*oSKK55ZP?qHN^c$ZoQYyn9A}0Q)RJ3}QlFfC%Wr@tqzL?*5*- z)z<6O_jam;aXcCh%FEkpr@iwojdM2?t|bfpp8?Ai<$LQuZG7ZmG&6EpI*j*`#8y|i z)O#64R+bwWe#hlscKHTSUd`Uy?cY45@B^KH)0(g$1!k_?S9^Sv7k(kKZ=1&<_3R5c zCq&(+$Yo($>l3-M8Tl>LnQ{KON`)LSq_brxInP6Cxh1^ZG|m%a7XBiWOA2|TwGhsp zN*aIT;LYNJ5_YG=y;QBmwEEi3W^uXs#sFq}&rH3(kOd&7XFb=P+~{(+&nco+ZIUA< zjW(XX;JUs0h0dml$p#8YJh=nmE`;APk_u?{V~6VZE}3=m0R|Z~#nYoilz%kFZ4z~W_8{lsDz`uQD@Ijsxc}NsFV{K?mV6 zzU!-~WVG^6u>G{a)xn}Hkh4N?c|xWDMPzb6gQBGz0Y!g9q5EH)lwed~K-QntIZlT- zGBAJv5r?v?a75KaZ{%=*WnbakhWtlJ`A!b`Ln9e#LV`=;QZflCPgE(Or9tYmRQf)ZUm_ni)67o}-T&AFa^WL04tLcCU zOLO+*hHRqw?s~t|^|7IIWO+Fg;R6M8cCJW6mDAcS;>DI9fHp})#2*W~*ef)>I;;se z%49Pg`Rc<20>9-bR%3%_yZvnicwWS@AfNA?TlxhI9~DI7@qq-=CS$We<3FQAMI10> zE|8AD9LS!X>7_xsTX+nQa1k}ti_~j%*Vow3ny#IaQJ?RxiuGCwmA?$AHjBYgysueL z!P1@#2v20vR~`Z={5#e87)&<&7pE-pY|~&v(TgomgX4HJG`#eLLO+MCUJFTEfY13+ z+mmvpUW?h{d~Hx*_`JTCFoSl@S9bHsi9(fpvx}lHHRoFtl(7;TzpXg`Sd-Hz4K~|H zxO^aKm8uZBR|vjgk3Pvz`S9Rbc{QAE<^$B%iM`o!^HHF;==xEweDrb*l0*cNT(L%3 
zEFpyK+csb|8Ip?o_U_xL;N4FPqFLM4hXjkMBI(f#!94i`J|GX|;!xM0 zMQYRh-K!1|BC49#J!nMSKHY#n-%FkxSsK~7!e-P?o3-JeZ!~Y2^iE#%!(hL^U9jz= z%1{U?>lWQlEpKHt9vLFyx_u6Reti<(hrGXdZOs;`1XmdkO2DB}=02BO@BJs-iz29CT;wz2X=+d|Xs`NELcbD`<($uDHPP0o+05wV%$^ zy4h10$D?$-?pO4Y^L)uRf=E2R5_2PW4y4v?&52kU2znv&+jqLo>3o6Vw9`cs+bl-Y zS90z5Hw_tL(0J_Ep2|DfudxtriGZZPy9p}sOqmuSjf(wE3*;%%TCQFBy~1}{P`kl{ z!?-_1&HKse@P|@5P1}Rb3l5u~0YFa*DT;9H98LOIpb-4oGpi@CSRMBkh(lwr#H)~& z5YN*+jT~@`GKtbgf+~Ff$J;f9TYh&3UMGV$x@|6Mk<6IzSb<>}IehgtO0f!p-)778 z&e}bb<8oa051t9a6|3Put9spDUc1Sr2m;7RB;(m70eaG|oHfZPrJX9{0;V@f8kMFo z>?~o(_$OosKU@7zvHBzx9v%vvLSwUB7+^w(1UwytjQeMMB?C>6iP)(FP{omG9SmX; zSaf-dCD358drXe63*(%2I>dPg^h^^<&Cg5*CQE4WIU`=ne<@PUsymohI{nbU9WC_u z6==IKo;FvlCN!ab;>Y5cK;Le2ahBc9{uwzqp;R-J6=2G$&eu8?R|dDMJAEmwRLGVvir(*?rzt6f=-54Tm-?QBibVM+?<`1dzgOG}&p zMBV9h9gb+yBkUCQGg(z6;{8L0K$8{hlTTY;rYZ+w8)j9uKm$A@Vyeejwy@8s^=WF=pI-z*;y$LJMB zo?Lub(TIguv_3z}50&FVVi1a424^JDNOO3^s!h@V_JRhu%bKHK!nfNw(c23?%VVc` zex|~5V6el@YoOl)4eWY^R&{~q$tt_$=DQSjd&qQNNA(u;6`9ypEgX z=|-b(&9xlYbNA;QPlR1-Z_7il2YvK#3N;t*DuALwqVHX#i`7CI=Yu78{azOwx*6J& zzIHyNZUNg(84Q5)4 zMDrQr*>|YeL!garT4%H)aP-(6hHx$gmt^TQod7*@I#5J9bIj&wx@i6n#R6OHHdK~X zk;^xe*H^X&DbuQnk>!&-nU6m5&PK&Nw4ko7?G2jAe3eOw$nELkr_YxU=XFLi~`&X=~8vd0rRWzBDr zo!Kvr`I~O17^C((0&_ih-OfW~_Sj0yq;m-k;dG8~k+t&dF+y=#z8bIq~yywDl`{wB}|B)!Q%5OIkomO1e{0TDntOLb^RjcZW2R?}G1le)G=rzBAAB2RsbJ zz|3Kvv-hmM_FDJ-xvr~8SakpRSno$6tKq;GQC}9T=`vommKy9z%3hF2i?2Ll)Hj+T zDL+mG&l_rR@x-A^bw7Jv_`8IKK5nbN*3NV-oI-_+|GCSW)nsiH790O|3G9YD)g+F> zr0;pB=9Ir;jOn76vV5pu$QZ3EZllrK4vKm7U7chjt6hqNcBGT@KOU@AGscZwZgN{l zH<9p*fLJcfEyra=>7*rnhEA*LgaLqI!#hfn3|QnPrD^^G=nIT`5tNh_>a@vvZ-e-Y?}0 zyF(*PtuJ2NziHT|BG3WB2-55928z24j=NmjbCvrbU+$=`;IjTNM(&~7mBNMT^-Qws zZnM{?elCqx-@57-3%)*UNUJ|I#6KD{+L>6Ee@1x67O4rV79-LiNeLpu0ozmMi64*F zgg23e<-CWKP?@*=0)PFJ-}Blok}_+TNiMUo^fN39UJLNLWOvLBETvZ1b#i^Y_~W@D zL+Q~So!_>wOh=M@^k7=I^x(jt%k(IaW}_iXI&Rg!5Ra*?e$l^h)&zN6$bw&xluTSt zcJstJyxTjj>I}3;Ng!ARdFgwwDtae&*OWop<#NK|C?PeKtkPsGaM<{#B=A7WL5oy0 
zY~Ozp5lYD-Yvie3f0i{~m`TZJm*A1#6O-{M29p1J7`LFFKCtwZ(8$Bcl8TSq5vY|AdXXfW$hxgud?UwjVEpyj%xK;O<@iFo#mf)TbTs@PXsgtt zzl~;@nCB(r3bcnUi*{P2W;Z5&9{jPoqmZ~+qzqFO)uSi;g;m(~MR*u`d3kCRZk@Ib ztvr!UW=ociWrA|3^(MBahC~_ne~UA-S^E7~+XL-P#!G;pbbS^j9Sf0L?v67SbymyIqLzMaGr7+-mP66aYA^`ayjFguimXIRK9ZX<% zDxcS=Ke%4<7PjT$Ni`^zacc-SLRb{PSgk2~TSa22KagAE6Qm$0CGCf`lR<2eDWAxK zhQAc8C&Zf6<28~hUE@E$+|HX7HdIKAgky%PZ1a^0FK6R6>U9J|ml;*&{lk=ia|{Iy z0)&3}QF*){e$N$qetM=jw&CE^ux+?Is7-J8Ys*56K_0+uOAZH-fq0B9I21!XXU2`V zEVe8C5=|sRBCGvL>39r^urK>DCiC@Q5E+XU_{^|`ki9OGfg;Pp$PfbOef+P$%Xp}a z27`f=m>8^%q2Es{zdP=fJPQvRHtv;G@*tA1@T@jYD9|!zKuC@Ti>Zwks4%a6%vgZR z7*2J+S>%a9?l#S4j3)_cV0mHh2z{mWePo-Q5c$fo<8h3PS~}a+$<`O#tZex&dYsd7 zhHcRH?y;ZT!l)}1-ZRoz`UKodFKf;I4ww~$d>PqgN&pLP#6toa=@|HeFFGMU1fJoP zq+4m?yAVLZ%n&Q0=2+@AOH@VDk{q6L@tfV)em^FSt*AC{l*;2|=k zHvlL{Xk`odsw2)Uo>@3tg!&UcHOIb=1&BQfZqozFx?_0-F(co>q3ZsVX@7Sw-7h6b84a= z^6J7DBW6+2Tz;S`>|PVxRjVh7mt)(2E{_GVC_>WTxV#(PA(sF_$>i~H3?R3Sy{GLw z3tV+fN%*U>7VFp#2>0BRGv16BTKV6(Lxe9SCaeq;c|EaASy!{icB`-Ew7|ME!Ux!w zO0o|*$%2Il66fWC4qe8mq6l*mH)Gq^ET`(s@L^=*%YB0cYqs~jCNUG@6JZ(RBcL{K z%12g_6)-;6BfepiBq$tQwt(Z2&|sBGvy?O#MXNCXXPyf&zHfs*Pf11?v^i8MGx#>U zgnOa)3&;4&_mgT6R4E{5E)ev}9K*@*S!cjb*4=r~Lav+cQ{k(mh^g-#M3Nn5Q}Y1`b?|;^G_43NN;?$A z>|b~c@KlZ&(I5m26fR$AaOb)Nc+$#Lk=Xm1NBjRFFSPLOhU|+J5zYe`{0u-OZRGQ^ zkJkrMo4&fae4x+x`#dx~+}_B{l3G^QHwb_Ay!bKR^l)~Lb9qiaI;ttNy5rYr#CKsz zI=&CvP#Jhy~k{f#;v8)bhIG>R8!Hf?RPpZ_NN!iGIjlhYTUo9^1)1x;e^u+8h+)9<9u!3+?K zd2L$VpRD}e&9v5THEl-QqYyAM3KUrE)2EIqjQ%tJAqA1uv(JIOYTXe%v>WeLWQIAWh2)5r-g?uDqyommG0ZothJ{HFXyc|ul9bm_vWW^{2AxOcMwm1Wol$r z@lAA42s?i?Y{98LyFb4vZE?D4=#M57wN0B}#VMQHn$9ZbEBj6-^^YGhleZhJ|yqnqGPNnx!(&a)99V`ET3Q8=QsTsxO>pyzVJ~%HYim zXr_l5si%INU*InqLVnd1{V@EOtZ;eYbj}lPnis$3rhiTic7~L=vh1WdpyF3~x0i3+ zG-K2!2$TmH6r6jGxmWvUt9(b!>PxqUzj>t6BJ7QA=EG(^R9W3lE>G;OXWiiiZ`G$i z9*w`f$OY(JmTXLdxv9abhfFtvD|^0IY}!M@RBNpD(hTu?)3%2-djKWTjc@g>ZN$_68aWN@0dbvJRfHKCiD1P3OA?z=QEysPO z=+dRZ#CWVXDj&`B(5;(+gC#|XkQ$+^lcqPJu;^VBH!6Bv;zXXC$mh5zCqpoxIz$pH 
zN6F&qx;R2BZ2kCH?aLbz6+@aJrTgcL&yL}>-U_8ddH53D7hw&0 zBUuVL0x|=(cpX0;VCSN{6!+-?8m!*7J&@wFDFXsMMLBnPuv02Z(d(`|WIHNz0e7C# zxJZBrD{HYtBR)TsLP0ru{Q9M=cU#c1tcE8Qs$Vdl5^Zt~4a zJ-`td<_VR=*&KXDotj~5fMYqDg2yqOv_tQ-sF~pkc*V)0+0V3+*`2l9Jf+Kx4prw9 z4e`vfW;5?cEJwKRSb%+t=?i|xG~;^0cq?6tXe5x*4e5)i4&7npv5XOWU?w+A+P5^y z+MmtAUM9q@PiipryKI$Qs+xha7gR}#k_bLyk)9wl3^gjjH`V=Hkl@7T+3x@F9BmWg^UFPtGl}%|^`u!FH0fC>Ts?R^00G%FS5E{(=Jrd2dm`x|A0f)GW!n^NBvcrbvZ71=M5;s+<%W*;{A zIwH<-e|z4)X+j#^7!A|P9o_@gFouWm4kaUVrmgKS@HjkAji>m6dZm&1~FaHJUDVBoeXX>+%BMG8FWQ% z5^F+nNyHmJS#AQ-nt^+x{rrNjj@9Mm&TFn_t7-=IpTyLCZ^@1+NjS}@B{*f3++W}!oX{Qf z)i)=bkmx#0qI{AWq{nw@5P~{)sQ8>0cQJ!}J{{o;fh>raCs^C)T)z(`Sce7?^y+$F zTs~PVz+e646Ld~uI8>);e2n+vX|LqhU9sTjxO)*vO0rUQuL7{%)mw+MPw`Ar6N!uQ zP_T(y!n~-)qeNl!SfZ|n`XLvd7)sU&JcCnMl=J(I0HyHjnFRMTd8A#|z%w-!GC94#A zww-xii2oqyUXX_hc!Y-Ob%y?EZTC3G4T!|C`T4~k<9S@Luwv%Kh~QGn*z@_;lnMM} zbi&tx)fsxZk8DGNnkk3QXpdN>a~Q?p#++pk#AugFjQY}fMZh3ayeR8WBj zC7tP6HVB^})h*^4-RCds-*By+aW1OFJITFP z4kq%kLQT51XGg8R+(;7B&h>Hvwt)xOs7gi#bLwQ2^8VHl5Cnb*Dv9tF2x498Wb}MSk@~GZHuh%*Iur5%1URhgQcpV$c13j{!V-xQml4ko z$kz+0^X&O0PFOVdQ&qo<9bqlv$tfCdlOh8n-HcV`@sH-*zbc8*_O}S>%#j28k>s%M zdcsI$MfF3ezABe)A;#fa#ulA97Q8hsKIIe8<;jlYOD_vAMD;z0OvOz^Qq5gZd=_g=8Gl;HI4!wJ{Y7iCa<0 zjPa5VsS4Q{(h`5QDkqgD33N4yr8Cv)SaVcx(N4t)0)Pz~K{_#2Zo1&2vVx1_ZYD9Z zmWLZD5zbj*y(1g*{g)yTY~0WFq!544_C#_b4L7U%o5%*G`_IiPpCFRj{jw7e_lYWJ zG{W)Ng(({Nm8O)8Auy5v?uw1ZeF@%5$(CZ#dbNSu1UaDr%<|}92x6~aHHPey& zn8}{Bzh+z%psSKcm9YSf)+FyrhCs$dG6N%{3FZEB?15d>I#T=kDZGtQHJwAf>TXe+`rmM)*S`hnbk#=R_ts za!S=&tlkgTS3)Y>12Nuvbm*Q#WunuCc<)Um#K?@f%CP*p#etLHfnLXW$+0zJ~&ab^cl+5o)|4^cQEKf7>YGL+E@N*7^AJwixVWb?f z?`P_vJ#nnCej;F~{HT(rdh|{rhX;z=fdknQ$P{`618F8EkR0{eK3M58M0xHEgGPB? 
zqvw@U>oD*WY6a=HGBGT2O7020u*iX$uMVOgu>5~iMvMH?E^!PE-U{oD_baG(+(LTX zSyzfN(|_?}q2FPKSW;Q`diOpA#hr_AHI#svk5IxY$4vKdd&DouPAEN~_sww+C!APF+*aBa9w=LN+-{G8rW>~4E$<`Xoexc1{opal8bL|MUH(@{) zXdqT!qFOPSQmGmMnss`uS6}ScR&1nkxpA%=P_m9?gz=3=GdfvQ23H%UOo<6sNvo}h zEEg80?nSPq2``_&EvHFdAKzBW<8kyxRIV|ykhmkSu{U4^FYpyK{oSWHVfd@D(W*zb z%B#qx96ChOch!L7r5=Zu1<;S<31~R1>@-pmk(Av-_vc$-wobRglW_$=+FbVj!Mc55 ziZ=&)C;v*z^z4df2s+DOnwqZBm3!-HU|a%}RsJ}&n5!pp-Mq)UQw=>Gi@sZ&S^@0V zL+|$lgM(XrjS6C&!-)v1%*cE=RZI0dv)$3olyC^1T`gY+u({y5yim{fjC2Q!SVe5g zcY%0JJg%%k6b$sL%W>qt@0Np^_Z|_YUbL0DVMy3bvN@mG&G>jem6#P^$U@zJ*D<& zp^T<6{{Vlcl_E;X&!QjUG39>p^W)hHYYM9 zS0MnGR`K%}Q>C0ddSdhK@_{HN-)=bkN)P7~b!+;QRa%01;YXHayfvv1)qE-JcP60y ziSIJqv6?g+qxMI-pbaulIM68pEux+~kAY~0;+EU&YztXAy_nL2nft*Pzi7@`v)$eg z0iV8{L;k+bMSevwoS&_~i}-eYcC?vv;0EHXZKL@d)}^0z+#_Pm3RNv9U>*~cFeBMo zRZd`3Wx@`Wt#Cbj|JtRDJ~>D{qS81e__iz;+#Y)~HK)DS-?o5%(=D2a?q;o(UbWw!;U%hd zQ7jo%_#3&LSuEoK^XkyNo;4lz{9W;PSbhu|9hM%RnVgrBMS1;~3ZBUivk>CDUNxja z{M^)hSN|;~3snN7okmR3nG9CJ4m(Gh*OVQ(02q@Q;sQu2u@8Xhq&$Drd;pLv9=V&r z&E+pHr(p|Q&`45Tp@!>fTS32>f?d_2e434*7*Q14Nh_@AU+3pD-& zSLi;pTF-Ep1rJB0TZW^PSfpjfXcieX(#pC$-K}SLuwM&I-S%j1V3Ee|^%k)~EHJ~o z#NJ7E1#OE0YYGKwRA0ROe%@^Rk_Q4~`Z;+tLw;1GgBCYb8U8IGx^ol6;#Oco2ha_4 z>E>QpiY`;Wp5m!6o|<9$EgfsuwO7SzhtmL)aL zF{)Q?G}YmXpSL)q2sbO%hr^`gFr={Yf%O<&Fd{I&dTD~pk0xfc*qDJ$;Cp7pdd>@yW^3b!(q-6g3u4B%A3q8}o=ms=u65)c ztPk?2_Vbe=B&eEBF7_|5(EXj*lI%=m&OO_|xt#2L_J-;E-~87Tn}<5u%V)KmWkRh2 zHSVN`j^LaDtISWAHjN3+vKLJpEFPZl-T-MN2f;bO*bOY;NVX8@$gU!A^yK<`Gylnk z(V;}y%9r(dF0J}s5HO%npbT&aeY`j17GO}e8u>8Z{LxH%i+XuJNXNtTb&H!dc3#w5T;x)JYngy=p7YHC4?+zw% zTfOP_MsM?Bd>0Jt&Osc0=SFm= z$$DZT!deO$Bb8l*f}>BYwQo;K88E$KdFvfN0JK-*^#)Z)DDXx(lPc%{Jy{zOMr{;q zJe-@q701sHs_=gpWNS2lIKTfCtO1rDVy>tQF~`T- z{2%ugoZCFct^fFx>a}Nsz((>3N|Iju&*=Y>#j!URzAvG~@dhL86IgSpbd;ywSSZ|W zI$Y?(Kw@J#qKlKO9ID|RbxHVFHP_u^%dhE8Y8E-~WRz^2h?L(r@UUTgEL|SqoU%zIoog zP-zB@%p7|6q3E0A6eUs>uCBo{qWlSVNiRjYjkL2JQLHpQl%{T>q&IIs-_X From 722e314b8ea849633f0e32cf282c739d874da30e Mon Sep 17 00:00:00 2001 From: Jen Hamon Date: 
Tue, 3 Jun 2025 15:47:02 -0400 Subject: [PATCH 4/8] Put back python version to 3.9 --- .github/actions/build-docs/action.yml | 25 +- .github/workflows/build-and-publish-docs.yaml | 2 +- poetry.lock | 355 ++++-------------- pyproject.toml | 3 +- 4 files changed, 90 insertions(+), 295 deletions(-) diff --git a/.github/actions/build-docs/action.yml b/.github/actions/build-docs/action.yml index b47bdd5c..f8b39246 100644 --- a/.github/actions/build-docs/action.yml +++ b/.github/actions/build-docs/action.yml @@ -1,5 +1,5 @@ name: 'Build client documentation' -description: 'Generates client documentation using pdoc' +description: 'Generates client documentation using sphinx' inputs: python-version: description: 'Python version to use' @@ -8,13 +8,34 @@ inputs: runs: using: 'composite' steps: + - name: Pretend this project requires Python 3.11 + shell: bash + run: | + # Poetry won't let me install sphinx as a dev dependency in this project + # because of the wide range of versions our library supports. So during this + # action, we'll pretend this project requires Python 3.11 or greater. 
+ sed -i 's/python = "^3.9"/python = "^3.11"/' pyproject.toml + - name: Setup Poetry uses: ./.github/actions/setup-poetry with: include_grpc: 'true' include_dev: 'true' + include_asyncio: 'true' python_version: ${{ inputs.python-version }} + + - name: Install sphinx + shell: bash + run: | + poetry add sphinx myst-parser --group dev + - name: Build html documentation shell: bash run: | - poetry run sphinx-build -b html sphinx docsbuild + poetry run sphinx-build -b html docs docsbuild + + - name: Discard changes to pyproject.toml and poetry.lock + shell: bash + run: | + git checkout pyproject.toml + git checkout poetry.lock diff --git a/.github/workflows/build-and-publish-docs.yaml b/.github/workflows/build-and-publish-docs.yaml index 5110a34c..9b915b5e 100644 --- a/.github/workflows/build-and-publish-docs.yaml +++ b/.github/workflows/build-and-publish-docs.yaml @@ -14,7 +14,7 @@ jobs: - name: Checkout uses: actions/checkout@v4 - - name: Generate pdoc documentation + - name: Generate sphinx documentation uses: ./.github/actions/build-docs with: python-version: 3.11 diff --git a/poetry.lock b/poetry.lock index 04bb99bc..34acba57 100644 --- a/poetry.lock +++ b/poetry.lock @@ -99,6 +99,7 @@ files = [ [package.dependencies] aiohappyeyeballs = ">=2.3.0" aiosignal = ">=1.1.2" +async-timeout = {version = ">=4.0,<6.0", markers = "python_version < \"3.11\""} attrs = ">=17.3.0" frozenlist = ">=1.1.1" multidict = ">=4.5,<7.0" @@ -137,14 +138,14 @@ files = [ frozenlist = ">=1.1.0" [[package]] -name = "alabaster" -version = "1.0.0" -description = "A light, configurable Sphinx theme" -optional = false -python-versions = ">=3.10" +name = "async-timeout" +version = "5.0.1" +description = "Timeout context manager for asyncio programs" +optional = true +python-versions = ">=3.8" files = [ - {file = "alabaster-1.0.0-py3-none-any.whl", hash = "sha256:fc6786402dc3fcb2de3cabd5fe455a2db534b371124f1f21de8731783dec828b"}, - {file = "alabaster-1.0.0.tar.gz", hash = 
"sha256:c00dca57bca26fa62a6d7d0a9fcce65f3e026e9bfe33e9c538fd3fbb2144fd9e"}, + {file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"}, + {file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"}, ] [[package]] @@ -166,20 +167,6 @@ docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphi tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] -[[package]] -name = "babel" -version = "2.17.0" -description = "Internationalization utilities" -optional = false -python-versions = ">=3.8" -files = [ - {file = "babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2"}, - {file = "babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d"}, -] - -[package.extras] -dev = ["backports.zoneinfo", "freezegun (>=1.0,<2.0)", "jinja2 (>=3.0)", "pytest (>=6.0)", "pytest-cov", "pytz", "setuptools", "tzdata"] - [[package]] name = "beautifulsoup4" version = "4.13.3" @@ -410,16 +397,22 @@ files = [ ] [[package]] -name = "docutils" -version = "0.21.2" -description = "Docutils -- Python Documentation Utilities" +name = "exceptiongroup" +version = "1.3.0" +description = "Backport of PEP 654 (exception groups)" optional = false -python-versions = ">=3.9" +python-versions = ">=3.7" files = [ - {file = "docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2"}, - {file = "docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f"}, + {file = "exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10"}, + {file = 
"exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88"}, ] +[package.dependencies] +typing-extensions = {version = ">=4.6.0", markers = "python_version < \"3.13\""} + +[package.extras] +test = ["pytest (>=6)"] + [[package]] name = "filelock" version = "3.15.1" @@ -660,17 +653,6 @@ files = [ {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, ] -[[package]] -name = "imagesize" -version = "1.4.1" -description = "Getting image size from png/jpeg/jpeg2000/gif file" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, - {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, -] - [[package]] name = "iniconfig" version = "2.0.0" @@ -682,23 +664,6 @@ files = [ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] -[[package]] -name = "jinja2" -version = "3.1.5" -description = "A very fast and expressive template engine." -optional = false -python-versions = ">=3.7" -files = [ - {file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"}, - {file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"}, -] - -[package.dependencies] -MarkupSafe = ">=2.0" - -[package.extras] -i18n = ["Babel (>=2.7)"] - [[package]] name = "lz4" version = "4.3.2" @@ -748,75 +713,6 @@ docs = ["sphinx (>=1.6.0)", "sphinx-bootstrap-theme"] flake8 = ["flake8"] tests = ["psutil", "pytest (!=3.3.0)", "pytest-cov"] -[[package]] -name = "markupsafe" -version = "2.1.3" -description = "Safely add untrusted strings to HTML/XML markup." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, - {file = 
"MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, - {file = 
"MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, - {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, -] - [[package]] name = "multidict" version = "6.1.0" @@ -918,6 +814,9 @@ files = [ {file = "multidict-6.1.0.tar.gz", hash = "sha256:22ae2ebf9b0c69d206c003e2f6a914ea33f0a932d4aa16f236afc049d9958f4a"}, ] +[package.dependencies] +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.11\""} + [[package]] name = "mypy" version = "1.6.1" @@ -956,6 +855,7 @@ files = [ 
[package.dependencies] mypy-extensions = ">=1.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} typing-extensions = ">=4.1.0" [package.extras] @@ -1094,6 +994,7 @@ files = [ [package.dependencies] numpy = [ + {version = ">=1.22.4", markers = "python_version < \"3.11\""}, {version = ">=1.23.2", markers = "python_version == \"3.11\""}, {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, ] @@ -1392,20 +1293,6 @@ files = [ {file = "py_cpuinfo-9.0.0-py3-none-any.whl", hash = "sha256:859625bc251f64e21f077d099d4162689c762b5d6a4c3c97553d56241c9674d5"}, ] -[[package]] -name = "pygments" -version = "2.19.1" -description = "Pygments is a syntax highlighting package written in Python." -optional = false -python-versions = ">=3.8" -files = [ - {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, - {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, -] - -[package.extras] -windows-terminal = ["colorama (>=0.4.6)"] - [[package]] name = "pytest" version = "8.2.0" @@ -1419,9 +1306,11 @@ files = [ [package.dependencies] colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" pluggy = ">=1.5,<2.0" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] @@ -1669,21 +1558,6 @@ urllib3 = ">=1.25.10,<3.0" [package.extras] tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "tomli", "tomli-w", "types-PyYAML", "types-requests"] -[[package]] -name = "roman-numerals-py" -version = "3.1.0" -description = "Manipulate well-formed Roman numerals" -optional = false 
-python-versions = ">=3.9" -files = [ - {file = "roman_numerals_py-3.1.0-py3-none-any.whl", hash = "sha256:9da2ad2fb670bcf24e81070ceb3be72f6c11c440d73bd579fbeca1e9f330954c"}, - {file = "roman_numerals_py-3.1.0.tar.gz", hash = "sha256:be4bf804f083a4ce001b5eb7e3c0862479d10f94c936f6c4e5f250aa5ff5bd2d"}, -] - -[package.extras] -lint = ["mypy (==1.15.0)", "pyright (==1.1.394)", "ruff (==0.9.7)"] -test = ["pytest (>=8)"] - [[package]] name = "ruff" version = "0.9.3" @@ -1722,17 +1596,6 @@ files = [ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] -[[package]] -name = "snowballstemmer" -version = "3.0.1" -description = "This package provides 32 stemmers for 30 languages generated from Snowball algorithms." -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*" -files = [ - {file = "snowballstemmer-3.0.1-py3-none-any.whl", hash = "sha256:6cd7b3897da8d6c9ffb968a6781fa6532dce9c3618a4b127d920dab764a19064"}, - {file = "snowballstemmer-3.0.1.tar.gz", hash = "sha256:6d5eeeec8e9f84d4d56b847692bacf79bc2c8e90c7f80ca4444ff8b6f2e52895"}, -] - [[package]] name = "soupsieve" version = "2.6" @@ -1745,134 +1608,46 @@ files = [ ] [[package]] -name = "sphinx" -version = "8.2.3" -description = "Python documentation generator" -optional = false -python-versions = ">=3.11" -files = [ - {file = "sphinx-8.2.3-py3-none-any.whl", hash = "sha256:4405915165f13521d875a8c29c8970800a0141c14cc5416a38feca4ea5d9b9c3"}, - {file = "sphinx-8.2.3.tar.gz", hash = "sha256:398ad29dee7f63a75888314e9424d40f52ce5a6a87ae88e7071e80af296ec348"}, -] - -[package.dependencies] -alabaster = ">=0.7.14" -babel = ">=2.13" -colorama = {version = ">=0.4.6", markers = "sys_platform == \"win32\""} -docutils = ">=0.20,<0.22" -imagesize = ">=1.3" -Jinja2 = ">=3.1" -packaging = ">=23.0" -Pygments = ">=2.17" -requests = ">=2.30.0" -roman-numerals-py = ">=1.0.0" -snowballstemmer = ">=2.2" -sphinxcontrib-applehelp = ">=1.0.7" -sphinxcontrib-devhelp = ">=1.0.6" 
-sphinxcontrib-htmlhelp = ">=2.0.6" -sphinxcontrib-jsmath = ">=1.0.1" -sphinxcontrib-qthelp = ">=1.0.6" -sphinxcontrib-serializinghtml = ">=1.1.9" - -[package.extras] -docs = ["sphinxcontrib-websupport"] -lint = ["betterproto (==2.0.0b6)", "mypy (==1.15.0)", "pypi-attestations (==0.0.21)", "pyright (==1.1.395)", "pytest (>=8.0)", "ruff (==0.9.9)", "sphinx-lint (>=0.9)", "types-Pillow (==10.2.0.20240822)", "types-Pygments (==2.19.0.20250219)", "types-colorama (==0.4.15.20240311)", "types-defusedxml (==0.7.0.20240218)", "types-docutils (==0.21.0.20241128)", "types-requests (==2.32.0.20241016)", "types-urllib3 (==1.26.25.14)"] -test = ["cython (>=3.0)", "defusedxml (>=0.7.1)", "pytest (>=8.0)", "pytest-xdist[psutil] (>=3.4)", "setuptools (>=70.0)", "typing_extensions (>=4.9)"] - -[[package]] -name = "sphinxcontrib-applehelp" -version = "2.0.0" -description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" -optional = false -python-versions = ">=3.9" -files = [ - {file = "sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5"}, - {file = "sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1"}, -] - -[package.extras] -lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] -standalone = ["Sphinx (>=5)"] -test = ["pytest"] - -[[package]] -name = "sphinxcontrib-devhelp" -version = "2.0.0" -description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents" +name = "tomli" +version = "2.2.1" +description = "A lil' TOML parser" optional = false -python-versions = ">=3.9" -files = [ - {file = "sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2"}, - {file = "sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad"}, -] - -[package.extras] -lint = 
["mypy", "ruff (==0.5.5)", "types-docutils"] -standalone = ["Sphinx (>=5)"] -test = ["pytest"] - -[[package]] -name = "sphinxcontrib-htmlhelp" -version = "2.1.0" -description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" -optional = false -python-versions = ">=3.9" -files = [ - {file = "sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8"}, - {file = "sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9"}, -] - -[package.extras] -lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] -standalone = ["Sphinx (>=5)"] -test = ["html5lib", "pytest"] - -[[package]] -name = "sphinxcontrib-jsmath" -version = "1.0.1" -description = "A sphinx extension which renders display math in HTML via JavaScript" -optional = false -python-versions = ">=3.5" -files = [ - {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, - {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, -] - -[package.extras] -test = ["flake8", "mypy", "pytest"] - -[[package]] -name = "sphinxcontrib-qthelp" -version = "2.0.0" -description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" -optional = false -python-versions = ">=3.9" -files = [ - {file = "sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb"}, - {file = "sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab"}, -] - -[package.extras] -lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] -standalone = ["Sphinx (>=5)"] -test = ["defusedxml (>=0.7.1)", "pytest"] - -[[package]] -name = "sphinxcontrib-serializinghtml" -version = "2.0.0" -description = 
"sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)" -optional = false -python-versions = ">=3.9" +python-versions = ">=3.8" files = [ - {file = "sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331"}, - {file = "sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, + {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, + {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, 
+ {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, + {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, + {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, + {file = 
"tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, + {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, + {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, + {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, + {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, ] -[package.extras] -lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] -standalone = ["Sphinx (>=5)"] -test = ["pytest"] - [[package]] name = "tuna" version = "0.5.11" @@ -2128,5 +1903,5 @@ grpc = ["googleapis-common-protos", "grpcio", "grpcio", "grpcio", "lz4", "protob [metadata] lock-version = "2.0" -python-versions = "^3.11" -content-hash = "3d1dbdf5907b0210450c3b067c8a1a345e9413d533a9d5bb8d91a910cbe9bc04" +python-versions = "^3.9" +content-hash = "bd1c6f98884330cb57f648fa219387cce99f35647112e51f83a42b7d9e127f45" diff --git a/pyproject.toml b/pyproject.toml index c3a26fba..b19d3dd3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -35,7 +35,7 @@ classifiers=[ repl = "scripts.repl:main" 
[tool.poetry.dependencies] -python = "^3.11" +python = "^3.9" typing-extensions = ">=3.7.4" urllib3 = [ { version = ">=1.26.0", python = ">=3.8,<3.12" }, @@ -101,7 +101,6 @@ beautifulsoup4 = "^4.13.3" vprof = "^0.38" tuna = "^0.5.11" python-dotenv = "^1.1.0" -sphinx = "^8.2.3" [tool.poetry.extras] grpc = ["grpcio", "googleapis-common-protos", "lz4", "protobuf", "protoc-gen-openapiv2"] From 1d34f646119cbc441b5b95020583c1f2f9791117 Mon Sep 17 00:00:00 2001 From: Jen Hamon Date: Tue, 3 Jun 2025 15:56:28 -0400 Subject: [PATCH 5/8] Docs build shenanigans --- .github/actions/build-docs/action.yml | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/.github/actions/build-docs/action.yml b/.github/actions/build-docs/action.yml index f8b39246..9a3cd2ae 100644 --- a/.github/actions/build-docs/action.yml +++ b/.github/actions/build-docs/action.yml @@ -8,14 +8,6 @@ inputs: runs: using: 'composite' steps: - - name: Pretend this project requires Python 3.11 - shell: bash - run: | - # Poetry won't let me install sphinx as a dev dependency in this project - # because of the wide range of versions our library supports. So during this - # action, we'll pretend this project requires Python 3.11 or greater. - sed -i 's/python = "^3.9"/python = "^3.11"/' pyproject.toml - - name: Setup Poetry uses: ./.github/actions/setup-poetry with: @@ -24,6 +16,16 @@ runs: include_asyncio: 'true' python_version: ${{ inputs.python-version }} + - name: Pretend this project requires Python 3.11 + shell: bash + run: | + # Poetry won't let me install sphinx as a dev dependency in this project + # because of the wide range of versions our library supports. So during this + # action, we'll pretend this project requires Python 3.11 or greater. 
+ sed -i 's/python = "^3.9"/python = "^3.11"/' pyproject.toml + poetry lock --no-update + poetry install -E grpc -E asyncio + - name: Install sphinx shell: bash run: | From 6451596299d50dbc7d57e0ad66eb549b5e8bb8ff Mon Sep 17 00:00:00 2001 From: Jen Hamon Date: Tue, 3 Jun 2025 16:00:50 -0400 Subject: [PATCH 6/8] More shenaningans --- .github/actions/build-docs/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/build-docs/action.yml b/.github/actions/build-docs/action.yml index 9a3cd2ae..c1b04351 100644 --- a/.github/actions/build-docs/action.yml +++ b/.github/actions/build-docs/action.yml @@ -23,7 +23,7 @@ runs: # because of the wide range of versions our library supports. So during this # action, we'll pretend this project requires Python 3.11 or greater. sed -i 's/python = "^3.9"/python = "^3.11"/' pyproject.toml - poetry lock --no-update + poetry lock poetry install -E grpc -E asyncio - name: Install sphinx From a695e392defc376c4d6e9a60229c307370a43d0c Mon Sep 17 00:00:00 2001 From: Jen Hamon Date: Wed, 4 Jun 2025 14:04:53 -0400 Subject: [PATCH 7/8] Edit docstrings --- docs/_static/custom.css | 19 ++ pinecone/config/pinecone_config.py | 2 +- pinecone/db_control/db_control.py | 18 +- pinecone/db_control/db_control_asyncio.py | 16 +- pinecone/db_control/request_factory.py | 4 +- .../db_control/resources/asyncio/backup.py | 2 +- .../resources/asyncio/collection.py | 2 +- .../db_control/resources/asyncio/index.py | 2 +- .../resources/asyncio/restore_job.py | 2 +- pinecone/db_control/resources/sync/backup.py | 8 +- .../db_control/resources/sync/collection.py | 11 +- pinecone/db_control/resources/sync/index.py | 14 +- .../db_control/resources/sync/restore_job.py | 8 +- pinecone/db_data/index.py | 29 +-- pinecone/db_data/index_asyncio.py | 18 +- pinecone/db_data/index_asyncio_interface.py | 6 +- pinecone/db_data/interfaces.py | 213 +++++++++++++----- pinecone/db_data/request_factory.py | 2 +- pinecone/deprecation_warnings.py | 
22 +- pinecone/grpc/channel_factory.py | 2 +- pinecone/grpc/index_grpc.py | 10 +- pinecone/grpc/retry.py | 2 +- pinecone/inference/inference.py | 16 +- pinecone/inference/inference_asyncio.py | 6 +- pinecone/inference/models/embedding_list.py | 4 +- pinecone/inference/resources/asyncio/model.py | 2 +- pinecone/inference/resources/sync/model.py | 10 +- pinecone/legacy_pinecone_interface.py | 8 +- .../openapi_support/asyncio_api_client.py | 2 +- pinecone/openapi_support/rest_urllib3.py | 2 +- pinecone/openapi_support/rest_utils.py | 2 +- pinecone/pinecone.py | 30 +-- pinecone/pinecone_asyncio.py | 14 +- pinecone/utils/plugin_aware.py | 4 +- .../integration/control/resources/conftest.py | 2 +- .../control/serverless/conftest.py | 2 +- .../serverless/test_index_instantiation_ux.py | 13 -- .../control_asyncio/resources/conftest.py | 2 +- 38 files changed, 315 insertions(+), 216 deletions(-) delete mode 100644 tests/integration/control/serverless/test_index_instantiation_ux.py diff --git a/docs/_static/custom.css b/docs/_static/custom.css index 32959129..fbc650ae 100644 --- a/docs/_static/custom.css +++ b/docs/_static/custom.css @@ -12,3 +12,22 @@ div.admonition p.admonition-title { .blurb { font-size: 16px; } + +p.admonition-title:after { + content: ""; +} + +div.code-block-caption { + background-color: #EEE; + border-bottom: 1px solid #CCC; + font-size: 17px; + padding: 10px; +} + +dt:target { + background-color: #E8E8E8; +} + +.highlight { + background-color: #F8F8F8; +} diff --git a/pinecone/config/pinecone_config.py b/pinecone/config/pinecone_config.py index ad8125fc..f35fc425 100644 --- a/pinecone/config/pinecone_config.py +++ b/pinecone/config/pinecone_config.py @@ -5,7 +5,7 @@ from .config import ConfigBuilder, Config logger = logging.getLogger(__name__) -""" @private """ +""" :meta private: """ DEFAULT_CONTROLLER_HOST = "https://api.pinecone.io" diff --git a/pinecone/db_control/db_control.py b/pinecone/db_control/db_control.py index ec6a412b..daa81b68 100644 --- 
a/pinecone/db_control/db_control.py +++ b/pinecone/db_control/db_control.py @@ -9,7 +9,7 @@ logger = logging.getLogger(__name__) -""" @private """ +""" :meta private: """ if TYPE_CHECKING: from .resources.sync.index import IndexResource @@ -24,13 +24,13 @@ def __init__( self, config: "Config", openapi_config: "OpenApiConfiguration", pool_threads: int ) -> None: self.config = config - """ @private """ + """ :meta private: """ self._openapi_config = openapi_config - """ @private """ + """ :meta private: """ self._pool_threads = pool_threads - """ @private """ + """ :meta private: """ self._index_api = setup_openapi_client( api_client_klass=ApiClient, @@ -40,19 +40,19 @@ def __init__( pool_threads=self._pool_threads, api_version=API_VERSION, ) - """ @private """ + """ :meta private: """ self._index_resource: Optional["IndexResource"] = None - """ @private """ + """ :meta private: """ self._collection_resource: Optional["CollectionResource"] = None - """ @private """ + """ :meta private: """ self._restore_job_resource: Optional["RestoreJobResource"] = None - """ @private """ + """ :meta private: """ self._backup_resource: Optional["BackupResource"] = None - """ @private """ + """ :meta private: """ super().__init__() # Initialize PluginAware diff --git a/pinecone/db_control/db_control_asyncio.py b/pinecone/db_control/db_control_asyncio.py index 7ae2196a..005c25f2 100644 --- a/pinecone/db_control/db_control_asyncio.py +++ b/pinecone/db_control/db_control_asyncio.py @@ -8,7 +8,7 @@ from pinecone.core.openapi.db_control import API_VERSION logger = logging.getLogger(__name__) -""" @private """ +""" :meta private: """ if TYPE_CHECKING: @@ -22,10 +22,10 @@ class DBControlAsyncio: def __init__(self, config: "Config", openapi_config: "OpenApiConfiguration") -> None: self._config = config - """ @private """ + """ :meta private: """ self._openapi_config = openapi_config - """ @private """ + """ :meta private: """ self._index_api = setup_async_openapi_client( 
api_client_klass=AsyncioApiClient, @@ -34,19 +34,19 @@ def __init__(self, config: "Config", openapi_config: "OpenApiConfiguration") -> openapi_config=self._openapi_config, api_version=API_VERSION, ) - """ @private """ + """ :meta private: """ self._index_resource: Optional["IndexResourceAsyncio"] = None - """ @private """ + """ :meta private: """ self._collection_resource: Optional["CollectionResourceAsyncio"] = None - """ @private """ + """ :meta private: """ self._restore_job_resource: Optional["RestoreJobResourceAsyncio"] = None - """ @private """ + """ :meta private: """ self._backup_resource: Optional["BackupResourceAsyncio"] = None - """ @private """ + """ :meta private: """ @property def index(self) -> "IndexResourceAsyncio": diff --git a/pinecone/db_control/request_factory.py b/pinecone/db_control/request_factory.py index 76fbd6a0..3d6a3735 100644 --- a/pinecone/db_control/request_factory.py +++ b/pinecone/db_control/request_factory.py @@ -49,12 +49,12 @@ logger = logging.getLogger(__name__) -""" @private """ +""" :meta private: """ class PineconeDBControlRequestFactory: """ - @private + :meta private: This class facilitates translating user inputs into request objects. 
""" diff --git a/pinecone/db_control/resources/asyncio/backup.py b/pinecone/db_control/resources/asyncio/backup.py index 391da1e1..e726cf45 100644 --- a/pinecone/db_control/resources/asyncio/backup.py +++ b/pinecone/db_control/resources/asyncio/backup.py @@ -9,7 +9,7 @@ class BackupResourceAsyncio: def __init__(self, index_api: AsyncioManageIndexesApi): self._index_api = index_api - """ @private """ + """ :meta private: """ @require_kwargs async def list( diff --git a/pinecone/db_control/resources/asyncio/collection.py b/pinecone/db_control/resources/asyncio/collection.py index e7d98a66..27916df5 100644 --- a/pinecone/db_control/resources/asyncio/collection.py +++ b/pinecone/db_control/resources/asyncio/collection.py @@ -6,7 +6,7 @@ from pinecone.utils import require_kwargs logger = logging.getLogger(__name__) -""" @private """ +""" :meta private: """ class CollectionResourceAsyncio: diff --git a/pinecone/db_control/resources/asyncio/index.py b/pinecone/db_control/resources/asyncio/index.py index ef030ddb..cb233bc4 100644 --- a/pinecone/db_control/resources/asyncio/index.py +++ b/pinecone/db_control/resources/asyncio/index.py @@ -29,7 +29,7 @@ from pinecone.utils import require_kwargs logger = logging.getLogger(__name__) -""" @private """ +""" :meta private: """ class IndexResourceAsyncio: diff --git a/pinecone/db_control/resources/asyncio/restore_job.py b/pinecone/db_control/resources/asyncio/restore_job.py index 397a5050..aa25f31e 100644 --- a/pinecone/db_control/resources/asyncio/restore_job.py +++ b/pinecone/db_control/resources/asyncio/restore_job.py @@ -8,7 +8,7 @@ class RestoreJobResourceAsyncio: def __init__(self, index_api: AsyncioManageIndexesApi): self._index_api = index_api - """ @private """ + """ :meta private: """ @require_kwargs async def get(self, *, job_id: str) -> RestoreJobModel: diff --git a/pinecone/db_control/resources/sync/backup.py b/pinecone/db_control/resources/sync/backup.py index 8d5d2a0c..7dbb52a5 100644 --- 
a/pinecone/db_control/resources/sync/backup.py +++ b/pinecone/db_control/resources/sync/backup.py @@ -18,16 +18,16 @@ def __init__( pool_threads: int, ): self._index_api = index_api - """ @private """ + """ :meta private: """ self.config = config - """ @private """ + """ :meta private: """ self._openapi_config = openapi_config - """ @private """ + """ :meta private: """ self._pool_threads = pool_threads - """ @private """ + """ :meta private: """ super().__init__() # Initialize PluginAware diff --git a/pinecone/db_control/resources/sync/collection.py b/pinecone/db_control/resources/sync/collection.py index 950452e6..dafdfa5f 100644 --- a/pinecone/db_control/resources/sync/collection.py +++ b/pinecone/db_control/resources/sync/collection.py @@ -6,7 +6,7 @@ from pinecone.utils import PluginAware, require_kwargs logger = logging.getLogger(__name__) -""" @private """ +""" :meta private: """ if TYPE_CHECKING: from pinecone.core.openapi.db_control.api.manage_indexes_api import ManageIndexesApi @@ -22,17 +22,16 @@ def __init__( pool_threads: int, ): self.index_api = index_api - """ @private """ - + """ :meta private: """ self.config = config - """ @private """ + """ :meta private: """ self._openapi_config = openapi_config - """ @private """ + """ :meta private: """ self._pool_threads = pool_threads - """ @private """ + """ :meta private: """ super().__init__() # Initialize PluginAware diff --git a/pinecone/db_control/resources/sync/index.py b/pinecone/db_control/resources/sync/index.py index b17a607e..cf255ddf 100644 --- a/pinecone/db_control/resources/sync/index.py +++ b/pinecone/db_control/resources/sync/index.py @@ -12,7 +12,7 @@ from pinecone.core.openapi.db_control import API_VERSION logger = logging.getLogger(__name__) -""" @private """ +""" :meta private: """ if TYPE_CHECKING: from pinecone.config import Config, OpenApiConfiguration @@ -39,19 +39,19 @@ def __init__( pool_threads: int, ): self._index_api = index_api - """ @private """ + """ :meta private: """ 
self.config = config - """ @private """ + """ :meta private: """ self._openapi_config = openapi_config - """ @private """ + """ :meta private: """ self._pool_threads = pool_threads - """ @private """ + """ :meta private: """ self._index_host_store = IndexHostStore() - """ @private """ + """ :meta private: """ super().__init__() # Initialize PluginAware @@ -238,7 +238,7 @@ def configure( api_instance.configure_index(name, configure_index_request=req) def _get_host(self, name: str) -> str: - """@private""" + """:meta private:""" return self._index_host_store.get_host( api=self._index_api, config=self.config, index_name=name ) diff --git a/pinecone/db_control/resources/sync/restore_job.py b/pinecone/db_control/resources/sync/restore_job.py index e47010c8..e1a3d3b6 100644 --- a/pinecone/db_control/resources/sync/restore_job.py +++ b/pinecone/db_control/resources/sync/restore_job.py @@ -17,16 +17,16 @@ def __init__( pool_threads: int, ): self._index_api = index_api - """ @private """ + """ :meta private: """ self.config = config - """ @private """ + """ :meta private: """ self._openapi_config = openapi_config - """ @private """ + """ :meta private: """ self._pool_threads = pool_threads - """ @private """ + """ :meta private: """ super().__init__() # Initialize PluginAware diff --git a/pinecone/db_data/index.py b/pinecone/db_data/index.py index b3205eb6..955ebf18 100644 --- a/pinecone/db_data/index.py +++ b/pinecone/db_data/index.py @@ -57,11 +57,11 @@ from .resources.sync.bulk_import import ImportErrorMode logger = logging.getLogger(__name__) -""" @private """ +""" :meta private: """ def parse_query_response(response: QueryResponse): - """@private""" + """:meta private:""" response._data_store.pop("results", None) return response @@ -73,7 +73,7 @@ class Index(PluginAware, IndexInterface): """ _bulk_import_resource: Optional["BulkImportResource"] - """ @private """ + """ :meta private: """ def __init__( self, @@ -87,16 +87,16 @@ def __init__( self._config = 
ConfigBuilder.build( api_key=api_key, host=host, additional_headers=additional_headers, **kwargs ) - """ @private """ + """ :meta private: """ self._openapi_config = ConfigBuilder.build_openapi_config(self._config, openapi_config) - """ @private """ + """ :meta private: """ if pool_threads is None: self._pool_threads = 5 * cpu_count() - """ @private """ + """ :meta private: """ else: self._pool_threads = pool_threads - """ @private """ + """ :meta private: """ if kwargs.get("connection_pool_maxsize", None): self._openapi_config.connection_pool_maxsize = kwargs.get("connection_pool_maxsize") @@ -113,19 +113,19 @@ def __init__( self._api_client = self._vector_api.api_client self._bulk_import_resource = None - """ @private """ + """ :meta private: """ # Pass the same api_client to the ImportFeatureMixin super().__init__(api_client=self._api_client) @property def config(self) -> "Config": - """@private""" + """:meta private:""" return self._config @property def openapi_config(self) -> "OpenApiConfiguration": - """@private""" + """:meta private:""" warnings.warn( "The `openapi_config` property has been renamed to `_openapi_config`. It is considered private and should not be used directly. This warning will become an error in a future version of the Pinecone Python SDK.", DeprecationWarning, @@ -135,7 +135,7 @@ def openapi_config(self) -> "OpenApiConfiguration": @property def pool_threads(self) -> int: - """@private""" + """:meta private:""" warnings.warn( "The `pool_threads` property has been renamed to `_pool_threads`. It is considered private and should not be used directly. 
This warning will become an error in a future version of the Pinecone Python SDK.", DeprecationWarning, @@ -145,7 +145,7 @@ def pool_threads(self) -> int: @property def bulk_import(self) -> "BulkImportResource": - """@private""" + """:meta private:""" if self._bulk_import_resource is None: from .resources.sync.bulk_import import BulkImportResource @@ -555,12 +555,15 @@ def list_imports_paginated( Returns: ListImportsResponse object which contains the list of operations as ImportModel objects, pagination information, and usage showing the number of read_units consumed. - The list_imports_paginated operation returns information about import operations. + The list_imports_paginated() operation returns information about import operations. It returns operations in a paginated form, with a pagination token to fetch the next page of results. Consider using the `list_imports` method to avoid having to handle pagination tokens manually. Examples: + + .. code-block:: python + >>> results = index.list_imports_paginated(limit=5) >>> results.pagination.next eyJza2lwX3Bhc3QiOiI5OTMiLCJwcmVmaXgiOiI5OSJ9 diff --git a/pinecone/db_data/index_asyncio.py b/pinecone/db_data/index_asyncio.py index b185682d..18140d9c 100644 --- a/pinecone/db_data/index_asyncio.py +++ b/pinecone/db_data/index_asyncio.py @@ -59,7 +59,7 @@ logger = logging.getLogger(__name__) -""" @private """ +""" :meta private: """ __all__ = ["_IndexAsyncio", "IndexAsyncio"] @@ -70,7 +70,7 @@ "_check_input_type", "_check_return_type", ) -""" @private """ +""" :meta private: """ def parse_query_response(response: QueryResponse): @@ -138,7 +138,7 @@ async def main(): """ _bulk_import_resource: Optional["BulkImportResourceAsyncio"] - """ @private """ + """ :meta private: """ def __init__( self, @@ -151,9 +151,9 @@ def __init__( self.config = ConfigBuilder.build( api_key=api_key, host=host, additional_headers=additional_headers, **kwargs ) - """ @private """ + """ :meta private: """ self._openapi_config = 
ConfigBuilder.build_openapi_config(self.config, openapi_config) - """ @private """ + """ :meta private: """ if kwargs.get("connection_pool_maxsize", None): self._openapi_config.connection_pool_maxsize = kwargs.get("connection_pool_maxsize") @@ -165,13 +165,13 @@ def __init__( openapi_config=self._openapi_config, api_version=API_VERSION, ) - """ @private """ + """ :meta private: """ self._api_client = self._vector_api.api_client - """ @private """ + """ :meta private: """ self._bulk_import_resource = None - """ @private """ + """ :meta private: """ async def __aenter__(self): return self @@ -234,7 +234,7 @@ async def main(): @property def bulk_import(self) -> "BulkImportResourceAsyncio": - """@private""" + """:meta private:""" if self._bulk_import_resource is None: from .resources.asyncio.bulk_import_asyncio import BulkImportResourceAsyncio diff --git a/pinecone/db_data/index_asyncio_interface.py b/pinecone/db_data/index_asyncio_interface.py index 35ad0484..73ce3f0c 100644 --- a/pinecone/db_data/index_asyncio_interface.py +++ b/pinecone/db_data/index_asyncio_interface.py @@ -196,7 +196,7 @@ async def delete( filter (Dict[str, Union[str, float, int, bool, List, dict]]): If specified, the metadata filter here will be used to select the vectors to delete. This is mutually exclusive with specifying ids to delete in the ids param or using delete_all=True. - See https://www.pinecone.io/docs/metadata-filtering/.. [optional] + See `metadata filtering _` [optional] The Delete operation deletes vectors from the index, from a single namespace. @@ -392,7 +392,7 @@ async def main(): If not specified, the default namespace is used. [optional] filter (Dict[str, Union[str, float, int, bool, List, dict]): The filter to apply. You can use vector metadata to limit your search. - See https://www.pinecone.io/docs/metadata-filtering/.. [optional] + See `metadata filtering _` [optional] include_values (bool): Indicates whether vector values are included in the response. 
If omitted the server will use the default value of False [optional] include_metadata (bool): Indicates whether metadata is included in the response as well as the ids. @@ -547,7 +547,7 @@ async def describe_index_stats( Args: filter (Dict[str, Union[str, float, int, bool, List, dict]]): If this parameter is present, the operation only returns statistics for vectors that satisfy the filter. - See https://www.pinecone.io/docs/metadata-filtering/.. [optional] + See `metadata filtering _` [optional] Returns: DescribeIndexStatsResponse object which contains stats about the index. diff --git a/pinecone/db_data/interfaces.py b/pinecone/db_data/interfaces.py index 5c511b92..d22d03d7 100644 --- a/pinecone/db_data/interfaces.py +++ b/pinecone/db_data/interfaces.py @@ -51,35 +51,43 @@ def upsert( `UpsertResponse`, includes the number of vectors upserted. - The upsert operation writes vectors into a namespace. + The upsert operation writes vectors into a namespace of your index. + If a new value is upserted for an existing vector id, it will overwrite the previous value. - To upsert in parallel follow: https://docs.pinecone.io/docs/insert-data#sending-upserts-in-parallel + To upsert in parallel follow `this link `_. **Upserting dense vectors** - .. admonition:: Note - - The dimension of each dense vector must match the dimension of the index. + When working with dense vectors, the dimension of each vector must match the dimension configured for the + index. A vector can be represented in a variety of ways. .. 
code-block:: python + :caption: Upserting a dense vector using the Vector object + :emphasize-lines: 9-13 from pinecone import Pinecone, Vector pc = Pinecone() - idx = pc.Index("index-name") + idx = pc.Index(name="index-name") - # A Vector object idx.upsert( namespace = 'my-namespace', vectors = [ - Vector(id='id1', values=[0.1, 0.2, 0.3, 0.4], metadata={'metadata_key': 'metadata_value'}), + Vector( + id='id1', + values=[0.1, 0.2, 0.3, 0.4], + metadata={'metadata_key': 'metadata_value'} + ), ] ) - # A vector tuple + .. code-block:: python + :caption: Upserting a dense vector as a two-element tuple (no metadata) + :emphasize-lines: 4 + idx.upsert( namespace = 'my-namespace', vectors = [ @@ -87,44 +95,96 @@ def upsert( ] ) - # A vector tuple with metadata + .. code-block:: python + :caption: Upserting a dense vector as a three-element tuple with metadata + :emphasize-lines: 4-8 + idx.upsert( namespace = 'my-namespace', vectors = [ - ('id1', [0.1, 0.2, 0.3, 0.4], {'metadata_key': 'metadata_value'}), + ( + 'id1', + [0.1, 0.2, 0.3, 0.4], + {'metadata_key': 'metadata_value'} + ), ] ) - # A vector dictionary + .. code-block:: python + :caption: Upserting a dense vector using a vector dictionary + :emphasize-lines: 4-8 + idx.upsert( namespace = 'my-namespace', vectors = [ - {"id": 1, "values": [0.1, 0.2, 0.3, 0.4], "metadata": {"metadata_key": "metadata_value"}}, + { + "id": 1, + "values": [0.1, 0.2, 0.3, 0.4], + "metadata": {"metadata_key": "metadata_value"} + }, ] - **Upserting sparse vectors** .. 
code-block:: python + :caption: Upserting a sparse vector + :emphasize-lines: 32-38 - from pinecone import Pinecone, Vector, SparseValues + from pinecone import ( + Pinecone, + Metric, + Vector, + SparseValues, + VectorType, + ServerlessSpec, + CloudProvider, + AwsRegion + ) - pc = Pinecone() - idx = pc.Index("index-name") + pc = Pinecone() # Reads PINECONE_API_KEY from environment variable - # A Vector object + # Create a sparse index + index_description = pc.create_index( + name="example-sparse", + metric=Metric.Dotproduct, + vector_type=VectorType.Sparse, + spec=ServerlessSpec( + cloud=CloudProvider.AWS, + region=AwsRegion.US_WEST_2, + ) + ) + + # Target the index created above + idx = pc.Index(host=index_description.host) + + # Upsert a sparse vector idx.upsert( - namespace = 'my-namespace', - vectors = [ - Vector(id='id1', sparse_values=SparseValues(indices=[1, 2], values=[0.2, 0.4])), + namespace='my-namespace', + vectors=[ + Vector( + id='id1', + sparse_values=SparseValues( + indices=[1, 2], + values=[0.2, 0.4] + ) + ), ] ) - # A dictionary + .. code-block:: python + :caption: Upserting a sparse vector using a dictionary + :emphasize-lines: 4-10 + idx.upsert( namespace = 'my-namespace', vectors = [ - {"id": 1, "sparse_values": {"indices": [1, 2], "values": [0.2, 0.4]}}, + { + "id": 1, + "sparse_values": { + "indices": [1, 2], + "values": [0.2, 0.4] + } + }, ] ) @@ -134,30 +194,37 @@ def upsert( If you have a large number of vectors, you can upsert them in batches. .. 
code-block:: python + :caption: Upserting in batches + :emphasize-lines: 19 from pinecone import Pinecone, Vector + import random pc = Pinecone() - idx = pc.Index("index-name") + idx = pc.Index(host="example-index-dojoi3u.svc.preprod-aws-0.pinecone.io") + + # Create some fake vector data for demonstration + num_vectors = 100000 + vectors = [ + Vector( + id=f'id{i}', + values=[random.random() for _ in range(1536)]) + for i in range(num_vectors) + ] idx.upsert( - namespace = 'my-namespace', - vectors = [ - {'id': 'id1', 'values': [0.1, 0.2, 0.3, 0.4]}, - {'id': 'id2', 'values': [0.2, 0.3, 0.4, 0.5]}, - {'id': 'id3', 'values': [0.3, 0.4, 0.5, 0.6]}, - {'id': 'id4', 'values': [0.4, 0.5, 0.6, 0.7]}, - {'id': 'id5', 'values': [0.5, 0.6, 0.7, 0.8]}, - # More vectors here - ], - batch_size = 50 + namespace='my-namespace', + vectors=vectors, + batch_size=50 ) **Visual progress bar with tqdm** - To see a progress bar when upserting in batches, you will need to separately install the `tqdm` package. - If `tqdm` is present, the client will detect and use it to display progress when `show_progress=True`. + To see a progress bar when upserting in batches, you will need to separately install `tqdm `_. + If ``tqdm`` is present, the client will detect and use it to display progress when ``show_progress=True``. + + """ pass @@ -182,6 +249,7 @@ def upsert_records(self, namespace: str, records: List[Dict]): :type namespace: str, required :param records: The records to upsert into the index. :type records: List[Dict], required + :return: UpsertResponse object which contains the number of records upserted. Upsert records to a namespace. A record is a dictionary that contains eitiher an `id` or `_id` field along with other fields that will be stored as metadata. The `id` or `_id` field is used @@ -192,6 +260,7 @@ def upsert_records(self, namespace: str, records: List[Dict]): the specified namespacce of the index. .. 
code-block:: python + :caption: Upserting records to be embedded with Pinecone's integrated inference models from pinecone import ( Pinecone, @@ -203,7 +272,7 @@ def upsert_records(self, namespace: str, records: List[Dict]): pc = Pinecone(api_key="<>") - # Create an index for your embedding model + # Create an index configured for the multilingual-e5-large model index_model = pc.create_index_for_model( name="my-model-index", cloud=CloudProvider.AWS, @@ -250,7 +319,7 @@ def upsert_records(self, namespace: str, records: List[Dict]): from pinecone import SearchQuery, SearchRerank, RerankModel - # search for similar records + # Search for similar records response = idx.search_records( namespace="my-namespace", query=SearchQuery( @@ -399,14 +468,14 @@ def delete( filter (Dict[str, Union[str, float, int, bool, List, dict]]): If specified, the metadata filter here will be used to select the vectors to delete. This is mutually exclusive with specifying ids to delete in the ids param or using delete_all=True. - See https://www.pinecone.io/docs/metadata-filtering/.. [optional] + See `metadata filtering _` [optional] The Delete operation deletes vectors from the index, from a single namespace. No error is raised if the vector id does not exist. - Note: For any delete call, if namespace is not specified, the default namespace `""` is used. + Note: For any delete call, if namespace is not specified, the default namespace ``""`` is used. Since the delete operation does not error when ids are not present, this means you may not receive an error if you delete from the wrong namespace. @@ -495,7 +564,7 @@ def query( If not specified, the default namespace is used. [optional] filter (Dict[str, Union[str, float, int, bool, List, dict]): The filter to apply. You can use vector metadata to limit your search. - See https://www.pinecone.io/docs/metadata-filtering/.. 
[optional] + See `metadata filtering _` [optional] include_values (bool): Indicates whether vector values are included in the response. If omitted the server will use the default value of False [optional] include_metadata (bool): Indicates whether metadata is included in the response as well as the ids. @@ -521,9 +590,33 @@ def query_namespaces( sparse_vector: Optional[Union[SparseValues, SparseVectorTypedDict]] = None, **kwargs, ) -> QueryNamespacesResults: - """The query_namespaces() method is used to make a query to multiple namespaces in parallel and combine the results into one result set. + """The ``query_namespaces()`` method is used to make a query to multiple namespaces in parallel and combine the results into one result set. + + :param vector: The query vector, must be the same length as the dimension of the index being queried. + :type vector: List[float] + :param namespaces: The list of namespaces to query. + :type namespaces: List[str] + :param top_k: The number of results you would like to request from each namespace. Defaults to 10. + :type top_k: Optional[int] + :param metric: Must be one of 'cosine', 'euclidean', 'dotproduct'. This is needed in order to merge results across namespaces, since the interpretation of score depends on the index metric type. + :type metric: str + :param filter: Pass an optional filter to filter results based on metadata. Defaults to None. + :type filter: Optional[Dict[str, Union[str, float, int, bool, List, dict]]] + :param include_values: Boolean field indicating whether vector values should be included with results. Defaults to None. + :type include_values: Optional[bool] + :param include_metadata: Boolean field indicating whether vector metadata should be included with results. Defaults to None. + :type include_metadata: Optional[bool] + :param sparse_vector: If you are working with a dotproduct index, you can pass a sparse vector as part of your hybrid search. Defaults to None. 
+ :type sparse_vector: Optional[ Union[SparseValues, Dict[str, Union[List[float], List[int]]]] ] + :return: A QueryNamespacesResults object containing the combined results from all namespaces, as well as the combined usage cost in read units. + :rtype: QueryNamespacesResults + + .. admonition:: Note - Since several asynchronous calls are made on your behalf when calling this method, you will need to tune the pool_threads and connection_pool_maxsize parameter of the Index constructor to suite your workload. + Since several asynchronous calls are made on your behalf when calling this method, you will need to tune + the **pool_threads** and **connection_pool_maxsize** parameter of the Index constructor to suite your workload. + If these values are too small in relation to your workload, you will experience performance issues as + requests queue up while waiting for a request thread to become available. Examples: @@ -531,7 +624,8 @@ def query_namespaces( from pinecone import Pinecone - pc = Pinecone(api_key="your-api-key") + pc = Pinecone() + index = pc.Index( host="index-name", pool_threads=32, @@ -548,23 +642,11 @@ def query_namespaces( include_values=True, include_metadata=True ) + for vec in combined_results.matches: print(vec.id, vec.score) print(combined_results.usage) - - Args: - vector (List[float]): The query vector, must be the same length as the dimension of the index being queried. - namespaces (List[str]): The list of namespaces to query. - top_k (Optional[int], optional): The number of results you would like to request from each namespace. Defaults to 10. - metric (str): Must be one of 'cosine', 'euclidean', 'dotproduct'. This is needed in order to merge results across namespaces, since the interpretation of score depends on the index metric type. - filter (Optional[Dict[str, Union[str, float, int, bool, List, dict]]], optional): Pass an optional filter to filter results based on metadata. Defaults to None. 
- include_values (Optional[bool], optional): Boolean field indicating whether vector values should be included with results. Defaults to None. - include_metadata (Optional[bool], optional): Boolean field indicating whether vector metadata should be included with results. Defaults to None. - sparse_vector (Optional[ Union[SparseValues, Dict[str, Union[List[float], List[int]]]] ], optional): If you are working with a dotproduct index, you can pass a sparse vector as part of your hybrid search. Defaults to None. - - Returns: - QueryNamespacesResults: A QueryNamespacesResults object containing the combined results from all namespaces, as well as the combined usage cost in read units. """ pass @@ -621,16 +703,27 @@ def describe_index_stats( Args: filter (Dict[str, Union[str, float, int, bool, List, dict]]): If this parameter is present, the operation only returns statistics for vectors that satisfy the filter. - See https://www.pinecone.io/docs/metadata-filtering/.. [optional] + See `metadata filtering _` [optional] Returns: DescribeIndexStatsResponse object which contains stats about the index. .. 
code-block:: python >>> pc = Pinecone() - >>> index = pc.Index(index_name="my-index") + >>> index = pc.Index(name="my-index") >>> index.describe_index_stats() - >>> index.describe_index_stats(filter={'key': 'value'}) + {'dimension': 1536, + 'index_fullness': 0.0, + 'metric': 'cosine', + 'namespaces': {'ns0': {'vector_count': 700}, + 'ns1': {'vector_count': 700}, + 'ns2': {'vector_count': 500}, + 'ns3': {'vector_count': 100}, + 'ns4': {'vector_count': 100}, + 'ns5': {'vector_count': 50}, + 'ns6': {'vector_count': 50}}, + 'total_vector_count': 2200, + 'vector_type': 'dense'} """ pass diff --git a/pinecone/db_data/request_factory.py b/pinecone/db_data/request_factory.py index fded9b6b..8233a85e 100644 --- a/pinecone/db_data/request_factory.py +++ b/pinecone/db_data/request_factory.py @@ -33,7 +33,7 @@ from .dataclasses import Vector, SparseValues, SearchQuery, SearchRerank, SearchQueryVector logger = logging.getLogger(__name__) -""" @private """ +""" :meta private: """ def non_openapi_kwargs(kwargs: Dict[str, Any]) -> Dict[str, Any]: diff --git a/pinecone/deprecation_warnings.py b/pinecone/deprecation_warnings.py index a7c15a91..3cd89c05 100644 --- a/pinecone/deprecation_warnings.py +++ b/pinecone/deprecation_warnings.py @@ -9,7 +9,7 @@ def _build_class_migration_message(method_name: str, example: str): def init(*args, **kwargs): - """@private""" + """:meta private:""" example = """ import os from pinecone import Pinecone, ServerlessSpec @@ -41,7 +41,7 @@ def init(*args, **kwargs): def list_indexes(*args, **kwargs): - """@private""" + """:meta private:""" example = """ from pinecone import Pinecone @@ -56,7 +56,7 @@ def list_indexes(*args, **kwargs): def describe_index(*args, **kwargs): - """@private""" + """:meta private:""" example = """ from pinecone import Pinecone @@ -67,7 +67,7 @@ def describe_index(*args, **kwargs): def create_index(*args, **kwargs): - """@private""" + """:meta private:""" example = """ from pinecone import Pinecone, ServerlessSpec @@ -86,7 
+86,7 @@ def create_index(*args, **kwargs): def delete_index(*args, **kwargs): - """@private""" + """:meta private:""" example = """ from pinecone import Pinecone @@ -97,7 +97,7 @@ def delete_index(*args, **kwargs): def scale_index(*args, **kwargs): - """@private""" + """:meta private:""" example = """ from pinecone import Pinecone @@ -116,7 +116,7 @@ def scale_index(*args, **kwargs): def create_collection(*args, **kwargs): - """@private""" + """:meta private:""" example = """ from pinecone import Pinecone @@ -127,7 +127,7 @@ def create_collection(*args, **kwargs): def list_collections(*args, **kwargs): - """@private""" + """:meta private:""" example = """ from pinecone import Pinecone @@ -138,7 +138,7 @@ def list_collections(*args, **kwargs): def delete_collection(*args, **kwargs): - """@private""" + """:meta private:""" example = """ from pinecone import Pinecone @@ -149,7 +149,7 @@ def delete_collection(*args, **kwargs): def describe_collection(*args, **kwargs): - """@private""" + """:meta private:""" example = """ from pinecone import Pinecone @@ -160,7 +160,7 @@ def describe_collection(*args, **kwargs): def configure_index(*args, **kwargs): - """@private""" + """:meta private:""" example = """ from pinecone import Pinecone diff --git a/pinecone/grpc/channel_factory.py b/pinecone/grpc/channel_factory.py index 4a302947..042d21df 100644 --- a/pinecone/grpc/channel_factory.py +++ b/pinecone/grpc/channel_factory.py @@ -11,7 +11,7 @@ from pinecone.utils.user_agent import get_user_agent_grpc _logger = logging.getLogger(__name__) -""" @private """ +""" :meta private: """ class GrpcChannelFactory: diff --git a/pinecone/grpc/index_grpc.py b/pinecone/grpc/index_grpc.py index e9081e82..6da16a07 100644 --- a/pinecone/grpc/index_grpc.py +++ b/pinecone/grpc/index_grpc.py @@ -54,7 +54,7 @@ __all__ = ["GRPCIndex", "GRPCVector", "GRPCQueryVector", "GRPCSparseValues"] _logger = logging.getLogger(__name__) -""" @private """ +""" :meta private: """ class GRPCIndex(GRPCIndexBase): 
@@ -62,7 +62,7 @@ class GRPCIndex(GRPCIndexBase): @property def stub_class(self): - """@private""" + """:meta private:""" return VectorServiceStub def upsert( @@ -253,7 +253,7 @@ def delete( filter (FilterTypedDict): If specified, the metadata filter here will be used to select the vectors to delete. This is mutually exclusive with specifying ids to delete in the ids param or using delete_all=True. - See https://www.pinecone.io/docs/metadata-filtering/.. [optional] + See `metadata filtering <https://www.pinecone.io/docs/metadata-filtering/>`_ [optional] async_req (bool): If True, the delete operation will be performed asynchronously. Defaults to False. [optional] @@ -383,7 +383,7 @@ def query( If not specified, the default namespace is used. [optional] filter (Dict[str, Union[str, float, int, bool, List, dict]]): The filter to apply. You can use vector metadata to limit your search. - See https://www.pinecone.io/docs/metadata-filtering/.. [optional] + See `metadata filtering <https://www.pinecone.io/docs/metadata-filtering/>`_ [optional] include_values (bool): Indicates whether vector values are included in the response. If omitted the server will use the default value of False [optional] include_metadata (bool): Indicates whether metadata is included in the response as well as the ids. @@ -652,7 +652,7 @@ def describe_index_stats( Args: filter (Dict[str, Union[str, float, int, bool, List, dict]]): If this parameter is present, the operation only returns statistics for vectors that satisfy the filter. - See https://www.pinecone.io/docs/metadata-filtering/.. [optional] + See `metadata filtering <https://www.pinecone.io/docs/metadata-filtering/>`_ [optional] Returns: DescribeIndexStatsResponse object which contains stats about the index.
""" diff --git a/pinecone/grpc/retry.py b/pinecone/grpc/retry.py index 8ff1864e..556031ef 100644 --- a/pinecone/grpc/retry.py +++ b/pinecone/grpc/retry.py @@ -8,7 +8,7 @@ _logger = logging.getLogger(__name__) -""" @private """ +""" :meta private: """ class SleepPolicy(abc.ABC): diff --git a/pinecone/inference/inference.py b/pinecone/inference/inference.py index 48ed56d3..8f705d3c 100644 --- a/pinecone/inference/inference.py +++ b/pinecone/inference/inference.py @@ -21,7 +21,7 @@ from .models import ModelInfo, ModelInfoList logger = logging.getLogger(__name__) -""" @private """ +""" :meta private: """ class Inference(PluginAware): @@ -59,13 +59,13 @@ def __init__( **kwargs, ) -> None: self._config = config - """ @private """ + """ :meta private: """ self._openapi_config = openapi_config - """ @private """ + """ :meta private: """ self._pool_threads = pool_threads - """ @private """ + """ :meta private: """ self.__inference_api = setup_openapi_client( api_client_klass=ApiClient, @@ -77,20 +77,20 @@ def __init__( ) self._model: Optional["ModelResource"] = None # Lazy initialization - """ @private """ + """ :meta private: """ super().__init__() # Initialize PluginAware @property def config(self) -> "Config": - """@private""" + """:meta private:""" # The config property is considered private, but the name cannot be changed to include underscore # without breaking compatibility with plugins in the wild. return self._config @property def openapi_config(self) -> "OpenApiConfiguration": - """@private""" + """:meta private:""" warnings.warn( "The `openapi_config` property has been renamed to `_openapi_config`. It is considered private and should not be used directly. 
This warning will become an error in a future version of the Pinecone Python SDK.", DeprecationWarning, @@ -100,7 +100,7 @@ def openapi_config(self) -> "OpenApiConfiguration": @property def pool_threads(self) -> int: - """@private""" + """:meta private:""" warnings.warn( "The `pool_threads` property has been renamed to `_pool_threads`. It is considered private and should not be used directly. This warning will become an error in a future version of the Pinecone Python SDK.", DeprecationWarning, diff --git a/pinecone/inference/inference_asyncio.py b/pinecone/inference/inference_asyncio.py index 65ec8e79..7d362ea3 100644 --- a/pinecone/inference/inference_asyncio.py +++ b/pinecone/inference/inference_asyncio.py @@ -42,13 +42,13 @@ class AsyncioInference: def __init__(self, api_client, **kwargs) -> None: self.api_client = api_client - """ @private """ + """ :meta private: """ self._model: Optional["ModelAsyncioResource"] = None - """ @private """ + """ :meta private: """ self.__inference_api = AsyncioInferenceApi(api_client) - """ @private """ + """ :meta private: """ async def embed( self, diff --git a/pinecone/inference/models/embedding_list.py b/pinecone/inference/models/embedding_list.py index c54e3dab..468848de 100644 --- a/pinecone/inference/models/embedding_list.py +++ b/pinecone/inference/models/embedding_list.py @@ -8,10 +8,10 @@ class EmbeddingsList: def __init__(self, embeddings_list: OpenAPIEmbeddingsList): self.embeddings_list = embeddings_list - """ @private """ + """ :meta private: """ self.current = 0 - """ @private """ + """ :meta private: """ def __getitem__(self, index): return self.embeddings_list.get("data")[index] diff --git a/pinecone/inference/resources/asyncio/model.py b/pinecone/inference/resources/asyncio/model.py index 2d54ebd2..675a8d9d 100644 --- a/pinecone/inference/resources/asyncio/model.py +++ b/pinecone/inference/resources/asyncio/model.py @@ -10,7 +10,7 @@ class ModelAsyncio: def __init__(self, inference_api: "AsyncioInferenceApi") 
-> None: self.__inference_api = inference_api - """ @private """ + """ :meta private: """ super().__init__() # Initialize PluginAware diff --git a/pinecone/inference/resources/sync/model.py b/pinecone/inference/resources/sync/model.py index 19b97f90..06ee00a4 100644 --- a/pinecone/inference/resources/sync/model.py +++ b/pinecone/inference/resources/sync/model.py @@ -18,22 +18,22 @@ def __init__( **kwargs, ) -> None: self._config = config - """ @private """ + """ :meta private: """ self._openapi_config = openapi_config - """ @private """ + """ :meta private: """ self._pool_threads = kwargs.get("pool_threads", 1) - """ @private """ + """ :meta private: """ self.__inference_api = inference_api - """ @private """ + """ :meta private: """ super().__init__() # Initialize PluginAware @property def config(self) -> "Config": - """@private""" + """:meta private:""" # The config property is considered private, but the name cannot be changed to include underscore # without breaking compatibility with plugins in the wild. return self._config diff --git a/pinecone/legacy_pinecone_interface.py b/pinecone/legacy_pinecone_interface.py index 26a84706..2aca80ab 100644 --- a/pinecone/legacy_pinecone_interface.py +++ b/pinecone/legacy_pinecone_interface.py @@ -87,10 +87,10 @@ def create_index( :type tags: Optional[Dict[str, str]] :return: A ``IndexModel`` instance containing a description of the index that was created. - Creating a serverless index - --------------------------- + Examples: .. code-block:: python + :caption: Creating a serverless index import os from pinecone import ( @@ -122,10 +122,8 @@ def create_index( } ) - Creating a pod index - --------------------- - .. 
code-block:: python + :caption: Creating a pod index import os from pinecone import ( diff --git a/pinecone/openapi_support/asyncio_api_client.py b/pinecone/openapi_support/asyncio_api_client.py index 43c8e17b..dce8ec9f 100644 --- a/pinecone/openapi_support/asyncio_api_client.py +++ b/pinecone/openapi_support/asyncio_api_client.py @@ -22,7 +22,7 @@ from .auth_util import AuthUtil logger = logging.getLogger(__name__) -""" @private """ +""" :meta private: """ class AsyncioApiClient(object): diff --git a/pinecone/openapi_support/rest_urllib3.py b/pinecone/openapi_support/rest_urllib3.py index 3f718347..e25d80a0 100644 --- a/pinecone/openapi_support/rest_urllib3.py +++ b/pinecone/openapi_support/rest_urllib3.py @@ -25,7 +25,7 @@ class bcolors: logger = logging.getLogger(__name__) -""" @private """ +""" :meta private: """ class Urllib3RestClient(RestClientInterface): diff --git a/pinecone/openapi_support/rest_utils.py b/pinecone/openapi_support/rest_utils.py index 77bb4c37..d41e192a 100644 --- a/pinecone/openapi_support/rest_utils.py +++ b/pinecone/openapi_support/rest_utils.py @@ -11,7 +11,7 @@ ) logger = logging.getLogger(__name__) -""" @private """ +""" :meta private: """ class RESTResponse(io.IOBase): diff --git a/pinecone/pinecone.py b/pinecone/pinecone.py index f794419b..202bed3b 100644 --- a/pinecone/pinecone.py +++ b/pinecone/pinecone.py @@ -11,7 +11,7 @@ from .langchain_import_warnings import _build_langchain_attribute_error_message logger = logging.getLogger(__name__) -""" @private """ +""" :meta private: """ if TYPE_CHECKING: from pinecone.config import Config, OpenApiConfiguration @@ -223,23 +223,23 @@ def __init__( ssl_verify=ssl_verify, **kwargs, ) - """ @private """ + """ :meta private: """ self._openapi_config = ConfigBuilder.build_openapi_config(self._config, **kwargs) - """ @private """ + """ :meta private: """ if pool_threads is None: self._pool_threads = 5 * cpu_count() - """ @private """ + """ :meta private: """ else: self._pool_threads = 
pool_threads - """ @private """ + """ :meta private: """ self._inference = None # Lazy initialization - """ @private """ + """ :meta private: """ self._db_control = None # Lazy initialization - """ @private """ + """ :meta private: """ super().__init__() # Initialize PluginAware @@ -275,7 +275,7 @@ def db(self): @property def index_host_store(self) -> "IndexHostStore": - """@private""" + """:meta private:""" warnings.warn( "The `index_host_store` property is deprecated. This warning will become an error in a future version of the Pinecone Python SDK.", DeprecationWarning, @@ -285,14 +285,14 @@ def index_host_store(self) -> "IndexHostStore": @property def config(self) -> "Config": - """@private""" + """:meta private:""" # The config property is considered private, but the name cannot be changed to include underscore # without breaking compatibility with plugins in the wild. return self._config @property def openapi_config(self) -> "OpenApiConfiguration": - """@private""" + """:meta private:""" warnings.warn( "The `openapi_config` property has been renamed to `_openapi_config`. It is considered private and should not be used directly. This warning will become an error in a future version of the Pinecone Python SDK.", DeprecationWarning, @@ -302,7 +302,7 @@ def openapi_config(self) -> "OpenApiConfiguration": @property def pool_threads(self) -> int: - """@private""" + """:meta private:""" warnings.warn( "The `pool_threads` property has been renamed to `_pool_threads`. It is considered private and should not be used directly. This warning will become an error in a future version of the Pinecone Python SDK.", DeprecationWarning, @@ -312,7 +312,7 @@ def pool_threads(self) -> int: @property def index_api(self) -> "ManageIndexesApi": - """@private""" + """:meta private:""" warnings.warn( "The `index_api` property is deprecated. 
This warning will become an error in a future version of the Pinecone Python SDK.", DeprecationWarning, @@ -460,12 +460,12 @@ def describe_restore_job(self, *, job_id: str) -> "RestoreJobModel": @staticmethod def from_texts(*args, **kwargs): - """@private""" + """:meta private:""" raise AttributeError(_build_langchain_attribute_error_message("from_texts")) @staticmethod def from_documents(*args, **kwargs): - """@private""" + """:meta private:""" raise AttributeError(_build_langchain_attribute_error_message("from_documents")) def Index(self, name: str = "", host: str = "", **kwargs) -> "Index": @@ -518,7 +518,7 @@ def IndexAsyncio(self, host: str, **kwargs) -> "IndexAsyncio": def check_realistic_host(host: str) -> None: - """@private + """:meta private: Checks whether a user-provided host string seems plausible. Someone could erroneously pass an index name as the host by diff --git a/pinecone/pinecone_asyncio.py b/pinecone/pinecone_asyncio.py index 13bdb27a..36d86495 100644 --- a/pinecone/pinecone_asyncio.py +++ b/pinecone/pinecone_asyncio.py @@ -39,7 +39,7 @@ from pinecone.db_control.index_host_store import IndexHostStore logger = logging.getLogger(__name__) -""" @private """ +""" :meta private: """ class PineconeAsyncio(PineconeAsyncioDBControlInterface): @@ -98,16 +98,16 @@ def __init__( ssl_verify=ssl_verify, **kwargs, ) - """ @private """ + """ :meta private: """ self._openapi_config = ConfigBuilder.build_openapi_config(self._config, **kwargs) - """ @private """ + """ :meta private: """ self._inference = None # Lazy initialization - """ @private """ + """ :meta private: """ self._db_control = None # Lazy initialization - """ @private """ + """ :meta private: """ async def __aenter__(self): return self @@ -175,7 +175,7 @@ def db(self): @property def index_host_store(self) -> "IndexHostStore": - """@private""" + """:meta private:""" warnings.warn( "The `index_host_store` property is deprecated. 
This warning will become an error in a future version of the Pinecone Python SDK.", DeprecationWarning, @@ -185,7 +185,7 @@ def index_host_store(self) -> "IndexHostStore": @property def index_api(self) -> "AsyncioManageIndexesApi": - """@private""" + """:meta private:""" warnings.warn( "The `index_api` property is deprecated. This warning will become an error in a future version of the Pinecone Python SDK.", DeprecationWarning, diff --git a/pinecone/utils/plugin_aware.py b/pinecone/utils/plugin_aware.py index 56c54e90..540e9cd5 100644 --- a/pinecone/utils/plugin_aware.py +++ b/pinecone/utils/plugin_aware.py @@ -7,7 +7,7 @@ import logging logger = logging.getLogger(__name__) -""" @private """ +""" :meta private: """ class PluginAware: @@ -37,7 +37,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: AttributeError: If required attributes are not set in the subclass. """ self._plugins_loaded = False - """ @private """ + """ :meta private: """ # Check for required attributes after super().__init__ has been called missing_attrs = [] diff --git a/tests/integration/control/resources/conftest.py b/tests/integration/control/resources/conftest.py index 93060a66..f1845cea 100644 --- a/tests/integration/control/resources/conftest.py +++ b/tests/integration/control/resources/conftest.py @@ -9,7 +9,7 @@ dotenv.load_dotenv() logger = logging.getLogger(__name__) -""" @private """ +""" :meta private: """ # Generate a unique ID for the entire test run RUN_ID = str(uuid.uuid4()) diff --git a/tests/integration/control/serverless/conftest.py b/tests/integration/control/serverless/conftest.py index d1d880d2..d86b636b 100644 --- a/tests/integration/control/serverless/conftest.py +++ b/tests/integration/control/serverless/conftest.py @@ -6,7 +6,7 @@ from ...helpers import generate_index_name, get_environment_var logger = logging.getLogger(__name__) -""" @private """ +""" :meta private: """ @pytest.fixture() diff --git 
a/tests/integration/control/serverless/test_index_instantiation_ux.py b/tests/integration/control/serverless/test_index_instantiation_ux.py deleted file mode 100644 index c34930d6..00000000 --- a/tests/integration/control/serverless/test_index_instantiation_ux.py +++ /dev/null @@ -1,13 +0,0 @@ -import pinecone -import pytest - - -class TestIndexInstantiationUX: - def test_index_instantiation_ux(self): - with pytest.raises(Exception) as e: - pinecone.Index(name="my-index", host="test-bt8x3su.svc.apw5-4e34-81fa.pinecone.io") - - assert ( - "You are attempting to access the Index client directly from the pinecone module." - in str(e.value) - ) diff --git a/tests/integration/control_asyncio/resources/conftest.py b/tests/integration/control_asyncio/resources/conftest.py index f7135575..40e153c1 100644 --- a/tests/integration/control_asyncio/resources/conftest.py +++ b/tests/integration/control_asyncio/resources/conftest.py @@ -9,7 +9,7 @@ dotenv.load_dotenv() logger = logging.getLogger(__name__) -""" @private """ +""" :meta private: """ # Generate a unique ID for the entire test run RUN_ID = str(uuid.uuid4()) From 897ae60081dc9c93bfa4afef68d692bcc61a57b2 Mon Sep 17 00:00:00 2001 From: Jen Hamon Date: Wed, 4 Jun 2025 14:49:48 -0400 Subject: [PATCH 8/8] Fix workflow for dependency changes --- .github/workflows/on-pr-dep-change.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/on-pr-dep-change.yaml b/.github/workflows/on-pr-dep-change.yaml index be47eb00..a40d0cf2 100644 --- a/.github/workflows/on-pr-dep-change.yaml +++ b/.github/workflows/on-pr-dep-change.yaml @@ -30,6 +30,7 @@ jobs: if: ${{ always() }} needs: - dependency-tests + - create-project uses: './.github/workflows/project-cleanup.yaml' secrets: inherit with: