From abda18ebb587d06aa6a4d1acac602d66b5c4a3ad Mon Sep 17 00:00:00 2001
From: Elastic Machine
Date: Mon, 25 Aug 2025 06:02:46 +0000
Subject: [PATCH] Auto-generated API code

---
 elasticsearch/_async/client/__init__.py | 16 +++++++++++-----
 elasticsearch/_async/client/esql.py     |  8 ++++----
 elasticsearch/_async/client/sql.py      |  2 +-
 elasticsearch/_sync/client/__init__.py  | 16 +++++++++++-----
 elasticsearch/_sync/client/esql.py      |  8 ++++----
 elasticsearch/_sync/client/sql.py       |  2 +-
 6 files changed, 32 insertions(+), 20 deletions(-)

diff --git a/elasticsearch/_async/client/__init__.py b/elasticsearch/_async/client/__init__.py
index 8b64c2040..c83f86edc 100644
--- a/elasticsearch/_async/client/__init__.py
+++ b/elasticsearch/_async/client/__init__.py
@@ -700,6 +700,7 @@ async def bulk(
         * JavaScript: Check out `client.helpers.*`
         * .NET: Check out `BulkAllObservable`
         * PHP: Check out bulk indexing.
+        * Ruby: Check out `Elasticsearch::Helpers::BulkHelper`
         **Submitting bulk requests with cURL**
 
         If you're providing text file input to curl, you must use the `--data-binary` flag instead of plain `-d`.
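As a side note on the helper list in the docstring above: for the Python client itself the equivalent lives in `elasticsearch.helpers`, which chunks actions and builds the newline-delimited bulk body for you. A minimal sketch (the endpoint, index name, and generated documents are illustrative assumptions, not part of this patch):

```python
from elasticsearch import Elasticsearch, helpers

client = Elasticsearch("http://localhost:9200")  # assumed local endpoint

# Each action is a plain dict; helpers.bulk() chunks the actions and builds
# the newline-delimited bulk request body for you.
actions = (
    {"_index": "my-index", "_id": str(i), "value": i}  # hypothetical documents
    for i in range(100)
)

successes, errors = helpers.bulk(client, actions)
print(successes, errors)
```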
@@ -1416,7 +1417,7 @@ async def delete(
         )
 
     @_rewrite_parameters(
-        body_fields=("max_docs", "query", "slice"),
+        body_fields=("max_docs", "query", "slice", "sort"),
         parameter_aliases={"from": "from_"},
     )
     async def delete_by_query(
@@ -1460,7 +1461,12 @@ async def delete_by_query(
         ] = None,
         slice: t.Optional[t.Mapping[str, t.Any]] = None,
         slices: t.Optional[t.Union[int, t.Union[str, t.Literal["auto"]]]] = None,
-        sort: t.Optional[t.Sequence[str]] = None,
+        sort: t.Optional[
+            t.Union[
+                t.Sequence[t.Union[str, t.Mapping[str, t.Any]]],
+                t.Union[str, t.Mapping[str, t.Any]],
+            ]
+        ] = None,
         stats: t.Optional[t.Sequence[str]] = None,
         terminate_after: t.Optional[int] = None,
         timeout: t.Optional[t.Union[str, t.Literal[-1], t.Literal[0]]] = None,
@@ -1592,7 +1598,7 @@ async def delete_by_query(
         :param slice: Slice the request manually using the provided slice ID and total
             number of slices.
         :param slices: The number of slices this task should be divided into.
-        :param sort: A comma-separated list of `<field>:<direction>` pairs.
+        :param sort: A sort object that specifies the order of deleted documents.
         :param stats: The specific `tag` of the request for logging and statistical
             purposes.
         :param terminate_after: The maximum number of documents to collect for each
             shard. If a query reaches this limit, Elasticsearch terminates the query early.
@@ -1682,8 +1688,6 @@ async def delete_by_query(
             __query["search_type"] = search_type
         if slices is not None:
             __query["slices"] = slices
-        if sort is not None:
-            __query["sort"] = sort
         if stats is not None:
             __query["stats"] = stats
         if terminate_after is not None:
@@ -1703,6 +1707,8 @@ async def delete_by_query(
                 __body["query"] = query
             if slice is not None:
                 __body["slice"] = slice
+            if sort is not None:
+                __body["sort"] = sort
         __headers = {"accept": "application/json", "content-type": "application/json"}
         return await self.perform_request(  # type: ignore[return-value]
             "POST",
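The `delete_by_query` hunks above move `sort` out of the query string and into the request body, and widen its type so full sort objects are accepted alongside plain `field:direction` strings. A rough usage sketch against the updated signature (the endpoint, index, query, and sort field below are illustrative assumptions, not part of the patch):

```python
from elasticsearch import Elasticsearch

client = Elasticsearch("http://localhost:9200")  # assumed local endpoint

# With this change, `sort` is serialized into the request body and may be a
# sort object (or a list mixing objects and "field:direction" strings).
resp = client.delete_by_query(
    index="my-index",                        # hypothetical index
    query={"term": {"user.id": "kimchy"}},   # hypothetical query
    max_docs=1000,
    sort=[{"@timestamp": {"order": "asc"}}],
)
print(resp["deleted"])
```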
diff --git a/elasticsearch/_async/client/esql.py b/elasticsearch/_async/client/esql.py
index b413e69ed..ae83cd33b 100644
--- a/elasticsearch/_async/client/esql.py
+++ b/elasticsearch/_async/client/esql.py
@@ -44,7 +44,7 @@ class EsqlClient(NamespacedClient):
     async def async_query(
         self,
         *,
-        query: t.Optional[str] = None,
+        query: t.Optional[t.Union[str, "ESQLBase"]] = None,
         columnar: t.Optional[bool] = None,
         delimiter: t.Optional[str] = None,
         drop_null_columns: t.Optional[bool] = None,
@@ -153,7 +153,7 @@ async def async_query(
             __query["pretty"] = pretty
         if not __body:
             if query is not None:
-                __body["query"] = query
+                __body["query"] = str(query)
             if columnar is not None:
                 __body["columnar"] = columnar
             if filter is not None:
@@ -391,7 +391,7 @@ async def async_query_stop(
     async def query(
         self,
         *,
-        query: t.Optional[str] = None,
+        query: t.Optional[t.Union[str, "ESQLBase"]] = None,
         columnar: t.Optional[bool] = None,
         delimiter: t.Optional[str] = None,
         drop_null_columns: t.Optional[bool] = None,
@@ -480,7 +480,7 @@ async def query(
             __query["pretty"] = pretty
         if not __body:
             if query is not None:
-                __body["query"] = query
+                __body["query"] = str(query)
             if columnar is not None:
                 __body["columnar"] = columnar
             if filter is not None:
diff --git a/elasticsearch/_async/client/sql.py b/elasticsearch/_async/client/sql.py
index 951379ddb..aac401fdf 100644
--- a/elasticsearch/_async/client/sql.py
+++ b/elasticsearch/_async/client/sql.py
@@ -283,7 +283,7 @@ async def query(
         keep_alive: t.Optional[t.Union[str, t.Literal[-1], t.Literal[0]]] = None,
         keep_on_completion: t.Optional[bool] = None,
         page_timeout: t.Optional[t.Union[str, t.Literal[-1], t.Literal[0]]] = None,
-        params: t.Optional[t.Mapping[str, t.Any]] = None,
+        params: t.Optional[t.Sequence[t.Any]] = None,
         pretty: t.Optional[bool] = None,
         query: t.Optional[str] = None,
         request_timeout: t.Optional[t.Union[str, t.Literal[-1], t.Literal[0]]] = None,
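In both `esql.async_query` and `esql.query`, `query` now also accepts an `ESQLBase` object, and the client serializes it with `str(query)` before placing it in the request body, so anything that renders to a valid ES|QL statement can be passed. A sketch (the endpoint and query text are assumptions; the commented-out builder import and methods are likewise assumed rather than shown by this patch):

```python
from elasticsearch import Elasticsearch

client = Elasticsearch("http://localhost:9200")  # assumed local endpoint

# A plain ES|QL string keeps working exactly as before.
resp = client.esql.query(
    query='FROM my-index | WHERE status == "error" | LIMIT 10',  # hypothetical query
)
print(resp["columns"], resp["values"])

# Because the client now calls str(query), a query-builder object whose
# classes derive from ESQLBase can be passed directly, e.g. (assumed API):
# from elasticsearch.esql import ESQL
# resp = client.esql.query(query=ESQL.from_("my-index").limit(10))
```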
diff --git a/elasticsearch/_sync/client/__init__.py b/elasticsearch/_sync/client/__init__.py
index 0ca40ba34..9fc61720a 100644
--- a/elasticsearch/_sync/client/__init__.py
+++ b/elasticsearch/_sync/client/__init__.py
@@ -698,6 +698,7 @@ def bulk(
         * JavaScript: Check out `client.helpers.*`
         * .NET: Check out `BulkAllObservable`
         * PHP: Check out bulk indexing.
+        * Ruby: Check out `Elasticsearch::Helpers::BulkHelper`
         **Submitting bulk requests with cURL**
 
         If you're providing text file input to curl, you must use the `--data-binary` flag instead of plain `-d`.
@@ -1414,7 +1415,7 @@ def delete(
         )
 
     @_rewrite_parameters(
-        body_fields=("max_docs", "query", "slice"),
+        body_fields=("max_docs", "query", "slice", "sort"),
         parameter_aliases={"from": "from_"},
     )
     def delete_by_query(
@@ -1458,7 +1459,12 @@ def delete_by_query(
         ] = None,
         slice: t.Optional[t.Mapping[str, t.Any]] = None,
         slices: t.Optional[t.Union[int, t.Union[str, t.Literal["auto"]]]] = None,
-        sort: t.Optional[t.Sequence[str]] = None,
+        sort: t.Optional[
+            t.Union[
+                t.Sequence[t.Union[str, t.Mapping[str, t.Any]]],
+                t.Union[str, t.Mapping[str, t.Any]],
+            ]
+        ] = None,
         stats: t.Optional[t.Sequence[str]] = None,
         terminate_after: t.Optional[int] = None,
         timeout: t.Optional[t.Union[str, t.Literal[-1], t.Literal[0]]] = None,
@@ -1590,7 +1596,7 @@ def delete_by_query(
         :param slice: Slice the request manually using the provided slice ID and total
             number of slices.
         :param slices: The number of slices this task should be divided into.
-        :param sort: A comma-separated list of `<field>:<direction>` pairs.
+        :param sort: A sort object that specifies the order of deleted documents.
         :param stats: The specific `tag` of the request for logging and statistical
             purposes.
         :param terminate_after: The maximum number of documents to collect for each
             shard. If a query reaches this limit, Elasticsearch terminates the query early.
@@ -1680,8 +1686,6 @@ def delete_by_query(
             __query["search_type"] = search_type
         if slices is not None:
             __query["slices"] = slices
-        if sort is not None:
-            __query["sort"] = sort
         if stats is not None:
             __query["stats"] = stats
         if terminate_after is not None:
@@ -1701,6 +1705,8 @@ def delete_by_query(
                 __body["query"] = query
             if slice is not None:
                 __body["slice"] = slice
+            if sort is not None:
+                __body["sort"] = sort
         __headers = {"accept": "application/json", "content-type": "application/json"}
         return self.perform_request(  # type: ignore[return-value]
             "POST",
diff --git a/elasticsearch/_sync/client/esql.py b/elasticsearch/_sync/client/esql.py
index b91308ea8..7a7202872 100644
--- a/elasticsearch/_sync/client/esql.py
+++ b/elasticsearch/_sync/client/esql.py
@@ -44,7 +44,7 @@ class EsqlClient(NamespacedClient):
     def async_query(
         self,
         *,
-        query: t.Optional[str] = None,
+        query: t.Optional[t.Union[str, "ESQLBase"]] = None,
         columnar: t.Optional[bool] = None,
         delimiter: t.Optional[str] = None,
         drop_null_columns: t.Optional[bool] = None,
@@ -153,7 +153,7 @@ def async_query(
             __query["pretty"] = pretty
         if not __body:
             if query is not None:
-                __body["query"] = query
+                __body["query"] = str(query)
             if columnar is not None:
                 __body["columnar"] = columnar
             if filter is not None:
@@ -391,7 +391,7 @@ def async_query_stop(
     def query(
         self,
         *,
-        query: t.Optional[str] = None,
+        query: t.Optional[t.Union[str, "ESQLBase"]] = None,
         columnar: t.Optional[bool] = None,
         delimiter: t.Optional[str] = None,
         drop_null_columns: t.Optional[bool] = None,
@@ -480,7 +480,7 @@ def query(
             __query["pretty"] = pretty
         if not __body:
             if query is not None:
-                __body["query"] = query
+                __body["query"] = str(query)
             if columnar is not None:
                 __body["columnar"] = columnar
             if filter is not None:
diff --git a/elasticsearch/_sync/client/sql.py b/elasticsearch/_sync/client/sql.py
index b4655b99a..7928c70cf 100644
--- a/elasticsearch/_sync/client/sql.py
+++ b/elasticsearch/_sync/client/sql.py
@@ -283,7 +283,7 @@ def query(
         keep_alive: t.Optional[t.Union[str, t.Literal[-1], t.Literal[0]]] = None,
         keep_on_completion: t.Optional[bool] = None,
         page_timeout: t.Optional[t.Union[str, t.Literal[-1], t.Literal[0]]] = None,
-        params: t.Optional[t.Mapping[str, t.Any]] = None,
+        params: t.Optional[t.Sequence[t.Any]] = None,
         pretty: t.Optional[bool] = None,
         query: t.Optional[str] = None,
         request_timeout: t.Optional[t.Union[str, t.Literal[-1], t.Literal[0]]] = None,
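The `sql.query` hunks retype `params` from a mapping to a sequence, i.e. positional values bound in order to the `?` placeholders in the SQL statement. A hedged sketch of the updated call (the endpoint, table, and values are illustrative assumptions):

```python
from elasticsearch import Elasticsearch

client = Elasticsearch("http://localhost:9200")  # assumed local endpoint

# `params` is now a list of positional values bound to the `?` placeholders.
resp = client.sql.query(
    query="SELECT author, page_count FROM library WHERE page_count > ? ORDER BY page_count DESC",
    params=[500],   # hypothetical value
    fetch_size=5,
)
print(resp["columns"], resp["rows"])
```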