diff --git a/ravendb/documents/commands/results.py b/ravendb/documents/commands/results.py
index 7894b7c2..a0b4ddaf 100644
--- a/ravendb/documents/commands/results.py
+++ b/ravendb/documents/commands/results.py
@@ -1,5 +1,5 @@
 from __future__ import annotations
-from typing import Union, Optional, List, Dict
+from typing import Union, Optional, List, Dict, Any
 
 
 class GetDocumentResult:
@@ -39,3 +39,13 @@ def from_json(cls, json_dict: dict) -> GetDocumentsResult:
             json_dict.get("CompareExchangeValueIncludes", None),
             json_dict.get("NextPageStart", None),
         )
+
+    def to_json(self) -> Dict[str, Any]:
+        return {
+            "Includes": self.includes,
+            "Results": self.results,
+            "CounterIncludes": self.counter_includes,
+            "TimeSeriesIncludes": self.time_series_includes,
+            "CompareExchangeValueIncludes": self.compare_exchange_includes,
+            "NextPageStart": self.next_page_start,
+        }
diff --git a/ravendb/documents/session/cluster_transaction_operation.py b/ravendb/documents/session/cluster_transaction_operation.py
index 651d2165..7d88620e 100644
--- a/ravendb/documents/session/cluster_transaction_operation.py
+++ b/ravendb/documents/session/cluster_transaction_operation.py
@@ -50,12 +50,14 @@ def lazily(self) -> ILazyClusterTransactionOperations:
         pass
 
     @abc.abstractmethod
-    def get_compare_exchange_value(self, key: str, object_type: Type[_T] = None) -> Optional[CompareExchangeValue[_T]]:
+    def get_compare_exchange_value(
+        self, key: str, object_type: Optional[Type[_T]] = None
+    ) -> Optional[CompareExchangeValue[_T]]:
         pass
 
     @abc.abstractmethod
     def get_compare_exchange_values(
-        self, keys: List[str], object_type: Type[_T]
+        self, keys: List[str], object_type: Optional[Type[_T]] = None
     ) -> Dict[str, CompareExchangeValue[_T]]:
         pass
 
@@ -289,11 +291,13 @@ def __init__(self, session: "DocumentSession"):
     def lazily(self) -> ILazyClusterTransactionOperations:
         return LazyClusterTransactionOperations(self.session)
 
-    def get_compare_exchange_value(self, key: str, object_type: Type[_T] = None) -> Optional[CompareExchangeValue[_T]]:
+    def get_compare_exchange_value(
+        self, key: str, object_type: Optional[Type[_T]] = None
+    ) -> Optional[CompareExchangeValue[_T]]:
         return self._get_compare_exchange_value_internal(key, object_type)
 
     def get_compare_exchange_values(
-        self, keys: List[str], object_type: Type[_T]
+        self, keys: List[str], object_type: Optional[Type[_T]] = None
     ) -> Dict[str, CompareExchangeValue[_T]]:
         return super()._get_compare_exchange_values_internal(keys, object_type)
diff --git a/ravendb/documents/session/document_session.py b/ravendb/documents/session/document_session.py
index 8d97c0bc..efeff637 100644
--- a/ravendb/documents/session/document_session.py
+++ b/ravendb/documents/session/document_session.py
@@ -375,16 +375,30 @@ def load_starting_with(
         start_after: Optional[str] = None,
     ) -> List[_T]:
         load_starting_with_operation = LoadStartingWithOperation(self)
-        self.__load_starting_with_internal(
-            id_prefix, load_starting_with_operation, None, matches, start, page_size, exclude, start_after
+        self._load_starting_with_internal(
+            id_prefix, load_starting_with_operation, matches, start, page_size, exclude, start_after
         )
         return load_starting_with_operation.get_documents(object_type)
 
-    def __load_starting_with_internal(
+    def load_starting_with_into_stream(
+        self,
+        id_prefix: str,
+        matches: str = None,
+        start: int = 0,
+        page_size: int = 25,
+        exclude: str = None,
+        start_after: str = None,
+    ) -> bytes:
+        if id_prefix is None:
+            raise ValueError("Arg 'id_prefix' cannot be None.")
+        return self._load_starting_with_into_stream_internal(
+            id_prefix, LoadStartingWithOperation(self), matches, start, page_size, exclude, start_after
+        )
+
+    def _load_starting_with_internal(
         self,
         id_prefix: str,
         operation: LoadStartingWithOperation,
-        stream,
         matches: str,
         start: int,
         page_size: int,
@@ -395,12 +409,31 @@ def __load_starting_with_internal(
         command = operation.create_request()
         if command:
             self._request_executor.execute_command(command, self.session_info)
-            if stream:
-                pass # todo: stream
-            else:
-                operation.set_result(command.result)
+            operation.set_result(command.result)
         return command
 
+    def _load_starting_with_into_stream_internal(
+        self,
+        id_prefix: str,
+        operation: LoadStartingWithOperation,
+        matches: str,
+        start: int,
+        page_size: int,
+        exclude: str,
+        start_after: str,
+    ) -> bytes:
+        operation.with_start_with(id_prefix, matches, start, page_size, exclude, start_after)
+        command = operation.create_request()
+        bytes_result = None
+        if command:
+            self.request_executor.execute_command(command, self.session_info)
+            try:
+                result = command.result
+                bytes_result = json.dumps(result.to_json()).encode("utf-8")
+            except Exception as e:
+                raise RuntimeError("Unable to serialize returned value into stream") from e
+        return bytes_result
+
     def document_query_from_index_type(self, index_type: Type[_TIndex], object_type: Type[_T]) -> DocumentQuery[_T]:
         try:
             index = Utils.try_get_new_instance(index_type)
@@ -456,6 +489,8 @@ def counters_for_entity(self, entity: object) -> SessionDocumentCounters:
         return SessionDocumentCounters(self, entity)
 
     def time_series_for(self, document_id: str, name: str = None) -> SessionDocumentTimeSeries:
+        if not isinstance(document_id, str):
+            raise TypeError("Method time_series_for expects a string. Did you want to call time_series_for_entity?")
         return SessionDocumentTimeSeries(self, document_id, name)
 
     def time_series_for_entity(self, entity: object, name: str = None) -> SessionDocumentTimeSeries:
@@ -723,7 +758,7 @@ def lazily(self) -> LazySessionOperations:
         return self._session._lazily
 
     def graph_query(self, object_type: type, query: str): # -> GraphDocumentQuery:
-        pass
+        raise NotImplementedError("Dropped support for graph queries")
 
     def what_changed(self) -> Dict[str, List[DocumentsChanges]]:
         return self._session._what_changed()
@@ -781,32 +816,6 @@ def wait_for_indexes_after_save_changes(
 
         index_options.wait_for_indexes = True
 
-    def __load_starting_with_internal(
-        self,
-        id_prefix: str,
-        operation: LoadStartingWithOperation,
-        stream: Union[None, bytes],
-        matches: str,
-        start: int,
-        page_size: int,
-        exclude: str,
-        start_after: str,
-    ) -> GetDocumentsCommand:
-        operation.with_start_with(id_prefix, matches, start, page_size, exclude, start_after)
-        command = operation.create_request()
-        if command is not None:
-            self._session._request_executor.execute(command, self._session.session_info)
-            if stream:
-                try:
-                    result = command.result
-                    stream_to_dict = json.loads(stream.decode("utf-8"))
-                    result.__dict__.update(stream_to_dict)
-                except IOError as e:
-                    raise RuntimeError(f"Unable to serialize returned value into stream {e.args[0]}", e)
-            else:
-                operation.set_result(command.result)
-        return command
-
     def load_starting_with(
         self,
         id_prefix: str,
@@ -824,26 +833,14 @@ def load_starting_with_into_stream(
         self,
         id_prefix: str,
-        output: bytes,
         matches: str = None,
         start: int = 0,
         page_size: int = 25,
         exclude: str = None,
         start_after: str = None,
-    ):
-        if not output:
-            raise ValueError("Output cannot be None")
-        if not 
id_prefix: - raise ValueError("Id prefix cannot be None") - self.__load_starting_with_internal( - id_prefix, - LoadStartingWithOperation(self._session), - output, - matches, - start, - page_size, - exclude, - start_after, + ) -> bytes: + return self._session.load_starting_with_into_stream( + id_prefix, matches, start, page_size, exclude, start_after ) def load_into_stream(self, keys: List[str], output: bytes) -> None: diff --git a/ravendb/documents/session/loaders/include.py b/ravendb/documents/session/loaders/include.py index 70ace43b..10541ec3 100644 --- a/ravendb/documents/session/loaders/include.py +++ b/ravendb/documents/session/loaders/include.py @@ -358,23 +358,29 @@ def include_all_counters(self) -> SubscriptionIncludeBuilder: self._include_all_counters("") return self + def include_time_series_by_range_type_and_time( + self, name: str, ts_type: TimeSeriesRangeType, time: TimeValue + ) -> SubscriptionIncludeBuilder: + self._include_time_series_by_range_type_and_time("", name, ts_type, time) + return self + + def include_time_series_by_range_type_and_count( + self, name: str, ts_type: TimeSeriesRangeType, count: int + ) -> SubscriptionIncludeBuilder: + self._include_time_series_by_range_type_and_count("", name, ts_type, count) + return self + + def include_all_time_series_by_range_type_and_count( + self, ts_type: TimeSeriesRangeType, count: int + ) -> SubscriptionIncludeBuilder: + self._include_time_series_by_range_type_and_count("", constants.TimeSeries.ALL, ts_type, count) + return self -# def include_time_series( -# self, -# name:str, -# ts_type: TimeSeriesRangeType, -# time: TimeValue -# ) -> SubscriptionIncludeBuilder: -# self._include_time_series_by_range_type_and_time("", name, ts_type, time) -# return self -# -# def include_time_series_by_range_type_and_count( -# self, -# name:str, -# ts_type: TimeSeriesRangeType, -# time: TimeValue -# ) -> SubscriptionIncludeBuilder: -# self._include_time_series_by_range_type_and_count("", name, type, count) + def include_all_time_series_by_range_type_and_time( + self, ts_type: TimeSeriesRangeType, time: TimeValue + ) -> SubscriptionIncludeBuilder: + self._include_time_series_by_range_type_and_time("", constants.TimeSeries.ALL, ts_type, time) + return self class TimeSeriesIncludeBuilder(IncludeBuilderBase): diff --git a/ravendb/documents/session/loaders/loaders.py b/ravendb/documents/session/loaders/loaders.py index d35bfa33..59943a88 100644 --- a/ravendb/documents/session/loaders/loaders.py +++ b/ravendb/documents/session/loaders/loaders.py @@ -1,6 +1,6 @@ from __future__ import annotations from abc import abstractmethod -from typing import TypeVar, TYPE_CHECKING, Dict, List, Type +from typing import TypeVar, TYPE_CHECKING, Dict, List, Type, Optional, Set, Union from ravendb.documents.store.lazy import Lazy @@ -16,21 +16,24 @@ def include(self, path: str) -> LoaderWithInclude: pass @abstractmethod - def load(self, object_type: _T, *ids: str) -> _T: + def load(self, id_: str, object_type: Optional[Type[_T]] = None) -> _T: pass class MultiLoaderWithInclude(LoaderWithInclude): def __init__(self, session: DocumentSession): self.__session = session - self.__includes: List[str] = [] + self.__includes: Set[str] = set() def include(self, path: str) -> LoaderWithInclude: - self.__includes.append(path) + self.__includes.add(path) return self - def load(self, object_type: Type[_T], *ids: str) -> Dict[str, _T]: - return self.__session._load_internal(object_type, list(ids), self.__includes) + def load(self, id_or_ids: Union[List[str], str], 
object_type: Optional[Type[_T]] = None) -> _T: + if not isinstance(id_or_ids, (str, list)): + raise TypeError(f"Expected str or list of str, got '{type(id_or_ids)}'") + ids = [id_or_ids] if isinstance(id_or_ids, str) else id_or_ids + return self.__session._load_internal(object_type, ids, self.__includes) class LazyMultiLoaderWithInclude(LoaderWithInclude): @@ -42,5 +45,8 @@ def include(self, path: str) -> LazyMultiLoaderWithInclude: self.__includes.append(path) return self - def load(self, object_type: Type[_T], *ids: str) -> Lazy[Dict[str, _T]]: - return self.__session.lazy_load_internal(object_type, list(ids), self.__includes, None) + def load(self, id_or_ids: Union[List[str], str], object_type: Optional[Type[_T]] = None) -> Lazy[Dict[str, _T]]: + if not isinstance(id_or_ids, (str, list)): + raise TypeError(f"Expected str or list of str, got '{type(id_or_ids)}'") + ids = [id_or_ids] if isinstance(id_or_ids, str) else id_or_ids + return self.__session.lazy_load_internal(object_type, ids, self.__includes, None) diff --git a/ravendb/documents/session/operations/lazy.py b/ravendb/documents/session/operations/lazy.py index 70f46c8d..f6c091a5 100644 --- a/ravendb/documents/session/operations/lazy.py +++ b/ravendb/documents/session/operations/lazy.py @@ -183,37 +183,37 @@ def handle_response(self, response: "GetResponse") -> None: class LazySessionOperations: def __init__(self, delegate: DocumentSession): - self._delegate = delegate + self._session = delegate def include(self, path: str) -> LazyMultiLoaderWithInclude: - return LazyMultiLoaderWithInclude(self._delegate).include(path) + return LazyMultiLoaderWithInclude(self._session).include(path) def load( - self, object_type: Type[_T], ids: Union[List[str], str], on_eval: Callable = None + self, ids: Union[List[str], str], object_type: Optional[Type[_T]] = None, on_eval: Callable = None ) -> Optional[Lazy[Union[Dict[str, object], object]]]: if not ids: return None if isinstance(ids, str): key = ids - if self._delegate.advanced.is_loaded(key): - return Lazy(lambda: self._delegate.load(key, object_type)) + if self._session.advanced.is_loaded(key): + return Lazy(lambda: self._session.load(key, object_type)) lazy_load_operation = LazyLoadOperation( - object_type, self._delegate, LoadOperation(self._delegate).by_key(key) + object_type, self._session, LoadOperation(self._session).by_key(key) ).by_key(key) - return self._delegate.add_lazy_operation(object_type, lazy_load_operation, on_eval) + return self._session.add_lazy_operation(object_type, lazy_load_operation, on_eval) elif isinstance(ids, list): - return self._delegate.lazy_load_internal(object_type, ids, [], on_eval) + return self._session.lazy_load_internal(object_type, ids, [], on_eval) - raise TypeError("Expected 'ids' as 'str' or 'list[str]'") + raise TypeError(f"Expected a 'str' or 'list' of 'str', the document ids. 
Got '{type(ids).__name__}'.") def load_starting_with( self, id_prefix: str, - object_type: Optional[Type[_T]], + object_type: Optional[Type[_T]] = None, matches: str = None, start: int = 0, page_size: int = 25, @@ -221,24 +221,24 @@ def load_starting_with( start_after: str = None, ) -> Lazy[Dict[str, _T]]: operation = LazyStartsWithOperation( - object_type, id_prefix, matches, exclude, start, page_size, self._delegate, start_after + object_type, id_prefix, matches, exclude, start, page_size, self._session, start_after ) - return self._delegate.add_lazy_operation(dict, operation, None) + return self._session.add_lazy_operation(dict, operation, None) def conditional_load( - self, key: str, change_vector: str, object_type: Type[_T] = None + self, key: str, change_vector: str, object_type: Optional[Type[_T]] = None ) -> Lazy[ConditionalLoadResult[_T]]: if not key or key.isspace(): raise ValueError("key cannot be None or whitespace") - if self._delegate.is_loaded(key): + if self._session.is_loaded(key): def __lazy_factory(): - entity = self._delegate.load(key, object_type) + entity = self._session.load(key, object_type) if entity is None: return ConditionalLoadResult.create(None, None) - cv = self._delegate.advanced.get_change_vector_for(entity) + cv = self._session.advanced.get_change_vector_for(entity) return ConditionalLoadResult.create(entity, cv) return Lazy(__lazy_factory) @@ -249,8 +249,8 @@ def __lazy_factory(): f"conditional load when change_vector is None or empty" ) - lazy_load_operation = LazyConditionalLoadOperation(object_type, key, change_vector, self._delegate) - return self._delegate.add_lazy_operation(ConditionalLoadResult, lazy_load_operation, None) + lazy_load_operation = LazyConditionalLoadOperation(object_type, key, change_vector, self._session) + return self._session.add_lazy_operation(ConditionalLoadResult, lazy_load_operation, None) class LazyLoadOperation(LazyOperation): diff --git a/ravendb/documents/subscriptions/document_subscriptions.py b/ravendb/documents/subscriptions/document_subscriptions.py index 80df2844..e18cdc70 100644 --- a/ravendb/documents/subscriptions/document_subscriptions.py +++ b/ravendb/documents/subscriptions/document_subscriptions.py @@ -2,6 +2,7 @@ from typing import Optional, Type, TypeVar, Dict, List, TYPE_CHECKING from ravendb.documents.operations.ongoing_tasks import ToggleOngoingTaskStateOperation, OngoingTaskType +from ravendb.documents.session.tokens.query_tokens.definitions import CounterIncludesToken, TimeSeriesIncludesToken from ravendb.documents.session.tokens.query_tokens.query_token import QueryToken from ravendb.documents.session.utils.includes_util import IncludesUtil from ravendb.documents.commands.subscriptions import ( @@ -96,11 +97,11 @@ def ensure_criteria(self, criteria: SubscriptionCreationOptions, object_type: Ty number_of_includes_added = 0 - if builder._documents_to_include is not None and not len(builder._documents_to_include) == 0: + if builder.documents_to_include is not None and not len(builder.documents_to_include) == 0: query_builder.append(os.linesep) query_builder.append("include ") - for inc in builder._documents_to_include: + for inc in builder.documents_to_include: include = "doc." 
+ inc if number_of_includes_added > 0: query_builder.append(",") @@ -115,41 +116,41 @@ def ensure_criteria(self, criteria: SubscriptionCreationOptions, object_type: Ty query_builder.append(f"'{include}'" if QueryToken.is_keyword(include) else include) number_of_includes_added += 1 - # todo: uncomment on Counters and TimeSeries development - # if builder._is_all_counters: - # if number_of_includes_added == 0: - # query_builder.append(os.linesep) - # query_builder.append("include ") - # - # token = CountersIncludesToken.all("") - # token.write_to(query_builder) - # number_of_includes_added += 1 - # - # elif builder._counters_to_include: - # if number_of_includes_added: - # query_builder.append(os.linesep) - # query_builder.append("include ") - # - # for counter_name in builder._counters_to_include: - # if number_of_includes_added > 0: - # query_builder.append(",") - # - # token = CountersToIncludeToken.create("", counter_name) - # token.write_to(query_builder) - # - # number_of_includes_added += 1 - # - # if builder._time_series_to_include: - # for time_series_range in builder._time_series_to_include: - # if number_of_includes_added == 0: - # query_builder.append(os.linesep) - # query_builder.append("include ") - # - # if number_of_includes_added > 0: - # query_builder.append(",") - # - # token = TimeSeriesIncludeToken.create("", time_series_range) - # token.write_to(query_builder) + + if builder.is_all_counters: + if number_of_includes_added == 0: + query_builder.append(os.linesep) + query_builder.append("include ") + + token = CounterIncludesToken.all("") + token.write_to(query_builder) + number_of_includes_added += 1 + + elif builder.counters_to_include: + if number_of_includes_added: + query_builder.append(os.linesep) + query_builder.append("include ") + + for counter_name in builder.counters_to_include: + if number_of_includes_added > 0: + query_builder.append(",") + + token = CounterIncludesToken.create("", counter_name) + token.write_to(query_builder) + + number_of_includes_added += 1 + + if builder.time_series_to_include: + for time_series_range in builder.time_series_to_include: + if number_of_includes_added == 0: + query_builder.append(os.linesep) + query_builder.append("include ") + + if number_of_includes_added > 0: + query_builder.append(",") + + token = TimeSeriesIncludesToken.create("", time_series_range) + token.write_to(query_builder) criteria.query = "".join(query_builder) return criteria diff --git a/ravendb/http/request_executor.py b/ravendb/http/request_executor.py index 5e4c0541..54330262 100644 --- a/ravendb/http/request_executor.py +++ b/ravendb/http/request_executor.py @@ -47,7 +47,7 @@ class RequestExecutor: __INITIAL_TOPOLOGY_ETAG = -2 __GLOBAL_APPLICATION_IDENTIFIER = uuid.uuid4() - CLIENT_VERSION = "5.2.5" + CLIENT_VERSION = "5.2.6" logger = logging.getLogger("request_executor") # todo: initializer should take also cryptography certificates @@ -129,7 +129,8 @@ def close(self): self.__update_topology_timer.cancel() self._dispose_all_failed_nodes_timers() - self.__http_session.close() + if self.__http_session is not None: + self.__http_session.close() @property def certificate_path(self) -> str: @@ -169,7 +170,6 @@ def http_session(self): return self.__http_session def __create_http_session(self) -> requests.Session: - # todo: check if http client name is required session = requests.session() session.cert = self.__certificate_path session.verify = self.__trust_store_path if self.__trust_store_path else True diff --git 
a/ravendb/tests/jvm_migrated_tests/bugs_tests/caching_tests/test_caching_of_document_include.py b/ravendb/tests/jvm_migrated_tests/bugs_tests/caching_tests/test_caching_of_document_include.py index 3d4c9e4d..83af7f21 100644 --- a/ravendb/tests/jvm_migrated_tests/bugs_tests/caching_tests/test_caching_of_document_include.py +++ b/ravendb/tests/jvm_migrated_tests/bugs_tests/caching_tests/test_caching_of_document_include.py @@ -43,7 +43,7 @@ def test_can_avoid_using_server_for_load_with_include_if_everything_is_in_sessio session.load(user.partner_id) old = session.number_of_requests - new_user = session.include("partner_id").load(User, "users/2-A") + new_user = session.include("partner_id").load(["users/2-A"]) self.assertEqual(old, session.number_of_requests) def test_can_include_nested_paths(self): diff --git a/ravendb/tests/jvm_migrated_tests/client_tests/documents_tests/commands_tests/test_get_statistics_command.py b/ravendb/tests/jvm_migrated_tests/client_tests/documents_tests/commands_tests/test_get_statistics_command.py index c9b55c6b..dd3502fd 100644 --- a/ravendb/tests/jvm_migrated_tests/client_tests/documents_tests/commands_tests/test_get_statistics_command.py +++ b/ravendb/tests/jvm_migrated_tests/client_tests/documents_tests/commands_tests/test_get_statistics_command.py @@ -1,5 +1,8 @@ +from datetime import datetime, timedelta + from ravendb import GetStatisticsOperation from ravendb.documents.smuggler.common import DatabaseItemType +from ravendb.infrastructure.entities import User from ravendb.infrastructure.operations import CreateSampleDataOperation from ravendb.tests.test_base import TestBase @@ -74,3 +77,17 @@ def test_can_get_stats(self): self.assertIsNotNone(index_information.type) self.assertIsNotNone(index_information.last_indexing_time) + + def test_can_get_stats_for_counters_and_time_series(self): + with self.store.open_session() as session: + session.store(User(), "users/1") + session.counters_for("users/1").increment("c1") + session.counters_for("users/1").increment("c2") + tsf = session.time_series_for("users/1", "Heartrate") + tsf.append(datetime.now(), [70]) + tsf.append(datetime.now() + timedelta(minutes=1), [20]) + session.save_changes() + + db_statistics = self.store.maintenance.send(GetStatisticsOperation()) + self.assertEqual(1, db_statistics.count_of_counter_entries) + self.assertEqual(1, db_statistics.count_of_time_series_segments) diff --git a/ravendb/tests/jvm_migrated_tests/client_tests/revisions_tests/test_revisions.py b/ravendb/tests/jvm_migrated_tests/client_tests/revisions_tests/test_revisions.py index 6a0fd0ae..a81f9435 100644 --- a/ravendb/tests/jvm_migrated_tests/client_tests/revisions_tests/test_revisions.py +++ b/ravendb/tests/jvm_migrated_tests/client_tests/revisions_tests/test_revisions.py @@ -356,7 +356,7 @@ def test_can_get_revisions_by_id_and_time_lazily(self): session.save_changes() with self.store.open_session() as session: - revision = session.advanced.lazily.load(User, "users/1") + revision = session.advanced.lazily.load("users/1", User) doc = revision.value self.assertEqual(1, session.advanced.number_of_requests) diff --git a/ravendb/tests/jvm_migrated_tests/client_tests/subscriptions_tests/test_basic_subscription.py b/ravendb/tests/jvm_migrated_tests/client_tests/subscriptions_tests/test_basic_subscription.py index fce96512..a7ecd041 100644 --- a/ravendb/tests/jvm_migrated_tests/client_tests/subscriptions_tests/test_basic_subscription.py +++ b/ravendb/tests/jvm_migrated_tests/client_tests/subscriptions_tests/test_basic_subscription.py @@ 
-7,6 +7,7 @@ from typing import Optional, List from ravendb.documents.session.event_args import BeforeRequestEventArgs +from ravendb.documents.session.time_series import TimeSeriesRangeType from ravendb.documents.subscriptions.options import ( SubscriptionCreationOptions, SubscriptionWorkerOptions, @@ -20,7 +21,9 @@ ) from ravendb.infrastructure.entities import User from ravendb.infrastructure.orders import Company +from ravendb.primitives.time_series import TimeValue from ravendb.tests.test_base import TestBase +from ravendb.tools.raven_test_helper import RavenTestHelper class TestBasicSubscription(TestBase): @@ -587,3 +590,47 @@ def __subscription_callback(x: SubscriptionBatch): subscription.run(__subscription_callback) third_user_processed.wait(timeout=5) + + def test_can_create_subscription_with_include_time_series_last_range_by_time(self): + now = RavenTestHelper.utc_today() + + subscription_creation_options = SubscriptionCreationOptions() + subscription_creation_options.includes = lambda b: b.include_time_series_by_range_type_and_time( + "stock_price", TimeSeriesRangeType.LAST, TimeValue.of_months(1) + ) + + name = self.store.subscriptions.create_for_options_autocomplete_query(Company, subscription_creation_options) + + with self.store.subscriptions.get_subscription_worker_by_name(name, Company) as worker: + event = Event() + + def __subscription_callback(batch: SubscriptionBatch[Company]): + with batch.open_session() as session: + self.assertEqual(0, session.advanced.number_of_requests) + company = session.load("companies/1", Company) + self.assertEqual(0, session.advanced.number_of_requests) + + time_series = session.time_series_for_entity(company, "stock_price") + time_series_entries = time_series.get(now - datetime.timedelta(days=7), None) + + self.assertEqual(1, len(time_series_entries)) + self.assertEqual(now, time_series_entries[0].timestamp) + self.assertEqual(10, time_series_entries[0].value) + + self.assertEqual(0, session.advanced.number_of_requests) + + event.set() + + worker.run(__subscription_callback) + + with self.store.open_session() as session: + company = Company() + company.Id = "companies/1" + company.name = "HR" + + session.store(company) + + session.time_series_for_entity(company, "stock_price").append_single(now, 10) + session.save_changes() + + self.assertTrue(event.wait(30)) diff --git a/ravendb/tests/jvm_migrated_tests/client_tests/test_load_into_stream.py b/ravendb/tests/jvm_migrated_tests/client_tests/test_load_into_stream.py new file mode 100644 index 00000000..5ecc36ec --- /dev/null +++ b/ravendb/tests/jvm_migrated_tests/client_tests/test_load_into_stream.py @@ -0,0 +1,40 @@ +import json + +from ravendb import DocumentStore +from ravendb.documents.commands.results import GetDocumentsResult +from ravendb.infrastructure.orders import Employee +from ravendb.tests.test_base import TestBase + + +class TestLoadIntoStream(TestBase): + def setUp(self): + super().setUp() + + @staticmethod + def insert_data(store: DocumentStore): + with store.open_session() as session: + + def _insert_employee(name: str = None): + employee = Employee(first_name=name) + session.store(employee) + + _insert_employee("Aviv") + _insert_employee("Iftah") + _insert_employee("Tal") + _insert_employee("Maxim") + _insert_employee("Karmel") + _insert_employee("Grisha") + _insert_employee("Michael") + session.save_changes() + + def test_can_load_starting_with_into_stream(self): + self.insert_data(self.store) + with self.store.open_session() as session: + stream = 
session.advanced.load_starting_with_into_stream("employees/") + json_node = json.loads(stream.decode("utf-8")) + result = GetDocumentsResult.from_json(json_node) + self.assertEqual(7, len(result.results)) + names = ["Aviv", "Iftah", "Tal", "Maxim", "Karmel", "Grisha", "Michael"] + for name_from_results in [result["first_name"] for result in result.results]: + self.assertIn(name_from_results, names) + names.remove(name_from_results) diff --git a/ravendb/tests/jvm_migrated_tests/client_tests/time_series_tests/test_time_series_includes.py b/ravendb/tests/jvm_migrated_tests/client_tests/time_series_tests/test_time_series_includes.py index 629f9bf2..9ccdcdc8 100644 --- a/ravendb/tests/jvm_migrated_tests/client_tests/time_series_tests/test_time_series_includes.py +++ b/ravendb/tests/jvm_migrated_tests/client_tests/time_series_tests/test_time_series_includes.py @@ -4,7 +4,7 @@ from ravendb.infrastructure.orders import Company, Order from ravendb.primitives.constants import int_max from ravendb.primitives.time_series import TimeValue -from ravendb.tests.test_base import TestBase, User +from ravendb.tests.test_base import TestBase document_id = "users/gracjan" company_id = "companies/1-A" @@ -17,6 +17,13 @@ tag3 = "watches/bitfit" +class User: + def __init__(self, Id: str = None, name: str = None, works_at: str = None): + self.Id = Id + self.name = name + self.works_at = works_at + + class TestTimeSeriesIncludes(TestBase): def setUp(self): super(TestTimeSeriesIncludes, self).setUp() @@ -1184,3 +1191,69 @@ def test_should_throw_on_including_time_series_with_negative_count(self): TimeSeriesRangeType.LAST, -1024 ), ) + + def test_include_time_series_and_documents_and_counters(self): + with self.store.open_session() as session: + user = User() + user.name = "Oren" + user.works_at = "companies/1" + session.store(user, "users/ayende") + + company = Company(name="HR") + session.store(company, "companies/1") + + session.save_changes() + + with self.store.open_session() as session: + tsf = session.time_series_for("users/ayende", "Heartrate") + + for i in range(360): + tsf.append_single(base_line + timedelta(seconds=i * 10), 67, "watches/fitbit") + + session.counters_for("users/ayende").increment("likes", 100) + session.counters_for("users/ayende").increment("dislikes", 5) + session.save_changes() + + with self.store.open_session() as session: + user = session.load( + "users/ayende", + User, + lambda i: i.include_documents("works_at") + .include_time_series("Heartrate", base_line, base_line + timedelta(minutes=30)) + .include_counter("likes") + .include_counter("dislikes"), + ) + + self.assertEqual(1, session.advanced.number_of_requests) + + self.assertEqual("Oren", user.name) + + # should not go to server + + company = session.load(user.works_at, Company) + self.assertEqual(1, session.advanced.number_of_requests) + + self.assertEqual("HR", company.name) + + # should not go to server + vals = session.time_series_for("users/ayende", "Heartrate").get( + base_line, base_line + timedelta(minutes=30) + ) + self.assertEqual(1, session.advanced.number_of_requests) + + self.assertEqual(181, len(vals)) + + self.assertEqual(base_line, vals[0].timestamp) + self.assertEqual("watches/fitbit", vals[0].tag) + self.assertEqual(67, vals[0].values[0]) + self.assertEqual(base_line + timedelta(minutes=30), vals[180].timestamp) + + # should not go to server + counters = session.counters_for("users/ayende").get_all() + + self.assertEqual(1, session.advanced.number_of_requests) + + counter = counters.get("likes") + 
self.assertEqual(100, counter) + counter = counters.get("dislikes") + self.assertEqual(5, counter) diff --git a/ravendb/tests/jvm_migrated_tests/issues_tests/test_ravenDB_15826.py b/ravendb/tests/jvm_migrated_tests/issues_tests/test_ravenDB_15826.py index 46deca7a..34a10870 100644 --- a/ravendb/tests/jvm_migrated_tests/issues_tests/test_ravenDB_15826.py +++ b/ravendb/tests/jvm_migrated_tests/issues_tests/test_ravenDB_15826.py @@ -23,9 +23,9 @@ def test_can_include_lazy_load_item_that_is_already_on_session(self): session.save_changes() with self.store.open_session() as session: - session.include("refs").load(Item, "items/d") # include, some loaded + session.include("refs").load("items/d", Item) # include, some loaded a: Item = session.load("items/c", Item) - items = session.advanced.lazily.load(Item, a.refs) + items = session.advanced.lazily.load(a.refs, Item) session.advanced.eagerly.execute_all_pending_lazy_operations() items_map = items.value self.assertEqual(len(items_map), len(a.refs)) diff --git a/ravendb/tests/jvm_migrated_tests/lazy_tests/test_lazy.py b/ravendb/tests/jvm_migrated_tests/lazy_tests/test_lazy.py index 8de06263..f6a3a75f 100644 --- a/ravendb/tests/jvm_migrated_tests/lazy_tests/test_lazy.py +++ b/ravendb/tests/jvm_migrated_tests/lazy_tests/test_lazy.py @@ -17,13 +17,13 @@ def test_can_lazily_load_entity(self): session.save_changes() with self.store.open_session() as session: - lazy_order = session.advanced.lazily.load(Company, "companies/1") + lazy_order = session.advanced.lazily.load("companies/1", Company) self.assertFalse(lazy_order.is_value_created) order: Company = lazy_order.value self.assertEqual("companies/1", order.Id) - lazy_orders = session.advanced.lazily.load(Company, ["companies/1", "companies/2"]) + lazy_orders = session.advanced.lazily.load(["companies/1", "companies/2"], Company) self.assertFalse(lazy_orders.is_value_created) orders: Dict[str, Company] = lazy_orders.value @@ -38,7 +38,7 @@ def test_can_lazily_load_entity(self): self.assertEqual("companies/1", company1.Id) self.assertEqual("companies/2", company2.Id) - lazy_order = session.advanced.lazily.load(Company, "companies/3") + lazy_order = session.advanced.lazily.load("companies/3", Company) self.assertFalse(lazy_order.is_value_created) @@ -46,7 +46,7 @@ def test_can_lazily_load_entity(self): self.assertEqual("companies/3", order.Id) - load = session.advanced.lazily.load(Company, ["no_such_1", "no_such_2"]) + load = session.advanced.lazily.load(["no_such_1", "no_such_2"], Company) missing_itmes = load.value self.assertIsNone(missing_itmes.get("no_such_1")) @@ -59,10 +59,10 @@ def test_can_use_cache_when_lazy_loading(self): session.save_changes() with self.store.open_session() as session: - session.advanced.lazily.load(User, "users/1").value + session.advanced.lazily.load("users/1", User).value old_request_count = session.number_of_requests - user: User = session.advanced.lazily.load(User, "users/1").value + user: User = session.advanced.lazily.load("users/1", User).value self.assertEqual("Oren", user.name) self.assertEqual(old_request_count, session.number_of_requests) @@ -87,8 +87,8 @@ def __inner(x): return __inner - session.advanced.lazily.load(Company, "companies/1", lambda x: __fun(company1ref)(x)) - session.advanced.lazily.load(Company, "companies/2", lambda x: __fun(company2ref)(x)) + session.advanced.lazily.load("companies/1", Company, lambda x: __fun(company1ref)(x)) + session.advanced.lazily.load("companies/2", Company, lambda x: __fun(company2ref)(x)) self.assertEqual(0, 
len(company1ref)) self.assertEqual(0, len(company2ref)) @@ -113,7 +113,7 @@ def test_with_queued_actions_load(self): def __fun(x): user_ref.append(x) - session.advanced.lazily.load(User, "users/1", lambda x: __fun(x)) + session.advanced.lazily.load("users/1", User, lambda x: __fun(x)) session.advanced.eagerly.execute_all_pending_lazy_operations() @@ -133,8 +133,8 @@ def test_dont_lazy_load_already_loaded_values(self): session.save_changes() with self.store.open_session() as session: - lazy_load = session.advanced.lazily.load(User, ["users/2", "users/3"]) - session.advanced.lazily.load(User, ["users/1", "users/3"]) + lazy_load = session.advanced.lazily.load(["users/2", "users/3"], User) + session.advanced.lazily.load(["users/1", "users/3"], User) session.load("users/2", User) session.load("users/3", User) @@ -147,7 +147,7 @@ def test_dont_lazy_load_already_loaded_values(self): self.assertEqual(2, len(users)) old_request_count = session.number_of_requests - lazy_load = session.advanced.lazily.load(User, ["users/3"]) + lazy_load = session.advanced.lazily.load(["users/3"], User) session.advanced.eagerly.execute_all_pending_lazy_operations() self.assertEqual(old_request_count, session.number_of_requests) diff --git a/setup.py b/setup.py index 683f89cf..2f959a90 100644 --- a/setup.py +++ b/setup.py @@ -3,7 +3,7 @@ setup( name="ravendb", packages=find_packages(exclude=["*.tests.*", "tests", "*.tests", "tests.*"]), - version="5.2.5.post1", + version="5.2.6", long_description_content_type="text/markdown", long_description=open("README_pypi.md").read(), description="Python client for RavenDB NoSQL Database",
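Usage sketch (illustrative, not part of the diff): the snippet below shows how the reworked public API in this changeset is exercised, mirroring the new tests (`test_load_into_stream.py`, `test_lazy.py`). It assumes an already initialized `DocumentStore` bound to the name `store` and documents stored under the `employees/` prefix; those names and the document id are assumptions for the example, not part of the change.

```python
import json

from ravendb.documents.commands.results import GetDocumentsResult
from ravendb.infrastructure.orders import Employee  # test-infrastructure entity, used here only for illustration

# `store` is assumed to be an initialized DocumentStore.
with store.open_session() as session:
    # load_starting_with_into_stream no longer takes an `output` buffer;
    # it now returns the raw GetDocumentsResult payload as UTF-8 encoded JSON bytes.
    stream = session.advanced.load_starting_with_into_stream("employees/")
    result = GetDocumentsResult.from_json(json.loads(stream.decode("utf-8")))
    print(len(result.results))

    # Lazy loads (and include().load()) now take the id, or list of ids, first,
    # with the object type as an optional second argument.
    lazy_employee = session.advanced.lazily.load("employees/1-A", Employee)  # assumed id
    employee = lazy_employee.value
```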