12 changes: 11 additions & 1 deletion ravendb/documents/commands/results.py
@@ -1,5 +1,5 @@
from __future__ import annotations
from typing import Union, Optional, List, Dict
from typing import Union, Optional, List, Dict, Any


class GetDocumentResult:
@@ -39,3 +39,13 @@ def from_json(cls, json_dict: dict) -> GetDocumentsResult:
json_dict.get("CompareExchangeValueIncludes", None),
json_dict.get("NextPageStart", None),
)

def to_json(self) -> Dict[str, Any]:
return {
"Includes": self.includes,
"Results": self.results,
"CounterIncludes": self.counter_includes,
"TimeSeriesIncludes": self.time_series_includes,
"CompareExchangeValueIncludes": self.compare_exchange_includes,
"NextPageStart": self.next_page_start,
}
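
A minimal round-trip sketch of the new to_json on GetDocumentsResult, assuming from_json accepts the same field names that to_json emits; the payload values are illustrative.

```python
from ravendb.documents.commands.results import GetDocumentsResult

# Illustrative server-style payload (field names taken from to_json above).
raw = {
    "Results": [{"Id": "users/1-A", "Name": "Alice"}],
    "Includes": {},
    "CounterIncludes": None,
    "TimeSeriesIncludes": None,
    "CompareExchangeValueIncludes": None,
    "NextPageStart": 0,
}

result = GetDocumentsResult.from_json(raw)
page = result.to_json()
assert page["Results"] == raw["Results"]
assert page["NextPageStart"] == raw["NextPageStart"]
```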
12 changes: 8 additions & 4 deletions ravendb/documents/session/cluster_transaction_operation.py
@@ -50,12 +50,14 @@ def lazily(self) -> ILazyClusterTransactionOperations:
pass

@abc.abstractmethod
def get_compare_exchange_value(self, key: str, object_type: Type[_T] = None) -> Optional[CompareExchangeValue[_T]]:
def get_compare_exchange_value(
self, key: str, object_type: Optional[Type[_T]] = None
) -> Optional[CompareExchangeValue[_T]]:
pass

@abc.abstractmethod
def get_compare_exchange_values(
self, keys: List[str], object_type: Type[_T]
self, keys: List[str], object_type: Optional[Type[_T]] = None
) -> Dict[str, CompareExchangeValue[_T]]:
pass

@@ -289,11 +291,13 @@ def __init__(self, session: "DocumentSession"):
def lazily(self) -> ILazyClusterTransactionOperations:
return LazyClusterTransactionOperations(self.session)

def get_compare_exchange_value(self, key: str, object_type: Type[_T] = None) -> Optional[CompareExchangeValue[_T]]:
def get_compare_exchange_value(
self, key: str, object_type: Optional[Type[_T]] = None
) -> Optional[CompareExchangeValue[_T]]:
return self._get_compare_exchange_value_internal(key, object_type)

def get_compare_exchange_values(
self, keys: List[str], object_type: Type[_T]
self, keys: List[str], object_type: Optional[Type[_T]] = None
) -> Dict[str, CompareExchangeValue[_T]]:
return super()._get_compare_exchange_values_internal(keys, object_type)

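A hedged sketch of the relaxed compare-exchange signatures: object_type is now optional on both lookups. The `ops` argument stands for the ClusterTransactionOperations instance of a cluster-wide session; the accessor used to reach it (for example session.advanced.cluster_transaction) is an assumption, not part of this diff.

```python
def read_reservations(ops):
    # `ops` is a ClusterTransactionOperations instance from a cluster-wide session
    # (how it is obtained is assumed, e.g. session.advanced.cluster_transaction).
    single = ops.get_compare_exchange_value("emails/alice@example.com")  # no object_type needed
    many = ops.get_compare_exchange_values(
        ["emails/alice@example.com", "emails/bob@example.com"]  # object_type optional here too
    )
    return single, many
```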
97 changes: 47 additions & 50 deletions ravendb/documents/session/document_session.py
@@ -375,16 +375,30 @@ def load_starting_with(
start_after: Optional[str] = None,
) -> List[_T]:
load_starting_with_operation = LoadStartingWithOperation(self)
self.__load_starting_with_internal(
id_prefix, load_starting_with_operation, None, matches, start, page_size, exclude, start_after
self._load_starting_with_internal(
id_prefix, load_starting_with_operation, matches, start, page_size, exclude, start_after
)
return load_starting_with_operation.get_documents(object_type)

def __load_starting_with_internal(
def load_starting_with_into_stream(
self,
id_prefix: str,
matches: str = None,
start: int = 0,
page_size: int = 25,
exclude: str = None,
start_after: str = None,
) -> bytes:
if id_prefix is None:
raise ValueError("Arg 'id_prefix' is cannot be None.")
return self._load_starting_with_into_stream_internal(
id_prefix, LoadStartingWithOperation(self), matches, start, page_size, exclude, start_after
)

def _load_starting_with_internal(
self,
id_prefix: str,
operation: LoadStartingWithOperation,
stream,
matches: str,
start: int,
page_size: int,
@@ -395,12 +409,31 @@ def __load_starting_with_internal(
command = operation.create_request()
if command:
self._request_executor.execute_command(command, self.session_info)
if stream:
pass # todo: stream
else:
operation.set_result(command.result)
operation.set_result(command.result)
return command

def _load_starting_with_into_stream_internal(
self,
id_prefix: str,
operation: LoadStartingWithOperation,
matches: str,
start: int,
page_size: int,
exclude: str,
start_after: str,
) -> bytes:
operation.with_start_with(id_prefix, matches, start, page_size, exclude, start_after)
command = operation.create_request()
bytes_result = None
if command:
self.request_executor.execute_command(command, self.session_info)
try:
result = command.result
bytes_result = json.dumps(result.to_json()).encode("utf-8")
except Exception as e:
raise RuntimeError("Unable sto serialize returned value into stream") from e
return bytes_result

def document_query_from_index_type(self, index_type: Type[_TIndex], object_type: Type[_T]) -> DocumentQuery[_T]:
try:
index = Utils.try_get_new_instance(index_type)
@@ -456,6 +489,8 @@ def counters_for_entity(self, entity: object) -> SessionDocumentCounters:
return SessionDocumentCounters(self, entity)

def time_series_for(self, document_id: str, name: str = None) -> SessionDocumentTimeSeries:
if not isinstance(document_id, str):
raise TypeError("Method time_series_for expects a string. Did you want to call time_series_for_entity?")
return SessionDocumentTimeSeries(self, document_id, name)

def time_series_for_entity(self, entity: object, name: str = None) -> SessionDocumentTimeSeries:
@@ -723,7 +758,7 @@ def lazily(self) -> LazySessionOperations:
return self._session._lazily

def graph_query(self, object_type: type, query: str): # -> GraphDocumentQuery:
pass
raise NotImplementedError("Dropped support for graph queries")

def what_changed(self) -> Dict[str, List[DocumentsChanges]]:
return self._session._what_changed()
@@ -781,32 +816,6 @@ def wait_for_indexes_after_save_changes(

index_options.wait_for_indexes = True

def __load_starting_with_internal(
self,
id_prefix: str,
operation: LoadStartingWithOperation,
stream: Union[None, bytes],
matches: str,
start: int,
page_size: int,
exclude: str,
start_after: str,
) -> GetDocumentsCommand:
operation.with_start_with(id_prefix, matches, start, page_size, exclude, start_after)
command = operation.create_request()
if command is not None:
self._session._request_executor.execute(command, self._session.session_info)
if stream:
try:
result = command.result
stream_to_dict = json.loads(stream.decode("utf-8"))
result.__dict__.update(stream_to_dict)
except IOError as e:
raise RuntimeError(f"Unable to serialize returned value into stream {e.args[0]}", e)
else:
operation.set_result(command.result)
return command

def load_starting_with(
self,
id_prefix: str,
@@ -824,26 +833,14 @@ def load_starting_with(
def load_starting_with_into_stream(
self,
id_prefix: str,
output: bytes,
matches: str = None,
start: int = 0,
page_size: int = 25,
exclude: str = None,
start_after: str = None,
):
if not output:
raise ValueError("Output cannot be None")
if not id_prefix:
raise ValueError("Id prefix cannot be None")
self.__load_starting_with_internal(
id_prefix,
LoadStartingWithOperation(self._session),
output,
matches,
start,
page_size,
exclude,
start_after,
) -> bytes:
return self._session.load_starting_with_into_stream(
id_prefix, matches, start, page_size, exclude, start_after
)

def load_into_stream(self, keys: List[str], output: bytes) -> None:
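A usage sketch of the reshaped streaming load: rather than writing into a caller-supplied output buffer, it now returns the serialized page as UTF-8 JSON bytes built from GetDocumentsResult.to_json. The store URL and database below are illustrative.

```python
import json

from ravendb import DocumentStore

store = DocumentStore(urls=["http://localhost:8080"], database="Demo")  # illustrative setup
store.initialize()

with store.open_session() as session:
    # Returns bytes; decoding gives the same shape as GetDocumentsResult.to_json().
    raw = session.load_starting_with_into_stream("users/", page_size=10)
    page = json.loads(raw.decode("utf-8"))
    print(page["NextPageStart"], len(page["Results"]))
```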
38 changes: 22 additions & 16 deletions ravendb/documents/session/loaders/include.py
@@ -358,23 +358,29 @@ def include_all_counters(self) -> SubscriptionIncludeBuilder:
self._include_all_counters("")
return self

def include_time_series_by_range_type_and_time(
self, name: str, ts_type: TimeSeriesRangeType, time: TimeValue
) -> SubscriptionIncludeBuilder:
self._include_time_series_by_range_type_and_time("", name, ts_type, time)
return self

def include_time_series_by_range_type_and_count(
self, name: str, ts_type: TimeSeriesRangeType, count: int
) -> SubscriptionIncludeBuilder:
self._include_time_series_by_range_type_and_count("", name, ts_type, count)
return self

def include_all_time_series_by_range_type_and_count(
self, ts_type: TimeSeriesRangeType, count: int
) -> SubscriptionIncludeBuilder:
self._include_time_series_by_range_type_and_count("", constants.TimeSeries.ALL, ts_type, count)
return self

# def include_time_series(
# self,
# name:str,
# ts_type: TimeSeriesRangeType,
# time: TimeValue
# ) -> SubscriptionIncludeBuilder:
# self._include_time_series_by_range_type_and_time("", name, ts_type, time)
# return self
#
# def include_time_series_by_range_type_and_count(
# self,
# name:str,
# ts_type: TimeSeriesRangeType,
# time: TimeValue
# ) -> SubscriptionIncludeBuilder:
# self._include_time_series_by_range_type_and_count("", name, type, count)
def include_all_time_series_by_range_type_and_time(
self, ts_type: TimeSeriesRangeType, time: TimeValue
) -> SubscriptionIncludeBuilder:
self._include_time_series_by_range_type_and_time("", constants.TimeSeries.ALL, ts_type, time)
return self


class TimeSeriesIncludeBuilder(IncludeBuilderBase):
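A sketch of the new count-based time-series includes on SubscriptionIncludeBuilder. The builder is assumed to be handed over by the subscription-creation options; the TimeSeriesRangeType import path and the series name are assumptions as well.

```python
from ravendb.documents.operations.time_series import TimeSeriesRangeType  # import path assumed

def configure_subscription_includes(builder):
    # Last 16 entries of a named series (name is illustrative), then the last 32
    # entries of every series; both methods return the builder, so calls chain.
    builder.include_time_series_by_range_type_and_count(
        "HeartRate", TimeSeriesRangeType.LAST, 16
    ).include_all_time_series_by_range_type_and_count(TimeSeriesRangeType.LAST, 32)
```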
22 changes: 14 additions & 8 deletions ravendb/documents/session/loaders/loaders.py
@@ -1,6 +1,6 @@
from __future__ import annotations
from abc import abstractmethod
from typing import TypeVar, TYPE_CHECKING, Dict, List, Type
from typing import TypeVar, TYPE_CHECKING, Dict, List, Type, Optional, Set, Union

from ravendb.documents.store.lazy import Lazy

@@ -16,21 +16,24 @@ def include(self, path: str) -> LoaderWithInclude:
pass

@abstractmethod
def load(self, object_type: _T, *ids: str) -> _T:
def load(self, id_: str, object_type: Optional[Type[_T]] = None) -> _T:
pass


class MultiLoaderWithInclude(LoaderWithInclude):
def __init__(self, session: DocumentSession):
self.__session = session
self.__includes: List[str] = []
self.__includes: Set[str] = set()

def include(self, path: str) -> LoaderWithInclude:
self.__includes.append(path)
self.__includes.add(path)
return self

def load(self, object_type: Type[_T], *ids: str) -> Dict[str, _T]:
return self.__session._load_internal(object_type, list(ids), self.__includes)
def load(self, id_or_ids: Union[List[str], str], object_type: Optional[Type[_T]] = None) -> _T:
if not isinstance(id_or_ids, (str, list)):
raise TypeError(f"Expected str or list of str, got '{type(id_or_ids)}'")
ids = [id_or_ids] if isinstance(id_or_ids, str) else id_or_ids
return self.__session._load_internal(object_type, ids, self.__includes)


class LazyMultiLoaderWithInclude(LoaderWithInclude):
@@ -42,5 +45,8 @@ def include(self, path: str) -> LazyMultiLoaderWithInclude:
self.__includes.append(path)
return self

def load(self, object_type: Type[_T], *ids: str) -> Lazy[Dict[str, _T]]:
return self.__session.lazy_load_internal(object_type, list(ids), self.__includes, None)
def load(self, id_or_ids: Union[List[str], str], object_type: Optional[Type[_T]] = None) -> Lazy[Dict[str, _T]]:
if not isinstance(id_or_ids, (str, list)):
raise TypeError(f"Expected str or list of str, got '{type(id_or_ids)}'")
ids = [id_or_ids] if isinstance(id_or_ids, str) else id_or_ids
return self.__session.lazy_load_internal(object_type, ids, self.__includes, None)
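
A sketch of the reordered include-loader signature: the id (or list of ids) now comes first and object_type is optional. That session.include(...) hands back the MultiLoaderWithInclude above is assumed, and User is a hypothetical model class.

```python
from ravendb import DocumentStore

class User:  # hypothetical model class for the typed example
    def __init__(self, name=None, address_id=None):
        self.name = name
        self.address_id = address_id

store = DocumentStore(urls=["http://localhost:8080"], database="Demo")  # illustrative setup
store.initialize()

with store.open_session() as session:
    one = session.include("address_id").load("users/1-A")                        # single id, untyped
    many = session.include("address_id").load(["users/1-A", "users/2-A"], User)  # list of ids, typed
```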
36 changes: 18 additions & 18 deletions ravendb/documents/session/operations/lazy.py
@@ -183,62 +183,62 @@ def handle_response(self, response: "GetResponse") -> None:

class LazySessionOperations:
def __init__(self, delegate: DocumentSession):
self._delegate = delegate
self._session = delegate

def include(self, path: str) -> LazyMultiLoaderWithInclude:
return LazyMultiLoaderWithInclude(self._delegate).include(path)
return LazyMultiLoaderWithInclude(self._session).include(path)

def load(
self, object_type: Type[_T], ids: Union[List[str], str], on_eval: Callable = None
self, ids: Union[List[str], str], object_type: Optional[Type[_T]] = None, on_eval: Callable = None
) -> Optional[Lazy[Union[Dict[str, object], object]]]:
if not ids:
return None

if isinstance(ids, str):
key = ids
if self._delegate.advanced.is_loaded(key):
return Lazy(lambda: self._delegate.load(key, object_type))
if self._session.advanced.is_loaded(key):
return Lazy(lambda: self._session.load(key, object_type))

lazy_load_operation = LazyLoadOperation(
object_type, self._delegate, LoadOperation(self._delegate).by_key(key)
object_type, self._session, LoadOperation(self._session).by_key(key)
).by_key(key)

return self._delegate.add_lazy_operation(object_type, lazy_load_operation, on_eval)
return self._session.add_lazy_operation(object_type, lazy_load_operation, on_eval)

elif isinstance(ids, list):
return self._delegate.lazy_load_internal(object_type, ids, [], on_eval)
return self._session.lazy_load_internal(object_type, ids, [], on_eval)

raise TypeError("Expected 'ids' as 'str' or 'list[str]'")
raise TypeError(f"Expected a 'str' or 'list' of 'str', the document ids. Got '{type(ids).__name__}'.")

def load_starting_with(
self,
id_prefix: str,
object_type: Optional[Type[_T]],
object_type: Optional[Type[_T]] = None,
matches: str = None,
start: int = 0,
page_size: int = 25,
exclude: str = None,
start_after: str = None,
) -> Lazy[Dict[str, _T]]:
operation = LazyStartsWithOperation(
object_type, id_prefix, matches, exclude, start, page_size, self._delegate, start_after
object_type, id_prefix, matches, exclude, start, page_size, self._session, start_after
)

return self._delegate.add_lazy_operation(dict, operation, None)
return self._session.add_lazy_operation(dict, operation, None)

def conditional_load(
self, key: str, change_vector: str, object_type: Type[_T] = None
self, key: str, change_vector: str, object_type: Optional[Type[_T]] = None
) -> Lazy[ConditionalLoadResult[_T]]:
if not key or key.isspace():
raise ValueError("key cannot be None or whitespace")

if self._delegate.is_loaded(key):
if self._session.is_loaded(key):

def __lazy_factory():
entity = self._delegate.load(key, object_type)
entity = self._session.load(key, object_type)
if entity is None:
return ConditionalLoadResult.create(None, None)
cv = self._delegate.advanced.get_change_vector_for(entity)
cv = self._session.advanced.get_change_vector_for(entity)
return ConditionalLoadResult.create(entity, cv)

return Lazy(__lazy_factory)
@@ -249,8 +249,8 @@ def __lazy_factory():
f"conditional load when change_vector is None or empty"
)

lazy_load_operation = LazyConditionalLoadOperation(object_type, key, change_vector, self._delegate)
return self._delegate.add_lazy_operation(ConditionalLoadResult, lazy_load_operation, None)
lazy_load_operation = LazyConditionalLoadOperation(object_type, key, change_vector, self._session)
return self._session.add_lazy_operation(ConditionalLoadResult, lazy_load_operation, None)


class LazyLoadOperation(LazyOperation):
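A sketch of the lazy load after the argument swap: ids come first, object_type is optional. Reaching the operations through session.advanced.lazily mirrors the lazily property shown earlier in this diff, and it is assumed that Lazy exposes its result via .value; setup values are illustrative.

```python
from ravendb import DocumentStore

store = DocumentStore(urls=["http://localhost:8080"], database="Demo")  # illustrative setup
store.initialize()

with store.open_session() as session:
    lazy_one = session.advanced.lazily.load("users/1-A")
    lazy_many = session.advanced.lazily.load(["users/1-A", "users/2-A"])
    first = lazy_one.value   # evaluating a Lazy result triggers the pending batch
    rest = lazy_many.value
```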