37 changes: 19 additions & 18 deletions src/aleph/handlers/content/store.py
@@ -107,9 +107,6 @@ async def fetch_related_content(
         # This check is essential to ensure that files are not added to the system
         # or the current node when the configuration disables storing of files.
         config = get_config()
-        if not config.storage.store_files.value:
-            return  # Ignore if files are not to be stored.
-
         content = message.parsed_content
         assert isinstance(content, StoreContent)
 
@@ -122,6 +119,7 @@ async def fetch_related_content(
         do_standard_lookup = True
 
         # Sentinel value, the code below always sets a value but mypy does not see it.
+        # Otherwise, if config.storage.store_files is False, this is the value stored in the database.
         size: int = -1
 
         if engine == ItemType.ipfs and ipfs_enabled:
@@ -176,22 +174,25 @@ async def fetch_related_content(
             do_standard_lookup = True
 
         if do_standard_lookup:
-            try:
-                file_content = await self.storage_service.get_hash_content(
-                    item_hash,
-                    engine=engine,
-                    tries=4,
-                    timeout=15,  # We only end up here for files < 1MB, a short timeout is okay
-                    use_network=True,
-                    use_ipfs=True,
-                    store_value=True,
-                )
-            except AlephStorageException:
-                raise FileUnavailable(
-                    "Could not retrieve file from storage at this time"
-                )
+            if config.storage.store_files.value:
+                try:
+                    file_content = await self.storage_service.get_hash_content(
+                        item_hash,
+                        engine=engine,
+                        tries=4,
+                        timeout=15,  # We only end up here for files < 1MB, a short timeout is okay
+                        use_network=True,
+                        use_ipfs=True,
+                        store_value=True,
+                    )
+                except AlephStorageException:
+                    raise FileUnavailable(
+                        "Could not retrieve file from storage at this time"
+                    )
 
-            size = len(file_content)
+                size = len(file_content)
+            else:
+                size = -1
 
         upsert_file(
             session=session,
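For orientation, here is a minimal sketch of the configuration flag this change keys off. Only config.storage.store_files.value appears in the diff above; the get_config import path and the helper function are assumptions for illustration.

# Minimal sketch, assuming pyaleph's get_config() accessor; the import path is an assumption.
from aleph.config import get_config


def node_stores_files() -> bool:
    """Return True when this node is configured to keep file content locally."""
    config = get_config()
    return bool(config.storage.store_files.value)


# When store_files is disabled, fetch_related_content() above skips get_hash_content()
# entirely and records the -1 sentinel as the file size in the database.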
107 changes: 107 additions & 0 deletions tests/storage/test_store_message.py
@@ -154,3 +154,110 @@ async def test_handle_new_storage_directory(
         assert stored_file.type == FileType.DIRECTORY
 
     assert not storage_engine.called
+
+
+@pytest.mark.asyncio
+async def test_store_files_is_false(
+    mocker,
+    session_factory: DbSessionFactory,
+    mock_config: Config,
+    fixture_message_directory: MessageDb,
+):
+    mock_ipfs_client = mocker.MagicMock()
+    ipfs_stats = {
+        "Hash": "QmPZrod87ceK4yVvXQzRexDcuDgmLxBiNJ1ajLjLoMx9sU",
+        "Size": 42,
+        "CumulativeSize": 4560,
+        "Blocks": 2,
+        "Type": "file",
+    }
+    mock_ipfs_client.files.stat = mocker.AsyncMock(return_value=ipfs_stats)
+
+    mock_config.storage.store_files.value = False
+
+    message = fixture_message_directory
+    storage_engine = mocker.AsyncMock()
+
+    storage_service = StorageService(
+        storage_engine=storage_engine,
+        ipfs_service=IpfsService(ipfs_client=mock_ipfs_client),
+        node_cache=mocker.AsyncMock(),
+    )
+    get_hash_content_mock = mocker.patch.object(storage_service, "get_hash_content")
+    store_message_handler = StoreMessageHandler(
+        storage_service=storage_service, grace_period=24
+    )
+
+    with session_factory() as session:
+        await store_message_handler.fetch_related_content(
+            session=session, message=message
+        )
+        session.commit()
+
+    with session_factory() as session:
+        stored_files = list((session.execute(select(StoredFileDb))).scalars())
+
+        assert len(stored_files) == 1
+        stored_file = stored_files[0]
+
+        # Check the updates to the message content
+        assert stored_file.hash == ipfs_stats["Hash"]
+        assert stored_file.size == -1
+        assert stored_file.type == FileType.FILE
+
+    storage_engine.assert_not_called()
+    get_hash_content_mock.assert_not_called()
+
+
+@pytest.mark.asyncio
+async def test_store_files_is_false_ipfs_is_disabled(
+    mocker,
+    session_factory: DbSessionFactory,
+    mock_config: Config,
+    fixture_message_directory: MessageDb,
+):
+    mock_ipfs_client = mocker.MagicMock()
+    ipfs_stats = {
+        "Hash": "QmPZrod87ceK4yVvXQzRexDcuDgmLxBiNJ1ajLjLoMx9sU",
+        "Size": 42,
+        "CumulativeSize": 4560,
+        "Blocks": 2,
+        "Type": "file",
+    }
+    mock_ipfs_client.files.stat = mocker.AsyncMock(return_value=ipfs_stats)
+
+    mock_config.storage.store_files.value = False
+    mock_config.ipfs.enabled.value = False
+
+    message = fixture_message_directory
+    storage_engine = mocker.AsyncMock()
+
+    storage_service = StorageService(
+        storage_engine=storage_engine,
+        ipfs_service=IpfsService(ipfs_client=mock_ipfs_client),
+        node_cache=mocker.AsyncMock(),
+    )
+    get_hash_content_mock = mocker.patch.object(storage_service, "get_hash_content")
+    store_message_handler = StoreMessageHandler(
+        storage_service=storage_service, grace_period=24
+    )
+
+    with session_factory() as session:
+        await store_message_handler.fetch_related_content(
+            session=session, message=message
+        )
+        session.commit()
+
+    with session_factory() as session:
+        stored_files = list((session.execute(select(StoredFileDb))).scalars())
+
+        assert len(stored_files) == 1
+        stored_file = stored_files[0]
+
+        # Check the updates to the message content
+        assert stored_file.hash == ipfs_stats["Hash"]
+        assert stored_file.size == -1
+        assert stored_file.type == FileType.FILE
+
+    storage_engine.assert_not_called()
+    get_hash_content_mock.assert_not_called()
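A usage note on the -1 sentinel asserted above: when storage.store_files is off, the file row is still upserted, just with size -1, so downstream code can tell which files were registered without their content ever being fetched. Below is a hedged sketch of such a query, reusing the StoredFileDb model and select() from these tests; the import path and helper name are hypothetical.

# Hedged sketch: list files recorded with the -1 sentinel size (content not stored locally).
# The helper name is hypothetical; StoredFileDb.size is the column the tests above assert on.
from sqlalchemy import select

from aleph.db.models import StoredFileDb  # assumed import path


def list_unfetched_files(session):
    """Return StoredFileDb rows whose content was never downloaded by this node."""
    return list(session.execute(select(StoredFileDb).where(StoredFileDb.size == -1)).scalars())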