From 3a3858edfd0a7448c69c3bf5d1da58ef09ac3004 Mon Sep 17 00:00:00 2001 From: Pavel Tisnovsky Date: Thu, 25 Sep 2025 08:19:24 +0200 Subject: [PATCH 1/4] New dependency --- pyproject.toml | 1 + uv.lock | 13 +++++++++++++ 2 files changed, 14 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 66d196226..ddefb273a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -50,6 +50,7 @@ dependencies = [ "semver<4.0.0", # Used by authorization resolvers "jsonpath-ng>=1.6.1", + "psycopg2>=2.9.10", ] diff --git a/uv.lock b/uv.lock index 310a9d025..e2bd1f09e 100644 --- a/uv.lock +++ b/uv.lock @@ -1306,6 +1306,7 @@ dependencies = [ { name = "llama-stack-client" }, { name = "openai" }, { name = "prometheus-client" }, + { name = "psycopg2" }, { name = "rich" }, { name = "semver" }, { name = "sqlalchemy" }, @@ -1386,6 +1387,7 @@ requires-dist = [ { name = "llama-stack-client", specifier = "==0.2.20" }, { name = "openai", specifier = ">=1.99.9" }, { name = "prometheus-client", specifier = ">=0.22.1" }, + { name = "psycopg2", specifier = ">=2.9.10" }, { name = "rich", specifier = ">=14.0.0" }, { name = "semver", specifier = "<4.0.0" }, { name = "sqlalchemy", specifier = ">=2.0.42" }, @@ -2451,6 +2453,17 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/26/65/1070a6e3c036f39142c2820c4b52e9243246fcfc3f96239ac84472ba361e/psutil-7.1.0-cp37-abi3-win_arm64.whl", hash = "sha256:6937cb68133e7c97b6cc9649a570c9a18ba0efebed46d8c5dae4c07fa1b67a07", size = 244971, upload-time = "2025-09-17T20:15:12.262Z" }, ] +[[package]] +name = "psycopg2" +version = "2.9.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/62/51/2007ea29e605957a17ac6357115d0c1a1b60c8c984951c19419b3474cdfd/psycopg2-2.9.10.tar.gz", hash = "sha256:12ec0b40b0273f95296233e8750441339298e6a572f7039da5b260e3c8b60e11", size = 385672, upload-time = "2024-10-16T11:24:54.832Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/3d/16/4623fad6076448df21c1a870c93a9774ad8a7b4dd1660223b59082dd8fec/psycopg2-2.9.10-cp312-cp312-win32.whl", hash = "sha256:65a63d7ab0e067e2cdb3cf266de39663203d38d6a8ed97f5ca0cb315c73fe067", size = 1025113, upload-time = "2024-10-16T11:18:40.148Z" }, + { url = "https://files.pythonhosted.org/packages/66/de/baed128ae0fc07460d9399d82e631ea31a1f171c0c4ae18f9808ac6759e3/psycopg2-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:4a579d6243da40a7b3182e0430493dbd55950c493d8c68f4eec0b302f6bbf20e", size = 1163951, upload-time = "2024-10-16T11:18:44.377Z" }, + { url = "https://files.pythonhosted.org/packages/ae/49/a6cfc94a9c483b1fa401fbcb23aca7892f60c7269c5ffa2ac408364f80dc/psycopg2-2.9.10-cp313-cp313-win_amd64.whl", hash = "sha256:91fd603a2155da8d0cfcdbf8ab24a2d54bca72795b90d2a3ed2b6da8d979dee2", size = 2569060, upload-time = "2025-01-04T20:09:15.28Z" }, +] + [[package]] name = "psycopg2-binary" version = "2.9.10" From 0dfc102b4d3f3509b759307843d045b9dbbfcc6a Mon Sep 17 00:00:00 2001 From: Pavel Tisnovsky Date: Thu, 25 Sep 2025 08:20:18 +0200 Subject: [PATCH 2/4] PostgreSQL-based conversation cache --- src/cache/postgres_cache.py | 215 +++++++++++++++++++++++++++++++++--- 1 file changed, 198 insertions(+), 17 deletions(-) diff --git a/src/cache/postgres_cache.py b/src/cache/postgres_cache.py index 2ab635c3d..20ca3c0a3 100644 --- a/src/cache/postgres_cache.py +++ b/src/cache/postgres_cache.py @@ -1,6 +1,9 @@ """PostgreSQL cache implementation.""" +import psycopg2 + from cache.cache import Cache +from cache.cache_error import CacheError from models.cache_entry import CacheEntry from models.config import PostgreSQLDatabaseConfiguration from log import get_logger @@ -10,22 +13,143 @@ class PostgresCache(Cache): - """PostgreSQL cache implementation.""" + """Cache that uses PostgreSQL to store cached values. 
+ + The cache itself is stored in following table: + + ``` + Column | Type | Nullable | + -----------------+-----------------------------+----------+ + user_id | text | not null | + conversation_id | text | not null | + created_at | int | not null | + query | text | | + response | text | | + provider | text | | + model | text | | + Indexes: + "cache_pkey" PRIMARY KEY, btree (user_id, conversation_id) + "cache_key_key" UNIQUE CONSTRAINT, btree (key) + "timestamps" btree (updated_at) + Access method: heap + ``` + """ + + CREATE_CACHE_TABLE = """ + CREATE TABLE IF NOT EXISTS cache ( + user_id text NOT NULL, + conversation_id text NOT NULL, + created_at timestamp NOT NULL, + query text, + response text, + provider text, + model text, + PRIMARY KEY(user_id, conversation_id, created_at) + ); + """ + + CREATE_INDEX = """ + CREATE INDEX IF NOT EXISTS timestamps + ON cache (created_at) + """ + + SELECT_CONVERSATION_HISTORY_STATEMENT = """ + SELECT query, response, provider, model + FROM cache + WHERE user_id=%s AND conversation_id=%s + ORDER BY created_at + """ + + INSERT_CONVERSATION_HISTORY_STATEMENT = """ + INSERT INTO cache(user_id, conversation_id, created_at, query, response, provider, model) + VALUES (%s, %s, CURRENT_TIMESTAMP, %s, %s, %s, %s) + """ + + QUERY_CACHE_SIZE = """ + SELECT count(*) FROM cache; + """ + + DELETE_SINGLE_CONVERSATION_STATEMENT = """ + DELETE FROM cache + WHERE user_id=%s AND conversation_id=%s + """ + + LIST_CONVERSATIONS_STATEMENT = """ + SELECT conversation_id, max(created_at) AS created_at + FROM cache + WHERE user_id=%s + GROUP BY conversation_id + ORDER BY created_at DESC + """ def __init__(self, config: PostgreSQLDatabaseConfiguration) -> None: """Create a new instance of PostgreSQL cache.""" self.postgres_config = config + # initialize connection to DB + self.connect() + # self.capacity = config.max_entries + + # pylint: disable=W0201 def connect(self) -> None: """Initialize connection to database.""" logger.info("Connecting to 
storage") + # make sure the connection will have known state + # even if PostgreSQL is not alive + self.connection = None + config = self.postgres_config + try: + self.connection = psycopg2.connect( + host=config.host, + port=config.port, + user=config.user, + password=config.password.get_secret_value(), + dbname=config.db, + sslmode=config.ssl_mode, + sslrootcert=config.ca_cert_path, + gssencmode=config.gss_encmode, + ) + self.initialize_cache() + except Exception as e: + if self.connection is not None: + self.connection.close() + logger.exception("Error initializing Postgres cache:\n%s", e) + raise + self.connection.autocommit = True def connected(self) -> bool: """Check if connection to cache is alive.""" - return True + if self.connection is None: + logger.warning("Not connected, need to reconnect later") + return False + try: + with self.connection.cursor() as cursor: + cursor.execute("SELECT 1") + logger.info("Connection to storage is ok") + return True + except (psycopg2.OperationalError, psycopg2.InterfaceError) as e: + logger.error("Disconnected from storage: %s", e) + return False def initialize_cache(self) -> None: - """Initialize cache.""" + """Initialize cache - clean it up etc.""" + if self.connection is None: + logger.error("Cache is disconnected") + raise CacheError("Initialize_cache: cache is disconnected") + + # cursor as context manager is not used there on purpose + # any CREATE statement can raise it's own exception + # and it should not interfere with other statements + cursor = self.connection.cursor() + + logger.info("Initializing table for cache") + cursor.execute(PostgresCache.CREATE_CACHE_TABLE) + + logger.info("Initializing index for cache") + cursor.execute(PostgresCache.CREATE_INDEX) + + cursor.close() + self.connection.commit() @connection def get( @@ -39,11 +163,29 @@ def get( skip_user_id_check: Skip user_id suid check. Returns: - Empty list. + The value associated with the key, or None if not found. 
""" - # just check if user_id and conversation_id are UUIDs - super().construct_key(user_id, conversation_id, skip_user_id_check) - return [] + if self.connection is None: + logger.error("Cache is disconnected") + raise CacheError("get: cache is disconnected") + + with self.connection.cursor() as cursor: + cursor.execute( + self.SELECT_CONVERSATION_HISTORY_STATEMENT, (user_id, conversation_id) + ) + conversation_entries = cursor.fetchall() + + result = [] + for conversation_entry in conversation_entries: + cache_entry = CacheEntry( + query=conversation_entry[0], + response=conversation_entry[1], + provider=conversation_entry[2], + model=conversation_entry[3], + ) + result.append(cache_entry) + + return result @connection def insert_or_append( @@ -62,8 +204,28 @@ def insert_or_append( skip_user_id_check: Skip user_id suid check. """ - # just check if user_id and conversation_id are UUIDs - super().construct_key(user_id, conversation_id, skip_user_id_check) + if self.connection is None: + logger.error("Cache is disconnected") + raise CacheError("insert_or_append: cache is disconnected") + + try: + # the whole operation is run in one transaction + with self.connection.cursor() as cursor: + cursor.execute( + PostgresCache.INSERT_CONVERSATION_HISTORY_STATEMENT, + ( + user_id, + conversation_id, + cache_entry.query, + cache_entry.response, + cache_entry.provider, + cache_entry.model, + ), + ) + # commit is implicit at this point + except psycopg2.DatabaseError as e: + logger.error("PostgresCache.insert_or_append: %s", e) + raise CacheError("PostgresCache.insert_or_append", e) from e @connection def delete( @@ -77,12 +239,24 @@ def delete( skip_user_id_check: Skip user_id suid check. Returns: - bool: True in all cases. + bool: True if the conversation was deleted, False if not found. 
""" - # just check if user_id and conversation_id are UUIDs - super().construct_key(user_id, conversation_id, skip_user_id_check) - return True + if self.connection is None: + logger.error("Cache is disconnected") + raise CacheError("delete: cache is disconnected") + + try: + with self.connection.cursor() as cursor: + cursor.execute( + PostgresCache.DELETE_SINGLE_CONVERSATION_STATEMENT, + (user_id, conversation_id), + ) + deleted = cursor.rowcount + return deleted > 0 + except psycopg2.DatabaseError as e: + logger.error("PostgresCache.delete: %s", e) + raise CacheError("PostgresCache.delete", e) from e @connection def list(self, user_id: str, skip_user_id_check: bool = False) -> list[str]: @@ -93,16 +267,23 @@ def list(self, user_id: str, skip_user_id_check: bool = False) -> list[str]: skip_user_id_check: Skip user_id suid check. Returns: - An empty list. + A list of conversation ids from the cache """ - super()._check_user_id(user_id, skip_user_id_check) - return [] + if self.connection is None: + logger.error("Cache is disconnected") + raise CacheError("list: cache is disconnected") + + with self.connection.cursor() as cursor: + cursor.execute(self.LIST_CONVERSATIONS_STATEMENT, (user_id,)) + conversations = cursor.fetchall() + + return [conversation[0] for conversation in conversations] def ready(self) -> bool: """Check if the cache is ready. Returns: - True in all cases. + True if the cache is ready, False otherwise. 
""" return True From 722fe3c547ddf49b5cd207695d70f60e161b67a6 Mon Sep 17 00:00:00 2001 From: Pavel Tisnovsky Date: Thu, 25 Sep 2025 08:29:11 +0200 Subject: [PATCH 3/4] New dependency: PsycoPG2 --- pyproject.toml | 2 +- uv.lock | 15 ++------------- 2 files changed, 3 insertions(+), 14 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index ddefb273a..a4a95214a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -50,7 +50,7 @@ dependencies = [ "semver<4.0.0", # Used by authorization resolvers "jsonpath-ng>=1.6.1", - "psycopg2>=2.9.10", + "psycopg2-binary>=2.9.10", ] diff --git a/uv.lock b/uv.lock index e2bd1f09e..4b06e4986 100644 --- a/uv.lock +++ b/uv.lock @@ -1306,7 +1306,7 @@ dependencies = [ { name = "llama-stack-client" }, { name = "openai" }, { name = "prometheus-client" }, - { name = "psycopg2" }, + { name = "psycopg2-binary" }, { name = "rich" }, { name = "semver" }, { name = "sqlalchemy" }, @@ -1387,7 +1387,7 @@ requires-dist = [ { name = "llama-stack-client", specifier = "==0.2.20" }, { name = "openai", specifier = ">=1.99.9" }, { name = "prometheus-client", specifier = ">=0.22.1" }, - { name = "psycopg2", specifier = ">=2.9.10" }, + { name = "psycopg2-binary", specifier = ">=2.9.10" }, { name = "rich", specifier = ">=14.0.0" }, { name = "semver", specifier = "<4.0.0" }, { name = "sqlalchemy", specifier = ">=2.0.42" }, @@ -2453,17 +2453,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/26/65/1070a6e3c036f39142c2820c4b52e9243246fcfc3f96239ac84472ba361e/psutil-7.1.0-cp37-abi3-win_arm64.whl", hash = "sha256:6937cb68133e7c97b6cc9649a570c9a18ba0efebed46d8c5dae4c07fa1b67a07", size = 244971, upload-time = "2025-09-17T20:15:12.262Z" }, ] -[[package]] -name = "psycopg2" -version = "2.9.10" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/62/51/2007ea29e605957a17ac6357115d0c1a1b60c8c984951c19419b3474cdfd/psycopg2-2.9.10.tar.gz", hash = 
"sha256:12ec0b40b0273f95296233e8750441339298e6a572f7039da5b260e3c8b60e11", size = 385672, upload-time = "2024-10-16T11:24:54.832Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/3d/16/4623fad6076448df21c1a870c93a9774ad8a7b4dd1660223b59082dd8fec/psycopg2-2.9.10-cp312-cp312-win32.whl", hash = "sha256:65a63d7ab0e067e2cdb3cf266de39663203d38d6a8ed97f5ca0cb315c73fe067", size = 1025113, upload-time = "2024-10-16T11:18:40.148Z" }, - { url = "https://files.pythonhosted.org/packages/66/de/baed128ae0fc07460d9399d82e631ea31a1f171c0c4ae18f9808ac6759e3/psycopg2-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:4a579d6243da40a7b3182e0430493dbd55950c493d8c68f4eec0b302f6bbf20e", size = 1163951, upload-time = "2024-10-16T11:18:44.377Z" }, - { url = "https://files.pythonhosted.org/packages/ae/49/a6cfc94a9c483b1fa401fbcb23aca7892f60c7269c5ffa2ac408364f80dc/psycopg2-2.9.10-cp313-cp313-win_amd64.whl", hash = "sha256:91fd603a2155da8d0cfcdbf8ab24a2d54bca72795b90d2a3ed2b6da8d979dee2", size = 2569060, upload-time = "2025-01-04T20:09:15.28Z" }, -] - [[package]] name = "psycopg2-binary" version = "2.9.10" From da7dcd54b746c6fd5f868e93b7381c081de12287 Mon Sep 17 00:00:00 2001 From: Pavel Tisnovsky Date: Thu, 25 Sep 2025 08:34:58 +0200 Subject: [PATCH 4/4] Updated existing unit tests, added new unit tests --- tests/unit/cache/test_cache_factory.py | 3 +- tests/unit/cache/test_postgres_cache.py | 299 ++++++++++++++++++++++++ 2 files changed, 301 insertions(+), 1 deletion(-) create mode 100644 tests/unit/cache/test_postgres_cache.py diff --git a/tests/unit/cache/test_cache_factory.py b/tests/unit/cache/test_cache_factory.py index f895f3599..611953f15 100644 --- a/tests/unit/cache/test_cache_factory.py +++ b/tests/unit/cache/test_cache_factory.py @@ -112,8 +112,9 @@ def test_conversation_cache_sqlite_improper_config(tmpdir): _ = CacheFactory.conversation_cache(cc) -def test_conversation_cache_postgres(postgres_cache_config_fixture): +def 
test_conversation_cache_postgres(postgres_cache_config_fixture, mocker): """Check if PostgreSQL is returned by factory with proper configuration.""" + mocker.patch("psycopg2.connect") cache = CacheFactory.conversation_cache(postgres_cache_config_fixture) assert cache is not None # check if the object has the right type diff --git a/tests/unit/cache/test_postgres_cache.py b/tests/unit/cache/test_postgres_cache.py new file mode 100644 index 000000000..04a55fc59 --- /dev/null +++ b/tests/unit/cache/test_postgres_cache.py @@ -0,0 +1,299 @@ +"""Unit tests for PostgreSQL cache implementation.""" + +import pytest + +import psycopg2 + +from cache.cache_error import CacheError +from cache.postgres_cache import PostgresCache +from models.config import PostgreSQLDatabaseConfiguration +from models.cache_entry import CacheEntry +from utils import suid + + +USER_ID_1 = suid.get_suid() +USER_ID_2 = suid.get_suid() +CONVERSATION_ID_1 = suid.get_suid() +CONVERSATION_ID_2 = suid.get_suid() +cache_entry_1 = CacheEntry( + query="user message1", response="AI message1", provider="foo", model="bar" +) +cache_entry_2 = CacheEntry( + query="user message2", response="AI message2", provider="foo", model="bar" +) + +# pylint: disable=fixme + +# pylint: disable=too-few-public-methods +class CursorMock: + """Mock class for simulating DB cursor exceptions.""" + + def __init__(self): + """Construct the mock cursor class.""" + + def execute(self, command): + """Execute any SQL command.""" + raise psycopg2.DatabaseError("can not INSERT") + + +# pylint: disable=too-few-public-methods +class ConnectionMock: + """Mock class for connection.""" + + def __init__(self): + """Construct the connection mock class.""" + + def cursor(self): + """Getter for mock cursor.""" + raise psycopg2.OperationalError("can not SELECT") + + +@pytest.fixture(scope="module", name="postgres_cache_config_fixture") +def postgres_cache_config(): + """Fixture containing initialized instance of PostgreSQL cache.""" + # can be any 
configuration, because tests won't really try to
+    # connect to database
+    return PostgreSQLDatabaseConfiguration(
+        host="localhost", port=1234, db="database", user="user", password="password"
+    )
+
+
+def test_cache_initialization(postgres_cache_config_fixture, mocker):
+    """Test the get operation when DB is connected."""
+    # prevent real connection to PG instance
+    mocker.patch("psycopg2.connect")
+    cache = PostgresCache(postgres_cache_config_fixture)
+    assert cache is not None
+
+    # connection is mocked only, but it should exist
+    assert cache.connection is not None
+
+
+def test_cache_initialization_on_error(postgres_cache_config_fixture, mocker):
+    """Test the get operation when DB is not connected."""
+    # prevent real connection to PG instance
+    mocker.patch("psycopg2.connect", side_effect=Exception("foo"))
+
+    # exception should be thrown during PG connection
+    with pytest.raises(Exception, match="foo"):
+        _ = PostgresCache(postgres_cache_config_fixture)
+
+
+def test_cache_initialization_connect_finalizer(postgres_cache_config_fixture, mocker):
+    """Test the get operation when DB is not connected."""
+    # prevent real connection to PG instance
+    mocker.patch("psycopg2.connect")
+
+    # cache initialization should raise an exception
+    mocker.patch(
+        "cache.postgres_cache.PostgresCache.initialize_cache",
+        side_effect=Exception("foo"),
+    )
+
+    # exception should be thrown during cache initialization
+    with pytest.raises(Exception, match="foo"):
+        _ = PostgresCache(postgres_cache_config_fixture)
+
+
+def test_connected_when_connected(postgres_cache_config_fixture, mocker):
+    """Test the connected() method."""
+    # prevent real connection to PG instance
+    mocker.patch("psycopg2.connect")
+    cache = PostgresCache(postgres_cache_config_fixture)
+
+    # cache should be connected by default (even if it's a mocked connection)
+    assert cache.connected() is True
+
+
+def test_connected_when_disconnected(postgres_cache_config_fixture, mocker):
+    """Test the connected() 
method.""" + # prevent real connection to PG instance + mocker.patch("psycopg2.connect") + cache = PostgresCache(postgres_cache_config_fixture) + # simulate disconnected cache + cache.connection = None + + # now the cache should be disconnected + assert cache.connected() is False + + +def test_connected_when_connection_error(postgres_cache_config_fixture, mocker): + """Test the connected() method.""" + # prevent real connection to PG instance + mocker.patch("psycopg2.connect") + # simulate connection error + cache = PostgresCache(postgres_cache_config_fixture) + cache.connection = ConnectionMock() + assert cache.connection is not None + assert cache.connected() is False + + +def test_initialize_cache_when_connected(postgres_cache_config_fixture, mocker): + """Test the initialize_cache().""" + # prevent real connection to PG instance + mocker.patch("psycopg2.connect") + cache = PostgresCache(postgres_cache_config_fixture) + # should not fail + cache.initialize_cache() + + +def test_initialize_cache_when_disconnected(postgres_cache_config_fixture, mocker): + """Test the initialize_cache().""" + # prevent real connection to PG instance + mocker.patch("psycopg2.connect") + cache = PostgresCache(postgres_cache_config_fixture) + cache.connection = None + + with pytest.raises(CacheError, match="cache is disconnected"): + cache.initialize_cache() + + +def test_ready_method(postgres_cache_config_fixture, mocker): + """Test the ready() method.""" + # prevent real connection to PG instance + mocker.patch("psycopg2.connect") + cache = PostgresCache(postgres_cache_config_fixture) + + # should not fail + ready = cache.ready() + assert ready is True + + +def test_get_operation_when_disconnected(postgres_cache_config_fixture, mocker): + """Test the get() method.""" + # prevent real connection to PG instance + mocker.patch("psycopg2.connect") + cache = PostgresCache(postgres_cache_config_fixture) + + cache.connection = None + # no operation for @connection decorator + cache.connect 
= lambda: None + + with pytest.raises(CacheError, match="cache is disconnected"): + cache.get(USER_ID_1, CONVERSATION_ID_1, False) + + +def test_get_operation_when_connected(postgres_cache_config_fixture, mocker): + """Test the get() method.""" + # prevent real connection to PG instance + mocker.patch("psycopg2.connect") + cache = PostgresCache(postgres_cache_config_fixture) + + # should not fail + lst = cache.get(USER_ID_1, CONVERSATION_ID_1, False) + assert not lst + + +def test_get_operation_returned_values(): + """Test the get() method.""" + # TODO: LCORE-721 + # TODO: Implement proper unit test for testing PostgreSQL cache 'get' operation + # returning 'real' values + # Need to mock the cursor.execute() method + + +def test_insert_or_append_when_disconnected(postgres_cache_config_fixture, mocker): + """Test the insert_or_append() method.""" + # prevent real connection to PG instance + mocker.patch("psycopg2.connect") + cache = PostgresCache(postgres_cache_config_fixture) + cache.connection = None + # no operation for @connection decorator + cache.connect = lambda: None + + with pytest.raises(CacheError, match="cache is disconnected"): + cache.insert_or_append(USER_ID_1, CONVERSATION_ID_1, cache_entry_1, False) + + +def test_insert_or_append_operation_when_connected( + postgres_cache_config_fixture, mocker +): + """Test the insert_or_append() method.""" + # prevent real connection to PG instance + mocker.patch("psycopg2.connect") + cache = PostgresCache(postgres_cache_config_fixture) + + # should not fail + cache.insert_or_append(USER_ID_1, CONVERSATION_ID_1, cache_entry_1, False) + + +def test_insert_or_append_operation_operation_error( + postgres_cache_config_fixture, mocker +): + """Test the insert_or_append() method.""" + # prevent real connection to PG instance + mocker.patch("psycopg2.connect") + cache = PostgresCache(postgres_cache_config_fixture) + + # no operation for @connection decorator + cache.connect = lambda: None + cache.connection = 
ConnectionMock() + + with pytest.raises(CacheError, match="insert_or_append"): + cache.insert_or_append(USER_ID_1, CONVERSATION_ID_1, cache_entry_1, False) + + +def test_delete_when_disconnected(postgres_cache_config_fixture, mocker): + """Test the delete() method.""" + # prevent real connection to PG instance + mocker.patch("psycopg2.connect") + cache = PostgresCache(postgres_cache_config_fixture) + + cache.connection = None + # no operation for @connection decorator + cache.connect = lambda: None + + with pytest.raises(CacheError, match="cache is disconnected"): + cache.delete(USER_ID_1, CONVERSATION_ID_1, False) + + +def test_delete_operation_when_connected(postgres_cache_config_fixture, mocker): + """Test the delete() method.""" + # prevent real connection to PG instance + mocker.patch("psycopg2.connect") + cache = PostgresCache(postgres_cache_config_fixture) + + # this is dirty trick! + # TODO: fix the `return deleted > 0` by using + # proper mock object! + with pytest.raises(TypeError, match=">"): + cache.delete(USER_ID_1, CONVERSATION_ID_1, False) + + +def test_delete_operation_operation_error(postgres_cache_config_fixture, mocker): + """Test the delete() method.""" + # prevent real connection to PG instance + mocker.patch("psycopg2.connect") + cache = PostgresCache(postgres_cache_config_fixture) + + # no operation for @connection decorator + cache.connect = lambda: None + cache.connection = ConnectionMock() + + with pytest.raises(CacheError, match="delete"): + cache.delete(USER_ID_1, CONVERSATION_ID_1, False) + + +def test_list_operation_when_disconnected(postgres_cache_config_fixture, mocker): + """Test the list() method.""" + # prevent real connection to PG instance + mocker.patch("psycopg2.connect") + cache = PostgresCache(postgres_cache_config_fixture) + + cache.connection = None + # no operation for @connection decorator + cache.connect = lambda: None + + with pytest.raises(CacheError, match="cache is disconnected"): + cache.list(USER_ID_1, False) + + 
+def test_list_operation_when_connected(postgres_cache_config_fixture, mocker): + """Test the list() method.""" + # prevent real connection to PG instance + mocker.patch("psycopg2.connect") + cache = PostgresCache(postgres_cache_config_fixture) + + # should not fail + lst = cache.list(USER_ID_1, False) + assert not lst