Conversation
|
|
||
| result = await func(*args, **kwargs) | ||
| if result is not None: | ||
| await self.cache.set(_key, result, ttl=current_ttl) |
There was a problem hiding this comment.
p3. Even if the result is None, I think it's better to cache it; caching None prevents the code from repeatedly querying the source for a key that is known to have no value.
There was a problem hiding this comment.
But Redis/Valkey don't allow setting a None value, so you'll need another approach. In my case, I used a special sentinel value to represent None in Valkey, e.g. '_NA'.
| logger = logging.getLogger(__name__) | ||
|
|
||
|
|
||
| class ValkeyCacheDecorator(CacheDecoratorInterface): |
There was a problem hiding this comment.
p3. Stepping back to a more fundamental question: do we really need a separate decorator implementation for each cache backend?
I think a single sync/async decorator is enough; adding another kind of cache this way duplicates a lot of source code.
For example, the Redis cache decorator and the Valkey cache decorator have nearly identical interfaces and logic.
| cached_keys: list[str] = await self.cache.get_keys_regex( | ||
| target_func_name=target_func_name, pattern=pattern | ||
| ) | ||
| for cache_key in cached_keys: |
There was a problem hiding this comment.
p3. You can use a pipeline to send these commands in one round trip, though that might require adding a pipeline method to the cache class's interface.
| dependencies = [ | ||
| "redis>=6.2.0", | ||
| "valkey-glide>=2.0.1", | ||
| ] |
| async def _serialize(self, value: Any) -> bytes: | ||
| data = pickle.dumps(value) | ||
| return data | ||
|
|
||
| async def _deserialize(self, data: bytes | None) -> Any: | ||
| if data is None: | ||
| return None | ||
| data = pickle.loads(data) # noqa: S301 | ||
| return data |
There was a problem hiding this comment.
- I think we could serialize with msgpack instead of pickle.
- Can we separate the serializer/deserializer into its own layer?
resolve issue #3 (Valkey Support)