mirror of
https://github.com/saymrwulf/pytorch.git
synced 2026-05-14 20:57:59 +00:00
Summary: We need an implementation of RedisRemoteCacheBackend with the same API that we're using for FbMemcacheRemoteFxGraphCacheBackend. So we'll stop using the Triton implementation and adapt a version for use by inductor. I also renamed parameters and cache entries to match our cache terminology. Test Plan: Ran this command twice and inspected log output to ensure I got cache hits: ``` TORCH_LOGS=+torch._inductor.codecache TORCHINDUCTOR_FX_GRAPH_REMOTE_CACHE=1 python benchmarks/dynamo/torchbench.py --performance --inductor --device cuda --training --amp --print-compilation-time --only dcgan ``` Pull Request resolved: https://github.com/pytorch/pytorch/pull/127480 Approved by: https://github.com/oulgen
46 lines
1.1 KiB
Python
46 lines
1.1 KiB
Python
import os
from abc import abstractmethod
from typing import Optional
|
|
|
|
|
|
class RemoteCacheBackend:
    """
    A backend implementation for accessing a remote/distributed cache.

    Concrete backends (e.g. a Redis-backed implementation) override
    ``get`` and ``put``. NOTE(review): this class deliberately does not
    inherit from ``abc.ABC``, so the ``@abstractmethod`` markers are not
    enforced at runtime and the base class remains instantiable.
    """

    def __init__(self, cache_id: str) -> None:
        # `cache_id` namespaces cache entries for a given use case; the
        # base class keeps no state, so subclasses store it themselves.
        pass

    @abstractmethod
    def get(self, key: str) -> Optional[bytes]:
        """Return the payload cached under `key`, or None on a miss."""
        pass

    @abstractmethod
    def put(self, key: str, data: bytes):
        """Store `data` under `key` in the remote cache."""
        pass
|
|
|
|
|
|
class RedisRemoteCacheBackend(RemoteCacheBackend):
    """
    A Redis implementation of a remote/distributed cache.

    The connection target is read from the TRITON_REDIS_HOST and
    TRITON_REDIS_PORT environment variables (defaulting to
    localhost:6379); the key layout comes from
    TORCHINDUCTOR_REDIS_KEY_FORMAT.
    """

    def __init__(self, cache_id: str):
        # Lazy import: the `redis` package is only needed when this
        # backend is actually selected.
        import redis

        host = os.environ.get("TRITON_REDIS_HOST", "localhost")
        port = int(os.environ.get("TRITON_REDIS_PORT", 6379))

        self._cache_id = cache_id
        self._key_fmt = os.environ.get(
            "TORCHINDUCTOR_REDIS_KEY_FORMAT", "pt2:{cache_id}:{key}"
        )
        self._redis = redis.Redis(host=host, port=port)

    def _get_key(self, key: str) -> str:
        # Namespace the raw key with this backend's cache id using the
        # configured format string.
        return self._key_fmt.format(cache_id=self._cache_id, key=key)

    def get(self, key: str):
        """Fetch the payload stored under `key` (None on a miss)."""
        full_key = self._get_key(key)
        return self._redis.get(full_key)

    def put(self, key: str, data: bytes):
        """Store `data` under the namespaced form of `key`."""
        full_key = self._get_key(key)
        return self._redis.set(full_key, data)