25.09.10
@@ -1,134 +0,0 @@
# -*- coding: utf-8 -*-
import logging
from typing import AsyncGenerator, Optional

from redis.asyncio import ConnectionPool, Redis

import setting

logger = logging.getLogger(__name__)

REDIS_URL = setting.REDIS_URL

# Redis key prefixes
FAILED_DOMAINS_PREFIX = "favicon:failed_domain:"  # failed domains
TASK_QUEUE_PREFIX = "favicon:task_queue:"  # task queue
PROCESSING_SET_PREFIX = "favicon:processing:"  # set of tasks currently being processed
ICON_QUEUE_PREFIX = "favicon:icon_queue:"

# Shared connection pool; decode_responses=True so values come back as str.
pool = ConnectionPool.from_url(
    REDIS_URL,
    max_connections=200,
    decode_responses=True,
)


async def get_redis() -> AsyncGenerator[Redis, None]:
    """Yield a Redis client backed by the shared connection pool."""
    async with Redis(connection_pool=pool) as conn:
        yield conn
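

# Usage note (an illustrative sketch, not text from the original file): every helper
# below obtains a client by iterating the generator once, e.g.
#
#     async for redis in get_redis():
#         await redis.ping()
#
# The single-yield AsyncGenerator shape would also suit async dependency injection,
# but that wiring is an assumption and is not shown here.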


async def set_cache(key: str, value: str | int, ttl: Optional[int] = None, prefix: Optional[str] = None) -> None:
    """Store a value; when a prefix is given, also record the key in a Set named after the prefix."""
    if not key:
        return

    try:
        async for redis in get_redis():
            _key = key
            if prefix:
                _key = f"{prefix}{key}"
                # Track the key in the prefix Set so entries under this prefix can be counted.
                await redis.sadd(prefix, key)
                if ttl is not None:
                    await redis.expire(prefix, ttl)
            await redis.set(_key, value, ex=ttl)
    except Exception as e:
        logger.error(f"Error writing to Redis: {e}")
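

# Illustrative example (a sketch, not part of the original file): supplying a prefix
# both namespaces the key and records it in a Set named after the prefix, so
# get_cache_size(prefix), defined below, can report how many entries it holds:
#
#     await set_cache("example.com", "example.com", ttl=3600, prefix=FAILED_DOMAINS_PREFIX)
#     count = await get_cache_size(FAILED_DOMAINS_PREFIX)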


async def get_cache(key: str, prefix: Optional[str] = None) -> Optional[str | int]:
    if not key:
        return None

    try:
        async for redis in get_redis():
            if prefix:
                key = f"{prefix}{key}"
            return await redis.get(key)
    except Exception as e:
        logger.error(f"Error reading from Redis: {e}")


async def exist_cache(key: str, prefix: Optional[str] = None) -> bool:
    if not key:
        return False

    try:
        async for redis in get_redis():
            if prefix:
                key = f"{prefix}{key}"
            result = await redis.exists(key)
            return result > 0
    except Exception as e:
        logger.error(f"Error reading from Redis: {e}")
        return False


async def remove_cache(key: str, prefix: Optional[str] = None) -> None:
    if not key:
        return

    try:
        async for redis in get_redis():
            _key = key
            if prefix:
                _key = f"{prefix}{key}"
                # Also drop the key from the prefix Set it was tracked in.
                await redis.srem(prefix, key)
            await redis.delete(_key)
    except Exception as e:
        logger.error(f"Error deleting from Redis: {e}")


async def get_cache_size(prefix: Optional[str] = None) -> int:
    """Count the members of the Set tracked under the given prefix."""
    try:
        async for redis in get_redis():
            return await redis.scard(prefix)
    except Exception as e:
        logger.error(f"Error getting queue size: {e}")
    return 0


async def set_failed_domain(domain: str, expire_seconds: Optional[int] = None) -> None:
    if not domain:
        return

    try:
        await set_cache(f"{domain}", domain, ttl=expire_seconds, prefix=FAILED_DOMAINS_PREFIX)
        logger.debug(f"Stored failed domain {domain} in Redis, expiring in {expire_seconds} seconds")
    except Exception as e:
        logger.error(f"Error storing failed domain in Redis: {e}")


async def is_domain_failed(domain: str) -> bool:
    if not domain:
        return False

    try:
        return await exist_cache(domain, prefix=FAILED_DOMAINS_PREFIX)
    except Exception as e:
        logger.error(f"Error checking whether the domain has failed: {e}")
        return False


async def delete_failed_domain(domain: str) -> None:
    if not domain:
        return

    try:
        await remove_cache(domain, prefix=FAILED_DOMAINS_PREFIX)
        logger.debug(f"Removed failed domain {domain} from Redis")
    except Exception as e:
        logger.error(f"Error removing failed domain from Redis: {e}")
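

# Caller-side sketch (hypothetical: fetch_favicon and the 86400-second TTL are
# assumptions, not part of this module):
#
#     if await is_domain_failed(domain):
#         return None
#     try:
#         icon = await fetch_favicon(domain)
#     except Exception:
#         await set_failed_domain(domain, expire_seconds=86400)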