Coverage for core / cache / db_cache.py: 90%

181 statements  

« prev     ^ index     » next       coverage.py v7.13.5, created at 2026-04-04 15:08 +0000

1# core/cache/db_cache.py 

2 

3import os 

4import json 

5import time 

6import sqlite3 

7import threading 

8from contextlib import contextmanager 

9from typing import Optional, Any 

10from dataclasses import fields, is_dataclass 

11from pydantic import BaseModel 

12from core.cache.cache_config import load_cache_config 

13from core.cache.file_cache import load_deserializable_classes 

14 

class DBCache:
    """SQLite-backed key/value cache storing JSON-serialized values.

    Values are serialized to JSON on write and rehydrated on read, with a
    best-effort attempt to reconstruct registered dataclass / pydantic-model
    instances from plain dicts.  A single shared connection is protected by a
    ``threading.Lock`` (see ``_get_connection``), so the cache may be used
    from multiple threads.
    """

    def __init__(self, config: Optional[dict] = None):
        """Open (or create) the cache database under ``config["cache"]["base_dir"]``.

        Args:
            config: Cache configuration mapping. When ``None``, it is loaded
                via ``load_cache_config()``.
        """
        if config is None:
            config = load_cache_config()
        self._base_dir = config["cache"]["base_dir"]
        self._db_path = os.path.join(self._base_dir, "cache.db")
        self._logger = None  # injected later via set_logger()
        self._deserializable_classes = load_deserializable_classes(
            config["cache"].get("deserializable_classes", [])
        )

        os.makedirs(self._base_dir, exist_ok=True)
        self._lock = threading.Lock()
        # check_same_thread=False: the single connection is shared across
        # threads, serialized by self._lock in _get_connection().
        self._conn = sqlite3.connect(self._db_path, check_same_thread=False)
        self._init_db()

    def _init_db(self):
        """Create the cache table and index if missing, and enable WAL mode.

        Errors are logged (when a logger is set) and otherwise swallowed so
        that construction stays best-effort.
        """
        try:
            with self._get_connection() as conn:
                # WAL allows concurrent readers while a writer is active.
                conn.execute("PRAGMA journal_mode=WAL")
                conn.execute("""
                    CREATE TABLE IF NOT EXISTS cache (
                        key TEXT PRIMARY KEY,
                        value TEXT,
                        updated_at REAL
                    )
                """)
                conn.execute("CREATE INDEX IF NOT EXISTS idx_updated_at ON cache(updated_at)")
        except Exception as e:
            if self._logger:
                self._logger.error(f"❌ DB 초기화 실패: {e}")

    @contextmanager
    def _get_connection(self):
        """Yield the shared connection under the lock.

        Commits on success; rolls back and re-raises on any exception.
        """
        with self._lock:
            try:
                yield self._conn
                self._conn.commit()
            except Exception:
                self._conn.rollback()
                raise

    def __del__(self):
        # Best-effort close; attributes may be missing if __init__ failed early.
        if hasattr(self, "_conn") and self._conn:
            try:
                self._conn.close()
            except Exception:
                # During interpreter shutdown sqlite3 may be partially torn
                # down; never raise from __del__.
                pass

    def set_logger(self, logger):
        """Inject a logger used for debug/error reporting."""
        self._logger = logger

    def _serialize(self, value: Any) -> Any:
        """Recursively convert *value* into JSON-compatible primitives.

        Objects exposing ``to_dict()`` and pydantic models are dumped to
        dicts; lists/tuples and dicts are processed element-wise; anything
        else is returned unchanged.
        """
        if hasattr(value, "to_dict") and callable(getattr(value, "to_dict")):
            return value.to_dict()
        elif isinstance(value, BaseModel):
            return value.model_dump()
        elif isinstance(value, (list, tuple)):
            return [self._serialize(item) for item in value]
        elif isinstance(value, dict):
            return {k: self._serialize(v) for k, v in value.items()}
        return value

    def _deserialize(self, raw_data: Any) -> Any:
        """Best-effort reconstruction of registered classes from raw JSON data.

        For dicts, pick the registered class whose declared fields are a
        subset of the dict's keys and cover at least half of them, preferring
        the highest coverage ratio.  ``ResCommonResponse`` is special-cased:
        its ``data`` payload is deserialized first, then the wrapper is
        returned immediately.  When no class matches, values are deserialized
        recursively and the container is returned as-is.
        """
        if isinstance(raw_data, dict):
            best_cls = None
            best_ratio = 0.0
            best_is_dataclass = False

            for cls in self._deserializable_classes:
                try:
                    if issubclass(cls, BaseModel):
                        cls_fields = set(cls.model_fields.keys())
                    elif is_dataclass(cls):
                        cls_fields = {f.name for f in fields(cls)}
                    else:
                        continue

                    # All declared fields must be present in the payload.
                    if not cls_fields.issubset(raw_data.keys()):
                        continue

                    # Require the class to explain at least half of the keys,
                    # so tiny classes don't greedily claim large dicts.
                    ratio = len(cls_fields) / len(raw_data) if raw_data else 0
                    if ratio < 0.5:
                        continue

                    if cls.__name__ == "ResCommonResponse":
                        if "data" in raw_data:
                            raw_data["data"] = self._deserialize(raw_data["data"])
                        if is_dataclass(cls):
                            return cls.from_dict(raw_data)
                        return cls.model_validate(raw_data)

                    if ratio > best_ratio:
                        best_ratio = ratio
                        best_cls = cls
                        best_is_dataclass = is_dataclass(cls)
                except Exception:
                    # A candidate class that cannot be inspected/matched is
                    # simply skipped; matching is best-effort.
                    ...

            if best_cls is not None:
                try:
                    if best_is_dataclass:
                        return best_cls.from_dict(raw_data)
                    return best_cls.model_validate(raw_data)
                except Exception:
                    # Validation failure falls through to plain-dict handling.
                    ...

            return {k: self._deserialize(v) for k, v in raw_data.items()}
        elif isinstance(raw_data, (list, tuple)):
            return [self._deserialize(item) for item in raw_data]
        return raw_data

    def set(self, key: str, value: Any, save_to_file: bool = False):
        """Persist *value* under *key* as JSON.

        No-op unless ``save_to_file`` is True (callers opt in to durable
        storage).  Serialization/DB errors are logged, never raised.
        """
        if save_to_file:
            try:
                serialized_data = self._serialize(value)
                json_str = json.dumps(serialized_data, ensure_ascii=False)
                now = time.time()

                with self._get_connection() as conn:
                    conn.execute("INSERT OR REPLACE INTO cache (key, value, updated_at) VALUES (?, ?, ?)", (key, json_str, now))

                if self._logger:
                    self._logger.debug(f"💾 DB cache 저장: {key}")
            except Exception as e:
                if self._logger:
                    self._logger.error(f"❌ DB cache 저장 실패: {e}")

    def delete(self, key: str):
        """Remove *key* from the cache (no error if absent)."""
        try:
            with self._get_connection() as conn:
                conn.execute("DELETE FROM cache WHERE key = ?", (key,))
            if self._logger:
                self._logger.debug(f"🗑️ DB cache 삭제됨: {key}")
        except Exception as e:
            if self._logger:
                self._logger.error(f"❌ DB cache 삭제 실패: {e}")

    def clear(self):
        """Delete every entry in the cache."""
        try:
            with self._get_connection() as conn:
                conn.execute("DELETE FROM cache")
            if self._logger:
                self._logger.debug("🗑️ 전체 DB cache 삭제됨")
        except Exception as e:
            if self._logger:
                self._logger.error(f"❌ 전체 DB 캐시 삭제 실패: {e}")

    def cleanup_old_files(self, days: int = 7, max_size_mb: int = 0):
        """Purge stale entries and optionally enforce a total-size cap.

        Args:
            days: Age threshold for general entries.  OHLCV / indicator
                entries (keys ``ohlcv_past_*`` / ``indicators_chart_*``) are
                kept for a full year regardless.
            max_size_mb: When > 0, evict oldest entries until the summed
                value size fits within this many megabytes.
        """
        cutoff = time.time() - (days * 86400)
        ohlcv_cutoff = time.time() - (365 * 86400)  # OHLCV data is kept for one year

        try:
            with self._get_connection() as conn:
                # Delete general entries (excluding OHLCV and indicator data).
                conn.execute("DELETE FROM cache WHERE updated_at < ? AND key NOT LIKE 'ohlcv_past_%' AND key NOT LIKE 'indicators_chart_%'", (cutoff,))
                # Delete OHLCV / indicator entries older than one year.
                conn.execute("DELETE FROM cache WHERE updated_at < ? AND (key LIKE 'ohlcv_past_%' OR key LIKE 'indicators_chart_%')", (ohlcv_cutoff,))

                # Enforce the size cap (based on stored value length).
                if max_size_mb > 0:
                    cursor = conn.execute("SELECT SUM(LENGTH(value)) FROM cache")
                    total_size = cursor.fetchone()[0] or 0
                    limit_size = max_size_mb * 1024 * 1024

                    if total_size > limit_size:
                        bytes_to_remove = total_size - limit_size
                        # Collect keys to delete, oldest first.
                        rows = conn.execute("SELECT key, LENGTH(value) FROM cache ORDER BY updated_at ASC").fetchall()

                        keys_to_delete = []
                        removed_amount = 0
                        for key, size in rows:
                            keys_to_delete.append(key)
                            removed_amount += (size or 0)
                            if removed_amount >= bytes_to_remove:
                                break

                        if keys_to_delete:
                            # Batch deletes (900 at a time) to stay under
                            # SQLite's bound-variable limit.
                            for i in range(0, len(keys_to_delete), 900):
                                batch = keys_to_delete[i:i+900]
                                placeholders = ','.join('?' for _ in batch)
                                conn.execute(f"DELETE FROM cache WHERE key IN ({placeholders})", batch)

                            if self._logger:
                                self._logger.debug(f"🗑️ DB cache 용량 초과로 {len(keys_to_delete)}개 항목 삭제됨")

            if self._logger:
                self._logger.debug(f"🗑️ 오래된 DB cache 정리 완료 (기준: {days}일 전)")
        except Exception as e:
            if self._logger:
                self._logger.error(f"❌ DB 캐시 정리 실패: {e}")

    def get_raw(self, key: str):
        """Return the stored wrapper dict for *key*, or None.

        The stored JSON is expected to be a wrapper containing a ``data``
        entry; the payload is passed through ``_deserialize`` before the
        wrapper is returned.  Any error (missing key, malformed JSON, absent
        ``data`` entry) is logged and yields None.
        """
        try:
            with self._get_connection() as conn:
                cursor = conn.execute("SELECT value FROM cache WHERE key = ?", (key,))
                row = cursor.fetchone()
                if row:
                    wrapper = json.loads(row[0])
                    data = wrapper["data"]
                    wrapper['data'] = self._deserialize(data)
                    return wrapper
        except Exception as e:
            if self._logger:
                self._logger.error(f"[DBCache] Load Error: {e}")
        return None

    def exists(self, key: str) -> bool:
        """Return True if *key* is present in the cache; False on any error."""
        try:
            with self._get_connection() as conn:
                cursor = conn.execute("SELECT 1 FROM cache WHERE key = ?", (key,))
                return cursor.fetchone() is not None
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # still propagate.
            return False