// RedisBatchWriteService — pipelined batch writes to Redis, gated by a total-count check.
// Service that batch-writes key/value pairs to Redis via pipelining.
// Every write is gated by a total-count check first, and each operation's
// result is reported to a monitor service.
// NOTE(review): field injection via @Autowired; constructor injection would be
// the more testable modern style — confirm before changing wiring.
@Service
public class RedisBatchWriteService {

    // Template used for the pipelined SET operations and value serialization.
    @Autowired
    private RedisTemplate<String, Object> redisTemplate;

    // Pre-write gate: decides whether a write of the given size may proceed.
    @Autowired
    private RedisTotalCheckService totalCheckService;

    // Tuning values; checkInterval is used as the pause between batches.
    @Autowired
    private RedisBatchWriteProperties properties;

    // Receives the result metrics of each batch-write operation.
    @Autowired
    private RedisBatchWriteMonitorService monitorService;

    private static final Logger logger = LoggerFactory.getLogger(RedisBatchWriteService.class);
public RedisBatchWriteResult batchWriteWithTotalCheck(Map<String, Object> dataMap) { logger.info("开始批量写入操作,数据量: {}", dataMap.size()); RedisBatchWriteResult result = new RedisBatchWriteResult(); result.setTotalCount(dataMap.size()); result.setStartTime(System.currentTimeMillis()); try { TotalCheckResult checkResult = totalCheckService.checkTotal(dataMap.size()); if (!checkResult.isAllowed()) { logger.warn("总量检查失败: {}", checkResult.getMessage()); result.setSuccess(false); result.setError(checkResult.getMessage()); result.setEndTime(System.currentTimeMillis()); return result; } result = executeBatchWrite(dataMap); monitorService.recordBatchWrite(result); logger.info("批量写入完成,成功: {}, 失败: {}, 总耗时: {}ms", result.getSuccessCount(), result.getFailureCount(), result.getDuration()); return result; } catch (Exception e) { logger.error("批量写入异常", e); result.setSuccess(false); result.setError("批量写入异常: " + e.getMessage()); result.setEndTime(System.currentTimeMillis()); return result; } }
public RedisBatchWriteResult batchWriteWithTotalCheck(Map<String, Object> dataMap, int batchSize) { logger.info("开始分批写入操作,总数据量: {}, 批次大小: {}", dataMap.size(), batchSize); RedisBatchWriteResult result = new RedisBatchWriteResult(); result.setTotalCount(dataMap.size()); result.setStartTime(System.currentTimeMillis()); try { TotalCheckResult checkResult = totalCheckService.checkTotal(dataMap.size()); if (!checkResult.isAllowed()) { logger.warn("总量检查失败: {}", checkResult.getMessage()); result.setSuccess(false); result.setError(checkResult.getMessage()); result.setEndTime(System.currentTimeMillis()); return result; } List<Map<String, Object>> batches = partitionMap(dataMap, batchSize); int successCount = 0; int failureCount = 0; for (int i = 0; i < batches.size(); i++) { Map<String, Object> batch = batches.get(i); try { RedisBatchWriteResult batchResult = executeBatchWrite(batch); if (batchResult.isSuccess()) { successCount += batchResult.getSuccessCount(); failureCount += batchResult.getFailureCount(); } else { failureCount += batch.size(); } logger.info("批次 {} 写入完成,成功: {}, 失败: {}", i + 1, batchResult.getSuccessCount(), batchResult.getFailureCount()); if (i < batches.size() - 1) { Thread.sleep(properties.getCheckInterval()); } } catch (Exception e) { logger.error("批次 {} 写入异常", i + 1, e); failureCount += batch.size(); } } result.setSuccessCount(successCount); result.setFailureCount(failureCount); result.setSuccess(successCount > 0); result.setEndTime(System.currentTimeMillis()); monitorService.recordBatchWrite(result); logger.info("分批写入完成,成功: {}, 失败: {}, 总耗时: {}ms", successCount, failureCount, result.getDuration()); return result; } catch (Exception e) { logger.error("分批写入异常", e); result.setSuccess(false); result.setError("分批写入异常: " + e.getMessage()); result.setEndTime(System.currentTimeMillis()); return result; } }
private RedisBatchWriteResult executeBatchWrite(Map<String, Object> dataMap) { RedisBatchWriteResult result = new RedisBatchWriteResult(); result.setTotalCount(dataMap.size()); result.setStartTime(System.currentTimeMillis()); try { List<Object> results = redisTemplate.executePipelined(new RedisCallback<Object>() { @Override public Object doInRedis(RedisConnection connection) throws DataAccessException { for (Map.Entry<String, Object> entry : dataMap.entrySet()) { connection.set(entry.getKey().getBytes(), redisTemplate.getValueSerializer().serialize(entry.getValue())); } return null; } }); int successCount = 0; int failureCount = 0; for (Object obj : results) { if (obj != null && "OK".equals(obj.toString())) { successCount++; } else { failureCount++; } } result.setSuccessCount(successCount); result.setFailureCount(failureCount); result.setSuccess(successCount > 0); result.setEndTime(System.currentTimeMillis()); return result; } catch (Exception e) { logger.error("执行批量写入异常", e); result.setSuccess(false); result.setError("执行批量写入异常: " + e.getMessage()); result.setEndTime(System.currentTimeMillis()); return result; } }
private List<Map<String, Object>> partitionMap(Map<String, Object> dataMap, int batchSize) { List<Map<String, Object>> batches = new ArrayList<>(); List<Map.Entry<String, Object>> entries = new ArrayList<>(dataMap.entrySet()); for (int i = 0; i < entries.size(); i += batchSize) { int end = Math.min(i + batchSize, entries.size()); Map<String, Object> batch = new HashMap<>(); for (int j = i; j < end; j++) { Map.Entry<String, Object> entry = entries.get(j); batch.put(entry.getKey(), entry.getValue()); } batches.add(batch); } return batches; } }