Fix some defects
parent 8862c20532
commit 476d3cd454
@@ -3,6 +3,7 @@ package com.yfd.platform.modules.experimentalData.service.impl;
import cn.hutool.core.collection.CollUtil;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.*;
import java.nio.file.attribute.BasicFileAttributes;
@@ -63,6 +64,9 @@ import org.apache.commons.compress.compressors.gzip.GzipCompressorOutputStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.scheduling.annotation.Async;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.core.context.SecurityContextHolder;
@@ -127,6 +131,10 @@ public class TsFilesServiceImpl extends ServiceImpl<TsFilesMapper, TsFiles> impl
private TaskStatusHolder taskStatusHolder;

@Autowired
@Qualifier("myCustomRedisTemplate") // reference the custom RedisTemplate bean by name
private RedisTemplate<String, Object> redisTemplate;

/**********************************
* Purpose: paged query of experiment data management - document content
* Parameters
@@ -143,6 +151,20 @@ public class TsFilesServiceImpl extends ServiceImpl<TsFilesMapper, TsFiles> impl
***********************************/
@Override
public Page<TsFiles> getTsFilesPage(String id, String fileName, String startDate, String endDate, String keywords, String nodeId, String taskId, String fileName1, String childNode, Page<TsFiles> page) throws Exception {

// int currentPage = (int) page.getCurrent(); // get the current page number
// // check whether this is one of the first five pages
// if (currentPage >= 1 && currentPage <= 5) {
// // build the full Redis key including the page number
// String redisKey = "tsfiles_" + taskId + "_" + nodeId + "_page_" + currentPage;
//
// // try to fetch the cached page from Redis
// Page<TsFiles> cachedPage = (Page<TsFiles>) redisTemplate.opsForValue().get(redisKey);
// if (cachedPage != null) {
// return cachedPage; // return the cached data directly
// }
// }

// query the dictionary table for the compressed-file suffixes
QueryWrapper<SysDictionaryItems> queryWrapperSysDictionary = new QueryWrapper<>();
queryWrapperSysDictionary.eq("parentcode", "compressType");
@@ -250,8 +272,17 @@ public class TsFilesServiceImpl extends ServiceImpl<TsFilesMapper, TsFiles> impl
// String ProcessingPath = processingPath(tsFiles.getWorkPath(), nodeId);
// tsFiles.setWorkPath(ProcessingPath);
}

tsFilesPage.setRecords(records); // sync the updated records back into tsFilesPage

System.out.println("Updated records: " + records);
// // if this is one of the first five pages, cache the result in Redis (suggested TTL: 30 minutes)
// if (currentPage >= 1 && currentPage <= 5) {
// String redisKey = "tsfiles_" + taskId + "_" + nodeId + "_page_" + currentPage;
// redisTemplate.opsForValue().set(redisKey, tsFilesPage, 2 * 60 * 60);
// }
return tsFilesPage;

}
@@ -357,6 +388,13 @@ public class TsFilesServiceImpl extends ServiceImpl<TsFilesMapper, TsFiles> impl
@Override
public ResponseResult addTsFiles(TsFiles tsFiles) {

// // TODO: after a successful insert, evict the Redis cache
// for (int page = 1; page <= 5; page++) {
// String redisKey = "tsfiles_" + tsFiles.getTaskId() + "_" + tsFiles.getNodeId() + "_page_" + page;
// redisTemplate.delete(redisKey);
// }
// LOGGER.info("已清理缓存:taskid={}, node={}, pages=1-5", tsFiles.getTaskId(), tsFiles.getNodeId());

Boolean value = true;
// file names and sizes; multiple uploads are supported, so the values are comma-separated
List<String> names = Arrays.asList(tsFiles.getFileName().split(","));
@@ -482,6 +520,13 @@ public class TsFilesServiceImpl extends ServiceImpl<TsFilesMapper, TsFiles> impl
@Transactional(rollbackFor = Exception.class) // transactional: roll back on any exception
public ResponseResult addTsFile(TsFiles tsFiles) throws IOException {

// // TODO: after a successful insert, evict the Redis cache
// for (int page = 1; page <= 5; page++) {
// String redisKey = "tsfiles_" + tsFiles.getTaskId() + "_" + tsFiles.getNodeId() + "_page_" + page;
// redisTemplate.delete(redisKey);
// }
// LOGGER.info("已清理缓存:taskid={}, node={}, pages=1-5", tsFiles.getTaskId(), tsFiles.getNodeId());

if (tsFiles.getIsFile().equals("FILE")) {
StorageSourceConfig config = getStorageConfig("filePath", "local");
String basePath = config.getValue() + tsFiles.getWorkPath();
@@ -585,7 +630,12 @@ public class TsFilesServiceImpl extends ServiceImpl<TsFilesMapper, TsFiles> impl
@Transactional(rollbackFor = Exception.class) // transactional: roll back on any exception
public ResponseResult updateTsFiles(TsFiles tsFiles) {


// // TODO: after a successful insert, evict the Redis cache
// for (int page = 1; page <= 5; page++) {
// String redisKey = "tsfiles_" + tsFiles.getTaskId() + "_" + tsFiles.getNodeId() + "_page_" + page;
// redisTemplate.delete(redisKey);
// }
// LOGGER.info("已清理缓存:taskid={}, node={}, pages=1-5", tsFiles.getTaskId(), tsFiles.getNodeId());
// validate that the file name contains no illegal characters
String fileName = tsFiles.getFileName();
if (containsInvalidCharacters(fileName)) {
@@ -861,10 +911,18 @@ public class TsFilesServiceImpl extends ServiceImpl<TsFilesMapper, TsFiles> impl
public String deleteTsFilesByIds(List<String> dataset, String type) {
List<TsFiles> filesList = tsFilesMapper.selectBatchIds(dataset);


int LocalSuccessCount = 0, LocalFailCount = 0, Localtotal = CollUtil.size(dataset);
// TODO: the most direct approach, loop over the records and delete them one by one
for (TsFiles files : filesList) {

// // TODO: after a successful delete, evict the Redis cache
// for (int page = 1; page <= 5; page++) {
// String redisKey = "tsfiles_" + files.getTaskId() + "_" + files.getNodeId() + "_page_" + page;
// redisTemplate.delete(redisKey);
// }
// LOGGER.info("已清理缓存:taskid={}, node={}, pages=1-5", files.getTaskId(), files.getNodeId());

// check whether this is a file or a folder
if ("FOLDER".equals(files.getIsFile())) {
// if it is a folder
@@ -2102,67 +2160,107 @@ public class TsFilesServiceImpl extends ServiceImpl<TsFilesMapper, TsFiles> impl

// ZIP implementation that keeps the original extraction flow
// revised ZIP extraction method (fixes the extraction problems)

private File unzipFile(Path sourcePath, String baseDir) throws IOException {
// create the target directory
Path destRoot = Paths.get(baseDir);
Files.createDirectories(destRoot);
Path destRoot = Paths.get(baseDir).normalize();
java.nio.file.Files.createDirectories(destRoot);

try (ZipInputStream zis = new ZipInputStream(
Files.newInputStream(sourcePath), StandardCharsets.UTF_8)) {

ZipEntry entry;
while ((entry = zis.getNextEntry()) != null) {
try {
// skip macOS system files
if (entry.getName().startsWith("__MACOSX")) {
continue;
}

// normalize the path and handle directory markers
String entryName = entry.getName()
.replace("\\", "/")
.replaceFirst("^/+", ""); // strip leading slashes

// detect directories (also handles entries ending with '/')
boolean isDirectory = entry.isDirectory() || entry.getName().endsWith("/");

// adjust the path: strip the top-level directory (assumes all files live under a single top-level directory)
if (entryName.contains("/")) {
int firstSlash = entryName.indexOf('/');
entryName = entryName.substring(firstSlash + 1);

// if the name is empty after stripping, skip the top-level directory entry
if (entryName.isEmpty() && isDirectory) {
continue;
}
}

Path targetPath = destRoot.resolve(entryName).normalize();
validatePathSafetya(targetPath, destRoot);
// handle directories
if (isDirectory) {
Files.createDirectories(targetPath);
}
// handle files
else {
if (Files.exists(targetPath)) {
LOGGER.warn("文件已存在,跳过覆盖: {}", targetPath);
continue;
}
// make sure the parent directory exists
Files.createDirectories(targetPath.getParent());
try (OutputStream os = Files.newOutputStream(targetPath)) {
IOUtils.copy(zis, os);
}
}
} finally {
zis.closeEntry(); // make sure each entry is closed exactly once
// try UTF-8 first, then fall back to GBK
Charset[] charsets = {StandardCharsets.UTF_8, Charset.forName("GBK")};
for (Charset charset : charsets) {
try (ZipInputStream zis = new ZipInputStream(java.nio.file.Files.newInputStream(sourcePath), charset)) {
ZipEntry entry;
while ((entry = zis.getNextEntry()) != null) {
processZipEntry(zis, entry, destRoot);
}
return destRoot.toFile(); // extraction succeeded, return immediately
} catch (IllegalArgumentException | IOException e) {
LOGGER.debug("编码 {} 解压失败,尝试下一个编码", charset, e);
}
}
throw new IOException("无法使用UTF-8或GBK编码解压文件");
}

return destRoot.toFile();
private void processZipEntry(ZipInputStream zis, ZipEntry entry, Path destRoot) throws IOException {
// 1. skip all system and hidden files
if (shouldSkipEntry(entry)) {
LOGGER.debug("跳过系统文件:{}", entry.getName());
return;
}

// 2. sanitize the file name and build a safe target path
String sanitizedName = sanitizeFileName(entry.getName());
Path targetPath = buildSafePath(destRoot, sanitizedName);

// 3. only process real files (skip empty directories)
if (!entry.isDirectory()) {
writeFileContent(zis, targetPath);
}
}
/**
* Decide whether a ZIP entry should be skipped
*/
private boolean shouldSkipEntry(ZipEntry entry) {
String name = entry.getName();
return name.startsWith("__MACOSX/") || // macOS system directory
name.contains("/.DS_Store") || // macOS resource file
name.startsWith(".") || // hidden files
name.endsWith("Thumbs.db"); // Windows thumbnail cache
}

/**
* File-name sanitization (keeps the path separator /)
*/
private String sanitizeFileName(String original) {
// replace illegal characters (keeping / and \)
String sanitized = original
.replaceAll("[*?\"<>|\0]", "_") // only replace * ? " < > | and the NUL character
.replace("\\", "/") // normalize path separators to /
.replaceFirst("^/+", ""); // strip leading slashes

// defend against path-traversal attacks
sanitized = sanitized.replaceAll("\\.\\./", "_");
return sanitized;
}

/**
* Build a safe target path
*/
private Path buildSafePath(Path destRoot, String sanitizedName) throws IOException {
Path targetPath = destRoot.resolve(sanitizedName).normalize();

// second path-traversal check
if (!targetPath.startsWith(destRoot)) {
throw new IOException("检测到非法路径穿越:" + targetPath);
}
return targetPath;
}

/**
* Write the file content (creating directories only when needed)
*/
private void writeFileContent(ZipInputStream zis, Path targetPath) throws IOException {
// only write when the file does not already exist
if (!java.nio.file.Files.exists(targetPath)) {
// create the parent directory on demand
Path parent = targetPath.getParent();
if (parent != null && !java.nio.file.Files.exists(parent)) {
java.nio.file.Files.createDirectories(parent);
}

// use a buffered stream for better throughput
try (BufferedOutputStream os = new BufferedOutputStream(java.nio.file.Files.newOutputStream(targetPath))) {
byte[] buffer = new byte[8192];
int bytesRead;
while ((bytesRead = zis.read(buffer)) != -1) {
os.write(buffer, 0, bytesRead);
}
}
LOGGER.debug("成功解压文件:{}", targetPath);
} else {
LOGGER.warn("文件已存在,跳过覆盖:{}", targetPath);
}
}

/**
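The charset-fallback pattern used in unzipFile above can also be exercised on its own. The sketch below is illustrative only (the class name and the sample.zip path are assumptions, not part of this commit): it lists archive entries with UTF-8 first and falls back to GBK, mirroring the loop above.

import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;

public class CharsetFallbackUnzipDemo {
    public static void main(String[] args) throws IOException {
        Path archive = Paths.get("sample.zip"); // illustrative archive path
        Charset[] charsets = {StandardCharsets.UTF_8, Charset.forName("GBK")};
        IOException lastFailure = null;
        for (Charset charset : charsets) {
            try (InputStream in = Files.newInputStream(archive);
                 ZipInputStream zis = new ZipInputStream(in, charset)) {
                ZipEntry entry;
                while ((entry = zis.getNextEntry()) != null) {
                    // entry names are decoded with the current charset
                    System.out.println(charset + " -> " + entry.getName());
                }
                return; // listing succeeded, stop trying further charsets
            } catch (IllegalArgumentException | IOException e) {
                lastFailure = new IOException("listing failed with charset " + charset, e);
            }
        }
        throw lastFailure != null ? lastFailure : new IOException("no charset could decode the archive");
    }
}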
@@ -665,15 +665,15 @@ public class TsNodesServiceImpl extends ServiceImpl<TsNodesMapper, TsNodes> impl
throw new Exception("该试验任务管理项目目录不存在或没有项目文档,请先建立项目目录和文档。");
}

// fetch the records whose parent node is the root ("00") for this task ID
List<TsNodes> tsNodes = tsNodesMapper.selectList(new LambdaQueryWrapper<TsNodes>().eq(TsNodes::getParentId, "00").eq(TsNodes::getTaskId, id));

// Step 1: collect the existing nodeName values
Set<String> existingNodeNames = tsNodes.stream().map(TsNodes::getNodeName).collect(Collectors.toSet());

// Step 2: filter the new entries, i.e. the folders that still need to be inserted into the database (first level of the tree)
List<FileItemResult> fileItemNewList = fileItemList.stream().filter(fileItem -> !existingNodeNames.contains(fileItem.getName())).collect(Collectors.toList());
firstLayerData(fileItemNewList, id);
// // fetch the records whose parent node is the root ("00") for this task ID
// List<TsNodes> tsNodes = tsNodesMapper.selectList(new LambdaQueryWrapper<TsNodes>().eq(TsNodes::getParentId, "00").eq(TsNodes::getTaskId, id));
//
// // Step 1: collect the existing nodeName values
// Set<String> existingNodeNames = tsNodes.stream().map(TsNodes::getNodeName).collect(Collectors.toSet());
//
// // Step 2: filter the new entries, i.e. the folders that still need to be inserted into the database (first level of the tree)
// List<FileItemResult> fileItemNewList = fileItemList.stream().filter(fileItem -> !existingNodeNames.contains(fileItem.getName())).collect(Collectors.toList());
firstLayerData(fileItemList, id);

return "扫描完成";
}
@@ -95,18 +95,18 @@ public class FilesServiceImpl extends ServiceImpl<FilesMapper, Files> implements
// build the base part of the Redis cache key (nodeId + projectId)


int currentPage = (int) page.getCurrent(); // get the current page number
// check whether this is one of the first five pages
if (currentPage >= 1 && currentPage <= 5) {
// build the full Redis key including the page number
String redisKey = "sdfiles_" + projectId + "_" + nodeId + "_page_" + currentPage;

// try to fetch the cached page from Redis
Page<Files> cachedPage = (Page<Files>) redisTemplate.opsForValue().get(redisKey);
if (cachedPage != null) {
return cachedPage; // return the cached data directly
}
}
// int currentPage = (int) page.getCurrent(); // get the current page number
// // check whether this is one of the first five pages
// if (currentPage >= 1 && currentPage <= 5) {
// // build the full Redis key including the page number
// String redisKey = "sdfiles_" + projectId + "_" + nodeId + "_page_" + currentPage;
//
// // try to fetch the cached page from Redis
// Page<Files> cachedPage = (Page<Files>) redisTemplate.opsForValue().get(redisKey);
// if (cachedPage != null) {
// return cachedPage; // return the cached data directly
// }
// }


// first query all files under the path
@@ -199,11 +199,11 @@ public class FilesServiceImpl extends ServiceImpl<FilesMapper, Files> implements
}
filesPage.setRecords(records);

// if this is one of the first five pages, cache the result in Redis (suggested TTL: 30 minutes)
if (currentPage >= 1 && currentPage <= 5) {
String redisKey = "sdfiles:" + projectId + ":" + nodeId + ":page:" + currentPage;
redisTemplate.opsForValue().set(redisKey, filesPage, 30 * 60);
}
// // if this is one of the first five pages, cache the result in Redis (suggested TTL: 30 minutes)
// if (currentPage >= 1 && currentPage <= 5) {
// String redisKey = "sdfiles:" + projectId + ":" + nodeId + ":page:" + currentPage;
// redisTemplate.opsForValue().set(redisKey, filesPage, 2 * 60 * 60);
// }

return filesPage;
}
@@ -281,12 +281,12 @@ public class FilesServiceImpl extends ServiceImpl<FilesMapper, Files> implements
for (Files filess : filesToSave) {
int valueAdded = filesMapper.insert(filess);
if (valueAdded == 1) {
// TODO: after a successful insert, evict the Redis cache
for (int page = 1; page <= 5; page++) {
String redisKey = "sdfiles_" + filess.getProjectId() + "_" + filess.getNodeId() + "_page_" + page;
redisTemplate.delete(redisKey);
}
LOGGER.info("已清理缓存:project={}, node={}, pages=1-5", filess.getProjectId(), filess.getNodeId());
// // TODO: after a successful insert, evict the Redis cache
// for (int page = 1; page <= 5; page++) {
// String redisKey = "sdfiles_" + filess.getProjectId() + "_" + filess.getNodeId() + "_page_" + page;
// redisTemplate.delete(redisKey);
// }
// LOGGER.info("已清理缓存:project={}, node={}, pages=1-5", filess.getProjectId(), filess.getNodeId());


value = true;
@@ -316,6 +316,12 @@ public class FilesServiceImpl extends ServiceImpl<FilesMapper, Files> implements
if (containsInvalidCharacters(fileName)) {
return false;
}
// // TODO: evict the Redis cache on update
// for (int page = 1; page <= 5; page++) {
// String redisKey = "sdfiles_" + files.getProjectId() + "_" + files.getNodeId() + "_page_" + page;
// redisTemplate.delete(redisKey);
// }
// LOGGER.info("已清理缓存:project={}, node={}, pages=1-5", files.getProjectId(), files.getNodeId());

// before updating, check whether the file name in the table has changed; if it has, update MinIO first and then update the table record
Files filesData = filesMapper.selectById(files.getId());
@@ -328,12 +334,7 @@ public class FilesServiceImpl extends ServiceImpl<FilesMapper, Files> implements
LOGGER.error("表结构修改失败");
throw new RuntimeException("更新数据库失败");
}
// TODO: evict the Redis cache on update
for (int page = 1; page <= 5; page++) {
String redisKey = "sdfiles_" + files.getProjectId() + "_" + files.getNodeId() + "_page_" + page;
redisTemplate.delete(redisKey);
}
LOGGER.info("已清理缓存:project={}, node={}, pages=1-5", files.getProjectId(), files.getNodeId());


// rename the file in MinIO
boolean minioUpdateSuccess = updateMinioFileName(filesData, files);
@@ -348,11 +349,6 @@ public class FilesServiceImpl extends ServiceImpl<FilesMapper, Files> implements
} else {
// if the file name did not change, only update the database
int valueUpdate = filesMapper.updateById(files);
for (int page = 1; page <= 5; page++) {
String redisKey = "sdfiles_" + files.getProjectId() + "_" + files.getNodeId() + "_page_" + page;
redisTemplate.delete(redisKey);
}
LOGGER.info("已清理缓存:project={}, node={}, pages=1-5", files.getProjectId(), files.getNodeId());
return valueUpdate == 1;
}
@@ -383,11 +379,11 @@ public class FilesServiceImpl extends ServiceImpl<FilesMapper, Files> implements
int SuccessCount = 0, FailCount = 0, total = CollUtil.size(dataset);
// TODO: the most direct approach, loop over the records and delete them one by one
for (Files files : filesList) {
// TODO: evict the Redis cache on delete
for (int page = 1; page <= 5; page++) {
String redisKey = "sdfiles_" + files.getProjectId() + "_" + files.getNodeId() + "_page_" + page;
redisTemplate.delete(redisKey);
}
// // TODO: evict the Redis cache on delete
// for (int page = 1; page <= 5; page++) {
// String redisKey = "sdfiles_" + files.getProjectId() + "_" + files.getNodeId() + "_page_" + page;
// redisTemplate.delete(redisKey);
// }

List<BatchDeleteRequest.DeleteItem> deleteItemList = new ArrayList<>();
BatchDeleteRequest.DeleteItem deleteItemData = new BatchDeleteRequest.DeleteItem();
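The cache-eviction loop that appears in several FilesServiceImpl methods (deleting keys of the form "sdfiles_" + projectId + "_" + nodeId + "_page_" + 1..5) could be centralized. A minimal sketch, assuming a Spring RedisTemplate<String, Object> is available; the class and method names below are illustrative and not part of this commit.

import org.springframework.data.redis.core.RedisTemplate;

// Illustrative helper (not part of the commit): centralizes the
// "evict the first five cached pages for a node" pattern shown above.
class PageCacheEvictor {
    private final RedisTemplate<String, Object> redisTemplate;

    PageCacheEvictor(RedisTemplate<String, Object> redisTemplate) {
        this.redisTemplate = redisTemplate;
    }

    /** Deletes cached pages 1-5 for keys of the form prefix_ownerId_nodeId_page_N. */
    void evictFirstPages(String prefix, String ownerId, String nodeId) {
        for (int page = 1; page <= 5; page++) {
            String redisKey = prefix + "_" + ownerId + "_" + nodeId + "_page_" + page;
            redisTemplate.delete(redisKey); // no-op if the key does not exist
        }
    }
}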