Add Redis support and fix several defects

This commit is contained in:
lilin 2025-05-24 17:17:41 +08:00
parent fd2092ed2d
commit 8862c20532
7 changed files with 214 additions and 62 deletions

View File

@ -0,0 +1,29 @@
package com.yfd.platform.modules.config.redis;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.serializer.GenericJackson2JsonRedisSerializer;
import org.springframework.data.redis.serializer.StringRedisSerializer;
@Configuration
public class RedisConfig {
@Bean
public RedisTemplate<String, Object> redisTemplate(RedisConnectionFactory factory) {
RedisTemplate<String, Object> template = new RedisTemplate<>();
template.setConnectionFactory(factory);
// Serialize keys as plain strings
template.setKeySerializer(new StringRedisSerializer());
// Serialize values as JSON (requires the Jackson dependency on the classpath)
template.setValueSerializer(new GenericJackson2JsonRedisSerializer());
// Apply the same serializers to hash keys and hash values
template.setHashKeySerializer(new StringRedisSerializer());
template.setHashValueSerializer(new GenericJackson2JsonRedisSerializer());
template.afterPropertiesSet();
return template;
}
}
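A note on the value serializer: GenericJackson2JsonRedisSerializer embeds an @class attribute in the JSON it writes, which is what lets a cached object (such as the Page<Files> cached later in FilesServiceImpl) come back as its original type on read, provided that type is Jackson-friendly. A minimal round-trip sketch, assuming this template is the one injected elsewhere:

// Illustrative only: a value written through the template above round-trips as JSON with embedded type info.
public void roundTrip(RedisTemplate<String, Object> redisTemplate) {
    java.util.Map<String, Object> payload = new java.util.HashMap<>();
    payload.put("name", "demo");
    redisTemplate.opsForValue().set("demo:key", payload);
    Object restored = redisTemplate.opsForValue().get("demo:key"); // a HashMap again, rebuilt from the @class hint
}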

View File

@ -0,0 +1,36 @@
package com.yfd.platform.modules.config.redis;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.stereotype.Component;
import java.util.concurrent.TimeUnit;
@Component
public class RedisExample {
@Autowired
private RedisTemplate<String, Object> redisTemplate;
// Store a value
public void setValue(String key, Object value) {
redisTemplate.opsForValue().set(key, value);
// Set an expiration separately, e.g. 10 minutes
redisTemplate.expire(key, 10, TimeUnit.MINUTES);
}
// Fetch a value
public Object getValue(String key) {
return redisTemplate.opsForValue().get(key);
}
// Delete a key
public Boolean deleteKey(String key) {
return redisTemplate.delete(key);
}
// Write a field into a hash
public void setHashValue(String key, String field, Object value) {
redisTemplate.opsForHash().put(key, field, value);
}
}
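The set-then-expire pair in setValue above takes two round trips and briefly leaves the key without a TTL; ValueOperations also offers a single atomic call. A small alternative sketch under the same template (the method name is illustrative):

// Atomic alternative (sketch): the value and its expiration are written by one SET command.
public void setValueWithTtl(String key, Object value) {
    redisTemplate.opsForValue().set(key, value, java.time.Duration.ofMinutes(10));
}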

View File

@ -214,10 +214,10 @@ public class TsFilesServiceImpl extends ServiceImpl<TsFilesMapper, TsFiles> impl
//
queryWrapper.isNotNull("work_path");
queryWrapper.ne("work_path", "");
// Sort order
// folder/file flag first
queryWrapper.orderByDesc("is_file");
// then by upload time
queryWrapper.orderByDesc("upload_time");
// then by file name
queryWrapper.orderByDesc("file_name");
// Paged query
Page<TsFiles> tsFilesPage = tsFilesMapper.selectPage(page, queryWrapper);
if (tsFilesPage == null) {

View File

@ -661,6 +661,10 @@ public class TsNodesServiceImpl extends ServiceImpl<TsNodesMapper, TsNodes> impl
// TODO: fetch both collections first, diff out the folders and files missing from the database, and add them recursively
List<FileItemResult> fileItemList = fileService.fileList(fileListRequest.getPath());
if (fileItemList.size() == 0) {
throw new Exception("The project directory for this test task does not exist or has no project documents; please create the project directory and documents first.");
}
// Fetch the rows whose parent node is the root: task ID matches and parent node is "00"
List<TsNodes> tsNodes = tsNodesMapper.selectList(new LambdaQueryWrapper<TsNodes>().eq(TsNodes::getParentId, "00").eq(TsNodes::getTaskId, id));

View File

@ -30,6 +30,7 @@ import io.netty.channel.ChannelInboundHandlerAdapter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.stereotype.Service;
@ -73,6 +74,9 @@ public class FilesServiceImpl extends ServiceImpl<FilesMapper, Files> implements
@Resource
private FileChain fileChain;
@Autowired
private RedisTemplate redisTemplate;
/**********************************
* Purpose: paged query of document content for special document management
@ -88,6 +92,23 @@ public class FilesServiceImpl extends ServiceImpl<FilesMapper, Files> implements
***********************************/
@Override
public Page<Files> getFilesPage(String fileName, String startDate, String endDate, String keywords, String nodeId, String projectId, String fileName1, Page<Files> page) throws Exception {
// The Redis cache key is built from projectId + nodeId plus the page number
int currentPage = (int) page.getCurrent(); // current page number
// Only the first five pages are cached
if (currentPage >= 1 && currentPage <= 5) {
// Build the full Redis key including the page number
String redisKey = "sdfiles_" + projectId + "_" + nodeId + "_page_" + currentPage;
// Try the Redis cache first
Page<Files> cachedPage = (Page<Files>) redisTemplate.opsForValue().get(redisKey);
if (cachedPage != null) {
return cachedPage; // return the cached page directly
}
}
// First query all files under the path:
// look up the table by project ID and node ID to get a path; if it is not empty, call the MinIO file-list API and put the results in a collection
FileInfoResult fileInfoResult = null;
@ -152,7 +173,7 @@ public class FilesServiceImpl extends ServiceImpl<FilesMapper, Files> implements
}
queryWrapperfiles.eq(Files::getProjectId, projectId);// owning project ID
queryWrapperfiles.eq(Files::getNodeId, nodeId);// node ID
queryWrapperfiles.orderByDesc(Files::getUploadTime);// by upload time
queryWrapperfiles.orderByDesc(Files::getFileName);// by file name
// Paged query
Page<Files> filesPage = filesMapper.selectPage(page, queryWrapperfiles);
// Post-process the file content
@ -177,6 +198,13 @@ public class FilesServiceImpl extends ServiceImpl<FilesMapper, Files> implements
}
}
filesPage.setRecords(records);
// For the first five pages, store the result in Redis with a 30-minute TTL
if (currentPage >= 1 && currentPage <= 5) {
// The key must use the same format as the read and eviction paths
String redisKey = "sdfiles_" + projectId + "_" + nodeId + "_page_" + currentPage;
redisTemplate.opsForValue().set(redisKey, filesPage, 30, java.util.concurrent.TimeUnit.MINUTES);
}
return filesPage;
}
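The cache key format has to match exactly across the read, write, and eviction paths in this class; one way to keep them aligned would be a pair of small helpers along these lines (method names are illustrative, not part of the commit):

// Hypothetical helpers: a single source of truth for the per-page cache key and its eviction.
private String filesPageCacheKey(String projectId, String nodeId, int pageNo) {
    return "sdfiles_" + projectId + "_" + nodeId + "_page_" + pageNo;
}

private void evictFilesPageCache(String projectId, String nodeId) {
    for (int pageNo = 1; pageNo <= 5; pageNo++) {
        redisTemplate.delete(filesPageCacheKey(projectId, nodeId, pageNo));
    }
}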
@ -223,7 +251,7 @@ public class FilesServiceImpl extends ServiceImpl<FilesMapper, Files> implements
}
// Verify that the file was actually uploaded
String pathAndName = files.getFilePath()+"/" + name;
String pathAndName = files.getFilePath() + "/" + name;
// Fetch the file's metadata
AbstractBaseFileService<?> fileService = storageSourceContext.getByStorageKey("sdlocal");
FileItemResult fileItemResult = fileService.getFileItem(pathAndName);
@ -248,11 +276,19 @@ public class FilesServiceImpl extends ServiceImpl<FilesMapper, Files> implements
return ResponseResult.error("File size must be a valid number!");
}
}
if(filesToSave.size()>0){
if (filesToSave.size() > 0) {
// Insert them one by one
for(Files filess : filesToSave){
for (Files filess : filesToSave) {
int valueAdded = filesMapper.insert(filess);
if (valueAdded == 1) {
// TODO: after a successful insert, evict the cached pages in Redis
for (int page = 1; page <= 5; page++) {
String redisKey = "sdfiles_" + filess.getProjectId() + "_" + filess.getNodeId() + "_page_" + page;
redisTemplate.delete(redisKey);
}
LOGGER.info("Cache cleared: project={}, node={}, pages=1-5", filess.getProjectId(), filess.getNodeId());
value = true;
} else {
value = false;
@ -292,6 +328,12 @@ public class FilesServiceImpl extends ServiceImpl<FilesMapper, Files> implements
LOGGER.error("Failed to update the database record");
throw new RuntimeException("Database update failed");
}
// TODO: evict the cached pages in Redis after the update
for (int page = 1; page <= 5; page++) {
String redisKey = "sdfiles_" + files.getProjectId() + "_" + files.getNodeId() + "_page_" + page;
redisTemplate.delete(redisKey);
}
LOGGER.info("Cache cleared: project={}, node={}, pages=1-5", files.getProjectId(), files.getNodeId());
// Rename the file in MinIO
boolean minioUpdateSuccess = updateMinioFileName(filesData, files);
@ -306,8 +348,15 @@ public class FilesServiceImpl extends ServiceImpl<FilesMapper, Files> implements
} else {
// If the file name did not change, only update the database
int valueUpdate = filesMapper.updateById(files);
for (int page = 1; page <= 5; page++) {
String redisKey = "sdfiles_" + files.getProjectId() + "_" + files.getNodeId() + "_page_" + page;
redisTemplate.delete(redisKey);
}
LOGGER.info("Cache cleared: project={}, node={}, pages=1-5", files.getProjectId(), files.getNodeId());
return valueUpdate == 1;
}
}
// Validate that the file name contains no illegal characters
@ -334,6 +383,12 @@ public class FilesServiceImpl extends ServiceImpl<FilesMapper, Files> implements
int SuccessCount = 0, FailCount = 0, total = CollUtil.size(dataset);
// TODO: simplest approach for now: loop and delete the records one at a time
for (Files files : filesList) {
// TODO: evict the cached Redis pages when deleting
for (int page = 1; page <= 5; page++) {
String redisKey = "sdfiles_" + files.getProjectId() + "_" + files.getNodeId() + "_page_" + page;
redisTemplate.delete(redisKey);
}
List<BatchDeleteRequest.DeleteItem> deleteItemList = new ArrayList<>();
BatchDeleteRequest.DeleteItem deleteItemData = new BatchDeleteRequest.DeleteItem();
deleteItemData.setName(files.getFileName());

View File

@ -689,15 +689,15 @@ public class NodesServiceImpl extends ServiceImpl<NodesMapper, Nodes> implements
}
// Fetch the folder rows whose parent node is the root, queried by project ID and parent node
List<Nodes> nodesList = nodesMapper.selectList(new LambdaQueryWrapper<Nodes>().eq(Nodes::getParentId, "00").eq(Nodes::getProjectId, id));
// Step 1: collect the existing nodeName values
Set<String> existingNodeNames = nodesList.stream().map(Nodes::getNodeName).collect(Collectors.toSet());
// Step 2: filter out the folders that still need to be added to the database
List<FileItemResult> fileItemNewList = fileItemList.stream().filter(fileItem -> !existingNodeNames.contains(fileItem.getName())).collect(Collectors.toList());
firstLayerData(fileItemNewList, id);
// //Fetch the folder rows whose parent node is the root, queried by project ID and parent node
// List<Nodes> nodesList = nodesMapper.selectList(new LambdaQueryWrapper<Nodes>().eq(Nodes::getParentId, "00").eq(Nodes::getProjectId, id));
//
// // Step 1: collect the existing nodeName values
// Set<String> existingNodeNames = nodesList.stream().map(Nodes::getNodeName).collect(Collectors.toSet());
//
// // Step 2: filter out the folders that still need to be added to the database
// List<FileItemResult> fileItemNewList = fileItemList.stream().filter(fileItem -> !existingNodeNames.contains(fileItem.getName())).collect(Collectors.toList());
firstLayerData(fileItemList, id);
return "扫描完成";
}
@ -794,7 +794,7 @@ public class NodesServiceImpl extends ServiceImpl<NodesMapper, Nodes> implements
// Check whether it already exists; insert it if not
LambdaQueryWrapper<Nodes> queryWrapper = new LambdaQueryWrapper<>();
queryWrapper.eq(Nodes::getProjectId, projectId);
queryWrapper.eq(Nodes::getParentId, TOP_LEVEL_PARENT_NODE);
queryWrapper.eq(Nodes::getParentId, parentId);
queryWrapper.eq(Nodes::getNodeName, item.getName());
// queryWrapper.eq(Nodes::getNodeOrder, obtainNodeType(index));
Nodes nodeData = nodesMapper.selectOne(queryWrapper);
@ -1098,11 +1098,13 @@ public class NodesServiceImpl extends ServiceImpl<NodesMapper, Nodes> implements
// If the MD5 values differ, take action; if they match, just copy the source file to the target
if (StringUtils.isNoneEmpty(sourceLocalMD5, targetLocalMD5) && !sourceLocalMD5.equals(targetLocalMD5)) {
LOGGER.info("Path with differing MD5: " + sourcePath);
LOGGER.info("Target path to be replaced (differing MD5): " + targetPath);
//Copy the file to the target directory, rename the original file, and update the database record
//Rename the file under the target path
renameFile(targetPath);
String newTargetPath = renameFile(sourcePath);
//Copy the source file into the target directory
copyWithOverride(sourcePath, targetPath);
copyToDirectoryFile(newTargetPath, value + targetFolderPath + "/");
} else {
//Copy the source file into the target directory (TODO: this overwrites the target)
@ -1128,6 +1130,23 @@ public class NodesServiceImpl extends ServiceImpl<NodesMapper, Nodes> implements
return "上传完成";
}
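The sourceLocalMD5/targetLocalMD5 values compared above come from helpers that are not part of this diff; for reference, a local file's MD5 can be computed along these lines (a sketch that assumes commons-codec is available, which may not match the project's actual helper):

// Illustrative only: stream a local file through commons-codec's DigestUtils to get its MD5 hex digest.
private static String localFileMd5(String path) throws java.io.IOException {
    try (java.io.InputStream in = java.nio.file.Files.newInputStream(java.nio.file.Paths.get(path))) {
        return org.apache.commons.codec.digest.DigestUtils.md5Hex(in);
    }
}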
private static void copyToDirectoryFile(String source, String targetDirectory) throws IOException {
File srcFile = new File(source);
File destDir = new File(targetDirectory);
// Make sure the target directory exists; create it if it does not
if (!destDir.exists()) {
destDir.mkdirs();
}
// Build the target file: same file name as the source, placed in the target directory
File destFile = new File(destDir, srcFile.getName());
// Copy the file into the target directory
FileUtils.copyFile(srcFile, destFile);
System.out.println("Copied file to: " + destFile.getAbsolutePath());
}
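copyToDirectoryFile relies on commons-io's FileUtils and prints to stdout; the same copy can be done with the JDK alone, shown here purely as an alternative sketch:

// JDK-only variant (sketch): copies the source file into targetDirectory, creating the directory if needed.
private static void copyToDirectoryNio(String source, String targetDirectory) throws java.io.IOException {
    java.nio.file.Path src = java.nio.file.Paths.get(source);
    java.nio.file.Path destDir = java.nio.file.Paths.get(targetDirectory);
    java.nio.file.Files.createDirectories(destDir);
    java.nio.file.Files.copy(src, destDir.resolve(src.getFileName()),
            java.nio.file.StandardCopyOption.REPLACE_EXISTING);
}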
public String uploadProject(String projectName) throws Exception {
@ -1176,15 +1195,15 @@ public class NodesServiceImpl extends ServiceImpl<NodesMapper, Nodes> implements
// TODO: fetch both collections first, diff out the folders and files missing from the database, and add them recursively
List<FileItemResult> fileItemList = fileService.fileList(fileListRequest.getPath());
// Fetch the folder rows whose parent node is the root, queried by project ID and parent node
List<Nodes> nodesList = nodesMapper.selectList(new LambdaQueryWrapper<Nodes>().eq(Nodes::getParentId, "00").eq(Nodes::getParentId, project.getId()));
// Step 1: collect the existing nodeName values
Set<String> existingNodeNames = nodesList.stream().map(Nodes::getNodeName).collect(Collectors.toSet());
// Step 2: filter out the folders that still need to be added to the database
List<FileItemResult> fileItemNewList = fileItemList.stream().filter(fileItem -> !existingNodeNames.contains(fileItem.getName())).collect(Collectors.toList());
firstLayerData(fileItemNewList, project.getId());
// //Fetch the folder rows whose parent node is the root, queried by project ID and parent node
// List<Nodes> nodesList = nodesMapper.selectList(new LambdaQueryWrapper<Nodes>().eq(Nodes::getParentId, "00").eq(Nodes::getProjectId, project.getId()));
//
// // Step 1: collect the existing nodeName values
// Set<String> existingNodeNames = nodesList.stream().map(Nodes::getNodeName).collect(Collectors.toSet());
//
// // Step 2: filter out the folders that still need to be added to the database
// List<FileItemResult> fileItemNewList = fileItemList.stream().filter(fileItem -> !existingNodeNames.contains(fileItem.getName())).collect(Collectors.toList());
firstLayerData(fileItemList, project.getId());
return "扫描完成";
}

View File

@ -3,43 +3,52 @@ server:
tomcat:
connection-timeout: 300000
spring:
# Application name
application:
name: Project-plateform
datasource:
type: com.alibaba.druid.pool.DruidDataSource
druid:
master:
driverClassName: com.mysql.cj.jdbc.Driver
# url: jdbc:mysql://120.27.210.161:3306/testdb?useUnicode=true&characterEncoding=UTF8&rewriteBatchedStatements=true
# username: testdb
# password: 27CTfsyJmZRESmsa
url: jdbc:mysql://121.37.111.42:3306/filemanagedb?useUnicode=true&characterEncoding=UTF8&rewriteBatchedStatements=true
username: filemanagedb
password: GAPchydbCKYFjjAa
mvc:
pathmatch:
matching-strategy: ant_path_matcher
servlet:
multipart:
max-file-size: 50GB
max-request-size: 50GB
tomcat:
max-swallow-size: -1
connection-timeout: 86400000
max-http-form-post-size: -1
# Application name
application:
name: Project-plateform
datasource:
type: com.alibaba.druid.pool.DruidDataSource
druid:
master:
driverClassName: com.mysql.cj.jdbc.Driver
# url: jdbc:mysql://120.27.210.161:3306/testdb?useUnicode=true&characterEncoding=UTF8&rewriteBatchedStatements=true
# username: testdb
# password: 27CTfsyJmZRESmsa
url: jdbc:mysql://121.37.111.42:3306/filemanagedb?useUnicode=true&characterEncoding=UTF8&rewriteBatchedStatements=true
username: filemanagedb
password: GAPchydbCKYFjjAa
#url: jdbc:mysql://43.138.168.68:3306/filemanagedb?useUnicode=true&characterEncoding=UTF8&rewriteBatchedStatements=true
#username: root
#password: ylfw20230626@
mvc:
pathmatch:
matching-strategy: ant_path_matcher
servlet:
multipart:
max-file-size: 50GB
max-request-size: 50GB
tomcat:
max-swallow-size: -1
connection-timeout: 86400000
max-http-form-post-size: -1
data:
redis:
host: 127.0.0.1
port: 6379
password:
database: 0
logging:
file:
path: E:/projectJava/FileManage/logs/
name: logs/projectname.log
level:
com.genersoft.iot: debug
com.genersoft.iot.vmp.storager.dao: info
com.genersoft.iot.vmp.gb28181: info
file:
path: E:/projectJava/FileManage/logs/
name: logs/projectname.log
level:
com.genersoft.iot: debug
com.genersoft.iot.vmp.storager.dao: info
com.genersoft.iot.vmp.gb28181: info
# Online API docs: swagger-ui (recommended to disable in production)
swagger-ui:
enabled: false
enabled: false
mybatis-plus:
configuration:
default-enum-type-handler: com.yfd.platform.config.MybatisEnumTypeHandler
@ -73,7 +82,7 @@ ip:
local-parsing: true
file-space: # project document space
file-space: # project document space
system: D:\file\system\ # files uploaded individually
# File preview size