Add Redis support and fix several defects
This commit is contained in:
parent
fd2092ed2d
commit
8862c20532
RedisConfig.java (new file)
@@ -0,0 +1,29 @@
+package com.yfd.platform.modules.config.redis;
+
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.data.redis.connection.RedisConnectionFactory;
+import org.springframework.data.redis.core.RedisTemplate;
+import org.springframework.data.redis.serializer.GenericJackson2JsonRedisSerializer;
+import org.springframework.data.redis.serializer.StringRedisSerializer;
+
+@Configuration
+public class RedisConfig {
+
+    @Bean
+    public RedisTemplate<String, Object> redisTemplate(RedisConnectionFactory factory) {
+        RedisTemplate<String, Object> template = new RedisTemplate<>();
+        template.setConnectionFactory(factory);
+
+        // Keys are serialized as plain strings
+        template.setKeySerializer(new StringRedisSerializer());
+        // Values are serialized as JSON (requires the Jackson dependency)
+        template.setValueSerializer(new GenericJackson2JsonRedisSerializer());
+        // Hash keys/values use the same serializers
+        template.setHashKeySerializer(new StringRedisSerializer());
+        template.setHashValueSerializer(new GenericJackson2JsonRedisSerializer());
+
+        template.afterPropertiesSet();
+        return template;
+    }
+}
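With the serializers configured above, keys are stored as plain strings and values as JSON, so cached entries remain readable from redis-cli. A minimal round-trip sketch, not part of this commit; RedisRoundTripDemo and DemoUser are made-up names for illustration:

import java.util.concurrent.TimeUnit;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.stereotype.Component;

@Component
public class RedisRoundTripDemo {

    @Autowired
    private RedisTemplate<String, Object> redisTemplate;

    public void demo() {
        // Written as JSON, e.g. {"@class":"...DemoUser","name":"alice"}, under a plain-string key
        redisTemplate.opsForValue().set("demo_user_1", new DemoUser("alice"), 5, TimeUnit.MINUTES);

        // Read back as the original type because the serializer stores the @class attribute
        DemoUser cached = (DemoUser) redisTemplate.opsForValue().get("demo_user_1");
        System.out.println(cached.getName());
    }

    // Hypothetical value object; Jackson needs the no-args constructor and getters/setters
    public static class DemoUser {
        private String name;

        public DemoUser() {
        }

        public DemoUser(String name) {
            this.name = name;
        }

        public String getName() {
            return name;
        }

        public void setName(String name) {
            this.name = name;
        }
    }
}

GenericJackson2JsonRedisSerializer embeds the @class attribute in the stored JSON, which is what makes the cast on the read side work.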
RedisExample.java (new file)
@@ -0,0 +1,36 @@
+package com.yfd.platform.modules.config.redis;
+
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.data.redis.core.RedisTemplate;
+import org.springframework.stereotype.Component;
+
+import java.util.concurrent.TimeUnit;
+
+@Component
+public class RedisExample {
+
+    @Autowired
+    private RedisTemplate<String, Object> redisTemplate;
+
+    // Store a value
+    public void setValue(String key, Object value) {
+        redisTemplate.opsForValue().set(key, value);
+        // Set an expiry time (e.g. 10 minutes)
+        redisTemplate.expire(key, 10, TimeUnit.MINUTES);
+    }
+
+    // Read a value
+    public Object getValue(String key) {
+        return redisTemplate.opsForValue().get(key);
+    }
+
+    // Delete a key
+    public Boolean deleteKey(String key) {
+        return redisTemplate.delete(key);
+    }
+
+    // Work with a hash
+    public void setHashValue(String key, String field, Object value) {
+        redisTemplate.opsForHash().put(key, field, value);
+    }
+}
@@ -214,10 +214,10 @@ public class TsFilesServiceImpl extends ServiceImpl<TsFilesMapper, TsFiles> impl
 //
 queryWrapper.isNotNull("work_path");
 queryWrapper.ne("work_path", "");
-// Sort
+// File or folder
 queryWrapper.orderByDesc("is_file");
-// Time
+// File name
-queryWrapper.orderByDesc("upload_time");
+queryWrapper.orderByDesc("file_name");
 // Paged query
 Page<TsFiles> tsFilesPage = tsFilesMapper.selectPage(page, queryWrapper);
 if (tsFilesPage == null) {
@@ -661,6 +661,10 @@ public class TsNodesServiceImpl extends ServiceImpl<TsNodesMapper, TsNodes> impl
 // TODO: first get both collections, find the folders and files missing from the database, and add them recursively
 List<FileItemResult> fileItemList = fileService.fileList(fileListRequest.getPath());
+
+if (fileItemList.size() == 0) {
+    throw new Exception("该试验任务管理项目目录不存在或没有项目文档,请先建立项目目录和文档。");
+}
+
 // Fetch the records whose parent node is "00" for this task ID
 List<TsNodes> tsNodes = tsNodesMapper.selectList(new LambdaQueryWrapper<TsNodes>().eq(TsNodes::getParentId, "00").eq(TsNodes::getTaskId, id));
@@ -30,6 +30,7 @@ import io.netty.channel.ChannelInboundHandlerAdapter;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.data.redis.core.RedisTemplate;
 import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
 import org.springframework.security.core.context.SecurityContextHolder;
 import org.springframework.stereotype.Service;
@@ -73,6 +74,9 @@ public class FilesServiceImpl extends ServiceImpl<FilesMapper, Files> implements
 @Resource
 private FileChain fileChain;
+
+@Autowired
+private RedisTemplate redisTemplate;
+
 /**********************************
  * Purpose: paged query of special document management - document contents
@@ -88,6 +92,23 @@ public class FilesServiceImpl extends ServiceImpl<FilesMapper, Files> implements
 ***********************************/
 @Override
 public Page<Files> getFilesPage(String fileName, String startDate, String endDate, String keywords, String nodeId, String projectId, String fileName1, Page<Files> page) throws Exception {
+    // Base part of the Redis cache key (nodeId + projectId)
+    int currentPage = (int) page.getCurrent(); // current page number
+    // Check whether this is one of the first five pages
+    if (currentPage >= 1 && currentPage <= 5) {
+        // Build the full Redis key including the page number
+        String redisKey = "sdfiles_" + projectId + "_" + nodeId + "_page_" + currentPage;
+
+        // Try to read the cached page from Redis
+        Page<Files> cachedPage = (Page<Files>) redisTemplate.opsForValue().get(redisKey);
+        if (cachedPage != null) {
+            return cachedPage; // return the cached data directly
+        }
+    }
+
 // First query all files under the path
 // First look up the table by project ID and node ID to get a path; if it is not empty, call the MinIO file-list API and put the results into a collection
 FileInfoResult fileInfoResult = null;
@@ -152,7 +173,7 @@ public class FilesServiceImpl extends ServiceImpl<FilesMapper, Files> implements
 }
 queryWrapperfiles.eq(Files::getProjectId, projectId); // project ID
 queryWrapperfiles.eq(Files::getNodeId, nodeId); // node ID
-queryWrapperfiles.orderByDesc(Files::getUploadTime); // upload time
+queryWrapperfiles.orderByDesc(Files::getFileName); // file name
 // Paged query
 Page<Files> filesPage = filesMapper.selectPage(page, queryWrapperfiles);
 // Process the file contents
@@ -177,6 +198,13 @@ public class FilesServiceImpl extends ServiceImpl<FilesMapper, Files> implements
 }
 }
 filesPage.setRecords(records);
+
+// For the first five pages, store the result in Redis for 30 minutes
+// (same key format as the read/evict paths; requires a java.util.concurrent.TimeUnit import)
+if (currentPage >= 1 && currentPage <= 5) {
+    String redisKey = "sdfiles_" + projectId + "_" + nodeId + "_page_" + currentPage;
+    redisTemplate.opsForValue().set(redisKey, filesPage, 30, TimeUnit.MINUTES);
+}
+
 return filesPage;
 }
@@ -223,7 +251,7 @@ public class FilesServiceImpl extends ServiceImpl<FilesMapper, Files> implements
 }

 // Verify the file was actually uploaded
-String pathAndName = files.getFilePath()+"/" + name;
+String pathAndName = files.getFilePath() + "/" + name;
 // Prepare to fetch the file information
 AbstractBaseFileService<?> fileService = storageSourceContext.getByStorageKey("sdlocal");
 FileItemResult fileItemResult = fileService.getFileItem(pathAndName);
@@ -248,11 +276,19 @@ public class FilesServiceImpl extends ServiceImpl<FilesMapper, Files> implements
 return ResponseResult.error("文件大小必须是有效的数字!");
 }
 }
-if(filesToSave.size()>0){
+if (filesToSave.size() > 0) {
 // Insert the records one by one
-for(Files filess : filesToSave){
+for (Files filess : filesToSave) {
 int valueAdded = filesMapper.insert(filess);
 if (valueAdded == 1) {
+    // TODO: evict the Redis cache after a successful insert
+    for (int page = 1; page <= 5; page++) {
+        String redisKey = "sdfiles_" + filess.getProjectId() + "_" + filess.getNodeId() + "_page_" + page;
+        redisTemplate.delete(redisKey);
+    }
+    LOGGER.info("已清理缓存:project={}, node={}, pages=1-5", filess.getProjectId(), filess.getNodeId());
+
 value = true;
 } else {
 value = false;
@@ -292,6 +328,12 @@ public class FilesServiceImpl extends ServiceImpl<FilesMapper, Files> implements
 LOGGER.error("表结构修改失败");
 throw new RuntimeException("更新数据库失败");
 }
+// TODO: evict the Redis cache after the update
+for (int page = 1; page <= 5; page++) {
+    String redisKey = "sdfiles_" + files.getProjectId() + "_" + files.getNodeId() + "_page_" + page;
+    redisTemplate.delete(redisKey);
+}
+LOGGER.info("已清理缓存:project={}, node={}, pages=1-5", files.getProjectId(), files.getNodeId());
+
 // Rename the file in MinIO
 boolean minioUpdateSuccess = updateMinioFileName(filesData, files);
@@ -306,8 +348,15 @@ public class FilesServiceImpl extends ServiceImpl<FilesMapper, Files> implements
 } else {
 // If the file name did not change, only update the database
 int valueUpdate = filesMapper.updateById(files);
+for (int page = 1; page <= 5; page++) {
+    String redisKey = "sdfiles_" + files.getProjectId() + "_" + files.getNodeId() + "_page_" + page;
+    redisTemplate.delete(redisKey);
+}
+LOGGER.info("已清理缓存:project={}, node={}, pages=1-5", files.getProjectId(), files.getNodeId());
 return valueUpdate == 1;
 }


 }

 // Check whether the file name contains illegal characters
@@ -334,6 +383,12 @@ public class FilesServiceImpl extends ServiceImpl<FilesMapper, Files> implements
 int SuccessCount = 0, FailCount = 0, total = CollUtil.size(dataset);
 // TODO: simplest approach - loop over the records and delete them one by one
 for (Files files : filesList) {
+    // TODO: evict the Redis cache when deleting
+    for (int page = 1; page <= 5; page++) {
+        String redisKey = "sdfiles_" + files.getProjectId() + "_" + files.getNodeId() + "_page_" + page;
+        redisTemplate.delete(redisKey);
+    }
+
 List<BatchDeleteRequest.DeleteItem> deleteItemList = new ArrayList<>();
 BatchDeleteRequest.DeleteItem deleteItemData = new BatchDeleteRequest.DeleteItem();
 deleteItemData.setName(files.getFileName());
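The cache key "sdfiles_<projectId>_<nodeId>_page_<n>" is now built in several places (the read path, the write path, and the eviction loops on insert, update and delete), and any drift in the format quietly turns the cache into a no-op. A small helper along these lines would keep the key format and the 1-5 page range in one spot; this is a hypothetical refactor, not something the commit contains, and FilesPageCacheKeys is an invented name:

import org.springframework.data.redis.core.RedisTemplate;

public final class FilesPageCacheKeys {

    // Only pages 1..5 of the listing are cached
    public static final int MAX_CACHED_PAGE = 5;

    private FilesPageCacheKeys() {
    }

    // Single definition of the key format shared by read, write and eviction
    public static String pageKey(String projectId, String nodeId, int page) {
        return "sdfiles_" + projectId + "_" + nodeId + "_page_" + page;
    }

    // Drop every cached page for one project/node combination
    public static void evict(RedisTemplate<String, Object> redisTemplate, String projectId, String nodeId) {
        for (int page = 1; page <= MAX_CACHED_PAGE; page++) {
            redisTemplate.delete(pageKey(projectId, nodeId, page));
        }
    }
}

Each of the three eviction blocks above would then shrink to a single FilesPageCacheKeys.evict(redisTemplate, files.getProjectId(), files.getNodeId()) call.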
@@ -689,15 +689,15 @@ public class NodesServiceImpl extends ServiceImpl<NodesMapper, Nodes> implements
 }


-// Fetch folder records whose parent node is "00", queried by project ID and parent node
-List<Nodes> nodesList = nodesMapper.selectList(new LambdaQueryWrapper<Nodes>().eq(Nodes::getParentId, "00").eq(Nodes::getProjectId, id));
-// Step 1: collect the existing nodeName values
-Set<String> existingNodeNames = nodesList.stream().map(Nodes::getNodeName).collect(Collectors.toSet());
-// Step 2: filter out the new entries - folders that still need to be added to the database
-List<FileItemResult> fileItemNewList = fileItemList.stream().filter(fileItem -> !existingNodeNames.contains(fileItem.getName())).collect(Collectors.toList());
-firstLayerData(fileItemNewList, id);
+// // Fetch folder records whose parent node is "00", queried by project ID and parent node
+// List<Nodes> nodesList = nodesMapper.selectList(new LambdaQueryWrapper<Nodes>().eq(Nodes::getParentId, "00").eq(Nodes::getProjectId, id));
+//
+// // Step 1: collect the existing nodeName values
+// Set<String> existingNodeNames = nodesList.stream().map(Nodes::getNodeName).collect(Collectors.toSet());
+//
+// // Step 2: filter out the new entries - folders that still need to be added to the database
+// List<FileItemResult> fileItemNewList = fileItemList.stream().filter(fileItem -> !existingNodeNames.contains(fileItem.getName())).collect(Collectors.toList());
+firstLayerData(fileItemList, id);

 return "扫描完成";
 }
@@ -794,7 +794,7 @@ public class NodesServiceImpl extends ServiceImpl<NodesMapper, Nodes> implements
 // First check whether it exists; if not, insert it
 LambdaQueryWrapper<Nodes> queryWrapper = new LambdaQueryWrapper<>();
 queryWrapper.eq(Nodes::getProjectId, projectId);
-queryWrapper.eq(Nodes::getParentId, TOP_LEVEL_PARENT_NODE);
+queryWrapper.eq(Nodes::getParentId, parentId);
 queryWrapper.eq(Nodes::getNodeName, item.getName());
 // queryWrapper.eq(Nodes::getNodeOrder, obtainNodeType(index));
 Nodes nodeData = nodesMapper.selectOne(queryWrapper);
@@ -1098,11 +1098,13 @@ public class NodesServiceImpl extends ServiceImpl<NodesMapper, Nodes> implements

 // If the MD5 values differ, handle it below; if they match, just copy the source file to the target
 if (StringUtils.isNoneEmpty(sourceLocalMD5, targetLocalMD5) && !sourceLocalMD5.equals(targetLocalMD5)) {
+    LOGGER.info("MD5值不一样的路径" + sourcePath);
+    LOGGER.info("MD5值不一样的替换路径" + targetPath);
 // Copy the file to the target directory, rename the original file and update the database records
 // Rename the file under the target path
-renameFile(targetPath);
+String newTargetPath = renameFile(sourcePath);
 // Copy the source file into the target directory
-copyWithOverride(sourcePath, targetPath);
+copyToDirectoryFile(newTargetPath, value + targetFolderPath + "/");

 } else {
 // Copy the source file into the target directory; TODO: this overwrites the target
@@ -1128,6 +1130,23 @@ public class NodesServiceImpl extends ServiceImpl<NodesMapper, Nodes> implements
 return "上传完成";
 }
+
+private static void copyToDirectoryFile(String source, String targetDirectory) throws IOException {
+    File srcFile = new File(source);
+    File destDir = new File(targetDirectory);
+
+    // Make sure the target directory exists; create it if it does not
+    if (!destDir.exists()) {
+        destDir.mkdirs();
+    }
+
+    // Target file object (source file name inside the target directory)
+    File destFile = new File(destDir, srcFile.getName());
+
+    // Copy the file into the target directory
+    FileUtils.copyFile(srcFile, destFile);
+    System.out.println("成功复制文件到: " + destFile.getAbsolutePath());
+}
+
 public String uploadProject(String projectName) throws Exception {
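copyToDirectoryFile relies on FileUtils.copyFile, presumably org.apache.commons.io.FileUtils, which overwrites an existing target file. If the commons-io dependency is not wanted, a java.nio.file sketch with the same behaviour (an alternative for comparison, not what the commit uses; NioFileCopy is an invented name):

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;

final class NioFileCopy {

    private NioFileCopy() {
    }

    // Copy a file into a directory, keeping the source file name (NIO variant of copyToDirectoryFile)
    static void copyToDirectory(String source, String targetDirectory) throws IOException {
        Path srcFile = Paths.get(source);
        Path destDir = Paths.get(targetDirectory);

        // Create the target directory (and any missing parents); no-op if it already exists
        Files.createDirectories(destDir);

        // Overwrite any existing file with the same name, matching FileUtils.copyFile behaviour
        Files.copy(srcFile, destDir.resolve(srcFile.getFileName()), StandardCopyOption.REPLACE_EXISTING);
    }
}

Files.createDirectories replaces the explicit exists()/mkdirs() check, since it simply returns when the directory is already there.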
@@ -1176,15 +1195,15 @@ public class NodesServiceImpl extends ServiceImpl<NodesMapper, Nodes> implements
 // TODO: first get both collections, find the folders and files missing from the database, and add them recursively
 List<FileItemResult> fileItemList = fileService.fileList(fileListRequest.getPath());

-// Fetch folder records whose parent node is "00", queried by project ID and parent node
-List<Nodes> nodesList = nodesMapper.selectList(new LambdaQueryWrapper<Nodes>().eq(Nodes::getParentId, "00").eq(Nodes::getParentId, project.getId()));
-// Step 1: collect the existing nodeName values
-Set<String> existingNodeNames = nodesList.stream().map(Nodes::getNodeName).collect(Collectors.toSet());
-// Step 2: filter out the new entries - folders that still need to be added to the database
-List<FileItemResult> fileItemNewList = fileItemList.stream().filter(fileItem -> !existingNodeNames.contains(fileItem.getName())).collect(Collectors.toList());
-firstLayerData(fileItemNewList, project.getId());
+// // Fetch folder records whose parent node is "00", queried by project ID and parent node
+// List<Nodes> nodesList = nodesMapper.selectList(new LambdaQueryWrapper<Nodes>().eq(Nodes::getParentId, "00").eq(Nodes::getProjectId, project.getId()));
+//
+// // Step 1: collect the existing nodeName values
+// Set<String> existingNodeNames = nodesList.stream().map(Nodes::getNodeName).collect(Collectors.toSet());
+//
+// // Step 2: filter out the new entries - folders that still need to be added to the database
+// List<FileItemResult> fileItemNewList = fileItemList.stream().filter(fileItem -> !existingNodeNames.contains(fileItem.getName())).collect(Collectors.toList());
+firstLayerData(fileItemList, project.getId());

 return "扫描完成";
 }
@@ -3,43 +3,52 @@ server:
   tomcat:
     connection-timeout: 300000
 spring:
   # Application name
   application:
     name: Project-plateform
   datasource:
     type: com.alibaba.druid.pool.DruidDataSource
     druid:
       master:
         driverClassName: com.mysql.cj.jdbc.Driver
         # url: jdbc:mysql://120.27.210.161:3306/testdb?useUnicode=true&characterEncoding=UTF8&rewriteBatchedStatements=true
         # username: testdb
         # password: 27CTfsyJmZRESmsa
         url: jdbc:mysql://121.37.111.42:3306/filemanagedb?useUnicode=true&characterEncoding=UTF8&rewriteBatchedStatements=true
         username: filemanagedb
         password: GAPchydbCKYFjjAa
+        #url: jdbc:mysql://43.138.168.68:3306/filemanagedb?useUnicode=true&characterEncoding=UTF8&rewriteBatchedStatements=true
+        #username: root
+        #password: ylfw20230626@
   mvc:
     pathmatch:
       matching-strategy: ant_path_matcher
   servlet:
     multipart:
       max-file-size: 50GB
       max-request-size: 50GB
   tomcat:
     max-swallow-size: -1
     connection-timeout: 86400000
     max-http-form-post-size: -1
+  data:
+    redis:
+      host: 127.0.0.1
+      port: 6379
+      password:
+      database: 0
 logging:
   file:
     path: E:/projectJava/FileManage/logs/
     name: logs/projectname.log
   level:
     com.genersoft.iot: debug
     com.genersoft.iot.vmp.storager.dao: info
     com.genersoft.iot.vmp.gb28181: info

 # Online docs: swagger-ui (recommended to disable in production)
 swagger-ui:
   enabled: false
 mybatis-plus:
   configuration:
     default-enum-type-handler: com.yfd.platform.config.MybatisEnumTypeHandler
@@ -73,7 +82,7 @@ ip:
   local-parsing: true


 file-space: # project document space
   system: D:\file\system\ # files uploaded individually

 # File preview size
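One caveat on the new configuration block: the spring.data.redis prefix is the one read by Spring Boot 3.x auto-configuration, while Spring Boot 2.x expects the same settings under spring.redis, so the block may need to move depending on the project's Boot version. A startup ping like the following (a minimal sketch, not part of the commit; RedisStartupCheck is an invented name) makes a wrong host, port or password fail fast:

import org.springframework.boot.CommandLineRunner;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.redis.connection.RedisConnection;
import org.springframework.data.redis.connection.RedisConnectionFactory;

@Configuration
public class RedisStartupCheck {

    // Logs the PING reply at startup so a misconfigured application.yml is noticed immediately
    @Bean
    public CommandLineRunner redisPing(RedisConnectionFactory factory) {
        return args -> {
            RedisConnection connection = factory.getConnection();
            try {
                System.out.println("Redis ping: " + connection.ping());
            } finally {
                connection.close();
            }
        };
    }
}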