ソースを参照

1.更新 rpc 接口实现
2.添加 spring cache

reghao 2 年 前
コミット
94b2c1a1f9
26 ファイル変更459 行追加193 行削除
  1. 2 18
      dfs-store/pom.xml
  2. 0 36
      dfs-store/src/main/java/cn/reghao/dfs/store/cache/CacheConfig.java
  3. 40 0
      dfs-store/src/main/java/cn/reghao/dfs/store/config/CacheConfig.java
  4. 1 1
      dfs-store/src/main/java/cn/reghao/dfs/store/controller/ObjectMultipartUploadController.java
  5. 1 1
      dfs-store/src/main/java/cn/reghao/dfs/store/controller/ObjectUploadController.java
  6. 5 5
      dfs-store/src/main/java/cn/reghao/dfs/store/db/mapper/FileMetaMapper.java
  7. 42 0
      dfs-store/src/main/java/cn/reghao/dfs/store/db/repository/FileMetaRepository.java
  8. 6 4
      dfs-store/src/main/java/cn/reghao/dfs/store/inerceptor/AccessLogInterceptor.java
  9. 2 2
      dfs-store/src/main/java/cn/reghao/dfs/store/inerceptor/JwtTokenFilter.java
  10. 8 14
      dfs-store/src/main/java/cn/reghao/dfs/store/model/po/FileMeta.java
  11. 72 0
      dfs-store/src/main/java/cn/reghao/dfs/store/rpc/DiskServiceImpl.java
  12. 0 46
      dfs-store/src/main/java/cn/reghao/dfs/store/rpc/FileServiceImpl.java
  13. 48 0
      dfs-store/src/main/java/cn/reghao/dfs/store/rpc/TrashServiceImpl.java
  14. 5 25
      dfs-store/src/main/java/cn/reghao/dfs/store/service/GetObjectService.java
  15. 5 1
      dfs-store/src/main/java/cn/reghao/dfs/store/service/PutObjectService.java
  16. 9 6
      dfs-store/src/main/java/cn/reghao/dfs/store/task/ConvertTask.java
  17. 3 4
      dfs-store/src/main/java/cn/reghao/dfs/store/task/VideoFileProcessor.java
  18. 109 0
      dfs-store/src/main/java/cn/reghao/dfs/store/util/media/FFmpegWrapper.java
  19. 17 0
      dfs-store/src/main/java/cn/reghao/dfs/store/util/media/po/AudioProps.java
  20. 23 0
      dfs-store/src/main/java/cn/reghao/dfs/store/util/media/po/MediaProps.java
  21. 19 0
      dfs-store/src/main/java/cn/reghao/dfs/store/util/media/po/VideoProps.java
  22. 6 12
      dfs-store/src/main/java/cn/reghao/dfs/store/util/s3/AWS4Signer.java
  23. 30 1
      dfs-store/src/main/java/cn/reghao/dfs/store/util/s3/OssUtil.java
  24. 1 7
      dfs-store/src/main/java/cn/reghao/dfs/store/util/store/LoadBalancer.java
  25. 5 6
      dfs-store/src/main/resources/mapper/FileMetaMapper.xml
  26. 0 4
      dfs-store/src/test/java/FileMetaTest.java

+ 2 - 18
dfs-store/pom.xml

@@ -36,23 +36,12 @@
             <artifactId>tool</artifactId>
             <version>1.0.0-SNAPSHOT</version>
         </dependency>
-        <dependency>
-            <groupId>cn.reghao.jutil</groupId>
-            <artifactId>media</artifactId>
-            <version>1.0.0-SNAPSHOT</version>
-        </dependency>
         <dependency>
             <groupId>cn.reghao.jutil</groupId>
             <artifactId>web</artifactId>
             <version>1.0.0-SNAPSHOT</version>
         </dependency>
 
-        <dependency>
-            <groupId>cn.reghao.oss</groupId>
-            <artifactId>oss-common</artifactId>
-            <version>1.0.0-SNAPSHOT</version>
-        </dependency>
-
         <dependency>
             <groupId>cn.reghao.dfs</groupId>
             <artifactId>dfs-api</artifactId>
@@ -95,18 +84,13 @@
             <scope>test</scope>
         </dependency>
 
-        <dependency>
-            <groupId>org.springframework.boot</groupId>
-            <artifactId>spring-boot-starter-data-redis</artifactId>
-        </dependency>
-
         <dependency>
             <groupId>org.springframework.boot</groupId>
             <artifactId>spring-boot-starter-cache</artifactId>
         </dependency>
         <dependency>
-            <groupId>com.github.ben-manes.caffeine</groupId>
-            <artifactId>caffeine</artifactId>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-data-redis</artifactId>
         </dependency>
 
         <dependency>

+ 0 - 36
dfs-store/src/main/java/cn/reghao/dfs/store/cache/CacheConfig.java

@@ -1,36 +0,0 @@
-package cn.reghao.dfs.store.cache;
-
-import com.github.benmanes.caffeine.cache.*;
-import org.springframework.cache.CacheManager;
-import org.springframework.cache.annotation.EnableCaching;
-import org.springframework.cache.caffeine.CaffeineCacheManager;
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
-
-import java.util.concurrent.TimeUnit;
-
-/**
- * 缓存配置
- *
- * @author reghao
- * @date 2020-03-06 10:58:04
- */
-@EnableCaching
-@Configuration
-public class CacheConfig {
-    @Bean(name = "caffeineCacheManager")
-    public CacheManager cacheManager() {
-        CaffeineCacheManager cacheManager = new CaffeineCacheManager();
-        Caffeine<Object, Object> caffeineCache = caffeineCache();
-        cacheManager.setCaffeine(caffeineCache);
-        return cacheManager;
-    }
-
-    @Bean("caffeineCache")
-    public Caffeine<Object, Object> caffeineCache() {
-        return Caffeine.newBuilder()
-                .initialCapacity(1000)
-                .maximumSize(10_000)
-                .expireAfterAccess(365, TimeUnit.DAYS);
-    }
-}

+ 40 - 0
dfs-store/src/main/java/cn/reghao/dfs/store/config/CacheConfig.java

@@ -0,0 +1,40 @@
+package cn.reghao.dfs.store.config;
+
+import org.springframework.cache.CacheManager;
+import org.springframework.cache.annotation.EnableCaching;
+
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.data.redis.cache.RedisCacheConfiguration;
+import org.springframework.data.redis.cache.RedisCacheManager;
+import org.springframework.data.redis.core.RedisTemplate;
+import org.springframework.data.redis.serializer.RedisSerializationContext;
+
+import java.time.Duration;
+import java.util.Objects;
+
+/**
+ * 缓存配置
+ *
+ * @author reghao
+ * @date 2023-05-22 16:31:04
+ */
+@EnableCaching
+@Configuration
+public class CacheConfig {
+    @Bean
+    public CacheManager cacheManager(RedisTemplate<String, Object> template) {
+        RedisCacheConfiguration config = RedisCacheConfiguration
+                .defaultCacheConfig()
+                .serializeKeysWith(RedisSerializationContext.SerializationPair.fromSerializer(template.getStringSerializer()))
+                .serializeValuesWith(RedisSerializationContext.SerializationPair.fromSerializer(template.getValueSerializer()))
+                .disableCachingNullValues()
+                .entryTtl(Duration.ofHours(1));
+
+        return RedisCacheManager.RedisCacheManagerBuilder
+                        .fromConnectionFactory(Objects.requireNonNull(template.getConnectionFactory()))
+                        .cacheDefaults(config)
+                        .transactionAware()
+                        .build();
+    }
+}

+ 1 - 1
dfs-store/src/main/java/cn/reghao/dfs/store/controller/ObjectMultipartUploadController.java

@@ -2,7 +2,7 @@ package cn.reghao.dfs.store.controller;
 
 import cn.reghao.dfs.store.model.s3.*;
 import cn.reghao.dfs.store.service.FileStoreService;
-import cn.reghao.dfs.store.util.OssUtil;
+import cn.reghao.dfs.store.util.s3.OssUtil;
 import cn.reghao.jutil.jdk.converter.ByteHex;
 import cn.reghao.jutil.jdk.result.WebResult;
 import cn.reghao.jutil.jdk.security.Base64Util;

+ 1 - 1
dfs-store/src/main/java/cn/reghao/dfs/store/controller/ObjectUploadController.java

@@ -4,12 +4,12 @@ import cn.reghao.dfs.store.model.dto.PostObject;
 import cn.reghao.dfs.store.model.s3.PostResponse;
 import cn.reghao.dfs.store.redis.ds.RedisString;
 import cn.reghao.dfs.store.service.PutObjectService;
+import cn.reghao.dfs.store.util.s3.OssUtil;
 import cn.reghao.jutil.jdk.converter.ByteHex;
 import cn.reghao.jutil.jdk.result.WebResult;
 import cn.reghao.jutil.jdk.security.Base64Util;
 import cn.reghao.jutil.jdk.security.DigestUtil;
 import cn.reghao.jutil.web.ServletUtil;
-import cn.reghao.oss.common.OssUtil;
 import org.springframework.http.ResponseEntity;
 import org.springframework.validation.annotation.Validated;
 import org.springframework.web.bind.annotation.PostMapping;

+ 5 - 5
dfs-store/src/main/java/cn/reghao/dfs/store/db/mapper/FileMetaMapper.java

@@ -1,7 +1,7 @@
 package cn.reghao.dfs.store.db.mapper;
 
 import cn.reghao.dfs.api.dto.DeleteFile;
-import cn.reghao.dfs.api.dto.FileInfoVO;
+import cn.reghao.dfs.api.dto.FileInfo;
 import cn.reghao.dfs.store.model.po.FileMeta;
 import cn.reghao.dfs.store.model.vo.ObjectMeta;
 import cn.reghao.jutil.jdk.db.BaseMapper;
@@ -37,12 +37,12 @@ public interface FileMetaMapper extends BaseMapper<FileMeta> {
     void deleteByObjectId(String objectId);
 
     int countByPid(String pid);
-    List<FileInfoVO> findFileInfoByPage(Page page, @Param("pid") String pid);
-    List<FileInfoVO> findFileInfo1ByPage(Page page, @Param("pid") String pid);
-    List<FileInfoVO> findDirectories(String pid);
+    List<FileInfo> findFileInfoByPage(Page page, @Param("pid") String pid);
+    List<FileInfo> findFileInfo1ByPage(Page page, @Param("pid") String pid);
+    List<FileInfo> findDirectories(String pid);
 
     int countByKeyword(String keyword);
-    List<FileInfoVO> findKeywordByPage(Page page, @Param("keyword") String keyword);
+    List<FileInfo> findKeywordByPage(Page page, @Param("keyword") String keyword);
 
     int countDeletedFiles();
     List<DeleteFile> findDeletedFileByPage(Page page);

+ 42 - 0
dfs-store/src/main/java/cn/reghao/dfs/store/db/repository/FileMetaRepository.java

@@ -0,0 +1,42 @@
+package cn.reghao.dfs.store.db.repository;
+
+import cn.reghao.dfs.store.db.mapper.FileMetaMapper;
+import cn.reghao.dfs.store.model.po.FileMeta;
+import cn.reghao.dfs.store.model.vo.ObjectMeta;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.cache.annotation.CacheEvict;
+import org.springframework.cache.annotation.CachePut;
+import org.springframework.cache.annotation.Cacheable;
+import org.springframework.stereotype.Service;
+
+/**
+ * @author reghao
+ * @date 2023-05-22 16:48:40
+ */
+@Slf4j
+@Service
+public class FileMetaRepository {
+    private final FileMetaMapper fileMetaMapper;
+
+    public FileMetaRepository(FileMetaMapper fileMetaMapper) {
+        this.fileMetaMapper = fileMetaMapper;
+    }
+
+    @CachePut
+    public void save(FileMeta fileMeta) {
+    }
+
+    public void update(FileMeta fileMeta) {
+    }
+
+    @CacheEvict(cacheNames = "objectMeta", key = "#fileMeta.objectName")
+    public void delete(FileMeta fileMeta) {
+    }
+
+    @Cacheable(cacheNames = "objectMeta", key = "#objectName")
+    public ObjectMeta getObjectMeta(String objectName) {
+        log.info("查找 db");
+        ObjectMeta objectMeta = fileMetaMapper.findObjectMeta(objectName);
+        return objectMeta;
+    }
+}

+ 6 - 4
dfs-store/src/main/java/cn/reghao/dfs/store/inerceptor/AccessLogInterceptor.java

@@ -3,6 +3,7 @@ package cn.reghao.dfs.store.inerceptor;
 import cn.reghao.dfs.store.db.mapper.FileMetaMapper;
 import cn.reghao.dfs.store.model.constant.ObjectACL;
 import cn.reghao.dfs.store.model.po.FileMeta;
+import cn.reghao.tnb.account.api.dto.UserPermission;
 import cn.reghao.tnb.account.api.iface.UserQuery;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.dubbo.config.annotation.DubboReference;
@@ -36,7 +37,8 @@ public class AccessLogInterceptor implements HandlerInterceptor {
     @Override
     public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler)
             throws Exception {
-        String uri = request.getRequestURI();
+        return true;
+        /*String uri = request.getRequestURI();
         String method = request.getMethod();
         if (method.equals("OPTIONS")) {
             return true;
@@ -62,9 +64,9 @@ public class AccessLogInterceptor implements HandlerInterceptor {
         String sign = request.getParameter("sign");
 
         long userId = Long.parseLong(request.getHeader("x-user-id"));
-        //UserPermission userPermission = userQuery.getUserPermission(userId);
+        UserPermission userPermission = userQuery.getUserPermission(userId);
         log.info("{}: {} -> {}", userId, method, uri);
-        return false;
+        return false;*/
     }
 
     @Override
@@ -78,6 +80,6 @@ public class AccessLogInterceptor implements HandlerInterceptor {
                                 Object handler, @Nullable Exception ex) throws Exception {
         String uri = request.getRequestURI();
         int statusCode = response.getStatus();
-        log.info("{} -> {}", statusCode, uri);
+        //log.info("{} -> {}", statusCode, uri);
     }
 }

+ 2 - 2
dfs-store/src/main/java/cn/reghao/dfs/store/inerceptor/JwtTokenFilter.java

@@ -29,7 +29,7 @@ public class JwtTokenFilter implements Filter {
         MutableHttpServletRequest mutableHttpServletRequest = new MutableHttpServletRequest(httpServletRequest);
         String jwtToken = getJwtToken(httpServletRequest);
         if (jwtToken != null) {
-            try {
+            /*try {
                 JwtPayload jwtPayload = Jwt.parse(jwtToken, "");
                 String userId = jwtPayload.getUserId();
                 mutableHttpServletRequest.putHeader("x-user-id", userId);
@@ -45,7 +45,7 @@ public class JwtTokenFilter implements Filter {
                 HttpServletResponse httpServletResponse = (HttpServletResponse) response;
                 writeResponse(httpServletResponse, msg);
                 return;
-            }
+            }*/
         } else {
             mutableHttpServletRequest.putHeader("x-user-id", "-1");
         }

+ 8 - 14
dfs-store/src/main/java/cn/reghao/dfs/store/model/po/FileMeta.java

@@ -1,13 +1,10 @@
 package cn.reghao.dfs.store.model.po;
 
 import cn.reghao.jutil.jdk.db.BaseObject;
-import lombok.AllArgsConstructor;
 import lombok.Getter;
 import lombok.NoArgsConstructor;
 import lombok.Setter;
 
-import java.util.UUID;
-
 /**
  * 文件元数据
  *
@@ -39,17 +36,8 @@ public class FileMeta extends BaseObject<Integer> {
         this.size = 0L;
         this.objectName = objectName;
         this.uploadBy = 10000L;
-    }
-
-    // 目录对象
-    public FileMeta(String objectName) {
-        this.objectName = objectName;
-        this.objectId = UUID.randomUUID().toString().replace("-", "");
-        this.filename = "dir";
-        this.size = 0L;
-        this.fileType = 1000;
-        this.contentType = "dir";
-        this.sha256sum = "0";
+        this.diskFile = false;
+        this.acl = 1;
     }
 
     public FileMeta(String objectName, String objectId, String filename, long size,
@@ -63,6 +51,8 @@ public class FileMeta extends BaseObject<Integer> {
         this.sha256sum = sha256sum;
         this.pid = pid;
         this.uploadBy = 10000L;
+        this.diskFile = false;
+        this.acl = 1;
     }
 
     public FileMeta(String objectName, String objectId, String filename, FileMeta fileMeta) {
@@ -73,5 +63,9 @@ public class FileMeta extends BaseObject<Integer> {
         this.fileType = fileMeta.getFileType();
         this.contentType = fileMeta.getContentType();
         this.sha256sum = fileMeta.getSha256sum();
+        this.pid = fileMeta.getPid();
+        this.uploadBy = fileMeta.getUploadBy();
+        this.diskFile = fileMeta.getDiskFile();
+        this.acl = fileMeta.getAcl();
     }
 }

+ 72 - 0
dfs-store/src/main/java/cn/reghao/dfs/store/rpc/DiskServiceImpl.java

@@ -0,0 +1,72 @@
+package cn.reghao.dfs.store.rpc;
+
+import cn.reghao.dfs.api.dto.DeleteFile;
+import cn.reghao.dfs.api.dto.DirProp;
+import cn.reghao.dfs.api.dto.FileInfo;
+import cn.reghao.dfs.api.dto.FileProp;
+import cn.reghao.dfs.api.iface.DiskService;
+import cn.reghao.dfs.api.iface.FileService;
+import cn.reghao.dfs.store.db.mapper.FileMetaMapper;
+import cn.reghao.dfs.store.model.po.FileMeta;
+import cn.reghao.dfs.store.service.PutObjectService;
+import cn.reghao.jutil.jdk.db.Page;
+import cn.reghao.jutil.jdk.db.PageList;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.dubbo.config.annotation.DubboService;
+import org.springframework.stereotype.Service;
+
+import java.io.ByteArrayInputStream;
+import java.nio.charset.StandardCharsets;
+import java.util.List;
+import java.util.UUID;
+
+/**
+ * @author reghao
+ * @date 2023-05-18 20:39:45
+ */
+@Slf4j
+@DubboService
+@Service
+public class DiskServiceImpl implements DiskService {
+    private PutObjectService putObjectService;
+    private final FileMetaMapper fileMetaMapper;
+    private final int pageSize = 10;
+
+    public DiskServiceImpl(PutObjectService putObjectService, FileMetaMapper fileMetaMapper) {
+        this.fileMetaMapper = fileMetaMapper;
+        this.putObjectService = putObjectService;
+    }
+
+    @Override
+    public PageList<FileInfo> getFileList(String pid, int pageNumber) {
+        Page page = new Page(pageNumber, pageSize);
+        int total = fileMetaMapper.countByPid(pid);
+        List<FileInfo> list = fileMetaMapper.findFileInfoByPage(page, pid);
+        PageList<FileInfo> pageList = PageList.pageList(pageNumber, pageSize, total, list);
+        return pageList;
+    }
+
+    @Override
+    public PageList<FileInfo> getFileCard(String pid, int pageNumber) {
+        Page page = new Page(pageNumber, pageSize);
+        int total = fileMetaMapper.countByPid(pid);
+        List<FileInfo> list = fileMetaMapper.findFileInfo1ByPage(page, pid);
+        PageList<FileInfo> pageList = PageList.pageList(pageNumber, pageSize, total, list);
+        return pageList;
+    }
+
+    @Override
+    public List<FileInfo> getDirectories(String pid) {
+        List<FileInfo> list = fileMetaMapper.findDirectories(pid);
+        return list;
+    }
+
+    @Override
+    public PageList<FileInfo> search(String keyword, int pageNumber) {
+        Page page = new Page(pageNumber, pageSize);
+        int total = fileMetaMapper.countByKeyword(keyword);
+        List<FileInfo> list = fileMetaMapper.findKeywordByPage(page, keyword);
+        PageList<FileInfo> pageList = PageList.pageList(pageNumber, pageSize, total, list);
+        return pageList;
+    }
+}

+ 0 - 46
dfs-store/src/main/java/cn/reghao/dfs/store/rpc/FileServiceImpl.java

@@ -1,15 +1,11 @@
 package cn.reghao.dfs.store.rpc;
 
-import cn.reghao.dfs.api.dto.DeleteFile;
 import cn.reghao.dfs.api.dto.DirProp;
-import cn.reghao.dfs.api.dto.FileInfoVO;
 import cn.reghao.dfs.api.dto.FileProp;
 import cn.reghao.dfs.api.iface.FileService;
 import cn.reghao.dfs.store.db.mapper.FileMetaMapper;
 import cn.reghao.dfs.store.model.po.FileMeta;
 import cn.reghao.dfs.store.service.PutObjectService;
-import cn.reghao.jutil.jdk.db.Page;
-import cn.reghao.jutil.jdk.db.PageList;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.dubbo.config.annotation.DubboService;
 import org.springframework.stereotype.Service;
@@ -36,39 +32,6 @@ public class FileServiceImpl implements FileService {
         this.putObjectService = putObjectService;
     }
 
-    @Override
-    public PageList<FileInfoVO> getFileList(String pid, int pageNumber) {
-        Page page = new Page(pageNumber, pageSize);
-        int total = fileMetaMapper.countByPid(pid);
-        List<FileInfoVO> list = fileMetaMapper.findFileInfoByPage(page, pid);
-        PageList<FileInfoVO> pageList = PageList.pageList(pageNumber, pageSize, total, list);
-        return pageList;
-    }
-
-    @Override
-    public PageList<FileInfoVO> getFileCard(String pid, int pageNumber) {
-        Page page = new Page(pageNumber, pageSize);
-        int total = fileMetaMapper.countByPid(pid);
-        List<FileInfoVO> list = fileMetaMapper.findFileInfo1ByPage(page, pid);
-        PageList<FileInfoVO> pageList = PageList.pageList(pageNumber, pageSize, total, list);
-        return pageList;
-    }
-
-    @Override
-    public List<FileInfoVO> getDirectories(String pid) {
-        List<FileInfoVO> list = fileMetaMapper.findDirectories(pid);
-        return list;
-    }
-
-    @Override
-    public PageList<DeleteFile> getTrashList(int pageNumber, int pageSize) {
-        int total = fileMetaMapper.countDeletedFiles();
-        Page page = new Page(pageNumber, pageSize);
-        List<DeleteFile> list = fileMetaMapper.findDeletedFileByPage(page);
-        PageList<DeleteFile> pageList = PageList.pageList(pageNumber, pageSize, total, list);
-        return pageList;
-    }
-
     @Override
     public String checkFilename(String pid, String filename) {
         List<FileMeta> list = fileMetaMapper.findByFilename(pid, filename);
@@ -163,13 +126,4 @@ public class FileServiceImpl implements FileService {
         }
         return new DirProp(totalDirs, totalFiles, totalSize);
     }
-
-    @Override
-    public PageList<FileInfoVO> search(String keyword, int pageNumber) {
-        Page page = new Page(pageNumber, pageSize);
-        int total = fileMetaMapper.countByKeyword(keyword);
-        List<FileInfoVO> list = fileMetaMapper.findKeywordByPage(page, keyword);
-        PageList<FileInfoVO> pageList = PageList.pageList(pageNumber, pageSize, total, list);
-        return pageList;
-    }
 }

+ 48 - 0
dfs-store/src/main/java/cn/reghao/dfs/store/rpc/TrashServiceImpl.java

@@ -0,0 +1,48 @@
+package cn.reghao.dfs.store.rpc;
+
+import cn.reghao.dfs.api.dto.DeleteFile;
+import cn.reghao.dfs.api.dto.DirProp;
+import cn.reghao.dfs.api.dto.FileInfo;
+import cn.reghao.dfs.api.dto.FileProp;
+import cn.reghao.dfs.api.iface.FileService;
+import cn.reghao.dfs.api.iface.TrashService;
+import cn.reghao.dfs.store.db.mapper.FileMetaMapper;
+import cn.reghao.dfs.store.model.po.FileMeta;
+import cn.reghao.dfs.store.service.PutObjectService;
+import cn.reghao.jutil.jdk.db.Page;
+import cn.reghao.jutil.jdk.db.PageList;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.dubbo.config.annotation.DubboService;
+import org.springframework.stereotype.Service;
+
+import java.io.ByteArrayInputStream;
+import java.nio.charset.StandardCharsets;
+import java.util.List;
+import java.util.UUID;
+
+/**
+ * @author reghao
+ * @date 2023-05-18 20:39:45
+ */
+@Slf4j
+@DubboService
+@Service
+public class TrashServiceImpl implements TrashService {
+    private PutObjectService putObjectService;
+    private final FileMetaMapper fileMetaMapper;
+    private final int pageSize = 10;
+
+    public TrashServiceImpl(PutObjectService putObjectService, FileMetaMapper fileMetaMapper) {
+        this.fileMetaMapper = fileMetaMapper;
+        this.putObjectService = putObjectService;
+    }
+
+    @Override
+    public PageList<DeleteFile> getTrashList(int pageNumber, int pageSize) {
+        int total = fileMetaMapper.countDeletedFiles();
+        Page page = new Page(pageNumber, pageSize);
+        List<DeleteFile> list = fileMetaMapper.findDeletedFileByPage(page);
+        PageList<DeleteFile> pageList = PageList.pageList(pageNumber, pageSize, total, list);
+        return pageList;
+    }
+}

+ 5 - 25
dfs-store/src/main/java/cn/reghao/dfs/store/service/GetObjectService.java

@@ -2,13 +2,12 @@ package cn.reghao.dfs.store.service;
 
 import cn.reghao.dfs.store.cache.LocalCache;
 import cn.reghao.dfs.store.db.mapper.FileMetaMapper;
+import cn.reghao.dfs.store.db.repository.FileMetaRepository;
 import cn.reghao.dfs.store.model.dto.ContentRange;
 import cn.reghao.dfs.store.model.po.FileMeta;
 import cn.reghao.dfs.store.model.vo.ObjectMeta;
-import cn.reghao.dfs.store.redis.ds.RedisStringObj;
 import cn.reghao.jutil.web.ServletUtil;
 import lombok.extern.slf4j.Slf4j;
-import org.springframework.cache.annotation.Cacheable;
 import org.springframework.stereotype.Service;
 import org.springframework.util.StringUtils;
 
@@ -30,12 +29,12 @@ public class GetObjectService {
     // 10MiB
     private final int bufSize = 1024*1024*10;
     private final LocalCache localCache;
-    private final RedisStringObj redisStringObj;
+    private final FileMetaRepository fileMetaRepository;
 
-    public GetObjectService(FileMetaMapper fileMetaMapper, LocalCache localCache, RedisStringObj redisStringObj) {
+    public GetObjectService(FileMetaMapper fileMetaMapper, LocalCache localCache, FileMetaRepository fileMetaRepository) {
         this.fileMetaMapper = fileMetaMapper;
         this.localCache = localCache;
-        this.redisStringObj = redisStringObj;
+        this.fileMetaRepository = fileMetaRepository;
     }
     
     public void headObject(String objectName) throws IOException {
@@ -63,14 +62,7 @@ public class GetObjectService {
         String host = ServletUtil.getRequest().getHeader("host");
         HttpServletResponse response = ServletUtil.getResponse();
 
-        ObjectMeta objectMeta;
-        Object object = redisStringObj.get(objectName);
-        if (object != null) {
-            objectMeta = (ObjectMeta) object;
-        } else {
-            objectMeta = fileMetaMapper.findObjectMeta(objectName);
-        }
-
+        ObjectMeta objectMeta = fileMetaRepository.getObjectMeta(objectName);
         if (objectMeta == null) {
             response.setStatus(HttpServletResponse.SC_NOT_FOUND);
             OutputStream outputStream = response.getOutputStream();
@@ -100,18 +92,6 @@ public class GetObjectService {
         }
     }
 
-    @Cacheable
-    public ObjectMeta getObjectMeta(String objectName) {
-        ObjectMeta objectMeta;
-        Object object = redisStringObj.get(objectName);
-        if (object != null) {
-            objectMeta = (ObjectMeta) object;
-        } else {
-            objectMeta = fileMetaMapper.findObjectMeta(objectName);
-        }
-        return objectMeta;
-    }
-
     private void writeAcceptRanges(String contentType, long len) throws IOException {
         HttpServletResponse response = ServletUtil.getResponse();
         response.setStatus(HttpServletResponse.SC_OK);

+ 5 - 1
dfs-store/src/main/java/cn/reghao/dfs/store/service/PutObjectService.java

@@ -96,7 +96,11 @@ public class PutObjectService {
         list.forEach(parentName -> {
             FileMeta fileMeta = fileMetaMapper.findByObjectName(parentName);
             if (fileMeta == null) {
-                fileMetas.add(new FileMeta(parentName));
+                String pid = "0";
+                int index = parentName.lastIndexOf("/");
+                String objectId = UUID.randomUUID().toString().replace("-", "");
+                String filename = parentName.substring(index);
+                fileMetas.add(new FileMeta(pid, objectId, objectName, filename));
             }
         });
 

+ 9 - 6
dfs-store/src/main/java/cn/reghao/dfs/store/task/ConvertTask.java

@@ -3,13 +3,13 @@ package cn.reghao.dfs.store.task;
 import cn.reghao.dfs.api.dto.VideoUrlType;
 import cn.reghao.dfs.store.db.mapper.VideoUrlMapper;
 import cn.reghao.dfs.store.model.po.VideoUrl;
+import cn.reghao.dfs.store.util.media.FFmpegWrapper;
 import cn.reghao.dfs.store.util.media.MediaQuality;
 import cn.reghao.dfs.store.util.media.MediaResolution;
-import cn.reghao.jutil.media.video.FFmpegWrapper;
-import cn.reghao.jutil.media.video.VideoOps;
+import cn.reghao.dfs.store.util.media.po.MediaProps;
+import cn.reghao.dfs.store.util.media.po.VideoProps;
 
 import java.io.File;
-import java.util.Map;
 import java.util.UUID;
 
 /**
@@ -38,9 +38,12 @@ public class ConvertTask implements Runnable {
         try {
             String url = String.format("%s/%s", endpoint, objectName);
             File destFile = new File(destPath);
-            Map<String, Object> map = VideoOps.videoProps(destFile);
-            int width = (Integer) map.get("width");
-            int height = (Integer) map.get("height");
+            MediaProps mediaProps = FFmpegWrapper.getMediaProps(destPath);
+            VideoProps videoProps = mediaProps.getVideoProps();
+            videoProps.getCodedWidth();
+            videoProps.getCodedHeight();
+            int width = (int) videoProps.getCodedWidth();
+            int height = (int) videoProps.getCodedHeight();
 
             MediaResolution mediaResolution = MediaQuality.getQuality(width, height);
             VideoUrl videoUrl = new VideoUrl(videoId, objectName, VideoUrlType.mp4.name(), url, mediaResolution);

+ 3 - 4
dfs-store/src/main/java/cn/reghao/dfs/store/task/VideoFileProcessor.java

@@ -8,11 +8,11 @@ import cn.reghao.dfs.store.db.mapper.VideoFileMapper;
 import cn.reghao.dfs.store.db.mapper.VideoUrlMapper;
 import cn.reghao.dfs.store.model.po.VideoFile;
 import cn.reghao.dfs.store.model.po.VideoUrl;
+import cn.reghao.dfs.store.util.media.FFmpegWrapper;
 import cn.reghao.dfs.store.util.media.MediaQuality;
 import cn.reghao.dfs.store.util.media.MediaResolution;
+import cn.reghao.dfs.store.util.media.po.MediaProps;
 import cn.reghao.jutil.jdk.thread.ThreadPoolWrapper;
-import cn.reghao.jutil.media.po.MediaProps;
-import cn.reghao.jutil.media.video.FFmpegWrapper;
 import org.springframework.stereotype.Service;
 
 import java.util.List;
@@ -60,8 +60,7 @@ public class VideoFileProcessor {
                     threadPool.submit(convertTask);
                 }
             } else {
-                ConvertTask convertTask =
-                        new ConvertTask(videoId, destPath, endpoint, videoUrlMapper);
+                ConvertTask convertTask = new ConvertTask(videoId, destPath, endpoint, videoUrlMapper);
                 threadPool.submit(convertTask);
             }
         } catch (Exception e) {

+ 109 - 0
dfs-store/src/main/java/cn/reghao/dfs/store/util/media/FFmpegWrapper.java

@@ -0,0 +1,109 @@
+package cn.reghao.dfs.store.util.media;
+
+import cn.reghao.dfs.store.util.media.po.AudioProps;
+import cn.reghao.dfs.store.util.media.po.MediaProps;
+import cn.reghao.dfs.store.util.media.po.VideoProps;
+import cn.reghao.jutil.jdk.converter.DateTimeConverter;
+import cn.reghao.jutil.jdk.serializer.JsonConverter;
+import cn.reghao.jutil.jdk.shell.Shell;
+import cn.reghao.jutil.jdk.shell.ShellExecutor;
+import cn.reghao.jutil.jdk.shell.ShellResult;
+import com.google.gson.JsonArray;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+
+import java.time.LocalDateTime;
+
+/**
+ * @author reghao
+ * @date 2022-03-04 11:04:32
+ */
+public class FFmpegWrapper {
+    /** Shared executor for commands that must run inside a working directory. */
+    private static final ShellExecutor shellExecutor = new ShellExecutor();
+    private final static String ffprobe = "/usr/bin/ffprobe";
+    private final static String ffmpeg = "/usr/bin/ffmpeg";
+
+    /**
+     * Probes a media file with ffprobe and extracts per-stream audio/video
+     * properties plus the container-level {@code creation_time} tag (when present).
+     *
+     * @param src path of the media file to probe
+     * @return parsed properties, or {@code null} when ffprobe produced no output
+     */
+    public static MediaProps getMediaProps(String src) {
+        String cmd = String.format("%s -v quiet -print_format json -show_format -show_streams -i \"%s\"", ffprobe, src);
+        String result = Shell.execWithResult(cmd);
+        if (result == null) {
+            return null;
+        }
+
+        JsonObject root = JsonConverter.jsonToJsonElement(result).getAsJsonObject();
+        JsonArray streams = root.get("streams").getAsJsonArray();
+        AudioProps audioProps = null;
+        VideoProps videoProps = null;
+        for (JsonElement element : streams) {
+            JsonObject stream = element.getAsJsonObject();
+            String codecType = stream.get("codec_type").getAsString();
+            // ffprobe omits bit_rate/duration for some codecs; optDouble defaults
+            // missing fields to 0 instead of throwing an NPE
+            if ("audio".equals(codecType)) {
+                audioProps = new AudioProps(optString(stream, "codec_name"), optString(stream, "codec_tag_string"),
+                        optDouble(stream, "bit_rate"), optDouble(stream, "duration"));
+            } else if ("video".equals(codecType)) {
+                videoProps = new VideoProps(optString(stream, "codec_name"), optString(stream, "codec_tag_string"),
+                        optDouble(stream, "bit_rate"), optDouble(stream, "duration"),
+                        optDouble(stream, "coded_width"), optDouble(stream, "coded_height"));
+            }
+        }
+
+        MediaProps mediaProps = new MediaProps(audioProps, videoProps);
+        // "format.tags" is optional; only read creation_time when both levels exist
+        JsonElement tagsElement = root.get("format").getAsJsonObject().get("tags");
+        if (tagsElement != null && tagsElement.isJsonObject()) {
+            JsonElement creation = tagsElement.getAsJsonObject().get("creation_time");
+            if (creation != null) {
+                LocalDateTime localDateTime = DateTimeConverter.localDateTime(creation.getAsString());
+                mediaProps.setCreateTime(localDateTime);
+            }
+        }
+        return mediaProps;
+    }
+
+    /** Returns the member as double, or 0 when the member is absent. */
+    private static double optDouble(JsonObject obj, String key) {
+        JsonElement e = obj.get(key);
+        return e == null ? 0 : e.getAsDouble();
+    }
+
+    /** Returns the member as String, or {@code null} when the member is absent. */
+    private static String optString(JsonObject obj, String key) {
+        JsonElement e = obj.get(key);
+        return e == null ? null : e.getAsString();
+    }
+
+    /**
+     * Re-encodes {@code src} to AAC audio / H.264 video at {@code dest}.
+     * (Method name keeps the historical "Covert" spelling — callers depend on it.)
+     *
+     * @return ffmpeg exit code (0 on success)
+     */
+    public static int formatCovert(String src, String dest) {
+        // paths are quoted (as in getMediaProps) so whitespace cannot split the command
+        String cmd = String.format("%s -y -i \"%s\" -c:a aac -c:v libx264 \"%s\"", ffmpeg, src, dest);
+        return Shell.exec(cmd);
+    }
+
+    /**
+     * Re-encodes {@code src} scaled to {@code width}x{@code height} with fixed
+     * audio/video bitrates and a 90-frame GOP.
+     *
+     * @return ffmpeg exit code (0 on success)
+     */
+    public static int qualityCovert(String src, int width, int height, String dest) {
+        String audioBitRate = "128k";
+        String videoBitRate = "1500k";
+        // paths are quoted (as in getMediaProps) so whitespace cannot split the command
+        String cmd = String.format("%s -i \"%s\" -s %dx%d -c:a aac -b:a %s -c:v libx264 -b:v %s -g 90 \"%s\"",
+                ffmpeg, src, width, height, audioBitRate, videoBitRate, dest);
+        return Shell.exec(cmd);
+    }
+
+    /**
+     * Muxes a video track and an audio track into {@code dir/videoId.mp4}
+     * without re-encoding ({@code -codec copy}).
+     *
+     * @throws Exception when ffmpeg reports failure
+     */
+    public static void mergeToMp4(String dir, String videoId, String videoFilePath, String audioFilePath) throws Exception {
+        String mp4FilePath = String.format("%s/%s.mp4", dir, videoId);
+        // pass arguments as an array (no string re-splitting) so paths containing
+        // spaces survive; use the configured ffmpeg binary for consistency
+        String[] cmd = {ffmpeg, "-i", audioFilePath, "-i", videoFilePath, "-codec", "copy", mp4FilePath};
+        ShellResult shellResult = shellExecutor.exec(dir, cmd);
+        if (!shellResult.isSuccess()) {
+            throw new Exception("合并成 mp4 文件异常: " + shellResult.getResult());
+        }
+    }
+
+    /**
+     * Generates a DASH manifest (index.mpd) from the given video/audio files
+     * using MP4Box, executed in {@code dir}.
+     *
+     * @throws Exception when MP4Box reports failure
+     */
+    public static void generateDash(String dir, String video, String audio) throws Exception {
+        // argv array instead of split string, so file names with spaces are safe
+        String[] cmd = {"MP4Box", "-dash", "5000", "-rap", "-frag-rap", "-profile", "dashavc264:onDemand",
+                "-frag", "5000", video, audio, "-out", "index.mpd"};
+        ShellResult shellResult = shellExecutor.exec(dir, cmd);
+        if (!shellResult.isSuccess()) {
+            throw new Exception("生成 dash 异常: " + shellResult.getResult());
+        }
+    }
+}

+ 17 - 0
dfs-store/src/main/java/cn/reghao/dfs/store/util/media/po/AudioProps.java

@@ -0,0 +1,17 @@
+package cn.reghao.dfs.store.util.media.po;
+
+import lombok.AllArgsConstructor;
+import lombok.Getter;
+
+/**
+ * @author reghao
+ * @date 2023-03-28 10:07:48
+ */
+@AllArgsConstructor
+@Getter
+public class AudioProps {
+    private String codecName;
+    private String codecTagString;
+    private double bitRate;
+    private double duration;
+}

+ 23 - 0
dfs-store/src/main/java/cn/reghao/dfs/store/util/media/po/MediaProps.java

@@ -0,0 +1,23 @@
+package cn.reghao.dfs.store.util.media.po;
+
+import lombok.Getter;
+import lombok.Setter;
+
+import java.time.LocalDateTime;
+
+/**
+ * @author reghao
+ * @date 2023-03-28 10:07:59
+ */
+public class MediaProps {
+    private AudioProps audioProps;
+    private VideoProps videoProps;
+    // container-level creation_time tag; stays null when the source has no such tag
+    private LocalDateTime createTime;
+
+    public MediaProps(AudioProps audioProps, VideoProps videoProps) {
+        this.audioProps = audioProps;
+        this.videoProps = videoProps;
+    }
+
+    public AudioProps getAudioProps() {
+        return audioProps;
+    }
+
+    public void setAudioProps(AudioProps audioProps) {
+        this.audioProps = audioProps;
+    }
+
+    public VideoProps getVideoProps() {
+        return videoProps;
+    }
+
+    public void setVideoProps(VideoProps videoProps) {
+        this.videoProps = videoProps;
+    }
+
+    public LocalDateTime getCreateTime() {
+        return createTime;
+    }
+
+    public void setCreateTime(LocalDateTime createTime) {
+        this.createTime = createTime;
+    }
+}

+ 19 - 0
dfs-store/src/main/java/cn/reghao/dfs/store/util/media/po/VideoProps.java

@@ -0,0 +1,19 @@
+package cn.reghao.dfs.store.util.media.po;
+
+import lombok.AllArgsConstructor;
+import lombok.Getter;
+
+/**
+ * @author reghao
+ * @date 2023-03-28 10:07:53
+ */
+@AllArgsConstructor
+@Getter
+public class VideoProps {
+    private String codecName;
+    private String codecTagString;
+    private double bitRate;
+    private double duration;
+    private double codedWidth;
+    private double codedHeight;
+}

+ 6 - 12
dfs-store/src/main/java/cn/reghao/dfs/store/util/AWS4Signer.java → dfs-store/src/main/java/cn/reghao/dfs/store/util/s3/AWS4Signer.java

@@ -1,20 +1,14 @@
-package cn.reghao.dfs.store.util;
-
-import cn.reghao.jutil.jdk.security.DigestUtil;
+package cn.reghao.dfs.store.util.s3;
 
+import javax.crypto.Mac;
+import javax.crypto.spec.SecretKeySpec;
 import java.net.URLEncoder;
 import java.nio.charset.StandardCharsets;
 import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.TimeZone;
-import java.util.TreeMap;
-import javax.crypto.Mac;
-import javax.crypto.spec.SecretKeySpec;
+import java.util.*;
 
 /**
  * Example: Signing AWS Requests with Signature Version 4 in Java.
@@ -395,8 +389,8 @@ public class AWS4Signer {
     }
 
     /**
-     * Using {@link URLEncoder#encode(java.lang.String, java.lang.String) } instead of
-     * {@link URLEncoder#encode(java.lang.String) }
+     * Using {@link URLEncoder#encode(String, String) } instead of
+     * {@link URLEncoder#encode(String) }
      *
      * @co-author https://github.com/dotkebi
      * @date 16th March, 2017

+ 30 - 1
dfs-store/src/main/java/cn/reghao/dfs/store/util/OssUtil.java → dfs-store/src/main/java/cn/reghao/dfs/store/util/s3/OssUtil.java

@@ -1,7 +1,9 @@
-package cn.reghao.dfs.store.util;
+package cn.reghao.dfs.store.util.s3;
 
 import cn.reghao.jutil.web.ServletUtil;
 
+import javax.crypto.Mac;
+import javax.crypto.spec.SecretKeySpec;
 import javax.servlet.http.HttpServletRequest;
 import java.io.File;
 import java.io.FileInputStream;
@@ -122,4 +124,31 @@ public class OssUtil {
         String signature1 = aWSV4Signer.getSignature();
         return signature.equals(signature1);
     }
+
+    /**
+     * Derives the AWS Signature V4 signing key by chaining HMAC-SHA256 over
+     * date, region, service and the literal "aws4_request", seeded with
+     * "AWS4" + secret key.
+     */
+    public static byte[] getSigningKey(String secretAccessKey, String date, String regionName, String serviceName) throws Exception {
+        byte[] key = ("AWS4" + secretAccessKey).getBytes(StandardCharsets.UTF_8);
+        for (String part : new String[]{date, regionName, serviceName, "aws4_request"}) {
+            key = hmacSha256(key, part);
+        }
+        return key;
+    }
+
+    /** Computes HMAC-SHA256 of {@code data} (UTF-8 encoded) under {@code key}. */
+    public static byte[] hmacSha256(byte[] key, String data) throws Exception {
+        Mac hmac = Mac.getInstance("HmacSHA256");
+        hmac.init(new SecretKeySpec(key, "HmacSHA256"));
+        return hmac.doFinal(data.getBytes(StandardCharsets.UTF_8));
+    }
+
+    final static char[] hexArray = "0123456789ABCDEF".toCharArray();
+
+    /**
+     * Converts a byte array to its lowercase hex representation.
+     * Emits lowercase digits directly instead of building an uppercase string
+     * and calling toLowerCase() on it afterwards (same output, one pass).
+     *
+     * @param bytes bytes to encode; must not be null
+     * @return lowercase hex string, two characters per input byte
+     */
+    public static String bytesToHex(byte[] bytes) {
+        final char[] digits = "0123456789abcdef".toCharArray();
+        char[] out = new char[bytes.length * 2];
+        for (int i = 0; i < bytes.length; i++) {
+            int v = bytes[i] & 0xFF;
+            out[i * 2] = digits[v >>> 4];
+            out[i * 2 + 1] = digits[v & 0x0F];
+        }
+        return new String(out);
+    }
 }

+ 1 - 7
dfs-store/src/main/java/cn/reghao/dfs/store/util/store/LoadBalancer.java

@@ -22,13 +22,7 @@ public class LoadBalancer {
     public StoreDir getStoreDir(long fileSize) {
         LocalStore localStore = LocalStores.getMaxStore(fileSize);
         List<StoreDir> subDirs = LocalStores.getSubDirs(localStore.getMountedOn());
-        Collections.sort(subDirs, new Comparator<StoreDir>() {
-            @Override
-            public int compare(StoreDir storeDir1, StoreDir storeDir2) {
-                return storeDir1.getCount() - storeDir2.getCount();
-            }
-        });
-
+        subDirs.sort(Comparator.comparingInt(StoreDir::getCount));
         return subDirs.get(0);
     }
 

+ 5 - 6
dfs-store/src/main/resources/mapper/FileMetaMapper.xml

@@ -73,9 +73,8 @@
         select file_meta.size,file_meta.content_type,file_meta.object_name,file_meta.object_id,
         data_block.absolute_path
         from file_meta
-        inner join file_content
         inner join data_block
-        on file_meta.object_id=file_content.object_id and file_content.content_id=data_block.content_id
+        on file_meta.sha256sum=data_block.sha256sum
         and file_meta.object_name=#{objectName}
     </select>
     <select id="findObjectMetaByPage" resultType="cn.reghao.dfs.store.model.vo.ObjectMeta">
@@ -112,14 +111,14 @@
         from file_meta
         where pid=#{pid}
     </select>
-    <select id="findFileInfoByPage" resultType="cn.reghao.dfs.api.dto.FileInfoVO">
+    <select id="findFileInfoByPage" resultType="cn.reghao.dfs.api.dto.FileInfo">
         select file_meta.object_id as file_id,file_meta.filename,file_meta.file_type as type,file_meta.size,file_meta.update_time,file_type.icon
         from file_meta
         inner join file_type
         on file_meta.deleted=0 and file_meta.file_type=file_type.id and file_meta.pid=#{pid}
         order by file_type asc
     </select>
-    <select id="findFileInfo1ByPage" resultType="cn.reghao.dfs.api.dto.FileInfoVO">
+    <select id="findFileInfo1ByPage" resultType="cn.reghao.dfs.api.dto.FileInfo">
         select file_meta.object_id as file_id,file_meta.filename,file_meta.file_type as type,file_meta.size,file_meta.update_time,file_type.icon_large as icon
         from file_meta
         inner join file_type
@@ -140,7 +139,7 @@
         order by file_type asc
     </select>
 
-    <select id="findDirectories" resultType="cn.reghao.dfs.api.dto.FileInfoVO">
+    <select id="findDirectories" resultType="cn.reghao.dfs.api.dto.FileInfo">
         select file_meta.object_id as file_id,file_meta.filename,file_meta.file_type as type,file_meta.size,file_meta.update_time,file_type.icon
         from file_meta
         inner join file_type
@@ -171,7 +170,7 @@
         from file_meta
         where filename like concat('%',#{keyword},'%')
     </select>
-    <select id="findKeywordByPage" resultType="cn.reghao.dfs.api.dto.FileInfoVO">
+    <select id="findKeywordByPage" resultType="cn.reghao.dfs.api.dto.FileInfo">
         select file_meta.object_id as file_id,file_meta.filename,file_meta.file_type as type,file_meta.size,file_meta.update_time,file_type.icon
         from file_meta
         inner join file_type

+ 0 - 4
dfs-store/src/test/java/FileMetaTest.java

@@ -52,10 +52,6 @@ public class FileMetaTest {
                 dataBlockMapper.updateContentId(objectId, contentId);
             });
 
-            long start = System.currentTimeMillis();
-            dataBlockMapper.updateBatch(dataBlocks);
-            log.info("cost: {}", System.currentTimeMillis()-start);
-
             pageNumber++;
             page = new Page(pageNumber, pageSize);
             dataBlocks = dataBlockMapper.findDataBlockByPage(page);