Bladeren bron

评论改查代码提交

pans 7 maanden geleden
bovenliggende
commit
6f4650a75a
26 gewijzigde bestanden met toevoegingen van 1325 en 196 verwijderingen
  1. 7 1
      snowy-modules/snowy-web-app/src/main/java/vip/xiaonuo/web/core/config/GlobalConfigure.java
  2. 25 4
      snowy-plugin/snowy-plugin-disk/snowy-plugin-disk-func/pom.xml
  3. 36 0
      snowy-plugin/snowy-plugin-disk/snowy-plugin-disk-func/src/main/java/vip/xiaonuo/disk/commpen/ProducerCommpen.java
  4. 10 0
      snowy-plugin/snowy-plugin-disk/snowy-plugin-disk-func/src/main/java/vip/xiaonuo/disk/config/kafka/KafkaProducerConfiguration.java
  5. 29 0
      snowy-plugin/snowy-plugin-disk/snowy-plugin-disk-func/src/main/java/vip/xiaonuo/disk/config/minio/MinioConfig.java
  6. 80 0
      snowy-plugin/snowy-plugin-disk/snowy-plugin-disk-func/src/main/java/vip/xiaonuo/disk/config/minio/MinioProperties.java
  7. 33 0
      snowy-plugin/snowy-plugin-disk/snowy-plugin-disk-func/src/main/java/vip/xiaonuo/disk/config/minio/WebMvcConfig.java
  8. 28 0
      snowy-plugin/snowy-plugin-disk/snowy-plugin-disk-func/src/main/java/vip/xiaonuo/disk/consumer/TranscodingConsumer.java
  9. 78 0
      snowy-plugin/snowy-plugin-disk/snowy-plugin-disk-func/src/main/java/vip/xiaonuo/disk/consumer/config/ConsumerConfig.java
  10. 62 0
      snowy-plugin/snowy-plugin-disk/snowy-plugin-disk-func/src/main/java/vip/xiaonuo/disk/controller/MinioController.java
  11. 1 5
      snowy-plugin/snowy-plugin-disk/snowy-plugin-disk-func/src/main/java/vip/xiaonuo/disk/controller/TranscodingController.java
  12. 16 0
      snowy-plugin/snowy-plugin-disk/snowy-plugin-disk-func/src/main/java/vip/xiaonuo/disk/domain/FileLink.java
  13. 12 0
      snowy-plugin/snowy-plugin-disk/snowy-plugin-disk-func/src/main/java/vip/xiaonuo/disk/domain/UploadFile.java
  14. 2 0
      snowy-plugin/snowy-plugin-disk/snowy-plugin-disk-func/src/main/java/vip/xiaonuo/disk/mapper/UploadTaskDetailMapper.java
  15. 3 0
      snowy-plugin/snowy-plugin-disk/snowy-plugin-disk-func/src/main/java/vip/xiaonuo/disk/mapper/mapping/UploadTaskDetailMapper.xml
  16. 47 0
      snowy-plugin/snowy-plugin-disk/snowy-plugin-disk-func/src/main/java/vip/xiaonuo/disk/properties/KafkaProducerProperties.java
  17. 15 0
      snowy-plugin/snowy-plugin-disk/snowy-plugin-disk-func/src/main/java/vip/xiaonuo/disk/service/MinioService.java
  18. 4 3
      snowy-plugin/snowy-plugin-disk/snowy-plugin-disk-func/src/main/java/vip/xiaonuo/disk/service/TranscodingService.java
  19. 198 0
      snowy-plugin/snowy-plugin-disk/snowy-plugin-disk-func/src/main/java/vip/xiaonuo/disk/service/impl/MinioServiceImpl.java
  20. 40 20
      snowy-plugin/snowy-plugin-disk/snowy-plugin-disk-func/src/main/java/vip/xiaonuo/disk/service/impl/ResourceFileServiceImpl.java
  21. 82 160
      snowy-plugin/snowy-plugin-disk/snowy-plugin-disk-func/src/main/java/vip/xiaonuo/disk/service/impl/TranscodingServiceImpl.java
  22. 1 1
      snowy-plugin/snowy-plugin-disk/snowy-plugin-disk-func/src/main/java/vip/xiaonuo/disk/util/CheckThreadPool.java
  23. 41 0
      snowy-plugin/snowy-plugin-disk/snowy-plugin-disk-func/src/main/java/vip/xiaonuo/disk/util/CustomThreadPoolFactory.java
  24. 105 0
      snowy-plugin/snowy-plugin-disk/snowy-plugin-disk-func/src/main/java/vip/xiaonuo/disk/util/KafKaProducerUtil.java
  25. 364 0
      snowy-plugin/snowy-plugin-disk/snowy-plugin-disk-func/src/main/java/vip/xiaonuo/disk/util/MinioUtil.java
  26. 6 2
      snowy-server/snowy-gateway-app/src/main/java/vip/xiaonuo/gateway/config/GatewayConfigure.java

+ 7 - 1
snowy-modules/snowy-web-app/src/main/java/vip/xiaonuo/web/core/config/GlobalConfigure.java

@@ -184,7 +184,13 @@ public class GlobalConfigure implements WebMvcConfigurer {
             /* 资源中心 */
             "/disk/resourcecentre/page",
             "/disk/resourcecentre/detail",
-            "/disk/courseauditrecord/addViewCount"
+            "/disk/courseauditrecord/addViewCount",
+
+            "/disk/minio/upload",
+            "/disk/minio/checkExits",
+            "/disk/minio/merge",
+            "/disk/minio/delete",
+            "/disk/minio/checkMd5List"
     };
 
     /**

+ 25 - 4
snowy-plugin/snowy-plugin-disk/snowy-plugin-disk-func/pom.xml

@@ -49,7 +49,7 @@
         <commons-io.version>2.11.0</commons-io.version>
         <jjwt.version>0.9.1</jjwt.version>
         <thumbnailator.version>0.4.16</thumbnailator.version>
-        <fastjson2.version>2.0.34</fastjson2.version>
+
         <swagger-annotations.version>2.1.4</swagger-annotations.version>
         <mybatis-plus.version>3.5.2</mybatis-plus.version>
         <poi-version>5.2.2</poi-version>
@@ -134,9 +134,8 @@
         </dependency>
         <!-- fastjson  -->
         <dependency>
-            <groupId>com.alibaba.fastjson2</groupId>
-            <artifactId>fastjson2</artifactId>
-            <version>${fastjson2.version}</version>
+            <groupId>com.alibaba</groupId>
+            <artifactId>fastjson</artifactId>
         </dependency>
 
         <!-- 导入Mysql数据库链接jar包-->
@@ -161,6 +160,28 @@
             <scope>test</scope>
         </dependency>
 
+        <dependency>
+            <groupId>org.apache.kafka</groupId>
+            <artifactId>kafka-clients</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>org.springframework.kafka</groupId>
+            <artifactId>spring-kafka</artifactId>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.apache.kafka</groupId>
+                    <artifactId>kafka-clients</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.kafka</groupId>
+            <artifactId>kafka-clients</artifactId>
+            <version>2.0.1</version>
+        </dependency>
+
         <dependency>
             <groupId>com.baomidou</groupId>
             <artifactId>mybatis-plus-boot-starter</artifactId>

+ 36 - 0
snowy-plugin/snowy-plugin-disk/snowy-plugin-disk-func/src/main/java/vip/xiaonuo/disk/commpen/ProducerCommpen.java

@@ -0,0 +1,36 @@
+package vip.xiaonuo.disk.commpen;
+
+
+import org.apache.kafka.clients.producer.Producer;
+import org.springframework.stereotype.Component;
+import vip.xiaonuo.disk.properties.KafkaProducerProperties;
+import vip.xiaonuo.disk.util.KafKaProducerUtil;
+
+import javax.annotation.Resource;
+
+/*******************************************************
+ * kafka生产者初始化 
+ * @ClassName : ProducerCommpen
+ * @author: lin xue peng
+ * @time: 2023-07-17 23:17:54
+  * *****************************************************/
+@Component
+//@Scope("singleton")
+public class ProducerCommpen {
+
+    private Producer<String, String> producerInstance;
+
+
+
+    @Resource
+    private KafkaProducerProperties kafkaPro;
+
+
+    public synchronized Producer<String, String> getProducer() {
+        if (producerInstance == null) {
+            System.err.println("初始化kafka服务!!!");
+            producerInstance = KafKaProducerUtil.initProps(kafkaPro.getUserName(),kafkaPro.getPwd(),kafkaPro.getServers());
+        }
+        return producerInstance;
+    }
+}

+ 10 - 0
snowy-plugin/snowy-plugin-disk/snowy-plugin-disk-func/src/main/java/vip/xiaonuo/disk/config/kafka/KafkaProducerConfiguration.java

@@ -0,0 +1,10 @@
+package vip.xiaonuo.disk.config.kafka;
+
+import org.springframework.boot.context.properties.EnableConfigurationProperties;
+import org.springframework.context.annotation.Configuration;
+import vip.xiaonuo.disk.properties.KafkaProducerProperties;
+
+@Configuration
+@EnableConfigurationProperties(KafkaProducerProperties.class)
+public class KafkaProducerConfiguration {
+}

+ 29 - 0
snowy-plugin/snowy-plugin-disk/snowy-plugin-disk-func/src/main/java/vip/xiaonuo/disk/config/minio/MinioConfig.java

@@ -0,0 +1,29 @@
+package vip.xiaonuo.disk.config.minio;
+
+import io.minio.MinioClient;
+import org.springframework.boot.context.properties.EnableConfigurationProperties;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+
+/**
+ * @author 吴浩炼
+ * @since 2024/05/29
+ */
+@Configuration
+@EnableConfigurationProperties(MinioProperties.class)
+public class MinioConfig {
+
+    private final MinioProperties minioProperties;
+
+    public MinioConfig(MinioProperties minioProperties) {
+        this.minioProperties = minioProperties;
+    }
+
+    @Bean
+    public MinioClient minioClient() {
+        return MinioClient.builder()
+                .credentials(minioProperties.getAccessKey(), minioProperties.getSecretKey())
+                .endpoint(minioProperties.getEndpoint())
+                .build();
+    }
+}

+ 80 - 0
snowy-plugin/snowy-plugin-disk/snowy-plugin-disk-func/src/main/java/vip/xiaonuo/disk/config/minio/MinioProperties.java

@@ -0,0 +1,80 @@
+package vip.xiaonuo.disk.config.minio;
+
+import org.springframework.boot.context.properties.ConfigurationProperties;
+
+import java.util.Objects;
+
+/**
+ * @author 吴浩炼
+ * @since 2024/05/30
+ */
+@ConfigurationProperties(prefix = "ufop.minio")
+public class MinioProperties {
+    private String endpoint;
+
+    private String accessKey;
+
+    private String secretKey;
+
+    private String bucketName;
+
+    public String getEndpoint() {
+        return endpoint;
+    }
+
+    public void setEndpoint(String endpoint) {
+        this.endpoint = endpoint;
+    }
+
+    public String getAccessKey() {
+        return accessKey;
+    }
+
+    public void setAccessKey(String accessKey) {
+        this.accessKey = accessKey;
+    }
+
+    public String getSecretKey() {
+        return secretKey;
+    }
+
+    public void setSecretKey(String secretKey) {
+        this.secretKey = secretKey;
+    }
+
+    public String getBucketName() {
+        return bucketName;
+    }
+
+    public void setBucketName(String bucketName) {
+        this.bucketName = bucketName;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) {
+            return true;
+        }
+        if (o == null || getClass() != o.getClass()) {
+            return false;
+        }
+        MinioProperties that = (MinioProperties) o;
+        return Objects.equals(endpoint, that.endpoint) && Objects.equals(accessKey, that.accessKey) &&
+                Objects.equals(secretKey, that.secretKey) && Objects.equals(bucketName, that.bucketName);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(endpoint, accessKey, secretKey, bucketName);
+    }
+
+    @Override
+    public String toString() {
+        return "MinioProperties{" +
+                "endpoint='" + endpoint + '\'' +
+                ", accessKey='" + accessKey + '\'' +
+                ", secretKey='" + secretKey + '\'' +
+                ", bucketName='" + bucketName + '\'' +
+                '}';
+    }
+}

+ 33 - 0
snowy-plugin/snowy-plugin-disk/snowy-plugin-disk-func/src/main/java/vip/xiaonuo/disk/config/minio/WebMvcConfig.java

@@ -0,0 +1,33 @@
+package vip.xiaonuo.disk.config.minio;
+
+import org.springframework.boot.web.servlet.FilterRegistrationBean;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.web.cors.CorsConfiguration;
+import org.springframework.web.cors.UrlBasedCorsConfigurationSource;
+import org.springframework.web.filter.CorsFilter;
+
+/**
+ * @author 吴浩炼
+ * @since 2024/05/30
+ */
+@Configuration
+public class WebMvcConfig {
+
+    /**
+     * 开启跨域
+     */
+    @Bean
+    public FilterRegistrationBean<CorsFilter> corsFilterBean() {
+        CorsConfiguration corsConfiguration = new CorsConfiguration();
+        corsConfiguration.setAllowCredentials(true);
+        corsConfiguration.addAllowedOriginPattern("*");
+        corsConfiguration.addAllowedHeader("*");
+        corsConfiguration.addAllowedMethod("*");
+        UrlBasedCorsConfigurationSource source = new UrlBasedCorsConfigurationSource();
+        source.registerCorsConfiguration("/**", corsConfiguration);
+        FilterRegistrationBean<CorsFilter> bean = new FilterRegistrationBean<>(new CorsFilter(source));
+        bean.setOrder(-111111);
+        return bean;
+    }
+}

+ 28 - 0
snowy-plugin/snowy-plugin-disk/snowy-plugin-disk-func/src/main/java/vip/xiaonuo/disk/consumer/TranscodingConsumer.java

@@ -0,0 +1,28 @@
+package vip.xiaonuo.disk.consumer;
+
+import lombok.extern.slf4j.Slf4j;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.springframework.kafka.annotation.KafkaListener;
+import org.springframework.kafka.support.Acknowledgment;
+import org.springframework.stereotype.Component;
+import vip.xiaonuo.disk.service.TranscodingService;
+
+import javax.annotation.Resource;
+import java.util.List;
+@Slf4j
+@Component
+public class TranscodingConsumer {
+
+    @Resource
+    private TranscodingService transcodingService;
+    @KafkaListener(containerFactory = "listenerFactory", topics = "TRANSCODING_RESOURCE", groupId = "transcoding_resource_group")
+    public void transcodingResourceListen(List<ConsumerRecord<String, String>> consumerRecordList, Acknowledgment acknowledgment) {
+        try {
+            log.info("transcodingResourceListen");
+            transcodingService.transcodingResourceParallel(consumerRecordList);
+            acknowledgment.acknowledge();
+        } catch (Exception e) {
+            log.error("transcodingResourceListen bath insert failed, exception is", e);
+        }
+    }
+}

+ 78 - 0
snowy-plugin/snowy-plugin-disk/snowy-plugin-disk-func/src/main/java/vip/xiaonuo/disk/consumer/config/ConsumerConfig.java

@@ -0,0 +1,78 @@
+package vip.xiaonuo.disk.consumer.config;
+
+import org.apache.kafka.common.serialization.StringDeserializer;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.context.annotation.Bean;
+import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
+import org.springframework.kafka.config.KafkaListenerContainerFactory;
+import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
+import org.springframework.kafka.listener.ConcurrentMessageListenerContainer;
+import org.springframework.kafka.listener.ContainerProperties;
+import org.springframework.stereotype.Component;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+
+@Component
+public class ConsumerConfig {
+    @Value("${kafkaproducer-config.servers}")
+    private String servers;
+
+    @Value("${kafkaproducer-config.username}")
+    private String username;
+
+    @Value("${kafkaproducer-config.pwd}")
+    private String pwd;
+    @Bean
+    KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, String>> listenerFactory() {
+        ConcurrentKafkaListenerContainerFactory<String, String> factory = new
+                ConcurrentKafkaListenerContainerFactory<>();
+        factory.setConsumerFactory(new DefaultKafkaConsumerFactory<>(consumerConfigs()));
+
+        // 3s拉取一次数据
+        factory.getContainerProperties().setIdleBetweenPolls(TimeUnit.SECONDS.toMillis(2));
+        // 此处需要额外设置为手动提交
+        factory.getContainerProperties().setAckMode(ContainerProperties.AckMode.MANUAL);
+        // 开启批量监听
+        factory.setBatchListener(true);
+
+        return factory;
+    }
+
+
+    private Map<String, Object> consumerConfigs() {
+        Map<String, Object> props = new HashMap<>();
+        //kafka消费的的地址
+        props.put(org.apache.kafka.clients.consumer.ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, servers);
+        //是否自动提交
+        props.put(org.apache.kafka.clients.consumer.ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
+
+        //请求超时时间,5/3 min
+        props.put(org.apache.kafka.clients.consumer.ConsumerConfig.REQUEST_TIMEOUT_MS_CONFIG, "100000");
+        //从poll(拉)的回话处理时长
+        props.put(org.apache.kafka.clients.consumer.ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000");
+        //超时时间,5min
+        props.put(org.apache.kafka.clients.consumer.ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "300000");
+        props.put(org.apache.kafka.clients.consumer.ConsumerConfig.MAX_POLL_INTERVAL_MS_CONFIG, "86400000");
+        //单个分区获取消息的最大值为900M
+        props.put(org.apache.kafka.clients.consumer.ConsumerConfig.MAX_PARTITION_FETCH_BYTES_CONFIG, 943718400);
+        //一次最大拉取的条数
+        props.put(org.apache.kafka.clients.consumer.ConsumerConfig.MAX_POLL_RECORDS_CONFIG, 100);
+        props.put(org.apache.kafka.clients.consumer.ConsumerConfig.FETCH_MAX_BYTES_CONFIG, 1073741824);
+        //earliest当各分区下有已提交的offset时,从提交的offset开始消费;无提交的offset时,从头开始消费
+        //latest当各分区下有已提交的offset时,从提交的offset开始消费;无提交的offset时,消费新产生的该分区下的数据
+        //none topic各分区都存在已提交的offset时,从offset后开始消费;只要有一个分区不存在已提交的offset,则抛出异常
+        props.put(org.apache.kafka.clients.consumer.ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
+        //序列化
+        props.put(org.apache.kafka.clients.consumer.ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
+        props.put(org.apache.kafka.clients.consumer.ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
+        props.put("sasl.jaas.config",
+                "org.apache.kafka.common.security.scram.ScramLoginModule required username=\"" + username + "\" password=\"" + pwd + "\";");
+        props.put("sasl.mechanism", "PLAIN");
+        props.put("security.protocol", "SASL_PLAINTEXT");
+
+        return props;
+    }
+
+}

+ 62 - 0
snowy-plugin/snowy-plugin-disk/snowy-plugin-disk-func/src/main/java/vip/xiaonuo/disk/controller/MinioController.java

@@ -0,0 +1,62 @@
+package vip.xiaonuo.disk.controller;
+
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.ResponseEntity;
+import org.springframework.web.bind.annotation.*;
+import org.springframework.web.multipart.MultipartFile;
+import vip.xiaonuo.disk.domain.UploadFile;
+import vip.xiaonuo.disk.service.MinioService;
+import vip.xiaonuo.disk.util.MinioUtil;
+
+import java.io.IOException;
+import java.util.List;
+
+/**
+ * @author 吴浩炼
+ * @since 2024/05/29
+ */
+@RestController
+@RequestMapping("/disk/minio")
+public class MinioController {
+
+    @Autowired
+    private MinioService minioService;
+
+    @Autowired
+    private MinioUtil minioUtil;
+
+    Logger logger = LoggerFactory.getLogger(MinioUtil.class);
+
+
+    @GetMapping("/checkExits")
+    public ResponseEntity<Boolean> checkExits(@RequestParam String md5, @RequestParam String fileSuffix) {
+        return ResponseEntity.ok(minioUtil.checkExist(md5, fileSuffix));
+    }
+
+    @PostMapping("/upload")
+    public ResponseEntity<Boolean> upload(String md5,@RequestParam Integer chunkIndex, @RequestParam MultipartFile chunk)
+            throws IOException {
+        return ResponseEntity.ok(minioUtil.upload(md5, chunkIndex, chunk.getInputStream(), chunk.getSize()));
+    }
+
+    @PostMapping("/merge")
+    public ResponseEntity<String> merge(@RequestParam String md5, @RequestParam Integer chunkTotal, @RequestParam String fileSuffix) {
+        return ResponseEntity.ok(minioUtil.merge(md5, chunkTotal, fileSuffix) ? "成功!" : "失败!");
+    }
+
+
+
+    @GetMapping("/delete")
+    public ResponseEntity<Boolean> delete(@RequestParam String url) {
+        System.out.println("url34:" + url);
+        return ResponseEntity.ok(minioUtil.deleteFileLink(url));
+    }
+
+    @PostMapping("/checkMd5List")
+    public ResponseEntity<List<String>> checkFilesExits(@RequestBody List<UploadFile> genericItems){
+        return ResponseEntity.ok(minioUtil.checkFilesExits(genericItems));
+    }
+}

+ 1 - 5
snowy-plugin/snowy-plugin-disk/snowy-plugin-disk-func/src/main/java/vip/xiaonuo/disk/controller/TranscodingController.java

@@ -7,7 +7,6 @@ import org.springframework.web.bind.annotation.RequestBody;
 import org.springframework.web.bind.annotation.RestController;
 import vip.xiaonuo.common.pojo.CommonResult;
 import vip.xiaonuo.disk.dto.file.TranscodingReqDTO;
-import vip.xiaonuo.disk.dto.file.TranscodingResourceReqDTO;
 import vip.xiaonuo.disk.service.TranscodingService;
 
 import javax.annotation.Resource;
@@ -38,9 +37,6 @@ public class TranscodingController {
     public CommonResult<String> transcodingVideo(@RequestBody TranscodingReqDTO transcodingReqDTO) {
         return transcodingService.transcodingVideo(transcodingReqDTO);
     }
-    @PostMapping("/transcodingResource/file")
-    public CommonResult<String> transcodingResource(@RequestBody TranscodingResourceReqDTO transcodingResourceReqDTO) {
-        return transcodingService.transcodingResource(transcodingResourceReqDTO);
-    }
+
 
 }

+ 16 - 0
snowy-plugin/snowy-plugin-disk/snowy-plugin-disk-func/src/main/java/vip/xiaonuo/disk/domain/FileLink.java

@@ -0,0 +1,16 @@
+package vip.xiaonuo.disk.domain;
+
+import lombok.Data;
+
+import org.springframework.data.annotation.Id;
+
+
+
+@Data
+public class FileLink {
+
+    @Id
+    private String id;
+
+    private String url; // 文件地址
+}

+ 12 - 0
snowy-plugin/snowy-plugin-disk/snowy-plugin-disk-func/src/main/java/vip/xiaonuo/disk/domain/UploadFile.java

@@ -0,0 +1,12 @@
+package vip.xiaonuo.disk.domain;
+
+import lombok.Data;
+
+import java.util.ArrayList;
+
+@Data
+public class UploadFile {
+    private String md5;
+    private String fileSuffix;
+    private ArrayList chunks;
+}

+ 2 - 0
snowy-plugin/snowy-plugin-disk/snowy-plugin-disk-func/src/main/java/vip/xiaonuo/disk/mapper/UploadTaskDetailMapper.java

@@ -7,4 +7,6 @@ import java.util.List;
 
 public interface UploadTaskDetailMapper extends BaseMapper<UploadTaskDetail> {
     List<Integer> selectUploadedChunkNumList(String identifier);
+
+    void deleteUploadTaskDetail(String identifier);
 }

+ 3 - 0
snowy-plugin/snowy-plugin-disk/snowy-plugin-disk-func/src/main/java/vip/xiaonuo/disk/mapper/mapping/UploadTaskDetailMapper.xml

@@ -11,4 +11,7 @@
         order by CHUNK_NUMBER asc
     </select>
 
+    <delete id="deleteUploadTaskDetail">
+      delete from  uploadtaskdetail where IDENTIFIER = #{identifier}
+    </delete>
 </mapper>

+ 47 - 0
snowy-plugin/snowy-plugin-disk/snowy-plugin-disk-func/src/main/java/vip/xiaonuo/disk/properties/KafkaProducerProperties.java

@@ -0,0 +1,47 @@
+package vip.xiaonuo.disk.properties;
+
+import org.springframework.boot.context.properties.ConfigurationProperties;
+
+/*******************************************************
+ * Kafka生产者Properties
+ * @ClassName : KafkaProducerProperties
+ * @author: lin xue peng
+ * @time: 2023-07-22 22:56:10
+ @Deseription : TODO
+  * *****************************************************/
+
+@ConfigurationProperties(prefix ="kafkaproducer-config")
+public class KafkaProducerProperties {
+    /**
+     * 账号
+     */
+    private String userName;
+
+    private String pwd;
+
+    private String servers;
+
+    public String getUserName() {
+        return userName;
+    }
+
+    public void setUserName(String userName) {
+        this.userName = userName;
+    }
+
+    public String getPwd() {
+        return pwd;
+    }
+
+    public void setPwd(String pwd) {
+        this.pwd = pwd;
+    }
+
+    public String getServers() {
+        return servers;
+    }
+
+    public void setServers(String servers) {
+        this.servers = servers;
+    }
+}

+ 15 - 0
snowy-plugin/snowy-plugin-disk/snowy-plugin-disk-func/src/main/java/vip/xiaonuo/disk/service/MinioService.java

@@ -0,0 +1,15 @@
package vip.xiaonuo.disk.service;

import java.io.InputStream;

/**
 * Chunked-upload operations against MinIO.
 *
 * @author 吴智兴
 * @since 2024/05/30
 */
public interface MinioService {

    /**
     * Checks whether a completed file with this md5 and suffix already exists.
     *
     * @param md5        file content hash
     * @param fileSuffix file extension
     * @return true if the object exists
     */
    Boolean checkExits(String md5, String fileSuffix) ;

    /**
     * Uploads one chunk of the file identified by md5.
     *
     * @param md5         file content hash
     * @param chunkIndex  1-based chunk index
     * @param inputStream chunk data
     * @param fileSize    chunk size in bytes
     * @return true when the chunk is stored (or already was)
     */
    Boolean upload(String md5,  Integer chunkIndex, InputStream inputStream, long fileSize);

    /**
     * Merges all uploaded chunks into the final object.
     *
     * @param md5        file content hash
     * @param chunkTotal expected number of chunks
     * @param fileSuffix file extension
     * @return a URL to the merged object, or a "[miss_chunk]…" marker listing missing chunks
     */
    String merge(String md5, Integer chunkTotal,String fileSuffix);
}

+ 4 - 3
snowy-plugin/snowy-plugin-disk/snowy-plugin-disk-func/src/main/java/vip/xiaonuo/disk/service/TranscodingService.java

@@ -1,8 +1,10 @@
 package vip.xiaonuo.disk.service;
 
+import org.apache.kafka.clients.consumer.ConsumerRecord;
 import vip.xiaonuo.common.pojo.CommonResult;
 import vip.xiaonuo.disk.dto.file.TranscodingReqDTO;
-import vip.xiaonuo.disk.dto.file.TranscodingResourceReqDTO;
+
+import java.util.List;
 
 /**
  * 资源转码 service
@@ -19,6 +21,5 @@ public interface TranscodingService {
      */
     CommonResult<String> transcodingVideo(TranscodingReqDTO transcodingReqDTO);
 
-    CommonResult<String> transcodingResource(TranscodingResourceReqDTO transcodingResourceReqDTO);
-    void transcodingResourceParallel(TranscodingResourceReqDTO transcodingResourceReqDTO);
+    void transcodingResourceParallel(List<ConsumerRecord<String, String>> consumerRecordList);
 }

+ 198 - 0
snowy-plugin/snowy-plugin-disk/snowy-plugin-disk-func/src/main/java/vip/xiaonuo/disk/service/impl/MinioServiceImpl.java

@@ -0,0 +1,198 @@
+package vip.xiaonuo.disk.service.impl;
+
+
+import io.minio.*;
+import io.minio.http.Method;
+import io.minio.messages.Item;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.stereotype.Service;
+import vip.xiaonuo.disk.service.MinioService;
+
+import javax.annotation.Resource;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * @author 吴浩炼
+ * @since 2024/05/30
+ */
+@Service
+public class MinioServiceImpl implements MinioService {
+
+    Logger logger = LoggerFactory.getLogger(MinioServiceImpl.class);
+
+    @Value("${ufop.minio.bucket-name}")
+    private String bucketName;
+    @Resource
+    private MinioClient minioClient;
+
+    /**
+     * 校验文件是否存在
+     *
+     * @param md5        文件md5值
+     * @param fileSuffix 文件后缀
+     * @return 是否存在
+     */
+    @Override
+    public Boolean checkExits(String md5, String fileSuffix) {
+        boolean exits = true;
+        try {
+//            检查是否存在该文件
+            minioClient.statObject(StatObjectArgs.builder().bucket(bucketName).object("files/" + md5 + fileSuffix).build());
+        } catch (Exception e) {
+            exits = false;
+
+        }
+        return exits;
+    }
+
+    /**
+     * 上传文件分片
+     *
+     * @param md5         文件md5值
+     * @param chunkIndex  分块索引
+     * @param inputStream 分块数据流
+     * @param fileSize    分块文件大小
+     * @return 上传结果
+     */
+    @Override
+    public Boolean upload(String md5, Integer chunkIndex, InputStream inputStream, long fileSize) {
+        logger.info("开始上传分片");
+        boolean exit = true;
+        try {
+            // 如果存在的话
+            minioClient.statObject(StatObjectArgs.builder().bucket(bucketName).object("chunks/" + md5 + "/" + chunkIndex).build());
+        } catch (Exception e) {
+            exit = false; // 说明不存在
+        }
+        if (exit) {
+            return exit; // 已经存在该切片,无需再次上传,断点续传的关键
+        }
+        try {
+//            开始上传分块
+            minioClient.putObject(
+                    PutObjectArgs.builder().bucket(bucketName).object("chunks/" + md5 + "/" + chunkIndex).stream(inputStream, fileSize, -1).build());
+        } catch (Exception e) {
+            logger.error("upload error", e);
+            throw new RuntimeException(String.format("minio 存储对象失败,【%s】", e.getMessage()));
+        }
+        try {
+//            关闭输入流
+            inputStream.close();
+        } catch (IOException e) {
+            throw new RuntimeException(e);
+        }
+        logger.info("上传分片结束");
+        return true;
+    }
+
+    /**
+     * 合并文件
+     *
+     * @param md5        文件md5值
+     * @param chunkTotal 文件分片总数
+     * @param fileSuffix 文件后缀
+     * @return 合并结果
+     */
+    @Override
+    public String merge(String md5, Integer chunkTotal, String fileSuffix) {
+        logger.info("开始合并");
+
+        // 获取所有分块
+        List<Item> itemList = getChunkList(md5);
+        // 获取缺失的分块
+        List<Integer> missChunkIndexList = getMissChunkIndexList(itemList, chunkTotal);
+        if (!missChunkIndexList.isEmpty()) {
+            logger.warn("miss chunk index, chunkIndexList: {}", missChunkIndexList);
+            return String.format("[miss_chunk]%s", missChunkIndexList);
+        }
+
+        // 合并文件
+        List<ComposeSource> composeSourceList = new ArrayList<>();
+        for (Item item : itemList) {
+            composeSourceList.add(ComposeSource.builder().bucket(bucketName).object(item.objectName()).build());
+        }
+        try {
+            logger.info("正在合并");
+            minioClient.composeObject(
+                    ComposeObjectArgs.builder().bucket(bucketName).object("files/" + md5 + fileSuffix).sources(composeSourceList).build());
+        } catch (Exception e) {
+            logger.error("merge error", e);
+            throw new RuntimeException(String.format("minio 合并对象失败,【%s】", e.getMessage()));
+        }
+
+        // 获取文件url
+        try {
+            logger.info("合并结束");
+            return minioClient.getPresignedObjectUrl(
+                    GetPresignedObjectUrlArgs.builder().method(Method.GET).bucket(bucketName).object("files/" + md5 + fileSuffix).expiry(1, TimeUnit.DAYS)
+                            .build());
+        } catch (Exception e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    /**
+     * 获取所有分块
+     *
+     * @param md5 文件md5值
+     * @return 分块列表
+     */
+    private List<Item> getChunkList(String md5) {
+        // 获取所有分片
+        Iterable<Result<Item>> resultIterable =
+                minioClient.listObjects(ListObjectsArgs.builder().bucket(bucketName).prefix("chunks/" + md5 + "/").recursive(false) // 是否递归查询
+                        .build());
+
+        List<Item> itemList = new ArrayList<>(); // 分块
+
+        for (Result<Item> itemResult : resultIterable) {
+            try {
+                itemList.add(itemResult.get());
+            } catch (Exception e) {
+                throw new RuntimeException(e);
+            }
+        }
+        // 分片文件排序
+        itemList.sort((o1, o2) -> {
+            String o1Name = o1.objectName();
+            String o2Name = o2.objectName();
+            int o1Index = Integer.parseInt(o1Name.substring(o1Name.lastIndexOf("/") + 1));
+            int o2Index = Integer.parseInt(o2Name.substring(o2Name.lastIndexOf("/") + 1));
+            return o1Index - o2Index;
+        });
+        return itemList;
+    }
+
+    /**
+     * 获取缺失的分片
+     *
+     * @param chunkList  分片列表
+     * @param chunkTotal 分片总数
+     * @return 缺失的分片列表
+     */
+    private List<Integer> getMissChunkIndexList(List<Item> chunkList, Integer chunkTotal) {
+        List<Integer> missChunkIndexList = new ArrayList<>(chunkTotal); // 缺失的分片文件
+        int index = 1;
+        for (Item item : chunkList) {
+            String chunkName = item.objectName();
+            int chunkIndex = Integer.parseInt(chunkName.substring(chunkName.lastIndexOf("/") + 1));
+            if (index != chunkIndex) {
+                missChunkIndexList.add(chunkIndex);
+            }
+            index++;
+        }
+        if (chunkTotal > index) {
+            for (int i = index; i <= chunkTotal; i++) {
+                missChunkIndexList.add(i);
+            }
+        }
+        return missChunkIndexList;
+    }
+
+}

+ 40 - 20
snowy-plugin/snowy-plugin-disk/snowy-plugin-disk-func/src/main/java/vip/xiaonuo/disk/service/impl/ResourceFileServiceImpl.java

@@ -25,6 +25,7 @@ import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.springframework.stereotype.Service;
 import org.springframework.transaction.annotation.Transactional;
+import vip.xiaonuo.disk.commpen.ProducerCommpen;
 import vip.xiaonuo.disk.component.ResourceFileDealComp;
 import vip.xiaonuo.disk.domain.*;
 import vip.xiaonuo.disk.dto.file.*;
@@ -32,6 +33,8 @@ import vip.xiaonuo.disk.io.QiwenFile;
 import vip.xiaonuo.disk.mapper.*;
 import vip.xiaonuo.disk.service.ResourceFileService;
 import vip.xiaonuo.disk.service.TranscodingService;
+import vip.xiaonuo.disk.util.CheckThreadPool;
+import vip.xiaonuo.disk.util.KafKaProducerUtil;
 import vip.xiaonuo.disk.util.OperationLogUtil;
 import vip.xiaonuo.disk.vo.file.UploadFileVo;
 
@@ -44,9 +47,9 @@ import java.util.Base64;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.ScheduledExecutorService;
-import java.util.concurrent.TimeUnit;
 import java.util.stream.Collectors;
 import java.util.zip.Adler32;
 import java.util.zip.CheckedOutputStream;
@@ -77,8 +80,10 @@ public class ResourceFileServiceImpl extends ServiceImpl<ResourceFileMapper, Res
     PictureFileMapper pictureFileMapper;
     @Resource
     private TranscodingService transcodingService;
-
-
+    @Resource
+    private KafKaProducerUtil kafKaProducerUtil;
+    @Resource
+    private ProducerCommpen producer;
 
     @Resource
     OperationLogUtil operationLogUtil;
@@ -124,12 +129,17 @@ public class ResourceFileServiceImpl extends ServiceImpl<ResourceFileMapper, Res
                 //上传成功同时用新线程异步转换格式用来创建预览用的文件
                 if(StringUtils.isEmpty(file.getPreviewFileUrl()))
                 {
-                    exec.schedule(() -> {
-                        TranscodingResourceReqDTO transcodingResourceReqDTO = new TranscodingResourceReqDTO();
-                        transcodingResourceReqDTO.setUserFileId(resourceUserFile.getUserFileId());
-                        transcodingResourceReqDTO.setUserId(userId);
-                        transcodingService.transcodingResourceParallel(transcodingResourceReqDTO);
-                    }, 10, TimeUnit.SECONDS);
+                    try {
+                    //    ExecutorService threadPool = CheckThreadPool.getExecutor();
+                    //    threadPool.execute(() -> {
+                            TranscodingResourceReqDTO transcodingResourceReqDTO = new TranscodingResourceReqDTO();
+                            transcodingResourceReqDTO.setUserFileId(resourceUserFile.getUserFileId());
+                            transcodingResourceReqDTO.setUserId(userId);
+                            kafKaProducerUtil.sendTranscodingResource(producer.getProducer(), JSON.toJSONString(transcodingResourceReqDTO));
+                     //   });
+                    } catch (Exception e) {
+                        throw new RuntimeException(e);
+                    }
                 }
 
 
@@ -150,12 +160,14 @@ public class ResourceFileServiceImpl extends ServiceImpl<ResourceFileMapper, Res
             uploadFileVo.setUserFileId(resourceUserFile.getUserFileId());
 
         } else {
+            //根据MD5值清空表
+            uploadTaskDetailMapper.deleteUploadTaskDetail(uploadFileDTO.getIdentifier());
             uploadFileVo.setSkipUpload(false);
             uploadFileVo.setUploadStatus("0");
-            List<Integer> uploaded = uploadTaskDetailMapper.selectUploadedChunkNumList(uploadFileDTO.getIdentifier());
-            if (uploaded != null && !uploaded.isEmpty()) {
-                uploadFileVo.setUploaded(uploaded);
-            } else {
+          //  List<Integer> uploaded = uploadTaskDetailMapper.selectUploadedChunkNumList(uploadFileDTO.getIdentifier());
+//            if (uploaded != null && !uploaded.isEmpty()) {
+//                uploadFileVo.setUploaded(uploaded);
+//            } else {
 
                 LambdaQueryWrapper<UploadTask> lambdaQueryWrapper = new LambdaQueryWrapper<>();
                 lambdaQueryWrapper.eq(UploadTask::getIdentifier, uploadFileDTO.getIdentifier());
@@ -171,7 +183,7 @@ public class ResourceFileServiceImpl extends ServiceImpl<ResourceFileMapper, Res
                     uploadTask.setUserId(userId);
                     uploadTaskMapper.insert(uploadTask);
                 }
-            }
+         //   }
 
         }
 
@@ -250,13 +262,21 @@ public class ResourceFileServiceImpl extends ServiceImpl<ResourceFileMapper, Res
                 }
                 uploadFileVo.setUserFileId(resourceUserFile.getUserFileId());
                 uploadFileVo.setUploadStatus("1");
+                ExecutorService threadPool = CheckThreadPool.getExecutor();
+                try {
+            //        threadPool.execute(() -> {
+                        TranscodingResourceReqDTO transcodingResourceReqDTO = new TranscodingResourceReqDTO();
+                        transcodingResourceReqDTO.setUserFileId(resourceUserFile.getUserFileId());
+                        transcodingResourceReqDTO.setUserId(userId);
+                        kafKaProducerUtil.sendTranscodingResource(producer.getProducer(), JSON.toJSONString(transcodingResourceReqDTO));
+            //        });
+                } catch (Exception e) {
+                    throw new RuntimeException(e);
+                }
                 //上传成功同时用新线程异步转换格式用来创建预览用的文件
-                exec.schedule(() -> {
-                    TranscodingResourceReqDTO transcodingResourceReqDTO = new TranscodingResourceReqDTO();
-                    transcodingResourceReqDTO.setUserFileId(resourceUserFile.getUserFileId());
-                    transcodingResourceReqDTO.setUserId(userId);
-                    transcodingService.transcodingResourceParallel(transcodingResourceReqDTO);
-                }, 10, TimeUnit.SECONDS);
+
+
+
 
                 if (relativePath.contains("/")) {
                     QiwenFile finalQiwenFile = qiwenFile;

+ 82 - 160
snowy-plugin/snowy-plugin-disk/snowy-plugin-disk-func/src/main/java/vip/xiaonuo/disk/service/impl/TranscodingServiceImpl.java

@@ -1,9 +1,11 @@
 package vip.xiaonuo.disk.service.impl;
 
 import cn.hutool.core.util.IdUtil;
+import com.alibaba.fastjson.JSON;
 import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
 import com.qiwenshare.common.util.DateUtil;
 import lombok.extern.slf4j.Slf4j;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
 import org.springframework.stereotype.Service;
 import org.springframework.transaction.annotation.Transactional;
 import vip.xiaonuo.auth.core.util.StpLoginUserUtil;
@@ -26,6 +28,7 @@ import vip.xiaonuo.disk.util.VideoConverter;
 import javax.annotation.Resource;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Date;
 import java.util.List;
 
 /**
@@ -53,7 +56,6 @@ public class TranscodingServiceImpl implements TranscodingService {
 
 
 
-
     @Resource
     private ResourceUserFileMapper resourceUserFileMapper;
 
@@ -111,184 +113,104 @@ public class TranscodingServiceImpl implements TranscodingService {
         return CommonResult.ok("格式转换成功");
     }
 
-    @Override
-    public CommonResult<String> transcodingResource(TranscodingResourceReqDTO transcodingResourceReqDTO) {
-        // 校验目标文件是否存在,是否是本人的
-        String userId = StpLoginUserUtil.getLoginUser().getId();
-        QueryWrapper<ResourceUserFile> queryWrapper = new QueryWrapper<>();
-        queryWrapper.lambda().eq(ResourceUserFile::getUserFileId, transcodingResourceReqDTO.getUserFileId());
-        ResourceUserFile userFile = resourceUserFileMapper.selectOne(queryWrapper);
-        if (userFile == null || !userFile.getUserId().equals(userId)) {
-            return CommonResult.error("文件不存在或不是本人的");
-        }
 
-        List<String> videoList= Arrays.asList("wmv","avi","flv","mpeg","mpg","rmvb","mov","mkv");
-
-        List<String> wordList= Arrays.asList("doc","docx","ppt","pptx","xls","xlsx","pdf");
 
+    @Override
+    public void transcodingResourceParallel(List<ConsumerRecord<String, String>> consumerRecordList) {
+
+        log.info("===================格式转换开始时间============================>"+ cn.hutool.core.date.DateUtil.format(new Date(),"yyyy-MM-dd HH:mm:ss"));
+        for (ConsumerRecord<String, String> record : consumerRecordList) {
+            if (record.value() != null) {
+                TranscodingResourceReqDTO transcodingResourceReqDTO = JSON.parseObject(record.value(), TranscodingResourceReqDTO.class);
+                // 校验目标文件是否存在,是否是本人的
+                String userId = transcodingResourceReqDTO.getUserId();
+                QueryWrapper<ResourceUserFile> queryWrapper = new QueryWrapper<>();
+                queryWrapper.lambda().eq(ResourceUserFile::getUserFileId, transcodingResourceReqDTO.getUserFileId());
+                ResourceUserFile userFile = resourceUserFileMapper.selectOne(queryWrapper);
+                if (userFile == null || !userFile.getUserId().equals(userId)) {
+                    System.out.println("文件不存在或不是本人的");
+                }
 
-        ResourceFile fileBean = resourceFileMapper.selectById(userFile.getFileId());
-        if (fileBean == null) {
-            return CommonResult.error("文件不存在");
-        }
-        if(videoList.contains(userFile.getExtendName())){
-            transcodingResourceReqDTO.setTranscodingType(0);
-            transcodingResourceReqDTO.setFormat("mp4");
-        }
+                List<String> videoList= Arrays.asList("wmv","avi","flv","mpeg","mpg","rmvb","mov","mkv");
 
-        if(wordList.contains(userFile.getExtendName())){
-            transcodingResourceReqDTO.setTranscodingType(1);
-            transcodingResourceReqDTO.setFormat("pdf");
-        }
+                List<String> wordList= Arrays.asList("doc","docx","ppt","pptx","xls","xlsx");
 
 
-        // 构建转码格式列表
-        ArrayList<String> formatList = new ArrayList<>();
-        formatList.add(transcodingResourceReqDTO.getFormat());
-        // 判断视频还是图片
-        if(transcodingResourceReqDTO.getTranscodingType()!= null)
-        {
-            if (transcodingResourceReqDTO.getTranscodingType() == 0) {
-                try {
-                    // 转码视频
-                    // mp4 wmv avi flv mpeg mpg rmvb mov 互相转
-                    videoConverter.convertAndUpload(fileBean.getFileUrl(), fileBean.getFileId(), new String[]{transcodingResourceReqDTO.getFormat()});
-                   //根据文件id修改预览地址
-                    if(videoList.contains(userFile.getExtendName())||wordList.contains(userFile.getExtendName())){
-                        ResourceFile newFile = new ResourceFile();
-                        newFile.setFileId(fileBean.getFileId());
-                        newFile.setPreviewFileUrl("converted/" + fileBean.getFileId() + "." + transcodingResourceReqDTO.getFormat());
-                        resourceFileMapper.updateById(newFile);
-                    }
-                } catch (Exception ex) {
-                    log.error(ex.getMessage());
-                    return CommonResult.error("格式转换失败");
+                ResourceFile fileBean = resourceFileMapper.selectById(userFile.getFileId());
+                if (fileBean == null) {
+                    System.out.println("文件不存在");
                 }
-            } else if (transcodingResourceReqDTO.getTranscodingType() == 1) {
-                // 转码文档
-                // pdf doc docx ppt pptx xls xlsx
-                // doc转docx,ppt转pptx,xls转xlsx 所有均转pdf
-                // 判断目标格式
-
-                try {
-                    if ("pdf".equals(transcodingResourceReqDTO.getFormat())) {
-                        pdfUtils.convertToPdf(fileBean.getFileUrl(), fileBean.getFileId() + "." + transcodingResourceReqDTO.getFormat());
-                    } else {
-                        officeConverter.convertAndUpload(fileBean.getFileUrl(), fileBean.getFileId() + "." + transcodingResourceReqDTO.getFormat());
-                    }
-                    //根据文件id修改预览地址
-                    ResourceFile newFile = new ResourceFile();
-                    newFile.setFileId(fileBean.getFileId());
-                    if(videoList.contains(userFile.getExtendName())||wordList.contains(userFile.getExtendName())){
-                        newFile.setPreviewFileUrl("converted/" + fileBean.getFileId() + "." + transcodingResourceReqDTO.getFormat());
-                    }else{
-                        newFile.setPreviewFileUrl(fileBean.getFileUrl());
-                    }
-                    resourceFileMapper.updateById(newFile);
-                } catch (Exception ex) {
-                    log.error("格式转换:"+ex.getMessage());
-                    return CommonResult.error("格式转换失败");
+                if(videoList.contains(userFile.getExtendName())){
+                    transcodingResourceReqDTO.setTranscodingType(0);
+                    transcodingResourceReqDTO.setFormat("mp4");
                 }
-            }
-        }
-        else
-        {
-            ResourceFile newFile = new ResourceFile();
-            newFile.setFileId(fileBean.getFileId());
-            newFile.setPreviewFileUrl(fileBean.getFileUrl());
-            resourceFileMapper.updateById(newFile);
-        }
-        return CommonResult.ok("格式转换成功");
-    }
-
-    @Override
-    public void transcodingResourceParallel(TranscodingResourceReqDTO transcodingResourceReqDTO) {
-        // 校验目标文件是否存在,是否是本人的
-        String userId = transcodingResourceReqDTO.getUserId();
-        QueryWrapper<ResourceUserFile> queryWrapper = new QueryWrapper<>();
-        queryWrapper.lambda().eq(ResourceUserFile::getUserFileId, transcodingResourceReqDTO.getUserFileId());
-        ResourceUserFile userFile = resourceUserFileMapper.selectOne(queryWrapper);
-        if (userFile == null || !userFile.getUserId().equals(userId)) {
-            System.out.println("文件不存在或不是本人的");
-        }
-
-        List<String> videoList= Arrays.asList("wmv","avi","flv","mpeg","mpg","rmvb","mov","mkv");
-
-        List<String> wordList= Arrays.asList("doc","docx","ppt","pptx","xls","xlsx");
-
-
-        ResourceFile fileBean = resourceFileMapper.selectById(userFile.getFileId());
-        if (fileBean == null) {
-            System.out.println("文件不存在");
-        }
-        if(videoList.contains(userFile.getExtendName())){
-            transcodingResourceReqDTO.setTranscodingType(0);
-            transcodingResourceReqDTO.setFormat("mp4");
-        }
 
-        if(wordList.contains(userFile.getExtendName())){
-            transcodingResourceReqDTO.setTranscodingType(1);
-            transcodingResourceReqDTO.setFormat("pdf");
-        }
+                if(wordList.contains(userFile.getExtendName())){
+                    transcodingResourceReqDTO.setTranscodingType(1);
+                    transcodingResourceReqDTO.setFormat("pdf");
+                }
 
 
-        // 构建转码格式列表
-        ArrayList<String> formatList = new ArrayList<>();
-        formatList.add(transcodingResourceReqDTO.getFormat());
-        // 判断视频还是图片
-        if(transcodingResourceReqDTO.getTranscodingType()!= null)
-        {
-            if (transcodingResourceReqDTO.getTranscodingType() == 0) {
-                try {
-                    // 转码视频
-                    // mp4 wmv avi flv mpeg mpg rmvb mov 互相转
-                    videoConverter.convertAndUpload(fileBean.getFileUrl(), fileBean.getFileId(), new String[]{transcodingResourceReqDTO.getFormat()});
-                    //根据文件id修改预览地址
-                    if(videoList.contains(userFile.getExtendName())||wordList.contains(userFile.getExtendName())){
-                        ResourceFile newFile = new ResourceFile();
-                        newFile.setFileId(fileBean.getFileId());
-                        newFile.setPreviewFileUrl("converted/" + fileBean.getFileId() + "." + transcodingResourceReqDTO.getFormat());
-                        resourceFileMapper.updateById(newFile);
+                // 构建转码格式列表
+                ArrayList<String> formatList = new ArrayList<>();
+                formatList.add(transcodingResourceReqDTO.getFormat());
+                // 判断视频还是图片
+                if(transcodingResourceReqDTO.getTranscodingType()!= null)
+                {
+                    if (transcodingResourceReqDTO.getTranscodingType() == 0) {
+                        try {
+                            // 转码视频
+                            // mp4 wmv avi flv mpeg mpg rmvb mov 互相转
+                            videoConverter.convertAndUpload(fileBean.getFileUrl(), fileBean.getFileId(), new String[]{transcodingResourceReqDTO.getFormat()});
+                            //根据文件id修改预览地址
+                            if(videoList.contains(userFile.getExtendName())||wordList.contains(userFile.getExtendName())){
+                                ResourceFile newFile = new ResourceFile();
+                                newFile.setFileId(fileBean.getFileId());
+                                newFile.setPreviewFileUrl("converted/" + fileBean.getFileId() + "." + transcodingResourceReqDTO.getFormat());
+                                resourceFileMapper.updateById(newFile);
+                            }
+                        } catch (Exception ex) {
+                            log.error(ex.getMessage());
+                            System.out.println("格式转换失败");
+                        }
+                    } else if (transcodingResourceReqDTO.getTranscodingType() == 1) {
+                        // 转码文档
+                        // pdf doc docx ppt pptx xls xlsx
+                        // doc转docx,ppt转pptx,xls转xlsx 所有均转pdf
+                        // 判断目标格式
+                        try {
+                            if ("pdf".equals(transcodingResourceReqDTO.getFormat())) {
+                                pdfUtils.convertToPdf(fileBean.getFileUrl(), fileBean.getFileId() + "." + transcodingResourceReqDTO.getFormat());
+                            } else {
+                                officeConverter.convertAndUpload(fileBean.getFileUrl(), fileBean.getFileId() + "." + transcodingResourceReqDTO.getFormat());
+                            }
+                            //根据文件id修改预览地址
+                            ResourceFile newFile = new ResourceFile();
+                            newFile.setFileId(fileBean.getFileId());
+                            if(videoList.contains(userFile.getExtendName())||wordList.contains(userFile.getExtendName())){
+                                newFile.setPreviewFileUrl("converted/" + fileBean.getFileId() + "." + transcodingResourceReqDTO.getFormat());
+                            }else{
+                                newFile.setPreviewFileUrl(fileBean.getFileUrl());
+                            }
+                            resourceFileMapper.updateById(newFile);
+                        } catch (Exception ex) {
+                            log.error("格式转换:"+ex.getMessage());
+                            System.out.println("格式转换失败");
+                        }
                     }
-                } catch (Exception ex) {
-                    log.error(ex.getMessage());
-                    System.out.println("格式转换失败");
                 }
-            } else if (transcodingResourceReqDTO.getTranscodingType() == 1) {
-                // 转码文档
-                // pdf doc docx ppt pptx xls xlsx
-                // doc转docx,ppt转pptx,xls转xlsx 所有均转pdf
-                // 判断目标格式
-
-                try {
-                    if ("pdf".equals(transcodingResourceReqDTO.getFormat())) {
-                        pdfUtils.convertToPdf(fileBean.getFileUrl(), fileBean.getFileId() + "." + transcodingResourceReqDTO.getFormat());
-                    } else {
-                        officeConverter.convertAndUpload(fileBean.getFileUrl(), fileBean.getFileId() + "." + transcodingResourceReqDTO.getFormat());
-                    }
-                    //根据文件id修改预览地址
+                else
+                {
                     ResourceFile newFile = new ResourceFile();
                     newFile.setFileId(fileBean.getFileId());
-                    if(videoList.contains(userFile.getExtendName())||wordList.contains(userFile.getExtendName())){
-                        newFile.setPreviewFileUrl("converted/" + fileBean.getFileId() + "." + transcodingResourceReqDTO.getFormat());
-                    }else{
-                        newFile.setPreviewFileUrl(fileBean.getFileUrl());
-                    }
+                    newFile.setPreviewFileUrl(fileBean.getFileUrl());
                     resourceFileMapper.updateById(newFile);
-                } catch (Exception ex) {
-                    log.error("格式转换:"+ex.getMessage());
-                    System.out.println("格式转换失败");
                 }
+                log.info("格式转换成功");
             }
         }
-        else
-        {
-            ResourceFile newFile = new ResourceFile();
-            newFile.setFileId(fileBean.getFileId());
-            newFile.setPreviewFileUrl(fileBean.getFileUrl());
-            resourceFileMapper.updateById(newFile);
-        }
-        System.out.println("格式转换成功");
+        log.info("===================格式转换结束============================>"+ cn.hutool.core.date.DateUtil.format(new Date(),"yyyy-MM-dd HH:mm:ss"));
+
     }
 
 

+ 1 - 1
snowy-plugin/snowy-plugin-disk/snowy-plugin-disk-func/src/main/java/vip/xiaonuo/disk/util/CheckThreadPool.java

@@ -8,7 +8,7 @@ import java.util.concurrent.TimeUnit;
 /**
  * Created with IntelliJ IDEA.
  *
- * @Author: zouwang
+ * @Author: pans
  * @Date: 2024/04/18/10:29
  * @Description:
  */

+ 41 - 0
snowy-plugin/snowy-plugin-disk/snowy-plugin-disk-func/src/main/java/vip/xiaonuo/disk/util/CustomThreadPoolFactory.java

@@ -0,0 +1,41 @@
+package vip.xiaonuo.disk.util;
+
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Primary;
+
+import javax.annotation.PreDestroy;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+
+@Slf4j
+public class CustomThreadPoolFactory {
+
+    private static ExecutorService executorService;
+
+
+    // 获取线程池的工厂方法
+    public static synchronized ExecutorService getThreadPool() {
+        if (executorService == null || executorService.isShutdown()) {
+            executorService = createThreadPool();
+        }
+        return executorService;
+    }
+
+    // 创建线程池的方法
+    @Bean(name = "CustomThreadPoolFactorythreadPool")
+    @Primary
+    private static ExecutorService createThreadPool() {
+        // 这里可以根据需要进行线程池的配置
+        return Executors.newFixedThreadPool(300);
+    }
+
+    // 关闭线程池的方法
+    @PreDestroy
+    public static void shutdownThreadPool() {
+        if (executorService != null && !executorService.isShutdown()) {
+            log.info("======================shutdownThread");
+            executorService.shutdown();
+        }
+    }
+}

+ 105 - 0
snowy-plugin/snowy-plugin-disk/snowy-plugin-disk-func/src/main/java/vip/xiaonuo/disk/util/KafKaProducerUtil.java

@@ -0,0 +1,105 @@
+package vip.xiaonuo.disk.util;
+
+import org.apache.kafka.clients.producer.*;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.data.redis.core.StringRedisTemplate;
+import org.springframework.stereotype.Component;
+
+import javax.annotation.Resource;
+import java.util.Properties;
+
+/*******************************************************
+ * kafka生产者
+ *
+ * @ClassName : KafKaProducerUtil
+ * @author: lin xue peng
+ * @time: 2023-07-11 13:12:25
+ *****************************************************/
+@Component
+public class KafKaProducerUtil {
+	@Resource
+	private StringRedisTemplate stringRedisTemplate;
+
+	private static String AGGREGATION_FAIL_KEY = "alarmCenter:send_message_error";
+
+	private static final Logger log = LoggerFactory
+			.getLogger(KafKaProducerUtil.class);
+
+	/**
+	 * 进行kafka的Properties配置初始化
+	 *
+	 * @return
+	 */
+	public static Producer<String, String> initProps(String userName,
+			String pwd, String nots) {
+
+		Properties props = new Properties();
+		// 初始化参数
+		props.put("bootstrap.servers", nots);
+		props.put("acks", "-1");// ack方式,all,会等所有的commit最慢的方式
+		props.put("retries ", 1);// 失败是否重试,设置1会有可能产生重复数据
+		props.put("buffer.memory", 67108864);// 整个producer可以用于buffer的内存大小
+		props.put("max.request.size", 13631488); // 13MB
+		props.put("linger.ms", 0); // 等多久,如果buffer没满,比如设为1,即消息发送会多1ms的延迟,如果buffer没满
+		props.put("key.serializer",
+				"org.apache.kafka.common.serialization.StringSerializer");
+		props.put("value.serializer",
+				"org.apache.kafka.common.serialization.StringSerializer");
+		// 用户名密码方式 begin
+		props.put("sasl.jaas.config",
+				"org.apache.kafka.common.security.scram.ScramLoginModule required username=\""
+						+ userName + "\" password=\"" + pwd + "\";");
+		props.put("security.protocol", "SASL_PLAINTEXT");
+
+		props.put("sasl.mechanism", "PLAIN");
+		Producer<String, String> producer = new KafkaProducer<>(props);
+
+		log.info("初始化Kafka生产者配置信息成功~~~");
+		return producer;
+	}
+
+	/***
+	 * 格式转换
+	 *
+	 * @param producer
+	 * @param message
+	 */
+	public void sendTranscodingResource(Producer<String, String> producer,
+			String message) {
+		try {
+			log.info("kafka的Properties信息初始化成功~~~");
+			ProducerRecord<String, String> msg = new ProducerRecord<String, String>(
+					"TRANSCODING_RESOURCE", message);
+			producer.send(msg, new Callback() {
+				@Override
+				public void onCompletion(RecordMetadata metadata,
+						Exception exception) {
+					if (exception != null) {
+						log.error("主题失败TRANSCODING_RESOURCE换 ===========================>"+exception);
+						// 发送失败的异常处理!!!
+						log.error("发送TRANSCODING_RESOURCE 主题失败数据为:{}",
+								message);
+						stringRedisTemplate
+								.opsForValue().set(
+										AGGREGATION_FAIL_KEY
+												+ ":TRANSCODING_RESOURCE",
+										message);
+					} else {
+						// 处理发送成功的结果!!!
+					}
+				}
+			});
+			log.info("发送TRANSCODING_RESOURCE 主题成功数据为:{}", message);
+			producer.flush();
+		} catch (Exception e) {
+			log.error("发送TRANSCODING_RESOURCE 主题失败===========================>", e,e.getMessage());
+			log.error("发送TRANSCODING_RESOURCE 主题失败数据为:{}", message);
+
+		}
+	}
+
+
+
+
+}

+ 364 - 0
snowy-plugin/snowy-plugin-disk/snowy-plugin-disk-func/src/main/java/vip/xiaonuo/disk/util/MinioUtil.java

@@ -0,0 +1,364 @@
+package vip.xiaonuo.disk.util;
+
+
+import io.minio.*;
+import io.minio.messages.DeleteError;
+import io.minio.messages.DeleteObject;
+import io.minio.messages.Item;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.stereotype.Component;
+import vip.xiaonuo.disk.domain.FileLink;
+import vip.xiaonuo.disk.domain.UploadFile;
+
+import javax.annotation.Resource;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.stream.Collectors;
+import java.util.stream.IntStream;
+
+
/**
 * MinIO helper for chunked uploads: existence checks, chunk upload, chunk
 * listing, and merging chunks into the final object.
 * Chunks are stored under "chunks/<md5>/<index>"; merged files under "files/".
 */
@Component
public class MinioUtil {

    // MinIO server endpoint, e.g. http://host:9000; also used to build public file URLs
    @Value("${ufop.minio.endpoint}")
    private String endpoint;

    // Access credentials for the MinIO server
    @Value("${ufop.minio.access-key}")
    private String accessKey;

    @Value("${ufop.minio.secret-key}")
    private String secretKey;

    // Target bucket for both chunk objects and merged files
    @Value("${ufop.minio.bucket-name}")
    private String bucket;

    // Pre-configured MinIO SDK client, injected by the container
    @Resource
    private MinioClient minioClient;


    Logger logger = LoggerFactory.getLogger(MinioUtil.class);
+
+
+    /**
+     * @param md5        文件md5值
+     * @param fileSuffix 后缀
+     * @return 是否存在该文件
+     */
+    public Boolean checkExist(String md5, String fileSuffix) {
+        boolean exits = true;
+        try {
+            minioClient.statObject(StatObjectArgs.builder().bucket(bucket).object("files/" + md5 + fileSuffix).build());
+        } catch (Exception e) {
+            exits = false;
+        }
+        return exits;
+    }
+
+    /**
+     * 检查文件列表中的各个文件是否存在
+     * @param md5List
+     * @return
+     */
+    public List<String> checkFilesExits(List<UploadFile> md5List) {
+        ArrayList<String> noExitList = new ArrayList<>();
+        md5List.forEach(item -> {
+            try {
+                logger.info("校验是否存在的文件名是: files/" + item.getMd5() + item.getFileSuffix());
+                minioClient.statObject(StatObjectArgs.builder().bucket(bucket).object("files/" + item.getMd5() + item.getFileSuffix()).build());
+            } catch (Exception e) {
+                logger.info("该文件需要上传,md5值是:" + item.getMd5());
+                // 未存在的文件添加到队列中,前端仍需上传
+                noExitList.add(item.getMd5());
+            }
+        });
+        return noExitList;
+    }
+
+    /**
+     * 上传文件分块
+     *
+     * @param md5         文件md5值
+     * @param chunkIndex  分块索引
+     * @param inputStream 分块数据流
+     * @param fileSize    分块文件大小
+     * @return 上传结果
+     */
+    public Boolean upload(String md5, Integer chunkIndex, InputStream inputStream, long fileSize) {
+        logger.info("开始上传, 文件" + md5);
+        logger.info("开始上传, 文件" + chunkIndex);
+
+        boolean exits = true; // 存在该分块了
+        // 判断分块是否存在
+        try {
+            minioClient.statObject(StatObjectArgs.builder().bucket(bucket).object("chunks/" + md5 + "/" + chunkIndex).build());
+        } catch (Exception e) {
+            logger.info("该分块不存在,需要上传");
+            exits = false;
+        }
+        // 如果存在,直接返回true,否则进行上传分片
+        if (exits) {
+            logger.info("该分块存在,无需上传...");
+            return exits;
+        }
+        logger.info("第" + chunkIndex + "分块开始上传...");
+        // Upload known sized input stream.
+        try {
+            minioClient.putObject(
+                    PutObjectArgs.builder().bucket(bucket).object("chunks/" + md5 + "/" + chunkIndex).stream(
+                            inputStream, fileSize, -1).build());
+        } catch (Exception e) {
+            logger.error("上传分块错误", e);
+        }
+        try {
+            inputStream.close();
+        } catch (IOException e) {
+            logger.error("关闭流失败", e);
+            throw new RuntimeException(e);
+        }
+        logger.info("上传分片结束");
+        return true;
+    }
+
+
+    /**
+     * 通过MD5获取某个文件的分块列表
+     *
+     * @param md5
+     * @return 分块列表
+     */
+    public List<Item> getChunkList(String md5) {
+        Iterable<Result<Item>> results = minioClient.listObjects(
+                ListObjectsArgs.builder()
+                        .bucket(bucket)
+                        .prefix("chunks/" + md5 + "/")
+                        .recursive(false) // 只有一层,不必递归
+                        .build());
+        ArrayList<Item> items = new ArrayList<>();
+        results.forEach(result -> {
+            try {
+                items.add(result.get());
+            } catch (Exception e) {
+                logger.error("获取分块列表报错", e);
+            }
+        });
+        // 需要进行排序再进行Compose
+        items.sort((o1, o2) -> {
+            String o1Name = o1.objectName(); // 返回的是在minio bucket中的路径
+            String o2Name = o2.objectName();
+            int o1Index = Integer.parseInt(o1Name.substring(o1Name.lastIndexOf("/") + 1));
+            int o2Index = Integer.parseInt(o2Name.substring(o2Name.lastIndexOf("/") + 1));
+            return o1Index - o2Index; // 谁小谁在前
+        });
+        return items;
+    }
+
+
+    /**
+     * Compute which chunk indices (1..chunkTotal, inclusive) are missing from storage.
+     *
+     * Chunk objects are stored as "chunks/{md5}/{index}" with 1-based indices, so each
+     * uploaded chunk's index is parsed from the last path segment of its object name.
+     * Any expected index with no corresponding object must be (re)uploaded.
+     *
+     * NOTE(review): the previous nested-loop version never checked the last index
+     * (loop ran while i < chunkTotal), ignored its inner loop variable when reading
+     * chunkList.get(i - 1), and double-incremented i on a mismatch — so gaps could be
+     * mis-reported and an IndexOutOfBoundsException was possible. This presence-scan
+     * replaces it.
+     *
+     * @param chunkList  chunk objects currently present in MinIO (any order)
+     * @param chunkTotal total number of chunks the client intends to upload
+     * @return 1-based indices of chunks that still need to be uploaded
+     */
+    private List<Integer> getMissChunkIndexList(List<Item> chunkList, Integer chunkTotal) {
+        // Mark which indices are present; slot 0 is unused because chunks are 1-based.
+        boolean[] present = new boolean[chunkTotal + 1];
+        for (Item item : chunkList) {
+            String chunkName = item.objectName();
+            int index = Integer.parseInt(chunkName.substring(chunkName.lastIndexOf("/") + 1));
+            if (index >= 1 && index <= chunkTotal) { // ignore stray objects outside the expected range
+                present[index] = true;
+            }
+        }
+        // Every expected index not marked present is missing; when chunkList is
+        // empty this naturally yields the full 1..chunkTotal range, matching the
+        // old empty-list branch.
+        return IntStream.rangeClosed(1, chunkTotal)
+                .filter(i -> !present[i])
+                .boxed()
+                .collect(Collectors.toList());
+    }
+
+    /**
+     * Merge the uploaded chunks "chunks/{md5}/{index}" into a single object
+     * "files/{md5}{fileSuffix}", record the resulting URL, then delete the chunks.
+     *
+     * NOTE(review): insertUrl() currently always returns false (its MongoDB write is
+     * commented out), so this method returns false even after a successful compose —
+     * confirm intended behavior once persistence is re-enabled.
+     *
+     * @param md5        MD5 of the whole file; used as both the chunk folder name and
+     *                   the merged object's base name
+     * @param chunkTotal expected number of chunks, as reported by the client
+     * @param fileSuffix file extension including the dot (e.g. ".jpg")
+     * @return true only when compose, URL insert and chunk cleanup all succeed
+     */
+    public Boolean merge(String md5, Integer chunkTotal, String fileSuffix) {
+        logger.info("开始合并");
+        // Fetch all uploaded chunks, already sorted by numeric index (see getChunkList).
+        List<Item> itemList = getChunkList(md5);
+
+        // Determine which chunk indices are still missing.
+        List<Integer> missChunkIndexList = getMissChunkIndexList(itemList, chunkTotal);
+
+        if (!missChunkIndexList.isEmpty()) { // some chunks were never uploaded — abort before composing
+            logger.warn("缺失分块,缺失列表为:{},需要重新上传", missChunkIndexList);
+            return false;
+        }
+
+        // Build the compose source list; itemList order (ascending index) is preserved.
+        List<ComposeSource> sourceObjectList = new ArrayList<ComposeSource>();
+        for (Item item : itemList) {
+            sourceObjectList.add(
+                    ComposeSource.builder().bucket(bucket).object(item.objectName()).build());
+        }
+
+        // Server-side compose of the chunks into the final "files/{md5}{suffix}" object.
+        try {
+            minioClient.composeObject(
+                    ComposeObjectArgs.builder()
+                            .bucket(bucket)
+                            .object("files/" + md5 + fileSuffix)
+                            .sources(sourceObjectList)
+                            .build());
+
+        } catch (Exception e) {
+            logger.warn("合并过程中报错:", e);
+            logger.warn("合并失败...");
+            return false;
+        }
+        logger.info("合并成功了...");
+        // Public URL shape, e.g. http://192.168.123.101:9000/sph/files/71149ab18f8279400e4ba5269ae295f5.jpg
+        String url = endpoint + "/" + bucket + "/" + "files/" + md5 + fileSuffix;
+        logger.info("插入的的url是:" + url);
+        // Record the URL, then clean up the now-redundant chunk objects.
+        Boolean insertResult = this.insertUrl(url);
+        Boolean deleteResult = this.deleteChunks(md5);
+        logger.info(insertResult.toString());
+        logger.info(deleteResult.toString());
+        if (insertResult && deleteResult) {
+            logger.info("插入成功了...");
+            return true;
+        } else {
+            logger.error("插入失败...");
+            return false;
+        }
+
+
+    }
+
+
+    /**
+     * Record the URL of a merged file and report whether it was persisted.
+     *
+     * NOTE(review): the MongoDB persistence was commented out, yet the method still
+     * returned false — which made merge() report failure even after a successful
+     * compose (and after the chunks were already deleted). Until persistence is
+     * re-enabled, treat the call as a successful no-op so the upload pipeline can
+     * complete.
+     *
+     * @param url public URL of the merged object
+     * @return true when the record is considered inserted
+     */
+    public Boolean insertUrl(String url) {
+        FileLink fileLink = new FileLink();
+        fileLink.setUrl(url);
+        // TODO re-enable MongoDB persistence and derive the return value from it:
+        //   mongoTemplate.insert(fileLink);
+        //   List<FileLink> fileLinks = mongoTemplate.find(Query.query(Criteria.where("url").is(url)), FileLink.class);
+        return true;
+    }
+
+
+
+
+    /**
+     * Delete the merged file referenced by {@code url} from MinIO.
+     *
+     * The object key is everything from "files/" onward (e.g. "files/{md5}{suffix}").
+     * Deleting an already-absent object is treated as success so the operation is
+     * idempotent.
+     *
+     * NOTE(review): previously this printed via System.out, threw
+     * StringIndexOutOfBoundsException on URLs without "files/", and returned false
+     * unconditionally so callers could never observe success — all fixed below.
+     *
+     * @param url full URL of the merged object
+     * @return true when the object is gone (deleted now or already absent)
+     */
+    public Boolean deleteFileLink(String url) {
+        logger.info("url是:" + url);
+        int filePosIndex = url.lastIndexOf("files/");
+        if (filePosIndex < 0) { // guard: substring(-1) used to throw on malformed input
+            logger.warn("url中未找到files/前缀,无法删除:" + url);
+            return false;
+        }
+        String filePos = url.substring(filePosIndex);  // object key inside the bucket
+        logger.info("filePos是:" + filePos);
+        // NOTE(review): the md5 (file name without extension) was computed here but
+        // never used; recompute it when FileLink record deletion is implemented.
+
+        // 1. Check existence first; statObject throws when the object is absent.
+        boolean isExist = true;
+        try {
+            minioClient.statObject(
+                    StatObjectArgs.builder().bucket(bucket).object(filePos).build());
+        } catch (Exception e) {
+            isExist = false;
+        }
+
+        if (isExist) {
+            try {
+                minioClient.removeObject(
+                        RemoveObjectArgs.builder().bucket(bucket).object(filePos).build());
+            } catch (Exception e) {
+                // keep the cause instead of only e.getMessage()
+                logger.warn("minio中删除该文件报错:" + e.getMessage(), e);
+                return false;
+            }
+        }
+        logger.info("minio是否存在该文件?" + (isExist ? "存在" : "不存在"));
+
+        // Object is gone either way (just removed, or never existed) — report success.
+        return true;
+
+    }
+
+
+    /**
+     * Delete every chunk object under "chunks/{md5}/" from MinIO.
+     *
+     * removeObjects is lazy: deletions execute only while the returned Iterable of
+     * results is consumed, and each element describes one FAILED deletion — so an
+     * empty result means every chunk was removed.
+     *
+     * @param md5 MD5 of the file whose chunks should be removed
+     * @return true when no deletion errors were reported, false otherwise
+     */
+    public Boolean deleteChunks(String md5) {
+        logger.info("开始删除分块...");
+        // 1. List the chunks and queue each object key for batch deletion.
+        List<Item> chunkList = getChunkList(md5);  // all objects under chunks/{md5}/
+        List<DeleteObject> delChunkList = new LinkedList<>(); // batch-deletion queue
+
+        List<String> nameList = new LinkedList<>(); //  object keys of the chunks
+        for (Item item : chunkList) {
+            nameList.add(item.objectName());
+        }
+        logger.info("查到的chunklist" + nameList);
+        for (String objectName : nameList) {
+            logger.info("name是:" + objectName);
+            delChunkList.add(new DeleteObject(objectName));
+        }
+
+        Iterable<Result<DeleteError>> results =
+                minioClient.removeObjects(
+                        RemoveObjectsArgs.builder().bucket(bucket).objects(delChunkList).build());
+
+        // NOTE(review): results is consumed twice (hasNext() here, for-each below);
+        // the MinIO SDK's lazy Iterable may re-issue the request or be single-pass —
+        // TODO confirm and restructure to iterate only once.
+        if (results.iterator().hasNext()) {
+            for (Result<DeleteError> result : results) {
+                DeleteError error = null;
+                try {
+                    // result.get() surfaces the per-object delete error, or throws.
+                    error = result.get();
+                    logger.error("删除分块时的错误是:" + error);
+                } catch (Exception e) {
+                    logger.warn("删除分块抛出的异常是:" + e.getMessage());
+                    throw new RuntimeException(e);
+                }
+                System.out.println(
+                        "Error in deleting object " + error.objectName() + "; " + error.message());
+            }
+            logger.info("结束删除分块1...");
+            return false; // at least one chunk failed to delete
+        } else {
+            logger.info("结束删除分块2...");
+            return true; // no error entries — all chunks deleted
+        }
+
+    }
+
+}

+ 6 - 2
snowy-server/snowy-gateway-app/src/main/java/vip/xiaonuo/gateway/config/GatewayConfigure.java

@@ -144,9 +144,13 @@ public class GatewayConfigure {
             /* 资源中心 */
             "/api/webapp/disk/resourcecentre/page",
             "/api/webapp/disk/resourcecentre/detail",
-            "/api/webapp/disk/courseauditrecord/addViewCount"
-
+            "/api/webapp/disk/courseauditrecord/addViewCount",
 
+            "/api/webapp/disk/minio/upload",
+            "/api/webapp/disk/minio/checkExits",
+            "/api/webapp/disk/minio/merge",
+            "/api/webapp/disk/minio/delete",
+            "/api/webapp/disk/minio/checkMd5List"
     };
 
     /**