DifyAudioChatController.java
package com.ewaytek.deepseek.controller;

import com.ewaytek.deepseek.common.config.DeepseekConfig;
import com.ewaytek.deepseek.service.dify.DifyAudioChatService;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.core.io.FileSystemResource;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;
import org.springframework.web.servlet.mvc.method.annotation.ResponseBodyEmitter;
import org.springframework.web.servlet.mvc.method.annotation.StreamingResponseBody;

import javax.annotation.Resource;
import javax.servlet.http.HttpServletResponse;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;

/**
 * @author yangtq
 * @date 2025/3/28
 */
@RequiredArgsConstructor
@RestController
@Slf4j
@RequestMapping("/dify/audio")
public class DifyAudioChatController {
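    // Delegates the actual audio/text chat work to DifyAudioChatService
    // (presumably backed by the Dify API, judging by the package name).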

    @Resource
    private DifyAudioChatService difyAudioChatService;

    /**
     * Streams the chat response for either an uploaded audio clip or a plain-text
     * context back to the caller as a raw binary stream.
     */
    @PostMapping("/voice1")
    public StreamingResponseBody systemChatAudio(
            @RequestParam(value = "audio", required = false) MultipartFile audio,
            @RequestParam(required = false, value = "context") String context, HttpServletResponse response) {
        response.setContentType(MediaType.APPLICATION_OCTET_STREAM_VALUE); // Set the response type to a binary stream
        response.setHeader("Cache-Control", "no-cache");
        response.setHeader("Connection", "keep-alive");

        return outputStream -> {
            try {
                if (audio != null) {
                    difyAudioChatService.systemChatAudio(audio.getInputStream(), null, outputStream);
                } else if (context != null) {
                    difyAudioChatService.systemChatAudio(null, context, outputStream);
                }
            } catch (Exception e) {
                log.error("Failed to stream audio chat response", e);
            }
        };
    }


    /**
     * Accepts the same inputs as {@code /voice1} but relays the chat response as a
     * Server-Sent Events stream through a {@link ResponseBodyEmitter}.
     */
    @PostMapping("/voice")
    @ResponseBody
    public ResponseBodyEmitter systemChatAudioFile(
            @RequestParam(value = "audio", required = false) MultipartFile audio,
            @RequestParam(required = false, value = "context") String context, HttpServletResponse response) {
        response.setContentType(MediaType.TEXT_EVENT_STREAM_VALUE); // Set the response type to Server-Sent Events
        response.setHeader("Cache-Control", "no-cache");
        response.setHeader("Connection", "keep-alive");
        ResponseBodyEmitter emitter = new ResponseBodyEmitter(0L); // 0 = no async timeout
        try {
            if (audio != null) {
                difyAudioChatService.systemChatAudioFlie(audio.getInputStream(), null, emitter);
            } else if (context != null) {
                difyAudioChatService.systemChatAudioFlie(null, context, emitter);
            }
            // Complete only on the success path; a finally block would also run after
            // completeWithError and could cut the stream short. If the service writes to
            // the emitter asynchronously, it should complete the emitter itself instead.
            emitter.complete();
        } catch (Exception e) {
            emitter.completeWithError(e);
        }
        return emitter;
    }


    /**
     * Serves an audio file from the configured download directory as {@code audio/wav}.
     */
    @GetMapping("/{fileName}")
    public ResponseEntity<FileSystemResource> getAudioFile(@PathVariable String fileName) throws IOException {
        // Build the file path under the configured download directory
        File baseDir = new File(DeepseekConfig.getDownloadPath());
        File file = new File(baseDir, fileName);

        // Reject path traversal attempts such as "../" in the file name
        String basePath = baseDir.getCanonicalPath() + File.separator;
        if (!file.getCanonicalPath().startsWith(basePath)) {
            return ResponseEntity.badRequest().build();
        }

        // Check whether the file exists
        if (!file.exists()) {
            return ResponseEntity.notFound().build();
        }

        // Return the file as a stream (the content type is fixed to WAV)
        return ResponseEntity.ok()
                .contentType(MediaType.parseMediaType("audio/wav"))
                .body(new FileSystemResource(file));
    }

}
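
A minimal client sketch for the SSE endpoint above, shown for reference only. It assumes the application is reachable at http://localhost:8080 with no servlet context path, exercises only the text branch of /dify/audio/voice, and uses an illustrative class name and sample context value; the audio branch would need a multipart upload instead.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.util.stream.Stream;

public class DifyAudioChatClientSketch {

    public static void main(String[] args) throws Exception {
        HttpClient client = HttpClient.newHttpClient();

        // POST to /dify/audio/voice with a text "context" query parameter and no body.
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8080/dify/audio/voice?context=hello"))
                .POST(HttpRequest.BodyPublishers.noBody())
                .build();

        // The endpoint responds with text/event-stream; print each event line as it arrives.
        HttpResponse<Stream<String>> response = client.send(request, HttpResponse.BodyHandlers.ofLines());
        response.body().forEach(System.out::println);
    }
}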