feat(微信聊天): 添加语音消息功能支持

实现语音消息的录制、播放和发送功能,包括:
1. 新增AudioRecorder组件用于录音
2. 添加AudioMessage组件展示语音消息
3. 修改消息输入组件支持语音消息类型
4. 调整样式适配语音消息展示
This commit is contained in:
超级老白兔
2025-09-09 18:09:38 +08:00
parent 514b077da4
commit 03056186c6
9 changed files with 673 additions and 299 deletions

View File

@@ -1,18 +1,14 @@
{
"_charts-CM0JFsjx.js": {
"file": "assets/charts-CM0JFsjx.js",
"_charts-CtV6DO5_.js": {
"file": "assets/charts-CtV6DO5_.js",
"name": "charts",
"imports": [
"_ui-Dkyp_L4f.js",
"_ui-BRTknrR5.js",
"_vendor-BPPoWDlG.js"
]
},
"_ui-D0C0OGrH.css": {
"file": "assets/ui-D0C0OGrH.css",
"src": "_ui-D0C0OGrH.css"
},
"_ui-Dkyp_L4f.js": {
"file": "assets/ui-Dkyp_L4f.js",
"_ui-BRTknrR5.js": {
"file": "assets/ui-BRTknrR5.js",
"name": "ui",
"imports": [
"_vendor-BPPoWDlG.js"
@@ -21,6 +17,10 @@
"assets/ui-D0C0OGrH.css"
]
},
"_ui-D0C0OGrH.css": {
"file": "assets/ui-D0C0OGrH.css",
"src": "_ui-D0C0OGrH.css"
},
"_utils-DiZV3oaL.js": {
"file": "assets/utils-DiZV3oaL.js",
"name": "utils",
@@ -33,18 +33,18 @@
"name": "vendor"
},
"index.html": {
"file": "assets/index-DYycL-yo.js",
"file": "assets/index-DGdErvda.js",
"name": "index",
"src": "index.html",
"isEntry": true,
"imports": [
"_vendor-BPPoWDlG.js",
"_utils-DiZV3oaL.js",
"_ui-Dkyp_L4f.js",
"_charts-CM0JFsjx.js"
"_ui-BRTknrR5.js",
"_charts-CtV6DO5_.js"
],
"css": [
"assets/index-CHPV8625.css"
"assets/index-DoT8YtM8.css"
]
}
}

View File

@@ -11,13 +11,13 @@
</style>
<!-- 引入 uni-app web-view SDK必须 -->
<script type="text/javascript" src="/websdk.js"></script>
<script type="module" crossorigin src="/assets/index-DYycL-yo.js"></script>
<script type="module" crossorigin src="/assets/index-DGdErvda.js"></script>
<link rel="modulepreload" crossorigin href="/assets/vendor-BPPoWDlG.js">
<link rel="modulepreload" crossorigin href="/assets/utils-DiZV3oaL.js">
<link rel="modulepreload" crossorigin href="/assets/ui-Dkyp_L4f.js">
<link rel="modulepreload" crossorigin href="/assets/charts-CM0JFsjx.js">
<link rel="modulepreload" crossorigin href="/assets/ui-BRTknrR5.js">
<link rel="modulepreload" crossorigin href="/assets/charts-CtV6DO5_.js">
<link rel="stylesheet" crossorigin href="/assets/ui-D0C0OGrH.css">
<link rel="stylesheet" crossorigin href="/assets/index-CHPV8625.css">
<link rel="stylesheet" crossorigin href="/assets/index-DoT8YtM8.css">
</head>
<body>
<div id="root"></div>

View File

@@ -0,0 +1,391 @@
import React, { useState, useRef, useCallback, useEffect } from "react";
import { Button, message, Modal } from "antd";
import {
  AudioOutlined,
  PlayCircleOutlined,
  PauseCircleOutlined,
  SendOutlined,
  DeleteOutlined,
} from "@ant-design/icons";
import { uploadFile } from "@/api/common";

interface AudioRecorderProps {
  /** Called with the uploaded file path after a successful send. */
  onAudioUploaded: (filePath: string) => void;
  className?: string;
  disabled?: boolean;
  /** Maximum recording length in seconds (default 60). */
  maxDuration?: number;
}

/** Lifecycle states of one recording session inside the modal. */
type RecordingState =
  | "idle"
  | "recording"
  | "recorded"
  | "playing"
  | "uploading";

/**
 * Voice-message recorder: a toolbar button that opens a modal where the user
 * records audio with MediaRecorder, previews it, and uploads it via
 * `uploadFile`, reporting the resulting file path through `onAudioUploaded`.
 */
const AudioRecorder: React.FC<AudioRecorderProps> = ({
  onAudioUploaded,
  className,
  disabled = false,
  maxDuration = 60,
}) => {
  const [visible, setVisible] = useState(false);
  const [state, setState] = useState<RecordingState>("idle");
  const [recordingTime, setRecordingTime] = useState(0);
  const [audioBlob, setAudioBlob] = useState<Blob | null>(null);
  const [audioUrl, setAudioUrl] = useState<string>("");
  const mediaRecorderRef = useRef<MediaRecorder | null>(null);
  const audioRef = useRef<HTMLAudioElement | null>(null);
  const timerRef = useRef<NodeJS.Timeout | null>(null);
  const chunksRef = useRef<Blob[]>([]);
  // When true, the next MediaRecorder "stop" event discards its data; set
  // when the modal is closed while a recording is still in progress, so the
  // async onstop handler cannot resurrect "recorded" state (or leak an
  // object URL) after the session was already reset.
  const discardRef = useRef(false);
  // Mirror of audioUrl so the unmount cleanup can revoke the latest URL.
  const audioUrlRef = useRef("");
  audioUrlRef.current = audioUrl;

  // Release the timer and any outstanding object URL if we unmount mid-session.
  useEffect(() => {
    return () => {
      if (timerRef.current) {
        clearInterval(timerRef.current);
      }
      if (audioUrlRef.current) {
        URL.revokeObjectURL(audioUrlRef.current);
      }
    };
  }, []);

  // Open the recorder modal.
  const openRecorder = () => {
    setVisible(true);
  };

  // Close the modal and reset all session state.
  const closeRecorder = () => {
    if (state === "recording") {
      // Drop whatever the in-flight recorder emits when it stops.
      discardRef.current = true;
      stopRecording();
    }
    if (state === "playing") {
      pauseAudio();
    }
    deleteRecording();
    setVisible(false);
  };

  // Start capturing audio from the microphone.
  const startRecording = useCallback(async () => {
    try {
      const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
      // Prefer an MP3 container; fall back to WebM when unsupported.
      let mimeType = "audio/mp3";
      if (!MediaRecorder.isTypeSupported(mimeType)) {
        mimeType = "audio/mpeg";
        if (!MediaRecorder.isTypeSupported(mimeType)) {
          mimeType = "audio/webm";
        }
      }
      const mediaRecorder = new MediaRecorder(stream, { mimeType });
      mediaRecorderRef.current = mediaRecorder;
      chunksRef.current = [];
      discardRef.current = false;
      mediaRecorder.ondataavailable = event => {
        if (event.data.size > 0) {
          chunksRef.current.push(event.data);
        }
      };
      mediaRecorder.onstop = () => {
        // Always release the microphone tracks first.
        stream.getTracks().forEach(track => track.stop());
        if (discardRef.current) {
          // Modal was closed mid-recording: throw the data away.
          discardRef.current = false;
          chunksRef.current = [];
          return;
        }
        const blob = new Blob(chunksRef.current, { type: mimeType });
        setAudioBlob(blob);
        const url = URL.createObjectURL(blob);
        setAudioUrl(url);
        setState("recorded");
      };
      mediaRecorder.start();
      setState("recording");
      setRecordingTime(0);
      // Tick once per second and auto-stop at maxDuration. Elapsed time is
      // tracked in the closure so no side effect runs inside a state updater
      // (updaters may be invoked twice under React StrictMode).
      let elapsed = 0;
      timerRef.current = setInterval(() => {
        elapsed += 1;
        setRecordingTime(elapsed);
        if (elapsed >= maxDuration) {
          stopRecording();
        }
      }, 1000);
    } catch (error) {
      console.error("录音失败:", error);
      message.error("无法访问麦克风,请检查权限设置");
    }
  }, [maxDuration]);

  // Stop the recorder and the elapsed-time timer.
  const stopRecording = useCallback(() => {
    if (
      mediaRecorderRef.current &&
      mediaRecorderRef.current.state === "recording"
    ) {
      mediaRecorderRef.current.stop();
    }
    if (timerRef.current) {
      clearInterval(timerRef.current);
      timerRef.current = null;
    }
  }, []);

  // Play back the recorded clip.
  const playAudio = useCallback(() => {
    if (audioRef.current && audioUrl) {
      audioRef.current.play();
      setState("playing");
    }
  }, [audioUrl]);

  // Pause playback and return to the review screen.
  const pauseAudio = useCallback(() => {
    if (audioRef.current) {
      audioRef.current.pause();
      setState("recorded");
    }
  }, []);

  // Discard the current recording and revoke its object URL.
  const deleteRecording = useCallback(() => {
    if (audioUrl) {
      URL.revokeObjectURL(audioUrl);
    }
    setAudioBlob(null);
    setAudioUrl("");
    setRecordingTime(0);
    setState("idle");
  }, [audioUrl]);

  // Upload the recording and notify the parent on success.
  const sendAudio = useCallback(async () => {
    if (!audioBlob) return;
    try {
      setState("uploading");
      // Wrap the blob in a File so the upload API receives a file name.
      const timestamp = Date.now();
      const fileExtension =
        audioBlob.type.includes("mp3") || audioBlob.type.includes("mpeg")
          ? "mp3"
          : "webm";
      const audioFile = new File(
        [audioBlob],
        `audio_${timestamp}.${fileExtension}`,
        {
          type: audioBlob.type,
        },
      );
      console.log("音频文件信息:", {
        fileName: audioFile.name,
        fileType: audioFile.type,
        fileSize: audioFile.size,
        fileExtension: fileExtension,
        blobType: audioBlob.type,
      });
      const filePath = await uploadFile(audioFile);
      onAudioUploaded(filePath);
      // Reset and close on success.
      deleteRecording();
      setVisible(false);
      message.success("语音发送成功");
    } catch (error) {
      console.error("语音上传失败:", error);
      message.error("语音发送失败,请重试");
      // Fall back to the review screen so the user can retry.
      setState("recorded");
    }
  }, [audioBlob, onAudioUploaded, deleteRecording]);

  // Format seconds as mm:ss.
  const formatTime = (seconds: number) => {
    const mins = Math.floor(seconds / 60);
    const secs = seconds % 60;
    return `${mins.toString().padStart(2, "0")}:${secs.toString().padStart(2, "0")}`;
  };

  // Render the modal body for the current session state.
  const renderModalContent = () => {
    switch (state) {
      case "idle":
        return (
          <div style={{ textAlign: "center", padding: "40px 20px" }}>
            <div
              style={{ marginBottom: "20px", fontSize: "16px", color: "#666" }}
            >
            </div>
            <Button
              type="primary"
              size="large"
              icon={<AudioOutlined />}
              onClick={startRecording}
              style={{
                borderRadius: "50%",
                width: "80px",
                height: "80px",
                fontSize: "24px",
              }}
            />
          </div>
        );
      case "recording":
        return (
          <div style={{ textAlign: "center", padding: "40px 20px" }}>
            <div style={{ marginBottom: "20px" }}>
              <div
                style={{
                  fontSize: "24px",
                  color: "#ff4d4f",
                  fontWeight: "bold",
                  marginBottom: "10px",
                }}
              >
                {formatTime(recordingTime)}
              </div>
              <div style={{ fontSize: "14px", color: "#999" }}>
                ...
              </div>
            </div>
            <Button
              type="primary"
              danger
              size="large"
              onClick={stopRecording}
              style={{
                borderRadius: "50%",
                width: "80px",
                height: "80px",
                fontSize: "24px",
              }}
            >
            </Button>
          </div>
        );
      case "recorded":
      case "playing":
        return (
          <div style={{ padding: "20px" }}>
            <div style={{ textAlign: "center", marginBottom: "20px" }}>
              <div
                style={{
                  fontSize: "18px",
                  fontWeight: "bold",
                  marginBottom: "10px",
                }}
              >
                : {formatTime(recordingTime)}
              </div>
              <div style={{ fontSize: "14px", color: "#666" }}>
                {state === "playing"
                  ? "正在播放..."
                  : "录音完成,可以试听或发送"}
              </div>
            </div>
            <div
              style={{
                display: "flex",
                justifyContent: "center",
                gap: "12px",
                marginBottom: "20px",
              }}
            >
              <Button
                type="text"
                size="large"
                icon={
                  state === "playing" ? (
                    <PauseCircleOutlined />
                  ) : (
                    <PlayCircleOutlined />
                  )
                }
                onClick={state === "playing" ? pauseAudio : playAudio}
                title={state === "playing" ? "暂停播放" : "播放预览"}
              />
              <Button
                type="text"
                size="large"
                icon={<DeleteOutlined />}
                onClick={deleteRecording}
                title="删除重录"
                danger
              />
            </div>
            <div style={{ textAlign: "center" }}>
              {/* No loading prop here: in this branch `state` can never be
                  "uploading" — that state renders its own spinner case. */}
              <Button
                type="primary"
                size="large"
                icon={<SendOutlined />}
                onClick={sendAudio}
                style={{ minWidth: "120px" }}
              >
              </Button>
            </div>
          </div>
        );
      case "uploading":
        return (
          <div style={{ textAlign: "center", padding: "40px 20px" }}>
            <Button
              type="primary"
              loading
              size="large"
              style={{ minWidth: "120px" }}
            >
              ...
            </Button>
          </div>
        );
      default:
        return null;
    }
  };

  return (
    <>
      <Button
        type="text"
        icon={<AudioOutlined />}
        onClick={openRecorder}
        className={className}
        disabled={disabled}
        title="点击录音"
      />
      <Modal
        title="录音"
        open={visible}
        onCancel={closeRecorder}
        footer={null}
        width={400}
        centered
        maskClosable={state === "idle"}
      >
        {renderModalContent()}
        {audioUrl && (
          <audio
            ref={audioRef}
            src={audioUrl}
            onEnded={() => setState("recorded")}
            style={{ display: "none" }}
          />
        )}
      </Modal>
    </>
  );
};
export default AudioRecorder;

View File

@@ -326,18 +326,6 @@
}
}
.messageItem {
margin-bottom: 16px;
position: relative;
.messageContent {
display: flex;
align-items: flex-start;
gap: 8px;
max-width: 70%;
}
}
.messageTime {
text-align: center;
padding: 4px 0;
@@ -346,252 +334,6 @@
margin: 20px 0;
}
.messageItem {
.messageContent {
.messageAvatar {
flex-shrink: 0;
}
.messageBubble {
max-width: 100%;
.messageSender {
font-size: 12px;
color: #8c8c8c;
margin-bottom: 4px;
}
.messageText {
color: #262626;
line-height: 1.5;
word-break: break-word;
background: #fff;
padding: 8px 12px;
border-radius: 8px;
box-shadow: 0 1px 2px rgba(0, 0, 0, 0.1);
}
.emojiMessage {
img {
max-width: 120px;
max-height: 120px;
border-radius: 4px;
display: block;
cursor: pointer;
}
}
.imageMessage {
img {
max-width: 100%;
border-radius: 8px;
display: block;
cursor: pointer;
}
}
.videoMessage {
position: relative;
display: flex;
flex-direction: column;
video {
max-width: 100%;
border-radius: 8px;
display: block;
}
.videoContainer {
position: relative;
cursor: pointer;
display: flex;
justify-content: center;
align-items: center;
overflow: hidden;
border-radius: 8px;
&:hover .videoPlayIcon {
transform: scale(1.1);
}
.videoThumbnail {
width: 100%;
display: block;
border-radius: 8px;
}
.videoPlayIcon {
position: absolute;
display: flex;
align-items: center;
justify-content: center;
background-color: rgba(0, 0, 0, 0.5);
border-radius: 50%;
width: 60px;
height: 60px;
transition: transform 0.2s ease;
.loadingSpinner {
width: 32px;
height: 32px;
border: 3px solid rgba(255, 255, 255, 0.3);
border-radius: 50%;
border-top-color: #fff;
animation: spin 1s linear infinite;
}
}
@keyframes spin {
0% {
transform: rotate(0deg);
}
100% {
transform: rotate(360deg);
}
}
}
.downloadButton {
position: absolute;
top: 8px;
right: 8px;
display: flex;
align-items: center;
justify-content: center;
color: #fff;
font-size: 18px;
width: 32px;
height: 32px;
border-radius: 50%;
transition: all 0.2s;
&:hover {
color: #40a9ff;
}
}
}
.audioMessage {
position: relative;
display: flex;
align-items: center;
background: #f5f5f5;
border-radius: 8px;
padding: 8px;
audio {
flex: 1;
min-width: 200px;
}
.downloadButton {
display: flex;
align-items: center;
justify-content: center;
color: #1890ff;
font-size: 18px;
width: 32px;
height: 32px;
border-radius: 50%;
margin-left: 8px;
transition: all 0.2s;
&:hover {
color: #40a9ff;
}
}
}
.fileMessage {
background: #f5f5f5;
border-radius: 8px;
padding: 8px;
display: flex;
align-items: center;
position: relative;
transition: background-color 0.2s;
width: 240px;
&:hover {
background: #e6f7ff;
}
.fileInfo {
flex: 1;
margin-right: 8px;
cursor: pointer;
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
}
.downloadButton {
display: flex;
align-items: center;
justify-content: center;
color: #1890ff;
font-size: 18px;
width: 32px;
height: 32px;
border-radius: 50%;
transition: all 0.2s;
&:hover {
color: #40a9ff;
}
}
}
.locationMessage {
background: #f5f5f5;
border-radius: 8px;
padding: 8px;
display: flex;
align-items: center;
cursor: pointer;
transition: background-color 0.2s;
&:hover {
background: #fff2e8;
}
}
.messageTime {
display: none;
}
}
}
}
.ownMessage {
.messageContent {
flex-direction: row-reverse;
margin-left: auto;
.messageBubble {
color: #262626;
line-height: 1.5;
word-break: break-word;
background: #fff;
border-radius: 8px;
max-width: 100%;
.messageText {
color: #333;
}
.messageTime {
color: rgba(255, 255, 255, 0.7);
}
}
}
}
.otherMessage {
.messageContent {
margin-right: auto;
}
}
// 响应式设计
@media (max-width: 1200px) {
.profileSider {
@@ -631,12 +373,6 @@
}
}
.messageItem {
.messageContent {
max-width: 85%;
}
}
.profileContent {
padding: 12px;

View File

@@ -13,6 +13,7 @@ import { useWebSocketStore } from "@/store/module/websocket/websocket";
import { EmojiPicker } from "@/components/EmojiSeclection";
import { EmojiInfo } from "@/components/EmojiSeclection/wechatEmoji";
import SimpleFileUpload from "@/components/Upload/SimpleFileUpload";
import AudioRecorder from "@/components/Upload/AudioRecorder";
import styles from "./MessageEnter.module.scss";
const { Footer } = Layout;
@@ -94,10 +95,23 @@ const MessageEnter: React.FC<MessageEnterProps> = ({ contract }) => {
// 其他格式默认为文件
return 49; // 文件
};
const handleFileUploaded = (filePath: string) => {
const FileType = {
TEXT: 1,
IMAGE: 2,
VIDEO: 3,
AUDIO: 4,
FILE: 5,
};
const handleFileUploaded = (filePath: string, fileType: number) => {
// msgType(1:文本 3:图片 43:视频 47:动图表情包gif、其他表情包 49:小程序/其他:图文、文件)
const msgType = getMsgTypeByFileFormat(filePath);
let msgType = 1;
if ([FileType.TEXT].includes(fileType)) {
msgType = getMsgTypeByFileFormat(filePath);
} else if ([FileType.IMAGE].includes(fileType)) {
msgType = 3;
} else if ([FileType.AUDIO].includes(fileType)) {
msgType = 34;
}
const params = {
wechatAccountId: contract.wechatAccountId,
@@ -119,7 +133,9 @@ const MessageEnter: React.FC<MessageEnterProps> = ({ contract }) => {
<div className={styles.leftTool}>
<EmojiPicker onEmojiSelect={handleEmojiSelect} />
<SimpleFileUpload
onFileUploaded={handleFileUploaded}
onFileUploaded={filePath =>
handleFileUploaded(filePath, FileType.FILE)
}
maxSize={1}
type={4}
slot={
@@ -131,7 +147,9 @@ const MessageEnter: React.FC<MessageEnterProps> = ({ contract }) => {
}
/>
<SimpleFileUpload
onFileUploaded={handleFileUploaded}
onFileUploaded={filePath =>
handleFileUploaded(filePath, FileType.IMAGE)
}
maxSize={1}
type={1}
slot={
@@ -143,13 +161,12 @@ const MessageEnter: React.FC<MessageEnterProps> = ({ contract }) => {
}
/>
<Tooltip title="语音">
<Button
type="text"
icon={<AudioOutlined />}
className={styles.toolbarButton}
/>
</Tooltip>
<AudioRecorder
onAudioUploaded={filePath =>
handleFileUploaded(filePath, FileType.AUDIO)
}
className={styles.toolbarButton}
/>
</div>
<div className={styles.rightTool}>
<div className={styles.rightToolItem}>

View File

@@ -76,7 +76,6 @@
display: flex;
align-items: flex-start;
gap: 8px;
max-width: 70%;
}
// 头像
@@ -533,10 +532,6 @@
// 响应式设计
@media (max-width: 768px) {
.messageContent {
max-width: 85%;
}
.messageBubble {
padding: 6px 10px;
}

View File

@@ -0,0 +1,117 @@
// Message bubble wrapper for a voice message
.messageBubble {
  word-wrap: break-word;
  padding: 8px 12px;
  border-radius: 8px;
  background-color: #f0f0f0;
}
// Voice message container
.audioMessage {
  min-width: 200px;
  max-width: 100%;
  width: 100%;
  box-sizing: border-box;
  overflow: hidden;
}
// Clickable audio control row (icon + waveform + duration)
.audioContainer {
  display: flex;
  align-items: center;
  gap: 12px;
  cursor: pointer;
  padding: 8px;
  border-radius: 6px;
  transition: background-color 0.2s;
  width: 100%;
  box-sizing: border-box;
  overflow: hidden;
  &:hover {
    background-color: rgba(0, 0, 0, 0.05);
  }
}
// Circular play/pause icon
.audioIcon {
  display: flex;
  align-items: center;
  justify-content: center;
  width: 32px;
  height: 32px;
  border-radius: 50%;
  background-color: #fff;
  box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1);
  transition: all 0.2s;
  &:hover {
    transform: scale(1.05);
  }
}
// Audio content area (waveform + duration label)
.audioContent {
  flex: 1;
  display: flex;
  align-items: center;
  gap: 8px;
  min-width: 0;
  overflow: hidden;
}
// Waveform animation container
.audioWaveform {
  display: flex;
  align-items: center;
  gap: 2px;
  height: 30px; // fixed height to prevent layout jitter while animating
}
// Individual waveform bar
.waveBar {
  width: 3px;
  background-color: #d9d9d9;
  border-radius: 1.5px;
  transition: all 0.3s ease;
  transform-origin: center; // scale each bar from its center
  &.playing {
    animation: waveAnimation 1.5s ease-in-out infinite;
  }
}
// Audio duration label
.audioDuration {
  font-size: 12px;
  color: #666;
  white-space: nowrap;
}
// Playback progress track
.audioProgress {
  margin-top: 8px;
  height: 2px;
  background-color: #e0e0e0;
  border-radius: 1px;
  overflow: hidden;
}
// Playback progress fill
.audioProgressBar {
  height: 100%;
  background-color: #1890ff;
  border-radius: 1px;
  transition: width 0.1s ease;
}
// Waveform pulse animation applied while playing
@keyframes waveAnimation {
  0%,
  100% {
    transform: scaleY(0.5);
  }
  50% {
    transform: scaleY(1.2);
  }
}

View File

@@ -0,0 +1,108 @@
import React, { useState, useRef, useEffect } from "react";
import { PauseCircleFilled, SoundOutlined } from "@ant-design/icons";
import styles from "./AudioMessage.module.scss";

interface AudioMessageProps {
  /** URL of the voice clip to play (message content). */
  audioUrl: string;
  /** Id of the chat message this bubble renders. */
  msgId: number;
}

/**
 * Chat bubble for a voice message: a click-to-play waveform whose bars fill
 * in and a thin progress bar, both driven by a lazily created
 * HTMLAudioElement.
 *
 * The original implementation keyed everything by
 * `audio_${msgId}_${Date.now()}`, recomputed on EVERY render, so after the
 * first state update `isPlaying` could never match again and each click
 * created a fresh Audio element (overlapping playback). A stable ref fixes
 * both.
 */
const AudioMessage: React.FC<AudioMessageProps> = ({ audioUrl }) => {
  const [isPlaying, setIsPlaying] = useState(false);
  // Playback progress in percent (0–100).
  const [progress, setProgress] = useState(0);
  // Single audio element per bubble, created on first play and stable
  // across re-renders.
  const audioRef = useRef<HTMLAudioElement | null>(null);
  // Random bar heights generated once, so the waveform does not jitter on
  // every timeupdate-driven re-render.
  const barHeightsRef = useRef<number[]>(
    Array.from({ length: 20 }, () => Math.random() * 20 + 10),
  );

  // Stop playback when the bubble unmounts.
  useEffect(() => {
    return () => {
      if (audioRef.current) {
        audioRef.current.pause();
        audioRef.current = null;
      }
    };
  }, []);

  // Toggle play/pause, creating the audio element on first use.
  const handleAudioToggle = () => {
    let audio = audioRef.current;
    if (!audio) {
      const newAudio = new Audio(audioUrl);
      audioRef.current = newAudio;
      newAudio.addEventListener("timeupdate", () => {
        // duration is NaN until metadata loads; treat that as 0%.
        const currentProgress = newAudio.duration
          ? (newAudio.currentTime / newAudio.duration) * 100
          : 0;
        setProgress(currentProgress);
      });
      newAudio.addEventListener("ended", () => {
        setIsPlaying(false);
        setProgress(0);
      });
      newAudio.addEventListener("error", () => {
        console.error("音频播放失败");
        setIsPlaying(false);
      });
      audio = newAudio;
    }
    if (isPlaying) {
      audio.pause();
      setIsPlaying(false);
    } else {
      audio.play();
      setIsPlaying(true);
    }
  };

  return (
    <div className={styles.messageBubble}>
      <div className={styles.audioMessage}>
        <div className={styles.audioContainer} onClick={handleAudioToggle}>
          <div className={styles.audioIcon}>
            {isPlaying ? (
              <PauseCircleFilled
                style={{ fontSize: "20px", color: "#1890ff" }}
              />
            ) : (
              <SoundOutlined style={{ fontSize: "20px", color: "#666" }} />
            )}
          </div>
          <div className={styles.audioContent}>
            <div className={styles.audioWaveform}>
              {/* Decorative waveform; bars "fill" as playback progresses. */}
              {barHeightsRef.current.map((barHeight, i) => (
                <div
                  key={i}
                  className={`${styles.waveBar} ${isPlaying ? styles.playing : ""}`}
                  style={{
                    height: `${barHeight}px`,
                    animationDelay: `${i * 0.1}s`,
                    backgroundColor: progress > i * 5 ? "#1890ff" : "#d9d9d9",
                  }}
                />
              ))}
            </div>
            <div className={styles.audioDuration}></div>
          </div>
        </div>
        {progress > 0 && (
          <div className={styles.audioProgress}>
            <div
              className={styles.audioProgressBar}
              style={{ width: `${progress}%` }}
            />
          </div>
        )}
      </div>
    </div>
  );
};
export default AudioMessage;

View File

@@ -1,4 +1,4 @@
import React, { useEffect, useRef } from "react";
import React, { useEffect, useRef, useState } from "react";
import { Avatar, Divider } from "antd";
import {
UserOutlined,
@@ -6,6 +6,7 @@ import {
DownloadOutlined,
PlayCircleFilled,
} from "@ant-design/icons";
import AudioMessage from "./components/AudioMessage/AudioMessage";
import { ChatRecord, ContractData, weChatGroup } from "@/pages/pc/ckbox/data";
import { formatWechatTime, parseWeappMsgStr } from "@/utils/common";
import { getEmojiPath } from "@/components/EmojiSeclection/wechatEmoji";
@@ -18,6 +19,7 @@ interface MessageRecordProps {
}
const MessageRecord: React.FC<MessageRecordProps> = ({ contract }) => {
const messagesEndRef = useRef<HTMLDivElement>(null);
const currentMessages = useWeChatStore(state => state.currentMessages);
const loadChatMessages = useWeChatStore(state => state.loadChatMessages);
const messagesLoading = useWeChatStore(state => state.messagesLoading);
@@ -383,6 +385,14 @@ const MessageRecord: React.FC<MessageRecordProps> = ({ contract }) => {
}
return renderErrorMessage("[表情包]");
case 34: // 语音消息
if (typeof content !== "string" || !content.trim()) {
return renderErrorMessage("[语音消息 - 无效内容]");
}
// content直接是音频URL字符串
return <AudioMessage audioUrl={content} msgId={msg.id} />;
case 49: // 小程序/文章/其他:图文、文件
if (typeof content !== "string" || !content.trim()) {
return renderErrorMessage("[小程序/文章/文件消息 - 无效内容]");