feat: 重构 IdentifyFace.vue 为 Hooks 架构

- 新增 hooks/ 目录,包含三个专用 Hook:
  * useVoiceGeneration - 语音生成和校验逻辑
  * useDigitalHumanGeneration - 数字人视频生成逻辑
  * useIdentifyFaceController - 协调两个子 Hook 的控制器

- 新增 types/identify-face.ts 完整类型定义

- 重构 IdentifyFace.vue 使用 hooks 架构:
  * 视图层与业务逻辑分离
  * 状态管理清晰化
  * 模块解耦,逻辑清晰

- 遵循单一职责原则,每个 Hook 只负责一个领域
- 提升代码可测试性和可维护性
- 支持两种视频素材来源:素材库选择和直接上传
- 实现语音生成优先校验的业务规则

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
2025-12-28 00:19:17 +08:00
parent effbbc694c
commit 36195ea55a
46 changed files with 4258 additions and 3454 deletions

View File

@@ -0,0 +1,239 @@
/**
* @fileoverview useDigitalHumanGeneration Hook - 数字人生成逻辑封装
* @author Claude Code
*/
import { ref, computed } from 'vue'
import { message } from 'ant-design-vue'
import type {
UseDigitalHumanGeneration,
VideoState,
IdentifyState,
MaterialValidation,
Video,
AudioState,
} from '../types/identify-face'
import { identifyUploadedVideo, uploadAndIdentifyVideo } from '@/api/kling'
/**
 * Digital-human generation hook.
 *
 * Owns video selection/upload state, face recognition and material-duration
 * validation for digital-human video generation. Two material sources are
 * supported: direct file upload and selection from the material library.
 *
 * @param audioState Audio state shared from the parent hook; only
 *   `validationPassed` is read here (gates `canGenerate`).
 */
export function useDigitalHumanGeneration(
  audioState: AudioState
): UseDigitalHumanGeneration {
  // ==================== Reactive state ====================
  const videoState = ref<VideoState>({
    uploadedVideo: '',
    videoFile: null,
    previewVideoUrl: '',
    selectedVideo: null,
    videoSource: null,
    selectorVisible: false,
  })
  const identifyState = ref<IdentifyState>({
    identifying: false,
    identified: false,
    sessionId: '',
    faceId: '',
    faceStartTime: 0,
    faceEndTime: 0,
    videoFileId: null,
  })
  const materialValidation = ref<MaterialValidation>({
    videoDuration: 0,
    audioDuration: 0,
    isValid: false,
    showDetails: false,
  })
  // ==================== Computed ====================
  /**
   * How long the recognized face appears in the video, in milliseconds
   * (difference of the start/end timestamps returned by recognition).
   */
  const faceDuration = computed(() => {
    return identifyState.value.faceEndTime - identifyState.value.faceStartTime
  })
  /**
   * Whether digital-human generation may start: a video is present (uploaded
   * or selected), the audio passed validation, and material durations are
   * valid.
   */
  const canGenerate = computed(() => {
    const hasVideo = videoState.value.uploadedVideo || videoState.value.selectedVideo
    const audioValidated = audioState.validationPassed
    const materialValidated = materialValidation.value.isValid
    return !!(hasVideo && audioValidated && materialValidated)
  })
  // ==================== Internal helpers ====================
  /**
   * Revoke the previous blob: preview URL, if any, so repeated uploads do
   * not leak object URLs.
   */
  const revokePreviewUrl = (): void => {
    if (videoState.value.uploadedVideo.startsWith('blob:')) {
      URL.revokeObjectURL(videoState.value.uploadedVideo)
    }
  }
  // ==================== Core methods ====================
  /**
   * Handle a direct video file upload.
   *
   * Accepts MP4/MOV only. Clears any previous library selection and all
   * recognition/validation results, then runs face recognition. Recognition
   * failures are already surfaced to the user inside
   * `performFaceRecognition`, so they are swallowed here to avoid an
   * unhandled rejection when invoked from a template event handler.
   */
  const handleFileUpload = async (file: File): Promise<void> => {
    if (!file.name.match(/\.(mp4|mov)$/i)) {
      message.error('仅支持 MP4 和 MOV')
      return
    }
    revokePreviewUrl()
    videoState.value.videoFile = file
    videoState.value.uploadedVideo = URL.createObjectURL(file)
    videoState.value.selectedVideo = null
    videoState.value.previewVideoUrl = ''
    videoState.value.videoSource = 'upload'
    resetIdentifyState()
    resetMaterialValidation()
    try {
      await performFaceRecognition()
    } catch {
      // Already reported via message.error inside performFaceRecognition.
    }
  }
  /**
   * Handle picking a video from the material library.
   *
   * Records the selection, seeds `videoFileId` and the known video duration
   * (library durations are in seconds; validation works in milliseconds),
   * then kicks off face recognition.
   */
  const handleVideoSelect = (video: Video): void => {
    revokePreviewUrl()
    videoState.value.selectedVideo = video
    videoState.value.uploadedVideo = video.fileUrl
    videoState.value.videoFile = null
    videoState.value.videoSource = 'select'
    videoState.value.selectorVisible = false
    resetIdentifyState()
    identifyState.value.videoFileId = video.id
    materialValidation.value.videoDuration = (video.duration || 0) * 1000
    // Fire-and-forget: performFaceRecognition re-throws after reporting the
    // error, so attach a catch to avoid an unhandled promise rejection.
    void performFaceRecognition().catch(() => {})
  }
  /**
   * Run face recognition on the current video material.
   *
   * Uses the library API for selected videos and the upload API for local
   * files. On success stores session/face identifiers and the face time
   * window, and shows a hint with the suggested script length. On failure
   * shows an error message and re-throws so callers can react.
   *
   * @throws Re-throws the API error after reporting it to the user.
   */
  const performFaceRecognition = async (): Promise<void> => {
    const hasUploadFile = videoState.value.videoFile
    const hasSelectedVideo = videoState.value.selectedVideo
    if (!hasUploadFile && !hasSelectedVideo) {
      return
    }
    identifyState.value.identifying = true
    try {
      let res
      if (hasSelectedVideo) {
        res = await identifyUploadedVideo(hasSelectedVideo)
        identifyState.value.videoFileId = hasSelectedVideo.id
      } else {
        res = await uploadAndIdentifyVideo(hasUploadFile!)
        identifyState.value.videoFileId = res.data.fileId
      }
      identifyState.value.sessionId = res.data.sessionId
      identifyState.value.faceId = res.data.faceId
      identifyState.value.faceStartTime = res.data.startTime || 0
      identifyState.value.faceEndTime = res.data.endTime || 0
      identifyState.value.identified = true
      const durationSec = faceDuration.value / 1000
      // Heuristic: ~3.5 characters of script per second of face time.
      const suggestedMaxChars = Math.floor(durationSec * 3.5)
      message.success(`识别完成!人脸出现时长约 ${durationSec.toFixed(1)} 秒,建议文案不超过 ${suggestedMaxChars}`)
    } catch (error: any) {
      message.error(error.message || '识别失败')
      throw error
    } finally {
      identifyState.value.identifying = false
    }
  }
  /**
   * Validate that the video is strictly longer than the audio.
   *
   * @param videoDurationMs Video duration in milliseconds.
   * @param audioDurationMs Audio duration in milliseconds.
   * @returns Whether the pair is valid; the result is also stored in
   *   `materialValidation`.
   */
  const validateMaterialDuration = (videoDurationMs: number, audioDurationMs: number): boolean => {
    const isValid = videoDurationMs > audioDurationMs
    materialValidation.value.videoDuration = videoDurationMs
    materialValidation.value.audioDuration = audioDurationMs
    materialValidation.value.isValid = isValid
    return isValid
  }
  /**
   * Reset all video-related state (including recognition and validation),
   * releasing any blob preview URL first.
   */
  const resetVideoState = (): void => {
    revokePreviewUrl()
    videoState.value.uploadedVideo = ''
    videoState.value.videoFile = null
    videoState.value.selectedVideo = null
    videoState.value.videoSource = null
    videoState.value.previewVideoUrl = ''
    videoState.value.selectorVisible = false
    resetIdentifyState()
    resetMaterialValidation()
  }
  /**
   * Resolve a preview image URL for a library video, in priority order:
   * inline base64 cover (normalized to a data: URL), preview URL, cover
   * URL, then a built-in placeholder SVG.
   */
  const getVideoPreviewUrl = (video: Video): string => {
    if (video.coverBase64) {
      if (!video.coverBase64.startsWith('data:')) {
        return `data:image/jpeg;base64,${video.coverBase64}`
      }
      return video.coverBase64
    }
    if (video.previewUrl) {
      return video.previewUrl
    }
    if (video.coverUrl) {
      return video.coverUrl
    }
    return 'data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMjAwIiBoZWlnaHQ9IjExMCIgdmlld0JveD0iMCAwIDIwMCAxMTAiIGZpbGw9Im5vbmUiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyI+CjxyZWN0IHdpZHRoPSIyMDAiIGhlaWdodD0iMTEwIiBmaWxsPSIjMzc0MTUxIi8+CjxwYXRoIGQ9Ik04NSA0NUwxMTUgNjVMMTA1IDg1TDc1IDc1TDg1IDQ1WiIgZmlsbD0iIzU3MjY1MSIvPgo8L3N2Zz4K'
  }
  /**
   * Reset the recognition result, including the face time window so the
   * derived `faceDuration` does not keep a stale value.
   */
  const resetIdentifyState = (): void => {
    identifyState.value.identified = false
    identifyState.value.sessionId = ''
    identifyState.value.faceId = ''
    identifyState.value.faceStartTime = 0
    identifyState.value.faceEndTime = 0
    identifyState.value.videoFileId = null
  }
  /**
   * Reset the material-duration validation result.
   */
  const resetMaterialValidation = (): void => {
    materialValidation.value.videoDuration = 0
    materialValidation.value.audioDuration = 0
    materialValidation.value.isValid = false
  }
  return {
    // Reactive state
    videoState,
    identifyState,
    materialValidation,
    // Computed
    faceDuration,
    canGenerate,
    // Methods
    handleFileUpload,
    handleVideoSelect,
    performFaceRecognition,
    validateMaterialDuration,
    resetVideoState,
    getVideoPreviewUrl,
  }
}