yangyin
2024-12-02 269b41f9de3e34f06e123341c8f1fb7d9ef4b87f
src/components/chat/components/playBar/voicePage/VoicePage.vue
@@ -17,24 +17,26 @@
            </div>
            <div class="flex items-center">
               <div class="sound-animate relative">
                  <i class="ywicon icon-maikefeng-filled !text-[26px] absolute -left-10 top-[5px]"></i>
                  <i class="ywifont ywicon-maikefeng-filled !text-[26px] absolute -left-10 top-[5px]"></i>
                  <span :style="{ 'animation-play-state': animationPlayState }"></span
                  ><span :style="{ 'animation-play-state': animationPlayState }"></span
                  ><span :style="{ 'animation-play-state': animationPlayState }"></span
                  ><span :style="{ 'animation-play-state': animationPlayState }"></span>
               </div>
            </div>
            <div class="mt-5">请开始说话</div>
            <div class="mt-5" :class="{ 'cursor-pointer': currentVoiceType === VoiceTipType.Speak }" @click="voiceTipClick">
               {{ voiceTipMap[currentVoiceType] }}
            </div>
            <div class="flex items-center justify-between bottom-16 absolute left-1/2 -translate-x-1/2 space-x-16">
               <div class="size-[35px] flex items-center justify-center bg-[#292929] rounded-full cursor-pointer" @click="togglePlayClick">
                  <i class="ywicon !text-[16px]" :class="playIcon"></i>
                  <i class="ywifont !text-[16px]" :class="playIcon"></i>
               </div>
               <div class="size-[56px] flex items-center justify-center bg-red-500 rounded-full cursor-pointer" @click="closeClick">
                  <i class="ywicon icon-guanbi !text-[26px]"></i>
                  <i class="ywifont ywicon-guanbi !text-[26px]"></i>
               </div>
               <div class="size-[35px] flex items-center justify-center bg-[#292929] rounded-full cursor-pointer">
                  <i class="ywicon icon-gengduo !text-[23px]"></i>
                  <i class="ywifont ywicon-gengduo !text-[23px]"></i>
               </div>
            </div>
         </div>
@@ -43,30 +45,106 @@
</template>
<script setup lang="ts">
import { computed, ref, watch } from 'vue';
import { computed, nextTick, ref, watch } from 'vue';
import type { ChatContent } from '../../../model/types';
import { AnswerType } from '../../../model/types';
import { VoiceRecognitionErrorType, VoiceTipType, voiceTipMap } from './types';
import router from '/@/router';
import { setRoomConfig } from '/@/stores/chatRoom';
// 'running' = the waveform bars animate (mic live / TTS playing); 'paused' = frozen.
const animationPlayState = ref<'paused' | 'running'>('running');
// Play/pause button icon mirrors the animation state.
// NOTE: the stale pre-rename `icon-*` duplicate declaration was removed — a second
// `const playIcon` is a redeclaration error; only the `ywicon-*` version is kept.
const playIcon = computed(() => (animationPlayState.value === 'running' ? 'ywicon-zanting' : 'ywicon-bofang'));
// True while speech synthesis is actively speaking (toggled by togglePlayClick).
const isSpeak = ref(false);
/**
 * Play/pause button handler: flips the waveform animation and, while the
 * answer is being read aloud, pauses/resumes the speech synthesis to match.
 */
const togglePlayClick = () => {
   const wasRunning = animationPlayState.value === 'running';
   animationPlayState.value = wasRunning ? 'paused' : 'running';
   // Speech synthesis only needs toggling while we are in the "speaking" state.
   if (currentVoiceType.value !== VoiceTipType.Speak) {
      return;
   }
   if (isSpeak.value) {
      window.speechSynthesis.pause();
   } else {
      window.speechSynthesis.resume();
   }
   isSpeak.value = !isSpeak.value;
};
// isHome: whether the hosting page is the home page — changes how submit is wired
// (direct callback vs. room-config firstResCb, see recognition.onresult).
const props = defineProps(['isHome']);
// submit: ask the parent to send the recognized text; updateInputValue: sync transcript up.
const emit = defineEmits(['submit', 'updateInputValue']);
// v-model:isShow — controls visibility of the voice overlay; watched below to reset state.
const isShow = defineModel('isShow', {
   type: Boolean,
});
// Return the UI to the idle "no speech yet" tip and restart speech recognition.
const resetToListenVoice = () => {
   currentVoiceType.value = VoiceTipType.NoSpeech;
   audioChangeWord();
};
// True while a recognition session is active (cleared in recognition.onerror).
const isListening = ref(false);
// Latest recognized transcript; mirrored to the parent via 'updateInputValue'.
const inputValue = ref('');
// Red close button: hide the overlay; the isShow watcher then runs resetStatus.
const closeClick = () => {
   isShow.value = false;
};
// Active speech-recognition instance (null until audioChangeWord creates one).
let recognition = null;
// Active SpeechSynthesisUtterance (null until an answer is spoken).
let speech = null;
// Current voice-page state; drives the tip text (voiceTipMap) and click behavior.
const currentVoiceType = ref<VoiceTipType>(VoiceTipType.NoSpeech);
const handleAnswerRes = (res: ChatContent) => {
   if (!res) {
      return;
   }
   if (!isShow.value) {
      return;
   }
   let text = '';
   if (res.type === AnswerType.Text || res.type === AnswerType.Knowledge) {
      if (res.type === AnswerType.Knowledge) {
         text = res.values?.map((item) => item.answer) ?? '';
      } else {
         text = res.values;
      }
   } else {
      text = '抱歉,我无法口述回答此问题的,需要查看请关闭此语音对话界面';
   }
   currentVoiceType.value = VoiceTipType.Speak;
   isSpeak.value = true;
   var speech = new SpeechSynthesisUtterance();
   speech.text = text; // 内容
   speech.lang = 'zh-cn'; // 语言
   speech.voiceURI = 'Microsoft Huihui - Chinese (Simplified, PRC)'; // 声音和服务
   // eslint-disable-next-line no-irregular-whitespace
   speech.volume = 0.7; // 声音的音量区间范围是​​0​​​到​​1默认是​​1​​
   // eslint-disable-next-line no-irregular-whitespace
   speech.rate = 1; // 语速,数值,默认值是​​1​​​,范围是​​0.1​​​到​​10​​​,表示语速的倍数,例如​​2​​表示正常语速的两倍
   // eslint-disable-next-line no-irregular-whitespace
   speech.pitch = 1; // 表示说话的音高,数值,范围从​​0​​​(最小)到​​2​​​(最大)。默认值为​​1​​。
   speech.onend = () => {
      resetToListenVoice();
   };
   window.speechSynthesis.speak(speech);
   setRoomConfig(router.currentRoute.value.query.id as string, 'firstResCb', undefined);
};
/**
 * Click on the tip text. Always cancels any queued/playing speech synthesis;
 * when clicked while the answer is being spoken, also returns to listening.
 * FIX: the original called window.speechSynthesis.cancel() twice on the
 * Speak path (inside the switch case and again after it) — once is enough.
 */
const voiceTipClick = () => {
   window.speechSynthesis.cancel();
   if (currentVoiceType.value === VoiceTipType.Speak) {
      // Defer so the cancel settles before recognition restarts.
      setTimeout(() => {
         resetToListenVoice();
      }, 0);
   }
};
const audioChangeWord = () => {
   inputValue.value = '';
   emit('updateInputValue', '');
   // 创建SpeechRecognition对象
   // eslint-disable-next-line no-undef
   var recognition = new webkitSpeechRecognition();
   recognition = new webkitSpeechRecognition();
   if (!recognition) {
      // eslint-disable-next-line no-undef
      recognition = new SpeechRecognition();
@@ -84,14 +162,35 @@
   recognition.onresult = function (event) {
      var result = event.results[0][0].transcript;
      console.log('监听结果:', result);
      inputValue.value = result;
      emit('updateInputValue', result);
      currentVoiceType.value = VoiceTipType.Think;
      if (!props.isHome) {
         emit('submit', handleAnswerRes);
      } else {
         setRoomConfig(router.currentRoute.value.query.id as string, 'firstResCb', handleAnswerRes);
         emit('submit');
      }
   };
   recognition.onspeechstart = (event) => {
      currentVoiceType.value = VoiceTipType.Speech;
   };
   // 监听错误事件
   recognition.onerror = function (event) {
      isListening.value = false;
      ElMessage.error('监听语音失败');
      // ElMessage.error('监听语音失败');
      console.error(event.error);
      switch (event.error) {
         case VoiceRecognitionErrorType.NoSpeech:
            if (isShow.value) {
               resetToListenVoice();
            }
            break;
         default:
            break;
      }
   };
   // 监听结束事件(包括识别成功、识别错误和用户停止)
   recognition.onend = function () {
@@ -101,7 +200,10 @@
};
// Full teardown when the overlay closes: reset UI state, stop recognition, stop TTS.
const resetStatus = () => {
   currentVoiceType.value = VoiceTipType.NoSpeech;
   animationPlayState.value = 'running';
   // Abort any in-flight recognition session (no-op if never started).
   recognition?.abort();
   // Drop any queued or currently-playing utterance.
   window.speechSynthesis.cancel();
};
watch(
@@ -109,6 +211,8 @@
   (val) => {
      if (!val) {
         resetStatus();
      } else {
         resetToListenVoice();
      }
   }
);