123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329 |
- <script setup lang="ts">
- import { ref ,computed,watch } from 'vue';
- import { useBasicLayout } from '@/hooks/useBasicLayout'
- import { t } from '@/locales'
- import { NInput ,NButton,useMessage,NImage,NTooltip, NAutoComplete,NTag
- ,NPopover,NModal, NDropdown } from 'naive-ui'
- import { SvgIcon } from '@/components/common';
- import { canVisionModel, GptUploader, mlog, upImg,getFileFromClipboard,isFileMp3
- ,countTokens, checkDisableGpt4, Recognition } from '@/api';
- import { gptConfigStore, homeStore,useChatStore } from '@/store';
- import { AutoCompleteOptions } from 'naive-ui/es/auto-complete/src/interface';
- import { RenderLabel } from 'naive-ui/es/_internal/select-menu/src/interface';
- import { useRoute } from 'vue-router'
- import aiModel from "@/views/mj/aiModel.vue"
- import AiMic from './aiMic.vue';
import { useIconRender } from '@/hooks/useIconRender'
const { iconRender } = useIconRender()
//import FormData from 'form-data'
const route = useRoute()
const chatStore = useChatStore()
// v-model plumbing: the parent component owns the input text.
const emit = defineEmits(['update:modelValue'])
const props = defineProps<{ modelValue:string,disabled?:boolean,searchOptions?:AutoCompleteOptions,renderOption?: RenderLabel }>();
// Ref to the hidden <input type="file"> opened by the attachment icon.
const fsRef = ref()
// Local UI state:
//   fileBase64 - queued attachments (base64 data or uploaded URLs)
//   isLoad     - 1 while an upload is in flight (drives the spinner icon)
//   isShow     - model-settings modal visibility
//   showMic    - Whisper voice-recorder overlay visibility
//   micStart   - true while browser ASR is actively listening
const st = ref<{fileBase64:string[],isLoad:number,isShow:boolean,showMic:boolean,micStart:boolean}>({fileBase64:[],isLoad:0
,isShow:false,showMic:false , micStart:false})
const { isMobile } = useBasicLayout()
// Placeholder text differs between mobile and desktop layouts.
const placeholder = computed(() => {
    if (isMobile.value)
        return t('chat.placeholderMobile')
    return t('chat.placeholder'); // "type something, or paste a screenshot / drag a file in"
})
const { uuid } = route.params as { uuid: string }
// Messages of the currently open chat session (input for token counting).
const dataSources = computed(() => chatStore.getChatByUuid(+uuid))
- const handleSubmit = ( ) => {
- if( mvalue.value=='' ) return ;
- if(checkDisableGpt4(gptConfigStore.myData.model)){
- ms.error( t('mj.disableGpt4') );
- return false;
- }
- if( homeStore.myData.isLoader ) {
- return ;
- }
- let obj={
- prompt: mvalue.value,
- fileBase64:st.value.fileBase64
- }
- homeStore.setMyData({act:'gpt.submit', actData:obj });
- mvalue.value='';
- st.value.fileBase64=[];
- return false;
- }
- const ms= useMessage();
- const mvalue = computed({
- get() { return props.modelValue },
- set(value) { emit('update:modelValue', value) }
- })
- function selectFile(input:any){
- const file = input.target.files[0];
- upFile( file );
- }
- const myToken =ref({remain:0,modelTokens:'4k'});
- const funt = async ()=>{
- const d = await countTokens( dataSources.value, mvalue.value ,chatStore.active??1002 )
- myToken.value=d ;
- return d ;
- }
- watch(()=>mvalue.value, funt )
- watch(()=> dataSources.value , funt )
- watch(()=> gptConfigStore.myData , funt,{deep:true} )
- watch(()=> homeStore.myData.isLoader , funt,{deep:true} )
- funt();
-
// Dispatch an incoming file to the appropriate upload path:
//   - non-vision model + mp3   -> hand off to the Whisper transcription action
//   - non-vision model + other -> read as base64 image, queue as attachment
//   - vision-capable model     -> POST raw file to /v1/upload, queue returned URL
const upFile= (file:any )=>{
    if( !canVisionModel(gptConfigStore.myData.model ) ) {
        if( isFileMp3( file.name ) ){
            mlog('mp3' , file);
            // Earlier inline Whisper upload, kept for reference (now routed
            // through the 'gpt.whisper' store action below):
            // const formData = new FormData( );
            // formData.append('file', file);
            // formData.append('model', 'whisper-1');
            // GptUploader('/v1/audio/transcriptions',formData).then(r=>{
            //     mlog('transcription succeeded', r );
            // }).catch(e=>ms.error('upload failed:'+ ( e.message?? JSON.stringify(e)) ));
            homeStore.setMyData({act:'gpt.whisper', actData:{ file , prompt:'whisper' } });
            return ;
        }else{
            upImg( file).then(d=>{
                // Reset the <input type=file> so picking the same file re-fires change.
                fsRef.value.value='';
                if(st.value.fileBase64.findIndex(v=>v==d)>-1) {
                    ms.error(t('mj.noReUpload')) ; // duplicate uploads are not allowed
                    return ;
                }
                st.value.fileBase64.push(d)
            } ).catch(e=>ms.error(e));
        }
    }else{
        // Vision model: send the raw file to the backend uploader.
        const formData = new FormData( );
        //const file = input.target.files[0];
        formData.append('file', file);
        ms.info( t('mj.uploading') );
        st.value.isLoad=1; // drives the "uploading" spinner icon in the template
        GptUploader('/v1/upload',formData).then(r=>{
            //mlog('upload succeeded', r);
            st.value.isLoad= 0 ;
            if(r.url ){
                ms.info(t('mj.uploadSuccess'));
                // Absolute URLs are kept as-is; relative ones get the page origin prefixed.
                if(r.url.indexOf('http')>-1) {
                    st.value.fileBase64.push(r.url)
                }else{
                    st.value.fileBase64.push(location.origin +r.url)
                }
            }else if(r.error) ms.error(r.error);
        }).catch(e=>{
            st.value.isLoad= 0 ;
            ms.error( t('mj.uploadFail')+ ( e.message?? JSON.stringify(e)) )
        });
    }
}
-
- function handleEnter(event: KeyboardEvent) {
- if (!isMobile.value) {
- if (event.key === 'Enter' && !event.shiftKey) {
- event.preventDefault()
- handleSubmit()
- }
- }
- else {
- if (event.key === 'Enter' && event.ctrlKey) {
- event.preventDefault()
- handleSubmit()
- }
- }
- }
- const acceptData = computed(() => {
- if( canVisionModel(gptConfigStore.myData.model) ) return "*/*";
- return "image/jpeg, image/jpg, image/png, image/gif, .mp3, .mp4, .mpeg, .mpga, .m4a, .wav, .webm"
- })
- const drop = (e: DragEvent) => {
- e.preventDefault();
- e.stopPropagation();
- if( !e.dataTransfer || e.dataTransfer.files.length==0 ) return;
- const files = e.dataTransfer.files;
- upFile(files[0]);
- //mlog('drop', files);
- }
- const paste= (e: ClipboardEvent)=>{
- let rz = getFileFromClipboard(e);
- if(rz.length>0 ) upFile(rz[0]);
- }
-
- const sendMic= (e:any )=>{
- mlog('sendMic', e );
- st.value.showMic=false;
- let du = 'whisper.wav';// (e.stat && e.stat.duration)?(e.stat.duration.toFixed(2)+'s'):'whisper.wav';
- const file = new File([e.blob], du, { type: 'audio/wav' });
- homeStore.setMyData({act:'gpt.whisper', actData:{ file , prompt:'whisper',duration : e.stat?.duration } });
- }
- //语音识别ASR
- const goASR=()=>{
- const olod = mvalue.value;
- const rec= new Recognition();
- let rz= '';
- rec.setListener( (r:string)=>{
- //mlog('result ', r );
- rz= r ;
- mvalue.value= r;
- st.value.micStart= true
- }).setOnEnd( ( )=>{
- //mlog('rec end');
- mvalue.value= olod+rz;
- ms.info( t('mj.micRecEnd'));
- st.value.micStart= false
- }).setOpt({
- timeOut:2000,
- onStart:()=>{ ms.info( t('mj.micRec')); st.value.micStart= true },
- }).start();
- }
- const drOption=[
- {
- label: t('mj.micWhisper'),
- key: "whisper",
- icon:iconRender({ icon: 'ri:openai-fill' }),
- },{
- label: t('mj.micAsr'),
- icon:iconRender({ icon: 'ri:chrome-line' }),
- key: "asr"
- }
- ]
- const handleSelectASR = ( key: string | number )=>{
- if(key=='asr') goASR();
- if(key=='whisper') st.value.showMic=true;
- }
- </script>
<template>
  <!-- Voice-recorder overlay (Whisper) replaces the input while recording -->
  <div v-if="st.showMic" class=" myinputs flex justify-center items-center">
    <AiMic @cancel="st.showMic=false" @send="sendMic" />
  </div>
  <div class=" myinputs" @drop="drop" @paste="paste" v-else>
    <!-- Hidden file input triggered by the attachment icon -->
    <input type="file" id="fileInput" @change="selectFile" class="hidden" ref="fsRef" :accept="acceptData"/>
    <div class="w-full relative">
      <!-- Queued attachment previews (fix: v-for now has :key) -->
      <div class="flex items-base justify-start pb-1 flex-wrap-reverse" v-if="st.fileBase64.length>0 ">
        <div class="w-[60px] h-[60px] rounded-sm bg-slate-50 mr-1 mt-1 text-red-300 relative group" v-for="(v,ii) in st.fileBase64" :key="v">
          <NImage :src="v" object-fit="cover" class="w-full h-full" >
            <template #placeholder>
              <a class="w-full h-full flex items-center justify-center text-neutral-500" :href="v" target="_blank" rel="noopener noreferrer">
                <SvgIcon icon="mdi:download" />{{ $t('mj.attr1') }} {{ ii+1 }}
              </a>
            </template>
          </NImage>
          <SvgIcon icon="mdi:close" class="hidden group-hover:block absolute top-[-5px] right-[-5px] rounded-full bg-red-300 text-white cursor-pointer" @click="st.fileBase64.splice(st.fileBase64.indexOf(v),1)"></SvgIcon>
        </div>
      </div>
      <!-- Token usage badge + settings popover -->
      <div class="absolute bottom-0 right-0 z-1">
        <NPopover trigger="hover">
          <template #trigger>
            <NTag type="info" round size="small" style="cursor: pointer; " :bordered="false" >
              <div class="opacity-60 flex" >
                <SvgIcon icon="material-symbols:token-outline" /> {{ $t('mj.remain') }}{{ myToken.remain }}/{{ myToken.modelTokens }}
              </div>
            </NTag>
          </template>
          <div class="w-[300px]">
            {{ $t('mj.tokenInfo1') }}
            <p class="py-1" v-text="$t('mj.tokenInfo2')"> </p>
            <p class=" text-right">
              <NButton @click="st.isShow=true" type="info" size="small">{{ $t('setting.setting') }}</NButton>
            </p>
          </div>
        </NPopover>
      </div>
    </div>
    <NAutoComplete v-model:value="mvalue" :options="searchOptions" :render-label="renderOption" >
      <template #default="{ handleInput, handleBlur, handleFocus }">
        <NInput ref="inputRef" v-model:value="mvalue" type="textarea"
          :placeholder="placeholder" :autosize="{ minRows: 1, maxRows: isMobile ? 4 : 8 }"
          @input="handleInput"
          @focus="handleFocus"
          @blur="handleBlur"
          @keypress="handleEnter" >
          <template #prefix>
            <!-- fix: class was " relative; w-[22px]" — the stray semicolon kept
                 Tailwind's `relative` from applying to this anchor element -->
            <div class="relative w-[22px]">
              <n-tooltip trigger="hover">
                <template #trigger>
                  <SvgIcon icon="line-md:uploading-loop" class="absolute bottom-[10px] left-[8px] cursor-pointer" v-if="st.isLoad==1"></SvgIcon>
                  <SvgIcon icon="ri:attachment-line" class="absolute bottom-[10px] left-[8px] cursor-pointer" @click="fsRef.click()" v-else></SvgIcon>
                </template>
                <div v-if="canVisionModel(gptConfigStore.myData.model)" v-html="$t('mj.upPdf')">
                </div>
                <div v-else v-html="$t('mj.upImg')">
                </div>
              </n-tooltip>
            </div>
            <n-dropdown trigger="hover" :options="drOption" @select="handleSelectASR">
              <div class="relative w-[22px]">
                <!-- pulsing dot while browser ASR is listening -->
                <div class="absolute bottom-[14px] left-[31px]" v-if="st.micStart">
                  <span class="relative flex h-3 w-3" >
                    <span class="animate-ping absolute inline-flex h-full w-full rounded-full bg-red-500 opacity-75"></span>
                    <span class="relative inline-flex rounded-full h-3 w-3 bg-red-400"></span>
                  </span>
                </div>
                <SvgIcon icon="bi:mic" class="absolute bottom-[10px] left-[30px] cursor-pointer"></SvgIcon>
              </div>
            </n-dropdown>
          </template>
          <template #suffix>
            <div class="relative w-[40px] ">
              <div class="absolute bottom-[-3px] right-[0px] ">
                <!-- Send button; shows a stop icon (disabled) while a request runs -->
                <NButton type="primary" :disabled="disabled || homeStore.myData.isLoader " @click="handleSubmit" >
                  <template #icon>
                    <span class="dark:text-black">
                      <SvgIcon icon="ri:stop-circle-line" v-if="homeStore.myData.isLoader" />
                      <SvgIcon icon="ri:send-plane-fill" v-else/>
                    </span>
                  </template>
                </NButton>
              </div>
            </div>
          </template>
        </NInput>
      </template>
    </NAutoComplete>
  </div>
  <!-- Model settings modal, opened from the token-usage popover -->
  <NModal v-model:show="st.isShow" preset="card" :title="$t('mjchat.modelChange')" class="!max-w-[620px]" @close="st.isShow=false" >
    <aiModel @close="st.isShow=false"/>
  </NModal>
</template>
<style >
/* Stretch the NInput wrapper's children so the prefix/suffix icon columns
   span the full height of the autosizing textarea. */
.myinputs .n-input .n-input-wrapper{
    @apply items-stretch;
}
</style>
|