<!-- aiGptInput.vue -->

<script setup lang="ts">
import { ref, computed, watch } from 'vue'
import { useBasicLayout } from '@/hooks/useBasicLayout'
import { t } from '@/locales'
import { NInput, NButton, useMessage, NImage, NTooltip, NAutoComplete, NTag, NPopover, NModal, NDropdown } from 'naive-ui'
import { SvgIcon } from '@/components/common'
import { canVisionModel, GptUploader, mlog, upImg, getFileFromClipboard, isFileMp3, countTokens, checkDisableGpt4, Recognition } from '@/api'
import { gptConfigStore, homeStore, useChatStore } from '@/store'
import { AutoCompleteOptions } from 'naive-ui/es/auto-complete/src/interface'
import { RenderLabel } from 'naive-ui/es/_internal/select-menu/src/interface'
import { useRoute } from 'vue-router'
import aiModel from '@/views/mj/aiModel.vue'
import AiMic from './aiMic.vue'
import { useIconRender } from '@/hooks/useIconRender'

const { iconRender } = useIconRender()
// import FormData from 'form-data'
const route = useRoute()
const chatStore = useChatStore()
const emit = defineEmits(['update:modelValue'])
const props = defineProps<{ modelValue: string, disabled?: boolean, searchOptions?: AutoCompleteOptions, renderOption?: RenderLabel }>()
const fsRef = ref()
const st = ref<{ fileBase64: string[], isLoad: number, isShow: boolean, showMic: boolean, micStart: boolean }>({
  fileBase64: [], isLoad: 0, isShow: false, showMic: false, micStart: false
})
const { isMobile } = useBasicLayout()
const placeholder = computed(() => {
  if (isMobile.value)
    return t('chat.placeholderMobile')
  return t('chat.placeholder') // "Say something, paste a screenshot, or drag in a file"
})
const { uuid } = route.params as { uuid: string }
const dataSources = computed(() => chatStore.getChatByUuid(+uuid))
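
// Submit the current prompt plus any attached files through the shared homeStore
// action, then clear the input and the attachment list.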
const handleSubmit = () => {
  if (mvalue.value === '') return
  if (checkDisableGpt4(gptConfigStore.myData.model)) {
    ms.error(t('mj.disableGpt4'))
    return false
  }
  if (homeStore.myData.isLoader) {
    return
  }
  const obj = {
    prompt: mvalue.value,
    fileBase64: st.value.fileBase64
  }
  homeStore.setMyData({ act: 'gpt.submit', actData: obj })
  mvalue.value = ''
  st.value.fileBase64 = []
  return false
}
const ms = useMessage()
const mvalue = computed({
  get() { return props.modelValue },
  set(value) { emit('update:modelValue', value) }
})

function selectFile(input: any) {
  const file = input.target.files[0]
  upFile(file)
}
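
// Live token counter: recompute the remaining / total tokens whenever the draft text,
// chat history, model config, or loading state changes.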
const myToken = ref({ remain: 0, modelTokens: '4k' })
const funt = async () => {
  const d = await countTokens(dataSources.value, mvalue.value, chatStore.active ?? 1002)
  myToken.value = d
  return d
}
watch(() => mvalue.value, funt)
watch(() => dataSources.value, funt)
watch(() => gptConfigStore.myData, funt, { deep: true })
watch(() => homeStore.myData.isLoader, funt, { deep: true })
funt()
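
// Handle a selected / dropped / pasted file.
// Non-vision models: mp3 files are routed to the whisper flow, images are converted
// to base64 locally via upImg(). Vision-capable models: the file is uploaded to the
// server and the returned URL is attached instead.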
const upFile = (file: any) => {
  if (!canVisionModel(gptConfigStore.myData.model)) {
    if (isFileMp3(file.name)) {
      mlog('mp3', file)
      // const formData = new FormData()
      // formData.append('file', file)
      // formData.append('model', 'whisper-1')
      // GptUploader('/v1/audio/transcriptions', formData).then(r => {
      //   mlog('speech recognition succeeded', r)
      // }).catch(e => ms.error('upload failed: ' + (e.message ?? JSON.stringify(e))))
      homeStore.setMyData({ act: 'gpt.whisper', actData: { file, prompt: 'whisper' } })
      return
    } else {
      upImg(file).then(d => {
        fsRef.value.value = ''
        if (st.value.fileBase64.findIndex(v => v == d) > -1) {
          ms.error(t('mj.noReUpload')) // duplicate uploads are not allowed
          return
        }
        st.value.fileBase64.push(d)
      }).catch(e => ms.error(e))
    }
  } else {
    const formData = new FormData()
    // const file = input.target.files[0]
    formData.append('file', file)
    ms.info(t('mj.uploading'))
    st.value.isLoad = 1
    GptUploader('/v1/upload', formData).then(r => {
      // mlog('upload succeeded', r)
      st.value.isLoad = 0
      if (r.url) {
        ms.info(t('mj.uploadSuccess'))
        if (r.url.indexOf('http') > -1) {
          st.value.fileBase64.push(r.url)
        } else {
          st.value.fileBase64.push(location.origin + r.url)
        }
      } else if (r.error) ms.error(r.error)
    }).catch(e => {
      st.value.isLoad = 0
      ms.error(t('mj.uploadFail') + (e.message ?? JSON.stringify(e)))
    })
  }
}
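
// Enter sends on desktop (Shift+Enter inserts a newline); on mobile, Ctrl+Enter sends.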
function handleEnter(event: KeyboardEvent) {
  if (!isMobile.value) {
    if (event.key === 'Enter' && !event.shiftKey) {
      event.preventDefault()
      handleSubmit()
    }
  } else {
    if (event.key === 'Enter' && event.ctrlKey) {
      event.preventDefault()
      handleSubmit()
    }
  }
}
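
// Accepted types for the hidden file input: vision-capable models take any file,
// other models are limited to images and audio.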
const acceptData = computed(() => {
  if (canVisionModel(gptConfigStore.myData.model)) return '*/*'
  return 'image/jpeg, image/jpg, image/png, image/gif, .mp3, .mp4, .mpeg, .mpga, .m4a, .wav, .webm'
})
const drop = (e: DragEvent) => {
  e.preventDefault()
  e.stopPropagation()
  if (!e.dataTransfer || e.dataTransfer.files.length == 0) return
  const files = e.dataTransfer.files
  upFile(files[0])
  // mlog('drop', files)
}

const paste = (e: ClipboardEvent) => {
  const rz = getFileFromClipboard(e)
  if (rz.length > 0) upFile(rz[0])
}
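
// Wrap the blob recorded by AiMic as a wav file and dispatch it through the whisper flow.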
const sendMic = (e: any) => {
  mlog('sendMic', e)
  st.value.showMic = false
  const du = 'whisper.wav' // (e.stat && e.stat.duration) ? (e.stat.duration.toFixed(2) + 's') : 'whisper.wav'
  const file = new File([e.blob], du, { type: 'audio/wav' })
  homeStore.setMyData({ act: 'gpt.whisper', actData: { file, prompt: 'whisper', duration: e.stat?.duration } })
}
// Speech recognition (ASR)
const goASR = () => {
  const old = mvalue.value
  const rec = new Recognition()
  let rz = ''
  rec.setListener((r: string) => {
    // mlog('result', r)
    rz = r
    mvalue.value = r
    st.value.micStart = true
  }).setOnEnd(() => {
    // mlog('rec end')
    mvalue.value = old + rz
    ms.info(t('mj.micRecEnd'))
    st.value.micStart = false
  }).setOpt({
    timeOut: 2000,
    onStart: () => { ms.info(t('mj.micRec')); st.value.micStart = true },
  }).start()
}
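
// Microphone dropdown: "whisper" opens the AiMic recorder, "asr" starts goASR().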
const drOption = [
  {
    label: t('mj.micWhisper'),
    key: 'whisper',
    icon: iconRender({ icon: 'ri:openai-fill' }),
  }, {
    label: t('mj.micAsr'),
    icon: iconRender({ icon: 'ri:chrome-line' }),
    key: 'asr'
  }
]
const handleSelectASR = (key: string | number) => {
  if (key == 'asr') goASR()
  if (key == 'whisper') st.value.showMic = true
}
</script>
<template>
  <div v-if="st.showMic" class="myinputs flex justify-center items-center">
    <AiMic @cancel="st.showMic=false" @send="sendMic" />
  </div>
  <div class="myinputs" @drop="drop" @paste="paste" v-else>
    <input type="file" id="fileInput" @change="selectFile" class="hidden" ref="fsRef" :accept="acceptData"/>
    <div class="w-full relative">
      <!-- Attachment previews (base64 images or uploaded file URLs) -->
      <div class="flex items-base justify-start pb-1 flex-wrap-reverse" v-if="st.fileBase64.length>0">
        <div class="w-[60px] h-[60px] rounded-sm bg-slate-50 mr-1 mt-1 text-red-300 relative group" v-for="(v,ii) in st.fileBase64" :key="v">
          <NImage :src="v" object-fit="cover" class="w-full h-full">
            <template #placeholder>
              <a class="w-full h-full flex items-center justify-center text-neutral-500" :href="v" target="_blank">
                <SvgIcon icon="mdi:download" />{{ $t('mj.attr1') }} {{ ii+1 }}
              </a>
            </template>
          </NImage>
          <SvgIcon icon="mdi:close" class="hidden group-hover:block absolute top-[-5px] right-[-5px] rounded-full bg-red-300 text-white cursor-pointer" @click="st.fileBase64.splice(st.fileBase64.indexOf(v),1)"></SvgIcon>
        </div>
      </div>
      <div class="absolute bottom-0 right-0 z-1">
        <NPopover trigger="hover">
          <template #trigger>
            <NTag type="info" round size="small" style="cursor: pointer;" :bordered="false">
              <div class="opacity-60 flex">
                <SvgIcon icon="material-symbols:token-outline" /> {{ $t('mj.remain') }}{{ myToken.remain }}/{{ myToken.modelTokens }}
              </div>
            </NTag>
          </template>
          <div class="w-[300px]">
            {{ $t('mj.tokenInfo1') }}
            <p class="py-1" v-text="$t('mj.tokenInfo2')"></p>
            <p class="text-right">
              <NButton @click="st.isShow=true" type="info" size="small">{{ $t('setting.setting') }}</NButton>
            </p>
          </div>
        </NPopover>
      </div>
    </div>
    <NAutoComplete v-model:value="mvalue" :options="searchOptions" :render-label="renderOption">
      <template #default="{ handleInput, handleBlur, handleFocus }">
        <NInput ref="inputRef" v-model:value="mvalue" type="textarea"
          :placeholder="placeholder" :autosize="{ minRows: 1, maxRows: isMobile ? 4 : 8 }"
          @input="handleInput"
          @focus="handleFocus"
          @blur="handleBlur"
          @keypress="handleEnter">
          <template #prefix>
            <div class="relative w-[22px]">
              <n-tooltip trigger="hover">
                <template #trigger>
                  <SvgIcon icon="line-md:uploading-loop" class="absolute bottom-[10px] left-[8px] cursor-pointer" v-if="st.isLoad==1"></SvgIcon>
                  <SvgIcon icon="ri:attachment-line" class="absolute bottom-[10px] left-[8px] cursor-pointer" @click="fsRef.click()" v-else></SvgIcon>
                </template>
                <div v-if="canVisionModel(gptConfigStore.myData.model)" v-html="$t('mj.upPdf')"></div>
                <div v-else v-html="$t('mj.upImg')"></div>
              </n-tooltip>
            </div>
            <!-- <div class="relative w-[22px]">
              <SvgIcon icon="bi:mic" class="absolute bottom-[10px] left-[30px] cursor-pointer" @click="st.showMic=true"></SvgIcon>
            </div> -->
            <n-dropdown trigger="hover" :options="drOption" @select="handleSelectASR">
              <div class="relative w-[22px]">
                <div class="absolute bottom-[14px] left-[31px]" v-if="st.micStart">
                  <span class="relative flex h-3 w-3">
                    <span class="animate-ping absolute inline-flex h-full w-full rounded-full bg-red-500 opacity-75"></span>
                    <span class="relative inline-flex rounded-full h-3 w-3 bg-red-400"></span>
                  </span>
                </div>
                <!-- <SvgIcon icon="bi:mic" class="absolute bottom-[10px] left-[55px] cursor-pointer" @click="goASR()"></SvgIcon> -->
                <SvgIcon icon="bi:mic" class="absolute bottom-[10px] left-[30px] cursor-pointer"></SvgIcon>
              </div>
            </n-dropdown>
          </template>
          <template #suffix>
            <div class="relative w-[40px]">
              <div class="absolute bottom-[-3px] right-[0px]">
                <NButton type="primary" :disabled="disabled || homeStore.myData.isLoader" @click="handleSubmit">
                  <template #icon>
                    <span class="dark:text-black">
                      <SvgIcon icon="ri:stop-circle-line" v-if="homeStore.myData.isLoader" />
                      <SvgIcon icon="ri:send-plane-fill" v-else/>
                    </span>
                  </template>
                </NButton>
              </div>
            </div>
          </template>
        </NInput>
      </template>
    </NAutoComplete>
    <!-- translate-y-[-8px] -->
  </div>
  <NModal v-model:show="st.isShow" preset="card" :title="$t('mjchat.modelChange')" class="!max-w-[620px]" @close="st.isShow=false">
    <aiModel @close="st.isShow=false"/>
  </NModal>
  <!-- <n-drawer v-model:show="st.showMic" :width="420" :on-update:show="onShowFun">
    <n-drawer-content title="Recording" closable>
      <AiMic />
    </n-drawer-content>
  </n-drawer> -->
</template>
<style>
.myinputs .n-input .n-input-wrapper {
  @apply items-stretch;
}
</style>