apply-assistant-v3/pages/addJob/index.vue

<template>
<div class="g_page_ai">
<div class="main-obj" style="background-color: #ffffff;">
<div class="" style="overflow-y: auto;position: relative;">
<div style="padding: 24px 0;" class="g_flex_row_center g_position_rela">
<div class="g_w_64 g_h_64 g_flex_c" style="overflow: hidden;border-radius: 50%;">
<image :src="fadanBaseImg + 'v3/add1.png'" alt="" style="width: 100px;" mode="widthFix" />
</div>
</div>
<div class="conet"
style="width: calc(100vw - 40px);margin: 0px auto 12px;background-color: #f5f5f5;border-radius: 12px;padding: 8px;">
<div class="g_c_4 g_fs_16 g_c_0">
你好,我是伯才智能匹配AI<text class="g_fw_bold">大鹏</text>,可以帮老乡快速匹配工作,支持语音输入。为了匹配更准确,需多提供老乡需求信息,例如:
</div>
<div class="g_pt_24 g_pb_24">
<div class="g_flex_row_start g_mb_4">
<div class="g_fs_16 g_fw_600 g_mr_4 g_flex_none g_c_0">1. 性别</div>
</div>
<div class="g_flex_row_start g_mb_4">
<div class="g_fs_16 g_fw_600 g_mr_4 g_flex_none g_c_0">2. 年龄</div>
</div>
<div class="g_flex_row_start g_mb_4">
<div class="g_fs_16 g_fw_600 g_mr_4 g_flex_none g_c_0">3. 意向城市</div>
</div>
<div class="g_flex_row_start g_mb_0">
<div class="g_fs_16 g_fw_600 g_mr_4 g_flex_none g_c_0">4. 工作要求(如吃住、班次等)</div>
</div>
</div>
<view class="g_pb_24" >
<div class="">
<text class="g_fs_16 g_flex_none g_c_0">示例</text>
<text class="g_fs_16 g_fw_600 g_mr_4 g_flex_none g_c_0">有位32岁大姐想去常州找个长白班的工作</text>
</div>
</view>
<div class="g_c_4 g_fs_16 g_c_0">
快告诉我老乡需求,开始匹配吧!
</div>
</div>
<div class="g_flex_row_between"
style="margin-top: 4px;padding-left: 40rpx;padding-right: 40rpx;"
v-if="corpUserFlag"
>
<div v-for="(item,index) in customList" :key="index" @click="goChatDetail(item)">
<div>
<image class="" :src="item.image" style="width: 96rpx;height: 96rpx;border-radius: 8px;">
</image>
</div>
<div style="margin-top: 8px;color: rgba(0, 0, 0, 0.5);font-size: 12px;">
{{ item.title }}
</div>
</div>
</div>
</div>
<div style="height: 251px;background-color: #fff;"></div>
<div class="g_flex_none m-opera-obj-fixed g_flex_column_end"
style="position: fixed;
left: 50%;
transform: translateX(-50%);
bottom:44px;
bottom: calc(44px + constant(safe-area-inset-bottom));
bottom: calc(44px + env(safe-area-inset-bottom));
width: 100vw;
padding-top: 10px;
padding-bottom: 20px;
background-image: linear-gradient(to bottom,rgba(255, 255, 255, 0) 0%,rgba(255, 255, 255, 1) 100%);"
>
<div class="g_flex_row_center voice-ban">
<view style="width: 120px;height: 40px;border-radius: 50px;border: 1px solid #eee;"
class="g_flex_c g_fs_16 view-btn g_bg_f" :style="btnStyle" hover-class="thover" @click="goHistory('view')">
查看会话
</view>
</div>
<div class="tip-ban g_flex_row_center g_h_26">
<view v-if="voiceStatus == 0"
class="voice-tip g_h_all g_mt_3"
style="color: #666666;
font-size: 12px;
"
:style="writeStylt"
>
松手发送,上移取消
</view>
<view v-if="voiceStatus == 1"
class="voice-tip g_h_all g_mt_3"
style="color: #fe0000;
font-size: 12px;
"
:style="writeStylt"
>
松手取消
</view>
</div>
<view class="p-com-chat g_w_all g_bg_f"
>
<view class="chat-operate g_flex_c" :style="writeStyle" :class="voiceStatus == 1 ? 'bg_5_op5' : 'g_bg_f'">
<view class="g_bg_f m-input g_flex_row_center g_position_rela"
:class="hasTopPadding && msgType == 'text' ? 'hasTopPadding' : ''"
:style="{
'overflow': 'auto',
'width':'calc(100vw - 40px)',
'box-shadow': 'rgb(0 0 0 / 10%) 0px 2px 12px 0px',
'border-radius':'50px',
'height': msgType == 'text' ? (tah < 46 ? '56px' : tah + 'px') : '56px',
'min-height': '56px',
}">
<!-- Left side -->
<view class="g_flex_none g_flex_column_end g_h_all g_w_56 g_posi_relative g_text_c" @click="handleUpdateMsgType()"
:class="voiceStatus == 0 && spec ? 'g_bg_main' : voiceStatus == 1 ? 'g_bg_f0a' : voiceStatus == -2 ? 'g_c_t g_bg_main' : ''"
style="border-radius: 50px 0px 0px 50px;position: relative;min-height: 56px;" v-if="gptType != 'ai-text'"
:style="{
'height': msgType == 'text' ? (tah < 46 ? '56px' : tah + 20 + 'px') :'56px',
}"
>
<i class="iconfont"
:class="msgType == 'text' ? 'icon-huatongyuyin g_fs_22' : 'icon-weixinjianpan2 g_fs_26'"
style="position: fixed;"
:style="{
'left':msgType == 'text' ? '39px' :'37px',
'bottom':msgType == 'text' ? '35px' :'33px',
}"
v-if="voiceStatus == -1"></i>
</view>
<!-- Middle -->
<view class="g_flex_1 g_flex_column_center"
>
<view v-if="msgType == 'text'" class="m-input-point g_w_all g_flex_column_center">
<div class="container g_flex_column_center">
<textarea :auto-height="false"
cursor-spacing="50"
class="g_fs_17 g_c_0"
:class="hasTopPadding ? 'g_mt_0' : ''"
id="textarea"
placeholder="请输入内容"
v-model="sendMsg"
@linechange="linechange"
@input="onInput"
style="height: 22px;padding: 10px 0;line-height: 27px;"
:style="{ height: tah + 'px' }"
/>
</div>
</view>
<view v-else>
<view class="g_w_all g_h_56 m-voice-point g_flex_c g_fw_700 g_fs_17"
@touchstart="onTouchStart" @touchend="onTouchEnd" @touchmove="onTouchMove"
:class="voiceStatus == 1 ? 'g_c_f g_bg_f0a' : voiceStatus == 0 ? 'g_bg_main' : voiceStatus == -2 ? 'g_bg_main' : ''">
{{ voiceStatus == 1 ? "" : voiceStatus == -1 ? "按住 说话" : "" }}
</view>
</view>
</view>
<!-- Send button -->
<view class="g_flex_none g_flex_column_end g_h_all g_w_56 g_position_rela g_text_c" @click="handleSendMsg"
:class="voiceStatus == 0 && spec ? 'g_bg_main' : voiceStatus == 1 ? 'g_bg_f0a' : voiceStatus == -2 ? 'g_c_t g_bg_main' : ''"
style="border-radius: 0 50px 50px 0;position: relative;min-height: 56px;"
:style="{
'height': msgType == 'text' ? (tah < 46 ? '56px' : tah + 20 + 'px') :'56px',
}"
>
<!-- Clear input -->
<i class="iconfont icon-close-circle g_fs_26 g_c_9"
style="position: fixed;right: 36px;bottom: 80px;"
v-if="tah > 80 && msgType == 'text'" @click.stop="clearMsg"
></i>
<i class="iconfont icon-fasong g_fs_32"
:class="sendIconStatus ? 'g_c_main' : voiceStatus == -2 ? 'g_c_t g_bg_main' : 'g_c_b'"
style="position: fixed;right: 33px;bottom: 31px;"
v-if="voiceStatus == -1"></i>
</view>
<!-- Recording interaction animation -->
<view class="longpress-top-mask g_flex_c"
@touchend="onTouchEnd"
style="
bottom:38px;
"
v-if="voiceStatus == 0 || voiceStatus == 1 || voiceStatus == -2">
<div class="column-voice g_flex_row_center">
<div v-for="(item, index) in 20" :key="index" class="g_flex_column_center">
<div class="column-item " :style="{
animation: 'voi_animate 1.5s infinite ' + 0.1 * index + 's',
'-webkit-animation': 'voi_animate 1.5s infinite ' + 0.1 * index + 's',
}"></div>
</div>
</div>
</view>
</view>
</view>
</view>
</div>
</div>
<g-tabbar class="tabbar"></g-tabbar>
</div>
</template>
<!-- #ifdef APP -->
<script module="yourModuleName" lang="renderjs">
//This module must use the Options API style (works with both Vue 2 and Vue 3). Copy this block as-is; do not rewrite it in the setup/Composition API style, otherwise importing vue here may fail and break the build.
/**In an App build, recording and audio encoding run inside this renderjs WebView by default, so the format/engine js files must also be imported here.
If RecordApp.UniWithoutAppRenderjs=true is configured and no renderjs-dependent features are used (e.g. nvue pages, or visualization plugins that only work in H5),
this renderjs module can be omitted, and the related conditionally compiled imports in the logic layer must then be removed.**/
import 'recorder-core'
import RecordApp from 'recorder-core/src/app-support/app'
import '../../uni_modules/Recorder-UniCore/app-uni-support.js' //paths starting with "@/" do not seem to work in renderjs; if the compiled path is wrong, just fix the relative path
//Import only the recording format support files and plugins you need
import 'recorder-core/src/engine/mp3'
import 'recorder-core/src/engine/mp3-engine'
//Import the format support files you need; if multiple formats are required, simply import each format's encoder engine js file
import "recorder-core/src/engine/wav";
import 'recorder-core/src/extensions/waveview'
export default {
mounted() {
//Must be called by the App's renderjs module; pass in the current module's this
RecordApp.UniRenderjsRegister(this);
},
methods: {
//Methods defined here can be called directly from the logic layer via RecordApp.UniWebViewVueCall(this,'this.xxxFunc()')
//To call logic-layer methods, use this.$ownerInstance.callMethod("xxxFunc",{args}); binary data must be converted to base64 before being passed
}
}
</script>
<!-- #endif -->
<script>
//Required Recorder core file; its path is /src/recorder-core.js (likewise below); either import or require works
import Recorder from "recorder-core"; //Note: if the Recorder variable is never referenced it may be removed at build time (e.g. vue3 tree-shaking); either change this to import 'recorder-core', or make a trivial call such as Recorder.a=1 to keep a strong reference
//Required RecordApp core file; its path is /src/app-support/app.js
import RecordApp from "recorder-core/src/app-support/app";
//uni-app support file that must be imported on all platforms (if the compiled path is wrong, replace @ with a relative prefix such as ../../)
import "../../root/components/Recorder-UniCore/app-uni-support.js";
/** When compiling to a WeChat mini program, import the mini-program support file **/
// #ifdef MP-WEIXIN
import "recorder-core/src/app-support/app-miniProgram-wx-support.js";
// #endif
/** In H5 and mini-program environments, import the needed format encoders and visualization plugins here; in the App environment they are imported in renderjs **/
// Note: if the App needs to call Recorder's encoding/transcoding features in the logic layer, remove this conditional compilation, otherwise an "encoder not loaded" error will be thrown
// #ifdef H5 || MP-WEIXIN
//Import the format support files you need; if multiple formats are required, import each format's encoder engine js file
import "recorder-core/src/engine/wav";
//Optional plugin support; import whichever plugins you need
import "recorder-core/src/extensions/waveview";
// #endif
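//Following the note on the Recorder import above: a trivial strong reference so tree-shaking cannot drop that import (assumed to be a harmless no-op if the build keeps it anyway).
Recorder.a = 1;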
var _wsTimer = null;
export default {
onShareAppMessage() {
return this.G.shareFun();
},
data() {
return {
corpUserFlag: false,
isFocus:false,
tah:36,
writeStyle: 'bottom:108px',
writeStyls:'bottom:136px',
writeStylt:'bottom:166px',
btnStyle:'bottom:186px',
topViewBottom:"bottom:0px",
isIosWxapp: false,
scrollIntoView: '',
wh: uni.getSystemInfoSync().screenHeight + 'px',
isFinish: 1,
isFluency: false,
isAside: {
isShow: false,
},
content: "",
textareaHeight: 30,
initialHeight: 0,
showLoad: false,
localBaseImg: this.G.store().localBaseImg,
fadanBaseImg:this.G.store().fadanBaseImg,
scrollTop: 0,
dzj: "",
msgType: "voice",
sendMsg: "",
sendIconStatus: false,
voiceMsg: "按住 说话",
voiceStatus: -1, // -1 before recording & after it ends; 0 recording and inside the send area; 1 recording but moved outside (release cancels); -2 pressed, recording not started yet
longPressDelay: 230, // Time a press must last to count as a long press, in milliseconds
spec: true,
isAuth: false,
hasTopPadding: false,
allJob: [],
talkId: 0,
gptType: "",
bottomHeight: 100,
longPressTimer: null,
isStartRecord: false,
sendMsgGroup: [],
socketTask: null,
sequenceCounter: 1,
cid: -1,
hisPage: 1,
ws_send_ready: false,
reconnectCount: 0,
isSending: false,
customList: [],
};
},
onLoad(options) {
let that = this;
//Function that [must be called] when the page is shown; pass in the current component's this
RecordApp.UniPageOnShow(this);
},
onShow() {
let that = this;
if (uni.getStorageSync("apply-token")) {
that.checkRecordingPermission();
that.G.Get(that.api.ai_config, {}, (res) => {
// uni.setStorageSync("robot_config", JSON.stringify(res));
that.customList = res.map(item => {
return {
title: item.name,
image: item.iconUrl,
page: '/root/chat/index',
description:item.description,
robotId:item.robotId
}
})
});
}
this.corpUserFlag = uni.getStorageSync("apply-userinfo").corpUserFlag;
const isWxApp = uni.getSystemInfoSync().uniPlatform == 'mp-weixin'
const isIosWxapp = uni.getSystemInfoSync().platform == 'ios' && isWxApp
that.isIosWxapp = isIosWxapp;
that.isFinish = 1;
that.voiceStatus = -1;
that.isStartRecord = false;
// Create the selector query
const query = uni.createSelectorQuery();
// Select the target element
query.select(".chat-operate").boundingClientRect();
// Run the query
query.exec((res) => {
if (res && res[0]) {
const height = res[0].height;
that.bottomHeight = height;
that.wh = `calc(${uni.getSystemInfoSync().windowHeight}px - ${that.bottomHeight}px)`;
} else {
console.error("未找到元素");
}
});
if (this.isMounted) RecordApp.UniPageOnShow(this);
},
watch: {
sendMsg(val) {
if (val) {
this.sendIconStatus = true;
} else {
this.sendIconStatus = false;
}
},
},
methods: {
//Request recording permission
recReq() {
var that = this;
//Commercial license used when compiling to App (H5 and mini-program builds are free, so this can be left unset); if it is not provided, the first time recording permission is requested after the App starts, a dialog saying the App build is for testing only without a commercial license will pop up
//RecordApp.UniAppUseLicense='我已获得UniAppID=*****的商用授权';
RecordApp.RequestPermission_H5OpenSet = {
audioTrackSet: {
noiseSuppression: true,
echoCancellation: true,
autoGainControl: true
}
}; //This is the audioTrackSet config used by Start; in H5 and App+renderjs it must be configured in advance, because in H5 RequestPermission opens the recorder directly
RecordApp.UniWebViewActivate(this); //In the App environment, the current page's WebView must be activated first
RecordApp.RequestPermission(
() => {
console.log("已获得录音权限,可以开始录音了");
that.msgType = "voice";
that.isAuth = true;
},
(msg, isUserNotAllow) => {
if (isUserNotAllow) {
//The user denied recording permission
//Add code here to guide the user to grant recording permission, written separately for each platform
uni.showModal({
title: '提示',
content: '需要录音权限才能使用语音功能,请前往设置开启权限',
success: function (res) {
if (res.confirm) {
uni.openSetting({
success: (settingData) => {
if (settingData.authSetting['scope.record']) {
uni.showToast({ title: '授权成功' });
that.msgType = "voice";
that.isAuth = true;
that.checkRecordingPermission(); // Try requesting permission again
} else {
uni.showToast({ icon: 'none', title: '授权失败' });
}
},
});
}
},
});
}
that.msgType = "text";
console.error("请求录音权限失败:" + msg);
}
);
},
//Start recording
recStart() {
//For background recording in an Android App, the background recording keep-alive service must be enabled (iOS does not need this); this requires the matching native plugin or a third-party keep-alive plugin
//Recording configuration
var set = {
type: "wav",
sampleRate: 16000,
bitRate: 16, //Sample rate (Hz) and bit rate (kbps); other parameters use default values. Note that numeric parameters must be numbers, not strings. The chosen type must have its format support file loaded in advance, e.g. the wav format needs the wav.js encoder engine loaded beforehand
/*,audioTrackSet:{ //Optional: if audio is played at the same time (e.g. a voice call), enable echo cancellation (it may not always take effect); once enabled, sound may play from the earpiece in some environments (mini program, App native plugin), where an API call can switch output back to the speaker
//Note: in H5 and App+renderjs the same config must be set before requesting recording permission (RecordApp.RequestPermission_H5OpenSet) for it to take effect
echoCancellation:true,noiseSuppression:true,autoGainControl:true} */
onProcess: (buffers, powerLevel, duration, sampleRate, newBufferIdx, asyncEnd) => {
//Works on all platforms; data can be uploaded/sent in real time. Use Recorder.SampleData to continuously convert the new data in buffers to PCM for upload, or use the mock method to continuously transcode new data into another format; see the demo snippet list in the Recorder docs -> "real-time transcode and upload (generic)". On top of this you can forward, save, or speech-recognize (ASR) the audio in real time
//Note: in App the actual audio encoding happens in renderjs; the buffers here are forwarded from renderjs in real time, so modifying them here does not change the buffers in renderjs and therefore does not change the generated audio file (do the modification in onProcess_renderjs instead). If buffer memory needs to be freed, clear it both here and in onProcess_renderjs; H5 and mini programs have no such limitation
//Note: for Recorder extension plugins that only work in a browser, in App import the plugin in renderjs and call it in onProcess_renderjs; in H5 it can be called directly here; mini programs do not support such plugins. If the plugin logic is complex, wrap it in a js file so both the logic layer and renderjs can import it without duplicated code
//In H5, mini programs, etc. the visualization drawing runs directly in the logic layer; in App these operations must be done in onProcess_renderjs
// #ifdef H5 || MP-WEIXIN
if (this.waveView) this.waveView.input(buffers[buffers.length - 1], powerLevel,
sampleRate);
// #endif
/*Real-time memory cleanup (only when the full file returned by Stop is not needed, e.g. takeoffEncodeChunk is provided or type is "unknown"); in App the same cleanup must also be done in onProcess_renderjs
if(this.clearBufferIdx>newBufferIdx){ this.clearBufferIdx=0 } //reset when a new recording starts
for(var i=this.clearBufferIdx||0;i<newBufferIdx;i++) buffers[i]=null;
this.clearBufferIdx=newBufferIdx; */
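/*Sketch (commented out, not part of the original flow): real-time PCM extraction with Recorder.SampleData, as described in the comments above.
The 16000Hz target rate, the this.chunkInfo property, and the use of this.socketTask are illustrative assumptions only.
this.chunkInfo = Recorder.SampleData(buffers, sampleRate, 16000, this.chunkInfo);
var newPcm = this.chunkInfo.data; //Int16Array containing only the samples added since the previous call
//e.g. stream the new chunk to a realtime ASR service: this.socketTask && this.socketTask.send({ data: newPcm.buffer });
*/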
},
onProcess_renderjs: `function(buffers,powerLevel,duration,sampleRate,newBufferIdx,asyncEnd){
//In App, modifying buffers here does change the generated audio file; note that buffers are first forwarded to the logic layer's onProcess before this method is called, so the same modification must also be applied in the logic layer's onProcess
//This method may return true to switch the renderjs onProcess into async mode; call asyncEnd when processing is done. Note that buffers modified asynchronously here likewise will not take effect in the logic layer's onProcess
//In App the visualization drawing is done in renderjs, so it must be written here; this refers to the renderjs module (the This variable can also be used). If the code is complex, put a method xxxFunc in the renderjs methods and simply call this.xxxFunc(args) here
if(this.waveView) this.waveView.input(buffers[buffers.length-1],powerLevel,sampleRate);
/*Same cleanup as in the logic layer's onProcess:
if(this.clearBufferIdx>newBufferIdx){ this.clearBufferIdx=0 } //reset when a new recording starts
for(var i=this.clearBufferIdx||0;i<newBufferIdx;i++) buffers[i]=null;
this.clearBufferIdx=newBufferIdx; */
}`,
onProcessBefore_renderjs: `function(buffers,powerLevel,duration,sampleRate,newBufferIdx){
//In App this method is called before the logic layer's onProcess, so buffers modified here are forwarded to the logic layer's onProcess; this method has no asyncEnd parameter and does not support async processing
//Usually this method is not needed and onProcess_renderjs alone is enough; the internal call order of the renderjs onProcess is: onProcessBefore_renderjs -> forward to logic-layer onProcess -> onProcess_renderjs
}`,
takeoffEncodeChunk: true ?
null :
(chunkBytes) => {
//Works on all platforms; receives the audio chunk data produced by the encoder in real time; chunkBytes is Uint8Array binary data and can be uploaded/sent in real time
//In App, if RecordApp.UniWithoutAppRenderjs is not configured, providing this callback is recommended: after recording ends the whole file is transferred from renderjs back to the logic layer, and since data exchange between uni-app's logic layer and renderjs performs poorly, larger files transfer slowly; providing this callback avoids the huge data transfer on Stop
//In App, when using the native plugin, data can conveniently be saved to the same file in real time: the first chunk uses append:false to create the file, later chunks use append:true to append to it
//RecordApp.UniNativeUtsPluginCallAsync("writeFile",{path:"xxx.mp3",append:回调次数!=1, dataBase64:RecordApp.UniBtoa(chunkBytes.buffer)}).then(...).catch(...)
},
takeoffEncodeChunk_renderjs: true ?
null :
`function(chunkBytes){
//In App you can do things here that only take effect in renderjs (optional); this refers to the renderjs module (the This variable can also be used)
}`,
start_renderjs: `function(){
//In App a function can be provided here; when Start succeeds, renderjs runs this code first; this refers to the renderjs module (the This variable can also be used)
//Put things here that only take effect in renderjs, such as initialization (optional)
}`,
stop_renderjs: `function(arrayBuffer,duration,mime){
//In App a function can be provided here; when Stop succeeds, renderjs runs this code first; this refers to the renderjs module (the This variable can also be used)
//Put things here that only take effect in renderjs (optional)
}`,
};
RecordApp.UniWebViewActivate(this); //In the App environment, the current page's WebView must be activated first
RecordApp.Start(
set,
() => {
console.log("已开始录音");
//[Watchdog] Optional monitoring of whether recording is working normally (onProcess keeps firing); if there is no callback for a long time, recording is not working
//var wdt=this.watchDogTimer=setInterval ... see the watchDogTimer implementation in the demo's main_recTest.vue
//Create the audio visualization: in the App environment it is drawn in renderjs, while H5, mini programs, etc. draw it in the logic layer, so two identical code blocks need to be provided
//Put a canvas in the view; the canvas needs an explicit width and height (the style below specifies 300*100)
//<canvas type="2d" class="recwave-WaveView" style="width:300px;height:100px"></canvas>
// RecordApp.UniFindCanvas(this,[".recwave-WaveView"],`
// this.waveView=Recorder.WaveView({compatibleCanvas:canvas1, width:300, height:100});
// `,(canvas1)=>{
// this.waveView=Recorder.WaveView({compatibleCanvas:canvas1, width:300, height:100});
// });
},
(msg) => {
console.error("开始录音失败:" + msg);
uni.hideLoading();
}
);
},
//Pause recording
recPause() {
if (RecordApp.GetCurrentRecOrNull()) {
RecordApp.Pause();
console.log("已暂停");
}
},
//Resume recording
recResume() {
if (RecordApp.GetCurrentRecOrNull()) {
RecordApp.Resume();
console.log("继续录音中...");
}
},
//Stop recording
recStop() {
//RecordApp.UniNativeUtsPluginCallAsync("androidNotifyService",{ close:true }) //Stop the Android App background recording keep-alive service
let that = this;
RecordApp.Stop(
(arrayBuffer, duration, mime) => {
if (that.spec) {
//Works on all platforms; arrayBuffer is the audio file's binary data, which can be saved as a file or sent to a server
//In App, if stop_renderjs was provided in the Start options, the renderjs function runs before this one
//Note: if takeoffEncodeChunk was provided at Start, you must save the recording data yourself in real time, so the arrayBuffer returned by Stop will be 0 bytes long
//In the H5 environment a Blob/File object can be constructed directly, the same as when using Recorder standalone
// #ifdef H5
var blob = new Blob([arrayBuffer], {
type: mime
});
var file = new File([arrayBuffer], "recorder.wav");
//uni.uploadFile({file:file, ...}) //see test_upload_saveFile.vue in the demo
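/*Expanded upload sketch (commented out; the URL and form field name below are placeholders, not this project's real API):
uni.uploadFile({
url: 'https://example.com/upload', //replace with the actual upload endpoint
file: file, //H5 only: the File object constructed above
name: 'file',
success: (res) => { console.log('upload done', res.statusCode); },
fail: (err) => { console.error('upload failed', err); }
});
*/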
// #endif
//In App and mini-program environments the data can be saved to a local file directly and then uploaded via the relevant network API
// #ifdef APP || MP-WEIXIN
var audioChunks = new Int8Array(arrayBuffer);
uni.setStorageSync('test_file', audioChunks.join(','))
uni.setStorageSync('btn_form', 1)
uni.setStorageSync('robot_id', '7491244881596809243')
uni.setStorageSync('isExecute',1)
uni.navigateTo({
url: '/root/chat/index?cid=&form=audiodata',
})
// #endif
} else {}
},
(msg) => {
console.error("结束录音失败:" + msg);
uni.hideLoading();
}
);
},
readAudioFileAsArrayBuffer(filePath) {
var that = this;
return new Promise((resolve, reject) => {
uni.getFileSystemManager().readFile({
filePath: filePath,
// encoding: 'binary',
responseType: "arraybuffer", // request the data as an ArrayBuffer
success: (res) => {
console.log("读取音频文件成功", res);
resolve(new Int8Array(res.data));
},
fail: (err) => {
console.error("读取文件失败", err);
reject(err);
},
});
});
},
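//Usage sketch for the helper above (hypothetical caller; this page does not currently invoke it, and tempFilePath is a placeholder):
//this.readAudioFileAsArrayBuffer(tempFilePath).then((bytes) => {
//  uni.setStorageSync('test_file', bytes.join(','));
//}).catch((err) => { console.error(err); });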
keyboardheightchange(e) {
var that = this;
if (e.target.dataset.height == 0 && this.sendMsg == "") {
this.textareaHeight = 30;
}
if (this.isIosWxapp) {
if (e.detail.height == 0) {
this.writeStyle = `bottom:108px`
this.writeStyls = `bottom:136px`
this.writeStylt = `bottom:166px`
this.btnStyle = `bottom:186px`
this.topViewBottom = `bottom:0`
// this.wh = `calc(${uni.getSystemInfoSync().windowHeight}px - ${this.bottomHeight}px)`
// this.ghHeight = 'height:84px'
} else {
this.writeStyle = `bottom: calc(${e.detail.height}px - env(safe-area-inset-bottom) + 36px)`
this.writeStyls = `bottom: calc(${e.detail.height}px - env(safe-area-inset-bottom) + 64px)`
this.writeStylt = `bottom: calc(${e.detail.height}px - env(safe-area-inset-bottom) + 94px)`
this.btnStyle = `bottom: calc(${e.detail.height}px - env(safe-area-inset-bottom) + 114px)`
// this.ghHeight = `height:calc(84px + ${e.detail.height}px)`
this.topViewBottom = `bottom:300px`
}
that.scrollToBottom();
}
},
onInput(e) {
this.content = e.target.value;
},
onFocus() {
this.initialHeight = this.textareaHeight;
},
onBlur() {
this.updateTextareaHeight();
},
updateTextareaHeight() {},
linechange(e) {
console.log('获取行数变化:',e.detail)
let that = this;
that.isFocus = true;
setTimeout(()=>{
that.isFocus = true;
that.tah = e.detail.heightRpx / 2;
},100)
// if (e.detail.lineCount > 1) {
// this.hasTopPadding = true;
// } else {
// this.hasTopPadding = false;
// }
},
scrollToBottom() {
let that = this;
},
handleUpdateMsgType() {
let that = this;
this.sendMsg = '';
this.animate();
this.isFocus = false;
if (this.msgType == "text") {
this.msgType = "voice";
this.checkRecordingPermission();
} else {
this.msgType = "text";
}
that.writeStyle = `bottom:108px`
that.writeStyls = `bottom:136px`
that.writeStylt = `bottom:166px`
that.btnStyle = `bottom:186px`
that.topViewBottom = `bottom:0`
},
handleSendMsg() {
let that = this;
if (that.msgType == "voice") {
return false;
}
if (that.showLoad) {
uni.showToast({
icon: "none",
title: "正在匹配中,请稍候",
});
return false;
}
if (that.sendMsg == "") {
uni.showToast({
icon: "none",
title: "请输入内容",
});
return false;
}
that.animate();
that.sendAI();
that.writeStyle = `bottom:108px`
that.writeStyls = `bottom:136px`
that.writeStylt = `bottom:166px`
that.btnStyle = `bottom:186px`
that.topViewBottom = `bottom:0`
setTimeout(() => {
that.sendMsg = "";
that.updateTextareaHeight();
that.textareaHeight = 30;
}, 10);
setTimeout(() => {
that.textareaHeight = 30;
}, 80);
},
checkRecordingPermission() {
this.recReq();
},
onTouchMove(e) {
let that = this;
if (!that.isAuth) {
that.voiceStatus = -1;
return false;
}
const touch = e.touches[0];
let _x = touch.clientX,
_y = touch.clientY;
console.log('y 坐标:',_y,' 按钮:',that.writeStyle)
let _bool = that.writeStyle.split(':')[1].split('px')[0]
console.log('Y临界值',uni.getSystemInfoSync().windowHeight - _bool - 72)
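// Move-to-cancel check: while the touch Y stays below (windowHeight - input-bar bottom offset - 72), i.e. roughly within the
// input bar area, releasing sends; moving the finger above that line switches to cancel (72 appears to be an empirical offset).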
if (_y > uni.getSystemInfoSync().windowHeight - _bool - 72) {
that.voiceStatus = 0;
that.spec = true;
} else {
that.voiceStatus = 1;
that.spec = false;
}
},
onTouchStart() {
let that = this;
if (!that.isAuth) {
uni.showToast({
icon: "none",
title: "授权中,请稍候",
});
return false;
}
if (that.showLoad) {
uni.showToast({
icon: "none",
title: "正在匹配中,请稍候",
});
return false;
}
that.animate("heavy");
uni.removeStorageSync('asytip');
that.reconnectCount = 0;
that.voiceStatus = -2;
that.isStartRecord = false;
that.longPressTimer = setTimeout(() => {
that.isStartRecord = true;
that.spec = true;
that.startRecording();
}, that.longPressDelay);
},
onTouchEnd() {
let that = this;
if (!that.isAuth) {
that.voiceStatus = -1;
return false;
}
clearTimeout(that.longPressTimer);
if (that.isAuth) {
that.stopRecording();
} else {
uni.showToast({
icon: "none",
title: "授权中,请稍候",
});
return false;
}
},
startRecording() {
let that = this;
setTimeout(() => {
that.voiceStatus = 0; // 标记为正在录音
console.log("that.isFluency", that.isFluency);
if (!that.isFluency) {
that.isSending = false;
// start recording for speech recognition
that.recStart();
}
}, 100);
},
stopRecording() {
let that = this;
that.baseEnd();
setTimeout(
() => {
that.baseEnd();
},
that.isFluency ? 500 : 0
);
},
baseEnd() {
let that = this;
that.voiceStatus = -1;
if (that.isStartRecord) {
that.recStop();
} else {
uni.hideLoading();
}
that.isStartRecord = false;
that.$forceUpdate();
},
sendAudioAI($msg) {
let that = this;
if (that.isSending) {
console.log("正在发送消息,忽略重复发送请求");
return;
}
that.isSending = true;
setTimeout(() => {
that.voiceStatus = -1;
that.sendBaseData($msg, 'audio');
}, 10);
},
sendAI($form = "", $value = "") {
let that = this;
that.textareaHeight = 30;
that.sendBaseData(that.sendMsg, 'text');
},
sendBaseData($sendMessage = "", $type) {
let that = this;
if (!$sendMessage) {
console.log("防抖");
return false;
}
that.saveHistory(0, $sendMessage, () => {
uni.hideLoading();
uni.setStorageSync('ls-chat-text', $sendMessage)
$sendMessage = '';
uni.setStorageSync('robot_id', '7491244881596809243')
uni.setStorageSync('isExecute',1)
uni.navigateTo({
url: '/root/chat/index?cid=&form=msgdata&stip=' + $type
})
});
},
saveHistory(robotTag = 0, $message = "", callback = () => {}) {
let that = this,
_dou = $message;
if ($message) {
$message = '';
callback();
}
},
animate($type = "heavy") {
uni.vibrateShort({
type: $type,
fail(err){
console.log('震动失败:',err)
}
});
},
clearMsg() {
this.sendMsg = "";
},
goHistory($form = '') {
let that = this;
uni.setStorageSync('robot_id', '7491244881596809243')
uni.setStorageSync('isExecute',1)
uni.navigateTo({
url: "/root/chat/index?cid=&form=viwdata",
});
},
goChatDetail($item = '') {
let that = this;
uni.setStorageSync('robot_id', $item.robotId)
let _title = $item.title;
if($item.title == '智能匹配'){
_title = '';
}
uni.setStorageSync('isExecute',1)
uni.navigateTo({
url: "/root/chat/index?cid=&form=viwdata&title=" + _title,
});
},
},
};
</script>
<style lang="scss">
.routeItem {
margin-bottom: 12px;
.item {
border: 1px solid #eee;
padding: 4px 8px;
border-radius: 2px;
}
&:first-child {
margin-top: 12px;
}
}
.g_page_ai {
.main-obj {
height: calc(calc(100vh - env(safe-area-inset-bottom)) - 50px);
height: calc(calc(100vh - constant(safe-area-inset-bottom)) - 50px);
}
}
.g_c_b {
color: #bbb;
}
.p-com-chat {
background-color: #ffffff;
.g_bg_f0a {
background-color: #fe0000;
}
.chat-content {
width: calc(100% - 0px);
margin: 0 auto;
padding-bottom: 0px;
.chat-left {
padding: 0 10px;
.msg {
border-radius: 12px;
line-height: 1.5;
// letter-spacing: 1.5px;
word-break: break-all;
}
}
.chat-right {
padding: 0 10px;
.msg {
border-radius: 12px;
line-height: 1.5;
// letter-spacing: 1.5px;
word-break: break-all;
}
}
}
.chat-operate {
width: 100%;
.m-input {
width: calc(100% - 20px);
margin: 0 auto;
border-radius: 40px;
font-size: 16px;
}
}
.longpress-top-mask {
position: fixed;
left: 0;
width: 100vw;
z-index: 99;
}
@keyframes voi_animate {
0% {
height: 50%;
background-color: #ffffff;
}
20% {
height: 50%;
background-color: #ffffff;
}
50% {
height: 100%;
background-color: #ffffff;
}
80% {
height: 50%;
background-color: #ffffff;
}
100% {
height: 50%;
background-color: #ffffff;
}
}
.column-voice {
width: 100%;
height: 22px;
overflow: hidden;
// max-width: calc(100% - 140px);
max-width: calc(100% - 140px);
margin: 0 auto;
.column-item {
width: 3px;
height: 100%;
margin-left: 6px;
border-radius: 10px;
background-color: #ffffff;
vertical-align: middle;
display: inline-block;
}
}
}
/* Container styles */
.loader {
display: flex;
// justify-content: center;
// align-items: center;
// height: 100vh; /* make the container fill the viewport height */
}
/* Single dot styles */
.dot {
width: 10px;
height: 10px;
margin: 0 2px;
border-radius: 50%;
background-color: #666;
animation: dotPulse 1s infinite ease-in-out;
}
/* Animation definition */
@keyframes dotPulse {
0%,
80%,
100% {
transform: scale(0.9);
}
40% {
transform: scale(1.1);
}
}
/* Delay for the second dot */
.dot:nth-child(2) {
animation-delay: -0.33s;
}
/* Delay for the third dot */
.dot:nth-child(3) {
animation-delay: -0.66s;
}
.container {
// position: relative;
// overflow-y: auto; /* enable scrolling */
// height: 100%; /* set the container height */
}
textarea {
resize: none;
/* disable manual resizing */
overflow: hidden;
/* hide the overflow */
width: 100%;
/* set the width */
}
.hasTopPadding {
padding-top: 8px !important;
box-sizing: content-box !important;
}
.biggerSize::after {
content: "";
/* display: inline-block; */
width: 60px;
height: 60px;
position: absolute;
left: 50%;
top: 50%;
z-index: 99;
transform: translate(-50%, -50%);
}
.bg_5_op5{
background-color: #ffffff;
}
.g_fs_32 {
font-size: 32px;
}
</style>