如何延长 Webkit SpeechRecognition 的识别时长?短暂停顿后识别就中断了

如何解决“Webkit SpeechRecognition 在短暂停顿后就停止识别、无法延长识别时长”的问题

我仅在单击按钮后才使用语音识别。因此,recognition.continuous = false被设置。来自https://developer.mozilla.org/en-US/docs/Web/API/SpeechRecognition/

的注释

SpeechRecognition 接口的 continuous 属性控制每次识别是返回连续的多个结果,还是仅返回单个结果。

现在当我说些什么时,例如这是一个不错的源代码,{需要暂停1-2秒},此刻它会中断。

如何延长演讲时间?我认为3-5秒后可以休息一下。

PS:由于安全问题,代码可能无法在此处运行。因此,请看一下https://codepen.io/durrrrr/pen/RwaRVdg

// Request microphone access up front so speech recognition can work later.
// Prefer the modern Promise-based navigator.mediaDevices.getUserMedia;
// the prefixed navigator.getUserMedia variants are deprecated and only
// kept here as a fallback for older browsers.
if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
   navigator.mediaDevices.getUserMedia({ audio: true })
      .then(function(stream) {
         console.log("Microphone Access supported");
      })
      .catch(function(err) {
         console.log("An error occured by accessing the microphone: " + err.name);
      });
} else {
   // Legacy callback-based API (old Chrome/Firefox prefixes).
   navigator.getUserMedia = navigator.getUserMedia ||
                            navigator.webkitGetUserMedia ||
                            navigator.mozGetUserMedia;

   if (navigator.getUserMedia) {
      navigator.getUserMedia({ audio: true }, function(stream) {
            console.log("Microphone Access supported");
         }, function(err) {
            console.log("An error occured by accessing the microphone: " + err.name);
         }
      );
   } else {
      console.log("getUserMedia not supported");
   }
}


// Normalize the constructors: prefer the standard names, fall back to the
// webkit-prefixed ones (Chrome only ships the prefixed versions).
window.SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition;
window.SpeechGrammarList = window.SpeechGrammarList || window.webkitSpeechGrammarList;

// BUGFIX: test the value, not `'SpeechRecognition' in window` — the
// assignment above always creates the property, so the `in` check reported
// "supported" even when both constructors were undefined.
if (window.SpeechRecognition) {

    console.log('Speech recognition supported');

    // JSpeech Grammar Format: bias recognition towards CSS color names.
    var grammar = '#JSGF V1.0; grammar colors; public <color> = aqua | azure | beige | bisque | black | blue | brown | chocolate | coral | crimson | cyan | fuchsia | ghostwhite | gold | goldenrod | gray | green | indigo | ivory | khaki | lavender | lime | linen | magenta | maroon | moccasin | navy | olive | orange | orchid | peru | pink | plum | purple | red | salmon | sienna | silver | snow | tan | teal | thistle | tomato | turquoise | violet | white | yellow ;';
    var message = $('#message');
    var recognition = new window.SpeechRecognition();
    var finalTranscript = '';

    // Grammar lists are not supported everywhere; attach only if available.
    if (window.SpeechGrammarList) {
      var speechRecognitionList = new window.SpeechGrammarList();
      speechRecognitionList.addFromString(grammar, 1);
      recognition.grammars = speechRecognitionList;
    }

    // BUGFIX: 'en-EN' is not a valid BCP 47 tag ("EN" is not a region);
    // use 'en-US' so the service picks the intended language model.
    recognition.lang = 'en-US';
    recognition.interimResults = true;
    // continuous = false: the service ends after the first final result,
    // which is why recognition stops after a short pause in speech.
    recognition.continuous = false;

    // Accumulate final results and show interim (not-yet-final) text live.
    recognition.onresult = function(event) {
      var currentTranscript = '';
      var confidenceFinalTranscrip = '';
      // Start at resultIndex: earlier results were already processed.
      for (var i = event.resultIndex; i < event.results.length; ++i) {
        if (event.results[i].isFinal) {
          finalTranscript += event.results[i][0].transcript;
          confidenceFinalTranscrip = event.results[i][0].confidence;
          $('#finalTranscript').html(finalTranscript);
        } else {
          currentTranscript += event.results[i][0].transcript;
        }
      }
      $('#currentTranscript').html(currentTranscript);
    };

    // Reset the pulsing-microphone UI once the user stops speaking.
    recognition.onspeechend = function() {
      $('#pulse-ring-microphone').removeClass('pulse-ring');
      $('#pulse-ring-microphone').removeClass('pulse-ring-small');
      $('#pulse-ring-microphone-font').removeClass('pulse-ring-font-on').addClass('pulse-ring-font-off');
      recognition.stop();
    };

    recognition.onerror = function(event) {
      $('#pulse-ring-microphone').removeClass('pulse-ring');
      $('#pulse-ring-microphone-font').removeClass('pulse-ring-font-on').addClass('pulse-ring-font-off');
      // BUGFIX: SpeechRecognitionErrorEvent exposes `error`, not
      // `speechError` (which is always undefined).
      $('#errorTranscript').html(event.error);
    };

} else {
  console.log('Speech recognition not supported');
}

// Start listening when the microphone button is clicked.
$('#btnStartSpeech').on('click', function() {
  // Guard: `recognition` is only created when the API is supported;
  // without this check the click throws a ReferenceError/TypeError
  // in unsupported browsers.
  if (typeof recognition === 'undefined' || !recognition) {
    return;
  }
  $('#pulse-ring-microphone').addClass('pulse-ring');
  $('#pulse-ring-microphone-font').removeClass('pulse-ring-font-off').addClass('pulse-ring-font-on');
  recognition.start();
});
/* Center the pulsing microphone button. */
.container-pulse-ring {
  display: flex;
  align-items: center;
  justify-content: center;
}

.container-pulse-ring:focus {
  outline: -webkit-focus-ring-color auto 0px;
}

/* Round microphone button. */
#btnStartSpeech.btnAnimated {
  border: none;
  border-radius: 100%;
  width: 150px;
  height: 150px;
  font-size: 3.5em;
  color: #fff;
  padding: 0; /* was declared twice; one declaration is enough */
  margin: 0;
  background: #337ab7;
  position: relative;
  display: inline-block;
  line-height: 50px;
  text-align: center;
  white-space: nowrap;
  vertical-align: middle;
  -ms-touch-action: manipulation;
  touch-action: manipulation;
  cursor: pointer;
  -webkit-user-select: none;
  -moz-user-select: none;
  -ms-user-select: none;
  user-select: none;
  background-image: none;
}

.btnAnimated:focus {
  outline: 0px auto -webkit-focus-ring-color;
  outline-offset: -2px;
}

.pulse-ring-font-on {
  color: #fff;
}

.pulse-ring-font-off {
  color: #333;
}

/* Animated ring shown while the recognizer is listening.
   NOTE: `content` only applies to ::before/::after pseudo-elements,
   so the original `content: ''` here was a no-op and was removed. */
.pulse-ring {
  width: 150px;
  height: 150px;
  border: 50px solid #337ab7;
  border-radius: 50%;
  position: absolute;
  top: -1px;
  left: -1px;
  animation: pulsate infinite 2s;
}

/* Keyframes: standard form added so the unprefixed `animation: pulsate`
   also works outside WebKit browsers. */
@keyframes pulsate {
  0% {
    transform: scale(1, 1);
    opacity: 1;
  }

  100% {
    transform: scale(1.2, 1.2);
    opacity: 0;
  }
}

@-webkit-keyframes pulsate {
  0% {
    -webkit-transform: scale(1,1);
    opacity: 1;
  }

  100% {
    -webkit-transform: scale(1.2,1.2);
    opacity: 0;
  }
}
<script src="https://cdnjs.cloudflare.com/ajax/libs/jquery/1.9.1/jquery.min.js"></script>
// Request microphone access. Prefer the modern Promise-based
// navigator.mediaDevices.getUserMedia; the prefixed navigator.getUserMedia
// variants are deprecated and kept only as a fallback.
if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
   navigator.mediaDevices.getUserMedia({ audio: true })
      .then(function(stream) {
         console.log("Accessed the Microphone");
      })
      .catch(function(err) {
         console.log("The following error occured: " + err.name);
      });
} else {
   // Legacy callback-based API (old Chrome/Firefox prefixes).
   navigator.getUserMedia = navigator.getUserMedia ||
                            navigator.webkitGetUserMedia ||
                            navigator.mozGetUserMedia;

   if (navigator.getUserMedia) {
      navigator.getUserMedia({ audio: true }, function(stream) {
            console.log("Accessed the Microphone");
         }, function(err) {
            console.log("The following error occured: " + err.name);
         }
      );
   } else {
      console.log("getUserMedia not supported");
   }
}


// Normalize the constructor: prefer the standard name, fall back to the
// webkit-prefixed one (Chrome only ships the prefixed version).
window.SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition;

// BUGFIX: test the value, not `'SpeechRecognition' in window` — the
// assignment above always creates the property, so the `in` check reported
// "supported" even when the constructor was undefined.
if (window.SpeechRecognition) {

    console.log('Speech recognition supported');

    var message = $('#message');
    var recognition = new window.SpeechRecognition();
    var finalTranscript = '';
    // BUGFIX: 'en-EN' is not a valid BCP 47 tag; use 'en-US'.
    recognition.lang = 'en-US';
    recognition.interimResults = true;
    // continuous = false: the service ends after the first final result.
    recognition.continuous = false;

    // Accumulate final results and show interim text live.
    recognition.onresult = function(event) {
      var currentTranscript = '';
      var confidenceFinalTranscrip = '';
      // Start at resultIndex: earlier results were already processed.
      for (var i = event.resultIndex; i < event.results.length; ++i) {
        if (event.results[i].isFinal) {
          finalTranscript += event.results[i][0].transcript;
          confidenceFinalTranscrip = event.results[i][0].confidence;
          $('#finalTranscript').html(finalTranscript);
        } else {
          currentTranscript += event.results[i][0].transcript;
        }
      }
      $('#currentTranscript').html(currentTranscript);
    };

    // Reset the pulsing-microphone UI once the user stops speaking.
    recognition.onspeechend = function() {
      $('#pulse-ring-microphone').removeClass('pulse-ring');
      $('#pulse-ring-microphone').removeClass('pulse-ring-small');
      $('#pulse-ring-microphone-font').removeClass('pulse-ring-font-on').addClass('pulse-ring-font-off');
      recognition.stop();
    };

    recognition.onerror = function(event) {
      $('#pulse-ring-microphone').removeClass('pulse-ring');
      $('#pulse-ring-microphone-font').removeClass('pulse-ring-font-on').addClass('pulse-ring-font-off');
      // BUGFIX: the error event exposes `error`, not `speechError`.
      $('#errorTranscript').html(event.error);
    };

} else {
  console.log('Speech recognition not supported');
}

// Start listening when the microphone button is clicked.
$('#btnStartSpeech').on('click', function() {
  // Guard: `recognition` only exists when the API is supported.
  if (typeof recognition === 'undefined' || !recognition) {
    return;
  }
  $('#pulse-ring-microphone').addClass('pulse-ring');
  $('#pulse-ring-microphone-font').removeClass('pulse-ring-font-off').addClass('pulse-ring-font-on');
  recognition.start();
});

解决方法

您可以运行setTimeout重新启动识别开始

// Restart recognition shortly after it ends; the small delay helps avoid
// "already started" errors when start() is called again too quickly.
// NOTE(review): this fragment assumes it runs inside recognition's
// onend/onspeechend handler — confirm placement before using.
setTimeout(() => {
      recognition.start();
    },500);
,

recognition.continuous = false 表示服务在第一个最终结果出现后立即停止。内部 VAD(语音活动检测)将决定您的输入何时停止。它的攻击性很强,而且你不能改变阈值,所以在1-2秒没有发言之后,它总是会结束。

您可以将引擎设置为 continuous = true 并收集多个最终结果,直到达到最大值。时间到了(我将它用于 SEPIA 开源语音助手),但状态处理可能非常棘手。

我目前正在构建一个类似于 Web Speech API 的库,其中包含用于开源 VAD 和语音识别 (SEPIA Web Audio Library) 的模块,让您可以完全控制各个方面,但您需要托管您的自己的ASR服务器...^^

版权声明:本文内容由互联网用户自发贡献,该文观点与技术仅代表作者本人。本站仅提供信息存储空间服务,不拥有所有权,不承担相关法律责任。如发现本站有涉嫌侵权/违法违规的内容, 请发送邮件至 dio@foxmail.com 举报,一经查实,本站将立刻删除。

相关推荐


依赖报错 idea导入项目后依赖报错,解决方案:https://blog.csdn.net/weixin_42420249/article/details/81191861 依赖版本报错:更换其他版本 无法下载依赖可参考:https://blog.csdn.net/weixin_42628809/a
错误1:代码生成器依赖和mybatis依赖冲突 启动项目时报错如下 2021-12-03 13:33:33.927 ERROR 7228 [ main] o.s.b.d.LoggingFailureAnalysisReporter : *************************** APPL
错误1:gradle项目控制台输出为乱码 # 解决方案:https://blog.csdn.net/weixin_43501566/article/details/112482302 # 在gradle-wrapper.properties 添加以下内容 org.gradle.jvmargs=-Df
错误还原:在查询的过程中,传入的workType为0时,该条件不起作用 &lt;select id=&quot;xxx&quot;&gt; SELECT di.id, di.name, di.work_type, di.updated... &lt;where&gt; &lt;if test=&qu
报错如下,gcc版本太低 ^ server.c:5346:31: 错误:‘struct redisServer’没有名为‘server_cpulist’的成员 redisSetCpuAffinity(server.server_cpulist); ^ server.c: 在函数‘hasActiveC
解决方案1 1、改项目中.idea/workspace.xml配置文件,增加dynamic.classpath参数 2、搜索PropertiesComponent,添加如下 &lt;property name=&quot;dynamic.classpath&quot; value=&quot;tru
删除根组件app.vue中的默认代码后报错:Module Error (from ./node_modules/eslint-loader/index.js): 解决方案:关闭ESlint代码检测,在项目根目录创建vue.config.js,在文件中添加 module.exports = { lin
查看spark默认的python版本 [root@master day27]# pyspark /home/software/spark-2.3.4-bin-hadoop2.7/conf/spark-env.sh: line 2: /usr/local/hadoop/bin/hadoop: No s
使用本地python环境可以成功执行 import pandas as pd import matplotlib.pyplot as plt # 设置字体 plt.rcParams[&#39;font.sans-serif&#39;] = [&#39;SimHei&#39;] # 能正确显示负号 p
错误1:Request method ‘DELETE‘ not supported 错误还原:controller层有一个接口,访问该接口时报错:Request method ‘DELETE‘ not supported 错误原因:没有接收到前端传入的参数,修改为如下 参考 错误2:cannot r
错误1:启动docker镜像时报错:Error response from daemon: driver failed programming external connectivity on endpoint quirky_allen 解决方法:重启docker -&gt; systemctl r
错误1:private field ‘xxx‘ is never assigned 按Altʾnter快捷键,选择第2项 参考:https://blog.csdn.net/shi_hong_fei_hei/article/details/88814070 错误2:启动时报错,不能找到主启动类 #
报错如下,通过源不能下载,最后警告pip需升级版本 Requirement already satisfied: pip in c:\users\ychen\appdata\local\programs\python\python310\lib\site-packages (22.0.4) Coll
错误1:maven打包报错 错误还原:使用maven打包项目时报错如下 [ERROR] Failed to execute goal org.apache.maven.plugins:maven-resources-plugin:3.2.0:resources (default-resources)
错误1:服务调用时报错 服务消费者模块assess通过openFeign调用服务提供者模块hires 如下为服务提供者模块hires的控制层接口 @RestController @RequestMapping(&quot;/hires&quot;) public class FeignControl
错误1:运行项目后报如下错误 解决方案 报错2:Failed to execute goal org.apache.maven.plugins:maven-compiler-plugin:3.8.1:compile (default-compile) on project sb 解决方案:在pom.
参考 错误原因 过滤器或拦截器在生效时,redisTemplate还没有注入 解决方案:在注入容器时就生效 @Component //项目运行时就注入Spring容器 public class RedisBean { @Resource private RedisTemplate&lt;String
使用vite构建项目报错 C:\Users\ychen\work&gt;npm init @vitejs/app @vitejs/create-app is deprecated, use npm init vite instead C:\Users\ychen\AppData\Local\npm-