// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef CONTENT_RENDERER_SPEECH_RECOGNITION_DISPATCHER_H_
#define CONTENT_RENDERER_SPEECH_RECOGNITION_DISPATCHER_H_

#include <map>

#include "base/basictypes.h"
#include "base/memory/scoped_ptr.h"
#include "base/memory/shared_memory.h"
#include "base/sync_socket.h"
#include "content/public/common/speech_recognition_result.h"
#include "content/public/renderer/render_view_observer.h"
#include "third_party/WebKit/public/platform/WebMediaStreamTrack.h"
#include "third_party/WebKit/public/platform/WebVector.h"
#include "third_party/WebKit/public/web/WebSpeechRecognitionHandle.h"
#include "third_party/WebKit/public/web/WebSpeechRecognizer.h"

namespace media {
class AudioParameters;
}

namespace content {
class RenderViewImpl;
#if defined(ENABLE_WEBRTC)
class SpeechRecognitionAudioSink;
#endif
struct SpeechRecognitionError;
struct SpeechRecognitionResult;

// SpeechRecognitionDispatcher is a delegate for methods used by WebKit for
// scripted JS speech APIs. It's the complement of
// SpeechRecognitionDispatcherHost (owned by RenderViewHost).
class SpeechRecognitionDispatcher : public RenderViewObserver,
                                    public blink::WebSpeechRecognizer {
 public:
  explicit SpeechRecognitionDispatcher(RenderViewImpl* render_view);
  ~SpeechRecognitionDispatcher() override;

  // Aborts all speech recognitions.
  void AbortAllRecognitions();

 private:
  // RenderViewObserver implementation.
  bool OnMessageReceived(const IPC::Message& message) override;

  // blink::WebSpeechRecognizer implementation.
  void start(const blink::WebSpeechRecognitionHandle&,
             const blink::WebSpeechRecognitionParams&,
             blink::WebSpeechRecognizerClient*) override;
  void stop(const blink::WebSpeechRecognitionHandle&,
            blink::WebSpeechRecognizerClient*) override;
  void abort(const blink::WebSpeechRecognitionHandle&,
             blink::WebSpeechRecognizerClient*) override;

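  // IPC message handlers, dispatched from OnMessageReceived(). They relay
  // recognition events reported by the browser-side
  // SpeechRecognitionDispatcherHost back to |recognizer_client_|.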
  void OnRecognitionStarted(int request_id);
  void OnAudioStarted(int request_id);
  void OnSoundStarted(int request_id);
  void OnSoundEnded(int request_id);
  void OnAudioEnded(int request_id);
  void OnErrorOccurred(int request_id, const SpeechRecognitionError& error);
  void OnRecognitionEnded(int request_id);
  void OnResultsRetrieved(int request_id,
                          const SpeechRecognitionResults& result);
  void OnAudioReceiverReady(int session_id,
                            const media::AudioParameters& params,
                            const base::SharedMemoryHandle handle,
                            const base::SyncSocket::TransitDescriptor socket);

  void ResetAudioSink();

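  // Bookkeeping that maps Blink's WebSpeechRecognitionHandle objects to the
  // integer request IDs used in the IPC messages above (see |handle_map_|).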
  int GetOrCreateIDForHandle(const blink::WebSpeechRecognitionHandle& handle);
  bool HandleExists(const blink::WebSpeechRecognitionHandle& handle);
  const blink::WebSpeechRecognitionHandle& GetHandleFromID(int handle_id);

  // The WebKit client class that we use to send events back to the JS world.
  blink::WebSpeechRecognizerClient* recognizer_client_;

#if defined(ENABLE_WEBRTC)
  // Media stream audio track that the speech recognition connects to.
  // Accessed on the render thread.
  blink::WebMediaStreamTrack audio_track_;

  // Audio sink used to provide audio from the track.
  scoped_ptr<SpeechRecognitionAudioSink> speech_audio_sink_;
#endif

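  // Maps the integer request IDs sent over IPC to the Blink handles that
  // identify each recognition session; |next_id_| is the next ID to assign.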
  typedef std::map<int, blink::WebSpeechRecognitionHandle> HandleMap;
  HandleMap handle_map_;
  int next_id_;

  DISALLOW_COPY_AND_ASSIGN(SpeechRecognitionDispatcher);
};
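
// Usage sketch (illustrative only; the local names below are hypothetical).
// Blink drives the dispatcher through the blink::WebSpeechRecognizer
// interface when script uses the JS speech API:
//
//   recognizer->start(handle, params, client);  // begin a recognition session
//   recognizer->stop(handle, client);           // ask for final results
//   recognizer->abort(handle, client);          // cancel the session
//
// Each call is forwarded over IPC to the browser process; replies come back
// through OnMessageReceived() and are reported to the
// blink::WebSpeechRecognizerClient.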

}  // namespace content

#endif  // CONTENT_RENDERER_SPEECH_RECOGNITION_DISPATCHER_H_