/*
 *  Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#ifndef MODULES_AUDIO_DEVICE_ANDROID_OPENSLES_PLAYER_H_
#define MODULES_AUDIO_DEVICE_ANDROID_OPENSLES_PLAYER_H_

#include <SLES/OpenSLES.h>
#include <SLES/OpenSLES_Android.h>
#include <SLES/OpenSLES_AndroidConfiguration.h>

#include <memory>

#include "api/sequence_checker.h"
#include "modules/audio_device/android/audio_common.h"
#include "modules/audio_device/android/audio_manager.h"
#include "modules/audio_device/android/opensles_common.h"
#include "modules/audio_device/audio_device_generic.h"
#include "modules/audio_device/include/audio_device_defines.h"
#include "modules/utility/include/helpers_android.h"

namespace webrtc {

class FineAudioBuffer;

// Implements 16-bit mono PCM audio output support for Android using the
// C based OpenSL ES API. No calls are made from C/C++ to Java using JNI.
//
// An instance must be created and destroyed on one and the same thread.
// All public methods must also be called on the same thread. A thread checker
// will RTC_DCHECK if any method is called on an invalid thread. Decoded audio
// buffers are requested on a dedicated internal thread managed by the OpenSL
// ES layer.
//
// The existing design forces the user to call InitPlayout() after
// StopPlayout() to be able to call StartPlayout() again. This is in line with
// how the Java-based implementation works.
//
// OpenSL ES is a native C API which has no Dalvik-related overhead such as
// garbage collection pauses, and it supports reduced audio output latency.
// If the device doesn't claim this feature but supports API level 9 (Android
// platform version 2.3) or later, then we can still use the OpenSL ES APIs but
// the output latency may be higher.
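//
// A minimal usage sketch (illustrative only; `audio_manager` and
// `device_buffer` are assumed to be valid objects owned elsewhere, e.g. by
// AudioDeviceModuleImpl):
//
//   OpenSLESPlayer player(audio_manager);
//   player.Init();
//   player.AttachAudioBuffer(device_buffer);
//   player.InitPlayout();
//   player.StartPlayout();
//   // Audio is rendered on the internal OpenSL ES thread.
//   player.StopPlayout();
//   // InitPlayout() must be called again before the next StartPlayout().
//   player.Terminate();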
class OpenSLESPlayer {
 public:
  // Beginning with API level 17 (Android 4.2), a buffer count of 2 or more is
  // required for lower latency. Beginning with API level 18 (Android 4.3), a
  // buffer count of 1 is sufficient for lower latency. In addition, the buffer
  // size and sample rate must be compatible with the device's native output
  // configuration provided via the audio manager at construction.
  // TODO(henrika): perhaps set this value dynamically based on OS version.
  static const int kNumOfOpenSLESBuffers = 2;

  explicit OpenSLESPlayer(AudioManager* audio_manager);
  ~OpenSLESPlayer();

  int Init();
  int Terminate();

  int InitPlayout();
  bool PlayoutIsInitialized() const { return initialized_; }

  int StartPlayout();
  int StopPlayout();
  bool Playing() const { return playing_; }

  int SpeakerVolumeIsAvailable(bool& available);
  int SetSpeakerVolume(uint32_t volume);
  int SpeakerVolume(uint32_t& volume) const;
  int MaxSpeakerVolume(uint32_t& maxVolume) const;
  int MinSpeakerVolume(uint32_t& minVolume) const;

  void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer);

 private:
  // These callback methods are called when data is required for playout.
  // They are both called from an internal "OpenSL ES thread" which is not
  // attached to the Dalvik VM.
  static void SimpleBufferQueueCallback(SLAndroidSimpleBufferQueueItf caller,
                                        void* context);
  void FillBufferQueue();
  // Reads audio data in PCM format using the AudioDeviceBuffer.
  // Can be called both on the main thread (during Start()) and from the
  // internal audio thread while output streaming is active.
  // If the `silence` flag is set, the audio is filled with zeros instead of
  // asking the WebRTC layer for real audio data. This procedure is also known
  // as audio priming.
  void EnqueuePlayoutData(bool silence);
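  //
  // A sketch of the intended priming pattern at playout start (assumed from
  // the comment above, not a verbatim copy of the implementation):
  //
  //   for (int i = 0; i < kNumOfOpenSLESBuffers; ++i) {
  //     EnqueuePlayoutData(true);  // Enqueue silence before entering Play.
  //   }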

  // Allocate memory for audio buffers which will be used to render audio
  // via the SLAndroidSimpleBufferQueueItf interface.
  void AllocateDataBuffers();
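  //
  // A sketch of the per-buffer sizing (assuming `audio_parameters_` reflects
  // the device's native output configuration; see the members further down):
  //
  //   for (int i = 0; i < kNumOfOpenSLESBuffers; ++i) {
  //     audio_buffers_[i].reset(
  //         new SLint16[audio_parameters_.frames_per_buffer() *
  //                     audio_parameters_.channels()]);
  //   }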

  // Obtains the SL Engine Interface from the existing global Engine object.
  // The interface exposes creation methods of all the OpenSL ES object types.
  // This method defines the `engine_` member variable.
  bool ObtainEngineInterface();

  // Creates/destroys the output mix object.
  bool CreateMix();
  void DestroyMix();

  // Creates/destroys the audio player and the simple buffer queue object.
  // Also creates the volume object.
  bool CreateAudioPlayer();
  void DestroyAudioPlayer();

  SLuint32 GetPlayState() const;

  // Ensures that methods are called from the same thread as this object is
  // created on.
  SequenceChecker thread_checker_;

  // Stores the thread ID on the first call to SimpleBufferQueueCallback() from
  // the internal non-application thread, which is not attached to the Dalvik
  // JVM. Detached during construction of this object.
  SequenceChecker thread_checker_opensles_;

  // Raw pointer to the audio manager injected at construction. Used to cache
  // audio parameters and to access the global SL engine object needed by the
  // ObtainEngineInterface() method. The audio manager outlives any instance of
  // this class.
  AudioManager* audio_manager_;

  // Contains audio parameters provided to this class at construction by the
  // AudioManager.
  const AudioParameters audio_parameters_;

  // Raw pointer handle provided to us in AttachAudioBuffer(). Owned by the
  // AudioDeviceModuleImpl class, which is created by AudioDeviceModule::Create().
  AudioDeviceBuffer* audio_device_buffer_;

  bool initialized_;
  bool playing_;

  // PCM-type format definition.
  // TODO(henrika): add support for SLAndroidDataFormat_PCM_EX (android-21) if
  // 32-bit float representation is needed.
  SLDataFormat_PCM pcm_format_;

  // Queue of audio buffers to be used by the player object for rendering
  // audio.
  std::unique_ptr<SLint16[]> audio_buffers_[kNumOfOpenSLESBuffers];

  // FineAudioBuffer takes an AudioDeviceBuffer which delivers audio data
  // in chunks of 10ms. It then allows this data to be pulled at a finer or
  // coarser granularity, i.e. by interacting with this class instead of
  // directly with the AudioDeviceBuffer, one can ask for any number of audio
  // samples.
  // Example: the native buffer size can be 192 audio frames at a 48kHz sample
  // rate. WebRTC will provide 480 audio frames per 10ms but OpenSL ES asks for
  // 192 in each callback (one every 4 ms). This class can then ask for 192 and
  // the FineAudioBuffer will ask WebRTC for new data only approximately every
  // second callback, caching any unused audio in the meantime.
  std::unique_ptr<FineAudioBuffer> fine_audio_buffer_;

  // Keeps track of active audio buffer 'n' in the audio_buffers_[n] queue.
  // Example (kNumOfOpenSLESBuffers = 2): counts 0, 1, 0, 1, ...
  int buffer_index_;

  // This interface exposes creation methods for all the OpenSL ES object types.
  // It is the OpenSL ES API entry point.
  SLEngineItf engine_;

  // Output mix object to be used by the player object.
  webrtc::ScopedSLObjectItf output_mix_;

  // The audio player media object plays out audio to the speakers. It also
  // supports volume control.
  webrtc::ScopedSLObjectItf player_object_;

  // This interface is supported on the audio player and it controls the state
  // of the audio player.
  SLPlayItf player_;

  // The Android Simple Buffer Queue interface is supported on the audio player
  // and it provides methods to send audio data from the source to the audio
  // player for rendering.
  SLAndroidSimpleBufferQueueItf simple_buffer_queue_;

  // This interface exposes controls for manipulating the object's audio volume
  // properties. This interface is supported on the Audio Player object.
  SLVolumeItf volume_;

  // Last time the OpenSL ES layer asked for audio data to play out.
  uint32_t last_play_time_;
};

}  // namespace webrtc

#endif  // MODULES_AUDIO_DEVICE_ANDROID_OPENSLES_PLAYER_H_