/external/libgdx/backends/gdx-backend-lwjgl/src/com/badlogic/gdx/backends/lwjgl/audio/
D | OpenALSound.java |
    17 package com.badlogic.gdx.backends.lwjgl.audio;
    22 import com.badlogic.gdx.audio.Sound;
    29 private final OpenALAudio audio; field in OpenALSound
    32 public OpenALSound (OpenALAudio audio) { in OpenALSound() argument
    33 this.audio = audio; in OpenALSound()
    57 if (audio.noDevice) return 0; in play()
    58 int sourceID = audio.obtainSource(false); in play()
    61 audio.retain(this, true); in play()
    62 sourceID = audio.obtainSource(false); in play()
    63 } else audio.retain(this, false); in play()
    [all …]
|
D | OpenALMusic.java |
    17 package com.badlogic.gdx.backends.lwjgl.audio;
    25 import com.badlogic.gdx.audio.Music;
    40 private final OpenALAudio audio; field in OpenALMusic
    54 public OpenALMusic (OpenALAudio audio, FileHandle file) { in OpenALMusic() argument
    55 this.audio = audio; in OpenALMusic()
    67 if (audio.noDevice) return; in play()
    69 sourceID = audio.obtainSource(true); in play()
    72 audio.music.add(this); in play()
    103 if (audio.noDevice) return; in stop()
    105 audio.music.removeValue(this, true); in stop()
    [all …]
|
D | Ogg.java |
    17 package com.badlogic.gdx.backends.lwjgl.audio;
    30 public Music (OpenALAudio audio, FileHandle file) { in Music() argument
    31 super(audio, file); in Music()
    32 if (audio.noDevice) return; in Music()
    61 public Sound (OpenALAudio audio, FileHandle file) { in Sound() argument
    62 super(audio); in Sound()
    63 if (audio.noDevice) return; in Sound()
|
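The OpenALSound, OpenALMusic, and Ogg entries above are the LWJGL desktop backend's internal implementations of libGDX's public Sound and Music interfaces. As a point of reference, here is a minimal sketch of how application code normally reaches them through Gdx.audio; the class name and asset file names are placeholders, and only the Gdx.audio/Sound/Music calls are the actual libGDX API. When no OpenAL device can be opened (the audio.noDevice checks in the fragments), these calls silently do nothing.

```java
import com.badlogic.gdx.ApplicationAdapter;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.audio.Music;
import com.badlogic.gdx.audio.Sound;

public class AudioUsageSketch extends ApplicationAdapter {
	private Sound blip;   // served by OpenALSound on the LWJGL backend
	private Music track;  // served by OpenALMusic (Ogg.Music for .ogg files)

	@Override
	public void create () {
		// Placeholder assets; Gdx.audio delegates to the OpenAL backend classes above.
		blip = Gdx.audio.newSound(Gdx.files.internal("blip.ogg"));
		track = Gdx.audio.newMusic(Gdx.files.internal("track.ogg"));
		track.setLooping(true);
		track.play();
		long id = blip.play(0.5f); // volume 0..1; returns an id usable with setPitch/stop
		blip.setPitch(id, 1.2f);
	}

	@Override
	public void dispose () {
		blip.dispose();
		track.dispose();
	}
}
```

Sound.play() is the call path that hits the obtainSource()/retain() fragments shown for OpenALSound.play() above.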
/external/libgdx/backends/gdx-backend-lwjgl3/src/com/badlogic/gdx/backends/lwjgl3/audio/ |
D | OpenALSound.java |
    17 package com.badlogic.gdx.backends.lwjgl3.audio;
    22 import com.badlogic.gdx.audio.Sound;
    29 private final OpenALAudio audio; field in OpenALSound
    32 public OpenALSound (OpenALAudio audio) { in OpenALSound() argument
    33 this.audio = audio; in OpenALSound()
    57 if (audio.noDevice) return 0; in play()
    58 int sourceID = audio.obtainSource(false); in play()
    61 audio.retain(this, true); in play()
    62 sourceID = audio.obtainSource(false); in play()
    63 } else audio.retain(this, false); in play()
    [all …]
|
D | OpenALMusic.java |
    17 package com.badlogic.gdx.backends.lwjgl3.audio;
    25 import com.badlogic.gdx.audio.Music;
    40 private final OpenALAudio audio; field in OpenALMusic
    54 public OpenALMusic (OpenALAudio audio, FileHandle file) { in OpenALMusic() argument
    55 this.audio = audio; in OpenALMusic()
    67 if (audio.noDevice) return; in play()
    69 sourceID = audio.obtainSource(true); in play()
    72 audio.music.add(this); in play()
    103 if (audio.noDevice) return; in stop()
    105 audio.music.removeValue(this, true); in stop()
    [all …]
|
D | Ogg.java |
    17 package com.badlogic.gdx.backends.lwjgl3.audio;
    30 public Music (OpenALAudio audio, FileHandle file) { in Music() argument
    31 super(audio, file); in Music()
    32 if (audio.noDevice) return; in Music()
    61 public Sound (OpenALAudio audio, FileHandle file) { in Sound() argument
    62 super(audio); in Sound()
    63 if (audio.noDevice) return; in Sound()
|
/external/vboot_reference/firmware/lib/ |
D | vboot_audio.c |
    62 static void VbGetDevMusicNotes(VbAudioContext *audio, int use_short) in VbGetDevMusicNotes() argument
    85 if (!audio->background_beep) in VbGetDevMusicNotes()
    192 audio->music_notes = notebuf; in VbGetDevMusicNotes()
    193 audio->note_count = count; in VbGetDevMusicNotes()
    194 audio->free_notes_when_done = 1; in VbGetDevMusicNotes()
    200 audio->music_notes = builtin; in VbGetDevMusicNotes()
    201 audio->note_count = count; in VbGetDevMusicNotes()
    202 audio->free_notes_when_done = 0; in VbGetDevMusicNotes()
    212 VbAudioContext *audio = &au; in VbAudioOpen() local
    227 Memset(audio, 0, sizeof(*audio)); in VbAudioOpen()
    [all …]
|
/external/v8/tools/gyp/tools/emacs/testdata/ |
D | media.gyp |
    10 # Override to dynamically link the cras (ChromeOS audio) library.
    33 'audio/android/audio_manager_android.cc',
    34 'audio/android/audio_manager_android.h',
    35 'audio/android/audio_track_output_android.cc',
    36 'audio/android/audio_track_output_android.h',
    37 'audio/android/opensles_input.cc',
    38 'audio/android/opensles_input.h',
    39 'audio/android/opensles_output.cc',
    40 'audio/android/opensles_output.h',
    41 'audio/async_socket_io_handler.h',
    [all …]
|
/external/webrtc/webrtc/modules/audio_processing/ |
D | noise_suppression_impl.cc |
    70 void NoiseSuppressionImpl::AnalyzeCaptureAudio(AudioBuffer* audio) { in AnalyzeCaptureAudio() argument
    71 RTC_DCHECK(audio); in AnalyzeCaptureAudio()
    78 RTC_DCHECK_GE(160u, audio->num_frames_per_band()); in AnalyzeCaptureAudio()
    79 RTC_DCHECK_EQ(suppressors_.size(), audio->num_channels()); in AnalyzeCaptureAudio()
    82 audio->split_bands_const_f(i)[kBand0To8kHz]); in AnalyzeCaptureAudio()
    87 void NoiseSuppressionImpl::ProcessCaptureAudio(AudioBuffer* audio) { in ProcessCaptureAudio() argument
    88 RTC_DCHECK(audio); in ProcessCaptureAudio()
    94 RTC_DCHECK_GE(160u, audio->num_frames_per_band()); in ProcessCaptureAudio()
    95 RTC_DCHECK_EQ(suppressors_.size(), audio->num_channels()); in ProcessCaptureAudio()
    99 audio->split_bands_const_f(i), in ProcessCaptureAudio()
    [all …]
|
D | gain_control_impl.cc |
    69 int GainControlImpl::ProcessRenderAudio(AudioBuffer* audio) { in ProcessRenderAudio() argument
    75 assert(audio->num_frames_per_band() <= 160); in ProcessRenderAudio()
    81 WebRtcAgc_GetAddFarendError(my_handle, audio->num_frames_per_band()); in ProcessRenderAudio()
    88 render_queue_buffer_.end(), audio->mixed_low_pass_data(), in ProcessRenderAudio()
    89 (audio->mixed_low_pass_data() + audio->num_frames_per_band())); in ProcessRenderAudio()
    127 int GainControlImpl::AnalyzeCaptureAudio(AudioBuffer* audio) { in AnalyzeCaptureAudio() argument
    134 assert(audio->num_frames_per_band() <= 160); in AnalyzeCaptureAudio()
    135 assert(audio->num_channels() == num_handles()); in AnalyzeCaptureAudio()
    145 audio->split_bands(i), in AnalyzeCaptureAudio()
    146 audio->num_bands(), in AnalyzeCaptureAudio()
    [all …]
|
D | echo_control_mobile_impl.cc |
    93 int EchoControlMobileImpl::ProcessRenderAudio(const AudioBuffer* audio) { in ProcessRenderAudio() argument
    100 assert(audio->num_frames_per_band() <= 160); in ProcessRenderAudio()
    101 assert(audio->num_channels() == apm_->num_reverse_channels()); in ProcessRenderAudio()
    108 for (size_t j = 0; j < audio->num_channels(); j++) { in ProcessRenderAudio()
    111 my_handle, audio->split_bands_const(j)[kBand0To8kHz], in ProcessRenderAudio()
    112 audio->num_frames_per_band()); in ProcessRenderAudio()
    119 audio->split_bands_const(j)[kBand0To8kHz], in ProcessRenderAudio()
    120 (audio->split_bands_const(j)[kBand0To8kHz] + in ProcessRenderAudio()
    121 audio->num_frames_per_band())); in ProcessRenderAudio()
    167 int EchoControlMobileImpl::ProcessCaptureAudio(AudioBuffer* audio) { in ProcessCaptureAudio()
    [all …]
|
D | echo_cancellation_impl.cc |
    88 int EchoCancellationImpl::ProcessRenderAudio(const AudioBuffer* audio) { in ProcessRenderAudio() argument
    94 assert(audio->num_frames_per_band() <= 160); in ProcessRenderAudio()
    95 assert(audio->num_channels() == apm_->num_reverse_channels()); in ProcessRenderAudio()
    103 for (size_t j = 0; j < audio->num_channels(); j++) { in ProcessRenderAudio()
    108 my_handle, audio->split_bands_const_f(j)[kBand0To8kHz], in ProcessRenderAudio()
    109 audio->num_frames_per_band()); in ProcessRenderAudio()
    117 audio->split_bands_const_f(j)[kBand0To8kHz], in ProcessRenderAudio()
    118 (audio->split_bands_const_f(j)[kBand0To8kHz] + in ProcessRenderAudio()
    119 audio->num_frames_per_band())); in ProcessRenderAudio()
    162 int EchoCancellationImpl::ProcessCaptureAudio(AudioBuffer* audio) { in ProcessCaptureAudio()
    [all …]
|
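The four audio_processing entries above (noise suppression, gain control, and the two echo control modules) share the same shape: assert at most 160 samples per band (a 10 ms frame), keep one native handle per channel, and feed each handle the lowest split band (kBand0To8kHz). The sketch below is a hypothetical, simplified restatement of that loop in Java, not WebRTC's API; the ChannelHandle interface and every name in it are invented for illustration.

```java
// Hypothetical sketch of the per-channel capture-processing loop seen in the
// fragments above; none of these types exist in WebRTC.
final class CaptureProcessorSketch {
	interface ChannelHandle {
		void process(float[] band0To8kHz, int numFramesPerBand);
	}

	private final ChannelHandle[] handles; // one handle per audio channel

	CaptureProcessorSketch(ChannelHandle[] handles) {
		this.handles = handles;
	}

	void processCapture(float[][] lowestSplitBandPerChannel, int numFramesPerBand) {
		// Mirrors the asserts/RTC_DCHECKs: <= 160 samples per band (10 ms) and
		// exactly one handle per channel.
		if (numFramesPerBand > 160) throw new IllegalArgumentException("expected a 10 ms frame");
		if (lowestSplitBandPerChannel.length != handles.length)
			throw new IllegalArgumentException("one handle per channel");
		for (int ch = 0; ch < handles.length; ch++) {
			handles[ch].process(lowestSplitBandPerChannel[ch], numFramesPerBand);
		}
	}
}
```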
/external/webrtc/webrtc/audio/ |
D | webrtc_audio.gypi |
    17 'audio/audio_receive_stream.cc',
    18 'audio/audio_receive_stream.h',
    19 'audio/audio_send_stream.cc',
    20 'audio/audio_send_stream.h',
    21 'audio/audio_sink.h',
    22 'audio/audio_state.cc',
    23 'audio/audio_state.h',
    24 'audio/conversion.h',
    25 'audio/scoped_voe_interface.h',
|
/external/webrtc/webrtc/modules/audio_device/ios/ |
D | audio_device_ios.mm |
    34 // audio session. This variable is used to ensure that we only activate an audio
    60 // will be set to this value as well to avoid resampling the audio unit's
    67 // ~10.6667ms or 512 audio frames per buffer. The FineAudioBuffer instance will
    74 // in the I/O audio unit. Initial tests have shown that it is possible to use
    78 // audio unit. Hence, we will not hit a RTC_CHECK in
    82 // Number of bytes per audio sample for 16-bit signed integer representation.
    98 // Verifies that the current audio session supports input audio and that the
    102 // Ensure that the device currently supports audio input.
    104 LOG(LS_ERROR) << "No audio input path is available!";
    121 // Activates an audio session suitable for full duplex VoIP sessions when
    [all …]
|
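For reference, the "~10.6667ms or 512 audio frames per buffer" figure quoted from audio_device_ios.mm works out to 512 / 48000 ≈ 0.010667 s, i.e. a 512-frame I/O buffer at a 48 kHz hardware sample rate; the 48 kHz rate is inferred from that arithmetic, since the comment's surrounding context is truncated in the excerpt above.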
/external/libgdx/backends/gdx-backend-moe/src/com/badlogic/gdx/backends/iosmoe/ |
D | IOSAudio.java |
    21 import com.badlogic.gdx.audio.AudioDevice;
    22 import com.badlogic.gdx.audio.AudioRecorder;
    23 import com.badlogic.gdx.audio.Music;
    24 import com.badlogic.gdx.audio.Sound;
    33 OALSimpleAudio audio = OALSimpleAudio.sharedInstance(); in IOSAudio() local
    34 if (audio != null) { in IOSAudio()
    35 audio.setAllowIpod(config.allowIpod); in IOSAudio()
    36 audio.setHonorSilentSwitch(true); in IOSAudio()
|
/external/libgdx/backends/gdx-backend-robovm/src/com/badlogic/gdx/backends/iosrobovm/ |
D | IOSAudio.java |
    21 import com.badlogic.gdx.audio.AudioDevice;
    22 import com.badlogic.gdx.audio.AudioRecorder;
    23 import com.badlogic.gdx.audio.Music;
    24 import com.badlogic.gdx.audio.Sound;
    33 OALSimpleAudio audio = OALSimpleAudio.sharedInstance(); in IOSAudio() local
    34 if (audio != null) { in IOSAudio()
    35 audio.setAllowIpod(config.allowIpod); in IOSAudio()
    36 audio.setHonorSilentSwitch(true); in IOSAudio()
|
/external/autotest/client/site_tests/audio_AudioCorruption/ |
D | control |
    7 PURPOSE = "Verify that Chrome can handle corrupted mp3 audio"
    9 This test will fail if Chrome can't catch error for playing corrupted mp3 audio.
    15 TEST_CLASS = "audio"
    19 This test verifies Chrome can catch error for playing corrupted mp3 audio.
    22 audio = 'http://commondatastorage.googleapis.com/chromiumos-test-assets-public/audio_AudioCorruptio…
    23 job.run_test('audio_AudioCorruption', audio=audio)
|
/external/autotest/client/site_tests/audio_CrasLoopback/ |
D | control |
    5 AUTHOR = 'The Chromium OS Audiovideo Team, chromeos-audio@google.com'
    7 PURPOSE = 'Test that audio played to line out can be heard at mic in.'
    9 Check if the audio played to line out is heard by cras_test_client at mic in.
    11 ATTRIBUTES = "suite:audio, suite:partners"
    12 SUITE = 'audio, partners'
    15 TEST_CLASS = "audio"
    20 Test that audio playback and capture are working.
|
/external/autotest/client/site_tests/audio_AlsaLoopback/ |
D | control |
    5 AUTHOR = 'The Chromium OS Audiovideo Team, chromeos-audio@google.com'
    7 PURPOSE = 'Test that audio played to line out can be heard at mic in.'
    9 Check if the audio played to line out is heard by arecord at mic in.
    11 ATTRIBUTES = "suite:audio"
    12 SUITE = 'audio'
    15 TEST_CLASS = "audio"
    20 Test that audio playback and capture are working.
|
/external/libgdx/tests/gdx-tests/src/com/badlogic/gdx/tests/ |
D | AudioRecorderTest.java |
    20 import com.badlogic.gdx.audio.AudioDevice;
    21 import com.badlogic.gdx.audio.AudioRecorder;
    32 device = Gdx.audio.newAudioDevice(44100, true); in create()
    33 recorder = Gdx.audio.newAudioRecorder(44100, true); in create()
    62 device = Gdx.audio.newAudioDevice(44100, true); in resume()
    63 recorder = Gdx.audio.newAudioRecorder(44100, true); in resume()
|
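The AudioRecorderTest fragments above show only the factory calls; the loop that makes it a loopback test is not in the excerpt. Below is a minimal sketch of such a record-then-play loop, assuming the same 44.1 kHz mono parameters; the buffer size and iteration count are arbitrary choices, while newAudioDevice/newAudioRecorder, AudioRecorder.read, and AudioDevice.writeSamples are the real libGDX calls.

```java
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.audio.AudioDevice;
import com.badlogic.gdx.audio.AudioRecorder;

public class RecorderLoopbackSketch {
	public static void run () {
		AudioDevice device = Gdx.audio.newAudioDevice(44100, true);       // 44.1 kHz, mono playback
		AudioRecorder recorder = Gdx.audio.newAudioRecorder(44100, true); // 44.1 kHz, mono capture
		short[] samples = new short[1024];   // arbitrary block size (~23 ms at 44.1 kHz)
		for (int i = 0; i < 100; i++) {      // arbitrary duration (~2.3 s)
			recorder.read(samples, 0, samples.length);       // blocking capture
			device.writeSamples(samples, 0, samples.length); // blocking playback
		}
		recorder.dispose();
		device.dispose();
	}
}
```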
/external/libvorbis/doc/ |
D | a1-encapsulation-ogg.tex |
    9 streams to encapsulate Vorbis compressed audio packet data into file
    13 of Vorbis audio packets.
    36 The Ogg stream must be unmultiplexed (only one stream, a Vorbis audio stream, per link)
    44 for low-bitrate movies consisting of DivX video and Vorbis audio.
    45 However, a 'Vorbis I audio file' is taken to imply Vorbis audio
    47 audio player' is not required to implement Ogg support beyond the
    59 while visual media should use \literal{video/ogg}, and audio
    60 \literal{audio/ogg}. Vorbis data encapsulated in Ogg may appear
    62 \literal{audio/vorbis} + \literal{audio/vorbis-config}.
    73 uniquely identifies a stream as Vorbis audio, is placed alone in the
    [all …]
|
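The Vorbis-over-Ogg encapsulation described above requires the identification header, which "uniquely identifies a stream as Vorbis audio", to sit alone in the first page of the logical stream. As an illustration only (a sketch based on the public Ogg and Vorbis specifications, not code from this document), the following checks whether a file's first Ogg page carries a packet starting with the byte 0x01 followed by the ASCII string "vorbis".

```java
import java.io.DataInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

public class VorbisIdCheck {
	/** Returns true if the file's first Ogg page begins with a Vorbis identification header. */
	public static boolean looksLikeVorbis (String path) throws IOException {
		try (DataInputStream in = new DataInputStream(new FileInputStream(path))) {
			byte[] pageHeader = new byte[27];                 // fixed-size part of an Ogg page header
			in.readFully(pageHeader);
			if (pageHeader[0] != 'O' || pageHeader[1] != 'g'
				|| pageHeader[2] != 'g' || pageHeader[3] != 'S') return false;  // "OggS" capture pattern
			int segments = pageHeader[26] & 0xFF;             // number of lacing values
			in.readFully(new byte[segments]);                 // skip the segment table
			byte[] packetStart = new byte[7];                 // packet_type byte + "vorbis"
			in.readFully(packetStart);
			return packetStart[0] == 0x01
				&& "vorbis".equals(new String(packetStart, 1, 6, StandardCharsets.US_ASCII));
		}
	}
}
```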
/external/libgdx/backends/gdx-backend-lwjgl3/src/com/badlogic/gdx/backends/lwjgl3/audio/mock/ |
D | MockAudio.java |
    17 package com.badlogic.gdx.backends.lwjgl3.audio.mock;
    20 import com.badlogic.gdx.audio.AudioDevice;
    21 import com.badlogic.gdx.audio.AudioRecorder;
    22 import com.badlogic.gdx.audio.Music;
    23 import com.badlogic.gdx.audio.Sound;
|
/external/libgdx/backends/gdx-backend-headless/src/com/badlogic/gdx/backends/headless/mock/audio/ |
D | MockAudio.java |
    17 package com.badlogic.gdx.backends.headless.mock.audio;
    20 import com.badlogic.gdx.audio.AudioDevice;
    21 import com.badlogic.gdx.audio.AudioRecorder;
    22 import com.badlogic.gdx.audio.Music;
    23 import com.badlogic.gdx.audio.Sound;
|
/external/webrtc/webrtc/modules/audio_coding/codecs/g711/ |
D | audio_encoder_pcm.cc |
    82 rtc::ArrayView<const int16_t> audio, in EncodeInternal() argument
    88 speech_buffer_.insert(speech_buffer_.end(), audio.begin(), audio.end()); in EncodeInternal()
    110 size_t AudioEncoderPcmA::EncodeCall(const int16_t* audio, in EncodeCall() argument
    113 return WebRtcG711_EncodeA(audio, input_len, encoded); in EncodeCall()
    123 size_t AudioEncoderPcmU::EncodeCall(const int16_t* audio, in EncodeCall() argument
    126 return WebRtcG711_EncodeU(audio, input_len, encoded); in EncodeCall()
|
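The AudioEncoderPcmA/AudioEncoderPcmU fragments above buffer 16-bit samples and hand them to WebRtcG711_EncodeA/WebRtcG711_EncodeU, which emit one G.711 byte per sample. As background, here is the textbook mu-law companding step in Java, a sketch of what the mu-law variant does per sample; this is not WebRTC's implementation, and the class name is invented.

```java
public final class MuLawSketch {
	private static final int BIAS = 0x84;   // standard mu-law bias (132)
	private static final int CLIP = 32635;  // clip level applied before biasing

	/** Compands one 16-bit linear PCM sample to an 8-bit mu-law value. */
	public static byte encode (short pcm) {
		int sample = pcm;
		int sign = (sample >> 8) & 0x80;     // remember the sign
		if (sign != 0) sample = -sample;     // work on the magnitude
		if (sample > CLIP) sample = CLIP;
		sample += BIAS;
		int exponent = 7;                    // segment = position of the highest set bit (14..7)
		for (int mask = 0x4000; (sample & mask) == 0 && exponent > 0; mask >>= 1) exponent--;
		int mantissa = (sample >> (exponent + 3)) & 0x0F;
		return (byte) ~(sign | (exponent << 4) | mantissa);  // mu-law bytes are stored inverted
	}

	/** One output byte per input sample, matching the 1:1 ratio of the EncodeCall() fragments. */
	public static byte[] encode (short[] pcm) {
		byte[] out = new byte[pcm.length];
		for (int i = 0; i < pcm.length; i++) out[i] = encode(pcm[i]);
		return out;
	}
}
```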
/external/autotest/client/site_tests/audio_LoopbackLatency/ |
D | control |
    7 PURPOSE = 'Test audio loopback latency'
    9 Check if the audio played to line out can be heard at mic in, and assert
    12 ATTRIBUTES = "suite:audio"
    13 SUITE = 'audio'
    16 TEST_CLASS = "audio"
    21 Test that audio loopback latency is within a certain limit.
|