Project import generated by Copybara.
GitOrigin-RevId: c2597990d2200830529f823f969b7e48293ab787
parent 785d266e3f
commit 423c21b454

.bazelrc
@@ -14,6 +14,7 @@ build --copt='-Wno-unused-local-typedefs'
build --copt='-Wno-ignored-attributes'
# Temporarily set the incompatibility flag for Bazel 0.27.0 and above
build --incompatible_disable_deprecated_attr_params=false
build --incompatible_depset_is_not_iterable=false

# Sets the default Apple platform to macOS.
build --apple_platform_type=macos
@@ -238,7 +238,7 @@ To build and run iOS apps:
```

Option 2. Follow Bazel's
[documentation](https://docs.bazel.build/versions/master/install-ubuntu.html)
[documentation](https://docs.bazel.build/versions/master/install-os-x.html#install-with-installer-mac-os-x)
to install any version of Bazel manually.

4. Install OpenCV and FFmpeg.
@@ -600,15 +600,30 @@ The steps below use Android Studio to build and install a MediaPipe example app.

5. Select `Configure` | `Plugins` and install `Bazel`.

6. Select `Import Bazel Project`.
6. Select `Android Studio` | `Preferences` | `Bazel settings` and modify `Bazel binary location` to be the same as the output of `$ which bazel`.

7. Select `Import Bazel Project`.

* Select `Workspace`: `/path/to/mediapipe`.
* Select `Generate from BUILD file`: `/path/to/mediapipe/BUILD`.
* Select `Finish`.
* Modify `Project View` to be the following and select `Finish`.

7. Connect an Android device to the workstation.
```
directories:
# read project settings, e.g., .bazelrc
.
-mediapipe/objc
-mediapipe/examples/ios

8. Select `Run...` | `Edit Configurations...`.
targets:
//mediapipe/...:all

android_sdk_platform: android-29
```

8. Connect an Android device to the workstation.

9. Select `Run...` | `Edit Configurations...`.

* Enter Target Expression:
`//mediapipe/examples/android/src/java/com/google/mediapipe/apps/facedetectioncpu`
@@ -226,6 +226,7 @@ cc_test(
        "//mediapipe/calculators/core:flow_limiter_calculator",
        "//mediapipe/calculators/core:immediate_mux_calculator",
        "//mediapipe/calculators/core:round_robin_demux_calculator",
        "//mediapipe/calculators/util:annotation_overlay_calculator",
        "//mediapipe/framework:calculator_cc_proto",
        "//mediapipe/framework:calculator_framework",
        "//mediapipe/framework:calculator_profile_cc_proto",
@@ -174,6 +174,10 @@ void GraphProfiler::Pause() {
}

void GraphProfiler::Resume() {
  // is_profiling_ enables recording of performance stats.
  // is_tracing_ enables recording of timing events.
  // While the graph is running, these variables indicate
  // IsProfilerEnabled and IsTracerEnabled.
  is_profiling_ = IsProfilerEnabled(profiler_config_);
  is_tracing_ = IsTracerEnabled(profiler_config_);
}
@@ -502,7 +506,7 @@ void GraphProfiler::AddProcessSample(
}

std::unique_ptr<GlProfilingHelper> GraphProfiler::CreateGlProfilingHelper() {
  if (!IsProfilerEnabled(profiler_config_)) {
  if (!IsTracerEnabled(profiler_config_)) {
    return nullptr;
  }
  return absl::make_unique<mediapipe::GlProfilingHelper>(shared_from_this());
@@ -576,7 +580,6 @@ void AssignNodeNames(GraphProfile* profile) {
  LOG(INFO) << "trace_log_path: " << trace_log_path;
  int log_interval_count = GetLogIntervalCount(profiler_config_);
  int log_file_count = GetLogFileCount(profiler_config_);
  ++previous_log_index_;

  // Record the GraphTrace events since the previous WriteProfile.
  // The end_time is chosen to be trace_log_margin_usec in the past,
@@ -592,6 +595,10 @@ void AssignNodeNames(GraphProfile* profile) {
    tracer()->GetLog(previous_log_end_time_, end_time, trace);
  }
  previous_log_end_time_ = end_time;
  // If there are no trace events, skip log writing.
  if (is_tracing_ && trace->calculator_trace().empty()) {
    return ::mediapipe::OkStatus();
  }

  // Record the latest CalculatorProfiles.
  Status status;
@@ -603,6 +610,7 @@ void AssignNodeNames(GraphProfile* profile) {
  this->Reset();

  // Record the CalculatorGraphConfig, once per log file.
  ++previous_log_index_;
  bool is_new_file = (previous_log_index_ % log_interval_count == 0);
  if (is_new_file) {
    *profile.mutable_config() = validated_graph_->Config();
@@ -67,9 +67,8 @@ class GraphTracerTest : public ::testing::Test {
  }

  // Initializes the GraphTracer.
  void SetUpGraphTracer(size_t size) {
  void SetUpGraphTracer() {
    ProfilerConfig profiler_config;
    profiler_config.set_trace_log_capacity(size);
    profiler_config.set_trace_enabled(true);
    tracer_ = absl::make_unique<GraphTracer>(profiler_config);
  }
@@ -118,7 +117,7 @@ class GraphTracerTest : public ::testing::Test {

TEST_F(GraphTracerTest, EmptyTrace) {
  // Define the GraphTracer.
  SetUpGraphTracer(1024 * 1024);
  SetUpGraphTracer();

  // Validate the GraphTrace data.
  EXPECT_THAT(GetTrace(),
@@ -131,7 +130,7 @@ TEST_F(GraphTracerTest, EmptyTrace) {

TEST_F(GraphTracerTest, CalculatorTrace) {
  // Define the GraphTracer, the CalculatorState, and the stream specs.
  SetUpGraphTracer(1024 * 1024);
  SetUpGraphTracer();
  SetUpCalculatorContext("PCalculator_1", /*node_id=*/0, {"input_stream"},
                         {"output_stream"});
  absl::Time curr_time = start_time_;
@@ -171,7 +170,7 @@ TEST_F(GraphTracerTest, CalculatorTrace) {

TEST_F(GraphTracerTest, GraphTrace) {
  // Define the GraphTracer, the CalculatorState, and the stream specs.
  SetUpGraphTracer(1024 * 1024);
  SetUpGraphTracer();
  SetUpCalculatorContext("PCalculator_1", /*node_id=*/0, {"input_stream"},
                         {"up_1", "up_2"});
  absl::Time curr_time = start_time_;
@@ -914,11 +913,14 @@ TEST_F(GraphTracerE2ETest, DemuxGraphLog) {
}

// Read a GraphProfile from a file path.
void ReadGraphProfile(const std::string& path, GraphProfile* profile) {
::mediapipe::Status ReadGraphProfile(const std::string& path,
                                     GraphProfile* profile) {
  std::ifstream ifs;
  ifs.open(path);
  proto_ns::io::IstreamInputStream in_stream(&ifs);
  profile->ParseFromZeroCopyStream(&in_stream);
  return ifs.is_open() ? ::mediapipe::OkStatus()
                       : ::mediapipe::UnavailableError("Cannot open");
}

TEST_F(GraphTracerE2ETest, DemuxGraphLogFile) {
@@ -928,7 +930,8 @@ TEST_F(GraphTracerE2ETest, DemuxGraphLogFile) {
  graph_config_.mutable_profiler_config()->set_trace_log_interval_usec(-1);
  RunDemuxInFlightGraph();
  GraphProfile profile;
  ReadGraphProfile(absl::StrCat(log_path, 0, ".binarypb"), &profile);
  MEDIAPIPE_EXPECT_OK(
      ReadGraphProfile(absl::StrCat(log_path, 0, ".binarypb"), &profile));
  EXPECT_EQ(89, profile.graph_trace(0).calculator_trace().size());
}

@@ -937,22 +940,24 @@ TEST_F(GraphTracerE2ETest, DemuxGraphLogFiles) {
  SetUpDemuxInFlightGraph();
  graph_config_.mutable_profiler_config()->set_trace_log_path(log_path);
  graph_config_.mutable_profiler_config()->set_trace_log_count(100);
  graph_config_.mutable_profiler_config()->set_trace_log_interval_count(10);
  graph_config_.mutable_profiler_config()->set_trace_log_interval_count(5);
  graph_config_.mutable_profiler_config()->set_trace_log_interval_usec(2500);
  RunDemuxInFlightGraph();
  std::vector<int> event_counts;
  std::vector<GraphProfile> graph_profiles;
  for (int i = 0; i < 7; ++i) {
    GraphProfile profile;
    ReadGraphProfile(absl::StrCat(log_path, i, ".binarypb"), &profile);
    int count = 0;
    for (auto trace : *profile.mutable_graph_trace()) {
      count += trace.calculator_trace().size();
    std::string log_file_name = absl::StrCat(log_path, i, ".binarypb");
    if (ReadGraphProfile(log_file_name, &profile).ok()) {
      int count = 0;
      for (auto trace : *profile.mutable_graph_trace()) {
        count += trace.calculator_trace().size();
      }
      event_counts.push_back(count);
      graph_profiles.push_back(profile);
    }
    event_counts.push_back(count);
    graph_profiles.push_back(profile);
  }
  std::vector<int> expected = {37, 42, 19, 0, 0, 0, 0};
  std::vector<int> expected = {37, 52, 9};
  EXPECT_EQ(event_counts, expected);
  GraphProfile& profile_2 = graph_profiles[2];
  profile_2.clear_calculator_profiles();
@@ -981,179 +986,6 @@ TEST_F(GraphTracerE2ETest, DemuxGraphLogFiles) {
  stream_name: "output_packets_0"
  stream_name: "finish_indicator"
  stream_name: "output_1"
}
graph_trace {
  base_time: 1544086800000000
  base_timestamp: 0
  stream_name: ""
  stream_name: "input_packets_0"
  stream_name: "input_0_sampled"
  stream_name: "input_0"
  stream_name: "input_1"
  stream_name: "output_0"
  stream_name: "output_packets_0"
  stream_name: "finish_indicator"
  stream_name: "output_1"
}
graph_trace {
  base_time: 1544086800000000
  base_timestamp: 0
  stream_name: ""
  stream_name: "input_packets_0"
  stream_name: "input_0_sampled"
  stream_name: "input_0"
  stream_name: "input_1"
  stream_name: "output_0"
  stream_name: "output_packets_0"
  stream_name: "finish_indicator"
  stream_name: "output_1"
}
graph_trace {
  base_time: 1544086800000000
  base_timestamp: 0
  stream_name: ""
  stream_name: "input_packets_0"
  stream_name: "input_0_sampled"
  stream_name: "input_0"
  stream_name: "input_1"
  stream_name: "output_0"
  stream_name: "output_packets_0"
  stream_name: "finish_indicator"
  stream_name: "output_1"
}
graph_trace {
  base_time: 1544086800000000
  base_timestamp: 0
  stream_name: ""
  stream_name: "input_packets_0"
  stream_name: "input_0_sampled"
  stream_name: "input_0"
  stream_name: "input_1"
  stream_name: "output_0"
  stream_name: "output_packets_0"
  stream_name: "finish_indicator"
  stream_name: "output_1"
}
graph_trace {
  base_time: 1544086800000000
  base_timestamp: 0
  stream_name: ""
  stream_name: "input_packets_0"
  stream_name: "input_0_sampled"
  stream_name: "input_0"
  stream_name: "input_1"
  stream_name: "output_0"
  stream_name: "output_packets_0"
  stream_name: "finish_indicator"
  stream_name: "output_1"
}
graph_trace {
  base_time: 1544086800000000
  base_timestamp: 0
  stream_name: ""
  stream_name: "input_packets_0"
  stream_name: "input_0_sampled"
  stream_name: "input_0"
  stream_name: "input_1"
  stream_name: "output_0"
  stream_name: "output_packets_0"
  stream_name: "finish_indicator"
  stream_name: "output_1"
  calculator_trace {
    node_id: 3
    input_timestamp: 50000
    event_type: PROCESS
    finish_time: 65004
    output_trace { packet_timestamp: 50000 stream_id: 5 }
  }
  calculator_trace {
    node_id: 5
    event_type: READY_FOR_PROCESS
    start_time: 65004
  }
  calculator_trace {
    node_id: 3
    event_type: READY_FOR_CLOSE
    start_time: 65004
  }
  calculator_trace {
    node_id: 5
    input_timestamp: 50000
    event_type: PROCESS
    start_time: 65004
    finish_time: 65004
    input_trace {
      start_time: 65004
      finish_time: 65004
      packet_timestamp: 50000
      stream_id: 5
    }
    output_trace { packet_timestamp: 50000 stream_id: 6 }
    output_trace { packet_timestamp: 50000 stream_id: 7 }
  }
  calculator_trace {
    node_id: 1
    event_type: READY_FOR_PROCESS
    start_time: 65004
  }
  calculator_trace {
    node_id: 5
    event_type: NOT_READY
    start_time: 65004
  }
  calculator_trace {
    node_id: 5
    event_type: READY_FOR_PROCESS
    start_time: 65004
  }
  calculator_trace {
    node_id: 5
    event_type: NOT_READY
    start_time: 65004
  }
  calculator_trace {
    node_id: 1
    input_timestamp: 50000
    event_type: PROCESS
    start_time: 65004
    input_trace {
      start_time: 65004
      finish_time: 65004
      packet_timestamp: 50000
      stream_id: 7
    }
  }
  calculator_trace {
    node_id: 1
    event_type: NOT_READY
    start_time: 65004
  }
}
graph_trace {
  base_time: 1544086800000000
  base_timestamp: 0
  stream_name: ""
  stream_name: "input_packets_0"
  stream_name: "input_0_sampled"
  stream_name: "input_0"
  stream_name: "input_1"
  stream_name: "output_0"
  stream_name: "output_packets_0"
  stream_name: "finish_indicator"
  stream_name: "output_1"
}
graph_trace {
  base_time: 1544086800000000
  base_timestamp: 0
  stream_name: ""
  stream_name: "input_packets_0"
  stream_name: "input_0_sampled"
  stream_name: "input_0"
  stream_name: "input_1"
  stream_name: "output_0"
  stream_name: "output_packets_0"
  stream_name: "finish_indicator"
  stream_name: "output_1"
  calculator_trace {
    node_id: 4
    input_timestamp: 40000
@@ -1288,7 +1120,7 @@ TEST_F(GraphTracerE2ETest, DemuxGraphLogFiles) {
    num_histogram_intervals: 100
    trace_log_count: 100
    trace_log_interval_usec: 2500
    trace_log_interval_count: 10
    trace_log_interval_count: 5
    trace_enabled: true
  }
}
@@ -1428,5 +1260,30 @@ TEST_F(GraphTracerE2ETest, GpuTaskTrace) {
      )")));
}

// Show that trace_enabled activates the GlContextProfiler.
TEST_F(GraphTracerE2ETest, GpuTracing) {
  CHECK(proto_ns::TextFormat::ParseFromString(R"(
    input_stream: "input_buffer"
    input_stream: "render_data"
    output_stream: "annotated_buffer"
    node {
      calculator: "AnnotationOverlayCalculator"
      input_stream: "INPUT_FRAME:input_buffer"
      input_stream: "render_data"
      output_stream: "OUTPUT_FRAME:annotated_buffer"
    }
    profiler_config {
      trace_enabled: true
    }
  )",
                                              &graph_config_));

  // Create the CalculatorGraph with only trace_enabled set.
  MEDIAPIPE_ASSERT_OK(graph_.Initialize(graph_config_, {}));
  // Check that GPU profiling is enabled without running the graph.
  // This graph with GlFlatColorCalculator cannot run on desktop.
  EXPECT_NE(nullptr, graph_.profiler()->CreateGlProfilingHelper());
}

}  // namespace
}  // namespace mediapipe
@@ -766,6 +766,16 @@ proto_library(
    deps = ["//mediapipe/framework:calculator_proto"],
)

mediapipe_cc_proto_library(
    name = "copy_calculator_cc_proto",
    srcs = ["copy_calculator.proto"],
    cc_deps = [
        "//mediapipe/framework:calculator_cc_proto",
    ],
    visibility = ["//visibility:public"],
    deps = [":copy_calculator_proto"],
)

objc_library(
    name = "metal_copy_calculator",
    srcs = ["MetalCopyCalculator.mm"],
@@ -852,19 +862,6 @@ objc_library(
    alwayslink = 1,
)

### Tests

cc_library(
    name = "gpu_test_base",
    testonly = 1,
    hdrs = ["gpu_test_base.h"],
    deps = [
        ":gl_calculator_helper",
        ":gpu_shared_data_internal",
        "//testing/base/public:gunit_for_library_testonly",
    ],
)

MIN_IOS_VERSION = "9.0"  # For thread_local.

test_suite(
@@ -0,0 +1,295 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.mediapipe.components;

import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.AudioTimestamp;
import android.media.MediaRecorder.AudioSource;
import android.os.Build.VERSION;
import android.os.Build.VERSION_CODES;
import android.util.Log;
import javax.annotation.Nullable;

/** Provides access to audio data from a microphone. */
public class MicrophoneHelper {
  /** The listener is called when audio data from the microphone is available. */
  public interface OnAudioDataAvailableListener {
    public void onAudioDataAvailable(byte[] audioData, long timestampMicros);
  }

  private static final String TAG = "MicrophoneHelper";

  private static final int AUDIO_ENCODING = AudioFormat.ENCODING_PCM_16BIT;
  private static final int AUDIO_SOURCE = AudioSource.MIC;

  // A small constant-valued multiplier for setting bufferSize. This is useful
  // to reduce buffer overflows when a lot of data needs to be read at a high
  // sample rate from the audio stream. Note that it is desirable to keep this
  // multiplier small, because very large buffer sizes can slow down blocking
  // calls to AudioRecord.read(...) when the sample rate is low for instance.
  private static final int BUFFER_SIZE_MULTIPLIER = 2;

  // A small constant value to decide the number of seconds of audio data that
  // will be read in a single AudioRecord.read(...) call when
  // AudioRecord.minBufferSize(...) is unavailable. Smaller values for this
  // constant favor faster blocking calls to AudioRecord.read(...).
  private static final int MAX_READ_INTERVAL_SEC = 1;

  // This class uses AudioFormat.ENCODING_PCM_16BIT, i.e. 16 bits per single-channel sample.
  private static final int BYTES_PER_MONO_SAMPLE = 2;

  private static final long UNINITIALIZED_TIMESTAMP = -1;
  private static final long NANOS_PER_MICROS = 1000;
  private static final long MICROS_PER_SECOND = 1000000;

  // Number of audio samples recorded per second.
  private final int sampleRateInHz;
  // Channel configuration of the audio source, one of AudioFormat.CHANNEL_IN_MONO or
  // AudioFormat.CHANNEL_IN_STEREO.
  private final int channelConfig;
  // Data storage allocated to record audio samples in a single function call to AudioRecord.read().
  private final int bufferSize;
  // Bytes used per sample, accounts for number of channels of audio source. Possible values are 2
  // bytes for a 1-channel sample and 4 bytes for a 2-channel sample.
  private final int bytesPerSample;

  private byte[] audioData;

  // Timestamp provided by the AudioTimestamp object.
  private AudioTimestamp audioTimestamp;
  // Initial timestamp base. Can be set by the client so that all timestamps calculated using the
  // number of samples read per AudioRecord.read() function call start from this timestamp.
  private long initialTimestamp = UNINITIALIZED_TIMESTAMP;
  // The total number of samples read from multiple calls to AudioRecord.read(). This is reset to
  // zero for every startMicrophone() call.
  private long totalNumSamplesRead;

  // AudioRecord is used to set up a way to record data from the audio source. See
  // https://developer.android.com/reference/android/media/AudioRecord.html for details.
  private AudioRecord audioRecord;
  // Data is read on a separate thread so that blocking AudioRecord.read() calls do not stall the
  // caller.
  private Thread recordingThread;

  // This flag determines if audio will be read from the audio source and if the data read will be
  // sent to the listener of this class.
  private boolean recording = false;

  // This listener is provided with the data read on every AudioRecord.read() call. If the listener
  // called stopMicrophone() while a call to AudioRecord.read() was blocked, the class will discard
  // the data read after recording stopped.
  private OnAudioDataAvailableListener onAudioDataAvailableListener;

  /**
   * MicrophoneHelper class constructor. Arguments:
   *
   * @param sampleRateInHz Number of samples per second to be read from the audio stream.
   * @param channelConfig Configuration of audio channels. See
   *     https://developer.android.com/reference/android/media/AudioRecord.html#public-constructors_1.
   */
  public MicrophoneHelper(int sampleRateInHz, int channelConfig) {
    this.sampleRateInHz = sampleRateInHz;
    this.channelConfig = channelConfig;

    // Number of channels of audio source, depending on channelConfig.
    final int channelCount = channelConfig == AudioFormat.CHANNEL_IN_STEREO ? 2 : 1;

    bytesPerSample = BYTES_PER_MONO_SAMPLE * channelCount;

    // The minimum buffer size required by AudioRecord.
    final int minBufferSize =
        AudioRecord.getMinBufferSize(
            sampleRateInHz, channelConfig, /*audioFormat=*/ AUDIO_ENCODING);

    // Set bufferSize. If the minimum buffer size permitted by the hardware is
    // unavailable, use the sampleRateInHz value as the number of bytes.
    // This is arguably better than another arbitrary constant because a higher
    // value of sampleRateInHz implies the need for reading large chunks of data
    // from the audio stream in each AudioRecord.read(...) call.
    if (minBufferSize == AudioRecord.ERROR || minBufferSize == AudioRecord.ERROR_BAD_VALUE) {
      Log.e(TAG, "AudioRecord minBufferSize unavailable.");
      bufferSize = sampleRateInHz * MAX_READ_INTERVAL_SEC * bytesPerSample * BUFFER_SIZE_MULTIPLIER;
    } else {
      bufferSize = minBufferSize * BUFFER_SIZE_MULTIPLIER;
    }
  }

  private void setupAudioRecord() {
    audioData = new byte[bufferSize];

    Log.d(TAG, "AudioRecord(" + sampleRateInHz + ", " + bufferSize + ")");
    audioRecord =
        new AudioRecord.Builder()
            .setAudioSource(AUDIO_SOURCE)
            .setAudioFormat(
                new AudioFormat.Builder()
                    .setEncoding(AUDIO_ENCODING)
                    .setSampleRate(sampleRateInHz)
                    .setChannelMask(channelConfig)
                    .build())
            .setBufferSizeInBytes(bufferSize)
            .build();

    if (audioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
      audioRecord.release();
      Log.e(TAG, "AudioRecord could not open.");
      return;
    }

    recordingThread =
        new Thread(
            () -> {
              android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_AUDIO);
              Log.v(TAG, "Running audio recording thread.");

              // Initial timestamp in case the AudioRecord.getTimestamp() function is unavailable.
              long startTimestamp = initialTimestamp != UNINITIALIZED_TIMESTAMP
                  ? initialTimestamp
                  : System.nanoTime() / NANOS_PER_MICROS;
              long sampleBasedTimestamp;
              while (recording) {
                if (audioRecord == null) {
                  break;
                }
                final int numBytesRead =
                    audioRecord.read(audioData, /*offsetInBytes=*/ 0, /*sizeInBytes=*/ bufferSize);
                // If AudioRecord.getTimestamp() is unavailable, calculate the timestamp using the
                // number of samples read in the call to AudioRecord.read().
                long sampleBasedFallbackTimestamp =
                    startTimestamp + totalNumSamplesRead * MICROS_PER_SECOND / sampleRateInHz;
                sampleBasedTimestamp =
                    getTimestamp(/*fallbackTimestamp=*/sampleBasedFallbackTimestamp);
                if (numBytesRead <= 0) {
                  if (numBytesRead == AudioRecord.ERROR_INVALID_OPERATION) {
                    Log.e(TAG, "ERROR_INVALID_OPERATION");
                  } else if (numBytesRead == AudioRecord.ERROR_BAD_VALUE) {
                    Log.e(TAG, "ERROR_BAD_VALUE");
                  }
                  continue;
                }
                Log.v(TAG, "Read " + numBytesRead + " bytes of audio data.");

                // Confirm that the listener is still interested in receiving audio data and
                // stopMicrophone() wasn't called. If the listener called stopMicrophone(), discard
                // the data read in the latest AudioRecord.read(...) function call.
                if (recording) {
                  onAudioDataAvailableListener.onAudioDataAvailable(
                      audioData.clone(), sampleBasedTimestamp);
                }

                // TODO: Replace byte[] with short[] audioData.
                // It is expected that audioRecord.read() will read full samples and therefore
                // numBytesRead is expected to be a multiple of bytesPerSample.
                int numSamplesRead = numBytesRead / bytesPerSample;
                totalNumSamplesRead += numSamplesRead;
              }
            });
  }

  // If AudioRecord.getTimestamp() is available and returns without error, this function returns
  // the timestamp using AudioRecord.getTimestamp(). If the function is unavailable, it returns a
  // fallbackTimestamp provided as an argument to this method.
  private long getTimestamp(long fallbackTimestamp) {
    // AudioRecord.getTimestamp is only available at API Level 24 and above.
    // https://developer.android.com/reference/android/media/AudioRecord.html#getTimestamp(android.media.AudioTimestamp,%20int).
    if (VERSION.SDK_INT >= VERSION_CODES.N) {
      if (audioTimestamp == null) {
        audioTimestamp = new AudioTimestamp();
      }
      int status = audioRecord.getTimestamp(audioTimestamp, AudioTimestamp.TIMEBASE_MONOTONIC);
      if (status == AudioRecord.SUCCESS) {
        return audioTimestamp.nanoTime / NANOS_PER_MICROS;
      } else {
        Log.e(TAG, "audioRecord.getTimestamp failed with status: " + status);
      }
    }
    return fallbackTimestamp;
  }

  // Returns the buffer size read by this class per AudioRecord.read() call.
  public int getBufferSize() {
    return bufferSize;
  }

  /**
   * Overrides the use of system time as the source of timestamps for audio packets. Not
   * recommended. Provided to maintain compatibility with existing usage by CameraRecorder.
   */
  public void setInitialTimestamp(long initialTimestamp) {
    this.initialTimestamp = initialTimestamp;
  }

  // This method sets up a new AudioRecord object for reading audio data from the microphone. It
  // can be called multiple times to restart the recording if necessary.
  public void startMicrophone() {
    if (recording) {
      return;
    }

    setupAudioRecord();
    audioRecord.startRecording();
    if (audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
      Log.e(TAG, "AudioRecord couldn't start recording.");
      audioRecord.release();
      return;
    }

    recording = true;
    totalNumSamplesRead = 0;
    recordingThread.start();

    Log.d(TAG, "AudioRecord is recording audio.");
  }

  // Stops the AudioRecord object from reading data from the microphone and releases it.
  public void stopMicrophone() {
    stopMicrophoneWithoutCleanup();
    cleanup();
    Log.d(TAG, "AudioRecord stopped recording audio.");
  }

  // Stops the AudioRecord object from reading data from the microphone.
  public void stopMicrophoneWithoutCleanup() {
    if (!recording) {
      return;
    }

    recording = false;
    try {
      if (recordingThread != null) {
        recordingThread.join();
      }
    } catch (InterruptedException ie) {
      Log.e(TAG, "Exception: ", ie);
    }

    audioRecord.stop();
    if (audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_STOPPED) {
      Log.e(TAG, "AudioRecord.stop() didn't run properly.");
    }
  }

  // Releases the AudioRecord object when there is no ongoing recording.
  public void cleanup() {
    if (recording) {
      return;
    }
    audioRecord.release();
  }

  public void setOnAudioDataAvailableListener(@Nullable OnAudioDataAvailableListener listener) {
    onAudioDataAvailableListener = listener;
  }
}
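For context, a minimal usage sketch of the MicrophoneHelper class added above (not part of this commit): the `MicrophoneHelperDemo` class name and the 16 kHz mono configuration are illustrative assumptions, but the constructor, listener, and start/stop calls follow the API shown in the new file.

```java
import android.media.AudioFormat;
import android.util.Log;
import com.google.mediapipe.components.MicrophoneHelper;

/** Hypothetical demo class illustrating how a client might drive MicrophoneHelper. */
class MicrophoneHelperDemo {
  private static final String TAG = "MicrophoneHelperDemo";

  // 16 kHz mono is an illustrative configuration, not something this commit prescribes.
  private final MicrophoneHelper microphoneHelper =
      new MicrophoneHelper(/*sampleRateInHz=*/ 16000, AudioFormat.CHANNEL_IN_MONO);

  void start() {
    // The listener receives a copy of the PCM buffer and a timestamp in microseconds.
    microphoneHelper.setOnAudioDataAvailableListener(
        (byte[] audioData, long timestampMicros) ->
            Log.v(TAG, "Got " + audioData.length + " bytes at " + timestampMicros + "us"));
    microphoneHelper.startMicrophone();
  }

  void stop() {
    // Joins the recording thread and releases the underlying AudioRecord.
    microphoneHelper.stopMicrophone();
  }
}
```

A real app would also need the RECORD_AUDIO runtime permission before starting capture.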
@@ -79,8 +79,12 @@ bool AssetManager::FileExists(const std::string& filename) {
  AAssetDir* asset_dir =
      AAssetManager_openDir(asset_manager_, filename.c_str());
  if (asset_dir != nullptr) {
    // openDir always succeeds, so check if there are files in it. This won't
    // work if it's empty, but an empty assets manager directory is essentially
    // unusable (i.e. not considered a valid path).
    bool dir_exists = AAssetDir_getNextFileName(asset_dir) != nullptr;
    AAssetDir_close(asset_dir);
    return true;
    return dir_exists;
  }

  return false;