
Project import generated by Copybara.

PiperOrigin-RevId: 255500495
MediaPipe Team authored and jqtang committed Jun 27, 2019
1 parent 2aaf469 commit 56cdbaf140b24e22d1554a6956532fa148b5d795
@@ -14,7 +14,7 @@ See mobile and desktop [examples](mediapipe/docs/examples.md).
## Documentation
[MediaPipe Read-the-Docs](https://mediapipe.readthedocs.io/) or [docs.mediapipe.dev](https://docs.mediapipe.dev)

Check out the [Examples page] for tutorials on how to use MediaPipe. [Concepts page](https://mediapipe.readthedocs.io/en/latest/concepts.html) for basic definitions
Check out the [Examples page](https://mediapipe.readthedocs.io/en/latest/examples.html) for tutorials on how to use MediaPipe, and the [Concepts page](https://mediapipe.readthedocs.io/en/latest/concepts.html) for basic definitions.

## Visualizing MediaPipe graphs
A web-based visualizer is hosted on [viz.mediapipe.dev](https://viz.mediapipe.dev/). Please also see instructions [here](mediapipe/docs/visualizer.md).
@@ -115,36 +115,50 @@ http_archive(
    build_file = "@//third_party:opencv_android.BUILD",
    strip_prefix = "OpenCV-android-sdk",
    type = "zip",
    url = "https://sourceforge.net/projects/opencvlibrary/files/4.0.1/opencv-4.0.1-android-sdk.zip/download"
    urls = [
        "https://sourceforge.net/projects/opencvlibrary/files/4.0.1/opencv-4.0.1-android-sdk.zip/download",
        "https://github.com/opencv/opencv/releases/download/4.1.0/opencv-4.1.0-android-sdk.zip",
    ],
)
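A note on the urls list above: Bazel tries the entries in order, so the GitHub release serves as a fallback when the SourceForge download fails. The two entries reference different SDK releases (4.0.1 and 4.1.0), so the archive actually used depends on which URL succeeds first.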

# Google Maven Repository
GMAVEN_TAG = "20181212-2"
RULES_JVM_EXTERNAL_TAG = "2.2"
RULES_JVM_EXTERNAL_SHA = "f1203ce04e232ab6fdd81897cf0ff76f2c04c0741424d192f28e65ae752ce2d6"

http_archive(
    name = "gmaven_rules",
    strip_prefix = "gmaven_rules-%s" % GMAVEN_TAG,
    url = "https://github.com/bazelbuild/gmaven_rules/archive/%s.tar.gz" % GMAVEN_TAG,
    name = "rules_jvm_external",
    strip_prefix = "rules_jvm_external-%s" % RULES_JVM_EXTERNAL_TAG,
    sha256 = RULES_JVM_EXTERNAL_SHA,
    url = "https://github.com/bazelbuild/rules_jvm_external/archive/%s.zip" % RULES_JVM_EXTERNAL_TAG,
)

load("@gmaven_rules//:gmaven.bzl", "gmaven_rules")
load("@rules_jvm_external//:defs.bzl", "maven_install")

gmaven_rules()
maven_install(
    artifacts = [
        "com.android.support.constraint:constraint-layout:aar:1.0.2",
        "androidx.appcompat:appcompat:aar:1.0.2",
    ],
    repositories = [
        "https://dl.google.com/dl/android/maven2",
    ],
)
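With rules_jvm_external, the maven_install call above fetches the listed artifacts and exposes them through a single external repository (named @maven by default, with target names derived from the Maven coordinates). This replaces the per-artifact maven_server/maven_jar declarations that follow in the old version of this file.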

maven_server(
    name = "google_server",
    url = "http://maven.google.com",
    url = "https://dl.google.com/dl/android/maven2",
)

maven_jar(
    name = "androidx_lifecycle",
    artifact = "androidx.lifecycle:lifecycle-common:2.0.0",
    sha1 = "e070ffae07452331bc5684734fce6831d531785c",
    server = "google_server",
)

maven_jar(
    name = "androidx_concurrent_futures",
    artifact = "androidx.concurrent:concurrent-futures:1.0.0-alpha03",
    sha1 = "b528df95c7e2fefa2210c0c742bf3e491c1818ae",
    server = "google_server",
)

@@ -169,6 +183,7 @@ maven_jar(
maven_jar(
    name = "com_google_code_findbugs",
    artifact = "com.google.code.findbugs:jsr305:3.0.2",
    sha1 = "25ea2e8b0c338a877313bd4672d3fe056ea78f0d",
)

# You may run setup_android.sh to install Android SDK and NDK.
@@ -216,6 +216,9 @@ ::mediapipe::Status RecolorCalculator::RenderGpu(CalculatorContext* cc) {

  GlRender();

  // Unbind the extra texture units so later GL code starts from a clean state.
  glActiveTexture(GL_TEXTURE2);
  glBindTexture(GL_TEXTURE_2D, 0);
  glActiveTexture(GL_TEXTURE1);
  glBindTexture(GL_TEXTURE_2D, 0);
  glFlush();
}
@@ -321,13 +321,20 @@ ::mediapipe::Status SetAlphaCalculator::RenderGpu(CalculatorContext* cc) {
    glActiveTexture(GL_TEXTURE2);
    glBindTexture(GL_TEXTURE_2D, alpha_texture.name());
    GlRender(cc);  // use channel 0 of mask
    glActiveTexture(GL_TEXTURE2);
    glBindTexture(GL_TEXTURE_2D, 0);
    glActiveTexture(GL_TEXTURE1);
    glBindTexture(GL_TEXTURE_2D, 0);
    alpha_texture.Release();
  } else {
    gpu_helper_.BindFramebuffer(output_texture);  // GL_TEXTURE0
    glActiveTexture(GL_TEXTURE1);
    glBindTexture(GL_TEXTURE_2D, input_texture.name());
    GlRender(cc);  // use value from options
    glActiveTexture(GL_TEXTURE1);
    glBindTexture(GL_TEXTURE_2D, 0);
  }
  glFlush();

// Send out image as GPU packet.
auto output_frame = output_texture.GetFrame<mediapipe::GpuBuffer>();
@@ -393,9 +400,6 @@ ::mediapipe::Status SetAlphaCalculator::GlRender(CalculatorContext* cc) {
  glDeleteVertexArrays(1, &vao);
  glDeleteBuffers(2, vbo);

  // execute command queue
  glBindTexture(GL_TEXTURE_2D, 0);
  glFlush();
#endif  // __ANDROID__

  return ::mediapipe::OkStatus();
@@ -506,8 +506,7 @@ ::mediapipe::Status TfLiteTensorsToDetectionsCalculator::GlSetup(
#if defined(__ANDROID__)
  // A shader to decode detection boxes. Note that "#version" is kept on the
  // first line of the raw string; some GLSL ES drivers require the version
  // directive to appear on the shader's first line.
  const std::string decode_src = absl::Substitute(
      R"(
#version 310 es
      R"( #version 310 es
layout(local_size_x = 1, local_size_y = 1, local_size_z = 1) in;
@@ -643,8 +642,7 @@ void main() {

  // A shader to score detection boxes.
  const std::string score_src = absl::Substitute(
      R"(
#version 310 es
      R"( #version 310 es
layout(local_size_x = 1, local_size_y = $0, local_size_z = 1) in;
@@ -301,6 +301,8 @@ ::mediapipe::Status TfLiteTensorsToSegmentationCalculator::ProcessGpu(
    glActiveTexture(GL_TEXTURE1);
    glBindTexture(GL_TEXTURE_2D, input_mask_texture.name());
    mask_program_with_prev_->Dispatch(workgroups);
    glActiveTexture(GL_TEXTURE1);
    glBindTexture(GL_TEXTURE_2D, 0);
  }
}

@@ -413,8 +415,7 @@ ::mediapipe::Status TfLiteTensorsToSegmentationCalculator::InitGpu(
  // Currently uses 4 channels for output, setting both the R and A channels
  // to the mask value.
  const std::string shader_src_template =
      R"(
#version 310 es
      R"( #version 310 es
layout(local_size_x = $0, local_size_y = $0, local_size_z = 1) in;
@@ -359,6 +359,9 @@ ::mediapipe::Status AnnotationOverlayCalculator::RenderToGpu(

  RETURN_IF_ERROR(GlRender(cc));

  glActiveTexture(GL_TEXTURE2);
  glBindTexture(GL_TEXTURE_2D, 0);
  glActiveTexture(GL_TEXTURE1);
  glBindTexture(GL_TEXTURE_2D, 0);
  glFlush();
}
@@ -25,7 +25,6 @@ package(default_visibility = ["//visibility:private"])
proto_library(
    name = "locus_proto",
    srcs = ["locus.proto"],
    visibility = ["//mediapipe:__subpackages__"],
    deps = ["//mediapipe/framework/formats/annotation:rasterization_proto"],
)

@@ -45,8 +45,8 @@ node: {
# resulting in potential letterboxing in the transformed image.
node: {
  calculator: "ImageTransformationCalculator"
  input_stream: "IMAGE_GPU:throttled_input_video"
  output_stream: "IMAGE_GPU:transformed_input_video"
  input_stream: "IMAGE:input_video_cpu"
  output_stream: "IMAGE:transformed_input_video_cpu"
  output_stream: "LETTERBOX_PADDING:letterbox_padding"
  node_options: {
    [type.googleapis.com/mediapipe.ImageTransformationCalculatorOptions] {
@@ -57,13 +57,6 @@ node: {
}
}

# Transfers the transformed input image from GPU to CPU memory.
node: {
  calculator: "GpuBufferToImageFrameCalculator"
  input_stream: "transformed_input_video"
  output_stream: "transformed_input_video_cpu"
}
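Reading the two hunks together: ImageTransformationCalculator now consumes and produces CPU image streams directly (IMAGE: instead of IMAGE_GPU:), so the explicit GpuBufferToImageFrameCalculator hop above is no longer needed; the GPU-to-CPU transfer presumably moves upstream, where input_video_cpu is produced.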

# Converts the transformed input image on CPU into an image tensor as a
# TfLiteTensor. The zero_center option is set to true to normalize the
# pixel values to [-1.f, 1.f] as opposed to [0.f, 1.f]. The flip_vertically
@@ -109,7 +109,7 @@ public void process(Packet packet) {

      mediapipeGraph.setParentGlContext(parentNativeContext);
    } catch (MediaPipeException e) {
      // TODO: Report this error from MediaPipe.
      Log.e(TAG, "Mediapipe error: ", e);
    }

    videoSurfaceOutput = mediapipeGraph.addSurfaceOutput(videoOutputStream);
@@ -199,7 +199,7 @@ public void close() {
    try {
      mediapipeGraph.tearDown();
    } catch (MediaPipeException e) {
      // TODO: Report this error from MediaPipe.
      Log.e(TAG, "Mediapipe error: ", e);
    }
  }
}
@@ -209,6 +209,7 @@ public void close() {
   *
   * <p>Normally the graph is initialized when the first frame arrives. You can optionally call
   * this method to initialize it ahead of time.
   *
   * @throws MediaPipeException for any error status.
   */
  public void preheat() {
    if (!started.getAndSet(true)) {
@@ -220,6 +221,10 @@ public void setOnWillAddFrameListener(@Nullable OnWillAddFrameListener addFrameL
    this.addFrameListener = addFrameListener;
  }
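A minimal usage sketch for preheat() above (hedged: the frameProcessor instance and the lifecycle hook are illustrative assumptions, not part of this diff):

// Hypothetical caller, e.g. in an Activity's onResume(): start the graph
// ahead of the first camera frame so frame one is not delayed by startup.
frameProcessor.preheat();
// Frames later delivered via onNewFrame(...) find the graph already running.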

  /**
   * Returns true if the MediaPipe graph can accept one more input frame.
   *
   * @throws MediaPipeException for any error status.
   */
  private boolean maybeAcceptNewFrame() {
    if (!started.getAndSet(true)) {
      startGraph();
@@ -254,7 +259,7 @@ public void onNewFrame(final TextureFrame frame) {
      mediapipeGraph.addConsumablePacketToInputStream(
          videoInputStream, imagePacket, frame.getTimestamp());
    } catch (MediaPipeException e) {
      // TODO: Report this error from MediaPipe.
      Log.e(TAG, "Mediapipe error: ", e);
    }
    imagePacket.release();
  }
@@ -281,7 +286,7 @@ public void onNewFrame(final Bitmap bitmap, long timestamp) {
      // packet, which may allow for more memory optimizations.
      mediapipeGraph.addConsumablePacketToInputStream(videoInputStreamCpu, packet, timestamp);
    } catch (MediaPipeException e) {
      // TODO: Report this error from MediaPipe.
      Log.e(TAG, "Mediapipe error: ", e);
    }
    packet.release();
  }
@@ -290,10 +295,14 @@ public void waitUntilIdle() {
    try {
      mediapipeGraph.waitUntilGraphIdle();
    } catch (MediaPipeException e) {
      // TODO: Report this error from MediaPipe.
      Log.e(TAG, "Mediapipe error: ", e);
    }
  }

  /**
   * Starts running the MediaPipe graph.
   *
   * @throws MediaPipeException for any error status.
   */
  private void startGraph() {
    mediapipeGraph.startRunningGraph();
  }
@@ -128,6 +128,7 @@ public synchronized CalculatorGraphConfig getCalculatorGraphConfig() {
   *
   * @param streamName The output stream name in the graph for callback.
   * @param callback The callback for handling the call when output stream gets a {@link Packet}.
   * @throws MediaPipeException for any error status.
   */
  public synchronized void addPacketCallback(String streamName, PacketCallback callback) {
    Preconditions.checkState(
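A usage sketch for addPacketCallback (a sketch only: the graph instance and the "output_video" stream name are assumptions; PacketCallback.process(Packet) matches the interface seen earlier in this diff):

// Register a callback invoked whenever the graph emits a packet on the
// (hypothetical) "output_video" stream.
graph.addPacketCallback(
    "output_video",
    new PacketCallback() {
      @Override
      public void process(Packet packet) {
        // Handle the packet; copy out any data needed beyond this callback.
        Log.d(TAG, "Output packet at timestamp " + packet.getTimestamp());
      }
    });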
@@ -145,6 +146,7 @@ public synchronized void addPacketCallback(String streamName, PacketCallback cal
   * @param streamName The output stream name in the graph for callback.
   * @param callback The callback for handling the call when output stream gets a {@link Packet}
   *     and has a stream header.
   * @throws MediaPipeException for any error status.
   */
  public synchronized void addPacketWithHeaderCallback(
      String streamName, PacketWithHeaderCallback callback) {
@@ -251,6 +253,7 @@ public synchronized void setStreamHeader(
   * Runs the mediapipe graph until it finishes.
   *
   * <p>Side packets that are needed by the graph should be set using {@link #setInputSidePackets}.
   *
   * @throws MediaPipeException for any error status.
   */
  public synchronized void runGraphUntilClose() {
    Preconditions.checkState(
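A sketch of a complete run (assumptions: setInputSidePackets takes a map from names to packets, and the packetCreator helper, createString call, and side-packet name are illustrative):

// Provide the side packets the graph needs, then block until it finishes.
Map<String, Packet> sidePackets = new HashMap<>();
sidePackets.put("model_path", packetCreator.createString("model.tflite"));
graph.setInputSidePackets(sidePackets);
graph.runGraphUntilClose();  // throws MediaPipeException on any error status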
@@ -268,6 +271,7 @@ public synchronized void runGraphUntilClose() {
   * <p>Returns immediately after starting the scheduler.
   *
   * <p>Side packets that are needed by the graph should be set using {@link #setInputSidePackets}.
   *
   * @throws MediaPipeException for any error status.
   */
  public synchronized void startRunningGraph() {
    Preconditions.checkState(
@@ -319,6 +323,7 @@ public synchronized void setGraphInputStreamBlockingMode(boolean mode) {
   * @param packet the mediapipe packet.
   * @param timestamp the timestamp of the packet; although not enforced, the unit is normally
   *     microseconds.
   * @throws MediaPipeException for any error status.
   */
  public synchronized void addPacketToInputStream(
      String streamName, Packet packet, long timestamp) {
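A sketch of the timestamp convention (assumptions: a nanosecond source clock, and the stream and packet names are illustrative):

// Timestamps are conventionally microseconds; convert from nanoseconds.
long timestampUs = frameTimeNanos / 1000;
graph.addPacketToInputStream("input_video", imagePacket, timestampUs);
imagePacket.release();  // release the caller's reference once the graph has the packet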
@@ -343,6 +348,7 @@ public synchronized void addPacketToInputStream(
   * @param packet the mediapipe packet.
   * @param timestamp the timestamp of the packet; although not enforced, the unit is normally
   *     microseconds.
   * @throws MediaPipeException for any error status.
   */
  public synchronized void addConsumablePacketToInputStream(
      String streamName, Packet packet, long timestamp) {
@@ -363,20 +369,30 @@ public synchronized void addConsumablePacketToInputStream(
    }
  }

  /** Closes the specified input stream. */
  /**
   * Closes the specified input stream.
   *
   * @throws MediaPipeException for any error status.
   */
  public synchronized void closeInputStream(String streamName) {
    Preconditions.checkState(
        nativeGraphHandle != 0, "Invalid context, tearDown() might have been called.");
    nativeCloseInputStream(nativeGraphHandle, streamName);
  }

  /** Closes all the input streams in the mediapipe graph. */
  /**
   * Closes all the input streams in the mediapipe graph.
   *
   * @throws MediaPipeException for any error status.
   */
  public synchronized void closeAllInputStreams() {
    Preconditions.checkState(
        nativeGraphHandle != 0, "Invalid context, tearDown() might have been called.");
    nativeCloseAllInputStreams(nativeGraphHandle);
  }

  /**
   * Closes all the input streams and source calculators in the mediapipe graph.
   *
   * @throws MediaPipeException for any error status.
   */
  public synchronized void closeAllPacketSources() {
    Preconditions.checkState(
        nativeGraphHandle != 0, "Invalid context, tearDown() might have been called.");
@@ -387,14 +403,18 @@ public synchronized void closeAllPacketSources() {
   * Waits until the graph is done processing.
   *
   * <p>This should be called after all sources and input streams are closed.
   *
   * @throws MediaPipeException for any error status.
   */
  public synchronized void waitUntilGraphDone() {
    Preconditions.checkState(
        nativeGraphHandle != 0, "Invalid context, tearDown() might have been called.");
    nativeWaitUntilGraphDone(nativeGraphHandle);
  }

  /** Waits until the graph runner is idle. */
  /**
   * Waits until the graph runner is idle.
   *
   * @throws MediaPipeException for any error status.
   */
  public synchronized void waitUntilGraphIdle() {
    Preconditions.checkState(
        nativeGraphHandle != 0, "Invalid context, tearDown() might have been called.");
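Taken together, the contracts above suggest a shutdown sequence along these lines (a sketch; error handling omitted):

graph.closeAllPacketSources();  // no further packets will be added
graph.waitUntilGraphDone();     // blocks until all calculators finish
graph.tearDown();               // frees the native graph handle checked above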
@@ -455,6 +475,7 @@ public synchronized void updatePacketReference(Packet referencePacket, Packet ne
   * OpenGL. This runner should be connected to the calculators by specifying an input side packet
   * in the graph file with the same name.
   *
   * @throws MediaPipeException for any error status.
   * @deprecated Call {@link #setParentGlContext} to set up texture sharing between contexts. Apart
   *     from that, GL is set up automatically.
   */
@@ -471,6 +492,7 @@ public synchronized void createGlRunner(String name, long javaGlContext) {
   * enable the sharing of textures and other objects between the two contexts.
   *
   * <p>Cannot be called after the graph has been started.
   *
   * @throws MediaPipeException for any error status.
   */
  public synchronized void setParentGlContext(long javaGlContext) {
    Preconditions.checkState(
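A sketch of context sharing via setParentGlContext (hedged: obtaining the native EGL handle is platform-specific, and eglManager is a hypothetical helper):

// Must be called before the graph starts; enables texture sharing between
// the app's GL context and the graph's context.
long parentContext = eglManager.getNativeContext();
graph.setParentGlContext(parentContext);
graph.startRunningGraph();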
