Commit

Merge pull request #1 from PaddlePaddle/develop
pull
kk12333 committed Jun 1, 2018
2 parents 4f9e029 + 2a345a2 commit 6ad09ae
Showing 76 changed files with 1,619 additions and 928 deletions.
3 changes: 2 additions & 1 deletion CMakeLists.txt
@@ -1,6 +1,7 @@
cmake_minimum_required(VERSION 3.0)
project(paddle-mobile)
add_definitions(-DPADDLE_MOBILE_DEBUG="true")
add_definitions(-DPADDLE_MOBILE_DEBUG)
add_definitions(-DENABLE_EXCEPTION)

set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11")
set(CMAKE_BUILD_TYPE RelWithDebInfo)
14 changes: 14 additions & 0 deletions scripts/push2android.sh
@@ -0,0 +1,14 @@
#!/usr/bin/env sh

push_fn () {
MODELS_PATH="../test/models/*"
EXE_FILE="../test/build/*"
EXE_DIR="data/local/tmp/bin"
MODELS_DIR="data/local/tmp/models"
LIB_PATH="../build/release/arm-v7a/build/*"
adb push ${EXE_FILE} ${EXE_DIR}
adb push ${LIB_PATH} ${EXE_DIR}
adb push ${MODELS_PATH} ${MODELS_DIR}
echo "test files sync completed"
}
push_fn
6 changes: 3 additions & 3 deletions src/common/enforce.h
@@ -14,7 +14,7 @@ limitations under the License. */

#pragma once

#ifdef PADDLE_MOBILE_DEBUG
#ifdef ENABLE_EXCEPTION
#include <stdio.h>
#include <exception>
#include <sstream>
@@ -25,7 +25,7 @@ limitations under the License. */

namespace paddle_mobile {

#ifdef PADDLE_MOBILE_DEBUG
#ifdef ENABLE_EXCEPTION
struct PaddleMobileException : public std::exception {
const std::string exception_prefix = "paddle mobile C++ Exception: \n";
std::string message;
@@ -64,7 +64,7 @@ struct PaddleMobileException : public std::exception {
}
#else
#define PADDLE_MOBILE_THROW_EXCEPTION(...)
#define PADDLE_MOBILE_ASSERT(stat, ...)
#define PADDLE_MOBILE_ENFORCE(stat, ...)
#endif

} // namespace paddle_mobile
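The hunk above switches enforce.h from PADDLE_MOBILE_DEBUG to the new ENABLE_EXCEPTION flag, so the exception and enforce macros can be turned on independently of debug logging. A minimal usage sketch, assuming the variadic macro forwards its trailing arguments printf-style (the function and message below are illustrative, not part of this commit):

#include "common/enforce.h"

void CheckSameDim(int expected, int actual) {
  // With ENABLE_EXCEPTION defined, a failed check throws PaddleMobileException;
  // without it, PADDLE_MOBILE_ENFORCE expands to nothing and the check vanishes.
  PADDLE_MOBILE_ENFORCE(expected == actual,
                        "dim mismatch: expected %d, got %d", expected, actual);
}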
49 changes: 49 additions & 0 deletions src/common/types.h
@@ -14,6 +14,10 @@ limitations under the License. */

#pragma once;

#include <string>
#include <unordered_map>
#include <utility>

namespace paddle_mobile {
enum class Precision : int { FP32 = 0 };

@@ -67,4 +71,49 @@ enum PMStatus {
PMUnImplError = 0x07, /*!< Unimplement error. */
PMWrongDevice = 0x08 /*!< un-correct device. */
};

static const std::string G_OP_TYPE_CONV = "conv2d";
static const std::string G_OP_TYPE_BATCHNORM = "batch_norm";
static const std::string G_OP_TYPE_BOX_CODER = "box_coder";
static const std::string G_OP_TYPE_CONCAT = "concat";
static const std::string G_OP_TYPE_ELEMENTWISE_ADD = "elementwise_add";
static const std::string G_OP_TYPE_FUSION_CONV_ADD_RELU =
"fusion_conv_add_relu";
static const std::string G_OP_TYPE_FC = "fc";
static const std::string G_OP_TYPE_LRN = "lrn";
static const std::string G_OP_TYPE_MUL = "mul";
static const std::string G_OP_TYPE_MULTICLASS_NMS = "multiclass_nms";
static const std::string G_OP_TYPE_POOL2D = "pool2d";
static const std::string G_OP_TYPE_PRIOR_BOX = "prior_box";
static const std::string G_OP_TYPE_RELU = "relu";
static const std::string G_OP_TYPE_RESHAPE = "reshape";
static const std::string G_OP_TYPE_SIGMOID = "sigmoid";
static const std::string G_OP_TYPE_SOFTMAX = "softmax";
static const std::string G_OP_TYPE_TRANSPOSE = "transpose";
static const std::string G_OP_TYPE_SPLIT = "split";
static const std::string G_OP_TYPE_FEED = "feed";
static const std::string G_OP_TYPE_FETCH = "fetch";
static const std::string G_OP_TYPE_DEPTHWISE_CONV = "depthwise_conv2d";

static std::unordered_map<
std::string, std::pair<std::vector<std::string>, std::vector<std::string>>>
op_input_output_key = {
{G_OP_TYPE_CONV, {{"Input"}, {"Output"}}},
{G_OP_TYPE_RELU, {{"X"}, {"Out"}}},
{G_OP_TYPE_SOFTMAX, {{"X"}, {"Out"}}},
{G_OP_TYPE_MUL, {{"X"}, {"Out"}}},
{G_OP_TYPE_ELEMENTWISE_ADD, {{"X", "Y"}, {"Out"}}},
{G_OP_TYPE_POOL2D, {{"X"}, {"Out"}}},
{G_OP_TYPE_BATCHNORM, {{"X"}, {"Y"}}},
{G_OP_TYPE_LRN, {{"X"}, {"Out"}}},
{G_OP_TYPE_CONCAT, {{"X"}, {"Out"}}},
{G_OP_TYPE_SPLIT, {{"X"}, {"Out"}}},
{G_OP_TYPE_FEED, {{"X"}, {"Out"}}},
{G_OP_TYPE_FETCH, {{"X"}, {"Out"}}},
{G_OP_TYPE_TRANSPOSE, {{"X"}, {"Out"}}},
{G_OP_TYPE_BOX_CODER,
{{"PriorBox", "PriorBoxVar", "TargetBox"}, {"OutputBox"}}},
{G_OP_TYPE_PRIOR_BOX, {{"Image", "Input"}, {"Boxes", "Variances"}}},
{G_OP_TYPE_MULTICLASS_NMS, {{"BBoxes", "Scores"}, {"Out"}}},
{G_OP_TYPE_RESHAPE, {{"X"}, {"Out"}}}};
} // namespace paddle_mobile
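The constants and the op_input_output_key table added to types.h centralize the mapping from an op type to the names of its input and output variables; the operator.cpp hunk further down consumes it in GetOutKeys. A small lookup sketch (the helper name and include path are illustrative):

#include <string>
#include <vector>
#include "common/types.h"

// Return the output keys registered for an op type, or an empty list if the
// type is missing from op_input_output_key (same behaviour as GetOutKeys below).
std::vector<std::string> OutputKeysFor(const std::string &op_type) {
  auto it = paddle_mobile::op_input_output_key.find(op_type);
  if (it == paddle_mobile::op_input_output_key.end()) {
    return {};
  }
  return it->second.second;  // pair.first = input keys, pair.second = output keys
}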
8 changes: 0 additions & 8 deletions src/framework/op_registry.h
@@ -90,14 +90,6 @@ class OpRegistry {
const std::string& type, const VariableNameMap& inputs,
const VariableNameMap& outputs, const AttributeMap attrs,
std::shared_ptr<paddle_mobile::framework::Scope> scope) {
LOG(paddle_mobile::kLOG_DEBUG1) << " type: " << type;
LOG(paddle_mobile::kLOG_DEBUG1) << " input size: " << inputs.size();
LOG(paddle_mobile::kLOG_DEBUG1) << " output size: " << outputs.size();
LOG(paddle_mobile::kLOG_DEBUG1) << " attr size: " << attrs.size();
LOG(paddle_mobile::kLOG_DEBUG1)
<< " OpInfoMap size: " << OpInfoMap<Dtype>::Instance()->map().size();
LOG(paddle_mobile::kLOG_DEBUG1) << " has type: " << type << " "
<< OpInfoMap<Dtype>::Instance()->Has(type);
auto& info = OpInfoMap<Dtype>::Instance()->Get(type);
auto op = info.Creator()(type, inputs, outputs, attrs, scope);
return std::shared_ptr<OperatorBase<Dtype>>(op);
25 changes: 24 additions & 1 deletion src/framework/operator.cpp
@@ -13,11 +13,21 @@ See the License for the specific language governing permissions and
limitations under the License. */

#include "framework/operator.h"
#include "framework/op_info.h"
#include "operators/op_param.h"

namespace paddle_mobile {
namespace framework {

template <typename Dtype>
vector<string> OperatorBase<Dtype>::GetOutKeys() const {
auto it = op_input_output_key.find(type_);
if (it == op_input_output_key.end()) {
DLOG << type_ << " has no outputs";
return {};
}
return it->second.second;
}

template <typename Dtype>
OperatorBase<Dtype>::OperatorBase(const std::string &type,
const VariableNameMap &inputs,
@@ -31,9 +41,22 @@ OperatorBase<Dtype>::OperatorBase(const std::string &type,
scope_(scope) {
CheckAllInputOutputSet();
}

template <typename Dtype>
void OperatorBase<Dtype>::CheckAllInputOutputSet() const {}

template <typename Dtype>
void OperatorBase<Dtype>::Run() const {
RunImpl();
#ifdef PADDLE_MOBILE_DEBUG
vector<string> output_keys = GetOutKeys();
for (const auto key : output_keys) {
Tensor *out_ = GetVarValue<framework::LoDTensor>(key, outputs_, *scope_);
DLOG << type_ << " output- " << key << "=" << *out_;
}
#endif
}

template class OperatorBase<CPU>;
template class OperatorWithKernel<CPU>;

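This change makes Run() a non-virtual template method: the base class invokes RunImpl() and, when PADDLE_MOBILE_DEBUG is set, logs every output tensor named by GetOutKeys(). A hedged sketch of what a concrete op overrides under the new scheme (the class below is hypothetical; the member types follow the signatures shown in the operator.h diff):

#include <memory>
#include <string>
#include "framework/operator.h"

namespace pmf = paddle_mobile::framework;

// Sketch only: a made-up op illustrating the new override points.
template <typename Dtype>
class HypotheticalReluOp : public pmf::OperatorWithKernel<Dtype> {
 public:
  HypotheticalReluOp(const std::string &type, const pmf::VariableNameMap &inputs,
                     const pmf::VariableNameMap &outputs,
                     const pmf::AttributeMap &attrs,
                     std::shared_ptr<pmf::Scope> scope)
      : pmf::OperatorWithKernel<Dtype>(type, inputs, outputs, attrs, scope) {}

  // Ops now implement RunImpl() rather than Run(); the non-virtual base Run()
  // calls this and then dumps the outputs listed in op_input_output_key.
  void RunImpl() const override { /* launch the ReLU kernel here */ }

  // ReLU keeps the input shape, so nothing needs to be recomputed here.
  void InferShape() const override {}
};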
72 changes: 52 additions & 20 deletions src/framework/operator.h
@@ -36,39 +36,60 @@ limitations under the License. */

namespace paddle_mobile {
namespace framework {
static std::unordered_map<
std::string, std::pair<std::vector<std::string>, std::vector<std::string>>>
op_input_output_key = {{"conv2d", {{"Input"}, {"Output"}}},
{"relu", {{"X"}, {"Out"}}},
{"softmax", {{"X"}, {"Out"}}},
{"mul", {{"X"}, {"Out"}}},
{"elementwise_add", {{"X", "Y"}, {"Out"}}},
{"pool2d", {{"X"}, {"Out"}}},
{"batch_norm", {{"X"}, {"Y"}}},
{"lrn", {{"X"}, {"Out"}}},
{"concat", {{"X"}, {"Out"}}},
{"feed", {{"X"}, {"Out"}}},
{"fetch", {{"X"}, {"Out"}}}};
using std::string;
using std::vector;

template <typename T>
static T *GetVarValue(const string &key, const VariableNameMap &var_map,
const Scope &scope) {
auto var_vec = var_map.at(key);
if (!var_vec.empty()) {
auto var = scope.FindVar(var_vec[0]);
return var->GetMutable<T>();
} else {
return nullptr;
}
}

template <typename Dtype>
class OperatorBase : PaddleMobileObject {
public:
/*
* @b Constructor of the op base class; the op receives its inputs, attributes
* and the pre-allocated output tensors.
* */
OperatorBase(const std::string &type, const VariableNameMap &inputs,
const VariableNameMap &outputs, const AttributeMap &attrs,
std::shared_ptr<Scope> scope);
virtual ~OperatorBase() {}
virtual void Run() const = 0;
virtual void InferShape() const = 0;
void Run() const;
std::vector<string> GetOutKeys() const;
virtual void RunImpl() const = 0;

/*
* @b Inputs required by the op's computation, e.g. the previous layer's output
* or the convolution kernels.
* */
const VariableNameMap &Inputs() const { return inputs_; }
/*
* @b The op's outputs; their memory is allocated in advance and the results of
* the computation are written into it.
* */
const VariableNameMap &Outputs() const { return outputs_; }
/*
* @b The op type.
* */
const std::string &Type() const { return type_; }
/*
* @b Attributes needed by the op's computation, e.g. the stride used by conv.
* */
const AttributeMap &Attrs() const { return attrs_; }
void ClearVariables(const std::vector<std::string> &var_names) const {
if (this->scope_) {
this->scope_->EraseVars(var_names);
}
}
/*
* @b Infers the output shape from the input shapes and the attributes.
* */
virtual void InferShape() const = 0;

protected:
std::shared_ptr<Scope> scope_;
@@ -81,22 +102,33 @@ class OperatorBase : PaddleMobileObject {
void CheckAllInputOutputSet() const;
};

/*
* @b Parent class of all ops that carry a computation; it inherits from
* OperatorBase.
* */
template <typename Dtype>
class OperatorWithKernel : public OperatorBase<Dtype> {
public:
OperatorWithKernel(const std::string &type, const VariableNameMap &inputs,
const VariableNameMap &outputs, const AttributeMap &attrs,
std::shared_ptr<Scope> scope)
: OperatorBase<Dtype>(type, inputs, outputs, attrs, scope) {}
virtual void Run() const = 0;

virtual void RunImpl() const = 0;
virtual void InferShape() const = 0;
};

/*
* @b Parent class of all kernels.
* */
template <typename Dtype, typename P>
class OpKernelBase : PaddleMobileObject {
public:
/*
* @b Every kernel must implement the Compute method.
* @p para is a struct bundling the parameters the kernel needs for its
* computation; all such structs live in paddle-mobile/src/operators/op_param.h
* */
virtual void Compute(const P &para) const = 0;

virtual ~OpKernelBase() = default;
};

Expand All @@ -113,13 +145,13 @@ class FusionOpMatcher : PaddleMobileObject {

virtual std::string Type() = 0;

virtual void FolderNodes(Node &node) {
node.Folder(node_.Depth(), Type(), {});
virtual void FolderNodes(Node *node) {
node->Folder(node_.Depth(), Type(), {});
}

virtual Node &BeginNode() { return node_; }

std::string BeginType() { return node_.BeginType(); }
std::string BeginType() { return node_.Type(); }

protected:
Node node_;
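The GetVarValue helper added to operator.h is what the new debug path in Run() uses to pull each output tensor out of the scope. A minimal sketch built on it (the function name is illustrative; it mirrors the dump loop in operator.cpp above):

#include "framework/operator.h"

// Look up whatever variable is bound to an op's "Out" key and log it, the same
// way OperatorBase::Run() dumps its outputs in debug builds.
void DumpOutTensor(const paddle_mobile::framework::VariableNameMap &outputs,
                   const paddle_mobile::framework::Scope &scope) {
  using paddle_mobile::framework::GetVarValue;
  using paddle_mobile::framework::LoDTensor;
  using paddle_mobile::framework::Tensor;
  Tensor *out = GetVarValue<LoDTensor>("Out", outputs, scope);
  if (out != nullptr) {        // nullptr when no variable is bound to the key
    DLOG << "Out = " << *out;  // Tensor is streamable, as used in operator.cpp
  }
}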
8 changes: 1 addition & 7 deletions src/framework/program/block_desc.cpp
@@ -25,13 +25,7 @@ std::vector<std::shared_ptr<VarDesc>> BlockDesc::Vars() const {
return res;
}

std::vector<std::shared_ptr<OpDesc>> BlockDesc::Ops() const {
std::vector<std::shared_ptr<OpDesc>> res;
for (const auto &op : ops_) {
res.push_back(op);
}
return res;
}
std::vector<std::shared_ptr<OpDesc>> BlockDesc::Ops() const { return ops_; }

BlockDesc::BlockDesc(PaddleMobile__Framework__Proto__BlockDesc *desc)
: index_(desc->idx), parent_index_(desc->idx) {
4 changes: 4 additions & 0 deletions src/framework/program/block_desc.h
@@ -26,6 +26,7 @@ class BlockDesc : PaddleMobileObject {
public:
friend class Node;
friend class ProgramOptimize;
BlockDesc() {}
BlockDesc(PaddleMobile__Framework__Proto__BlockDesc *desc);
BlockDesc(const BlockDesc &block_desc)
: index_(block_desc.index_), parent_index_(block_desc.parent_index_) {
@@ -43,6 +44,8 @@

const int &ID() const { return index_; }

const bool &MultiThread() const { return multi_thread_; }

const int &Parent() const { return parent_index_; }

bool operator==(const paddle_mobile::framework::BlockDesc &in_block) const {
@@ -58,6 +61,7 @@

private:
int index_;
bool multi_thread_;
int parent_index_;
std::vector<std::shared_ptr<OpDesc>> ops_;
std::unordered_map<std::string, std::shared_ptr<VarDesc>> vars_;
