Commit 30a58b5
Merge branch 'develop' of https://github.com/PaddlePaddle/Paddle into lod_tensor2

2 parents: f299206 + 08f9b72

27 files changed: +977, -49 lines

CMakeLists.txt
Lines changed: 3 additions & 0 deletions

```diff
@@ -67,6 +67,9 @@ endif()
 if(ANDROID)
     if(${CMAKE_SYSTEM_VERSION} VERSION_LESS "16")
         message(FATAL_ERROR "Unsupport standalone toolchains with Android API level lower than 16")
+    elseif(${CMAKE_SYSTEM_VERSION} VERSION_LESS "21")
+        # TODO: support glog for Android api 16 ~ 19 in the future
+        message(WARNING "Using the unofficial git repository <https://github.com/Xreki/glog.git> instead")
     endif()
 
     set(WITH_GPU OFF CACHE STRING
```
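
For context on how this check fires: below is a sketch of a cross-compiling configure run that would hit the new `elseif` branch. The `ANDROID_STANDALONE_TOOLCHAIN` path and the exact flag set are assumptions about Paddle's Android build options, not taken from this commit.

```bash
# Hypothetical configure for Android API 19 (i.e. 16 <= API < 21):
# CMAKE_SYSTEM_VERSION resolves below "21", so CMake emits the WARNING
# about falling back to the unofficial glog repository.
cmake .. -DCMAKE_SYSTEM_NAME=Android \
         -DANDROID_STANDALONE_TOOLCHAIN=/opt/toolchains/arm-android-19 \
         -DANDROID_ABI=armeabi-v7a \
         -DWITH_C_API=ON -DWITH_GPU=OFF
```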

Dockerfile.android
Lines changed: 6 additions & 7 deletions

```diff
@@ -6,13 +6,14 @@ RUN /bin/bash -c 'if [[ -n ${UBUNTU_MIRROR} ]]; then sed -i 's#http://archive.ub
 
 # ENV variables
 ARG ANDROID_ABI
+ARG ANDROID_API
 
 ENV ANDROID_ABI=${ANDROID_ABI:-"armeabi-v7a"}
+ENV ANDROID_API=${ANDROID_API:-21}
 
 ENV HOME=/root \
     ANDROID_NDK_HOME=/opt/android-ndk-linux \
-    ANDROID_ARM_STANDALONE_TOOLCHAIN=/opt/arm-toolchain \
-    ANDROID_ARM64_STANDALONE_TOOLCHAIN=/opt/arm64-toolchain
+    ANDROID_TOOLCHAINS_DIR=/opt/toolchains
 
 RUN apt-get update && \
     apt-get install -y \
@@ -42,14 +43,12 @@ RUN pip install --upgrade pip && \
     pip install pre-commit
 
 # Android NDK
-RUN mkdir /opt/android-ndk-tmp && \
+RUN mkdir -p ${ANDROID_TOOLCHAINS_DIR} && \
+    mkdir -p /opt/android-ndk-tmp && \
    cd /opt/android-ndk-tmp && \
    wget -q https://dl.google.com/android/repository/android-ndk-r14b-linux-x86_64.zip && \
    unzip -q android-ndk-r14b-linux-x86_64.zip && \
    mv android-ndk-r14b ${ANDROID_NDK_HOME} && \
-    ${ANDROID_NDK_HOME}/build/tools/make-standalone-toolchain.sh --arch=arm --platform=android-23 --install-dir=${ANDROID_ARM_STANDALONE_TOOLCHAIN} && \
-    ${ANDROID_NDK_HOME}/build/tools/make-standalone-toolchain.sh --arch=arm64 --platform=android-23 --install-dir=${ANDROID_ARM64_STANDALONE_TOOLCHAIN} && \
-    rm -rf /opt/android-ndk-tmp && \
-    rm -rf ${ANDROID_NDK_HOME}
+    rm -rf /opt/android-ndk-tmp
 
 CMD ["bash", "/paddle/paddle/scripts/docker/build_android.sh"]
```

cmake/external/gflags.cmake
Lines changed: 11 additions & 2 deletions

```diff
@@ -18,9 +18,9 @@ SET(GFLAGS_SOURCES_DIR ${THIRD_PARTY_PATH}/gflags)
 SET(GFLAGS_INSTALL_DIR ${THIRD_PARTY_PATH}/install/gflags)
 SET(GFLAGS_INCLUDE_DIR "${GFLAGS_INSTALL_DIR}/include" CACHE PATH "gflags include directory." FORCE)
 IF(WIN32)
-  set(GFLAGS_LIBRARIES "${GFLAGS_INSTALL_DIR}/lib/gflags.lib" CACHE FILEPATH "GFLAGS_LIBRARIES" FORCE)
+  set(GFLAGS_LIBRARIES "${GFLAGS_INSTALL_DIR}/lib/gflags.lib" CACHE FILEPATH "GFLAGS_LIBRARIES" FORCE)
 ELSE(WIN32)
-  set(GFLAGS_LIBRARIES "${GFLAGS_INSTALL_DIR}/lib/libgflags.a" CACHE FILEPATH "GFLAGS_LIBRARIES" FORCE)
+  set(GFLAGS_LIBRARIES "${GFLAGS_INSTALL_DIR}/lib/libgflags.a" CACHE FILEPATH "GFLAGS_LIBRARIES" FORCE)
 ENDIF(WIN32)
 
 INCLUDE_DIRECTORIES(${GFLAGS_INCLUDE_DIR})
@@ -56,3 +56,12 @@ SET_PROPERTY(TARGET gflags PROPERTY IMPORTED_LOCATION ${GFLAGS_LIBRARIES})
 ADD_DEPENDENCIES(gflags extern_gflags)
 
 LIST(APPEND external_project_dependencies gflags)
+
+IF(WITH_C_API)
+  INSTALL(DIRECTORY ${GFLAGS_INCLUDE_DIR} DESTINATION third_party/gflags)
+  IF(ANDROID)
+    INSTALL(FILES ${GFLAGS_LIBRARIES} DESTINATION third_party/gflags/lib/${ANDROID_ABI})
+  ELSE()
+    INSTALL(FILES ${GFLAGS_LIBRARIES} DESTINATION third_party/gflags/lib)
+  ENDIF()
+ENDIF()
```
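
For an Android C-API build with `ANDROID_ABI=armeabi-v7a`, these two rules would produce roughly the following layout under the install prefix (a sketch; the glog, protobuf, and zlib changes below follow the same pattern for their libraries):

```
third_party/gflags/
├── include/gflags/...              # INSTALL(DIRECTORY ${GFLAGS_INCLUDE_DIR} ...)
└── lib/armeabi-v7a/libgflags.a     # INSTALL(FILES ${GFLAGS_LIBRARIES} ...)
```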

cmake/external/glog.cmake
Lines changed: 11 additions & 2 deletions

```diff
@@ -19,9 +19,9 @@ SET(GLOG_INSTALL_DIR ${THIRD_PARTY_PATH}/install/glog)
 SET(GLOG_INCLUDE_DIR "${GLOG_INSTALL_DIR}/include" CACHE PATH "glog include directory." FORCE)
 
 IF(WIN32)
-  SET(GLOG_LIBRARIES "${GLOG_INSTALL_DIR}/lib/libglog.lib" CACHE FILEPATH "glog library." FORCE)
+  SET(GLOG_LIBRARIES "${GLOG_INSTALL_DIR}/lib/libglog.lib" CACHE FILEPATH "glog library." FORCE)
 ELSE(WIN32)
-  SET(GLOG_LIBRARIES "${GLOG_INSTALL_DIR}/lib/libglog.a" CACHE FILEPATH "glog library." FORCE)
+  SET(GLOG_LIBRARIES "${GLOG_INSTALL_DIR}/lib/libglog.a" CACHE FILEPATH "glog library." FORCE)
 ENDIF(WIN32)
 
 INCLUDE_DIRECTORIES(${GLOG_INCLUDE_DIR})
@@ -56,3 +56,12 @@ ADD_DEPENDENCIES(glog extern_glog gflags)
 LINK_LIBRARIES(glog gflags)
 
 LIST(APPEND external_project_dependencies glog)
+
+IF(WITH_C_API)
+  INSTALL(DIRECTORY ${GLOG_INCLUDE_DIR} DESTINATION third_party/glog)
+  IF(ANDROID)
+    INSTALL(FILES ${GLOG_LIBRARIES} DESTINATION third_party/glog/lib/${ANDROID_ABI})
+  ELSE()
+    INSTALL(FILES ${GLOG_LIBRARIES} DESTINATION third_party/glog/lib)
+  ENDIF()
+ENDIF()
```

cmake/external/openblas.cmake
Lines changed: 20 additions & 0 deletions

```diff
@@ -73,6 +73,26 @@ IF(NOT ${CBLAS_FOUND})
     UPDATE_COMMAND  ""
     CONFIGURE_COMMAND ""
   )
+
+  IF(WITH_C_API)
+    INSTALL(DIRECTORY ${CBLAS_INC_DIR} DESTINATION third_party/openblas)
+    # Because libopenblas.a is a symbolic link of another library, thus need to
+    # install the whole directory.
+    IF(ANDROID)
+      SET(TMP_INSTALL_DIR third_party/openblas/lib/${ANDROID_ABI})
+    ELSE()
+      SET(TMP_INSTALL_DIR third_party/openblas/lib)
+    ENDIF()
+    INSTALL(CODE "execute_process(
+      COMMAND ${CMAKE_COMMAND} -E copy_directory ${CBLAS_INSTALL_DIR}/lib
+              ${CMAKE_INSTALL_PREFIX}/${TMP_INSTALL_DIR}
+      )"
+    )
+    INSTALL(CODE "MESSAGE(STATUS \"Installing: \"
+      \"${CBLAS_INSTALL_DIR}/lib -> ${CMAKE_INSTALL_PREFIX}/${TMP_INSTALL_DIR}\"
+      )"
+    )
+  ENDIF()
 ENDIF(NOT ${CBLAS_FOUND})
 
 MESSAGE(STATUS "BLAS library: ${CBLAS_LIBRARIES}")
```
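
The comment in the hunk is the key detail: `libopenblas.a` is only a symbolic link to the real archive, so installing that single file would not carry the actual library along; copying the whole `lib` directory sidesteps this. An illustrative listing (the versioned file name is hypothetical):

```
$ ls -l third_party/install/openblas/lib
libopenblas.a -> libopenblas_armv7p-r0.2.20.a    # symlink only
libopenblas_armv7p-r0.2.20.a                     # the archive it points to
```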

cmake/external/protobuf.cmake
Lines changed: 9 additions & 0 deletions

```diff
@@ -223,6 +223,15 @@ IF(NOT PROTOBUF_FOUND)
     SET(PROTOBUF_PROTOC_LIBRARY ${extern_protobuf_PROTOC_LIBRARY}
         CACHE FILEPATH "protoc library." FORCE)
 
+    IF(WITH_C_API)
+      INSTALL(DIRECTORY ${PROTOBUF_INCLUDE_DIR} DESTINATION third_party/protobuf)
+      IF(ANDROID)
+        INSTALL(FILES ${PROTOBUF_LIBRARY} DESTINATION third_party/protobuf/lib/${ANDROID_ABI})
+      ELSE()
+        INSTALL(FILES ${PROTOBUF_LIBRARY} DESTINATION third_party/protobuf/lib)
+      ENDIF()
+    ENDIF()
+
     IF(CMAKE_CROSSCOMPILING)
         PROMPT_PROTOBUF_LIB(protobuf_host extern_protobuf)
     ELSE()
```

cmake/external/zlib.cmake
Lines changed: 9 additions & 0 deletions

```diff
@@ -49,3 +49,12 @@ ExternalProject_Add(
 )
 
 LIST(APPEND external_project_dependencies zlib)
+
+IF(WITH_C_API)
+  INSTALL(DIRECTORY ${ZLIB_INCLUDE_DIR} DESTINATION third_party/zlib)
+  IF(ANDROID)
+    INSTALL(FILES ${ZLIB_LIBRARIES} DESTINATION third_party/zlib/lib/${ANDROID_ABI})
+  ELSE()
+    INSTALL(FILES ${ZLIB_LIBRARIES} DESTINATION third_party/zlib/lib)
+  ENDIF()
+ENDIF()
```

paddle/capi/CMakeLists.txt
Lines changed: 20 additions & 0 deletions

```diff
@@ -64,9 +64,29 @@ link_paddle_exe(paddle_capi_shared)
 install(FILES ${CAPI_HEADERS} DESTINATION include/paddle)
 install(FILES ${CMAKE_CURRENT_BINARY_DIR}/config.h DESTINATION include/paddle)
 if(ANDROID)
+  execute_process(
+    COMMAND ${GIT_EXECUTABLE} log --pretty=oneline -1
+    OUTPUT_VARIABLE GIT_COMMITS_LIST
+    RESULT_VARIABLE GIT_COMMITS_LIST_RESULT
+    ERROR_QUIET OUTPUT_STRIP_TRAILING_WHITESPACE)
+  if(${GIT_COMMITS_LIST_RESULT})
+    set(GIT_COMMITS_LIST "No commits.")
+  endif()
   install(FILES ${CMAKE_CURRENT_BINARY_DIR}/${capi_whole_library}
           DESTINATION lib/${ANDROID_ABI})
   install(TARGETS paddle_capi_shared DESTINATION lib/${ANDROID_ABI})
+  install(CODE "FILE(WRITE ${CMAKE_INSTALL_PREFIX}/lib/${ANDROID_ABI}/BUILD.txt
+    \"Compiler:\\n\"
+    \"\\t${CMAKE_C_COMPILER}\\n\"
+    \"\\t${CMAKE_CXX_COMPILER}\\n\"
+    \"Compiler Flags:\\n\"
+    \"\\t${CMAKE_C_FLAGS}\\n\"
+    \"\\t${CMAKE_CXX_FLAGS}\\n\"
+    \"Android API: ${CMAKE_SYSTEM_VERSION}\\n\"
+    \"Latest commit:\\n\"
+    \"\\t${GIT_COMMITS_LIST}\\n\"
+  )"
+  )
 else(ANDROID)
   install(FILES ${CMAKE_CURRENT_BINARY_DIR}/${capi_whole_library} DESTINATION lib)
   install(TARGETS paddle_capi_shared DESTINATION lib)
```
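
The `install(CODE ...)` block writes a small provenance file next to the installed libraries. A sketch of what `lib/armeabi-v7a/BUILD.txt` might contain (all values illustrative, not from this commit):

```
Compiler:
        /opt/toolchains/arm-android-21/bin/arm-linux-androideabi-gcc
        /opt/toolchains/arm-android-21/bin/arm-linux-androideabi-g++
Compiler Flags:
        -mfloat-abi=softfp -mfpu=vfpv3 ...
        -std=c++11 -fPIC ...
Android API: 21
Latest commit:
        30a58b5... Merge branch 'develop' of https://github.com/PaddlePaddle/Paddle into lod_tensor2
```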

paddle/framework/backward.md
Lines changed: 41 additions & 23 deletions

````diff
@@ -2,11 +2,22 @@
 
 ## Motivation
 
-In Neural Network, the backpropagation algorithm follows the chain rule, so we need to compound the gradient operators/expressions together with the chain rule. Every forward network needs a backward network to construct the full computation graph, the operator/expression's backward pass will be generated respect to forward pass.
+In neural networks, most models are currently solved by the backpropagation algorithm (known as BP). Technically, BP calculates the gradient of the loss function and propagates it back through the network. Since it follows the chain rule, we need a module that chains the gradient operators/expressions together to construct the backward pass. Every forward network needs a backward network to build the full computation graph; the operator/expression's backward pass is generated with respect to the forward pass.
 
-## Backward Operator Registry
+## Implementation
 
-A backward network is built up with several backward operators. Backward operators take forward operators' inputs outputs, and output gradients and then calculate its input gradients.
+In this design doc, we export only one API for generating the backward pass:
+
+```c++
+std::unique_ptr<OperatorBase> Backward(const OperatorBase& forwardOp,
+                                       const std::unordered_set<std::string>& no_grad_vars);
+```
+
+The implementation behind it can be divided into two parts, **Backward Operator Creating** and **Backward Network Building**.
+
+### Backward Operator Registry
+
+A backward network is built up with several backward operators. Backward operators take the forward operators' inputs, outputs, and output gradients, and then calculate their input gradients.
 
 | | forward operator | backward operator
 | ---------------------- | ---------------- |------------------------- |
````
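
To make the table's contract concrete, here is a self-contained sketch (plain C++ with assumed shapes, not Paddle's kernel code) of what the backward operator of `mul` computes: for Out = X·W, it consumes the forward inputs X and W plus the output gradient dOut, and produces the input gradients dX = dOut·Wᵀ and dW = Xᵀ·dOut.

```c++
#include <vector>

// Gradient of Out = X * W with X: m x k, W: k x n, dOut: m x n.
// Produces dX = dOut * W^T (m x k) and dW = X^T * dOut (k x n).
void MulGrad(const std::vector<double>& X, const std::vector<double>& W,
             const std::vector<double>& dOut, int m, int k, int n,
             std::vector<double>* dX, std::vector<double>* dW) {
  dX->assign(m * k, 0.0);
  dW->assign(k * n, 0.0);
  for (int i = 0; i < m; ++i) {
    for (int j = 0; j < k; ++j) {
      for (int t = 0; t < n; ++t) {
        (*dX)[i * k + j] += dOut[i * n + t] * W[j * n + t];  // dOut * W^T
        (*dW)[j * n + t] += X[i * k + j] * dOut[i * n + t];  // X^T * dOut
      }
    }
  }
}
```

This is exactly the row-for-row mapping the registry records: forward inputs and output gradients in, input gradients out.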

```diff
@@ -25,7 +36,7 @@ REGISTER_OP(mul, MulOp, MulOpMaker, mul_grad, MulOpGrad);
 
 `mul_grad` is the type of the backward operator, and `MulOpGrad` is its class name.
 
-## Backward Opeartor Creating
+### Backward Operator Creating
 
 Given a certain forward operator, we can get its corresponding backward operator by calling:
 
```

```diff
@@ -43,40 +54,47 @@ The function `BuildGradOp` will sequentially execute following processes:
 
 4. Building backward operator with `inputs`, `outputs` and forward operator's attributes.
 
-## Backward Network Building
-
-A backward network is a series of backward operators. The main idea of building a backward network is creating backward operators in the inverted sequence and put them together.
+### Backward Network Building
 
-In our design, the network itself is also a kind of operator. So the operators contained by a big network may be some small network.
-
-given a forward network, it generates the backward network. We only care about the Gradients—`OutputGradients`, `InputGradients`.
+A backward network is a series of backward operators. The main idea of building a backward network is to create the backward operators in inverted sequence and append them together one by one. A few corner cases need special handling:
 
 1. Op
 
-   when the input forward network is an Op, return its gradient Operator Immediately.
+   When the input forward network is an Op, return its gradient operator immediately. If all of its outputs are in the no-gradient set, return a special `NOP` instead.
 
 2. NetOp
 
-   when the input forward network is a NetOp, it needs to call the sub NetOp/Operators backward function recursively. During the process, we need to collect the `OutputGradients` name according to the forward NetOp.
+   In our design, the network itself is also a kind of operator (**NetOp**), so the operators contained in a big network may themselves be small networks. When the input forward network is a NetOp, we call the backward function of each sub NetOp/operator recursively. During the process, we need to collect the `OutputGradients` names according to the forward NetOp.
+
+3. RnnOp
+
+   RnnOp is a nested stepnet operator. The backward module needs to call `Backward` recursively for every stepnet.
+
+4. Sharing Variables
+
+   As illustrated in the pictures below, two operators share the same variable name `W@GRAD`, which will overwrite their shared input variable.
+
+   <p align="center">
+   <img src="./images/duplicate_op.png" width="50%" ><br/>
 
-**shared variable**. As illustrated in the pictures, two operator's `Output` `Gradient` will overwrite their shared input variable.
+   pic 1. Sharing variables in operators.
 
-<p align="center">
-<img src="./images/duplicate_op.png" width="50%" ><br/>
+   </p>
 
-1. Shared variable in operators.
+   Sharing a variable between operators, or using the same input variable in multiple operators, leads to a duplicate gradient variable. As the demo above shows, we need to rename the gradient names recursively and add a generic add operator to replace the overwriting links.
 
-</p>
+   <p align="center">
+   <img src="images/duplicate_op2.png" width="40%" ><br/>
 
-Share variable between operators or same input variable used in multiple operators leads to a duplicate gradient variable. As demo show above, we need to rename gradient name recursively and add a generic add operator replace the overwrite links.
+   pic 2. Replace sharing variables' gradients with an `Add` operator.
 
-<p align="center">
-<img src="images/duplicate_op2.png" width="50%" ><br/>
+   </p>
 
-2. Replace shared variable's gradient with `Add` operator.
+   Because our framework looks variables up by their names, we need to rename the output links. We add a numeric suffix to represent each duplicate's position, in clockwise order.
 
-</p>
+5. Part of the Gradient is Zero
 
+   In the whole graph, there are cases where one operator's gradient is not needed, but its input's gradient is a dependency link of another operator, so we need to fill a gradient matrix of the same shape into that position. In our implementation, we insert a special `fillZeroLike` operator.
 
 
-Then collect the sub graph `OutputGradients`/`InputGradients` as the NetOp's and return it.
+Following the rules above, we then collect the subgraph's `OutputGradients`/`InputGradients` as the NetOp's and return it.
```
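
Two sketches to tie this together. First, a minimal use of the `Backward` API declared at the top of this doc (the forward network `fwd_net` and the excluded variable name are assumptions for illustration):

```c++
// Sketch only: fwd_net is assumed to be a previously built forward NetOp.
std::unordered_set<std::string> no_grad_vars{"bias"};  // skip d(loss)/d(bias)
std::unique_ptr<OperatorBase> bwd_net = Backward(*fwd_net, no_grad_vars);
```

Second, a self-contained toy of corner case 4's rename-then-add step; the `Op` struct and the `@RENAME@` suffix are stand-ins for illustration, not Paddle's real types:

```c++
#include <map>
#include <string>
#include <vector>

struct Op {
  std::string type;
  std::vector<std::string> inputs;
  std::vector<std::string> outputs;
};

// If several backward ops write the same gradient (e.g. "W@GRAD"), rename
// each occurrence with a positional suffix, then append one generic add op
// per duplicated name that sums the pieces back into the original variable.
void InsertAddOps(std::vector<Op>* net) {
  std::map<std::string, int> count;  // output name -> number of writers
  for (const auto& op : *net)
    for (const auto& out : op.outputs) ++count[out];

  std::map<std::string, std::vector<std::string>> pieces;
  std::map<std::string, int> next;
  for (auto& op : *net)
    for (auto& out : op.outputs)
      if (count[out] > 1) {
        std::string renamed = out + "@RENAME@" + std::to_string(next[out]++);
        pieces[out].push_back(renamed);
        out = renamed;  // this writer now produces its own piece
      }

  for (const auto& kv : pieces)
    net->push_back(Op{"add", kv.second, {kv.first}});
}
```

After this pass, every consumer of `W@GRAD` still reads a single variable of that name, but it now holds the sum produced by the inserted add op instead of whichever writer ran last.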
Binary file (177 Bytes) not shown.
