
Commit 8bd4752

Author: Peng Li (committed)
Merge branch 'develop' into add_label_seq_pos_to_inputdef
2 parents 1b8e151 + 446e3c2 · commit 8bd4752

11 files changed: 33 additions, 46 deletions

.travis.yml

Lines changed: 1 addition & 1 deletion

@@ -56,7 +56,7 @@ before_install:
   - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then sudo paddle/scripts/travis/before_install.linux.sh; fi
   - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then paddle/scripts/travis/before_install.osx.sh; fi
   - if [[ "$JOB" == "PRE_COMMIT" ]]; then sudo ln -s /usr/bin/clang-format-3.8 /usr/bin/clang-format; fi
-  - pip install wheel protobuf sphinx recommonmark virtualenv numpy sphinx_rtd_theme pre-commit
+  - pip install wheel protobuf sphinx recommonmark virtualenv numpy sphinx_rtd_theme pre-commit requests==2.9.2 LinkChecker
 script:
   - paddle/scripts/travis/main.sh
 notifications:

paddle/gserver/tests/test_ConvTrans.cpp

Lines changed: 6 additions & 6 deletions

@@ -206,8 +206,8 @@ TEST(Layer, convTransLayerFwd2) {
                  /* filter_size */ 5,
                  result);

-  float resultData[] = {1, 2, 2, 2, 1, 2, 4, 4, 4, 2, 2, 4, 4,
-                        4, 2, 2, 4, 4, 4, 2, 1, 2, 2, 2, 1};
+  real resultData[] = {1, 2, 2, 2, 1, 2, 4, 4, 4, 2, 2, 4, 4,
+                       4, 2, 2, 4, 4, 4, 2, 1, 2, 2, 2, 1};
   result->setData(resultData);
   doOneConvtTest(/* imgSize */ 5,
                  /* output_x */ 2,
@@ -216,8 +216,8 @@ TEST(Layer, convTransLayerFwd2) {
                  /* filter_size */ 4,
                  result);

-  float resultData2[] = {1, 2, 2, 2, 1, 2, 4, 4, 4, 2, 2, 4, 4,
-                         4, 2, 2, 4, 4, 4, 2, 1, 2, 2, 2, 1};
+  real resultData2[] = {1, 2, 2, 2, 1, 2, 4, 4, 4, 2, 2, 4, 4,
+                        4, 2, 2, 4, 4, 4, 2, 1, 2, 2, 2, 1};
   result->setData(resultData2);
   doOneConvtTest(/* imgSize */ 5,
                  /* output_x */ 2,
@@ -226,8 +226,8 @@ TEST(Layer, convTransLayerFwd2) {
                  /* filter_size */ 5,
                  result);

-  float resultData3[] = {1, 1, 2, 1, 1, 1, 1, 2, 1, 1, 2, 2, 4,
-                         2, 2, 1, 1, 2, 1, 1, 1, 1, 2, 1, 1};
+  real resultData3[] = {1, 1, 2, 1, 1, 1, 1, 2, 1, 1, 2, 2, 4,
+                        2, 2, 1, 1, 2, 1, 1, 1, 1, 2, 1, 1};
   result->setData(resultData3);
   doOneConvtTest(/* imgSize */ 5,
                  /* output_x */ 2,
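
Note on the float -> real change above: PaddlePaddle selects its floating-point element type at build time, so fixtures declared as float only match the Matrix storage in the single-precision build. A minimal sketch of the idea, assuming the usual build-flag-driven typedef (the macro name below is illustrative, not part of this commit):

// Precision is chosen when Paddle is configured; `real` follows it.
#ifdef PADDLE_TYPE_DOUBLE
typedef double real;  // double-precision build
#else
typedef float real;   // single-precision build
#endif

// Declaring expected data as `real` keeps result->setData(resultData)
// consistent with the matrix element type in either configuration.
real resultData[] = {1, 2, 2, 2, 1};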

paddle/gserver/tests/test_ConvUnify.cpp

Lines changed: 6 additions & 23 deletions

@@ -106,8 +106,8 @@ TEST(Layer, convParaUnified) {
 #ifndef PADDLE_ONLY_CPU
   MatrixPtr input, resultCpu, resultGpu;
   input = Matrix::create(1, 4 * 4, false, false);
-  float inputData[] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};
-  float param[] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 9, 8, 7, 6, 5, 4, 3, 2, 1};
+  real inputData[] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};
+  real param[] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 9, 8, 7, 6, 5, 4, 3, 2, 1};

   input->setData(inputData);

@@ -137,26 +137,9 @@ TEST(Layer, convParaUnified) {
   checkMatrixEqual(resultCpu, resultGpu);

   input = Matrix::create(1, 3 * 3 * 2, false, false);
-  float inputData2[] = {1,
-                        2,
-                        3,
-                        4,
-                        5,
-                        6,
-                        7,
-                        8,
-                        9,
-
-                        10,
-                        11,
-                        12,
-                        13,
-                        14,
-                        15,
-                        16,
-                        17,
-                        18};
-  float param2[] = {1, 2, 3, 4, 5, 6, 7, 8, 8, 7, 6, 5, 4, 3, 2, 1};
+  real inputData2[] = {
+      1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18};
+  real param2[] = {1, 2, 3, 4, 5, 6, 7, 8, 8, 7, 6, 5, 4, 3, 2, 1};

   input->setData(inputData2);

@@ -185,7 +168,7 @@ TEST(Layer, convParaUnified) {
                             true);
   checkMatrixEqual(resultCpu, resultGpu);

-  float param3[] = {1, 2, 3, 4, 4, 3, 2, 1};
+  real param3[] = {1, 2, 3, 4, 4, 3, 2, 1};

   resultCpu = doOneConvTest(/* imgSize */ 3,
                             /* output_x */ 2,

paddle/parameter/ParameterUpdaterBase.h

Lines changed: 3 additions & 3 deletions

@@ -38,7 +38,7 @@ class ParameterUpdater {
   virtual void startPass() {}

   // called by Trainer then finishing a pass, ruturn true if pass accepted
-  virtual bool finishPass(real cost = 0) { return true; }
+  virtual bool finishPass() { return true; }

   // called by Trainer before backward() of a batch
   // Return the type of pass it needs. This pass type will be passed
@@ -112,9 +112,9 @@ class ParameterUpdaterComposite : public ParameterUpdater {
         [&](int tid, size_t numThreads) { updaters_[tid]->startPass(); });
   }

-  virtual bool finishPass(real cost = 0) {
+  virtual bool finishPass() {
     syncThreadPool_->execPlusOwner(
-        [&](int tid, size_t numThreads) { updaters_[tid]->finishPass(cost); });
+        [&](int tid, size_t numThreads) { updaters_[tid]->finishPass(); });
     return true;
   }

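The change above removes the unused cost argument from finishPass() in the base class; the rest of this commit propagates the new signature through every updater. A minimal sketch of an updater subclass against the new interface (the class name is hypothetical, not from this commit):

// Hypothetical subclass overriding the argument-free end-of-pass hook.
class MyUpdater : public ParameterUpdater {
public:
  virtual bool finishPass() {
    // per-pass bookkeeping would go here
    return ParameterUpdater::finishPass();  // base version simply returns true
  }
};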

paddle/scripts/travis/docs.sh

Lines changed: 6 additions & 2 deletions

@@ -7,6 +7,10 @@ source ./common.sh
 cmake .. -DCMAKE_BUILD_TYPE=Debug -DWITH_GPU=OFF -DWITH_DOC=ON
 make paddle_docs paddle_docs_cn

+# check websites for broken links
+linkchecker doc/en/html/index.html
+linkchecker doc/cn/html/index.html
+
 # Parse Github URL
 REPO=`git config remote.origin.url`
 SSH_REPO=${REPO/https:\/\/github.com\//git@github.com:}
@@ -35,8 +39,8 @@ git checkout $TARGET_BRANCH || git checkout --orphan $TARGET_BRANCH

 # remove old docs. mv new docs.
 rm -rf doc doc_cn
-mv ../doc_cn/html doc_cn
-mv ../doc/html doc
+mv ../doc/cn/html doc_cn
+mv ../doc/en/html doc

 # Check is there anything changed.
 set +e

paddle/trainer/ParameterUpdater.h

Lines changed: 4 additions & 4 deletions

@@ -102,9 +102,9 @@ class SgdLocalUpdater : public ParameterUpdater {
    * @param cost sum cost during one pass.
    * @return true if accept (used for owlqn).
    */
-  virtual bool finishPass(real cost) {
+  virtual bool finishPass() {
     optimizer_->finishPass();
-    return ParameterUpdater::finishPass(cost);
+    return ParameterUpdater::finishPass();
   }

   /**
@@ -220,9 +220,9 @@ class SgdUpdaterWithCpuAverager : public SgdLocalUpdater {
     averager_->startPass();
     SgdLocalUpdater::startPass();
   }
-  virtual bool finishPass(real cost) {
+  virtual bool finishPass() {
     averager_->finishPass();
-    return SgdLocalUpdater::finishPass(cost);
+    return SgdLocalUpdater::finishPass();
   }

   /// apply the averaged parameter to PARAMETER_VALUE

paddle/trainer/RemoteParameterUpdater.cpp

Lines changed: 2 additions & 2 deletions

@@ -309,7 +309,7 @@ void RemoteParameterUpdater::startPass() {
   }
 }

-bool RemoteParameterUpdater::finishPass(real cost) {
+bool RemoteParameterUpdater::finishPass() {
   if (localUpdater_) {
     localUpdater_->finishPass();
   }
@@ -712,7 +712,7 @@ void SparseRemoteParameterUpdater::startPass() {
   }
 }

-bool SparseRemoteParameterUpdater::finishPass(real cost) {
+bool SparseRemoteParameterUpdater::finishPass() {
   if (config_.algorithm() == TrainAlgorithm::SGD) {
     parameterClient_->waitPassFinish();
   } else {

paddle/trainer/RemoteParameterUpdater.h

Lines changed: 2 additions & 2 deletions

@@ -90,7 +90,7 @@ class RemoteParameterUpdater : public ParameterUpdater {
    */
   virtual void finishBatch(real cost);
   virtual void startPass();
-  virtual bool finishPass(real cost);
+  virtual bool finishPass();

 #ifndef PADDLE_DISABLE_TIMER
   virtual void setForwardbackwardTime(uint64_t delta) {
@@ -281,7 +281,7 @@ class SparseRemoteParameterUpdater : public ParameterUpdater {
   /// send all sparse related parameters to all pservers
   virtual void finishBatch(real cost);
   virtual void startPass();
-  virtual bool finishPass(real cost);
+  virtual bool finishPass();

   virtual void apply();
   virtual void restore();

paddle/trainer/ThreadParameterUpdater.cpp

Lines changed: 1 addition & 1 deletion

@@ -70,7 +70,7 @@ void SgdThreadUpdater::startPass() {
   }
 }

-bool SgdThreadUpdater::finishPass(real cost) {
+bool SgdThreadUpdater::finishPass() {
   catchUpWith();

   for (auto& para : parameters_) {

paddle/trainer/ThreadParameterUpdater.h

Lines changed: 1 addition & 1 deletion

@@ -47,7 +47,7 @@ class SgdThreadUpdater : public ParameterUpdater {
   virtual void startPass();

   // Use the finishPass() function of the base optimizer.
-  virtual bool finishPass(real cost);
+  virtual bool finishPass();

   virtual void init(const std::vector<ParameterPtr>& parameters);
   virtual PassType startBatch(int64_t batchSize);
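
With all overrides updated, the caller no longer threads the accumulated pass cost through the updater. A hypothetical caller-side sketch, not code from this commit:

// Illustrative end-of-pass handling in a training loop; `updater` is
// whichever ParameterUpdater subclass the trainer owns.
void onPassEnd(ParameterUpdater& updater) {
  if (updater.finishPass()) {
    // pass accepted (e.g. for owlqn); continue with the next pass
  }
}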
