Skip to content

Commit

Permalink
Merge pull request #976 from pengli09/add_label_seq_pos_to_inputdef
Browse files Browse the repository at this point in the history
Support user-specified label input in tests
  • Loading branch information
pengli09 authored Dec 21, 2016
2 parents adc5839 + d09564b commit 28c5010
Show file tree
Hide file tree
Showing 2 changed files with 51 additions and 5 deletions.
36 changes: 31 additions & 5 deletions paddle/gserver/tests/LayerGradUtil.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -303,13 +303,31 @@ void initDataLayer(TestConfig testConf,
ICpuGpuVectorPtr sequenceStartPositions;
ICpuGpuVectorPtr subSequenceStartPositions;
IVectorPtr cpuSequenceDims;
for (size_t i = 0; i < testConf.inputDefs.size(); i++) {
for (size_t i = 0; i < testConf.inputDefs.size(); ++i) {
if (testConf.inputDefs[i].inputType != INPUT_SEQUENCE_LABEL) continue;

const std::vector<int>& labelSeqStartPositions =
testConf.inputDefs[i].labelSeqStartPositions;
if (labelSeqStartPositions.size() != 0) {
CHECK(!sequenceStartPositions);
CHECK_GE(labelSeqStartPositions.size(), 2);

sequenceStartPositions =
ICpuGpuVector::create(labelSeqStartPositions.size(), useGpu);
sequenceStartPositions->copyFrom(
labelSeqStartPositions.data(), labelSeqStartPositions.size(), useGpu);
}
}

for (size_t i = 0; i < testConf.inputDefs.size(); ++i) {
LayerConfig config;
config.set_name(testConf.inputDefs[i].name);
config.set_type("data");
config.set_size(testConf.inputDefs[i].dim);
LayerPtr layer = LayerPtr(new DataLayer(config));
size_t numSequence = batchSize / 10 + 1;
size_t numSequence = sequenceStartPositions
? sequenceStartPositions->getSize() - 1
: batchSize / 10 + 1;

Argument data;
auto fillData = [&](bool trans, int height, int width) {
Expand All @@ -336,9 +354,17 @@ void initDataLayer(TestConfig testConf,
break;
case INPUT_LABEL:
case INPUT_SEQUENCE_LABEL:
data.ids = VectorT<int>::create(batchSize, useGpu);
// now rand number can be 0 to inputDefs[i].dim
data.ids->rand(testConf.inputDefs[i].dim);
if (testConf.inputDefs[i].labelInitValue.size() != 0) {
const std::vector<int>& labelInitValue =
testConf.inputDefs[i].labelInitValue;
CHECK_EQ(labelInitValue.size(), batchSize);
data.ids = VectorT<int>::create(batchSize, useGpu);
data.ids->copyFrom(labelInitValue.data(), batchSize);
} else {
data.ids = VectorT<int>::create(batchSize, useGpu);
// now rand number can be 0 to inputDefs[i].dim
data.ids->rand(testConf.inputDefs[i].dim);
}
break;
case INPUT_SPARSE_NON_VALUE_DATA:
data.value = makeRandomSparseMatrix(
Expand Down
20 changes: 20 additions & 0 deletions paddle/gserver/tests/LayerGradUtil.h
Original file line number Diff line number Diff line change
Expand Up @@ -64,6 +64,9 @@ struct InputDef {
size_t paraSize;
ParaSparse sparse;
bool isStatic;
std::vector<int> labelInitValue;
std::vector<int> labelSeqStartPositions;

InputDef(InputType type, string nameIn, size_t dimIn, size_t sizeIn) {
inputType = type;
name = nameIn;
Expand All @@ -72,6 +75,23 @@ struct InputDef {
sparse = {""};
isStatic = false;
}

/// Construct an InputDef for a test input whose label ids are supplied by
/// the caller rather than generated randomly, optionally together with
/// explicit sequence start positions for INPUT_SEQUENCE_LABEL inputs.
///
/// @param type                    kind of input this definition describes
/// @param nameIn                  name of the data layer to create
/// @param dimIn                   dimension of the input
/// @param sizeIn                  parameter size
/// @param labelInitValue          label ids to copy into the input
///                                (presumably length == batchSize; the
///                                consumer CHECKs this — confirm at call site)
/// @param labelSeqStartPositions  sequence start offsets for the labels
InputDef(InputType type,
         string nameIn,
         size_t dimIn,
         size_t sizeIn,
         const std::vector<int>& labelInitValue,
         const std::vector<int>& labelSeqStartPositions)
    : labelInitValue(labelInitValue),
      labelSeqStartPositions(labelSeqStartPositions) {
  // Same defaults as the basic constructor: non-static, non-sparse input.
  isStatic = false;
  sparse = {""};
  paraSize = sizeIn;
  dim = dimIn;
  name = nameIn;
  inputType = type;
}

InputDef(InputType type,
string nameIn,
size_t dimIn,
Expand Down

0 comments on commit 28c5010

Please sign in to comment.