10 changes: 10 additions & 0 deletions doc/api/v2/config/layer.rst
@@ -335,6 +335,16 @@ bilinear_interp
.. autoclass:: paddle.v2.layer.bilinear_interp
:noindex:

dot_prod
---------
.. autoclass:: paddle.v2.layer.dot_prod
:noindex:

out_prod
Contributor: A horizontal rule like the one on line 339 is needed here.

Contributor (Author): Done.
--------
.. autoclass:: paddle.v2.layer.out_prod
:noindex:

power
-----
.. autoclass:: paddle.v2.layer.power
97 changes: 97 additions & 0 deletions paddle/gserver/layers/DotProdLayer.cpp
@@ -0,0 +1,97 @@
/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "Layer.h"
#include "paddle/math/Matrix.h"
#include "paddle/utils/Logging.h"
#include "paddle/utils/Stat.h"

namespace paddle {

/**
* @brief A layer for computing the dot product of two vectors.
* Input1: vector (batchSize * dim)
* Input2: vector (batchSize * dim)
* Output: a matrix: (batchSize * 1)
*/

class DotProdLayer : public Layer {
public:
explicit DotProdLayer(const LayerConfig& config) : Layer(config) {}

~DotProdLayer() {}

bool init(const LayerMap& layerMap,
const ParameterMap& parameterMap) override;

void forward(PassType passType) override;
void backward(const UpdateCallback& callback = nullptr) override;
};

REGISTER_LAYER(dot_prod, DotProdLayer);

bool DotProdLayer::init(const LayerMap& layerMap,
const ParameterMap& parameterMap) {
Layer::init(layerMap, parameterMap);

CHECK_EQ(inputLayers_.size(), 2U);
CHECK_EQ(1UL, getSize())
<< "The output dimensionality of this layer should be fixed to 1.";

return true;
}

void DotProdLayer::forward(PassType passType) {
Layer::forward(passType);

MatrixPtr inV0 = getInputValue(0);
MatrixPtr inV1 = getInputValue(1);

size_t batchSize = inV0->getHeight();
CHECK_EQ(inV1->getHeight(), batchSize);
Contributor: Also check here that the widths of inV0 and inV1 are the same.

Contributor (Author): Done.

CHECK_EQ(inV0->getWidth(), inV1->getWidth());

{
REGISTER_TIMER_INFO("FwResetTimer", getName().c_str());
reserveOutput(batchSize, 1);
}

MatrixPtr outV = getOutputValue();
{
REGISTER_TIMER_INFO("FwDotProdTimer", getName().c_str());
outV->sumOfProducts(*inV0, *inV1, 1, 0);
}
}

void DotProdLayer::backward(const UpdateCallback& callback) {
MatrixPtr inV0 = getInputValue(0);
MatrixPtr inV1 = getInputValue(1);
MatrixPtr outG = getOutputGrad();
MatrixPtr inG0 = getInputGrad(0);
MatrixPtr inG1 = getInputGrad(1);

{
REGISTER_TIMER_INFO("BwDotProdTimer", getName().c_str());

if (inG0) {
inG0->addRowScale(0, *inV1, *outG);
}

if (inG1) {
inG1->addRowScale(0, *inV0, *outG);
}
}
}

} // namespace paddle
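For reference, a minimal NumPy sketch (not part of this PR; the array names are illustrative) of the math DotProdLayer implements: sumOfProducts(*inV0, *inV1, 1, 0) computes a row-wise dot product, and addRowScale(0, *inV, *outG) scales each row of the opposite input by the scalar output gradient.

import numpy as np

batch_size, dim = 4, 10
a = np.random.rand(batch_size, dim)  # stands in for inV0
b = np.random.rand(batch_size, dim)  # stands in for inV1

# Forward: out[i] = sum_j a[i, j] * b[i, j], shape (batch_size, 1).
out = np.sum(a * b, axis=1, keepdims=True)

# Backward: with out_grad of shape (batch_size, 1),
# dL/da = out_grad * b and dL/db = out_grad * a (row-wise scaling).
out_grad = np.ones((batch_size, 1))
a_grad = out_grad * b
b_grad = out_grad * a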
15 changes: 15 additions & 0 deletions paddle/gserver/tests/test_LayerGrad.cpp
@@ -1081,6 +1081,21 @@ TEST(Layer, InterpolationLayer) {
}
}

TEST(Layer, DotProdLayer) {
TestConfig config;
config.layerConfig.set_type("dot_prod");
config.layerConfig.set_size(1);

config.inputDefs.push_back({INPUT_DATA, "layer_0", 10, 0});
config.layerConfig.add_inputs();
config.inputDefs.push_back({INPUT_DATA, "layer_1", 10, 0});
config.layerConfig.add_inputs();

for (auto useGpu : {false, true}) {
testLayerGrad(config, "dot_prod", 10, false, useGpu);
}
}
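The test above exercises the new layer's forward and backward passes with a gradient check on both CPU and GPU. A plain-NumPy illustration of that kind of check (a sketch, not Paddle's actual harness): perturb one entry of the first input and compare the central-difference slope with the analytic gradient, which for a row-wise dot product is the matching entry of the other input.

import numpy as np

def forward(a, b):
    # Row-wise dot product, matching DotProdLayer's forward pass.
    return np.sum(a * b, axis=1, keepdims=True)

rng = np.random.RandomState(0)
a = rng.rand(10, 10)
b = rng.rand(10, 10)
eps = 1e-5

analytic = b[0, 0]  # d out[0] / d a[0, 0]
a_plus, a_minus = a.copy(), a.copy()
a_plus[0, 0] += eps
a_minus[0, 0] -= eps
numeric = (forward(a_plus, b)[0, 0] - forward(a_minus, b)[0, 0]) / (2 * eps)

assert abs(numeric - analytic) < 1e-6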

TEST(Layer, OuterProdLayer) {
TestConfig config;
config.layerConfig.set_type("out_prod");
12 changes: 12 additions & 0 deletions python/paddle/trainer/config_parser.py
@@ -3209,6 +3209,18 @@ def __init__(self, name, inputs, selected_indices, bias=False, **xargs):
self.set_layer_size(size)


@config_layer('dot_prod')
class DotProdLayer(LayerBase):
def __init__(self, name, inputs, device=None):
super(DotProdLayer, self).__init__(
name, 'dot_prod', 0, inputs, device=device)
config_assert(len(inputs) == 2, 'DotProdLayer must have 2 inputs.')
Contributor: Add a check here that the two inputs have the same dimension:

self.get_input_layer(0).size == self.get_input_layer(1).size

Contributor (Author): Done.

config_assert(
self.get_input_layer(0).size == self.get_input_layer(1).size,
"Two inputs should have the same size.")
self.set_layer_size(1)


@config_layer('out_prod')
class OuterProdLayer(LayerBase):
def __init__(self, name, inputs, device=None):
41 changes: 41 additions & 0 deletions python/paddle/trainer_config_helpers/layers.py
@@ -115,6 +115,7 @@
'huber_classification_cost',
'block_expand_layer',
'maxout_layer',
'dot_prod_layer',
'out_prod_layer',
'printer_layer',
'print_layer',
@@ -197,6 +198,7 @@ class LayerType(object):
SCALING_LAYER = 'scaling'
TRANS_LAYER = 'trans'
ROTATE_LAYER = 'rotate'
DOT_PROD_LAYER = 'dot_prod'
OUT_PROD_LAYER = 'out_prod'
FEATURE_MAP_EXPAND_LAYER = 'featmap_expand'

@@ -4140,6 +4142,45 @@ def maxid_layer(input, name=None, layer_attr=None):
size=l.config.size)


@wrap_name_default()
def dot_prod_layer(input1, input2, name=None, layer_attr=None):
Contributor: A unit test needs to be added for this helper.

Contributor (Author): Done.

"""
A layer for computing the dot product of two vectors.

The example usage is:

.. code-block:: python

dot_prod = dot_prod_layer(input1=vec1, input2=vec2)

:param name: The name of this layer. It is optional.
:type name: basestring
:param input1: The first input layer.
:type input1: LayerOutput
:param input2: The second input layer.
:type input2: LayerOutput
:param layer_attr: The extra layer attribute. See ExtraLayerAttribute for
details.
:type layer_attr: ExtraLayerAttribute.
:return: LayerOutput object.
:rtype: LayerOutput
"""
assert isinstance(input1, LayerOutput)
assert isinstance(input2, LayerOutput)
assert input1.size == input2.size, ("Two inputs should have the same size.")

l = Layer(
name=name,
type=LayerType.DOT_PROD_LAYER,
inputs=[input1.name, input2.name],
**ExtraLayerAttribute.to_kwargs(layer_attr))
return LayerOutput(
name=name,
layer_type=LayerType.DOT_PROD_LAYER,
parents=[input1, input2],
size=l.config.size)


@wrap_name_default()
def out_prod_layer(input1, input2, name=None, layer_attr=None):
"""
@@ -10,6 +10,7 @@ test_prelu_layer test_row_conv test_detection_output_layer test_multibox_loss_la
test_recursive_topology test_gated_unit_layer test_clip_layer test_row_l2_norm_layer
test_kmax_seq_socre_layer test_sub_nested_seq_select_layer test_scale_shift_layer
test_seq_slice_layer test_cross_entropy_over_beam test_roi_pool_layer test_pooling3D_layer
-test_conv3d_layer test_deconv3d_layer test_BatchNorm3D test_resize_layer test_scale_sub_region_layer)
+test_conv3d_layer test_deconv3d_layer test_BatchNorm3D test_resize_layer test_scale_sub_region_layer
+test_dot_prod_layer)

export whole_configs=(test_split_datasource)
@@ -0,0 +1,38 @@
type: "nn"
layers {
name: "vector1"
type: "data"
size: 10
active_type: ""
}
layers {
name: "vector2"
type: "data"
size: 10
active_type: ""
}
layers {
name: "__dot_prod_layer_0__"
type: "dot_prod"
size: 1
active_type: ""
inputs {
input_layer_name: "vector1"
}
inputs {
input_layer_name: "vector2"
}
}
input_layer_names: "vector1"
input_layer_names: "vector2"
output_layer_names: "__dot_prod_layer_0__"
sub_models {
name: "root"
layer_names: "vector1"
layer_names: "vector2"
layer_names: "__dot_prod_layer_0__"
input_layer_names: "vector1"
input_layer_names: "vector2"
output_layer_names: "__dot_prod_layer_0__"
is_recurrent_layer_group: false
}
@@ -0,0 +1,7 @@
from paddle.trainer_config_helpers import *

vec1 = data_layer(name='vector1', size=10)
vec2 = data_layer(name='vector2', size=10)
dot_product = dot_prod_layer(input1=vec1, input2=vec2)

outputs(dot_product)
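Beyond the minimal config above, a hedged usage sketch (the feature names and sizes are illustrative, not from this PR) of dot_prod_layer producing a similarity score between two learned projections:

from paddle.trainer_config_helpers import *

user = data_layer(name='user_feature', size=32)
item = data_layer(name='item_feature', size=32)

# Project both inputs to the same dimensionality; the equal-size check
# added in config_parser.py requires input1.size == input2.size.
user_vec = fc_layer(input=user, size=16)
item_vec = fc_layer(input=item, size=16)

# The result is a (batch_size x 1) similarity score.
score = dot_prod_layer(input1=user_vec, input2=item_vec)

outputs(score)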