
Commit 0d56823

add new API: paddle.clone; Tensor.element_size; nn.utils.parameters_to_vector
1 parent 61ef56a commit 0d56823
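
Taken together, the commit wires one C++ helper (VarDesc::ElementSize / VarBase::ElementSize) through pybind into three user-facing Python APIs. A minimal dygraph sketch of the intended usage, assuming a build that includes this commit (the expected values come from the docstrings and tests in the diff below):

    import paddle

    # Tensor.element_size(): bytes per element, determined by dtype
    x = paddle.to_tensor([1.0, 2.0], dtype='float32')
    print(x.element_size())  # 4

    # paddle.clone(): a copy that stays on the autograd graph,
    # so gradients flow back through it to the source tensor
    x.stop_gradient = False
    y = paddle.clone(x)

    # nn.utils.parameters_to_vector(): flatten a parameter list into
    # a single 1-D tensor (10*15 weights + 15 biases = 165 elements)
    linear = paddle.nn.Linear(10, 15)
    vec = paddle.nn.utils.parameters_to_vector(linear.parameters())
    print(vec.shape)  # [165]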

File tree

15 files changed: +442 -40 lines changed


CMakeLists.txt

Lines changed: 2 additions & 0 deletions

@@ -126,6 +126,8 @@ if(WIN32)
   endforeach(flag_var)
 endif()
 
+# NOTE(zhouwei): msvc max/min macro conflict with std::min/max, define NOMINMAX globally
+add_definitions("-DNOMINMAX")
 # windows build turn off warnings, use parallel compiling.
 foreach(flag_var
     CMAKE_CXX_FLAGS CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE

paddle/fluid/framework/var_desc.cc

Lines changed: 5 additions & 0 deletions

@@ -15,6 +15,7 @@ limitations under the License. */
 #include "paddle/fluid/framework/var_desc.h"
 
 #include "glog/logging.h"
+#include "paddle/fluid/framework/data_type.h"
 #include "paddle/fluid/platform/enforce.h"
 
 namespace paddle {
@@ -116,6 +117,10 @@ proto::VarType::Type VarDesc::GetDataType() const {
   return tensor_desc().data_type();
 }
 
+size_t VarDesc::ElementSize() const {
+  return framework::SizeOfType(GetDataType());
+}
+
 std::vector<proto::VarType::Type> VarDesc::GetDataTypes() const {
   std::vector<proto::VarType::TensorDesc> descs = tensor_descs();
   std::vector<proto::VarType::Type> res;

paddle/fluid/framework/var_desc.h

Lines changed: 2 additions & 0 deletions

@@ -96,6 +96,8 @@ class VarDesc {
 
   proto::VarType::Type GetDataType() const;
 
+  size_t ElementSize() const;
+
   std::vector<proto::VarType::Type> GetDataTypes() const;
 
   void SetLoDLevel(int32_t lod_level);

paddle/fluid/imperative/layer.h

Lines changed: 3 additions & 1 deletion

@@ -25,6 +25,7 @@
 #include <utility>
 #include <vector>
 
+#include "paddle/fluid/framework/data_type.h"
 #include "paddle/fluid/framework/operator.h"
 #include "paddle/fluid/framework/type_defs.h"
 #include "paddle/fluid/framework/var_type.h"
@@ -37,7 +38,6 @@
 #include "paddle/fluid/platform/enforce.h"
 #include "paddle/fluid/platform/macros.h"
 #include "paddle/pten/include/core.h"
-
 namespace paddle {
 namespace framework {
 class Variable;
@@ -212,6 +212,8 @@ class VarBase {
 
   framework::proto::VarType::Type DataType() const { return var_->DataType(); }
 
+  size_t ElementSize() const { return framework::SizeOfType(var_->DataType()); }
+
   void SetForwardDataType(framework::proto::VarType::Type data_type) {
     var_->SetForwardDataType(data_type);
   }

paddle/fluid/pybind/imperative.cc

Lines changed: 57 additions & 22 deletions

@@ -2013,35 +2013,70 @@ void BindImperative(py::module *m_ptr) {
             auto *t = self->MutableVar()->GetMutable<framework::LoDTensor>();
             return t->numel();
           })
+      .def("element_size", &imperative::VarBase::ElementSize, R"DOC(
+        Returns the size in bytes of an element in the Tensor.
+
+        Examples:
+          .. code-block:: python
+
+            import paddle
+
+            x = paddle.to_tensor(1, dtype='bool')
+            x.element_size() # 1
+
+            x = paddle.to_tensor(1, dtype='float16')
+            x.element_size() # 2
+
+            x = paddle.to_tensor(1, dtype='float32')
+            x.element_size() # 4
+
+            x = paddle.to_tensor(1, dtype='float64')
+            x.element_size() # 8
+
+            x = paddle.to_tensor(1, dtype='complex128')
+            x.element_size() # 16
+      )DOC")
       .def_property("name", &imperative::VarBase::Name,
                     &imperative::VarBase::SetName)
       .def_property("stop_gradient",
                     &imperative::VarBase::OverridedStopGradient,
                     &imperative::VarBase::SetOverridedStopGradient)
       .def_property("persistable", &imperative::VarBase::Persistable,
                     &imperative::VarBase::SetPersistable)
-      .def_property_readonly(
-          "shape",
-          [](imperative::VarBase &self) {
-            if (self.Var().IsType<framework::LoDTensor>()) {
-              return framework::vectorize<int>(
-                  self.Var().Get<framework::LoDTensor>().dims());
-            } else if (self.Var().IsType<framework::SelectedRows>()) {
-              return framework::vectorize<int>(
-                  self.Var().Get<framework::SelectedRows>().value().dims());
-            } else if (self.Var().IsType<framework::Strings>()) {
-              return std::vector<int>{static_cast<int>(
-                  self.Var().Get<framework::Strings>().size())};
-            } else if (self.Var().IsType<framework::Vocab>()) {
-              return std::vector<int>{
-                  static_cast<int>(self.Var().Get<framework::Vocab>().size())};
-            } else {
-              VLOG(2) << "It is meaningless to get shape of "
-                         "variable type "
-                      << GetTypeName(self);
-              return std::vector<int>();
-            }
-          })
+      .def_property_readonly("shape",
+                             [](imperative::VarBase &self) {
+                               if (self.Var().IsType<framework::LoDTensor>()) {
+                                 return framework::vectorize<int>(
+                                     self.Var()
+                                         .Get<framework::LoDTensor>()
+                                         .dims());
+                               } else if (self.Var()
+                                              .IsType<
+                                                  framework::SelectedRows>()) {
+                                 return framework::vectorize<int>(
+                                     self.Var()
+                                         .Get<framework::SelectedRows>()
+                                         .value()
+                                         .dims());
+                               } else if (self.Var()
+                                              .IsType<framework::Strings>()) {
+                                 return std::vector<int>{static_cast<int>(
+                                     self.Var()
+                                         .Get<framework::Strings>()
+                                         .size())};
+                               } else if (self.Var()
+                                              .IsType<framework::Vocab>()) {
+                                 return std::vector<int>{static_cast<int>(
+                                     self.Var()
+                                         .Get<framework::Vocab>()
+                                         .size())};
+                               } else {
+                                 VLOG(2) << "It is meaningless to get shape of "
+                                            "variable type "
+                                         << GetTypeName(self);
+                                 return std::vector<int>();
+                               }
+                             })
       .def_property_readonly("is_leaf", &imperative::VarBase::IsLeaf,
                              R"DOC(
       Whether a Tensor is leaf Tensor.

paddle/fluid/pybind/protobuf.cc

Lines changed: 2 additions & 0 deletions

@@ -179,6 +179,8 @@ void BindVarDsec(pybind11::module *m) {
            pybind11::return_value_policy::reference)
       .def("dtype", &pd::VarDesc::GetDataType,
            pybind11::return_value_policy::reference)
+      .def("element_size", &pd::VarDesc::ElementSize,
+           pybind11::return_value_policy::reference)
       .def("dtypes", &pd::VarDesc::GetDataTypes,
            pybind11::return_value_policy::reference)
       .def("lod_level", &pd::VarDesc::GetLoDLevel)

python/paddle/__init__.py

Lines changed: 2 additions & 0 deletions

@@ -91,6 +91,7 @@
 from .tensor.creation import empty_like  # noqa: F401
 from .tensor.creation import assign  # noqa: F401
 from .tensor.creation import complex  # noqa: F401
+from .tensor.creation import clone  # noqa: F401
 from .tensor.linalg import matmul  # noqa: F401
 from .tensor.linalg import dot  # noqa: F401
 from .tensor.linalg import norm  # noqa: F401
@@ -587,4 +588,5 @@
     'fmin',
     'moveaxis',
     'repeat_interleave',
+    'clone',
 ]
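
With the export in place, paddle.clone is callable from the top-level namespace. The test below (added to test_assign_op.py, which hints that clone is backed by the assign op) exercises its defining property: unlike Tensor.detach(), the clone stays on the autograd graph, so backpropagating through it reaches the original tensor. A small sketch mirroring the test:

    import paddle

    x = paddle.ones([2])
    x.stop_gradient = False
    clone_x = paddle.clone(x)

    # d(c**3)/dc = 3*c**2 = 3 at c = 1; the gradient also reaches x
    (clone_x ** 3).sum().backward()
    print(clone_x.grad.numpy())  # [3. 3.]
    print(x.grad.numpy())        # [3. 3.]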

python/paddle/fluid/framework.py

Lines changed: 27 additions & 0 deletions

@@ -1396,6 +1396,33 @@ def to_string(self, throw_on_error, with_details=False):
 
     __repr__ = __str__
 
+    def element_size(self):
+        """
+        Returns the size in bytes of an element in the Tensor.
+
+        Examples:
+            .. code-block:: python
+
+                import paddle
+                paddle.enable_static()
+
+                x = paddle.static.data(name='x1', shape=[3, 2], dtype='bool')
+                x.element_size() # 1
+
+                x = paddle.static.data(name='x2', shape=[3, 2], dtype='int16')
+                x.element_size() # 2
+
+                x = paddle.static.data(name='x3', shape=[3, 2], dtype='float16')
+                x.element_size() # 2
+
+                x = paddle.static.data(name='x4', shape=[3, 2], dtype='float32')
+                x.element_size() # 4
+
+                x = paddle.static.data(name='x5', shape=[3, 2], dtype='float64')
+                x.element_size() # 8
+        """
+        return self.desc.element_size()
+
     @property
     def stop_gradient(self):
         """

python/paddle/fluid/tests/unittests/test_assign_op.py

Lines changed: 25 additions & 0 deletions

@@ -169,6 +169,31 @@ def test_assign_BasicTypes(self):
         self.assertTrue(np.allclose(result3.numpy(), np.array([1])))
         paddle.enable_static()
 
+    def test_clone(self):
+        paddle.disable_static()
+        x = paddle.ones([2])
+        x.stop_gradient = False
+        clone_x = paddle.clone(x)
+
+        y = clone_x**3
+        y.backward()
+
+        self.assertTrue(np.array_equal(x, [1, 1]), True)
+        self.assertTrue(np.array_equal(clone_x.grad.numpy(), [3, 3]), True)
+        self.assertTrue(np.array_equal(x.grad.numpy(), [3, 3]), True)
+        paddle.enable_static()
+
+        with program_guard(Program(), Program()):
+            x_np = np.random.randn(2, 3).astype('float32')
+            x = paddle.static.data("X", shape=[2, 3])
+            clone_x = paddle.clone(x)
+            exe = paddle.static.Executor()
+            y_np = exe.run(paddle.static.default_main_program(),
+                           feed={'X': x_np},
+                           fetch_list=[clone_x])[0]
+
+            self.assertTrue(np.array_equal(y_np, x_np), True)
+
 
 class TestAssignOpErrorApi(unittest.TestCase):
     def test_errors(self):

python/paddle/fluid/tests/unittests/test_parameter.py

Lines changed: 51 additions & 14 deletions

@@ -18,18 +18,19 @@
 import copy
 import paddle
 from paddle.fluid.dygraph import guard
-from paddle.fluid.framework import default_main_program
+from paddle.fluid.framework import default_main_program, Variable
 import paddle.fluid.core as core
 from paddle.fluid.executor import Executor
 import paddle.fluid.io as io
 from paddle.fluid.initializer import ConstantInitializer
 import numpy as np
 
+paddle.enable_static()
 main_program = default_main_program()
 
 
 class ParameterChecks(unittest.TestCase):
-    def check_parameter(self):
+    def test_parameter(self):
         shape = [784, 100]
         val = 1.0625
         b = main_program.global_block()
@@ -43,13 +44,13 @@ def check_parameter(self):
         self.assertEqual((784, 100), param.shape)
         self.assertEqual(core.VarDesc.VarType.FP32, param.dtype)
         self.assertEqual(0, param.block.idx)
-        exe = Executor(core.CPUPlace())
+        exe = Executor(paddle.CPUPlace())
         p = exe.run(main_program, fetch_list=[param])[0]
-        self.assertTrue(np.allclose(p, np.ones(shape) * val))
+        self.assertTrue(np.array_equal(p, np.ones(shape) * val))
         p = io.get_parameter_value_by_name('fc.w', exe, main_program)
-        self.assertTrue(np.allclose(np.array(p), np.ones(shape) * val))
+        self.assertTrue(np.array_equal(p, np.ones(shape) * val))
 
-    def check_parambase(self):
+    def test_parambase(self):
         with guard():
             linear = paddle.nn.Linear(10, 10)
             param = linear.weight
@@ -71,7 +72,7 @@ def check_parambase(self):
             pram_copy2 = copy.deepcopy(param, memo)
             self.assertEqual(id(param_copy), id(pram_copy2))
 
-    def check_exceptions(self):
+    def test_exception(self):
         b = main_program.global_block()
         with self.assertRaises(ValueError):
             b.create_parameter(
@@ -86,16 +87,52 @@ def check_exceptions(self):
             b.create_parameter(
                 name='test', shape=[-1], dtype='float32', initializer=None)
 
+    def test_parambase_to_vector(self):
+        with guard():
+            linear1 = paddle.nn.Linear(
+                10,
+                15,
+                paddle.ParamAttr(
+                    initializer=paddle.nn.initializer.Constant(3.)))
 
-class TestParameter(ParameterChecks):
-    def _test_parameter(self):
-        self.check_parameter()
+            vec = paddle.nn.utils.parameters_to_vector(linear1.parameters())
+            self.assertTrue(isinstance(vec, Variable))
+            self.assertTrue(vec.shape, [165])
 
-    def test_parambase(self):
-        self.check_parambase()
+            linear2 = paddle.nn.Linear(10, 15)
+            paddle.nn.utils.vector_to_parameters(vec, linear2.parameters())
+            self.assertTrue(
+                np.array_equal(linear1.weight.numpy(), linear2.weight.numpy()),
+                True)
+            self.assertTrue(
+                np.array_equal(linear1.bias.numpy(), linear2.bias.numpy()),
+                True)
+            self.assertTrue(linear2.weight.is_leaf, True)
+            self.assertTrue(linear2.bias.is_leaf, True)
+
+    def test_parameter_to_vector(self):
+        main_program = paddle.static.Program()
+        start_program = paddle.static.Program()
+        with paddle.static.program_guard(main_program, start_program):
+            linear1 = paddle.nn.Linear(
+                10,
+                15,
+                paddle.ParamAttr(
+                    initializer=paddle.nn.initializer.Constant(3.)))
+
+            vec = paddle.nn.utils.parameters_to_vector(linear1.parameters())
+            self.assertTrue(isinstance(vec, Variable))
+            self.assertTrue(vec.shape, [165])
+
+            linear2 = paddle.nn.Linear(10, 15)
+            paddle.nn.utils.vector_to_parameters(vec, linear2.parameters())
 
-    def test_exceptions(self):
-        self.check_exceptions()
+            exe = paddle.static.Executor()
+            exe.run(start_program)
+            outs = exe.run(main_program,
+                           fetch_list=[linear1.parameters(), linear2.parameters()])
+            self.assertTrue(np.array_equal(outs[0], outs[2]))
+            self.assertTrue(np.array_equal(outs[1], outs[3]))
 
 
 if __name__ == '__main__':
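
For intuition, parameters_to_vector behaves like flattening each parameter in order and concatenating the results, which is where the asserted [165] shape comes from (10*15 weights + 15 biases). A manual equivalent, offered as our own illustrative sketch rather than code from this commit:

    import paddle

    linear = paddle.nn.Linear(10, 15)
    vec = paddle.nn.utils.parameters_to_vector(linear.parameters())

    # Hand-rolled version: flatten and concatenate in parameter order.
    manual = paddle.concat([p.reshape([-1]) for p in linear.parameters()])

    print(vec.shape, manual.shape)  # [165] [165]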
