@@ -40,14 +40,41 @@ class Fill {
       : initializer_(initializer), attrs_(attrs) {}
 
   void operator()(VariableHandle var) {
-    get_global_tape().AddOp(initializer_, {}, {{"Out", {var}}}, attrs_);
+    if (initializer_ == "fill_constant") {
+      // fill_constant is not OperatorWithKernel, so we can't add it to the tape
+      framework::OpDesc op_desc =
+          CreateOpDesc(initializer_, {}, {{"Out", {var}}}, attrs_);
+      ScopeWrapper scope({}, {{"Out", {var}}});
+      framework::OpRegistry::CreateOp(op_desc)->Run(scope,
+                                                    platform::CPUPlace());
+    } else {
+      get_global_tape().AddOp(initializer_, {}, {{"Out", {var}}}, attrs_);
+    }
   }
 
  private:
   const std::string initializer_;
   const framework::AttributeMap attrs_;
 };
 
+void init_params(VariableHandle v,
+                 const std::string &initializer,
+                 const framework::AttributeMap &attrs) {
+  if (initializer == "fill_constant") {
+    // fill_constant is not OperatorWithKernel, so we can't add it to the tape
+    framework::OpDesc op_desc =
+        CreateOpDesc(initializer, {}, {{"Out", {v}}}, attrs);
+    ScopeWrapper scope({}, {{"Out", {v}}});
+    framework::OpRegistry::CreateOp(op_desc)->Run(scope, platform::CPUPlace());
+  } else {
+    Tape init_tape;
+    init_tape.AddOp(initializer, {}, {{"Out", {v}}}, attrs);
+    init_tape.Forward();
+  }
+}
+
+// TODO(tonyyang-svail): change this to a function
+// https://github.com/PaddlePaddle/tape/issues/23
 class Mean {
  public:
   VariableHandle operator()(VariableHandle var) {
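
Note on the new helper: init_params runs fill_constant eagerly through a scratch scope (it is not an OperatorWithKernel, so it cannot be recorded on the tape), and routes every other initializer through a throwaway Tape that is forwarded immediately. A minimal call-site sketch, assuming only the declarations in this file; the variable name and shape are illustrative:

// Hypothetical call site for init_params: zero-fill a bias-like variable.
VariableHandle b(new Variable("b"));
framework::AttributeMap attrs;
attrs["dtype"] = paddle::framework::proto::VarType::Type::VarType_Type_FP32;
attrs["shape"] = std::vector<int>{16};
attrs["value"] = 0.0f;
init_params(b, "fill_constant", attrs);  // runs eagerly, bypassing the tape
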
@@ -82,8 +109,6 @@ class Linear {
       : w_(new Variable("LinearWeight")),
         b_(new Variable("LinearBias")),
         act_(act) {
-    Tape init_tape;
-
     // Use Xavier to initialize Weight
     float limit = sqrt(6.0 / static_cast<float>(in_dim + out_dim));
     framework::AttributeMap attrs;
@@ -92,15 +117,13 @@ class Linear {
     attrs["min"] = -limit;
     attrs["max"] = limit;
     attrs["seed"] = RandomSeed::GetRandomSeed();
-    init_tape.AddOp("uniform_random", {}, {{"Out", {w_}}}, attrs);
+    init_params(w_, "uniform_random", attrs);
 
     // Use fill zero to initialize Bias
     attrs["dtype"] = paddle::framework::proto::VarType::Type::VarType_Type_FP32;
     attrs["shape"] = std::vector<int>{out_dim};
     attrs["value"] = 0.0f;
-    init_tape.AddOp("fill_constant", {}, {{"Out", {b_}}}, attrs);
-
-    init_tape.Forward();
+    init_params(b_, "fill_constant", attrs);
   }
 
   VariableHandle operator()(VariableHandle input) {
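
For context, a Linear layer refactored this way is used roughly as below. The constructor signature (in_dim, out_dim, act) is inferred from the hunks above, and the dimensions and activation are illustrative:

// Sketch: weights and bias are now initialized via init_params in the ctor.
Linear linear(784, 10, "softmax");       // hypothetical dims and activation
VariableHandle out = linear(input);      // input is a VariableHandle
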
@@ -134,8 +157,6 @@ class Convolution2D {
       : w_(new Variable("ConvolutionWeight")),
         b_(new Variable("ConvolutionBias")),
         act_(act) {
-    Tape init_tape;
-
     // Use Xavier to initialize Weight
     float fan_in = c_in * f * f, fan_out = c_out * f * f;
     float limit = sqrt(6.0 / (fan_in + fan_out));
@@ -145,15 +166,13 @@ class Convolution2D {
     attrs["min"] = -limit;
     attrs["max"] = limit;
     attrs["seed"] = RandomSeed::GetRandomSeed();
-    init_tape.AddOp("uniform_random", {}, {{"Out", {w_}}}, attrs);
+    init_params(w_, "uniform_random", attrs);
 
     // Use fill zero to initialize Bias
     attrs["dtype"] = paddle::framework::proto::VarType::Type::VarType_Type_FP32;
     attrs["shape"] = std::vector<int>{c_out};
     attrs["value"] = 0.0f;
-    init_tape.AddOp("fill_constant", {}, {{"Out", {b_}}}, attrs);
-
-    init_tape.Forward();
+    init_params(b_, "fill_constant", attrs);
   }
 
   VariableHandle operator()(VariableHandle input) {
@@ -190,16 +209,12 @@ class Convolution2D {
 class SGD {
  public:
   explicit SGD(float learning_rate) : learning_rate_(new Variable("sgd")) {
-    Tape init_tape;
-
     std::string initializer = "fill_constant";
     framework::AttributeMap attrs;
     attrs["dtype"] = paddle::framework::proto::VarType::Type::VarType_Type_FP32;
     attrs["shape"] = std::vector<int>{1};
     attrs["value"] = learning_rate;
-    init_tape.AddOp(initializer, {}, {{"Out", {learning_rate_}}}, attrs);
-
-    init_tape.Forward();
+    init_params(learning_rate_, initializer, attrs);
   }
 
   void Update(VariableHandle input) {
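
SGD follows the same refactor: the scalar learning rate becomes a one-element variable filled through init_params. A usage sketch, assuming Update applies an SGD step to the parameter it is given (the rate and parameter are illustrative):

// Hypothetical optimizer step for one parameter.
SGD sgd(0.001f);
sgd.Update(w);  // w is a parameter VariableHandle with a gradient on the tape
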
@@ -224,7 +239,6 @@ VariableHandle CreateRecordioFileReader(std::string filename,
                                         std::vector<int> ranks,
                                         std::vector<int> lod_levels) {
   VariableHandle reader(new paddle::tape::Variable("reader"));
-  reader->MutableDesc()->SetType(paddle::framework::proto::VarType::READER);
 
   framework::OpDesc op_desc = CreateOpDesc("create_recordio_file_reader",
                                            {},
@@ -240,10 +254,7 @@ VariableHandle CreateRecordioFileReader(std::string filename,
 }
 
 void ReadNext(VariableHandle reader, VariableHandle data_holder) {
-  PADDLE_ENFORCE_EQ(reader->Desc().GetType(),
-                    paddle::framework::proto::VarType::READER);
-  PADDLE_ENFORCE_EQ(data_holder->Desc().GetType(),
-                    paddle::framework::proto::VarType::LOD_TENSOR_ARRAY);
+  PADDLE_ENFORCE(reader->Var().IsType<framework::ReaderHolder>());
 
   reader->GetMutable<paddle::framework::ReaderHolder>()->ReadNext(
       data_holder->GetMutable<paddle::framework::LoDTensorArray>());
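
ReadNext now validates the variable's runtime payload (a framework::ReaderHolder) instead of its desc type, which CreateRecordioFileReader no longer sets. Consuming the reader then looks roughly like this sketch; the variable names are illustrative:

// Sketch: pull the next batch into a LoDTensorArray holder.
VariableHandle data_holder(new paddle::tape::Variable("data"));
ReadNext(reader, data_holder);  // enforce fires if reader holds no ReaderHolder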