@@ -30,10 +30,10 @@ void regclass_pyngraph_op_BatchNormTraining(py::module m)
     batch_norm_training(m, "BatchNormTraining");
     batch_norm_training.doc() =
        "ngraph.impl.op.BatchNormTraining wraps ngraph::op::BatchNormTraining";
-    batch_norm_training.def(py::init<double,
+    batch_norm_training.def(py::init<const std::shared_ptr<ngraph::Node>&,
                                      const std::shared_ptr<ngraph::Node>&,
                                      const std::shared_ptr<ngraph::Node>&,
-                                     const std::shared_ptr<ngraph::Node>&>());
+                                     double>());
 }
 
 void regclass_pyngraph_op_BatchNormInference(py::module m)
@@ -45,12 +45,12 @@ void regclass_pyngraph_op_BatchNormInference(py::module m)
     batch_norm_inference.doc() =
        "ngraph.impl.op.BatchNormInference wraps ngraph::op::BatchNormInference";
 
-    batch_norm_inference.def(py::init<double,
+    batch_norm_inference.def(py::init<const std::shared_ptr<ngraph::Node>&,
                                       const std::shared_ptr<ngraph::Node>&,
                                       const std::shared_ptr<ngraph::Node>&,
                                       const std::shared_ptr<ngraph::Node>&,
                                       const std::shared_ptr<ngraph::Node>&,
-                                      const std::shared_ptr<ngraph::Node>&>());
+                                      double>());
 }
 
 void regclass_pyngraph_op_BatchNormTrainingBackprop(py::module m)
@@ -61,11 +61,11 @@ void regclass_pyngraph_op_BatchNormTrainingBackprop(py::module m)
     batch_norm_training_backprop(m, "BatchNormTrainingBackprop");
     batch_norm_training_backprop.doc() =
        "ngraph.impl.op.BatchNormTrainingBackprop wraps ngraph::op::BatchNormTrainingBackprop";
-    batch_norm_training_backprop.def(py::init<double,
+    batch_norm_training_backprop.def(py::init<const std::shared_ptr<ngraph::Node>&,
                                               const std::shared_ptr<ngraph::Node>&,
                                               const std::shared_ptr<ngraph::Node>&,
                                               const std::shared_ptr<ngraph::Node>&,
                                               const std::shared_ptr<ngraph::Node>&,
                                               const std::shared_ptr<ngraph::Node>&,
-                                              const std::shared_ptr<ngraph::Node>&>());
+                                              double>());
 }
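
Note: all three hunks make the same change to the py::init<> template, moving the epsilon double from the first constructor argument to the last, after the ngraph::Node operands. A minimal sketch of how the reordered BatchNormTraining binding could additionally expose keyword arguments on the Python side is below; the names data, gamma, beta, and epsilon are assumptions for illustration only and are not taken from this change.

// Hypothetical variant (not part of this diff), assuming the three Node
// arguments are the input data, gamma, and beta tensors in that order;
// py::arg makes the new argument ordering explicit to Python callers.
batch_norm_training.def(py::init<const std::shared_ptr<ngraph::Node>&,
                                 const std::shared_ptr<ngraph::Node>&,
                                 const std::shared_ptr<ngraph::Node>&,
                                 double>(),
                        py::arg("data"),
                        py::arg("gamma"),
                        py::arg("beta"),
                        py::arg("epsilon"));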