Skip to content

Commit 4b86d21

Browse files
committed
Merge branch 'develop' into remove-deepep-legacy-apis
2 parents 3ba92a9 + 3d3826f commit 4b86d21

81 files changed

Lines changed: 948 additions & 740 deletions

File tree

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

paddle/common/flags.cc

Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -102,6 +102,19 @@ PHI_DEFINE_EXPORTED_int32(
102102
0,
103103
"Setting the check and print level when FLAGS_check_nan_inf is set.");
104104

105+
/**
106+
* Operator related FLAG
107+
* Name: FLAGS_check_nan_inf_blacklist
108+
* Since Version:
109+
* Value Range: string, default=""
110+
* Example: FLAGS_check_nan_inf_blacklist="op1,op2,op3"
111+
* Note: Blacklist of ops to skip when checking NAN/INF
112+
*/
113+
PHI_DEFINE_EXPORTED_string(
114+
check_nan_inf_blacklist,
115+
"",
116+
"Blacklist of ops to skip when checking NAN/INF, split by ','");
117+
105118
/**
106119
* Operator related FLAG
107120
* Name: FLAGS_check_nan_inf

paddle/fluid/eager/nan_inf_utils.cc

Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,7 @@
2323
#include "paddle/phi/core/distributed/auto_parallel/dist_tensor.h"
2424
#include "paddle/phi/core/selected_rows.h"
2525

26+
COMMON_DECLARE_string(check_nan_inf_blacklist);
2627
COMMON_DECLARE_int32(check_nan_inf_level);
2728
namespace egr {
2829

@@ -82,6 +83,27 @@ bool CheckOp(const std::string& api_name) {
8283
}
8384

8485
void CheckTensorHasNanOrInf(const std::string& api_name, const Tensor& tensor) {
86+
if (api_name == "empty") {
87+
VLOG(4) << "Current op is \"empty\", skip nan inf check.";
88+
return;
89+
}
90+
91+
if (api_name == "empty_like") {
92+
VLOG(4) << "Current op is \"empty_like\", skip nan inf check.";
93+
return;
94+
}
95+
96+
if (!FLAGS_check_nan_inf_blacklist.empty()) {
97+
std::stringstream blacklist_ss(FLAGS_check_nan_inf_blacklist);
98+
std::string blacklisted_op;
99+
while (std::getline(blacklist_ss, blacklisted_op, ',')) {
100+
if (api_name == blacklisted_op) {
101+
VLOG(4) << "Current op is in blacklist, skip nan inf check: "
102+
<< api_name;
103+
return;
104+
}
105+
}
106+
}
85107
auto op_name = phi::TransToFluidOpName(api_name);
86108
if (tensor.initialized() && CheckOp(op_name)) {
87109
auto& tensor_name = tensor.name();

paddle/fluid/pybind/compiled_program.cc

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -855,7 +855,7 @@ void BindCompiledProgram(pybind11::module &m) { // NOLINT
855855
optimization passes should be defined in this way. BuildStrategy
856856
cannot be updated after being finalized.)DOC");
857857

858-
cp.def(py::init<const std::vector<phi::Place> &,
858+
cp.def(py::init<const std::vector<Place> &,
859859
const std::vector<std::string> &,
860860
const std::string &,
861861
Scope *,

paddle/fluid/pybind/data_set_py.cc

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -45,7 +45,7 @@ class IterableDatasetWrapper {
4545
public:
4646
IterableDatasetWrapper(framework::Dataset *dataset,
4747
const std::vector<std::string> &slots,
48-
const std::vector<phi::Place> &places,
48+
const std::vector<Place> &places,
4949
size_t batch_size,
5050
bool drop_last)
5151
: dataset_(dataset),
@@ -184,7 +184,7 @@ class IterableDatasetWrapper {
184184
private:
185185
framework::Dataset *dataset_;
186186
std::vector<std::string> slots_;
187-
std::vector<phi::Place> places_;
187+
std::vector<Place> places_;
188188
size_t batch_size_;
189189
bool drop_last_;
190190

@@ -382,7 +382,7 @@ void BindDataset(py::module *m) {
382382
py::class_<IterableDatasetWrapper>(*m, "IterableDatasetWrapper")
383383
.def(py::init<framework::Dataset *,
384384
const std::vector<std::string> &,
385-
const std::vector<phi::Place> &,
385+
const std::vector<Place> &,
386386
size_t,
387387
bool>())
388388
.def("_start", &IterableDatasetWrapper::Start)

paddle/fluid/pybind/eager.cc

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -75,7 +75,7 @@ void EmptyTensorInitializer(TensorObject* self,
7575
const Place& place,
7676
bool persistable = false,
7777
int stop_gradient = -1,
78-
paddle::DataType dtype = paddle::DataType::FLOAT32,
78+
DataType dtype = DataType::FLOAT32,
7979
const std::vector<int>& dims = {0},
8080
framework::proto::VarType::Type var_type =
8181
framework::proto::VarType::DENSE_TENSOR,
@@ -774,7 +774,7 @@ Tensor is the basic data structure in PaddlePaddle. There are some ways to creat
774774
* (should have at least five parameter, five parameters create DenseTensor,
775775
* seven parameters create DistTensor)
776776
* def __init__ (
777-
* ** dtype: paddle::DataType,
777+
* ** dtype: DataType,
778778
* ** dims: vector<int>,
779779
* ** name: std::string,
780780
* ** type: paddle::framework::proto::VarType::DENSE_TENSOR,
@@ -991,7 +991,7 @@ int TensorInit(PyObject* self, PyObject* args, PyObject* kwargs) {
991991
"forbidden. Please check your code and make sure you new a "
992992
"persistable before calling this constructor."));
993993

994-
paddle::DataType dtype = CastPyArg2DataType(kw_dtype, "TensorInit", 0);
994+
DataType dtype = CastPyArg2DataType(kw_dtype, "TensorInit", 0);
995995
std::vector<int> dims = CastPyArg2VectorOfInt(kw_dims, 0);
996996

997997
std::string act_name = "";
@@ -1098,7 +1098,7 @@ int TensorInit(PyObject* self, PyObject* args, PyObject* kwargs) {
10981098
if (PyObject_TypeCheck(arg0_ptr, g_data_type_pytype) ||
10991099
PyObject_TypeCheck(arg0_ptr, g_vartype_pytype)) {
11001100
VLOG(6) << "Calling case2's initializer.";
1101-
paddle::DataType dtype = CastPyArg2DataType(arg0_ptr, "TensorInit", 0);
1101+
DataType dtype = CastPyArg2DataType(arg0_ptr, "TensorInit", 0);
11021102
std::vector<int> dims =
11031103
CastPyArg2VectorOfInt(PyTuple_GET_ITEM(args, 1), 1);
11041104
std::string act_name = "";
@@ -1174,7 +1174,7 @@ int TensorInit(PyObject* self, PyObject* args, PyObject* kwargs) {
11741174
if (PyObject_TypeCheck(arg0_ptr, g_data_type_pytype) ||
11751175
PyObject_TypeCheck(arg0_ptr, g_vartype_pytype)) {
11761176
VLOG(6) << "Calling case2's initializer.";
1177-
paddle::DataType dtype = CastPyArg2DataType(arg0_ptr, "TensorInit", 0);
1177+
DataType dtype = CastPyArg2DataType(arg0_ptr, "TensorInit", 0);
11781178
std::vector<int> dims =
11791179
CastPyArg2VectorOfInt(PyTuple_GET_ITEM(args, 1), 1);
11801180
std::string act_name = "";

paddle/fluid/pybind/eager_math_op_patch.cc

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -74,7 +74,7 @@ static bool IsNumpyArray(PyObject* obj) {
7474
}
7575

7676
void InitTensorWithNumpyValue(const py::object& array,
77-
const phi::Place& place,
77+
const Place& place,
7878
Tensor* self,
7979
bool zero_copy = false) {
8080
PADDLE_ENFORCE_EQ(
@@ -143,7 +143,7 @@ std::set<DataType> _complex_dtypes{
143143
// '__eq__',
144144
// '__ne__'
145145

146-
void SetDevice(phi::Place place) {
146+
void SetDevice(Place place) {
147147
if (phi::is_gpu_place(place)) {
148148
#if defined(PADDLE_WITH_CUDA) || defined(PADDLE_WITH_HIP)
149149
phi::backends::gpu::SetDeviceId(place.device);

paddle/fluid/pybind/eager_method.cc

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -80,7 +80,7 @@ namespace paddle::pybind {
8080

8181
extern void InitTensorWithNumpyValue(TensorObject* self,
8282
const pybind11::object& array,
83-
const phi::Place& place,
83+
const Place& place,
8484
bool zero_copy);
8585

8686
extern PyTypeObject* p_tensor_type;
@@ -625,8 +625,8 @@ static PyObject* tensor_method__is_dense_tensor_hold_allocation(
625625
EAGER_CATCH_AND_THROW_RETURN_NULL
626626
}
627627

628-
static void IncreaseTensorReferenceCountUntilCopyComplete(
629-
const Tensor& tensor, const phi::Place& place) {
628+
static void IncreaseTensorReferenceCountUntilCopyComplete(const Tensor& tensor,
629+
const Place& place) {
630630
auto place_ = phi::is_gpu_place(place) ? place : tensor.place();
631631

632632
auto tracer = egr::Controller::Instance().GetCurrentTracer();

paddle/fluid/pybind/eager_utils.cc

Lines changed: 28 additions & 28 deletions
Original file line numberDiff line numberDiff line change
@@ -730,10 +730,10 @@ std::vector<std::vector<size_t>> CastPyArg2VectorOfVectorOfSize_t(
730730
return result;
731731
}
732732

733-
phi::Place CastPyArg2Place(PyObject* obj, ssize_t arg_pos) {
734-
phi::Place place;
733+
Place CastPyArg2Place(PyObject* obj, ssize_t arg_pos) {
734+
Place place;
735735
if (PyObject_TypeCheck(obj, g_place_pytype)) { // NOLINT
736-
place = ::pybind11::handle(obj).cast<phi::Place>();
736+
place = ::pybind11::handle(obj).cast<Place>();
737737
} else if (PyObject_TypeCheck(obj, g_cudaplace_pytype)) {
738738
place = ::pybind11::handle(obj).cast<GPUPlace>();
739739
} else if (PyObject_TypeCheck(obj, g_cpuplace_pytype)) {
@@ -966,20 +966,20 @@ paddle::framework::proto::VarType::Type CastPyArg2ProtoType(PyObject* obj,
966966
return dtype;
967967
}
968968

969-
paddle::DataType CastPyArg2DataTypeDirectly(PyObject* obj,
970-
const std::string& op_type,
971-
ssize_t arg_pos) {
969+
DataType CastPyArg2DataTypeDirectly(PyObject* obj,
970+
const std::string& op_type,
971+
ssize_t arg_pos) {
972972
if (obj == Py_None) {
973973
return DataType::UNDEFINED;
974974
}
975975

976-
paddle::DataType dtype;
976+
DataType dtype;
977977
if (PyObject_TypeCheck(obj, g_data_type_pytype)) {
978-
dtype = ::pybind11::handle(obj).cast<paddle::DataType>();
978+
dtype = ::pybind11::handle(obj).cast<DataType>();
979979
} else {
980980
PADDLE_THROW(common::errors::InvalidType(
981981
"%s: argument (position %d) must be "
982-
"one of paddle::DataType, "
982+
"one of DataType, "
983983
"but got %s",
984984
op_type,
985985
arg_pos + 1,
@@ -988,10 +988,10 @@ paddle::DataType CastPyArg2DataTypeDirectly(PyObject* obj,
988988
return dtype;
989989
}
990990

991-
paddle::DataType CastPyArg2DataTypeDirectly(PyObject* obj,
992-
const std::string& op_type,
993-
ssize_t arg_pos,
994-
paddle::DataType default_value) {
991+
DataType CastPyArg2DataTypeDirectly(PyObject* obj,
992+
const std::string& op_type,
993+
ssize_t arg_pos,
994+
DataType default_value) {
995995
if (obj == nullptr) {
996996
return default_value;
997997
} else {
@@ -1229,7 +1229,7 @@ PyObject* ToPyObject(const std::vector<std::vector<Tensor>>& value,
12291229
return result;
12301230
}
12311231

1232-
PyObject* ToPyObject(const phi::Place& value) {
1232+
PyObject* ToPyObject(const Place& value) {
12331233
auto obj = ::pybind11::cast(value);
12341234
obj.inc_ref();
12351235
return obj.ptr();
@@ -2954,24 +2954,24 @@ std::vector<Tensor>& GetTensorListFromArgsWithBuffer(
29542954
return result;
29552955
}
29562956

2957-
paddle::Place CastPyArg2Place(PyObject* obj,
2958-
const std::string& op_type,
2959-
ssize_t arg_pos) {
2957+
Place CastPyArg2Place(PyObject* obj,
2958+
const std::string& op_type,
2959+
ssize_t arg_pos) {
29602960
return CastPyArg2Place(obj, arg_pos);
29612961
}
2962-
paddle::Place CastPyArg2Place(PyObject* obj,
2963-
const std::string& op_type,
2964-
ssize_t arg_pos,
2965-
paddle::Place default_place) {
2962+
Place CastPyArg2Place(PyObject* obj,
2963+
const std::string& op_type,
2964+
ssize_t arg_pos,
2965+
Place default_place) {
29662966
if (obj != nullptr) {
29672967
return CastPyArg2Place(obj, op_type, arg_pos);
29682968
} else {
29692969
return default_place;
29702970
}
29712971
}
2972-
paddle::DataType CastPyArg2DataType(PyObject* obj,
2973-
const std::string& op_type,
2974-
ssize_t arg_pos) {
2972+
DataType CastPyArg2DataType(PyObject* obj,
2973+
const std::string& op_type,
2974+
ssize_t arg_pos) {
29752975
if (obj == Py_None) {
29762976
return DataType::UNDEFINED;
29772977
}
@@ -2994,10 +2994,10 @@ paddle::DataType CastPyArg2DataType(PyObject* obj,
29942994
return NumpyDtype2TensorDtype(type_num);
29952995
}
29962996
}
2997-
paddle::DataType CastPyArg2DataType(PyObject* obj,
2998-
const std::string& op_type,
2999-
ssize_t arg_pos,
3000-
paddle::DataType default_value) {
2997+
DataType CastPyArg2DataType(PyObject* obj,
2998+
const std::string& op_type,
2999+
ssize_t arg_pos,
3000+
DataType default_value) {
30013001
if (obj != nullptr) {
30023002
return CastPyArg2DataType(obj, op_type, arg_pos);
30033003
} else {

paddle/fluid/pybind/eager_utils.h

Lines changed: 21 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -81,7 +81,7 @@ std::vector<Tensor> CastPyArg2VectorOfTensor(
8181
PyObject* obj,
8282
ssize_t arg_pos,
8383
const phi::distributed::ProcessMesh* mesh = nullptr);
84-
phi::Place CastPyArg2Place(PyObject* obj, ssize_t arg_pos);
84+
Place CastPyArg2Place(PyObject* obj, ssize_t arg_pos);
8585
DenseTensor CastPyArg2FrameworkTensor(PyObject* obj, ssize_t arg_pos);
8686
std::vector<DenseTensor> CastPyArg2VectorOfTensorBase(PyObject* obj,
8787
ssize_t arg_pos);
@@ -157,7 +157,7 @@ PyObject* ToPyObject(const std::vector<Tensor>& value,
157157
bool return_py_none_if_not_initialize = false);
158158
PyObject* ToPyObject(const std::vector<std::vector<Tensor>>& value,
159159
bool return_py_none_if_not_initialize = false);
160-
PyObject* ToPyObject(const phi::Place& value);
160+
PyObject* ToPyObject(const Place& value);
161161
PyObject* ToPyObject(const phi::DenseTensor* value);
162162
PyObject* ToPyObject(const phi::distributed::DistTensor* value);
163163
PyObject* ToPyObject(const phi::distributed::TensorDistAttr* value);
@@ -443,29 +443,29 @@ paddle::experimental::IntArray CastPyArg2IntArray(
443443
const std::string& op_type,
444444
ssize_t arg_pos,
445445
paddle::experimental::IntArray default_value);
446-
paddle::Place CastPyArg2Place(PyObject* obj,
447-
const std::string& op_type,
448-
ssize_t arg_pos);
449-
paddle::Place CastPyArg2Place(PyObject* obj,
450-
const std::string& op_type,
451-
ssize_t arg_pos,
452-
paddle::Place default_place);
453-
454-
paddle::DataType CastPyArg2DataType(PyObject* obj,
446+
Place CastPyArg2Place(PyObject* obj,
447+
const std::string& op_type,
448+
ssize_t arg_pos);
449+
Place CastPyArg2Place(PyObject* obj,
450+
const std::string& op_type,
451+
ssize_t arg_pos,
452+
Place default_place);
453+
454+
DataType CastPyArg2DataType(PyObject* obj,
455+
const std::string& op_type,
456+
ssize_t arg_pos);
457+
DataType CastPyArg2DataType(PyObject* obj,
458+
const std::string& op_type,
459+
ssize_t arg_pos,
460+
DataType default_value);
461+
462+
DataType CastPyArg2DataTypeDirectly(PyObject* obj,
455463
const std::string& op_type,
456464
ssize_t arg_pos);
457-
paddle::DataType CastPyArg2DataType(PyObject* obj,
465+
DataType CastPyArg2DataTypeDirectly(PyObject* obj,
458466
const std::string& op_type,
459467
ssize_t arg_pos,
460-
paddle::DataType default_value);
461-
462-
paddle::DataType CastPyArg2DataTypeDirectly(PyObject* obj,
463-
const std::string& op_type,
464-
ssize_t arg_pos);
465-
paddle::DataType CastPyArg2DataTypeDirectly(PyObject* obj,
466-
const std::string& op_type,
467-
ssize_t arg_pos,
468-
paddle::DataType default_value);
468+
DataType default_value);
469469

470470
phi::distributed::TensorDistAttr CastPyArg2DistAttr(PyObject* obj,
471471
ssize_t arg_pos);

0 commit comments

Comments (0)