diff --git a/paddle/fluid/inference/api/analysis_predictor.cc b/paddle/fluid/inference/api/analysis_predictor.cc
index 4bfc24555d681e..0e9a42d81ff6cf 100644
--- a/paddle/fluid/inference/api/analysis_predictor.cc
+++ b/paddle/fluid/inference/api/analysis_predictor.cc
@@ -580,9 +580,6 @@ void AnalysisPredictor::PrepareArgument() {
   if (!config_.model_dir().empty()) {
     argument_.SetModelDir(config_.model_dir());
   } else {
-    PADDLE_ENFORCE_EQ(config_.params_file().empty(), false,
-                      platform::errors::PreconditionNotMet(
-                          "Either model_dir or param_file should be set."));
     PADDLE_ENFORCE_EQ(config_.prog_file().empty(), false,
                       platform::errors::PreconditionNotMet(
                           "Either model_dir or prog_file should be set."));
@@ -1135,7 +1132,7 @@ bool AnalysisPredictor::LoadProgramDesc() {
   std::string filename;
   if (!config_.model_dir().empty()) {
     filename = config_.model_dir() + "/__model__";
-  } else if (!config_.prog_file().empty() && !config_.params_file().empty()) {
+  } else if (!config_.prog_file().empty()) {
     // All parameters are saved in a single file.
     // The file names should be consistent with that used
     // in Python API `fluid.io.save_inference_model`.
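
The diff above drops the requirement that `params_file` be set whenever `model_dir` is empty: both `PrepareArgument()` and `LoadProgramDesc()` now accept a configuration that provides only `prog_file`. As a minimal sketch of the caller-side effect, the snippet below configures an `AnalysisConfig` with a program file and no parameters file; the header name and model path are placeholders and depend on how the inference library is installed, so treat this as an illustration rather than a test from this PR.

```cpp
#include <memory>

#include "paddle_inference_api.h"  // header location varies with the install layout

int main() {
  paddle::AnalysisConfig config;
  // Only the program file is configured; params_file is intentionally left
  // empty. Before this change, PrepareArgument() rejected this setup with
  // "Either model_dir or param_file should be set."
  config.SetProgFile("./inference_model/__model__");  // hypothetical path
  config.DisableGpu();

  auto predictor =
      paddle::CreatePaddlePredictor<paddle::AnalysisConfig>(config);
  return predictor != nullptr ? 0 : 1;
}
```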