Skip to content

Commit

Permalink
alter the capi of PD_PredictorRun to provide proper function, test=de…
Browse files Browse the repository at this point in the history
…velop (PaddlePaddle#20697)

Modify how the out_size parameter is passed to the function.
  • Loading branch information
FrostML authored Oct 18, 2019
1 parent 4eeda9d commit d39777f
Show file tree
Hide file tree
Showing 3 changed files with 15 additions and 15 deletions.
4 changes: 2 additions & 2 deletions paddle/fluid/inference/capi/c_api.h
Original file line number Diff line number Diff line change
Expand Up @@ -99,8 +99,8 @@ PADDLE_CAPI_EXPORT extern int* PD_GetPaddleTensorShape(const PD_Tensor* tensor,
// AnalysisPredictor
PADDLE_CAPI_EXPORT extern bool PD_PredictorRun(const PD_AnalysisConfig* config,
PD_Tensor* inputs, int in_size,
PD_Tensor* output_data,
int** out_size, int batch_size);
PD_Tensor** output_data,
int* out_size, int batch_size);

PADDLE_CAPI_EXPORT extern bool PD_PredictorZeroCopyRun(
const PD_AnalysisConfig* config, PD_ZeroCopyData* inputs, int in_size,
Expand Down
7 changes: 4 additions & 3 deletions paddle/fluid/inference/capi/pd_predictor.cc
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ using paddle::ConvertToACPrecision;
extern "C" {

bool PD_PredictorRun(const PD_AnalysisConfig* config, PD_Tensor* inputs,
int in_size, PD_Tensor* output_data, int** out_size,
int in_size, PD_Tensor** output_data, int* out_size,
int batch_size) {
PADDLE_ENFORCE_NOT_NULL(config);
static std::map<std::string, std::unique_ptr<paddle::PaddlePredictor>>
Expand All @@ -43,10 +43,11 @@ bool PD_PredictorRun(const PD_AnalysisConfig* config, PD_Tensor* inputs,
std::vector<paddle::PaddleTensor> out;
if (predictor->Run(in, &out, batch_size)) {
int osize = out.size();
*output_data = new PD_Tensor[osize];
for (int i = 0; i < osize; ++i) {
output_data[i].tensor = out[i];
output_data[i]->tensor = out[i];
}
*out_size = &osize;
*out_size = osize;
return true;
}
return false;
Expand Down
19 changes: 9 additions & 10 deletions paddle/fluid/inference/tests/api/analyzer_capi_pd_tensor_tester.cc
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ limitations under the License. */
#include <string>
#include <vector>
#include "paddle/fluid/inference/capi/c_api.h"
#include "paddle/fluid/inference/capi/c_api_internal.h"
#include "paddle/fluid/inference/tests/api/tester_helper.h"

namespace paddle {
Expand Down Expand Up @@ -56,16 +57,15 @@ void PD_run() {
PD_SetPaddleTensorData(input, buf);

PD_Tensor* out_data = PD_NewPaddleTensor();
int* out_size;
PD_PredictorRun(config, input, 1, out_data, &out_size, 1);
LOG(INFO) << *out_size;
int out_size;
PD_PredictorRun(config, input, 1, &out_data, &out_size, 1);
LOG(INFO) << out_size;
LOG(INFO) << PD_GetPaddleTensorName(out_data);
LOG(INFO) << PD_GetPaddleTensorDType(out_data);
PD_PaddleBuf* b = PD_GetPaddleTensorData(out_data);
LOG(INFO) << PD_PaddleBufLength(b);
LOG(INFO) << PD_PaddleBufLength(b) / sizeof(float);
float* result = static_cast<float*>(PD_PaddleBufData(b));
LOG(INFO) << *result;
PD_PaddleBufResize(b, 500);
PD_DeletePaddleTensor(input);
int* size;
PD_GetPaddleTensorShape(out_data, &size);
Expand Down Expand Up @@ -132,16 +132,15 @@ void buffer_run() {
PD_SetPaddleTensorData(input, buf);

PD_Tensor* out_data = PD_NewPaddleTensor();
int* out_size;
PD_PredictorRun(config, input, 1, out_data, &out_size, 1);
LOG(INFO) << *out_size;
int out_size;
PD_PredictorRun(config, input, 1, &out_data, &out_size, 1);
LOG(INFO) << out_size;
LOG(INFO) << PD_GetPaddleTensorName(out_data);
LOG(INFO) << PD_GetPaddleTensorDType(out_data);
PD_PaddleBuf* b = PD_GetPaddleTensorData(out_data);
LOG(INFO) << PD_PaddleBufLength(b);
LOG(INFO) << PD_PaddleBufLength(b) / sizeof(float);
float* result = static_cast<float*>(PD_PaddleBufData(b));
LOG(INFO) << *result;
PD_PaddleBufResize(b, 500);
PD_DeletePaddleTensor(input);
PD_DeletePaddleBuf(buf);
}
Expand Down

0 comments on commit d39777f

Please sign in to comment.