blob: 0eaa3bfc54eea4f8a50033959e276fbebe6beaa4 [file] [log] [blame]
// Copyright 2020 The Chromium OS Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// A simplified interface to the ML service. Used to implement the ml_cmdline
// tool.
7#include "ml/simple.h"
8
9#include <string>
10#include <utility>
11#include <vector>
12
13#include <base/bind.h>
14#include <base/run_loop.h>
15#include <mojo/public/cpp/bindings/interface_request.h>
16
17#include "ml/machine_learning_service_impl.h"
18#include "ml/mojom/graph_executor.mojom.h"
19#include "ml/mojom/machine_learning_service.mojom.h"
20#include "ml/mojom/model.mojom.h"
21#include "ml/tensor_view.h"
22
23using ::chromeos::machine_learning::mojom::BuiltinModelId;
24using ::chromeos::machine_learning::mojom::BuiltinModelSpec;
25using ::chromeos::machine_learning::mojom::BuiltinModelSpecPtr;
26using ::chromeos::machine_learning::mojom::CreateGraphExecutorResult;
27using ::chromeos::machine_learning::mojom::ExecuteResult;
28using ::chromeos::machine_learning::mojom::GraphExecutorPtr;
Alan Green55e16542020-05-11 14:06:46 +100029using ::chromeos::machine_learning::mojom::GraphExecutorOptions;
Alan Greenb9d0c832020-04-30 08:29:50 +100030using ::chromeos::machine_learning::mojom::LoadModelResult;
31using ::chromeos::machine_learning::mojom::MachineLearningServicePtr;
32using ::chromeos::machine_learning::mojom::ModelPtr;
33using ::chromeos::machine_learning::mojom::TensorPtr;
34
35namespace ml {
36namespace simple {
37namespace {
38
39// Creates a 1-D tensor containing a single value
40TensorPtr NewSingleValueTensor(const double value) {
41 auto tensor(chromeos::machine_learning::mojom::Tensor::New());
42 TensorView<double> tensor_view(tensor);
43 tensor_view.Allocate();
44 tensor_view.GetShape() = {1};
45 tensor_view.GetValues() = {value};
46 return tensor;
47}
48
49} // namespace
50
Alan Green55e16542020-05-11 14:06:46 +100051AddResult Add(const double x, const double y, bool use_nnapi) {
Alan Greenb9d0c832020-04-30 08:29:50 +100052 AddResult result = {"Not completed.", -1.0};
53
54 // Create ML Service
55 MachineLearningServicePtr ml_service;
56 const MachineLearningServiceImpl ml_service_impl(
57 mojo::MakeRequest(&ml_service).PassMessagePipe(), base::Closure());
58
59 // Load model.
60 BuiltinModelSpecPtr spec = BuiltinModelSpec::New();
61 spec->id = BuiltinModelId::TEST_MODEL;
62 ModelPtr model;
63 bool model_load_ok = false;
64 ml_service->LoadBuiltinModel(
65 std::move(spec), mojo::MakeRequest(&model),
66 base::Bind(
67 [](bool* const model_load_ok, const LoadModelResult result) {
68 *model_load_ok = result == LoadModelResult::OK;
69 },
70 &model_load_ok));
71 base::RunLoop().RunUntilIdle();
72 if (!model_load_ok) {
73 result.status = "Failed to load model.";
74 return result;
75 }
76
77 // Get graph executor for model.
78 GraphExecutorPtr graph_executor;
79 bool graph_executor_ok = false;
Alan Green55e16542020-05-11 14:06:46 +100080 auto options = GraphExecutorOptions::New(use_nnapi);
81 model->CreateGraphExecutorWithOptions(
82 std::move(options), mojo::MakeRequest(&graph_executor),
83 base::Bind(
84 [](bool* const graph_executor_ok,
85 const CreateGraphExecutorResult result) {
86 *graph_executor_ok = result == CreateGraphExecutorResult::OK;
87 },
88 &graph_executor_ok));
Alan Greenb9d0c832020-04-30 08:29:50 +100089 base::RunLoop().RunUntilIdle();
90 if (!model_load_ok) {
91 result.status = "Failed to get graph executor";
92 return result;
93 }
94
95 // Construct input to graph executor and perform inference
96 base::flat_map<std::string, TensorPtr> inputs;
97 inputs.emplace("x", NewSingleValueTensor(x));
98 inputs.emplace("y", NewSingleValueTensor(y));
99 std::vector<std::string> outputs({"z"});
100 bool inference_ok = false;
101 graph_executor->Execute(
102 std::move(inputs), std::move(outputs),
103 base::Bind(
104 [](bool* const inference_ok, double* const sum,
105 const ExecuteResult execute_result,
106 base::Optional<std::vector<TensorPtr>> outputs) {
107 // Check that the inference succeeded and gave the expected number
108 // of outputs.
109 *inference_ok = execute_result == ExecuteResult::OK &&
110 outputs.has_value() && outputs->size() == 1;
111 if (!*inference_ok) {
112 return;
113 }
114
115 // Get value from output
116 const TensorView<double> out_tensor((*outputs)[0]);
117 *sum = out_tensor.GetValues()[0];
118 },
119 &inference_ok, &result.sum));
120 base::RunLoop().RunUntilIdle();
121 if (!inference_ok) {
122 result.status = "Inference failed.";
123 return result;
124 }
125
Alan Greenc5bcbcd2020-05-07 11:44:26 +1000126 result.status = "OK";
Alan Greenb9d0c832020-04-30 08:29:50 +1000127 return result;
128}
129
130} // namespace simple
131} // namespace ml