Line data Source code
1 : /* SPDX-License-Identifier: Apache-2.0 */
2 : /**
3 : * Copyright (c) 2023 Samsung Electronics Co., Ltd. All Rights Reserved.
4 : *
5 : * @file ml-api-service-offloading.c
6 : * @date 26 Jun 2023
7 : * @brief ML offloading service of NNStreamer/Service C-API
8 : * @see https://github.com/nnstreamer/nnstreamer
9 : * @author Gichan Jang <gichan2.jang@samsung.com>
10 : * @bug No known bugs except for NYI items
11 : */
12 :
13 : #include <glib.h>
14 : #include <glib/gstdio.h>
15 : #include <gio/gio.h>
16 : #include <gst/gst.h>
17 : #include <gst/gstbuffer.h>
18 : #include <gst/app/app.h>
19 : #include <string.h>
20 : #include <curl/curl.h>
21 : #include <json-glib/json-glib.h>
22 : #include <nnstreamer-edge.h>
23 :
24 : #include "ml-api-internal.h"
25 : #include "ml-api-service.h"
26 : #include "ml-api-service-private.h"
27 : #include "ml-api-service-offloading.h"
28 : #include "ml-api-service-training-offloading.h"
29 :
30 : #define MAX_PORT_NUM_LEN 6U
31 :
/**
 * @brief Data struct for options.
 * @details Holds the nns-edge connection parameters parsed from an
 * ml_option_h by _mlrs_get_edge_info(). All strings are owned by this
 * struct and released in _mlrs_release_edge_info().
 */
typedef struct
{
  gchar *host; /**< Local host address (defaults to "localhost") */
  guint port; /**< Local port number */
  gchar *topic; /**< Optional pub/sub topic (may be NULL) */
  gchar *dest_host; /**< Remote host address (defaults to "localhost") */
  guint dest_port; /**< Remote port number */
  nns_edge_connect_type_e conn_type; /**< TCP / HYBRID / MQTT / UNKNOWN */
  nns_edge_node_type_e node_type; /**< Query client (sender) or server (receiver) */
  gchar *id; /**< Optional edge node id (may be NULL) */
} edge_info_s;
46 :
/**
 * @brief Structure for ml_service_offloading.
 * @details Private data stored in ml_service_s.priv while the service
 * runs in offloading mode. Released by
 * _ml_service_offloading_release_internal().
 */
typedef struct
{
  nns_edge_h edge_h; /**< nns-edge connection handle */
  nns_edge_node_type_e node_type; /**< Role of this node (client/server) */

  gchar *path; /**< A path to save the received model file */
  GHashTable *option_table; /**< Service descriptions from config (key/value strings) */
  GHashTable *service_table; /**< Launched pipeline services, keyed by service key */

  ml_service_offloading_mode_e offloading_mode; /**< e.g. training mode */
  void *priv; /**< Mode-specific private data (e.g. training offloading handle) */
} _ml_service_offloading_s;
62 :
63 : /**
64 : * @brief Get ml-service node type from ml_option.
65 : */
66 : static nns_edge_node_type_e
67 0 : _mlrs_get_node_type (const gchar * value)
68 : {
69 0 : nns_edge_node_type_e node_type = NNS_EDGE_NODE_TYPE_UNKNOWN;
70 :
71 0 : if (!value)
72 0 : return node_type;
73 :
74 0 : if (g_ascii_strcasecmp (value, "sender") == 0) {
75 0 : node_type = NNS_EDGE_NODE_TYPE_QUERY_CLIENT;
76 0 : } else if (g_ascii_strcasecmp (value, "receiver") == 0) {
77 0 : node_type = NNS_EDGE_NODE_TYPE_QUERY_SERVER;
78 : } else {
79 0 : _ml_error_report ("Invalid node type '%s', please check node type.", value);
80 : }
81 :
82 0 : return node_type;
83 : }
84 :
85 : /**
86 : * @brief Get nnstreamer-edge connection type
87 : */
88 : static nns_edge_connect_type_e
89 0 : _mlrs_get_conn_type (const gchar * value)
90 : {
91 0 : nns_edge_connect_type_e conn_type = NNS_EDGE_CONNECT_TYPE_UNKNOWN;
92 :
93 0 : if (!value)
94 0 : return conn_type;
95 :
96 0 : if (0 == g_ascii_strcasecmp (value, "TCP"))
97 0 : conn_type = NNS_EDGE_CONNECT_TYPE_TCP;
98 0 : else if (0 == g_ascii_strcasecmp (value, "HYBRID"))
99 0 : conn_type = NNS_EDGE_CONNECT_TYPE_HYBRID;
100 0 : else if (0 == g_ascii_strcasecmp (value, "MQTT"))
101 0 : conn_type = NNS_EDGE_CONNECT_TYPE_MQTT;
102 : else
103 0 : conn_type = NNS_EDGE_CONNECT_TYPE_UNKNOWN;
104 :
105 0 : return conn_type;
106 : }
107 :
108 : /**
109 : * @brief Get edge info from ml_option.
110 : */
111 : static void
112 0 : _mlrs_get_edge_info (ml_option_h option, edge_info_s ** edge_info)
113 : {
114 : edge_info_s *_info;
115 : void *value;
116 :
117 0 : *edge_info = _info = g_new0 (edge_info_s, 1);
118 :
119 0 : if (ML_ERROR_NONE == ml_option_get (option, "host", &value))
120 0 : _info->host = g_strdup (value);
121 : else
122 0 : _info->host = g_strdup ("localhost");
123 0 : if (ML_ERROR_NONE == ml_option_get (option, "port", &value))
124 0 : _info->port = (guint) g_ascii_strtoull (value, NULL, 10);
125 0 : if (ML_ERROR_NONE == ml_option_get (option, "dest-host", &value))
126 0 : _info->dest_host = g_strdup (value);
127 : else
128 0 : _info->dest_host = g_strdup ("localhost");
129 0 : if (ML_ERROR_NONE == ml_option_get (option, "dest-port", &value))
130 0 : _info->dest_port = (guint) g_ascii_strtoull (value, NULL, 10);
131 0 : if (ML_ERROR_NONE == ml_option_get (option, "connect-type", &value))
132 0 : _info->conn_type = _mlrs_get_conn_type (value);
133 : else
134 0 : _info->conn_type = NNS_EDGE_CONNECT_TYPE_UNKNOWN;
135 0 : if (ML_ERROR_NONE == ml_option_get (option, "topic", &value))
136 0 : _info->topic = g_strdup (value);
137 0 : if (ML_ERROR_NONE == ml_option_get (option, "node-type", &value))
138 0 : _info->node_type = _mlrs_get_node_type (value);
139 0 : if (ML_ERROR_NONE == ml_option_get (option, "id", &value))
140 0 : _info->id = g_strdup (value);
141 0 : }
142 :
143 : /**
144 : * @brief Set nns-edge info.
145 : */
146 : static void
147 0 : _mlrs_set_edge_info (edge_info_s * edge_info, nns_edge_h edge_h)
148 : {
149 0 : char port[MAX_PORT_NUM_LEN] = { 0, };
150 :
151 0 : nns_edge_set_info (edge_h, "HOST", edge_info->host);
152 0 : g_snprintf (port, MAX_PORT_NUM_LEN, "%u", edge_info->port);
153 0 : nns_edge_set_info (edge_h, "PORT", port);
154 :
155 0 : if (edge_info->topic)
156 0 : nns_edge_set_info (edge_h, "TOPIC", edge_info->topic);
157 :
158 0 : nns_edge_set_info (edge_h, "DEST_HOST", edge_info->dest_host);
159 0 : g_snprintf (port, MAX_PORT_NUM_LEN, "%u", edge_info->dest_port);
160 0 : nns_edge_set_info (edge_h, "DEST_PORT", port);
161 0 : }
162 :
163 : /**
164 : * @brief Release edge info.
165 : */
166 : static void
167 0 : _mlrs_release_edge_info (edge_info_s * edge_info)
168 : {
169 0 : g_clear_pointer (&edge_info->dest_host, g_free);
170 0 : g_clear_pointer (&edge_info->host, g_free);
171 0 : g_clear_pointer (&edge_info->topic, g_free);
172 0 : g_clear_pointer (&edge_info->id, g_free);
173 0 : g_free (edge_info);
174 0 : }
175 :
176 : /**
177 : * @brief Get ml offloading service type from ml_option.
178 : */
179 : static ml_service_offloading_type_e
180 0 : _mlrs_get_service_type (gchar * service_str)
181 : {
182 0 : ml_service_offloading_type_e service_type =
183 : ML_SERVICE_OFFLOADING_TYPE_UNKNOWN;
184 :
185 0 : if (!service_str)
186 0 : return service_type;
187 :
188 0 : if (g_ascii_strcasecmp (service_str, "model_raw") == 0) {
189 0 : service_type = ML_SERVICE_OFFLOADING_TYPE_MODEL_RAW;
190 0 : } else if (g_ascii_strcasecmp (service_str, "model_uri") == 0) {
191 0 : service_type = ML_SERVICE_OFFLOADING_TYPE_MODEL_URI;
192 0 : } else if (g_ascii_strcasecmp (service_str, "pipeline_raw") == 0) {
193 0 : service_type = ML_SERVICE_OFFLOADING_TYPE_PIPELINE_RAW;
194 0 : } else if (g_ascii_strcasecmp (service_str, "pipeline_uri") == 0) {
195 0 : service_type = ML_SERVICE_OFFLOADING_TYPE_PIPELINE_URI;
196 0 : } else if (g_ascii_strcasecmp (service_str, "reply") == 0) {
197 0 : service_type = ML_SERVICE_OFFLOADING_TYPE_REPLY;
198 0 : } else if (g_ascii_strcasecmp (service_str, "launch") == 0) {
199 0 : service_type = ML_SERVICE_OFFLOADING_TYPE_LAUNCH;
200 : } else {
201 0 : _ml_error_report ("Invalid service type '%s', please check service type.",
202 : service_str);
203 : }
204 :
205 0 : return service_type;
206 : }
207 :
208 : /**
209 : * @brief Get ml offloading service activation type.
210 : */
211 : static gboolean
212 0 : _mlrs_parse_activate (const gchar * activate)
213 : {
214 0 : return (activate && g_ascii_strcasecmp (activate, "true") == 0);
215 : }
216 :
217 : /**
218 : * @brief Callback function for receiving data using curl.
219 : */
220 : static size_t
221 0 : curl_mem_write_cb (void *data, size_t size, size_t nmemb, void *clientp)
222 : {
223 0 : size_t recv_size = size * nmemb;
224 0 : GByteArray *array = (GByteArray *) clientp;
225 :
226 0 : if (!array || !data || recv_size == 0)
227 0 : return 0;
228 :
229 0 : g_byte_array_append (array, data, recv_size);
230 :
231 0 : return recv_size;
232 : }
233 :
/**
 * @brief Register model file given by the offloading sender.
 * @param service_key The model registration key.
 * @param data_h Edge data handle carrying "description", "name" and
 * "activate" meta strings set by the sender.
 * @param data Raw model file contents.
 * @param data_len Size of @a data in bytes.
 * @param dir_path Directory where the model file is written.
 * @return TRUE on success, FALSE when meta info is missing, the file
 * cannot be written, or registration fails.
 */
static gboolean
_mlrs_model_register (gchar * service_key, nns_edge_data_h data_h,
    void *data, nns_size_t data_len, const gchar * dir_path)
{
  guint version = 0;
  g_autofree gchar *description = NULL;
  g_autofree gchar *name = NULL;
  g_autofree gchar *activate = NULL;
  g_autofree gchar *model_path = NULL;
  gboolean active_bool = TRUE;
  GError *error = NULL;

  /* All three meta fields are mandatory for registration. */
  if (NNS_EDGE_ERROR_NONE != nns_edge_data_get_info (data_h, "description",
          &description)
      || NNS_EDGE_ERROR_NONE != nns_edge_data_get_info (data_h, "name", &name)
      || NNS_EDGE_ERROR_NONE != nns_edge_data_get_info (data_h, "activate",
          &activate)) {
    _ml_loge ("Failed to get info from data handle.");
    return FALSE;
  }

  active_bool = _mlrs_parse_activate (activate);
  /* Write the raw model bytes to <dir_path>/<name>. */
  model_path = g_build_path (G_DIR_SEPARATOR_S, dir_path, name, NULL);
  if (!g_file_set_contents (model_path, (char *) data, data_len, &error)) {
    _ml_loge ("Failed to write data to file: %s",
        error ? error->message : "unknown error");
    g_clear_error (&error);
    return FALSE;
  }

  /**
   * @todo Hashing the path. Where is the default path to save the model file?
   */
  if (ML_ERROR_NONE != ml_service_model_register (service_key, model_path,
          active_bool, description, &version)) {
    _ml_loge ("Failed to register model, service key is '%s'.", service_key);
    return FALSE;
  }

  return TRUE;
}
278 :
279 : /**
280 : * @brief Get path to save the model given from offloading sender.
281 : * @note The caller is responsible for freeing the returned data using g_free().
282 : */
283 : static gchar *
284 0 : _mlrs_get_model_dir_path (_ml_service_offloading_s * offloading_s,
285 : const gchar * service_key)
286 : {
287 0 : g_autofree gchar *dir_path = NULL;
288 :
289 0 : if (offloading_s->path) {
290 0 : dir_path = g_strdup (offloading_s->path);
291 : } else {
292 0 : g_autofree gchar *current_dir = g_get_current_dir ();
293 :
294 0 : dir_path = g_build_path (G_DIR_SEPARATOR_S, current_dir, service_key, NULL);
295 0 : if (g_mkdir_with_parents (dir_path, 0755) < 0) {
296 0 : _ml_loge ("Failed to create directory '%s': %s", dir_path,
297 : g_strerror (errno));
298 0 : return NULL;
299 : }
300 : }
301 :
302 0 : return g_steal_pointer (&dir_path);
303 : }
304 :
305 : /**
306 : * @brief Get data from gievn uri
307 : */
308 : static gboolean
309 0 : _mlrs_get_data_from_uri (gchar * uri, GByteArray * array)
310 : {
311 : CURL *curl;
312 : CURLcode res;
313 0 : gboolean ret = FALSE;
314 :
315 0 : curl = curl_easy_init ();
316 0 : if (curl) {
317 0 : if (CURLE_OK != curl_easy_setopt (curl, CURLOPT_URL, (gchar *) uri) ||
318 0 : CURLE_OK != curl_easy_setopt (curl, CURLOPT_FOLLOWLOCATION, 1L) ||
319 0 : CURLE_OK != curl_easy_setopt (curl, CURLOPT_WRITEFUNCTION,
320 0 : curl_mem_write_cb) ||
321 0 : CURLE_OK != curl_easy_setopt (curl, CURLOPT_WRITEDATA,
322 : (void *) array)) {
323 0 : _ml_loge ("Failed to set option for curl easy handle.");
324 0 : ret = FALSE;
325 0 : goto done;
326 : }
327 :
328 0 : res = curl_easy_perform (curl);
329 :
330 0 : if (res != CURLE_OK) {
331 0 : _ml_loge ("curl_easy_perform failed: %s", curl_easy_strerror (res));
332 0 : ret = FALSE;
333 0 : goto done;
334 : }
335 :
336 0 : ret = TRUE;
337 : }
338 :
339 0 : done:
340 0 : if (curl)
341 0 : curl_easy_cleanup (curl);
342 0 : return ret;
343 : }
344 :
345 : /**
346 : * @brief Process ml offloading service
347 : */
348 : static int
349 0 : _mlrs_process_service_offloading (nns_edge_data_h data_h, void *user_data)
350 : {
351 : void *data;
352 : nns_size_t data_len;
353 0 : g_autofree gchar *service_str = NULL;
354 0 : g_autofree gchar *service_key = NULL;
355 0 : g_autofree gchar *dir_path = NULL;
356 : ml_service_offloading_type_e service_type;
357 0 : int ret = NNS_EDGE_ERROR_NONE;
358 0 : ml_service_s *mls = (ml_service_s *) user_data;
359 0 : _ml_service_offloading_s *offloading_s =
360 : (_ml_service_offloading_s *) mls->priv;
361 0 : ml_service_event_e event_type = ML_SERVICE_EVENT_UNKNOWN;
362 0 : ml_information_h info_h = NULL;
363 :
364 0 : ret = nns_edge_data_get (data_h, 0, &data, &data_len);
365 0 : if (NNS_EDGE_ERROR_NONE != ret) {
366 0 : _ml_error_report_return (ret,
367 : "Failed to get data while processing the ml-offloading service.");
368 : }
369 :
370 0 : ret = nns_edge_data_get_info (data_h, "service-type", &service_str);
371 0 : if (NNS_EDGE_ERROR_NONE != ret) {
372 0 : _ml_error_report_return (ret,
373 : "Failed to get service type while processing the ml-offloading service.");
374 : }
375 0 : service_type = _mlrs_get_service_type (service_str);
376 :
377 0 : ret = nns_edge_data_get_info (data_h, "service-key", &service_key);
378 0 : if (NNS_EDGE_ERROR_NONE != ret) {
379 0 : _ml_error_report_return (ret,
380 : "Failed to get service key while processing the ml-offloading service.");
381 : }
382 :
383 0 : dir_path = _mlrs_get_model_dir_path (offloading_s, service_key);
384 :
385 0 : if (offloading_s->offloading_mode == ML_SERVICE_OFFLOADING_MODE_TRAINING) {
386 0 : ret = _ml_service_training_offloading_process_received_data (mls, data_h,
387 : dir_path, data, service_type);
388 0 : if (NNS_EDGE_ERROR_NONE != ret) {
389 0 : _ml_error_report_return (ret,
390 : "Failed to process received data on training offloading.");
391 : }
392 :
393 0 : if (service_type == ML_SERVICE_OFFLOADING_TYPE_REPLY) {
394 0 : if (!dir_path) {
395 0 : _ml_error_report_return (NNS_EDGE_ERROR_UNKNOWN,
396 : "Failed to get model directory path.");
397 : }
398 :
399 0 : if (!_mlrs_model_register (service_key, data_h, data, data_len, dir_path)) {
400 0 : _ml_error_report_return (NNS_EDGE_ERROR_UNKNOWN,
401 : "Failed to register model downloaded from: %s.", (gchar *) data);
402 : }
403 : }
404 : }
405 :
406 0 : switch (service_type) {
407 0 : case ML_SERVICE_OFFLOADING_TYPE_MODEL_URI:
408 : {
409 : GByteArray *array;
410 :
411 0 : if (!dir_path) {
412 0 : _ml_error_report_return (NNS_EDGE_ERROR_UNKNOWN,
413 : "Failed to get model directory path.");
414 : }
415 :
416 0 : array = g_byte_array_new ();
417 :
418 0 : if (!_mlrs_get_data_from_uri ((gchar *) data, array)) {
419 0 : g_byte_array_free (array, TRUE);
420 0 : _ml_error_report_return (NNS_EDGE_ERROR_IO,
421 : "Failed to get data from uri: %s.", (gchar *) data);
422 : }
423 :
424 0 : if (_mlrs_model_register (service_key, data_h, array->data, array->len,
425 : dir_path)) {
426 0 : event_type = ML_SERVICE_EVENT_MODEL_REGISTERED;
427 : } else {
428 0 : _ml_error_report ("Failed to register model downloaded from: %s.",
429 : (gchar *) data);
430 0 : ret = NNS_EDGE_ERROR_UNKNOWN;
431 : }
432 0 : g_byte_array_free (array, TRUE);
433 0 : break;
434 : }
435 0 : case ML_SERVICE_OFFLOADING_TYPE_MODEL_RAW:
436 : {
437 0 : if (!dir_path) {
438 0 : _ml_error_report_return (NNS_EDGE_ERROR_UNKNOWN,
439 : "Failed to get model directory path.");
440 : }
441 :
442 0 : if (_mlrs_model_register (service_key, data_h, data, data_len, dir_path)) {
443 0 : event_type = ML_SERVICE_EVENT_MODEL_REGISTERED;
444 : } else {
445 0 : _ml_error_report ("Failed to register model downloaded from: %s.",
446 : (gchar *) data);
447 0 : ret = NNS_EDGE_ERROR_UNKNOWN;
448 : }
449 0 : break;
450 : }
451 0 : case ML_SERVICE_OFFLOADING_TYPE_PIPELINE_URI:
452 : {
453 0 : GByteArray *array = g_byte_array_new ();
454 :
455 0 : ret = _mlrs_get_data_from_uri ((gchar *) data, array);
456 0 : if (!ret) {
457 0 : g_byte_array_free (array, TRUE);
458 0 : _ml_error_report_return (ret,
459 : "Failed to get data from uri: %s.", (gchar *) data);
460 : }
461 0 : ret = ml_service_pipeline_set (service_key, (gchar *) array->data);
462 0 : if (ML_ERROR_NONE == ret) {
463 0 : event_type = ML_SERVICE_EVENT_PIPELINE_REGISTERED;
464 : }
465 0 : g_byte_array_free (array, TRUE);
466 0 : break;
467 : }
468 0 : case ML_SERVICE_OFFLOADING_TYPE_PIPELINE_RAW:
469 0 : ret = ml_service_pipeline_set (service_key, (gchar *) data);
470 0 : if (ML_ERROR_NONE == ret) {
471 0 : event_type = ML_SERVICE_EVENT_PIPELINE_REGISTERED;
472 : }
473 0 : break;
474 0 : case ML_SERVICE_OFFLOADING_TYPE_REPLY:
475 : {
476 0 : ret = _ml_information_create (&info_h);
477 0 : if (ML_ERROR_NONE != ret) {
478 0 : _ml_error_report ("Failed to create information handle.");
479 0 : goto done;
480 : }
481 0 : ret = _ml_information_set (info_h, "data", (void *) data, NULL);
482 0 : if (ML_ERROR_NONE != ret) {
483 0 : _ml_error_report ("Failed to set data information.");
484 0 : goto done;
485 : }
486 0 : event_type = ML_SERVICE_EVENT_REPLY;
487 0 : break;
488 : }
489 0 : case ML_SERVICE_OFFLOADING_TYPE_LAUNCH:
490 : {
491 0 : ml_service_h service_h = NULL;
492 :
493 : /**
494 : * @todo Check privilege and availability here.
495 : */
496 :
497 0 : service_h =
498 0 : g_hash_table_lookup (offloading_s->service_table, service_key);
499 0 : if (service_h) {
500 0 : _ml_logi ("The registered service as key %s is already launched.",
501 : service_key);
502 0 : break;
503 : }
504 :
505 0 : ret = ml_service_pipeline_launch (service_key, &service_h);
506 0 : if (ret != ML_ERROR_NONE) {
507 0 : _ml_error_report
508 : ("Failed to launch the registered pipeline. service key: %s",
509 : service_key);
510 0 : goto done;
511 : }
512 0 : ret = ml_service_start (service_h);
513 0 : if (ret != ML_ERROR_NONE) {
514 0 : _ml_error_report
515 : ("Failed to start the registered pipeline. service key: %s",
516 : service_key);
517 0 : ml_service_destroy (service_h);
518 0 : goto done;
519 : }
520 :
521 0 : g_hash_table_insert (offloading_s->service_table, g_strdup (service_key),
522 : service_h);
523 0 : event_type = ML_SERVICE_EVENT_LAUNCH;
524 0 : break;
525 : }
526 0 : default:
527 0 : _ml_error_report ("Unknown service type '%d' or not supported yet.",
528 : service_type);
529 0 : break;
530 : }
531 :
532 0 : if (event_type != ML_SERVICE_EVENT_UNKNOWN) {
533 0 : ml_service_event_cb_info_s cb_info = { 0 };
534 :
535 0 : _ml_service_get_event_cb_info (mls, &cb_info);
536 :
537 0 : if (cb_info.cb) {
538 0 : cb_info.cb (event_type, info_h, cb_info.pdata);
539 : }
540 : }
541 :
542 0 : done:
543 0 : if (info_h) {
544 0 : ml_information_destroy (info_h);
545 : }
546 :
547 0 : return ret;
548 : }
549 :
550 : /**
551 : * @brief Edge event callback.
552 : */
553 : static int
554 0 : _mlrs_edge_event_cb (nns_edge_event_h event_h, void *user_data)
555 : {
556 0 : nns_edge_event_e event = NNS_EDGE_EVENT_UNKNOWN;
557 0 : nns_edge_data_h data_h = NULL;
558 0 : int ret = NNS_EDGE_ERROR_NONE;
559 :
560 0 : ret = nns_edge_event_get_type (event_h, &event);
561 0 : if (NNS_EDGE_ERROR_NONE != ret)
562 0 : return ret;
563 :
564 0 : switch (event) {
565 0 : case NNS_EDGE_EVENT_NEW_DATA_RECEIVED:
566 : {
567 0 : ret = nns_edge_event_parse_new_data (event_h, &data_h);
568 0 : if (NNS_EDGE_ERROR_NONE != ret)
569 0 : return ret;
570 :
571 0 : ret = _mlrs_process_service_offloading (data_h, user_data);
572 0 : break;
573 : }
574 0 : default:
575 0 : break;
576 : }
577 :
578 0 : if (data_h)
579 0 : nns_edge_data_destroy (data_h);
580 :
581 0 : return ret;
582 : }
583 :
584 : /**
585 : * @brief Create edge handle.
586 : */
587 : static int
588 0 : _mlrs_create_edge_handle (ml_service_s * mls, edge_info_s * edge_info)
589 : {
590 0 : int ret = 0;
591 0 : nns_edge_h edge_h = NULL;
592 0 : _ml_service_offloading_s *offloading_s = NULL;
593 :
594 0 : ret = nns_edge_create_handle (edge_info->id, edge_info->conn_type,
595 : edge_info->node_type, &edge_h);
596 :
597 0 : if (NNS_EDGE_ERROR_NONE != ret) {
598 0 : _ml_error_report_return_continue (ret,
599 : "Failed to create edge handle for ml-service offloading. Internal error?");
600 : }
601 :
602 0 : offloading_s = (_ml_service_offloading_s *) mls->priv;
603 0 : ret = nns_edge_set_event_callback (edge_h, _mlrs_edge_event_cb, mls);
604 0 : if (NNS_EDGE_ERROR_NONE != ret) {
605 0 : _ml_error_report
606 : ("Failed to set event callback in edge handle for ml-service offloading. Internal error?");
607 0 : goto error;
608 : }
609 :
610 0 : _mlrs_set_edge_info (edge_info, edge_h);
611 :
612 0 : ret = nns_edge_start (edge_h);
613 0 : if (NNS_EDGE_ERROR_NONE != ret) {
614 0 : _ml_error_report
615 : ("Failed to start edge for ml-service offloading. Internal error?");
616 0 : goto error;
617 : }
618 :
619 0 : if (edge_info->node_type == NNS_EDGE_NODE_TYPE_QUERY_CLIENT) {
620 0 : ret = nns_edge_connect (edge_h, edge_info->dest_host, edge_info->dest_port);
621 :
622 0 : if (NNS_EDGE_ERROR_NONE != ret) {
623 0 : _ml_error_report
624 : ("Failed to connect edge for ml-service offloading. Internal error?");
625 0 : goto error;
626 : }
627 : }
628 :
629 0 : offloading_s->edge_h = edge_h;
630 :
631 0 : error:
632 0 : if (ret != NNS_EDGE_ERROR_NONE) {
633 0 : nns_edge_release_handle (edge_h);
634 : }
635 :
636 0 : return ret;
637 : }
638 :
639 : /**
640 : * @brief Set offloading mode and private data.
641 : */
642 : int
643 0 : _ml_service_offloading_set_mode (ml_service_h handle,
644 : ml_service_offloading_mode_e mode, void *priv)
645 : {
646 0 : ml_service_s *mls = (ml_service_s *) handle;
647 : _ml_service_offloading_s *offloading_s;
648 :
649 0 : if (!_ml_service_handle_is_valid (mls)) {
650 0 : _ml_error_report_return (ML_ERROR_INVALID_PARAMETER,
651 : "The parameter, 'handle' (ml_service_h), is invalid. It should be a valid ml_service_h instance.");
652 : }
653 :
654 0 : offloading_s = (_ml_service_offloading_s *) mls->priv;
655 :
656 0 : offloading_s->offloading_mode = mode;
657 0 : offloading_s->priv = priv;
658 :
659 0 : return ML_ERROR_NONE;
660 : }
661 :
662 : /**
663 : * @brief Get offloading mode and private data.
664 : */
665 : int
666 0 : _ml_service_offloading_get_mode (ml_service_h handle,
667 : ml_service_offloading_mode_e * mode, void **priv)
668 : {
669 0 : ml_service_s *mls = (ml_service_s *) handle;
670 : _ml_service_offloading_s *offloading_s;
671 :
672 0 : if (!_ml_service_handle_is_valid (mls)) {
673 0 : _ml_error_report_return (ML_ERROR_INVALID_PARAMETER,
674 : "The parameter, 'handle' (ml_service_h), is invalid. It should be a valid ml_service_h instance.");
675 : }
676 :
677 0 : if (!mode || !priv) {
678 0 : _ml_error_report_return (ML_ERROR_INVALID_PARAMETER,
679 : "The parameter, mode or priv, is null. It should be a valid pointer.");
680 : }
681 :
682 0 : offloading_s = (_ml_service_offloading_s *) mls->priv;
683 :
684 0 : *mode = offloading_s->offloading_mode;
685 0 : *priv = offloading_s->priv;
686 :
687 0 : return ML_ERROR_NONE;
688 : }
689 :
/**
 * @brief Internal function to release ml-service offloading data.
 * @details Tears down in order: training offloading (which may still
 * need the edge handle), edge handle, hash tables, path, then the
 * private struct itself. Safe to call with a NULL/empty handle.
 * @return Always ML_ERROR_NONE.
 */
int
_ml_service_offloading_release_internal (ml_service_s * mls)
{
  _ml_service_offloading_s *offloading_s;

  /* Supposed internal function call to release handle. */
  if (!mls || !mls->priv)
    return ML_ERROR_NONE;

  offloading_s = (_ml_service_offloading_s *) mls->priv;

  if (offloading_s->offloading_mode == ML_SERVICE_OFFLOADING_MODE_TRAINING) {
    /**
     * '_ml_service_training_offloading_destroy' transfers internally trained models.
     * So keep offloading handle.
     */
    if (ML_ERROR_NONE != _ml_service_training_offloading_destroy (mls)) {
      _ml_error_report
          ("Failed to release ml-service training offloading handle");
    }
  }

  if (offloading_s->edge_h) {
    nns_edge_release_handle (offloading_s->edge_h);
    offloading_s->edge_h = NULL;
  }

  /* Destroying service_table runs _cleanup_pipeline_service per entry. */
  if (offloading_s->option_table) {
    g_hash_table_destroy (offloading_s->option_table);
    offloading_s->option_table = NULL;
  }

  if (offloading_s->service_table) {
    g_hash_table_destroy (offloading_s->service_table);
    offloading_s->service_table = NULL;
  }

  g_clear_pointer (&offloading_s->path, g_free);
  g_clear_pointer (&mls->priv, g_free);

  return ML_ERROR_NONE;
}
735 :
736 : /**
737 : * @brief Set value in ml-service offloading handle.
738 : */
739 : int
740 0 : _ml_service_offloading_set_information (ml_service_h handle, const gchar * name,
741 : const gchar * value)
742 : {
743 0 : ml_service_s *mls = (ml_service_s *) handle;
744 : _ml_service_offloading_s *offloading_s;
745 0 : int ret = ML_ERROR_NONE;
746 :
747 0 : if (!_ml_service_handle_is_valid (mls)) {
748 0 : _ml_error_report_return (ML_ERROR_INVALID_PARAMETER,
749 : "The parameter, 'handle' (ml_service_h), is invalid. It should be a valid ml_service_h instance.");
750 : }
751 :
752 0 : offloading_s = (_ml_service_offloading_s *) mls->priv;
753 :
754 0 : if (g_ascii_strcasecmp (name, "path") == 0) {
755 0 : if (!g_file_test (value, G_FILE_TEST_IS_DIR)) {
756 0 : _ml_error_report_return (ML_ERROR_INVALID_PARAMETER,
757 : "The given param, dir path '%s' is invalid or the dir is not found or accessible.",
758 : value);
759 : }
760 :
761 0 : if (g_access (value, W_OK) != 0) {
762 0 : _ml_error_report_return (ML_ERROR_PERMISSION_DENIED,
763 : "Write permission to dir '%s' is denied.", value);
764 : }
765 :
766 0 : g_clear_pointer (&offloading_s->path, g_free);
767 0 : offloading_s->path = g_strdup (value);
768 :
769 0 : if (offloading_s->offloading_mode == ML_SERVICE_OFFLOADING_MODE_TRAINING) {
770 0 : ret = _ml_service_training_offloading_set_path (mls, offloading_s->path);
771 : }
772 : }
773 :
774 0 : return ret;
775 : }
776 :
777 : /**
778 : * @brief Internal function to set the services in ml-service offloading handle.
779 : */
780 : static int
781 0 : _ml_service_offloading_set_service (ml_service_s * mls, const gchar * key,
782 : const gchar * value)
783 : {
784 : _ml_service_offloading_s *offloading_s;
785 :
786 0 : if (!STR_IS_VALID (key) || !STR_IS_VALID (value)) {
787 0 : _ml_error_report_return (ML_ERROR_INVALID_PARAMETER,
788 : "The parameter, 'key' or 'value' is null or empty string. It should be a valid string.");
789 : }
790 0 : offloading_s = (_ml_service_offloading_s *) mls->priv;
791 :
792 0 : g_hash_table_insert (offloading_s->option_table, g_strdup (key),
793 0 : g_strdup (value));
794 :
795 0 : return ML_ERROR_NONE;
796 : }
797 :
798 : /**
799 : * @brief Internal function to parse service info from config file.
800 : */
801 : static int
802 0 : _ml_service_offloading_parse_services (ml_service_s * mls, JsonObject * object)
803 : {
804 : GList *list, *iter;
805 0 : int status = ML_ERROR_NONE;
806 :
807 0 : list = json_object_get_members (object);
808 0 : for (iter = list; iter != NULL; iter = g_list_next (iter)) {
809 0 : const gchar *key = iter->data;
810 0 : JsonNode *json_node = json_object_get_member (object, key);
811 0 : gchar *val = json_to_string (json_node, TRUE);
812 :
813 0 : if (val) {
814 0 : status = _ml_service_offloading_set_service (mls, key, val);
815 0 : g_free (val);
816 :
817 0 : if (status != ML_ERROR_NONE) {
818 0 : _ml_error_report ("Failed to set service key '%s'.", key);
819 0 : break;
820 : }
821 : }
822 : }
823 0 : g_list_free (list);
824 :
825 0 : return status;
826 : }
827 :
828 : /**
829 : * @brief Private function to release the pipeline service
830 : */
831 : static void
832 0 : _cleanup_pipeline_service (gpointer data)
833 : {
834 : int ret;
835 0 : ml_service_h service_h = data;
836 :
837 0 : ret = ml_service_destroy (service_h);
838 0 : if (ML_ERROR_NONE != ret) {
839 0 : _ml_error_report ("Failed to destroy the pipeline service.");
840 : }
841 0 : }
842 :
/**
 * @brief Internal function to create ml-offloading data with given ml-option handle.
 * @details Allocates the private struct into mls->priv, creates the two
 * hash tables, applies an optional "path" option, then creates and
 * starts the edge handle from the remaining options. On failure,
 * mls->priv is left set for the caller's release path to clean up.
 * @return ML_ERROR_NONE on success, otherwise an ml-api error value.
 */
static int
_ml_service_offloading_create_from_option (ml_service_s * mls,
    ml_option_h option)
{
  _ml_service_offloading_s *offloading_s;
  edge_info_s *edge_info = NULL;
  int ret = ML_ERROR_NONE;
  gchar *_path = NULL;

  mls->priv = offloading_s = g_try_new0 (_ml_service_offloading_s, 1);
  if (offloading_s == NULL) {
    _ml_error_report_return (ML_ERROR_OUT_OF_MEMORY,
        "Failed to allocate memory for the service handle's private data. Out of memory?");
  }

  offloading_s->option_table =
      g_hash_table_new_full (g_str_hash, g_str_equal, g_free, g_free);
  if (!offloading_s->option_table) {
    _ml_error_report_return (ML_ERROR_OUT_OF_MEMORY,
        "Failed to allocate memory for the option table of ml-service offloading. Out of memory?");
  }

  /* Values in the service table are live services; destroy them on removal. */
  offloading_s->service_table =
      g_hash_table_new_full (g_str_hash, g_str_equal, g_free,
      _cleanup_pipeline_service);
  if (!offloading_s->service_table) {
    _ml_error_report_return (ML_ERROR_OUT_OF_MEMORY,
        "Failed to allocate memory for the service table of ml-service offloading. Out of memory?");
  }

  /* "path" is optional; validated and stored by set_information. */
  if (ML_ERROR_NONE == ml_option_get (option, "path", (void **) (&_path))) {
    ret = _ml_service_offloading_set_information (mls, "path", _path);
    if (ML_ERROR_NONE != ret) {
      _ml_error_report_return (ret,
          "Failed to set path in ml-service offloading handle.");
    }
  }

  _mlrs_get_edge_info (option, &edge_info);

  offloading_s->node_type = edge_info->node_type;
  ret = _mlrs_create_edge_handle (mls, edge_info);
  _mlrs_release_edge_info (edge_info);

  return ret;
}
892 :
893 : /**
894 : * @brief Internal function to convert json (string member) to ml-option.
895 : */
896 : static int
897 0 : _ml_service_offloading_convert_to_option (JsonObject * object,
898 : ml_option_h * option_h)
899 : {
900 0 : ml_option_h tmp = NULL;
901 0 : int status = ML_ERROR_NONE;
902 : const gchar *key, *val;
903 : GList *list, *iter;
904 :
905 0 : if (!object || !option_h)
906 0 : return ML_ERROR_INVALID_PARAMETER;
907 :
908 0 : status = ml_option_create (&tmp);
909 0 : if (status != ML_ERROR_NONE) {
910 0 : _ml_error_report_return (status,
911 : "Failed to convert json to ml-option, cannot create ml-option handle.");
912 : }
913 :
914 0 : list = json_object_get_members (object);
915 0 : for (iter = list; iter != NULL; iter = g_list_next (iter)) {
916 0 : key = iter->data;
917 :
918 0 : if (g_ascii_strcasecmp (key, "training") == 0) {
919 : /* It is not a value to set for option. */
920 0 : continue;
921 : }
922 :
923 0 : val = _ml_service_get_json_string_member (object, key);
924 :
925 0 : status = ml_option_set (tmp, key, g_strdup (val), g_free);
926 0 : if (status != ML_ERROR_NONE) {
927 0 : _ml_error_report ("Failed to set %s option: %s.", key, val);
928 0 : break;
929 : }
930 : }
931 0 : g_list_free (list);
932 :
933 0 : if (status == ML_ERROR_NONE) {
934 0 : *option_h = tmp;
935 : } else {
936 0 : ml_option_destroy (tmp);
937 : }
938 :
939 0 : return status;
940 : }
941 :
/**
 * @brief Internal function to parse configuration file to create offloading service.
 * @details Reads the mandatory "offloading" member into an ml-option and
 * builds the offloading handle from it; then optionally parses the
 * top-level "services" member and the "offloading.training" member.
 * Failures in those optional sections are logged but do not fail creation.
 * @param handle The ml-service handle whose priv is populated.
 * @param object Root JSON object of the configuration file.
 * @return ML_ERROR_NONE on success, otherwise an ml-api error value.
 */
int
_ml_service_offloading_create (ml_service_h handle, JsonObject * object)
{
  ml_service_s *mls = (ml_service_s *) handle;
  int status;
  ml_option_h option = NULL;
  JsonObject *offloading;

  if (!mls || !object) {
    _ml_error_report_return (ML_ERROR_INVALID_PARAMETER,
        "Failed to create offloading handle, invalid parameter.");
  }

  offloading = json_object_get_object_member (object, "offloading");
  if (!offloading) {
    _ml_error_report_return (ML_ERROR_INVALID_PARAMETER,
        "Failed to get 'offloading' member from configuration file.");
  }

  status = _ml_service_offloading_convert_to_option (offloading, &option);
  if (status != ML_ERROR_NONE) {
    _ml_error_report ("Failed to set ml-option from configuration file.");
    goto done;
  }

  status = _ml_service_offloading_create_from_option (mls, option);
  if (status != ML_ERROR_NONE) {
    _ml_error_report ("Failed to create ml-service offloading.");
    goto done;
  }

  /* Optional: service descriptions; a parse failure is only a warning. */
  if (json_object_has_member (object, "services")) {
    JsonObject *svc_object;

    svc_object = json_object_get_object_member (object, "services");
    status = _ml_service_offloading_parse_services (mls, svc_object);
    if (status != ML_ERROR_NONE) {
      _ml_logw ("Failed to parse services from configuration file.");
    }
  }

  /* Optional: training offloading configuration. */
  if (json_object_has_member (offloading, "training")) {
    status = _ml_service_training_offloading_create (mls, offloading);
    if (status != ML_ERROR_NONE) {
      _ml_logw ("Failed to parse training from configuration file.");
    }
  }

done:
  if (option)
    ml_option_destroy (option);

  return status;
}
999 :
1000 : /**
1001 : * @brief Internal function to start ml-service offloading.
1002 : */
1003 : int
1004 0 : _ml_service_offloading_start (ml_service_h handle)
1005 : {
1006 0 : ml_service_s *mls = (ml_service_s *) handle;
1007 : _ml_service_offloading_s *offloading_s;
1008 0 : int ret = ML_ERROR_NONE;
1009 :
1010 0 : if (!_ml_service_handle_is_valid (mls)) {
1011 0 : _ml_error_report_return (ML_ERROR_INVALID_PARAMETER,
1012 : "The parameter, 'handle' (ml_service_h), is invalid. It should be a valid ml_service_h instance.");
1013 : }
1014 :
1015 0 : offloading_s = (_ml_service_offloading_s *) mls->priv;
1016 :
1017 0 : if (offloading_s->offloading_mode == ML_SERVICE_OFFLOADING_MODE_TRAINING) {
1018 0 : ret = _ml_service_training_offloading_start (mls);
1019 0 : if (ret != ML_ERROR_NONE) {
1020 0 : _ml_error_report ("Failed to start training offloading.");
1021 : }
1022 : }
1023 :
1024 0 : return ret;
1025 : }
1026 :
1027 : /**
1028 : * @brief Internal function to stop ml-service offloading.
1029 : */
1030 : int
1031 0 : _ml_service_offloading_stop (ml_service_h handle)
1032 : {
1033 0 : ml_service_s *mls = (ml_service_s *) handle;
1034 : _ml_service_offloading_s *offloading_s;
1035 0 : int ret = ML_ERROR_NONE;
1036 :
1037 0 : if (!_ml_service_handle_is_valid (mls)) {
1038 0 : _ml_error_report_return (ML_ERROR_INVALID_PARAMETER,
1039 : "The parameter, 'handle' (ml_service_h), is invalid. It should be a valid ml_service_h instance.");
1040 : }
1041 :
1042 0 : offloading_s = (_ml_service_offloading_s *) mls->priv;
1043 :
1044 0 : if (offloading_s->offloading_mode == ML_SERVICE_OFFLOADING_MODE_TRAINING) {
1045 0 : ret = _ml_service_training_offloading_stop (mls);
1046 0 : if (ret != ML_ERROR_NONE) {
1047 0 : _ml_error_report ("Failed to stop training offloading.");
1048 : }
1049 : }
1050 :
1051 0 : return ret;
1052 : }
1053 :
1054 : /**
1055 : * @brief Internal function to request service to ml-service offloading.
1056 : * Register new information, such as neural network models or pipeline descriptions, on a offloading server.
1057 : */
int
_ml_service_offloading_request (ml_service_h handle, const char *key,
    const ml_tensors_data_h input)
{
  ml_service_s *mls = (ml_service_s *) handle;
  _ml_service_offloading_s *offloading_s = NULL;
  const gchar *service_key = NULL;
  nns_edge_data_h data_h = NULL;
  int ret = NNS_EDGE_ERROR_NONE;
  const gchar *service_str = NULL;
  const gchar *description = NULL;
  const gchar *name = NULL;
  const gchar *activate = NULL;
  ml_tensors_data_s *_in = NULL;
  g_autoptr (JsonNode) service_node = NULL;
  JsonObject *service_obj;
  guint i;

  if (!_ml_service_handle_is_valid (mls)) {
    _ml_error_report_return (ML_ERROR_INVALID_PARAMETER,
        "The parameter, 'handle' (ml_service_h), is invalid. It should be a valid ml_service_h instance.");
  }

  if (!STR_IS_VALID (key)) {
    _ml_error_report_return (ML_ERROR_INVALID_PARAMETER,
        "The parameter, 'key' is NULL. It should be a valid string.");
  }

  if (!input)
    _ml_error_report_return (ML_ERROR_INVALID_PARAMETER,
        "The parameter, input (ml_tensors_data_h), is NULL. It should be a valid ml_tensor_data_h instance, which is usually created by ml_tensors_data_create().");

  offloading_s = (_ml_service_offloading_s *) mls->priv;

  /* The per-key service description is stored as a JSON string in option_table. */
  service_str = g_hash_table_lookup (offloading_s->option_table, key);
  if (!service_str) {
    _ml_error_report_return (ML_ERROR_INVALID_PARAMETER,
        "The given service key, %s, is not registered in the ml-service offloading handle.",
        key);
  }

  /* service_node is a g_autoptr, so the early returns below do not leak it. */
  service_node = json_from_string (service_str, NULL);
  if (!service_node) {
    _ml_error_report_return (ML_ERROR_INVALID_PARAMETER,
        "Failed to parse the json string, %s.", service_str);
  }
  service_obj = json_node_get_object (service_node);
  if (!service_obj) {
    _ml_error_report_return (ML_ERROR_INVALID_PARAMETER,
        "Failed to get the json object from the json node.");
  }

  /* 'service-type' and 'service-key' are mandatory members of the service JSON. */
  service_str =
      _ml_service_get_json_string_member (service_obj, "service-type");
  if (!service_str) {
    _ml_error_report_return (ML_ERROR_INVALID_PARAMETER,
        "Failed to get service type from the json object.");
  }

  service_key = _ml_service_get_json_string_member (service_obj, "service-key");
  if (!service_key) {
    _ml_error_report_return (ML_ERROR_INVALID_PARAMETER,
        "Failed to get service key from the json object.");
  }

  /**
   * NOTE(review): from here on 'ret' carries nns-edge error codes
   * (NNS_EDGE_ERROR_*), not ML_ERROR_* — confirm callers treat them alike.
   */
  ret = nns_edge_data_create (&data_h);
  if (NNS_EDGE_ERROR_NONE != ret) {
    _ml_error_report ("Failed to create an edge data.");
    return ret;
  }

  ret = nns_edge_data_set_info (data_h, "service-type", service_str);
  if (NNS_EDGE_ERROR_NONE != ret) {
    _ml_error_report ("Failed to set service type in edge data.");
    goto done;
  }
  ret = nns_edge_data_set_info (data_h, "service-key", service_key);
  if (NNS_EDGE_ERROR_NONE != ret) {
    _ml_error_report ("Failed to set service key in edge data.");
    goto done;
  }

  /* 'description', 'name' and 'activate' are optional; failures are only logged. */
  description = _ml_service_get_json_string_member (service_obj, "description");
  if (description) {
    ret = nns_edge_data_set_info (data_h, "description", description);
    if (NNS_EDGE_ERROR_NONE != ret) {
      _ml_logi ("Failed to set description in edge data.");
    }
  }

  name = _ml_service_get_json_string_member (service_obj, "name");
  if (name) {
    ret = nns_edge_data_set_info (data_h, "name", name);
    if (NNS_EDGE_ERROR_NONE != ret) {
      _ml_logi ("Failed to set name in edge data.");
    }
  }

  activate = _ml_service_get_json_string_member (service_obj, "activate");
  if (activate) {
    ret = nns_edge_data_set_info (data_h, "activate", activate);
    if (NNS_EDGE_ERROR_NONE != ret) {
      _ml_logi ("Failed to set activate in edge data.");
    }
  }

  /* Hold the tensors-data lock while its buffers are referenced by the edge data. */
  _in = (ml_tensors_data_s *) input;
  G_LOCK_UNLESS_NOLOCK (*_in);

  /* Attach each tensor buffer; no destroy-callback, buffers stay owned by 'input'. */
  for (i = 0; i < _in->num_tensors; i++) {
    ret =
        nns_edge_data_add (data_h, _in->tensors[i].data, _in->tensors[i].size,
        NULL);
    if (NNS_EDGE_ERROR_NONE != ret) {
      _ml_error_report ("Failed to add tensor data to the edge data.");
      goto done;
    }
  }

  ret = nns_edge_send (offloading_s->edge_h, data_h);
  if (NNS_EDGE_ERROR_NONE != ret) {
    _ml_error_report
        ("Failed to publish the edge data to register the offloading service.");
  }

done:
  /* _in is non-NULL only if the lock above was taken; unlock before destroying. */
  if (_in)
    G_UNLOCK_UNLESS_NOLOCK (*_in);
  if (data_h)
    nns_edge_data_destroy (data_h);
  return ret;
}
1190 :
1191 : /**
1192 : * @brief Internal function to request service to ml-service offloading.
1193 : * Register new information, such as neural network models or pipeline descriptions, on a offloading server.
1194 : */
1195 : int
1196 0 : _ml_service_offloading_request_raw (ml_service_h handle, const char *key,
1197 : void *data, size_t len)
1198 : {
1199 : ml_tensors_data_h input;
1200 : ml_tensors_data_s *_in;
1201 : int status;
1202 :
1203 : /* Set internal data structure to send edge data. */
1204 0 : status = _ml_tensors_data_create_no_alloc (NULL, &input);
1205 0 : if (status != ML_ERROR_NONE)
1206 0 : return status;
1207 :
1208 : /**
1209 : * It is ok, we just pass the raw data to nnstreamer-edge.
1210 : * Updating data in handle, no tensor information, and no lock here.
1211 : */
1212 0 : _in = (ml_tensors_data_s *) input;
1213 0 : _in->num_tensors = 1;
1214 0 : _in->tensors[0].data = data;
1215 0 : _in->tensors[0].size = len;
1216 :
1217 0 : status = _ml_service_offloading_request (handle, key, input);
1218 :
1219 0 : _ml_tensors_data_destroy_internal (input, FALSE);
1220 0 : return status;
1221 : }
|