LCOV - code coverage report
Current view: top level - capi-machine-learning-inference-1.8.6/c/src - ml-api-service-offloading.c (source / functions)
Test: ML API 1.8.6-0 nnstreamer/api#7f8530c294f86ec880b29347a861499239d358a1
Test Date: 2025-06-06 05:24:38
Coverage: Lines: 79.7 % (445 of 558 hit) | Functions: 100.0 % (28 of 28 hit)

            Line data    Source code
       1              : /* SPDX-License-Identifier: Apache-2.0 */
       2              : /**
       3              :  * Copyright (c) 2023 Samsung Electronics Co., Ltd. All Rights Reserved.
       4              :  *
       5              :  * @file ml-api-service-offloading.c
       6              :  * @date 26 Jun 2023
       7              :  * @brief ML offloading service of NNStreamer/Service C-API
       8              :  * @see https://github.com/nnstreamer/nnstreamer
       9              :  * @author Gichan Jang <gichan2.jang@samsung.com>
      10              :  * @bug No known bugs except for NYI items
      11              :  */
      12              : 
      13              : #include <glib.h>
      14              : #include <glib/gstdio.h>
      15              : #include <gio/gio.h>
      16              : #include <gst/gst.h>
      17              : #include <gst/gstbuffer.h>
      18              : #include <gst/app/app.h>
      19              : #include <string.h>
      20              : #include <curl/curl.h>
      21              : #include <json-glib/json-glib.h>
      22              : #include <nnstreamer-edge.h>
      23              : 
      24              : #include "ml-api-internal.h"
      25              : #include "ml-api-service.h"
      26              : #include "ml-api-service-private.h"
      27              : #include "ml-api-service-offloading.h"
      28              : #include "ml-api-service-training-offloading.h"
      29              : 
      30              : #define MAX_PORT_NUM_LEN 6U
      31              : 
      32              : /**
      33              :  * @brief Data struct for options.
      34              :  */
      35              : typedef struct
      36              : {
      37              :   gchar *host;
      38              :   guint port;
      39              :   gchar *topic;
      40              :   gchar *dest_host;
      41              :   guint dest_port;
      42              :   nns_edge_connect_type_e conn_type;
      43              :   nns_edge_node_type_e node_type;
      44              :   gchar *id;
      45              : } edge_info_s;
      46              : 
      47              : /**
      48              :  * @brief Structure for ml_service_offloading.
      49              :  */
      50              : typedef struct
      51              : {
      52              :   nns_edge_h edge_h;
      53              :   nns_edge_node_type_e node_type;
      54              : 
      55              :   gchar *path; /**< A path to save the received model file */
      56              :   GHashTable *option_table;
      57              :   GHashTable *service_table;
      58              : 
      59              :   ml_service_offloading_mode_e offloading_mode;
      60              :   void *priv;
      61              : } _ml_service_offloading_s;
      62              : 
      63              : /**
      64              :  * @brief Get ml-service node type from ml_option.
      65              :  */
      66              : static nns_edge_node_type_e
      67           30 : _mlrs_get_node_type (const gchar * value)
      68              : {
      69           30 :   nns_edge_node_type_e node_type = NNS_EDGE_NODE_TYPE_UNKNOWN;
      70              : 
      71           30 :   if (!value)
      72            0 :     return node_type;
      73              : 
      74           30 :   if (g_ascii_strcasecmp (value, "sender") == 0) {
      75           13 :     node_type = NNS_EDGE_NODE_TYPE_QUERY_CLIENT;
      76           17 :   } else if (g_ascii_strcasecmp (value, "receiver") == 0) {
      77           17 :     node_type = NNS_EDGE_NODE_TYPE_QUERY_SERVER;
      78              :   } else {
      79            0 :     _ml_error_report ("Invalid node type '%s', please check node type.", value);
      80              :   }
      81              : 
      82           30 :   return node_type;
      83              : }
      84              : 
      85              : /**
       86              :  * @brief Get nnstreamer-edge connection type.
      87              :  */
      88              : static nns_edge_connect_type_e
      89           30 : _mlrs_get_conn_type (const gchar * value)
      90              : {
      91           30 :   nns_edge_connect_type_e conn_type = NNS_EDGE_CONNECT_TYPE_UNKNOWN;
      92              : 
      93           30 :   if (!value)
      94            0 :     return conn_type;
      95              : 
      96           30 :   if (0 == g_ascii_strcasecmp (value, "TCP"))
      97           30 :     conn_type = NNS_EDGE_CONNECT_TYPE_TCP;
      98            0 :   else if (0 == g_ascii_strcasecmp (value, "HYBRID"))
      99            0 :     conn_type = NNS_EDGE_CONNECT_TYPE_HYBRID;
     100            0 :   else if (0 == g_ascii_strcasecmp (value, "MQTT"))
     101            0 :     conn_type = NNS_EDGE_CONNECT_TYPE_MQTT;
     102              :   else
     103            0 :     conn_type = NNS_EDGE_CONNECT_TYPE_UNKNOWN;
     104              : 
     105           30 :   return conn_type;
     106              : }
     107              : 
     108              : /**
     109              :  * @brief Get edge info from ml_option.
     110              :  */
     111              : static void
     112           30 : _mlrs_get_edge_info (ml_option_h option, edge_info_s ** edge_info)
     113              : {
     114              :   edge_info_s *_info;
     115              :   void *value;
     116              : 
     117           30 :   *edge_info = _info = g_new0 (edge_info_s, 1);
     118              : 
     119           30 :   if (ML_ERROR_NONE == ml_option_get (option, "host", &value))
     120           34 :     _info->host = g_strdup (value);
     121              :   else
     122           13 :     _info->host = g_strdup ("localhost");
     123           30 :   if (ML_ERROR_NONE == ml_option_get (option, "port", &value))
     124           17 :     _info->port = (guint) g_ascii_strtoull (value, NULL, 10);
     125           30 :   if (ML_ERROR_NONE == ml_option_get (option, "dest-host", &value))
     126           26 :     _info->dest_host = g_strdup (value);
     127              :   else
     128           17 :     _info->dest_host = g_strdup ("localhost");
     129           30 :   if (ML_ERROR_NONE == ml_option_get (option, "dest-port", &value))
     130           13 :     _info->dest_port = (guint) g_ascii_strtoull (value, NULL, 10);
     131           30 :   if (ML_ERROR_NONE == ml_option_get (option, "connect-type", &value))
     132           30 :     _info->conn_type = _mlrs_get_conn_type (value);
     133              :   else
     134            0 :     _info->conn_type = NNS_EDGE_CONNECT_TYPE_UNKNOWN;
     135           30 :   if (ML_ERROR_NONE == ml_option_get (option, "topic", &value))
     136           60 :     _info->topic = g_strdup (value);
     137           30 :   if (ML_ERROR_NONE == ml_option_get (option, "node-type", &value))
     138           30 :     _info->node_type = _mlrs_get_node_type (value);
     139           30 :   if (ML_ERROR_NONE == ml_option_get (option, "id", &value))
     140            0 :     _info->id = g_strdup (value);
     141           30 : }
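
/* Illustrative sketch (editor's assumption, not from the measured source): a
 * minimal example of the ml_option layout that _mlrs_get_edge_info() expects
 * from its caller.  The key names come from the code above; the concrete
 * values and the helper name build_edge_option_example() are placeholders. */
static int
build_edge_option_example (ml_option_h * option)
{
  int status = ml_option_create (option);

  if (status != ML_ERROR_NONE)
    return status;

  /* Values are duplicated strings so that ml_option can release them with g_free. */
  ml_option_set (*option, "node-type", g_strdup ("receiver"), g_free);
  ml_option_set (*option, "connect-type", g_strdup ("TCP"), g_free);
  ml_option_set (*option, "host", g_strdup ("localhost"), g_free);
  ml_option_set (*option, "port", g_strdup ("3000"), g_free);
  ml_option_set (*option, "topic", g_strdup ("offloading-topic"), g_free);

  return ML_ERROR_NONE;
}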
     142              : 
     143              : /**
     144              :  * @brief Set nns-edge info.
     145              :  */
     146              : static void
     147           30 : _mlrs_set_edge_info (edge_info_s * edge_info, nns_edge_h edge_h)
     148              : {
     149           30 :   char port[MAX_PORT_NUM_LEN] = { 0, };
     150              : 
     151           30 :   nns_edge_set_info (edge_h, "HOST", edge_info->host);
     152           30 :   g_snprintf (port, MAX_PORT_NUM_LEN, "%u", edge_info->port);
     153           30 :   nns_edge_set_info (edge_h, "PORT", port);
     154              : 
     155           30 :   if (edge_info->topic)
     156           30 :     nns_edge_set_info (edge_h, "TOPIC", edge_info->topic);
     157              : 
     158           30 :   nns_edge_set_info (edge_h, "DEST_HOST", edge_info->dest_host);
     159           30 :   g_snprintf (port, MAX_PORT_NUM_LEN, "%u", edge_info->dest_port);
     160           30 :   nns_edge_set_info (edge_h, "DEST_PORT", port);
     161           30 : }
     162              : 
     163              : /**
     164              :  * @brief Release edge info.
     165              :  */
     166              : static void
     167           30 : _mlrs_release_edge_info (edge_info_s * edge_info)
     168              : {
     169           30 :   g_free (edge_info->dest_host);
     170           30 :   g_free (edge_info->host);
     171           30 :   g_free (edge_info->topic);
     172           30 :   g_free (edge_info->id);
     173           30 :   g_free (edge_info);
     174           30 : }
     175              : 
     176              : /**
      177              :  * @brief Get ml offloading service type from the given service-type string.
     178              :  */
     179              : static ml_service_offloading_type_e
     180           17 : _mlrs_get_service_type (gchar * service_str)
     181              : {
     182           17 :   ml_service_offloading_type_e service_type =
     183              :       ML_SERVICE_OFFLOADING_TYPE_UNKNOWN;
     184              : 
     185           17 :   if (!service_str)
     186            0 :     return service_type;
     187              : 
     188           17 :   if (g_ascii_strcasecmp (service_str, "model_raw") == 0) {
     189            4 :     service_type = ML_SERVICE_OFFLOADING_TYPE_MODEL_RAW;
     190           13 :   } else if (g_ascii_strcasecmp (service_str, "model_uri") == 0) {
     191            1 :     service_type = ML_SERVICE_OFFLOADING_TYPE_MODEL_URI;
     192           12 :   } else if (g_ascii_strcasecmp (service_str, "pipeline_raw") == 0) {
     193            5 :     service_type = ML_SERVICE_OFFLOADING_TYPE_PIPELINE_RAW;
     194            7 :   } else if (g_ascii_strcasecmp (service_str, "pipeline_uri") == 0) {
     195            1 :     service_type = ML_SERVICE_OFFLOADING_TYPE_PIPELINE_URI;
     196            6 :   } else if (g_ascii_strcasecmp (service_str, "reply") == 0) {
     197            4 :     service_type = ML_SERVICE_OFFLOADING_TYPE_REPLY;
     198            2 :   } else if (g_ascii_strcasecmp (service_str, "launch") == 0) {
     199            2 :     service_type = ML_SERVICE_OFFLOADING_TYPE_LAUNCH;
     200              :   } else {
     201            0 :     _ml_error_report ("Invalid service type '%s', please check service type.",
     202              :         service_str);
     203              :   }
     204              : 
     205           17 :   return service_type;
     206              : }
     207              : 
     208              : /**
      209              :  * @brief Parse the activation flag of the ml offloading service.
     210              :  */
     211              : static gboolean
     212            6 : _mlrs_parse_activate (const gchar * activate)
     213              : {
     214            6 :   return (activate && g_ascii_strcasecmp (activate, "true") == 0);
     215              : }
     216              : 
     217              : /**
     218              :  * @brief Callback function for receiving data using curl.
     219              :  */
     220              : static size_t
     221          263 : curl_mem_write_cb (void *data, size_t size, size_t nmemb, void *clientp)
     222              : {
     223          263 :   size_t recv_size = size * nmemb;
     224          263 :   GByteArray *array = (GByteArray *) clientp;
     225              : 
     226          263 :   if (!array || !data || recv_size == 0)
     227            0 :     return 0;
     228              : 
     229          263 :   g_byte_array_append (array, data, recv_size);
     230              : 
     231          263 :   return recv_size;
     232              : }
     233              : 
     234              : /**
     235              :  * @brief Register model file given by the offloading sender.
     236              :  */
     237              : static gboolean
     238            6 : _mlrs_model_register (gchar * service_key, nns_edge_data_h data_h,
     239              :     void *data, nns_size_t data_len, const gchar * dir_path)
     240              : {
     241            6 :   guint version = 0;
     242            6 :   g_autofree gchar *description = NULL;
     243            6 :   g_autofree gchar *name = NULL;
     244            6 :   g_autofree gchar *activate = NULL;
     245            6 :   g_autofree gchar *model_path = NULL;
     246            6 :   gboolean active_bool = TRUE;
     247            6 :   GError *error = NULL;
     248              : 
     249            6 :   if (NNS_EDGE_ERROR_NONE != nns_edge_data_get_info (data_h, "description",
     250              :           &description)
     251            6 :       || NNS_EDGE_ERROR_NONE != nns_edge_data_get_info (data_h, "name", &name)
     252            6 :       || NNS_EDGE_ERROR_NONE != nns_edge_data_get_info (data_h, "activate",
     253              :           &activate)) {
     254            0 :     _ml_loge ("Failed to get info from data handle.");
     255            0 :     return FALSE;
     256              :   }
     257              : 
     258            6 :   active_bool = _mlrs_parse_activate (activate);
     259            6 :   model_path = g_build_path (G_DIR_SEPARATOR_S, dir_path, name, NULL);
     260            6 :   if (!g_file_set_contents (model_path, (char *) data, data_len, &error)) {
     261            0 :     _ml_loge ("Failed to write data to file: %s",
     262              :         error ? error->message : "unknown error");
     263            0 :     g_clear_error (&error);
     264            0 :     return FALSE;
     265              :   }
     266              : 
     267              :   /**
     268              :    * @todo Hashing the path. Where is the default path to save the model file?
     269              :    */
     270            6 :   if (ML_ERROR_NONE != ml_service_model_register (service_key, model_path,
     271              :           active_bool, description, &version)) {
     272            0 :     _ml_loge ("Failed to register model, service key is '%s'.", service_key);
     273            0 :     return FALSE;
     274              :   }
     275              : 
     276            6 :   return TRUE;
     277              : }
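
/* Illustrative sketch (editor's assumption, not from the measured source): the
 * metadata a sending peer attaches so that _mlrs_model_register() can store
 * and register the model on the receiver.  Only nns_edge_data_set_info(),
 * which this file already uses, is shown; attaching the raw model bytes and
 * the "service-type"/"service-key" fields is handled elsewhere. */
static void
fill_model_meta_example (nns_edge_data_h data_h)
{
  nns_edge_data_set_info (data_h, "name", "mobilenet_v2.tflite"); /* file name to save as */
  nns_edge_data_set_info (data_h, "description", "image classification model");
  nns_edge_data_set_info (data_h, "activate", "true"); /* parsed by _mlrs_parse_activate() */
}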
     278              : 
     279              : /**
       280              :  * @brief Get the directory path to save the model received from the offloading sender.
     281              :  * @note The caller is responsible for freeing the returned data using g_free().
     282              :  */
     283              : static gchar *
     284           17 : _mlrs_get_model_dir_path (_ml_service_offloading_s * offloading_s,
     285              :     const gchar * service_key)
     286              : {
     287           17 :   g_autofree gchar *dir_path = NULL;
     288              : 
     289           17 :   if (offloading_s->path) {
     290           12 :     dir_path = g_strdup (offloading_s->path);
     291              :   } else {
     292           11 :     g_autofree gchar *current_dir = g_get_current_dir ();
     293              : 
     294           11 :     dir_path = g_build_path (G_DIR_SEPARATOR_S, current_dir, service_key, NULL);
     295           11 :     if (g_mkdir_with_parents (dir_path, 0755) < 0) {
     296            0 :       _ml_loge ("Failed to create directory '%s': %s", dir_path,
     297              :           g_strerror (errno));
     298            0 :       return NULL;
     299              :     }
     300              :   }
     301              : 
     302           17 :   return g_steal_pointer (&dir_path);
     303              : }
     304              : 
     305              : /**
       306              :  * @brief Get data from given URI.
     307              :  */
     308              : static gboolean
     309            2 : _mlrs_get_data_from_uri (gchar * uri, GByteArray * array)
     310              : {
     311              :   CURL *curl;
     312              :   CURLcode res;
     313            2 :   gboolean ret = FALSE;
     314              : 
     315            2 :   curl = curl_easy_init ();
     316            2 :   if (curl) {
     317            2 :     if (CURLE_OK != curl_easy_setopt (curl, CURLOPT_URL, (gchar *) uri) ||
     318            2 :         CURLE_OK != curl_easy_setopt (curl, CURLOPT_FOLLOWLOCATION, 1L) ||
     319            2 :         CURLE_OK != curl_easy_setopt (curl, CURLOPT_WRITEFUNCTION,
     320            2 :             curl_mem_write_cb) ||
     321            2 :         CURLE_OK != curl_easy_setopt (curl, CURLOPT_WRITEDATA,
     322              :             (void *) array)) {
     323            0 :       _ml_loge ("Failed to set option for curl easy handle.");
     324            0 :       ret = FALSE;
     325            0 :       goto done;
     326              :     }
     327              : 
     328            2 :     res = curl_easy_perform (curl);
     329              : 
     330            2 :     if (res != CURLE_OK) {
     331            0 :       _ml_loge ("curl_easy_perform failed: %s", curl_easy_strerror (res));
     332            0 :       ret = FALSE;
     333            0 :       goto done;
     334              :     }
     335              : 
     336            2 :     ret = TRUE;
     337              :   }
     338              : 
     339            0 : done:
     340            2 :   if (curl)
     341            2 :     curl_easy_cleanup (curl);
     342            2 :   return ret;
     343              : }
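
/* Illustrative sketch (editor's assumption, not from the measured source): a
 * typical call pattern for _mlrs_get_data_from_uri() above.  The URI is a
 * placeholder; the caller owns the GByteArray and releases it afterwards. */
static gboolean
download_uri_example (void)
{
  GByteArray *buf = g_byte_array_new ();
  gboolean ok = _mlrs_get_data_from_uri ((gchar *) "http://example.com/model.tflite", buf);

  if (ok) {
    /* buf->data and buf->len now hold the downloaded payload. */
  }

  g_byte_array_free (buf, TRUE);
  return ok;
}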
     344              : 
     345              : /**
      346              :  * @brief Process ml offloading service.
     347              :  */
     348              : static int
     349           17 : _mlrs_process_service_offloading (nns_edge_data_h data_h, void *user_data)
     350              : {
     351              :   void *data;
     352              :   nns_size_t data_len;
     353           17 :   g_autofree gchar *service_str = NULL;
     354           17 :   g_autofree gchar *service_key = NULL;
     355           17 :   g_autofree gchar *dir_path = NULL;
     356              :   ml_service_offloading_type_e service_type;
     357           17 :   int ret = NNS_EDGE_ERROR_NONE;
     358           17 :   ml_service_s *mls = (ml_service_s *) user_data;
     359           17 :   _ml_service_offloading_s *offloading_s =
     360              :       (_ml_service_offloading_s *) mls->priv;
     361           17 :   ml_service_event_e event_type = ML_SERVICE_EVENT_UNKNOWN;
     362           17 :   ml_information_h info_h = NULL;
     363              : 
     364           17 :   ret = nns_edge_data_get (data_h, 0, &data, &data_len);
     365           17 :   if (NNS_EDGE_ERROR_NONE != ret) {
     366            0 :     _ml_error_report_return (ret,
     367              :         "Failed to get data while processing the ml-offloading service.");
     368              :   }
     369              : 
     370           17 :   ret = nns_edge_data_get_info (data_h, "service-type", &service_str);
     371           17 :   if (NNS_EDGE_ERROR_NONE != ret) {
     372            0 :     _ml_error_report_return (ret,
     373              :         "Failed to get service type while processing the ml-offloading service.");
     374              :   }
     375           17 :   service_type = _mlrs_get_service_type (service_str);
     376              : 
     377           17 :   ret = nns_edge_data_get_info (data_h, "service-key", &service_key);
     378           17 :   if (NNS_EDGE_ERROR_NONE != ret) {
     379            0 :     _ml_error_report_return (ret,
     380              :         "Failed to get service key while processing the ml-offloading service.");
     381              :   }
     382              : 
     383           17 :   dir_path = _mlrs_get_model_dir_path (offloading_s, service_key);
     384              : 
     385           17 :   if (offloading_s->offloading_mode == ML_SERVICE_OFFLOADING_MODE_TRAINING) {
     386            4 :     ret = _ml_service_training_offloading_process_received_data (mls, data_h,
     387              :         dir_path, data, service_type);
     388            4 :     if (NNS_EDGE_ERROR_NONE != ret) {
     389            0 :       _ml_error_report_return (ret,
     390              :           "Failed to process received data on training offloading.");
     391              :     }
     392              : 
     393            4 :     if (service_type == ML_SERVICE_OFFLOADING_TYPE_REPLY) {
     394            1 :       if (!dir_path) {
     395            0 :         _ml_error_report_return (NNS_EDGE_ERROR_UNKNOWN,
     396              :             "Failed to get model directory path.");
     397              :       }
     398              : 
     399            1 :       if (!_mlrs_model_register (service_key, data_h, data, data_len, dir_path)) {
     400            0 :         _ml_error_report_return (NNS_EDGE_ERROR_UNKNOWN,
     401              :             "Failed to register model downloaded from: %s.", (gchar *) data);
     402              :       }
     403              :     }
     404              :   }
     405              : 
     406           17 :   switch (service_type) {
     407            1 :     case ML_SERVICE_OFFLOADING_TYPE_MODEL_URI:
     408              :     {
     409              :       GByteArray *array;
     410              : 
     411            1 :       if (!dir_path) {
     412            0 :         _ml_error_report_return (NNS_EDGE_ERROR_UNKNOWN,
     413              :             "Failed to get model directory path.");
     414              :       }
     415              : 
     416            1 :       array = g_byte_array_new ();
     417              : 
     418            1 :       if (!_mlrs_get_data_from_uri ((gchar *) data, array)) {
     419            0 :         g_byte_array_free (array, TRUE);
     420            0 :         _ml_error_report_return (NNS_EDGE_ERROR_IO,
     421              :             "Failed to get data from uri: %s.", (gchar *) data);
     422              :       }
     423              : 
     424            1 :       if (_mlrs_model_register (service_key, data_h, array->data, array->len,
     425              :               dir_path)) {
     426            1 :         event_type = ML_SERVICE_EVENT_MODEL_REGISTERED;
     427              :       } else {
     428            0 :         _ml_error_report ("Failed to register model downloaded from: %s.",
     429              :             (gchar *) data);
     430            0 :         ret = NNS_EDGE_ERROR_UNKNOWN;
     431              :       }
     432            1 :       g_byte_array_free (array, TRUE);
     433            1 :       break;
     434              :     }
     435            4 :     case ML_SERVICE_OFFLOADING_TYPE_MODEL_RAW:
     436              :     {
     437            4 :       if (!dir_path) {
     438            0 :         _ml_error_report_return (NNS_EDGE_ERROR_UNKNOWN,
     439              :             "Failed to get model directory path.");
     440              :       }
     441              : 
     442            4 :       if (_mlrs_model_register (service_key, data_h, data, data_len, dir_path)) {
     443            4 :         event_type = ML_SERVICE_EVENT_MODEL_REGISTERED;
     444              :       } else {
     445            0 :         _ml_error_report ("Failed to register model downloaded from: %s.",
     446              :             (gchar *) data);
     447            0 :         ret = NNS_EDGE_ERROR_UNKNOWN;
     448              :       }
     449            4 :       break;
     450              :     }
     451            1 :     case ML_SERVICE_OFFLOADING_TYPE_PIPELINE_URI:
     452              :     {
     453            1 :       GByteArray *array = g_byte_array_new ();
     454              : 
     455            1 :       ret = _mlrs_get_data_from_uri ((gchar *) data, array);
     456            1 :       if (!ret) {
     457            0 :         g_byte_array_free (array, TRUE);
     458            0 :         _ml_error_report_return (ret,
     459              :             "Failed to get data from uri: %s.", (gchar *) data);
     460              :       }
     461            1 :       ret = ml_service_pipeline_set (service_key, (gchar *) array->data);
     462            1 :       if (ML_ERROR_NONE == ret) {
     463            1 :         event_type = ML_SERVICE_EVENT_PIPELINE_REGISTERED;
     464              :       }
     465            1 :       g_byte_array_free (array, TRUE);
     466            1 :       break;
     467              :     }
     468            5 :     case ML_SERVICE_OFFLOADING_TYPE_PIPELINE_RAW:
     469            5 :       ret = ml_service_pipeline_set (service_key, (gchar *) data);
     470            5 :       if (ML_ERROR_NONE == ret) {
     471            5 :         event_type = ML_SERVICE_EVENT_PIPELINE_REGISTERED;
     472              :       }
     473            5 :       break;
     474            4 :     case ML_SERVICE_OFFLOADING_TYPE_REPLY:
     475              :     {
     476            4 :       ret = _ml_information_create (&info_h);
     477            4 :       if (ML_ERROR_NONE != ret) {
     478            0 :         _ml_error_report ("Failed to create information handle.");
     479            0 :         goto done;
     480              :       }
     481            4 :       ret = _ml_information_set (info_h, "data", (void *) data, NULL);
     482            4 :       if (ML_ERROR_NONE != ret) {
     483            0 :         _ml_error_report ("Failed to set data information.");
     484            0 :         goto done;
     485              :       }
     486            4 :       event_type = ML_SERVICE_EVENT_REPLY;
     487            4 :       break;
     488              :     }
     489            2 :     case ML_SERVICE_OFFLOADING_TYPE_LAUNCH:
     490              :     {
     491            2 :       ml_service_h service_h = NULL;
     492              : 
     493              :       /**
     494              :        * @todo Check privilege and availability here.
     495              :        */
     496              : 
     497            2 :       service_h =
     498            2 :           g_hash_table_lookup (offloading_s->service_table, service_key);
     499            2 :       if (service_h) {
      500            0 :     _ml_logi ("The service registered with key %s is already launched.",
     501              :             service_key);
     502            2 :         break;
     503              :       }
     504              : 
     505            2 :       ret = ml_service_pipeline_launch (service_key, &service_h);
     506            2 :       if (ret != ML_ERROR_NONE) {
     507            0 :         _ml_error_report
     508              :             ("Failed to launch the registered pipeline. service key: %s",
     509              :             service_key);
     510            0 :         goto done;
     511              :       }
     512            2 :       ret = ml_service_start (service_h);
     513            2 :       if (ret != ML_ERROR_NONE) {
     514            0 :         _ml_error_report
     515              :             ("Failed to start the registered pipeline. service key: %s",
     516              :             service_key);
     517            0 :         ml_service_destroy (service_h);
     518            0 :         goto done;
     519              :       }
     520              : 
     521            4 :       g_hash_table_insert (offloading_s->service_table, g_strdup (service_key),
     522              :           service_h);
     523            2 :       event_type = ML_SERVICE_EVENT_LAUNCH;
     524            2 :       break;
     525              :     }
     526            0 :     default:
     527            0 :       _ml_error_report ("Unknown service type '%d' or not supported yet.",
     528              :           service_type);
     529            0 :       break;
     530              :   }
     531              : 
     532           17 :   if (event_type != ML_SERVICE_EVENT_UNKNOWN) {
     533           17 :     ml_service_event_cb_info_s cb_info = { 0 };
     534              : 
     535           17 :     _ml_service_get_event_cb_info (mls, &cb_info);
     536              : 
     537           17 :     if (cb_info.cb) {
     538           17 :       cb_info.cb (event_type, info_h, cb_info.pdata);
     539              :     }
     540              :   }
     541              : 
     542            0 : done:
     543           17 :   if (info_h) {
     544            4 :     ml_information_destroy (info_h);
     545              :   }
     546              : 
     547           17 :   return ret;
     548              : }
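
/* Illustrative sketch (editor's assumption, not from the measured source): an
 * application-side event callback consuming the events dispatched above.  The
 * callback signature mirrors the cb_info.cb invocation in this file;
 * ml_information_get() and the exact registration API are assumptions about
 * the public ml-service interface, not definitions from this file. */
static void
app_event_cb_example (ml_service_event_e event, ml_information_h event_data,
    void *user_data)
{
  void *reply = NULL;

  switch (event) {
    case ML_SERVICE_EVENT_MODEL_REGISTERED:
      /* A model sent by the remote peer has been stored and registered. */
      break;
    case ML_SERVICE_EVENT_REPLY:
      /* The reply payload is exposed under the "data" key (see above). */
      ml_information_get (event_data, "data", &reply);
      break;
    default:
      break;
  }
}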
     549              : 
     550              : /**
     551              :  * @brief Edge event callback.
     552              :  */
     553              : static int
     554           69 : _mlrs_edge_event_cb (nns_edge_event_h event_h, void *user_data)
     555              : {
     556           69 :   nns_edge_event_e event = NNS_EDGE_EVENT_UNKNOWN;
     557           69 :   nns_edge_data_h data_h = NULL;
     558           69 :   int ret = NNS_EDGE_ERROR_NONE;
     559              : 
     560           69 :   ret = nns_edge_event_get_type (event_h, &event);
     561           69 :   if (NNS_EDGE_ERROR_NONE != ret)
     562           69 :     return ret;
     563              : 
     564           69 :   switch (event) {
     565           17 :     case NNS_EDGE_EVENT_NEW_DATA_RECEIVED:
     566              :     {
     567           17 :       ret = nns_edge_event_parse_new_data (event_h, &data_h);
     568           17 :       if (NNS_EDGE_ERROR_NONE != ret)
     569            0 :         return ret;
     570              : 
     571           17 :       ret = _mlrs_process_service_offloading (data_h, user_data);
     572           17 :       break;
     573              :     }
     574           52 :     default:
     575           52 :       break;
     576              :   }
     577              : 
     578           69 :   if (data_h)
     579           17 :     nns_edge_data_destroy (data_h);
     580              : 
     581           69 :   return ret;
     582              : }
     583              : 
     584              : /**
     585              :  * @brief Create edge handle.
     586              :  */
     587              : static int
     588           30 : _mlrs_create_edge_handle (ml_service_s * mls, edge_info_s * edge_info)
     589              : {
     590           30 :   int ret = 0;
     591           30 :   nns_edge_h edge_h = NULL;
     592           30 :   _ml_service_offloading_s *offloading_s = NULL;
     593              : 
     594           30 :   ret = nns_edge_create_handle (edge_info->id, edge_info->conn_type,
     595              :       edge_info->node_type, &edge_h);
     596              : 
     597           30 :   if (NNS_EDGE_ERROR_NONE != ret) {
     598           30 :     _ml_error_report_return_continue (ret,
     599              :         "Failed to create edge handle for ml-service offloading. Internal error?");
     600              :   }
     601              : 
     602           30 :   offloading_s = (_ml_service_offloading_s *) mls->priv;
     603           30 :   ret = nns_edge_set_event_callback (edge_h, _mlrs_edge_event_cb, mls);
     604           30 :   if (NNS_EDGE_ERROR_NONE != ret) {
     605            0 :     _ml_error_report
     606              :         ("Failed to set event callback in edge handle for ml-service offloading. Internal error?");
     607            0 :     goto error;
     608              :   }
     609              : 
     610           30 :   _mlrs_set_edge_info (edge_info, edge_h);
     611              : 
     612           30 :   ret = nns_edge_start (edge_h);
     613           30 :   if (NNS_EDGE_ERROR_NONE != ret) {
     614            0 :     _ml_error_report
     615              :         ("Failed to start edge for ml-service offloading. Internal error?");
     616            0 :     goto error;
     617              :   }
     618              : 
     619           30 :   if (edge_info->node_type == NNS_EDGE_NODE_TYPE_QUERY_CLIENT) {
     620           13 :     ret = nns_edge_connect (edge_h, edge_info->dest_host, edge_info->dest_port);
     621              : 
     622           13 :     if (NNS_EDGE_ERROR_NONE != ret) {
     623            0 :       _ml_error_report
     624              :           ("Failed to connect edge for ml-service offloading. Internal error?");
     625            0 :       goto error;
     626              :     }
     627              :   }
     628              : 
     629           30 :   offloading_s->edge_h = edge_h;
     630              : 
     631           30 : error:
     632           30 :   if (ret != NNS_EDGE_ERROR_NONE) {
     633            0 :     nns_edge_release_handle (edge_h);
     634              :   }
     635              : 
     636           30 :   return ret;
     637              : }
     638              : 
     639              : /**
     640              :  * @brief Set offloading mode and private data.
     641              :  */
     642              : int
     643           13 : _ml_service_offloading_set_mode (ml_service_h handle,
     644              :     ml_service_offloading_mode_e mode, void *priv)
     645              : {
     646           13 :   ml_service_s *mls = (ml_service_s *) handle;
     647              :   _ml_service_offloading_s *offloading_s;
     648              : 
     649           13 :   if (!_ml_service_handle_is_valid (mls)) {
     650            0 :     _ml_error_report_return (ML_ERROR_INVALID_PARAMETER,
     651              :         "The parameter, 'handle' (ml_service_h), is invalid. It should be a valid ml_service_h instance.");
     652              :   }
     653              : 
     654           13 :   offloading_s = (_ml_service_offloading_s *) mls->priv;
     655              : 
     656           13 :   offloading_s->offloading_mode = mode;
     657           13 :   offloading_s->priv = priv;
     658              : 
     659           13 :   return ML_ERROR_NONE;
     660              : }
     661              : 
     662              : /**
     663              :  * @brief Get offloading mode and private data.
     664              :  */
     665              : int
     666           42 : _ml_service_offloading_get_mode (ml_service_h handle,
     667              :     ml_service_offloading_mode_e * mode, void **priv)
     668              : {
     669           42 :   ml_service_s *mls = (ml_service_s *) handle;
     670              :   _ml_service_offloading_s *offloading_s;
     671              : 
     672           42 :   if (!_ml_service_handle_is_valid (mls)) {
     673            2 :     _ml_error_report_return (ML_ERROR_INVALID_PARAMETER,
     674              :         "The parameter, 'handle' (ml_service_h), is invalid. It should be a valid ml_service_h instance.");
     675              :   }
     676              : 
     677           40 :   if (!mode || !priv) {
     678            0 :     _ml_error_report_return (ML_ERROR_INVALID_PARAMETER,
     679              :         "The parameter, mode or priv, is null. It should be a valid pointer.");
     680              :   }
     681              : 
     682           40 :   offloading_s = (_ml_service_offloading_s *) mls->priv;
     683              : 
     684           40 :   *mode = offloading_s->offloading_mode;
     685           40 :   *priv = offloading_s->priv;
     686              : 
     687           40 :   return ML_ERROR_NONE;
     688              : }
     689              : 
     690              : /**
     691              :  * @brief Internal function to release ml-service offloading data.
     692              :  */
     693              : int
     694           31 : _ml_service_offloading_release_internal (ml_service_s * mls)
     695              : {
     696              :   _ml_service_offloading_s *offloading_s;
     697              : 
     698              :   /* Supposed internal function call to release handle. */
     699           31 :   if (!mls || !mls->priv)
     700            1 :     return ML_ERROR_NONE;
     701              : 
     702           30 :   offloading_s = (_ml_service_offloading_s *) mls->priv;
     703              : 
     704           30 :   if (offloading_s->offloading_mode == ML_SERVICE_OFFLOADING_MODE_TRAINING) {
     705              :     /**
      706              :      * '_ml_service_training_offloading_destroy' transfers the trained models internally,
      707              :      * so keep the offloading (edge) handle alive until it returns.
     708              :      */
     709            5 :     if (ML_ERROR_NONE != _ml_service_training_offloading_destroy (mls)) {
     710            0 :       _ml_error_report
     711              :           ("Failed to release ml-service training offloading handle");
     712              :     }
     713              :   }
     714              : 
     715           30 :   if (offloading_s->edge_h) {
     716           30 :     nns_edge_release_handle (offloading_s->edge_h);
     717           30 :     offloading_s->edge_h = NULL;
     718              :   }
     719              : 
     720           30 :   if (offloading_s->option_table) {
     721           30 :     g_hash_table_destroy (offloading_s->option_table);
     722           30 :     offloading_s->option_table = NULL;
     723              :   }
     724              : 
     725           30 :   if (offloading_s->service_table) {
     726           30 :     g_hash_table_destroy (offloading_s->service_table);
     727           30 :     offloading_s->service_table = NULL;
     728              :   }
     729              : 
     730           30 :   g_free (offloading_s->path);
     731           30 :   g_free (offloading_s);
     732           30 :   mls->priv = NULL;
     733              : 
     734           30 :   return ML_ERROR_NONE;
     735              : }
     736              : 
     737              : /**
     738              :  * @brief Set value in ml-service offloading handle.
     739              :  */
     740              : int
     741            4 : _ml_service_offloading_set_information (ml_service_h handle, const gchar * name,
     742              :     const gchar * value)
     743              : {
     744            4 :   ml_service_s *mls = (ml_service_s *) handle;
     745              :   _ml_service_offloading_s *offloading_s;
     746            4 :   int ret = ML_ERROR_NONE;
     747              : 
     748            4 :   if (!_ml_service_handle_is_valid (mls)) {
     749            0 :     _ml_error_report_return (ML_ERROR_INVALID_PARAMETER,
     750              :         "The parameter, 'handle' (ml_service_h), is invalid. It should be a valid ml_service_h instance.");
     751              :   }
     752              : 
     753            4 :   offloading_s = (_ml_service_offloading_s *) mls->priv;
     754              : 
     755            4 :   if (g_ascii_strcasecmp (name, "path") == 0) {
     756            4 :     if (!g_file_test (value, G_FILE_TEST_IS_DIR)) {
     757            0 :       _ml_error_report_return (ML_ERROR_INVALID_PARAMETER,
      758              :           "The given param, dir path '%s', is invalid; the directory was not found or is not accessible.",
     759              :           value);
     760              :     }
     761              : 
     762            4 :     if (g_access (value, W_OK) != 0) {
     763            0 :       _ml_error_report_return (ML_ERROR_PERMISSION_DENIED,
     764              :           "Write permission to dir '%s' is denied.", value);
     765              :     }
     766              : 
     767            4 :     g_free (offloading_s->path);
     768            4 :     offloading_s->path = g_strdup (value);
     769              : 
     770            4 :     if (offloading_s->offloading_mode == ML_SERVICE_OFFLOADING_MODE_TRAINING) {
     771            2 :       ret = _ml_service_training_offloading_set_path (mls, offloading_s->path);
     772              :     }
     773              :   }
     774              : 
     775            4 :   return ret;
     776              : }
     777              : 
     778              : /**
     779              :  * @brief Internal function to set the services in ml-service offloading handle.
     780              :  */
     781              : static int
     782           80 : _ml_service_offloading_set_service (ml_service_s * mls, const gchar * key,
     783              :     const gchar * value)
     784              : {
     785              :   _ml_service_offloading_s *offloading_s;
     786              : 
     787           80 :   if (!STR_IS_VALID (key) || !STR_IS_VALID (value)) {
     788            0 :     _ml_error_report_return (ML_ERROR_INVALID_PARAMETER,
     789              :         "The parameter, 'key' or 'value' is null or empty string. It should be a valid string.");
     790              :   }
     791           80 :   offloading_s = (_ml_service_offloading_s *) mls->priv;
     792              : 
     793           80 :   g_hash_table_insert (offloading_s->option_table, g_strdup (key),
     794           80 :       g_strdup (value));
     795              : 
     796           80 :   return ML_ERROR_NONE;
     797              : }
     798              : 
     799              : /**
     800              :  * @brief Internal function to parse service info from config file.
     801              :  */
     802              : static int
     803           30 : _ml_service_offloading_parse_services (ml_service_s * mls, JsonObject * object)
     804              : {
     805              :   GList *list, *iter;
     806           30 :   int status = ML_ERROR_NONE;
     807              : 
     808           30 :   list = json_object_get_members (object);
     809          110 :   for (iter = list; iter != NULL; iter = g_list_next (iter)) {
     810           80 :     const gchar *key = iter->data;
     811           80 :     JsonNode *json_node = json_object_get_member (object, key);
     812           80 :     gchar *val = json_to_string (json_node, TRUE);
     813              : 
     814           80 :     if (val) {
     815           80 :       status = _ml_service_offloading_set_service (mls, key, val);
     816           80 :       g_free (val);
     817              : 
     818           80 :       if (status != ML_ERROR_NONE) {
     819            0 :         _ml_error_report ("Failed to set service key '%s'.", key);
     820            0 :         break;
     821              :       }
     822              :     }
     823              :   }
     824           30 :   g_list_free (list);
     825              : 
     826           30 :   return status;
     827              : }
     828              : 
     829              : /**
     830              :  * @brief Private function to release the pipeline service
     831              :  */
     832              : static void
     833            2 : _cleanup_pipeline_service (gpointer data)
     834              : {
     835              :   int ret;
     836            2 :   ml_service_h service_h = data;
     837              : 
     838            2 :   ret = ml_service_destroy (service_h);
     839            2 :   if (ML_ERROR_NONE != ret) {
     840            0 :     _ml_error_report ("Failed to destroy the pipeline service.");
     841              :   }
     842            2 : }
     843              : 
     844              : /**
     845              :  * @brief Internal function to create ml-offloading data with given ml-option handle.
     846              :  */
     847              : static int
     848           30 : _ml_service_offloading_create_from_option (ml_service_s * mls,
     849              :     ml_option_h option)
     850              : {
     851              :   _ml_service_offloading_s *offloading_s;
     852           30 :   edge_info_s *edge_info = NULL;
     853           30 :   int ret = ML_ERROR_NONE;
     854           30 :   gchar *_path = NULL;
     855              : 
     856           30 :   mls->priv = offloading_s = g_try_new0 (_ml_service_offloading_s, 1);
     857           30 :   if (offloading_s == NULL) {
     858           30 :     _ml_error_report_return (ML_ERROR_OUT_OF_MEMORY,
     859              :         "Failed to allocate memory for the service handle's private data. Out of memory?");
     860              :   }
     861              : 
     862           30 :   offloading_s->option_table =
     863           30 :       g_hash_table_new_full (g_str_hash, g_str_equal, g_free, g_free);
     864           30 :   if (!offloading_s->option_table) {
     865            0 :     _ml_error_report_return (ML_ERROR_OUT_OF_MEMORY,
     866              :         "Failed to allocate memory for the option table of ml-service offloading. Out of memory?");
     867              :   }
     868              : 
     869           30 :   offloading_s->service_table =
     870           30 :       g_hash_table_new_full (g_str_hash, g_str_equal, g_free,
     871              :       _cleanup_pipeline_service);
     872           30 :   if (!offloading_s->service_table) {
     873            0 :     _ml_error_report_return (ML_ERROR_OUT_OF_MEMORY,
     874              :         "Failed to allocate memory for the service table of ml-service offloading. Out of memory?");
     875              :   }
     876              : 
     877           30 :   if (ML_ERROR_NONE == ml_option_get (option, "path", (void **) (&_path))) {
     878            0 :     ret = _ml_service_offloading_set_information (mls, "path", _path);
     879            0 :     if (ML_ERROR_NONE != ret) {
     880            0 :       _ml_error_report_return (ret,
     881              :           "Failed to set path in ml-service offloading handle.");
     882              :     }
     883              :   }
     884              : 
     885           30 :   _mlrs_get_edge_info (option, &edge_info);
     886              : 
     887           30 :   offloading_s->node_type = edge_info->node_type;
     888           30 :   ret = _mlrs_create_edge_handle (mls, edge_info);
     889           30 :   _mlrs_release_edge_info (edge_info);
     890              : 
     891           30 :   return ret;
     892              : }
     893              : 
     894              : /**
     895              :  * @brief Internal function to convert json (string member) to ml-option.
     896              :  */
     897              : static int
     898           30 : _ml_service_offloading_convert_to_option (JsonObject * object,
     899              :     ml_option_h * option_h)
     900              : {
     901           30 :   ml_option_h tmp = NULL;
     902           30 :   int status = ML_ERROR_NONE;
     903              :   const gchar *key, *val;
     904              :   GList *list, *iter;
     905              : 
     906           30 :   if (!object || !option_h)
     907           30 :     return ML_ERROR_INVALID_PARAMETER;
     908              : 
     909           30 :   status = ml_option_create (&tmp);
     910           30 :   if (status != ML_ERROR_NONE) {
     911            0 :     _ml_error_report_return (status,
     912              :         "Failed to convert json to ml-option, cannot create ml-option handle.");
     913              :   }
     914              : 
     915           30 :   list = json_object_get_members (object);
     916          186 :   for (iter = list; iter != NULL; iter = g_list_next (iter)) {
     917          156 :     key = iter->data;
     918              : 
     919          156 :     if (g_ascii_strcasecmp (key, "training") == 0) {
     920              :       /* It is not a value to set for option. */
     921            6 :       continue;
     922              :     }
     923              : 
     924          150 :     val = _ml_service_get_json_string_member (object, key);
     925              : 
     926          150 :     status = ml_option_set (tmp, key, g_strdup (val), g_free);
     927          150 :     if (status != ML_ERROR_NONE) {
     928            0 :       _ml_error_report ("Failed to set %s option: %s.", key, val);
     929            0 :       break;
     930              :     }
     931              :   }
     932           30 :   g_list_free (list);
     933              : 
     934           30 :   if (status == ML_ERROR_NONE) {
     935           30 :     *option_h = tmp;
     936              :   } else {
     937            0 :     ml_option_destroy (tmp);
     938              :   }
     939              : 
     940           30 :   return status;
     941              : }
     942              : 
     943              : /**
     944              :  * @brief Internal function to parse configuration file to create offloading service.
     945              :  */
     946              : int
     947           32 : _ml_service_offloading_create (ml_service_h handle, JsonObject * object)
     948              : {
     949           32 :   ml_service_s *mls = (ml_service_s *) handle;
     950              :   int status;
     951           32 :   ml_option_h option = NULL;
     952              :   JsonObject *offloading;
     953              : 
     954           32 :   if (!mls || !object) {
     955           32 :     _ml_error_report_return (ML_ERROR_INVALID_PARAMETER,
     956              :         "Failed to create offloading handle, invalid parameter.");
     957              :   }
     958              : 
     959           30 :   offloading = json_object_get_object_member (object, "offloading");
     960           30 :   if (!offloading) {
     961            0 :     _ml_error_report_return (ML_ERROR_INVALID_PARAMETER,
     962              :         "Failed to get 'offloading' member from configuration file.");
     963              :   }
     964              : 
     965           30 :   status = _ml_service_offloading_convert_to_option (offloading, &option);
     966           30 :   if (status != ML_ERROR_NONE) {
     967            0 :     _ml_error_report ("Failed to set ml-option from configuration file.");
     968            0 :     goto done;
     969              :   }
     970              : 
     971           30 :   status = _ml_service_offloading_create_from_option (mls, option);
     972           30 :   if (status != ML_ERROR_NONE) {
     973            0 :     _ml_error_report ("Failed to create ml-service offloading.");
     974            0 :     goto done;
     975              :   }
     976              : 
     977           30 :   if (json_object_has_member (object, "services")) {
     978              :     JsonObject *svc_object;
     979              : 
     980           30 :     svc_object = json_object_get_object_member (object, "services");
     981           30 :     status = _ml_service_offloading_parse_services (mls, svc_object);
     982           30 :     if (status != ML_ERROR_NONE) {
     983            0 :       _ml_logw ("Failed to parse services from configuration file.");
     984              :     }
     985              :   }
     986              : 
     987           30 :   if (json_object_has_member (offloading, "training")) {
     988            6 :     status = _ml_service_training_offloading_create (mls, offloading);
     989            6 :     if (status != ML_ERROR_NONE) {
     990            0 :       _ml_logw ("Failed to parse training from configuration file.");
     991              :     }
     992              :   }
     993              : 
     994           30 : done:
     995           30 :   if (option)
     996           30 :     ml_option_destroy (option);
     997              : 
     998           30 :   return status;
     999              : }
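
/* Illustrative sketch (editor's assumption, not from the measured source): a
 * hypothetical configuration fragment matching what
 * _ml_service_offloading_create() parses above.  The member names
 * ("offloading", "services", "service-type", "service-key", and the edge
 * options) come from this file; hosts, ports and service entries are
 * placeholder values only.
 *
 * {
 *   "offloading" : {
 *     "node-type" : "receiver",
 *     "connect-type" : "TCP",
 *     "host" : "localhost",
 *     "port" : "3000",
 *     "topic" : "offloading-topic"
 *   },
 *   "services" : {
 *     "registration-request" : {
 *       "service-type" : "model_raw",
 *       "service-key" : "image-classification"
 *     }
 *   }
 * }
 */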
    1000              : 
    1001              : /**
    1002              :  * @brief Internal function to start ml-service offloading.
    1003              :  */
    1004              : int
    1005            2 : _ml_service_offloading_start (ml_service_h handle)
    1006              : {
    1007            2 :   ml_service_s *mls = (ml_service_s *) handle;
    1008              :   _ml_service_offloading_s *offloading_s;
    1009            2 :   int ret = ML_ERROR_NONE;
    1010              : 
    1011            2 :   if (!_ml_service_handle_is_valid (mls)) {
    1012            0 :     _ml_error_report_return (ML_ERROR_INVALID_PARAMETER,
    1013              :         "The parameter, 'handle' (ml_service_h), is invalid. It should be a valid ml_service_h instance.");
    1014              :   }
    1015              : 
    1016            2 :   offloading_s = (_ml_service_offloading_s *) mls->priv;
    1017              : 
    1018            2 :   if (offloading_s->offloading_mode == ML_SERVICE_OFFLOADING_MODE_TRAINING) {
    1019            2 :     ret = _ml_service_training_offloading_start (mls);
    1020            2 :     if (ret != ML_ERROR_NONE) {
    1021            0 :       _ml_error_report ("Failed to start training offloading.");
    1022              :     }
    1023              :   }
    1024              : 
    1025            2 :   return ret;
    1026              : }
    1027              : 
    1028              : /**
    1029              :  * @brief Internal function to stop ml-service offloading.
    1030              :  */
    1031              : int
    1032            2 : _ml_service_offloading_stop (ml_service_h handle)
    1033              : {
    1034            2 :   ml_service_s *mls = (ml_service_s *) handle;
    1035              :   _ml_service_offloading_s *offloading_s;
    1036            2 :   int ret = ML_ERROR_NONE;
    1037              : 
    1038            2 :   if (!_ml_service_handle_is_valid (mls)) {
    1039            0 :     _ml_error_report_return (ML_ERROR_INVALID_PARAMETER,
    1040              :         "The parameter, 'handle' (ml_service_h), is invalid. It should be a valid ml_service_h instance.");
    1041              :   }
    1042              : 
    1043            2 :   offloading_s = (_ml_service_offloading_s *) mls->priv;
    1044              : 
    1045            2 :   if (offloading_s->offloading_mode == ML_SERVICE_OFFLOADING_MODE_TRAINING) {
    1046            2 :     ret = _ml_service_training_offloading_stop (mls);
    1047            2 :     if (ret != ML_ERROR_NONE) {
    1048            0 :       _ml_error_report ("Failed to stop training offloading.");
    1049              :     }
    1050              :   }
    1051              : 
    1052            2 :   return ret;
    1053              : }
    1054              : 
    1055              : /**
     1056              :  * @brief Internal function to send a service request via ml-service offloading.
     1057              :  * Registers new information, such as neural network models or pipeline descriptions, on an offloading server.
    1058              :  */
    1059              : int
    1060           25 : _ml_service_offloading_request (ml_service_h handle, const char *key,
    1061              :     const ml_tensors_data_h input)
    1062              : {
    1063           25 :   ml_service_s *mls = (ml_service_s *) handle;
    1064           25 :   _ml_service_offloading_s *offloading_s = NULL;
    1065           25 :   const gchar *service_key = NULL;
    1066           25 :   nns_edge_data_h data_h = NULL;
    1067           25 :   int ret = NNS_EDGE_ERROR_NONE;
    1068           25 :   const gchar *service_str = NULL;
    1069           25 :   const gchar *description = NULL;
    1070           25 :   const gchar *name = NULL;
    1071           25 :   const gchar *activate = NULL;
    1072           25 :   ml_tensors_data_s *_in = NULL;
    1073           25 :   g_autoptr (JsonNode) service_node = NULL;
    1074              :   JsonObject *service_obj;
    1075              :   guint i;
    1076              : 
    1077           25 :   if (!_ml_service_handle_is_valid (mls)) {
    1078            2 :     _ml_error_report_return (ML_ERROR_INVALID_PARAMETER,
    1079              :         "The parameter, 'handle' (ml_service_h), is invalid. It should be a valid ml_service_h instance.");
    1080              :   }
    1081              : 
    1082           23 :   if (!STR_IS_VALID (key)) {
    1083            3 :     _ml_error_report_return (ML_ERROR_INVALID_PARAMETER,
     1084              :         "The parameter, 'key', is NULL or empty. It should be a valid string.");
    1085              :   }
    1086              : 
    1087           20 :   if (!input)
    1088            1 :     _ml_error_report_return (ML_ERROR_INVALID_PARAMETER,
     1089              :         "The parameter, input (ml_tensors_data_h), is NULL. It should be a valid ml_tensors_data_h instance, which is usually created by ml_tensors_data_create().");
    1090              : 
    1091           19 :   offloading_s = (_ml_service_offloading_s *) mls->priv;
    1092              : 
    1093           19 :   service_str = g_hash_table_lookup (offloading_s->option_table, key);
    1094           19 :   if (!service_str) {
    1095            2 :     _ml_error_report_return (ML_ERROR_INVALID_PARAMETER,
    1096              :         "The given service key, %s, is not registered in the ml-service offloading handle.",
    1097              :         key);
    1098              :   }
    1099              : 
    1100           17 :   service_node = json_from_string (service_str, NULL);
    1101           17 :   if (!service_node) {
    1102            0 :     _ml_error_report_return (ML_ERROR_INVALID_PARAMETER,
    1103              :         "Failed to parse the json string, %s.", service_str);
    1104              :   }
    1105           17 :   service_obj = json_node_get_object (service_node);
    1106           17 :   if (!service_obj) {
    1107            0 :     _ml_error_report_return (ML_ERROR_INVALID_PARAMETER,
    1108              :         "Failed to get the json object from the json node.");
    1109              :   }
    1110              : 
    1111              :   service_str =
    1112           17 :       _ml_service_get_json_string_member (service_obj, "service-type");
    1113           17 :   if (!service_str) {
    1114            0 :     _ml_error_report_return (ML_ERROR_INVALID_PARAMETER,
    1115              :         "Failed to get service type from the json object.");
    1116              :   }
    1117              : 
    1118           17 :   service_key = _ml_service_get_json_string_member (service_obj, "service-key");
    1119           17 :   if (!service_key) {
    1120            0 :     _ml_error_report_return (ML_ERROR_INVALID_PARAMETER,
    1121              :         "Failed to get service key from the json object.");
    1122              :   }
    1123              : 
    1124           17 :   ret = nns_edge_data_create (&data_h);
    1125           17 :   if (NNS_EDGE_ERROR_NONE != ret) {
    1126            0 :     _ml_error_report ("Failed to create an edge data.");
    1127            0 :     return ret;
    1128              :   }
    1129              : 
    1130           17 :   ret = nns_edge_data_set_info (data_h, "service-type", service_str);
    1131           17 :   if (NNS_EDGE_ERROR_NONE != ret) {
    1132            0 :     _ml_error_report ("Failed to set service type in edge data.");
    1133            0 :     goto done;
    1134              :   }
    1135           17 :   ret = nns_edge_data_set_info (data_h, "service-key", service_key);
    1136           17 :   if (NNS_EDGE_ERROR_NONE != ret) {
    1137            0 :     _ml_error_report ("Failed to set service key in edge data.");
    1138            0 :     goto done;
    1139              :   }
    1140              : 
    1141           17 :   description = _ml_service_get_json_string_member (service_obj, "description");
    1142           17 :   if (description) {
    1143           14 :     ret = nns_edge_data_set_info (data_h, "description", description);
    1144           14 :     if (NNS_EDGE_ERROR_NONE != ret) {
    1145            0 :       _ml_logi ("Failed to set description in edge data.");
    1146              :     }
    1147              :   }
    1148              : 
    1149           17 :   name = _ml_service_get_json_string_member (service_obj, "name");
    1150           17 :   if (name) {
    1151            6 :     ret = nns_edge_data_set_info (data_h, "name", name);
    1152            6 :     if (NNS_EDGE_ERROR_NONE != ret) {
    1153            0 :       _ml_logi ("Failed to set name in edge data.");
    1154              :     }
    1155              :   }
    1156              : 
    1157           17 :   activate = _ml_service_get_json_string_member (service_obj, "activate");
    1158           17 :   if (activate) {
    1159            6 :     ret = nns_edge_data_set_info (data_h, "activate", activate);
    1160            6 :     if (NNS_EDGE_ERROR_NONE != ret) {
    1161            0 :       _ml_logi ("Failed to set activate in edge data.");
    1162              :     }
    1163              :   }
    1164           17 :   _in = (ml_tensors_data_s *) input;
    1165           34 :   for (i = 0; i < _in->num_tensors; i++) {
    1166              :     ret =
    1167           17 :         nns_edge_data_add (data_h, _in->tensors[i].data, _in->tensors[i].size,
    1168              :         NULL);
    1169           17 :     if (NNS_EDGE_ERROR_NONE != ret) {
     1170            0 :       _ml_error_report ("Failed to add the input tensor data to the edge data.");
    1171            0 :       goto done;
    1172              :     }
    1173              :   }
    1174              : 
    1175           17 :   ret = nns_edge_send (offloading_s->edge_h, data_h);
    1176           17 :   if (NNS_EDGE_ERROR_NONE != ret) {
    1177            0 :     _ml_error_report
    1178              :         ("Failed to publish the data to register the offloading service.");
    1179              :   }
    1180              : 
    1181           17 : done:
    1182           17 :   if (data_h)
    1183           17 :     nns_edge_data_destroy (data_h);
    1184           17 :   return ret;
    1185              : }
    1186              : 
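The request path above resolves the registered service description by the given key, so the JSON stored in the option table must carry at least the "service-type" and "service-key" members; "description", "name", and "activate" are forwarded when present, and every tensor of the input is appended to the outgoing nns-edge data. The sketch below is not part of this file, uses a hypothetical key and service description, and assumes the public ml_service_request() wrapper forwards to _ml_service_offloading_request() for offloading handles.

/*
 * Hypothetical service description registered under the key
 * "model_registration_key" in the handle's option table:
 *
 * {
 *   "service-type" : "model_raw",
 *   "service-key"  : "model_registration_test_key",
 *   "description"  : "mobilenet model",
 *   "name"         : "mobilenet_v1.tflite",
 *   "activate"     : "true"
 * }
 */
static int
send_model_registration (ml_service_h handle, ml_tensors_data_h model_data)
{
  /* Publishes the model data to the offloading server; the JSON members above
   * are copied into the nns-edge data as key/value info fields. */
  return ml_service_request (handle, "model_registration_key", model_data);
}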
    1187              : /**
     1188              :  * @brief Internal function to send a raw-data request to the ml-service offloading server.
     1189              :  * Wraps the given buffer in a single-tensor data structure and forwards it to _ml_service_offloading_request().
    1190              :  */
    1191              : int
    1192            8 : _ml_service_offloading_request_raw (ml_service_h handle, const char *key,
    1193              :     void *data, size_t len)
    1194              : {
    1195              :   ml_tensors_data_s input;
    1196              : 
    1197              :   /* Set internal data structure to send edge data. */
    1198            8 :   input.num_tensors = 1;
    1199            8 :   input.tensors[0].data = data;
    1200            8 :   input.tensors[0].size = len;
    1201              : 
    1202            8 :   return _ml_service_offloading_request (handle, key, &input);
    1203              : }
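The raw variant is a thin wrapper for callers inside the library (for example, the training-offloading module) that need to push a plain byte buffer, such as a serialized pipeline description, without constructing a public ml_tensors_data_h. A minimal sketch, with a hypothetical key and payload:

static int
send_pipeline_description (ml_service_h handle, const char *pipeline_desc)
{
  /* The buffer is wrapped as a single tensor; the terminating NUL byte is sent
   * as well so the receiver can treat the payload as a C string. */
  return _ml_service_offloading_request_raw (handle, "pipeline_registration_key",
      (void *) pipeline_desc, strlen (pipeline_desc) + 1);
}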
        

Generated by: LCOV version 2.0-1