1 : /* SPDX-License-Identifier: LGPL-2.1-only */
2 : /**
3 : * NNStreamer Common Header's Contents (pipeline extension)
4 : * Copyright (C) 2020 MyungJoo Ham <myungjoo.ham@samsung.com>
5 : */
6 : /**
7 : * @file nnstreamer_plugin_api_impl.c
8 : * @date 14 Apr 2020
9 : * @brief Common data for NNStreamer, the GStreamer plugin for neural networks
10 : * @see https://github.com/nnstreamer/nnstreamer
11 : * @author MyungJoo Ham <myungjoo.ham@samsung.com>
12 : * @bug No known bugs except for NYI items
13 : *
14 : */
15 :
16 : #include <nnstreamer_util.h>
17 : #include <string.h>
18 : #include <tensor_common.h>
19 :
20 : static const gchar *gst_tensor_time_sync_mode_string[] = {
21 : [SYNC_NOSYNC] = "nosync",
22 : [SYNC_SLOWEST] = "slowest",
23 : [SYNC_BASEPAD] = "basepad",
24 : [SYNC_REFRESH] = "refresh",
25 : [SYNC_END] = NULL
26 : };
27 :
28 : /**
29 : * @brief The old rank limit of tensors.
30 : */
31 : #define NNS_TENSOR_RANK_LIMIT_PREV (4)
32 :
33 : #define NNS_TENSOR_EXTRA_MAGIC 0xf00dc0de
34 :
35 : /**
36 : * @brief Data structure to describe a "extra" tensor data.
37 : * This represents the information of the NNS_TENSOR_SIZE_LIMIT-th memory block for tensor stream.
38 : */
39 : typedef struct
40 : {
41 : uint32_t magic;
42 : uint32_t version;
43 : uint32_t num_extra_tensors;
44 : uint64_t reserved;
45 : GstTensorInfo infos[NNS_TENSOR_SIZE_EXTRA_LIMIT];
46 : } GstTensorExtraInfo;
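
/**
 * Layout sketch (derived from the helpers below, for illustration only): when
 * more than NNS_TENSOR_MEMORY_MAX tensors are packed into one buffer, the last
 * memory block is laid out as
 *   [ GstTensorExtraInfo header ]
 *   [ data of the NNS_TENSOR_MEMORY_MAX-th tensor (extra->reserved bytes) ]
 *   [ data of extra tensor #0 (size from infos[0]) ]
 *   [ data of extra tensor #1 (size from infos[1]) ] ...
 */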
47 :
48 : /**
49 : * @brief Check if given memory has extra tensors.
50 : * @param[in] map GstMapInfo of GstMemory to be checked.
51 : * @return TRUE if @map has extra tensors, otherwise FALSE.
52 : */
53 : static gboolean
54 2910 : gst_memory_map_is_extra_tensor (GstMapInfo * map)
55 : {
56 : GstTensorExtraInfo *extra_info;
57 : gboolean is_extra;
58 :
59 2910 : g_return_val_if_fail (map != NULL, FALSE);
60 :
61 2910 : if (map->size < sizeof (GstTensorExtraInfo))
62 380 : return FALSE;
63 :
64 2530 : extra_info = (GstTensorExtraInfo *) map->data;
65 :
66 : /* check magic in header (extra info) of the memory */
67 2530 : is_extra = (extra_info && extra_info->magic == NNS_TENSOR_EXTRA_MAGIC);
68 :
69 2530 : return is_extra;
70 : }
71 :
72 : /**
73 : * @brief Initialize GstTensorExtraInfo structure with given @a memory.
74 : * @param[in/out] extra GstTensorExtraInfo to be initialized.
75 : * @param[in] reserved_size The memory size of extra memory block.
76 : */
77 : static void
78 28 : gst_tensor_extra_info_init (GstTensorExtraInfo * extra, gsize reserved_size)
79 : {
80 : guint i;
81 :
82 28 : g_return_if_fail (extra != NULL);
83 :
84 28 : extra->magic = NNS_TENSOR_EXTRA_MAGIC;
85 28 : extra->version = 0;
86 28 : extra->num_extra_tensors = 0;
87 :
88 : /* set reserved size of NNS_TENSOR_SIZE_LIMIT-th memory */
89 28 : extra->reserved = reserved_size;
90 6748 : for (i = 0; i < NNS_TENSOR_SIZE_EXTRA_LIMIT; ++i) {
91 6720 : gst_tensor_info_init (&extra->infos[i]);
92 : }
93 : }
94 :
95 : /**
96 : * @brief Get the corresponding mode from the string value.
97 : * @param[in] str The string value for the mode.
98 : * @return Corresponding mode for the string. SYNC_END for errors.
99 : */
100 : tensor_time_sync_mode
101 66 : gst_tensor_time_sync_get_mode (const gchar * str)
102 : {
103 : gint index;
104 :
105 66 : index = find_key_strv (gst_tensor_time_sync_mode_string, str);
106 :
107 66 : return (index < 0) ? SYNC_END : index;
108 : }
109 :
110 : /**
111 : * @brief Get the time-sync mode string.
112 : * @return Corresponding mode string.
113 : */
114 : const gchar *
115 2 : gst_tensor_time_sync_get_mode_string (tensor_time_sync_mode mode)
116 : {
117 2 : return gst_tensor_time_sync_mode_string[mode];
118 : }
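
/*
 * A minimal usage sketch for the two mode helpers above; the mode string given
 * here is whatever a user supplied, i.e., illustrative.
 */
static void
example_print_sync_mode (const gchar * mode_str)
{
  tensor_time_sync_mode mode = gst_tensor_time_sync_get_mode (mode_str);

  if (mode == SYNC_END) {
    nns_loge ("Unknown time-sync mode '%s'", mode_str);
    return;
  }

  ml_logd ("time-sync mode: %s", gst_tensor_time_sync_get_mode_string (mode));
}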
119 :
120 : /**
121 : * @brief Setup time sync option.
122 : * @param[in/out] sync "this" pointer. Sync mode & option MUST BE set already.
123 : * @return True if successfully set the option.
124 : */
125 : gboolean
126 95 : gst_tensor_time_sync_set_option_data (tensor_time_sync_data * sync)
127 : {
128 95 : g_return_val_if_fail (sync != NULL, FALSE);
129 :
130 95 : if (sync->mode == SYNC_END || sync->option == NULL)
131 66 : return FALSE;
132 :
133 29 : switch (sync->mode) {
134 0 : case SYNC_NOSYNC:
135 0 : break;
136 0 : case SYNC_SLOWEST:
137 0 : break;
138 29 : case SYNC_BASEPAD:
139 : {
140 29 : g_auto (GStrv) strv = g_strsplit (sync->option, ":", 2);
141 : guint sink_id;
142 : guint duration;
143 :
144 29 : if (strv[0] != NULL)
145 29 : sink_id = (guint) g_ascii_strtoull (strv[0], NULL, 10);
146 : else
147 0 : sink_id = 0;
148 :
149 29 : if (strv[1] != NULL)
150 27 : duration = (guint) g_ascii_strtoull (strv[1], NULL, 10);
151 : else
152 2 : duration = G_MAXINT;
153 :
154 29 : sync->data_basepad.sink_id = sink_id;
155 29 : sync->data_basepad.duration = duration;
156 29 : break;
157 : }
158 0 : default:
159 : /* unknown mode */
160 0 : GST_WARNING ("Unknown mode = %d", sync->mode);
161 0 : return FALSE;
162 : }
163 :
164 29 : return TRUE;
165 : }
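
/*
 * A minimal sketch of the basepad option string format ("<sink_id>:<duration>")
 * parsed above; the literal values are illustrative only, and ownership
 * handling of the 'option' string is omitted.
 */
static void
example_configure_basepad (tensor_time_sync_data * sync)
{
  sync->mode = SYNC_BASEPAD;
  sync->option = g_strdup ("0:33333333");  /* base pad 0; duration is compared against PTS differences */

  if (!gst_tensor_time_sync_set_option_data (sync))
    nns_loge ("Failed to parse time-sync option '%s'", sync->option);
}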
166 :
167 : /**
168 : * @brief Internal function to detect EOS using the number of empty pads.
169 : * @param[in] collect Collect pad.
170 : * @param[in] sync Synchronization option.
171 : * @param[in] empty The number of empty pads (pad has no buffer).
172 : * @return True if EOS.
173 : */
174 : static gboolean
175 11512 : _gst_tensor_time_sync_is_eos (GstCollectPads * collect,
176 : tensor_time_sync_data * sync, guint empty)
177 : {
178 : guint total;
179 11512 : gboolean is_eos = FALSE;
180 :
181 11512 : total = g_slist_length (collect->data);
182 :
183 11512 : switch (sync->mode) {
184 9592 : case SYNC_REFRESH:
185 9592 : if (empty == total)
186 2 : is_eos = TRUE;
187 9592 : break;
188 1920 : default:
189 1920 : if (empty > 0)
190 206 : is_eos = TRUE;
191 1920 : break;
192 : }
193 :
194 11512 : return is_eos;
195 : }
196 :
197 : /**
198 : * @brief A function call to decide the current timestamp among collected pads based on PTS.
199 : * The current timestamp is decided according to the sync option,
200 : * and GstMeta is copied from the buffer that provides it.
201 : */
202 : gboolean
203 5860 : gst_tensor_time_sync_get_current_time (GstCollectPads * collect,
204 : tensor_time_sync_data * sync, GstClockTime * current_time,
205 : GstBuffer * tensors_buf)
206 : {
207 5860 : GSList *walk = NULL;
208 : guint count, empty_pad;
209 :
210 5860 : g_return_val_if_fail (collect != NULL, FALSE);
211 5860 : g_return_val_if_fail (sync != NULL, FALSE);
212 5860 : g_return_val_if_fail (current_time != NULL, FALSE);
213 :
214 5860 : walk = collect->data;
215 5860 : count = empty_pad = 0;
216 :
217 18645 : while (walk) {
218 : GstCollectData *data;
219 : GstBuffer *buf;
220 12785 : gboolean need_update = FALSE;
221 :
222 12785 : data = (GstCollectData *) walk->data;
223 12785 : buf = gst_collect_pads_peek (collect, data);
224 12785 : walk = g_slist_next (walk);
225 :
226 12785 : if (buf) {
227 7463 : switch (sync->mode) {
228 6807 : case SYNC_NOSYNC:
229 : /* fall-through */
230 : case SYNC_SLOWEST:
231 : case SYNC_REFRESH:
232 6807 : if (*current_time < GST_BUFFER_PTS (buf))
233 5352 : need_update = TRUE;
234 6807 : break;
235 656 : case SYNC_BASEPAD:
236 656 : if (count == sync->data_basepad.sink_id)
237 296 : need_update = TRUE;
238 656 : break;
239 0 : default:
240 0 : break;
241 : }
242 7463 : if (need_update) {
243 5648 : *current_time = GST_BUFFER_PTS (buf);
244 5648 : gst_buffer_copy_into (tensors_buf, buf, GST_BUFFER_COPY_METADATA,
245 : 0, -1);
246 : }
247 7463 : gst_buffer_unref (buf);
248 : } else {
249 5322 : empty_pad++;
250 : }
251 :
252 12785 : count++;
253 : }
254 :
255 5860 : return _gst_tensor_time_sync_is_eos (collect, sync, empty_pad);
256 : }
257 :
258 : /**
259 : * @brief A function to be called while processing a flushing event.
260 : * It clears the old buffers and resets the pad data.
261 : */
262 : void
263 152 : gst_tensor_time_sync_flush (GstCollectPads * collect)
264 : {
265 : GSList *walk;
266 : GstTensorCollectPadData *pad;
267 :
268 152 : g_return_if_fail (collect != NULL);
269 :
270 152 : walk = collect->data;
271 633 : while (walk) {
272 481 : pad = (GstTensorCollectPadData *) walk->data;
273 :
274 481 : if (pad->buffer) {
275 418 : gst_buffer_unref (pad->buffer);
276 418 : pad->buffer = NULL;
277 : }
278 :
279 481 : walk = g_slist_next (walk);
280 : }
281 : }
282 :
283 : /**
284 : * @brief Internal function to update buffer in pad data based on the sync mode.
285 : */
286 : static gboolean
287 3780 : _gst_tensor_time_sync_buffer_update (GstCollectPads * collect,
288 : GstCollectData * data, GstClockTime current, GstClockTime base,
289 : tensor_time_sync_data * sync)
290 : {
291 : GstTensorCollectPadData *pad;
292 : GstBuffer *buf;
293 :
294 3780 : pad = (GstTensorCollectPadData *) data;
295 :
296 3780 : buf = gst_collect_pads_peek (collect, data);
297 3780 : if (buf != NULL) {
298 3647 : if (GST_BUFFER_PTS (buf) < current) {
299 672 : gst_buffer_unref (buf);
300 672 : if (pad->buffer != NULL)
301 669 : gst_buffer_unref (pad->buffer);
302 672 : pad->buffer = gst_collect_pads_pop (collect, data);
303 672 : return FALSE;
304 : }
305 :
306 4400 : if ((sync->mode == SYNC_SLOWEST && pad->buffer != NULL &&
307 1425 : (ABS (GST_CLOCK_DIFF (current, GST_BUFFER_PTS (pad->buffer))) <
308 1425 : ABS (GST_CLOCK_DIFF (current, GST_BUFFER_PTS (buf))))) ||
309 3946 : (sync->mode == SYNC_BASEPAD && pad->buffer != NULL &&
310 1135 : (((GstClockTime) ABS (GST_CLOCK_DIFF (current,
311 : GST_BUFFER_PTS (buf)))) > base))) {
312 : /* keep last buffer */
313 : } else {
314 : /* update last buffer */
315 2240 : if (pad->buffer != NULL)
316 1825 : gst_buffer_unref (pad->buffer);
317 2240 : pad->buffer = gst_collect_pads_pop (collect, data);
318 : }
319 :
320 2975 : gst_buffer_unref (buf);
321 : }
322 :
323 3108 : return TRUE;
324 : }
325 :
326 : /**
327 : * @brief A function call to make tensors from collected pads.
328 : * It decides which buffer is going to be used according to the sync option.
329 : * @return True to push buffer.
330 : */
331 : gboolean
332 6325 : gst_tensor_time_sync_buffer_from_collectpad (GstCollectPads * collect,
333 : tensor_time_sync_data * sync, GstClockTime current_time,
334 : GstBuffer * tensors_buf, GstTensorsConfig * configs, gboolean * is_eos)
335 : {
336 6325 : GSList *walk = NULL;
337 : GstCollectData *data;
338 : GstTensorCollectPadData *pad;
339 6325 : GstBuffer *buf = NULL;
340 : GstMemory *mem;
341 6325 : gint old_numerator = G_MAXINT;
342 6325 : gint old_denominator = G_MAXINT;
343 : guint counting, empty_pad;
344 : GstTensorsConfig in_configs;
345 6325 : GstClockTime base_time = 0;
346 : GstTensorInfo *_info;
347 : guint i, j;
348 : GstMemory *in_mem[NNS_TENSOR_SIZE_LIMIT];
349 : tensor_format in_formats[NNS_TENSOR_SIZE_LIMIT];
350 :
351 12650 : g_return_val_if_fail (collect != NULL, FALSE);
352 6325 : g_return_val_if_fail (sync != NULL, FALSE);
353 6325 : g_return_val_if_fail (tensors_buf != NULL, FALSE);
354 6325 : g_return_val_if_fail (configs != NULL, FALSE);
355 6325 : g_return_val_if_fail (is_eos != NULL, FALSE);
356 :
357 6325 : walk = collect->data;
358 6325 : counting = empty_pad = 0;
359 :
360 6325 : if (sync->mode == SYNC_BASEPAD) {
361 712 : walk = g_slist_nth (walk, sync->data_basepad.sink_id);
362 712 : if (walk == NULL) {
363 0 : GST_ERROR_OBJECT (collect, "Cannot get GstCollectData from GSList");
364 0 : return FALSE;
365 : }
366 :
367 712 : data = (GstCollectData *) walk->data;
368 712 : pad = (GstTensorCollectPadData *) data;
369 :
370 712 : buf = gst_collect_pads_peek (collect, data);
371 712 : if (buf != NULL) {
372 650 : if (pad->buffer != NULL)
373 625 : base_time =
374 625 : MIN ((GstClockTimeDiff) sync->data_basepad.duration,
375 : ABS (GST_CLOCK_DIFF (GST_BUFFER_PTS (buf),
376 : GST_BUFFER_PTS (pad->buffer))) - 1);
377 650 : gst_buffer_unref (buf);
378 : }
379 : }
380 :
381 6325 : walk = collect->data;
382 :
383 6325 : gst_tensors_config_init (&in_configs);
384 :
385 19280 : while (walk) {
386 13628 : gboolean configured = FALSE;
387 13628 : gboolean is_empty = FALSE;
388 :
389 13628 : data = (GstCollectData *) walk->data;
390 13628 : pad = (GstTensorCollectPadData *) data;
391 :
392 13628 : if (gst_pad_has_current_caps (data->pad)) {
393 13627 : GstCaps *caps = gst_pad_get_current_caps (data->pad);
394 13627 : GstStructure *s = gst_caps_get_structure (caps, 0);
395 :
396 13627 : if (gst_tensors_config_validate (&in_configs))
397 7303 : gst_tensors_config_free (&in_configs);
398 :
399 13627 : gst_tensors_config_from_structure (&in_configs, s);
400 13627 : gst_caps_unref (caps);
401 :
402 13627 : configured = gst_tensors_config_validate (&in_configs);
403 : }
404 :
405 : /**
406 : * This would be an internal logic error.
407 : * in_configs should already have been confirmed valid at the negotiation phase,
408 : * and this function should be called in a running pipeline.
409 : * If a new sync mode is added (e.g., handling output when a pad gets a new buffer),
410 : * this may cause an unexpected exception.
411 : */
412 13628 : if (!configured) {
413 1 : return FALSE;
414 : }
415 :
416 13627 : if (in_configs.rate_d < old_denominator)
417 6324 : old_denominator = in_configs.rate_d;
418 13627 : if (in_configs.rate_n < old_numerator)
419 6532 : old_numerator = in_configs.rate_n;
420 :
421 13627 : walk = g_slist_next (walk);
422 :
423 13627 : switch (sync->mode) {
424 3780 : case SYNC_SLOWEST:
425 : /* fall-through */
426 : case SYNC_BASEPAD:
427 3780 : if (!_gst_tensor_time_sync_buffer_update (collect, data,
428 : current_time, base_time, sync))
429 672 : return FALSE;
430 3108 : buf = gst_buffer_ref (pad->buffer);
431 3108 : is_empty = (buf == NULL);
432 3108 : break;
433 257 : case SYNC_NOSYNC:
434 257 : buf = gst_collect_pads_pop (collect, data);
435 257 : is_empty = (buf == NULL);
436 257 : break;
437 9590 : case SYNC_REFRESH:
438 9590 : buf = gst_collect_pads_pop (collect, data);
439 9590 : if (buf != NULL) {
440 4796 : if (pad->buffer != NULL) {
441 4794 : gst_buffer_unref (pad->buffer);
442 : }
443 4796 : pad->buffer = gst_buffer_ref (buf);
444 : } else {
445 4794 : if (pad->buffer == NULL) {
446 0 : *is_eos = FALSE;
447 0 : ml_logd ("Not all buffers have arrived yet.");
448 0 : return FALSE;
449 : }
450 4794 : is_empty = TRUE;
451 4794 : buf = gst_buffer_ref (pad->buffer);
452 : }
453 9590 : break;
454 0 : default:
455 0 : break;
456 : }
457 :
458 12955 : if (GST_IS_BUFFER (buf)) {
459 12955 : guint32 n_tensor = gst_tensor_buffer_get_count (buf);
460 12955 : buf = gst_tensor_buffer_from_config (buf, &in_configs);
461 :
462 : /** These are internal logic errors. If the given inputs were incorrect,
463 : the negotiation should have failed before this stage. */
464 12955 : if (gst_tensors_config_is_static (&in_configs))
465 12929 : g_assert (n_tensor == in_configs.info.num_tensors);
466 12955 : g_assert ((counting + n_tensor) <= NNS_TENSOR_SIZE_LIMIT);
467 :
468 12955 : if (gst_tensors_config_is_flexible (&in_configs))
469 26 : configs->info.format = _NNS_TENSOR_FORMAT_FLEXIBLE;
470 :
471 26290 : for (i = 0; i < n_tensor; ++i) {
472 13335 : in_mem[counting] = gst_tensor_buffer_get_nth_memory (buf, i);
473 :
474 : /* set info */
475 13335 : gst_tensor_info_copy (gst_tensors_info_get_nth_info (&configs->info,
476 13335 : counting), gst_tensors_info_get_nth_info (&in_configs.info, i));
477 13335 : in_formats[counting] = in_configs.info.format;
478 13335 : counting++;
479 : }
480 :
481 12955 : gst_buffer_unref (buf);
482 : }
483 12955 : if (is_empty)
484 4794 : empty_pad++;
485 : }
486 :
487 : /* append memories to output buffer */
488 18191 : for (i = 0; i < counting; i++) {
489 12539 : _info = gst_tensors_info_get_nth_info (&configs->info, i);
490 12539 : mem = in_mem[i];
491 :
492 12539 : if (gst_tensors_config_is_flexible (configs)) {
493 : /* append header if input tensor is not flexible */
494 46 : if (in_formats[i] != _NNS_TENSOR_FORMAT_FLEXIBLE) {
495 : GstTensorMetaInfo meta;
496 :
497 20 : gst_tensor_info_convert_to_meta (_info, &meta);
498 20 : mem = gst_tensor_meta_info_append_header (&meta, in_mem[i]);
499 20 : gst_memory_unref (in_mem[i]);
500 : }
501 : }
502 :
503 12539 : if (!gst_tensor_buffer_append_memory (tensors_buf, mem, _info)) {
504 0 : for (j = i + 1; j < counting; j++)
505 0 : gst_memory_unref (in_mem[j]);
506 :
507 0 : nns_loge ("Failed to append memory to buffer.");
508 0 : return FALSE;
509 : }
510 : }
511 :
512 5652 : configs->info.num_tensors = counting;
513 5652 : configs->rate_d = old_denominator;
514 5652 : configs->rate_n = old_numerator;
515 :
516 5652 : GST_BUFFER_PTS (tensors_buf) = current_time;
517 :
518 5652 : gst_tensors_config_free (&in_configs);
519 :
520 : /* check eos */
521 5652 : *is_eos = _gst_tensor_time_sync_is_eos (collect, sync, empty_pad);
522 5652 : return !(*is_eos);
523 : }
524 :
525 : /**
526 : * @brief Configure gst-buffer with tensors information.
527 : * NNStreamer handles single memory chunk as single tensor.
528 : * If the incoming buffer has invalid memories, separate them and generate a new gst-buffer using the tensors information.
529 : * Note that this function always takes the ownership of input buffer.
530 : * @param in input buffer
531 : * @param config tensors config structure
532 : * @return Newly allocated buffer. Null if failed. Caller should unref the buffer using gst_buffer_unref().
533 : */
534 : GstBuffer *
535 35591 : gst_tensor_buffer_from_config (GstBuffer * in, GstTensorsConfig * config)
536 : {
537 35591 : GstBuffer *out = NULL;
538 35591 : GstMemory *all = NULL;
539 : GstMapInfo map;
540 : guint i, num;
541 : gsize total, offset;
542 : gsize mem_size[NNS_TENSOR_MEMORY_MAX];
543 35591 : gboolean configured = FALSE;
544 35591 : gboolean is_extra = FALSE;
545 :
546 35591 : if (!GST_IS_BUFFER (in)) {
547 1 : nns_loge ("Failed to get tensor buffer, invalid input buffer.");
548 35591 : return NULL;
549 : }
550 :
551 35590 : if (!gst_tensors_config_validate (config)) {
552 2 : nns_loge ("Failed to get tensor buffer, invalid tensor configuration.");
553 2 : goto error;
554 : }
555 :
556 35588 : num = gst_buffer_n_memory (in);
557 35588 : total = gst_buffer_get_size (in);
558 :
559 : /* get memory size */
560 35588 : if (gst_tensors_config_is_static (config)) {
561 35512 : if (num == config->info.num_tensors) {
562 : /* Do nothing, pass input buffer. */
563 35505 : out = gst_buffer_ref (in);
564 35505 : goto done;
565 : }
566 :
567 7 : num = config->info.num_tensors;
568 7 : if ((is_extra = (num > NNS_TENSOR_MEMORY_MAX)))
569 5 : num = NNS_TENSOR_MEMORY_MAX;
570 93 : for (i = 0; i < num; i++)
571 86 : mem_size[i] = gst_tensors_info_get_size (&config->info, i);
572 7 : if (is_extra) {
573 5 : mem_size[num - 1] += sizeof (GstTensorExtraInfo);
574 25 : for (; i < config->info.num_tensors; i++)
575 20 : mem_size[num - 1] += gst_tensors_info_get_size (&config->info, i);
576 : }
577 : } else {
578 76 : if (num > 1) {
579 : /* Suppose it is already configured. */
580 20 : out = gst_buffer_ref (in);
581 20 : goto done;
582 : }
583 :
584 56 : if (!gst_buffer_map (in, &map, GST_MAP_READ)) {
585 0 : nns_loge ("Failed to get tensor buffer, cannot get the memory info.");
586 0 : goto error;
587 : }
588 :
589 56 : num = 0;
590 56 : offset = 0;
591 114 : while (offset < total) {
592 : GstTensorMetaInfo meta;
593 58 : gpointer h = map.data + offset;
594 :
595 58 : if (num >= NNS_TENSOR_MEMORY_MAX - 1) {
596 : /* Suppose the remaining memory may include extra tensors. */
597 0 : mem_size[num++] = total - offset;
598 0 : break;
599 : }
600 :
601 58 : gst_tensor_meta_info_parse_header (&meta, h);
602 58 : mem_size[num] = gst_tensor_meta_info_get_header_size (&meta);
603 58 : mem_size[num] += gst_tensor_meta_info_get_data_size (&meta);
604 :
605 58 : offset += mem_size[num];
606 58 : num++;
607 : }
608 :
609 56 : gst_buffer_unmap (in, &map);
610 :
611 56 : if (num == 1) {
612 : /* Do nothing, pass input buffer. */
613 55 : out = gst_buffer_ref (in);
614 55 : goto done;
615 : }
616 : }
617 :
618 : /* configure output buffer */
619 8 : out = gst_buffer_new ();
620 8 : all = gst_buffer_get_all_memory (in);
621 8 : offset = 0;
622 :
623 95 : for (i = 0; i < num; i++) {
624 : /* invalid memory size */
625 88 : if (offset + mem_size[i] > total) {
626 1 : nns_loge ("Failed to get tensor buffer, data size is mismatched.");
627 1 : goto error;
628 : }
629 :
630 87 : gst_buffer_append_memory (out, gst_memory_share (all, offset, mem_size[i]));
631 87 : offset += mem_size[i];
632 : }
633 :
634 7 : gst_buffer_copy_into (out, in, GST_BUFFER_COPY_METADATA, 0, -1);
635 :
636 35587 : done:
637 35587 : configured = TRUE;
638 35590 : error:
639 35590 : gst_buffer_unref (in);
640 :
641 35590 : if (all)
642 8 : gst_memory_unref (all);
643 :
644 35590 : if (!configured) {
645 3 : if (out) {
646 1 : gst_buffer_unref (out);
647 1 : out = NULL;
648 : }
649 : }
650 :
651 35590 : return out;
652 : }
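
/*
 * Usage sketch: gst_tensor_buffer_from_config() always consumes the input
 * buffer, so the caller must not unref it afterwards. Names are illustrative.
 */
static GstFlowReturn
example_normalize_buffer (GstBuffer * in, GstTensorsConfig * config,
    GstBuffer ** out)
{
  *out = gst_tensor_buffer_from_config (in, config);
  /* 'in' has already been unreffed here, even on failure. */
  return (*out != NULL) ? GST_FLOW_OK : GST_FLOW_ERROR;
}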
653 :
654 : /**
655 : * @brief Internal struct to handle aggregation data in hash table.
656 : */
657 : typedef struct
658 : {
659 : GstAdapter *adapter;
660 : } gst_tensor_aggregation_data_s;
661 :
662 : #define AGGREGATION_DEFAULT_KEY 0xC0FFEEU
663 :
664 : /**
665 : * @brief Internal function to free aggregation data.
666 : */
667 : static void
668 867 : gst_tensor_aggregation_free_data (gpointer data)
669 : {
670 : gst_tensor_aggregation_data_s *aggr;
671 :
672 867 : aggr = (gst_tensor_aggregation_data_s *) data;
673 867 : if (aggr) {
674 867 : gst_adapter_clear (aggr->adapter);
675 867 : g_object_unref (aggr->adapter);
676 :
677 867 : g_free (aggr);
678 : }
679 867 : }
680 :
681 : /**
682 : * @brief Internal function to add new aggregation data.
683 : */
684 : static gst_tensor_aggregation_data_s *
685 924 : gst_tensor_aggregation_add_data (GHashTable * table, const guint32 key)
686 : {
687 : gst_tensor_aggregation_data_s *aggr;
688 : guint32 hashkey;
689 :
690 924 : g_return_val_if_fail (table != NULL, NULL);
691 924 : if (key == 0)
692 0 : hashkey = AGGREGATION_DEFAULT_KEY;
693 : else
694 924 : hashkey = key;
695 924 : aggr = g_new0 (gst_tensor_aggregation_data_s, 1);
696 924 : aggr->adapter = gst_adapter_new ();
697 :
698 924 : g_hash_table_insert (table, GINT_TO_POINTER (hashkey), aggr);
699 924 : return aggr;
700 : }
701 :
702 : /**
703 : * @brief Internal function to get aggregation data.
704 : */
705 : static gst_tensor_aggregation_data_s *
706 303 : gst_tensor_aggregation_get_data (GHashTable * table, const guint32 key)
707 : {
708 303 : g_return_val_if_fail (table != NULL, NULL);
709 :
710 315 : return (gst_tensor_aggregation_data_s *) g_hash_table_lookup (table,
711 12 : GINT_TO_POINTER (key == 0 ? AGGREGATION_DEFAULT_KEY : key));
712 : }
713 :
714 : /**
715 : * @brief Internal function to remove all buffers from aggregation data.
716 : */
717 : static void
718 3487 : gst_tensor_aggregation_clear_internal (gpointer key, gpointer value,
719 : gpointer user_data)
720 : {
721 : gst_tensor_aggregation_data_s *aggr;
722 :
723 : UNUSED (key);
724 : UNUSED (user_data);
725 :
726 3487 : aggr = (gst_tensor_aggregation_data_s *) value;
727 3487 : if (aggr) {
728 3487 : gst_adapter_clear (aggr->adapter);
729 : }
730 3487 : }
731 :
732 : /**
733 : * @brief Gets new hash table for tensor aggregation.
734 : * @return Newly allocated hash table, caller should release this using g_hash_table_destroy().
735 : */
736 : GHashTable *
737 921 : gst_tensor_aggregation_init (void)
738 : {
739 : GHashTable *table;
740 :
741 921 : table = g_hash_table_new_full (g_direct_hash, g_direct_equal, NULL,
742 : gst_tensor_aggregation_free_data);
743 :
744 : /**
745 : * Add a default adapter (for the case where the buffer has no specific id).
746 : * If a gst-buffer has a tensor-meta which includes a client-id,
747 : * e.g., when aggregating frames from multiple clients on a query-server pipeline,
748 : * the nnstreamer element should parse the meta and request the adapter with this id.
749 : * However, on a normal pipeline, the gst-buffer does not contain tensor-meta,
750 : * so the element may request the adapter with the default (zero) key.
751 : */
752 921 : gst_tensor_aggregation_add_data (table, AGGREGATION_DEFAULT_KEY);
753 :
754 921 : return table;
755 : }
756 :
757 : /**
758 : * @brief Clears buffers from adapter.
759 : * @param table a hash table instance initialized with gst_tensor_aggregation_init()
760 : * @param key the key to look up (set 0 to get the default adapter)
761 : */
762 : void
763 1 : gst_tensor_aggregation_clear (GHashTable * table, const guint32 key)
764 : {
765 : gst_tensor_aggregation_data_s *aggr;
766 :
767 1 : g_return_if_fail (table != NULL);
768 :
769 1 : aggr = gst_tensor_aggregation_get_data (table, key);
770 1 : gst_tensor_aggregation_clear_internal (NULL, aggr, NULL);
771 : }
772 :
773 : /**
774 : * @brief Clears buffers from all adapters in hash table.
775 : * @param table a hash table instance initialized with gst_tensor_aggregation_init()
776 : */
777 : void
778 3481 : gst_tensor_aggregation_clear_all (GHashTable * table)
779 : {
780 3481 : g_hash_table_foreach (table, gst_tensor_aggregation_clear_internal, NULL);
781 3481 : }
782 :
783 : /**
784 : * @brief Gets adapter from hash table.
785 : * @param table a hash table instance initialized with gst_tensor_aggregation_init()
786 : * @param key the key to look up (set 0 to get the default adapter)
787 : * @return gst-adapter instance. DO NOT release this instance.
788 : */
789 : GstAdapter *
790 303 : gst_tensor_aggregation_get_adapter (GHashTable * table, const guint32 key)
791 : {
792 : gst_tensor_aggregation_data_s *aggr;
793 :
794 303 : g_return_val_if_fail (table != NULL, NULL);
795 :
796 302 : aggr = gst_tensor_aggregation_get_data (table, key);
797 302 : if (!aggr) {
798 : /* append new data */
799 3 : aggr = gst_tensor_aggregation_add_data (table, key);
800 : }
801 :
802 302 : return aggr->adapter;
803 : }
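
/*
 * Lifecycle sketch for the aggregation helpers above; using key 0 selects the
 * default adapter created in gst_tensor_aggregation_init(). The buffer
 * handling shown here is illustrative.
 */
static void
example_aggregation_lifecycle (GstBuffer * incoming)
{
  GHashTable *table = gst_tensor_aggregation_init ();
  GstAdapter *adapter = gst_tensor_aggregation_get_adapter (table, 0);

  /* the adapter takes ownership of the pushed buffer */
  gst_adapter_push (adapter, gst_buffer_ref (incoming));

  /* ... take aggregated data from the adapter as needed ... */

  gst_tensor_aggregation_clear_all (table);
  g_hash_table_destroy (table);
}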
804 :
805 : /**
806 : * @brief Internal function to check tensor dimensions to append old caps for backward compatibility (rank 4).
807 : */
808 : static gboolean
809 10348 : _append_prev_caps (const GstTensorsConfig * config)
810 : {
811 : GstTensorsInfo *info;
812 : GstTensorInfo *_info;
813 : guint i, rank, min_rank;
814 :
815 10348 : g_return_val_if_fail (config != NULL, FALSE);
816 :
817 10348 : info = (GstTensorsInfo *) (&config->info);
818 10348 : if (!gst_tensors_info_validate (info))
819 4703 : return FALSE;
820 :
821 5780 : for (i = 0; i < info->num_tensors; i++) {
822 5648 : _info = gst_tensors_info_get_nth_info (info, i);
823 :
824 5648 : rank = gst_tensor_dimension_get_rank (_info->dimension);
825 5648 : min_rank = gst_tensor_dimension_get_min_rank (_info->dimension);
826 :
827 5648 : if (rank <= NNS_TENSOR_RANK_LIMIT_PREV ||
828 : min_rank > NNS_TENSOR_RANK_LIMIT_PREV)
829 5513 : return FALSE;
830 : }
831 :
832 132 : return TRUE;
833 : }
834 :
835 : /**
836 : * @brief Internal function to get caps for single tensor from config.
837 : */
838 : static GstCaps *
839 5517 : _get_tensor_caps (const GstTensorsConfig * config)
840 : {
841 : GstCaps *caps;
842 5517 : GstStructure *structure = NULL;
843 : GstTensorsInfo *info;
844 : GstTensorInfo *_info;
845 :
846 5517 : g_return_val_if_fail (config != NULL, NULL);
847 :
848 5517 : info = (GstTensorsInfo *) (&config->info);
849 5517 : if (info->num_tensors > 1)
850 103 : return NULL;
851 :
852 5414 : caps = gst_caps_from_string (GST_TENSOR_CAP_DEFAULT);
853 5414 : _info = gst_tensors_info_get_nth_info (info, 0);
854 :
855 : /* structure for backward compatibility */
856 5414 : if (_append_prev_caps (config))
857 63 : structure = gst_structure_new_empty (NNS_MIMETYPE_TENSOR);
858 :
859 5414 : if (gst_tensor_dimension_is_valid (_info->dimension)) {
860 : {
861 3209 : g_autofree gchar *dim_str =
862 3209 : gst_tensor_get_dimension_string (_info->dimension);
863 :
864 3209 : gst_caps_set_simple (caps, "dimension", G_TYPE_STRING, dim_str, NULL);
865 : }
866 :
867 3209 : if (structure) {
868 63 : g_autofree gchar *dim_str =
869 63 : gst_tensor_get_rank_dimension_string (_info->dimension,
870 : NNS_TENSOR_RANK_LIMIT_PREV);
871 :
872 63 : gst_structure_set (structure, "dimension", G_TYPE_STRING, dim_str, NULL);
873 : }
874 : }
875 :
876 5414 : if (_info->type != _NNS_END) {
877 3619 : const gchar *type_str = gst_tensor_get_type_string (_info->type);
878 :
879 3619 : gst_caps_set_simple (caps, "type", G_TYPE_STRING, type_str, NULL);
880 :
881 3619 : if (structure)
882 63 : gst_structure_set (structure, "type", G_TYPE_STRING, type_str, NULL);
883 : }
884 :
885 5414 : if (config->rate_n >= 0 && config->rate_d > 0) {
886 1991 : gst_caps_set_simple (caps, "framerate", GST_TYPE_FRACTION,
887 1991 : config->rate_n, config->rate_d, NULL);
888 :
889 1991 : if (structure)
890 63 : gst_structure_set (structure, "framerate", GST_TYPE_FRACTION,
891 63 : config->rate_n, config->rate_d, NULL);
892 : }
893 :
894 5414 : if (structure)
895 63 : gst_caps_append_structure (caps, structure);
896 :
897 5414 : return caps;
898 : }
899 :
900 : /**
901 : * @brief Internal function to get caps for multi tensors from config.
902 : */
903 : static GstCaps *
904 4934 : _get_tensors_caps (const GstTensorsConfig * config)
905 : {
906 : GstCaps *caps;
907 4934 : GstStructure *structure = NULL;
908 :
909 4934 : g_return_val_if_fail (config != NULL, NULL);
910 :
911 4934 : caps = gst_caps_from_string (GST_TENSORS_CAP_DEFAULT);
912 :
913 : /* structure for backward compatibility */
914 4934 : if (_append_prev_caps (config))
915 69 : structure = gst_structure_new_empty (NNS_MIMETYPE_TENSORS);
916 :
917 4934 : if (config->info.num_tensors > 0) {
918 2707 : g_autofree gchar *type_str =
919 2707 : gst_tensors_info_get_types_string (&config->info);
920 :
921 : /* Set GstCaps */
922 : {
923 2707 : g_autofree gchar *dim_str =
924 2707 : gst_tensors_info_get_dimensions_string (&config->info);
925 :
926 2707 : gst_caps_set_simple (caps, "num_tensors", G_TYPE_INT,
927 2707 : config->info.num_tensors, NULL);
928 2707 : gst_caps_set_simple (caps, "dimensions", G_TYPE_STRING, dim_str, NULL);
929 2707 : gst_caps_set_simple (caps, "types", G_TYPE_STRING, type_str, NULL);
930 : }
931 :
932 : /* Set GstStructure */
933 2707 : if (structure) {
934 69 : g_autofree gchar *dim_str =
935 69 : gst_tensors_info_get_rank_dimensions_string (&config->info,
936 : NNS_TENSOR_RANK_LIMIT_PREV);
937 :
938 69 : gst_structure_set (structure, "num_tensors", G_TYPE_INT,
939 69 : config->info.num_tensors, NULL);
940 69 : gst_structure_set (structure, "dimensions", G_TYPE_STRING, dim_str, NULL);
941 69 : gst_structure_set (structure, "types", G_TYPE_STRING, type_str, NULL);
942 : }
943 : }
944 :
945 4934 : if (config->rate_n >= 0 && config->rate_d > 0) {
946 1425 : gst_caps_set_simple (caps, "framerate", GST_TYPE_FRACTION,
947 1425 : config->rate_n, config->rate_d, NULL);
948 :
949 1425 : if (structure)
950 68 : gst_structure_set (structure, "framerate", GST_TYPE_FRACTION,
951 68 : config->rate_n, config->rate_d, NULL);
952 : }
953 :
954 4934 : if (structure)
955 69 : gst_caps_append_structure (caps, structure);
956 :
957 4934 : return caps;
958 : }
959 :
960 : /**
961 : * @brief Internal function to get caps for flexible tensor from config.
962 : */
963 : static GstCaps *
964 2790 : _get_flexible_caps (const GstTensorsConfig * config)
965 : {
966 : GstCaps *caps;
967 :
968 2790 : caps = gst_caps_from_string (GST_TENSORS_FLEX_CAP_DEFAULT);
969 :
970 2790 : if (config->rate_n >= 0 && config->rate_d > 0) {
971 814 : gst_caps_set_simple (caps, "framerate", GST_TYPE_FRACTION,
972 814 : config->rate_n, config->rate_d, NULL);
973 : }
974 :
975 2790 : return caps;
976 : }
977 :
978 : /**
979 : * @brief Check given mimetype is tensor stream.
980 : * @param structure structure to be interpreted
981 : * @return TRUE if mimetype is tensor stream
982 : */
983 : gboolean
984 16007 : gst_structure_is_tensor_stream (const GstStructure * structure)
985 : {
986 : const gchar *name;
987 :
988 16007 : name = gst_structure_get_name (structure);
989 16007 : g_return_val_if_fail (name != NULL, FALSE);
990 :
991 28879 : return (g_str_equal (name, NNS_MIMETYPE_TENSOR) ||
992 12872 : g_str_equal (name, NNS_MIMETYPE_TENSORS));
993 : }
994 :
995 : /**
996 : * @brief Get media type from structure
997 : * @param structure structure to be interpreted
998 : * @return corresponding media type (returns _NNS_MEDIA_INVALID for unsupported type)
999 : */
1000 : media_type
1001 20195 : gst_structure_get_media_type (const GstStructure * structure)
1002 : {
1003 : const gchar *name;
1004 :
1005 20195 : name = gst_structure_get_name (structure);
1006 :
1007 20195 : g_return_val_if_fail (name != NULL, _NNS_MEDIA_INVALID);
1008 :
1009 20195 : if (g_str_has_prefix (name, "video/")) {
1010 2770 : return _NNS_VIDEO;
1011 : }
1012 :
1013 17425 : if (g_str_has_prefix (name, "audio/")) {
1014 2187 : return _NNS_AUDIO;
1015 : }
1016 :
1017 15238 : if (g_str_has_prefix (name, "text/")) {
1018 2160 : return _NNS_TEXT;
1019 : }
1020 :
1021 13078 : if (g_str_equal (name, "application/octet-stream")) {
1022 4416 : return _NNS_OCTET;
1023 : }
1024 :
1025 8662 : if (gst_structure_is_tensor_stream (structure)) {
1026 2155 : return _NNS_TENSOR;
1027 : }
1028 :
1029 : /* unknown or unsupported type */
1030 6507 : return _NNS_MEDIA_INVALID;
1031 : }
1032 :
1033 : /**
1034 : * @brief Parse caps from peer pad and set tensors config.
1035 : * @param pad GstPad to get the capabilities
1036 : * @param config tensors config structure to be filled
1037 : * @param is_fixed flag to be updated when peer caps are fixed (optional; does nothing when the param is null)
1038 : * @return TRUE if successfully configured from peer
1039 : */
1040 : gboolean
1041 4292 : gst_tensors_config_from_peer (GstPad * pad, GstTensorsConfig * config,
1042 : gboolean * is_fixed)
1043 : {
1044 : GstCaps *peer_caps;
1045 : GstStructure *structure;
1046 4292 : gboolean ret = FALSE;
1047 :
1048 4292 : g_return_val_if_fail (GST_IS_PAD (pad), FALSE);
1049 4292 : g_return_val_if_fail (config != NULL, FALSE);
1050 :
1051 4292 : gst_tensors_config_init (config);
1052 :
1053 4292 : if ((peer_caps = gst_pad_peer_query_caps (pad, NULL))) {
1054 4292 : if (gst_caps_get_size (peer_caps) > 0) {
1055 2189 : structure = gst_caps_get_structure (peer_caps, 0);
1056 2189 : ret = gst_tensors_config_from_structure (config, structure);
1057 : }
1058 :
1059 4292 : if (ret && is_fixed)
1060 30 : *is_fixed = gst_caps_is_fixed (peer_caps);
1061 :
1062 4292 : gst_caps_unref (peer_caps);
1063 : }
1064 :
1065 4292 : return ret;
1066 : }
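
/*
 * Usage sketch: query the peer pad and treat a non-fixed result as a hint
 * only. The pad variable is illustrative; callers release the config with
 * gst_tensors_config_free() when done.
 */
static void
example_update_config_from_peer (GstPad * sinkpad, GstTensorsConfig * config)
{
  gboolean is_fixed = FALSE;

  if (!gst_tensors_config_from_peer (sinkpad, config, &is_fixed)) {
    /* peer has no (valid) tensor caps yet; config stays initialized but empty */
    return;
  }

  if (!is_fixed) {
    /* peer caps may still change; use the parsed config as a hint only */
  }
}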
1067 :
1068 : /**
1069 : * @brief Check whether two structures have the same dimension
1070 : */
1071 : static gboolean
1072 207 : _is_structure_dimension_same (GstStructure * st1, GstStructure * st2,
1073 : const gchar * fieldname)
1074 : {
1075 : const char *dim_str1;
1076 : const char *dim_str2;
1077 :
1078 207 : g_return_val_if_fail (gst_structure_has_field (st1, fieldname), FALSE);
1079 207 : g_return_val_if_fail (gst_structure_has_field (st2, fieldname), FALSE);
1080 :
1081 207 : dim_str1 = gst_structure_get_string (st1, fieldname);
1082 207 : dim_str2 = gst_structure_get_string (st2, fieldname);
1083 :
1084 207 : return gst_tensor_dimension_string_is_equal (dim_str1, dim_str2);
1085 : }
1086 :
1087 : /**
1088 : * @brief Update caps dimensions for negotiation
1089 : * @param caps caps to compare and update
1090 : * @param filter caps to compare
1091 : */
1092 : void
1093 1602 : gst_tensor_caps_update_dimension (GstCaps * caps, GstCaps * filter)
1094 : {
1095 : GstStructure *st_caps, *st_filter;
1096 : guint i, j;
1097 :
1098 1602 : g_return_if_fail (GST_IS_CAPS (caps));
1099 1602 : g_return_if_fail (GST_IS_CAPS (filter));
1100 :
1101 4151 : for (i = 0; i < gst_caps_get_size (caps); i++) {
1102 2549 : st_caps = gst_caps_get_structure (caps, i);
1103 :
1104 2549 : if (!gst_structure_is_tensor_stream (st_caps))
1105 0 : continue;
1106 :
1107 7304 : for (j = 0; j < gst_caps_get_size (filter); j++) {
1108 4755 : st_filter = gst_caps_get_structure (filter, j);
1109 :
1110 4755 : if (!gst_structure_is_tensor_stream (st_filter))
1111 0 : continue;
1112 :
1113 : /* other/tensor */
1114 4755 : if (gst_structure_has_field (st_caps, "dimension")
1115 2598 : && gst_structure_has_field (st_filter, "dimension")) {
1116 : /* update dimensions for negotiation */
1117 230 : if (_is_structure_dimension_same (st_caps, st_filter, "dimension")) {
1118 115 : gst_structure_set (st_caps, "dimension", G_TYPE_STRING,
1119 : gst_structure_get_string (st_filter, "dimension"), NULL);
1120 : }
1121 : }
1122 : /* other/tensors */
1123 4640 : else if (gst_structure_has_field (st_caps, "dimensions")
1124 1213 : && gst_structure_has_field (st_filter, "dimensions")) {
1125 : /* update dimensions for negotiation */
1126 89 : if (_is_structure_dimension_same (st_caps, st_filter, "dimensions")) {
1127 80 : gst_structure_set (st_caps, "dimensions", G_TYPE_STRING,
1128 : gst_structure_get_string (st_filter, "dimensions"), NULL);
1129 : }
1130 : }
1131 : }
1132 : }
1133 : }
1134 :
1135 : /**
1136 : * @brief Try intersecting @caps1 and @caps2 for tensor stream
1137 : * @param caps1 a GstCaps to intersect
1138 : * @param caps2 a GstCaps to intersect
1139 : * @return TRUE if the intersection would not be empty.
1140 : */
1141 : gboolean
1142 14 : gst_tensor_caps_can_intersect (GstCaps * caps1, GstCaps * caps2)
1143 : {
1144 : GstStructure *structure1;
1145 : GstStructure *structure2;
1146 : GstStructure *structure_copy1;
1147 : GstStructure *structure_copy2;
1148 :
1149 : const gchar *name1;
1150 : const gchar *name2;
1151 :
1152 : gboolean intersectable;
1153 :
1154 14 : if (gst_caps_can_intersect (caps1, caps2))
1155 10 : return TRUE;
1156 :
1157 4 : structure1 = gst_caps_get_structure (caps1, 0);
1158 4 : structure2 = gst_caps_get_structure (caps2, 0);
1159 :
1160 4 : if (!gst_structure_is_tensor_stream (structure1)
1161 4 : || !gst_structure_is_tensor_stream (structure2))
1162 0 : return FALSE;
1163 :
1164 4 : name1 = gst_structure_get_name (structure1);
1165 4 : name2 = gst_structure_get_name (structure2);
1166 :
1167 4 : if (!g_str_equal (name1, name2))
1168 1 : return FALSE;
1169 :
1170 : /* other/tensor */
1171 3 : if (g_str_equal (name1, NNS_MIMETYPE_TENSOR)) {
1172 3 : if (gst_structure_has_field (structure1, "dimension")
1173 3 : && gst_structure_has_field (structure2, "dimension")) {
1174 3 : if (!_is_structure_dimension_same (structure1, structure2, "dimension"))
1175 1 : return FALSE;
1176 : }
1177 : }
1178 : /* other/tensors */
1179 0 : else if (gst_structure_has_field (structure1, "dimensions")
1180 0 : && gst_structure_has_field (structure2, "dimensions")) {
1181 0 : if (!_is_structure_dimension_same (structure1, structure2, "dimensions"))
1182 0 : return FALSE;
1183 : }
1184 :
1185 2 : structure_copy1 = gst_structure_copy (structure1);
1186 2 : structure_copy2 = gst_structure_copy (structure2);
1187 :
1188 2 : gst_structure_remove_field (structure_copy1, "dimension");
1189 2 : gst_structure_remove_field (structure_copy1, "dimensions");
1190 2 : gst_structure_remove_field (structure_copy2, "dimension");
1191 2 : gst_structure_remove_field (structure_copy2, "dimensions");
1192 :
1193 : intersectable =
1194 2 : gst_structure_can_intersect (structure_copy1, structure_copy2);
1195 :
1196 2 : gst_structure_free (structure_copy1);
1197 2 : gst_structure_free (structure_copy2);
1198 :
1199 2 : return intersectable;
1200 : }
1201 :
1202 : /**
1203 : * @brief Get pad caps from tensors config and caps of the peer connected to the pad.
1204 : * @param pad GstPad to get possible caps
1205 : * @param config tensors config structure
1206 : * @return caps for given config. Caller is responsible for unreffing the returned caps.
1207 : */
1208 : GstCaps *
1209 1208 : gst_tensor_pad_caps_from_config (GstPad * pad, const GstTensorsConfig * config)
1210 : {
1211 1208 : GstCaps *caps = NULL;
1212 : GstCaps *templ;
1213 : gboolean is_flexible, peer_is_flexible, peer_has_tensor_caps;
1214 : GstCaps *peer_caps;
1215 :
1216 1208 : g_return_val_if_fail (GST_IS_PAD (pad), NULL);
1217 1208 : g_return_val_if_fail (config != NULL, NULL);
1218 :
1219 1208 : templ = gst_pad_get_pad_template_caps (pad);
1220 :
1221 : /* check peer caps */
1222 1208 : peer_is_flexible = peer_has_tensor_caps = FALSE;
1223 :
1224 1208 : peer_caps = gst_pad_peer_query_caps (pad, NULL);
1225 1208 : if (peer_caps && gst_caps_get_size (peer_caps) > 0) {
1226 : GstCaps *tmp;
1227 : GstStructure *st;
1228 : GstTensorsConfig peer_config;
1229 :
1230 912 : tmp = gst_caps_from_string (GST_TENSOR_CAP_DEFAULT);
1231 912 : peer_has_tensor_caps = gst_caps_can_intersect (tmp, peer_caps);
1232 912 : gst_caps_unref (tmp);
1233 :
1234 912 : st = gst_caps_get_structure (peer_caps, 0);
1235 912 : if (gst_tensors_config_from_structure (&peer_config, st))
1236 912 : peer_is_flexible = gst_tensors_config_is_flexible (&peer_config);
1237 912 : gst_tensors_config_free (&peer_config);
1238 : }
1239 :
1240 : /* other/tensors (flexible) */
1241 1208 : is_flexible = gst_tensors_config_is_flexible (config);
1242 :
1243 1208 : if (is_flexible || peer_is_flexible) {
1244 70 : caps = _get_flexible_caps (config);
1245 70 : goto intersectable;
1246 : }
1247 :
1248 : /* other/tensor */
1249 1138 : if (config->info.num_tensors == 1 && peer_has_tensor_caps) {
1250 727 : caps = _get_tensor_caps (config);
1251 727 : if (peer_caps)
1252 727 : gst_tensor_caps_update_dimension (caps, peer_caps);
1253 :
1254 727 : if (gst_caps_can_intersect (caps, templ))
1255 723 : goto done;
1256 :
1257 4 : gst_caps_unref (caps);
1258 : }
1259 :
1260 : /* other/tensors (static) */
1261 415 : caps = _get_tensors_caps (config);
1262 415 : if (peer_caps)
1263 415 : gst_tensor_caps_update_dimension (caps, peer_caps);
1264 :
1265 0 : intersectable:
1266 485 : if (!gst_caps_can_intersect (caps, templ)) {
1267 0 : gst_caps_unref (caps);
1268 0 : caps = NULL;
1269 : }
1270 :
1271 485 : done:
1272 1208 : gst_caps_unref (templ);
1273 1208 : if (peer_caps)
1274 1208 : gst_caps_unref (peer_caps);
1275 1208 : caps = gst_caps_truncate (caps);
1276 1208 : return caps;
1277 : }
1278 :
1279 : /**
1280 : * @brief Get all possible caps from tensors config. Unlike gst_tensor_pad_caps_from_config(), this function does not check peer caps.
1281 : * @param pad GstPad to get possible caps
1282 : * @param config tensors config structure
1283 : * @return caps for given config. Caller is responsible for unreffing the returned caps.
1284 : */
1285 : GstCaps *
1286 1999 : gst_tensor_pad_possible_caps_from_config (GstPad * pad,
1287 : const GstTensorsConfig * config)
1288 : {
1289 : GstCaps *caps, *tmp;
1290 : GstCaps *templ;
1291 :
1292 1999 : g_return_val_if_fail (GST_IS_PAD (pad), NULL);
1293 1999 : g_return_val_if_fail (config != NULL, NULL);
1294 :
1295 1999 : caps = gst_caps_new_empty ();
1296 1999 : templ = gst_pad_get_pad_template_caps (pad);
1297 :
1298 : /* append caps for static tensor */
1299 1999 : if (gst_tensors_config_is_static (config)) {
1300 : /* other/tensor */
1301 1879 : if ((tmp = _get_tensor_caps (config)) != NULL) {
1302 1776 : if (gst_caps_can_intersect (tmp, templ))
1303 1729 : gst_caps_append (caps, tmp);
1304 : else
1305 47 : gst_caps_unref (tmp);
1306 : }
1307 :
1308 : /* other/tensors */
1309 1879 : if ((tmp = _get_tensors_caps (config)) != NULL) {
1310 1879 : if (gst_caps_can_intersect (tmp, templ))
1311 1879 : gst_caps_append (caps, tmp);
1312 : else
1313 0 : gst_caps_unref (tmp);
1314 : }
1315 : }
1316 :
1317 : /* caps for flexible tensor */
1318 1999 : if ((tmp = _get_flexible_caps (config)) != NULL) {
1319 1999 : if (gst_caps_can_intersect (tmp, templ))
1320 1906 : gst_caps_append (caps, tmp);
1321 : else
1322 93 : gst_caps_unref (tmp);
1323 : }
1324 :
1325 : /* if no possible caps for given config, return null. */
1326 1999 : if (gst_caps_is_empty (caps)) {
1327 0 : gst_caps_unref (caps);
1328 0 : caps = NULL;
1329 : }
1330 :
1331 1999 : gst_caps_unref (templ);
1332 1999 : return caps;
1333 : }
1334 :
1335 : /**
1336 : * @brief Get tensor format of current pad caps.
1337 : * @param pad GstPad to check current caps.
1338 : * @return The tensor_format of current pad caps.
1339 : *
1340 : * If the pad does not have tensor caps, _NNS_TENSOR_FORMAT_END is returned.
1341 : */
1342 : tensor_format
1343 129124 : gst_tensor_pad_get_format (GstPad * pad)
1344 : {
1345 : GstCaps *caps;
1346 129124 : tensor_format ret = _NNS_TENSOR_FORMAT_END;
1347 :
1348 129124 : g_return_val_if_fail (GST_IS_PAD (pad), _NNS_TENSOR_FORMAT_END);
1349 :
1350 129124 : caps = gst_pad_get_current_caps (pad);
1351 129124 : if (caps) {
1352 : GstStructure *structure;
1353 : GstTensorsConfig config;
1354 :
1355 129111 : structure = gst_caps_get_structure (caps, 0);
1356 129111 : if (gst_tensors_config_from_structure (&config, structure)) {
1357 129000 : ret = config.info.format;
1358 : }
1359 129111 : gst_caps_unref (caps);
1360 129111 : gst_tensors_config_free (&config);
1361 : }
1362 :
1363 129124 : return ret;
1364 : }
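
/*
 * Usage sketch: branch on the negotiated tensor format of a pad; the pad
 * variable is illustrative.
 */
static gboolean
example_pad_is_flexible (GstPad * pad)
{
  tensor_format fmt = gst_tensor_pad_get_format (pad);

  if (fmt == _NNS_TENSOR_FORMAT_END)
    return FALSE;               /* pad has no (valid) tensor caps yet */

  return (fmt == _NNS_TENSOR_FORMAT_FLEXIBLE);
}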
1365 :
1366 : /**
1367 : * @brief Get caps from tensors config (for other/tensors)
1368 : * @param config tensors config info
1369 : * @return caps for given config
1370 : */
1371 : GstCaps *
1372 3362 : gst_tensors_caps_from_config (const GstTensorsConfig * config)
1373 : {
1374 : GstCaps *caps;
1375 :
1376 3362 : g_return_val_if_fail (config != NULL, NULL);
1377 :
1378 3361 : if (gst_tensors_config_is_flexible (config)) {
1379 721 : caps = _get_flexible_caps (config);
1380 : } else {
1381 2640 : caps = _get_tensors_caps (config);
1382 : }
1383 :
1384 3361 : caps = gst_caps_truncate (caps);
1385 :
1386 3361 : return caps;
1387 : }
1388 :
1389 : /**
1390 : * @brief Get tensor caps from tensors config
1391 : * @param config tensors config info
1392 : * @return caps for given config
1393 : */
1394 : GstCaps *
1395 2912 : gst_tensor_caps_from_config (const GstTensorsConfig * config)
1396 : {
1397 : GstCaps *caps;
1398 2912 : g_return_val_if_fail (config != NULL, NULL);
1399 :
1400 2911 : caps = _get_tensor_caps (config);
1401 2911 : caps = gst_caps_truncate (caps);
1402 :
1403 2911 : return caps;
1404 : }
1405 :
1406 : /**
1407 : * @brief Parse structure and set tensors config (for other/tensors)
1408 : * @param config tensors config structure to be filled
1409 : * @param structure structure to be interpreted
1410 : * @return TRUE if no error
1411 : */
1412 : gboolean
1413 156942 : gst_tensors_config_from_structure (GstTensorsConfig * config,
1414 : const GstStructure * structure)
1415 : {
1416 : const gchar *name;
1417 156942 : tensor_format format = _NNS_TENSOR_FORMAT_STATIC;
1418 :
1419 156942 : g_return_val_if_fail (config != NULL, FALSE);
1420 156940 : gst_tensors_config_init (config);
1421 :
1422 156940 : g_return_val_if_fail (structure != NULL, FALSE);
1423 :
1424 156939 : name = gst_structure_get_name (structure);
1425 :
1426 156939 : if (g_str_equal (name, NNS_MIMETYPE_TENSOR)) {
1427 : /* other/tensor is always static */
1428 144473 : config->info.num_tensors = 1;
1429 :
1430 144473 : if (gst_structure_has_field (structure, "dimension")) {
1431 138342 : const gchar *dim_str = gst_structure_get_string (structure, "dimension");
1432 138342 : gst_tensor_parse_dimension (dim_str, config->info.info[0].dimension);
1433 : }
1434 :
1435 144473 : if (gst_structure_has_field (structure, "type")) {
1436 138495 : const gchar *type_str = gst_structure_get_string (structure, "type");
1437 138495 : config->info.info[0].type = gst_tensor_get_type (type_str);
1438 : }
1439 12466 : } else if (g_str_equal (name, NNS_MIMETYPE_TENSORS)) {
1440 12355 : if (gst_structure_has_field (structure, "format")) {
1441 : const gchar *format_str;
1442 :
1443 12353 : format_str = gst_structure_get_string (structure, "format");
1444 12353 : format = gst_tensor_get_format (format_str);
1445 :
1446 12353 : if (format == _NNS_TENSOR_FORMAT_END) {
1447 1485 : GST_INFO
1448 : ("Invalid format %s, it should be one of %s. Suppose tensor format is static.",
1449 : _STR_NULL (format_str), GST_TENSOR_FORMAT_ALL);
1450 : } else {
1451 10868 : config->info.format = format;
1452 : }
1453 : }
1454 :
1455 12355 : if (config->info.format == _NNS_TENSOR_FORMAT_STATIC) {
1456 10368 : gst_structure_get_int (structure, "num_tensors",
1457 10368 : (gint *) (&config->info.num_tensors));
1458 :
1459 : /* parse dimensions */
1460 10368 : if (gst_structure_has_field (structure, "dimensions")) {
1461 : const gchar *dims_str;
1462 : guint num_dims;
1463 :
1464 8041 : dims_str = gst_structure_get_string (structure, "dimensions");
1465 : num_dims =
1466 8041 : gst_tensors_info_parse_dimensions_string (&config->info, dims_str);
1467 :
1468 8041 : if (config->info.num_tensors != num_dims) {
1469 8 : nns_logw ("Invalid param, dimensions (%d) tensors (%d)\n",
1470 : num_dims, config->info.num_tensors);
1471 : }
1472 : }
1473 :
1474 : /* parse types */
1475 10368 : if (gst_structure_has_field (structure, "types")) {
1476 : const gchar *types_str;
1477 : guint num_types;
1478 :
1479 7993 : types_str = gst_structure_get_string (structure, "types");
1480 : num_types =
1481 7993 : gst_tensors_info_parse_types_string (&config->info, types_str);
1482 :
1483 7993 : if (config->info.num_tensors != num_types) {
1484 0 : nns_logw ("Invalid param, types (%d) tensors (%d)\n",
1485 : num_types, config->info.num_tensors);
1486 : }
1487 : }
1488 : }
1489 : } else {
1490 111 : nns_logw ("Unsupported type = %s\n", name ? name : "Unknown");
1491 111 : return FALSE;
1492 : }
1493 :
1494 156828 : if (gst_structure_has_field (structure, "framerate")) {
1495 156809 : gst_structure_get_fraction (structure, "framerate", &config->rate_n,
1496 156809 : &config->rate_d);
1497 : }
1498 :
1499 156828 : return TRUE;
1500 : }
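
/*
 * Usage sketch mirroring how this file itself uses the parser: read the
 * current caps of a pad and fill a tensors config. Names are illustrative;
 * the caller releases the config with gst_tensors_config_free().
 */
static gboolean
example_config_from_current_caps (GstPad * pad, GstTensorsConfig * config)
{
  GstCaps *caps;
  gboolean ret = FALSE;

  caps = gst_pad_get_current_caps (pad);
  if (caps) {
    GstStructure *s = gst_caps_get_structure (caps, 0);

    ret = gst_tensors_config_from_structure (config, s);
    gst_caps_unref (caps);
  }

  return ret;
}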
1501 :
1502 : /**
1503 : * @brief Parse caps and set tensors config (for other/tensors)
1504 : * @param[out] config tensors config structure to be filled
1505 : * @param[in] caps incoming capability
1506 : * @return TRUE/FALSE (if successfully configured, return TRUE)
1507 : */
1508 : gboolean
1509 95 : gst_tensors_config_from_cap (GstTensorsConfig * config, const GstCaps * caps)
1510 : {
1511 : GstStructure *structure;
1512 :
1513 95 : if (!gst_caps_is_fixed (caps)) {
1514 2 : nns_logw ("GstCaps is not fixed");
1515 2 : return FALSE;
1516 : }
1517 :
1518 93 : structure = gst_caps_get_structure (caps, 0);
1519 :
1520 93 : return gst_tensors_config_from_structure (config, structure)
1521 93 : && gst_tensors_config_validate (config);
1522 : }
1523 :
1524 : /**
1525 : * @brief Parse memory and fill the tensor meta.
1526 : * @param[out] meta tensor meta structure to be filled
1527 : * @param[in] mem pointer to GstMemory to be parsed
1528 : * @return TRUE if successfully set the meta
1529 : */
1530 : gboolean
1531 59912 : gst_tensor_meta_info_parse_memory (GstTensorMetaInfo * meta, GstMemory * mem)
1532 : {
1533 : GstMapInfo map;
1534 : gsize hsize, msize;
1535 : gboolean ret;
1536 :
1537 119823 : g_return_val_if_fail (mem != NULL, FALSE);
1538 59911 : g_return_val_if_fail (meta != NULL, FALSE);
1539 :
1540 59910 : gst_tensor_meta_info_init (meta);
1541 :
1542 : /* Check header size of tensor-meta. */
1543 59910 : hsize = gst_tensor_meta_info_get_header_size (meta);
1544 59910 : msize = gst_memory_get_sizes (mem, NULL, NULL);
1545 59910 : if (msize < hsize)
1546 7564 : return FALSE;
1547 :
1548 52346 : if (!gst_memory_map (mem, &map, GST_MAP_READ)) {
1549 0 : nns_loge ("Failed to get the meta, cannot map the memory.");
1550 0 : return FALSE;
1551 : }
1552 :
1553 52346 : ret = gst_tensor_meta_info_parse_header (meta, map.data);
1554 :
1555 52346 : gst_memory_unmap (mem, &map);
1556 52346 : return ret;
1557 : }
1558 :
1559 : /**
1560 : * @brief Append header to memory.
1561 : * @param[in] meta tensor meta structure
1562 : * @param[in] mem pointer to GstMemory
1563 : * @return Newly allocated GstMemory (Caller should free returned memory using gst_memory_unref())
1564 : */
1565 : GstMemory *
1566 282 : gst_tensor_meta_info_append_header (GstTensorMetaInfo * meta, GstMemory * mem)
1567 : {
1568 282 : GstMemory *new_mem = NULL;
1569 : gsize msize, hsize;
1570 : GstMapInfo old_map, new_map;
1571 :
1572 563 : g_return_val_if_fail (mem != NULL, NULL);
1573 281 : g_return_val_if_fail (gst_tensor_meta_info_validate (meta), NULL);
1574 :
1575 280 : if (!gst_memory_map (mem, &old_map, GST_MAP_READ)) {
1576 0 : nns_loge ("Failed to append header, cannot map the old memory.");
1577 0 : return NULL;
1578 : }
1579 :
1580 : /* memory size (header + old memory) */
1581 280 : hsize = gst_tensor_meta_info_get_header_size (meta);
1582 280 : msize = hsize + old_map.size;
1583 :
1584 280 : new_mem = gst_allocator_alloc (NULL, msize, NULL);
1585 280 : if (!gst_memory_map (new_mem, &new_map, GST_MAP_WRITE)) {
1586 0 : nns_loge ("Failed to append header, cannot map the new memory.");
1587 0 : gst_memory_unmap (mem, &old_map);
1588 0 : gst_memory_unref (new_mem);
1589 0 : return NULL;
1590 : }
1591 :
1592 : /* set header and copy old data */
1593 280 : gst_tensor_meta_info_update_header (meta, new_map.data);
1594 280 : memcpy (new_map.data + hsize, old_map.data, old_map.size);
1595 :
1596 280 : gst_memory_unmap (mem, &old_map);
1597 280 : gst_memory_unmap (new_mem, &new_map);
1598 280 : return new_mem;
1599 : }
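
/*
 * Usage sketch of the header-append flow used elsewhere in this file: convert
 * a static tensor info to meta, prepend the header, and drop the old memory.
 * The caller owns the returned memory; names are illustrative.
 */
static GstMemory *
example_wrap_with_flex_header (GstTensorInfo * info, GstMemory * mem)
{
  GstTensorMetaInfo meta;
  GstMemory *wrapped;

  gst_tensor_info_convert_to_meta (info, &meta);
  wrapped = gst_tensor_meta_info_append_header (&meta, mem);
  gst_memory_unref (mem);       /* the new memory holds a copy of the data */

  return wrapped;
}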
1600 :
1601 : /**
1602 : * @brief Get the nth GstMemory from given @a buffer.
1603 : * @param[in] buffer GstBuffer to be parsed.
1604 : * @param[in] index Index of GstMemory to be returned.
1605 : * @return GstMemory if found, otherwise NULL (Caller should free returned memory using gst_memory_unref()).
1606 : */
1607 : GstMemory *
1608 62110 : gst_tensor_buffer_get_nth_memory (GstBuffer * buffer, const guint index)
1609 : {
1610 : guint i, num_tensors;
1611 : gsize offset;
1612 62110 : GstMemory *extra_tensors_memory, *res_mem = NULL;
1613 : GstMapInfo extra_tensors_map;
1614 : GstTensorExtraInfo *extra_info;
1615 :
1616 62110 : if (!GST_IS_BUFFER (buffer)) {
1617 0 : nns_loge ("Failed to parse GstBuffer (invalid input buffer).");
1618 62110 : return NULL;
1619 : }
1620 :
1621 62110 : num_tensors = gst_tensor_buffer_get_count (buffer);
1622 62110 : if (index >= num_tensors) {
1623 0 : nns_loge ("Invalid index %u, the number of tensors in the buffer is %u.",
1624 : index, num_tensors);
1625 0 : return NULL;
1626 : }
1627 :
1628 : /* If num_tensors is less than or equal to NNS_TENSOR_MEMORY_MAX, it's trivial. */
1629 62110 : if (num_tensors <= NNS_TENSOR_MEMORY_MAX || index < NNS_TENSOR_MEMORY_MAX - 1) {
1630 61489 : return gst_buffer_get_memory (buffer, index);
1631 : }
1632 :
1633 : /* If num_tensors is greater than NNS_TENSOR_MEMORY_MAX, we need to parse extra info. */
1634 : extra_tensors_memory =
1635 621 : gst_buffer_peek_memory (buffer, NNS_TENSOR_MEMORY_MAX - 1);
1636 621 : if (!extra_tensors_memory) {
1637 0 : nns_loge ("Failed to get %d-th memory", NNS_TENSOR_MEMORY_MAX);
1638 0 : return NULL;
1639 : }
1640 :
1641 621 : if (!gst_memory_map (extra_tensors_memory, &extra_tensors_map, GST_MAP_READ)) {
1642 0 : nns_loge ("Failed to map %d-th memory", NNS_TENSOR_MEMORY_MAX);
1643 0 : return NULL;
1644 : }
1645 :
1646 : /* check header (extra info) of the memory */
1647 621 : if (!gst_memory_map_is_extra_tensor (&extra_tensors_map)) {
1648 0 : nns_loge ("Invalid extra header");
1649 0 : goto done;
1650 : }
1651 :
1652 : /* parse the memory */
1653 621 : extra_info = (GstTensorExtraInfo *) extra_tensors_map.data;
1654 621 : offset = sizeof (GstTensorExtraInfo);
1655 :
1656 : /* If index is NNS_TENSOR_MEMORY_MAX - 1 */
1657 621 : if (index == NNS_TENSOR_MEMORY_MAX - 1) {
1658 : res_mem =
1659 29 : gst_memory_share (extra_tensors_memory, offset, extra_info->reserved);
1660 29 : goto done;
1661 : }
1662 :
1663 592 : offset += extra_info->reserved;
1664 :
1665 31720 : for (i = 1; i <= index - NNS_TENSOR_MEMORY_MAX; ++i) {
1666 31128 : offset += gst_tensor_info_get_size (&extra_info->infos[i - 1]);
1667 : }
1668 :
1669 : /* wrap it as GstMemory */
1670 : res_mem =
1671 592 : gst_memory_share (extra_tensors_memory, offset,
1672 592 : gst_tensor_info_get_size (&extra_info->infos[index -
1673 : NNS_TENSOR_MEMORY_MAX]));
1674 :
1675 621 : done:
1676 621 : gst_memory_unmap (extra_tensors_memory, &extra_tensors_map);
1677 621 : return res_mem;
1678 : }
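
/*
 * Usage sketch: iterate every tensor in a buffer, including those packed
 * beyond NNS_TENSOR_MEMORY_MAX into the extra-info block, and sum their sizes.
 */
static gsize
example_total_tensor_size (GstBuffer * buffer)
{
  guint i, num = gst_tensor_buffer_get_count (buffer);
  gsize total = 0;

  for (i = 0; i < num; i++) {
    GstMemory *mem = gst_tensor_buffer_get_nth_memory (buffer, i);

    if (!mem)
      break;

    total += gst_memory_get_sizes (mem, NULL, NULL);
    gst_memory_unref (mem);
  }

  return total;
}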
1679 :
1680 : /**
1681 : * @brief Append @a memory to given @a buffer.
1682 : * @param[in/out] buffer GstBuffer to append the memory to.
1683 : * @param[in] memory GstMemory to append. This function takes ownership of this, even if it returns failure.
1684 : * @param[in] info GstTensorInfo of given @a memory.
1685 : * @return TRUE if successfully appended, otherwise FALSE.
1686 : */
1687 : gboolean
1688 59887 : gst_tensor_buffer_append_memory (GstBuffer * buffer, GstMemory * memory,
1689 : const GstTensorInfo * info)
1690 : {
1691 : guint num_mems, new_mem_index;
1692 59887 : GstMemory *new_memory = NULL, *last_memory = NULL;
1693 : gsize offset, new_mem_size, last_mem_size;
1694 : GstMapInfo new_memory_map, last_memory_map, incoming_memory_map;
1695 : GstTensorExtraInfo *extra_info;
1696 : GstTensorMetaInfo meta;
1697 : gboolean is_extra, is_static;
1698 59887 : gboolean appended = FALSE;
1699 :
1700 59887 : if (!GST_IS_BUFFER (buffer)) {
1701 0 : nns_loge ("Failed to append memory, given buffer is invalid.");
1702 0 : goto failed;
1703 : }
1704 :
1705 59887 : if (!memory) {
1706 0 : nns_loge ("Failed to append memory, given memory is NULL.");
1707 0 : goto failed;
1708 : }
1709 :
1710 59887 : if (gst_tensor_meta_info_parse_memory (&meta, memory)) {
1711 558 : is_static = (meta.format == _NNS_TENSOR_FORMAT_STATIC);
1712 : } else {
1713 : /* Assume the given memory carries a static tensor. */
1714 59329 : is_static = TRUE;
1715 :
1716 : /* Error case if given tensor-info is invalid. */
1717 59329 : if (!gst_tensor_info_validate (info)) {
1718 0 : nns_loge ("Failed to get tensor info (invalid input info).");
1719 0 : goto failed;
1720 : }
1721 : }
1722 :
1723 59887 : num_mems = gst_buffer_n_memory (buffer);
1724 :
1725 : /* trivial call to gst_buffer_append_memory */
1726 59887 : if (num_mems < NNS_TENSOR_MEMORY_MAX) {
1727 59299 : gst_buffer_append_memory (buffer, memory);
1728 59887 : return TRUE;
1729 : }
1730 :
1731 : /* given buffer has NNS_TENSOR_MEMORY_MAX memory blocks */
1732 588 : last_memory = gst_buffer_peek_memory (buffer, num_mems - 1);
1733 588 : if (!last_memory) {
1734 0 : nns_loge ("Failed to get last memory");
1735 0 : goto failed;
1736 : }
1737 :
1738 588 : if (!gst_memory_map (last_memory, &last_memory_map, GST_MAP_READ)) {
1739 0 : nns_loge ("Failed to map last memory");
1740 0 : last_memory = NULL;
1741 0 : goto failed;
1742 : }
1743 :
1744 588 : new_mem_size = last_mem_size = gst_memory_get_sizes (last_memory, NULL, NULL);
1745 :
1746 : /* If the last memory does not yet carry the extra-info header, reserve room for one. */
1747 588 : is_extra = gst_memory_map_is_extra_tensor (&last_memory_map);
1748 588 : if (!is_extra) {
1749 28 : new_mem_size += sizeof (GstTensorExtraInfo);
1750 : }
1751 :
1752 588 : new_mem_size += gst_memory_get_sizes (memory, NULL, NULL);
1753 :
1754 588 : new_memory = gst_allocator_alloc (NULL, new_mem_size, NULL);
1755 588 : if (!new_memory) {
1756 0 : nns_loge ("Failed to allocate memory for extra tensors.");
1757 0 : goto failed;
1758 : }
1759 :
1760 588 : if (!gst_memory_map (new_memory, &new_memory_map, GST_MAP_WRITE)) {
1761 0 : nns_loge ("Failed to map extra memory");
1762 0 : gst_memory_unref (new_memory);
1763 0 : new_memory = NULL;
1764 0 : goto failed;
1765 : }
1766 :
1767 588 : if (!gst_memory_map (memory, &incoming_memory_map, GST_MAP_READ)) {
1768 0 : nns_loge ("Failed to map incoming memory");
1769 0 : goto failed;
1770 : }
1771 :
1772 588 : extra_info = (GstTensorExtraInfo *) new_memory_map.data;
1773 :
1774 : /* If last_memory does not have the extra-info header, write a fresh one at the start of new_memory. */
1775 588 : if (!is_extra) {
1776 28 : gst_tensor_extra_info_init (extra_info, last_mem_size);
1777 28 : offset = sizeof (GstTensorExtraInfo);
1778 : } else {
1779 560 : offset = 0;
1780 : }
1781 :
1782 : /* copy last_memory into new_memory */
1783 588 : memcpy (new_memory_map.data + offset, last_memory_map.data,
1784 : last_memory_map.size);
1785 :
1786 : /* copy incoming_memory into new_memory */
1787 588 : new_mem_index = extra_info->num_extra_tensors;
1788 588 : extra_info->num_extra_tensors += 1;
1789 :
1790 : /* Copy tensor info into extra. */
1791 588 : if (is_static) {
1792 588 : gst_tensor_info_copy (&extra_info->infos[new_mem_index], info);
1793 :
1794 : /**
1795 : * Free the name string because GStreamer does not free it.
1796 : * @todo Make a custom gst_allocator later?
1797 : */
1798 588 : g_free (extra_info->infos[new_mem_index].name);
1799 588 : extra_info->infos[new_mem_index].name = NULL;
1800 : } else {
1801 0 : gst_tensor_meta_info_convert (&meta, &extra_info->infos[new_mem_index]);
1802 : }
1803 :
1804 588 : memcpy (new_memory_map.data + offset + last_memory_map.size,
1805 588 : incoming_memory_map.data, incoming_memory_map.size);
1806 :
1807 588 : gst_memory_unmap (memory, &incoming_memory_map);
1808 588 : gst_memory_unmap (last_memory, &last_memory_map);
1809 588 : last_memory = NULL;
1810 :
1811 588 : gst_buffer_replace_memory (buffer, num_mems - 1, new_memory);
1812 588 : appended = TRUE;
1813 :
1814 588 : failed:
1815 588 : if (new_memory) {
1816 588 : gst_memory_unmap (new_memory, &new_memory_map);
1817 588 : if (!appended)
1818 0 : gst_memory_unref (new_memory);
1819 : }
1820 :
1821 588 : if (last_memory)
1822 0 : gst_memory_unmap (last_memory, &last_memory_map);
1823 :
1824 : /* Release incoming memory even if failed to append it into buffer. */
1825 588 : if (memory)
1826 588 : gst_memory_unref (memory);
1827 :
1828 588 : return appended;
1829 : }
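
/**
 * A minimal write-side sketch (illustrative only; the helper name and sizes are
 * hypothetical). It assumes the GstTensorInfo fields type and dimension as used
 * elsewhere in NNStreamer, the _NNS_UINT8 element type, and that a 1-D dimension
 * with trailing zeros passes gst_tensor_info_validate (). Tensors appended past
 * the NNS_TENSOR_MEMORY_MAX-th one are packed into the last memory block.
 * @code
 *   static GstBuffer *
 *   example_build_tensor_buffer (guint num_tensors, guint num_elements)
 *   {
 *     GstBuffer *buf = gst_buffer_new ();
 *     GstTensorInfo info;
 *     guint i;
 *
 *     gst_tensor_info_init (&info);
 *     info.type = _NNS_UINT8;
 *     info.dimension[0] = num_elements;  // 1-D uint8 tensor
 *
 *     for (i = 0; i < num_tensors; i++) {
 *       GstMemory *mem = gst_allocator_alloc (NULL, num_elements, NULL);
 *
 *       // Ownership of mem is transferred even if appending fails.
 *       if (!gst_tensor_buffer_append_memory (buf, mem, &info)) {
 *         gst_buffer_unref (buf);
 *         return NULL;
 *       }
 *     }
 *     return buf;
 *   }
 * @endcode
 */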
1830 :
1831 : /**
1832 : * @brief Get the number of tensors in the buffer.
1833 : */
1834 : guint
1835 121514 : gst_tensor_buffer_get_count (GstBuffer * buffer)
1836 : {
1837 : guint num_mems;
1838 : GstMemory *mem;
1839 : GstMapInfo map;
1840 : GstTensorExtraInfo *extra_info;
1841 :
1842 243028 : g_return_val_if_fail (buffer != NULL, 0);
1843 :
1844 121514 : num_mems = gst_buffer_n_memory (buffer);
1845 121514 : if (num_mems < NNS_TENSOR_MEMORY_MAX) {
1846 119813 : return num_mems;
1847 : }
1848 :
1849 : /* num_mems == NNS_TENSOR_MEMORY_MAX */
1850 1701 : mem = gst_buffer_peek_memory (buffer, num_mems - 1);
1851 1701 : if (!mem) {
1852 0 : nns_loge ("Failed to get the last memory.");
1853 0 : return 0;
1854 : }
1855 :
1856 1701 : if (!gst_memory_map (mem, &map, GST_MAP_READ)) {
1857 0 : nns_loge ("Failed to map the last memory.");
1858 0 : return 0;
1859 : }
1860 :
1861 1701 : if (gst_memory_map_is_extra_tensor (&map)) {
1862 1349 : extra_info = (GstTensorExtraInfo *) map.data;
1863 1349 : num_mems = extra_info->num_extra_tensors + NNS_TENSOR_MEMORY_MAX;
1864 : } else {
1865 352 : nns_logi ("The last memory does not have the extra-tensor header. "
1866 : "Assuming the number of tensors is %u.", num_mems);
1867 : }
1868 :
1869 1701 : gst_memory_unmap (mem, &map);
1870 :
1871 1701 : return num_mems;
1872 : }
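
/**
 * A short numeric illustration for a GstBuffer * buf packed as above (the
 * values are hypothetical; the actual value of NNS_TENSOR_MEMORY_MAX is defined
 * elsewhere). If NNS_TENSOR_MEMORY_MAX were 16 and the extra-info header in the
 * last memory block recorded 24 extra tensors, the buffer would expose 16
 * physical memory blocks but 40 logical tensors:
 * @code
 *   guint n_mem = gst_buffer_n_memory (buf);             // 16 memory blocks
 *   guint n_tensor = gst_tensor_buffer_get_count (buf);  // 16 + 24 = 40 tensors
 * @endcode
 */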
1873 :
1874 : /**
1875 : * @brief Sets the value of a property based on the specified property value and GParamSpec.
1876 : *
1877 : * @param prop_value A pointer to the GValue where the property value will be set.
1878 : * @param param_spec A pointer to the GParamSpec that describes the property.
1879 : * @param property_value A string representing the value to be set for the property.
1880 : *
1881 : * @note This API is intended to be used by gst_tensor_parse_config_file ()
1882 : */
1883 : static void
1884 34 : set_property_value (GValue * prop_value, const GParamSpec * param_spec,
1885 : const gchar * property_value)
1886 : {
1887 34 : GType value_type = G_PARAM_SPEC_VALUE_TYPE (param_spec);
1888 34 : g_value_init (prop_value, value_type);
1889 :
1890 34 : if (value_type == G_TYPE_BOOLEAN) {
1891 0 : gboolean value = g_ascii_strcasecmp (property_value, "true") == 0;
1892 0 : g_value_set_boolean (prop_value, value);
1893 34 : } else if (value_type == G_TYPE_INT) {
1894 0 : gint value = atoi (property_value);
1895 0 : g_value_set_int (prop_value, value);
1896 34 : } else if (value_type == G_TYPE_UINT) {
1897 0 : guint value = atoi (property_value);
1898 0 : g_value_set_uint (prop_value, value);
1899 34 : } else if (value_type == G_TYPE_FLOAT) {
1900 0 : gfloat value = atof (property_value);
1901 0 : g_value_set_float (prop_value, value);
1902 34 : } else if (value_type == G_TYPE_DOUBLE) {
1903 0 : gdouble value = atof (property_value);
1904 0 : g_value_set_double (prop_value, value);
1905 : } else {
1906 34 : g_value_set_string (prop_value, property_value); /** default is string */
1907 : }
1908 34 : }
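
/**
 * A behavioral sketch (illustrative only; "obj" and the "blocksize" uint
 * property are hypothetical). The GValue is initialized to the pspec's type and
 * the string is converted accordingly; unhandled types fall back to a plain
 * string value.
 * @code
 *   GParamSpec *pspec = g_object_class_find_property (
 *       G_OBJECT_GET_CLASS (obj), "blocksize");  // a G_TYPE_UINT property
 *   GValue value = G_VALUE_INIT;
 *
 *   set_property_value (&value, pspec, "4096");
 *   g_assert_cmpuint (g_value_get_uint (&value), ==, 4096);
 *   g_value_unset (&value);
 * @endcode
 */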
1909 :
1910 : /**
1911 : * @brief Parses a configuration file and sets the corresponding properties on a GObject.
1912 : *
1913 : * This function reads the contents of the configuration file located at the given path
1914 : * and sets the properties of the specified GObject based on the configuration data.
1915 : *
1916 : * @param config_path The path to the configuration file.
1917 : * @param object The GObject on which to set the properties.
1918 : *
1919 : * @note The caller retains ownership of the GObject passed as a parameter;
1920 : * this function does not manage its memory.
1921 : */
1922 :
1923 : void
1924 9 : gst_tensor_parse_config_file (const gchar * config_path, const GObject * object)
1925 : {
1926 9 : g_autofree gchar *config_data = NULL;
1927 9 : g_auto (GStrv) lines = NULL;
1928 9 : GStrv line = NULL;
1929 9 : GError *error = NULL;
1930 9 : GObjectClass *g_object_class = G_OBJECT_GET_CLASS (object);
1931 :
1932 9 : if (!g_file_get_contents (config_path, &config_data, NULL, &error)) {
1933 0 : GST_DEBUG ("Failed to read config file: %s\n", error->message);
1934 0 : g_error_free (error);
1935 0 : return;
1936 : }
1937 :
1938 9 : lines = g_strsplit (config_data, "\n", -1);
1939 :
1940 : /** Iterate over each line */
1941 52 : for (line = lines; *line; ++line) {
1942 43 : g_auto (GStrv) parts = g_strsplit (*line, "=", 2);
1943 :
1944 43 : if (g_strv_length (parts) == 2) {
1945 68 : g_autofree gchar *property_name = g_strstrip (g_strdup (parts[0]));
1946 68 : g_autofree gchar *property_value = g_strstrip (g_strdup (parts[1]));
1947 :
1948 : GParamSpec *pdata =
1949 34 : g_object_class_find_property (g_object_class, property_name);
1950 :
1951 34 : if (pdata != NULL) {
1952 34 : GValue prop_value = G_VALUE_INIT;
1953 34 : set_property_value (&prop_value, pdata, property_value);
1954 34 : g_object_set_property (G_OBJECT (object), pdata->name, &prop_value);
1955 34 : g_value_unset (&prop_value);
1956 : }
1957 : }
1958 : }
1959 : }
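
/**
 * A usage sketch (illustrative only; the element name, property names, and file
 * path are examples, not mandated by this API). Given a config file such as
 * /etc/nnstreamer/filter.conf with "key = value" lines:
 *
 *   framework = tensorflow-lite
 *   model = /usr/share/models/sample.tflite
 *
 * each key that matches a property of the target object is parsed and set;
 * lines without '=' or with unknown property names are silently skipped.
 * @code
 *   GstElement *filter = gst_element_factory_make ("tensor_filter", NULL);
 *
 *   if (filter)
 *     gst_tensor_parse_config_file ("/etc/nnstreamer/filter.conf",
 *         G_OBJECT (filter));
 * @endcode
 */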