LCOV - code coverage report
Current view: top level - vnet/crypto - node.c
Test: coverage-filtered.info
Date: 2023-07-05 22:20:52

              Hit    Total    Coverage
  Lines:       52       72      72.2 %
  Functions:    8       13      61.5 %

          Line data    Source code
       1             : /*
       2             :  * Copyright (c) 2020 Cisco and/or its affiliates.
       3             :  * Licensed under the Apache License, Version 2.0 (the "License");
       4             :  * you may not use this file except in compliance with the License.
       5             :  * You may obtain a copy of the License at:
       6             :  *
       7             :  *     http://www.apache.org/licenses/LICENSE-2.0
       8             :  *
       9             :  * Unless required by applicable law or agreed to in writing, software
      10             :  * distributed under the License is distributed on an "AS IS" BASIS,
      11             :  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
      12             :  * See the License for the specific language governing permissions and
      13             :  * limitations under the License.
      14             :  */
      15             : 
      16             : #include <stdbool.h>
      17             : #include <vlib/vlib.h>
      18             : #include <vnet/crypto/crypto.h>
      19             : 
      20             : typedef enum
      21             : {
      22             : #define _(sym,str) VNET_CRYPTO_ASYNC_ERROR_##sym,
      23             :   foreach_crypto_op_status
      24             : #undef _
      25             :     VNET_CRYPTO_ASYNC_N_ERROR,
      26             : } vnet_crypto_async_error_t;
      27             : 
      28             : static char *vnet_crypto_async_error_strings[] = {
      29             : #define _(sym,string) string,
      30             :   foreach_crypto_op_status
      31             : #undef _
      32             : };
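
The error enum and its string table above are generated from the same foreach_crypto_op_status list, so the symbols and strings can never drift apart. As a standalone illustration of this X-macro pattern (everything in the sketch below, foreach_color, color_t and color_strings, is made up for the example and is not part of VPP):

    #include <stdio.h>

    /* one list macro drives both the enum and the parallel string table */
    #define foreach_color \
      _(RED, "red")       \
      _(GREEN, "green")   \
      _(BLUE, "blue")

    typedef enum
    {
    #define _(sym, str) COLOR_##sym,
      foreach_color
    #undef _
        COLOR_N,
    } color_t;

    static char *color_strings[] = {
    #define _(sym, str) str,
      foreach_color
    #undef _
    };

    int
    main (void)
    {
      printf ("%s\n", color_strings[COLOR_GREEN]); /* prints "green" */
      return 0;
    }
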
      33             : 
      34             : #define foreach_crypto_dispatch_next \
      35             :   _(ERR_DROP, "error-drop")
      36             : 
      37             : typedef enum
      38             : {
      39             : #define _(n, s) CRYPTO_DISPATCH_NEXT_##n,
      40             :   foreach_crypto_dispatch_next
      41             : #undef _
      42             :     CRYPTO_DISPATCH_N_NEXT,
      43             : } crypto_dispatch_next_t;
      44             : 
      45             : typedef struct
      46             : {
      47             :   vnet_crypto_op_status_t op_status;
      48             :   vnet_crypto_async_op_id_t op;
      49             : } crypto_dispatch_trace_t;
      50             : 
      51             : static u8 *
      52           0 : format_crypto_dispatch_trace (u8 * s, va_list * args)
      53             : {
      54           0 :   CLIB_UNUSED (vlib_main_t * vm) = va_arg (*args, vlib_main_t *);
      55           0 :   CLIB_UNUSED (vlib_node_t * node) = va_arg (*args, vlib_node_t *);
      56           0 :   crypto_dispatch_trace_t *t = va_arg (*args, crypto_dispatch_trace_t *);
      57             : 
      58           0 :   s = format (s, "%U: %U", format_vnet_crypto_async_op, t->op,
      59           0 :               format_vnet_crypto_op_status, t->op_status);
      60           0 :   return s;
      61             : }
      62             : 
      63             : static void
      64           0 : vnet_crypto_async_add_trace (vlib_main_t * vm, vlib_node_runtime_t * node,
      65             :                              vlib_buffer_t * b,
      66             :                              vnet_crypto_async_op_id_t op_id,
      67             :                              vnet_crypto_op_status_t status)
      68             : {
      69           0 :   crypto_dispatch_trace_t *tr = vlib_add_trace (vm, node, b, sizeof (*tr));
      70           0 :   tr->op_status = status;
      71           0 :   tr->op = op_id;
      72           0 : }
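
Both trace helpers above plug into vlib's packet tracing: vnet_crypto_async_add_trace() stamps the async op id and status onto a traced buffer, and format_crypto_dispatch_trace() renders that record through vppinfra's "%U" mechanism when the trace is displayed. The trace CLI normally drives the formatter; purely as an illustration of the calling convention, a hypothetical helper placed in this file (example_show_crypto_trace is not part of VPP) could invoke it directly:

    /* illustrative only: relies on the file-local formatter and type above */
    static u8 *
    example_show_crypto_trace (vlib_main_t *vm, vlib_node_runtime_t *node)
    {
      crypto_dispatch_trace_t t = {
        .op_status = VNET_CRYPTO_OP_STATUS_COMPLETED,
        .op = 0, /* some vnet_crypto_async_op_id_t */
      };
      vlib_node_t *n = vlib_get_node (vm, node->node_index);

      /* "%U" calls format_crypto_dispatch_trace with vm, n and &t as args */
      return format (0, "%U", format_crypto_dispatch_trace, vm, n, &t);
    }
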
      73             : 
      74             : static_always_inline u32
      75        6771 : crypto_dequeue_frame (vlib_main_t * vm, vlib_node_runtime_t * node,
      76             :                       vnet_crypto_thread_t * ct,
      77             :                       vnet_crypto_frame_dequeue_t * hdl, u32 n_cache,
      78             :                       u32 * n_total)
      79             : {
      80        6771 :   vnet_crypto_main_t *cm = &crypto_main;
      81        6771 :   u32 n_elts = 0;
      82        6771 :   u32 enqueue_thread_idx = ~0;
      83        6771 :   vnet_crypto_async_frame_t *cf = (hdl) (vm, &n_elts, &enqueue_thread_idx);
      84        6771 :   *n_total += n_elts;
      85             : 
      86        9598 :   while (cf || n_elts)
      87             :     {
      88        2828 :       if (cf)
      89             :         {
      90        2792 :           vec_validate (ct->buffer_indices, n_cache + cf->n_elts);
      91        2792 :           vec_validate (ct->nexts, n_cache + cf->n_elts);
      92        2792 :           clib_memcpy_fast (ct->buffer_indices + n_cache, cf->buffer_indices,
      93        2792 :                             sizeof (u32) * cf->n_elts);
      94        2792 :           if (cf->state == VNET_CRYPTO_FRAME_STATE_SUCCESS)
      95             :             {
      96        2742 :               clib_memcpy_fast (ct->nexts + n_cache, cf->next_node_index,
      97        2742 :                                 sizeof (u16) * cf->n_elts);
      98             :             }
      99             :           else
     100             :             {
     101             :               u32 i;
     102         345 :               for (i = 0; i < cf->n_elts; i++)
     103             :                 {
     104         295 :                   if (cf->elts[i].status != VNET_CRYPTO_OP_STATUS_COMPLETED)
     105             :                     {
     106         290 :                       ct->nexts[i + n_cache] = CRYPTO_DISPATCH_NEXT_ERR_DROP;
     107         290 :                       vlib_node_increment_counter (vm, node->node_index,
     108         290 :                                                    cf->elts[i].status, 1);
     109             :                     }
     110             :                   else
     111           5 :                     ct->nexts[i + n_cache] = cf->next_node_index[i];
     112             :                 }
     113             :             }
     114        2792 :           n_cache += cf->n_elts;
     115        2792 :           if (n_cache >= VLIB_FRAME_SIZE)
     116             :             {
     117           0 :               vlib_buffer_enqueue_to_next_vec (vm, node, &ct->buffer_indices,
     118             :                                                &ct->nexts, n_cache);
     119           0 :               n_cache = 0;
     120             :             }
     121             : 
     122        2792 :           if (PREDICT_FALSE (node->flags & VLIB_NODE_FLAG_TRACE))
     123             :             {
     124             :               u32 i;
     125             : 
     126           0 :               for (i = 0; i < cf->n_elts; i++)
     127             :                 {
     128           0 :                   vlib_buffer_t *b = vlib_get_buffer (vm,
     129             :                                                       cf->buffer_indices[i]);
     130           0 :                   if (b->flags & VLIB_BUFFER_IS_TRACED)
     131           0 :                     vnet_crypto_async_add_trace (vm, node, b, cf->op,
     132           0 :                                                  cf->elts[i].status);
     133             :                 }
     134             :             }
     135        2792 :           vnet_crypto_async_free_frame (vm, cf);
     136             :         }
      137             :       /* signal the enqueue thread to dequeue the processed frame (n_elts > 0) */
     138        2828 :       if (n_elts > 0 &&
     139        2792 :           ((node->state == VLIB_NODE_STATE_POLLING &&
     140          10 :             (node->flags &
     141        2792 :              VLIB_NODE_FLAG_SWITCH_FROM_POLLING_TO_INTERRUPT_MODE)) ||
     142        2792 :            node->state == VLIB_NODE_STATE_INTERRUPT))
     143             :         {
     144        2782 :           vlib_node_set_interrupt_pending (
     145             :             vlib_get_main_by_index (enqueue_thread_idx),
     146             :             cm->crypto_node_index);
     147             :         }
     148             : 
     149        2828 :       n_elts = 0;
     150        2828 :       enqueue_thread_idx = 0;
     151        2828 :       cf = (hdl) (vm, &n_elts, &enqueue_thread_idx);
     152        2827 :       *n_total += n_elts;
     153             :     }
     154             : 
     155        6770 :   return n_cache;
     156             : }
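
crypto_dequeue_frame() treats hdl as an opaque vnet_crypto_frame_dequeue_t callback supplied by an async crypto engine: each call may return a completed frame (or NULL) and reports, via the two out-parameters, how many elements were processed and which thread enqueued the frame, so that thread's dispatch node can be interrupted. A hedged sketch of such a handler for a hypothetical engine follows; example_ring_pop and example_engine_frame_dequeue are made-up names, and the enqueue_thread_index field name is an assumption about the async frame layout, not something shown in this file:

    #include <vlib/vlib.h>
    #include <vnet/crypto/crypto.h>

    /* stand-in for engine-specific completion-ring code */
    static vnet_crypto_async_frame_t *
    example_ring_pop (vlib_main_t *vm)
    {
      return 0; /* a real engine would return a finished frame here */
    }

    static vnet_crypto_async_frame_t *
    example_engine_frame_dequeue (vlib_main_t *vm, u32 *nb_elts_processed,
                                  u32 *enqueue_thread_idx)
    {
      vnet_crypto_async_frame_t *f = example_ring_pop (vm);

      if (f == 0)
        return 0; /* nothing ready; crypto-dispatch keeps its cache as is */

      *nb_elts_processed = f->n_elts;                /* feeds *n_total above */
      *enqueue_thread_idx = f->enqueue_thread_index; /* assumed field name */
      f->state = VNET_CRYPTO_FRAME_STATE_SUCCESS;    /* per-elt status in f->elts[] */
      return f;
    }
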
     157             : 
     158        9007 : VLIB_NODE_FN (crypto_dispatch_node) (vlib_main_t * vm,
     159             :                                      vlib_node_runtime_t * node,
     160             :                                      vlib_frame_t * frame)
     161             : {
     162        6771 :   vnet_crypto_main_t *cm = &crypto_main;
     163        6771 :   vnet_crypto_thread_t *ct = cm->threads + vm->thread_index;
     164        6771 :   u32 n_dispatched = 0, n_cache = 0, index;
     165       13541 :   vec_foreach_index (index, cm->dequeue_handlers)
     166             :     {
     167        6771 :       n_cache = crypto_dequeue_frame (
     168        6771 :         vm, node, ct, cm->dequeue_handlers[index], n_cache, &n_dispatched);
     169             :     }
     170             :   /* *INDENT-ON* */
     171        6770 :   if (n_cache)
     172        2472 :     vlib_buffer_enqueue_to_next_vec (vm, node, &ct->buffer_indices, &ct->nexts,
     173             :                                      n_cache);
     174             : 
      175             :   /* if there are still pending frames and the node is in interrupt mode,
      176             :      signal the current thread to dequeue them on the next loop */
     177        6770 :   if (pool_elts (ct->frame_pool) > 0 &&
     178        1147 :       ((node->state == VLIB_NODE_STATE_POLLING &&
     179           0 :         (node->flags &
     180        1147 :          VLIB_NODE_FLAG_SWITCH_FROM_POLLING_TO_INTERRUPT_MODE)) ||
     181        1147 :        node->state == VLIB_NODE_STATE_INTERRUPT))
     182             :     {
     183        1147 :       vlib_node_set_interrupt_pending (vm, node->node_index);
     184             :     }
     185             : 
     186        6771 :   return n_dispatched;
     187             : }
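
The node runs in interrupt mode by default (see the registration below) and re-arms itself only while frames are outstanding, so something has to raise the first interrupt when work is handed to an engine. Mirroring the calls already used in this file, a minimal sketch of how an enqueue path on the current thread could kick the dispatch node (example_kick_crypto_dispatch is a hypothetical helper; real engines go through the vnet/crypto enqueue handlers):

    #include <vlib/vlib.h>
    #include <vnet/crypto/crypto.h>

    static void
    example_kick_crypto_dispatch (vlib_main_t *vm)
    {
      vnet_crypto_main_t *cm = &crypto_main;

      /* schedule crypto-dispatch on this thread's next iteration */
      vlib_node_set_interrupt_pending (vm, cm->crypto_node_index);
    }
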
     188             : 
     189             : /* *INDENT-OFF* */
     190      178120 : VLIB_REGISTER_NODE (crypto_dispatch_node) = {
     191             :   .name = "crypto-dispatch",
     192             :   .type = VLIB_NODE_TYPE_INPUT,
     193             :   .flags = VLIB_NODE_FLAG_ADAPTIVE_MODE,
     194             :   .state = VLIB_NODE_STATE_INTERRUPT,
     195             :   .format_trace = format_crypto_dispatch_trace,
     196             : 
     197             :   .n_errors = ARRAY_LEN(vnet_crypto_async_error_strings),
     198             :   .error_strings = vnet_crypto_async_error_strings,
     199             : 
     200             :   .n_next_nodes = CRYPTO_DISPATCH_N_NEXT,
     201             :   .next_nodes = {
     202             : #define _(n, s) \
     203             :   [CRYPTO_DISPATCH_NEXT_##n] = s,
     204             :       foreach_crypto_dispatch_next
     205             : #undef _
     206             :   },
     207             : };
     208             : /* *INDENT-ON* */
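
With the single-entry foreach_crypto_dispatch_next list defined near the top of the file, the next-node portion of this registration expands (written out by hand here for readability) to:

    .n_next_nodes = CRYPTO_DISPATCH_N_NEXT, /* == 1 */
    .next_nodes = {
      [CRYPTO_DISPATCH_NEXT_ERR_DROP] = "error-drop",
    },
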
     209             : 
     210             : /*
     211             :  * fd.io coding-style-patch-verification: ON
     212             :  *
     213             :  * Local Variables:
     214             :  * eval: (c-set-style "gnu")
     215             :  * End:
     216             :  */

Generated by: LCOV version 1.14