LCOV - code coverage report
Current view: top level - vnet/ipsec - esp_decrypt.c (source / functions)
Test: coverage-filtered.info
Date: 2023-10-26 01:39:38
Coverage:               Hit    Total    Coverage
  Lines:                640    701      91.3 %
  Functions:             71     97      73.2 %

          Line data    Source code
       1             : /*
       2             :  * esp_decrypt.c : IPSec ESP decrypt node
       3             :  *
       4             :  * Copyright (c) 2015 Cisco and/or its affiliates.
       5             :  * Licensed under the Apache License, Version 2.0 (the "License");
       6             :  * you may not use this file except in compliance with the License.
       7             :  * You may obtain a copy of the License at:
       8             :  *
       9             :  *     http://www.apache.org/licenses/LICENSE-2.0
      10             :  *
      11             :  * Unless required by applicable law or agreed to in writing, software
      12             :  * distributed under the License is distributed on an "AS IS" BASIS,
      13             :  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
      14             :  * See the License for the specific language governing permissions and
      15             :  * limitations under the License.
      16             :  */
      17             : #include <vnet/vnet.h>
      18             : #include <vnet/api_errno.h>
      19             : #include <vnet/ip/ip.h>
      20             : #include <vnet/l2/l2_input.h>
      21             : 
      22             : #include <vnet/ipsec/ipsec.h>
      23             : #include <vnet/ipsec/esp.h>
      24             : #include <vnet/ipsec/ipsec_io.h>
      25             : #include <vnet/ipsec/ipsec_tun.h>
      26             : 
      27             : #include <vnet/gre/packet.h>
      28             : 
      29             : #define foreach_esp_decrypt_next                                              \
      30             :   _ (DROP, "error-drop")                                                      \
      31             :   _ (IP4_INPUT, "ip4-input-no-checksum")                                      \
      32             :   _ (IP6_INPUT, "ip6-input")                                                  \
      33             :   _ (L2_INPUT, "l2-input")                                                    \
      34             :   _ (MPLS_INPUT, "mpls-input")                                                \
      35             :   _ (HANDOFF, "handoff")
      36             : 
      37             : #define _(v, s) ESP_DECRYPT_NEXT_##v,
      38             : typedef enum
      39             : {
      40             :   foreach_esp_decrypt_next
      41             : #undef _
      42             :     ESP_DECRYPT_N_NEXT,
      43             : } esp_decrypt_next_t;
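
For readability, this is roughly what the X-macro above expands to after preprocessing (the enumerator names are generated by ESP_DECRYPT_NEXT_##v; the strings in foreach_esp_decrypt_next name the corresponding vlib next nodes):

    typedef enum
    {
      ESP_DECRYPT_NEXT_DROP,        /* "error-drop" */
      ESP_DECRYPT_NEXT_IP4_INPUT,   /* "ip4-input-no-checksum" */
      ESP_DECRYPT_NEXT_IP6_INPUT,   /* "ip6-input" */
      ESP_DECRYPT_NEXT_L2_INPUT,    /* "l2-input" */
      ESP_DECRYPT_NEXT_MPLS_INPUT,  /* "mpls-input" */
      ESP_DECRYPT_NEXT_HANDOFF,     /* "handoff" */
      ESP_DECRYPT_N_NEXT,
    } esp_decrypt_next_t;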
      44             : 
      45             : #define foreach_esp_decrypt_post_next                                         \
      46             :   _ (DROP, "error-drop")                                                      \
      47             :   _ (IP4_INPUT, "ip4-input-no-checksum")                                      \
      48             :   _ (IP6_INPUT, "ip6-input")                                                  \
      49             :   _ (MPLS_INPUT, "mpls-input")                                                \
      50             :   _ (L2_INPUT, "l2-input")
      51             : 
      52             : #define _(v, s) ESP_DECRYPT_POST_NEXT_##v,
      53             : typedef enum
      54             : {
      55             :   foreach_esp_decrypt_post_next
      56             : #undef _
      57             :     ESP_DECRYPT_POST_N_NEXT,
      58             : } esp_decrypt_post_next_t;
      59             : 
      60             : typedef struct
      61             : {
      62             :   u32 seq;
      63             :   u32 sa_seq;
      64             :   u32 sa_seq_hi;
      65             :   u32 pkt_seq_hi;
      66             :   ipsec_crypto_alg_t crypto_alg;
      67             :   ipsec_integ_alg_t integ_alg;
      68             : } esp_decrypt_trace_t;
      69             : 
      70             : typedef vl_counter_esp_decrypt_enum_t esp_decrypt_error_t;
      71             : 
       72             : /* The number of bytes in the high 32 bits of the extended sequence number */
      73             : #define N_HI_ESN_BYTES 4
      74             : 
      75             : /* packet trace format function */
      76             : static u8 *
      77      182435 : format_esp_decrypt_trace (u8 * s, va_list * args)
      78             : {
      79      182435 :   CLIB_UNUSED (vlib_main_t * vm) = va_arg (*args, vlib_main_t *);
      80      182435 :   CLIB_UNUSED (vlib_node_t * node) = va_arg (*args, vlib_node_t *);
      81      182435 :   esp_decrypt_trace_t *t = va_arg (*args, esp_decrypt_trace_t *);
      82             : 
      83      182435 :   s = format (s,
      84             :               "esp: crypto %U integrity %U pkt-seq %d sa-seq %u sa-seq-hi %u "
      85             :               "pkt-seq-hi %u",
      86      182435 :               format_ipsec_crypto_alg, t->crypto_alg, format_ipsec_integ_alg,
      87      182435 :               t->integ_alg, t->seq, t->sa_seq, t->sa_seq_hi, t->pkt_seq_hi);
      88      182435 :   return s;
      89             : }
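
Illustrative trace output only; the algorithm fields are placeholders for whatever format_ipsec_crypto_alg / format_ipsec_integ_alg print for the SA in question:

    esp: crypto <crypto-alg> integrity <integ-alg> pkt-seq 17 sa-seq 17 sa-seq-hi 0 pkt-seq-hi 0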
      90             : 
      91             : #define ESP_ENCRYPT_PD_F_FD_TRANSPORT (1 << 2)
      92             : 
      93             : static_always_inline void
      94       10122 : esp_process_ops (vlib_main_t * vm, vlib_node_runtime_t * node,
      95             :                  vnet_crypto_op_t * ops, vlib_buffer_t * b[], u16 * nexts,
      96             :                  int e)
      97             : {
      98       10122 :   vnet_crypto_op_t *op = ops;
      99       10122 :   u32 n_fail, n_ops = vec_len (ops);
     100             : 
     101       10122 :   if (n_ops == 0)
     102        3305 :     return;
     103             : 
     104        6817 :   n_fail = n_ops - vnet_crypto_process_ops (vm, op, n_ops);
     105             : 
     106        8939 :   while (n_fail)
     107             :     {
     108        2122 :       ASSERT (op - ops < n_ops);
     109        2122 :       if (op->status != VNET_CRYPTO_OP_STATUS_COMPLETED)
     110             :         {
     111        2122 :           u32 err, bi = op->user_data;
     112        2122 :           if (op->status == VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC)
     113        2122 :             err = e;
     114             :           else
     115           0 :             err = ESP_DECRYPT_ERROR_CRYPTO_ENGINE_ERROR;
     116        2122 :           esp_decrypt_set_next_index (b[bi], node, vm->thread_index, err, bi,
     117             :                                       nexts, ESP_DECRYPT_NEXT_DROP,
     118        2122 :                                       vnet_buffer (b[bi])->ipsec.sad_index);
     119        2122 :           n_fail--;
     120             :         }
     121        2122 :       op++;
     122             :     }
     123             : }
     124             : 
     125             : static_always_inline void
     126       10122 : esp_process_chained_ops (vlib_main_t * vm, vlib_node_runtime_t * node,
     127             :                          vnet_crypto_op_t * ops, vlib_buffer_t * b[],
     128             :                          u16 * nexts, vnet_crypto_op_chunk_t * chunks, int e)
     129             : {
     130             : 
     131       10122 :   vnet_crypto_op_t *op = ops;
     132       10122 :   u32 n_fail, n_ops = vec_len (ops);
     133             : 
     134       10122 :   if (PREDICT_TRUE (n_ops == 0))
     135        8870 :     return;
     136             : 
     137        1252 :   n_fail = n_ops - vnet_crypto_process_chained_ops (vm, op, chunks, n_ops);
     138             : 
     139        1252 :   while (n_fail)
     140             :     {
     141           0 :       ASSERT (op - ops < n_ops);
     142           0 :       if (op->status != VNET_CRYPTO_OP_STATUS_COMPLETED)
     143             :         {
     144           0 :           u32 err, bi = op->user_data;
     145           0 :           if (op->status == VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC)
     146           0 :             err = e;
     147             :           else
     148           0 :             err = ESP_DECRYPT_ERROR_CRYPTO_ENGINE_ERROR;
     149           0 :           esp_decrypt_set_next_index (b[bi], node, vm->thread_index, err, bi,
     150             :                                       nexts, ESP_DECRYPT_NEXT_DROP,
     151           0 :                                       vnet_buffer (b[bi])->ipsec.sad_index);
     152           0 :           n_fail--;
     153             :         }
     154           0 :       op++;
     155             :     }
     156             : }
     157             : 
     158             : always_inline void
     159      236601 : esp_remove_tail (vlib_main_t * vm, vlib_buffer_t * b, vlib_buffer_t * last,
     160             :                  u16 tail)
     161             : {
     162      236601 :   vlib_buffer_t *before_last = b;
     163             : 
     164      236601 :   if (last->current_length > tail)
     165             :     {
     166      226953 :       last->current_length -= tail;
     167      226953 :       return;
     168             :     }
     169        9648 :   ASSERT (b->flags & VLIB_BUFFER_NEXT_PRESENT);
     170             : 
     171       21976 :   while (b->flags & VLIB_BUFFER_NEXT_PRESENT)
     172             :     {
     173       12328 :       before_last = b;
     174       12328 :       b = vlib_get_buffer (vm, b->next_buffer);
     175             :     }
     176        9648 :   before_last->current_length -= tail - last->current_length;
     177        9648 :   vlib_buffer_free_one (vm, before_last->next_buffer);
     178        9648 :   before_last->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
     179             : }
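
A worked example of the trim above, under assumed sizes (not from the original file):

    /* Assume tail = 20 and the last buffer holds only 12 bytes:
     *   - 12 > 20 is false, so the fast path is not taken;
     *   - walk the chain to find before_last;
     *   - before_last->current_length -= 20 - 12, i.e. 8 bytes trimmed;
     *   - the last buffer (pure tail) is freed and unlinked from the chain.
     */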
     180             : 
      181             : /* The ICV is split across the last two buffers, so move it to the last
      182             :    buffer and return a pointer to it */
     183             : static_always_inline u8 *
     184       14472 : esp_move_icv (vlib_main_t * vm, vlib_buffer_t * first,
     185             :               esp_decrypt_packet_data_t * pd,
     186             :               esp_decrypt_packet_data2_t * pd2, u16 icv_sz, u16 * dif)
     187             : {
     188             :   vlib_buffer_t *before_last, *bp;
     189       14472 :   u16 last_sz = pd2->lb->current_length;
     190       14472 :   u16 first_sz = icv_sz - last_sz;
     191             : 
     192       14472 :   bp = before_last = first;
     193       41540 :   while (bp->flags & VLIB_BUFFER_NEXT_PRESENT)
     194             :     {
     195       27068 :       before_last = bp;
     196       27068 :       bp = vlib_get_buffer (vm, bp->next_buffer);
     197             :     }
     198             : 
     199       14472 :   u8 *lb_curr = vlib_buffer_get_current (pd2->lb);
     200       14472 :   memmove (lb_curr + first_sz, lb_curr, last_sz);
     201       14472 :   clib_memcpy_fast (lb_curr, vlib_buffer_get_tail (before_last) - first_sz,
     202             :                     first_sz);
     203       14472 :   before_last->current_length -= first_sz;
     204       14472 :   if (before_last == first)
     205        1876 :     pd->current_length -= first_sz;
     206       14472 :   clib_memset (vlib_buffer_get_tail (before_last), 0, first_sz);
     207       14472 :   if (dif)
     208       14472 :     dif[0] = first_sz;
     209       14472 :   pd2->lb = before_last;
     210       14472 :   pd2->icv_removed = 1;
     211       14472 :   pd2->free_buffer_index = before_last->next_buffer;
     212       14472 :   before_last->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
     213       14472 :   return lb_curr;
     214             : }
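
A sketch of the buffer surgery above, assuming a 16-byte ICV split 10/6 across the final two buffers:

    /* before:  ... -> before_last [ payload | icv[0..9] ] -> lb [ icv[10..15] ]
     * after:   ... -> before_last [ payload ]   (10 bytes shorter, next-link
     *                                            cleared, becomes pd2->lb)
     *          old lb, now unlinked: [ icv[0..15] ]   (buffer index saved in
     *                                                  pd2->free_buffer_index)
     * The returned pointer addresses the reassembled 16-byte ICV; dif[0] = 10.
     */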
     215             : 
     216             : static_always_inline u16
     217      110772 : esp_insert_esn (vlib_main_t *vm, ipsec_sa_t *sa, esp_decrypt_packet_data_t *pd,
     218             :                 esp_decrypt_packet_data2_t *pd2, u32 *data_len, u8 **digest,
     219             :                 u16 *len, vlib_buffer_t *b, u8 *payload)
     220             : {
     221      110772 :   if (!ipsec_sa_is_set_USE_ESN (sa))
     222       65316 :     return 0;
     223             :   /* shift ICV by 4 bytes to insert ESN */
     224       45456 :   u32 seq_hi = clib_host_to_net_u32 (pd->seq_hi);
     225             :   u8 tmp[ESP_MAX_ICV_SIZE];
     226             : 
     227       45456 :   if (pd2->icv_removed)
     228             :     {
     229           0 :       u16 space_left = vlib_buffer_space_left_at_end (vm, pd2->lb);
     230           0 :       if (space_left >= N_HI_ESN_BYTES)
     231             :         {
     232           0 :           clib_memcpy_fast (vlib_buffer_get_tail (pd2->lb), &seq_hi,
     233             :                             N_HI_ESN_BYTES);
     234           0 :           *data_len += N_HI_ESN_BYTES;
     235             :         }
     236             :       else
     237           0 :         return N_HI_ESN_BYTES;
     238             : 
     239           0 :       len[0] = b->current_length;
     240             :     }
     241             :   else
     242             :     {
     243       45456 :       clib_memcpy_fast (tmp, payload + len[0], ESP_MAX_ICV_SIZE);
     244       45456 :       clib_memcpy_fast (payload + len[0], &seq_hi, N_HI_ESN_BYTES);
     245       45456 :       clib_memcpy_fast (payload + len[0] + N_HI_ESN_BYTES, tmp,
     246             :                         ESP_MAX_ICV_SIZE);
     247       45456 :       *data_len += N_HI_ESN_BYTES;
     248       45456 :       *digest += N_HI_ESN_BYTES;
     249             :     }
     250       45456 :   return N_HI_ESN_BYTES;
     251             : }
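
A sketch of the in-place shuffle performed by the common (un-chained) branch above:

    /* before:  payload[0 .. len) | ICV
     * after:   payload[0 .. len) | seq_hi (4 bytes, network order) | ICV
     *
     * *data_len grows by N_HI_ESN_BYTES so the integrity op also covers the
     * high sequence-number bits, and *digest moves 4 bytes right so it still
     * points at the ICV.
     */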
     252             : 
     253             : static_always_inline u8 *
     254        7236 : esp_move_icv_esn (vlib_main_t * vm, vlib_buffer_t * first,
     255             :                   esp_decrypt_packet_data_t * pd,
     256             :                   esp_decrypt_packet_data2_t * pd2, u16 icv_sz,
     257             :                   ipsec_sa_t * sa, u8 * extra_esn, u32 * len)
     258             : {
     259        7236 :   u16 dif = 0;
     260        7236 :   u8 *digest = esp_move_icv (vm, first, pd, pd2, icv_sz, &dif);
     261        7236 :   if (dif)
     262        7236 :     *len -= dif;
     263             : 
     264        7236 :   if (ipsec_sa_is_set_USE_ESN (sa))
     265             :     {
     266        3618 :       u32 seq_hi = clib_host_to_net_u32 (pd->seq_hi);
     267        3618 :       u16 space_left = vlib_buffer_space_left_at_end (vm, pd2->lb);
     268             : 
     269        3618 :       if (space_left >= N_HI_ESN_BYTES)
     270             :         {
     271        3216 :           clib_memcpy_fast (vlib_buffer_get_tail (pd2->lb), &seq_hi,
     272             :                             N_HI_ESN_BYTES);
     273        3216 :           *len += N_HI_ESN_BYTES;
     274             :         }
     275             :       else
     276             :         {
     277             :           /* no space for ESN at the tail, use the next buffer
     278             :            * (with ICV data) */
     279         402 :           ASSERT (pd2->icv_removed);
     280         402 :           vlib_buffer_t *tmp = vlib_get_buffer (vm, pd2->free_buffer_index);
     281         402 :           clib_memcpy_fast (vlib_buffer_get_current (tmp) - N_HI_ESN_BYTES,
     282             :                             &seq_hi, N_HI_ESN_BYTES);
     283         402 :           extra_esn[0] = 1;
     284             :         }
     285             :     }
     286        7236 :   return digest;
     287             : }
     288             : 
     289             : static_always_inline int
     290       39664 : esp_decrypt_chain_integ (vlib_main_t *vm, ipsec_per_thread_data_t *ptd,
     291             :                          const esp_decrypt_packet_data_t *pd,
     292             :                          esp_decrypt_packet_data2_t *pd2, ipsec_sa_t *sa0,
     293             :                          vlib_buffer_t *b, u8 icv_sz, u8 *start_src,
     294             :                          u32 start_len, u8 **digest, u16 *n_ch,
     295             :                          u32 *integ_total_len)
     296             : {
     297             :   vnet_crypto_op_chunk_t *ch;
     298       39664 :   vlib_buffer_t *cb = vlib_get_buffer (vm, b->next_buffer);
     299       39664 :   u16 n_chunks = 1;
     300             :   u32 total_len;
     301       39664 :   vec_add2 (ptd->chunks, ch, 1);
     302       39664 :   total_len = ch->len = start_len;
     303       39664 :   ch->src = start_src;
     304             : 
     305             :   while (1)
     306             :     {
     307       52528 :       vec_add2 (ptd->chunks, ch, 1);
     308       52528 :       n_chunks += 1;
     309       52528 :       ch->src = vlib_buffer_get_current (cb);
     310       52528 :       if (pd2->lb == cb)
     311             :         {
     312       39664 :           if (pd2->icv_removed)
     313        5360 :             ch->len = cb->current_length;
     314             :           else
     315       34304 :             ch->len = cb->current_length - icv_sz;
     316       39664 :           if (ipsec_sa_is_set_USE_ESN (sa0))
     317             :             {
     318       19832 :               u32 seq_hi = clib_host_to_net_u32 (pd->seq_hi);
     319             :               u8 tmp[ESP_MAX_ICV_SIZE];
     320             :               u8 *esn;
     321             :               vlib_buffer_t *tmp_b;
     322       19832 :               u16 space_left = vlib_buffer_space_left_at_end (vm, pd2->lb);
     323       19832 :               if (space_left < N_HI_ESN_BYTES)
     324             :                 {
     325        1206 :                   if (pd2->icv_removed)
     326             :                     {
      327             :                       /* use pre-data area from the last buffer
     328             :                          that was removed from the chain */
     329         402 :                       tmp_b = vlib_get_buffer (vm, pd2->free_buffer_index);
     330         402 :                       esn = tmp_b->data - N_HI_ESN_BYTES;
     331             :                     }
     332             :                   else
     333             :                     {
     334             :                       /* no space, need to allocate new buffer */
     335         804 :                       u32 tmp_bi = 0;
     336         804 :                       if (vlib_buffer_alloc (vm, &tmp_bi, 1) != 1)
     337           0 :                         return -1;
     338         804 :                       tmp_b = vlib_get_buffer (vm, tmp_bi);
     339         804 :                       esn = tmp_b->data;
     340         804 :                       pd2->free_buffer_index = tmp_bi;
     341             :                     }
     342        1206 :                   clib_memcpy_fast (esn, &seq_hi, N_HI_ESN_BYTES);
     343             : 
     344        1206 :                   vec_add2 (ptd->chunks, ch, 1);
     345        1206 :                   n_chunks += 1;
     346        1206 :                   ch->src = esn;
     347        1206 :                   ch->len = N_HI_ESN_BYTES;
     348             :                 }
     349             :               else
     350             :                 {
     351       18626 :                   if (pd2->icv_removed)
     352             :                     {
     353        2278 :                       clib_memcpy_fast (vlib_buffer_get_tail (pd2->lb),
     354             :                                         &seq_hi, N_HI_ESN_BYTES);
     355             :                     }
     356             :                   else
     357             :                     {
     358       16348 :                       clib_memcpy_fast (tmp, *digest, ESP_MAX_ICV_SIZE);
     359       16348 :                       clib_memcpy_fast (*digest, &seq_hi, N_HI_ESN_BYTES);
     360       16348 :                       clib_memcpy_fast (*digest + N_HI_ESN_BYTES, tmp,
     361             :                                         ESP_MAX_ICV_SIZE);
     362       16348 :                       *digest += N_HI_ESN_BYTES;
     363             :                     }
     364       18626 :                   ch->len += N_HI_ESN_BYTES;
     365             :                 }
     366             :             }
     367       39664 :           total_len += ch->len;
     368       39664 :           break;
     369             :         }
     370             :       else
     371       12864 :         total_len += ch->len = cb->current_length;
     372             : 
     373       12864 :       if (!(cb->flags & VLIB_BUFFER_NEXT_PRESENT))
     374           0 :         break;
     375             : 
     376       12864 :       cb = vlib_get_buffer (vm, cb->next_buffer);
     377             :     }
     378             : 
     379       39664 :   if (n_ch)
     380       31624 :     *n_ch = n_chunks;
     381       39664 :   if (integ_total_len)
     382        8040 :     *integ_total_len = total_len;
     383             : 
     384       39664 :   return 0;
     385             : }
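
As an illustration, the chunk vector built above for an assumed three-buffer chain (no ESN, ICV still in place) would look like this:

    /* chunks[0] = { src = start_src (payload in b),   len = start_len }
     * chunks[1] = { src = data of the middle buffer,  len = its current_length }
     * chunks[2] = { src = data of pd2->lb,            len = current_length - icv_sz }
     *
     * n_ch            -> 3
     * integ_total_len -> sum of the three chunk lengths
     */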
     386             : 
     387             : static_always_inline u32
     388       72360 : esp_decrypt_chain_crypto (vlib_main_t * vm, ipsec_per_thread_data_t * ptd,
     389             :                           esp_decrypt_packet_data_t * pd,
     390             :                           esp_decrypt_packet_data2_t * pd2,
     391             :                           ipsec_sa_t * sa0, vlib_buffer_t * b, u8 icv_sz,
     392             :                           u8 * start, u32 start_len, u8 ** tag, u16 * n_ch)
     393             : {
     394             :   vnet_crypto_op_chunk_t *ch;
     395       72360 :   vlib_buffer_t *cb = b;
     396       72360 :   u16 n_chunks = 1;
     397             :   u32 total_len;
     398       72360 :   vec_add2 (ptd->chunks, ch, 1);
     399       72360 :   total_len = ch->len = start_len;
     400       72360 :   ch->src = ch->dst = start;
     401       72360 :   cb = vlib_get_buffer (vm, cb->next_buffer);
     402       72360 :   n_chunks = 1;
     403             : 
     404             :   while (1)
     405             :     {
     406      101572 :       vec_add2 (ptd->chunks, ch, 1);
     407      101572 :       n_chunks += 1;
     408      101572 :       ch->src = ch->dst = vlib_buffer_get_current (cb);
     409      101572 :       if (pd2->lb == cb)
     410             :         {
     411       72360 :           if (ipsec_sa_is_set_IS_AEAD (sa0))
     412             :             {
     413       36180 :               if (pd2->lb->current_length < icv_sz)
     414             :                 {
     415        7236 :                   u16 dif = 0;
     416        7236 :                   *tag = esp_move_icv (vm, b, pd, pd2, icv_sz, &dif);
     417             : 
     418             :                   /* this chunk does not contain crypto data */
     419        7236 :                   n_chunks -= 1;
     420             :                   /* and fix previous chunk's length as it might have
     421             :                      been changed */
     422        7236 :                   ASSERT (n_chunks > 0);
     423        7236 :                   if (pd2->lb == b)
     424             :                     {
     425           0 :                       total_len -= dif;
     426           0 :                       ch[-1].len -= dif;
     427             :                     }
     428             :                   else
     429             :                     {
     430        7236 :                       total_len = total_len + pd2->lb->current_length -
     431        7236 :                         ch[-1].len;
     432        7236 :                       ch[-1].len = pd2->lb->current_length;
     433             :                     }
     434        7236 :                   break;
     435             :                 }
     436             :               else
     437       28944 :                 *tag = vlib_buffer_get_tail (pd2->lb) - icv_sz;
     438             :             }
     439             : 
     440       65124 :           if (pd2->icv_removed)
     441        4824 :             total_len += ch->len = cb->current_length;
     442             :           else
     443       60300 :             total_len += ch->len = cb->current_length - icv_sz;
     444             :         }
     445             :       else
     446       29212 :         total_len += ch->len = cb->current_length;
     447             : 
     448       94336 :       if (!(cb->flags & VLIB_BUFFER_NEXT_PRESENT))
     449       65124 :         break;
     450             : 
     451       29212 :       cb = vlib_get_buffer (vm, cb->next_buffer);
     452             :     }
     453             : 
     454       72360 :   if (n_ch)
     455       52260 :     *n_ch = n_chunks;
     456             : 
     457       72360 :   return total_len;
     458             : }
     459             : 
     460             : static_always_inline void
     461      204052 : esp_decrypt_prepare_sync_op (vlib_main_t * vm, vlib_node_runtime_t * node,
     462             :                              ipsec_per_thread_data_t * ptd,
     463             :                              vnet_crypto_op_t *** crypto_ops,
     464             :                              vnet_crypto_op_t *** integ_ops,
     465             :                              vnet_crypto_op_t * op,
     466             :                              ipsec_sa_t * sa0, u8 * payload,
     467             :                              u16 len, u8 icv_sz, u8 iv_sz,
     468             :                              esp_decrypt_packet_data_t * pd,
     469             :                              esp_decrypt_packet_data2_t * pd2,
     470             :                              vlib_buffer_t * b, u16 * next, u32 index)
     471             : {
     472      204052 :   const u8 esp_sz = sizeof (esp_header_t);
     473             : 
     474      204052 :   if (PREDICT_TRUE (sa0->integ_op_id != VNET_CRYPTO_OP_NONE))
     475             :     {
     476      136590 :       vnet_crypto_op_init (op, sa0->integ_op_id);
     477      136590 :       op->key_index = sa0->integ_key_index;
     478      136590 :       op->src = payload;
     479      136590 :       op->flags = VNET_CRYPTO_OP_FLAG_HMAC_CHECK;
     480      136590 :       op->user_data = index;
     481      136590 :       op->digest = payload + len;
     482      136590 :       op->digest_len = icv_sz;
     483      136590 :       op->len = len;
     484             : 
     485      136590 :       if (pd->is_chain)
     486             :         {
     487             :           /* buffer is chained */
     488       32964 :           op->len = pd->current_length;
     489             : 
      490             :           /* special case when the ICV is split and needs to be reassembled
      491             :            * first -> move it to the last buffer. Also take into account
      492             :            * that the ESN needs to be added after the encrypted data and may
      493             :            * or may not fit in the tail. */
     494       32964 :           if (pd2->lb->current_length < icv_sz)
     495             :             {
     496        5628 :               u8 extra_esn = 0;
     497        5628 :               op->digest =
     498        5628 :                 esp_move_icv_esn (vm, b, pd, pd2, icv_sz, sa0,
     499             :                                   &extra_esn, &op->len);
     500             : 
     501        5628 :               if (extra_esn)
     502             :                 {
      503             :                   /* the ESN is in the last buffer, which was unlinked
      504             :                    * from the chain */
     505         402 :                   op->len = b->current_length;
     506             :                 }
     507             :               else
     508             :                 {
     509        5226 :                   if (pd2->lb == b)
     510             :                     {
     511             :                       /* we now have a single buffer of crypto data, adjust
     512             :                        * the length (second buffer contains only ICV) */
     513        1340 :                       *integ_ops = &ptd->integ_ops;
     514        1340 :                       *crypto_ops = &ptd->crypto_ops;
     515        1340 :                       len = b->current_length;
     516        1340 :                       goto out;
     517             :                     }
     518             :                 }
     519             :             }
     520             :           else
     521       27336 :             op->digest = vlib_buffer_get_tail (pd2->lb) - icv_sz;
     522             : 
     523       31624 :           op->flags |= VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS;
     524       31624 :           op->chunk_index = vec_len (ptd->chunks);
     525       31624 :           if (esp_decrypt_chain_integ (vm, ptd, pd, pd2, sa0, b, icv_sz,
     526       31624 :                                        payload, pd->current_length,
     527             :                                        &op->digest, &op->n_chunks, 0) < 0)
     528             :             {
     529           0 :               esp_decrypt_set_next_index (
     530             :                 b, node, vm->thread_index, ESP_DECRYPT_ERROR_NO_BUFFERS, 0,
     531             :                 next, ESP_DECRYPT_NEXT_DROP, pd->sa_index);
     532           0 :               return;
     533             :             }
     534             :         }
     535             :       else
     536      103626 :         esp_insert_esn (vm, sa0, pd, pd2, &op->len, &op->digest, &len, b,
     537             :                         payload);
     538      136590 :     out:
     539      136590 :       vec_add_aligned (*(integ_ops[0]), op, 1, CLIB_CACHE_LINE_BYTES);
     540             :     }
     541             : 
     542      204052 :   payload += esp_sz;
     543      204052 :   len -= esp_sz;
     544             : 
     545      204052 :   if (sa0->crypto_dec_op_id != VNET_CRYPTO_OP_NONE)
     546             :     {
     547      189073 :       vnet_crypto_op_init (op, sa0->crypto_dec_op_id);
     548      189073 :       op->key_index = sa0->crypto_key_index;
     549      189073 :       op->iv = payload;
     550             : 
     551      189073 :       if (ipsec_sa_is_set_IS_CTR (sa0))
     552             :         {
     553             :           /* construct nonce in a scratch space in front of the IP header */
     554      110949 :           esp_ctr_nonce_t *nonce =
     555      110949 :             (esp_ctr_nonce_t *) (payload - esp_sz - pd->hdr_sz -
     556             :                                  sizeof (*nonce));
     557      110949 :           if (ipsec_sa_is_set_IS_AEAD (sa0))
     558             :             {
      559             :               /* construct the AAD in a scratch space in front of the nonce */
     560       67335 :               esp_header_t *esp0 = (esp_header_t *) (payload - esp_sz);
     561       67335 :               op->aad = (u8 *) nonce - sizeof (esp_aead_t);
     562       67335 :               op->aad_len = esp_aad_fill (op->aad, esp0, sa0, pd->seq_hi);
     563       67335 :               op->tag = payload + len;
     564       67335 :               op->tag_len = 16;
     565       67335 :               if (PREDICT_FALSE (ipsec_sa_is_set_IS_NULL_GMAC (sa0)))
     566             :                 {
     567             :                   /* RFC-4543 ENCR_NULL_AUTH_AES_GMAC: IV is part of AAD */
     568       22779 :                   payload -= iv_sz;
     569       22779 :                   len += iv_sz;
     570             :                 }
     571             :             }
     572             :           else
     573             :             {
     574       43614 :               nonce->ctr = clib_host_to_net_u32 (1);
     575             :             }
     576      110949 :           nonce->salt = sa0->salt;
     577      110949 :           ASSERT (sizeof (u64) == iv_sz);
     578      110949 :           nonce->iv = *(u64 *) op->iv;
     579      110949 :           op->iv = (u8 *) nonce;
     580             :         }
     581      189073 :       op->src = op->dst = payload += iv_sz;
     582      189073 :       op->len = len - iv_sz;
     583      189073 :       op->user_data = index;
     584             : 
     585      189073 :       if (pd->is_chain && (pd2->lb != b))
     586             :         {
     587             :           /* buffer is chained */
     588       52260 :           op->flags |= VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS;
     589       52260 :           op->chunk_index = vec_len (ptd->chunks);
     590       52260 :           esp_decrypt_chain_crypto (vm, ptd, pd, pd2, sa0, b, icv_sz,
     591       52260 :                                     payload, len - pd->iv_sz + pd->icv_sz,
     592             :                                     &op->tag, &op->n_chunks);
     593             :         }
     594             : 
     595      189073 :       vec_add_aligned (*(crypto_ops[0]), op, 1, CLIB_CACHE_LINE_BYTES);
     596             :     }
     597             : }
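
A sketch of the scratch-space layout implied by the pointer arithmetic in the CTR/AEAD branch above (lowest address on the left):

    /* [ aad (esp_aead_t) ][ nonce (esp_ctr_nonce_t) ][ IP (+UDP) hdr, pd->hdr_sz ]
     * [ ESP header ][ IV, iv_sz ][ ciphertext ... ][ ICV ]
     *
     * The AAD and nonce are written into the space in front of the already
     * parsed IP header, so no extra allocation is needed; op->iv ends up
     * pointing at the nonce and op->src/op->dst at the ciphertext.
     */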
     598             : 
     599             : static_always_inline esp_decrypt_error_t
     600       39305 : esp_decrypt_prepare_async_frame (vlib_main_t *vm, vlib_node_runtime_t *node,
     601             :                                  ipsec_per_thread_data_t *ptd,
     602             :                                  vnet_crypto_async_frame_t *f, ipsec_sa_t *sa0,
     603             :                                  u8 *payload, u16 len, u8 icv_sz, u8 iv_sz,
     604             :                                  esp_decrypt_packet_data_t *pd,
     605             :                                  esp_decrypt_packet_data2_t *pd2, u32 bi,
     606             :                                  vlib_buffer_t *b, u16 *next, u16 async_next)
     607             : {
     608       39305 :   const u8 esp_sz = sizeof (esp_header_t);
     609       39305 :   esp_decrypt_packet_data_t *async_pd = &(esp_post_data (b))->decrypt_data;
     610       39305 :   esp_decrypt_packet_data2_t *async_pd2 = esp_post_data2 (b);
     611       39305 :   u8 *tag = payload + len, *iv = payload + esp_sz, *aad = 0;
     612       39305 :   const u32 key_index = sa0->crypto_key_index;
     613       39305 :   u32 crypto_len, integ_len = 0;
     614       39305 :   i16 crypto_start_offset, integ_start_offset = 0;
     615       39305 :   u8 flags = 0;
     616             : 
     617       39305 :   if (!ipsec_sa_is_set_IS_AEAD (sa0))
     618             :     {
     619             :       /* linked algs */
     620       15722 :       integ_start_offset = payload - b->data;
     621       15722 :       integ_len = len;
     622       15722 :       if (PREDICT_TRUE (sa0->integ_op_id != VNET_CRYPTO_OP_NONE))
     623       15722 :         flags |= VNET_CRYPTO_OP_FLAG_HMAC_CHECK;
     624             : 
     625       15722 :       if (pd->is_chain)
     626             :         {
     627             :           /* buffer is chained */
     628        8576 :           integ_len = pd->current_length;
     629             : 
      630             :           /* special case when the ICV is split and needs to be reassembled
      631             :            * first -> move it to the last buffer. Also take into account
      632             :            * that the ESN needs to be added after the encrypted data and may
      633             :            * or may not fit in the tail. */
     634        8576 :           if (pd2->lb->current_length < icv_sz)
     635             :             {
     636        1608 :               u8 extra_esn = 0;
     637        1608 :               tag = esp_move_icv_esn (vm, b, pd, pd2, icv_sz, sa0,
     638             :                                       &extra_esn, &integ_len);
     639             : 
     640        1608 :               if (extra_esn)
     641             :                 {
      642             :                   /* the ESN is in the last buffer, which was unlinked
      643             :                    * from the chain */
     644           0 :                   integ_len = b->current_length;
     645             :                 }
     646             :               else
     647             :                 {
     648        1608 :                   if (pd2->lb == b)
     649             :                     {
     650             :                       /* we now have a single buffer of crypto data, adjust
     651             :                        * the length (second buffer contains only ICV) */
     652         536 :                       len = b->current_length;
     653         536 :                       goto out;
     654             :                     }
     655             :                 }
     656             :             }
     657             :           else
     658        6968 :             tag = vlib_buffer_get_tail (pd2->lb) - icv_sz;
     659             : 
     660        8040 :           flags |= VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS;
     661        8040 :           if (esp_decrypt_chain_integ (vm, ptd, pd, pd2, sa0, b, icv_sz,
     662        8040 :                                        payload, pd->current_length, &tag, 0,
     663             :                                        &integ_len) < 0)
     664             :             {
     665             :               /* allocate buffer failed, will not add to frame and drop */
     666           0 :               return (ESP_DECRYPT_ERROR_NO_BUFFERS);
     667             :             }
     668             :         }
     669             :       else
     670        7146 :         esp_insert_esn (vm, sa0, pd, pd2, &integ_len, &tag, &len, b, payload);
     671             :     }
     672             : 
     673       23583 : out:
     674             :   /* crypto */
     675       39305 :   payload += esp_sz;
     676       39305 :   len -= esp_sz;
     677       39305 :   iv = payload;
     678             : 
     679       39305 :   if (ipsec_sa_is_set_IS_CTR (sa0))
     680             :     {
     681             :       /* construct nonce in a scratch space in front of the IP header */
     682       23583 :       esp_ctr_nonce_t *nonce =
     683       23583 :         (esp_ctr_nonce_t *) (payload - esp_sz - pd->hdr_sz - sizeof (*nonce));
     684       23583 :       if (ipsec_sa_is_set_IS_AEAD (sa0))
     685             :         {
      686             :           /* construct the AAD in a scratch space in front of the nonce */
     687       23583 :           esp_header_t *esp0 = (esp_header_t *) (payload - esp_sz);
     688       23583 :           aad = (u8 *) nonce - sizeof (esp_aead_t);
     689       23583 :           esp_aad_fill (aad, esp0, sa0, pd->seq_hi);
     690       23583 :           tag = payload + len;
     691       23583 :           if (PREDICT_FALSE (ipsec_sa_is_set_IS_NULL_GMAC (sa0)))
     692             :             {
     693             :               /* RFC-4543 ENCR_NULL_AUTH_AES_GMAC: IV is part of AAD */
     694           0 :               payload -= iv_sz;
     695           0 :               len += iv_sz;
     696             :             }
     697             :         }
     698             :       else
     699             :         {
     700           0 :           nonce->ctr = clib_host_to_net_u32 (1);
     701             :         }
     702       23583 :       nonce->salt = sa0->salt;
     703       23583 :       ASSERT (sizeof (u64) == iv_sz);
     704       23583 :       nonce->iv = *(u64 *) iv;
     705       23583 :       iv = (u8 *) nonce;
     706             :     }
     707             : 
     708       39305 :   crypto_start_offset = (payload += iv_sz) - b->data;
     709       39305 :   crypto_len = len - iv_sz;
     710             : 
     711       39305 :   if (pd->is_chain && (pd2->lb != b))
     712             :     {
     713             :       /* buffer is chained */
     714       20100 :       flags |= VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS;
     715             : 
     716       20100 :       crypto_len = esp_decrypt_chain_crypto (vm, ptd, pd, pd2, sa0, b, icv_sz,
     717             :                                              payload,
     718       20100 :                                              len - pd->iv_sz + pd->icv_sz,
     719             :                                              &tag, 0);
     720             :     }
     721             : 
     722       39305 :   *async_pd = *pd;
     723       39305 :   *async_pd2 = *pd2;
     724             : 
      725             :   /* for AEAD, integ_len - crypto_len will be negative; that is ok since
      726             :    * it is ignored by the engine. */
     727       39305 :   vnet_crypto_async_add_to_frame (
     728       39305 :     vm, f, key_index, crypto_len, integ_len - crypto_len, crypto_start_offset,
     729             :     integ_start_offset, bi, async_next, iv, tag, aad, flags);
     730             : 
     731       39305 :   return (ESP_DECRYPT_ERROR_RX_PKTS);
     732             : }
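
A worked example of the length adjustment handed to the async frame, assuming a linked-algorithm SA (e.g. CBC + HMAC) with an 8-byte ESP header and a 16-byte IV, no ESN and no chaining:

    /* crypto_len = len - esp_sz - iv_sz          (ciphertext only)
     * integ_len  = len                           (ESP header + IV + ciphertext)
     * integ_len - crypto_len = 8 + 16 = 24       (hashed but not decrypted)
     *
     * With ESN, N_HI_ESN_BYTES more are added to integ_len. For AEAD SAs
     * integ_len stays 0, so the difference is negative and the engine ignores
     * it, as noted in the comment above.
     */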
     733             : 
     734             : static_always_inline void
     735      240945 : esp_decrypt_post_crypto (vlib_main_t *vm, vlib_node_runtime_t *node,
     736             :                          const u16 *next_by_next_header,
     737             :                          const esp_decrypt_packet_data_t *pd,
     738             :                          const esp_decrypt_packet_data2_t *pd2,
     739             :                          vlib_buffer_t *b, u16 *next, int is_ip6, int is_tun,
     740             :                          int is_async)
     741             : {
     742      240945 :   ipsec_sa_t *sa0 = ipsec_sa_get (pd->sa_index);
     743      240945 :   vlib_buffer_t *lb = b;
     744      240945 :   const u8 esp_sz = sizeof (esp_header_t);
     745      240945 :   const u8 tun_flags = IPSEC_SA_FLAG_IS_TUNNEL | IPSEC_SA_FLAG_IS_TUNNEL_V6;
     746      240945 :   u8 pad_length = 0, next_header = 0;
     747             :   u16 icv_sz;
     748             : 
      749             :   /*
      750             :    * redo the anti-replay check
      751             :    * in this frame say we have sequence numbers, s, s+1, s+1, s+1
      752             :    * and s and s+1 are in the window. When we did the anti-replay
      753             :    * check above we did so against the state of the window (W),
      754             :    * after packet s-1. So each of the packets in the sequence will be
      755             :    * accepted.
      756             :    * This time s will be checked against Ws-1, s+1 checked against Ws
      757             :    * (i.e. the window state is updated/advanced),
      758             :    * so this time the successive s+1 packets will be dropped.
      759             :    * This is a consequence of batching the decrypts. If the
      760             :    * check-decrypt-advance process were done for each packet it would
      761             :    * be fine. But we batch the decrypts because it's much more efficient
      762             :    * to do so in SW, and likewise when we offload to HW and the process is async.
      763             :    *
      764             :    * You're probably thinking that this means an attacker can send the
      765             :    * above sequence and cause VPP to perform decrypts that will fail,
      766             :    * and that's true. But if the attacker can determine s (a valid
      767             :    * sequence number in the window), which is non-trivial, it can generate
      768             :    * a sequence s, s+1, s+2, s+3, ... s+n and nothing will prevent any
      769             :    * implementation, sequential or batching, from decrypting these.
      770             :    */
     771      240945 :   if (ipsec_sa_anti_replay_and_sn_advance (sa0, pd->seq, pd->seq_hi, true,
     772             :                                            NULL))
     773             :     {
     774         670 :       esp_decrypt_set_next_index (b, node, vm->thread_index,
     775             :                                   ESP_DECRYPT_ERROR_REPLAY, 0, next,
     776             :                                   ESP_DECRYPT_NEXT_DROP, pd->sa_index);
     777         670 :       return;
     778             :     }
     779             : 
     780             :   u64 n_lost =
     781      240275 :     ipsec_sa_anti_replay_advance (sa0, vm->thread_index, pd->seq, pd->seq_hi);
     782             : 
     783      240275 :   vlib_prefetch_simple_counter (&ipsec_sa_err_counters[IPSEC_SA_ERROR_LOST],
     784             :                                 vm->thread_index, pd->sa_index);
     785             : 
     786      240275 :   if (pd->is_chain)
     787             :     {
     788       77720 :       lb = pd2->lb;
     789       77720 :       icv_sz = pd2->icv_removed ? 0 : pd->icv_sz;
     790       77720 :       if (pd2->free_buffer_index)
     791             :         {
     792       15276 :           vlib_buffer_free_one (vm, pd2->free_buffer_index);
     793       15276 :           lb->next_buffer = 0;
     794             :         }
     795       77720 :       if (lb->current_length < sizeof (esp_footer_t) + icv_sz)
     796             :         {
      797             :           /* the esp footer is either split across two buffers or sits in
      798             :            * the second-to-last buffer */
     799             : 
     800           0 :           vlib_buffer_t *before_last = b, *bp = b;
     801           0 :           while (bp->flags & VLIB_BUFFER_NEXT_PRESENT)
     802             :             {
     803           0 :               before_last = bp;
     804           0 :               bp = vlib_get_buffer (vm, bp->next_buffer);
     805             :             }
     806           0 :           u8 *bt = vlib_buffer_get_tail (before_last);
     807             : 
     808           0 :           if (lb->current_length == icv_sz)
     809             :             {
     810           0 :               esp_footer_t *f = (esp_footer_t *) (bt - sizeof (*f));
     811           0 :               pad_length = f->pad_length;
     812           0 :               next_header = f->next_header;
     813             :             }
     814             :           else
     815             :             {
     816           0 :               pad_length = (bt - 1)[0];
     817           0 :               next_header = ((u8 *) vlib_buffer_get_current (lb))[0];
     818             :             }
     819             :         }
     820             :       else
     821             :         {
     822       77720 :           esp_footer_t *f =
     823       77720 :             (esp_footer_t *) (lb->data + lb->current_data +
     824       77720 :                               lb->current_length - sizeof (esp_footer_t) -
     825             :                               icv_sz);
     826       77720 :           pad_length = f->pad_length;
     827       77720 :           next_header = f->next_header;
     828             :         }
     829             :     }
     830             :   else
     831             :     {
     832      162555 :       icv_sz = pd->icv_sz;
     833      162555 :       esp_footer_t *f =
     834      162555 :         (esp_footer_t *) (lb->data + lb->current_data + lb->current_length -
     835      162555 :                           sizeof (esp_footer_t) - icv_sz);
     836      162555 :       pad_length = f->pad_length;
     837      162555 :       next_header = f->next_header;
     838             :     }
     839             : 
     840      240275 :   u16 adv = pd->iv_sz + esp_sz;
     841      240275 :   u16 tail = sizeof (esp_footer_t) + pad_length + icv_sz;
     842      240275 :   u16 tail_orig = sizeof (esp_footer_t) + pad_length + pd->icv_sz;
     843      240275 :   b->flags &=
     844             :     ~(VLIB_BUFFER_TOTAL_LENGTH_VALID | VNET_BUFFER_F_L4_CHECKSUM_COMPUTED |
     845             :       VNET_BUFFER_F_L4_CHECKSUM_CORRECT);
     846             : 
     847      240275 :   if ((pd->flags & tun_flags) == 0 && !is_tun)   /* transport mode */
     848      157180 :     {
     849      157180 :       u8 udp_sz = (is_ip6 == 0 && pd->flags & IPSEC_SA_FLAG_UDP_ENCAP) ?
     850             :         sizeof (udp_header_t) : 0;
     851      157180 :       u16 ip_hdr_sz = pd->hdr_sz - udp_sz;
     852      157180 :       u8 *old_ip = b->data + pd->current_data - ip_hdr_sz - udp_sz;
     853      157180 :       u8 *ip = old_ip + adv + udp_sz;
     854             : 
     855      157180 :       if (is_ip6 && ip_hdr_sz > 64)
     856           0 :         memmove (ip, old_ip, ip_hdr_sz);
     857             :       else
     858      157180 :         clib_memcpy_le64 (ip, old_ip, ip_hdr_sz);
     859             : 
     860      157180 :       b->current_data = pd->current_data + adv - ip_hdr_sz;
     861      157180 :       b->current_length += ip_hdr_sz - adv;
     862      157180 :       esp_remove_tail (vm, b, lb, tail);
     863             : 
     864      157180 :       if (is_ip6)
     865             :         {
     866       31682 :           ip6_header_t *ip6 = (ip6_header_t *) ip;
     867       31682 :           u16 len = clib_net_to_host_u16 (ip6->payload_length);
     868       31682 :           len -= adv + tail_orig;
     869       31682 :           ip6->payload_length = clib_host_to_net_u16 (len);
     870       31682 :           ip6->protocol = next_header;
     871       31682 :           next[0] = ESP_DECRYPT_NEXT_IP6_INPUT;
     872             :         }
     873             :       else
     874             :         {
     875      125498 :           ip4_header_t *ip4 = (ip4_header_t *) ip;
     876      125498 :           ip_csum_t sum = ip4->checksum;
     877      125498 :           u16 len = clib_net_to_host_u16 (ip4->length);
     878      125498 :           len = clib_host_to_net_u16 (len - adv - tail_orig - udp_sz);
     879      125498 :           sum = ip_csum_update (sum, ip4->protocol, next_header,
     880             :                                 ip4_header_t, protocol);
     881      125498 :           sum = ip_csum_update (sum, ip4->length, len, ip4_header_t, length);
     882      125498 :           ip4->checksum = ip_csum_fold (sum);
     883      125498 :           ip4->protocol = next_header;
     884      125498 :           ip4->length = len;
     885      125498 :           next[0] = ESP_DECRYPT_NEXT_IP4_INPUT;
     886             :         }
     887             :     }
     888             :   else
     889             :     {
     890       83095 :       if (PREDICT_TRUE (next_header == IP_PROTOCOL_IP_IN_IP))
     891             :         {
     892       42204 :           next[0] = ESP_DECRYPT_NEXT_IP4_INPUT;
     893       42204 :           b->current_data = pd->current_data + adv;
     894       42204 :           b->current_length = pd->current_length - adv;
     895       42204 :           esp_remove_tail (vm, b, lb, tail);
     896             :         }
     897       40891 :       else if (next_header == IP_PROTOCOL_IPV6)
     898             :         {
     899       36963 :           next[0] = ESP_DECRYPT_NEXT_IP6_INPUT;
     900       36963 :           b->current_data = pd->current_data + adv;
     901       36963 :           b->current_length = pd->current_length - adv;
     902       36963 :           esp_remove_tail (vm, b, lb, tail);
     903             :         }
     904        3928 :       else if (next_header == IP_PROTOCOL_MPLS_IN_IP)
     905             :         {
     906         254 :           next[0] = ESP_DECRYPT_NEXT_MPLS_INPUT;
     907         254 :           b->current_data = pd->current_data + adv;
     908         254 :           b->current_length = pd->current_length - adv;
     909         254 :           esp_remove_tail (vm, b, lb, tail);
     910             :         }
     911        3674 :       else if (is_tun && next_header == IP_PROTOCOL_GRE)
     912        3674 :         {
     913             :           gre_header_t *gre;
     914             : 
     915        3674 :           b->current_data = pd->current_data + adv;
     916        3674 :           b->current_length = pd->current_length - adv - tail;
     917             : 
     918        3674 :           gre = vlib_buffer_get_current (b);
     919             : 
     920        3674 :           vlib_buffer_advance (b, sizeof (*gre));
     921             : 
     922        3674 :           switch (clib_net_to_host_u16 (gre->protocol))
     923             :             {
     924         260 :             case GRE_PROTOCOL_teb:
     925         260 :               vnet_update_l2_len (b);
     926         260 :               next[0] = ESP_DECRYPT_NEXT_L2_INPUT;
     927         260 :               break;
     928        2146 :             case GRE_PROTOCOL_ip4:
     929        2146 :               next[0] = ESP_DECRYPT_NEXT_IP4_INPUT;
     930        2146 :               break;
     931        1267 :             case GRE_PROTOCOL_ip6:
     932        1267 :               next[0] = ESP_DECRYPT_NEXT_IP6_INPUT;
     933        1267 :               break;
     934           1 :             default:
     935           1 :               esp_decrypt_set_next_index (
     936             :                 b, node, vm->thread_index, ESP_DECRYPT_ERROR_UNSUP_PAYLOAD, 0,
     937             :                 next, ESP_DECRYPT_NEXT_DROP, pd->sa_index);
     938           1 :               break;
     939             :             }
     940             :         }
     941           0 :       else if ((next[0] = vec_elt (next_by_next_header, next_header)) !=
     942             :                (u16) ~0)
     943             :         {
     944           0 :           b->current_data = pd->current_data + adv;
     945           0 :           b->current_length = pd->current_length - adv;
     946           0 :           esp_remove_tail (vm, b, lb, tail);
     947             :         }
     948             :       else
     949             :         {
     950           0 :           esp_decrypt_set_next_index (b, node, vm->thread_index,
     951             :                                       ESP_DECRYPT_ERROR_UNSUP_PAYLOAD, 0, next,
     952             :                                       ESP_DECRYPT_NEXT_DROP, pd->sa_index);
     953           0 :           return;
     954             :         }
     955             : 
     956       83095 :       if (is_tun)
     957             :         {
     958       18765 :           if (ipsec_sa_is_set_IS_PROTECT (sa0))
     959             :             {
     960             :               /*
     961             :                * There are two encap possibilities
     962             :                * 1) the tunnel and ths SA are prodiving encap, i.e. it's
     963             :                *   MAC | SA-IP | TUN-IP | ESP | PAYLOAD
     964             :                * implying the SA is in tunnel mode (on a tunnel interface)
     965             :                * 2) only the tunnel provides encap
     966             :                *   MAC | TUN-IP | ESP | PAYLOAD
     967             :                * implying the SA is in transport mode.
     968             :                *
      969             :                * For 2) we need only strip the tunnel encap and we're good,
      970             :                * since the tunnel and crypto encap (in the tun-protect
      971             :                * object) are the same and we verified above that these match.
      972             :                * For 1) we need to strip the SA-IP outer headers, to
     973             :                * reveal the tunnel IP and then check that this matches
     974             :                * the configured tunnel.
     975             :                */
     976             :               const ipsec_tun_protect_t *itp;
     977             : 
     978             :               itp =
     979        1024 :                 ipsec_tun_protect_get (vnet_buffer (b)->ipsec.protect_index);
     980             : 
     981        1024 :               if (PREDICT_TRUE (next_header == IP_PROTOCOL_IP_IN_IP))
     982             :                 {
     983             :                   const ip4_header_t *ip4;
     984             : 
     985         707 :                   ip4 = vlib_buffer_get_current (b);
     986             : 
     987         707 :                   if (!ip46_address_is_equal_v4 (&itp->itp_tun.src,
     988         644 :                                                  &ip4->dst_address) ||
     989         644 :                       !ip46_address_is_equal_v4 (&itp->itp_tun.dst,
     990             :                                                  &ip4->src_address))
     991             :                     {
     992          63 :                       esp_decrypt_set_next_index (
     993             :                         b, node, vm->thread_index,
     994             :                         ESP_DECRYPT_ERROR_TUN_NO_PROTO, 0, next,
     995             :                         ESP_DECRYPT_NEXT_DROP, pd->sa_index);
     996             :                     }
     997             :                 }
     998         317 :               else if (next_header == IP_PROTOCOL_IPV6)
     999             :                 {
    1000             :                   const ip6_header_t *ip6;
    1001             : 
    1002         317 :                   ip6 = vlib_buffer_get_current (b);
    1003             : 
    1004         317 :                   if (!ip46_address_is_equal_v6 (&itp->itp_tun.src,
    1005         254 :                                                  &ip6->dst_address) ||
    1006         254 :                       !ip46_address_is_equal_v6 (&itp->itp_tun.dst,
    1007             :                                                  &ip6->src_address))
    1008             :                     {
    1009          63 :                       esp_decrypt_set_next_index (
    1010             :                         b, node, vm->thread_index,
    1011             :                         ESP_DECRYPT_ERROR_TUN_NO_PROTO, 0, next,
    1012             :                         ESP_DECRYPT_NEXT_DROP, pd->sa_index);
    1013             :                     }
    1014             :                 }
    1015             :             }
    1016             :         }
    1017             :     }
    1018             : 
    1019      240275 :   if (PREDICT_FALSE (n_lost))
    1020       25891 :     vlib_increment_simple_counter (&ipsec_sa_err_counters[IPSEC_SA_ERROR_LOST],
    1021             :                                    vm->thread_index, pd->sa_index, n_lost);
    1022             : }
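
/*
 * Editor's sketch -- not part of the instrumented source above.  It restates,
 * in isolation, the tun-protect endpoint check performed at the end of
 * esp_decrypt_post_crypto() for case 1) in the comment above: once the SA's
 * outer encap has been stripped, the revealed inner IP header must be
 * addressed from the tunnel's remote endpoint to its local endpoint, i.e.
 * packet dst == itp_tun.src and packet src == itp_tun.dst.  The helper name
 * is hypothetical; only the types and comparison helpers come from the code
 * above.
 */
static_always_inline int
esp_decrypt_example_tun4_endpoints_match (const ipsec_tun_protect_t *itp,
					  const ip4_header_t *ip4)
{
  /* when this returns 0 the packet is dropped with
   * ESP_DECRYPT_ERROR_TUN_NO_PROTO, exactly as in the node above */
  return (ip46_address_is_equal_v4 (&itp->itp_tun.src, &ip4->dst_address) &&
	  ip46_address_is_equal_v4 (&itp->itp_tun.dst, &ip4->src_address));
}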
    1023             : 
    1024             : always_inline uword
    1025        6272 : esp_decrypt_inline (vlib_main_t *vm, vlib_node_runtime_t *node,
    1026             :                     vlib_frame_t *from_frame, int is_ip6, int is_tun,
    1027             :                     u16 async_next_node)
    1028             : {
    1029        6272 :   ipsec_main_t *im = &ipsec_main;
    1030        6272 :   const u16 *next_by_next_header = im->next_header_registrations;
    1031        6272 :   u32 thread_index = vm->thread_index;
    1032             :   u16 len;
    1033        6272 :   ipsec_per_thread_data_t *ptd = vec_elt_at_index (im->ptd, thread_index);
    1034        6272 :   u32 *from = vlib_frame_vector_args (from_frame);
    1035        6272 :   u32 n_left = from_frame->n_vectors;
    1036        6272 :   vlib_buffer_t *bufs[VLIB_FRAME_SIZE], **b = bufs;
    1037             :   vlib_buffer_t *sync_bufs[VLIB_FRAME_SIZE];
    1038        6272 :   u16 sync_nexts[VLIB_FRAME_SIZE], *sync_next = sync_nexts, n_sync = 0;
    1039        6272 :   u16 async_nexts[VLIB_FRAME_SIZE], *async_next = async_nexts;
    1040        6272 :   u16 noop_nexts[VLIB_FRAME_SIZE], n_noop = 0;
    1041             :   u32 sync_bi[VLIB_FRAME_SIZE];
    1042             :   u32 noop_bi[VLIB_FRAME_SIZE];
    1043        6272 :   esp_decrypt_packet_data_t pkt_data[VLIB_FRAME_SIZE], *pd = pkt_data;
    1044        6272 :   esp_decrypt_packet_data2_t pkt_data2[VLIB_FRAME_SIZE], *pd2 = pkt_data2;
    1045        6272 :   esp_decrypt_packet_data_t cpd = { };
    1046        6272 :   u32 current_sa_index = ~0, current_sa_bytes = 0, current_sa_pkts = 0;
    1047        6272 :   const u8 esp_sz = sizeof (esp_header_t);
    1048        6272 :   ipsec_sa_t *sa0 = 0;
    1049        6272 :   vnet_crypto_op_t _op, *op = &_op;
    1050             :   vnet_crypto_op_t **crypto_ops;
    1051             :   vnet_crypto_op_t **integ_ops;
    1052        6272 :   int is_async = im->async_mode;
    1053        6272 :   vnet_crypto_async_op_id_t async_op = ~0;
    1054             :   vnet_crypto_async_frame_t *async_frames[VNET_CRYPTO_ASYNC_OP_N_IDS];
    1055             :   esp_decrypt_error_t err;
    1056             : 
    1057        6272 :   vlib_get_buffers (vm, from, b, n_left);
    1058        6272 :   if (!is_async)
    1059             :     {
    1060        5367 :       vec_reset_length (ptd->crypto_ops);
    1061        5367 :       vec_reset_length (ptd->integ_ops);
    1062        5367 :       vec_reset_length (ptd->chained_crypto_ops);
    1063        5367 :       vec_reset_length (ptd->chained_integ_ops);
    1064             :     }
    1065        6272 :   vec_reset_length (ptd->async_frames);
    1066        6272 :   vec_reset_length (ptd->chunks);
    1067        6272 :   clib_memset (sync_nexts, -1, sizeof (sync_nexts));
    1068        6272 :   clib_memset (async_frames, 0, sizeof (async_frames));
    1069             : 
    1070      255114 :   while (n_left > 0)
    1071             :     {
    1072             :       u8 *payload;
    1073             : 
    1074      248842 :       err = ESP_DECRYPT_ERROR_RX_PKTS;
    1075      248842 :       if (n_left > 2)
    1076             :         {
    1077             :           u8 *p;
    1078      237856 :           vlib_prefetch_buffer_header (b[2], LOAD);
    1079      237856 :           p = vlib_buffer_get_current (b[1]);
    1080      237856 :           clib_prefetch_load (p);
    1081      237856 :           p -= CLIB_CACHE_LINE_BYTES;
    1082      237856 :           clib_prefetch_load (p);
    1083             :         }
    1084             : 
    1085      248842 :       u32 n_bufs = vlib_buffer_chain_linearize (vm, b[0]);
    1086      248842 :       if (n_bufs == 0)
    1087             :         {
    1088           0 :           err = ESP_DECRYPT_ERROR_NO_BUFFERS;
    1089           0 :           esp_decrypt_set_next_index (b[0], node, thread_index, err, n_noop,
    1090             :                                       noop_nexts, ESP_DECRYPT_NEXT_DROP,
    1091           0 :                                       vnet_buffer (b[0])->ipsec.sad_index);
    1092           0 :           goto next;
    1093             :         }
    1094             : 
    1095      248842 :       if (vnet_buffer (b[0])->ipsec.sad_index != current_sa_index)
    1096             :         {
    1097        6281 :           if (current_sa_pkts)
    1098           9 :             vlib_increment_combined_counter (&ipsec_sa_counters, thread_index,
    1099             :                                              current_sa_index, current_sa_pkts,
    1100             :                                              current_sa_bytes);
    1101        6281 :           current_sa_bytes = current_sa_pkts = 0;
    1102             : 
    1103        6281 :           current_sa_index = vnet_buffer (b[0])->ipsec.sad_index;
    1104        6281 :           vlib_prefetch_combined_counter (&ipsec_sa_counters, thread_index,
    1105             :                                           current_sa_index);
    1106        6281 :           sa0 = ipsec_sa_get (current_sa_index);
    1107             : 
    1108             :           /* fetch the second cacheline ASAP */
    1109        6281 :           clib_prefetch_load (sa0->cacheline1);
    1110        6281 :           cpd.icv_sz = sa0->integ_icv_size;
    1111        6281 :           cpd.iv_sz = sa0->crypto_iv_size;
    1112        6281 :           cpd.flags = sa0->flags;
    1113        6281 :           cpd.sa_index = current_sa_index;
    1114        6281 :           is_async = im->async_mode | ipsec_sa_is_set_IS_ASYNC (sa0);
    1115             :         }
    1116             : 
    1117      248842 :       if (PREDICT_FALSE ((u16) ~0 == sa0->thread_index))
    1118             :         {
     1119             :           /* this is the first packet to use this SA; claim the SA
     1120             :            * for this thread. the same claim could happen simultaneously
     1121             :            * on another thread */
    1122           6 :           clib_atomic_cmp_and_swap (&sa0->thread_index, ~0,
    1123             :                                     ipsec_sa_assign_thread (thread_index));
    1124             :         }
    1125             : 
    1126      248842 :       if (PREDICT_FALSE (thread_index != sa0->thread_index))
    1127             :         {
    1128         240 :           vnet_buffer (b[0])->ipsec.thread_index = sa0->thread_index;
    1129         240 :           err = ESP_DECRYPT_ERROR_HANDOFF;
    1130         240 :           esp_decrypt_set_next_index (b[0], node, thread_index, err, n_noop,
    1131             :                                       noop_nexts, ESP_DECRYPT_NEXT_HANDOFF,
    1132             :                                       current_sa_index);
    1133         240 :           goto next;
    1134             :         }
    1135             : 
    1136             :       /* store packet data for next round for easier prefetch */
    1137      248602 :       pd->sa_data = cpd.sa_data;
    1138      248602 :       pd->current_data = b[0]->current_data;
    1139      248602 :       pd->hdr_sz = pd->current_data - vnet_buffer (b[0])->l3_hdr_offset;
    1140      248602 :       payload = b[0]->data + pd->current_data;
    1141      248602 :       pd->seq = clib_host_to_net_u32 (((esp_header_t *) payload)->seq);
    1142      248602 :       pd->is_chain = 0;
    1143      248602 :       pd2->lb = b[0];
    1144      248602 :       pd2->free_buffer_index = 0;
    1145      248602 :       pd2->icv_removed = 0;
    1146             : 
    1147      248602 :       if (n_bufs > 1)
    1148             :         {
    1149       77720 :           pd->is_chain = 1;
    1150             :           /* find last buffer in the chain */
    1151      190548 :           while (pd2->lb->flags & VLIB_BUFFER_NEXT_PRESENT)
    1152      112828 :             pd2->lb = vlib_get_buffer (vm, pd2->lb->next_buffer);
    1153             : 
    1154       77720 :           crypto_ops = &ptd->chained_crypto_ops;
    1155       77720 :           integ_ops = &ptd->chained_integ_ops;
    1156             :         }
    1157             :       else
    1158             :         {
    1159      170882 :           crypto_ops = &ptd->crypto_ops;
    1160      170882 :           integ_ops = &ptd->integ_ops;
    1161             :         }
    1162             : 
    1163      248602 :       pd->current_length = b[0]->current_length;
    1164             : 
     1165             :       /* anti-replay check */
    1166      248602 :       if (ipsec_sa_anti_replay_and_sn_advance (sa0, pd->seq, ~0, false,
    1167             :                                                &pd->seq_hi))
    1168             :         {
    1169        3919 :           err = ESP_DECRYPT_ERROR_REPLAY;
    1170        3919 :           esp_decrypt_set_next_index (b[0], node, thread_index, err, n_noop,
    1171             :                                       noop_nexts, ESP_DECRYPT_NEXT_DROP,
    1172             :                                       current_sa_index);
    1173        3919 :           goto next;
    1174             :         }
    1175             : 
    1176      244683 :       if (pd->current_length < cpd.icv_sz + esp_sz + cpd.iv_sz)
    1177             :         {
    1178        1326 :           err = ESP_DECRYPT_ERROR_RUNT;
    1179        1326 :           esp_decrypt_set_next_index (b[0], node, thread_index, err, n_noop,
    1180             :                                       noop_nexts, ESP_DECRYPT_NEXT_DROP,
    1181             :                                       current_sa_index);
    1182        1326 :           goto next;
    1183             :         }
    1184             : 
    1185      243357 :       len = pd->current_length - cpd.icv_sz;
    1186      243357 :       current_sa_pkts += 1;
    1187      243357 :       current_sa_bytes += vlib_buffer_length_in_chain (vm, b[0]);
    1188             : 
    1189      243357 :       if (is_async)
    1190             :         {
    1191       39305 :           async_op = sa0->crypto_async_dec_op_id;
    1192             : 
    1193             :           /* get a frame for this op if we don't yet have one or it's full
    1194             :            */
    1195       77745 :           if (NULL == async_frames[async_op] ||
    1196       38440 :               vnet_crypto_async_frame_is_full (async_frames[async_op]))
    1197             :             {
    1198        1385 :               async_frames[async_op] =
    1199        1385 :                 vnet_crypto_async_get_frame (vm, async_op);
    1200        1385 :               if (PREDICT_FALSE (!async_frames[async_op]))
    1201             :                 {
    1202           0 :                   err = ESP_DECRYPT_ERROR_NO_AVAIL_FRAME;
    1203           0 :                   esp_decrypt_set_next_index (
    1204             :                     b[0], node, thread_index, err, n_noop, noop_nexts,
    1205             :                     ESP_DECRYPT_NEXT_DROP, current_sa_index);
    1206           0 :                   goto next;
    1207             :                 }
    1208             : 
    1209             :               /* Save the frame to the list we'll submit at the end */
    1210        1385 :               vec_add1 (ptd->async_frames, async_frames[async_op]);
    1211             :             }
    1212             : 
    1213       39305 :           err = esp_decrypt_prepare_async_frame (
    1214             :             vm, node, ptd, async_frames[async_op], sa0, payload, len,
    1215       39305 :             cpd.icv_sz, cpd.iv_sz, pd, pd2, from[b - bufs], b[0], async_next,
    1216             :             async_next_node);
    1217       39305 :           if (ESP_DECRYPT_ERROR_RX_PKTS != err)
    1218             :             {
    1219           0 :               esp_decrypt_set_next_index (
    1220             :                 b[0], node, thread_index, err, n_noop, noop_nexts,
    1221             :                 ESP_DECRYPT_NEXT_DROP, current_sa_index);
    1222             :             }
    1223             :         }
    1224             :       else
    1225      204052 :         esp_decrypt_prepare_sync_op (
    1226             :           vm, node, ptd, &crypto_ops, &integ_ops, op, sa0, payload, len,
    1227      204052 :           cpd.icv_sz, cpd.iv_sz, pd, pd2, b[0], sync_next, n_sync);
    1228             :       /* next */
    1229      248842 :     next:
    1230      248842 :       if (ESP_DECRYPT_ERROR_RX_PKTS != err)
    1231             :         {
    1232        5485 :           noop_bi[n_noop] = from[b - bufs];
    1233        5485 :           n_noop++;
    1234             :         }
    1235      243357 :       else if (!is_async)
    1236             :         {
    1237      204052 :           sync_bi[n_sync] = from[b - bufs];
    1238      204052 :           sync_bufs[n_sync] = b[0];
    1239      204052 :           n_sync++;
    1240      204052 :           sync_next++;
    1241      204052 :           pd += 1;
    1242      204052 :           pd2 += 1;
    1243             :         }
    1244             :       else
    1245       39305 :         async_next++;
    1246             : 
    1247      248842 :       n_left -= 1;
    1248      248842 :       b += 1;
    1249             :     }
    1250             : 
    1251        6272 :   if (PREDICT_TRUE (~0 != current_sa_index))
    1252        6272 :     vlib_increment_combined_counter (&ipsec_sa_counters, thread_index,
    1253             :                                      current_sa_index, current_sa_pkts,
    1254             :                                      current_sa_bytes);
    1255             : 
    1256             :   /* submit or free all of the open frames */
    1257             :   vnet_crypto_async_frame_t **async_frame;
    1258             : 
    1259        7657 :   vec_foreach (async_frame, ptd->async_frames)
    1260             :     {
    1261             :       /* free frame and move on if no ops were successfully added */
    1262        1385 :       if (PREDICT_FALSE (!(*async_frame)->n_elts))
    1263             :         {
    1264           0 :           vnet_crypto_async_free_frame (vm, *async_frame);
    1265           0 :           continue;
    1266             :         }
    1267        1385 :       if (vnet_crypto_async_submit_open_frame (vm, *async_frame) < 0)
    1268             :         {
    1269           0 :           n_noop += esp_async_recycle_failed_submit (
    1270             :             vm, *async_frame, node, ESP_DECRYPT_ERROR_CRYPTO_ENGINE_ERROR,
    1271             :             IPSEC_SA_ERROR_CRYPTO_ENGINE_ERROR, n_noop, noop_bi, noop_nexts,
    1272             :             ESP_DECRYPT_NEXT_DROP, false);
    1273           0 :           vnet_crypto_async_reset_frame (*async_frame);
    1274           0 :           vnet_crypto_async_free_frame (vm, *async_frame);
    1275             :         }
    1276             :     }
    1277             : 
    1278        6272 :   if (n_sync)
    1279             :     {
    1280        5061 :       esp_process_ops (vm, node, ptd->integ_ops, sync_bufs, sync_nexts,
    1281             :                        ESP_DECRYPT_ERROR_INTEG_ERROR);
    1282        5061 :       esp_process_chained_ops (vm, node, ptd->chained_integ_ops, sync_bufs,
    1283             :                                sync_nexts, ptd->chunks,
    1284             :                                ESP_DECRYPT_ERROR_INTEG_ERROR);
    1285             : 
    1286        5061 :       esp_process_ops (vm, node, ptd->crypto_ops, sync_bufs, sync_nexts,
    1287             :                        ESP_DECRYPT_ERROR_DECRYPTION_FAILED);
    1288        5061 :       esp_process_chained_ops (vm, node, ptd->chained_crypto_ops, sync_bufs,
    1289             :                                sync_nexts, ptd->chunks,
    1290             :                                ESP_DECRYPT_ERROR_DECRYPTION_FAILED);
    1291             :     }
    1292             : 
     1293             :   /* Post decryption round - adjust packet data start and length and next
    1294             :      node */
    1295             : 
    1296        6272 :   n_left = n_sync;
    1297        6272 :   sync_next = sync_nexts;
    1298        6272 :   pd = pkt_data;
    1299        6272 :   pd2 = pkt_data2;
    1300        6272 :   b = sync_bufs;
    1301             : 
    1302      210324 :   while (n_left)
    1303             :     {
    1304      204052 :       if (n_left >= 2)
    1305             :         {
    1306      198991 :           void *data = b[1]->data + pd[1].current_data;
    1307             : 
    1308             :           /* buffer metadata */
    1309      198991 :           vlib_prefetch_buffer_header (b[1], LOAD);
    1310             : 
    1311             :           /* esp_footer_t */
    1312      198991 :           CLIB_PREFETCH (data + pd[1].current_length - pd[1].icv_sz - 2,
    1313             :                          CLIB_CACHE_LINE_BYTES, LOAD);
    1314             : 
    1315             :           /* packet headers */
    1316      198991 :           CLIB_PREFETCH (data - CLIB_CACHE_LINE_BYTES,
    1317             :                          CLIB_CACHE_LINE_BYTES * 2, LOAD);
    1318             :         }
    1319             : 
     1320             :       /* save the sa_index since GRE_teb post_crypto overwrites the L2 opaque */
    1321      204052 :       if (PREDICT_FALSE (b[0]->flags & VLIB_BUFFER_IS_TRACED))
    1322      203851 :         current_sa_index = vnet_buffer (b[0])->ipsec.sad_index;
    1323             : 
    1324      204052 :       if (sync_next[0] >= ESP_DECRYPT_N_NEXT)
    1325      201930 :         esp_decrypt_post_crypto (vm, node, next_by_next_header, pd, pd2, b[0],
    1326             :                                  sync_next, is_ip6, is_tun, 0);
    1327             : 
    1328             :       /* trace: */
    1329      204052 :       if (PREDICT_FALSE (b[0]->flags & VLIB_BUFFER_IS_TRACED))
    1330             :         {
    1331             :           esp_decrypt_trace_t *tr;
    1332      203851 :           tr = vlib_add_trace (vm, node, b[0], sizeof (*tr));
    1333      203851 :           sa0 = ipsec_sa_get (current_sa_index);
    1334      203851 :           tr->crypto_alg = sa0->crypto_alg;
    1335      203851 :           tr->integ_alg = sa0->integ_alg;
    1336      203851 :           tr->seq = pd->seq;
    1337      203851 :           tr->sa_seq = sa0->seq;
    1338      203851 :           tr->sa_seq_hi = sa0->seq_hi;
    1339      203851 :           tr->pkt_seq_hi = pd->seq_hi;
    1340             :         }
    1341             : 
    1342             :       /* next */
    1343      204052 :       n_left -= 1;
    1344      204052 :       sync_next += 1;
    1345      204052 :       pd += 1;
    1346      204052 :       pd2 += 1;
    1347      204052 :       b += 1;
    1348             :     }
    1349             : 
    1350        6272 :   vlib_node_increment_counter (vm, node->node_index, ESP_DECRYPT_ERROR_RX_PKTS,
    1351        6272 :                                from_frame->n_vectors);
    1352             : 
    1353        6272 :   if (n_sync)
    1354        5061 :     vlib_buffer_enqueue_to_next (vm, node, sync_bi, sync_nexts, n_sync);
    1355             : 
    1356        6272 :   if (n_noop)
    1357         428 :     vlib_buffer_enqueue_to_next (vm, node, noop_bi, noop_nexts, n_noop);
    1358             : 
    1359        6272 :   return (from_frame->n_vectors);
    1360             : }
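
/*
 * Editor's sketch -- not part of the instrumented source above.  The
 * "anti-replay check" near the top of esp_decrypt_inline() delegates to
 * ipsec_sa_anti_replay_and_sn_advance(); a non-zero return drops the packet
 * with ESP_DECRYPT_ERROR_REPLAY.  As a rough, generic illustration of what
 * such a check does, here is a simplified RFC 4303-style 64-packet sliding
 * window in plain C.  The struct and function names are hypothetical and
 * this is NOT VPP's implementation (which, among other things, also handles
 * extended sequence numbers via pd->seq_hi and a configurable window size).
 */
typedef struct
{
  u32 highest_seq; /* right edge of the window: highest accepted seq */
  u64 window;	   /* bit i set => (highest_seq - i) was already accepted */
} example_replay_window_t;

static_always_inline int
example_replay_check_and_advance (example_replay_window_t *w, u32 seq)
{
  if (seq > w->highest_seq)
    {
      /* packet advances the right edge: slide the window and record it */
      u32 shift = seq - w->highest_seq;
      w->window = (shift < 64) ? ((w->window << shift) | 1) : 1;
      w->highest_seq = seq;
      return 0; /* accept */
    }

  u32 offset = w->highest_seq - seq;
  if (offset >= 64 || (w->window & (1ULL << offset)))
    return 1; /* too old or already seen: caller drops it as a replay */

  w->window |= 1ULL << offset;
  return 0; /* accept */
}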
    1361             : 
    1362             : always_inline uword
    1363        1340 : esp_decrypt_post_inline (vlib_main_t * vm,
    1364             :                          vlib_node_runtime_t * node,
    1365             :                          vlib_frame_t * from_frame, int is_ip6, int is_tun)
    1366             : {
    1367        1340 :   const ipsec_main_t *im = &ipsec_main;
    1368        1340 :   const u16 *next_by_next_header = im->next_header_registrations;
    1369        1340 :   u32 *from = vlib_frame_vector_args (from_frame);
    1370        1340 :   u32 n_left = from_frame->n_vectors;
    1371        1340 :   vlib_buffer_t *bufs[VLIB_FRAME_SIZE], **b = bufs;
    1372        1340 :   u16 nexts[VLIB_FRAME_SIZE], *next = nexts;
    1373        1340 :   vlib_get_buffers (vm, from, b, n_left);
    1374             : 
    1375       40355 :   while (n_left > 0)
    1376             :     {
    1377       39015 :       esp_decrypt_packet_data_t *pd = &(esp_post_data (b[0]))->decrypt_data;
    1378             : 
    1379       39015 :       if (n_left > 2)
    1380             :         {
    1381       36490 :           vlib_prefetch_buffer_header (b[2], LOAD);
    1382       36490 :           vlib_prefetch_buffer_header (b[1], LOAD);
    1383             :         }
    1384             : 
    1385       39015 :       if (!pd->is_chain)
    1386       18379 :         esp_decrypt_post_crypto (vm, node, next_by_next_header, pd, 0, b[0],
    1387             :                                  next, is_ip6, is_tun, 1);
    1388             :       else
    1389             :         {
    1390       20636 :           esp_decrypt_packet_data2_t *pd2 = esp_post_data2 (b[0]);
    1391       20636 :           esp_decrypt_post_crypto (vm, node, next_by_next_header, pd, pd2,
    1392             :                                    b[0], next, is_ip6, is_tun, 1);
    1393             :         }
    1394             : 
     1395             :       /* trace: */
    1396       39015 :       if (PREDICT_FALSE (b[0]->flags & VLIB_BUFFER_IS_TRACED))
    1397             :         {
    1398       39015 :           ipsec_sa_t *sa0 = ipsec_sa_get (pd->sa_index);
    1399             :           esp_decrypt_trace_t *tr;
    1400       39015 :           esp_decrypt_packet_data_t *async_pd =
    1401       39015 :             &(esp_post_data (b[0]))->decrypt_data;
    1402       39015 :           tr = vlib_add_trace (vm, node, b[0], sizeof (*tr));
    1403       39015 :           sa0 = ipsec_sa_get (async_pd->sa_index);
    1404             : 
    1405       39015 :           tr->crypto_alg = sa0->crypto_alg;
    1406       39015 :           tr->integ_alg = sa0->integ_alg;
    1407       39015 :           tr->seq = pd->seq;
    1408       39015 :           tr->sa_seq = sa0->seq;
    1409       39015 :           tr->sa_seq_hi = sa0->seq_hi;
    1410             :         }
    1411             : 
    1412       39015 :       n_left--;
    1413       39015 :       next++;
    1414       39015 :       b++;
    1415             :     }
    1416             : 
    1417        1340 :   n_left = from_frame->n_vectors;
    1418        1340 :   vlib_node_increment_counter (vm, node->node_index,
    1419             :                                ESP_DECRYPT_ERROR_RX_POST_PKTS, n_left);
    1420             : 
    1421        1340 :   vlib_buffer_enqueue_to_next (vm, node, from, nexts, n_left);
    1422             : 
    1423        1340 :   return n_left;
    1424             : }
    1425             : 
    1426        7338 : VLIB_NODE_FN (esp4_decrypt_node) (vlib_main_t * vm,
    1427             :                                   vlib_node_runtime_t * node,
    1428             :                                   vlib_frame_t * from_frame)
    1429             : {
    1430       10076 :   return esp_decrypt_inline (vm, node, from_frame, 0, 0,
    1431        5038 :                              esp_decrypt_async_next.esp4_post_next);
    1432             : }
    1433             : 
    1434        3240 : VLIB_NODE_FN (esp4_decrypt_post_node) (vlib_main_t * vm,
    1435             :                                        vlib_node_runtime_t * node,
    1436             :                                        vlib_frame_t * from_frame)
    1437             : {
    1438         940 :   return esp_decrypt_post_inline (vm, node, from_frame, 0, 0);
    1439             : }
    1440             : 
    1441        2492 : VLIB_NODE_FN (esp4_decrypt_tun_node) (vlib_main_t * vm,
    1442             :                                       vlib_node_runtime_t * node,
    1443             :                                       vlib_frame_t * from_frame)
    1444             : {
    1445         384 :   return esp_decrypt_inline (vm, node, from_frame, 0, 1,
    1446         192 :                              esp_decrypt_async_next.esp4_tun_post_next);
    1447             : }
    1448             : 
    1449        2300 : VLIB_NODE_FN (esp4_decrypt_tun_post_node) (vlib_main_t * vm,
    1450             :                                            vlib_node_runtime_t * node,
    1451             :                                            vlib_frame_t * from_frame)
    1452             : {
    1453           0 :   return esp_decrypt_post_inline (vm, node, from_frame, 0, 1);
    1454             : }
    1455             : 
    1456        3260 : VLIB_NODE_FN (esp6_decrypt_node) (vlib_main_t * vm,
    1457             :                                   vlib_node_runtime_t * node,
    1458             :                                   vlib_frame_t * from_frame)
    1459             : {
    1460        1920 :   return esp_decrypt_inline (vm, node, from_frame, 1, 0,
    1461         960 :                              esp_decrypt_async_next.esp6_post_next);
    1462             : }
    1463             : 
    1464        2700 : VLIB_NODE_FN (esp6_decrypt_post_node) (vlib_main_t * vm,
    1465             :                                        vlib_node_runtime_t * node,
    1466             :                                        vlib_frame_t * from_frame)
    1467             : {
    1468         400 :   return esp_decrypt_post_inline (vm, node, from_frame, 1, 0);
    1469             : }
    1470             : 
    1471        2382 : VLIB_NODE_FN (esp6_decrypt_tun_node) (vlib_main_t * vm,
    1472             :                                       vlib_node_runtime_t * node,
    1473             :                                       vlib_frame_t * from_frame)
    1474             : {
    1475         164 :   return esp_decrypt_inline (vm, node, from_frame, 1, 1,
    1476          82 :                              esp_decrypt_async_next.esp6_tun_post_next);
    1477             : }
    1478             : 
    1479        2300 : VLIB_NODE_FN (esp6_decrypt_tun_post_node) (vlib_main_t * vm,
    1480             :                                            vlib_node_runtime_t * node,
    1481             :                                            vlib_frame_t * from_frame)
    1482             : {
    1483           0 :   return esp_decrypt_post_inline (vm, node, from_frame, 1, 1);
    1484             : }
    1485             : 
    1486             : /* *INDENT-OFF* */
    1487      183788 : VLIB_REGISTER_NODE (esp4_decrypt_node) = {
    1488             :   .name = "esp4-decrypt",
    1489             :   .vector_size = sizeof (u32),
    1490             :   .format_trace = format_esp_decrypt_trace,
    1491             :   .type = VLIB_NODE_TYPE_INTERNAL,
    1492             : 
    1493             :   .n_errors = ESP_DECRYPT_N_ERROR,
    1494             :   .error_counters = esp_decrypt_error_counters,
    1495             : 
    1496             :   .n_next_nodes = ESP_DECRYPT_N_NEXT,
    1497             :   .next_nodes = {
    1498             :     [ESP_DECRYPT_NEXT_DROP] = "ip4-drop",
    1499             :     [ESP_DECRYPT_NEXT_IP4_INPUT] = "ip4-input-no-checksum",
    1500             :     [ESP_DECRYPT_NEXT_IP6_INPUT] = "ip6-input",
    1501             :     [ESP_DECRYPT_NEXT_MPLS_INPUT] = "mpls-drop",
    1502             :     [ESP_DECRYPT_NEXT_L2_INPUT] = "l2-input",
    1503             :     [ESP_DECRYPT_NEXT_HANDOFF] = "esp4-decrypt-handoff",
    1504             :   },
    1505             : };
    1506             : 
    1507      183788 : VLIB_REGISTER_NODE (esp4_decrypt_post_node) = {
    1508             :   .name = "esp4-decrypt-post",
    1509             :   .vector_size = sizeof (u32),
    1510             :   .format_trace = format_esp_decrypt_trace,
    1511             :   .type = VLIB_NODE_TYPE_INTERNAL,
    1512             : 
    1513             :   .n_errors = ESP_DECRYPT_N_ERROR,
    1514             :   .error_counters = esp_decrypt_error_counters,
    1515             : 
    1516             :   .sibling_of = "esp4-decrypt",
    1517             : };
    1518             : 
    1519      183788 : VLIB_REGISTER_NODE (esp6_decrypt_node) = {
    1520             :   .name = "esp6-decrypt",
    1521             :   .vector_size = sizeof (u32),
    1522             :   .format_trace = format_esp_decrypt_trace,
    1523             :   .type = VLIB_NODE_TYPE_INTERNAL,
    1524             : 
    1525             :   .n_errors = ESP_DECRYPT_N_ERROR,
    1526             :   .error_counters = esp_decrypt_error_counters,
    1527             : 
    1528             :   .n_next_nodes = ESP_DECRYPT_N_NEXT,
    1529             :   .next_nodes = {
    1530             :     [ESP_DECRYPT_NEXT_DROP] = "ip6-drop",
    1531             :     [ESP_DECRYPT_NEXT_IP4_INPUT] = "ip4-input-no-checksum",
    1532             :     [ESP_DECRYPT_NEXT_IP6_INPUT] = "ip6-input",
    1533             :     [ESP_DECRYPT_NEXT_MPLS_INPUT] = "mpls-drop",
    1534             :     [ESP_DECRYPT_NEXT_L2_INPUT] = "l2-input",
     1535             :     [ESP_DECRYPT_NEXT_HANDOFF] = "esp6-decrypt-handoff",
    1536             :   },
    1537             : };
    1538             : 
    1539      183788 : VLIB_REGISTER_NODE (esp6_decrypt_post_node) = {
    1540             :   .name = "esp6-decrypt-post",
    1541             :   .vector_size = sizeof (u32),
    1542             :   .format_trace = format_esp_decrypt_trace,
    1543             :   .type = VLIB_NODE_TYPE_INTERNAL,
    1544             : 
    1545             :   .n_errors = ESP_DECRYPT_N_ERROR,
    1546             :   .error_counters = esp_decrypt_error_counters,
    1547             : 
    1548             :   .sibling_of = "esp6-decrypt",
    1549             : };
    1550             : 
    1551      183788 : VLIB_REGISTER_NODE (esp4_decrypt_tun_node) = {
    1552             :   .name = "esp4-decrypt-tun",
    1553             :   .vector_size = sizeof (u32),
    1554             :   .format_trace = format_esp_decrypt_trace,
    1555             :   .type = VLIB_NODE_TYPE_INTERNAL,
    1556             :   .n_errors = ESP_DECRYPT_N_ERROR,
    1557             :   .error_counters = esp_decrypt_error_counters,
    1558             :   .n_next_nodes = ESP_DECRYPT_N_NEXT,
    1559             :   .next_nodes = {
    1560             :     [ESP_DECRYPT_NEXT_DROP] = "ip4-drop",
    1561             :     [ESP_DECRYPT_NEXT_IP4_INPUT] = "ip4-input-no-checksum",
    1562             :     [ESP_DECRYPT_NEXT_IP6_INPUT] = "ip6-input",
    1563             :     [ESP_DECRYPT_NEXT_MPLS_INPUT] = "mpls-input",
    1564             :     [ESP_DECRYPT_NEXT_L2_INPUT] = "l2-input",
    1565             :     [ESP_DECRYPT_NEXT_HANDOFF] = "esp4-decrypt-tun-handoff",
    1566             :   },
    1567             : };
    1568             : 
    1569      183788 : VLIB_REGISTER_NODE (esp4_decrypt_tun_post_node) = {
    1570             :   .name = "esp4-decrypt-tun-post",
    1571             :   .vector_size = sizeof (u32),
    1572             :   .format_trace = format_esp_decrypt_trace,
    1573             :   .type = VLIB_NODE_TYPE_INTERNAL,
    1574             : 
    1575             :   .n_errors = ESP_DECRYPT_N_ERROR,
    1576             :   .error_counters = esp_decrypt_error_counters,
    1577             : 
    1578             :   .sibling_of = "esp4-decrypt-tun",
    1579             : };
    1580             : 
    1581      183788 : VLIB_REGISTER_NODE (esp6_decrypt_tun_node) = {
    1582             :   .name = "esp6-decrypt-tun",
    1583             :   .vector_size = sizeof (u32),
    1584             :   .format_trace = format_esp_decrypt_trace,
    1585             :   .type = VLIB_NODE_TYPE_INTERNAL,
    1586             :   .n_errors = ESP_DECRYPT_N_ERROR,
    1587             :   .error_counters = esp_decrypt_error_counters,
    1588             :   .n_next_nodes = ESP_DECRYPT_N_NEXT,
    1589             :   .next_nodes = {
    1590             :     [ESP_DECRYPT_NEXT_DROP] = "ip6-drop",
    1591             :     [ESP_DECRYPT_NEXT_IP4_INPUT] = "ip4-input-no-checksum",
    1592             :     [ESP_DECRYPT_NEXT_IP6_INPUT] = "ip6-input",
    1593             :     [ESP_DECRYPT_NEXT_MPLS_INPUT] = "mpls-input",
    1594             :     [ESP_DECRYPT_NEXT_L2_INPUT] = "l2-input",
     1595             :     [ESP_DECRYPT_NEXT_HANDOFF] = "esp6-decrypt-tun-handoff",
    1596             :   },
    1597             : };
    1598             : 
    1599      183788 : VLIB_REGISTER_NODE (esp6_decrypt_tun_post_node) = {
    1600             :   .name = "esp6-decrypt-tun-post",
    1601             :   .vector_size = sizeof (u32),
    1602             :   .format_trace = format_esp_decrypt_trace,
    1603             :   .type = VLIB_NODE_TYPE_INTERNAL,
    1604             : 
    1605             :   .n_errors = ESP_DECRYPT_N_ERROR,
    1606             :   .error_counters = esp_decrypt_error_counters,
    1607             : 
    1608             :   .sibling_of = "esp6-decrypt-tun",
    1609             : };
    1610             : /* *INDENT-ON* */
    1611             : 
    1612             : #ifndef CLIB_MARCH_VARIANT
    1613             : 
    1614             : static clib_error_t *
    1615         575 : esp_decrypt_init (vlib_main_t *vm)
    1616             : {
    1617         575 :   ipsec_main_t *im = &ipsec_main;
    1618             : 
    1619         575 :   im->esp4_dec_fq_index =
    1620         575 :     vlib_frame_queue_main_init (esp4_decrypt_node.index, 0);
    1621         575 :   im->esp6_dec_fq_index =
    1622         575 :     vlib_frame_queue_main_init (esp6_decrypt_node.index, 0);
    1623         575 :   im->esp4_dec_tun_fq_index =
    1624         575 :     vlib_frame_queue_main_init (esp4_decrypt_tun_node.index, 0);
    1625         575 :   im->esp6_dec_tun_fq_index =
    1626         575 :     vlib_frame_queue_main_init (esp6_decrypt_tun_node.index, 0);
    1627             : 
    1628         575 :   return 0;
    1629             : }
    1630             : 
    1631       55295 : VLIB_INIT_FUNCTION (esp_decrypt_init);
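
/*
 * Editor's note -- sketch, not part of the instrumented source.  The frame
 * queues allocated in esp_decrypt_init() above back the "esp*-decrypt-handoff"
 * next nodes registered earlier: when esp_decrypt_inline() finds that a
 * packet's SA is owned by another thread (the sa0->thread_index check), the
 * buffer is sent to ESP_DECRYPT_NEXT_HANDOFF and moved to the owning thread
 * through one of these queues.  Registering a queue for some other feature
 * node uses the same call; everything named "example_*" below is hypothetical.
 */
typedef struct
{
  u32 fq_index;
} example_feature_main_t;

static example_feature_main_t example_feature_main;
extern vlib_node_registration_t example_feature_node;

static clib_error_t *
example_feature_init (vlib_main_t *vm)
{
  /* second argument 0 selects the default number of frame-queue elements */
  example_feature_main.fq_index =
    vlib_frame_queue_main_init (example_feature_node.index, 0);
  return 0;
}

VLIB_INIT_FUNCTION (example_feature_init);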
    1632             : 
    1633             : #endif
    1634             : 
    1635             : /*
    1636             :  * fd.io coding-style-patch-verification: ON
    1637             :  *
    1638             :  * Local Variables:
    1639             :  * eval: (c-set-style "gnu")
    1640             :  * End:
    1641             :  */

Generated by: LCOV version 1.14