Line data Source code
1 : /*
2 : * Copyright (c) 2015 Cisco and/or its affiliates.
3 : * Licensed under the Apache License, Version 2.0 (the "License");
4 : * you may not use this file except in compliance with the License.
5 : * You may obtain a copy of the License at:
6 : *
7 : * http://www.apache.org/licenses/LICENSE-2.0
8 : *
9 : * Unless required by applicable law or agreed to in writing, software
10 : * distributed under the License is distributed on an "AS IS" BASIS,
11 : * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 : * See the License for the specific language governing permissions and
13 : * limitations under the License.
14 : */
15 : /*
16 : * buffer_funcs.h: VLIB buffer related functions/inlines
17 : *
18 : * Copyright (c) 2008 Eliot Dresselhaus
19 : *
20 : * Permission is hereby granted, free of charge, to any person obtaining
21 : * a copy of this software and associated documentation files (the
22 : * "Software"), to deal in the Software without restriction, including
23 : * without limitation the rights to use, copy, modify, merge, publish,
24 : * distribute, sublicense, and/or sell copies of the Software, and to
25 : * permit persons to whom the Software is furnished to do so, subject to
26 : * the following conditions:
27 : *
28 : * The above copyright notice and this permission notice shall be
29 : * included in all copies or substantial portions of the Software.
30 : *
31 : * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
32 : * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
33 : * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
34 : * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
35 : * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
36 : * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
37 : * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
38 : */
39 :
40 : #ifndef included_vlib_buffer_funcs_h
41 : #define included_vlib_buffer_funcs_h
42 :
43 : #include <vppinfra/hash.h>
44 : #include <vppinfra/fifo.h>
45 : #include <vppinfra/vector/index_to_ptr.h>
46 : #include <vlib/buffer.h>
47 : #include <vlib/physmem_funcs.h>
48 : #include <vlib/main.h>
49 : #include <vlib/node.h>
50 :
51 : /** \file
52 : vlib buffer access methods.
53 : */
54 :
55 : typedef void (vlib_buffer_enqueue_to_next_fn_t) (vlib_main_t *vm,
56 : vlib_node_runtime_t *node,
57 : u32 *buffers, u16 *nexts,
58 : uword count);
59 : typedef void (vlib_buffer_enqueue_to_next_with_aux_fn_t) (
60 : vlib_main_t *vm, vlib_node_runtime_t *node, u32 *buffers, u32 *aux_data,
61 : u16 *nexts, uword count);
62 : typedef void (vlib_buffer_enqueue_to_single_next_fn_t) (
63 : vlib_main_t *vm, vlib_node_runtime_t *node, u32 *ers, u16 next_index,
64 : u32 count);
65 :
66 : typedef void (vlib_buffer_enqueue_to_single_next_with_aux_fn_t) (
67 : vlib_main_t *vm, vlib_node_runtime_t *node, u32 *ers, u32 *aux_data,
68 : u16 next_index, u32 count);
69 :
70 : typedef u32 (vlib_buffer_enqueue_to_thread_fn_t) (
71 : vlib_main_t *vm, vlib_node_runtime_t *node, u32 frame_queue_index,
72 : u32 *buffer_indices, u16 *thread_indices, u32 n_packets,
73 : int drop_on_congestion);
74 :
75 : typedef u32 (vlib_buffer_enqueue_to_thread_with_aux_fn_t) (
76 : vlib_main_t *vm, vlib_node_runtime_t *node, u32 frame_queue_index,
77 : u32 *buffer_indices, u32 *aux, u16 *thread_indices, u32 n_packets,
78 : int drop_on_congestion);
79 :
80 : typedef struct
81 : {
82 : vlib_buffer_enqueue_to_next_fn_t *buffer_enqueue_to_next_fn;
83 : vlib_buffer_enqueue_to_next_with_aux_fn_t
84 : *buffer_enqueue_to_next_with_aux_fn;
85 : vlib_buffer_enqueue_to_single_next_fn_t *buffer_enqueue_to_single_next_fn;
86 : vlib_buffer_enqueue_to_single_next_with_aux_fn_t
87 : *buffer_enqueue_to_single_next_with_aux_fn;
88 : vlib_buffer_enqueue_to_thread_fn_t *buffer_enqueue_to_thread_fn;
89 : vlib_buffer_enqueue_to_thread_with_aux_fn_t
90 : *buffer_enqueue_to_thread_with_aux_fn;
91 : } vlib_buffer_func_main_t;
92 :
93 : extern vlib_buffer_func_main_t vlib_buffer_func_main;
94 :
95 : always_inline void
96 1744394740 : vlib_buffer_validate (vlib_main_t * vm, vlib_buffer_t * b)
97 : {
98 1744394740 : vlib_buffer_main_t *bm = vm->buffer_main;
99 : vlib_buffer_pool_t *bp;
100 :
101 : /* reference count in allocated buffer always must be 1 or higher */
102 1744394740 : ASSERT (b->ref_count > 0);
103 :
104 : /* verify that buffer pool index is valid */
105 1744395232 : bp = vec_elt_at_index (bm->buffer_pools, b->buffer_pool_index);
106 1744394920 : ASSERT (pointer_to_uword (b) >= bp->start);
107 1744394969 : ASSERT (pointer_to_uword (b) < bp->start + bp->size -
108 : (bp->data_size + sizeof (vlib_buffer_t)));
109 1744394969 : }
110 :
111 : always_inline void *
112 1525165219 : vlib_buffer_ptr_from_index (uword buffer_mem_start, u32 buffer_index,
113 : uword offset)
114 : {
115 1525165219 : offset += ((uword) buffer_index) << CLIB_LOG2_CACHE_LINE_BYTES;
116 1525165219 : return uword_to_pointer (buffer_mem_start + offset, vlib_buffer_t *);
117 : }
118 :
119 : /** \brief Translate buffer index into buffer pointer
120 :
121 : @param vm - (vlib_main_t *) vlib main data structure pointer
122 : @param buffer_index - (u32) buffer index
123 : @return - (vlib_buffer_t *) buffer pointer
124 : */
125 : always_inline vlib_buffer_t *
126 1525165017 : vlib_get_buffer (vlib_main_t * vm, u32 buffer_index)
127 : {
128 1525165017 : vlib_buffer_main_t *bm = vm->buffer_main;
129 : vlib_buffer_t *b;
130 :
131 1525165017 : b = vlib_buffer_ptr_from_index (bm->buffer_mem_start, buffer_index, 0);
132 1525165305 : vlib_buffer_validate (vm, b);
133 1525165700 : return b;
134 : }
135 :
136 : static_always_inline u32
137 162155070 : vlib_buffer_get_default_data_size (vlib_main_t * vm)
138 : {
139 162155070 : return vm->buffer_main->default_data_size;
140 : }
141 :
142 : static_always_inline void
143 42373485 : vlib_buffer_copy_indices (u32 * dst, u32 * src, u32 n_indices)
144 : {
145 42373485 : clib_memcpy_u32 (dst, src, n_indices);
146 42373490 : }
147 :
148 : always_inline void
149 : vlib_buffer_copy_indices_from_ring (u32 * dst, u32 * ring, u32 start,
150 : u32 ring_size, u32 n_buffers)
151 : {
152 : ASSERT (n_buffers <= ring_size);
153 :
154 : if (PREDICT_TRUE (start + n_buffers <= ring_size))
155 : {
156 : vlib_buffer_copy_indices (dst, ring + start, n_buffers);
157 : }
158 : else
159 : {
160 : u32 n = ring_size - start;
161 : vlib_buffer_copy_indices (dst, ring + start, n);
162 : vlib_buffer_copy_indices (dst + n, ring, n_buffers - n);
163 : }
164 : }
165 :
166 : always_inline void
167 : vlib_buffer_copy_indices_to_ring (u32 * ring, u32 * src, u32 start,
168 : u32 ring_size, u32 n_buffers)
169 : {
170 : ASSERT (n_buffers <= ring_size);
171 :
172 : if (PREDICT_TRUE (start + n_buffers <= ring_size))
173 : {
174 : vlib_buffer_copy_indices (ring + start, src, n_buffers);
175 : }
176 : else
177 : {
178 : u32 n = ring_size - start;
179 : vlib_buffer_copy_indices (ring + start, src, n);
180 : vlib_buffer_copy_indices (ring, src + n, n_buffers - n);
181 : }
182 : }
183 :
184 : static_always_inline void
185 23 : vlib_buffer_copy_template (vlib_buffer_t * b, vlib_buffer_t * bt)
186 : {
187 : #if defined CLIB_HAVE_VEC512
188 0 : b->as_u8x64[0] = bt->as_u8x64[0];
189 : #elif defined (CLIB_HAVE_VEC256)
190 22 : b->as_u8x32[0] = bt->as_u8x32[0];
191 22 : b->as_u8x32[1] = bt->as_u8x32[1];
192 : #elif defined (CLIB_HAVE_VEC128)
193 1 : b->as_u8x16[0] = bt->as_u8x16[0];
194 1 : b->as_u8x16[1] = bt->as_u8x16[1];
195 1 : b->as_u8x16[2] = bt->as_u8x16[2];
196 1 : b->as_u8x16[3] = bt->as_u8x16[3];
197 : #else
198 : clib_memcpy_fast (b, bt, 64);
199 : #endif
200 23 : }
201 :
202 : always_inline u8
203 25562281 : vlib_buffer_pool_get_default_for_numa (vlib_main_t * vm, u32 numa_node)
204 : {
205 25562281 : ASSERT (numa_node < VLIB_BUFFER_MAX_NUMA_NODES);
206 25562281 : return vm->buffer_main->default_buffer_pool_index_for_numa[numa_node];
207 : }
208 :
209 : /** \brief Translate array of buffer indices into buffer pointers with offset
210 :
211 : @param vm - (vlib_main_t *) vlib main data structure pointer
212 : @param bi - (u32 *) array of buffer indices
213 : @param b - (void **) array to store buffer pointers
214 : @param count - (uword) number of elements
215 : @param offset - (i32) offset applied to each pointer
216 : */
217 : static_always_inline void
218 73526230 : vlib_get_buffers_with_offset (vlib_main_t *vm, u32 *bi, void **b, u32 count,
219 : i32 offset)
220 : {
221 73526230 : uword buffer_mem_start = vm->buffer_main->buffer_mem_start;
222 73526230 : void *base = (void *) (buffer_mem_start + offset);
223 73526230 : int objsize = __builtin_object_size (b, 0);
224 73526230 : const int sh = CLIB_LOG2_CACHE_LINE_BYTES;
225 :
226 73526230 : if (COMPILE_TIME_CONST (count) == 0 && objsize >= 64 * sizeof (b[0]) &&
227 73526231 : (objsize & ((8 * sizeof (b[0])) - 1)) == 0)
228 0 : {
229 0 : u32 n = round_pow2 (count, 8);
230 0 : ASSERT (objsize >= count);
231 0 : CLIB_ASSUME (objsize >= count);
232 0 : while (n >= 64)
233 : {
234 0 : clib_index_to_ptr_u32 (bi, base, sh, b, 64);
235 0 : b += 64;
236 0 : bi += 64;
237 0 : n -= 64;
238 : }
239 :
240 0 : while (n)
241 : {
242 0 : clib_index_to_ptr_u32 (bi, base, sh, b, 8);
243 0 : b += 8;
244 0 : bi += 8;
245 0 : n -= 8;
246 : }
247 : }
248 : else
249 73526230 : clib_index_to_ptr_u32 (bi, base, sh, b, count);
250 73526229 : }
251 :
252 : /** \brief Translate array of buffer indices into buffer pointers
253 :
254 : @param vm - (vlib_main_t *) vlib main data structure pointer
255 : @param bi - (u32 *) array of buffer indices
256 : @param b - (vlib_buffer_t **) array to store buffer pointers
257 : @param count - (uword) number of elements
258 : */
259 :
260 : static_always_inline void
261 73526231 : vlib_get_buffers (vlib_main_t *vm, u32 *bi, vlib_buffer_t **b, u32 count)
262 : {
263 73526231 : vlib_get_buffers_with_offset (vm, bi, (void **) b, count, 0);
264 73526229 : }
265 :
266 : /** \brief Translate buffer pointer into buffer index
267 :
268 : @param vm - (vlib_main_t *) vlib main data structure pointer
269 : @param p - (void *) buffer pointer
270 : @return - (u32) buffer index
271 : */
272 :
273 : always_inline u32
274 3447639 : vlib_get_buffer_index (vlib_main_t * vm, void *p)
275 : {
276 3447639 : vlib_buffer_main_t *bm = vm->buffer_main;
277 3447639 : uword offset = pointer_to_uword (p) - bm->buffer_mem_start;
278 3447639 : ASSERT (pointer_to_uword (p) >= bm->buffer_mem_start);
279 3447639 : ASSERT (offset < bm->buffer_mem_size);
280 3447639 : ASSERT ((offset % (1 << CLIB_LOG2_CACHE_LINE_BYTES)) == 0);
281 3447639 : return offset >> CLIB_LOG2_CACHE_LINE_BYTES;
282 : }
283 :
284 : /** \brief Translate array of buffer pointers into buffer indices with offset
285 :
286 : @param vm - (vlib_main_t *) vlib main data structure pointer
287 : @param b - (void **) array of buffer pointers
288 : @param bi - (u32 *) array to store buffer indices
289 : @param count - (uword) number of elements
290 : @param offset - (i32) offset applied to each pointer
291 : */
292 : static_always_inline void
293 : vlib_get_buffer_indices_with_offset (vlib_main_t * vm, void **b, u32 * bi,
294 : uword count, i32 offset)
295 : {
296 : #ifdef CLIB_HAVE_VEC256
297 : u32x8 mask = { 0, 2, 4, 6, 1, 3, 5, 7 };
298 : u64x4 off4 = u64x4_splat (vm->buffer_main->buffer_mem_start - offset);
299 :
300 : while (count >= 8)
301 : {
302 : /* load 4 pointers into 256-bit register */
303 : u64x4 v0 = u64x4_load_unaligned (b);
304 : u64x4 v1 = u64x4_load_unaligned (b + 4);
305 : u32x8 v2, v3;
306 :
307 : v0 -= off4;
308 : v1 -= off4;
309 :
310 : v0 >>= CLIB_LOG2_CACHE_LINE_BYTES;
311 : v1 >>= CLIB_LOG2_CACHE_LINE_BYTES;
312 :
313 : /* permute 256-bit register so lower u32s of each buffer index are
314 : * placed into lower 128-bits */
315 : v2 = u32x8_permute ((u32x8) v0, mask);
316 : v3 = u32x8_permute ((u32x8) v1, mask);
317 :
318 : /* extract lower 128-bits and save them to the array of buffer indices */
319 : u32x4_store_unaligned (u32x8_extract_lo (v2), bi);
320 : u32x4_store_unaligned (u32x8_extract_lo (v3), bi + 4);
321 : bi += 8;
322 : b += 8;
323 : count -= 8;
324 : }
325 : #endif
326 : while (count >= 4)
327 : {
328 : /* equivalent non-nector implementation */
329 : bi[0] = vlib_get_buffer_index (vm, ((u8 *) b[0]) + offset);
330 : bi[1] = vlib_get_buffer_index (vm, ((u8 *) b[1]) + offset);
331 : bi[2] = vlib_get_buffer_index (vm, ((u8 *) b[2]) + offset);
332 : bi[3] = vlib_get_buffer_index (vm, ((u8 *) b[3]) + offset);
333 : bi += 4;
334 : b += 4;
335 : count -= 4;
336 : }
337 : while (count)
338 : {
339 : bi[0] = vlib_get_buffer_index (vm, ((u8 *) b[0]) + offset);
340 : bi += 1;
341 : b += 1;
342 : count -= 1;
343 : }
344 : }
345 :
346 : /** \brief Translate array of buffer pointers into buffer indices
347 :
348 : @param vm - (vlib_main_t *) vlib main data structure pointer
349 : @param b - (vlib_buffer_t **) array of buffer pointers
350 : @param bi - (u32 *) array to store buffer indices
351 : @param count - (uword) number of elements
352 : */
353 : static_always_inline void
354 : vlib_get_buffer_indices (vlib_main_t * vm, vlib_buffer_t ** b, u32 * bi,
355 : uword count)
356 : {
357 : vlib_get_buffer_indices_with_offset (vm, (void **) b, bi, count, 0);
358 : }
359 :
360 : /** \brief Get next buffer in buffer linklist, or zero for end of list.
361 :
362 : @param vm - (vlib_main_t *) vlib main data structure pointer
363 : @param b - (void *) buffer pointer
364 : @return - (vlib_buffer_t *) next buffer, or NULL
365 : */
366 : always_inline vlib_buffer_t *
367 721894 : vlib_get_next_buffer (vlib_main_t * vm, vlib_buffer_t * b)
368 : {
369 721894 : return (b->flags & VLIB_BUFFER_NEXT_PRESENT
370 721894 : ? vlib_get_buffer (vm, b->next_buffer) : 0);
371 : }
372 :
373 : uword vlib_buffer_length_in_chain_slow_path (vlib_main_t * vm,
374 : vlib_buffer_t * b_first);
375 :
376 : /** \brief Get length in bytes of the buffer chain
377 :
378 : @param vm - (vlib_main_t *) vlib main data structure pointer
379 : @param b - (void *) buffer pointer
380 : @return - (uword) length of buffer chain
381 : */
382 : always_inline uword
383 177766533 : vlib_buffer_length_in_chain (vlib_main_t * vm, vlib_buffer_t * b)
384 : {
385 177766533 : uword len = b->current_length;
386 :
387 177766533 : if (PREDICT_TRUE ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0))
388 81775262 : return len;
389 :
390 95991475 : if (PREDICT_TRUE (b->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID))
391 95726371 : return len + b->total_length_not_including_first_buffer;
392 :
393 265023 : return vlib_buffer_length_in_chain_slow_path (vm, b);
394 : }
395 :
396 : /** \brief Get length in bytes of the buffer index buffer chain
397 :
398 : @param vm - (vlib_main_t *) vlib main data structure pointer
399 : @param bi - (u32) buffer index
400 : @return - (uword) length of buffer chain
401 : */
402 : always_inline uword
403 559865 : vlib_buffer_index_length_in_chain (vlib_main_t * vm, u32 bi)
404 : {
405 559865 : vlib_buffer_t *b = vlib_get_buffer (vm, bi);
406 559860 : return vlib_buffer_length_in_chain (vm, b);
407 : }
408 :
409 : /** \brief Copy buffer contents to memory
410 :
411 : @param vm - (vlib_main_t *) vlib main data structure pointer
412 : @param buffer_index - (u32) buffer index
413 : @param contents - (u8 *) memory, <strong>must be large enough</strong>
414 : @return - (uword) length of buffer chain
415 : */
416 : always_inline uword
417 26 : vlib_buffer_contents (vlib_main_t * vm, u32 buffer_index, u8 * contents)
418 : {
419 26 : uword content_len = 0;
420 : uword l;
421 : vlib_buffer_t *b;
422 :
423 : while (1)
424 : {
425 27 : b = vlib_get_buffer (vm, buffer_index);
426 27 : l = b->current_length;
427 27 : clib_memcpy_fast (contents + content_len, b->data + b->current_data, l);
428 27 : content_len += l;
429 27 : if (!(b->flags & VLIB_BUFFER_NEXT_PRESENT))
430 26 : break;
431 1 : buffer_index = b->next_buffer;
432 : }
433 :
434 26 : return content_len;
435 : }
436 :
437 : always_inline uword
438 2 : vlib_buffer_get_pa (vlib_main_t * vm, vlib_buffer_t * b)
439 : {
440 2 : return vlib_physmem_get_pa (vm, b->data);
441 : }
442 :
443 : always_inline uword
444 1 : vlib_buffer_get_current_pa (vlib_main_t * vm, vlib_buffer_t * b)
445 : {
446 1 : return vlib_buffer_get_pa (vm, b) + b->current_data;
447 : }
448 :
/** \brief Prefetch buffer metadata by buffer index
    The first 64 bytes of buffer contains most header information

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param bi - (u32) buffer index
    @param type - LOAD, STORE. In most cases, STORE is the right answer
*/
/* Prefetch buffer header given index. */
#define vlib_prefetch_buffer_with_index(vm, bi, type)                         \
  do                                                                          \
    {                                                                         \
      vlib_buffer_t *_b = vlib_get_buffer (vm, bi);                           \
      vlib_prefetch_buffer_header (_b, type);                                 \
    }                                                                         \
  while (0)
462 :
/* Tracked allocation state of a buffer index (debug bookkeeping). */
typedef enum
{
  VLIB_BUFFER_UNKNOWN,		/* index not tracked */
  VLIB_BUFFER_KNOWN_FREE,	/* index tracked and currently free */
  VLIB_BUFFER_KNOWN_ALLOCATED,	/* index tracked and currently allocated */
} vlib_buffer_known_state_t;
472 :
473 : void vlib_buffer_validate_alloc_free (vlib_main_t * vm, u32 * buffers,
474 : uword n_buffers,
475 : vlib_buffer_known_state_t
476 : expected_state);
477 :
478 : always_inline vlib_buffer_known_state_t
479 438454000 : vlib_buffer_is_known (vlib_main_t * vm, u32 buffer_index)
480 : {
481 438454000 : vlib_buffer_main_t *bm = vm->buffer_main;
482 :
483 438454000 : clib_spinlock_lock (&bm->buffer_known_hash_lockp);
484 438454000 : uword *p = hash_get (bm->buffer_known_hash, buffer_index);
485 438454000 : clib_spinlock_unlock (&bm->buffer_known_hash_lockp);
486 438454000 : return p ? p[0] : VLIB_BUFFER_UNKNOWN;
487 : }
488 :
489 : /* Validates sanity of a single buffer.
490 : Returns format'ed vector with error message if any. */
491 : u8 *vlib_validate_buffer (vlib_main_t * vm, u32 buffer_index,
492 : uword follow_chain);
493 :
494 : u8 *vlib_validate_buffers (vlib_main_t * vm,
495 : u32 * buffers,
496 : uword next_buffer_stride,
497 : uword n_buffers,
498 : vlib_buffer_known_state_t known_state,
499 : uword follow_buffer_next);
500 :
501 : static_always_inline vlib_buffer_pool_t *
502 6315688 : vlib_get_buffer_pool (vlib_main_t * vm, u8 buffer_pool_index)
503 : {
504 6315688 : vlib_buffer_main_t *bm = vm->buffer_main;
505 6315688 : return vec_elt_at_index (bm->buffer_pools, buffer_pool_index);
506 : }
507 :
508 : static_always_inline __clib_warn_unused_result uword
509 89644 : vlib_buffer_pool_get (vlib_main_t * vm, u8 buffer_pool_index, u32 * buffers,
510 : u32 n_buffers)
511 : {
512 89644 : vlib_buffer_pool_t *bp = vlib_get_buffer_pool (vm, buffer_pool_index);
513 : u32 len;
514 :
515 89644 : ASSERT (bp->buffers);
516 :
517 89644 : clib_spinlock_lock (&bp->lock);
518 89648 : len = bp->n_avail;
519 89648 : if (PREDICT_TRUE (n_buffers < len))
520 : {
521 89648 : len -= n_buffers;
522 89648 : vlib_buffer_copy_indices (buffers, bp->buffers + len, n_buffers);
523 89648 : bp->n_avail = len;
524 89648 : clib_spinlock_unlock (&bp->lock);
525 89648 : return n_buffers;
526 : }
527 : else
528 : {
529 0 : vlib_buffer_copy_indices (buffers, bp->buffers, len);
530 0 : bp->n_avail = 0;
531 0 : clib_spinlock_unlock (&bp->lock);
532 0 : return len;
533 : }
534 : }
535 :
536 :
537 : /** \brief Allocate buffers from specific pool into supplied array
538 :
539 : @param vm - (vlib_main_t *) vlib main data structure pointer
540 : @param buffers - (u32 * ) buffer index array
541 : @param n_buffers - (u32) number of buffers requested
542 : @return - (u32) number of buffers actually allocated, may be
543 : less than the number requested or zero
544 : */
545 :
546 : always_inline __clib_warn_unused_result u32
547 28424361 : vlib_buffer_alloc_from_pool (vlib_main_t * vm, u32 * buffers, u32 n_buffers,
548 : u8 buffer_pool_index)
549 : {
550 28424361 : vlib_buffer_main_t *bm = vm->buffer_main;
551 : vlib_buffer_pool_t *bp;
552 : vlib_buffer_pool_thread_t *bpt;
553 : u32 *src, *dst, len, n_left;
554 :
555 : /* If buffer allocation fault injection is configured */
556 : if (VLIB_BUFFER_ALLOC_FAULT_INJECTOR > 0)
557 : {
558 : u32 vlib_buffer_alloc_may_fail (vlib_main_t *, u32);
559 :
560 : /* See how many buffers we're willing to allocate */
561 : n_buffers = vlib_buffer_alloc_may_fail (vm, n_buffers);
562 : if (n_buffers == 0)
563 : return (n_buffers);
564 : }
565 :
566 28424361 : bp = vec_elt_at_index (bm->buffer_pools, buffer_pool_index);
567 28424362 : bpt = vec_elt_at_index (bp->threads, vm->thread_index);
568 :
569 28424365 : dst = buffers;
570 28424365 : n_left = n_buffers;
571 28424365 : len = bpt->n_cached;
572 :
573 : /* per-thread cache contains enough buffers */
574 28424365 : if (len >= n_buffers)
575 : {
576 28334765 : src = bpt->cached_buffers + len - n_buffers;
577 28334765 : vlib_buffer_copy_indices (dst, src, n_buffers);
578 28334765 : bpt->n_cached -= n_buffers;
579 28334765 : goto done;
580 : }
581 :
582 : /* alloc bigger than cache - take buffers directly from main pool */
583 89644 : if (n_buffers >= VLIB_BUFFER_POOL_PER_THREAD_CACHE_SZ)
584 : {
585 24 : n_buffers = vlib_buffer_pool_get (vm, buffer_pool_index, buffers,
586 : n_buffers);
587 24 : goto done;
588 : }
589 :
590 : /* take everything available in the cache */
591 89620 : if (len)
592 : {
593 59515 : vlib_buffer_copy_indices (dst, bpt->cached_buffers, len);
594 59515 : bpt->n_cached = 0;
595 59515 : dst += len;
596 59515 : n_left -= len;
597 : }
598 :
599 89620 : len = round_pow2 (n_left, 32);
600 89620 : len = vlib_buffer_pool_get (vm, buffer_pool_index, bpt->cached_buffers,
601 : len);
602 89624 : bpt->n_cached = len;
603 :
604 89624 : if (len)
605 : {
606 89624 : u32 n_copy = clib_min (len, n_left);
607 89624 : src = bpt->cached_buffers + len - n_copy;
608 89624 : vlib_buffer_copy_indices (dst, src, n_copy);
609 89624 : bpt->n_cached -= n_copy;
610 89624 : n_left -= n_copy;
611 : }
612 :
613 89624 : n_buffers -= n_left;
614 :
615 28424369 : done:
616 : /* Verify that buffers are known free. */
617 : if (CLIB_DEBUG > 0)
618 28424369 : vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
619 : VLIB_BUFFER_KNOWN_FREE);
620 28424371 : if (PREDICT_FALSE (bm->alloc_callback_fn != 0))
621 0 : bm->alloc_callback_fn (vm, buffer_pool_index, buffers, n_buffers);
622 28424371 : return n_buffers;
623 : }
624 :
625 : /** \brief Allocate buffers from specific numa node into supplied array
626 :
627 : @param vm - (vlib_main_t *) vlib main data structure pointer
628 : @param buffers - (u32 * ) buffer index array
629 : @param n_buffers - (u32) number of buffers requested
630 : @param numa_node - (u32) numa node
631 : @return - (u32) number of buffers actually allocated, may be
632 : less than the number requested or zero
633 : */
634 : always_inline __clib_warn_unused_result u32
635 25561895 : vlib_buffer_alloc_on_numa (vlib_main_t * vm, u32 * buffers, u32 n_buffers,
636 : u32 numa_node)
637 : {
638 25561895 : u8 index = vlib_buffer_pool_get_default_for_numa (vm, numa_node);
639 25561894 : return vlib_buffer_alloc_from_pool (vm, buffers, n_buffers, index);
640 : }
641 :
642 : /** \brief Allocate buffers into supplied array
643 :
644 : @param vm - (vlib_main_t *) vlib main data structure pointer
645 : @param buffers - (u32 * ) buffer index array
646 : @param n_buffers - (u32) number of buffers requested
647 : @return - (u32) number of buffers actually allocated, may be
648 : less than the number requested or zero
649 : */
650 :
651 : always_inline __clib_warn_unused_result u32
652 25561895 : vlib_buffer_alloc (vlib_main_t * vm, u32 * buffers, u32 n_buffers)
653 : {
654 25561895 : return vlib_buffer_alloc_on_numa (vm, buffers, n_buffers, vm->numa_node);
655 : }
656 :
657 : /** \brief Allocate buffers into ring
658 :
659 : @param vm - (vlib_main_t *) vlib main data structure pointer
660 : @param buffers - (u32 * ) buffer index ring
661 : @param start - (u32) first slot in the ring
662 : @param ring_size - (u32) ring size
663 : @param n_buffers - (u32) number of buffers requested
664 : @return - (u32) number of buffers actually allocated, may be
665 : less than the number requested or zero
666 : */
667 : always_inline __clib_warn_unused_result u32
668 : vlib_buffer_alloc_to_ring (vlib_main_t * vm, u32 * ring, u32 start,
669 : u32 ring_size, u32 n_buffers)
670 : {
671 : u32 n_alloc;
672 :
673 : ASSERT (n_buffers <= ring_size);
674 :
675 : if (PREDICT_TRUE (start + n_buffers <= ring_size))
676 : return vlib_buffer_alloc (vm, ring + start, n_buffers);
677 :
678 : n_alloc = vlib_buffer_alloc (vm, ring + start, ring_size - start);
679 :
680 : if (PREDICT_TRUE (n_alloc == ring_size - start))
681 : n_alloc += vlib_buffer_alloc (vm, ring, n_buffers - n_alloc);
682 :
683 : return n_alloc;
684 : }
685 :
686 : /** \brief Allocate buffers into ring from specific buffer pool
687 :
688 : @param vm - (vlib_main_t *) vlib main data structure pointer
689 : @param buffers - (u32 * ) buffer index ring
690 : @param start - (u32) first slot in the ring
691 : @param ring_size - (u32) ring size
692 : @param n_buffers - (u32) number of buffers requested
693 : @return - (u32) number of buffers actually allocated, may be
694 : less than the number requested or zero
695 : */
696 : always_inline __clib_warn_unused_result u32
697 2315255 : vlib_buffer_alloc_to_ring_from_pool (vlib_main_t * vm, u32 * ring, u32 start,
698 : u32 ring_size, u32 n_buffers,
699 : u8 buffer_pool_index)
700 : {
701 : u32 n_alloc;
702 :
703 2315255 : ASSERT (n_buffers <= ring_size);
704 :
705 2315255 : if (PREDICT_TRUE (start + n_buffers <= ring_size))
706 1768285 : return vlib_buffer_alloc_from_pool (vm, ring + start, n_buffers,
707 : buffer_pool_index);
708 :
709 546971 : n_alloc = vlib_buffer_alloc_from_pool (vm, ring + start, ring_size - start,
710 : buffer_pool_index);
711 :
712 546971 : if (PREDICT_TRUE (n_alloc == ring_size - start))
713 546971 : n_alloc += vlib_buffer_alloc_from_pool (vm, ring, n_buffers - n_alloc,
714 : buffer_pool_index);
715 :
716 546971 : return n_alloc;
717 : }
718 :
719 : static_always_inline void
720 3591606 : vlib_buffer_pool_put (vlib_main_t * vm, u8 buffer_pool_index,
721 : u32 * buffers, u32 n_buffers)
722 : {
723 3591606 : vlib_buffer_main_t *bm = vm->buffer_main;
724 3591606 : vlib_buffer_pool_t *bp = vlib_get_buffer_pool (vm, buffer_pool_index);
725 3591606 : vlib_buffer_pool_thread_t *bpt = vec_elt_at_index (bp->threads,
726 : vm->thread_index);
727 : u32 n_cached, n_empty;
728 :
729 : if (CLIB_DEBUG > 0)
730 3591606 : vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
731 : VLIB_BUFFER_KNOWN_ALLOCATED);
732 3591606 : if (PREDICT_FALSE (bm->free_callback_fn != 0))
733 0 : bm->free_callback_fn (vm, buffer_pool_index, buffers, n_buffers);
734 :
735 3591606 : n_cached = bpt->n_cached;
736 3591606 : n_empty = VLIB_BUFFER_POOL_PER_THREAD_CACHE_SZ - n_cached;
737 3591606 : if (n_buffers <= n_empty)
738 : {
739 3441752 : vlib_buffer_copy_indices (bpt->cached_buffers + n_cached,
740 : buffers, n_buffers);
741 3441752 : bpt->n_cached = n_cached + n_buffers;
742 3441752 : return;
743 : }
744 :
745 149852 : vlib_buffer_copy_indices (bpt->cached_buffers + n_cached,
746 149852 : buffers + n_buffers - n_empty, n_empty);
747 149852 : bpt->n_cached = VLIB_BUFFER_POOL_PER_THREAD_CACHE_SZ;
748 :
749 149852 : clib_spinlock_lock (&bp->lock);
750 149852 : vlib_buffer_copy_indices (bp->buffers + bp->n_avail, buffers,
751 : n_buffers - n_empty);
752 149852 : bp->n_avail += n_buffers - n_empty;
753 149852 : clib_spinlock_unlock (&bp->lock);
754 : }
755 :
756 : static_always_inline void
757 2636795 : vlib_buffer_free_inline (vlib_main_t * vm, u32 * buffers, u32 n_buffers,
758 : int maybe_next)
759 2636795 : {
760 2636795 : const int queue_size = 128;
761 2636795 : vlib_buffer_pool_t *bp = 0;
762 2636795 : u8 buffer_pool_index = ~0;
763 2636795 : u32 n_queue = 0, queue[queue_size + 4];
764 2636795 : vlib_buffer_template_t bt = {};
765 : #if defined(CLIB_HAVE_VEC128)
766 2636795 : vlib_buffer_t bpi_mask = {.buffer_pool_index = ~0 };
767 2636795 : vlib_buffer_t bpi_vec = {};
768 2636795 : vlib_buffer_t flags_refs_mask = {
769 : .flags = VLIB_BUFFER_NEXT_PRESENT,
770 : .ref_count = ~1
771 : };
772 : #endif
773 :
774 2636795 : if (PREDICT_FALSE (n_buffers == 0))
775 2355 : return;
776 :
777 2634440 : vlib_buffer_t *b = vlib_get_buffer (vm, buffers[0]);
778 2634440 : buffer_pool_index = b->buffer_pool_index;
779 2634440 : bp = vlib_get_buffer_pool (vm, buffer_pool_index);
780 2634454 : bt = bp->buffer_template;
781 : #if defined(CLIB_HAVE_VEC128)
782 2634454 : bpi_vec.buffer_pool_index = buffer_pool_index;
783 : #endif
784 :
785 41959242 : while (n_buffers)
786 : {
787 : vlib_buffer_t *b[8];
788 39324825 : u32 bi, sum = 0, flags, next;
789 :
790 : #if defined(CLIB_HAVE_VEC512)
791 0 : if (n_buffers < 8)
792 : #else
793 39324825 : if (n_buffers < 4)
794 : #endif
795 4965125 : goto one_by_one;
796 :
797 : #if defined(CLIB_HAVE_VEC512)
798 0 : vlib_get_buffers (vm, buffers, b, 8);
799 : #else
800 34359740 : vlib_get_buffers (vm, buffers, b, 4);
801 :
802 34359740 : if (n_buffers >= 12)
803 : {
804 27355059 : vlib_get_buffers (vm, buffers + 8, b + 4, 4);
805 27355059 : vlib_prefetch_buffer_header (b[4], LOAD);
806 27355059 : vlib_prefetch_buffer_header (b[5], LOAD);
807 27355059 : vlib_prefetch_buffer_header (b[6], LOAD);
808 27355059 : vlib_prefetch_buffer_header (b[7], LOAD);
809 : }
810 : #endif
811 :
812 : #if defined(CLIB_HAVE_VEC512)
813 : u8x16 p0, p1, p2, p3, p4, p5, p6, p7, r;
814 0 : p0 = u8x16_load_unaligned (b[0]);
815 0 : p1 = u8x16_load_unaligned (b[1]);
816 0 : p2 = u8x16_load_unaligned (b[2]);
817 0 : p3 = u8x16_load_unaligned (b[3]);
818 0 : p4 = u8x16_load_unaligned (b[4]);
819 0 : p5 = u8x16_load_unaligned (b[5]);
820 0 : p6 = u8x16_load_unaligned (b[6]);
821 0 : p7 = u8x16_load_unaligned (b[7]);
822 :
823 0 : r = p0 ^ bpi_vec.as_u8x16[0];
824 0 : r |= p1 ^ bpi_vec.as_u8x16[0];
825 0 : r |= p2 ^ bpi_vec.as_u8x16[0];
826 0 : r |= p3 ^ bpi_vec.as_u8x16[0];
827 0 : r |= p4 ^ bpi_vec.as_u8x16[0];
828 0 : r |= p5 ^ bpi_vec.as_u8x16[0];
829 0 : r |= p6 ^ bpi_vec.as_u8x16[0];
830 0 : r |= p7 ^ bpi_vec.as_u8x16[0];
831 0 : r &= bpi_mask.as_u8x16[0];
832 0 : r |=
833 0 : (p0 | p1 | p2 | p3 | p4 | p5 | p6 | p7) & flags_refs_mask.as_u8x16[0];
834 :
835 0 : sum = !u8x16_is_all_zero (r);
836 : #elif defined(CLIB_HAVE_VEC128)
837 : u8x16 p0, p1, p2, p3, r;
838 34359740 : p0 = u8x16_load_unaligned (b[0]);
839 34359740 : p1 = u8x16_load_unaligned (b[1]);
840 34359740 : p2 = u8x16_load_unaligned (b[2]);
841 34359740 : p3 = u8x16_load_unaligned (b[3]);
842 :
843 34359740 : r = p0 ^ bpi_vec.as_u8x16[0];
844 34359740 : r |= p1 ^ bpi_vec.as_u8x16[0];
845 34359740 : r |= p2 ^ bpi_vec.as_u8x16[0];
846 34359740 : r |= p3 ^ bpi_vec.as_u8x16[0];
847 34359740 : r &= bpi_mask.as_u8x16[0];
848 34359740 : r |= (p0 | p1 | p2 | p3) & flags_refs_mask.as_u8x16[0];
849 :
850 34359740 : sum = !u8x16_is_all_zero (r);
851 : #else
852 : sum |= b[0]->flags;
853 : sum |= b[1]->flags;
854 : sum |= b[2]->flags;
855 : sum |= b[3]->flags;
856 : sum &= VLIB_BUFFER_NEXT_PRESENT;
857 : sum += b[0]->ref_count - 1;
858 : sum += b[1]->ref_count - 1;
859 : sum += b[2]->ref_count - 1;
860 : sum += b[3]->ref_count - 1;
861 : sum |= b[0]->buffer_pool_index ^ buffer_pool_index;
862 : sum |= b[1]->buffer_pool_index ^ buffer_pool_index;
863 : sum |= b[2]->buffer_pool_index ^ buffer_pool_index;
864 : sum |= b[3]->buffer_pool_index ^ buffer_pool_index;
865 : #endif
866 :
867 34359740 : if (sum)
868 26500738 : goto one_by_one;
869 :
870 : #if defined(CLIB_HAVE_VEC512)
871 0 : vlib_buffer_copy_indices (queue + n_queue, buffers, 8);
872 0 : b[0]->template = bt;
873 0 : b[1]->template = bt;
874 0 : b[2]->template = bt;
875 0 : b[3]->template = bt;
876 0 : b[4]->template = bt;
877 0 : b[5]->template = bt;
878 0 : b[6]->template = bt;
879 0 : b[7]->template = bt;
880 0 : n_queue += 8;
881 :
882 0 : vlib_buffer_validate (vm, b[0]);
883 0 : vlib_buffer_validate (vm, b[1]);
884 0 : vlib_buffer_validate (vm, b[2]);
885 0 : vlib_buffer_validate (vm, b[3]);
886 0 : vlib_buffer_validate (vm, b[4]);
887 0 : vlib_buffer_validate (vm, b[5]);
888 0 : vlib_buffer_validate (vm, b[6]);
889 0 : vlib_buffer_validate (vm, b[7]);
890 :
891 : VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[0]);
892 : VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[1]);
893 : VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[2]);
894 : VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[3]);
895 : VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[4]);
896 : VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[5]);
897 : VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[6]);
898 : VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[7]);
899 : #else
900 7858982 : vlib_buffer_copy_indices (queue + n_queue, buffers, 4);
901 7858982 : b[0]->template = bt;
902 7858982 : b[1]->template = bt;
903 7858982 : b[2]->template = bt;
904 7858982 : b[3]->template = bt;
905 7858982 : n_queue += 4;
906 :
907 7858982 : vlib_buffer_validate (vm, b[0]);
908 7858982 : vlib_buffer_validate (vm, b[1]);
909 7858982 : vlib_buffer_validate (vm, b[2]);
910 7858982 : vlib_buffer_validate (vm, b[3]);
911 :
912 : VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[0]);
913 : VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[1]);
914 : VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[2]);
915 : VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[3]);
916 : #endif
917 :
918 7858982 : if (n_queue >= queue_size)
919 : {
920 63345 : vlib_buffer_pool_put (vm, buffer_pool_index, queue, n_queue);
921 63345 : n_queue = 0;
922 : }
923 : #if defined(CLIB_HAVE_VEC512)
924 0 : buffers += 8;
925 0 : n_buffers -= 8;
926 : #else
927 7858982 : buffers += 4;
928 7858982 : n_buffers -= 4;
929 : #endif
930 7858982 : continue;
931 :
932 31465813 : one_by_one:
933 31465813 : bi = buffers[0];
934 :
935 187789851 : next_in_chain:
936 187789851 : b[0] = vlib_get_buffer (vm, bi);
937 187789842 : flags = b[0]->flags;
938 187789842 : next = b[0]->next_buffer;
939 :
940 187789842 : if (PREDICT_FALSE (buffer_pool_index != b[0]->buffer_pool_index))
941 : {
942 :
943 0 : if (n_queue)
944 : {
945 0 : vlib_buffer_pool_put (vm, buffer_pool_index, queue, n_queue);
946 0 : n_queue = 0;
947 : }
948 :
949 0 : buffer_pool_index = b[0]->buffer_pool_index;
950 : #if defined(CLIB_HAVE_VEC128)
951 0 : bpi_vec.buffer_pool_index = buffer_pool_index;
952 : #endif
953 0 : bp = vlib_get_buffer_pool (vm, buffer_pool_index);
954 0 : bt = bp->buffer_template;
955 : }
956 :
957 187789842 : vlib_buffer_validate (vm, b[0]);
958 :
959 : VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[0]);
960 :
961 187789838 : if (clib_atomic_sub_fetch (&b[0]->ref_count, 1) == 0)
962 : {
963 187789147 : b[0]->template = bt;
964 187789147 : queue[n_queue++] = bi;
965 : }
966 :
967 187789838 : if (n_queue == queue_size)
968 : {
969 937513 : vlib_buffer_pool_put (vm, buffer_pool_index, queue, queue_size);
970 937513 : n_queue = 0;
971 : }
972 :
973 187789838 : if (maybe_next && (flags & VLIB_BUFFER_NEXT_PRESENT))
974 : {
975 156324132 : bi = next;
976 156324132 : goto next_in_chain;
977 : }
978 :
979 31465800 : buffers++;
980 31465800 : n_buffers--;
981 : }
982 :
983 2634441 : if (n_queue)
984 2590751 : vlib_buffer_pool_put (vm, buffer_pool_index, queue, n_queue);
985 : }
986 :
987 :
988 : /** \brief Free buffers
989 : Frees the entire buffer chain for each buffer
990 :
991 : @param vm - (vlib_main_t *) vlib main data structure pointer
992 : @param buffers - (u32 * ) buffer index array
993 : @param n_buffers - (u32) number of buffers to free
994 :
995 : */
996 : always_inline void
997 2432516 : vlib_buffer_free (vlib_main_t * vm,
998 : /* pointer to first buffer */
999 : u32 * buffers,
1000 : /* number of buffers to free */
1001 : u32 n_buffers)
1002 : {
1003 2432516 : vlib_buffer_free_inline (vm, buffers, n_buffers, /* maybe next */ 1);
1004 2432516 : }
1005 :
1006 : /** \brief Free buffers, does not free the buffer chain for each buffer
1007 :
1008 : @param vm - (vlib_main_t *) vlib main data structure pointer
1009 : @param buffers - (u32 * ) buffer index array
1010 : @param n_buffers - (u32) number of buffers to free
1011 :
1012 : */
1013 : always_inline void
1014 4 : vlib_buffer_free_no_next (vlib_main_t * vm,
1015 : /* pointer to first buffer */
1016 : u32 * buffers,
1017 : /* number of buffers to free */
1018 : u32 n_buffers)
1019 : {
1020 4 : vlib_buffer_free_inline (vm, buffers, n_buffers, /* maybe next */ 0);
1021 4 : }
1022 :
1023 : /** \brief Free one buffer
1024 : Shorthand to free a single buffer chain.
1025 :
1026 : @param vm - (vlib_main_t *) vlib main data structure pointer
1027 : @param buffer_index - (u32) buffer index to free
1028 : */
1029 : always_inline void
1030 204275 : vlib_buffer_free_one (vlib_main_t * vm, u32 buffer_index)
1031 : {
1032 204275 : vlib_buffer_free_inline (vm, &buffer_index, 1, /* maybe next */ 1);
1033 204276 : }
1034 :
1035 : /** \brief Free buffers from ring
1036 :
1037 : @param vm - (vlib_main_t *) vlib main data structure pointer
1038 : @param buffers - (u32 * ) buffer index ring
1039 : @param start - (u32) first slot in the ring
1040 : @param ring_size - (u32) ring size
1041 : @param n_buffers - (u32) number of buffers
1042 : */
1043 : always_inline void
1044 1304804 : vlib_buffer_free_from_ring (vlib_main_t * vm, u32 * ring, u32 start,
1045 : u32 ring_size, u32 n_buffers)
1046 : {
1047 1304804 : ASSERT (n_buffers <= ring_size);
1048 :
1049 1304804 : if (PREDICT_TRUE (start + n_buffers <= ring_size))
1050 : {
1051 1128074 : vlib_buffer_free (vm, ring + start, n_buffers);
1052 : }
1053 : else
1054 : {
1055 176735 : vlib_buffer_free (vm, ring + start, ring_size - start);
1056 176735 : vlib_buffer_free (vm, ring, n_buffers - (ring_size - start));
1057 : }
1058 1304804 : }
1059 :
1060 : /** \brief Free buffers from ring without freeing tail buffers
1061 :
1062 : @param vm - (vlib_main_t *) vlib main data structure pointer
1063 : @param buffers - (u32 * ) buffer index ring
1064 : @param start - (u32) first slot in the ring
1065 : @param ring_size - (u32) ring size
1066 : @param n_buffers - (u32) number of buffers
1067 : */
1068 : always_inline void
1069 4 : vlib_buffer_free_from_ring_no_next (vlib_main_t * vm, u32 * ring, u32 start,
1070 : u32 ring_size, u32 n_buffers)
1071 : {
1072 4 : ASSERT (n_buffers <= ring_size);
1073 :
1074 4 : if (PREDICT_TRUE (start + n_buffers <= ring_size))
1075 : {
1076 4 : vlib_buffer_free_no_next (vm, ring + start, n_buffers);
1077 : }
1078 : else
1079 : {
1080 0 : vlib_buffer_free_no_next (vm, ring + start, ring_size - start);
1081 0 : vlib_buffer_free_no_next (vm, ring, n_buffers - (ring_size - start));
1082 : }
1083 4 : }
1084 :
1085 : /* Append given data to end of buffer, possibly allocating new buffers. */
1086 : int vlib_buffer_add_data (vlib_main_t * vm, u32 * buffer_index, void *data,
1087 : u32 n_data_bytes);
1088 :
1089 : /* Define vlib_buffer and vnet_buffer flags bits preserved for copy/clone */
1090 : #define VLIB_BUFFER_COPY_CLONE_FLAGS_MASK \
1091 : (VLIB_BUFFER_NEXT_PRESENT | VLIB_BUFFER_TOTAL_LENGTH_VALID | \
1092 : VLIB_BUFFER_IS_TRACED | ~VLIB_BUFFER_FLAGS_ALL)
1093 :
1094 : /* duplicate all buffers in chain */
1095 : always_inline vlib_buffer_t *
1096 10133 : vlib_buffer_copy (vlib_main_t * vm, vlib_buffer_t * b)
1097 : {
1098 : vlib_buffer_t *s, *d, *fd;
1099 10133 : uword n_alloc, n_buffers = 1;
1100 10133 : u32 flag_mask = VLIB_BUFFER_COPY_CLONE_FLAGS_MASK;
1101 : int i;
1102 :
1103 10133 : s = b;
1104 10136 : while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
1105 : {
1106 3 : n_buffers++;
1107 3 : s = vlib_get_buffer (vm, s->next_buffer);
1108 : }
1109 10133 : u32 new_buffers[n_buffers];
1110 :
1111 10133 : n_alloc = vlib_buffer_alloc (vm, new_buffers, n_buffers);
1112 :
1113 : /* No guarantee that we'll get all the buffers we asked for */
1114 10133 : if (PREDICT_FALSE (n_alloc < n_buffers))
1115 : {
1116 0 : if (n_alloc > 0)
1117 0 : vlib_buffer_free (vm, new_buffers, n_alloc);
1118 0 : return 0;
1119 : }
1120 :
1121 : /* 1st segment */
1122 10133 : s = b;
1123 10133 : fd = d = vlib_get_buffer (vm, new_buffers[0]);
1124 10133 : d->current_data = s->current_data;
1125 10133 : d->current_length = s->current_length;
1126 10133 : d->flags = s->flags & flag_mask;
1127 10133 : d->trace_handle = s->trace_handle;
1128 10133 : d->total_length_not_including_first_buffer =
1129 10133 : s->total_length_not_including_first_buffer;
1130 10133 : clib_memcpy_fast (d->opaque, s->opaque, sizeof (s->opaque));
1131 10133 : clib_memcpy_fast (d->opaque2, s->opaque2, sizeof (s->opaque2));
1132 10133 : clib_memcpy_fast (vlib_buffer_get_current (d),
1133 10133 : vlib_buffer_get_current (s), s->current_length);
1134 :
1135 : /* next segments */
1136 10136 : for (i = 1; i < n_buffers; i++)
1137 : {
1138 : /* previous */
1139 3 : d->next_buffer = new_buffers[i];
1140 : /* current */
1141 3 : s = vlib_get_buffer (vm, s->next_buffer);
1142 3 : d = vlib_get_buffer (vm, new_buffers[i]);
1143 3 : d->current_data = s->current_data;
1144 3 : d->current_length = s->current_length;
1145 3 : clib_memcpy_fast (vlib_buffer_get_current (d),
1146 3 : vlib_buffer_get_current (s), s->current_length);
1147 3 : d->flags = s->flags & flag_mask;
1148 : }
1149 :
1150 10133 : return fd;
1151 : }
1152 :
1153 : /* duplicate first buffer in chain */
1154 : always_inline vlib_buffer_t *
1155 50 : vlib_buffer_copy_no_chain (vlib_main_t * vm, vlib_buffer_t * b, u32 * di)
1156 : {
1157 : vlib_buffer_t *d;
1158 :
1159 50 : if ((vlib_buffer_alloc (vm, di, 1)) != 1)
1160 0 : return 0;
1161 :
1162 50 : d = vlib_get_buffer (vm, *di);
1163 : /* 1st segment */
1164 50 : d->current_data = b->current_data;
1165 50 : d->current_length = b->current_length;
1166 50 : clib_memcpy_fast (d->opaque, b->opaque, sizeof (b->opaque));
1167 50 : clib_memcpy_fast (d->opaque2, b->opaque2, sizeof (b->opaque2));
1168 50 : clib_memcpy_fast (vlib_buffer_get_current (d),
1169 50 : vlib_buffer_get_current (b), b->current_length);
1170 :
1171 50 : return d;
1172 : }
1173 :
1174 : /* \brief Move packet from current position to offset position in buffer.
1175 : Only work for small packet using one buffer with room to fit the move
1176 : @param vm - (vlib_main_t *) vlib main data structure pointer
1177 : @param b - (vlib_buffer_t *) pointer to buffer
1178 : @param offset - (i16) position to move the packet in buffer
1179 : */
1180 : always_inline void
1181 10 : vlib_buffer_move (vlib_main_t * vm, vlib_buffer_t * b, i16 offset)
1182 : {
1183 10 : ASSERT ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0);
1184 10 : ASSERT (offset + VLIB_BUFFER_PRE_DATA_SIZE >= 0);
1185 10 : ASSERT (offset + b->current_length <
1186 : vlib_buffer_get_default_data_size (vm));
1187 :
1188 10 : u8 *source = vlib_buffer_get_current (b);
1189 10 : b->current_data = offset;
1190 10 : u8 *destination = vlib_buffer_get_current (b);
1191 10 : u16 length = b->current_length;
1192 :
1193 10 : if (source + length <= destination) /* no overlap */
1194 0 : clib_memcpy_fast (destination, source, length);
1195 : else
1196 10 : memmove (destination, source, length);
1197 10 : }
1198 :
/** \brief Create a maximum of 256 clones of buffer and store them
    in the supplied array

    Two strategies are used, selected by the size of the packet head:
    - if the head is small (<= head_end_offset + 2 cache lines), the
      clones are full deep copies made with vlib_buffer_copy(), and the
      source buffer itself is handed back as clone 0;
    - otherwise only the head is copied into each clone, and every clone
      chains back to the (advanced) source buffer, whose ref_count is
      raised to the number of clones.

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param src_buffer - (u32) source buffer index
    @param buffers - (u32 * ) buffer index array
    @param n_buffers - (u16) number of buffer clones requested (<=256)
    @param head_end_offset - (u16) offset relative to current position
    where packet head ends
    @param offset - (i16) copy packet head at current position if 0,
    else at offset position to change headroom space as specified
    @return - (u16) number of buffers actually cloned, may be
    less than the number requested or zero
*/
always_inline u16
vlib_buffer_clone_256 (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
		       u16 n_buffers, u16 head_end_offset, i16 offset)
{
  u16 i;
  vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);

  /* cloning an already-cloned buffer is not supported */
  ASSERT (s->ref_count == 1);
  ASSERT (n_buffers);
  ASSERT (n_buffers <= 256);
  /* offset may be negative but must stay within the pre-data area */
  ASSERT (offset + VLIB_BUFFER_PRE_DATA_SIZE >= 0);
  ASSERT ((offset + head_end_offset) <
	  vlib_buffer_get_default_data_size (vm));

  /* small head: deep-copy per clone rather than share the tail */
  if (s->current_length <= head_end_offset + CLIB_CACHE_LINE_BYTES * 2)
    {
      /* the source buffer itself becomes clone 0 */
      buffers[0] = src_buffer;
      if (offset)
	vlib_buffer_move (vm, s, offset);

      for (i = 1; i < n_buffers; i++)
	{
	  vlib_buffer_t *d;
	  d = vlib_buffer_copy (vm, s);
	  if (d == 0)
	    return i;		/* allocation failed: partial result */
	  buffers[i] = vlib_get_buffer_index (vm, d);

	}
      return n_buffers;
    }

  /* single clone at unchanged position: just hand back the source */
  if (PREDICT_FALSE ((n_buffers == 1) && (offset == 0)))
    {
      buffers[0] = src_buffer;
      return 1;
    }

  /* clones must come from the same pool as the source so the shared
     tail and the heads are freed consistently */
  n_buffers = vlib_buffer_alloc_from_pool (vm, buffers, n_buffers,
					   s->buffer_pool_index);

  for (i = 0; i < n_buffers; i++)
    {
      vlib_buffer_t *d = vlib_get_buffer (vm, buffers[i]);
      if (offset)
	d->current_data = offset;
      else
	d->current_data = s->current_data;

      d->current_length = head_end_offset;
      ASSERT (d->buffer_pool_index == s->buffer_pool_index);

      /* bytes past the head live in the shared source (and its chain) */
      d->total_length_not_including_first_buffer = s->current_length -
	head_end_offset;
      if (PREDICT_FALSE (s->flags & VLIB_BUFFER_NEXT_PRESENT))
	{
	  d->total_length_not_including_first_buffer +=
	    s->total_length_not_including_first_buffer;
	}
      /* every clone chains to the source: NEXT_PRESENT is forced on */
      d->flags = (s->flags & VLIB_BUFFER_COPY_CLONE_FLAGS_MASK) |
	VLIB_BUFFER_NEXT_PRESENT;
      d->trace_handle = s->trace_handle;
      clib_memcpy_fast (d->opaque, s->opaque, sizeof (s->opaque));
      clib_memcpy_fast (d->opaque2, s->opaque2, sizeof (s->opaque2));
      clib_memcpy_fast (vlib_buffer_get_current (d),
			vlib_buffer_get_current (s), head_end_offset);
      d->next_buffer = src_buffer;
    }
  /* the head bytes now live in the clones; the source starts after them */
  vlib_buffer_advance (s, head_end_offset);
  /* n_buffers may be 0 if allocation failed entirely; keep ref_count */
  s->ref_count = n_buffers ? n_buffers : s->ref_count;
  while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
    {
      s = vlib_get_buffer (vm, s->next_buffer);
      s->ref_count = n_buffers ? n_buffers : s->ref_count;
    }

  return n_buffers;
}
1291 :
1292 : /** \brief Create multiple clones of buffer and store them
1293 : in the supplied array
1294 :
1295 : @param vm - (vlib_main_t *) vlib main data structure pointer
1296 : @param src_buffer - (u32) source buffer index
1297 : @param buffers - (u32 * ) buffer index array
1298 : @param n_buffers - (u16) number of buffer clones requested (<=256)
1299 : @param head_end_offset - (u16) offset relative to current position
1300 : where packet head ends
1301 : @param offset - (i16) copy packet head at current position if 0,
1302 : else at offset position to change headroom space as specified
1303 : @return - (u16) number of buffers actually cloned, may be
1304 : less than the number requested or zero
1305 : */
1306 : always_inline u16
1307 10798 : vlib_buffer_clone_at_offset (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
1308 : u16 n_buffers, u16 head_end_offset, i16 offset)
1309 : {
1310 10798 : vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
1311 10798 : u16 n_cloned = 0;
1312 :
1313 10798 : while (n_buffers > 256)
1314 : {
1315 : vlib_buffer_t *copy;
1316 0 : copy = vlib_buffer_copy (vm, s);
1317 0 : n_cloned += vlib_buffer_clone_256 (vm,
1318 : vlib_get_buffer_index (vm, copy),
1319 0 : (buffers + n_cloned),
1320 : 256, head_end_offset, offset);
1321 0 : n_buffers -= 256;
1322 : }
1323 21596 : n_cloned += vlib_buffer_clone_256 (vm, src_buffer,
1324 10798 : buffers + n_cloned,
1325 : n_buffers, head_end_offset, offset);
1326 :
1327 10798 : return n_cloned;
1328 : }
1329 :
1330 : /** \brief Create multiple clones of buffer and store them
1331 : in the supplied array
1332 :
1333 : @param vm - (vlib_main_t *) vlib main data structure pointer
1334 : @param src_buffer - (u32) source buffer index
1335 : @param buffers - (u32 * ) buffer index array
1336 : @param n_buffers - (u16) number of buffer clones requested (<=256)
1337 : @param head_end_offset - (u16) offset relative to current position
1338 : where packet head ends
1339 : @return - (u16) number of buffers actually cloned, may be
1340 : less than the number requested or zero
1341 : */
1342 : always_inline u16
1343 10798 : vlib_buffer_clone (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
1344 : u16 n_buffers, u16 head_end_offset)
1345 : {
1346 10798 : return vlib_buffer_clone_at_offset (vm, src_buffer, buffers, n_buffers,
1347 : head_end_offset, 0);
1348 : }
1349 :
1350 : /** \brief Attach cloned tail to the buffer
1351 :
1352 : @param vm - (vlib_main_t *) vlib main data structure pointer
1353 : @param head - (vlib_buffer_t *) head buffer
1354 : @param tail - (Vlib buffer_t *) tail buffer to clone and attach to head
1355 : */
1356 :
1357 : always_inline void
1358 : vlib_buffer_attach_clone (vlib_main_t * vm, vlib_buffer_t * head,
1359 : vlib_buffer_t * tail)
1360 : {
1361 : ASSERT ((head->flags & VLIB_BUFFER_NEXT_PRESENT) == 0);
1362 : ASSERT (head->buffer_pool_index == tail->buffer_pool_index);
1363 :
1364 : head->flags |= VLIB_BUFFER_NEXT_PRESENT;
1365 : head->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
1366 : head->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
1367 : head->flags |= (tail->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID);
1368 : head->next_buffer = vlib_get_buffer_index (vm, tail);
1369 : head->total_length_not_including_first_buffer = tail->current_length +
1370 : tail->total_length_not_including_first_buffer;
1371 :
1372 : next_segment:
1373 : clib_atomic_add_fetch (&tail->ref_count, 1);
1374 :
1375 : if (tail->flags & VLIB_BUFFER_NEXT_PRESENT)
1376 : {
1377 : tail = vlib_get_buffer (vm, tail->next_buffer);
1378 : goto next_segment;
1379 : }
1380 : }
1381 :
1382 : /* Initializes the buffer as an empty packet with no chained buffers. */
1383 : always_inline void
1384 : vlib_buffer_chain_init (vlib_buffer_t * first)
1385 : {
1386 : first->total_length_not_including_first_buffer = 0;
1387 : first->current_length = 0;
1388 : first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1389 : first->flags |= VLIB_BUFFER_TOTAL_LENGTH_VALID;
1390 : }
1391 :
1392 : /* The provided next_bi buffer index is appended to the end of the packet. */
1393 : always_inline vlib_buffer_t *
1394 9 : vlib_buffer_chain_buffer (vlib_main_t * vm, vlib_buffer_t * last, u32 next_bi)
1395 : {
1396 9 : vlib_buffer_t *next_buffer = vlib_get_buffer (vm, next_bi);
1397 9 : last->next_buffer = next_bi;
1398 9 : last->flags |= VLIB_BUFFER_NEXT_PRESENT;
1399 9 : next_buffer->current_length = 0;
1400 9 : next_buffer->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1401 9 : return next_buffer;
1402 : }
1403 :
1404 : /* Increases or decreases the packet length.
1405 : * It does not allocate or deallocate new buffers.
1406 : * Therefore, the added length must be compatible
1407 : * with the last buffer. */
1408 : always_inline void
1409 7024 : vlib_buffer_chain_increase_length (vlib_buffer_t * first,
1410 : vlib_buffer_t * last, i32 len)
1411 : {
1412 7024 : last->current_length += len;
1413 7024 : if (first != last)
1414 49 : first->total_length_not_including_first_buffer += len;
1415 7024 : }
1416 :
1417 : /* Copy data to the end of the packet and increases its length.
1418 : * It does not allocate new buffers.
1419 : * Returns the number of copied bytes. */
1420 : always_inline u16
1421 : vlib_buffer_chain_append_data (vlib_main_t * vm,
1422 : vlib_buffer_t * first,
1423 : vlib_buffer_t * last, void *data, u16 data_len)
1424 : {
1425 : u32 n_buffer_bytes = vlib_buffer_get_default_data_size (vm);
1426 : ASSERT (n_buffer_bytes >= last->current_length + last->current_data);
1427 : u16 len = clib_min (data_len,
1428 : n_buffer_bytes - last->current_length -
1429 : last->current_data);
1430 : clib_memcpy_fast (vlib_buffer_get_current (last) + last->current_length,
1431 : data, len);
1432 : vlib_buffer_chain_increase_length (first, last, len);
1433 : return len;
1434 : }
1435 :
1436 : /* Copy data to the end of the packet and increases its length.
1437 : * Allocates additional buffers from the free list if necessary.
1438 : * Returns the number of copied bytes.
1439 : * 'last' value is modified whenever new buffers are allocated and
1440 : * chained and points to the last buffer in the chain. */
1441 : u16
1442 : vlib_buffer_chain_append_data_with_alloc (vlib_main_t * vm,
1443 : vlib_buffer_t * first,
1444 : vlib_buffer_t ** last, void *data,
1445 : u16 data_len);
1446 : void vlib_buffer_chain_validate (vlib_main_t * vm, vlib_buffer_t * first);
1447 :
1448 : format_function_t format_vlib_buffer, format_vlib_buffer_and_data,
1449 : format_vlib_buffer_contents, format_vlib_buffer_no_chain;
1450 :
/* Pre-built packet template; see vlib_packet_template_init() and
   vlib_packet_template_get_packet(). */
typedef struct
{
  /* Vector of packet data. */
  u8 *packet_data;

  /* Number of buffers to allocate in each call to allocator. */
  u32 min_n_buffers_each_alloc;

  /* Template name (for display/debug purposes). */
  u8 *name;
} vlib_packet_template_t;
1461 :
1462 : void vlib_packet_template_init (vlib_main_t * vm,
1463 : vlib_packet_template_t * t,
1464 : void *packet_data,
1465 : uword n_packet_data_bytes,
1466 : uword min_n_buffers_each_alloc,
1467 : char *fmt, ...);
1468 :
1469 : void *vlib_packet_template_get_packet (vlib_main_t * vm,
1470 : vlib_packet_template_t * t,
1471 : u32 * bi_result);
1472 :
/* Release the packet-data vector owned by the template.
   vm is unused; kept for API symmetry with the other template calls. */
always_inline void
vlib_packet_template_free (vlib_main_t * vm, vlib_packet_template_t * t)
{
  vec_free (t->packet_data);
}
1478 :
1479 : always_inline u32
1480 166541 : vlib_buffer_space_left_at_end (vlib_main_t * vm, vlib_buffer_t * b)
1481 : {
1482 166541 : return b->data + vlib_buffer_get_default_data_size (vm) -
1483 166541 : ((u8 *) vlib_buffer_get_current (b) + b->current_length);
1484 : }
1485 :
/* Upper bound on the number of fresh buffers allocated when a cloned
   destination chain must be rebuilt during linearization. */
#define VLIB_BUFFER_LINEARIZE_MAX 64

/* Compact a buffer chain in place so that every buffer except possibly
   the last is filled to the default data size, freeing any buffers that
   become empty. Returns the number of buffers in the resulting chain,
   or 0 on failure (cloned head, or allocation failure). Requires the
   head buffer to be unshared (ref_count == 1). */
always_inline u32
vlib_buffer_chain_linearize (vlib_main_t * vm, vlib_buffer_t * b)
{
  vlib_buffer_t *dst_b;
  u32 n_buffers = 1, to_free = 0;
  u16 rem_len, dst_len, data_size, src_len = 0;
  u8 *dst, *src = 0;

  /* single-buffer packet: nothing to linearize */
  if (PREDICT_TRUE ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0))
    return 1;

  /* an in-place rewrite is only safe if nobody else references b */
  ASSERT (1 == b->ref_count);
  if (PREDICT_FALSE (1 != b->ref_count))
    return 0;

  data_size = vlib_buffer_get_default_data_size (vm);
  /* bytes that still live in segments after the head */
  rem_len = vlib_buffer_length_in_chain (vm, b) - b->current_length;

  /* destination cursor starts at the free tail space of the head */
  dst_b = b;
  dst = vlib_buffer_get_tail (dst_b);
  dst_len = vlib_buffer_space_left_at_end (vm, dst_b);

  /* pre-deduct the head's tail space; any part of it that is not
     actually filled is re-accounted via dst_b->current_length below */
  b->total_length_not_including_first_buffer -= dst_len;

  while (rem_len > 0)
    {
      u16 copy_len;

      /* advance the source cursor to the next non-empty segment */
      while (0 == src_len)
	{
	  ASSERT (b->flags & VLIB_BUFFER_NEXT_PRESENT);
	  if (PREDICT_FALSE (!(b->flags & VLIB_BUFFER_NEXT_PRESENT)))
	    break;		/* malformed chained buffer */

	  b = vlib_get_buffer (vm, b->next_buffer);
	  src = vlib_buffer_get_current (b);
	  src_len = b->current_length;
	}

      /* destination segment full: move to (or build) the next one */
      if (0 == dst_len)
	{
	  ASSERT (dst_b->flags & VLIB_BUFFER_NEXT_PRESENT);
	  if (PREDICT_FALSE (!(dst_b->flags & VLIB_BUFFER_NEXT_PRESENT)))
	    break;		/* malformed chained buffer */

	  vlib_buffer_t *next_dst_b = vlib_get_buffer (vm, dst_b->next_buffer);

	  if (PREDICT_TRUE (1 == next_dst_b->ref_count))
	    {
	      /* normal case: buffer is not cloned, just use it */
	      dst_b = next_dst_b;
	    }
	  else
	    {
	      /* cloned buffer, build a new dest chain from there */
	      vlib_buffer_t *bufs[VLIB_BUFFER_LINEARIZE_MAX];
	      u32 bis[VLIB_BUFFER_LINEARIZE_MAX + 1];
	      const int n = (rem_len + data_size - 1) / data_size;
	      int n_alloc;
	      int i;

	      ASSERT (n <= VLIB_BUFFER_LINEARIZE_MAX);
	      if (PREDICT_FALSE (n > VLIB_BUFFER_LINEARIZE_MAX))
		return 0;

	      n_alloc = vlib_buffer_alloc (vm, bis, n);
	      if (PREDICT_FALSE (n_alloc != n))
		{
		  vlib_buffer_free (vm, bis, n_alloc);
		  return 0;
		}

	      vlib_get_buffers (vm, bis, bufs, n);

	      /* pre-link the fresh buffers into a chain */
	      for (i = 0; i < n - 1; i++)
		{
		  bufs[i]->flags |= VLIB_BUFFER_NEXT_PRESENT;
		  bufs[i]->next_buffer = bis[i + 1];
		}

	      /* the old (cloned) tail is released after copying, since
	         it may still be a source of data */
	      to_free = dst_b->next_buffer;
	      dst_b->next_buffer = bis[0];
	      dst_b = bufs[0];
	    }

	  n_buffers++;

	  /* reclaim headroom (negative current_data) as writable space */
	  dst_b->current_data = clib_min (0, dst_b->current_data);
	  dst_b->current_length = 0;

	  dst = dst_b->data + dst_b->current_data;
	  dst_len = data_size - dst_b->current_data;
	}

      copy_len = clib_min (src_len, dst_len);

      if (PREDICT_TRUE (src == dst))
	{
	  /* nothing to do */
	}
      else if (src + copy_len > dst && dst + copy_len > src)
	{
	  /* src and dst overlap */
	  ASSERT (b == dst_b);
	  memmove (dst, src, copy_len);
	}
      else
	{
	  clib_memcpy_fast (dst, src, copy_len);
	}

      dst_b->current_length += copy_len;

      dst += copy_len;
      src += copy_len;
      dst_len -= copy_len;
      src_len -= copy_len;
      rem_len -= copy_len;
    }

  /* in case of a malformed chain buffer, we'll exit early from the loop. */
  ASSERT (0 == rem_len);
  b->total_length_not_including_first_buffer -= rem_len;

  if (to_free)
    vlib_buffer_free_one (vm, to_free);

  if (dst_b->flags & VLIB_BUFFER_NEXT_PRESENT)
    {
      /* the resulting chain is smaller than the original, cut it there */
      dst_b->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
      vlib_buffer_free_one (vm, dst_b->next_buffer);
      if (1 == n_buffers)
	{
	  /* no longer a chained buffer */
	  dst_b->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
	  dst_b->total_length_not_including_first_buffer = 0;
	}
    }

  return n_buffers;
}
1630 :
1631 : #endif /* included_vlib_buffer_funcs_h */
1632 :
1633 : /*
1634 : * fd.io coding-style-patch-verification: ON
1635 : *
1636 : * Local Variables:
1637 : * eval: (c-set-style "gnu")
1638 : * End:
1639 : */
|