Line data Source code
1 : /*
2 : * Copyright (c) 2015 Cisco and/or its affiliates.
3 : * Licensed under the Apache License, Version 2.0 (the "License");
4 : * you may not use this file except in compliance with the License.
5 : * You may obtain a copy of the License at:
6 : *
7 : * http://www.apache.org/licenses/LICENSE-2.0
8 : *
9 : * Unless required by applicable law or agreed to in writing, software
10 : * distributed under the License is distributed on an "AS IS" BASIS,
11 : * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 : * See the License for the specific language governing permissions and
13 : * limitations under the License.
14 : */
15 : /*
16 : * buffer_funcs.h: VLIB buffer related functions/inlines
17 : *
18 : * Copyright (c) 2008 Eliot Dresselhaus
19 : *
20 : * Permission is hereby granted, free of charge, to any person obtaining
21 : * a copy of this software and associated documentation files (the
22 : * "Software"), to deal in the Software without restriction, including
23 : * without limitation the rights to use, copy, modify, merge, publish,
24 : * distribute, sublicense, and/or sell copies of the Software, and to
25 : * permit persons to whom the Software is furnished to do so, subject to
26 : * the following conditions:
27 : *
28 : * The above copyright notice and this permission notice shall be
29 : * included in all copies or substantial portions of the Software.
30 : *
31 : * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
32 : * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
33 : * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
34 : * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
35 : * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
36 : * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
37 : * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
38 : */
39 :
40 : #ifndef included_vlib_buffer_funcs_h
41 : #define included_vlib_buffer_funcs_h
42 :
43 : #include <vppinfra/hash.h>
44 : #include <vppinfra/fifo.h>
45 : #include <vppinfra/vector/index_to_ptr.h>
46 : #include <vlib/buffer.h>
47 : #include <vlib/physmem_funcs.h>
48 : #include <vlib/main.h>
49 : #include <vlib/node.h>
50 :
51 : /** \file
52 : vlib buffer access methods.
53 : */
54 :
55 : typedef void (vlib_buffer_enqueue_to_next_fn_t) (vlib_main_t *vm,
56 : vlib_node_runtime_t *node,
57 : u32 *buffers, u16 *nexts,
58 : uword count);
59 : typedef void (vlib_buffer_enqueue_to_next_with_aux_fn_t) (
60 : vlib_main_t *vm, vlib_node_runtime_t *node, u32 *buffers, u32 *aux_data,
61 : u16 *nexts, uword count);
62 : typedef void (vlib_buffer_enqueue_to_single_next_fn_t) (
63 : vlib_main_t *vm, vlib_node_runtime_t *node, u32 *buffers, u16 next_index,
64 : u32 count);
65 :
66 : typedef void (vlib_buffer_enqueue_to_single_next_with_aux_fn_t) (
67 : vlib_main_t *vm, vlib_node_runtime_t *node, u32 *buffers, u32 *aux_data,
68 : u16 next_index, u32 count);
69 :
70 : typedef u32 (vlib_buffer_enqueue_to_thread_fn_t) (
71 : vlib_main_t *vm, vlib_node_runtime_t *node, u32 frame_queue_index,
72 : u32 *buffer_indices, u16 *thread_indices, u32 n_packets,
73 : int drop_on_congestion);
74 :
75 : typedef u32 (vlib_buffer_enqueue_to_thread_with_aux_fn_t) (
76 : vlib_main_t *vm, vlib_node_runtime_t *node, u32 frame_queue_index,
77 : u32 *buffer_indices, u32 *aux, u16 *thread_indices, u32 n_packets,
78 : int drop_on_congestion);
79 :
80 : typedef struct
81 : {
82 : vlib_buffer_enqueue_to_next_fn_t *buffer_enqueue_to_next_fn;
83 : vlib_buffer_enqueue_to_next_with_aux_fn_t
84 : *buffer_enqueue_to_next_with_aux_fn;
85 : vlib_buffer_enqueue_to_single_next_fn_t *buffer_enqueue_to_single_next_fn;
86 : vlib_buffer_enqueue_to_single_next_with_aux_fn_t
87 : *buffer_enqueue_to_single_next_with_aux_fn;
88 : vlib_buffer_enqueue_to_thread_fn_t *buffer_enqueue_to_thread_fn;
89 : vlib_buffer_enqueue_to_thread_with_aux_fn_t
90 : *buffer_enqueue_to_thread_with_aux_fn;
91 : } vlib_buffer_func_main_t;
92 :
93 : extern vlib_buffer_func_main_t vlib_buffer_func_main;
94 :
95 : always_inline void
96 1548429326 : vlib_buffer_validate (vlib_main_t * vm, vlib_buffer_t * b)
97 : {
98 1548429326 : vlib_buffer_main_t *bm = vm->buffer_main;
99 : vlib_buffer_pool_t *bp;
100 :
101 : /* the reference count of an allocated buffer must always be 1 or higher */
102 1548429326 : ASSERT (b->ref_count > 0);
103 :
104 : /* verify that buffer pool index is valid */
105 1548429843 : bp = vec_elt_at_index (bm->buffer_pools, b->buffer_pool_index);
106 1548429437 : ASSERT (pointer_to_uword (b) >= bp->start);
107 1548429469 : ASSERT (pointer_to_uword (b) < bp->start + bp->size -
108 : (bp->data_size + sizeof (vlib_buffer_t)));
109 1548429469 : }
110 :
111 : always_inline void *
112 1362012338 : vlib_buffer_ptr_from_index (uword buffer_mem_start, u32 buffer_index,
113 : uword offset)
114 : {
115 1362012338 : offset += ((uword) buffer_index) << CLIB_LOG2_CACHE_LINE_BYTES;
116 1362012338 : return uword_to_pointer (buffer_mem_start + offset, vlib_buffer_t *);
117 : }
118 :
119 : /** \brief Translate buffer index into buffer pointer
120 :
121 : @param vm - (vlib_main_t *) vlib main data structure pointer
122 : @param buffer_index - (u32) buffer index
123 : @return - (vlib_buffer_t *) buffer pointer
124 : */
125 : always_inline vlib_buffer_t *
126 1362012156 : vlib_get_buffer (vlib_main_t * vm, u32 buffer_index)
127 : {
128 1362012156 : vlib_buffer_main_t *bm = vm->buffer_main;
129 : vlib_buffer_t *b;
130 :
131 1362012156 : b = vlib_buffer_ptr_from_index (bm->buffer_mem_start, buffer_index, 0);
132 1362012393 : vlib_buffer_validate (vm, b);
133 1362012584 : return b;
134 : }
135 :
136 : static_always_inline u32
137 132990087 : vlib_buffer_get_default_data_size (vlib_main_t * vm)
138 : {
139 132990087 : return vm->buffer_main->default_data_size;
140 : }
141 :
142 : static_always_inline void
143 33549922 : vlib_buffer_copy_indices (u32 * dst, u32 * src, u32 n_indices)
144 : {
145 33549922 : clib_memcpy_u32 (dst, src, n_indices);
146 33549937 : }
147 :
148 : always_inline void
149 : vlib_buffer_copy_indices_from_ring (u32 * dst, u32 * ring, u32 start,
150 : u32 ring_size, u32 n_buffers)
151 : {
152 : ASSERT (n_buffers <= ring_size);
153 :
154 : if (PREDICT_TRUE (start + n_buffers <= ring_size))
155 : {
156 : vlib_buffer_copy_indices (dst, ring + start, n_buffers);
157 : }
158 : else
159 : {
160 : u32 n = ring_size - start;
161 : vlib_buffer_copy_indices (dst, ring + start, n);
162 : vlib_buffer_copy_indices (dst + n, ring, n_buffers - n);
163 : }
164 : }
165 :
166 : always_inline void
167 : vlib_buffer_copy_indices_to_ring (u32 * ring, u32 * src, u32 start,
168 : u32 ring_size, u32 n_buffers)
169 : {
170 : ASSERT (n_buffers <= ring_size);
171 :
172 : if (PREDICT_TRUE (start + n_buffers <= ring_size))
173 : {
174 : vlib_buffer_copy_indices (ring + start, src, n_buffers);
175 : }
176 : else
177 : {
178 : u32 n = ring_size - start;
179 : vlib_buffer_copy_indices (ring + start, src, n);
180 : vlib_buffer_copy_indices (ring, src + n, n_buffers - n);
181 : }
182 : }
183 :
184 : STATIC_ASSERT_OFFSET_OF (vlib_buffer_t, template_end, 64);
185 : static_always_inline void
186 193371106 : vlib_buffer_copy_template (vlib_buffer_t * b, vlib_buffer_t * bt)
187 : {
188 : #if defined CLIB_HAVE_VEC512
189 0 : b->as_u8x64[0] = bt->as_u8x64[0];
190 : #elif defined (CLIB_HAVE_VEC256)
191 182893374 : b->as_u8x32[0] = bt->as_u8x32[0];
192 182893374 : b->as_u8x32[1] = bt->as_u8x32[1];
193 : #elif defined (CLIB_HAVE_VEC128)
194 10477732 : b->as_u8x16[0] = bt->as_u8x16[0];
195 10477732 : b->as_u8x16[1] = bt->as_u8x16[1];
196 10477732 : b->as_u8x16[2] = bt->as_u8x16[2];
197 10477732 : b->as_u8x16[3] = bt->as_u8x16[3];
198 : #else
199 : clib_memcpy_fast (b, bt, 64);
200 : #endif
201 193371106 : }
202 :
203 : always_inline u8
204 19128224 : vlib_buffer_pool_get_default_for_numa (vlib_main_t * vm, u32 numa_node)
205 : {
206 19128224 : ASSERT (numa_node < VLIB_BUFFER_MAX_NUMA_NODES);
207 19128225 : return vm->buffer_main->default_buffer_pool_index_for_numa[numa_node];
208 : }
209 :
210 : /** \brief Translate array of buffer indices into buffer pointers with offset
211 :
212 : @param vm - (vlib_main_t *) vlib main data structure pointer
213 : @param bi - (u32 *) array of buffer indices
214 : @param b - (void **) array to store buffer pointers
215 : @param count - (uword) number of elements
216 : @param offset - (i32) offset applied to each pointer
217 : */
218 : static_always_inline void
219 59499122 : vlib_get_buffers_with_offset (vlib_main_t *vm, u32 *bi, void **b, u32 count,
220 : i32 offset)
221 : {
222 59499122 : uword buffer_mem_start = vm->buffer_main->buffer_mem_start;
223 59499122 : void *base = (void *) (buffer_mem_start + offset);
224 59499122 : int objsize = __builtin_object_size (b, 0);
225 59499122 : const int sh = CLIB_LOG2_CACHE_LINE_BYTES;
226 :
227 59499122 : if (COMPILE_TIME_CONST (count) == 0 && objsize >= 64 * sizeof (b[0]) &&
228 59499122 : (objsize & ((8 * sizeof (b[0])) - 1)) == 0)
229 0 : {
230 0 : u32 n = round_pow2 (count, 8);
231 0 : ASSERT (objsize >= count);
232 0 : CLIB_ASSUME (objsize >= count);
233 0 : while (n >= 64)
234 : {
235 0 : clib_index_to_ptr_u32 (bi, base, sh, b, 64);
236 0 : b += 64;
237 0 : bi += 64;
238 0 : n -= 64;
239 : }
240 :
241 0 : while (n)
242 : {
243 0 : clib_index_to_ptr_u32 (bi, base, sh, b, 8);
244 0 : b += 8;
245 0 : bi += 8;
246 0 : n -= 8;
247 : }
248 : }
249 : else
250 59499122 : clib_index_to_ptr_u32 (bi, base, sh, b, count);
251 59499121 : }
252 :
253 : /** \brief Translate array of buffer indices into buffer pointers
254 :
255 : @param vm - (vlib_main_t *) vlib main data structure pointer
256 : @param bi - (u32 *) array of buffer indices
257 : @param b - (vlib_buffer_t **) array to store buffer pointers
258 : @param count - (uword) number of elements
259 : */
260 :
261 : static_always_inline void
262 59499122 : vlib_get_buffers (vlib_main_t *vm, u32 *bi, vlib_buffer_t **b, u32 count)
263 : {
264 59499122 : vlib_get_buffers_with_offset (vm, bi, (void **) b, count, 0);
265 59499121 : }
266 :
267 : /** \brief Translate buffer pointer into buffer index
268 :
269 : @param vm - (vlib_main_t *) vlib main data structure pointer
270 : @param p - (void *) buffer pointer
271 : @return - (u32) buffer index
272 : */
273 :
274 : always_inline u32
275 4589051 : vlib_get_buffer_index (vlib_main_t * vm, void *p)
276 : {
277 4589051 : vlib_buffer_main_t *bm = vm->buffer_main;
278 4589051 : uword offset = pointer_to_uword (p) - bm->buffer_mem_start;
279 4589051 : ASSERT (pointer_to_uword (p) >= bm->buffer_mem_start);
280 4589051 : ASSERT (offset < bm->buffer_mem_size);
281 4589051 : ASSERT ((offset % (1 << CLIB_LOG2_CACHE_LINE_BYTES)) == 0);
282 4589051 : return offset >> CLIB_LOG2_CACHE_LINE_BYTES;
283 : }
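/* Illustrative usage sketch (the example_* name is hypothetical, not part of
 * the VPP API): translating a valid, allocated buffer index to a pointer and
 * back returns the original index. */
static_always_inline u32
example_buffer_index_round_trip (vlib_main_t * vm, u32 bi)
{
  vlib_buffer_t *b = vlib_get_buffer (vm, bi);  /* index -> pointer */
  return vlib_get_buffer_index (vm, b);         /* pointer -> index, == bi */
}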
284 :
285 : /** \brief Translate array of buffer pointers into buffer indices with offset
286 :
287 : @param vm - (vlib_main_t *) vlib main data structure pointer
288 : @param b - (void **) array of buffer pointers
289 : @param bi - (u32 *) array to store buffer indices
290 : @param count - (uword) number of elements
291 : @param offset - (i32) offset applied to each pointer
292 : */
293 : static_always_inline void
294 : vlib_get_buffer_indices_with_offset (vlib_main_t * vm, void **b, u32 * bi,
295 : uword count, i32 offset)
296 : {
297 : #ifdef CLIB_HAVE_VEC256
298 : u32x8 mask = { 0, 2, 4, 6, 1, 3, 5, 7 };
299 : u64x4 off4 = u64x4_splat (vm->buffer_main->buffer_mem_start - offset);
300 :
301 : while (count >= 8)
302 : {
303 : /* load 2x4 pointers into two 256-bit registers */
304 : u64x4 v0 = u64x4_load_unaligned (b);
305 : u64x4 v1 = u64x4_load_unaligned (b + 4);
306 : u32x8 v2, v3;
307 :
308 : v0 -= off4;
309 : v1 -= off4;
310 :
311 : v0 >>= CLIB_LOG2_CACHE_LINE_BYTES;
312 : v1 >>= CLIB_LOG2_CACHE_LINE_BYTES;
313 :
314 : /* permute 256-bit register so lower u32s of each buffer index are
315 : * placed into lower 128-bits */
316 : v2 = u32x8_permute ((u32x8) v0, mask);
317 : v3 = u32x8_permute ((u32x8) v1, mask);
318 :
319 : /* extract lower 128-bits and save them to the array of buffer indices */
320 : u32x4_store_unaligned (u32x8_extract_lo (v2), bi);
321 : u32x4_store_unaligned (u32x8_extract_lo (v3), bi + 4);
322 : bi += 8;
323 : b += 8;
324 : count -= 8;
325 : }
326 : #endif
327 : while (count >= 4)
328 : {
329 : /* equivalent non-vector implementation */
330 : bi[0] = vlib_get_buffer_index (vm, ((u8 *) b[0]) + offset);
331 : bi[1] = vlib_get_buffer_index (vm, ((u8 *) b[1]) + offset);
332 : bi[2] = vlib_get_buffer_index (vm, ((u8 *) b[2]) + offset);
333 : bi[3] = vlib_get_buffer_index (vm, ((u8 *) b[3]) + offset);
334 : bi += 4;
335 : b += 4;
336 : count -= 4;
337 : }
338 : while (count)
339 : {
340 : bi[0] = vlib_get_buffer_index (vm, ((u8 *) b[0]) + offset);
341 : bi += 1;
342 : b += 1;
343 : count -= 1;
344 : }
345 : }
346 :
347 : /** \brief Translate array of buffer pointers into buffer indices
348 :
349 : @param vm - (vlib_main_t *) vlib main data structure pointer
350 : @param b - (vlib_buffer_t **) array of buffer pointers
351 : @param bi - (u32 *) array to store buffer indices
352 : @param count - (uword) number of elements
353 : */
354 : static_always_inline void
355 : vlib_get_buffer_indices (vlib_main_t * vm, vlib_buffer_t ** b, u32 * bi,
356 : uword count)
357 : {
358 : vlib_get_buffer_indices_with_offset (vm, (void **) b, bi, count, 0);
359 : }
360 :
361 : /** \brief Get next buffer in the buffer linked list, or zero for end of list.
362 :
363 : @param vm - (vlib_main_t *) vlib main data structure pointer
364 : @param b - (void *) buffer pointer
365 : @return - (vlib_buffer_t *) next buffer, or NULL
366 : */
367 : always_inline vlib_buffer_t *
368 668130 : vlib_get_next_buffer (vlib_main_t * vm, vlib_buffer_t * b)
369 : {
370 668130 : return (b->flags & VLIB_BUFFER_NEXT_PRESENT
371 668130 : ? vlib_get_buffer (vm, b->next_buffer) : 0);
372 : }
373 :
374 : uword vlib_buffer_length_in_chain_slow_path (vlib_main_t * vm,
375 : vlib_buffer_t * b_first);
376 :
377 : /** \brief Get length in bytes of the buffer chain
378 :
379 : @param vm - (vlib_main_t *) vlib main data structure pointer
380 : @param b - (void *) buffer pointer
381 : @return - (uword) length of buffer chain
382 : */
383 : always_inline uword
384 153717330 : vlib_buffer_length_in_chain (vlib_main_t * vm, vlib_buffer_t * b)
385 : {
386 153717330 : uword len = b->current_length;
387 :
388 153717330 : if (PREDICT_TRUE ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0))
389 72310358 : return len;
390 :
391 81407011 : if (PREDICT_TRUE (b->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID))
392 81181633 : return len + b->total_length_not_including_first_buffer;
393 :
394 225381 : return vlib_buffer_length_in_chain_slow_path (vm, b);
395 : }
396 :
397 : /** \brief Get length in bytes of the buffer index buffer chain
398 :
399 : @param vm - (vlib_main_t *) vlib main data structure pointer
400 : @param bi - (u32) buffer index
401 : @return - (uword) length of buffer chain
402 : */
403 : always_inline uword
404 527719 : vlib_buffer_index_length_in_chain (vlib_main_t * vm, u32 bi)
405 : {
406 527719 : vlib_buffer_t *b = vlib_get_buffer (vm, bi);
407 527718 : return vlib_buffer_length_in_chain (vm, b);
408 : }
409 :
410 : /** \brief Copy buffer contents to memory
411 :
412 : @param vm - (vlib_main_t *) vlib main data structure pointer
413 : @param buffer_index - (u32) buffer index
414 : @param contents - (u8 *) memory, <strong>must be large enough</strong>
415 : @return - (uword) length of buffer chain
416 : */
417 : always_inline uword
418 25 : vlib_buffer_contents (vlib_main_t * vm, u32 buffer_index, u8 * contents)
419 : {
420 25 : uword content_len = 0;
421 : uword l;
422 : vlib_buffer_t *b;
423 :
424 : while (1)
425 : {
426 25 : b = vlib_get_buffer (vm, buffer_index);
427 25 : l = b->current_length;
428 25 : clib_memcpy_fast (contents + content_len, b->data + b->current_data, l);
429 25 : content_len += l;
430 25 : if (!(b->flags & VLIB_BUFFER_NEXT_PRESENT))
431 25 : break;
432 0 : buffer_index = b->next_buffer;
433 : }
434 :
435 25 : return content_len;
436 : }
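/* Illustrative sketch (the example_* name is hypothetical): copying a whole
 * chain into flat storage sized with vlib_buffer_length_in_chain(); the
 * caller-provided dst buffer is assumed to be large enough. */
static_always_inline uword
example_flatten_chain (vlib_main_t * vm, u32 bi, u8 * dst, uword dst_size)
{
  vlib_buffer_t *b = vlib_get_buffer (vm, bi);
  uword len = vlib_buffer_length_in_chain (vm, b);
  ASSERT (len <= dst_size);      /* vlib_buffer_contents() does not bound-check */
  return vlib_buffer_contents (vm, bi, dst);    /* returns bytes copied, == len */
}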
437 :
438 : always_inline uword
439 0 : vlib_buffer_get_pa (vlib_main_t * vm, vlib_buffer_t * b)
440 : {
441 0 : return vlib_physmem_get_pa (vm, b->data);
442 : }
443 :
444 : always_inline uword
445 0 : vlib_buffer_get_current_pa (vlib_main_t * vm, vlib_buffer_t * b)
446 : {
447 0 : return vlib_buffer_get_pa (vm, b) + b->current_data;
448 : }
449 :
450 : /** \brief Prefetch buffer metadata by buffer index
451 : The first 64 bytes of the buffer contain most header information
452 :
453 : @param vm - (vlib_main_t *) vlib main data structure pointer
454 : @param bi - (u32) buffer index
455 : @param type - LOAD, STORE. In most cases, STORE is the right answer
456 : */
457 : /* Prefetch buffer header given index. */
458 : #define vlib_prefetch_buffer_with_index(vm,bi,type) \
459 : do { \
460 : vlib_buffer_t * _b = vlib_get_buffer (vm, bi); \
461 : vlib_prefetch_buffer_header (_b, type); \
462 : } while (0)
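/* Illustrative sketch (the example_* name is hypothetical): the usual
 * dispatch-loop pattern, prefetching the header of a buffer a few packets
 * ahead while processing the current one; STORE is used per the note above,
 * since buffer metadata is typically written. */
static_always_inline void
example_prefetch_loop (vlib_main_t * vm, u32 * from, u32 n_left)
{
  while (n_left > 0)
    {
      if (n_left > 4)
	vlib_prefetch_buffer_with_index (vm, from[4], STORE);
      vlib_buffer_t *b = vlib_get_buffer (vm, from[0]);
      (void) b;			/* ... per-packet work goes here ... */
      from += 1;
      n_left -= 1;
    }
}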
463 :
464 : typedef enum
465 : {
466 : /* Index is unknown. */
467 : VLIB_BUFFER_UNKNOWN,
468 :
469 : /* Index is known and free/allocated. */
470 : VLIB_BUFFER_KNOWN_FREE,
471 : VLIB_BUFFER_KNOWN_ALLOCATED,
472 : } vlib_buffer_known_state_t;
473 :
474 : void vlib_buffer_validate_alloc_free (vlib_main_t * vm, u32 * buffers,
475 : uword n_buffers,
476 : vlib_buffer_known_state_t
477 : expected_state);
478 :
479 : always_inline vlib_buffer_known_state_t
480 372836000 : vlib_buffer_is_known (vlib_main_t * vm, u32 buffer_index)
481 : {
482 372836000 : vlib_buffer_main_t *bm = vm->buffer_main;
483 :
484 372836000 : clib_spinlock_lock (&bm->buffer_known_hash_lockp);
485 372836000 : uword *p = hash_get (bm->buffer_known_hash, buffer_index);
486 372836000 : clib_spinlock_unlock (&bm->buffer_known_hash_lockp);
487 372836000 : return p ? p[0] : VLIB_BUFFER_UNKNOWN;
488 : }
489 :
490 : /* Validates sanity of a single buffer.
491 : Returns a format'ed vector with an error message, if any. */
492 : u8 *vlib_validate_buffer (vlib_main_t * vm, u32 buffer_index,
493 : uword follow_chain);
494 :
495 : u8 *vlib_validate_buffers (vlib_main_t * vm,
496 : u32 * buffers,
497 : uword next_buffer_stride,
498 : uword n_buffers,
499 : vlib_buffer_known_state_t known_state,
500 : uword follow_buffer_next);
501 :
502 : static_always_inline vlib_buffer_pool_t *
503 5601884 : vlib_get_buffer_pool (vlib_main_t * vm, u8 buffer_pool_index)
504 : {
505 5601884 : vlib_buffer_main_t *bm = vm->buffer_main;
506 5601884 : return vec_elt_at_index (bm->buffer_pools, buffer_pool_index);
507 : }
508 :
509 : static_always_inline __clib_warn_unused_result uword
510 55046 : vlib_buffer_pool_get (vlib_main_t * vm, u8 buffer_pool_index, u32 * buffers,
511 : u32 n_buffers)
512 : {
513 55046 : vlib_buffer_pool_t *bp = vlib_get_buffer_pool (vm, buffer_pool_index);
514 : u32 len;
515 :
516 55045 : ASSERT (bp->buffers);
517 :
518 55045 : clib_spinlock_lock (&bp->lock);
519 55050 : len = bp->n_avail;
520 55050 : if (PREDICT_TRUE (n_buffers < len))
521 : {
522 55050 : len -= n_buffers;
523 55050 : vlib_buffer_copy_indices (buffers, bp->buffers + len, n_buffers);
524 55050 : bp->n_avail = len;
525 55050 : clib_spinlock_unlock (&bp->lock);
526 55050 : return n_buffers;
527 : }
528 : else
529 : {
530 0 : vlib_buffer_copy_indices (buffers, bp->buffers, len);
531 0 : bp->n_avail = 0;
532 0 : clib_spinlock_unlock (&bp->lock);
533 0 : return len;
534 : }
535 : }
536 :
537 :
538 : /** \brief Allocate buffers from specific pool into supplied array
539 :
540 : @param vm - (vlib_main_t *) vlib main data structure pointer
541 : @param buffers - (u32 * ) buffer index array
542 : @param n_buffers - (u32) number of buffers requested
543 : @return - (u32) number of buffers actually allocated, may be
544 : less than the number requested or zero
545 : */
546 :
547 : always_inline __clib_warn_unused_result u32
548 21415073 : vlib_buffer_alloc_from_pool (vlib_main_t * vm, u32 * buffers, u32 n_buffers,
549 : u8 buffer_pool_index)
550 : {
551 21415073 : vlib_buffer_main_t *bm = vm->buffer_main;
552 : vlib_buffer_pool_t *bp;
553 : vlib_buffer_pool_thread_t *bpt;
554 : u32 *src, *dst, len, n_left;
555 :
556 : /* If buffer allocation fault injection is configured */
557 : if (VLIB_BUFFER_ALLOC_FAULT_INJECTOR > 0)
558 : {
559 : u32 vlib_buffer_alloc_may_fail (vlib_main_t *, u32);
560 :
561 : /* See how many buffers we're willing to allocate */
562 : n_buffers = vlib_buffer_alloc_may_fail (vm, n_buffers);
563 : if (n_buffers == 0)
564 : return (n_buffers);
565 : }
566 :
567 21415073 : bp = vec_elt_at_index (bm->buffer_pools, buffer_pool_index);
568 21415073 : bpt = vec_elt_at_index (bp->threads, vm->thread_index);
569 :
570 21415074 : dst = buffers;
571 21415074 : n_left = n_buffers;
572 21415074 : len = bpt->n_cached;
573 :
574 : /* per-thread cache contains enough buffers */
575 21415074 : if (len >= n_buffers)
576 : {
577 21360005 : src = bpt->cached_buffers + len - n_buffers;
578 21360005 : vlib_buffer_copy_indices (dst, src, n_buffers);
579 21360005 : bpt->n_cached -= n_buffers;
580 21360005 : goto done;
581 : }
582 :
583 : /* alloc bigger than cache - take buffers directly from main pool */
584 55046 : if (n_buffers >= VLIB_BUFFER_POOL_PER_THREAD_CACHE_SZ)
585 : {
586 24 : n_buffers = vlib_buffer_pool_get (vm, buffer_pool_index, buffers,
587 : n_buffers);
588 24 : goto done;
589 : }
590 :
591 : /* take everything available in the cache */
592 55022 : if (len)
593 : {
594 39327 : vlib_buffer_copy_indices (dst, bpt->cached_buffers, len);
595 39327 : bpt->n_cached = 0;
596 39327 : dst += len;
597 39327 : n_left -= len;
598 : }
599 :
600 55022 : len = round_pow2 (n_left, 32);
601 55022 : len = vlib_buffer_pool_get (vm, buffer_pool_index, bpt->cached_buffers,
602 : len);
603 55026 : bpt->n_cached = len;
604 :
605 55026 : if (len)
606 : {
607 55026 : u32 n_copy = clib_min (len, n_left);
608 55026 : src = bpt->cached_buffers + len - n_copy;
609 55026 : vlib_buffer_copy_indices (dst, src, n_copy);
610 55026 : bpt->n_cached -= n_copy;
611 55026 : n_left -= n_copy;
612 : }
613 :
614 55026 : n_buffers -= n_left;
615 :
616 21415078 : done:
617 : /* Verify that buffers are known free. */
618 : if (CLIB_DEBUG > 0)
619 21415078 : vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
620 : VLIB_BUFFER_KNOWN_FREE);
621 21415080 : if (PREDICT_FALSE (bm->alloc_callback_fn != 0))
622 0 : bm->alloc_callback_fn (vm, buffer_pool_index, buffers, n_buffers);
623 21415080 : return n_buffers;
624 : }
625 :
626 : /** \brief Allocate buffers from specific numa node into supplied array
627 :
628 : @param vm - (vlib_main_t *) vlib main data structure pointer
629 : @param buffers - (u32 * ) buffer index array
630 : @param n_buffers - (u32) number of buffers requested
631 : @param numa_node - (u32) numa node
632 : @return - (u32) number of buffers actually allocated, may be
633 : less than the number requested or zero
634 : */
635 : always_inline __clib_warn_unused_result u32
636 19127911 : vlib_buffer_alloc_on_numa (vlib_main_t * vm, u32 * buffers, u32 n_buffers,
637 : u32 numa_node)
638 : {
639 19127911 : u8 index = vlib_buffer_pool_get_default_for_numa (vm, numa_node);
640 19127911 : return vlib_buffer_alloc_from_pool (vm, buffers, n_buffers, index);
641 : }
642 :
643 : /** \brief Allocate buffers into supplied array
644 :
645 : @param vm - (vlib_main_t *) vlib main data structure pointer
646 : @param buffers - (u32 * ) buffer index array
647 : @param n_buffers - (u32) number of buffers requested
648 : @return - (u32) number of buffers actually allocated, may be
649 : less than the number requested or zero
650 : */
651 :
652 : always_inline __clib_warn_unused_result u32
653 19127911 : vlib_buffer_alloc (vlib_main_t * vm, u32 * buffers, u32 n_buffers)
654 : {
655 19127911 : return vlib_buffer_alloc_on_numa (vm, buffers, n_buffers, vm->numa_node);
656 : }
657 :
658 : /** \brief Allocate buffers into ring
659 :
660 : @param vm - (vlib_main_t *) vlib main data structure pointer
661 : @param buffers - (u32 * ) buffer index ring
662 : @param start - (u32) first slot in the ring
663 : @param ring_size - (u32) ring size
664 : @param n_buffers - (u32) number of buffers requested
665 : @return - (u32) number of buffers actually allocated, may be
666 : less than the number requested or zero
667 : */
668 : always_inline __clib_warn_unused_result u32
669 : vlib_buffer_alloc_to_ring (vlib_main_t * vm, u32 * ring, u32 start,
670 : u32 ring_size, u32 n_buffers)
671 : {
672 : u32 n_alloc;
673 :
674 : ASSERT (n_buffers <= ring_size);
675 :
676 : if (PREDICT_TRUE (start + n_buffers <= ring_size))
677 : return vlib_buffer_alloc (vm, ring + start, n_buffers);
678 :
679 : n_alloc = vlib_buffer_alloc (vm, ring + start, ring_size - start);
680 :
681 : if (PREDICT_TRUE (n_alloc == ring_size - start))
682 : n_alloc += vlib_buffer_alloc (vm, ring, n_buffers - n_alloc);
683 :
684 : return n_alloc;
685 : }
686 :
687 : /** \brief Allocate buffers into ring from specific buffer pool
688 :
689 : @param vm - (vlib_main_t *) vlib main data structure pointer
690 : @param buffers - (u32 * ) buffer index ring
691 : @param start - (u32) first slot in the ring
692 : @param ring_size - (u32) ring size
693 : @param n_buffers - (u32) number of buffers requested
694 : @return - (u32) number of buffers actually allocated, may be
695 : less than the number requested or zero
696 : */
697 : always_inline __clib_warn_unused_result u32
698 1848517 : vlib_buffer_alloc_to_ring_from_pool (vlib_main_t * vm, u32 * ring, u32 start,
699 : u32 ring_size, u32 n_buffers,
700 : u8 buffer_pool_index)
701 : {
702 : u32 n_alloc;
703 :
704 1848517 : ASSERT (n_buffers <= ring_size);
705 :
706 1848517 : if (PREDICT_TRUE (start + n_buffers <= ring_size))
707 1410117 : return vlib_buffer_alloc_from_pool (vm, ring + start, n_buffers,
708 : buffer_pool_index);
709 :
710 438397 : n_alloc = vlib_buffer_alloc_from_pool (vm, ring + start, ring_size - start,
711 : buffer_pool_index);
712 :
713 438397 : if (PREDICT_TRUE (n_alloc == ring_size - start))
714 438397 : n_alloc += vlib_buffer_alloc_from_pool (vm, ring, n_buffers - n_alloc,
715 : buffer_pool_index);
716 :
717 438397 : return n_alloc;
718 : }
719 :
720 : static_always_inline void
721 3170757 : vlib_buffer_pool_put (vlib_main_t * vm, u8 buffer_pool_index,
722 : u32 * buffers, u32 n_buffers)
723 : {
724 3170757 : vlib_buffer_main_t *bm = vm->buffer_main;
725 3170757 : vlib_buffer_pool_t *bp = vlib_get_buffer_pool (vm, buffer_pool_index);
726 3170757 : vlib_buffer_pool_thread_t *bpt = vec_elt_at_index (bp->threads,
727 : vm->thread_index);
728 : u32 n_cached, n_empty;
729 :
730 : if (CLIB_DEBUG > 0)
731 3170757 : vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
732 : VLIB_BUFFER_KNOWN_ALLOCATED);
733 3170757 : if (PREDICT_FALSE (bm->free_callback_fn != 0))
734 0 : bm->free_callback_fn (vm, buffer_pool_index, buffers, n_buffers);
735 :
736 3170757 : n_cached = bpt->n_cached;
737 3170757 : n_empty = VLIB_BUFFER_POOL_PER_THREAD_CACHE_SZ - n_cached;
738 3170757 : if (n_buffers <= n_empty)
739 : {
740 3056362 : vlib_buffer_copy_indices (bpt->cached_buffers + n_cached,
741 : buffers, n_buffers);
742 3056362 : bpt->n_cached = n_cached + n_buffers;
743 3056362 : return;
744 : }
745 :
746 114392 : vlib_buffer_copy_indices (bpt->cached_buffers + n_cached,
747 114392 : buffers + n_buffers - n_empty, n_empty);
748 114392 : bpt->n_cached = VLIB_BUFFER_POOL_PER_THREAD_CACHE_SZ;
749 :
750 114392 : clib_spinlock_lock (&bp->lock);
751 114392 : vlib_buffer_copy_indices (bp->buffers + bp->n_avail, buffers,
752 : n_buffers - n_empty);
753 114392 : bp->n_avail += n_buffers - n_empty;
754 114392 : clib_spinlock_unlock (&bp->lock);
755 : }
756 :
757 : static_always_inline void
758 2378444 : vlib_buffer_free_inline (vlib_main_t * vm, u32 * buffers, u32 n_buffers,
759 : int maybe_next)
760 2378444 : {
761 2378444 : const int queue_size = 128;
762 2378444 : vlib_buffer_pool_t *bp = 0;
763 2378444 : u8 buffer_pool_index = ~0;
764 2378444 : u32 n_queue = 0, queue[queue_size + 4];
765 2378444 : vlib_buffer_t bt = { };
766 : #if defined(CLIB_HAVE_VEC128)
767 2378444 : vlib_buffer_t bpi_mask = {.buffer_pool_index = ~0 };
768 2378444 : vlib_buffer_t bpi_vec = {};
769 2378444 : vlib_buffer_t flags_refs_mask = {
770 : .flags = VLIB_BUFFER_NEXT_PRESENT,
771 : .ref_count = ~1
772 : };
773 : #endif
774 :
775 2378444 : if (PREDICT_FALSE (n_buffers == 0))
776 2355 : return;
777 :
778 2376089 : vlib_buffer_t *b = vlib_get_buffer (vm, buffers[0]);
779 2376089 : buffer_pool_index = b->buffer_pool_index;
780 2376089 : bp = vlib_get_buffer_pool (vm, buffer_pool_index);
781 2376089 : vlib_buffer_copy_template (&bt, &bp->buffer_template);
782 : #if defined(CLIB_HAVE_VEC128)
783 2376089 : bpi_vec.buffer_pool_index = buffer_pool_index;
784 : #endif
785 :
786 34173294 : while (n_buffers)
787 : {
788 : vlib_buffer_t *b[8];
789 31797211 : u32 bi, sum = 0, flags, next;
790 :
791 : #if defined(CLIB_HAVE_VEC512)
792 0 : if (n_buffers < 8)
793 : #else
794 31797211 : if (n_buffers < 4)
795 : #endif
796 4365747 : goto one_by_one;
797 :
798 : #if defined(CLIB_HAVE_VEC512)
799 0 : vlib_get_buffers (vm, buffers, b, 8);
800 : #else
801 27431494 : vlib_get_buffers (vm, buffers, b, 4);
802 :
803 27431494 : if (n_buffers >= 12)
804 : {
805 21656693 : vlib_get_buffers (vm, buffers + 8, b + 4, 4);
806 21656693 : vlib_prefetch_buffer_header (b[4], LOAD);
807 21656693 : vlib_prefetch_buffer_header (b[5], LOAD);
808 21656693 : vlib_prefetch_buffer_header (b[6], LOAD);
809 21656693 : vlib_prefetch_buffer_header (b[7], LOAD);
810 : }
811 : #endif
812 :
813 : #if defined(CLIB_HAVE_VEC512)
814 : u8x16 p0, p1, p2, p3, p4, p5, p6, p7, r;
815 0 : p0 = u8x16_load_unaligned (b[0]);
816 0 : p1 = u8x16_load_unaligned (b[1]);
817 0 : p2 = u8x16_load_unaligned (b[2]);
818 0 : p3 = u8x16_load_unaligned (b[3]);
819 0 : p4 = u8x16_load_unaligned (b[4]);
820 0 : p5 = u8x16_load_unaligned (b[5]);
821 0 : p6 = u8x16_load_unaligned (b[6]);
822 0 : p7 = u8x16_load_unaligned (b[7]);
823 :
824 0 : r = p0 ^ bpi_vec.as_u8x16[0];
825 0 : r |= p1 ^ bpi_vec.as_u8x16[0];
826 0 : r |= p2 ^ bpi_vec.as_u8x16[0];
827 0 : r |= p3 ^ bpi_vec.as_u8x16[0];
828 0 : r |= p4 ^ bpi_vec.as_u8x16[0];
829 0 : r |= p5 ^ bpi_vec.as_u8x16[0];
830 0 : r |= p6 ^ bpi_vec.as_u8x16[0];
831 0 : r |= p7 ^ bpi_vec.as_u8x16[0];
832 0 : r &= bpi_mask.as_u8x16[0];
833 0 : r |=
834 0 : (p0 | p1 | p2 | p3 | p4 | p5 | p6 | p7) & flags_refs_mask.as_u8x16[0];
835 :
836 0 : sum = !u8x16_is_all_zero (r);
837 : #elif defined(CLIB_HAVE_VEC128)
838 : u8x16 p0, p1, p2, p3, r;
839 27431494 : p0 = u8x16_load_unaligned (b[0]);
840 27431494 : p1 = u8x16_load_unaligned (b[1]);
841 27431494 : p2 = u8x16_load_unaligned (b[2]);
842 27431494 : p3 = u8x16_load_unaligned (b[3]);
843 :
844 27431494 : r = p0 ^ bpi_vec.as_u8x16[0];
845 27431494 : r |= p1 ^ bpi_vec.as_u8x16[0];
846 27431494 : r |= p2 ^ bpi_vec.as_u8x16[0];
847 27431494 : r |= p3 ^ bpi_vec.as_u8x16[0];
848 27431494 : r &= bpi_mask.as_u8x16[0];
849 27431494 : r |= (p0 | p1 | p2 | p3) & flags_refs_mask.as_u8x16[0];
850 :
851 27431494 : sum = !u8x16_is_all_zero (r);
852 : #else
853 : sum |= b[0]->flags;
854 : sum |= b[1]->flags;
855 : sum |= b[2]->flags;
856 : sum |= b[3]->flags;
857 : sum &= VLIB_BUFFER_NEXT_PRESENT;
858 : sum += b[0]->ref_count - 1;
859 : sum += b[1]->ref_count - 1;
860 : sum += b[2]->ref_count - 1;
861 : sum += b[3]->ref_count - 1;
862 : sum |= b[0]->buffer_pool_index ^ buffer_pool_index;
863 : sum |= b[1]->buffer_pool_index ^ buffer_pool_index;
864 : sum |= b[2]->buffer_pool_index ^ buffer_pool_index;
865 : sum |= b[3]->buffer_pool_index ^ buffer_pool_index;
866 : #endif
867 :
868 27431494 : if (sum)
869 20621640 : goto one_by_one;
870 :
871 : #if defined(CLIB_HAVE_VEC512)
872 0 : vlib_buffer_copy_indices (queue + n_queue, buffers, 8);
873 0 : vlib_buffer_copy_template (b[0], &bt);
874 0 : vlib_buffer_copy_template (b[1], &bt);
875 0 : vlib_buffer_copy_template (b[2], &bt);
876 0 : vlib_buffer_copy_template (b[3], &bt);
877 0 : vlib_buffer_copy_template (b[4], &bt);
878 0 : vlib_buffer_copy_template (b[5], &bt);
879 0 : vlib_buffer_copy_template (b[6], &bt);
880 0 : vlib_buffer_copy_template (b[7], &bt);
881 0 : n_queue += 8;
882 :
883 0 : vlib_buffer_validate (vm, b[0]);
884 0 : vlib_buffer_validate (vm, b[1]);
885 0 : vlib_buffer_validate (vm, b[2]);
886 0 : vlib_buffer_validate (vm, b[3]);
887 0 : vlib_buffer_validate (vm, b[4]);
888 0 : vlib_buffer_validate (vm, b[5]);
889 0 : vlib_buffer_validate (vm, b[6]);
890 0 : vlib_buffer_validate (vm, b[7]);
891 :
892 : VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[0]);
893 : VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[1]);
894 : VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[2]);
895 : VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[3]);
896 : VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[4]);
897 : VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[5]);
898 : VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[6]);
899 : VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[7]);
900 : #else
901 6809864 : vlib_buffer_copy_indices (queue + n_queue, buffers, 4);
902 6809864 : vlib_buffer_copy_template (b[0], &bt);
903 6809864 : vlib_buffer_copy_template (b[1], &bt);
904 6809864 : vlib_buffer_copy_template (b[2], &bt);
905 6809864 : vlib_buffer_copy_template (b[3], &bt);
906 6809864 : n_queue += 4;
907 :
908 6809864 : vlib_buffer_validate (vm, b[0]);
909 6809864 : vlib_buffer_validate (vm, b[1]);
910 6809864 : vlib_buffer_validate (vm, b[2]);
911 6809864 : vlib_buffer_validate (vm, b[3]);
912 :
913 : VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[0]);
914 : VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[1]);
915 : VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[2]);
916 : VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[3]);
917 : #endif
918 :
919 6809864 : if (n_queue >= queue_size)
920 : {
921 56874 : vlib_buffer_pool_put (vm, buffer_pool_index, queue, n_queue);
922 56874 : n_queue = 0;
923 : }
924 : #if defined(CLIB_HAVE_VEC512)
925 0 : buffers += 8;
926 0 : n_buffers -= 8;
927 : #else
928 6809864 : buffers += 4;
929 6809864 : n_buffers -= 4;
930 : #endif
931 6809864 : continue;
932 :
933 24987357 : one_by_one:
934 24987357 : bi = buffers[0];
935 :
936 159177970 : next_in_chain:
937 159177970 : b[0] = vlib_get_buffer (vm, bi);
938 159177970 : flags = b[0]->flags;
939 159177970 : next = b[0]->next_buffer;
940 :
941 159177970 : if (PREDICT_FALSE (buffer_pool_index != b[0]->buffer_pool_index))
942 : {
943 :
944 0 : if (n_queue)
945 : {
946 0 : vlib_buffer_pool_put (vm, buffer_pool_index, queue, n_queue);
947 0 : n_queue = 0;
948 : }
949 :
950 0 : buffer_pool_index = b[0]->buffer_pool_index;
951 : #if defined(CLIB_HAVE_VEC128)
952 0 : bpi_vec.buffer_pool_index = buffer_pool_index;
953 : #endif
954 0 : bp = vlib_get_buffer_pool (vm, buffer_pool_index);
955 0 : vlib_buffer_copy_template (&bt, &bp->buffer_template);
956 : }
957 :
958 159177970 : vlib_buffer_validate (vm, b[0]);
959 :
960 : VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[0]);
961 :
962 159177970 : if (clib_atomic_sub_fetch (&b[0]->ref_count, 1) == 0)
963 : {
964 159177258 : vlib_buffer_copy_template (b[0], &bt);
965 159177258 : queue[n_queue++] = bi;
966 : }
967 :
968 159177970 : if (n_queue == queue_size)
969 : {
970 775668 : vlib_buffer_pool_put (vm, buffer_pool_index, queue, queue_size);
971 775668 : n_queue = 0;
972 : }
973 :
974 159177970 : if (maybe_next && (flags & VLIB_BUFFER_NEXT_PRESENT))
975 : {
976 134190413 : bi = next;
977 134190413 : goto next_in_chain;
978 : }
979 :
980 24987357 : buffers++;
981 24987357 : n_buffers--;
982 : }
983 :
984 2376089 : if (n_queue)
985 2338210 : vlib_buffer_pool_put (vm, buffer_pool_index, queue, n_queue);
986 : }
987 :
988 :
989 : /** \brief Free buffers
990 : Frees the entire buffer chain for each buffer
991 :
992 : @param vm - (vlib_main_t *) vlib main data structure pointer
993 : @param buffers - (u32 * ) buffer index array
994 : @param n_buffers - (u32) number of buffers to free
995 :
996 : */
997 : always_inline void
998 2164890 : vlib_buffer_free (vlib_main_t * vm,
999 : /* pointer to first buffer */
1000 : u32 * buffers,
1001 : /* number of buffers to free */
1002 : u32 n_buffers)
1003 : {
1004 2164890 : vlib_buffer_free_inline (vm, buffers, n_buffers, /* maybe next */ 1);
1005 2164890 : }
1006 :
1007 : /** \brief Free buffers, does not free the buffer chain for each buffer
1008 :
1009 : @param vm - (vlib_main_t *) vlib main data structure pointer
1010 : @param buffers - (u32 * ) buffer index array
1011 : @param n_buffers - (u32) number of buffers to free
1012 :
1013 : */
1014 : always_inline void
1015 4 : vlib_buffer_free_no_next (vlib_main_t * vm,
1016 : /* pointer to first buffer */
1017 : u32 * buffers,
1018 : /* number of buffers to free */
1019 : u32 n_buffers)
1020 : {
1021 4 : vlib_buffer_free_inline (vm, buffers, n_buffers, /* maybe next */ 0);
1022 4 : }
1023 :
1024 : /** \brief Free one buffer
1025 : Shorthand to free a single buffer chain.
1026 :
1027 : @param vm - (vlib_main_t *) vlib main data structure pointer
1028 : @param buffer_index - (u32) buffer index to free
1029 : */
1030 : always_inline void
1031 213550 : vlib_buffer_free_one (vlib_main_t * vm, u32 buffer_index)
1032 : {
1033 213550 : vlib_buffer_free_inline (vm, &buffer_index, 1, /* maybe next */ 1);
1034 213550 : }
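/* Illustrative sketch (the example_* name is hypothetical): the common
 * "all or nothing" allocation pattern; a partial allocation is handed back
 * with vlib_buffer_free() so no buffers leak. */
static_always_inline u32
example_alloc_all_or_nothing (vlib_main_t * vm, u32 * buffers, u32 n_want)
{
  u32 n_got = vlib_buffer_alloc (vm, buffers, n_want);
  if (PREDICT_FALSE (n_got < n_want))
    {
      if (n_got)
	vlib_buffer_free (vm, buffers, n_got);
      return 0;
    }
  return n_got;
}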
1035 :
1036 : /** \brief Free buffers from ring
1037 :
1038 : @param vm - (vlib_main_t *) vlib main data structure pointer
1039 : @param buffers - (u32 * ) buffer index ring
1040 : @param start - (u32) first slot in the ring
1041 : @param ring_size - (u32) ring size
1042 : @param n_buffers - (u32) number of buffers
1043 : */
1044 : always_inline void
1045 1076264 : vlib_buffer_free_from_ring (vlib_main_t * vm, u32 * ring, u32 start,
1046 : u32 ring_size, u32 n_buffers)
1047 : {
1048 1076264 : ASSERT (n_buffers <= ring_size);
1049 :
1050 1076264 : if (PREDICT_TRUE (start + n_buffers <= ring_size))
1051 : {
1052 942477 : vlib_buffer_free (vm, ring + start, n_buffers);
1053 : }
1054 : else
1055 : {
1056 133791 : vlib_buffer_free (vm, ring + start, ring_size - start);
1057 133791 : vlib_buffer_free (vm, ring, n_buffers - (ring_size - start));
1058 : }
1059 1076264 : }
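/* Illustrative sketch (the example_* name is hypothetical): refilling the
 * free slots of a fixed-size index ring, e.g. an rx descriptor ring; `tail'
 * is the first free slot and wrap-around is handled by the ring helper. */
static_always_inline u32
example_ring_refill (vlib_main_t * vm, u32 * ring, u32 ring_size, u32 tail,
		     u32 n_free_slots)
{
  return vlib_buffer_alloc_to_ring (vm, ring, tail % ring_size, ring_size,
				    n_free_slots);
}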
1060 :
1061 : /** \brief Free buffers from ring without freeing tail buffers
1062 :
1063 : @param vm - (vlib_main_t *) vlib main data structure pointer
1064 : @param buffers - (u32 * ) buffer index ring
1065 : @param start - (u32) first slot in the ring
1066 : @param ring_size - (u32) ring size
1067 : @param n_buffers - (u32) number of buffers
1068 : */
1069 : always_inline void
1070 4 : vlib_buffer_free_from_ring_no_next (vlib_main_t * vm, u32 * ring, u32 start,
1071 : u32 ring_size, u32 n_buffers)
1072 : {
1073 4 : ASSERT (n_buffers <= ring_size);
1074 :
1075 4 : if (PREDICT_TRUE (start + n_buffers <= ring_size))
1076 : {
1077 4 : vlib_buffer_free_no_next (vm, ring + start, n_buffers);
1078 : }
1079 : else
1080 : {
1081 0 : vlib_buffer_free_no_next (vm, ring + start, ring_size - start);
1082 0 : vlib_buffer_free_no_next (vm, ring, n_buffers - (ring_size - start));
1083 : }
1084 4 : }
1085 :
1086 : /* Append given data to end of buffer, possibly allocating new buffers. */
1087 : int vlib_buffer_add_data (vlib_main_t * vm, u32 * buffer_index, void *data,
1088 : u32 n_data_bytes);
1089 :
1090 : /* Define vlib_buffer and vnet_buffer flags bits preserved for copy/clone */
1091 : #define VLIB_BUFFER_COPY_CLONE_FLAGS_MASK \
1092 : (VLIB_BUFFER_NEXT_PRESENT | VLIB_BUFFER_TOTAL_LENGTH_VALID | \
1093 : VLIB_BUFFER_IS_TRACED | ~VLIB_BUFFER_FLAGS_ALL)
1094 :
1095 : /* duplicate all buffers in chain */
1096 : always_inline vlib_buffer_t *
1097 10133 : vlib_buffer_copy (vlib_main_t * vm, vlib_buffer_t * b)
1098 : {
1099 : vlib_buffer_t *s, *d, *fd;
1100 10133 : uword n_alloc, n_buffers = 1;
1101 10133 : u32 flag_mask = VLIB_BUFFER_COPY_CLONE_FLAGS_MASK;
1102 : int i;
1103 :
1104 10133 : s = b;
1105 10136 : while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
1106 : {
1107 3 : n_buffers++;
1108 3 : s = vlib_get_buffer (vm, s->next_buffer);
1109 : }
1110 10133 : u32 new_buffers[n_buffers];
1111 :
1112 10133 : n_alloc = vlib_buffer_alloc (vm, new_buffers, n_buffers);
1113 :
1114 : /* No guarantee that we'll get all the buffers we asked for */
1115 10133 : if (PREDICT_FALSE (n_alloc < n_buffers))
1116 : {
1117 0 : if (n_alloc > 0)
1118 0 : vlib_buffer_free (vm, new_buffers, n_alloc);
1119 0 : return 0;
1120 : }
1121 :
1122 : /* 1st segment */
1123 10133 : s = b;
1124 10133 : fd = d = vlib_get_buffer (vm, new_buffers[0]);
1125 10133 : d->current_data = s->current_data;
1126 10133 : d->current_length = s->current_length;
1127 10133 : d->flags = s->flags & flag_mask;
1128 10133 : d->trace_handle = s->trace_handle;
1129 10133 : d->total_length_not_including_first_buffer =
1130 10133 : s->total_length_not_including_first_buffer;
1131 10133 : clib_memcpy_fast (d->opaque, s->opaque, sizeof (s->opaque));
1132 10133 : clib_memcpy_fast (d->opaque2, s->opaque2, sizeof (s->opaque2));
1133 10133 : clib_memcpy_fast (vlib_buffer_get_current (d),
1134 10133 : vlib_buffer_get_current (s), s->current_length);
1135 :
1136 : /* next segments */
1137 10136 : for (i = 1; i < n_buffers; i++)
1138 : {
1139 : /* previous */
1140 3 : d->next_buffer = new_buffers[i];
1141 : /* current */
1142 3 : s = vlib_get_buffer (vm, s->next_buffer);
1143 3 : d = vlib_get_buffer (vm, new_buffers[i]);
1144 3 : d->current_data = s->current_data;
1145 3 : d->current_length = s->current_length;
1146 3 : clib_memcpy_fast (vlib_buffer_get_current (d),
1147 3 : vlib_buffer_get_current (s), s->current_length);
1148 3 : d->flags = s->flags & flag_mask;
1149 : }
1150 :
1151 10133 : return fd;
1152 : }
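/* Illustrative sketch (the example_* name is hypothetical): deep-copying a
 * packet by buffer index; vlib_buffer_copy() returns 0 if buffers could not
 * be allocated, in which case ~0 is returned here as an invalid index. */
static_always_inline u32
example_copy_packet (vlib_main_t * vm, u32 bi)
{
  vlib_buffer_t *copy = vlib_buffer_copy (vm, vlib_get_buffer (vm, bi));
  return copy ? vlib_get_buffer_index (vm, copy) : ~0;
}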
1153 :
1154 : /* duplicate first buffer in chain */
1155 : always_inline vlib_buffer_t *
1156 51 : vlib_buffer_copy_no_chain (vlib_main_t * vm, vlib_buffer_t * b, u32 * di)
1157 : {
1158 : vlib_buffer_t *d;
1159 :
1160 51 : if ((vlib_buffer_alloc (vm, di, 1)) != 1)
1161 0 : return 0;
1162 :
1163 51 : d = vlib_get_buffer (vm, *di);
1164 : /* 1st segment */
1165 51 : d->current_data = b->current_data;
1166 51 : d->current_length = b->current_length;
1167 51 : clib_memcpy_fast (d->opaque, b->opaque, sizeof (b->opaque));
1168 51 : clib_memcpy_fast (d->opaque2, b->opaque2, sizeof (b->opaque2));
1169 51 : clib_memcpy_fast (vlib_buffer_get_current (d),
1170 51 : vlib_buffer_get_current (b), b->current_length);
1171 :
1172 51 : return d;
1173 : }
1174 :
1175 : /* \brief Move packet from current position to offset position in buffer.
1176 : Only works for small packets that fit in a single buffer, with room for the move
1177 : @param vm - (vlib_main_t *) vlib main data structure pointer
1178 : @param b - (vlib_buffer_t *) pointer to buffer
1179 : @param offset - (i16) position to move the packet in buffer
1180 : */
1181 : always_inline void
1182 10 : vlib_buffer_move (vlib_main_t * vm, vlib_buffer_t * b, i16 offset)
1183 : {
1184 10 : ASSERT ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0);
1185 10 : ASSERT (offset + VLIB_BUFFER_PRE_DATA_SIZE >= 0);
1186 10 : ASSERT (offset + b->current_length <
1187 : vlib_buffer_get_default_data_size (vm));
1188 :
1189 10 : u8 *source = vlib_buffer_get_current (b);
1190 10 : b->current_data = offset;
1191 10 : u8 *destination = vlib_buffer_get_current (b);
1192 10 : u16 length = b->current_length;
1193 :
1194 10 : if (source + length <= destination) /* no overlap */
1195 0 : clib_memcpy_fast (destination, source, length);
1196 : else
1197 10 : memmove (destination, source, length);
1198 10 : }
1199 :
1200 : /** \brief Create a maximum of 256 clones of buffer and store them
1201 : in the supplied array
1202 :
1203 : @param vm - (vlib_main_t *) vlib main data structure pointer
1204 : @param src_buffer - (u32) source buffer index
1205 : @param buffers - (u32 * ) buffer index array
1206 : @param n_buffers - (u16) number of buffer clones requested (<=256)
1207 : @param head_end_offset - (u16) offset relative to current position
1208 : where packet head ends
1209 : @param offset - (i16) copy packet head at current position if 0,
1210 : else at offset position to change headroom space as specified
1211 : @return - (u16) number of buffers actually cloned, may be
1212 : less than the number requested or zero
1213 : */
1214 : always_inline u16
1215 10573 : vlib_buffer_clone_256 (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
1216 : u16 n_buffers, u16 head_end_offset, i16 offset)
1217 : {
1218 : u16 i;
1219 10573 : vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
1220 :
1221 10573 : ASSERT (s->ref_count == 1);
1222 10573 : ASSERT (n_buffers);
1223 10573 : ASSERT (n_buffers <= 256);
1224 10573 : ASSERT (offset + VLIB_BUFFER_PRE_DATA_SIZE >= 0);
1225 10573 : ASSERT ((offset + head_end_offset) <
1226 : vlib_buffer_get_default_data_size (vm));
1227 :
1228 10573 : if (s->current_length <= head_end_offset + CLIB_CACHE_LINE_BYTES * 2)
1229 : {
1230 10138 : buffers[0] = src_buffer;
1231 10138 : if (offset)
1232 0 : vlib_buffer_move (vm, s, offset);
1233 :
1234 17121 : for (i = 1; i < n_buffers; i++)
1235 : {
1236 : vlib_buffer_t *d;
1237 6983 : d = vlib_buffer_copy (vm, s);
1238 6983 : if (d == 0)
1239 0 : return i;
1240 6983 : buffers[i] = vlib_get_buffer_index (vm, d);
1241 :
1242 : }
1243 10138 : return n_buffers;
1244 : }
1245 :
1246 435 : if (PREDICT_FALSE ((n_buffers == 1) && (offset == 0)))
1247 : {
1248 201 : buffers[0] = src_buffer;
1249 201 : return 1;
1250 : }
1251 :
1252 468 : n_buffers = vlib_buffer_alloc_from_pool (vm, buffers, n_buffers,
1253 234 : s->buffer_pool_index);
1254 :
1255 796 : for (i = 0; i < n_buffers; i++)
1256 : {
1257 562 : vlib_buffer_t *d = vlib_get_buffer (vm, buffers[i]);
1258 562 : if (offset)
1259 0 : d->current_data = offset;
1260 : else
1261 562 : d->current_data = s->current_data;
1262 :
1263 562 : d->current_length = head_end_offset;
1264 562 : ASSERT (d->buffer_pool_index == s->buffer_pool_index);
1265 :
1266 562 : d->total_length_not_including_first_buffer = s->current_length -
1267 : head_end_offset;
1268 562 : if (PREDICT_FALSE (s->flags & VLIB_BUFFER_NEXT_PRESENT))
1269 : {
1270 176 : d->total_length_not_including_first_buffer +=
1271 176 : s->total_length_not_including_first_buffer;
1272 : }
1273 562 : d->flags = (s->flags & VLIB_BUFFER_COPY_CLONE_FLAGS_MASK) |
1274 : VLIB_BUFFER_NEXT_PRESENT;
1275 562 : d->trace_handle = s->trace_handle;
1276 562 : clib_memcpy_fast (d->opaque, s->opaque, sizeof (s->opaque));
1277 562 : clib_memcpy_fast (d->opaque2, s->opaque2, sizeof (s->opaque2));
1278 562 : clib_memcpy_fast (vlib_buffer_get_current (d),
1279 562 : vlib_buffer_get_current (s), head_end_offset);
1280 562 : d->next_buffer = src_buffer;
1281 : }
1282 234 : vlib_buffer_advance (s, head_end_offset);
1283 234 : s->ref_count = n_buffers ? n_buffers : s->ref_count;
1284 554 : while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
1285 : {
1286 320 : s = vlib_get_buffer (vm, s->next_buffer);
1287 320 : s->ref_count = n_buffers ? n_buffers : s->ref_count;
1288 : }
1289 :
1290 234 : return n_buffers;
1291 : }
1292 :
1293 : /** \brief Create multiple clones of buffer and store them
1294 : in the supplied array
1295 :
1296 : @param vm - (vlib_main_t *) vlib main data structure pointer
1297 : @param src_buffer - (u32) source buffer index
1298 : @param buffers - (u32 * ) buffer index array
1299 : @param n_buffers - (u16) number of buffer clones requested
1300 : @param head_end_offset - (u16) offset relative to current position
1301 : where packet head ends
1302 : @param offset - (i16) copy packet head at current position if 0,
1303 : else at offset position to change headroom space as specified
1304 : @return - (u16) number of buffers actually cloned, may be
1305 : less than the number requested or zero
1306 : */
1307 : always_inline u16
1308 10573 : vlib_buffer_clone_at_offset (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
1309 : u16 n_buffers, u16 head_end_offset, i16 offset)
1310 : {
1311 10573 : vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
1312 10573 : u16 n_cloned = 0;
1313 :
1314 10573 : while (n_buffers > 256)
1315 : {
1316 : vlib_buffer_t *copy;
1317 0 : copy = vlib_buffer_copy (vm, s);
1318 0 : n_cloned += vlib_buffer_clone_256 (vm,
1319 : vlib_get_buffer_index (vm, copy),
1320 0 : (buffers + n_cloned),
1321 : 256, head_end_offset, offset);
1322 0 : n_buffers -= 256;
1323 : }
1324 21146 : n_cloned += vlib_buffer_clone_256 (vm, src_buffer,
1325 10573 : buffers + n_cloned,
1326 : n_buffers, head_end_offset, offset);
1327 :
1328 10573 : return n_cloned;
1329 : }
1330 :
1331 : /** \brief Create multiple clones of buffer and store them
1332 : in the supplied array
1333 :
1334 : @param vm - (vlib_main_t *) vlib main data structure pointer
1335 : @param src_buffer - (u32) source buffer index
1336 : @param buffers - (u32 * ) buffer index array
1337 : @param n_buffers - (u16) number of buffer clones requested
1338 : @param head_end_offset - (u16) offset relative to current position
1339 : where packet head ends
1340 : @return - (u16) number of buffers actually cloned, may be
1341 : less than the number requested or zero
1342 : */
1343 : always_inline u16
1344 10573 : vlib_buffer_clone (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
1345 : u16 n_buffers, u16 head_end_offset)
1346 : {
1347 10573 : return vlib_buffer_clone_at_offset (vm, src_buffer, buffers, n_buffers,
1348 : head_end_offset, 0);
1349 : }
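/* Illustrative sketch (the example_* name is hypothetical): replicating a
 * packet, e.g. for multicast. Each clone gets a private copy of the first
 * head_end bytes (headers that will be rewritten per copy) and shares the
 * rest of the payload with the source buffer. */
static_always_inline u16
example_replicate (vlib_main_t * vm, u32 bi, u32 * clones, u16 n_clones,
		   u16 head_end)
{
  u16 n = vlib_buffer_clone (vm, bi, clones, n_clones, head_end);
  /* n may be less than n_clones on allocation failure; when the head is
   * small enough, clones[0] is the source buffer itself. */
  return n;
}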
1350 :
1351 : /** \brief Attach cloned tail to the buffer
1352 :
1353 : @param vm - (vlib_main_t *) vlib main data structure pointer
1354 : @param head - (vlib_buffer_t *) head buffer
1355 : @param tail - (vlib_buffer_t *) tail buffer to clone and attach to head
1356 : */
1357 :
1358 : always_inline void
1359 : vlib_buffer_attach_clone (vlib_main_t * vm, vlib_buffer_t * head,
1360 : vlib_buffer_t * tail)
1361 : {
1362 : ASSERT ((head->flags & VLIB_BUFFER_NEXT_PRESENT) == 0);
1363 : ASSERT (head->buffer_pool_index == tail->buffer_pool_index);
1364 :
1365 : head->flags |= VLIB_BUFFER_NEXT_PRESENT;
1366 : head->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
1367 : head->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
1368 : head->flags |= (tail->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID);
1369 : head->next_buffer = vlib_get_buffer_index (vm, tail);
1370 : head->total_length_not_including_first_buffer = tail->current_length +
1371 : tail->total_length_not_including_first_buffer;
1372 :
1373 : next_segment:
1374 : clib_atomic_add_fetch (&tail->ref_count, 1);
1375 :
1376 : if (tail->flags & VLIB_BUFFER_NEXT_PRESENT)
1377 : {
1378 : tail = vlib_get_buffer (vm, tail->next_buffer);
1379 : goto next_segment;
1380 : }
1381 : }
1382 :
1383 : /* Initializes the buffer as an empty packet with no chained buffers. */
1384 : always_inline void
1385 : vlib_buffer_chain_init (vlib_buffer_t * first)
1386 : {
1387 : first->total_length_not_including_first_buffer = 0;
1388 : first->current_length = 0;
1389 : first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1390 : first->flags |= VLIB_BUFFER_TOTAL_LENGTH_VALID;
1391 : }
1392 :
1393 : /* The provided next_bi buffer index is appended to the end of the packet. */
1394 : always_inline vlib_buffer_t *
1395 8 : vlib_buffer_chain_buffer (vlib_main_t * vm, vlib_buffer_t * last, u32 next_bi)
1396 : {
1397 8 : vlib_buffer_t *next_buffer = vlib_get_buffer (vm, next_bi);
1398 8 : last->next_buffer = next_bi;
1399 8 : last->flags |= VLIB_BUFFER_NEXT_PRESENT;
1400 8 : next_buffer->current_length = 0;
1401 8 : next_buffer->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1402 8 : return next_buffer;
1403 : }
1404 :
1405 : /* Increases or decreases the packet length.
1406 : * It does not allocate or deallocate new buffers.
1407 : * Therefore, the added length must be compatible
1408 : * with the last buffer. */
1409 : always_inline void
1410 7023 : vlib_buffer_chain_increase_length (vlib_buffer_t * first,
1411 : vlib_buffer_t * last, i32 len)
1412 : {
1413 7023 : last->current_length += len;
1414 7023 : if (first != last)
1415 48 : first->total_length_not_including_first_buffer += len;
1416 7023 : }
1417 :
1418 : /* Copies data to the end of the packet and increases its length.
1419 : * It does not allocate new buffers.
1420 : * Returns the number of copied bytes. */
1421 : always_inline u16
1422 : vlib_buffer_chain_append_data (vlib_main_t * vm,
1423 : vlib_buffer_t * first,
1424 : vlib_buffer_t * last, void *data, u16 data_len)
1425 : {
1426 : u32 n_buffer_bytes = vlib_buffer_get_default_data_size (vm);
1427 : ASSERT (n_buffer_bytes >= last->current_length + last->current_data);
1428 : u16 len = clib_min (data_len,
1429 : n_buffer_bytes - last->current_length -
1430 : last->current_data);
1431 : clib_memcpy_fast (vlib_buffer_get_current (last) + last->current_length,
1432 : data, len);
1433 : vlib_buffer_chain_increase_length (first, last, len);
1434 : return len;
1435 : }
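/* Illustrative sketch (the example_* name is hypothetical): starting a fresh
 * chain in an already-allocated head buffer, chaining a second buffer onto
 * it and appending data into the tail segment. */
static_always_inline u16
example_build_chain (vlib_main_t * vm, u32 bi_head, u32 bi_tail, void *data,
		     u16 data_len)
{
  vlib_buffer_t *head = vlib_get_buffer (vm, bi_head);
  vlib_buffer_t *last;

  vlib_buffer_chain_init (head);
  last = vlib_buffer_chain_buffer (vm, head, bi_tail);
  /* copies at most the space left in `last'; returns the bytes copied */
  return vlib_buffer_chain_append_data (vm, head, last, data, data_len);
}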
1436 :
1437 : /* Copies data to the end of the packet and increases its length.
1438 : * Allocates additional buffers from the free list if necessary.
1439 : * Returns the number of copied bytes.
1440 : * 'last' value is modified whenever new buffers are allocated and
1441 : * chained and points to the last buffer in the chain. */
1442 : u16
1443 : vlib_buffer_chain_append_data_with_alloc (vlib_main_t * vm,
1444 : vlib_buffer_t * first,
1445 : vlib_buffer_t ** last, void *data,
1446 : u16 data_len);
1447 : void vlib_buffer_chain_validate (vlib_main_t * vm, vlib_buffer_t * first);
1448 :
1449 : format_function_t format_vlib_buffer, format_vlib_buffer_and_data,
1450 : format_vlib_buffer_contents, format_vlib_buffer_no_chain;
1451 :
1452 : typedef struct
1453 : {
1454 : /* Vector of packet data. */
1455 : u8 *packet_data;
1456 :
1457 : /* Number of buffers to allocate in each call to allocator. */
1458 : u32 min_n_buffers_each_alloc;
1459 :
1460 : u8 *name;
1461 : } vlib_packet_template_t;
1462 :
1463 : void vlib_packet_template_init (vlib_main_t * vm,
1464 : vlib_packet_template_t * t,
1465 : void *packet_data,
1466 : uword n_packet_data_bytes,
1467 : uword min_n_buffers_each_alloc,
1468 : char *fmt, ...);
1469 :
1470 : void *vlib_packet_template_get_packet (vlib_main_t * vm,
1471 : vlib_packet_template_t * t,
1472 : u32 * bi_result);
1473 :
1474 : always_inline void
1475 : vlib_packet_template_free (vlib_main_t * vm, vlib_packet_template_t * t)
1476 : {
1477 : vec_free (t->packet_data);
1478 : }
1479 :
1480 : always_inline u32
1481 145637 : vlib_buffer_space_left_at_end (vlib_main_t * vm, vlib_buffer_t * b)
1482 : {
1483 145637 : return b->data + vlib_buffer_get_default_data_size (vm) -
1484 145637 : ((u8 *) vlib_buffer_get_current (b) + b->current_length);
1485 : }
1486 :
1487 : #define VLIB_BUFFER_LINEARIZE_MAX 64
1488 :
1489 : always_inline u32
1490 459483 : vlib_buffer_chain_linearize (vlib_main_t * vm, vlib_buffer_t * b)
1491 : {
1492 : vlib_buffer_t *dst_b;
1493 459483 : u32 n_buffers = 1, to_free = 0;
1494 459483 : u16 rem_len, dst_len, data_size, src_len = 0;
1495 459483 : u8 *dst, *src = 0;
1496 :
1497 459483 : if (PREDICT_TRUE ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0))
1498 341762 : return 1;
1499 :
1500 117721 : ASSERT (1 == b->ref_count);
1501 117721 : if (PREDICT_FALSE (1 != b->ref_count))
1502 0 : return 0;
1503 :
1504 117721 : data_size = vlib_buffer_get_default_data_size (vm);
1505 117721 : rem_len = vlib_buffer_length_in_chain (vm, b) - b->current_length;
1506 :
1507 117721 : dst_b = b;
1508 117721 : dst = vlib_buffer_get_tail (dst_b);
1509 117721 : dst_len = vlib_buffer_space_left_at_end (vm, dst_b);
1510 :
1511 117721 : b->total_length_not_including_first_buffer -= dst_len;
1512 :
1513 289522 : while (rem_len > 0)
1514 : {
1515 : u16 copy_len;
1516 :
1517 338988 : while (0 == src_len)
1518 : {
1519 167187 : ASSERT (b->flags & VLIB_BUFFER_NEXT_PRESENT);
1520 167187 : if (PREDICT_FALSE (!(b->flags & VLIB_BUFFER_NEXT_PRESENT)))
1521 0 : break; /* malformed chained buffer */
1522 :
1523 167187 : b = vlib_get_buffer (vm, b->next_buffer);
1524 167187 : src = vlib_buffer_get_current (b);
1525 167187 : src_len = b->current_length;
1526 : }
1527 :
1528 171801 : if (0 == dst_len)
1529 : {
1530 152342 : ASSERT (dst_b->flags & VLIB_BUFFER_NEXT_PRESENT);
1531 152342 : if (PREDICT_FALSE (!(dst_b->flags & VLIB_BUFFER_NEXT_PRESENT)))
1532 0 : break; /* malformed chained buffer */
1533 :
1534 152342 : vlib_buffer_t *next_dst_b = vlib_get_buffer (vm, dst_b->next_buffer);
1535 :
1536 152342 : if (PREDICT_TRUE (1 == next_dst_b->ref_count))
1537 : {
1538 : /* normal case: buffer is not cloned, just use it */
1539 152342 : dst_b = next_dst_b;
1540 : }
1541 : else
1542 : {
1543 : /* cloned buffer, build a new dest chain from there */
1544 : vlib_buffer_t *bufs[VLIB_BUFFER_LINEARIZE_MAX];
1545 : u32 bis[VLIB_BUFFER_LINEARIZE_MAX + 1];
1546 0 : const int n = (rem_len + data_size - 1) / data_size;
1547 : int n_alloc;
1548 : int i;
1549 :
1550 0 : ASSERT (n <= VLIB_BUFFER_LINEARIZE_MAX);
1551 0 : if (PREDICT_FALSE (n > VLIB_BUFFER_LINEARIZE_MAX))
1552 0 : return 0;
1553 :
1554 0 : n_alloc = vlib_buffer_alloc (vm, bis, n);
1555 0 : if (PREDICT_FALSE (n_alloc != n))
1556 : {
1557 0 : vlib_buffer_free (vm, bis, n_alloc);
1558 0 : return 0;
1559 : }
1560 :
1561 0 : vlib_get_buffers (vm, bis, bufs, n);
1562 :
1563 0 : for (i = 0; i < n - 1; i++)
1564 : {
1565 0 : bufs[i]->flags |= VLIB_BUFFER_NEXT_PRESENT;
1566 0 : bufs[i]->next_buffer = bis[i + 1];
1567 : }
1568 :
1569 0 : to_free = dst_b->next_buffer;
1570 0 : dst_b->next_buffer = bis[0];
1571 0 : dst_b = bufs[0];
1572 : }
1573 :
1574 152342 : n_buffers++;
1575 :
1576 152342 : dst_b->current_data = clib_min (0, dst_b->current_data);
1577 152342 : dst_b->current_length = 0;
1578 :
1579 152342 : dst = dst_b->data + dst_b->current_data;
1580 152342 : dst_len = data_size - dst_b->current_data;
1581 : }
1582 :
1583 171801 : copy_len = clib_min (src_len, dst_len);
1584 :
1585 171801 : if (PREDICT_TRUE (src == dst))
1586 : {
1587 : /* nothing to do */
1588 : }
1589 24076 : else if (src + copy_len > dst && dst + copy_len > src)
1590 : {
1591 : /* src and dst overlap */
1592 1040 : ASSERT (b == dst_b);
1593 1040 : memmove (dst, src, copy_len);
1594 : }
1595 : else
1596 : {
1597 23036 : clib_memcpy_fast (dst, src, copy_len);
1598 : }
1599 :
1600 171801 : dst_b->current_length += copy_len;
1601 :
1602 171801 : dst += copy_len;
1603 171801 : src += copy_len;
1604 171801 : dst_len -= copy_len;
1605 171801 : src_len -= copy_len;
1606 171801 : rem_len -= copy_len;
1607 : }
1608 :
1609 : /* in case of a malformed chained buffer, we'll exit early from the loop. */
1610 117721 : ASSERT (0 == rem_len);
1611 117721 : b->total_length_not_including_first_buffer -= rem_len;
1612 :
1613 117721 : if (to_free)
1614 0 : vlib_buffer_free_one (vm, to_free);
1615 :
1616 117721 : if (dst_b->flags & VLIB_BUFFER_NEXT_PRESENT)
1617 : {
1618 : /* the resulting chain is smaller than the original, cut it there */
1619 3511 : dst_b->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1620 3511 : vlib_buffer_free_one (vm, dst_b->next_buffer);
1621 3511 : if (1 == n_buffers)
1622 : {
1623 : /* no longer a chained buffer */
1624 1066 : dst_b->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
1625 1066 : dst_b->total_length_not_including_first_buffer = 0;
1626 : }
1627 : }
1628 :
1629 117721 : return n_buffers;
1630 : }
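/* Illustrative sketch (the example_* name is hypothetical): linearizing a
 * chained packet before contiguous header parsing; a return value of 0 from
 * vlib_buffer_chain_linearize() means it failed (cloned buffer or allocation
 * failure), so the packet is dropped here. */
static_always_inline int
example_linearize_or_drop (vlib_main_t * vm, u32 bi)
{
  vlib_buffer_t *b = vlib_get_buffer (vm, bi);
  if (PREDICT_FALSE (vlib_buffer_chain_linearize (vm, b) == 0))
    {
      vlib_buffer_free_one (vm, bi);
      return 0;			/* dropped */
    }
  return 1;			/* chain is now as short as possible */
}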
1631 :
1632 : #endif /* included_vlib_buffer_funcs_h */
1633 :
1634 : /*
1635 : * fd.io coding-style-patch-verification: ON
1636 : *
1637 : * Local Variables:
1638 : * eval: (c-set-style "gnu")
1639 : * End:
1640 : */
|