Line data Source code
1 : /*
2 : * Copyright (c) 2015 Cisco and/or its affiliates.
3 : * Licensed under the Apache License, Version 2.0 (the "License");
4 : * you may not use this file except in compliance with the License.
5 : * You may obtain a copy of the License at:
6 : *
7 : * http://www.apache.org/licenses/LICENSE-2.0
8 : *
9 : * Unless required by applicable law or agreed to in writing, software
10 : * distributed under the License is distributed on an "AS IS" BASIS,
11 : * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 : * See the License for the specific language governing permissions and
13 : * limitations under the License.
14 : */
15 : /*
16 : * pg_input.c: buffer generator input
17 : *
18 : * Copyright (c) 2008 Eliot Dresselhaus
19 : *
20 : * Permission is hereby granted, free of charge, to any person obtaining
21 : * a copy of this software and associated documentation files (the
22 : * "Software"), to deal in the Software without restriction, including
23 : * without limitation the rights to use, copy, modify, merge, publish,
24 : * distribute, sublicense, and/or sell copies of the Software, and to
25 : * permit persons to whom the Software is furnished to do so, subject to
26 : * the following conditions:
27 : *
28 : * The above copyright notice and this permission notice shall be
29 : * included in all copies or substantial portions of the Software.
30 : *
31 : * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
32 : * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
33 : * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
34 : * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
35 : * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
36 : * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
37 : * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
38 : */
39 :
40 : /*
41 : * To be honest, the packet generator needs an extreme
42 : * makeover. Two key assumptions which drove the current implementation
43 : * are no longer true. First, buffer managers implement a
44 : * post-TX recycle list. Second, that packet generator performance
45 : * is first-order important.
46 : */
47 :
48 : #include <vlib/vlib.h>
49 : #include <vnet/pg/pg.h>
50 : #include <vnet/vnet.h>
51 : #include <vnet/ethernet/ethernet.h>
52 : #include <vnet/feature/feature.h>
53 : #include <vnet/ip/ip4_packet.h>
54 : #include <vnet/ip/ip6_packet.h>
55 : #include <vnet/udp/udp_packet.h>
56 : #include <vnet/devices/devices.h>
57 : #include <vnet/gso/gro_func.h>
58 :
59 : static int
60 445 : validate_buffer_data2 (vlib_buffer_t * b, pg_stream_t * s,
61 : u32 data_offset, u32 n_bytes)
62 : {
63 : u8 *bd, *pd, *pm;
64 : u32 i;
65 :
66 445 : bd = b->data;
67 445 : pd = s->fixed_packet_data + data_offset;
68 445 : pm = s->fixed_packet_data_mask + data_offset;
69 :
70 445 : if (pd + n_bytes >= vec_end (s->fixed_packet_data))
71 890 : n_bytes = (pd < vec_end (s->fixed_packet_data)
72 445 : ? vec_end (s->fixed_packet_data) - pd : 0);
73 :
74 57405 : for (i = 0; i < n_bytes; i++)
75 56960 : if ((bd[i] & pm[i]) != pd[i])
76 0 : break;
77 :
78 445 : if (i >= n_bytes)
79 445 : return 1;
80 :
81 0 : clib_warning ("buffer %U", format_vnet_buffer_no_chain, b);
82 0 : clib_warning ("differ at index %d", i);
83 0 : clib_warning ("is %U", format_hex_bytes, bd, n_bytes);
84 0 : clib_warning ("mask %U", format_hex_bytes, pm, n_bytes);
85 0 : clib_warning ("expect %U", format_hex_bytes, pd, n_bytes);
86 0 : return 0;
87 : }
88 :
89 : static int
90 445 : validate_buffer_data (vlib_buffer_t * b, pg_stream_t * s)
91 : {
92 445 : return validate_buffer_data2 (b, s, 0, s->buffer_bytes);
93 : }
94 :
95 : always_inline void
96 29 : set_1 (void *a0,
97 : u64 v0, u64 v_min, u64 v_max, u32 n_bits, u32 is_net_byte_order)
98 : {
99 29 : ASSERT (v0 >= v_min && v0 <= v_max);
100 29 : if (n_bits == BITS (u8))
101 : {
102 0 : ((u8 *) a0)[0] = v0;
103 : }
104 29 : else if (n_bits == BITS (u16))
105 : {
106 17 : if (is_net_byte_order)
107 0 : v0 = clib_host_to_net_u16 (v0);
108 17 : clib_mem_unaligned (a0, u16) = v0;
109 : }
110 12 : else if (n_bits == BITS (u32))
111 : {
112 12 : if (is_net_byte_order)
113 12 : v0 = clib_host_to_net_u32 (v0);
114 12 : clib_mem_unaligned (a0, u32) = v0;
115 : }
116 0 : else if (n_bits == BITS (u64))
117 : {
118 0 : if (is_net_byte_order)
119 0 : v0 = clib_host_to_net_u64 (v0);
120 0 : clib_mem_unaligned (a0, u64) = v0;
121 : }
122 29 : }
123 :
124 : always_inline void
125 213 : set_2 (void *a0, void *a1,
126 : u64 v0, u64 v1,
127 : u64 v_min, u64 v_max,
128 : u32 n_bits, u32 is_net_byte_order, u32 is_increment)
129 : {
130 213 : ASSERT (v0 >= v_min && v0 <= v_max);
131 213 : ASSERT (v1 >= v_min && v1 <= (v_max + is_increment));
132 213 : if (n_bits == BITS (u8))
133 : {
134 0 : ((u8 *) a0)[0] = v0;
135 0 : ((u8 *) a1)[0] = v1;
136 : }
137 213 : else if (n_bits == BITS (u16))
138 : {
139 164 : if (is_net_byte_order)
140 : {
141 0 : v0 = clib_host_to_net_u16 (v0);
142 0 : v1 = clib_host_to_net_u16 (v1);
143 : }
144 164 : clib_mem_unaligned (a0, u16) = v0;
145 164 : clib_mem_unaligned (a1, u16) = v1;
146 : }
147 49 : else if (n_bits == BITS (u32))
148 : {
149 49 : if (is_net_byte_order)
150 : {
151 49 : v0 = clib_host_to_net_u32 (v0);
152 49 : v1 = clib_host_to_net_u32 (v1);
153 : }
154 49 : clib_mem_unaligned (a0, u32) = v0;
155 49 : clib_mem_unaligned (a1, u32) = v1;
156 : }
157 0 : else if (n_bits == BITS (u64))
158 : {
159 0 : if (is_net_byte_order)
160 : {
161 0 : v0 = clib_host_to_net_u64 (v0);
162 0 : v1 = clib_host_to_net_u64 (v1);
163 : }
164 0 : clib_mem_unaligned (a0, u64) = v0;
165 0 : clib_mem_unaligned (a1, u64) = v1;
166 : }
167 213 : }
168 :
169 : static_always_inline void
170 8 : do_set_fixed (pg_main_t * pg,
171 : pg_stream_t * s,
172 : u32 * buffers,
173 : u32 n_buffers,
174 : u32 n_bits,
175 : u32 byte_offset, u32 is_net_byte_order, u64 v_min, u64 v_max)
176 : {
177 8 : vlib_main_t *vm = vlib_get_main ();
178 :
179 172 : while (n_buffers >= 4)
180 : {
181 : vlib_buffer_t *b0, *b1, *b2, *b3;
182 : void *a0, *a1;
183 :
184 164 : b0 = vlib_get_buffer (vm, buffers[0]);
185 164 : b1 = vlib_get_buffer (vm, buffers[1]);
186 164 : b2 = vlib_get_buffer (vm, buffers[2]);
187 164 : b3 = vlib_get_buffer (vm, buffers[3]);
188 164 : buffers += 2;
189 164 : n_buffers -= 2;
190 :
191 164 : a0 = (void *) b0 + byte_offset;
192 164 : a1 = (void *) b1 + byte_offset;
193 164 : CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
194 164 : CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
195 :
196 164 : set_2 (a0, a1, v_min, v_min, v_min, v_max, n_bits, is_net_byte_order,
197 : /* is_increment */ 0);
198 :
199 164 : ASSERT (validate_buffer_data (b0, s));
200 164 : ASSERT (validate_buffer_data (b1, s));
201 : }
202 :
203 25 : while (n_buffers > 0)
204 : {
205 : vlib_buffer_t *b0;
206 : void *a0;
207 :
208 17 : b0 = vlib_get_buffer (vm, buffers[0]);
209 17 : buffers += 1;
210 17 : n_buffers -= 1;
211 :
212 17 : a0 = (void *) b0 + byte_offset;
213 :
214 17 : set_1 (a0, v_min, v_min, v_max, n_bits, is_net_byte_order);
215 :
216 17 : ASSERT (validate_buffer_data (b0, s));
217 : }
218 8 : }
219 :
220 : static_always_inline u64
221 1 : do_set_increment (pg_main_t * pg,
222 : pg_stream_t * s,
223 : u32 * buffers,
224 : u32 n_buffers,
225 : u32 n_bits,
226 : u32 byte_offset,
227 : u32 is_net_byte_order,
228 : u32 want_sum, u64 * sum_result, u64 v_min, u64 v_max, u64 v)
229 : {
230 1 : vlib_main_t *vm = vlib_get_main ();
231 1 : u64 sum = 0;
232 :
233 1 : ASSERT (v >= v_min && v <= v_max);
234 :
235 50 : while (n_buffers >= 4)
236 : {
237 : vlib_buffer_t *b0, *b1, *b2, *b3;
238 : void *a0, *a1;
239 : u64 v_old;
240 :
241 49 : b0 = vlib_get_buffer (vm, buffers[0]);
242 49 : b1 = vlib_get_buffer (vm, buffers[1]);
243 49 : b2 = vlib_get_buffer (vm, buffers[2]);
244 49 : b3 = vlib_get_buffer (vm, buffers[3]);
245 49 : buffers += 2;
246 49 : n_buffers -= 2;
247 :
248 49 : a0 = (void *) b0 + byte_offset;
249 49 : a1 = (void *) b1 + byte_offset;
250 49 : CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
251 49 : CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
252 :
253 49 : v_old = v;
254 49 : v = v_old + 2;
255 49 : v = v > v_max ? v_min : v;
256 49 : set_2 (a0, a1,
257 : v_old + 0, v_old + 1, v_min, v_max, n_bits, is_net_byte_order,
258 : /* is_increment */ 1);
259 :
260 49 : if (want_sum)
261 0 : sum += 2 * v_old + 1;
262 :
263 49 : if (PREDICT_FALSE (v_old + 1 > v_max))
264 : {
265 5 : if (want_sum)
266 0 : sum -= 2 * v_old + 1;
267 :
268 5 : v = v_old;
269 5 : set_1 (a0, v + 0, v_min, v_max, n_bits, is_net_byte_order);
270 5 : if (want_sum)
271 0 : sum += v;
272 5 : v += 1;
273 :
274 5 : v = v > v_max ? v_min : v;
275 5 : set_1 (a1, v + 0, v_min, v_max, n_bits, is_net_byte_order);
276 5 : if (want_sum)
277 0 : sum += v;
278 5 : v += 1;
279 : }
280 :
281 49 : ASSERT (validate_buffer_data (b0, s));
282 49 : ASSERT (validate_buffer_data (b1, s));
283 : }
284 :
285 3 : while (n_buffers > 0)
286 : {
287 : vlib_buffer_t *b0;
288 : void *a0;
289 : u64 v_old;
290 :
291 2 : b0 = vlib_get_buffer (vm, buffers[0]);
292 2 : buffers += 1;
293 2 : n_buffers -= 1;
294 :
295 2 : a0 = (void *) b0 + byte_offset;
296 :
297 2 : v_old = v;
298 2 : if (want_sum)
299 0 : sum += v_old;
300 2 : v += 1;
301 2 : v = v > v_max ? v_min : v;
302 :
303 2 : ASSERT (v_old >= v_min && v_old <= v_max);
304 2 : set_1 (a0, v_old, v_min, v_max, n_bits, is_net_byte_order);
305 :
306 2 : ASSERT (validate_buffer_data (b0, s));
307 : }
308 :
309 1 : if (want_sum)
310 0 : *sum_result = sum;
311 :
312 1 : return v;
313 : }
314 :
315 : static_always_inline void
316 0 : do_set_random (pg_main_t * pg,
317 : pg_stream_t * s,
318 : u32 * buffers,
319 : u32 n_buffers,
320 : u32 n_bits,
321 : u32 byte_offset,
322 : u32 is_net_byte_order,
323 : u32 want_sum, u64 * sum_result, u64 v_min, u64 v_max)
324 : {
325 0 : vlib_main_t *vm = vlib_get_main ();
326 0 : u64 v_diff = v_max - v_min + 1;
327 0 : u64 r_mask = max_pow2 (v_diff) - 1;
328 : u64 v0, v1;
329 0 : u64 sum = 0;
330 : void *random_data;
331 :
332 0 : random_data = clib_random_buffer_get_data
333 0 : (&vm->random_buffer, n_buffers * n_bits / BITS (u8));
334 :
335 0 : v0 = v1 = v_min;
336 :
337 0 : while (n_buffers >= 4)
338 : {
339 : vlib_buffer_t *b0, *b1, *b2, *b3;
340 : void *a0, *a1;
341 0 : u64 r0 = 0, r1 = 0; /* warnings be gone */
342 :
343 0 : b0 = vlib_get_buffer (vm, buffers[0]);
344 0 : b1 = vlib_get_buffer (vm, buffers[1]);
345 0 : b2 = vlib_get_buffer (vm, buffers[2]);
346 0 : b3 = vlib_get_buffer (vm, buffers[3]);
347 0 : buffers += 2;
348 0 : n_buffers -= 2;
349 :
350 0 : a0 = (void *) b0 + byte_offset;
351 0 : a1 = (void *) b1 + byte_offset;
352 0 : CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
353 0 : CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
354 :
355 0 : switch (n_bits)
356 : {
357 : #define _(n) \
358 : case BITS (u##n): \
359 : { \
360 : u##n * r = random_data; \
361 : r0 = r[0]; \
362 : r1 = r[1]; \
363 : random_data = r + 2; \
364 : } \
365 : break;
366 :
367 0 : _(8);
368 0 : _(16);
369 0 : _(32);
370 0 : _(64);
371 :
372 : #undef _
373 : }
374 :
375 : /* Add power of 2 sized random number which may be out of range. */
376 0 : v0 += r0 & r_mask;
377 0 : v1 += r1 & r_mask;
378 :
379 : /* Twice should be enough to reduce to v_min .. v_max range. */
380 0 : v0 = v0 > v_max ? v0 - v_diff : v0;
381 0 : v1 = v1 > v_max ? v1 - v_diff : v1;
382 0 : v0 = v0 > v_max ? v0 - v_diff : v0;
383 0 : v1 = v1 > v_max ? v1 - v_diff : v1;
384 :
385 0 : if (want_sum)
386 0 : sum += v0 + v1;
387 :
388 0 : set_2 (a0, a1, v0, v1, v_min, v_max, n_bits, is_net_byte_order,
389 : /* is_increment */ 0);
390 :
391 0 : ASSERT (validate_buffer_data (b0, s));
392 0 : ASSERT (validate_buffer_data (b1, s));
393 : }
394 :
395 0 : while (n_buffers > 0)
396 : {
397 : vlib_buffer_t *b0;
398 : void *a0;
399 0 : u64 r0 = 0; /* warnings be gone */
400 :
401 0 : b0 = vlib_get_buffer (vm, buffers[0]);
402 0 : buffers += 1;
403 0 : n_buffers -= 1;
404 :
405 0 : a0 = (void *) b0 + byte_offset;
406 :
407 0 : switch (n_bits)
408 : {
409 : #define _(n) \
410 : case BITS (u##n): \
411 : { \
412 : u##n * r = random_data; \
413 : r0 = r[0]; \
414 : random_data = r + 1; \
415 : } \
416 : break;
417 :
418 0 : _(8);
419 0 : _(16);
420 0 : _(32);
421 0 : _(64);
422 :
423 : #undef _
424 : }
425 :
426 : /* Add power of 2 sized random number which may be out of range. */
427 0 : v0 += r0 & r_mask;
428 :
429 : /* Twice should be enough to reduce to v_min .. v_max range. */
430 0 : v0 = v0 > v_max ? v0 - v_diff : v0;
431 0 : v0 = v0 > v_max ? v0 - v_diff : v0;
432 :
433 0 : if (want_sum)
434 0 : sum += v0;
435 :
436 0 : set_1 (a0, v0, v_min, v_max, n_bits, is_net_byte_order);
437 :
438 0 : ASSERT (validate_buffer_data (b0, s));
439 : }
440 :
441 0 : if (want_sum)
442 0 : *sum_result = sum;
443 0 : }
444 :
445 : #define _(i,t) \
446 : clib_mem_unaligned (a##i, t) = \
447 : clib_host_to_net_##t ((clib_net_to_host_mem_##t (a##i) &~ mask) \
448 : | (v##i << shift))
449 :
450 : always_inline void
451 0 : setbits_1 (void *a0,
452 : u64 v0,
453 : u64 v_min, u64 v_max,
454 : u32 max_bits, u32 n_bits, u64 mask, u32 shift)
455 : {
456 0 : ASSERT (v0 >= v_min && v0 <= v_max);
457 0 : if (max_bits == BITS (u8))
458 0 : ((u8 *) a0)[0] = (((u8 *) a0)[0] & ~mask) | (v0 << shift);
459 :
460 0 : else if (max_bits == BITS (u16))
461 : {
462 0 : _(0, u16);
463 : }
464 0 : else if (max_bits == BITS (u32))
465 : {
466 0 : _(0, u32);
467 : }
468 0 : else if (max_bits == BITS (u64))
469 : {
470 0 : _(0, u64);
471 : }
472 0 : }
473 :
474 : always_inline void
475 0 : setbits_2 (void *a0, void *a1,
476 : u64 v0, u64 v1,
477 : u64 v_min, u64 v_max,
478 : u32 max_bits, u32 n_bits, u64 mask, u32 shift, u32 is_increment)
479 : {
480 0 : ASSERT (v0 >= v_min && v0 <= v_max);
481 0 : ASSERT (v1 >= v_min && v1 <= v_max + is_increment);
482 0 : if (max_bits == BITS (u8))
483 : {
484 0 : ((u8 *) a0)[0] = (((u8 *) a0)[0] & ~mask) | (v0 << shift);
485 0 : ((u8 *) a1)[0] = (((u8 *) a1)[0] & ~mask) | (v1 << shift);
486 : }
487 :
488 0 : else if (max_bits == BITS (u16))
489 : {
490 0 : _(0, u16);
491 0 : _(1, u16);
492 : }
493 0 : else if (max_bits == BITS (u32))
494 : {
495 0 : _(0, u32);
496 0 : _(1, u32);
497 : }
498 0 : else if (max_bits == BITS (u64))
499 : {
500 0 : _(0, u64);
501 0 : _(1, u64);
502 : }
503 0 : }
504 :
505 : #undef _
506 :
507 : static_always_inline void
508 0 : do_setbits_fixed (pg_main_t * pg,
509 : pg_stream_t * s,
510 : u32 * buffers,
511 : u32 n_buffers,
512 : u32 max_bits,
513 : u32 n_bits,
514 : u32 byte_offset, u64 v_min, u64 v_max, u64 mask, u32 shift)
515 : {
516 0 : vlib_main_t *vm = vlib_get_main ();
517 :
518 0 : while (n_buffers >= 4)
519 : {
520 : vlib_buffer_t *b0, *b1, *b2, *b3;
521 : void *a0, *a1;
522 :
523 0 : b0 = vlib_get_buffer (vm, buffers[0]);
524 0 : b1 = vlib_get_buffer (vm, buffers[1]);
525 0 : b2 = vlib_get_buffer (vm, buffers[2]);
526 0 : b3 = vlib_get_buffer (vm, buffers[3]);
527 0 : buffers += 2;
528 0 : n_buffers -= 2;
529 :
530 0 : a0 = (void *) b0 + byte_offset;
531 0 : a1 = (void *) b1 + byte_offset;
532 0 : CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
533 0 : CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
534 :
535 0 : setbits_2 (a0, a1,
536 : v_min, v_min, v_min, v_max, max_bits, n_bits, mask, shift,
537 : /* is_increment */ 0);
538 :
539 0 : ASSERT (validate_buffer_data (b0, s));
540 0 : ASSERT (validate_buffer_data (b1, s));
541 : }
542 :
543 0 : while (n_buffers > 0)
544 : {
545 : vlib_buffer_t *b0;
546 : void *a0;
547 :
548 0 : b0 = vlib_get_buffer (vm, buffers[0]);
549 0 : buffers += 1;
550 0 : n_buffers -= 1;
551 :
552 0 : a0 = (void *) b0 + byte_offset;
553 :
554 0 : setbits_1 (a0, v_min, v_min, v_max, max_bits, n_bits, mask, shift);
555 0 : ASSERT (validate_buffer_data (b0, s));
556 : }
557 0 : }
558 :
559 : static_always_inline u64
560 0 : do_setbits_increment (pg_main_t * pg,
561 : pg_stream_t * s,
562 : u32 * buffers,
563 : u32 n_buffers,
564 : u32 max_bits,
565 : u32 n_bits,
566 : u32 byte_offset,
567 : u64 v_min, u64 v_max, u64 v, u64 mask, u32 shift)
568 : {
569 0 : vlib_main_t *vm = vlib_get_main ();
570 :
571 0 : ASSERT (v >= v_min && v <= v_max);
572 :
573 0 : while (n_buffers >= 4)
574 : {
575 : vlib_buffer_t *b0, *b1, *b2, *b3;
576 : void *a0, *a1;
577 : u64 v_old;
578 :
579 0 : b0 = vlib_get_buffer (vm, buffers[0]);
580 0 : b1 = vlib_get_buffer (vm, buffers[1]);
581 0 : b2 = vlib_get_buffer (vm, buffers[2]);
582 0 : b3 = vlib_get_buffer (vm, buffers[3]);
583 0 : buffers += 2;
584 0 : n_buffers -= 2;
585 :
586 0 : a0 = (void *) b0 + byte_offset;
587 0 : a1 = (void *) b1 + byte_offset;
588 0 : CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
589 0 : CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
590 :
591 0 : v_old = v;
592 0 : v = v_old + 2;
593 0 : v = v > v_max ? v_min : v;
594 0 : setbits_2 (a0, a1,
595 : v_old + 0, v_old + 1,
596 : v_min, v_max, max_bits, n_bits, mask, shift,
597 : /* is_increment */ 1);
598 :
599 0 : if (PREDICT_FALSE (v_old + 1 > v_max))
600 : {
601 0 : v = v_old;
602 0 : setbits_1 (a0, v + 0, v_min, v_max, max_bits, n_bits, mask, shift);
603 0 : v += 1;
604 :
605 0 : v = v > v_max ? v_min : v;
606 0 : setbits_1 (a1, v + 0, v_min, v_max, max_bits, n_bits, mask, shift);
607 0 : v += 1;
608 : }
609 0 : ASSERT (validate_buffer_data (b0, s));
610 0 : ASSERT (validate_buffer_data (b1, s));
611 : }
612 :
613 0 : while (n_buffers > 0)
614 : {
615 : vlib_buffer_t *b0;
616 : void *a0;
617 : u64 v_old;
618 :
619 0 : b0 = vlib_get_buffer (vm, buffers[0]);
620 0 : buffers += 1;
621 0 : n_buffers -= 1;
622 :
623 0 : a0 = (void *) b0 + byte_offset;
624 :
625 0 : v_old = v;
626 0 : v = v_old + 1;
627 0 : v = v > v_max ? v_min : v;
628 :
629 0 : ASSERT (v_old >= v_min && v_old <= v_max);
630 0 : setbits_1 (a0, v_old, v_min, v_max, max_bits, n_bits, mask, shift);
631 :
632 0 : ASSERT (validate_buffer_data (b0, s));
633 : }
634 :
635 0 : return v;
636 : }
637 :
638 : static_always_inline void
639 0 : do_setbits_random (pg_main_t * pg,
640 : pg_stream_t * s,
641 : u32 * buffers,
642 : u32 n_buffers,
643 : u32 max_bits,
644 : u32 n_bits,
645 : u32 byte_offset, u64 v_min, u64 v_max, u64 mask, u32 shift)
646 : {
647 0 : vlib_main_t *vm = vlib_get_main ();
648 0 : u64 v_diff = v_max - v_min + 1;
649 0 : u64 r_mask = max_pow2 (v_diff) - 1;
650 : u64 v0, v1;
651 : void *random_data;
652 :
653 0 : random_data = clib_random_buffer_get_data
654 0 : (&vm->random_buffer, n_buffers * max_bits / BITS (u8));
655 0 : v0 = v1 = v_min;
656 :
657 0 : while (n_buffers >= 4)
658 : {
659 : vlib_buffer_t *b0, *b1, *b2, *b3;
660 : void *a0, *a1;
661 0 : u64 r0 = 0, r1 = 0; /* warnings be gone */
662 :
663 0 : b0 = vlib_get_buffer (vm, buffers[0]);
664 0 : b1 = vlib_get_buffer (vm, buffers[1]);
665 0 : b2 = vlib_get_buffer (vm, buffers[2]);
666 0 : b3 = vlib_get_buffer (vm, buffers[3]);
667 0 : buffers += 2;
668 0 : n_buffers -= 2;
669 :
670 0 : a0 = (void *) b0 + byte_offset;
671 0 : a1 = (void *) b1 + byte_offset;
672 0 : CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
673 0 : CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
674 :
675 0 : switch (max_bits)
676 : {
677 : #define _(n) \
678 : case BITS (u##n): \
679 : { \
680 : u##n * r = random_data; \
681 : r0 = r[0]; \
682 : r1 = r[1]; \
683 : random_data = r + 2; \
684 : } \
685 : break;
686 :
687 0 : _(8);
688 0 : _(16);
689 0 : _(32);
690 0 : _(64);
691 :
692 : #undef _
693 : }
694 :
695 : /* Add power of 2 sized random number which may be out of range. */
696 0 : v0 += r0 & r_mask;
697 0 : v1 += r1 & r_mask;
698 :
699 : /* Twice should be enough to reduce to v_min .. v_max range. */
700 0 : v0 = v0 > v_max ? v0 - v_diff : v0;
701 0 : v1 = v1 > v_max ? v1 - v_diff : v1;
702 0 : v0 = v0 > v_max ? v0 - v_diff : v0;
703 0 : v1 = v1 > v_max ? v1 - v_diff : v1;
704 :
705 0 : setbits_2 (a0, a1, v0, v1, v_min, v_max, max_bits, n_bits, mask, shift,
706 : /* is_increment */ 0);
707 :
708 0 : ASSERT (validate_buffer_data (b0, s));
709 0 : ASSERT (validate_buffer_data (b1, s));
710 : }
711 :
712 0 : while (n_buffers > 0)
713 : {
714 : vlib_buffer_t *b0;
715 : void *a0;
716 0 : u64 r0 = 0; /* warnings be gone */
717 :
718 0 : b0 = vlib_get_buffer (vm, buffers[0]);
719 0 : buffers += 1;
720 0 : n_buffers -= 1;
721 :
722 0 : a0 = (void *) b0 + byte_offset;
723 :
724 0 : switch (max_bits)
725 : {
726 : #define _(n) \
727 : case BITS (u##n): \
728 : { \
729 : u##n * r = random_data; \
730 : r0 = r[0]; \
731 : random_data = r + 1; \
732 : } \
733 : break;
734 :
735 0 : _(8);
736 0 : _(16);
737 0 : _(32);
738 0 : _(64);
739 :
740 : #undef _
741 : }
742 :
743 : /* Add power of 2 sized random number which may be out of range. */
744 0 : v0 += r0 & r_mask;
745 :
746 : /* Twice should be enough to reduce to v_min .. v_max range. */
747 0 : v0 = v0 > v_max ? v0 - v_diff : v0;
748 0 : v0 = v0 > v_max ? v0 - v_diff : v0;
749 :
750 0 : setbits_1 (a0, v0, v_min, v_max, max_bits, n_bits, mask, shift);
751 :
752 0 : ASSERT (validate_buffer_data (b0, s));
753 : }
754 0 : }
755 :
756 : static u64
757 1 : do_it (pg_main_t * pg,
758 : pg_stream_t * s,
759 : u32 * buffers,
760 : u32 n_buffers,
761 : u32 lo_bit, u32 hi_bit,
762 : u64 v_min, u64 v_max, u64 v, pg_edit_type_t edit_type)
763 : {
764 : u32 max_bits, l0, l1, h1, start_bit;
765 :
766 1 : if (v_min == v_max)
767 0 : edit_type = PG_EDIT_FIXED;
768 :
769 1 : l0 = lo_bit / BITS (u8);
770 1 : l1 = lo_bit % BITS (u8);
771 1 : h1 = hi_bit % BITS (u8);
772 :
773 1 : start_bit = l0 * BITS (u8);
774 :
775 1 : max_bits = hi_bit - start_bit;
776 1 : ASSERT (max_bits <= 64);
777 :
778 : #define _(n) \
779 : case (n): \
780 : if (edit_type == PG_EDIT_INCREMENT) \
781 : v = do_set_increment (pg, s, buffers, n_buffers, \
782 : BITS (u##n), \
783 : l0, \
784 : /* is_net_byte_order */ 1, \
785 : /* want sum */ 0, 0, \
786 : v_min, v_max, \
787 : v); \
788 : else if (edit_type == PG_EDIT_RANDOM) \
789 : do_set_random (pg, s, buffers, n_buffers, \
790 : BITS (u##n), \
791 : l0, \
792 : /* is_net_byte_order */ 1, \
793 : /* want sum */ 0, 0, \
794 : v_min, v_max); \
795 : else /* edit_type == PG_EDIT_FIXED */ \
796 : do_set_fixed (pg, s, buffers, n_buffers, \
797 : BITS (u##n), \
798 : l0, \
799 : /* is_net_byte_order */ 1, \
800 : v_min, v_max); \
801 : goto done;
802 :
803 1 : if (l1 == 0 && h1 == 0)
804 : {
805 1 : switch (max_bits)
806 : {
807 0 : _(8);
808 0 : _(16);
809 1 : _(32);
810 0 : _(64);
811 : }
812 : }
813 :
814 : #undef _
815 :
816 : {
817 : u64 mask;
818 0 : u32 shift = l1;
819 0 : u32 n_bits = max_bits;
820 :
821 0 : max_bits = clib_max (max_pow2 (n_bits), 8);
822 :
823 0 : mask = ((u64) 1 << (u64) n_bits) - 1;
824 0 : mask &= ~(((u64) 1 << (u64) shift) - 1);
825 :
826 0 : mask <<= max_bits - n_bits;
827 0 : shift += max_bits - n_bits;
828 :
829 0 : switch (max_bits)
830 : {
831 : #define _(n) \
832 : case (n): \
833 : if (edit_type == PG_EDIT_INCREMENT) \
834 : v = do_setbits_increment (pg, s, buffers, n_buffers, \
835 : BITS (u##n), n_bits, \
836 : l0, v_min, v_max, v, \
837 : mask, shift); \
838 : else if (edit_type == PG_EDIT_RANDOM) \
839 : do_setbits_random (pg, s, buffers, n_buffers, \
840 : BITS (u##n), n_bits, \
841 : l0, v_min, v_max, \
842 : mask, shift); \
843 : else /* edit_type == PG_EDIT_FIXED */ \
844 : do_setbits_fixed (pg, s, buffers, n_buffers, \
845 : BITS (u##n), n_bits, \
846 : l0, v_min, v_max, \
847 : mask, shift); \
848 : goto done;
849 :
850 0 : _(8);
851 0 : _(16);
852 0 : _(32);
853 0 : _(64);
854 :
855 : #undef _
856 : }
857 : }
858 :
859 1 : done:
860 1 : return v;
861 : }
862 :
863 : static void
864 8 : pg_generate_set_lengths (pg_main_t * pg,
865 : pg_stream_t * s, u32 * buffers, u32 n_buffers)
866 : {
867 : u64 v_min, v_max, length_sum;
868 : pg_edit_type_t edit_type;
869 :
870 8 : v_min = s->min_packet_bytes;
871 8 : v_max = s->max_packet_bytes;
872 8 : edit_type = s->packet_size_edit_type;
873 :
874 8 : if (edit_type == PG_EDIT_INCREMENT)
875 : s->last_increment_packet_size
876 0 : = do_set_increment (pg, s, buffers, n_buffers,
877 : 8 * STRUCT_SIZE_OF (vlib_buffer_t, current_length),
878 : STRUCT_OFFSET_OF (vlib_buffer_t, current_length),
879 : /* is_net_byte_order */ 0,
880 : /* want sum */ 1, &length_sum,
881 0 : v_min, v_max, s->last_increment_packet_size);
882 :
883 8 : else if (edit_type == PG_EDIT_RANDOM)
884 0 : do_set_random (pg, s, buffers, n_buffers,
885 : 8 * STRUCT_SIZE_OF (vlib_buffer_t, current_length),
886 : STRUCT_OFFSET_OF (vlib_buffer_t, current_length),
887 : /* is_net_byte_order */ 0,
888 : /* want sum */ 1, &length_sum,
889 : v_min, v_max);
890 :
891 : else /* edit_type == PG_EDIT_FIXED */
892 : {
893 8 : do_set_fixed (pg, s, buffers, n_buffers,
894 : 8 * STRUCT_SIZE_OF (vlib_buffer_t, current_length),
895 : STRUCT_OFFSET_OF (vlib_buffer_t, current_length),
896 : /* is_net_byte_order */ 0,
897 : v_min, v_max);
898 8 : length_sum = v_min * n_buffers;
899 : }
900 :
901 : {
902 8 : vnet_main_t *vnm = vnet_get_main ();
903 8 : vnet_interface_main_t *im = &vnm->interface_main;
904 : vnet_sw_interface_t *si =
905 8 : vnet_get_sw_interface (vnm, s->sw_if_index[VLIB_RX]);
906 :
907 8 : vlib_increment_combined_counter (im->combined_sw_if_counters
908 : + VNET_INTERFACE_COUNTER_RX,
909 8 : vlib_get_thread_index (),
910 : si->sw_if_index, n_buffers, length_sum);
911 : }
912 :
913 8 : }
914 :
915 : static void
916 0 : pg_generate_fix_multi_buffer_lengths (pg_main_t * pg,
917 : pg_stream_t * s,
918 : u32 * buffers, u32 n_buffers)
919 : {
920 0 : vlib_main_t *vm = vlib_get_main ();
921 : pg_buffer_index_t *pbi;
922 : uword n_bytes_left;
923 : static u32 *unused_buffers = 0;
924 :
925 0 : while (n_buffers > 0)
926 : {
927 : vlib_buffer_t *b;
928 : u32 bi;
929 :
930 0 : bi = buffers[0];
931 0 : b = vlib_get_buffer (vm, bi);
932 :
933 : /* Current length here is length of whole packet. */
934 0 : n_bytes_left = b->current_length;
935 :
936 0 : pbi = s->buffer_indices;
937 : while (1)
938 0 : {
939 0 : uword n = clib_min (n_bytes_left, s->buffer_bytes);
940 :
941 0 : b->current_length = n;
942 0 : n_bytes_left -= n;
943 0 : if (n_bytes_left > 0)
944 0 : b->flags |= VLIB_BUFFER_NEXT_PRESENT;
945 : else
946 0 : b->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
947 :
948 : /* Return unused buffers to fifos. */
949 0 : if (n == 0)
950 0 : vec_add1 (unused_buffers, bi);
951 :
952 0 : pbi++;
953 0 : if (pbi >= vec_end (s->buffer_indices))
954 0 : break;
955 :
956 0 : bi = b->next_buffer;
957 0 : b = vlib_get_buffer (vm, bi);
958 : }
959 0 : ASSERT (n_bytes_left == 0);
960 :
961 0 : buffers += 1;
962 0 : n_buffers -= 1;
963 : }
964 :
965 0 : if (vec_len (unused_buffers) > 0)
966 : {
967 0 : vlib_buffer_free_no_next (vm, unused_buffers, vec_len (unused_buffers));
968 0 : vec_set_len (unused_buffers, 0);
969 : }
970 0 : }
971 :
972 : static void
973 8 : pg_generate_edit (pg_main_t * pg,
974 : pg_stream_t * s, u32 * buffers, u32 n_buffers)
975 : {
976 : pg_edit_t *e;
977 :
978 20 : vec_foreach (e, s->non_fixed_edits)
979 : {
980 12 : switch (e->type)
981 : {
982 1 : case PG_EDIT_RANDOM:
983 : case PG_EDIT_INCREMENT:
984 : {
985 : u32 lo_bit, hi_bit;
986 : u64 v_min, v_max;
987 :
988 1 : v_min = pg_edit_get_value (e, PG_EDIT_LO);
989 1 : v_max = pg_edit_get_value (e, PG_EDIT_HI);
990 :
991 1 : hi_bit = (BITS (u8) * STRUCT_OFFSET_OF (vlib_buffer_t, data)
992 1 : + BITS (u8) + e->lsb_bit_offset);
993 1 : lo_bit = hi_bit - e->n_bits;
994 :
995 : e->last_increment_value
996 1 : = do_it (pg, s, buffers, n_buffers, lo_bit, hi_bit, v_min, v_max,
997 : e->last_increment_value, e->type);
998 : }
999 1 : break;
1000 :
1001 11 : case PG_EDIT_UNSPECIFIED:
1002 11 : break;
1003 :
1004 0 : default:
1005 : /* Should not be any fixed edits left. */
1006 0 : ASSERT (0);
1007 0 : break;
1008 : }
1009 : }
1010 :
1011 : /* Call any edit functions to e.g. completely IP lengths, checksums, ... */
1012 : {
1013 : int i;
1014 39 : for (i = vec_len (s->edit_groups) - 1; i >= 0; i--)
1015 : {
1016 31 : pg_edit_group_t *g = s->edit_groups + i;
1017 31 : if (g->edit_function)
1018 8 : g->edit_function (pg, s, g, buffers, n_buffers);
1019 : }
1020 : }
1021 8 : }
1022 :
1023 : static void
1024 0 : pg_set_next_buffer_pointers (pg_main_t * pg,
1025 : pg_stream_t * s,
1026 : u32 * buffers, u32 * next_buffers, u32 n_buffers)
1027 : {
1028 0 : vlib_main_t *vm = vlib_get_main ();
1029 :
1030 0 : while (n_buffers >= 4)
1031 : {
1032 : u32 ni0, ni1;
1033 : vlib_buffer_t *b0, *b1;
1034 :
1035 0 : b0 = vlib_get_buffer (vm, buffers[0]);
1036 0 : b1 = vlib_get_buffer (vm, buffers[1]);
1037 0 : ni0 = next_buffers[0];
1038 0 : ni1 = next_buffers[1];
1039 :
1040 0 : vlib_prefetch_buffer_with_index (vm, buffers[2], WRITE);
1041 0 : vlib_prefetch_buffer_with_index (vm, buffers[3], WRITE);
1042 :
1043 0 : b0->flags |= VLIB_BUFFER_NEXT_PRESENT;
1044 0 : b1->flags |= VLIB_BUFFER_NEXT_PRESENT;
1045 0 : b0->next_buffer = ni0;
1046 0 : b1->next_buffer = ni1;
1047 :
1048 0 : buffers += 2;
1049 0 : next_buffers += 2;
1050 0 : n_buffers -= 2;
1051 : }
1052 :
1053 0 : while (n_buffers > 0)
1054 : {
1055 : u32 ni0;
1056 : vlib_buffer_t *b0;
1057 :
1058 0 : b0 = vlib_get_buffer (vm, buffers[0]);
1059 0 : ni0 = next_buffers[0];
1060 0 : buffers += 1;
1061 0 : next_buffers += 1;
1062 0 : n_buffers -= 1;
1063 :
1064 0 : b0->flags |= VLIB_BUFFER_NEXT_PRESENT;
1065 0 : b0->next_buffer = ni0;
1066 : }
1067 0 : }
1068 :
1069 : static_always_inline void
1070 8 : init_buffers_inline (vlib_main_t * vm,
1071 : pg_stream_t * s,
1072 : u32 * buffers,
1073 : u32 n_buffers, u32 data_offset, u32 n_data, u32 set_data)
1074 : {
1075 : u32 n_left, *b;
1076 : u8 *data, *mask;
1077 :
1078 8 : ASSERT (s->replay_packet_templates == 0);
1079 :
1080 8 : data = s->fixed_packet_data + data_offset;
1081 8 : mask = s->fixed_packet_data_mask + data_offset;
1082 8 : if (data + n_data >= vec_end (s->fixed_packet_data))
1083 16 : n_data = (data < vec_end (s->fixed_packet_data)
1084 8 : ? vec_end (s->fixed_packet_data) - data : 0);
1085 8 : if (n_data > 0)
1086 : {
1087 8 : ASSERT (data + n_data <= vec_end (s->fixed_packet_data));
1088 8 : ASSERT (mask + n_data <= vec_end (s->fixed_packet_data_mask));
1089 : }
1090 :
1091 8 : n_left = n_buffers;
1092 8 : b = buffers;
1093 :
1094 172 : while (n_left >= 4)
1095 : {
1096 : u32 bi0, bi1;
1097 : vlib_buffer_t *b0, *b1;
1098 :
1099 : /* Prefetch next iteration. */
1100 164 : vlib_prefetch_buffer_with_index (vm, b[2], STORE);
1101 164 : vlib_prefetch_buffer_with_index (vm, b[3], STORE);
1102 :
1103 164 : bi0 = b[0];
1104 164 : bi1 = b[1];
1105 164 : b += 2;
1106 164 : n_left -= 2;
1107 :
1108 164 : b0 = vlib_get_buffer (vm, bi0);
1109 164 : b1 = vlib_get_buffer (vm, bi1);
1110 164 : b0->flags |= s->buffer_flags;
1111 164 : b1->flags |= s->buffer_flags;
1112 :
1113 164 : vnet_buffer (b0)->sw_if_index[VLIB_RX] =
1114 164 : vnet_buffer (b1)->sw_if_index[VLIB_RX] = s->sw_if_index[VLIB_RX];
1115 :
1116 164 : vnet_buffer (b0)->sw_if_index[VLIB_TX] =
1117 164 : vnet_buffer (b1)->sw_if_index[VLIB_TX] = s->sw_if_index[VLIB_TX];
1118 :
1119 164 : if (set_data)
1120 : {
1121 164 : clib_memcpy_fast (b0->data, data, n_data);
1122 164 : clib_memcpy_fast (b1->data, data, n_data);
1123 : }
1124 : else
1125 : {
1126 0 : ASSERT (validate_buffer_data2 (b0, s, data_offset, n_data));
1127 0 : ASSERT (validate_buffer_data2 (b1, s, data_offset, n_data));
1128 : }
1129 : }
1130 :
1131 25 : while (n_left >= 1)
1132 : {
1133 : u32 bi0;
1134 : vlib_buffer_t *b0;
1135 :
1136 17 : bi0 = b[0];
1137 17 : b += 1;
1138 17 : n_left -= 1;
1139 :
1140 17 : b0 = vlib_get_buffer (vm, bi0);
1141 17 : b0->flags |= s->buffer_flags;
1142 17 : vnet_buffer (b0)->sw_if_index[VLIB_RX] = s->sw_if_index[VLIB_RX];
1143 17 : vnet_buffer (b0)->sw_if_index[VLIB_TX] = s->sw_if_index[VLIB_TX];
1144 :
1145 17 : if (set_data)
1146 17 : clib_memcpy_fast (b0->data, data, n_data);
1147 : else
1148 0 : ASSERT (validate_buffer_data2 (b0, s, data_offset, n_data));
1149 : }
1150 8 : }
1151 :
1152 : static u32
1153 8 : pg_stream_fill_helper (pg_main_t * pg,
1154 : pg_stream_t * s,
1155 : pg_buffer_index_t * bi,
1156 : u32 * buffers, u32 * next_buffers, u32 n_alloc)
1157 : {
1158 8 : vlib_main_t *vm = vlib_get_main ();
1159 8 : uword is_start_of_packet = bi == s->buffer_indices;
1160 : u32 n_allocated;
1161 :
1162 8 : ASSERT (vec_len (s->replay_packet_templates) == 0);
1163 :
1164 8 : n_allocated = vlib_buffer_alloc (vm, buffers, n_alloc);
1165 8 : if (n_allocated == 0)
1166 0 : return 0;
1167 :
1168 : /*
1169 : * We can't assume we got all the buffers we asked for...
1170 : * This never worked until recently.
1171 : */
1172 8 : n_alloc = n_allocated;
1173 :
1174 : /* Reinitialize buffers */
1175 8 : init_buffers_inline
1176 : (vm, s,
1177 : buffers,
1178 8 : n_alloc, (bi - s->buffer_indices) * s->buffer_bytes /* data offset */ ,
1179 : s->buffer_bytes,
1180 : /* set_data */ 1);
1181 :
1182 8 : if (next_buffers)
1183 0 : pg_set_next_buffer_pointers (pg, s, buffers, next_buffers, n_alloc);
1184 :
1185 8 : if (is_start_of_packet)
1186 : {
1187 8 : pg_generate_set_lengths (pg, s, buffers, n_alloc);
1188 8 : if (vec_len (s->buffer_indices) > 1)
1189 0 : pg_generate_fix_multi_buffer_lengths (pg, s, buffers, n_alloc);
1190 :
1191 8 : pg_generate_edit (pg, s, buffers, n_alloc);
1192 : }
1193 :
1194 8 : return n_alloc;
1195 : }
1196 :
1197 : static u32
1198 15564 : pg_stream_fill_replay (pg_main_t * pg, pg_stream_t * s, u32 n_alloc)
1199 : {
1200 : pg_buffer_index_t *bi;
1201 : u32 n_left, i, l;
1202 15564 : u32 buffer_alloc_request = 0;
1203 : u32 buffer_alloc_result;
1204 : u32 current_buffer_index;
1205 : u32 *buffers;
1206 15564 : vlib_main_t *vm = vlib_get_main ();
1207 15564 : vnet_main_t *vnm = vnet_get_main ();
1208 15569 : u32 buf_sz = vlib_buffer_get_default_data_size (vm);
1209 15567 : vnet_interface_main_t *im = &vnm->interface_main;
1210 : vnet_sw_interface_t *si;
1211 :
1212 15567 : buffers = pg->replay_buffers_by_thread[vm->thread_index];
1213 15567 : vec_reset_length (buffers);
1214 15562 : bi = s->buffer_indices;
1215 :
1216 15562 : n_left = n_alloc;
1217 15562 : i = s->current_replay_packet_index;
1218 15562 : l = vec_len (s->replay_packet_templates);
1219 :
1220 : /* Figure out how many buffers we need */
1221 539575 : while (n_left > 0)
1222 : {
1223 : u8 *d0;
1224 :
1225 523999 : d0 = vec_elt (s->replay_packet_templates, i);
1226 523991 : buffer_alloc_request += (vec_len (d0) + (buf_sz - 1)) / buf_sz;
1227 :
1228 524011 : i = ((i + 1) == l) ? 0 : i + 1;
1229 524011 : n_left--;
1230 : }
1231 :
1232 15576 : ASSERT (buffer_alloc_request > 0);
1233 15576 : vec_validate (buffers, buffer_alloc_request - 1);
1234 :
1235 : /* Allocate that many buffers */
1236 15575 : buffer_alloc_result = vlib_buffer_alloc (vm, buffers, buffer_alloc_request);
1237 15582 : if (buffer_alloc_result < buffer_alloc_request)
1238 : {
1239 0 : clib_warning ("alloc failure, got %d not %d", buffer_alloc_result,
1240 : buffer_alloc_request);
1241 0 : vlib_buffer_free_no_next (vm, buffers, buffer_alloc_result);
1242 0 : pg->replay_buffers_by_thread[vm->thread_index] = buffers;
1243 0 : return 0;
1244 : }
1245 :
1246 : /* Now go generate the buffers, and add them to the FIFO */
1247 15582 : n_left = n_alloc;
1248 :
1249 15582 : current_buffer_index = 0;
1250 15582 : i = s->current_replay_packet_index;
1251 15582 : l = vec_len (s->replay_packet_templates);
1252 541475 : while (n_left > 0)
1253 : {
1254 : u8 *d0;
1255 : int not_last;
1256 : u32 data_offset;
1257 : u32 bytes_to_copy, bytes_this_chunk;
1258 : vlib_buffer_t *b;
1259 :
1260 525918 : d0 = vec_elt (s->replay_packet_templates, i);
1261 525898 : data_offset = 0;
1262 525898 : bytes_to_copy = vec_len (d0);
1263 :
1264 : /* Add head chunk to pg fifo */
1265 525827 : clib_fifo_add1 (bi->buffer_fifo, buffers[current_buffer_index]);
1266 :
1267 : /* Copy the data */
1268 1193100 : while (bytes_to_copy)
1269 : {
1270 667210 : bytes_this_chunk = clib_min (bytes_to_copy, buf_sz);
1271 667210 : ASSERT (current_buffer_index < vec_len (buffers));
1272 667135 : b = vlib_get_buffer (vm, buffers[current_buffer_index]);
1273 667555 : clib_memcpy_fast (b->data, d0 + data_offset, bytes_this_chunk);
1274 667219 : vnet_buffer (b)->sw_if_index[VLIB_RX] = s->sw_if_index[VLIB_RX];
1275 667219 : vnet_buffer (b)->sw_if_index[VLIB_TX] = s->sw_if_index[VLIB_TX];
1276 667219 : b->flags = s->buffer_flags;
1277 667219 : b->next_buffer = 0;
1278 667219 : b->current_data = 0;
1279 667219 : b->current_length = bytes_this_chunk;
1280 :
1281 667219 : not_last = bytes_this_chunk < bytes_to_copy;
1282 667219 : if (not_last)
1283 : {
1284 141331 : ASSERT (current_buffer_index < (vec_len (buffers) - 1));
1285 141331 : b->flags |= VLIB_BUFFER_NEXT_PRESENT;
1286 141331 : b->next_buffer = buffers[current_buffer_index + 1];
1287 : }
1288 667219 : bytes_to_copy -= bytes_this_chunk;
1289 667219 : data_offset += bytes_this_chunk;
1290 667219 : current_buffer_index++;
1291 : }
1292 :
1293 525893 : i = ((i + 1) == l) ? 0 : i + 1;
1294 525893 : n_left--;
1295 : }
1296 :
1297 : /* Update the interface counters */
1298 15557 : si = vnet_get_sw_interface (vnm, s->sw_if_index[VLIB_RX]);
1299 15581 : l = 0;
1300 543289 : for (i = 0; i < n_alloc; i++)
1301 527715 : l += vlib_buffer_index_length_in_chain (vm, buffers[i]);
1302 15582 : vlib_increment_combined_counter (im->combined_sw_if_counters
1303 : + VNET_INTERFACE_COUNTER_RX,
1304 15574 : vlib_get_thread_index (),
1305 : si->sw_if_index, n_alloc, l);
1306 :
1307 15582 : s->current_replay_packet_index += n_alloc;
1308 15582 : s->current_replay_packet_index %= vec_len (s->replay_packet_templates);
1309 :
1310 15582 : pg->replay_buffers_by_thread[vm->thread_index] = buffers;
1311 15582 : return n_alloc;
1312 : }
1313 :
1314 :
1315 : static u32
1316 15573 : pg_stream_fill (pg_main_t * pg, pg_stream_t * s, u32 n_buffers)
1317 : {
1318 : pg_buffer_index_t *bi;
1319 : word i, n_in_fifo, n_alloc, n_free, n_added;
1320 : u32 *tail, *start, *end, *last_tail, *last_start;
1321 :
1322 15573 : bi = s->buffer_indices;
1323 :
1324 15573 : n_in_fifo = clib_fifo_elts (bi->buffer_fifo);
1325 15571 : if (n_in_fifo >= n_buffers)
1326 0 : return n_in_fifo;
1327 :
1328 15571 : n_alloc = n_buffers - n_in_fifo;
1329 :
1330 : /* Round up, but never generate more than limit. */
1331 15571 : n_alloc = clib_max (VLIB_FRAME_SIZE, n_alloc);
1332 :
1333 15571 : if (s->n_packets_limit > 0
1334 15571 : && s->n_packets_generated + n_in_fifo + n_alloc >= s->n_packets_limit)
1335 : {
1336 15044 : n_alloc = s->n_packets_limit - s->n_packets_generated - n_in_fifo;
1337 15044 : if (n_alloc < 0)
1338 0 : n_alloc = 0;
1339 : }
1340 :
1341 : /*
1342 : * Handle pcap replay directly
1343 : */
1344 15571 : if (s->replay_packet_templates)
1345 15563 : return pg_stream_fill_replay (pg, s, n_alloc);
1346 :
1347 : /* All buffer fifos should have the same size. */
1348 : if (CLIB_DEBUG > 0)
1349 : {
1350 8 : uword l = ~0, e;
1351 16 : vec_foreach (bi, s->buffer_indices)
1352 : {
1353 8 : e = clib_fifo_elts (bi->buffer_fifo);
1354 8 : if (bi == s->buffer_indices)
1355 8 : l = e;
1356 8 : ASSERT (l == e);
1357 : }
1358 : }
1359 :
1360 8 : last_tail = last_start = 0;
1361 8 : n_added = n_alloc;
1362 :
1363 16 : for (i = vec_len (s->buffer_indices) - 1; i >= 0; i--)
1364 : {
1365 8 : bi = vec_elt_at_index (s->buffer_indices, i);
1366 :
1367 8 : n_free = clib_fifo_free_elts (bi->buffer_fifo);
1368 8 : if (n_free < n_alloc)
1369 6 : clib_fifo_resize (bi->buffer_fifo, n_alloc - n_free);
1370 :
1371 8 : tail = clib_fifo_advance_tail (bi->buffer_fifo, n_alloc);
1372 8 : start = bi->buffer_fifo;
1373 8 : end = clib_fifo_end (bi->buffer_fifo);
1374 :
1375 8 : if (tail + n_alloc <= end)
1376 : {
1377 8 : n_added =
1378 8 : pg_stream_fill_helper (pg, s, bi, tail, last_tail, n_alloc);
1379 : }
1380 : else
1381 : {
1382 0 : u32 n = clib_min (end - tail, n_alloc);
1383 0 : n_added = pg_stream_fill_helper (pg, s, bi, tail, last_tail, n);
1384 :
1385 0 : if (n_added == n && n_alloc > n_added)
1386 : {
1387 0 : n_added += pg_stream_fill_helper
1388 : (pg, s, bi, start, last_start, n_alloc - n_added);
1389 : }
1390 : }
1391 :
1392 8 : if (PREDICT_FALSE (n_added < n_alloc))
1393 0 : tail = clib_fifo_advance_tail (bi->buffer_fifo, n_added - n_alloc);
1394 :
1395 8 : last_tail = tail;
1396 8 : last_start = start;
1397 :
1398 : /* Verify that pkts in the fifo are properly allocated */
1399 : }
1400 :
1401 8 : return n_in_fifo + n_added;
1402 : }
1403 :
1404 : typedef struct
1405 : {
1406 : u32 stream_index;
1407 :
1408 : u32 packet_length;
1409 : u32 sw_if_index;
1410 :
1411 : /* Use pre data for packet data. */
1412 : vlib_buffer_t buffer;
1413 : } pg_input_trace_t;
1414 :
1415 : static u8 *
1416 338897 : format_pg_input_trace (u8 * s, va_list * va)
1417 : {
1418 338897 : vlib_main_t *vm = va_arg (*va, vlib_main_t *);
1419 338897 : CLIB_UNUSED (vlib_node_t * node) = va_arg (*va, vlib_node_t *);
1420 338897 : pg_input_trace_t *t = va_arg (*va, pg_input_trace_t *);
1421 338897 : pg_main_t *pg = &pg_main;
1422 : pg_stream_t *stream;
1423 : vlib_node_t *n;
1424 338897 : u32 indent = format_get_indent (s);
1425 :
1426 338897 : stream = 0;
1427 338897 : if (!pool_is_free_index (pg->streams, t->stream_index))
1428 172 : stream = pool_elt_at_index (pg->streams, t->stream_index);
1429 :
1430 338897 : if (stream)
1431 172 : s = format (s, "stream %v", pg->streams[t->stream_index].name);
1432 : else
1433 338725 : s = format (s, "stream %d", t->stream_index);
1434 :
1435 338897 : s = format (s, ", %d bytes", t->packet_length);
1436 338897 : s = format (s, ", sw_if_index %d", t->sw_if_index);
1437 :
1438 338897 : s = format (s, "\n%U%U", format_white_space, indent,
1439 : format_vnet_buffer_no_chain, &t->buffer);
1440 :
1441 338897 : s = format (s, "\n%U", format_white_space, indent);
1442 :
1443 338897 : n = 0;
1444 338897 : if (stream)
1445 172 : n = vlib_get_node (vm, stream->node_index);
1446 :
1447 338897 : if (n && n->format_buffer)
1448 172 : s = format (s, "%U", n->format_buffer,
1449 172 : t->buffer.pre_data, sizeof (t->buffer.pre_data));
1450 : else
1451 338725 : s = format (s, "%U",
1452 338725 : format_hex_bytes, t->buffer.pre_data,
1453 : ARRAY_LEN (t->buffer.pre_data));
1454 338897 : return s;
1455 : }
1456 :
1457 : static int
1458 15462 : pg_input_trace (pg_main_t * pg,
1459 : vlib_node_runtime_t * node, u32 stream_index, u32 next_index,
1460 : u32 * buffers, const u32 n_buffers, const u32 n_trace)
1461 : {
1462 15462 : vlib_main_t *vm = vlib_get_main ();
1463 : u32 *b, n_left;
1464 15462 : u32 n_trace0 = 0, n_trace1 = 0;
1465 :
1466 15462 : n_left = clib_min (n_buffers, n_trace);
1467 15462 : b = buffers;
1468 :
1469 260202 : while (n_left >= 2)
1470 : {
1471 : u32 bi0, bi1;
1472 : vlib_buffer_t *b0, *b1;
1473 : pg_input_trace_t *t0, *t1;
1474 :
1475 244741 : bi0 = b[0];
1476 244741 : bi1 = b[1];
1477 244741 : b += 2;
1478 244741 : n_left -= 2;
1479 :
1480 244741 : b0 = vlib_get_buffer (vm, bi0);
1481 244760 : b1 = vlib_get_buffer (vm, bi1);
1482 :
1483 244602 : n_trace0 +=
1484 244734 : vlib_trace_buffer (vm, node, next_index, b0, /* follow_chain */ 1);
1485 244563 : n_trace1 +=
1486 244602 : vlib_trace_buffer (vm, node, next_index, b1, /* follow_chain */ 1);
1487 :
1488 244563 : t0 = vlib_add_trace (vm, node, b0, sizeof (t0[0]));
1489 244901 : t1 = vlib_add_trace (vm, node, b1, sizeof (t1[0]));
1490 :
1491 244855 : t0->stream_index = stream_index;
1492 244855 : t1->stream_index = stream_index;
1493 :
1494 244855 : t0->packet_length = vlib_buffer_length_in_chain (vm, b0);
1495 244841 : t1->packet_length = vlib_buffer_length_in_chain (vm, b1);
1496 :
1497 244803 : t0->sw_if_index = vnet_buffer (b0)->sw_if_index[VLIB_RX];
1498 244803 : t1->sw_if_index = vnet_buffer (b1)->sw_if_index[VLIB_RX];
1499 :
1500 244803 : clib_memcpy_fast (&t0->buffer, b0,
1501 : sizeof (b0[0]) - sizeof (b0->pre_data));
1502 244716 : clib_memcpy_fast (&t1->buffer, b1,
1503 : sizeof (b1[0]) - sizeof (b1->pre_data));
1504 :
1505 244732 : clib_memcpy_fast (t0->buffer.pre_data, b0->data,
1506 : sizeof (t0->buffer.pre_data));
1507 244731 : clib_memcpy_fast (t1->buffer.pre_data, b1->data,
1508 : sizeof (t1->buffer.pre_data));
1509 : }
1510 :
1511 28266 : while (n_left >= 1)
1512 : {
1513 : u32 bi0;
1514 : vlib_buffer_t *b0;
1515 : pg_input_trace_t *t0;
1516 :
1517 12804 : bi0 = b[0];
1518 12804 : b += 1;
1519 12804 : n_left -= 1;
1520 :
1521 12804 : b0 = vlib_get_buffer (vm, bi0);
1522 :
1523 12804 : n_trace0 +=
1524 12804 : vlib_trace_buffer (vm, node, next_index, b0, /* follow_chain */ 1);
1525 12804 : t0 = vlib_add_trace (vm, node, b0, sizeof (t0[0]));
1526 :
1527 12804 : t0->stream_index = stream_index;
1528 12804 : t0->packet_length = vlib_buffer_length_in_chain (vm, b0);
1529 12804 : t0->sw_if_index = vnet_buffer (b0)->sw_if_index[VLIB_RX];
1530 12804 : clib_memcpy_fast (&t0->buffer, b0,
1531 : sizeof (b0[0]) - sizeof (b0->pre_data));
1532 12804 : clib_memcpy_fast (t0->buffer.pre_data, b0->data,
1533 : sizeof (t0->buffer.pre_data));
1534 : }
1535 :
1536 15462 : return n_trace - n_trace0 - n_trace1;
1537 : }
1538 :
1539 : static_always_inline void
1540 56 : fill_buffer_offload_flags (vlib_main_t *vm, u32 *buffers, u32 n_buffers,
1541 : u32 buffer_oflags, int gso_enabled, u32 gso_size)
1542 : {
1543 793 : for (int i = 0; i < n_buffers; i++)
1544 : {
1545 737 : vlib_buffer_t *b0 = vlib_get_buffer (vm, buffers[i]);
1546 737 : u8 l4_proto = 0;
1547 737 : vnet_buffer_oflags_t oflags = 0;
1548 :
1549 : ethernet_header_t *eh =
1550 737 : (ethernet_header_t *) vlib_buffer_get_current (b0);
1551 737 : u16 ethertype = clib_net_to_host_u16 (eh->type);
1552 737 : u16 l2hdr_sz = sizeof (ethernet_header_t);
1553 :
1554 737 : if (ethernet_frame_is_tagged (ethertype))
1555 : {
1556 0 : ethernet_vlan_header_t *vlan = (ethernet_vlan_header_t *) (eh + 1);
1557 :
1558 0 : ethertype = clib_net_to_host_u16 (vlan->type);
1559 0 : l2hdr_sz += sizeof (*vlan);
1560 0 : if (ethertype == ETHERNET_TYPE_VLAN)
1561 : {
1562 0 : vlan++;
1563 0 : ethertype = clib_net_to_host_u16 (vlan->type);
1564 0 : l2hdr_sz += sizeof (*vlan);
1565 : }
1566 : }
1567 :
1568 737 : vnet_buffer (b0)->l2_hdr_offset = 0;
1569 737 : vnet_buffer (b0)->l3_hdr_offset = l2hdr_sz;
1570 :
1571 737 : if (PREDICT_TRUE (ethertype == ETHERNET_TYPE_IP4))
1572 : {
1573 472 : ip4_header_t *ip4 =
1574 472 : (ip4_header_t *) (vlib_buffer_get_current (b0) + l2hdr_sz);
1575 472 : vnet_buffer (b0)->l4_hdr_offset = l2hdr_sz + ip4_header_bytes (ip4);
1576 472 : l4_proto = ip4->protocol;
1577 472 : b0->flags |=
1578 : (VNET_BUFFER_F_IS_IP4 | VNET_BUFFER_F_L2_HDR_OFFSET_VALID |
1579 : VNET_BUFFER_F_L3_HDR_OFFSET_VALID |
1580 : VNET_BUFFER_F_L4_HDR_OFFSET_VALID);
1581 472 : if (buffer_oflags & VNET_BUFFER_OFFLOAD_F_IP_CKSUM)
1582 130 : oflags |= VNET_BUFFER_OFFLOAD_F_IP_CKSUM;
1583 : }
1584 265 : else if (PREDICT_TRUE (ethertype == ETHERNET_TYPE_IP6))
1585 : {
1586 251 : ip6_header_t *ip6 =
1587 251 : (ip6_header_t *) (vlib_buffer_get_current (b0) + l2hdr_sz);
1588 251 : vnet_buffer (b0)->l4_hdr_offset = l2hdr_sz + sizeof (ip6_header_t);
1589 : /* FIXME IPv6 EH traversal */
1590 251 : l4_proto = ip6->protocol;
1591 251 : b0->flags |=
1592 : (VNET_BUFFER_F_IS_IP6 | VNET_BUFFER_F_L2_HDR_OFFSET_VALID |
1593 : VNET_BUFFER_F_L3_HDR_OFFSET_VALID |
1594 : VNET_BUFFER_F_L4_HDR_OFFSET_VALID);
1595 : }
1596 :
1597 737 : if (l4_proto == IP_PROTOCOL_TCP)
1598 : {
1599 579 : if (buffer_oflags & VNET_BUFFER_OFFLOAD_F_TCP_CKSUM)
1600 0 : oflags |= VNET_BUFFER_OFFLOAD_F_TCP_CKSUM;
1601 :
1602 : /* only set GSO flag for chained buffers */
1603 579 : if (gso_enabled && (b0->flags & VLIB_BUFFER_NEXT_PRESENT))
1604 : {
1605 279 : b0->flags |= VNET_BUFFER_F_GSO;
1606 279 : tcp_header_t *tcp =
1607 279 : (tcp_header_t *) (vlib_buffer_get_current (b0) +
1608 279 : vnet_buffer (b0)->l4_hdr_offset);
1609 279 : vnet_buffer2 (b0)->gso_l4_hdr_sz = tcp_header_bytes (tcp);
1610 279 : vnet_buffer2 (b0)->gso_size = gso_size;
1611 : }
1612 : }
1613 158 : else if (l4_proto == IP_PROTOCOL_UDP)
1614 : {
1615 130 : if (buffer_oflags & VNET_BUFFER_OFFLOAD_F_UDP_CKSUM)
1616 130 : oflags |= VNET_BUFFER_OFFLOAD_F_UDP_CKSUM;
1617 : }
1618 :
1619 737 : if (oflags)
1620 130 : vnet_buffer_offload_flags_set (b0, oflags);
1621 : }
1622 56 : }
1623 :
1624 : static uword
1625 15576 : pg_generate_packets (vlib_node_runtime_t * node,
1626 : pg_main_t * pg,
1627 : pg_stream_t * s, uword n_packets_to_generate)
1628 : {
1629 15576 : vlib_main_t *vm = vlib_get_main ();
1630 : u32 *to_next, n_this_frame, n_left, n_trace, n_packets_in_fifo;
1631 : uword n_packets_generated;
1632 : pg_buffer_index_t *bi, *bi0;
1633 15575 : u32 next_index = s->next_index;
1634 15575 : vnet_feature_main_t *fm = &feature_main;
1635 : vnet_feature_config_main_t *cm;
1636 15575 : u8 feature_arc_index = fm->device_input_feature_arc_index;
1637 15575 : cm = &fm->feature_config_mains[feature_arc_index];
1638 15575 : u32 current_config_index = ~(u32) 0;
1639 : pg_interface_t *pi;
1640 : int i;
1641 :
1642 15575 : pi = pool_elt_at_index (pg->interfaces,
1643 : pg->if_id_by_sw_if_index[s->sw_if_index[VLIB_RX]]);
1644 15573 : bi0 = s->buffer_indices;
1645 :
1646 15573 : n_packets_in_fifo = pg_stream_fill (pg, s, n_packets_to_generate);
1647 15590 : n_packets_to_generate = clib_min (n_packets_in_fifo, n_packets_to_generate);
1648 15590 : n_packets_generated = 0;
1649 :
1650 15590 : if (PREDICT_FALSE
1651 : (vnet_have_features (feature_arc_index, s->sw_if_index[VLIB_RX])))
1652 : {
1653 42 : current_config_index =
1654 42 : vec_elt (cm->config_index_by_sw_if_index, s->sw_if_index[VLIB_RX]);
1655 42 : vnet_get_config_data (&cm->config_main, ¤t_config_index,
1656 : &next_index, 0);
1657 : }
1658 :
1659 15589 : if (PREDICT_FALSE (pi->coalesce_enabled))
1660 : {
1661 5 : vnet_hw_if_tx_queue_t txq = { 0 };
1662 5 : vnet_gro_flow_table_schedule_node_on_dispatcher (vm, &txq,
1663 : pi->flow_table);
1664 : }
1665 :
1666 31179 : while (n_packets_to_generate > 0)
1667 : {
1668 : u32 *head, *start, *end;
1669 :
1670 15589 : if (PREDICT_TRUE (next_index == VNET_DEVICE_INPUT_NEXT_ETHERNET_INPUT))
1671 : {
1672 : vlib_next_frame_t *nf;
1673 : vlib_frame_t *f;
1674 : ethernet_input_frame_t *ef;
1675 15537 : vlib_get_new_next_frame (vm, node, next_index, to_next, n_left);
1676 15538 : nf = vlib_node_runtime_get_next_frame (vm, node, next_index);
1677 15538 : f = vlib_get_frame (vm, nf->frame);
1678 15538 : f->flags = ETH_INPUT_FRAME_F_SINGLE_SW_IF_IDX;
1679 :
1680 15538 : ef = vlib_frame_scalar_args (f);
1681 15538 : ef->sw_if_index = pi->sw_if_index;
1682 15538 : ef->hw_if_index = pi->hw_if_index;
1683 15538 : vlib_frame_no_append (f);
1684 : }
1685 : else
1686 52 : vlib_get_next_frame (vm, node, next_index, to_next, n_left);
1687 :
1688 15590 : n_this_frame = n_packets_to_generate;
1689 15590 : if (n_this_frame > n_left)
1690 0 : n_this_frame = n_left;
1691 :
1692 15590 : start = bi0->buffer_fifo;
1693 15590 : end = clib_fifo_end (bi0->buffer_fifo);
1694 15589 : head = clib_fifo_head (bi0->buffer_fifo);
1695 :
1696 15589 : if (head + n_this_frame <= end)
1697 15589 : vlib_buffer_copy_indices (to_next, head, n_this_frame);
1698 : else
1699 : {
1700 0 : u32 n = end - head;
1701 0 : vlib_buffer_copy_indices (to_next + 0, head, n);
1702 0 : vlib_buffer_copy_indices (to_next + n, start, n_this_frame - n);
1703 : }
1704 :
1705 15590 : if (s->replay_packet_templates == 0)
1706 : {
1707 16 : vec_foreach (bi, s->buffer_indices)
1708 8 : clib_fifo_advance_head (bi->buffer_fifo, n_this_frame);
1709 : }
1710 : else
1711 : {
1712 15582 : clib_fifo_advance_head (bi0->buffer_fifo, n_this_frame);
1713 : }
1714 :
1715 15590 : if (current_config_index != ~(u32) 0)
1716 2255 : for (i = 0; i < n_this_frame; i++)
1717 : {
1718 : vlib_buffer_t *b;
1719 2213 : b = vlib_get_buffer (vm, to_next[i]);
1720 2213 : b->current_config_index = current_config_index;
1721 2213 : vnet_buffer (b)->feature_arc_index = feature_arc_index;
1722 : }
1723 :
1724 15590 : if (pi->gso_enabled || (s->buffer_flags & VNET_BUFFER_F_OFFLOAD))
1725 : {
1726 56 : fill_buffer_offload_flags (vm, to_next, n_this_frame,
1727 56 : s->buffer_oflags, pi->gso_enabled,
1728 : pi->gso_size);
1729 : }
1730 :
1731 15590 : n_trace = vlib_get_trace_count (vm, node);
1732 15590 : if (PREDICT_FALSE (n_trace > 0))
1733 : {
1734 15462 : n_trace =
1735 15462 : pg_input_trace (pg, node, s - pg->streams, next_index, to_next,
1736 : n_this_frame, n_trace);
1737 15462 : vlib_set_trace_count (vm, node, n_trace);
1738 : }
1739 15590 : n_packets_to_generate -= n_this_frame;
1740 15590 : n_packets_generated += n_this_frame;
1741 15590 : n_left -= n_this_frame;
1742 : if (CLIB_DEBUG > 0)
1743 : {
1744 : int i;
1745 : vlib_buffer_t *b;
1746 :
1747 544452 : for (i = 0; i < n_this_frame; i++)
1748 : {
1749 528862 : b = vlib_get_buffer (vm, to_next[i]);
1750 528862 : ASSERT ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0 ||
1751 : b->current_length >= VLIB_BUFFER_MIN_CHAIN_SEG_SIZE);
1752 : }
1753 : }
1754 15590 : vlib_put_next_frame (vm, node, next_index, n_left);
1755 : }
1756 :
1757 15590 : return n_packets_generated;
1758 : }
1759 :
1760 : static uword
1761 30618 : pg_input_stream (vlib_node_runtime_t * node, pg_main_t * pg, pg_stream_t * s)
1762 : {
1763 30618 : vlib_main_t *vm = vlib_get_main ();
1764 : uword n_packets;
1765 : f64 time_now, dt;
1766 :
1767 30620 : if (s->n_packets_limit > 0 && s->n_packets_generated >= s->n_packets_limit)
1768 : {
1769 15050 : pg_stream_enable_disable (pg, s, /* want_enabled */ 0);
1770 15051 : return 0;
1771 : }
1772 :
1773 : /* Apply rate limit. */
1774 15570 : time_now = vlib_time_now (vm);
1775 15574 : if (s->time_last_generate == 0)
1776 15043 : s->time_last_generate = time_now;
1777 :
1778 15574 : dt = time_now - s->time_last_generate;
1779 15574 : s->time_last_generate = time_now;
1780 :
1781 15574 : n_packets = VLIB_FRAME_SIZE;
1782 15574 : if (s->rate_packets_per_second > 0)
1783 : {
1784 0 : s->packet_accumulator += dt * s->rate_packets_per_second;
1785 0 : n_packets = s->packet_accumulator;
1786 :
1787 : /* Never allow accumulator to grow if we get behind. */
1788 0 : s->packet_accumulator -= n_packets;
1789 : }
1790 :
1791 : /* Apply fixed limit. */
1792 15574 : if (s->n_packets_limit > 0
1793 15574 : && s->n_packets_generated + n_packets > s->n_packets_limit)
1794 15047 : n_packets = s->n_packets_limit - s->n_packets_generated;
1795 :
1796 : /* Generate up to one frame's worth of packets. */
1797 15574 : if (n_packets > s->n_max_frame)
1798 0 : n_packets = s->n_max_frame;
1799 :
1800 15574 : if (n_packets > 0)
1801 15574 : n_packets = pg_generate_packets (node, pg, s, n_packets);
1802 :
1803 15590 : s->n_packets_generated += n_packets;
1804 :
1805 15590 : return n_packets;
1806 : }
1807 :
1808 : uword
1809 29622 : pg_input (vlib_main_t * vm, vlib_node_runtime_t * node, vlib_frame_t * frame)
1810 : {
1811 : uword i;
1812 29622 : pg_main_t *pg = &pg_main;
1813 29622 : uword n_packets = 0;
1814 29622 : u32 worker_index = 0;
1815 :
1816 29622 : if (vlib_num_workers ())
1817 1563 : worker_index = vlib_get_current_worker_index ();
1818 :
1819 : /* *INDENT-OFF* */
1820 60260 : clib_bitmap_foreach (i, pg->enabled_streams[worker_index]) {
1821 30619 : pg_stream_t *s = vec_elt_at_index (pg->streams, i);
1822 30617 : n_packets += pg_input_stream (node, pg, s);
1823 : }
1824 : /* *INDENT-ON* */
1825 :
1826 29639 : return n_packets;
1827 : }
1828 :
1829 : /* *INDENT-OFF* */
1830 178120 : VLIB_REGISTER_NODE (pg_input_node) = {
1831 : .function = pg_input,
1832 : .flags = VLIB_NODE_FLAG_TRACE_SUPPORTED,
1833 : .name = "pg-input",
1834 : .sibling_of = "device-input",
1835 : .type = VLIB_NODE_TYPE_INPUT,
1836 :
1837 : .format_trace = format_pg_input_trace,
1838 :
1839 : /* Input node will be left disabled until a stream is active. */
1840 : .state = VLIB_NODE_STATE_DISABLED,
1841 : };
1842 : /* *INDENT-ON* */
1843 :
1844 572 : VLIB_NODE_FN (pg_input_mac_filter) (vlib_main_t * vm,
1845 : vlib_node_runtime_t * node,
1846 : vlib_frame_t * frame)
1847 : {
1848 13 : vlib_buffer_t *bufs[VLIB_FRAME_SIZE], **b = bufs;
1849 : u16 nexts[VLIB_FRAME_SIZE], *next;
1850 13 : pg_main_t *pg = &pg_main;
1851 : u32 n_left, *from;
1852 :
1853 13 : from = vlib_frame_vector_args (frame);
1854 13 : n_left = frame->n_vectors;
1855 13 : next = nexts;
1856 :
1857 13 : clib_memset_u16 (next, 0, VLIB_FRAME_SIZE);
1858 :
1859 13 : vlib_get_buffers (vm, from, bufs, n_left);
1860 :
1861 1196 : while (n_left)
1862 : {
1863 : const ethernet_header_t *eth;
1864 : pg_interface_t *pi;
1865 : mac_address_t in;
1866 :
1867 1183 : pi = pool_elt_at_index
1868 : (pg->interfaces,
1869 : pg->if_id_by_sw_if_index[vnet_buffer (b[0])->sw_if_index[VLIB_RX]]);
1870 1183 : eth = vlib_buffer_get_current (b[0]);
1871 :
1872 1183 : mac_address_from_bytes (&in, eth->dst_address);
1873 :
1874 1183 : if (PREDICT_FALSE (ethernet_address_cast (in.bytes)))
1875 : {
1876 : mac_address_t *allowed;
1877 :
1878 1183 : if (0 != vec_len (pi->allowed_mcast_macs))
1879 : {
1880 1183 : vec_foreach (allowed, pi->allowed_mcast_macs)
1881 : {
1882 1183 : if (0 != mac_address_cmp (allowed, &in))
1883 1183 : break;
1884 : }
1885 :
1886 1183 : if (vec_is_member (allowed, pi->allowed_mcast_macs))
1887 1183 : vnet_feature_next_u16 (&next[0], b[0]);
1888 : }
1889 : }
1890 :
1891 1183 : b += 1;
1892 1183 : next += 1;
1893 1183 : n_left -= 1;
1894 : }
1895 :
1896 13 : vlib_buffer_enqueue_to_next (vm, node, from, nexts, frame->n_vectors);
1897 :
1898 13 : return (frame->n_vectors);
1899 : }
1900 :
1901 : /* *INDENT-OFF* */
1902 178120 : VLIB_REGISTER_NODE (pg_input_mac_filter) = {
1903 : .name = "pg-input-mac-filter",
1904 : .vector_size = sizeof (u32),
1905 : .format_trace = format_pg_input_trace,
1906 : .n_next_nodes = 1,
1907 : .next_nodes = {
1908 : [0] = "error-drop",
1909 : },
1910 : };
1911 70583 : VNET_FEATURE_INIT (pg_input_mac_filter_feat, static) = {
1912 : .arc_name = "device-input",
1913 : .node_name = "pg-input-mac-filter",
1914 : };
1915 : /* *INDENT-ON* */
1916 :
1917 : static clib_error_t *
1918 28 : pg_input_mac_filter_cfg (vlib_main_t * vm,
1919 : unformat_input_t * input, vlib_cli_command_t * cmd)
1920 : {
1921 28 : unformat_input_t _line_input, *line_input = &_line_input;
1922 28 : u32 sw_if_index = ~0;
1923 28 : int is_enable = 1;
1924 :
1925 28 : if (!unformat_user (input, unformat_line_input, line_input))
1926 0 : return 0;
1927 :
1928 84 : while (unformat_check_input (line_input) != UNFORMAT_END_OF_INPUT)
1929 : {
1930 56 : if (unformat (line_input, "%U",
1931 : unformat_vnet_sw_interface,
1932 : vnet_get_main (), &sw_if_index))
1933 : ;
1934 28 : else if (unformat (line_input, "%U",
1935 : unformat_vlib_enable_disable, &is_enable))
1936 : ;
1937 : else
1938 0 : return clib_error_create ("unknown input `%U'",
1939 : format_unformat_error, line_input);
1940 : }
1941 28 : unformat_free (line_input);
1942 :
1943 28 : if (~0 == sw_if_index)
1944 0 : return clib_error_create ("specify interface");
1945 :
1946 28 : vnet_feature_enable_disable ("device-input",
1947 : "pg-input-mac-filter",
1948 : sw_if_index, is_enable, 0, 0);
1949 :
1950 28 : return NULL;
1951 : }
1952 :
1953 : /* *INDENT-OFF* */
1954 272887 : VLIB_CLI_COMMAND (enable_streams_cli, static) = {
1955 : .path = "packet-generator mac-filter",
1956 : .short_help = "packet-generator mac-filter <INTERFACE> <on|off>",
1957 : .function = pg_input_mac_filter_cfg,
1958 : };
1959 : /* *INDENT-ON* */
1960 :
1961 :
1962 : /*
1963 : * fd.io coding-style-patch-verification: ON
1964 : *
1965 : * Local Variables:
1966 : * eval: (c-set-style "gnu")
1967 : * End:
1968 : */
|