/*
   american fuzzy lop++ - fuzz_one routines in different flavours
   ---------------------------------------------------------------

   Originally written by Michal Zalewski

   Now maintained by Marc Heuse <mh@mh-sec.de>,
                        Heiko Eißfeldt <heiko.eissfeldt@hexco.de> and
                        Andrea Fioraldi <andreafioraldi@gmail.com>

   Copyright 2016, 2017 Google Inc. All rights reserved.
   Copyright 2019-2022 AFLplusplus Project. All rights reserved.

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at:

     https://www.apache.org/licenses/LICENSE-2.0

   This is the real deal: the program takes an instrumented binary and
   attempts a variety of basic fuzzing tricks, paying close attention to
   how they affect the execution path.

 */

#include "afl-fuzz.h"
#include <string.h>
#include <limits.h>
#include "cmplog.h"

/* MOpt */

static int select_algorithm(afl_state_t *afl, u32 max_algorithm) {

  int i_puppet, j_puppet = 0, operator_number = max_algorithm;

  double range_sele =
      (double)afl->probability_now[afl->swarm_now][operator_number - 1];
  double sele = ((double)(rand_below(afl, 10000) * 0.0001 * range_sele));

  for (i_puppet = 0; i_puppet < operator_num; ++i_puppet) {

    if (unlikely(i_puppet == 0)) {

      if (sele < afl->probability_now[afl->swarm_now][i_puppet]) { break; }

    } else {

      if (sele < afl->probability_now[afl->swarm_now][i_puppet]) {

        j_puppet = 1;
        break;

      }

    }

  }

  if ((j_puppet == 1 &&
       sele < afl->probability_now[afl->swarm_now][i_puppet - 1]) ||
      (i_puppet + 1 < operator_num &&
       sele > afl->probability_now[afl->swarm_now][i_puppet + 1])) {

    FATAL("error select_algorithm");

  }

  return i_puppet;

}

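/* The helper above is a roulette-wheel selection over the *cumulative*
   probability table afl->probability_now[swarm][op]: a random point in
   [0, total) is drawn and the first operator whose cumulative weight
   exceeds it wins. A minimal standalone sketch of the same idea follows
   (hypothetical names, not compiled into afl-fuzz): */

#if 0
static int roulette_pick(const double *cumulative, int n, double rnd01) {

  /* cumulative[i] holds the running sum of the operator weights;
     rnd01 is uniform in [0, 1). */
  double point = rnd01 * cumulative[n - 1];
  int    i;
  for (i = 0; i < n; ++i)
    if (point < cumulative[i]) return i;
  return n - 1;                                  /* numeric edge case */

}
#endif
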
/* Helper to choose random block len for block operations in fuzz_one().
   Doesn't return zero, provided that limit is > 0. */

static inline u32 choose_block_len(afl_state_t *afl, u32 limit) {

  u32 min_value, max_value;
  u32 rlim = MIN(afl->queue_cycle, (u32)3);

  if (unlikely(!afl->run_over10m)) { rlim = 1; }

  switch (rand_below(afl, rlim)) {

    case 0:
      min_value = 1;
      max_value = HAVOC_BLK_SMALL;
      break;

    case 1:
      min_value = HAVOC_BLK_SMALL;
      max_value = HAVOC_BLK_MEDIUM;
      break;

    default:

      if (likely(rand_below(afl, 10))) {

        min_value = HAVOC_BLK_MEDIUM;
        max_value = HAVOC_BLK_LARGE;

      } else {

        min_value = HAVOC_BLK_LARGE;
        max_value = HAVOC_BLK_XL;

      }

  }

  if (min_value >= limit) { min_value = 1; }

  return min_value + rand_below(afl, MIN(max_value, limit) - min_value + 1);

}

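/* Usage sketch: a havoc-style block clone/overwrite would typically size
   and place its block as below, so the block always fits in the remaining
   buffer (illustrative only, assuming a temp_len-sized working buffer): */

#if 0
  u32 clone_len = choose_block_len(afl, temp_len);
  u32 clone_from = rand_below(afl, temp_len - clone_len + 1);
#endif
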
/* Helper function to see if a particular change (xor_val = old ^ new) could
   be a product of deterministic bit flips with the lengths and stepovers
   attempted by afl-fuzz. This is used to avoid dupes in some of the
   deterministic fuzzing operations that follow bit flips. We also
   return 1 if xor_val is zero, which implies that the old and attempted new
   values are identical and the exec would be a waste of time. */

static u8 could_be_bitflip(u32 xor_val) {

  u32 sh = 0;

  if (!xor_val) { return 1; }

  /* Shift right until first bit set. */

  while (!(xor_val & 1)) {

    ++sh;
    xor_val >>= 1;

  }

  /* 1-, 2-, and 4-bit patterns are OK anywhere. */

  if (xor_val == 1 || xor_val == 3 || xor_val == 15) { return 1; }

  /* 8-, 16-, and 32-bit patterns are OK only if shift factor is
     divisible by 8, since that's the stepover for these ops. */

  if (sh & 7) { return 0; }

  if (xor_val == 0xff || xor_val == 0xffff || xor_val == 0xffffffff) {

    return 1;

  }

  return 0;

}

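/* Worked example (a hedged self-check, not part of the build): flipping
   two adjacent bits of 'A' yields an xor pattern the walking-bit stages
   already cover, while a sparse pattern like 0x05 does not. */

#if 0
static void could_be_bitflip_examples(void) {

  assert(could_be_bitflip(0x41 ^ 0x4D) == 1);   /* 0x0C -> 3 after sh == 2  */
  assert(could_be_bitflip(0x41 ^ 0x44) == 0);   /* 0x05: not a walking flip */

}
#endif
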
/* Helper function to see if a particular value is reachable through
   arithmetic operations. Used for similar purposes. */

static u8 could_be_arith(u32 old_val, u32 new_val, u8 blen) {

  u32 i, ov = 0, nv = 0, diffs = 0;

  if (old_val == new_val) { return 1; }

  /* See if one-byte adjustments to any byte could produce this result. */

  for (i = 0; (u8)i < blen; ++i) {

    u8 a = old_val >> (8 * i), b = new_val >> (8 * i);

    if (a != b) {

      ++diffs;
      ov = a;
      nv = b;

    }

  }

  /* If only one byte differs and the values are within range, return 1. */

  if (diffs == 1) {

    if ((u8)(ov - nv) <= ARITH_MAX || (u8)(nv - ov) <= ARITH_MAX) { return 1; }

  }

  if (blen == 1) { return 0; }

  /* See if two-byte adjustments to any byte would produce this result. */

  diffs = 0;

  for (i = 0; (u8)i < blen / 2; ++i) {

    u16 a = old_val >> (16 * i), b = new_val >> (16 * i);

    if (a != b) {

      ++diffs;
      ov = a;
      nv = b;

    }

  }

  /* If only one word differs and the values are within range, return 1. */

  if (diffs == 1) {

    if ((u16)(ov - nv) <= ARITH_MAX || (u16)(nv - ov) <= ARITH_MAX) {

      return 1;

    }

    ov = SWAP16(ov);
    nv = SWAP16(nv);

    if ((u16)(ov - nv) <= ARITH_MAX || (u16)(nv - ov) <= ARITH_MAX) {

      return 1;

    }

  }

  /* Finally, let's do the same thing for dwords. */

  if (blen == 4) {

    if ((u32)(old_val - new_val) <= ARITH_MAX ||
        (u32)(new_val - old_val) <= ARITH_MAX) {

      return 1;

    }

    new_val = SWAP32(new_val);
    old_val = SWAP32(old_val);

    if ((u32)(old_val - new_val) <= ARITH_MAX ||
        (u32)(new_val - old_val) <= ARITH_MAX) {

      return 1;

    }

  }

  return 0;

}

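/* Worked example (hedged self-check; ARITH_MAX defaults to 35 in
   config.h, so single-step deltas up to +/-35 count as "arith"): */

#if 0
static void could_be_arith_examples(void) {

  assert(could_be_arith(0x41, 0x44, 1) == 1);   /* 0x41 + 3                */
  assert(could_be_arith(0x4100, 0x4400, 2));    /* byte 1 of a word, +3    */
  assert(could_be_arith(0x41, 0x98, 1) == 0);   /* delta 0x57 > ARITH_MAX  */

}
#endif
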
/* Last but not least, a similar helper to see if insertion of an
   interesting integer is redundant given the insertions done for
   shorter blen. The last param (check_le) is set if the caller
   already executed LE insertion for current blen and wants to see
   if BE variant passed in new_val is unique. */

static u8 could_be_interest(u32 old_val, u32 new_val, u8 blen, u8 check_le) {

  u32 i, j;

  if (old_val == new_val) { return 1; }

  /* See if one-byte insertions from interesting_8 over old_val could
     produce new_val. */

  for (i = 0; i < blen; ++i) {

    for (j = 0; j < sizeof(interesting_8); ++j) {

      u32 tval =
          (old_val & ~(0xff << (i * 8))) | (((u8)interesting_8[j]) << (i * 8));

      if (new_val == tval) { return 1; }

    }

  }

  /* Bail out unless we're also asked to examine two-byte LE insertions
     as a preparation for BE attempts. */

  if (blen == 2 && !check_le) { return 0; }

  /* See if two-byte insertions over old_val could give us new_val. */

  for (i = 0; (u8)i < blen - 1; ++i) {

    for (j = 0; j < sizeof(interesting_16) / 2; ++j) {

      u32 tval = (old_val & ~(0xffff << (i * 8))) |
                 (((u16)interesting_16[j]) << (i * 8));

      if (new_val == tval) { return 1; }

      /* Continue here only if blen > 2. */

      if (blen > 2) {

        tval = (old_val & ~(0xffff << (i * 8))) |
               (SWAP16(interesting_16[j]) << (i * 8));

        if (new_val == tval) { return 1; }

      }

    }

  }

  if (blen == 4 && check_le) {

    /* See if four-byte insertions could produce the same result
       (LE only). */

    for (j = 0; j < sizeof(interesting_32) / 4; ++j) {

      if (new_val == (u32)interesting_32[j]) { return 1; }

    }

  }

  return 0;

}

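/* Worked example (hedged self-check): -1 is an interesting_8 entry, so
   writing it over the low byte of a 16-bit value is already covered by
   the shorter stage, and the 16-bit stage can skip that exact value: */

#if 0
static void could_be_interest_example(void) {

  assert(could_be_interest(0x4142, 0x41FF, 2, 0) == 1);

}
#endif
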
#ifndef IGNORE_FINDS

/* Helper function to compare buffers; returns first and last differing offset.
   We use this to find reasonable locations for splicing two files. */

static void locate_diffs(u8 *ptr1, u8 *ptr2, u32 len, s32 *first, s32 *last) {

  s32 f_loc = -1;
  s32 l_loc = -1;
  u32 pos;

  for (pos = 0; pos < len; ++pos) {

    if (*(ptr1++) != *(ptr2++)) {

      if (f_loc == -1) { f_loc = pos; }
      l_loc = pos;

    }

  }

  *first = f_loc;
  *last = l_loc;

  return;

}

#endif                                                     /* !IGNORE_FINDS */
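
/* locate_diffs() feeds the splicing stage: a split point is chosen
   strictly between the first and last differing byte of two queue
   entries. A hedged usage sketch (buf_a/buf_b/len_a/len_b are
   placeholders, not variables from this file): */

#if 0
  s32 f_diff, l_diff;
  locate_diffs(buf_a, buf_b, MIN(len_a, len_b), &f_diff, &l_diff);

  if (f_diff >= 0 && l_diff - f_diff > 1) {

    u32 split_at = f_diff + rand_below(afl, l_diff - f_diff);
    /* keep buf_a[0..split_at), take the rest from buf_b */
    memcpy(buf_a + split_at, buf_b + split_at, len_b - split_at);

  }
#endif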

/* Take the current entry from the queue, fuzz it for a while. This
   function is a tad too long... returns 0 if fuzzed successfully, 1 if
   skipped or bailed out. */

u8 fuzz_one_original(afl_state_t *afl) {

  u32 len, temp_len;
  u32 j;
  u32 i;
  u8 *in_buf, *out_buf, *orig_in, *ex_tmp, *eff_map = 0;
  u64 havoc_queued = 0, orig_hit_cnt, new_hit_cnt = 0, prev_cksum, _prev_cksum;
  u32 splice_cycle = 0, perf_score = 100, orig_perf, eff_cnt = 1;

  u8 ret_val = 1, doing_det = 0;

  u8  a_collect[MAX_AUTO_EXTRA];
  u32 a_len = 0;

#ifdef IGNORE_FINDS

  /* In IGNORE_FINDS mode, skip any entries that weren't in the
     initial data set. */

  if (afl->queue_cur->depth > 1) return 1;

#else

  if (unlikely(afl->custom_mutators_count)) {

    /* The custom mutator will decide to skip this test case or not. */

    LIST_FOREACH(&afl->custom_mutator_list, struct custom_mutator, {

      if (el->afl_custom_queue_get &&
          !el->afl_custom_queue_get(el->data, afl->queue_cur->fname)) {

        return 1;

      }

    });

  }

  if (likely(afl->pending_favored)) {

    /* If we have any favored, non-fuzzed new arrivals in the queue,
       possibly skip to them at the expense of already-fuzzed or non-favored
       cases. */

    if ((afl->queue_cur->fuzz_level || !afl->queue_cur->favored) &&
        likely(rand_below(afl, 100) < SKIP_TO_NEW_PROB)) {

      return 1;

    }

  } else if (!afl->non_instrumented_mode && !afl->queue_cur->favored &&

             afl->queued_items > 10) {

    /* Otherwise, still possibly skip non-favored cases, albeit less often.
       The odds of skipping stuff are higher for already-fuzzed inputs and
       lower for never-fuzzed entries. */

    if (afl->queue_cycle > 1 && !afl->queue_cur->fuzz_level) {

      if (likely(rand_below(afl, 100) < SKIP_NFAV_NEW_PROB)) { return 1; }

    } else {

      if (likely(rand_below(afl, 100) < SKIP_NFAV_OLD_PROB)) { return 1; }

    }

  }

#endif                                                     /* ^IGNORE_FINDS */

  if (unlikely(afl->not_on_tty)) {

    ACTF(
        "Fuzzing test case #%u (%u total, %llu crashes saved, "
        "perf_score=%0.0f, exec_us=%llu, hits=%u, map=%u, ascii=%u)...",
        afl->current_entry, afl->queued_items, afl->saved_crashes,
        afl->queue_cur->perf_score, afl->queue_cur->exec_us,
        likely(afl->n_fuzz) ? afl->n_fuzz[afl->queue_cur->n_fuzz_entry] : 0,
        afl->queue_cur->bitmap_size, afl->queue_cur->is_ascii);
    fflush(stdout);

  }

  orig_in = in_buf = queue_testcase_get(afl, afl->queue_cur);
  len = afl->queue_cur->len;

  out_buf = afl_realloc(AFL_BUF_PARAM(out), len);
  if (unlikely(!out_buf)) { PFATAL("alloc"); }

  afl->subseq_tmouts = 0;

  afl->cur_depth = afl->queue_cur->depth;

  /*******************************************
   * CALIBRATION (only if failed earlier on) *
   *******************************************/

  if (unlikely(afl->queue_cur->cal_failed)) {

    u8 res = FSRV_RUN_TMOUT;

    if (afl->queue_cur->cal_failed < CAL_CHANCES) {

      afl->queue_cur->exec_cksum = 0;

      res =
          calibrate_case(afl, afl->queue_cur, in_buf, afl->queue_cycle - 1, 0);

      if (unlikely(res == FSRV_RUN_ERROR)) {

        FATAL("Unable to execute target application");

      }

    }

    if (unlikely(afl->stop_soon) || res != afl->crash_mode) {

      ++afl->cur_skipped_items;
      goto abandon_entry;

    }

  }

  /************
   * TRIMMING *
   ************/

  if (unlikely(!afl->non_instrumented_mode && !afl->queue_cur->trim_done &&
               !afl->disable_trim)) {

    u32 old_len = afl->queue_cur->len;

    u8 res = trim_case(afl, afl->queue_cur, in_buf);
    orig_in = in_buf = queue_testcase_get(afl, afl->queue_cur);

    if (unlikely(res == FSRV_RUN_ERROR)) {

      FATAL("Unable to execute target application");

    }

    if (unlikely(afl->stop_soon)) {

      ++afl->cur_skipped_items;
      goto abandon_entry;

    }

    /* Don't retry trimming, even if it failed. */

    afl->queue_cur->trim_done = 1;

    len = afl->queue_cur->len;

    /* maybe current entry is not ready for splicing anymore */
    if (unlikely(len <= 4 && old_len > 4)) --afl->ready_for_splicing_count;

  }

  memcpy(out_buf, in_buf, len);

  /*********************
   * PERFORMANCE SCORE *
   *********************/

  if (likely(!afl->old_seed_selection))
    orig_perf = perf_score = afl->queue_cur->perf_score;
  else
    afl->queue_cur->perf_score = orig_perf = perf_score =
        calculate_score(afl, afl->queue_cur);

  if (unlikely(perf_score <= 0 && afl->active_items > 1)) {

    goto abandon_entry;

  }

  if (unlikely(afl->shm.cmplog_mode &&
               afl->queue_cur->colorized < afl->cmplog_lvl &&
               (u32)len <= afl->cmplog_max_filesize)) {

    if (unlikely(len < 4)) {

      afl->queue_cur->colorized = CMPLOG_LVL_MAX;

    } else {

      if (afl->cmplog_lvl == 3 ||
          (afl->cmplog_lvl == 2 && afl->queue_cur->tc_ref) ||
          afl->queue_cur->favored ||
          !(afl->fsrv.total_execs % afl->queued_items) ||
          get_cur_time() - afl->last_find_time > 300000) {  // 300 seconds

        if (input_to_state_stage(afl, in_buf, out_buf, len)) {

          goto abandon_entry;

        }

      }

    }

  }

  /* Skip right away if -d is given, if it has not been chosen sufficiently
     often to warrant the expensive deterministic stage (fuzz_level), or
     if it has gone through deterministic testing in earlier, resumed runs
     (passed_det). */

  if (likely(afl->queue_cur->passed_det) || likely(afl->skip_deterministic) ||
      likely(perf_score <
             (afl->queue_cur->depth * 30 <= afl->havoc_max_mult * 100
                  ? afl->queue_cur->depth * 30
                  : afl->havoc_max_mult * 100))) {

    goto custom_mutator_stage;

  }

  /* Skip deterministic fuzzing if exec path checksum puts this out of scope
     for this main instance. */

  if (unlikely(afl->main_node_max &&
               (afl->queue_cur->exec_cksum % afl->main_node_max) !=
                   afl->main_node_id - 1)) {

    goto custom_mutator_stage;

  }

  doing_det = 1;

  /*********************************************
   * SIMPLE BITFLIP (+dictionary construction) *
   *********************************************/

#define FLIP_BIT(_ar, _b)                   \
  do {                                      \
                                            \
    u8 *_arf = (u8 *)(_ar);                 \
    u32 _bf = (_b);                         \
    _arf[(_bf) >> 3] ^= (128 >> ((_bf)&7)); \
                                            \
  } while (0)

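  /* FLIP_BIT counts bits from the most significant bit of each byte:
     bit _b lives in byte _b >> 3 and is toggled via (128 >> (_b & 7)).
     A hedged self-check (not compiled in): */

#if 0
  u8 flip_demo[2] = {0x00, 0x00};
  FLIP_BIT(flip_demo, 0);                         /* flip_demo[0] == 0x80 */
  FLIP_BIT(flip_demo, 15);                        /* flip_demo[1] == 0x01 */
#endif
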
  /* Single walking bit. */

  afl->stage_short = "flip1";
  afl->stage_max = len << 3;
  afl->stage_name = "bitflip 1/1";

  afl->stage_val_type = STAGE_VAL_NONE;

  orig_hit_cnt = afl->queued_items + afl->saved_crashes;

  /* Get a clean cksum. */

  if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }

  prev_cksum = hash64(afl->fsrv.trace_bits, afl->fsrv.map_size, HASH_CONST);
  _prev_cksum = prev_cksum;

  /* Now flip bits. */

  for (afl->stage_cur = 0; afl->stage_cur < afl->stage_max; ++afl->stage_cur) {

    afl->stage_cur_byte = afl->stage_cur >> 3;

    FLIP_BIT(out_buf, afl->stage_cur);

#ifdef INTROSPECTION
    snprintf(afl->mutation, sizeof(afl->mutation), "%s FLIP_BIT1-%u",
             afl->queue_cur->fname, afl->stage_cur);
#endif

    if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }

    FLIP_BIT(out_buf, afl->stage_cur);

    /* While flipping the least significant bit in every byte, pull off an
       extra trick to detect possible syntax tokens. In essence, the idea is
       that if you have a binary blob like this:

       xxxxxxxxIHDRxxxxxxxx

       ...and changing the leading and trailing bytes causes variable or no
       changes in program flow, but touching any character in the "IHDR" string
       always produces the same, distinctive path, it's highly likely that
       "IHDR" is an atomically-checked magic value of special significance to
       the fuzzed format.

       We do this here, rather than as a separate stage, because it's a nice
       way to keep the operation approximately "free" (i.e., no extra execs).

       Empirically, performing the check when flipping the least significant
       bit is advantageous, compared to doing it at the time of more disruptive
       changes, where the program flow may be affected in more violent ways.

       The caveat is that we won't generate dictionaries in the -d mode or -S
       mode - but that's probably a fair trade-off.

       This won't work particularly well with paths that exhibit variable
       behavior, but fails gracefully, so we'll carry out the checks anyway.

      */

    if (!afl->non_instrumented_mode && (afl->stage_cur & 7) == 7) {

      u64 cksum = hash64(afl->fsrv.trace_bits, afl->fsrv.map_size, HASH_CONST);

      if (afl->stage_cur == afl->stage_max - 1 && cksum == prev_cksum) {

        /* If at end of file and we are still collecting a string, grab the
           final character and force output. */

        if (a_len < MAX_AUTO_EXTRA) {

          a_collect[a_len] = out_buf[afl->stage_cur >> 3];

        }

        ++a_len;

        if (a_len >= MIN_AUTO_EXTRA && a_len <= MAX_AUTO_EXTRA) {

          maybe_add_auto(afl, a_collect, a_len);

        }

      } else if (cksum != prev_cksum) {

        /* Otherwise, if the checksum has changed, see if we have something
           worthwhile queued up, and collect that if the answer is yes. */

        if (a_len >= MIN_AUTO_EXTRA && a_len <= MAX_AUTO_EXTRA) {

          maybe_add_auto(afl, a_collect, a_len);

        }

        a_len = 0;
        prev_cksum = cksum;

      }

      /* Continue collecting string, but only if the bit flip actually made
         any difference - we don't want no-op tokens. */

      if (cksum != _prev_cksum) {

        if (a_len < MAX_AUTO_EXTRA) {

          a_collect[a_len] = out_buf[afl->stage_cur >> 3];

        }

        ++a_len;

      }

    }

  }

  new_hit_cnt = afl->queued_items + afl->saved_crashes;

  afl->stage_finds[STAGE_FLIP1] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_FLIP1] += afl->stage_max;

  /* Two walking bits. */

  afl->stage_name = "bitflip 2/1";
  afl->stage_short = "flip2";
  afl->stage_max = (len << 3) - 1;

  orig_hit_cnt = new_hit_cnt;

  for (afl->stage_cur = 0; afl->stage_cur < afl->stage_max; ++afl->stage_cur) {

    afl->stage_cur_byte = afl->stage_cur >> 3;

    FLIP_BIT(out_buf, afl->stage_cur);
    FLIP_BIT(out_buf, afl->stage_cur + 1);

#ifdef INTROSPECTION
    snprintf(afl->mutation, sizeof(afl->mutation), "%s FLIP_BIT2-%u",
             afl->queue_cur->fname, afl->stage_cur);
#endif

    if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }

    FLIP_BIT(out_buf, afl->stage_cur);
    FLIP_BIT(out_buf, afl->stage_cur + 1);

  }

  new_hit_cnt = afl->queued_items + afl->saved_crashes;

  afl->stage_finds[STAGE_FLIP2] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_FLIP2] += afl->stage_max;

  /* Four walking bits. */

  afl->stage_name = "bitflip 4/1";
  afl->stage_short = "flip4";
  afl->stage_max = (len << 3) - 3;

  orig_hit_cnt = new_hit_cnt;

  for (afl->stage_cur = 0; afl->stage_cur < afl->stage_max; ++afl->stage_cur) {

    afl->stage_cur_byte = afl->stage_cur >> 3;

    FLIP_BIT(out_buf, afl->stage_cur);
    FLIP_BIT(out_buf, afl->stage_cur + 1);
    FLIP_BIT(out_buf, afl->stage_cur + 2);
    FLIP_BIT(out_buf, afl->stage_cur + 3);

#ifdef INTROSPECTION
    snprintf(afl->mutation, sizeof(afl->mutation), "%s FLIP_BIT4-%u",
             afl->queue_cur->fname, afl->stage_cur);
#endif

    if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }

    FLIP_BIT(out_buf, afl->stage_cur);
    FLIP_BIT(out_buf, afl->stage_cur + 1);
    FLIP_BIT(out_buf, afl->stage_cur + 2);
    FLIP_BIT(out_buf, afl->stage_cur + 3);

  }

  new_hit_cnt = afl->queued_items + afl->saved_crashes;

  afl->stage_finds[STAGE_FLIP4] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_FLIP4] += afl->stage_max;

  /* Effector map setup. These macros calculate:

     EFF_APOS      - position of a particular file offset in the map.
     EFF_ALEN      - length of a map with a particular number of bytes.
     EFF_SPAN_ALEN - map span for a sequence of bytes.

   */

#define EFF_APOS(_p) ((_p) >> EFF_MAP_SCALE2)
#define EFF_REM(_x) ((_x) & ((1 << EFF_MAP_SCALE2) - 1))
#define EFF_ALEN(_l) (EFF_APOS(_l) + !!EFF_REM(_l))
#define EFF_SPAN_ALEN(_p, _l) (EFF_APOS((_p) + (_l)-1) - EFF_APOS(_p) + 1)

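  /* With the default EFF_MAP_SCALE2 of 3 (see config.h), one effector
     byte covers an 8-byte block of input. Hedged examples: */

#if 0
  assert(EFF_APOS(9) == 1);          /* offset 9 falls in block 1        */
  assert(EFF_ALEN(20) == 3);         /* 20 bytes need ceil(20/8) entries */
  assert(EFF_SPAN_ALEN(6, 4) == 2);  /* offsets 6..9 straddle two blocks */
#endif
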
  /* Initialize effector map for the next step (see comments below). Always
     flag first and last byte as doing something. */

  eff_map = afl_realloc(AFL_BUF_PARAM(eff), EFF_ALEN(len));
  if (unlikely(!eff_map)) { PFATAL("alloc"); }
  eff_map[0] = 1;

  if (EFF_APOS(len - 1) != 0) {

    eff_map[EFF_APOS(len - 1)] = 1;
    ++eff_cnt;

  }

  /* Walking byte. */

  afl->stage_name = "bitflip 8/8";
  afl->stage_short = "flip8";
  afl->stage_max = len;

  orig_hit_cnt = new_hit_cnt;
  prev_cksum = _prev_cksum;

  for (afl->stage_cur = 0; afl->stage_cur < afl->stage_max; ++afl->stage_cur) {

    afl->stage_cur_byte = afl->stage_cur;

    out_buf[afl->stage_cur] ^= 0xFF;

#ifdef INTROSPECTION
    snprintf(afl->mutation, sizeof(afl->mutation), "%s FLIP_BIT8-%u",
             afl->queue_cur->fname, afl->stage_cur);
#endif

    if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }

    /* We also use this stage to pull off a simple trick: we identify
       bytes that seem to have no effect on the current execution path
       even when fully flipped - and we skip them during more expensive
       deterministic stages, such as arithmetics or known ints. */

    if (!eff_map[EFF_APOS(afl->stage_cur)]) {

      u64 cksum;

      /* If in non-instrumented mode or if the file is very short, just flag
         everything without wasting time on checksums. */

      if (!afl->non_instrumented_mode && len >= EFF_MIN_LEN) {

        cksum = hash64(afl->fsrv.trace_bits, afl->fsrv.map_size, HASH_CONST);

      } else {

        cksum = ~prev_cksum;

      }

      if (cksum != prev_cksum) {

        eff_map[EFF_APOS(afl->stage_cur)] = 1;
        ++eff_cnt;

      }

    }

    out_buf[afl->stage_cur] ^= 0xFF;

  }

  /* If the effector map is more than EFF_MAX_PERC dense, just flag the
     whole thing as worth fuzzing, since we wouldn't be saving much time
     anyway. */

  if (eff_cnt != (u32)EFF_ALEN(len) &&
      eff_cnt * 100 / EFF_ALEN(len) > EFF_MAX_PERC) {

    memset(eff_map, 1, EFF_ALEN(len));

    afl->blocks_eff_select += EFF_ALEN(len);

  } else {

    afl->blocks_eff_select += eff_cnt;

  }

  afl->blocks_eff_total += EFF_ALEN(len);

  new_hit_cnt = afl->queued_items + afl->saved_crashes;

  afl->stage_finds[STAGE_FLIP8] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_FLIP8] += afl->stage_max;

  /* Two walking bytes. */

  if (len < 2) { goto skip_bitflip; }

  afl->stage_name = "bitflip 16/8";
  afl->stage_short = "flip16";
  afl->stage_cur = 0;
  afl->stage_max = len - 1;

  orig_hit_cnt = new_hit_cnt;

  for (i = 0; i < len - 1; ++i) {

    /* Let's consult the effector map... */

    if (!eff_map[EFF_APOS(i)] && !eff_map[EFF_APOS(i + 1)]) {

      --afl->stage_max;
      continue;

    }

    afl->stage_cur_byte = i;

    *(u16 *)(out_buf + i) ^= 0xFFFF;

#ifdef INTROSPECTION
    snprintf(afl->mutation, sizeof(afl->mutation), "%s FLIP_BIT16-%u",
             afl->queue_cur->fname, afl->stage_cur);
#endif

    if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
    ++afl->stage_cur;

    *(u16 *)(out_buf + i) ^= 0xFFFF;

  }

  new_hit_cnt = afl->queued_items + afl->saved_crashes;

  afl->stage_finds[STAGE_FLIP16] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_FLIP16] += afl->stage_max;

  if (len < 4) { goto skip_bitflip; }

  /* Four walking bytes. */

  afl->stage_name = "bitflip 32/8";
  afl->stage_short = "flip32";
  afl->stage_cur = 0;
  afl->stage_max = len - 3;

  orig_hit_cnt = new_hit_cnt;

  for (i = 0; i < len - 3; ++i) {

    /* Let's consult the effector map... */
    if (!eff_map[EFF_APOS(i)] && !eff_map[EFF_APOS(i + 1)] &&
        !eff_map[EFF_APOS(i + 2)] && !eff_map[EFF_APOS(i + 3)]) {

      --afl->stage_max;
      continue;

    }

    afl->stage_cur_byte = i;

    *(u32 *)(out_buf + i) ^= 0xFFFFFFFF;

#ifdef INTROSPECTION
    snprintf(afl->mutation, sizeof(afl->mutation), "%s FLIP_BIT32-%u",
             afl->queue_cur->fname, afl->stage_cur);
#endif

    if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
    ++afl->stage_cur;

    *(u32 *)(out_buf + i) ^= 0xFFFFFFFF;

  }

  new_hit_cnt = afl->queued_items + afl->saved_crashes;

  afl->stage_finds[STAGE_FLIP32] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_FLIP32] += afl->stage_max;

skip_bitflip:

  if (afl->no_arith) { goto skip_arith; }

  /**********************
   * ARITHMETIC INC/DEC *
   **********************/

  /* 8-bit arithmetics. */

  afl->stage_name = "arith 8/8";
  afl->stage_short = "arith8";
  afl->stage_cur = 0;
  afl->stage_max = 2 * len * ARITH_MAX;

  afl->stage_val_type = STAGE_VAL_LE;

  orig_hit_cnt = new_hit_cnt;

  for (i = 0; i < (u32)len; ++i) {

    u8 orig = out_buf[i];

    /* Let's consult the effector map... */

    if (!eff_map[EFF_APOS(i)]) {

      afl->stage_max -= 2 * ARITH_MAX;
      continue;

    }

    afl->stage_cur_byte = i;

    for (j = 1; j <= ARITH_MAX; ++j) {

      u8 r = orig ^ (orig + j);

      /* Do arithmetic operations only if the result couldn't be a product
         of a bitflip. */

      if (!could_be_bitflip(r)) {

        afl->stage_cur_val = j;
        out_buf[i] = orig + j;

#ifdef INTROSPECTION
        snprintf(afl->mutation, sizeof(afl->mutation), "%s ARITH8+-%u-%u",
                 afl->queue_cur->fname, i, j);
#endif

        if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
        ++afl->stage_cur;

      } else {

        --afl->stage_max;

      }

      r = orig ^ (orig - j);

      if (!could_be_bitflip(r)) {

        afl->stage_cur_val = -j;
        out_buf[i] = orig - j;

#ifdef INTROSPECTION
        snprintf(afl->mutation, sizeof(afl->mutation), "%s ARITH8--%u-%u",
                 afl->queue_cur->fname, i, j);
#endif

        if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
        ++afl->stage_cur;

      } else {

        --afl->stage_max;

      }

      out_buf[i] = orig;

    }

  }

  new_hit_cnt = afl->queued_items + afl->saved_crashes;

  afl->stage_finds[STAGE_ARITH8] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_ARITH8] += afl->stage_max;

  /* 16-bit arithmetics, both endians. */

  if (len < 2) { goto skip_arith; }

  afl->stage_name = "arith 16/8";
  afl->stage_short = "arith16";
  afl->stage_cur = 0;
  afl->stage_max = 4 * (len - 1) * ARITH_MAX;

  orig_hit_cnt = new_hit_cnt;

  for (i = 0; i < (u32)len - 1; ++i) {

    u16 orig = *(u16 *)(out_buf + i);

    /* Let's consult the effector map... */

    if (!eff_map[EFF_APOS(i)] && !eff_map[EFF_APOS(i + 1)]) {

      afl->stage_max -= 4 * ARITH_MAX;
      continue;

    }

    afl->stage_cur_byte = i;

    for (j = 1; j <= ARITH_MAX; ++j) {

      u16 r1 = orig ^ (orig + j), r2 = orig ^ (orig - j),
          r3 = orig ^ SWAP16(SWAP16(orig) + j),
          r4 = orig ^ SWAP16(SWAP16(orig) - j);

      /* Try little endian addition and subtraction first. Do it only
         if the operation would affect more than one byte (hence the
         & 0xff overflow checks) and if it couldn't be a product of
         a bitflip. */

      afl->stage_val_type = STAGE_VAL_LE;

      if ((orig & 0xff) + j > 0xff && !could_be_bitflip(r1)) {

        afl->stage_cur_val = j;
        *(u16 *)(out_buf + i) = orig + j;

#ifdef INTROSPECTION
        snprintf(afl->mutation, sizeof(afl->mutation), "%s ARITH16+-%u-%u",
                 afl->queue_cur->fname, i, j);
#endif

        if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
        ++afl->stage_cur;

      } else {

        --afl->stage_max;

      }

      if ((orig & 0xff) < j && !could_be_bitflip(r2)) {

        afl->stage_cur_val = -j;
        *(u16 *)(out_buf + i) = orig - j;

#ifdef INTROSPECTION
        snprintf(afl->mutation, sizeof(afl->mutation), "%s ARITH16--%u-%u",
                 afl->queue_cur->fname, i, j);
#endif

        if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
        ++afl->stage_cur;

      } else {

        --afl->stage_max;

      }

      /* Big endian comes next. Same deal. */

      afl->stage_val_type = STAGE_VAL_BE;

      if ((orig >> 8) + j > 0xff && !could_be_bitflip(r3)) {

        afl->stage_cur_val = j;
        *(u16 *)(out_buf + i) = SWAP16(SWAP16(orig) + j);

#ifdef INTROSPECTION
        snprintf(afl->mutation, sizeof(afl->mutation), "%s ARITH16+BE-%u-%u",
                 afl->queue_cur->fname, i, j);
#endif

        if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
        ++afl->stage_cur;

      } else {

        --afl->stage_max;

      }

      if ((orig >> 8) < j && !could_be_bitflip(r4)) {

        afl->stage_cur_val = -j;
        *(u16 *)(out_buf + i) = SWAP16(SWAP16(orig) - j);

#ifdef INTROSPECTION
        snprintf(afl->mutation, sizeof(afl->mutation), "%s ARITH16_BE-%u-%u",
                 afl->queue_cur->fname, i, j);
#endif

        if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
        ++afl->stage_cur;

      } else {

        --afl->stage_max;

      }

      *(u16 *)(out_buf + i) = orig;

    }

  }

  new_hit_cnt = afl->queued_items + afl->saved_crashes;

  afl->stage_finds[STAGE_ARITH16] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_ARITH16] += afl->stage_max;

  /* 32-bit arithmetics, both endians. */

  if (len < 4) { goto skip_arith; }

  afl->stage_name = "arith 32/8";
  afl->stage_short = "arith32";
  afl->stage_cur = 0;
  afl->stage_max = 4 * (len - 3) * ARITH_MAX;

  orig_hit_cnt = new_hit_cnt;

  for (i = 0; i < (u32)len - 3; ++i) {

    u32 orig = *(u32 *)(out_buf + i);

    /* Let's consult the effector map... */

    if (!eff_map[EFF_APOS(i)] && !eff_map[EFF_APOS(i + 1)] &&
        !eff_map[EFF_APOS(i + 2)] && !eff_map[EFF_APOS(i + 3)]) {

      afl->stage_max -= 4 * ARITH_MAX;
      continue;

    }

    afl->stage_cur_byte = i;

    for (j = 1; j <= ARITH_MAX; ++j) {

      u32 r1 = orig ^ (orig + j), r2 = orig ^ (orig - j),
          r3 = orig ^ SWAP32(SWAP32(orig) + j),
          r4 = orig ^ SWAP32(SWAP32(orig) - j);

      /* Little endian first. Same deal as with 16-bit: we only want to
         try if the operation would have effect on more than two bytes. */

      afl->stage_val_type = STAGE_VAL_LE;

      if ((orig & 0xffff) + j > 0xffff && !could_be_bitflip(r1)) {

        afl->stage_cur_val = j;
        *(u32 *)(out_buf + i) = orig + j;

#ifdef INTROSPECTION
        snprintf(afl->mutation, sizeof(afl->mutation), "%s ARITH32+-%u-%u",
                 afl->queue_cur->fname, i, j);
#endif

        if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
        ++afl->stage_cur;

      } else {

        --afl->stage_max;

      }

      if ((orig & 0xffff) < (u32)j && !could_be_bitflip(r2)) {

        afl->stage_cur_val = -j;
        *(u32 *)(out_buf + i) = orig - j;

#ifdef INTROSPECTION
        snprintf(afl->mutation, sizeof(afl->mutation), "%s ARITH32_-%u-%u",
                 afl->queue_cur->fname, i, j);
#endif

        if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
        ++afl->stage_cur;

      } else {

        --afl->stage_max;

      }

      /* Big endian next. */

      afl->stage_val_type = STAGE_VAL_BE;

      if ((SWAP32(orig) & 0xffff) + j > 0xffff && !could_be_bitflip(r3)) {

        afl->stage_cur_val = j;
        *(u32 *)(out_buf + i) = SWAP32(SWAP32(orig) + j);

#ifdef INTROSPECTION
        snprintf(afl->mutation, sizeof(afl->mutation), "%s ARITH32+BE-%u-%u",
                 afl->queue_cur->fname, i, j);
#endif

        if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
        ++afl->stage_cur;

      } else {

        --afl->stage_max;

      }

      if ((SWAP32(orig) & 0xffff) < (u32)j && !could_be_bitflip(r4)) {

        afl->stage_cur_val = -j;
        *(u32 *)(out_buf + i) = SWAP32(SWAP32(orig) - j);

#ifdef INTROSPECTION
        snprintf(afl->mutation, sizeof(afl->mutation), "%s ARITH32_BE-%u-%u",
                 afl->queue_cur->fname, i, j);
#endif

        if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
        ++afl->stage_cur;

      } else {

        --afl->stage_max;

      }

      *(u32 *)(out_buf + i) = orig;

    }

  }

  new_hit_cnt = afl->queued_items + afl->saved_crashes;

  afl->stage_finds[STAGE_ARITH32] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_ARITH32] += afl->stage_max;

skip_arith:

  /**********************
   * INTERESTING VALUES *
   **********************/

  afl->stage_name = "interest 8/8";
  afl->stage_short = "int8";
  afl->stage_cur = 0;
  afl->stage_max = len * sizeof(interesting_8);

  afl->stage_val_type = STAGE_VAL_LE;

  orig_hit_cnt = new_hit_cnt;

  /* Setting 8-bit integers. */

  for (i = 0; i < (u32)len; ++i) {

    u8 orig = out_buf[i];

    /* Let's consult the effector map... */

    if (!eff_map[EFF_APOS(i)]) {

      afl->stage_max -= sizeof(interesting_8);
      continue;

    }

    afl->stage_cur_byte = i;

    for (j = 0; j < (u32)sizeof(interesting_8); ++j) {

      /* Skip if the value could be a product of bitflips or arithmetics. */

      if (could_be_bitflip(orig ^ (u8)interesting_8[j]) ||
          could_be_arith(orig, (u8)interesting_8[j], 1)) {

        --afl->stage_max;
        continue;

      }

      afl->stage_cur_val = interesting_8[j];
      out_buf[i] = interesting_8[j];

#ifdef INTROSPECTION
      snprintf(afl->mutation, sizeof(afl->mutation), "%s INTERESTING8_%u_%u",
               afl->queue_cur->fname, i, j);
#endif

      if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }

      out_buf[i] = orig;
      ++afl->stage_cur;

    }

  }

  new_hit_cnt = afl->queued_items + afl->saved_crashes;

  afl->stage_finds[STAGE_INTEREST8] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_INTEREST8] += afl->stage_max;

  /* Setting 16-bit integers, both endians. */

  if (afl->no_arith || len < 2) { goto skip_interest; }

  afl->stage_name = "interest 16/8";
  afl->stage_short = "int16";
  afl->stage_cur = 0;
  afl->stage_max = 2 * (len - 1) * (sizeof(interesting_16) >> 1);

  orig_hit_cnt = new_hit_cnt;

  for (i = 0; i < len - 1; ++i) {

    u16 orig = *(u16 *)(out_buf + i);

    /* Let's consult the effector map... */

    if (!eff_map[EFF_APOS(i)] && !eff_map[EFF_APOS(i + 1)]) {

      afl->stage_max -= sizeof(interesting_16);
      continue;

    }

    afl->stage_cur_byte = i;

    for (j = 0; j < sizeof(interesting_16) / 2; ++j) {

      afl->stage_cur_val = interesting_16[j];

      /* Skip if this could be a product of a bitflip, arithmetics,
         or single-byte interesting value insertion. */

      if (!could_be_bitflip(orig ^ (u16)interesting_16[j]) &&
          !could_be_arith(orig, (u16)interesting_16[j], 2) &&
          !could_be_interest(orig, (u16)interesting_16[j], 2, 0)) {

        afl->stage_val_type = STAGE_VAL_LE;

        *(u16 *)(out_buf + i) = interesting_16[j];

#ifdef INTROSPECTION
        snprintf(afl->mutation, sizeof(afl->mutation), "%s INTERESTING16_%u_%u",
                 afl->queue_cur->fname, i, j);
#endif

        if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
        ++afl->stage_cur;

      } else {

        --afl->stage_max;

      }

      if ((u16)interesting_16[j] != SWAP16(interesting_16[j]) &&
          !could_be_bitflip(orig ^ SWAP16(interesting_16[j])) &&
          !could_be_arith(orig, SWAP16(interesting_16[j]), 2) &&
          !could_be_interest(orig, SWAP16(interesting_16[j]), 2, 1)) {

        afl->stage_val_type = STAGE_VAL_BE;

#ifdef INTROSPECTION
        snprintf(afl->mutation, sizeof(afl->mutation),
                 "%s INTERESTING16BE_%u_%u", afl->queue_cur->fname, i, j);
#endif

        *(u16 *)(out_buf + i) = SWAP16(interesting_16[j]);
        if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
        ++afl->stage_cur;

      } else {

        --afl->stage_max;

      }

    }

    *(u16 *)(out_buf + i) = orig;

  }

  new_hit_cnt = afl->queued_items + afl->saved_crashes;

  afl->stage_finds[STAGE_INTEREST16] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_INTEREST16] += afl->stage_max;

  if (len < 4) { goto skip_interest; }

  /* Setting 32-bit integers, both endians. */

  afl->stage_name = "interest 32/8";
  afl->stage_short = "int32";
  afl->stage_cur = 0;
  afl->stage_max = 2 * (len - 3) * (sizeof(interesting_32) >> 2);

  orig_hit_cnt = new_hit_cnt;

  for (i = 0; i < len - 3; i++) {

    u32 orig = *(u32 *)(out_buf + i);

    /* Let's consult the effector map... */

    if (!eff_map[EFF_APOS(i)] && !eff_map[EFF_APOS(i + 1)] &&
        !eff_map[EFF_APOS(i + 2)] && !eff_map[EFF_APOS(i + 3)]) {

      afl->stage_max -= sizeof(interesting_32) >> 1;
      continue;

    }

    afl->stage_cur_byte = i;

    for (j = 0; j < sizeof(interesting_32) / 4; ++j) {

      afl->stage_cur_val = interesting_32[j];

      /* Skip if this could be a product of a bitflip, arithmetics,
         or word interesting value insertion. */

      if (!could_be_bitflip(orig ^ (u32)interesting_32[j]) &&
          !could_be_arith(orig, interesting_32[j], 4) &&
          !could_be_interest(orig, interesting_32[j], 4, 0)) {

        afl->stage_val_type = STAGE_VAL_LE;

        *(u32 *)(out_buf + i) = interesting_32[j];

#ifdef INTROSPECTION
        snprintf(afl->mutation, sizeof(afl->mutation), "%s INTERESTING32_%u_%u",
                 afl->queue_cur->fname, i, j);
#endif

        if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
        ++afl->stage_cur;

      } else {

        --afl->stage_max;

      }

      if ((u32)interesting_32[j] != SWAP32(interesting_32[j]) &&
          !could_be_bitflip(orig ^ SWAP32(interesting_32[j])) &&
          !could_be_arith(orig, SWAP32(interesting_32[j]), 4) &&
          !could_be_interest(orig, SWAP32(interesting_32[j]), 4, 1)) {

        afl->stage_val_type = STAGE_VAL_BE;

#ifdef INTROSPECTION
        snprintf(afl->mutation, sizeof(afl->mutation),
                 "%s INTERESTING32BE_%u_%u", afl->queue_cur->fname, i, j);
#endif

        *(u32 *)(out_buf + i) = SWAP32(interesting_32[j]);
        if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
        ++afl->stage_cur;

      } else {

        --afl->stage_max;

      }

    }

    *(u32 *)(out_buf + i) = orig;

  }

  new_hit_cnt = afl->queued_items + afl->saved_crashes;

  afl->stage_finds[STAGE_INTEREST32] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_INTEREST32] += afl->stage_max;

skip_interest:

  /********************
   * DICTIONARY STUFF *
   ********************/

  if (!afl->extras_cnt) { goto skip_user_extras; }

  /* Overwrite with user-supplied extras. */

  afl->stage_name = "user extras (over)";
  afl->stage_short = "ext_UO";
  afl->stage_cur = 0;
  afl->stage_max = afl->extras_cnt * len;

  afl->stage_val_type = STAGE_VAL_NONE;

  orig_hit_cnt = new_hit_cnt;

  for (i = 0; i < (u32)len; ++i) {

    u32 last_len = 0;

    afl->stage_cur_byte = i;

    /* Extras are sorted by size, from smallest to largest. This means
       that we don't have to worry about restoring the buffer in
       between writes at a particular offset determined by the outer
       loop. */

    for (j = 0; j < afl->extras_cnt; ++j) {

      /* Skip extras probabilistically if afl->extras_cnt > AFL_MAX_DET_EXTRAS.
         Also skip them if there's no room to insert the payload, if the token
         is redundant, or if its entire span has no bytes set in the effector
         map. */

      if ((afl->extras_cnt > afl->max_det_extras &&
           rand_below(afl, afl->extras_cnt) >= afl->max_det_extras) ||
          afl->extras[j].len > len - i ||
          !memcmp(afl->extras[j].data, out_buf + i, afl->extras[j].len) ||
          !memchr(eff_map + EFF_APOS(i), 1,
                  EFF_SPAN_ALEN(i, afl->extras[j].len))) {

        --afl->stage_max;
        continue;

      }

      last_len = afl->extras[j].len;
      memcpy(out_buf + i, afl->extras[j].data, last_len);

#ifdef INTROSPECTION
      snprintf(afl->mutation, sizeof(afl->mutation),
               "%s EXTRAS_overwrite-%u-%u", afl->queue_cur->fname, i, j);
#endif

      if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }

      ++afl->stage_cur;

    }

    /* Restore all the clobbered memory. */
    memcpy(out_buf + i, in_buf + i, last_len);

  }

  new_hit_cnt = afl->queued_items + afl->saved_crashes;

  afl->stage_finds[STAGE_EXTRAS_UO] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_EXTRAS_UO] += afl->stage_max;

  /* Insertion of user-supplied extras. */

  afl->stage_name = "user extras (insert)";
  afl->stage_short = "ext_UI";
  afl->stage_cur = 0;
  afl->stage_max = afl->extras_cnt * (len + 1);

  orig_hit_cnt = new_hit_cnt;

  ex_tmp = afl_realloc(AFL_BUF_PARAM(ex), len + MAX_DICT_FILE);
  if (unlikely(!ex_tmp)) { PFATAL("alloc"); }

  for (i = 0; i <= (u32)len; ++i) {

    afl->stage_cur_byte = i;

    for (j = 0; j < afl->extras_cnt; ++j) {

      if (len + afl->extras[j].len > MAX_FILE) {

        --afl->stage_max;
        continue;

      }

      /* Insert token */
      memcpy(ex_tmp + i, afl->extras[j].data, afl->extras[j].len);

      /* Copy tail */
      memcpy(ex_tmp + i + afl->extras[j].len, out_buf + i, len - i);

#ifdef INTROSPECTION
      snprintf(afl->mutation, sizeof(afl->mutation), "%s EXTRAS_insert-%u-%u",
               afl->queue_cur->fname, i, j);
#endif

      if (common_fuzz_stuff(afl, ex_tmp, len + afl->extras[j].len)) {

        goto abandon_entry;

      }

      ++afl->stage_cur;

    }

    /* Copy head */
    ex_tmp[i] = out_buf[i];

  }

  new_hit_cnt = afl->queued_items + afl->saved_crashes;

  afl->stage_finds[STAGE_EXTRAS_UI] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_EXTRAS_UI] += afl->stage_max;

skip_user_extras:

  if (!afl->a_extras_cnt) { goto skip_extras; }

  afl->stage_name = "auto extras (over)";
  afl->stage_short = "ext_AO";
  afl->stage_cur = 0;
  afl->stage_max = MIN(afl->a_extras_cnt, (u32)USE_AUTO_EXTRAS) * len;

  afl->stage_val_type = STAGE_VAL_NONE;

  orig_hit_cnt = new_hit_cnt;

  for (i = 0; i < (u32)len; ++i) {

    u32 last_len = 0;

    afl->stage_cur_byte = i;

    u32 min_extra_len = MIN(afl->a_extras_cnt, (u32)USE_AUTO_EXTRAS);
    for (j = 0; j < min_extra_len; ++j) {

      /* See the comment in the earlier code; extras are sorted by size. */

      if (afl->a_extras[j].len > len - i ||
          !memcmp(afl->a_extras[j].data, out_buf + i, afl->a_extras[j].len) ||
          !memchr(eff_map + EFF_APOS(i), 1,
                  EFF_SPAN_ALEN(i, afl->a_extras[j].len))) {

        --afl->stage_max;
        continue;

      }

      last_len = afl->a_extras[j].len;
      memcpy(out_buf + i, afl->a_extras[j].data, last_len);

#ifdef INTROSPECTION
      snprintf(afl->mutation, sizeof(afl->mutation),
               "%s AUTO_EXTRAS_overwrite-%u-%u", afl->queue_cur->fname, i, j);
#endif

      if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }

      ++afl->stage_cur;

    }

    /* Restore all the clobbered memory. */
    memcpy(out_buf + i, in_buf + i, last_len);

  }

  new_hit_cnt = afl->queued_items + afl->saved_crashes;

  afl->stage_finds[STAGE_EXTRAS_AO] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_EXTRAS_AO] += afl->stage_max;

  /* Insertion of auto extras. */

  afl->stage_name = "auto extras (insert)";
  afl->stage_short = "ext_AI";
  afl->stage_cur = 0;
  afl->stage_max = afl->a_extras_cnt * (len + 1);

  orig_hit_cnt = new_hit_cnt;

  ex_tmp = afl_realloc(AFL_BUF_PARAM(ex), len + MAX_DICT_FILE);
  if (unlikely(!ex_tmp)) { PFATAL("alloc"); }

  for (i = 0; i <= (u32)len; ++i) {

    afl->stage_cur_byte = i;

    for (j = 0; j < afl->a_extras_cnt; ++j) {

      if (len + afl->a_extras[j].len > MAX_FILE) {

        --afl->stage_max;
        continue;

      }

      /* Insert token */
      memcpy(ex_tmp + i, afl->a_extras[j].data, afl->a_extras[j].len);

      /* Copy tail */
      memcpy(ex_tmp + i + afl->a_extras[j].len, out_buf + i, len - i);

#ifdef INTROSPECTION
      snprintf(afl->mutation, sizeof(afl->mutation),
               "%s AUTO_EXTRAS_insert-%u-%u", afl->queue_cur->fname, i, j);
#endif

      if (common_fuzz_stuff(afl, ex_tmp, len + afl->a_extras[j].len)) {

        goto abandon_entry;

      }

      ++afl->stage_cur;

    }

    /* Copy head */
    ex_tmp[i] = out_buf[i];

  }

  new_hit_cnt = afl->queued_items + afl->saved_crashes;

  afl->stage_finds[STAGE_EXTRAS_AI] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_EXTRAS_AI] += afl->stage_max;

skip_extras:

  /* If we made it here without jumping to havoc_stage or abandon_entry,
     we're properly done with deterministic steps and can mark it as such
     in the .state/ directory. */

  if (!afl->queue_cur->passed_det) { mark_as_det_done(afl, afl->queue_cur); }

custom_mutator_stage:
  /*******************
   * CUSTOM MUTATORS *
   *******************/

  if (likely(!afl->custom_mutators_count)) { goto havoc_stage; }

  afl->stage_name = "custom mutator";
  afl->stage_short = "custom";
  afl->stage_max = HAVOC_CYCLES * perf_score / afl->havoc_div / 100;
  afl->stage_val_type = STAGE_VAL_NONE;
  bool has_custom_fuzz = false;

  if (afl->stage_max < HAVOC_MIN) { afl->stage_max = HAVOC_MIN; }

  const u32 max_seed_size = MAX_FILE, saved_max = afl->stage_max;

  orig_hit_cnt = afl->queued_items + afl->saved_crashes;

#ifdef INTROSPECTION
  afl->mutation[0] = 0;
#endif

  LIST_FOREACH(&afl->custom_mutator_list, struct custom_mutator, {

    if (el->afl_custom_fuzz) {

      afl->current_custom_fuzz = el;

      if (el->afl_custom_fuzz_count) {

        afl->stage_max = el->afl_custom_fuzz_count(el->data, out_buf, len);

      } else {

        afl->stage_max = saved_max;

      }

      has_custom_fuzz = true;

      afl->stage_short = el->name_short;

      if (afl->stage_max) {

        for (afl->stage_cur = 0; afl->stage_cur < afl->stage_max;
             ++afl->stage_cur) {

          struct queue_entry *target = NULL;
          u32                 tid;
          u8 *                new_buf = NULL;
          u32                 target_len = 0;

          /* check if splicing makes sense yet (enough entries) */
          if (likely(afl->ready_for_splicing_count > 1)) {

            /* Pick a random other queue entry for passing to external API
               that has the necessary length */

            do {

              tid = rand_below(afl, afl->queued_items);

            } while (unlikely(tid == afl->current_entry ||

                              afl->queue_buf[tid]->len < 4));

            target = afl->queue_buf[tid];
            afl->splicing_with = tid;

            /* Read the additional testcase into a new buffer. */
            new_buf = queue_testcase_get(afl, target);
            target_len = target->len;

          }

          u8 *mutated_buf = NULL;

          size_t mutated_size =
              el->afl_custom_fuzz(el->data, out_buf, len, &mutated_buf, new_buf,
                                  target_len, max_seed_size);

          if (unlikely(!mutated_buf)) {

            FATAL("Error in custom_fuzz. Size returned: %zu", mutated_size);

          }

          if (mutated_size > 0) {

            if (common_fuzz_stuff(afl, mutated_buf, (u32)mutated_size)) {

              goto abandon_entry;

            }

            if (!el->afl_custom_fuzz_count) {

              /* If we're finding new stuff, let's run for a bit longer, limits
                 permitting. */

              if (afl->queued_items != havoc_queued) {

                if (perf_score <= afl->havoc_max_mult * 100) {

                  afl->stage_max *= 2;
                  perf_score *= 2;

                }

                havoc_queued = afl->queued_items;

              }

            }

          }

          /* out_buf may have been changed by the call to custom_fuzz */
          memcpy(out_buf, in_buf, len);

        }

      }

    }

  });

  afl->current_custom_fuzz = NULL;

  if (!has_custom_fuzz) goto havoc_stage;

  new_hit_cnt = afl->queued_items + afl->saved_crashes;

  afl->stage_finds[STAGE_CUSTOM_MUTATOR] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_CUSTOM_MUTATOR] += afl->stage_max;

  if (likely(afl->custom_only)) {

    /* Skip other stages */
    ret_val = 0;
    goto abandon_entry;

  }

2000   /****************
2001    * RANDOM HAVOC *
2002    ****************/
2003 
2004 havoc_stage:
2005 
2006   afl->stage_cur_byte = -1;
2007 
2008   /* The havoc stage mutation code is also invoked when splicing files; if the
2009      splice_cycle variable is set, generate different descriptions and such. */
2010 
2011   if (!splice_cycle) {
2012 
2013     afl->stage_name = "havoc";
2014     afl->stage_short = "havoc";
2015     afl->stage_max = (doing_det ? HAVOC_CYCLES_INIT : HAVOC_CYCLES) *
2016                      perf_score / afl->havoc_div / 100;
2017 
2018   } else {
2019 
2020     perf_score = orig_perf;
2021 
2022     snprintf(afl->stage_name_buf, STAGE_BUF_SIZE, "splice %u", splice_cycle);
2023     afl->stage_name = afl->stage_name_buf;
2024     afl->stage_short = "splice";
2025     afl->stage_max = SPLICE_HAVOC * perf_score / afl->havoc_div / 100;
2026 
2027   }
2028 
2029   if (afl->stage_max < HAVOC_MIN) { afl->stage_max = HAVOC_MIN; }
2030 
2031   temp_len = len;
2032 
2033   orig_hit_cnt = afl->queued_items + afl->saved_crashes;
2034 
2035   havoc_queued = afl->queued_items;
2036 
2037   if (afl->custom_mutators_count) {
2038 
2039     LIST_FOREACH(&afl->custom_mutator_list, struct custom_mutator, {
2040 
2041       if (el->stacked_custom && el->afl_custom_havoc_mutation_probability) {
2042 
2043         el->stacked_custom_prob =
2044             el->afl_custom_havoc_mutation_probability(el->data);
2045         if (el->stacked_custom_prob > 100) {
2046 
          FATAL(
              "The probability returned by "
              "afl_custom_havoc_mutation_probability "
              "has to be in the range 0-100.");

        }

      }

    });

  }

  /* We essentially just do several thousand runs (depending on perf_score)
     where we take the input file and make random stacked tweaks. */
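  /* Rough numbers, assuming stock config.h defaults (HAVOC_CYCLES = 256,
     havoc_div = 1): an entry with perf_score = 100 gets
     256 * 100 / 1 / 100 = 256 havoc iterations before the doubling logic at
     the bottom of the loop kicks in. */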

#define MAX_HAVOC_ENTRY 64
#define MUTATE_ASCII_DICT 64

  u32 r_max, r;

  r_max = (MAX_HAVOC_ENTRY + 1) + (afl->extras_cnt ? 4 : 0) +
          (afl->a_extras_cnt
               ? (unlikely(afl->cmplog_binary && afl->queue_cur->is_ascii)
                      ? MUTATE_ASCII_DICT
                      : 4)
               : 0);
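  /* How r_max partitions the switch below: 0..MAX_HAVOC_ENTRY selects one of
     the generic mutations; the next four slots (present only if a -x
     dictionary was loaded) select extras overwrite/insert; the block after
     that does the same for auto-detected extras, widened to MUTATE_ASCII_DICT
     slots for ASCII inputs under cmplog to favor dictionary use; whatever
     gets added on top below falls through to the splice overwrite/insert
     code in the default case. */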

  if (unlikely(afl->expand_havoc && afl->ready_for_splicing_count > 1)) {

    /* Add expensive havoc cases here; they are activated after a full
       cycle without new finds. */

    r_max += 4;

  }

  if (unlikely(get_cur_time() - afl->last_find_time > 5000 /* 5 seconds */ &&
               afl->ready_for_splicing_count > 1)) {

    /* Add expensive havoc cases here if there were no findings in the
       last 5 seconds. */

    r_max += 4;

  }

  for (afl->stage_cur = 0; afl->stage_cur < afl->stage_max; ++afl->stage_cur) {

    u32 use_stacking = 1 << (1 + rand_below(afl, afl->havoc_stack_pow2));
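    /* use_stacking is a power of two in the range [2, 2^havoc_stack_pow2];
       e.g. with the stock HAVOC_STACK_POW2 default of 7 this stacks 2, 4,
       ..., or 128 mutations on the buffer before executing it once. */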

    afl->stage_cur_val = use_stacking;

#ifdef INTROSPECTION
    snprintf(afl->mutation, sizeof(afl->mutation), "%s HAVOC-%u",
             afl->queue_cur->fname, use_stacking);
#endif

    for (i = 0; i < use_stacking; ++i) {

      if (afl->custom_mutators_count) {

        LIST_FOREACH(&afl->custom_mutator_list, struct custom_mutator, {

          if (el->stacked_custom &&
              rand_below(afl, 100) < el->stacked_custom_prob) {

            u8 *   custom_havoc_buf = NULL;
            size_t new_len = el->afl_custom_havoc_mutation(
                el->data, out_buf, temp_len, &custom_havoc_buf, MAX_FILE);
            if (unlikely(!custom_havoc_buf)) {

              FATAL("Error in custom_havoc (return %zu)", new_len);

            }

            if (likely(new_len > 0 && custom_havoc_buf)) {

              temp_len = new_len;
              if (out_buf != custom_havoc_buf) {

                out_buf = afl_realloc(AFL_BUF_PARAM(out), temp_len);
                if (unlikely(!afl->out_buf)) { PFATAL("alloc"); }
                memcpy(out_buf, custom_havoc_buf, temp_len);

              }

            }

          }

        });

      }

      switch ((r = rand_below(afl, r_max))) {

        case 0 ... 3: {

          /* Flip a single bit somewhere. Spooky! */

#ifdef INTROSPECTION
          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " FLIP_BIT1");
          strcat(afl->mutation, afl->m_tmp);
#endif
          FLIP_BIT(out_buf, rand_below(afl, temp_len << 3));
          break;

        }

        case 4 ... 7: {

          /* Set byte to interesting value. */

#ifdef INTROSPECTION
          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " INTERESTING8");
          strcat(afl->mutation, afl->m_tmp);
#endif
          out_buf[rand_below(afl, temp_len)] =
              interesting_8[rand_below(afl, sizeof(interesting_8))];
          break;

        }

        case 8 ... 9: {

          /* Set word to interesting value, little endian. */

          if (temp_len < 2) { break; }

#ifdef INTROSPECTION
          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " INTERESTING16");
          strcat(afl->mutation, afl->m_tmp);
#endif
          *(u16 *)(out_buf + rand_below(afl, temp_len - 1)) =
              interesting_16[rand_below(afl, sizeof(interesting_16) >> 1)];

          break;

        }

        case 10 ... 11: {

          /* Set word to interesting value, big endian. */

          if (temp_len < 2) { break; }

#ifdef INTROSPECTION
          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " INTERESTING16BE");
          strcat(afl->mutation, afl->m_tmp);
#endif
          *(u16 *)(out_buf + rand_below(afl, temp_len - 1)) = SWAP16(
              interesting_16[rand_below(afl, sizeof(interesting_16) >> 1)]);

          break;

        }

        case 12 ... 13: {

          /* Set dword to interesting value, little endian. */

          if (temp_len < 4) { break; }

#ifdef INTROSPECTION
          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " INTERESTING32");
          strcat(afl->mutation, afl->m_tmp);
#endif
          *(u32 *)(out_buf + rand_below(afl, temp_len - 3)) =
              interesting_32[rand_below(afl, sizeof(interesting_32) >> 2)];

          break;

        }

        case 14 ... 15: {

          /* Set dword to interesting value, big endian. */

          if (temp_len < 4) { break; }

#ifdef INTROSPECTION
          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " INTERESTING32BE");
          strcat(afl->mutation, afl->m_tmp);
#endif
          *(u32 *)(out_buf + rand_below(afl, temp_len - 3)) = SWAP32(
              interesting_32[rand_below(afl, sizeof(interesting_32) >> 2)]);

          break;

        }

        case 16 ... 19: {

          /* Randomly subtract from byte. */

#ifdef INTROSPECTION
          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH8_");
          strcat(afl->mutation, afl->m_tmp);
#endif
          out_buf[rand_below(afl, temp_len)] -= 1 + rand_below(afl, ARITH_MAX);
          break;

        }

        case 20 ... 23: {

          /* Randomly add to byte. */

#ifdef INTROSPECTION
          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH8+");
          strcat(afl->mutation, afl->m_tmp);
#endif
          out_buf[rand_below(afl, temp_len)] += 1 + rand_below(afl, ARITH_MAX);
          break;

        }

        case 24 ... 25: {

          /* Randomly subtract from word, little endian. */

          if (temp_len < 2) { break; }

          u32 pos = rand_below(afl, temp_len - 1);

#ifdef INTROSPECTION
          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH16_-%u", pos);
          strcat(afl->mutation, afl->m_tmp);
#endif
          *(u16 *)(out_buf + pos) -= 1 + rand_below(afl, ARITH_MAX);

          break;

        }

        case 26 ... 27: {

          /* Randomly subtract from word, big endian. */

          if (temp_len < 2) { break; }

          u32 pos = rand_below(afl, temp_len - 1);
          u16 num = 1 + rand_below(afl, ARITH_MAX);

#ifdef INTROSPECTION
          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH16_BE-%u_%u", pos,
                   num);
          strcat(afl->mutation, afl->m_tmp);
#endif
          *(u16 *)(out_buf + pos) =
              SWAP16(SWAP16(*(u16 *)(out_buf + pos)) - num);

          break;

        }

        case 28 ... 29: {

          /* Randomly add to word, little endian. */

          if (temp_len < 2) { break; }

          u32 pos = rand_below(afl, temp_len - 1);

#ifdef INTROSPECTION
          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH16+-%u", pos);
          strcat(afl->mutation, afl->m_tmp);
#endif
          *(u16 *)(out_buf + pos) += 1 + rand_below(afl, ARITH_MAX);

          break;

        }

        case 30 ... 31: {

          /* Randomly add to word, big endian. */

          if (temp_len < 2) { break; }

          u32 pos = rand_below(afl, temp_len - 1);
          u16 num = 1 + rand_below(afl, ARITH_MAX);

#ifdef INTROSPECTION
          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH16+BE-%u_%u", pos,
                   num);
          strcat(afl->mutation, afl->m_tmp);
#endif
          *(u16 *)(out_buf + pos) =
              SWAP16(SWAP16(*(u16 *)(out_buf + pos)) + num);

          break;

        }

        case 32 ... 33: {

          /* Randomly subtract from dword, little endian. */

          if (temp_len < 4) { break; }

          u32 pos = rand_below(afl, temp_len - 3);

#ifdef INTROSPECTION
          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH32_-%u", pos);
          strcat(afl->mutation, afl->m_tmp);
#endif
          *(u32 *)(out_buf + pos) -= 1 + rand_below(afl, ARITH_MAX);

          break;

        }

        case 34 ... 35: {

          /* Randomly subtract from dword, big endian. */

          if (temp_len < 4) { break; }

          u32 pos = rand_below(afl, temp_len - 3);
          u32 num = 1 + rand_below(afl, ARITH_MAX);

#ifdef INTROSPECTION
          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH32_BE-%u-%u", pos,
                   num);
          strcat(afl->mutation, afl->m_tmp);
#endif
          *(u32 *)(out_buf + pos) =
              SWAP32(SWAP32(*(u32 *)(out_buf + pos)) - num);

          break;

        }

        case 36 ... 37: {

          /* Randomly add to dword, little endian. */

          if (temp_len < 4) { break; }

          u32 pos = rand_below(afl, temp_len - 3);

#ifdef INTROSPECTION
          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH32+-%u", pos);
          strcat(afl->mutation, afl->m_tmp);
#endif
          *(u32 *)(out_buf + pos) += 1 + rand_below(afl, ARITH_MAX);

          break;

        }

        case 38 ... 39: {

          /* Randomly add to dword, big endian. */

          if (temp_len < 4) { break; }

          u32 pos = rand_below(afl, temp_len - 3);
          u32 num = 1 + rand_below(afl, ARITH_MAX);

#ifdef INTROSPECTION
          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH32+BE-%u-%u", pos,
                   num);
          strcat(afl->mutation, afl->m_tmp);
#endif
          *(u32 *)(out_buf + pos) =
              SWAP32(SWAP32(*(u32 *)(out_buf + pos)) + num);

          break;

        }

        case 40 ... 43: {

          /* Just set a random byte to a random value. Because,
             why not. We use XOR with 1-255 to eliminate the
             possibility of a no-op. */

#ifdef INTROSPECTION
          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " RAND8");
          strcat(afl->mutation, afl->m_tmp);
#endif
          out_buf[rand_below(afl, temp_len)] ^= 1 + rand_below(afl, 255);
          break;

        }

        case 44 ... 46: {

          if (temp_len + HAVOC_BLK_XL < MAX_FILE) {

            /* Clone bytes. */

            u32 clone_len = choose_block_len(afl, temp_len);
            u32 clone_from = rand_below(afl, temp_len - clone_len + 1);
            u32 clone_to = rand_below(afl, temp_len);

#ifdef INTROSPECTION
            snprintf(afl->m_tmp, sizeof(afl->m_tmp), " CLONE-%s-%u-%u-%u",
                     "clone", clone_from, clone_to, clone_len);
            strcat(afl->mutation, afl->m_tmp);
#endif
            u8 *new_buf =
                afl_realloc(AFL_BUF_PARAM(out_scratch), temp_len + clone_len);
            if (unlikely(!new_buf)) { PFATAL("alloc"); }

            /* Head */

            memcpy(new_buf, out_buf, clone_to);

            /* Inserted part */

            memcpy(new_buf + clone_to, out_buf + clone_from, clone_len);

            /* Tail */
            memcpy(new_buf + clone_to + clone_len, out_buf + clone_to,
                   temp_len - clone_to);

            out_buf = new_buf;
            afl_swap_bufs(AFL_BUF_PARAM(out), AFL_BUF_PARAM(out_scratch));
            temp_len += clone_len;

          }

          break;

        }

        case 47: {

          if (temp_len + HAVOC_BLK_XL < MAX_FILE) {

            /* Insert a block of constant bytes (25%). */

            u32 clone_len = choose_block_len(afl, HAVOC_BLK_XL);
            u32 clone_to = rand_below(afl, temp_len);

#ifdef INTROSPECTION
            snprintf(afl->m_tmp, sizeof(afl->m_tmp), " CLONE-%s-%u-%u",
                     "insert", clone_to, clone_len);
            strcat(afl->mutation, afl->m_tmp);
#endif
            u8 *new_buf =
                afl_realloc(AFL_BUF_PARAM(out_scratch), temp_len + clone_len);
            if (unlikely(!new_buf)) { PFATAL("alloc"); }

            /* Head */

            memcpy(new_buf, out_buf, clone_to);

            /* Inserted part */

            memset(new_buf + clone_to,
                   rand_below(afl, 2) ? rand_below(afl, 256)
                                      : out_buf[rand_below(afl, temp_len)],
                   clone_len);

            /* Tail */
            memcpy(new_buf + clone_to + clone_len, out_buf + clone_to,
                   temp_len - clone_to);

            out_buf = new_buf;
            afl_swap_bufs(AFL_BUF_PARAM(out), AFL_BUF_PARAM(out_scratch));
            temp_len += clone_len;

          }

          break;

        }

        case 48 ... 50: {

          /* Overwrite bytes with a randomly selected chunk of bytes. */

          if (temp_len < 2) { break; }

          u32 copy_len = choose_block_len(afl, temp_len - 1);
          u32 copy_from = rand_below(afl, temp_len - copy_len + 1);
          u32 copy_to = rand_below(afl, temp_len - copy_len + 1);

          if (likely(copy_from != copy_to)) {

#ifdef INTROSPECTION
            snprintf(afl->m_tmp, sizeof(afl->m_tmp), " OVERWRITE_COPY-%u-%u-%u",
                     copy_from, copy_to, copy_len);
            strcat(afl->mutation, afl->m_tmp);
#endif
            memmove(out_buf + copy_to, out_buf + copy_from, copy_len);

          }

          break;

        }

        case 51: {

          /* Overwrite bytes with fixed bytes. */

          if (temp_len < 2) { break; }

          u32 copy_len = choose_block_len(afl, temp_len - 1);
          u32 copy_to = rand_below(afl, temp_len - copy_len + 1);

#ifdef INTROSPECTION
          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " OVERWRITE_FIXED-%u-%u",
                   copy_to, copy_len);
          strcat(afl->mutation, afl->m_tmp);
#endif
          memset(out_buf + copy_to,
                 rand_below(afl, 2) ? rand_below(afl, 256)
                                    : out_buf[rand_below(afl, temp_len)],
                 copy_len);

          break;

        }

        case 52: {

          /* Increase byte by 1. */

#ifdef INTROSPECTION
          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ADDBYTE_");
          strcat(afl->mutation, afl->m_tmp);
#endif
          out_buf[rand_below(afl, temp_len)]++;
          break;

        }

        case 53: {

          /* Decrease byte by 1. */

#ifdef INTROSPECTION
          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " SUBBYTE_");
          strcat(afl->mutation, afl->m_tmp);
#endif
          out_buf[rand_below(afl, temp_len)]--;
          break;

        }

        case 54: {

          /* Flip byte. */

#ifdef INTROSPECTION
          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " FLIP8_");
          strcat(afl->mutation, afl->m_tmp);
#endif
          out_buf[rand_below(afl, temp_len)] ^= 0xff;
          break;

        }

        case 55 ... 56: {

          if (temp_len < 4) { break; }

          /* Switch bytes. */

          u32 to_end, switch_to, switch_len, switch_from;
          switch_from = rand_below(afl, temp_len);
          do {

            switch_to = rand_below(afl, temp_len);

          } while (switch_from == switch_to);

          if (switch_from < switch_to) {

            switch_len = switch_to - switch_from;
            to_end = temp_len - switch_to;

          } else {

            switch_len = switch_from - switch_to;
            to_end = temp_len - switch_from;

          }

          switch_len = choose_block_len(afl, MIN(switch_len, to_end));

#ifdef INTROSPECTION
          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " SWITCH-%s-%u-%u-%u",
                   "switch", switch_from, switch_to, switch_len);
          strcat(afl->mutation, afl->m_tmp);
#endif
          u8 *new_buf = afl_realloc(AFL_BUF_PARAM(out_scratch), switch_len);
          if (unlikely(!new_buf)) { PFATAL("alloc"); }

          /* Backup */

          memcpy(new_buf, out_buf + switch_from, switch_len);

          /* Switch 1 */

          memcpy(out_buf + switch_from, out_buf + switch_to, switch_len);

          /* Switch 2 */

          memcpy(out_buf + switch_to, new_buf, switch_len);

          break;

        }

        // MAX_HAVOC_ENTRY = 64
        case 57 ... MAX_HAVOC_ENTRY: {

          /* Delete bytes. */

          if (temp_len < 2) { break; }

          /* Don't delete too much. */

          u32 del_len = choose_block_len(afl, temp_len - 1);
          u32 del_from = rand_below(afl, temp_len - del_len + 1);

#ifdef INTROSPECTION
          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " DEL-%u-%u", del_from,
                   del_len);
          strcat(afl->mutation, afl->m_tmp);
#endif
          memmove(out_buf + del_from, out_buf + del_from + del_len,
                  temp_len - del_from - del_len);

          temp_len -= del_len;

          break;

        }

        default:

          r -= (MAX_HAVOC_ENTRY + 1);

          if (afl->extras_cnt) {

            if (r < 2) {

              /* Use the dictionary. */

              u32 use_extra = rand_below(afl, afl->extras_cnt);
              u32 extra_len = afl->extras[use_extra].len;

              if (extra_len > temp_len) { break; }

              u32 insert_at = rand_below(afl, temp_len - extra_len + 1);
#ifdef INTROSPECTION
              snprintf(afl->m_tmp, sizeof(afl->m_tmp), " EXTRA_OVERWRITE-%u-%u",
                       insert_at, extra_len);
              strcat(afl->mutation, afl->m_tmp);
#endif
              memcpy(out_buf + insert_at, afl->extras[use_extra].data,
                     extra_len);

              break;

            } else if (r < 4) {

              u32 use_extra = rand_below(afl, afl->extras_cnt);
              u32 extra_len = afl->extras[use_extra].len;
              if (temp_len + extra_len >= MAX_FILE) { break; }

              u8 *ptr = afl->extras[use_extra].data;
              u32 insert_at = rand_below(afl, temp_len + 1);
#ifdef INTROSPECTION
              snprintf(afl->m_tmp, sizeof(afl->m_tmp), " EXTRA_INSERT-%u-%u",
                       insert_at, extra_len);
              strcat(afl->mutation, afl->m_tmp);
#endif

              out_buf = afl_realloc(AFL_BUF_PARAM(out), temp_len + extra_len);
              if (unlikely(!out_buf)) { PFATAL("alloc"); }

              /* Tail */
              memmove(out_buf + insert_at + extra_len, out_buf + insert_at,
                      temp_len - insert_at);

              /* Inserted part */
              memcpy(out_buf + insert_at, ptr, extra_len);
              temp_len += extra_len;

              break;

            } else {

              r -= 4;

            }

          }

          if (afl->a_extras_cnt) {

            u32 r_cmp = 2;

            if (unlikely(afl->cmplog_binary && afl->queue_cur->is_ascii)) {

              r_cmp = MUTATE_ASCII_DICT >> 1;

            }

            if (r < r_cmp) {

              /* Use the dictionary. */

              u32 use_extra = rand_below(afl, afl->a_extras_cnt);
              u32 extra_len = afl->a_extras[use_extra].len;

              if (extra_len > temp_len) { break; }

              u32 insert_at = rand_below(afl, temp_len - extra_len + 1);
#ifdef INTROSPECTION
              snprintf(afl->m_tmp, sizeof(afl->m_tmp),
                       " AUTO_EXTRA_OVERWRITE-%u-%u", insert_at, extra_len);
              strcat(afl->mutation, afl->m_tmp);
#endif
              memcpy(out_buf + insert_at, afl->a_extras[use_extra].data,
                     extra_len);

              break;

            } else if (r < (r_cmp << 1)) {

              u32 use_extra = rand_below(afl, afl->a_extras_cnt);
              u32 extra_len = afl->a_extras[use_extra].len;
              if (temp_len + extra_len >= MAX_FILE) { break; }

              u8 *ptr = afl->a_extras[use_extra].data;
              u32 insert_at = rand_below(afl, temp_len + 1);
#ifdef INTROSPECTION
              snprintf(afl->m_tmp, sizeof(afl->m_tmp),
                       " AUTO_EXTRA_INSERT-%u-%u", insert_at, extra_len);
              strcat(afl->mutation, afl->m_tmp);
#endif

              out_buf = afl_realloc(AFL_BUF_PARAM(out), temp_len + extra_len);
              if (unlikely(!out_buf)) { PFATAL("alloc"); }

              /* Tail */
              memmove(out_buf + insert_at + extra_len, out_buf + insert_at,
                      temp_len - insert_at);

              /* Inserted part */
              memcpy(out_buf + insert_at, ptr, extra_len);
              temp_len += extra_len;

              break;

            } else {

              r -= (r_cmp << 1);

            }

          }

          /* If we are still here, splice: overwrite bytes with a randomly
             selected chunk from another testcase, or insert that chunk. */

          /* Pick a random queue entry and seek to it. */

          u32 tid;
          do {

            tid = rand_below(afl, afl->queued_items);

          } while (tid == afl->current_entry || afl->queue_buf[tid]->len < 4);

          /* Get the testcase for splicing. */
          struct queue_entry *target = afl->queue_buf[tid];
          u32                 new_len = target->len;
          u8 *                new_buf = queue_testcase_get(afl, target);

          if ((temp_len >= 2 && r % 2) || temp_len + HAVOC_BLK_XL >= MAX_FILE) {

            /* overwrite mode */

            u32 copy_from, copy_to, copy_len;

            copy_len = choose_block_len(afl, new_len - 1);
            if (copy_len > temp_len) copy_len = temp_len;

            copy_from = rand_below(afl, new_len - copy_len + 1);
            copy_to = rand_below(afl, temp_len - copy_len + 1);

#ifdef INTROSPECTION
            snprintf(afl->m_tmp, sizeof(afl->m_tmp),
                     " SPLICE_OVERWRITE-%u-%u-%u-%s", copy_from, copy_to,
                     copy_len, target->fname);
            strcat(afl->mutation, afl->m_tmp);
#endif
            memmove(out_buf + copy_to, new_buf + copy_from, copy_len);

          } else {

            /* insert mode */

            u32 clone_from, clone_to, clone_len;

            clone_len = choose_block_len(afl, new_len);
            clone_from = rand_below(afl, new_len - clone_len + 1);
            clone_to = rand_below(afl, temp_len + 1);

            u8 *temp_buf = afl_realloc(AFL_BUF_PARAM(out_scratch),
                                       temp_len + clone_len + 1);
            if (unlikely(!temp_buf)) { PFATAL("alloc"); }

#ifdef INTROSPECTION
            snprintf(afl->m_tmp, sizeof(afl->m_tmp),
                     " SPLICE_INSERT-%u-%u-%u-%s", clone_from, clone_to,
                     clone_len, target->fname);
            strcat(afl->mutation, afl->m_tmp);
#endif
            /* Head */

            memcpy(temp_buf, out_buf, clone_to);

            /* Inserted part */

            memcpy(temp_buf + clone_to, new_buf + clone_from, clone_len);

            /* Tail */
            memcpy(temp_buf + clone_to + clone_len, out_buf + clone_to,
                   temp_len - clone_to);

            out_buf = temp_buf;
            afl_swap_bufs(AFL_BUF_PARAM(out), AFL_BUF_PARAM(out_scratch));
            temp_len += clone_len;

          }

          break;

          // end of default

      }

    }

    if (common_fuzz_stuff(afl, out_buf, temp_len)) { goto abandon_entry; }

    /* out_buf might have been mangled a bit, so let's restore it to its
       original size and shape. */

    out_buf = afl_realloc(AFL_BUF_PARAM(out), len);
    if (unlikely(!out_buf)) { PFATAL("alloc"); }
    temp_len = len;
    memcpy(out_buf, in_buf, len);

    /* If we're finding new stuff, let's run for a bit longer, limits
       permitting. */

    if (afl->queued_items != havoc_queued) {

      if (perf_score <= afl->havoc_max_mult * 100) {

        afl->stage_max *= 2;
        perf_score *= 2;

      }

      havoc_queued = afl->queued_items;

    }

  }

  new_hit_cnt = afl->queued_items + afl->saved_crashes;

  if (!splice_cycle) {

    afl->stage_finds[STAGE_HAVOC] += new_hit_cnt - orig_hit_cnt;
    afl->stage_cycles[STAGE_HAVOC] += afl->stage_max;

  } else {

    afl->stage_finds[STAGE_SPLICE] += new_hit_cnt - orig_hit_cnt;
    afl->stage_cycles[STAGE_SPLICE] += afl->stage_max;

  }

#ifndef IGNORE_FINDS

  /************
   * SPLICING *
   ************/

  /* This is a last-resort strategy triggered by a full round with no findings.
     It takes the current input file, randomly selects another input, and
     splices them together at some offset, then relies on the havoc
     code to mutate that blob. */
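  /* Worked example: if the current input is "AAAA1234" and the randomly
     chosen partner is "AAAA9999", the buffers differ in bytes 4..7, so
     locate_diffs() reports f_diff = 4 and l_diff = 7 and split_at is drawn
     from {4, 5, 6}; the spliced result keeps in_buf[0..split_at) and takes
     everything from split_at onward from the partner. */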

retry_splicing:

  if (afl->use_splicing && splice_cycle++ < SPLICE_CYCLES &&
      afl->ready_for_splicing_count > 1 && afl->queue_cur->len >= 4) {

    struct queue_entry *target;
    u32                 tid, split_at;
    u8 *                new_buf;
    s32                 f_diff, l_diff;

    /* First of all, if we've modified in_buf for havoc, let's clean that
       up... */

    if (in_buf != orig_in) {

      in_buf = orig_in;
      len = afl->queue_cur->len;

    }

    /* Pick a random queue entry and seek to it. Don't splice with yourself. */

    do {

      tid = rand_below(afl, afl->queued_items);

    } while (tid == afl->current_entry || afl->queue_buf[tid]->len < 4);

    /* Get the testcase */
    afl->splicing_with = tid;
    target = afl->queue_buf[tid];
    new_buf = queue_testcase_get(afl, target);

    /* Find a suitable splicing location, somewhere between the first and
       the last differing byte. Bail out if the difference is just a single
       byte or so. */

    locate_diffs(in_buf, new_buf, MIN(len, (s64)target->len), &f_diff, &l_diff);

    if (f_diff < 0 || l_diff < 2 || f_diff == l_diff) { goto retry_splicing; }

    /* Split somewhere between the first and last differing byte. */

    split_at = f_diff + rand_below(afl, l_diff - f_diff);

    /* Do the thing. */

    len = target->len;
    afl->in_scratch_buf = afl_realloc(AFL_BUF_PARAM(in_scratch), len);
    memcpy(afl->in_scratch_buf, in_buf, split_at);
    memcpy(afl->in_scratch_buf + split_at, new_buf + split_at, len - split_at);
    in_buf = afl->in_scratch_buf;
    afl_swap_bufs(AFL_BUF_PARAM(in), AFL_BUF_PARAM(in_scratch));

    out_buf = afl_realloc(AFL_BUF_PARAM(out), len);
    if (unlikely(!out_buf)) { PFATAL("alloc"); }
    memcpy(out_buf, in_buf, len);

    goto custom_mutator_stage;

  }

#endif                                                     /* !IGNORE_FINDS */

  ret_val = 0;

/* we are through with this queue entry - for this iteration */
abandon_entry:

  afl->splicing_with = -1;

  /* Update afl->pending_not_fuzzed count if we made it through the calibration
     cycle and have not seen this entry before. */

  if (!afl->stop_soon && !afl->queue_cur->cal_failed &&
      !afl->queue_cur->was_fuzzed && !afl->queue_cur->disabled) {

    --afl->pending_not_fuzzed;
    afl->queue_cur->was_fuzzed = 1;
    afl->reinit_table = 1;
    if (afl->queue_cur->favored) { --afl->pending_favored; }

  }

  ++afl->queue_cur->fuzz_level;
  orig_in = NULL;
  return ret_val;

#undef FLIP_BIT

}

/* MOpt mode */
static u8 mopt_common_fuzzing(afl_state_t *afl, MOpt_globals_t MOpt_globals) {

  if (!MOpt_globals.is_pilot_mode) {

    if (swarm_num == 1) {

      afl->key_module = 2;
      return 0;

    }

  }

  u32 len, temp_len;
  u32 i;
  u32 j;
  u8 *in_buf, *out_buf, *orig_in, *ex_tmp, *eff_map = 0;
  u64 havoc_queued = 0, orig_hit_cnt, new_hit_cnt = 0, cur_ms_lv, prev_cksum,
      _prev_cksum;
  u32 splice_cycle = 0, perf_score = 100, orig_perf, eff_cnt = 1;

  u8 ret_val = 1, doing_det = 0;

  u8  a_collect[MAX_AUTO_EXTRA];
  u32 a_len = 0;

#ifdef IGNORE_FINDS

  /* In IGNORE_FINDS mode, skip any entries that weren't in the
     initial data set. */

  if (afl->queue_cur->depth > 1) return 1;

#else

  if (likely(afl->pending_favored)) {

    /* If we have any favored, non-fuzzed new arrivals in the queue,
       possibly skip to them at the expense of already-fuzzed or non-favored
       cases. */

    if ((afl->queue_cur->fuzz_level || !afl->queue_cur->favored) &&
        rand_below(afl, 100) < SKIP_TO_NEW_PROB) {

      return 1;

    }

  } else if (!afl->non_instrumented_mode && !afl->queue_cur->favored &&

             afl->queued_items > 10) {

    /* Otherwise, still possibly skip non-favored cases, albeit less often.
       The odds of skipping stuff are higher for already-fuzzed inputs and
       lower for never-fuzzed entries. */

    if (afl->queue_cycle > 1 && !afl->queue_cur->fuzz_level) {

      if (likely(rand_below(afl, 100) < SKIP_NFAV_NEW_PROB)) { return 1; }

    } else {

      if (likely(rand_below(afl, 100) < SKIP_NFAV_OLD_PROB)) { return 1; }

    }

  }
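  /* For reference, the stock config.h defaults are SKIP_TO_NEW_PROB = 99,
     SKIP_NFAV_NEW_PROB = 75 and SKIP_NFAV_OLD_PROB = 95 (assuming unmodified
     defaults), so non-favored entries are skipped most of the time once the
     queue has warmed up. */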

#endif                                                     /* ^IGNORE_FINDS */

  if (afl->not_on_tty) {

    ACTF("Fuzzing test case #%u (%u total, %llu crashes saved)...",
         afl->current_entry, afl->queued_items, afl->saved_crashes);
    fflush(stdout);

  }

  /* Map the test case into memory. */
  orig_in = in_buf = queue_testcase_get(afl, afl->queue_cur);
  len = afl->queue_cur->len;

  out_buf = afl_realloc(AFL_BUF_PARAM(out), len);
  if (unlikely(!out_buf)) { PFATAL("alloc"); }

  afl->subseq_tmouts = 0;

  afl->cur_depth = afl->queue_cur->depth;

  /*******************************************
   * CALIBRATION (only if failed earlier on) *
   *******************************************/

  if (unlikely(afl->queue_cur->cal_failed)) {

    u8 res = FSRV_RUN_TMOUT;

    if (afl->queue_cur->cal_failed < CAL_CHANCES) {

      afl->queue_cur->exec_cksum = 0;

      res =
          calibrate_case(afl, afl->queue_cur, in_buf, afl->queue_cycle - 1, 0);

      if (res == FSRV_RUN_ERROR) {

        FATAL("Unable to execute target application");

      }

    }

    if (afl->stop_soon || res != afl->crash_mode) {

      ++afl->cur_skipped_items;
      goto abandon_entry;

    }

  }

  /************
   * TRIMMING *
   ************/

  if (unlikely(!afl->non_instrumented_mode && !afl->queue_cur->trim_done &&
               !afl->disable_trim)) {

    u32 old_len = afl->queue_cur->len;

    u8 res = trim_case(afl, afl->queue_cur, in_buf);
    orig_in = in_buf = queue_testcase_get(afl, afl->queue_cur);

    if (unlikely(res == FSRV_RUN_ERROR)) {

      FATAL("Unable to execute target application");

    }

    if (unlikely(afl->stop_soon)) {

      ++afl->cur_skipped_items;
      goto abandon_entry;

    }

    /* Don't retry trimming, even if it failed. */

    afl->queue_cur->trim_done = 1;

    len = afl->queue_cur->len;

    /* maybe current entry is not ready for splicing anymore */
    if (unlikely(len <= 4 && old_len > 4)) --afl->ready_for_splicing_count;

  }

  memcpy(out_buf, in_buf, len);

  /*********************
   * PERFORMANCE SCORE *
   *********************/

  if (likely(!afl->old_seed_selection))
    orig_perf = perf_score = afl->queue_cur->perf_score;
  else
    orig_perf = perf_score = calculate_score(afl, afl->queue_cur);

  if (unlikely(perf_score <= 0 && afl->active_items > 1)) {

    goto abandon_entry;

  }

  if (unlikely(afl->shm.cmplog_mode &&
               afl->queue_cur->colorized < afl->cmplog_lvl &&
               (u32)len <= afl->cmplog_max_filesize)) {

    if (unlikely(len < 4)) {

      afl->queue_cur->colorized = CMPLOG_LVL_MAX;

    } else {

      if (afl->cmplog_lvl == 3 ||
          (afl->cmplog_lvl == 2 && afl->queue_cur->tc_ref) ||
          !(afl->fsrv.total_execs % afl->queued_items) ||
          get_cur_time() - afl->last_find_time > 300000) {  // 300 seconds

        if (input_to_state_stage(afl, in_buf, out_buf, len)) {

          goto abandon_entry;

        }

      }

    }

  }

  /* Go to pacemaker fuzzing if regular fuzzing has produced no finds or
     crashes within the MOpt time limit. */

  cur_ms_lv = get_cur_time();
  if (!(afl->key_puppet == 0 &&
        ((cur_ms_lv - afl->last_find_time < (u32)afl->limit_time_puppet) ||
         (afl->last_crash_time != 0 &&
          cur_ms_lv - afl->last_crash_time < (u32)afl->limit_time_puppet) ||
         afl->last_find_time == 0))) {

    afl->key_puppet = 1;
    goto pacemaker_fuzzing;

  }

  /* Skip right away if -d is given, if we have done deterministic fuzzing on
     this entry ourselves (was_fuzzed), or if it has gone through deterministic
     testing in earlier, resumed runs (passed_det). */

  if (likely(afl->skip_deterministic || afl->queue_cur->was_fuzzed ||
             afl->queue_cur->passed_det)) {

    goto havoc_stage;

  }

  /* Skip deterministic fuzzing if exec path checksum puts this out of scope
     for this main instance. */

  if (unlikely(afl->main_node_max &&
               (afl->queue_cur->exec_cksum % afl->main_node_max) !=
                   afl->main_node_id - 1)) {

    goto havoc_stage;

  }

  doing_det = 1;

  /*********************************************
   * SIMPLE BITFLIP (+dictionary construction) *
   *********************************************/

#define FLIP_BIT(_ar, _b)                   \
  do {                                      \
                                            \
    u8 *_arf = (u8 *)(_ar);                 \
    u32 _bf = (_b);                         \
    _arf[(_bf) >> 3] ^= (128 >> ((_bf)&7)); \
                                            \
  } while (0)
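  /* Example: FLIP_BIT(buf, 10) toggles bit 2 (counting from the most
     significant bit) of buf[1]: 10 >> 3 == 1 selects the byte and
     128 >> (10 & 7) == 0x20 is the mask. */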

  /* Single walking bit. */

  afl->stage_short = "flip1";
  afl->stage_max = len << 3;
  afl->stage_name = "bitflip 1/1";

  afl->stage_val_type = STAGE_VAL_NONE;

  orig_hit_cnt = afl->queued_items + afl->saved_crashes;

  /* Get a clean cksum. */

  if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }

  prev_cksum = hash64(afl->fsrv.trace_bits, afl->fsrv.map_size, HASH_CONST);
  _prev_cksum = prev_cksum;

  /* Now flip bits. */

  for (afl->stage_cur = 0; afl->stage_cur < afl->stage_max; ++afl->stage_cur) {

    afl->stage_cur_byte = afl->stage_cur >> 3;

    FLIP_BIT(out_buf, afl->stage_cur);

#ifdef INTROSPECTION
    snprintf(afl->mutation, sizeof(afl->mutation), "%s MOPT_FLIP_BIT1-%u",
             afl->queue_cur->fname, afl->stage_cur);
#endif
    if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }

    FLIP_BIT(out_buf, afl->stage_cur);

    /* While flipping the least significant bit in every byte, pull off an
       extra trick to detect possible syntax tokens. In essence, the idea is
       that if you have a binary blob like this:

       xxxxxxxxIHDRxxxxxxxx

       ...and changing the leading and trailing bytes causes variable or no
       changes in program flow, but touching any character in the "IHDR" string
       always produces the same, distinctive path, it's highly likely that
       "IHDR" is an atomically-checked magic value of special significance to
       the fuzzed format.

       We do this here, rather than as a separate stage, because it's a nice
       way to keep the operation approximately "free" (i.e., no extra execs).

       Empirically, performing the check when flipping the least significant
       bit is advantageous, compared to doing it at the time of more disruptive
       changes, where the program flow may be affected in more violent ways.

       The caveat is that we won't generate dictionaries in the -d mode or -S
       mode - but that's probably a fair trade-off.

       This won't work particularly well with paths that exhibit variable
       behavior, but fails gracefully, so we'll carry out the checks anyway.

      */
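    /* Collected token lengths are bounded by MIN_AUTO_EXTRA and
       MAX_AUTO_EXTRA below - 3 and 32 bytes with the stock config.h,
       assuming unmodified defaults. */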

    if (!afl->non_instrumented_mode && (afl->stage_cur & 7) == 7) {

      u64 cksum = hash64(afl->fsrv.trace_bits, afl->fsrv.map_size, HASH_CONST);

      if (afl->stage_cur == afl->stage_max - 1 && cksum == prev_cksum) {

        /* If at end of file and we are still collecting a string, grab the
           final character and force output. */

        if (a_len < MAX_AUTO_EXTRA) {

          a_collect[a_len] = out_buf[afl->stage_cur >> 3];

        }

        ++a_len;

        if (a_len >= MIN_AUTO_EXTRA && a_len <= MAX_AUTO_EXTRA) {

          maybe_add_auto(afl, a_collect, a_len);

        }

      } else if (cksum != prev_cksum) {

        /* Otherwise, if the checksum has changed, see if we have something
           worthwhile queued up, and collect that if the answer is yes. */

        if (a_len >= MIN_AUTO_EXTRA && a_len <= MAX_AUTO_EXTRA) {

          maybe_add_auto(afl, a_collect, a_len);

        }

        a_len = 0;
        prev_cksum = cksum;

      }

      /* Continue collecting string, but only if the bit flip actually made
         any difference - we don't want no-op tokens. */

      if (cksum != _prev_cksum) {

        if (a_len < MAX_AUTO_EXTRA) {

          a_collect[a_len] = out_buf[afl->stage_cur >> 3];

        }

        ++a_len;

      }

    }                                       /* if (afl->stage_cur & 7) == 7 */

  }                                                   /* for afl->stage_cur */

  new_hit_cnt = afl->queued_items + afl->saved_crashes;

  afl->stage_finds[STAGE_FLIP1] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_FLIP1] += afl->stage_max;

  /* Two walking bits. */

  afl->stage_name = "bitflip 2/1";
  afl->stage_short = "flip2";
  afl->stage_max = (len << 3) - 1;

  orig_hit_cnt = new_hit_cnt;

  for (afl->stage_cur = 0; afl->stage_cur < afl->stage_max; ++afl->stage_cur) {

    afl->stage_cur_byte = afl->stage_cur >> 3;

    FLIP_BIT(out_buf, afl->stage_cur);
    FLIP_BIT(out_buf, afl->stage_cur + 1);

#ifdef INTROSPECTION
    snprintf(afl->mutation, sizeof(afl->mutation), "%s MOPT_FLIP_BIT2-%u",
             afl->queue_cur->fname, afl->stage_cur);
#endif
    if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }

    FLIP_BIT(out_buf, afl->stage_cur);
    FLIP_BIT(out_buf, afl->stage_cur + 1);

  }                                                   /* for afl->stage_cur */

  new_hit_cnt = afl->queued_items + afl->saved_crashes;

  afl->stage_finds[STAGE_FLIP2] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_FLIP2] += afl->stage_max;

  /* Four walking bits. */

  afl->stage_name = "bitflip 4/1";
  afl->stage_short = "flip4";
  afl->stage_max = (len << 3) - 3;

  orig_hit_cnt = new_hit_cnt;

  for (afl->stage_cur = 0; afl->stage_cur < afl->stage_max; ++afl->stage_cur) {

    afl->stage_cur_byte = afl->stage_cur >> 3;

    FLIP_BIT(out_buf, afl->stage_cur);
    FLIP_BIT(out_buf, afl->stage_cur + 1);
    FLIP_BIT(out_buf, afl->stage_cur + 2);
    FLIP_BIT(out_buf, afl->stage_cur + 3);

#ifdef INTROSPECTION
    snprintf(afl->mutation, sizeof(afl->mutation), "%s MOPT_FLIP_BIT4-%u",
             afl->queue_cur->fname, afl->stage_cur);
#endif
    if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }

    FLIP_BIT(out_buf, afl->stage_cur);
    FLIP_BIT(out_buf, afl->stage_cur + 1);
    FLIP_BIT(out_buf, afl->stage_cur + 2);
    FLIP_BIT(out_buf, afl->stage_cur + 3);

  }                                                   /* for afl->stage_cur */

  new_hit_cnt = afl->queued_items + afl->saved_crashes;

  afl->stage_finds[STAGE_FLIP4] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_FLIP4] += afl->stage_max;

  /* Effector map setup. These macros calculate:

     EFF_APOS      - position of a particular file offset in the map.
     EFF_ALEN      - length of a map with a particular number of bytes.
     EFF_SPAN_ALEN - map span for a sequence of bytes.

   */

#define EFF_APOS(_p) ((_p) >> EFF_MAP_SCALE2)
#define EFF_REM(_x) ((_x) & ((1 << EFF_MAP_SCALE2) - 1))
#define EFF_ALEN(_l) (EFF_APOS(_l) + !!EFF_REM(_l))
#define EFF_SPAN_ALEN(_p, _l) (EFF_APOS((_p) + (_l)-1) - EFF_APOS(_p) + 1)
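  /* Worked example, assuming the stock EFF_MAP_SCALE2 of 3 (one map byte
     per 8 input bytes): EFF_APOS(17) == 2, EFF_ALEN(17) == 3, and
     EFF_SPAN_ALEN(14, 4) == EFF_APOS(17) - EFF_APOS(14) + 1 == 2. */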

  /* Initialize effector map for the next step (see comments below). Always
     flag first and last byte as doing something. */

  eff_map = afl_realloc(AFL_BUF_PARAM(eff), EFF_ALEN(len));
  if (unlikely(!eff_map)) { PFATAL("alloc"); }
  eff_map[0] = 1;

  if (EFF_APOS(len - 1) != 0) {

    eff_map[EFF_APOS(len - 1)] = 1;
    ++eff_cnt;

  }

  /* Walking byte. */

  afl->stage_name = "bitflip 8/8";
  afl->stage_short = "flip8";
  afl->stage_max = len;

  orig_hit_cnt = new_hit_cnt;
  prev_cksum = _prev_cksum;

  for (afl->stage_cur = 0; afl->stage_cur < afl->stage_max; ++afl->stage_cur) {

    afl->stage_cur_byte = afl->stage_cur;

    out_buf[afl->stage_cur] ^= 0xFF;

#ifdef INTROSPECTION
    snprintf(afl->mutation, sizeof(afl->mutation), "%s MOPT_FLIP_BIT8-%u",
             afl->queue_cur->fname, afl->stage_cur);
#endif
    if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }

    /* We also use this stage to pull off a simple trick: we identify
       bytes that seem to have no effect on the current execution path
       even when fully flipped - and we skip them during more expensive
       deterministic stages, such as arithmetics or known ints. */

    if (!eff_map[EFF_APOS(afl->stage_cur)]) {

      u64 cksum;

      /* If in non-instrumented mode or if the file is very short, just flag
         everything without wasting time on checksums. */

      if (!afl->non_instrumented_mode && len >= EFF_MIN_LEN) {

        cksum = hash64(afl->fsrv.trace_bits, afl->fsrv.map_size, HASH_CONST);

      } else {

        cksum = ~prev_cksum;

      }

      if (cksum != prev_cksum) {

        eff_map[EFF_APOS(afl->stage_cur)] = 1;
        ++eff_cnt;

      }

    }

    out_buf[afl->stage_cur] ^= 0xFF;

  }                                                   /* for afl->stage_cur */

  /* If the effector map is more than EFF_MAX_PERC dense, just flag the
     whole thing as worth fuzzing, since we wouldn't be saving much time
     anyway. */
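  /* EFF_MAX_PERC is 90 with the stock config.h, so this shortcut only
     fires once at least 90% of the buckets are already flagged. */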

  if (eff_cnt != (u32)EFF_ALEN(len) &&
      eff_cnt * 100 / EFF_ALEN(len) > EFF_MAX_PERC) {

    memset(eff_map, 1, EFF_ALEN(len));

    afl->blocks_eff_select += EFF_ALEN(len);

  } else {

    afl->blocks_eff_select += eff_cnt;

  }

  afl->blocks_eff_total += EFF_ALEN(len);

  new_hit_cnt = afl->queued_items + afl->saved_crashes;

  afl->stage_finds[STAGE_FLIP8] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_FLIP8] += afl->stage_max;

  /* Two walking bytes. */

  if (len < 2) { goto skip_bitflip; }

  afl->stage_name = "bitflip 16/8";
  afl->stage_short = "flip16";
  afl->stage_cur = 0;
  afl->stage_max = len - 1;

  orig_hit_cnt = new_hit_cnt;

  for (i = 0; i < len - 1; ++i) {

    /* Let's consult the effector map... */

    if (!eff_map[EFF_APOS(i)] && !eff_map[EFF_APOS(i + 1)]) {

      --afl->stage_max;
      continue;

    }

    afl->stage_cur_byte = i;

    *(u16 *)(out_buf + i) ^= 0xFFFF;

#ifdef INTROSPECTION
    snprintf(afl->mutation, sizeof(afl->mutation), "%s MOPT_FLIP_BIT16-%u",
             afl->queue_cur->fname, afl->stage_cur);
#endif
    if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
    ++afl->stage_cur;

    *(u16 *)(out_buf + i) ^= 0xFFFF;

  }                                                   /* for i = 0; i < len */

  new_hit_cnt = afl->queued_items + afl->saved_crashes;

  afl->stage_finds[STAGE_FLIP16] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_FLIP16] += afl->stage_max;

  if (len < 4) { goto skip_bitflip; }

  /* Four walking bytes. */

  afl->stage_name = "bitflip 32/8";
  afl->stage_short = "flip32";
  afl->stage_cur = 0;
  afl->stage_max = len - 3;

  orig_hit_cnt = new_hit_cnt;

  for (i = 0; i < len - 3; ++i) {

    /* Let's consult the effector map... */
    if (!eff_map[EFF_APOS(i)] && !eff_map[EFF_APOS(i + 1)] &&
        !eff_map[EFF_APOS(i + 2)] && !eff_map[EFF_APOS(i + 3)]) {

      --afl->stage_max;
      continue;

    }

    afl->stage_cur_byte = i;

    *(u32 *)(out_buf + i) ^= 0xFFFFFFFF;

#ifdef INTROSPECTION
    snprintf(afl->mutation, sizeof(afl->mutation), "%s MOPT_FLIP_BIT32-%u",
             afl->queue_cur->fname, afl->stage_cur);
#endif
    if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
    ++afl->stage_cur;

    *(u32 *)(out_buf + i) ^= 0xFFFFFFFF;

  }                                               /* for i = 0; i < len - 3 */

  new_hit_cnt = afl->queued_items + afl->saved_crashes;

  afl->stage_finds[STAGE_FLIP32] += new_hit_cnt - orig_hit_cnt;
  afl->stage_cycles[STAGE_FLIP32] += afl->stage_max;

skip_bitflip:

  if (afl->no_arith) { goto skip_arith; }

  /**********************
   * ARITHMETIC INC/DEC *
   **********************/

  /* 8-bit arithmetics. */

  afl->stage_name = "arith 8/8";
  afl->stage_short = "arith8";
  afl->stage_cur = 0;
  afl->stage_max = 2 * len * ARITH_MAX;

  afl->stage_val_type = STAGE_VAL_LE;

  orig_hit_cnt = new_hit_cnt;

  for (i = 0; i < (u32)len; ++i) {

    u8 orig = out_buf[i];

    /* Let's consult the effector map... */

    if (!eff_map[EFF_APOS(i)]) {

      afl->stage_max -= 2 * ARITH_MAX;
      continue;

    }

    afl->stage_cur_byte = i;

    for (j = 1; j <= ARITH_MAX; ++j) {

      u8 r = orig ^ (orig + j);

      /* Do arithmetic operations only if the result couldn't be a product
         of a bitflip. */
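      /* Example: for orig = 0x34, j = 1 gives r = 0x34 ^ 0x35 = 0x01 - a
         single flipped bit that the bitflip stages already covered, so it
         is skipped; j = 12 gives r = 0x34 ^ 0x40 = 0x74, which no walking
         bitflip produces, so the exec happens. */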
3712 
3713       if (!could_be_bitflip(r)) {
3714 
3715         afl->stage_cur_val = j;
3716         out_buf[i] = orig + j;
3717 
3718 #ifdef INTROSPECTION
3719         snprintf(afl->mutation, sizeof(afl->mutation), "%s MOPT_ARITH8+-%u-%u",
3720                  afl->queue_cur->fname, i, j);
3721 #endif
3722         if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
3723         ++afl->stage_cur;
3724 
3725       } else {
3726 
3727         --afl->stage_max;
3728 
3729       }
3730 
3731       r = orig ^ (orig - j);
3732 
3733       if (!could_be_bitflip(r)) {
3734 
3735         afl->stage_cur_val = -j;
3736         out_buf[i] = orig - j;
3737 
3738 #ifdef INTROSPECTION
3739         snprintf(afl->mutation, sizeof(afl->mutation), "%s MOPT_ARITH8_-%u-%u",
3740                  afl->queue_cur->fname, i, j);
3741 #endif
3742         if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
3743         ++afl->stage_cur;
3744 
3745       } else {
3746 
3747         --afl->stage_max;
3748 
3749       }
3750 
3751       out_buf[i] = orig;
3752 
3753     }
3754 
3755   }                                                   /* for i = 0; i < len */
3756 
3757   new_hit_cnt = afl->queued_items + afl->saved_crashes;
3758 
3759   afl->stage_finds[STAGE_ARITH8] += new_hit_cnt - orig_hit_cnt;
3760   afl->stage_cycles[STAGE_ARITH8] += afl->stage_max;
3761 
3762   /* 16-bit arithmetics, both endians. */
3763 
3764   if (len < 2) { goto skip_arith; }
3765 
3766   afl->stage_name = "arith 16/8";
3767   afl->stage_short = "arith16";
3768   afl->stage_cur = 0;
3769   afl->stage_max = 4 * (len - 1) * ARITH_MAX;
3770 
3771   orig_hit_cnt = new_hit_cnt;
3772 
3773   for (i = 0; i < len - 1; ++i) {
3774 
3775     u16 orig = *(u16 *)(out_buf + i);
3776 
3777     /* Let's consult the effector map... */
3778 
3779     if (!eff_map[EFF_APOS(i)] && !eff_map[EFF_APOS(i + 1)]) {
3780 
3781       afl->stage_max -= 4 * ARITH_MAX;
3782       continue;
3783 
3784     }
3785 
3786     afl->stage_cur_byte = i;
3787 
3788     for (j = 1; j <= ARITH_MAX; ++j) {
3789 
3790       u16 r1 = orig ^ (orig + j), r2 = orig ^ (orig - j),
3791           r3 = orig ^ SWAP16(SWAP16(orig) + j),
3792           r4 = orig ^ SWAP16(SWAP16(orig) - j);
3793 
3794       /* Try little endian addition and subtraction first. Do it only
3795          if the operation would affect more than one byte (hence the
3796          & 0xff overflow checks) and if it couldn't be a product of
3797          a bitflip. */
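
      /* Example: for orig == 0x00ff and j == 1, (orig & 0xff) + j == 0x100,
         so the addition carries into the upper byte and changes two bytes;
         for orig == 0x0010 the same j only touches the low byte, which the
         arith 8/8 pass above has already covered, so it is skipped. */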
3798 
3799       afl->stage_val_type = STAGE_VAL_LE;
3800 
3801       if ((orig & 0xff) + j > 0xff && !could_be_bitflip(r1)) {
3802 
3803         afl->stage_cur_val = j;
3804         *(u16 *)(out_buf + i) = orig + j;
3805 
3806 #ifdef INTROSPECTION
3807         snprintf(afl->mutation, sizeof(afl->mutation), "%s MOPT_ARITH16+-%u-%u",
3808                  afl->queue_cur->fname, i, j);
3809 #endif
3810         if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
3811         ++afl->stage_cur;
3812 
3813       } else {
3814 
3815         --afl->stage_max;
3816 
3817       }
3818 
3819       if ((orig & 0xff) < j && !could_be_bitflip(r2)) {
3820 
3821         afl->stage_cur_val = -j;
3822         *(u16 *)(out_buf + i) = orig - j;
3823 
3824 #ifdef INTROSPECTION
3825         snprintf(afl->mutation, sizeof(afl->mutation), "%s MOPT_ARITH16_-%u-%u",
3826                  afl->queue_cur->fname, i, j);
3827 #endif
3828         if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
3829         ++afl->stage_cur;
3830 
3831       } else {
3832 
3833         --afl->stage_max;
3834 
3835       }
3836 
3837       /* Big endian comes next. Same deal. */
3838 
3839       afl->stage_val_type = STAGE_VAL_BE;
3840 
3841       if ((orig >> 8) + j > 0xff && !could_be_bitflip(r3)) {
3842 
3843         afl->stage_cur_val = j;
3844         *(u16 *)(out_buf + i) = SWAP16(SWAP16(orig) + j);
3845 
3846 #ifdef INTROSPECTION
3847         snprintf(afl->mutation, sizeof(afl->mutation),
3848                  "%s MOPT_ARITH16+BE-%u-%u", afl->queue_cur->fname, i, j);
3849 #endif
3850         if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
3851         ++afl->stage_cur;
3852 
3853       } else {
3854 
3855         --afl->stage_max;
3856 
3857       }
3858 
3859       if ((orig >> 8) < j && !could_be_bitflip(r4)) {
3860 
3861         afl->stage_cur_val = -j;
3862         *(u16 *)(out_buf + i) = SWAP16(SWAP16(orig) - j);
3863 
3864 #ifdef INTROSPECTION
3865         snprintf(afl->mutation, sizeof(afl->mutation),
3866                  "%s MOPT_ARITH16_BE+%u+%u", afl->queue_cur->fname, i, j);
3867 #endif
3868         if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
3869         ++afl->stage_cur;
3870 
3871       } else {
3872 
3873         --afl->stage_max;
3874 
3875       }
3876 
3877       *(u16 *)(out_buf + i) = orig;
3878 
3879     }
3880 
3881   }                                               /* for i = 0; i < len - 1 */
3882 
3883   new_hit_cnt = afl->queued_items + afl->saved_crashes;
3884 
3885   afl->stage_finds[STAGE_ARITH16] += new_hit_cnt - orig_hit_cnt;
3886   afl->stage_cycles[STAGE_ARITH16] += afl->stage_max;
3887 
3888   /* 32-bit arithmetic, both endians. */
3889 
3890   if (len < 4) { goto skip_arith; }
3891 
3892   afl->stage_name = "arith 32/8";
3893   afl->stage_short = "arith32";
3894   afl->stage_cur = 0;
3895   afl->stage_max = 4 * (len - 3) * ARITH_MAX;
3896 
3897   orig_hit_cnt = new_hit_cnt;
3898 
3899   for (i = 0; i < len - 3; ++i) {
3900 
3901     u32 orig = *(u32 *)(out_buf + i);
3902 
3903     /* Let's consult the effector map... */
3904 
3905     if (!eff_map[EFF_APOS(i)] && !eff_map[EFF_APOS(i + 1)] &&
3906         !eff_map[EFF_APOS(i + 2)] && !eff_map[EFF_APOS(i + 3)]) {
3907 
3908       afl->stage_max -= 4 * ARITH_MAX;
3909       continue;
3910 
3911     }
3912 
3913     afl->stage_cur_byte = i;
3914 
3915     for (j = 1; j <= ARITH_MAX; ++j) {
3916 
3917       u32 r1 = orig ^ (orig + j), r2 = orig ^ (orig - j),
3918           r3 = orig ^ SWAP32(SWAP32(orig) + j),
3919           r4 = orig ^ SWAP32(SWAP32(orig) - j);
3920 
3921       /* Little endian first. Same deal as with 16-bit: we only want to
3922          try it if the operation would affect more than two bytes. */
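
      /* Example: orig == 0x0000ffff with j == 1 carries into byte 2, since
         (orig & 0xffff) + j == 0x10000, so the dword add is attempted;
         orig == 0x00000100 with the same j stays within the low word and
         is left to the arith 16/8 pass above. */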
3923 
3924       afl->stage_val_type = STAGE_VAL_LE;
3925 
3926       if ((orig & 0xffff) + j > 0xffff && !could_be_bitflip(r1)) {
3927 
3928         afl->stage_cur_val = j;
3929         *(u32 *)(out_buf + i) = orig + j;
3930 
3931 #ifdef INTROSPECTION
3932         snprintf(afl->mutation, sizeof(afl->mutation), "%s MOPT_ARITH32+-%u-%u",
3933                  afl->queue_cur->fname, i, j);
3934 #endif
3935         if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
3936         ++afl->stage_cur;
3937 
3938       } else {
3939 
3940         --afl->stage_max;
3941 
3942       }
3943 
3944       if ((orig & 0xffff) < j && !could_be_bitflip(r2)) {
3945 
3946         afl->stage_cur_val = -j;
3947         *(u32 *)(out_buf + i) = orig - j;
3948 
3949 #ifdef INTROSPECTION
3950         snprintf(afl->mutation, sizeof(afl->mutation), "%s MOPT_ARITH32_-%u-%u",
3951                  afl->queue_cur->fname, i, j);
3952 #endif
3953         if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
3954         ++afl->stage_cur;
3955 
3956       } else {
3957 
3958         --afl->stage_max;
3959 
3960       }
3961 
3962       /* Big endian next. */
3963 
3964       afl->stage_val_type = STAGE_VAL_BE;
3965 
3966       if ((SWAP32(orig) & 0xffff) + j > 0xffff && !could_be_bitflip(r3)) {
3967 
3968         afl->stage_cur_val = j;
3969         *(u32 *)(out_buf + i) = SWAP32(SWAP32(orig) + j);
3970 
3971 #ifdef INTROSPECTION
3972         snprintf(afl->mutation, sizeof(afl->mutation),
3973                  "%s MOPT_ARITH32+BE-%u-%u", afl->queue_cur->fname, i, j);
3974 #endif
3975         if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
3976         ++afl->stage_cur;
3977 
3978       } else {
3979 
3980         --afl->stage_max;
3981 
3982       }
3983 
3984       if ((SWAP32(orig) & 0xffff) < j && !could_be_bitflip(r4)) {
3985 
3986         afl->stage_cur_val = -j;
3987         *(u32 *)(out_buf + i) = SWAP32(SWAP32(orig) - j);
3988 
3989 #ifdef INTROSPECTION
3990         snprintf(afl->mutation, sizeof(afl->mutation),
3991                  "%s MOPT_ARITH32_BE-%u-%u", afl->queue_cur->fname, i, j);
3992 #endif
3993         if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
3994         ++afl->stage_cur;
3995 
3996       } else {
3997 
3998         --afl->stage_max;
3999 
4000       }
4001 
4002       *(u32 *)(out_buf + i) = orig;
4003 
4004     }
4005 
4006   }                                               /* for i = 0; i < len - 3 */
4007 
4008   new_hit_cnt = afl->queued_items + afl->saved_crashes;
4009 
4010   afl->stage_finds[STAGE_ARITH32] += new_hit_cnt - orig_hit_cnt;
4011   afl->stage_cycles[STAGE_ARITH32] += afl->stage_max;
4012 
4013 skip_arith:
4014 
4015   /**********************
4016    * INTERESTING VALUES *
4017    **********************/
4018 
4019   afl->stage_name = "interest 8/8";
4020   afl->stage_short = "int8";
4021   afl->stage_cur = 0;
4022   afl->stage_max = len * sizeof(interesting_8);
4023 
4024   afl->stage_val_type = STAGE_VAL_LE;
4025 
4026   orig_hit_cnt = new_hit_cnt;
4027 
4028   /* Setting 8-bit integers. */
4029 
4030   for (i = 0; i < (u32)len; ++i) {
4031 
4032     u8 orig = out_buf[i];
4033 
4034     /* Let's consult the effector map... */
4035 
4036     if (!eff_map[EFF_APOS(i)]) {
4037 
4038       afl->stage_max -= sizeof(interesting_8);
4039       continue;
4040 
4041     }
4042 
4043     afl->stage_cur_byte = i;
4044 
4045     for (j = 0; j < sizeof(interesting_8); ++j) {
4046 
4047       /* Skip if the value could be a product of bitflips or arithmetic. */
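
      /* With orig == 0x00, for instance, the interesting value -1 reduces
         to a full byte flip and 16 to a single-bit flip, so both are
         skipped; 100 (0x64) is neither a contiguous bit pattern nor within
         ARITH_MAX of the original byte, so it is actually tried. */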
4048 
4049       if (could_be_bitflip(orig ^ (u8)interesting_8[j]) ||
4050           could_be_arith(orig, (u8)interesting_8[j], 1)) {
4051 
4052         --afl->stage_max;
4053         continue;
4054 
4055       }
4056 
4057       afl->stage_cur_val = interesting_8[j];
4058       out_buf[i] = interesting_8[j];
4059 
4060 #ifdef INTROSPECTION
4061       snprintf(afl->mutation, sizeof(afl->mutation),
4062                "%s MOPT_INTERESTING8-%u-%u", afl->queue_cur->fname, i, j);
4063 #endif
4064       if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
4065 
4066       out_buf[i] = orig;
4067       ++afl->stage_cur;
4068 
4069     }
4070 
4071   }                                                   /* for i = 0; i < len */
4072 
4073   new_hit_cnt = afl->queued_items + afl->saved_crashes;
4074 
4075   afl->stage_finds[STAGE_INTEREST8] += new_hit_cnt - orig_hit_cnt;
4076   afl->stage_cycles[STAGE_INTEREST8] += afl->stage_max;
4077 
4078   /* Setting 16-bit integers, both endians. */
4079 
4080   if (afl->no_arith || len < 2) { goto skip_interest; }
4081 
4082   afl->stage_name = "interest 16/8";
4083   afl->stage_short = "int16";
4084   afl->stage_cur = 0;
4085   afl->stage_max = 2 * (len - 1) * (sizeof(interesting_16) >> 1);
4086 
4087   orig_hit_cnt = new_hit_cnt;
4088 
4089   for (i = 0; i < len - 1; ++i) {
4090 
4091     u16 orig = *(u16 *)(out_buf + i);
4092 
4093     /* Let's consult the effector map... */
4094 
4095     if (!eff_map[EFF_APOS(i)] && !eff_map[EFF_APOS(i + 1)]) {
4096 
4097       afl->stage_max -= sizeof(interesting_16);
4098       continue;
4099 
4100     }
4101 
4102     afl->stage_cur_byte = i;
4103 
4104     for (j = 0; j < sizeof(interesting_16) / 2; ++j) {
4105 
4106       afl->stage_cur_val = interesting_16[j];
4107 
4108       /* Skip if this could be a product of a bitflip, arithmetic,
4109          or single-byte interesting value insertion. */
4110 
4111       if (!could_be_bitflip(orig ^ (u16)interesting_16[j]) &&
4112           !could_be_arith(orig, (u16)interesting_16[j], 2) &&
4113           !could_be_interest(orig, (u16)interesting_16[j], 2, 0)) {
4114 
4115         afl->stage_val_type = STAGE_VAL_LE;
4116 
4117         *(u16 *)(out_buf + i) = interesting_16[j];
4118 
4119 #ifdef INTROSPECTION
4120         snprintf(afl->mutation, sizeof(afl->mutation),
4121                  "%s MOPT_INTERESTING16-%u-%u", afl->queue_cur->fname, i, j);
4122 #endif
4123         if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
4124         ++afl->stage_cur;
4125 
4126       } else {
4127 
4128         --afl->stage_max;
4129 
4130       }
4131 
4132       if ((u16)interesting_16[j] != SWAP16(interesting_16[j]) &&
4133           !could_be_bitflip(orig ^ SWAP16(interesting_16[j])) &&
4134           !could_be_arith(orig, SWAP16(interesting_16[j]), 2) &&
4135           !could_be_interest(orig, SWAP16(interesting_16[j]), 2, 1)) {
4136 
4137         afl->stage_val_type = STAGE_VAL_BE;
4138 
4139 #ifdef INTROSPECTION
4140         snprintf(afl->mutation, sizeof(afl->mutation),
4141                  "%s MOPT_INTERESTING16BE-%u-%u", afl->queue_cur->fname, i, j);
4142 #endif
4143         *(u16 *)(out_buf + i) = SWAP16(interesting_16[j]);
4144         if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
4145         ++afl->stage_cur;
4146 
4147       } else {
4148 
4149         --afl->stage_max;
4150 
4151       }
4152 
4153     }
4154 
4155     *(u16 *)(out_buf + i) = orig;
4156 
4157   }                                               /* for i = 0; i < len - 1 */
4158 
4159   new_hit_cnt = afl->queued_items + afl->saved_crashes;
4160 
4161   afl->stage_finds[STAGE_INTEREST16] += new_hit_cnt - orig_hit_cnt;
4162   afl->stage_cycles[STAGE_INTEREST16] += afl->stage_max;
4163 
4164   if (len < 4) { goto skip_interest; }
4165 
4166   /* Setting 32-bit integers, both endians. */
4167 
4168   afl->stage_name = "interest 32/8";
4169   afl->stage_short = "int32";
4170   afl->stage_cur = 0;
4171   afl->stage_max = 2 * (len - 3) * (sizeof(interesting_32) >> 2);
4172 
4173   orig_hit_cnt = new_hit_cnt;
4174 
4175   for (i = 0; i < len - 3; ++i) {
4176 
4177     u32 orig = *(u32 *)(out_buf + i);
4178 
4179     /* Let's consult the effector map... */
4180 
4181     if (!eff_map[EFF_APOS(i)] && !eff_map[EFF_APOS(i + 1)] &&
4182         !eff_map[EFF_APOS(i + 2)] && !eff_map[EFF_APOS(i + 3)]) {
4183 
4184       afl->stage_max -= sizeof(interesting_32) >> 1;
4185       continue;
4186 
4187     }
4188 
4189     afl->stage_cur_byte = i;
4190 
4191     for (j = 0; j < sizeof(interesting_32) / 4; ++j) {
4192 
4193       afl->stage_cur_val = interesting_32[j];
4194 
4195       /* Skip if this could be a product of a bitflip, arithmetic,
4196          or word interesting value insertion. */
4197 
4198       if (!could_be_bitflip(orig ^ (u32)interesting_32[j]) &&
4199           !could_be_arith(orig, interesting_32[j], 4) &&
4200           !could_be_interest(orig, interesting_32[j], 4, 0)) {
4201 
4202         afl->stage_val_type = STAGE_VAL_LE;
4203 
4204         *(u32 *)(out_buf + i) = interesting_32[j];
4205 
4206 #ifdef INTROSPECTION
4207         snprintf(afl->mutation, sizeof(afl->mutation),
4208                  "%s MOPT_INTERESTING32-%u-%u", afl->queue_cur->fname, i, j);
4209 #endif
4210         if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
4211         ++afl->stage_cur;
4212 
4213       } else {
4214 
4215         --afl->stage_max;
4216 
4217       }
4218 
4219       if ((u32)interesting_32[j] != SWAP32(interesting_32[j]) &&
4220           !could_be_bitflip(orig ^ SWAP32(interesting_32[j])) &&
4221           !could_be_arith(orig, SWAP32(interesting_32[j]), 4) &&
4222           !could_be_interest(orig, SWAP32(interesting_32[j]), 4, 1)) {
4223 
4224         afl->stage_val_type = STAGE_VAL_BE;
4225 
4226 #ifdef INTROSPECTION
4227         snprintf(afl->mutation, sizeof(afl->mutation),
4228                  "%s MOPT_INTERESTING32BE-%u-%u", afl->queue_cur->fname, i, j);
4229 #endif
4230         *(u32 *)(out_buf + i) = SWAP32(interesting_32[j]);
4231         if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
4232         ++afl->stage_cur;
4233 
4234       } else {
4235 
4236         --afl->stage_max;
4237 
4238       }
4239 
4240     }
4241 
4242     *(u32 *)(out_buf + i) = orig;
4243 
4244   }                                               /* for i = 0; i < len - 3 */
4245 
4246   new_hit_cnt = afl->queued_items + afl->saved_crashes;
4247 
4248   afl->stage_finds[STAGE_INTEREST32] += new_hit_cnt - orig_hit_cnt;
4249   afl->stage_cycles[STAGE_INTEREST32] += afl->stage_max;
4250 
4251 skip_interest:
4252 
4253   /********************
4254    * DICTIONARY STUFF *
4255    ********************/
4256 
4257   if (!afl->extras_cnt) { goto skip_user_extras; }
4258 
4259   /* Overwrite with user-supplied extras. */
4260 
4261   afl->stage_name = "user extras (over)";
4262   afl->stage_short = "ext_UO";
4263   afl->stage_cur = 0;
4264   afl->stage_max = afl->extras_cnt * len;
4265 
4266   afl->stage_val_type = STAGE_VAL_NONE;
4267 
4268   orig_hit_cnt = new_hit_cnt;
4269 
4270   for (i = 0; i < (u32)len; ++i) {
4271 
4272     u32 last_len = 0;
4273 
4274     afl->stage_cur_byte = i;
4275 
4276     /* Extras are sorted by size, from smallest to largest. This means
4277        that we don't have to worry about restoring the buffer in
4278        between writes at a particular offset determined by the outer
4279        loop. */
4280 
4281     for (j = 0; j < afl->extras_cnt; ++j) {
4282 
4283       /* Skip extras probabilistically if afl->extras_cnt > afl->max_det_extras.
4284          Also skip them if there's no room to insert the payload, if the token
4285          is redundant, or if its entire span has no bytes set in the effector
4286          map. */
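
      /* The probabilistic clause keeps this stage bounded: with, say, 600
         extras and max_det_extras == 200, rand_below(afl, 600) >= 200 skips
         a token two times out of three, so on average only about 200 extras
         are tried per position regardless of dictionary size. */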
4287 
4288       if ((afl->extras_cnt > afl->max_det_extras &&
4289            rand_below(afl, afl->extras_cnt) >= afl->max_det_extras) ||
4290           afl->extras[j].len > len - i ||
4291           !memcmp(afl->extras[j].data, out_buf + i, afl->extras[j].len) ||
4292           !memchr(eff_map + EFF_APOS(i), 1,
4293                   EFF_SPAN_ALEN(i, afl->extras[j].len))) {
4294 
4295         --afl->stage_max;
4296         continue;
4297 
4298       }
4299 
4300       last_len = afl->extras[j].len;
4301       memcpy(out_buf + i, afl->extras[j].data, last_len);
4302 
4303 #ifdef INTROSPECTION
4304       snprintf(afl->mutation, sizeof(afl->mutation),
4305                "%s MOPT_EXTRAS_overwrite-%u-%u", afl->queue_cur->fname, i, j);
4306 #endif
4307 
4308       if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
4309 
4310       ++afl->stage_cur;
4311 
4312     }
4313 
4314     /* Restore all the clobbered memory. */
4315     memcpy(out_buf + i, in_buf + i, last_len);
4316 
4317   }                                                   /* for i = 0; i < len */
4318 
4319   new_hit_cnt = afl->queued_items + afl->saved_crashes;
4320 
4321   afl->stage_finds[STAGE_EXTRAS_UO] += new_hit_cnt - orig_hit_cnt;
4322   afl->stage_cycles[STAGE_EXTRAS_UO] += afl->stage_max;
4323 
4324   /* Insertion of user-supplied extras. */
4325 
4326   afl->stage_name = "user extras (insert)";
4327   afl->stage_short = "ext_UI";
4328   afl->stage_cur = 0;
4329   afl->stage_max = afl->extras_cnt * (len + 1);
4330 
4331   orig_hit_cnt = new_hit_cnt;
4332 
4333   ex_tmp = afl_realloc(AFL_BUF_PARAM(ex), len + MAX_DICT_FILE);
4334   if (unlikely(!ex_tmp)) { PFATAL("alloc"); }
4335 
4336   for (i = 0; i <= (u32)len; ++i) {
4337 
4338     afl->stage_cur_byte = i;
4339 
4340     for (j = 0; j < afl->extras_cnt; ++j) {
4341 
4342       if (len + afl->extras[j].len > MAX_FILE) {
4343 
4344         --afl->stage_max;
4345         continue;
4346 
4347       }
4348 
4349       /* Insert token */
4350       memcpy(ex_tmp + i, afl->extras[j].data, afl->extras[j].len);
4351 
4352       /* Copy tail */
4353       memcpy(ex_tmp + i + afl->extras[j].len, out_buf + i, len - i);
4354 
4355 #ifdef INTROSPECTION
4356       snprintf(afl->mutation, sizeof(afl->mutation),
4357                "%s MOPT_EXTRAS_insert-%u-%u", afl->queue_cur->fname, i, j);
4358 #endif
4359 
4360       if (common_fuzz_stuff(afl, ex_tmp, len + afl->extras[j].len)) {
4361 
4362         goto abandon_entry;
4363 
4364       }
4365 
4366       ++afl->stage_cur;
4367 
4368     }
4369 
4370     /* Copy head */
4371     ex_tmp[i] = out_buf[i];
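
    /* Incremental-head trick: each pass of the outer loop copies just one
       byte, so ex_tmp[0 .. i-1] already mirrors out_buf by the time offset
       i is reached, and after the two memcpys above the buffer reads
       ex_tmp = [ out_buf[0..i-1] | extras[j] | out_buf[i..len-1] ]. */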
4372 
4373   }                                                  /* for i = 0; i <= len */
4374 
4375   new_hit_cnt = afl->queued_items + afl->saved_crashes;
4376 
4377   afl->stage_finds[STAGE_EXTRAS_UI] += new_hit_cnt - orig_hit_cnt;
4378   afl->stage_cycles[STAGE_EXTRAS_UI] += afl->stage_max;
4379 
4380 skip_user_extras:
4381 
4382   if (!afl->a_extras_cnt) { goto skip_extras; }
4383 
4384   afl->stage_name = "auto extras (over)";
4385   afl->stage_short = "ext_AO";
4386   afl->stage_cur = 0;
4387   afl->stage_max = MIN(afl->a_extras_cnt, (u32)USE_AUTO_EXTRAS) * len;
4388 
4389   afl->stage_val_type = STAGE_VAL_NONE;
4390 
4391   orig_hit_cnt = new_hit_cnt;
4392 
4393   for (i = 0; i < (u32)len; ++i) {
4394 
4395     u32 last_len = 0;
4396 
4397     afl->stage_cur_byte = i;
4398 
4399     u32 n_auto_extras = MIN(afl->a_extras_cnt, (u32)USE_AUTO_EXTRAS);
4400     for (j = 0; j < n_auto_extras; ++j) {
4401 
4402       /* See the comment in the earlier code; extras are sorted by size. */
4403 
4404       if ((afl->a_extras[j].len) > (len - i) ||
4405           !memcmp(afl->a_extras[j].data, out_buf + i, afl->a_extras[j].len) ||
4406           !memchr(eff_map + EFF_APOS(i), 1,
4407                   EFF_SPAN_ALEN(i, afl->a_extras[j].len))) {
4408 
4409         --afl->stage_max;
4410         continue;
4411 
4412       }
4413 
4414       last_len = afl->a_extras[j].len;
4415       memcpy(out_buf + i, afl->a_extras[j].data, last_len);
4416 
4417 #ifdef INTROSPECTION
4418       snprintf(afl->mutation, sizeof(afl->mutation),
4419                "%s MOPT_AUTO_EXTRAS_overwrite-%u-%u", afl->queue_cur->fname, i,
4420                j);
4421 #endif
4422 
4423       if (common_fuzz_stuff(afl, out_buf, len)) { goto abandon_entry; }
4424 
4425       ++afl->stage_cur;
4426 
4427     }
4428 
4429     /* Restore all the clobbered memory. */
4430     memcpy(out_buf + i, in_buf + i, last_len);
4431 
4432   }                                                   /* for i = 0; i < len */
4433 
4434   new_hit_cnt = afl->queued_items + afl->saved_crashes;
4435 
4436   afl->stage_finds[STAGE_EXTRAS_AO] += new_hit_cnt - orig_hit_cnt;
4437   afl->stage_cycles[STAGE_EXTRAS_AO] += afl->stage_max;
4438 
4439   /* Insertion of auto extras. */
4440 
4441   afl->stage_name = "auto extras (insert)";
4442   afl->stage_short = "ext_AI";
4443   afl->stage_cur = 0;
4444   afl->stage_max = afl->a_extras_cnt * (len + 1);
4445 
4446   orig_hit_cnt = new_hit_cnt;
4447 
4448   ex_tmp = afl_realloc(AFL_BUF_PARAM(ex), len + MAX_DICT_FILE);
4449   if (unlikely(!ex_tmp)) { PFATAL("alloc"); }
4450 
4451   for (i = 0; i <= (u32)len; ++i) {
4452 
4453     afl->stage_cur_byte = i;
4454 
4455     for (j = 0; j < afl->a_extras_cnt; ++j) {
4456 
4457       if (len + afl->a_extras[j].len > MAX_FILE) {
4458 
4459         --afl->stage_max;
4460         continue;
4461 
4462       }
4463 
4464       /* Insert token */
4465       memcpy(ex_tmp + i, afl->a_extras[j].data, afl->a_extras[j].len);
4466 
4467       /* Copy tail */
4468       memcpy(ex_tmp + i + afl->a_extras[j].len, out_buf + i, len - i);
4469 
4470 #ifdef INTROSPECTION
4471       snprintf(afl->mutation, sizeof(afl->mutation),
4472                "%s MOPT_AUTO_EXTRAS_insert-%u-%u", afl->queue_cur->fname, i, j);
4473 #endif
4474 
4475       if (common_fuzz_stuff(afl, ex_tmp, len + afl->a_extras[j].len)) {
4476 
4477         goto abandon_entry;
4478 
4479       }
4480 
4481       ++afl->stage_cur;
4482 
4483     }
4484 
4485     /* Copy head */
4486     ex_tmp[i] = out_buf[i];
4487 
4488   }                                                  /* for i = 0; i <= len */
4489 
4490   new_hit_cnt = afl->queued_items + afl->saved_crashes;
4491 
4492   afl->stage_finds[STAGE_EXTRAS_AI] += new_hit_cnt - orig_hit_cnt;
4493   afl->stage_cycles[STAGE_EXTRAS_AI] += afl->stage_max;
4494 
4495 skip_extras:
4496 
4497   /* If we made it here without jumping to havoc_stage or abandon_entry,
4498      we're properly done with deterministic steps and can mark it as such
4499      in the .state/ directory. */
4500 
4501   if (!afl->queue_cur->passed_det) { mark_as_det_done(afl, afl->queue_cur); }
4502 
4503   /****************
4504    * RANDOM HAVOC *
4505    ****************/
4506 
4507 havoc_stage:
4508 pacemaker_fuzzing:
4509 
4510   afl->stage_cur_byte = -1;
4511 
4512   /* The havoc stage mutation code is also invoked when splicing files; if the
4513      splice_cycle variable is set, generate different descriptions and such. */
4514 
4515   if (!splice_cycle) {
4516 
4517     afl->stage_name = MOpt_globals.havoc_stagename;
4518     afl->stage_short = MOpt_globals.havoc_stagenameshort;
4519     afl->stage_max = (doing_det ? HAVOC_CYCLES_INIT : HAVOC_CYCLES) *
4520                      perf_score / afl->havoc_div / 100;
4521 
4522   } else {
4523 
4524     perf_score = orig_perf;
4525 
4526     snprintf(afl->stage_name_buf, STAGE_BUF_SIZE,
4527              MOpt_globals.splice_stageformat, splice_cycle);
4528     afl->stage_name = afl->stage_name_buf;
4529     afl->stage_short = MOpt_globals.splice_stagenameshort;
4530     afl->stage_max = SPLICE_HAVOC * perf_score / afl->havoc_div / 100;
4531 
4532   }
4533 
4534   s32 temp_len_puppet;
4535 
4536   // for (; afl->swarm_now < swarm_num; ++afl->swarm_now)
4537   {
4538 
4539     if (afl->key_puppet == 1) {
4540 
4541       if (unlikely(afl->orig_hit_cnt_puppet == 0)) {
4542 
4543         afl->orig_hit_cnt_puppet = afl->queued_items + afl->saved_crashes;
4544         afl->last_limit_time_start = get_cur_time();
4545         afl->SPLICE_CYCLES_puppet =
4546             (rand_below(
4547                  afl, SPLICE_CYCLES_puppet_up - SPLICE_CYCLES_puppet_low + 1) +
4548              SPLICE_CYCLES_puppet_low);
4549 
4550       }
4551 
4552     }                                            /* if afl->key_puppet == 1 */
4553 
4554     {
4555 
4556 #ifndef IGNORE_FINDS
4557     havoc_stage_puppet:
4558 #endif
4559 
4560       afl->stage_cur_byte = -1;
4561 
4562       /* The havoc stage mutation code is also invoked when splicing files; if
4563          the splice_cycle variable is set, generate different descriptions and
4564          such. */
4565 
4566       if (!splice_cycle) {
4567 
4568         afl->stage_name = MOpt_globals.havoc_stagename;
4569         afl->stage_short = MOpt_globals.havoc_stagenameshort;
4570         afl->stage_max = (doing_det ? HAVOC_CYCLES_INIT : HAVOC_CYCLES) *
4571                          perf_score / afl->havoc_div / 100;
4572 
4573       } else {
4574 
4575         perf_score = orig_perf;
4576         snprintf(afl->stage_name_buf, STAGE_BUF_SIZE,
4577                  MOpt_globals.splice_stageformat, splice_cycle);
4578         afl->stage_name = afl->stage_name_buf;
4579         afl->stage_short = MOpt_globals.splice_stagenameshort;
4580         afl->stage_max = SPLICE_HAVOC * perf_score / afl->havoc_div / 100;
4581 
4582       }
4583 
4584       if (afl->stage_max < HAVOC_MIN) { afl->stage_max = HAVOC_MIN; }
4585 
4586       temp_len = len;
4587 
4588       orig_hit_cnt = afl->queued_items + afl->saved_crashes;
4589 
4590       havoc_queued = afl->queued_items;
4591 
4592       u32 r_max, r;
4593 
4594       r_max = 16 + ((afl->extras_cnt + afl->a_extras_cnt) ? 2 : 0);
4595 
4596       if (unlikely(afl->expand_havoc && afl->ready_for_splicing_count > 1)) {
4597 
4598         /* Add expensive havoc cases here; they are activated after a full
4599            queue cycle without any new finds. */
4600 
4601         ++r_max;
4602 
4603       }
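
      /* r_max is the number of mutation slots select_algorithm() may pick
         from: 0..15 are the always-on havoc operators, the two conditional
         slots enable dictionary overwrite/insert when extras exist, and the
         expand_havoc slot falls through to the in-buffer splice handled by
         the default: branch of the switch below. */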
4604 
4605       for (afl->stage_cur = 0; afl->stage_cur < afl->stage_max;
4606            ++afl->stage_cur) {
4607 
4608         u32 use_stacking = 1 << (1 + rand_below(afl, afl->havoc_stack_pow2));
4609 
4610         afl->stage_cur_val = use_stacking;
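
        /* use_stacking is always a power of two; with the default
           HAVOC_STACK_POW2 of 7, rand_below() yields 0..6 and the shift
           produces a stacking depth between 2 and 128 mutations per run. */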
4611 
4612         for (i = 0; i < operator_num; ++i) {
4613 
4614           MOpt_globals.cycles_v3[i] = MOpt_globals.cycles_v2[i];
4615 
4616         }
4617 
4618 #ifdef INTROSPECTION
4619         snprintf(afl->mutation, sizeof(afl->mutation), "%s MOPT_HAVOC-%u",
4620                  afl->queue_cur->fname, use_stacking);
4621 #endif
4622 
4623         for (i = 0; i < use_stacking; ++i) {
4624 
4625           switch (r = (select_algorithm(afl, r_max))) {
4626 
4627             case 0:
4628               /* Flip a single bit somewhere. Spooky! */
4629               FLIP_BIT(out_buf, rand_below(afl, temp_len << 3));
4630               MOpt_globals.cycles_v2[STAGE_FLIP1]++;
4631 #ifdef INTROSPECTION
4632               snprintf(afl->m_tmp, sizeof(afl->m_tmp), " FLIP_BIT1");
4633               strcat(afl->mutation, afl->m_tmp);
4634 #endif
4635               break;
4636 
4637             case 1:
4638               if (temp_len < 2) { break; }
4639               temp_len_puppet = rand_below(afl, (temp_len << 3) - 1);
4640               FLIP_BIT(out_buf, temp_len_puppet);
4641               FLIP_BIT(out_buf, temp_len_puppet + 1);
4642               MOpt_globals.cycles_v2[STAGE_FLIP2]++;
4643 #ifdef INTROSPECTION
4644               snprintf(afl->m_tmp, sizeof(afl->m_tmp), " FLIP_BIT2");
4645               strcat(afl->mutation, afl->m_tmp);
4646 #endif
4647               break;
4648 
4649             case 2:
4650               if (temp_len < 2) { break; }
4651               temp_len_puppet = rand_below(afl, (temp_len << 3) - 3);
4652               FLIP_BIT(out_buf, temp_len_puppet);
4653               FLIP_BIT(out_buf, temp_len_puppet + 1);
4654               FLIP_BIT(out_buf, temp_len_puppet + 2);
4655               FLIP_BIT(out_buf, temp_len_puppet + 3);
4656               MOpt_globals.cycles_v2[STAGE_FLIP4]++;
4657 #ifdef INTROSPECTION
4658               snprintf(afl->m_tmp, sizeof(afl->m_tmp), " FLIP_BIT4");
4659               strcat(afl->mutation, afl->m_tmp);
4660 #endif
4661               break;
4662 
4663             case 3:
4664               if (temp_len < 4) { break; }
4665               out_buf[rand_below(afl, temp_len)] ^= 0xFF;
4666               MOpt_globals.cycles_v2[STAGE_FLIP8]++;
4667 #ifdef INTROSPECTION
4668               snprintf(afl->m_tmp, sizeof(afl->m_tmp), " FLIP_BIT8");
4669               strcat(afl->mutation, afl->m_tmp);
4670 #endif
4671               break;
4672 
4673             case 4:
4674               if (temp_len < 8) { break; }
4675               *(u16 *)(out_buf + rand_below(afl, temp_len - 1)) ^= 0xFFFF;
4676               MOpt_globals.cycles_v2[STAGE_FLIP16]++;
4677 #ifdef INTROSPECTION
4678               snprintf(afl->m_tmp, sizeof(afl->m_tmp), " FLIP_BIT16");
4679               strcat(afl->mutation, afl->m_tmp);
4680 #endif
4681               break;
4682 
4683             case 5:
4684               if (temp_len < 8) { break; }
4685               *(u32 *)(out_buf + rand_below(afl, temp_len - 3)) ^= 0xFFFFFFFF;
4686               MOpt_globals.cycles_v2[STAGE_FLIP32]++;
4687 #ifdef INTROSPECTION
4688               snprintf(afl->m_tmp, sizeof(afl->m_tmp), " FLIP_BIT32");
4689               strcat(afl->mutation, afl->m_tmp);
4690 #endif
4691               break;
4692 
4693             case 6:
4694               out_buf[rand_below(afl, temp_len)] -=
4695                   1 + rand_below(afl, ARITH_MAX);
4696               out_buf[rand_below(afl, temp_len)] +=
4697                   1 + rand_below(afl, ARITH_MAX);
4698               MOpt_globals.cycles_v2[STAGE_ARITH8]++;
4699 #ifdef INTROSPECTION
4700               snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH8");
4701               strcat(afl->mutation, afl->m_tmp);
4702 #endif
4703               break;
4704 
4705             case 7:
4706               /* Randomly subtract from word, random endian. */
4707               if (temp_len < 8) { break; }
4708               if (rand_below(afl, 2)) {
4709 
4710                 u32 pos = rand_below(afl, temp_len - 1);
4711                 *(u16 *)(out_buf + pos) -= 1 + rand_below(afl, ARITH_MAX);
4712 #ifdef INTROSPECTION
4713                 snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH16-%u", pos);
4714                 strcat(afl->mutation, afl->m_tmp);
4715 #endif
4716 
4717               } else {
4718 
4719                 u32 pos = rand_below(afl, temp_len - 1);
4720                 u16 num = 1 + rand_below(afl, ARITH_MAX);
4721 #ifdef INTROSPECTION
4722                 snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH16BE-%u-%u",
4723                          pos, num);
4724                 strcat(afl->mutation, afl->m_tmp);
4725 #endif
4726                 *(u16 *)(out_buf + pos) =
4727                     SWAP16(SWAP16(*(u16 *)(out_buf + pos)) - num);
4728 
4729               }
4730 
4731               /* Randomly add to word, random endian. */
4732               if (rand_below(afl, 2)) {
4733 
4734                 u32 pos = rand_below(afl, temp_len - 1);
4735 #ifdef INTROSPECTION
4736                 snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH16+-%u", pos);
4737                 strcat(afl->mutation, afl->m_tmp);
4738 #endif
4739                 *(u16 *)(out_buf + pos) += 1 + rand_below(afl, ARITH_MAX);
4740 
4741               } else {
4742 
4743                 u32 pos = rand_below(afl, temp_len - 1);
4744                 u16 num = 1 + rand_below(afl, ARITH_MAX);
4745 #ifdef INTROSPECTION
4746                 snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH16BE+-%u-%u",
4747                          pos, num);
4748                 strcat(afl->mutation, afl->m_tmp);
4749 #endif
4750                 *(u16 *)(out_buf + pos) =
4751                     SWAP16(SWAP16(*(u16 *)(out_buf + pos)) + num);
4752 
4753               }
4754 
4755               MOpt_globals.cycles_v2[STAGE_ARITH16]++;
4756               break;
4757 
4758             case 8:
4759               /* Randomly subtract from dword, random endian. */
4760               if (temp_len < 8) { break; }
4761               if (rand_below(afl, 2)) {
4762 
4763                 u32 pos = rand_below(afl, temp_len - 3);
4764 #ifdef INTROSPECTION
4765                 snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH32_-%u", pos);
4766                 strcat(afl->mutation, afl->m_tmp);
4767 #endif
4768                 *(u32 *)(out_buf + pos) -= 1 + rand_below(afl, ARITH_MAX);
4769 
4770               } else {
4771 
4772                 u32 pos = rand_below(afl, temp_len - 3);
4773                 u32 num = 1 + rand_below(afl, ARITH_MAX);
4774 #ifdef INTROSPECTION
4775                 snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH32BE_-%u-%u",
4776                          pos, num);
4777                 strcat(afl->mutation, afl->m_tmp);
4778 #endif
4779                 *(u32 *)(out_buf + pos) =
4780                     SWAP32(SWAP32(*(u32 *)(out_buf + pos)) - num);
4781 
4782               }
4783 
4784               /* Randomly add to dword, random endian. */
4785               // if (temp_len < 4) break;
4786               if (rand_below(afl, 2)) {
4787 
4788                 u32 pos = rand_below(afl, temp_len - 3);
4789 #ifdef INTROSPECTION
4790                 snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH32+-%u", pos);
4791                 strcat(afl->mutation, afl->m_tmp);
4792 #endif
4793                 *(u32 *)(out_buf + pos) += 1 + rand_below(afl, ARITH_MAX);
4794 
4795               } else {
4796 
4797                 u32 pos = rand_below(afl, temp_len - 3);
4798                 u32 num = 1 + rand_below(afl, ARITH_MAX);
4799 #ifdef INTROSPECTION
4800                 snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH32BE+-%u-%u",
4801                          pos, num);
4802                 strcat(afl->mutation, afl->m_tmp);
4803 #endif
4804                 *(u32 *)(out_buf + pos) =
4805                     SWAP32(SWAP32(*(u32 *)(out_buf + pos)) + num);
4806 
4807               }
4808 
4809               MOpt_globals.cycles_v2[STAGE_ARITH32]++;
4810               break;
4811 
4812             case 9:
4813               /* Set byte to interesting value. */
4814               if (temp_len < 4) { break; }
4815               out_buf[rand_below(afl, temp_len)] =
4816                   interesting_8[rand_below(afl, sizeof(interesting_8))];
4817               MOpt_globals.cycles_v2[STAGE_INTEREST8]++;
4818 #ifdef INTROSPECTION
4819               snprintf(afl->m_tmp, sizeof(afl->m_tmp), " INTERESTING8");
4820               strcat(afl->mutation, afl->m_tmp);
4821 #endif
4822               break;
4823 
4824             case 10:
4825               /* Set word to interesting value, randomly choosing endian. */
4826               if (temp_len < 8) { break; }
4827               if (rand_below(afl, 2)) {
4828 
4829 #ifdef INTROSPECTION
4830                 snprintf(afl->m_tmp, sizeof(afl->m_tmp), " INTERESTING16");
4831                 strcat(afl->mutation, afl->m_tmp);
4832 #endif
4833                 *(u16 *)(out_buf + rand_below(afl, temp_len - 1)) =
4834                     interesting_16[rand_below(afl,
4835                                               sizeof(interesting_16) >> 1)];
4836 
4837               } else {
4838 
4839 #ifdef INTROSPECTION
4840                 snprintf(afl->m_tmp, sizeof(afl->m_tmp), " INTERESTING16BE");
4841                 strcat(afl->mutation, afl->m_tmp);
4842 #endif
4843                 *(u16 *)(out_buf + rand_below(afl, temp_len - 1)) =
4844                     SWAP16(interesting_16[rand_below(
4845                         afl, sizeof(interesting_16) >> 1)]);
4846 
4847               }
4848 
4849               MOpt_globals.cycles_v2[STAGE_INTEREST16]++;
4850               break;
4851 
4852             case 11:
4853               /* Set dword to interesting value, randomly choosing endian. */
4854 
4855               if (temp_len < 8) { break; }
4856 
4857               if (rand_below(afl, 2)) {
4858 
4859 #ifdef INTROSPECTION
4860                 snprintf(afl->m_tmp, sizeof(afl->m_tmp), " INTERESTING32");
4861                 strcat(afl->mutation, afl->m_tmp);
4862 #endif
4863                 *(u32 *)(out_buf + rand_below(afl, temp_len - 3)) =
4864                     interesting_32[rand_below(afl,
4865                                               sizeof(interesting_32) >> 2)];
4866 
4867               } else {
4868 
4869 #ifdef INTROSPECTION
4870                 snprintf(afl->m_tmp, sizeof(afl->m_tmp), " INTERESTING32BE");
4871                 strcat(afl->mutation, afl->m_tmp);
4872 #endif
4873                 *(u32 *)(out_buf + rand_below(afl, temp_len - 3)) =
4874                     SWAP32(interesting_32[rand_below(
4875                         afl, sizeof(interesting_32) >> 2)]);
4876 
4877               }
4878 
4879               MOpt_globals.cycles_v2[STAGE_INTEREST32]++;
4880               break;
4881 
4882             case 12:
4883 
4884               /* Just set a random byte to a random value. Because,
4885                  why not. We use XOR with 1-255 to eliminate the
4886                  possibility of a no-op. */
4887 
4888               out_buf[rand_below(afl, temp_len)] ^= 1 + rand_below(afl, 255);
4889               MOpt_globals.cycles_v2[STAGE_RANDOMBYTE]++;
4890 #ifdef INTROSPECTION
4891               snprintf(afl->m_tmp, sizeof(afl->m_tmp), " RAND8");
4892               strcat(afl->mutation, afl->m_tmp);
4893 #endif
4894               break;
4895 
4896             case 13: {
4897 
4898               /* Delete bytes. We're making this a bit more likely
4899                  than insertion (the next option) in hopes of keeping
4900                  files reasonably small. */
4901 
4902               u32 del_from, del_len;
4903 
4904               if (temp_len < 2) { break; }
4905 
4906               /* Don't delete too much. */
4907 
4908               del_len = choose_block_len(afl, temp_len - 1);
4909 
4910               del_from = rand_below(afl, temp_len - del_len + 1);
4911 
4912 #ifdef INTROSPECTION
4913               snprintf(afl->m_tmp, sizeof(afl->m_tmp), " DEL-%u%u", del_from,
4914                        del_len);
4915               strcat(afl->mutation, afl->m_tmp);
4916 #endif
4917               memmove(out_buf + del_from, out_buf + del_from + del_len,
4918                       temp_len - del_from - del_len);
4919 
4920               temp_len -= del_len;
4921               MOpt_globals.cycles_v2[STAGE_DELETEBYTE]++;
4922               break;
4923 
4924             }
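
            /* Worked example for the deletion above: with temp_len == 10,
               del_from == 3 and del_len == 4, the memmove copies bytes 7..9
               down to offsets 3..5 and temp_len drops to 6. */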
4925 
4926             case 14:
4927 
4928               if (temp_len + HAVOC_BLK_XL < MAX_FILE) {
4929 
4930                 /* Clone bytes (75%) or insert a block of constant
4931                    bytes (25%). */
4932 
4933                 u8  actually_clone = rand_below(afl, 4);
4934                 u32 clone_from, clone_to, clone_len;
4935                 u8 *new_buf;
4936 
4937                 if (likely(actually_clone)) {
4938 
4939                   clone_len = choose_block_len(afl, temp_len);
4940                   clone_from = rand_below(afl, temp_len - clone_len + 1);
4941 
4942                 } else {
4943 
4944                   clone_len = choose_block_len(afl, HAVOC_BLK_XL);
4945                   clone_from = 0;
4946 
4947                 }
4948 
4949                 clone_to = rand_below(afl, temp_len);
4950 
4951 #ifdef INTROSPECTION
4952                 snprintf(afl->m_tmp, sizeof(afl->m_tmp), " CLONE_%s-%u-%u-%u",
4953                          actually_clone ? "clone" : "insert", clone_from,
4954                          clone_to, clone_len);
4955                 strcat(afl->mutation, afl->m_tmp);
4956 #endif
4957                 new_buf = afl_realloc(AFL_BUF_PARAM(out_scratch),
4958                                       temp_len + clone_len);
4959                 if (unlikely(!new_buf)) { PFATAL("alloc"); }
4960 
4961                 /* Head */
4962 
4963                 memcpy(new_buf, out_buf, clone_to);
4964 
4965                 /* Inserted part */
4966 
4967                 if (actually_clone) {
4968 
4969                   memcpy(new_buf + clone_to, out_buf + clone_from, clone_len);
4970 
4971                 } else {
4972 
4973                   memset(new_buf + clone_to,
4974                          rand_below(afl, 2)
4975                              ? rand_below(afl, 256)
4976                              : out_buf[rand_below(afl, temp_len)],
4977                          clone_len);
4978 
4979                 }
4980 
4981                 /* Tail */
4982                 memcpy(new_buf + clone_to + clone_len, out_buf + clone_to,
4983                        temp_len - clone_to);
4984 
4985                 out_buf = new_buf;
4986                 afl_swap_bufs(AFL_BUF_PARAM(out), AFL_BUF_PARAM(out_scratch));
4987                 temp_len += clone_len;
4988                 MOpt_globals.cycles_v2[STAGE_Clone75]++;
4989 
4990               }
4991 
4992               break;
4993 
4994             case 15: {
4995 
4996               /* Overwrite bytes with a randomly selected chunk (75%) or fixed
4997                  bytes (25%). */
4998 
4999               u32 copy_from, copy_to, copy_len;
5000 
5001               if (temp_len < 2) { break; }
5002 
5003               copy_len = choose_block_len(afl, temp_len - 1);
5004 
5005               copy_from = rand_below(afl, temp_len - copy_len + 1);
5006               copy_to = rand_below(afl, temp_len - copy_len + 1);
5007 
5008               if (likely(rand_below(afl, 4))) {
5009 
5010                 if (likely(copy_from != copy_to)) {
5011 
5012 #ifdef INTROSPECTION
5013                   snprintf(afl->m_tmp, sizeof(afl->m_tmp),
5014                            " OVERWRITE_COPY-%u-%u-%u", copy_from, copy_to,
5015                            copy_len);
5016                   strcat(afl->mutation, afl->m_tmp);
5017 #endif
5018                   memmove(out_buf + copy_to, out_buf + copy_from, copy_len);
5019 
5020                 }
5021 
5022               } else {
5023 
5024 #ifdef INTROSPECTION
5025                 snprintf(afl->m_tmp, sizeof(afl->m_tmp),
5026                          " OVERWRITE_FIXED-%u-%u-%u", copy_from, copy_to,
5027                          copy_len);
5028                 strcat(afl->mutation, afl->m_tmp);
5029 #endif
5030                 memset(out_buf + copy_to,
5031                        rand_below(afl, 2) ? rand_below(afl, 256)
5032                                           : out_buf[rand_below(afl, temp_len)],
5033                        copy_len);
5034 
5035               }
5036 
5037               MOpt_globals.cycles_v2[STAGE_OverWrite75]++;
5038               break;
5039 
5040             }                                                    /* case 15 */
5041 
5042             default: {
5043 
5044               /* Values 16 and 17 can be selected only if there are any extras
5045                  present in the dictionaries. */
5046 
5047               r -= 16;
5048 
5049               if (r == 0 && (afl->extras_cnt || afl->a_extras_cnt)) {
5050 
5051                 /* Overwrite bytes with an extra. */
5052 
5053                 if (!afl->extras_cnt ||
5054                     (afl->a_extras_cnt && rand_below(afl, 2))) {
5055 
5056                   /* No user-specified extras or odds in our favor. Let's use an
5057                     auto-detected one. */
5058 
5059                   u32 use_extra = rand_below(afl, afl->a_extras_cnt);
5060                   u32 extra_len = afl->a_extras[use_extra].len;
5061 
5062                   if (extra_len > (u32)temp_len) break;
5063 
5064                   u32 insert_at = rand_below(afl, temp_len - extra_len + 1);
5065 #ifdef INTROSPECTION
5066                   snprintf(afl->m_tmp, sizeof(afl->m_tmp),
5067                            " AUTO_EXTRA_OVERWRITE-%u-%u", insert_at, extra_len);
5068                   strcat(afl->mutation, afl->m_tmp);
5069 #endif
5070                   memcpy(out_buf + insert_at, afl->a_extras[use_extra].data,
5071                          extra_len);
5072 
5073                 } else {
5074 
5075                   /* No auto extras or odds in our favor. Use the dictionary. */
5076 
5077                   u32 use_extra = rand_below(afl, afl->extras_cnt);
5078                   u32 extra_len = afl->extras[use_extra].len;
5079 
5080                   if (extra_len > (u32)temp_len) break;
5081 
5082                   u32 insert_at = rand_below(afl, temp_len - extra_len + 1);
5083 #ifdef INTROSPECTION
5084                   snprintf(afl->m_tmp, sizeof(afl->m_tmp),
5085                            " EXTRA_OVERWRITE-%u-%u", insert_at, extra_len);
5086                   strcat(afl->mutation, afl->m_tmp);
5087 #endif
5088                   memcpy(out_buf + insert_at, afl->extras[use_extra].data,
5089                          extra_len);
5090 
5091                 }
5092 
5093                 MOpt_globals.cycles_v2[STAGE_OverWriteExtra]++;
5094 
5095                 break;
5096 
5097               }
5098 
5099               /* Insert an extra. */
5100 
5101               else if (r == 1 && (afl->extras_cnt || afl->a_extras_cnt)) {
5102 
5103                 u32 use_extra, extra_len,
5104                     insert_at = rand_below(afl, temp_len + 1);
5105                 u8 *ptr;
5106 
5107                 /* Insert an extra. Do the same dice-rolling stuff as for the
5108                   previous case. */
5109 
5110                 if (!afl->extras_cnt ||
5111                     (afl->a_extras_cnt && rand_below(afl, 2))) {
5112 
5113                   use_extra = rand_below(afl, afl->a_extras_cnt);
5114                   extra_len = afl->a_extras[use_extra].len;
5115                   ptr = afl->a_extras[use_extra].data;
5116 #ifdef INTROSPECTION
5117                   snprintf(afl->m_tmp, sizeof(afl->m_tmp),
5118                            " AUTO_EXTRA_INSERT-%u-%u", insert_at, extra_len);
5119                   strcat(afl->mutation, afl->m_tmp);
5120 #endif
5121 
5122                 } else {
5123 
5124                   use_extra = rand_below(afl, afl->extras_cnt);
5125                   extra_len = afl->extras[use_extra].len;
5126                   ptr = afl->extras[use_extra].data;
5127 #ifdef INTROSPECTION
5128                   snprintf(afl->m_tmp, sizeof(afl->m_tmp),
5129                            " EXTRA_INSERT-%u-%u", insert_at, extra_len);
5130                   strcat(afl->mutation, afl->m_tmp);
5131 #endif
5132 
5133                 }
5134 
5135                 if (temp_len + extra_len >= MAX_FILE) break;
5136 
5137                 out_buf = afl_realloc(AFL_BUF_PARAM(out), temp_len + extra_len);
5138                 if (unlikely(!out_buf)) { PFATAL("alloc"); }
5139 
5140                 /* Tail */
5141                 memmove(out_buf + insert_at + extra_len, out_buf + insert_at,
5142                         temp_len - insert_at);
5143 
5144                 /* Inserted part */
5145                 memcpy(out_buf + insert_at, ptr, extra_len);
5146 
5147                 temp_len += extra_len;
5148                 MOpt_globals.cycles_v2[STAGE_InsertExtra]++;
5149                 break;
5150 
5151               } else {
5152 
5153                 if (unlikely(afl->ready_for_splicing_count < 2)) break;
5154 
5155                 u32 tid;
5156                 do {
5157 
5158                   tid = rand_below(afl, afl->queued_items);
5159 
5160                 } while (tid == afl->current_entry ||
5161 
5162                          afl->queue_buf[tid]->len < 4);
5163 
5164                 /* Get the testcase for splicing. */
5165                 struct queue_entry *target = afl->queue_buf[tid];
5166                 u32                 new_len = target->len;
5167                 u8 *                new_buf = queue_testcase_get(afl, target);
5168 
5169                 if ((temp_len >= 2 && rand_below(afl, 2)) ||
5170                     temp_len + HAVOC_BLK_XL >= MAX_FILE) {
5171 
5172                   /* overwrite mode */
5173 
5174                   u32 copy_from, copy_to, copy_len;
5175 
5176                   copy_len = choose_block_len(afl, new_len - 1);
5177                   if (copy_len > temp_len) copy_len = temp_len;
5178 
5179                   copy_from = rand_below(afl, new_len - copy_len + 1);
5180                   copy_to = rand_below(afl, temp_len - copy_len + 1);
5181 
5182 #ifdef INTROSPECTION
5183                   snprintf(afl->m_tmp, sizeof(afl->m_tmp),
5184                            " SPLICE_OVERWRITE-%u-%u-%u-%s", copy_from, copy_to,
5185                            copy_len, target->fname);
5186                   strcat(afl->mutation, afl->m_tmp);
5187 #endif
5188                   memmove(out_buf + copy_to, new_buf + copy_from, copy_len);
5189 
5190                 } else {
5191 
5192                   /* insert mode */
5193 
5194                   u32 clone_from, clone_to, clone_len;
5195 
5196                   clone_len = choose_block_len(afl, new_len);
5197                   clone_from = rand_below(afl, new_len - clone_len + 1);
5198                   clone_to = rand_below(afl, temp_len + 1);
5199 
5200                   u8 *temp_buf = afl_realloc(AFL_BUF_PARAM(out_scratch),
5201                                              temp_len + clone_len + 1);
5202                   if (unlikely(!temp_buf)) { PFATAL("alloc"); }
5203 
5204 #ifdef INTROSPECTION
5205                   snprintf(afl->m_tmp, sizeof(afl->m_tmp),
5206                            " SPLICE_INSERT-%u-%u-%u-%s", clone_from, clone_to,
5207                            clone_len, target->fname);
5208                   strcat(afl->mutation, afl->m_tmp);
5209 #endif
5210                   /* Head */
5211 
5212                   memcpy(temp_buf, out_buf, clone_to);
5213 
5214                   /* Inserted part */
5215 
5216                   memcpy(temp_buf + clone_to, new_buf + clone_from, clone_len);
5217 
5218                   /* Tail */
5219                   memcpy(temp_buf + clone_to + clone_len, out_buf + clone_to,
5220                          temp_len - clone_to);
5221 
5222                   out_buf = temp_buf;
5223                   afl_swap_bufs(AFL_BUF_PARAM(out), AFL_BUF_PARAM(out_scratch));
5224                   temp_len += clone_len;
5225 
5226                 }
5227 
5228                 MOpt_globals.cycles_v2[STAGE_Splice]++;
5229                 break;
5230 
5231               }
5232 
5233             }  // end of default:
5234 
5235           }                                    /* switch select_algorithm() */
5236 
5237         }                                      /* for i=0; i < use_stacking */
5238 
5239         ++*MOpt_globals.pTime;
5240 
5241         u64 temp_total_found = afl->queued_items + afl->saved_crashes;
5242 
5243         if (common_fuzz_stuff(afl, out_buf, temp_len)) {
5244 
5245           goto abandon_entry_puppet;
5246 
5247         }
5248 
5249         /* out_buf might have been mangled a bit, so let's restore it to its
5250            original size and shape. */
5251 
5252         out_buf = afl_realloc(AFL_BUF_PARAM(out), len);
5253         if (unlikely(!out_buf)) { PFATAL("alloc"); }
5254         temp_len = len;
5255         memcpy(out_buf, in_buf, len);
5256 
5257         /* If we're finding new stuff, let's run for a bit longer, limits
5258            permitting. */
5259 
5260         if (afl->queued_items != havoc_queued) {
5261 
5262           if (perf_score <= afl->havoc_max_mult * 100) {
5263 
5264             afl->stage_max *= 2;
5265             perf_score *= 2;
5266 
5267           }
5268 
5269           havoc_queued = afl->queued_items;
5270 
5271         }
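
        /* Geometric extension: whenever this loop produces new queue
           entries, stage_max and perf_score double together until
           perf_score exceeds havoc_max_mult * 100, so a productive havoc
           run lasts longer but remains bounded. */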
5272 
5273         if (unlikely(afl->queued_items + afl->saved_crashes >
5274                      temp_total_found)) {
5275 
5276           u64 temp_temp_puppet =
5277               afl->queued_items + afl->saved_crashes - temp_total_found;
5278           afl->total_puppet_find = afl->total_puppet_find + temp_temp_puppet;
5279 
5280           if (MOpt_globals.is_pilot_mode) {
5281 
5282             for (i = 0; i < operator_num; ++i) {
5283 
5284               if (MOpt_globals.cycles_v2[i] > MOpt_globals.cycles_v3[i]) {
5285 
5286                 MOpt_globals.finds_v2[i] += temp_temp_puppet;
5287 
5288               }
5289 
5290             }
5291 
5292           } else {
5293 
5294             for (i = 0; i < operator_num; i++) {
5295 
5296               if (afl->core_operator_cycles_puppet_v2[i] >
5297                   afl->core_operator_cycles_puppet_v3[i])
5298 
5299                 afl->core_operator_finds_puppet_v2[i] += temp_temp_puppet;
5300 
5301             }
5302 
5303           }
5304 
5305         }                                                             /* if */
5306 
5307       } /* for (afl->stage_cur = 0; afl->stage_cur < afl->stage_max;
5308 
5309            ++afl->stage_cur) { */
5310 
5311       new_hit_cnt = afl->queued_items + afl->saved_crashes;
5312 
5313       if (MOpt_globals.is_pilot_mode) {
5314 
5315         if (!splice_cycle) {
5316 
5317           afl->stage_finds[STAGE_HAVOC] += new_hit_cnt - orig_hit_cnt;
5318           afl->stage_cycles[STAGE_HAVOC] += afl->stage_max;
5319 
5320         } else {
5321 
5322           afl->stage_finds[STAGE_SPLICE] += new_hit_cnt - orig_hit_cnt;
5323           afl->stage_cycles[STAGE_SPLICE] += afl->stage_max;
5324 
5325         }
5326 
5327       }
5328 
5329 #ifndef IGNORE_FINDS
5330 
5331       /************
5332        * SPLICING *
5333        ************/
5334 
5335     retry_splicing_puppet:
5336 
5337       if (afl->use_splicing &&
5338           splice_cycle++ < (u32)afl->SPLICE_CYCLES_puppet &&
5339           afl->ready_for_splicing_count > 1 && afl->queue_cur->len >= 4) {
5340 
5341         struct queue_entry *target;
5342         u32                 tid, split_at;
5343         u8 *                new_buf;
5344         s32                 f_diff, l_diff;
5345 
5346         /* First of all, if we've modified in_buf for havoc, let's clean that
5347            up... */
5348 
5349         if (in_buf != orig_in) {
5350 
5351           in_buf = orig_in;
5352           len = afl->queue_cur->len;
5353 
5354         }
5355 
5356         /* Pick a random queue entry and seek to it. Don't splice with
5357            yourself. */
5358 
5359         do {
5360 
5361           tid = rand_below(afl, afl->queued_items);
5362 
5363         } while (tid == afl->current_entry || afl->queue_buf[tid]->len < 4);
5364 
5365         afl->splicing_with = tid;
5366         target = afl->queue_buf[tid];
5367 
5368         /* Read the testcase into a new buffer. */
5369         new_buf = queue_testcase_get(afl, target);
5370 
5371         /* Find a suitable splicing location, somewhere between the first and
5372            the last differing byte. Bail out if the difference is just a single
5373            byte or so. */
5374 
5375         locate_diffs(in_buf, new_buf, MIN(len, target->len), &f_diff, &l_diff);
5376 
5377         if (f_diff < 0 || l_diff < 2 || f_diff == l_diff) {
5378 
5379           goto retry_splicing_puppet;
5380 
5381         }
5382 
5383         /* Split somewhere between the first and last differing byte. */
5384 
5385         split_at = f_diff + rand_below(afl, l_diff - f_diff);
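
        /* E.g. with f_diff == 4 and l_diff == 20, split_at is uniform in
           4..19: bytes [0, split_at) stay from the current input, and the
           rest is taken from the splice target below. */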
5386 
5387         /* Do the thing. */
5388 
5389         len = target->len;
5390         afl->in_scratch_buf = afl_realloc(AFL_BUF_PARAM(in_scratch), len);
        if (unlikely(!afl->in_scratch_buf)) { PFATAL("alloc"); }
5391         memcpy(afl->in_scratch_buf, in_buf, split_at);
5392         memcpy(afl->in_scratch_buf + split_at, new_buf, len - split_at);
5393         in_buf = afl->in_scratch_buf;
5394         afl_swap_bufs(AFL_BUF_PARAM(in), AFL_BUF_PARAM(in_scratch));
5395 
5396         out_buf = afl_realloc(AFL_BUF_PARAM(out), len);
5397         if (unlikely(!out_buf)) { PFATAL("alloc"); }
5398         memcpy(out_buf, in_buf, len);
5399 
5400         goto havoc_stage_puppet;
5401 
5402       }                                                  /* if splice_cycle */
5403 
5404 #endif                                                     /* !IGNORE_FINDS */
5405 
5406       ret_val = 0;
5407 
5408     abandon_entry:
5409     abandon_entry_puppet:
5410 
      if ((s64)splice_cycle >= afl->SPLICE_CYCLES_puppet) {

        afl->SPLICE_CYCLES_puppet =
            (rand_below(
                 afl, SPLICE_CYCLES_puppet_up - SPLICE_CYCLES_puppet_low + 1) +
             SPLICE_CYCLES_puppet_low);

      }

      afl->splicing_with = -1;

      /* Update afl->pending_not_fuzzed count if we made it through the
         calibration cycle and have not seen this entry before. */
      /*
        // TODO FIXME: I think we need this plus need an -L -1 check
        if (!afl->stop_soon && !afl->queue_cur->cal_failed &&
            (afl->queue_cur->was_fuzzed == 0 || afl->queue_cur->fuzz_level == 0)
        && !afl->queue_cur->disabled) {

          if (!afl->queue_cur->was_fuzzed) {

            --afl->pending_not_fuzzed;
            afl->queue_cur->was_fuzzed = 1;
            if (afl->queue_cur->favored) { --afl->pending_favored; }

          }

        }

      */

      orig_in = NULL;

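      /* Check whether the pacemaker mode earned its keep: once the total find
         count has grown past the limit_time_bound threshold relative to the
         count recorded on entry, leave the mode and reset the bookkeeping. */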
      if (afl->key_puppet == 1) {

        if (unlikely(
                afl->queued_items + afl->saved_crashes >
                ((afl->queued_items + afl->saved_crashes) * limit_time_bound +
                 afl->orig_hit_cnt_puppet))) {

          afl->key_puppet = 0;
          afl->orig_hit_cnt_puppet = 0;
          afl->last_limit_time_start = 0;

        }

      }

      if (unlikely(*MOpt_globals.pTime > MOpt_globals.period)) {

        afl->total_pacemaker_time += *MOpt_globals.pTime;
        *MOpt_globals.pTime = 0;
        new_hit_cnt = afl->queued_items + afl->saved_crashes;

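        /* End of a pilot period: a swarm's fitness is the number of new
           finds per unit of pilot time spent in this period. */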
        if (MOpt_globals.is_pilot_mode) {

          afl->swarm_fitness[afl->swarm_now] =
              (double)(afl->total_puppet_find - afl->temp_puppet_find) /
              ((double)(afl->tmp_pilot_time) / afl->period_pilot_tmp);

        }

        afl->temp_puppet_find = afl->total_puppet_find;
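
        /* Per-operator efficiency over this period: new finds divided by new
           cycles. In pilot mode, remember the best efficiency seen and the
           operator probability (x_now) that produced it as the swarm's local
           best (L_best) for the next PSO update. */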
        for (i = 0; i < operator_num; ++i) {

          if (MOpt_globals.is_pilot_mode) {

            double temp_eff = 0.0;

            if (MOpt_globals.cycles_v2[i] > MOpt_globals.cycles[i]) {

              temp_eff =
                  (double)(MOpt_globals.finds_v2[i] - MOpt_globals.finds[i]) /
                  (double)(MOpt_globals.cycles_v2[i] - MOpt_globals.cycles[i]);

            }

            if (afl->eff_best[afl->swarm_now][i] < temp_eff) {

              afl->eff_best[afl->swarm_now][i] = temp_eff;
              afl->L_best[afl->swarm_now][i] = afl->x_now[afl->swarm_now][i];

            }

          }

          MOpt_globals.finds[i] = MOpt_globals.finds_v2[i];
          MOpt_globals.cycles[i] = MOpt_globals.cycles_v2[i];

        }                                    /* for i = 0; i < operator_num */

        if (MOpt_globals.is_pilot_mode) {

          afl->swarm_now = afl->swarm_now + 1;
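
          /* Once every swarm has completed a pilot period, snapshot the core
             operator counters, select the fittest swarm and hand control to
             the core module (key_module = 1). */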
          if (afl->swarm_now == swarm_num) {

            afl->key_module = 1;
            for (i = 0; i < operator_num; ++i) {

              afl->core_operator_cycles_puppet_v2[i] =
                  afl->core_operator_cycles_puppet[i];
              afl->core_operator_cycles_puppet_v3[i] =
                  afl->core_operator_cycles_puppet[i];
              afl->core_operator_finds_puppet_v2[i] =
                  afl->core_operator_finds_puppet[i];

            }

            double swarm_eff = 0.0;
            afl->swarm_now = 0;
            for (i = 0; i < swarm_num; ++i) {

              if (afl->swarm_fitness[i] > swarm_eff) {

                swarm_eff = afl->swarm_fitness[i];
                afl->swarm_now = i;

              }

            }

            if (afl->swarm_now < 0 || afl->swarm_now > swarm_num - 1) {

              PFATAL("swarm_now error number %d", afl->swarm_now);

            }

          }                               /* if afl->swarm_now == swarm_num */

          /* adjust pointers dependent on 'afl->swarm_now' */
          afl->mopt_globals_pilot.finds =
              afl->stage_finds_puppet[afl->swarm_now];
          afl->mopt_globals_pilot.finds_v2 =
              afl->stage_finds_puppet_v2[afl->swarm_now];
          afl->mopt_globals_pilot.cycles =
              afl->stage_cycles_puppet[afl->swarm_now];
          afl->mopt_globals_pilot.cycles_v2 =
              afl->stage_cycles_puppet_v2[afl->swarm_now];
          afl->mopt_globals_pilot.cycles_v3 =
              afl->stage_cycles_puppet_v3[afl->swarm_now];

        } else {

          for (i = 0; i < operator_num; i++) {

            afl->core_operator_finds_puppet[i] =
                afl->core_operator_finds_puppet_v2[i];
            afl->core_operator_cycles_puppet[i] =
                afl->core_operator_cycles_puppet_v2[i];

          }

          afl->key_module = 2;

          afl->old_hit_count = new_hit_cnt;

        }                                                  /* if pilot_mode */

      }         /* if (unlikely(*MOpt_globals.pTime > MOpt_globals.period)) */

    }                                                              /* block */

  }                                                                /* block */

  return ret_val;

}

#undef FLIP_BIT

u8 core_fuzzing(afl_state_t *afl) {

  return mopt_common_fuzzing(afl, afl->mopt_globals_core);

}

u8 pilot_fuzzing(afl_state_t *afl) {

  return mopt_common_fuzzing(afl, afl->mopt_globals_pilot);

}

void pso_updating(afl_state_t *afl) {

  afl->g_now++;
  if (afl->g_now > afl->g_max) { afl->g_now = 0; }
  afl->w_now =
      (afl->w_init - afl->w_end) * (afl->g_max - afl->g_now) / (afl->g_max) +
      afl->w_end;
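
  /* Linearly decaying inertia weight: w_now starts at w_init and moves
     toward w_end as g_now approaches g_max, i.e.
     w_now = (w_init - w_end) * (g_max - g_now) / g_max + w_end. */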
  int tmp_swarm, i, j;
  u64 temp_operator_finds_puppet = 0;
  for (i = 0; i < operator_num; ++i) {

    afl->operator_finds_puppet[i] = afl->core_operator_finds_puppet[i];

    for (j = 0; j < swarm_num; ++j) {

      afl->operator_finds_puppet[i] =
          afl->operator_finds_puppet[i] + afl->stage_finds_puppet[j][i];

    }

    temp_operator_finds_puppet =
        temp_operator_finds_puppet + afl->operator_finds_puppet[i];

  }

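  /* The global best position G_best[i] is operator i's share of all finds
     made so far, summed over the core module and every swarm. */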
  for (i = 0; i < operator_num; ++i) {

    if (afl->operator_finds_puppet[i]) {

      afl->G_best[i] = (double)((double)(afl->operator_finds_puppet[i]) /
                                (double)(temp_operator_finds_puppet));

    }

  }

  for (tmp_swarm = 0; tmp_swarm < swarm_num; ++tmp_swarm) {

    double x_temp = 0.0;
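
    /* Standard PSO velocity and position update for each operator i:
       v <- w_now * v + c * r1 * (L_best - x) + c * r2 * (G_best - x)
       x <- x + v, clamped to [v_min, v_max]
       (the RAND_C macro supplies the random coefficients). */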
    for (i = 0; i < operator_num; ++i) {

      afl->probability_now[tmp_swarm][i] = 0.0;
      afl->v_now[tmp_swarm][i] =
          afl->w_now * afl->v_now[tmp_swarm][i] +
          RAND_C * (afl->L_best[tmp_swarm][i] - afl->x_now[tmp_swarm][i]) +
          RAND_C * (afl->G_best[i] - afl->x_now[tmp_swarm][i]);
      afl->x_now[tmp_swarm][i] += afl->v_now[tmp_swarm][i];
      if (afl->x_now[tmp_swarm][i] > v_max) {

        afl->x_now[tmp_swarm][i] = v_max;

      } else if (afl->x_now[tmp_swarm][i] < v_min) {

        afl->x_now[tmp_swarm][i] = v_min;

      }

      x_temp += afl->x_now[tmp_swarm][i];

    }

    for (i = 0; i < operator_num; ++i) {

      afl->x_now[tmp_swarm][i] = afl->x_now[tmp_swarm][i] / x_temp;
      if (likely(i != 0)) {

        afl->probability_now[tmp_swarm][i] =
            afl->probability_now[tmp_swarm][i - 1] + afl->x_now[tmp_swarm][i];

      } else {

        afl->probability_now[tmp_swarm][i] = afl->x_now[tmp_swarm][i];

      }

    }
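
    /* x_now has been normalized to sum to one, and probability_now holds the
       cumulative distribution over operators; its last entry must be ~1.0,
       which is what select_algorithm() samples against. */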

    if (afl->probability_now[tmp_swarm][operator_num - 1] < 0.99 ||
        afl->probability_now[tmp_swarm][operator_num - 1] > 1.01) {

      FATAL("ERROR probability");

    }

  }

  afl->swarm_now = 0;
  afl->key_module = 0;

}

/* A larger change for the MOpt implementation: the original fuzz_one() was
   renamed to fuzz_one_original(). All documentation references to fuzz_one()
   therefore mean fuzz_one_original(). */

u8 fuzz_one(afl_state_t *afl) {

  int key_val_lv_1 = 0, key_val_lv_2 = 0;

#ifdef _AFL_DOCUMENT_MUTATIONS

  u8 path_buf[PATH_MAX];
  if (afl->do_document == 0) {

    snprintf(path_buf, PATH_MAX, "%s/mutations", afl->out_dir);
    afl->do_document = mkdir(path_buf, 0700);  // if it exists we do not care
    afl->do_document = 1;

  } else {

    afl->do_document = 2;
    afl->stop_soon = 2;

  }

#endif

  // limit_time_sig == 0: run only fuzz_one_original();
  // limit_time_sig  > 0: run only the MOpt modules;
  // limit_time_sig == -1: run both after each other.

  if (afl->limit_time_sig <= 0) { key_val_lv_1 = fuzz_one_original(afl); }

  if (afl->limit_time_sig != 0) {

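    /* key_module selects the MOpt phase: 0 = pilot fuzzing, 1 = core
       fuzzing, 2 = PSO parameter update. */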
    if (afl->key_module == 0) {

      key_val_lv_2 = pilot_fuzzing(afl);

    } else if (afl->key_module == 1) {

      key_val_lv_2 = core_fuzzing(afl);

    } else if (afl->key_module == 2) {

      pso_updating(afl);

    }

  }

  return (key_val_lv_1 | key_val_lv_2);

}