Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -20,12 +20,12 @@

import com.code_intelligence.jazzer.api.FuzzedDataProvider;
import com.code_intelligence.jazzer.junit.FuzzTest;
import com.code_intelligence.jazzer.mutation.annotation.WithSize;
import com.code_intelligence.jazzer.mutation.annotation.WithUtf8Length;
import com.code_intelligence.jazzer.protobuf.Proto3;
import com.code_intelligence.selffuzz.jazzer.mutation.ArgumentsMutator;
import com.code_intelligence.selffuzz.jazzer.mutation.annotation.NotNull;
import com.code_intelligence.selffuzz.jazzer.mutation.annotation.WithLength;
import com.code_intelligence.selffuzz.jazzer.mutation.annotation.WithSize;
import com.code_intelligence.selffuzz.jazzer.mutation.annotation.WithUtf8Length;
import com.code_intelligence.selffuzz.jazzer.mutation.mutator.Mutators;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
Expand All @@ -41,6 +41,7 @@
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

public class ArgumentsMutatorFuzzTest {
Expand Down Expand Up @@ -101,21 +102,39 @@ void fuzzStrings(
@NotNull String s1,
@NotNull @WithUtf8Length(min = 10, max = 20) String s2) {}

@SelfFuzzTest // BUG: null pointer exception
void fuzzListOfMaps(Map<String, Integer> nullableMap) {}
// Verifies that @WithSize(max = 4) is honored for a nullable Map argument.
// NOTE(review): the name says "ListOfMaps" but the parameter is a plain Map —
// presumably a stale name; confirm before renaming.
@SelfFuzzTest
void fuzzListOfMaps(@WithSize(max = 4) Map<String, Integer> nullableMap) {
  // No @NotNull on the parameter, so the mutator may legitimately pass null.
  if (nullableMap != null) {
    assertThat(nullableMap.size()).isAtMost(4);
  }
}

// Verifies that @WithSize(max = 10) is honored for a Set of non-null Integers.
// NOTE(review): the name says "ListOfSets" but the parameter is a plain Set —
// presumably a stale name; confirm before renaming.
@SelfFuzzTest
void fuzzListOfSets(@WithSize(max = 10) @NotNull Set<@NotNull Integer> setWithSize) {
  // NOTE(review): the parameter is @NotNull, so this null branch is presumably
  // dead; kept as a defensive guard — confirm the mutator always honors @NotNull.
  if (setWithSize != null) {
    assertThat(setWithSize.size()).isAtMost(10);
  }
}

// Smoke test for nested lists with mixed element nullability: no assertions,
// the test only checks that mutation of these shapes does not throw.
// NOTE(review): the first parameter is named "nullableMap" but is a List —
// presumably a copy-paste leftover; confirm before renaming.
@SelfFuzzTest
void fuzzListOfLists(List<@NotNull List<String>> nullableMap, List<List<Integer>> nullableList) {}

@SelfFuzzTest
void fuzzPPrimitiveArrays(
int @WithLength(max = 10) [] a0, boolean[] a2, int @WithLength(max = 8193) [] a3) {}
// Verifies @WithLength bounds on nullable primitive arrays; a2 carries no
// length constraint and therefore is not checked.
void fuzzPrimitiveArrays(
    int @WithLength(max = 10) [] a0, boolean[] a2, int @WithLength(max = 8193) [] a3) {
  if (a0 != null) assertThat(a0.length).isAtMost(10);
  if (a3 != null) assertThat(a3.length).isAtMost(8193);
}

// Smoke test: mutating bean-typed arguments (a @ConstructorProperties bean and
// a bean with an inherited field) must not throw; no assertions on the values.
@SelfFuzzTest
void fuzzBean(@NotNull ConstructorPropertiesAnnotatedBean bean, BeanWithParent beanWithParent) {}

@SelfFuzzTest
void fuzzListOfBeans(@WithSize(max = 4) List<BeanWithParent> beanWithParent) {}
// Verifies that @WithSize(max = 4) is honored for a nullable list of beans.
void fuzzListOfBeans(@WithSize(max = 4) List<BeanWithParent> beanWithParent) {
  // Nullable parameter: the mutator may pass null.
  if (beanWithParent != null) {
    assertThat(beanWithParent.size()).isAtMost(4);
  }
}

@SelfFuzzTest
void fuzzListOfListOfBeans(
Expand Down Expand Up @@ -184,7 +203,15 @@ void fuzzPrimitiveArrays(
Byte @WithLength(max = 3) [] by0,
byte[] by1,
Short @WithLength(max = 3) [] s0,
short[] s1) {}
short[] s1) {
if (i0 != null) assertThat(i0.length).isAtMost(3);
if (b0 != null) assertThat(b0.length).isAtMost(3);
if (d0 != null) assertThat(d0.length).isAtMost(3);
if (f0 != null) assertThat(f0.length).isAtMost(3);
if (l0 != null) assertThat(l0.length).isAtMost(3);
if (by0 != null) assertThat(by0.length).isAtMost(3);
if (s0 != null) assertThat(s0.length).isAtMost(3);
}

enum MyEnum {
A,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,10 +26,11 @@
import java.lang.annotation.Target;
import java.util.List;
import java.util.Map;
import java.util.Set;

@Target(TYPE_USE)
@Retention(RUNTIME)
@AppliesTo({List.class, Map.class})
@AppliesTo({List.class, Map.class, Set.class})
@ValidateContainerDimensions
@PropertyConstraint
public @interface WithSize {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,9 +22,11 @@
import java.util.Collection;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

final class ChunkCrossOvers {
private ChunkCrossOvers() {}
Expand Down Expand Up @@ -98,6 +100,25 @@ static <K, V> void insertChunk(
}
}

/**
 * Inserts a random contiguous chunk of elements from {@code otherSet} into {@code set},
 * never growing {@code set} beyond {@code maxSize}.
 *
 * <p>NOTE(review): assumes {@code set} has spare capacity and {@code otherSet} is non-empty
 * (otherwise {@code maxChunkSize < 1}) — presumably guaranteed by the caller; confirm.
 */
static <K> void insertChunk(
    Set<K> set, Set<K> otherSet, int maxSize, PseudoRandom prng, boolean hasFixedSize) {
  int originalSize = set.size();
  int maxChunkSize = Math.min(maxSize - originalSize, otherSet.size());
  int chunkSize = prng.sizeInClosedRange(1, maxChunkSize, hasFixedSize);
  int fromChunkOffset = prng.closedRange(0, otherSet.size() - chunkSize);
  Iterator<K> fromIterator = otherSet.iterator();
  for (int i = 0; i < fromChunkOffset; i++) {
    fromIterator.next();
  }
  // insertChunk only inserts new elements and does not overwrite existing
  // ones. As skipping those elements would lead to fewer insertions than
  // requested, loop over the rest of the set to fill the chunk if possible.
  while (set.size() < originalSize + chunkSize && fromIterator.hasNext()) {
    K key = fromIterator.next();
    set.add(key);
  }
}

static <K, V> void overwriteChunk(
Map<K, V> map, Map<K, V> otherMap, PseudoRandom prng, boolean hasFixedSize) {
onCorrespondingChunks(
Expand All @@ -117,6 +138,59 @@ static <K, V> void overwriteChunk(
hasFixedSize);
}

// NOTE(review): as written, this method is a no-op apart from advancing the
// PRNG and the iterators — the lambda reads `from` and `to` but never modifies
// `set`. The inner comment appears copied from the Map overload ("overwrites
// the values" has no meaning for a Set). Confirm whether a remove-and-add of
// the chunk was intended here, or whether consuming randomness is deliberate.
static <K> void overwriteChunk(
    Set<K> set, Set<K> otherSet, PseudoRandom prng, boolean hasFixedSize) {
  onCorrespondingChunks(
      set,
      otherSet,
      prng,
      (fromIterator, toIterator, chunkSize) -> {
        // As keys can not be overwritten, only removed and new ones added, this
        // cross over overwrites the values. Removal of keys is handled by the
        // removeChunk mutation. Value equality is not checked here.
        for (int i = 0; i < chunkSize; i++) {
          K from = fromIterator.next();
          K to = toIterator.next();
        }
      },
      hasFixedSize);
}

/**
 * Crosses over a random contiguous chunk of {@code set} with the corresponding chunk of
 * {@code otherSet}: each element of the chunk in {@code set} is replaced by the result of
 * {@code keyMutator.crossOver} applied to the paired elements.
 */
static <K> void crossOverChunk(
    Set<K> set, Set<K> otherSet, SerializingMutator<K> keyMutator, PseudoRandom prng) {
  onCorrespondingChunks(
      set,
      otherSet,
      prng,
      (fromIterator, toIterator, chunkSize) -> {
        Set<K> entriesToAdd = new LinkedHashSet<>(chunkSize);
        for (int i = 0; i < chunkSize; i++) {
          K to = toIterator.next();
          K from = fromIterator.next();

          // The element has to be removed from the set before the cross-over, as
          // mutating it could cause problems in subsequent lookups.
          // Furthermore, no new elements may be added while using the iterator,
          // so crossed-over elements are collected for later addition.
          toIterator.remove();

          // As cross-overs do not guarantee to mutate the given object, no
          // checks if the crossed-over element already exists in the set are
          // performed. Colliding results may collapse into a single element,
          // shrinking the set; in case of cross over this behavior is acceptable.
          K newKey = keyMutator.crossOver(to, from, prng);

          // Prevent null elements, as those are not allowed in some set implementations.
          if (newKey != null) {
            entriesToAdd.add(newKey);
          }
        }
        set.addAll(entriesToAdd);
      },
      keyMutator.hasFixedSize());
}

static <K, V> void crossOverChunk(
Map<K, V> map,
Map<K, V> otherMap,
Expand Down Expand Up @@ -198,6 +272,11 @@ private interface ChunkMapOperation<K, V> {
void apply(Iterator<Entry<K, V>> fromIterator, Iterator<Entry<K, V>> toIterator, int chunkSize);
}

/**
 * Callback for the Set overload of onCorrespondingChunks: receives iterators positioned at
 * the start of the chosen chunk in each set, plus the common chunk size.
 */
@FunctionalInterface
private interface ChunkSetOperation<K> {
  void apply(Iterator<K> fromIterator, Iterator<K> toIterator, int chunkSize);
}

static <K, V> void onCorrespondingChunks(
Map<K, V> map,
Map<K, V> otherMap,
Expand All @@ -219,6 +298,27 @@ static <K, V> void onCorrespondingChunks(
operation.apply(fromIterator, toIterator, chunkSize);
}

/**
 * Picks one random chunk of the same size in each set and invokes {@code operation} with
 * iterators positioned at the start of each chunk.
 *
 * <p>PRNG calls happen in a fixed order (size, from-offset, to-offset) to keep mutations
 * reproducible for a given seed.
 */
static <K> void onCorrespondingChunks(
    Set<K> set,
    Set<K> otherSet,
    PseudoRandom prng,
    ChunkSetOperation<K> operation,
    boolean hasFixedSize) {
  // A chunk can never exceed the smaller of the two sets.
  int largestPossibleChunk = Math.min(set.size(), otherSet.size());
  int chunkSize = prng.sizeInClosedRange(1, largestPossibleChunk, hasFixedSize);
  int skipInOther = prng.closedRange(0, otherSet.size() - chunkSize);
  int skipInSet = prng.closedRange(0, set.size() - chunkSize);
  // Advance each iterator to the start of its chunk.
  Iterator<K> fromIterator = otherSet.iterator();
  while (skipInOther-- > 0) {
    fromIterator.next();
  }
  Iterator<K> toIterator = set.iterator();
  while (skipInSet-- > 0) {
    toIterator.next();
  }
  operation.apply(fromIterator, toIterator, chunkSize);
}

public enum CrossOverAction {
INSERT_CHUNK,
OVERWRITE_CHUNK,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,10 +16,11 @@

package com.code_intelligence.jazzer.mutation.mutator.collection;

import static com.code_intelligence.jazzer.mutation.support.Preconditions.require;

import com.code_intelligence.jazzer.mutation.api.PseudoRandom;
import com.code_intelligence.jazzer.mutation.api.SerializingMutator;
import com.code_intelligence.jazzer.mutation.api.ValueMutator;
import com.code_intelligence.jazzer.mutation.support.Preconditions;
import java.util.AbstractList;
import java.util.ArrayDeque;
import java.util.ArrayList;
Expand Down Expand Up @@ -162,6 +163,56 @@ static <K, V, KW, VW> boolean mutateRandomKeysChunk(
return grownBy > 0;
}

/**
 * Mutates a random contiguous chunk of elements of {@code set} in place.
 *
 * @return true if at least one element was successfully replaced by a new, distinct element
 *
 * <p>NOTE(review): assumes {@code set} is non-empty (otherwise
 * {@code sizeInClosedRange(1, 0, ...)}) — presumably guaranteed by the caller; confirm.
 */
static <K, KW> boolean mutateRandomKeysChunk(
    Set<K> set, SerializingMutator<K> keyMutator, PseudoRandom prng) {
  int originalSize = set.size();
  int chunkSize = prng.sizeInClosedRange(1, originalSize, keyMutator.hasFixedSize());
  int chunkOffset = prng.closedRange(0, originalSize - chunkSize);

  // To ensure that mutating keys actually results in the set of keys changing, we keep the keys
  // to mutate in the set, try to add new keys (that are therefore distinct from the keys to
  // mutate) and only remove the successfully mutated keys in the end.
  ArrayDeque<KW> keysToMutate = new ArrayDeque<>(chunkSize);
  ArrayList<K> keysToRemove = new ArrayList<>(chunkSize);
  // Advance to the start of the chosen chunk.
  Iterator<K> it = set.iterator();
  for (int i = 0; i < chunkOffset; i++) {
    it.next();
  }
  for (int i = chunkOffset; i < chunkOffset + chunkSize; i++) {
    K entry = it.next();
    // ArrayDeque cannot hold null elements, which requires us to replace null with a sentinel.
    // Also detach the key as keys may be mutable and mutation could destroy them.
    keysToMutate.add(boxNull(keyMutator.detach(entry)));
    keysToRemove.add(entry);
  }

  // Invoked by growBy after each candidate: if the set grew, the candidate was
  // genuinely new, so retire it from the mutation queue.
  Consumer<K> addIfNew =
      key -> {
        int sizeBeforeAdd = set.size();
        set.add(key);
        // The mutated key was new, try to mutate and add the next in line.
        if (set.size() > sizeBeforeAdd) {
          keysToMutate.removeFirst();
        }
      };
  // Produces the next candidate by re-mutating the head of the queue; the result
  // is pushed back so a failed insertion can be retried with further mutation.
  Supplier<K> nextCandidate =
      () -> {
        // Mutate the next candidate in the queue.
        K candidate = keyMutator.mutate(unboxNull(keysToMutate.removeFirst()), prng);
        keysToMutate.addFirst(boxNull(candidate));
        return candidate;
      };

  growBy(set, addIfNew, chunkSize, nextCandidate);
  // Remove the original keys that were successfully mutated into new keys. Since the original
  // keys have been kept in the set up to this point, all keys added were successfully mutated to
  // be unequal to the original keys.
  int grownBy = set.size() - originalSize;
  keysToRemove.stream().limit(grownBy).forEach(set::remove);
  return grownBy > 0;
}

public static <K, V> void mutateRandomValuesChunk(
Map<K, V> map, ValueMutator<V> valueMutator, PseudoRandom prng) {
Collection<Map.Entry<K, V>> collection = map.entrySet();
Expand All @@ -182,7 +233,7 @@ public static <K, V> void mutateRandomValuesChunk(
static <T> boolean growBy(
Set<T> set, Consumer<T> addIfNew, int delta, Supplier<T> candidateSupplier) {
int oldSize = set.size();
Preconditions.require(delta >= 0);
require(delta >= 0);

final int targetSize = oldSize + delta;
int remainingAttempts = MAX_FAILED_INSERTION_ATTEMPTS;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,10 @@ public final class CollectionMutators {
private CollectionMutators() {}

/**
 * Returns the factories for all collection mutators (lists, maps, sets, and arrays).
 */
public static Stream<MutatorFactory> newFactories() {
  // Fixed: the diff residue left two consecutive return statements here (the stale
  // pre-change line plus the new one); only the version including SetMutatorFactory
  // is kept.
  return Stream.of(
      new ListMutatorFactory(),
      new MapMutatorFactory(),
      new SetMutatorFactory(),
      new ArrayMutatorFactory());
}
}
Loading