Mirror of https://github.com/VectorCamp/vectorscan.git
castle/repeat: fix a number of bugs
- Add fits_in_len_bytes assertions for packed stores. Corrects the
  assertion formerly on line 888.
- In exclusive mode, don't overwrite packedCtrlSize with the max of the
  group; each repeat should know how many bytes it is using, even if
  they share the same stream state.
- Ensure that exclusive mode stream state is sized correctly.
Parent: e10d2eb269
Commit: d0aa138ada
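
For readers skimming the diff, here is a minimal standalone sketch of the guard pattern these assertions introduce: check that a value fits in the chosen packed width before handing it to a partial store. It uses standard C types in place of vectorscan's u64a/really_inline, and a little-endian memcpy stand-in (store_low_bytes) for partial_store_u64a, so it illustrates the idea rather than the library code.

/* Sketch only: mirrors the fits_in_len_bytes() helper added in this commit,
 * paired with a simplified stand-in for partial_store_u64a(). */
#include <assert.h>
#include <stdint.h>
#include <string.h>

static int fits_in_len_bytes(uint64_t val, uint32_t len) {
    if (len >= 8) {
        return 1; /* any 64-bit value fits in 8 or more bytes */
    }
    return val <= (1ULL << (len * 8));
}

/* Stand-in for partial_store_u64a(): write the low 'len' bytes of 'val'.
 * Assumes a little-endian host; vectorscan uses its own helper for this. */
static void store_low_bytes(char *dest, uint64_t val, uint32_t len) {
    memcpy(dest, &val, len);
}

int main(void) {
    char buf[8];
    uint64_t delta = 300;  /* needs two bytes */
    uint32_t width = 2;    /* e.g. a packedCtrlSize chosen at compile time */
    assert(fits_in_len_bytes(delta, width)); /* the guard the commit adds */
    store_low_bytes(buf, delta, width);
    return 0;
}

The helper short-circuits for len >= 8 because any 64-bit value fits in eight or more bytes, so the shift is only evaluated for widths where it is well defined.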
@@ -361,25 +361,22 @@ void buildSubcastles(const CastleProto &proto, vector<SubCastle> &subs,
         DEBUG_PRINTF("sub %u: selected %s model for %s repeat\n", i,
                      repeatTypeName(rtype), pr.bounds.str().c_str());
 
-        u32 subScratchStateSize;
-        u32 subStreamStateSize;
-
         SubCastle &sub = subs[i];
         RepeatInfo &info = infos[i];
 
-        // handle exclusive case differently
+        info.packedCtrlSize = rsi.packedCtrlSize;
+        u32 subStreamStateSize = verify_u32(rsi.packedCtrlSize + rsi.stateSize);
+
+        // Handle stream/scratch space alloc for exclusive case differently.
         if (contains(groupId, i)) {
             u32 id = groupId.at(i);
-            maxStreamSize[id] = MAX(maxStreamSize[id], rsi.packedCtrlSize);
+            maxStreamSize[id] = max(maxStreamSize[id], subStreamStateSize);
+            // SubCastle full/stream state offsets are written in for the group
+            // below.
         } else {
-            subScratchStateSize = verify_u32(sizeof(RepeatControl));
-            subStreamStateSize = verify_u32(rsi.packedCtrlSize + rsi.stateSize);
-
-            info.packedCtrlSize = rsi.packedCtrlSize;
             sub.fullStateOffset = scratchStateSize;
             sub.streamStateOffset = streamStateSize;
-            scratchStateSize += subScratchStateSize;
+            scratchStateSize += verify_u32(sizeof(RepeatControl));
             streamStateSize += subStreamStateSize;
         }
 
@@ -420,8 +417,6 @@ void buildSubcastles(const CastleProto &proto, vector<SubCastle> &subs,
         u32 top = j.first;
         u32 id = j.second;
         SubCastle &sub = subs[top];
-        RepeatInfo &info = infos[top];
-        info.packedCtrlSize = maxStreamSize[id];
         if (!scratchOffset[id]) {
             sub.fullStateOffset = scratchStateSize;
             sub.streamStateOffset = streamStateSize;

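As a rough illustration of the sizing rule the commit message describes (made-up sizes and a hypothetical RepeatSizes struct, not the real buildSubcastles logic): each repeat keeps its own packedCtrlSize, members of an exclusive group share one stream-state region sized to the largest member's packed control plus state bytes, and standalone repeats are laid out one after another.

/* Sketch of the stream-state accounting after this change (made-up numbers,
 * not the real buildSubcastles()). */
#include <stdint.h>
#include <stdio.h>

struct RepeatSizes {
    uint32_t packedCtrlSize; /* bytes of packed control in stream state */
    uint32_t stateSize;      /* bytes of additional repeat state */
    int exclusiveGroup;      /* group id, or -1 if not exclusive */
};

int main(void) {
    /* Two repeats share exclusive group 0; the third stands alone. */
    struct RepeatSizes subs[] = { { 3, 4, 0 }, { 2, 8, 0 }, { 5, 0, -1 } };
    uint32_t maxGroupSize = 0;     /* largest full substate in group 0 */
    uint32_t streamStateSize = 0;  /* total stream state bytes */

    for (unsigned i = 0; i < sizeof(subs) / sizeof(subs[0]); i++) {
        uint32_t full = subs[i].packedCtrlSize + subs[i].stateSize;
        if (subs[i].exclusiveGroup == 0) {
            /* Exclusive members share one region sized to the largest
             * member; each still keeps its own packedCtrlSize (the old
             * code overwrote it with the group maximum). */
            if (full > maxGroupSize) {
                maxGroupSize = full;
            }
        } else {
            /* Standalone repeats each get their own slice. */
            streamStateSize += full;
        }
    }
    streamStateSize += maxGroupSize;
    printf("stream state bytes: %u\n", (unsigned)streamStateSize); /* 5 + 10 */
    return 0;
}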
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2015, Intel Corporation
+ * Copyright (c) 2015-2016, Intel Corporation
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions are met:

@@ -882,15 +882,25 @@ enum RepeatMatch repeatHasMatchTrailer(const struct RepeatInfo *info,
     return REPEAT_NOMATCH;
 }
 
+/** \brief True if the given value can be packed into len bytes. */
+static really_inline
+int fits_in_len_bytes(u64a val, u32 len) {
+    if (len >= 8) {
+        return 1;
+    }
+    return val <= (1ULL << (len * 8));
+}
+
 static really_inline
 void storePackedRelative(char *dest, u64a val, u64a offset, u64a max, u32 len) {
     assert(val <= offset);
-    assert(max < (1ULL << (8 * len)));
+    assert(fits_in_len_bytes(max, len));
     u64a delta = offset - val;
     if (delta >= max) {
         delta = max;
     }
     DEBUG_PRINTF("delta %llu\n", delta);
+    assert(fits_in_len_bytes(delta, len));
     partial_store_u64a(dest, delta, len);
 }
 
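The second assertion added to storePackedRelative follows from the first: delta is clamped to max, and max has already been checked against the packed width, so the stored delta always fits. A hedged standalone sketch of that clamp-then-store pattern, again using standard C stand-ins rather than vectorscan's u64a/partial_store_u64a:

/* Sketch of the clamp-then-store pattern in storePackedRelative()
 * (standard C stand-ins; little-endian partial store as in the earlier sketch). */
#include <assert.h>
#include <stdint.h>
#include <string.h>

static void store_packed_relative(char *dest, uint64_t val, uint64_t offset,
                                  uint64_t max, uint32_t len) {
    assert(val <= offset);
    /* max must fit in the packed width (first new assertion). */
    assert(len >= 8 || max <= (1ULL << (len * 8)));
    uint64_t delta = offset - val;
    if (delta >= max) {
        delta = max; /* clamp: anything further back is reported as max */
    }
    /* delta <= max and max fits, so delta fits (second new assertion). */
    assert(len >= 8 || delta <= (1ULL << (len * 8)));
    memcpy(dest, &delta, len < 8 ? len : 8); /* little-endian partial store */
}

int main(void) {
    char buf[8];
    store_packed_relative(buf, 100, 1000, 512, 2); /* stores the clamped 512 */
    return 0;
}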
@@ -967,6 +977,7 @@ void repeatPackBitmap(char *dest, const struct RepeatInfo *info,
     DEBUG_PRINTF("packing %llu into %u bytes\n", bitmap, info->packedCtrlSize);
 
     // Write out packed bitmap.
+    assert(fits_in_len_bytes(bitmap, info->packedCtrlSize));
     partial_store_u64a(dest, bitmap, info->packedCtrlSize);
 }
 
@@ -1440,6 +1451,7 @@ void repeatStoreSparseOptimalP(const struct RepeatInfo *info,
     DEBUG_PRINTF("xs->first:%u xs->last:%u patch:%u\n",
                  xs->first, xs->last, patch);
     DEBUG_PRINTF("value:%llu\n", val);
+    assert(fits_in_len_bytes(val, encoding_size));
     partial_store_u64a(ring + encoding_size * idx, val, encoding_size);
     mmbit_set(active, patch_count, idx);
 }