Mirror of https://github.com/VectorCamp/vectorscan.git
ng_equivalence: don't use ptr_vector
Switch from boost::ptr_vector<T> to vector<unique_ptr<T>>. This works around issues we were seeing with MSVC builds, where the contents of the ptr_vector were being destroyed when it was returned.
commit 1a24b0b4db
parent 779bebfd12
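
For context, here is a minimal standalone sketch of the ownership pattern the change adopts. The names (Node, buildNodes, by_index) are illustrative only and not taken from the vectorscan sources; std::make_unique (C++14) is used for brevity, whereas the tree itself pulls in util/make_unique.h. Elements are owned by unique_ptr inside the vector, side tables keep non-owning raw pointers obtained with .get(), and the vector is returned by value so it is moved rather than copied.

// Sketch only: container owns elements via unique_ptr, index table holds
// non-owning raw pointers, and the whole vector is moved out on return.
#include <cstddef>
#include <memory>
#include <vector>

struct Node {
    explicit Node(size_t idx) : index(idx) {}
    size_t index;
    Node *pred = nullptr; // non-owning cross-reference
};

static std::vector<std::unique_ptr<Node>> buildNodes(size_t count) {
    std::vector<std::unique_ptr<Node>> nodes;
    nodes.reserve(count);

    // Raw pointers stay valid even if 'nodes' reallocates, because each
    // pointee lives in its own heap allocation owned by a unique_ptr.
    std::vector<Node *> by_index(count);

    for (size_t i = 0; i < count; i++) {
        nodes.push_back(std::make_unique<Node>(i));
        by_index[i] = nodes.back().get();
    }

    // Wire up non-owning links through the index table.
    for (size_t i = 1; i < count; i++) {
        by_index[i]->pred = by_index[i - 1];
    }

    return nodes; // moved, not copied; no early destruction of elements
}

Because each element has a stable heap address, the raw pointers kept in side tables remain usable while later code (as mergeClass does below) appends further elements to the owning vector.
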
@@ -38,17 +38,16 @@
 #include "ng_util.h"
 #include "util/compile_context.h"
 #include "util/graph_range.h"
+#include "util/make_unique.h"
 #include "util/ue2_containers.h"
 
 #include <algorithm>
+#include <memory>
 #include <set>
 #include <stack>
 #include <vector>
 
-#include <boost/ptr_container/ptr_vector.hpp>
-
 using namespace std;
-using boost::ptr_vector;
 
 namespace ue2 {
 
@@ -276,47 +275,47 @@ bool hasEdgeAsserts(NFAVertex v, const NGHolder &g) {
 
 // populate VertexInfo table
 static
-ptr_vector<VertexInfo> getVertexInfos(const NGHolder &g) {
+vector<unique_ptr<VertexInfo>> getVertexInfos(const NGHolder &g) {
     const size_t num_verts = num_vertices(g);
 
-    ptr_vector<VertexInfo> infos;
+    vector<unique_ptr<VertexInfo>> infos;
     infos.reserve(num_verts * 2);
 
     vector<VertexInfo *> vertex_map; // indexed by vertex_index property
     vertex_map.resize(num_verts);
 
     for (auto v : vertices_range(g)) {
-        VertexInfo *vi = new VertexInfo(v, g);
-
-        // insert our new shiny VertexInfo into the info map
-        infos.push_back(vi);
-
-        vertex_map[g[v].index] = vi;
+        infos.push_back(make_unique<VertexInfo>(v, g));
+        vertex_map[g[v].index] = infos.back().get();
     }
 
-    // now, go through each vertex and populate its predecessor and successor lists
-    for (VertexInfo &cur_vi : infos) {
-        // find predecessors
-        for (const auto &e : in_edges_range(cur_vi.v, g)) {
-            NFAVertex u = source(e, g);
-            VertexInfo *vmi = vertex_map[g[u].index];
-
-            cur_vi.pred_cr |= vmi->cr;
-            cur_vi.pred.insert(vmi);
+    // now, go through each vertex and populate its predecessor and successor
+    // lists
+    for (auto &vi : infos) {
+        assert(vi);
+        NFAVertex v = vi->v;
+
+        // find predecessors
+        for (const auto &e : in_edges_range(v, g)) {
+            NFAVertex u = source(e, g);
+            VertexInfo *u_vi = vertex_map[g[u].index];
+
+            vi->pred_cr |= u_vi->cr;
+            vi->pred.insert(u_vi);
 
             // also set up edge tops
             if (is_triggered(g) && u == g.start) {
-                cur_vi.edge_tops = g[e].tops;
+                vi->edge_tops = g[e].tops;
             }
         }
 
         // find successors
-        for (auto w : adjacent_vertices_range(cur_vi.v, g)) {
-            VertexInfo *vmi = vertex_map[g[w].index];
-            cur_vi.succ_cr |= vmi->cr;
-            cur_vi.succ.insert(vmi);
+        for (auto w : adjacent_vertices_range(v, g)) {
+            VertexInfo *w_vi = vertex_map[g[w].index];
+            vi->succ_cr |= w_vi->cr;
+            vi->succ.insert(w_vi);
         }
-        assert(!hasEdgeAsserts(cur_vi.v, g));
+        assert(!hasEdgeAsserts(vi->v, g));
     }
 
     return infos;
@@ -324,7 +323,7 @@ ptr_vector<VertexInfo> getVertexInfos(const NGHolder &g) {
 
 // store equivalence class in VertexInfo for each vertex
 static
-vector<VertexInfoSet> partitionGraph(ptr_vector<VertexInfo> &infos,
+vector<VertexInfoSet> partitionGraph(vector<unique_ptr<VertexInfo>> &infos,
                                      WorkQueue &work_queue, const NGHolder &g,
                                      EquivalenceType eq) {
     const size_t num_verts = infos.size();
@@ -349,28 +348,30 @@ vector<VertexInfoSet> partitionGraph(ptr_vector<VertexInfo> &infos,
     }
 
     // partition the graph based on CharReach
-    for (VertexInfo &vi : infos) {
+    for (auto &vi : infos) {
+        assert(vi);
+
         ClassInfo::ClassDepth depth;
 
         if (eq == LEFT_EQUIVALENCE) {
-            depth = depths[vi.vert_index];
+            depth = depths[vi->vert_index];
         } else {
-            depth = rdepths[vi.vert_index];
+            depth = rdepths[vi->vert_index];
         }
-        ClassInfo ci(g, vi, depth, eq);
+        ClassInfo ci(g, *vi, depth, eq);
 
         auto ii = classinfomap.find(ci);
         if (ii == classinfomap.end()) {
             // vertex is in a new equivalence class by itself.
             unsigned eq_class = classes.size();
-            vi.equivalence_class = eq_class;
-            classes.push_back({&vi});
+            vi->equivalence_class = eq_class;
+            classes.push_back({vi.get()});
             classinfomap.emplace(move(ci), eq_class);
         } else {
             // vertex is added to an existing class.
             unsigned eq_class = ii->second;
-            vi.equivalence_class = eq_class;
-            classes.at(eq_class).insert(&vi);
+            vi->equivalence_class = eq_class;
+            classes.at(eq_class).insert(vi.get());
 
             // we now know that this particular class has more than one
             // vertex, so we add it to the work queue
@@ -500,8 +501,9 @@ bool require_separate_eod_vertex(const VertexInfoSet &vert_infos,
 }
 
 static
-void mergeClass(ptr_vector<VertexInfo> &infos, NGHolder &g, unsigned eq_class,
-                VertexInfoSet &cur_class_vertices, set<NFAVertex> *toRemove) {
+void mergeClass(vector<unique_ptr<VertexInfo>> &infos, NGHolder &g,
+                unsigned eq_class, VertexInfoSet &cur_class_vertices,
+                set<NFAVertex> *toRemove) {
     DEBUG_PRINTF("Replacing %zd vertices from equivalence class %u with a "
                  "single vertex.\n", cur_class_vertices.size(), eq_class);
 
@@ -529,9 +531,9 @@ void mergeClass(ptr_vector<VertexInfo> &infos, NGHolder &g, unsigned eq_class,
                                                * props */
     g[new_v].reports.clear(); /* populated as we pull in succs */
 
-    VertexInfo *new_vertex_info = new VertexInfo(new_v, g);
     // store this vertex in our global vertex list
-    infos.push_back(new_vertex_info);
+    infos.push_back(make_unique<VertexInfo>(new_v, g));
+    VertexInfo *new_vertex_info = infos.back().get();
 
     NFAVertex new_v_eod = NGHolder::null_vertex();
     VertexInfo *new_vertex_info_eod = nullptr;
@@ -539,8 +541,8 @@ void mergeClass(ptr_vector<VertexInfo> &infos, NGHolder &g, unsigned eq_class,
     if (require_separate_eod_vertex(cur_class_vertices, g)) {
         new_v_eod = clone_vertex(g, old_v);
         g[new_v_eod].reports.clear();
-        new_vertex_info_eod = new VertexInfo(new_v_eod, g);
-        infos.push_back(new_vertex_info_eod);
+        infos.push_back(make_unique<VertexInfo>(new_v_eod, g));
+        new_vertex_info_eod = infos.back().get();
     }
 
     const auto &edgetops = (*cur_class_vertices.begin())->edge_tops;
@@ -627,7 +629,8 @@ void mergeClass(ptr_vector<VertexInfo> &infos, NGHolder &g, unsigned eq_class,
 // report behaviour with a single vertex).
 static
 bool mergeEquivalentClasses(vector<VertexInfoSet> &classes,
-                            ptr_vector<VertexInfo> &infos, NGHolder &g) {
+                            vector<unique_ptr<VertexInfo>> &infos,
+                            NGHolder &g) {
     bool merged = false;
     set<NFAVertex> toRemove;
 
@@ -657,7 +660,7 @@ bool reduceGraphEquivalences(NGHolder &g, EquivalenceType eq_type) {
 
     // get information on every vertex in the graph
     // new vertices are allocated here, and stored in infos
-    ptr_vector<VertexInfo> infos = getVertexInfos(g);
+    auto infos = getVertexInfos(g);
 
     // partition the graph
     auto classes = partitionGraph(infos, work_queue, g, eq_type);