Mirror of https://github.com/VectorCamp/vectorscan.git (synced 2025-06-28 16:41:01 +03:00)

commit 556206f138
parent 9f7088a9e0

    replace push_back by emplace_back where possible
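
For context on what the change buys: push_back takes an already-constructed element and copies or moves it into the container, while emplace_back forwards its arguments to the element's constructor so the object can be built in place. The sketch below is a minimal, self-contained illustration of that difference; it is generic C++ and is not taken from the vectorscan sources. Where the argument is already an object of the element type (as in many hunks below, e.g. dead.emplace_back(e)), the two calls end up performing the same copy or move, which is why the commit is scoped "where possible" rather than claiming a win at every call site.

#include <string>
#include <utility>
#include <vector>

int main() {
    std::vector<std::pair<int, std::string>> entries;

    // push_back: a temporary pair is constructed first, then moved into
    // the vector's storage.
    entries.push_back(std::make_pair(1, std::string("one")));

    // emplace_back: the arguments are forwarded to pair's constructor and
    // the element is constructed directly inside the vector's storage,
    // skipping the intermediate temporary.
    entries.emplace_back(2, "two");

    return 0;
}
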
@@ -231,7 +231,7 @@ void checkForMultilineStart(ReportManager &rm, NGHolder &g,
      * required so that ^ doesn't match trailing \n */
     for (const auto &e : out_edges_range(v, g)) {
         if (target(e, g) == g.accept) {
-            dead.push_back(e);
+            dead.emplace_back(e);
         }
     }
     /* assert has been resolved; clear flag */
@@ -494,11 +494,11 @@ map<BucketIndex, vector<LiteralIndex>> assignStringsToBuckets(
         u32 cnt = last_id - first_id;
         // long literals first for included literals checking
         for (u32 k = 0; k < cnt; k++) {
-            litIds.push_back(last_id - k - 1);
+            litIds.emplace_back(last_id - k - 1);
         }

         i = j;
-        buckets.push_back(litIds);
+        buckets.emplace_back(litIds);
     }

     // reverse bucket id, longer literals come first
@@ -162,7 +162,7 @@ bytecode_ptr<FDRConfirm> getFDRConfirm(const vector<hwlmLiteral> &lits,
         LitInfo & li = tmpLitInfo[i];
         u32 hash = CONF_HASH_CALL(li.v, andmsk, mult, nBits);
         DEBUG_PRINTF("%016llx --> %u\n", li.v, hash);
-        res2lits[hash].push_back(i);
+        res2lits[hash].emplace_back(i);
         gm |= li.groups;
     }

@@ -303,7 +303,7 @@ setupFullConfs(const vector<hwlmLiteral> &lits,
         if (contains(bucketToLits, b)) {
             vector<hwlmLiteral> vl;
             for (const LiteralIndex &lit_idx : bucketToLits.at(b)) {
-                vl.push_back(lits[lit_idx]);
+                vl.emplace_back(lits[lit_idx]);
             }

             DEBUG_PRINTF("b %d sz %zu\n", b, vl.size());
@@ -166,7 +166,7 @@ public:
                 nibbleSets[i * 2] = nibbleSets[i * 2 + 1] = 0xffff;
             }
         }
-        litIds.push_back(lit_id);
+        litIds.emplace_back(lit_id);
         sort_and_unique(litIds);
     }

@@ -515,7 +515,7 @@ void fillReinforcedTable(const map<BucketIndex,
                          u8 *rtable_base, const u32 num_tables) {
     vector<u8 *> tables;
     for (u32 i = 0; i < num_tables; i++) {
-        tables.push_back(rtable_base + i * RTABLE_SIZE);
+        tables.emplace_back(rtable_base + i * RTABLE_SIZE);
     }

     for (auto t : tables) {
@@ -105,7 +105,7 @@ static
 path append(const path &orig, const CharReach &cr, u32 new_dest) {
     path p(new_dest);
     p.reach = orig.reach;
-    p.reach.push_back(cr);
+    p.reach.emplace_back(cr);

     return p;
 }
@@ -117,25 +117,25 @@ void extend(const raw_dfa &rdfa, const vector<CharReach> &rev_map,
     const dstate &s = rdfa.states[p.dest];

     if (!p.reach.empty() && p.reach.back().none()) {
-        out.push_back(p);
+        out.emplace_back(p);
         return;
     }

     if (!s.reports.empty()) {
         if (generates_callbacks(rdfa.kind)) {
-            out.push_back(p);
+            out.emplace_back(p);
             return;
         } else {
             path pp = append(p, CharReach(), p.dest);
-            all[p.dest].push_back(pp);
-            out.push_back(move(pp));
+            all[p.dest].emplace_back(pp);
+            out.emplace_back(move(pp));
         }
     }

     if (!s.reports_eod.empty()) {
         path pp = append(p, CharReach(), p.dest);
-        all[p.dest].push_back(pp);
-        out.push_back(move(pp));
+        all[p.dest].emplace_back(pp);
+        out.emplace_back(move(pp));
     }

     flat_map<u32, CharReach> dest;
@@ -154,8 +154,8 @@ void extend(const raw_dfa &rdfa, const vector<CharReach> &rev_map,

             DEBUG_PRINTF("----good: [%s] -> %u\n",
                          describeClasses(pp.reach).c_str(), pp.dest);
-            all[e.first].push_back(pp);
-            out.push_back(move(pp));
+            all[e.first].emplace_back(pp);
+            out.emplace_back(move(pp));
         }
     }

@@ -165,7 +165,7 @@ vector<vector<CharReach>> generate_paths(const raw_dfa &rdfa,
     const vector<CharReach> rev_map = reverse_alpha_remapping(rdfa);
     vector<path> paths{path(base)};
     unordered_map<u32, vector<path>> all;
-    all[base].push_back(path(base));
+    all[base].emplace_back(path(base));
     for (u32 i = 0; i < len && paths.size() < PATHS_LIMIT; i++) {
         vector<path> next_gen;
         for (const auto &p : paths) {
@@ -180,7 +180,7 @@ vector<vector<CharReach>> generate_paths(const raw_dfa &rdfa,
     vector<vector<CharReach>> rv;
     rv.reserve(paths.size());
     for (auto &p : paths) {
-        rv.push_back(vector<CharReach>(std::make_move_iterator(p.reach.begin()),
+        rv.emplace_back(vector<CharReach>(std::make_move_iterator(p.reach.begin()),
                                        std::make_move_iterator(p.reach.end())));
     }
     return rv;
@@ -318,7 +318,7 @@ set<dstate_id_t> find_region(const raw_dfa &rdfa, dstate_id_t base,

             DEBUG_PRINTF(" %hu is in region\n", t);
             region.insert(t);
-            pending.push_back(t);
+            pending.emplace_back(t);
         }
     }

@ -157,7 +157,7 @@ void getNeighborInfo(const CliqueGraph &g, vector<u32> &neighbor,
|
||||
// find neighbors for cv
|
||||
for (const auto &v : adjacent_vertices_range(cv, g)) {
|
||||
if (g[v].stateId != id && contains(group, g[v].stateId)) {
|
||||
neighbor.push_back(g[v].stateId);
|
||||
neighbor.emplace_back(g[v].stateId);
|
||||
DEBUG_PRINTF("Neighbor:%u\n", g[v].stateId);
|
||||
}
|
||||
}
|
||||
@ -172,7 +172,7 @@ void findCliqueGroup(CliqueGraph &cg, vector<u32> &clique) {
|
||||
vector<u32> init;
|
||||
for (const auto &v : vertices_range(cg)) {
|
||||
vertexMap[cg[v].stateId] = v;
|
||||
init.push_back(cg[v].stateId);
|
||||
init.emplace_back(cg[v].stateId);
|
||||
}
|
||||
gStack.push(init);
|
||||
|
||||
@ -186,7 +186,7 @@ void findCliqueGroup(CliqueGraph &cg, vector<u32> &clique) {
|
||||
// Choose a vertex from the graph
|
||||
u32 id = g[0];
|
||||
const CliqueVertex &n = vertexMap.at(id);
|
||||
clique.push_back(id);
|
||||
clique.emplace_back(id);
|
||||
// Corresponding vertex in the original graph
|
||||
vector<u32> neighbor;
|
||||
set<u32> subgraphId(g.begin(), g.end());
|
||||
@ -215,7 +215,7 @@ vector<u32> removeClique(CliqueGraph &cg) {
|
||||
vector<CliqueVertex> dead;
|
||||
for (const auto &v : vertices_range(cg)) {
|
||||
if (find(c.begin(), c.end(), cg[v].stateId) != c.end()) {
|
||||
dead.push_back(v);
|
||||
dead.emplace_back(v);
|
||||
}
|
||||
}
|
||||
for (const auto &v : dead) {
|
||||
@ -227,7 +227,7 @@ vector<u32> removeClique(CliqueGraph &cg) {
|
||||
}
|
||||
vector<u32> clique;
|
||||
findCliqueGroup(cg, clique);
|
||||
cliquesVec.push_back(clique);
|
||||
cliquesVec.emplace_back(clique);
|
||||
}
|
||||
|
||||
// get the independent set with max size
|
||||
@ -288,11 +288,11 @@ vector<vector<u32>> checkExclusion(u32 &streamStateSize,
|
||||
// get min reset distance for each repeat
|
||||
for (size_t i = lower; i < upper; i++) {
|
||||
CliqueVertex v = add_vertex(CliqueVertexProps(i), *cg);
|
||||
vertices.push_back(v);
|
||||
vertices.emplace_back(v);
|
||||
|
||||
const vector<size_t> &tmp_dist =
|
||||
minResetDistToEnd(triggers[i], cr);
|
||||
min_reset_dist.push_back(tmp_dist);
|
||||
min_reset_dist.emplace_back(tmp_dist);
|
||||
}
|
||||
|
||||
// find exclusive pair for each repeat
|
||||
@ -311,7 +311,7 @@ vector<vector<u32>> checkExclusion(u32 &streamStateSize,
|
||||
auto clique = removeClique(*cg);
|
||||
size_t cliqueSize = clique.size();
|
||||
if (cliqueSize > 1) {
|
||||
groups.push_back(clique);
|
||||
groups.emplace_back(clique);
|
||||
exclusive = EXCLUSIVE;
|
||||
total += cliqueSize;
|
||||
}
|
||||
@ -387,7 +387,7 @@ void buildSubcastles(const CastleProto &proto, vector<SubCastle> &subs,
|
||||
}
|
||||
|
||||
if (pr.bounds.max.is_finite()) {
|
||||
may_stale.push_back(i);
|
||||
may_stale.emplace_back(i);
|
||||
}
|
||||
|
||||
info.type = verify_u8(rtype);
|
||||
@ -411,7 +411,7 @@ void buildSubcastles(const CastleProto &proto, vector<SubCastle> &subs,
|
||||
|
||||
if (rtype == REPEAT_SPARSE_OPTIMAL_P) {
|
||||
for (u32 j = 0; j < rsi.patchSize; j++) {
|
||||
tables.push_back(rsi.table[j]);
|
||||
tables.emplace_back(rsi.table[j]);
|
||||
}
|
||||
sparseRepeats++;
|
||||
patchSize[i] = rsi.patchSize;
|
||||
@ -509,10 +509,10 @@ buildCastle(const CastleProto &proto,
|
||||
is_reset = true;
|
||||
}
|
||||
|
||||
repeatInfoPair.push_back(make_pair(min_period, is_reset));
|
||||
repeatInfoPair.emplace_back(make_pair(min_period, is_reset));
|
||||
|
||||
candidateTriggers.push_back(triggers.at(top));
|
||||
candidateRepeats.push_back(i);
|
||||
candidateTriggers.emplace_back(triggers.at(top));
|
||||
candidateRepeats.emplace_back(i);
|
||||
}
|
||||
|
||||
// Case 1: exclusive repeats
|
||||
|
@ -152,7 +152,7 @@ HopcroftInfo::HopcroftInfo(const raw_dfa &rdfa)
|
||||
for (size_t i = 0; i < states.size(); i++) { // i is the previous state
|
||||
for (size_t sym = 0; sym < alpha_size; sym++) {
|
||||
dstate_id_t present_state = rdfa.states[i].next[sym];
|
||||
states[present_state].prev[sym].push_back(i);
|
||||
states[present_state].prev[sym].emplace_back(i);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -263,7 +263,7 @@ void mapping_new_states(const HopcroftInfo &info,
|
||||
new_states.reserve(num_partitions);
|
||||
|
||||
for (const auto &m : ordering) {
|
||||
new_states.push_back(rdfa.states[m.first]);
|
||||
new_states.emplace_back(rdfa.states[m.first]);
|
||||
}
|
||||
rdfa.states = std::move(new_states);
|
||||
}
|
||||
|
@ -147,7 +147,7 @@ void translateRawReports(UNUSED GoughGraph &cfg, UNUSED const raw_som_dfa &raw,
|
||||
} else {
|
||||
var = joins_at_s.at(sr.slot);
|
||||
}
|
||||
reports_out->push_back(make_pair(sr.report, var));
|
||||
reports_out->emplace_back(make_pair(sr.report, var));
|
||||
}
|
||||
}
|
||||
|
||||
@ -190,7 +190,7 @@ void makeCFG_top_edge(GoughGraph &cfg, const vector<GoughVertex> &vertices,
|
||||
shared_ptr<GoughSSAVarNew> vnew;
|
||||
if (slot_id == trigger_slot) {
|
||||
vnew = make_shared<GoughSSAVarNew>(0U);
|
||||
cfg[e].vars.push_back(vnew);
|
||||
cfg[e].vars.emplace_back(vnew);
|
||||
} else {
|
||||
assert(contains(src_slots, slot_id));
|
||||
}
|
||||
@ -207,7 +207,7 @@ void makeCFG_top_edge(GoughGraph &cfg, const vector<GoughVertex> &vertices,
|
||||
assert(contains(src_slots, slot_id));
|
||||
|
||||
shared_ptr<GoughSSAVarMin> vmin = make_shared<GoughSSAVarMin>();
|
||||
cfg[e].vars.push_back(vmin);
|
||||
cfg[e].vars.emplace_back(vmin);
|
||||
final_var = vmin.get();
|
||||
|
||||
DEBUG_PRINTF("slot %u gets a new value\n", slot_id);
|
||||
@ -280,7 +280,7 @@ void makeCFG_edge(GoughGraph &cfg, const map<u32, u32> &som_creators,
|
||||
vnew = vnew_by_adj[adjust];
|
||||
} else {
|
||||
vnew = make_shared<GoughSSAVarNew>(adjust);
|
||||
cfg[e].vars.push_back(vnew);
|
||||
cfg[e].vars.emplace_back(vnew);
|
||||
vnew_by_adj[adjust] = vnew;
|
||||
}
|
||||
assert(vnew);
|
||||
@ -318,7 +318,7 @@ void makeCFG_edge(GoughGraph &cfg, const map<u32, u32> &som_creators,
|
||||
DEBUG_PRINTF("bypassing min on join %u\n", slot_id);
|
||||
} else {
|
||||
shared_ptr<GoughSSAVarMin> vmin = make_shared<GoughSSAVarMin>();
|
||||
cfg[e].vars.push_back(vmin);
|
||||
cfg[e].vars.emplace_back(vmin);
|
||||
final_var = vmin.get();
|
||||
|
||||
if (vnew) {
|
||||
@ -352,13 +352,13 @@ unique_ptr<GoughGraph> makeCFG(const raw_som_dfa &raw) {
|
||||
u32 min_state = !is_triggered(raw.kind);
|
||||
|
||||
if (min_state) {
|
||||
vertices.push_back(GoughGraph::null_vertex()); /* skip dead state */
|
||||
vertices.emplace_back(GoughGraph::null_vertex()); /* skip dead state */
|
||||
}
|
||||
|
||||
vector<flat_map<u32, GoughSSAVarJoin *> > joins(raw.states.size());
|
||||
for (u32 i = min_state; i < raw.states.size(); ++i) {
|
||||
GoughVertex v = add_vertex(GoughVertexProps(i), *cfg);
|
||||
vertices.push_back(v);
|
||||
vertices.emplace_back(v);
|
||||
|
||||
/* create JOIN variables */
|
||||
for (som_tran_info::const_iterator it = raw.state_som[i].preds.begin();
|
||||
@ -366,7 +366,7 @@ unique_ptr<GoughGraph> makeCFG(const raw_som_dfa &raw) {
|
||||
u32 slot_id = it->first;
|
||||
if (!contains(raw.new_som_nfa_states, slot_id)
|
||||
|| raw.new_som_nfa_states.at(slot_id)) {
|
||||
(*cfg)[v].vars.push_back(make_shared<GoughSSAVarJoin>());
|
||||
(*cfg)[v].vars.emplace_back(make_shared<GoughSSAVarJoin>());
|
||||
joins[get(vertex_index, *cfg, v)][slot_id]
|
||||
= (*cfg)[v].vars.back().get();
|
||||
DEBUG_PRINTF("dfa %u:: slot %u\n", i, slot_id);
|
||||
@ -525,7 +525,7 @@ void mark_live_reports(const vector<pair<ReportID, GoughSSAVar *> > &reps,
|
||||
continue;
|
||||
}
|
||||
var->seen = true;
|
||||
queue->push_back(var);
|
||||
queue->emplace_back(var);
|
||||
}
|
||||
}
|
||||
|
||||
@ -546,7 +546,7 @@ void remove_dead(GoughGraph &g) {
|
||||
continue;
|
||||
}
|
||||
var->seen = true;
|
||||
queue.push_back(var);
|
||||
queue.emplace_back(var);
|
||||
}
|
||||
}
|
||||
|
||||
@ -589,7 +589,7 @@ gough_ins make_gough_ins(u8 op, u32 dest = INVALID_SLOT,
|
||||
|
||||
void GoughSSAVarNew::generate(vector<gough_ins> *out) const {
|
||||
assert(slot != INVALID_SLOT);
|
||||
out->push_back(make_gough_ins(GOUGH_INS_NEW, slot, adjust));
|
||||
out->emplace_back(make_gough_ins(GOUGH_INS_NEW, slot, adjust));
|
||||
}
|
||||
|
||||
#ifndef NDEBUG
|
||||
@ -616,7 +616,7 @@ void GoughSSAVarMin::generate(vector<gough_ins> *out) const {
|
||||
/* if the destination is one of the sources, no need to move it */
|
||||
first = false;
|
||||
} else {
|
||||
input_slots.push_back(var->slot);
|
||||
input_slots.emplace_back(var->slot);
|
||||
}
|
||||
}
|
||||
|
||||
@ -624,10 +624,10 @@ void GoughSSAVarMin::generate(vector<gough_ins> *out) const {
|
||||
|
||||
for (const u32 &input_slot : input_slots) {
|
||||
if (first) {
|
||||
out->push_back(make_gough_ins(GOUGH_INS_MOV, slot, input_slot));
|
||||
out->emplace_back(make_gough_ins(GOUGH_INS_MOV, slot, input_slot));
|
||||
first = false;
|
||||
} else {
|
||||
out->push_back(make_gough_ins(GOUGH_INS_MIN, slot, input_slot));
|
||||
out->emplace_back(make_gough_ins(GOUGH_INS_MIN, slot, input_slot));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -842,7 +842,7 @@ void add_simple_joins(edge_join_info &eji, vector<gough_ins> *out) {
|
||||
/* value of destination slot is not used by any remaining joins;
|
||||
* we can output this join immediately */
|
||||
DEBUG_PRINTF("out %u<-%u\n", dest, src);
|
||||
out->push_back(make_gough_ins(GOUGH_INS_MOV, dest, src));
|
||||
out->emplace_back(make_gough_ins(GOUGH_INS_MOV, dest, src));
|
||||
|
||||
eji.erase(src, dest);
|
||||
|
||||
@ -877,14 +877,14 @@ void add_joins_to_block(edge_join_info &eji, vector<gough_ins> *out,
|
||||
/* stash the initial value of the split register in a temp register */
|
||||
u32 temp = base_temp_slot++;
|
||||
DEBUG_PRINTF("out %u<-%u\n", temp, split);
|
||||
out->push_back(make_gough_ins(GOUGH_INS_MOV, temp, split));
|
||||
out->emplace_back(make_gough_ins(GOUGH_INS_MOV, temp, split));
|
||||
eji.remap_src(split, temp); /* update maps */
|
||||
|
||||
/* split can now be safely written out to as all the uses of it as an
|
||||
* input now refer to temp instead */
|
||||
|
||||
DEBUG_PRINTF("out %u<-%u\n", split, input_for_split);
|
||||
out->push_back(make_gough_ins(GOUGH_INS_MOV, split, input_for_split));
|
||||
out->emplace_back(make_gough_ins(GOUGH_INS_MOV, split, input_for_split));
|
||||
eji.erase(input_for_split, split);
|
||||
|
||||
/* handle any uncovered simple cases */
|
||||
@ -931,7 +931,7 @@ void build_blocks(const GoughGraph &g,
|
||||
|
||||
for (vector<gough_ins> &ins_list : *blocks | map_values) {
|
||||
assert(!ins_list.empty());
|
||||
ins_list.push_back(make_gough_ins(GOUGH_INS_END));
|
||||
ins_list.emplace_back(make_gough_ins(GOUGH_INS_END));
|
||||
}
|
||||
}
|
||||
|
||||
@ -1252,39 +1252,39 @@ unique_ptr<raw_report_info> gough_build_strat::gatherReports(
|
||||
|
||||
DEBUG_PRINTF("i = %zu [%zu]\n", reports.size(), gg[v].reports.size());
|
||||
if (v == GoughGraph::null_vertex() || gg[v].reports.empty()) {
|
||||
reports.push_back(MO_INVALID_IDX);
|
||||
reports.emplace_back(MO_INVALID_IDX);
|
||||
continue;
|
||||
}
|
||||
|
||||
raw_gough_report_list rrl(gg[v].reports, rm, remap_reports);
|
||||
DEBUG_PRINTF("non empty r %zu\n", reports.size());
|
||||
if (rev.find(rrl) != rev.end()) {
|
||||
reports.push_back(rev[rrl]);
|
||||
reports.emplace_back(rev[rrl]);
|
||||
} else {
|
||||
DEBUG_PRINTF("adding to rl\n");
|
||||
rev[rrl] = ri->size();
|
||||
reports.push_back(ri->size());
|
||||
ri->rl.push_back(rrl);
|
||||
reports.emplace_back(ri->size());
|
||||
ri->rl.emplace_back(rrl);
|
||||
}
|
||||
}
|
||||
|
||||
for (auto v : verts) {
|
||||
if (v == GoughGraph::null_vertex() || gg[v].reports_eod.empty()) {
|
||||
reports_eod.push_back(MO_INVALID_IDX);
|
||||
reports_eod.emplace_back(MO_INVALID_IDX);
|
||||
continue;
|
||||
}
|
||||
|
||||
DEBUG_PRINTF("non empty r eod\n");
|
||||
raw_gough_report_list rrl(gg[v].reports_eod, rm, remap_reports);
|
||||
if (rev.find(rrl) != rev.end()) {
|
||||
reports_eod.push_back(rev[rrl]);
|
||||
reports_eod.emplace_back(rev[rrl]);
|
||||
continue;
|
||||
}
|
||||
|
||||
DEBUG_PRINTF("adding to rl eod %zu\n", gg[v].reports_eod.size());
|
||||
rev[rrl] = ri->size();
|
||||
reports_eod.push_back(ri->size());
|
||||
ri->rl.push_back(rrl);
|
||||
reports_eod.emplace_back(ri->size());
|
||||
ri->rl.emplace_back(rrl);
|
||||
}
|
||||
|
||||
/* TODO: support single report in gough */
|
||||
@ -1313,7 +1313,7 @@ size_t raw_gough_report_info_impl::size() const {
|
||||
void raw_gough_report_info_impl::fillReportLists(NFA *n, size_t base_offset,
|
||||
vector<u32> &ro) const {
|
||||
for (const raw_gough_report_list &r : rl) {
|
||||
ro.push_back(base_offset);
|
||||
ro.emplace_back(base_offset);
|
||||
|
||||
gough_report_list *p = (gough_report_list *)((char *)n + base_offset);
|
||||
u32 i = 0;
|
||||
|
@ -145,7 +145,7 @@ void dump_var_mapping(const GoughGraph &g, const string &base,
|
||||
fprintf(f, "\tuses:");
|
||||
vector<u32> used_id;
|
||||
for (const GoughSSAVar *var : used) {
|
||||
used_id.push_back(var->slot);
|
||||
used_id.emplace_back(var->slot);
|
||||
}
|
||||
for (const u32 &id : used_id) {
|
||||
fprintf(f, " %u", id);
|
||||
@ -167,7 +167,7 @@ void dump_var_mapping(const GoughGraph &g, const string &base,
|
||||
fprintf(f, "\tuses:");
|
||||
vector<u32> used_id;
|
||||
for (const GoughSSAVar *var : used) {
|
||||
used_id.push_back(var->slot);
|
||||
used_id.emplace_back(var->slot);
|
||||
}
|
||||
for (const u32 &id : used_id) {
|
||||
fprintf(f, " %u", id);
|
||||
@ -194,7 +194,7 @@ void gather_vars(const GoughGraph &g, vector<const GoughSSAVar *> *vars,
|
||||
const GoughSSAVar *vp = g[v].vars[i].get();
|
||||
stringstream ss;
|
||||
ss << dump_name(g[v]) << "_" << i;
|
||||
vars->push_back(vp);
|
||||
vars->emplace_back(vp);
|
||||
names->insert(make_pair(vp, ss.str()));
|
||||
src_label->insert(make_pair(vp, dump_name(g[v])));
|
||||
}
|
||||
@ -205,7 +205,7 @@ void gather_vars(const GoughGraph &g, vector<const GoughSSAVar *> *vars,
|
||||
const GoughSSAVar *vp = g[e].vars[i].get();
|
||||
stringstream ss;
|
||||
ss << dump_name(g, e) << "_" << i;
|
||||
vars->push_back(vp);
|
||||
vars->emplace_back(vp);
|
||||
names->insert(make_pair(vp, ss.str()));
|
||||
src_label->insert(make_pair(vp, dump_name(g, e)));
|
||||
}
|
||||
|
@ -49,19 +49,19 @@ using boost::adaptors::map_values;
|
||||
namespace ue2 {
|
||||
|
||||
template<typename VarP, typename VarQ>
|
||||
void push_back_all_raw(vector<VarP> *out, const vector<VarQ> &in) {
|
||||
void emplace_back_all_raw(vector<VarP> *out, const vector<VarQ> &in) {
|
||||
for (const auto &var : in) {
|
||||
out->push_back(var.get());
|
||||
out->emplace_back(var.get());
|
||||
}
|
||||
}
|
||||
|
||||
static
|
||||
void all_vars(const GoughGraph &g, vector<GoughSSAVar *> *out) {
|
||||
for (auto v : vertices_range(g)) {
|
||||
push_back_all_raw(out, g[v].vars);
|
||||
emplace_back_all_raw(out, g[v].vars);
|
||||
}
|
||||
for (const auto &e : edges_range(g)) {
|
||||
push_back_all_raw(out, g[e].vars);
|
||||
emplace_back_all_raw(out, g[e].vars);
|
||||
}
|
||||
}
|
||||
|
||||
@ -380,7 +380,7 @@ template<typename VarP>
|
||||
void add_to_dom_ordering(const vector<VarP> &vars,
|
||||
vector<GoughSSAVar *> *out) {
|
||||
for (const auto &var : vars) {
|
||||
out->push_back(var.get());
|
||||
out->emplace_back(var.get());
|
||||
}
|
||||
}
|
||||
|
||||
@ -389,7 +389,7 @@ class FinishVisitor : public boost::default_dfs_visitor {
|
||||
public:
|
||||
explicit FinishVisitor(vector<GoughVertex> *o) : out(o) {}
|
||||
void finish_vertex(const GoughVertex v, const GoughGraph &) {
|
||||
out->push_back(v);
|
||||
out->emplace_back(v);
|
||||
}
|
||||
vector<GoughVertex> *out;
|
||||
};
|
||||
|
@ -331,7 +331,7 @@ void buildReachMapping(const build_info &args, vector<NFAStateSet> &reach,
|
||||
verts.reserve(args.num_states);
|
||||
for (auto v : vertices_range(h)) {
|
||||
if (state_ids.at(v) != NO_STATE) {
|
||||
verts.push_back(v);
|
||||
verts.emplace_back(v);
|
||||
}
|
||||
}
|
||||
|
||||
@ -362,7 +362,7 @@ void buildReachMapping(const build_info &args, vector<NFAStateSet> &reach,
|
||||
u8 num = 0;
|
||||
for (auto mi = mapping.begin(), me = mapping.end(); mi != me; ++mi, ++num) {
|
||||
// Reach entry.
|
||||
reach.push_back(mi->first);
|
||||
reach.emplace_back(mi->first);
|
||||
|
||||
// Character mapping.
|
||||
const CharReach &cr = mi->second;
|
||||
@ -427,7 +427,7 @@ void gatherAccelStates(const build_info &bi, vector<AccelBuild> &accelStates) {
|
||||
DEBUG_PRINTF("state %u is accelerable\n", bi.state_ids.at(v));
|
||||
AccelBuild a;
|
||||
findStopLiterals(bi, v, a);
|
||||
accelStates.push_back(a);
|
||||
accelStates.emplace_back(a);
|
||||
}
|
||||
|
||||
// AccelStates should be sorted by state number, so that we build our accel
|
||||
@ -548,7 +548,7 @@ void filterAccelStates(NGHolder &g, const map<u32, set<NFAVertex>> &tops,
|
||||
for (const auto &vv : tops | map_values) {
|
||||
for (NFAVertex v : vv) {
|
||||
if (!edge(g.start, v, g).second) {
|
||||
tempEdges.push_back(add_edge(g.start, v, g).first);
|
||||
tempEdges.emplace_back(add_edge(g.start, v, g).first);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -556,7 +556,7 @@ void filterAccelStates(NGHolder &g, const map<u32, set<NFAVertex>> &tops,
|
||||
// Similarly, connect (start, startDs) if necessary.
|
||||
if (!edge(g.start, g.startDs, g).second) {
|
||||
NFAEdge e = add_edge(g.start, g.startDs, g);
|
||||
tempEdges.push_back(e); // Remove edge later.
|
||||
tempEdges.emplace_back(e); // Remove edge later.
|
||||
}
|
||||
|
||||
unordered_map<NFAVertex, AccelScheme> out;
|
||||
@ -623,7 +623,7 @@ void fillAccelInfo(build_info &bi) {
|
||||
|
||||
vector<NFAVertex> astates;
|
||||
for (const auto &m : accel_map) {
|
||||
astates.push_back(m.first);
|
||||
astates.emplace_back(m.first);
|
||||
}
|
||||
|
||||
NFAStateSet useful(num_states);
|
||||
@ -644,7 +644,7 @@ void fillAccelInfo(build_info &bi) {
|
||||
for (u32 j = 0, j_end = astates.size(); j < j_end; j++) {
|
||||
if (i & (1U << j)) {
|
||||
NFAVertex v = astates[j];
|
||||
states.push_back(v);
|
||||
states.emplace_back(v);
|
||||
state_set.set(state_ids.at(v));
|
||||
}
|
||||
}
|
||||
@ -886,12 +886,12 @@ void buildAccel(const build_info &args, NFAStateSet &accelMask,
|
||||
// bits in accelStates.
|
||||
vector<AccelBuild> accelOuts(accelCount);
|
||||
vector<u32> effective_accel_set;
|
||||
effective_accel_set.push_back(0); /* empty is effectively empty */
|
||||
effective_accel_set.emplace_back(0); /* empty is effectively empty */
|
||||
|
||||
for (u32 i = 1; i < accelCount; i++) {
|
||||
u32 effective_i = getEffectiveAccelStates(args, dom_map, i,
|
||||
accelStates);
|
||||
effective_accel_set.push_back(effective_i);
|
||||
effective_accel_set.emplace_back(effective_i);
|
||||
|
||||
if (effective_i == IMPOSSIBLE_ACCEL_MASK) {
|
||||
DEBUG_PRINTF("this combination of accel states is not possible\n");
|
||||
@ -913,7 +913,7 @@ void buildAccel(const build_info &args, NFAStateSet &accelMask,
|
||||
// an index.
|
||||
|
||||
// Start with the NONE case.
|
||||
auxvec.push_back(AccelAux());
|
||||
auxvec.emplace_back(AccelAux());
|
||||
memset(&auxvec[0], 0, sizeof(AccelAux));
|
||||
auxvec[0].accel_type = ACCEL_NONE; // no states on.
|
||||
|
||||
@ -949,7 +949,7 @@ void buildAccel(const build_info &args, NFAStateSet &accelMask,
|
||||
auto it = find_if(auxvec.begin(), auxvec.end(), AccelAuxCmp(aux));
|
||||
if (it == auxvec.end()) {
|
||||
accelTable[i] = verify_u8(auxvec.size());
|
||||
auxvec.push_back(aux);
|
||||
auxvec.emplace_back(aux);
|
||||
} else {
|
||||
accelTable[i] = verify_u8(it - auxvec.begin());
|
||||
}
|
||||
@ -995,7 +995,7 @@ u32 addSquashMask(const build_info &args, const NFAVertex &v,
|
||||
return verify_u32(std::distance(squash.begin(), it));
|
||||
}
|
||||
u32 idx = verify_u32(squash.size());
|
||||
squash.push_back(sit->second);
|
||||
squash.emplace_back(sit->second);
|
||||
return idx;
|
||||
}
|
||||
|
||||
@ -1007,7 +1007,7 @@ u32 addReports(const flat_set<ReportID> &r, vector<ReportID> &reports,
|
||||
assert(!r.empty());
|
||||
|
||||
vector<ReportID> my_reports(begin(r), end(r));
|
||||
my_reports.push_back(MO_INVALID_IDX); // sentinel
|
||||
my_reports.emplace_back(MO_INVALID_IDX); // sentinel
|
||||
|
||||
auto cache_it = reports_cache.find(my_reports);
|
||||
if (cache_it != end(reports_cache)) {
|
||||
@ -1064,7 +1064,7 @@ void buildAcceptsList(const build_info &args, ReportListCache &reports_cache,
|
||||
a.reports = addReports(h[v].reports, reports, reports_cache);
|
||||
}
|
||||
a.squash = addSquashMask(args, v, squash);
|
||||
accepts.push_back(move(a));
|
||||
accepts.emplace_back(move(a));
|
||||
}
|
||||
}
|
||||
|
||||
@ -1089,11 +1089,11 @@ void buildAccepts(const build_info &args, ReportListCache &reports_cache,
|
||||
|
||||
if (edge(v, h.accept, h).second) {
|
||||
acceptMask.set(state_id);
|
||||
verts_accept.push_back(v);
|
||||
verts_accept.emplace_back(v);
|
||||
} else {
|
||||
assert(edge(v, h.acceptEod, h).second);
|
||||
acceptEodMask.set(state_id);
|
||||
verts_accept_eod.push_back(v);
|
||||
verts_accept_eod.emplace_back(v);
|
||||
}
|
||||
}
|
||||
|
||||
@ -1510,7 +1510,7 @@ u32 buildExceptionMap(const build_info &args, ReportListCache &reports_cache,
|
||||
// of states.
|
||||
assert(e.succ_states.size() == num_states);
|
||||
assert(e.squash_states.size() == num_states);
|
||||
exceptionMap[e].push_back(i);
|
||||
exceptionMap[e].emplace_back(i);
|
||||
exceptionCount++;
|
||||
}
|
||||
}
|
||||
@ -2513,7 +2513,7 @@ bool isFast(const build_info &args) {
|
||||
unordered_set<NFAVertex> visited;
|
||||
for (const auto &m : args.tops) {
|
||||
for (NFAVertex v : m.second) {
|
||||
cur.push_back(v);
|
||||
cur.emplace_back(v);
|
||||
visited.insert(v);
|
||||
}
|
||||
}
|
||||
@ -2537,7 +2537,7 @@ bool isFast(const build_info &args) {
|
||||
continue;
|
||||
}
|
||||
if (!contains(visited, w)) {
|
||||
next.push_back(w);
|
||||
next.emplace_back(w);
|
||||
visited.insert(w);
|
||||
}
|
||||
}
|
||||
|
@ -354,7 +354,7 @@ static
|
||||
void setupReach(const u8 *reachMap, const u8 *reachBase, u32 size,
|
||||
u32 state_count, vector<CharReach> *perStateReach) {
|
||||
for (u32 i = 0; i < state_count; i++) {
|
||||
perStateReach->push_back(CharReach());
|
||||
perStateReach->emplace_back(CharReach());
|
||||
for (u32 j = 0; j < N_CHARS; j++) {
|
||||
u8 k = reachMap[j];
|
||||
const u8 *r = reachBase + k * (size/8);
|
||||
|
@ -162,7 +162,7 @@ DfaPrevInfo::DfaPrevInfo(raw_dfa &rdfa)
|
||||
for (size_t i = 0; i < states.size(); i++) {
|
||||
for (symbol_t sym = 0; sym < impl_alpha_size; sym++) {
|
||||
dstate_id_t curr = rdfa.states[i].next[sym];
|
||||
states[curr].prev_vec[sym].push_back(i);
|
||||
states[curr].prev_vec[sym].emplace_back(i);
|
||||
}
|
||||
if (!rdfa.states[i].reports.empty()
|
||||
|| !rdfa.states[i].reports_eod.empty()) {
|
||||
@ -398,7 +398,7 @@ unique_ptr<raw_report_info> mcclellan_build_strat::gatherReports(
|
||||
|
||||
for (const dstate &s : rdfa.states) {
|
||||
if (s.reports.empty()) {
|
||||
reports.push_back(MO_INVALID_IDX);
|
||||
reports.emplace_back(MO_INVALID_IDX);
|
||||
continue;
|
||||
}
|
||||
|
||||
@ -406,18 +406,18 @@ unique_ptr<raw_report_info> mcclellan_build_strat::gatherReports(
|
||||
DEBUG_PRINTF("non empty r\n");
|
||||
auto it = rev.find(rrl);
|
||||
if (it != rev.end()) {
|
||||
reports.push_back(it->second);
|
||||
reports.emplace_back(it->second);
|
||||
} else {
|
||||
DEBUG_PRINTF("adding to rl %zu\n", ri->size());
|
||||
rev.emplace(rrl, ri->size());
|
||||
reports.push_back(ri->size());
|
||||
ri->rl.push_back(rrl);
|
||||
reports.emplace_back(ri->size());
|
||||
ri->rl.emplace_back(rrl);
|
||||
}
|
||||
}
|
||||
|
||||
for (const dstate &s : rdfa.states) {
|
||||
if (s.reports_eod.empty()) {
|
||||
reports_eod.push_back(MO_INVALID_IDX);
|
||||
reports_eod.emplace_back(MO_INVALID_IDX);
|
||||
continue;
|
||||
}
|
||||
|
||||
@ -425,14 +425,14 @@ unique_ptr<raw_report_info> mcclellan_build_strat::gatherReports(
|
||||
raw_report_list rrl(s.reports_eod, rm, remap_reports);
|
||||
auto it = rev.find(rrl);
|
||||
if (it != rev.end()) {
|
||||
reports_eod.push_back(it->second);
|
||||
reports_eod.emplace_back(it->second);
|
||||
continue;
|
||||
}
|
||||
|
||||
DEBUG_PRINTF("adding to rl eod %zu\n", s.reports_eod.size());
|
||||
rev.emplace(rrl, ri->size());
|
||||
reports_eod.push_back(ri->size());
|
||||
ri->rl.push_back(rrl);
|
||||
reports_eod.emplace_back(ri->size());
|
||||
ri->rl.emplace_back(rrl);
|
||||
}
|
||||
|
||||
assert(!ri->rl.empty()); /* all components should be able to generate
|
||||
@ -484,7 +484,7 @@ size_t raw_report_info_impl::size() const {
|
||||
void raw_report_info_impl::fillReportLists(NFA *n, size_t base_offset,
|
||||
vector<u32> &ro) const {
|
||||
for (const auto &reps : rl) {
|
||||
ro.push_back(base_offset);
|
||||
ro.emplace_back(base_offset);
|
||||
|
||||
report_list *p = (report_list *)((char *)n + base_offset);
|
||||
|
||||
@ -569,13 +569,13 @@ bool allocateFSN16(dfa_info &info, dstate_id_t *sherman_base,
|
||||
|
||||
for (u32 i = 1; i < info.size(); i++) {
|
||||
if (info.is_widehead(i)) {
|
||||
wideHead.push_back(i);
|
||||
wideHead.emplace_back(i);
|
||||
} else if (info.is_widestate(i)) {
|
||||
wideState.push_back(i);
|
||||
wideState.emplace_back(i);
|
||||
} else if (info.is_sherman(i)) {
|
||||
sherm.push_back(i);
|
||||
sherm.emplace_back(i);
|
||||
} else {
|
||||
norm.push_back(i);
|
||||
norm.emplace_back(i);
|
||||
}
|
||||
}
|
||||
|
||||
@ -893,11 +893,11 @@ void allocateFSN8(dfa_info &info,
|
||||
|
||||
for (u32 i = 1; i < info.size(); i++) {
|
||||
if (!info.states[i].reports.empty()) {
|
||||
accept.push_back(i);
|
||||
accept.emplace_back(i);
|
||||
} else if (contains(accel_escape_info, i)) {
|
||||
accel.push_back(i);
|
||||
accel.emplace_back(i);
|
||||
} else {
|
||||
norm.push_back(i);
|
||||
norm.emplace_back(i);
|
||||
}
|
||||
}
|
||||
|
||||
@ -1248,7 +1248,7 @@ dstate_id_t find_chain_candidate(const raw_dfa &rdfa, const DfaPrevInfo &info,
|
||||
const symbol_t curr_sym,
|
||||
vector<dstate_id_t> &temp_chain) {
|
||||
//Record current id first.
|
||||
temp_chain.push_back(curr_id);
|
||||
temp_chain.emplace_back(curr_id);
|
||||
|
||||
const u16 size = info.impl_alpha_size;
|
||||
|
||||
@ -1311,7 +1311,7 @@ bool store_chain_longest(vector<vector<dstate_id_t>> &candidate_chain,
|
||||
DEBUG_PRINTF("This is a new chain!\n");
|
||||
|
||||
// Add this new chain and get it marked.
|
||||
candidate_chain.push_back(temp_chain);
|
||||
candidate_chain.emplace_back(temp_chain);
|
||||
|
||||
for (auto &id : temp_chain) {
|
||||
DEBUG_PRINTF("(Marking s%u ...)\n", id);
|
||||
@ -1385,18 +1385,18 @@ void generate_symbol_chain(dfa_info &info, vector<symbol_t> &chain_tail) {
|
||||
|
||||
// The tail symbol comes from vector chain_tail;
|
||||
if (j == width - 1) {
|
||||
symbol_chain.push_back(chain_tail[i]);
|
||||
symbol_chain.emplace_back(chain_tail[i]);
|
||||
} else {
|
||||
for (symbol_t sym = 0; sym < info.impl_alpha_size; sym++) {
|
||||
if (rdfa.states[curr_id].next[sym] == next_id) {
|
||||
symbol_chain.push_back(sym);
|
||||
symbol_chain.emplace_back(sym);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
info.wide_symbol_chain.push_back(symbol_chain);
|
||||
info.wide_symbol_chain.emplace_back(symbol_chain);
|
||||
}
|
||||
}
|
||||
|
||||
@ -1445,12 +1445,12 @@ void find_wide_state(dfa_info &info) {
|
||||
}
|
||||
|
||||
reverse(temp_chain.begin(), temp_chain.end());
|
||||
temp_chain.push_back(curr_id);
|
||||
temp_chain.emplace_back(curr_id);
|
||||
|
||||
assert(head > 0 && head == temp_chain.front());
|
||||
if (store_chain_longest(info.wide_state_chain, temp_chain,
|
||||
added, head_is_new)) {
|
||||
chain_tail.push_back(sym);
|
||||
chain_tail.emplace_back(sym);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -150,7 +150,7 @@ u32 calc_min_dist_from_bob(raw_dfa &raw, vector<u32> *dist_in) {
|
||||
continue;
|
||||
}
|
||||
if (dist[t] == ~0U) {
|
||||
to_visit.push_back(t);
|
||||
to_visit.emplace_back(t);
|
||||
dist[t] = d + 1;
|
||||
} else {
|
||||
assert(dist[t] <= d + 1);
|
||||
|
@ -390,15 +390,15 @@ bool allocateImplId16(dfa_info &info, dstate_id_t sheng_end,
|
||||
continue; /* sheng impl ids have already been allocated */
|
||||
} if (info.is_sherman(i)) {
|
||||
if (info.is_sheng_succ(i)) {
|
||||
sherm_sheng_succ.push_back(i);
|
||||
sherm_sheng_succ.emplace_back(i);
|
||||
} else {
|
||||
sherm.push_back(i);
|
||||
sherm.emplace_back(i);
|
||||
}
|
||||
} else {
|
||||
if (info.is_sheng_succ(i)) {
|
||||
norm_sheng_succ.push_back(i);
|
||||
norm_sheng_succ.emplace_back(i);
|
||||
} else {
|
||||
norm.push_back(i);
|
||||
norm.emplace_back(i);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -589,7 +589,7 @@ dstate_id_t find_sheng_states(dfa_info &info,
|
||||
sheng_states.insert(v);
|
||||
for (const auto &t : adjacent_vertices_range(v, g)) {
|
||||
if (!contains(considered, g[t].index)) {
|
||||
to_consider.push_back(t);
|
||||
to_consider.emplace_back(t);
|
||||
}
|
||||
if (t == base_cyclic) {
|
||||
seen_back_edge = true;
|
||||
@ -1279,11 +1279,11 @@ void allocateImplId8(dfa_info &info, dstate_id_t sheng_end,
|
||||
if (info.is_sheng(i)) {
|
||||
continue; /* already allocated */
|
||||
} else if (!info.states[i].reports.empty()) {
|
||||
accept.push_back(i);
|
||||
accept.emplace_back(i);
|
||||
} else if (contains(accel_escape_info, i)) {
|
||||
accel.push_back(i);
|
||||
accel.emplace_back(i);
|
||||
} else {
|
||||
norm.push_back(i);
|
||||
norm.emplace_back(i);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -140,12 +140,12 @@ void populateClusters(const vector<raw_puff> &puffs_in,
|
||||
|
||||
u32 e = MQE_TOP_FIRST;
|
||||
for (const auto &puff : triggered_puffs) {
|
||||
puff_clusters[ClusterKey(e, puff)].push_back(puff);
|
||||
puff_clusters[ClusterKey(e, puff)].emplace_back(puff);
|
||||
e++;
|
||||
}
|
||||
|
||||
for (const auto &puff : puffs_in) {
|
||||
puff_clusters[ClusterKey(puff)].push_back(puff);
|
||||
puff_clusters[ClusterKey(puff)].emplace_back(puff);
|
||||
}
|
||||
|
||||
|
||||
@ -264,7 +264,7 @@ void fillCounterInfos(vector<mpv_counter_info> *out, u32 *curr_decomp_offset,
|
||||
assert(it->first.trigger_event
|
||||
== MQE_TOP_FIRST + distance(kilopuffs.begin(), it));
|
||||
|
||||
out->push_back(mpv_counter_info());
|
||||
out->emplace_back(mpv_counter_info());
|
||||
map<ClusterKey, vector<raw_puff>>::const_iterator it_o = it;
|
||||
++it;
|
||||
fillCounterInfo(&out->back(), curr_decomp_offset, curr_comp_offset,
|
||||
@ -282,14 +282,14 @@ void fillCounterInfos(vector<mpv_counter_info> *out, u32 *curr_decomp_offset,
|
||||
++it;
|
||||
}
|
||||
if (it != trig_ite) {
|
||||
out->push_back(mpv_counter_info());
|
||||
out->emplace_back(mpv_counter_info());
|
||||
fillCounterInfo(&out->back(), curr_decomp_offset, curr_comp_offset,
|
||||
kilopuffs, kilopuffs.begin(), it);
|
||||
}
|
||||
while (it != kilopuffs.end() && it->first.auto_restart) {
|
||||
assert(it->first.trigger_event == MQE_INVALID);
|
||||
|
||||
out->push_back(mpv_counter_info());
|
||||
out->emplace_back(mpv_counter_info());
|
||||
map<ClusterKey, vector<raw_puff>>::const_iterator it_o = it;
|
||||
++it;
|
||||
fillCounterInfo(&out->back(), curr_decomp_offset, curr_comp_offset,
|
||||
|
@ -44,7 +44,7 @@ RdfaGraph::RdfaGraph(const raw_dfa &rdfa) {
|
||||
vector<RdfaGraph::vertex_descriptor> verts;
|
||||
verts.reserve(rdfa.states.size());
|
||||
for (dstate_id_t i = 0; i < rdfa.states.size(); i++) {
|
||||
verts.push_back(add_vertex(g));
|
||||
verts.emplace_back(add_vertex(g));
|
||||
assert(g[verts.back()].index == i);
|
||||
}
|
||||
|
||||
|
@ -132,7 +132,7 @@ public:
|
||||
|
||||
if (t.any() && t != esets[i]) {
|
||||
esets[i] &= ~t;
|
||||
esets.push_back(t);
|
||||
esets.emplace_back(t);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -204,7 +204,7 @@ public:
|
||||
const vector<StateSet> initial() {
|
||||
vector<StateSet> rv = {as};
|
||||
if (start_floating != DEAD_STATE && start_floating != start_anchored) {
|
||||
rv.push_back(fs);
|
||||
rv.emplace_back(fs);
|
||||
}
|
||||
return rv;
|
||||
}
|
||||
@ -342,17 +342,17 @@ void mergeDfas(vector<unique_ptr<raw_dfa>> &dfas, size_t max_states,
|
||||
// Put the larger of the two DFAs on the output list, retain the
|
||||
// smaller one on the queue for further merge attempts.
|
||||
if (d2->states.size() > d1->states.size()) {
|
||||
dfas.push_back(move(d2));
|
||||
dfas.emplace_back(move(d2));
|
||||
q.push(move(d1));
|
||||
} else {
|
||||
dfas.push_back(move(d1));
|
||||
dfas.emplace_back(move(d1));
|
||||
q.push(move(d2));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
while (!q.empty()) {
|
||||
dfas.push_back(move(q.front()));
|
||||
dfas.emplace_back(move(q.front()));
|
||||
q.pop();
|
||||
}
|
||||
|
||||
|
@ -80,10 +80,10 @@ u32 repeatRecurTable(struct RepeatStateInfo *info, const depth &repeatMax,
|
||||
u32 repeatTmp = info->patchCount > 2 ? 64 : (u32)repeatMax;
|
||||
u32 repeat_index = repeatTmp < minPeriod ? repeatTmp : minPeriod;
|
||||
for (u32 i = 0; i <= repeat_index; i++) {
|
||||
info->table.push_back(i + 1);
|
||||
info->table.emplace_back(i + 1);
|
||||
}
|
||||
for (u32 i = minPeriod + 1; i <= repeatTmp; i++) {
|
||||
info->table.push_back(info->table[i - 1] + info->table[i - minPeriod]);
|
||||
info->table.emplace_back(info->table[i - 1] + info->table[i - minPeriod]);
|
||||
if (info->table[i] < info->table[i - 1]) {
|
||||
return i - 1;
|
||||
}
|
||||
@ -341,7 +341,7 @@ vector<size_t> minResetDistToEnd(const vector<vector<CharReach>> &triggers,
|
||||
break;
|
||||
}
|
||||
}
|
||||
out.push_back(i);
|
||||
out.emplace_back(i);
|
||||
}
|
||||
|
||||
return out;
|
||||
|
@ -179,7 +179,7 @@ size_t raw_report_info_impl::size() const {
|
||||
void raw_report_info_impl::fillReportLists(NFA *n, size_t base_offset,
|
||||
vector<u32> &ro) const {
|
||||
for (const auto &reps : rl) {
|
||||
ro.push_back(base_offset);
|
||||
ro.emplace_back(base_offset);
|
||||
|
||||
report_list *p = (report_list *)((char *)n + base_offset);
|
||||
|
||||
@ -208,39 +208,39 @@ unique_ptr<raw_report_info> sheng_build_strat::gatherReports(
|
||||
|
||||
for (const dstate &s : rdfa.states) {
|
||||
if (s.reports.empty()) {
|
||||
reports.push_back(MO_INVALID_IDX);
|
||||
reports.emplace_back(MO_INVALID_IDX);
|
||||
continue;
|
||||
}
|
||||
|
||||
raw_report_list rrl(s.reports, rm, remap_reports);
|
||||
DEBUG_PRINTF("non empty r\n");
|
||||
if (rev.find(rrl) != rev.end()) {
|
||||
reports.push_back(rev[rrl]);
|
||||
reports.emplace_back(rev[rrl]);
|
||||
} else {
|
||||
DEBUG_PRINTF("adding to rl %zu\n", ri->size());
|
||||
rev[rrl] = ri->size();
|
||||
reports.push_back(ri->size());
|
||||
ri->rl.push_back(rrl);
|
||||
reports.emplace_back(ri->size());
|
||||
ri->rl.emplace_back(rrl);
|
||||
}
|
||||
}
|
||||
|
||||
for (const dstate &s : rdfa.states) {
|
||||
if (s.reports_eod.empty()) {
|
||||
reports_eod.push_back(MO_INVALID_IDX);
|
||||
reports_eod.emplace_back(MO_INVALID_IDX);
|
||||
continue;
|
||||
}
|
||||
|
||||
DEBUG_PRINTF("non empty r eod\n");
|
||||
raw_report_list rrl(s.reports_eod, rm, remap_reports);
|
||||
if (rev.find(rrl) != rev.end()) {
|
||||
reports_eod.push_back(rev[rrl]);
|
||||
reports_eod.emplace_back(rev[rrl]);
|
||||
continue;
|
||||
}
|
||||
|
||||
DEBUG_PRINTF("adding to rl eod %zu\n", s.reports_eod.size());
|
||||
rev[rrl] = ri->size();
|
||||
reports_eod.push_back(ri->size());
|
||||
ri->rl.push_back(rrl);
|
||||
reports_eod.emplace_back(ri->size());
|
||||
ri->rl.emplace_back(rrl);
|
||||
}
|
||||
|
||||
assert(!ri->rl.empty()); /* all components should be able to generate
|
||||
|
@ -182,7 +182,7 @@ bool shuftiBuildDoubleMasks(const CharReach &onechar,
|
||||
}
|
||||
nibble_masks.clear();
|
||||
for (const auto &e : new_masks) {
|
||||
nibble_masks.push_back(e.second);
|
||||
nibble_masks.emplace_back(e.second);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -54,7 +54,7 @@ void remapTops(const TamaInfo &tamaInfo,
|
||||
u32 cur = 0;
|
||||
for (const auto &sub : tamaInfo.subengines) {
|
||||
u32 base = cur;
|
||||
top_base.push_back(base + MQE_TOP_FIRST);
|
||||
top_base.emplace_back(base + MQE_TOP_FIRST);
|
||||
DEBUG_PRINTF("subengine:%u\n", i);
|
||||
for (const auto &t : tamaInfo.tops[i++]) {
|
||||
cur = base + t;
|
||||
@ -163,8 +163,8 @@ set<ReportID> all_reports(const TamaProto &proto) {
|
||||
|
||||
void TamaInfo::add(NFA *sub, const set<u32> &top) {
|
||||
assert(subengines.size() < max_occupancy);
|
||||
subengines.push_back(sub);
|
||||
tops.push_back(top);
|
||||
subengines.emplace_back(sub);
|
||||
tops.emplace_back(top);
|
||||
}
|
||||
|
||||
void TamaProto::add(const NFA *n, const u32 id, const u32 top,
|
||||
|
@ -94,7 +94,7 @@ vector<NFAEdge> getAsserts(const NGHolder &g) {
|
||||
vector<NFAEdge> out;
|
||||
for (const auto &e : edges_range(g)) {
|
||||
if (g[e].assert_flags) {
|
||||
out.push_back(e);
|
||||
out.emplace_back(e);
|
||||
}
|
||||
}
|
||||
return out;
|
||||
|
@ -213,7 +213,7 @@ vector<NFAEdge> findShellEdges(const NGHolder &g,
|
||||
(is_special(v, g) || contains(tail_shell, v))) {
|
||||
DEBUG_PRINTF("edge (%zu,%zu) is a shell edge\n", g[u].index,
|
||||
g[v].index);
|
||||
shell_edges.push_back(e);
|
||||
shell_edges.emplace_back(e);
|
||||
}
|
||||
}
|
||||
|
||||
@ -291,7 +291,7 @@ void splitIntoComponents(unique_ptr<NGHolder> g,
|
||||
if (head_shell.size() + tail_shell.size() + N_SPECIALS >=
|
||||
num_vertices(*g)) {
|
||||
DEBUG_PRINTF("all in shell component\n");
|
||||
comps.push_back(std::move(g));
|
||||
comps.emplace_back(std::move(g));
|
||||
*shell_comp = true;
|
||||
return;
|
||||
}
|
||||
@ -306,7 +306,7 @@ void splitIntoComponents(unique_ptr<NGHolder> g,
|
||||
// into the tail shell, we aren't going to find more than one component.
|
||||
if (shell_edges.empty() && shellHasOnePath(*g, head_shell, tail_shell)) {
|
||||
DEBUG_PRINTF("single component\n");
|
||||
comps.push_back(std::move(g));
|
||||
comps.emplace_back(std::move(g));
|
||||
return;
|
||||
}
|
||||
|
||||
@ -329,7 +329,7 @@ void splitIntoComponents(unique_ptr<NGHolder> g,
|
||||
assert(num > 0);
|
||||
if (num == 1 && shell_edges.empty()) {
|
||||
DEBUG_PRINTF("single component\n");
|
||||
comps.push_back(std::move(g));
|
||||
comps.emplace_back(std::move(g));
|
||||
return;
|
||||
}
|
||||
|
||||
@ -341,7 +341,7 @@ void splitIntoComponents(unique_ptr<NGHolder> g,
|
||||
for (const auto &m : split_components) {
|
||||
NFAVertex v = m.first;
|
||||
u32 c = m.second;
|
||||
verts[c].push_back(v);
|
||||
verts[c].emplace_back(v);
|
||||
DEBUG_PRINTF("vertex %zu is in comp %u\n", (*g)[v].index, c);
|
||||
}
|
||||
|
||||
@ -370,7 +370,7 @@ void splitIntoComponents(unique_ptr<NGHolder> g,
|
||||
pruneUseless(*gc);
|
||||
DEBUG_PRINTF("component %zu has %zu vertices\n", comps.size(),
|
||||
num_vertices(*gc));
|
||||
comps.push_back(move(gc));
|
||||
comps.emplace_back(move(gc));
|
||||
}
|
||||
|
||||
// Another component to handle the direct shell-to-shell edges.
|
||||
@ -386,7 +386,7 @@ void splitIntoComponents(unique_ptr<NGHolder> g,
|
||||
pruneUseless(*gc);
|
||||
DEBUG_PRINTF("shell edge component %zu has %zu vertices\n",
|
||||
comps.size(), num_vertices(*gc));
|
||||
comps.push_back(move(gc));
|
||||
comps.emplace_back(move(gc));
|
||||
*shell_comp = true;
|
||||
}
|
||||
|
||||
@ -410,7 +410,7 @@ deque<unique_ptr<NGHolder>> calcComponents(unique_ptr<NGHolder> g,
|
||||
// For trivial cases, we needn't bother running the full
|
||||
// connected_components algorithm.
|
||||
if (!grey.calcComponents || isAlternationOfClasses(*g)) {
|
||||
comps.push_back(std::move(g));
|
||||
comps.emplace_back(std::move(g));
|
||||
return comps;
|
||||
}
|
||||
|
||||
@ -444,7 +444,7 @@ void recalcComponents(deque<unique_ptr<NGHolder>> &comps, const Grey &grey) {
|
||||
}
|
||||
|
||||
if (isAlternationOfClasses(*gc)) {
|
||||
out.push_back(std::move(gc));
|
||||
out.emplace_back(std::move(gc));
|
||||
continue;
|
||||
}
|
||||
|
||||
|
@ -493,7 +493,7 @@ bool removeSiblingsOfStartDotStar(NGHolder &g) {
|
||||
continue;
|
||||
}
|
||||
DEBUG_PRINTF("removing %zu->%zu\n", g[u].index, g[v].index);
|
||||
dead.push_back(e);
|
||||
dead.emplace_back(e);
|
||||
}
|
||||
}
|
||||
|
||||
@ -520,7 +520,7 @@ bool optimiseVirtualStarts(NGHolder &g) {
|
||||
|
||||
for (const auto &e : in_edges_range(v, g)) {
|
||||
if (!is_any_start(source(e, g), g)) {
|
||||
dead.push_back(e);
|
||||
dead.emplace_back(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -148,7 +148,7 @@ public:
|
||||
// unique push
|
||||
void push(unsigned id) {
|
||||
if (ids.insert(id).second) {
|
||||
q.push_back(id);
|
||||
q.emplace_back(id);
|
||||
}
|
||||
}
|
||||
|
||||
@ -269,7 +269,7 @@ vector<unique_ptr<VertexInfo>> getVertexInfos(const NGHolder &g) {
|
||||
vertex_map.resize(num_verts);
|
||||
|
||||
for (auto v : vertices_range(g)) {
|
||||
infos.push_back(make_unique<VertexInfo>(v, g));
|
||||
infos.emplace_back(make_unique<VertexInfo>(v, g));
|
||||
vertex_map[g[v].index] = infos.back().get();
|
||||
}
|
||||
|
||||
@ -442,7 +442,7 @@ void equivalence(vector<VertexInfoSet> &classes, WorkQueue &work_queue,
|
||||
classes[cur_class].erase(vi);
|
||||
new_class_vertices.insert(vi);
|
||||
}
|
||||
classes.push_back(move(new_class_vertices));
|
||||
classes.emplace_back(move(new_class_vertices));
|
||||
|
||||
if (contains(tmi->first, cur_class)) {
|
||||
reval_queue.push(new_class);
|
||||
@ -516,7 +516,7 @@ void mergeClass(vector<unique_ptr<VertexInfo>> &infos, NGHolder &g,
|
||||
g[new_v].reports.clear(); /* populated as we pull in succs */
|
||||
|
||||
// store this vertex in our global vertex list
|
||||
infos.push_back(make_unique<VertexInfo>(new_v, g));
|
||||
infos.emplace_back(make_unique<VertexInfo>(new_v, g));
|
||||
VertexInfo *new_vertex_info = infos.back().get();
|
||||
|
||||
NFAVertex new_v_eod = NGHolder::null_vertex();
|
||||
@ -525,7 +525,7 @@ void mergeClass(vector<unique_ptr<VertexInfo>> &infos, NGHolder &g,
|
||||
if (require_separate_eod_vertex(cur_class_vertices, g)) {
|
||||
new_v_eod = clone_vertex(g, old_v);
|
||||
g[new_v_eod].reports.clear();
|
||||
infos.push_back(make_unique<VertexInfo>(new_v_eod, g));
|
||||
infos.emplace_back(make_unique<VertexInfo>(new_v_eod, g));
|
||||
new_vertex_info_eod = infos.back().get();
|
||||
}
|
||||
|
||||
|
@ -68,7 +68,7 @@ void removeLeadingVirtualVerticesFromRoot(NGHolder &g, NFAVertex root) {
|
||||
for (auto v : adjacent_vertices_range(root, g)) {
|
||||
if (g[v].assert_flags & POS_FLAG_VIRTUAL_START) {
|
||||
DEBUG_PRINTF("(?m)^ vertex or leading \\[bB] vertex\n");
|
||||
victims.push_back(v);
|
||||
victims.emplace_back(v);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -353,7 +353,7 @@ bool anchorPatternWithBoundedRepeat(NGHolder &g, ReportManager &rm) {
|
||||
if (v == g.startDs) {
|
||||
continue;
|
||||
}
|
||||
initials.push_back(v);
|
||||
initials.emplace_back(v);
|
||||
}
|
||||
if (initials.empty()) {
|
||||
DEBUG_PRINTF("no initial vertices\n");
|
||||
@ -576,13 +576,13 @@ bool transformMinLengthToRepeat(NGHolder &g, ReportManager &rm) {
|
||||
if (u == cyclic) {
|
||||
continue;
|
||||
}
|
||||
preds.push_back(u);
|
||||
preds.emplace_back(u);
|
||||
|
||||
// We want to delete the out-edges of each predecessor, but need to
|
||||
// make sure we don't delete the startDs self loop.
|
||||
for (const auto &e : out_edges_range(u, g)) {
|
||||
if (target(e, g) != g.startDs) {
|
||||
dead.push_back(e);
|
||||
dead.emplace_back(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -601,7 +601,7 @@ bool transformMinLengthToRepeat(NGHolder &g, ReportManager &rm) {
|
||||
add_edge(u, v, g);
|
||||
}
|
||||
preds.clear();
|
||||
preds.push_back(v);
|
||||
preds.emplace_back(v);
|
||||
}
|
||||
assert(!preds.empty());
|
||||
for (auto u : preds) {
|
||||
@ -732,7 +732,7 @@ void pruneExtUnreachable(NGHolder &g, const ReportManager &rm) {
|
||||
for (const auto &e : edges_range(g)) {
|
||||
if (isEdgePrunable(g, report, depths, e)) {
|
||||
DEBUG_PRINTF("pruning\n");
|
||||
dead.push_back(e);
|
||||
dead.emplace_back(e);
|
||||
}
|
||||
}
|
||||
|
||||
@ -775,14 +775,14 @@ void pruneVacuousEdges(NGHolder &g, const ReportManager &rm) {
|
||||
// a min_offset.
|
||||
if (u == g.start && is_any_accept(v, g) && has_min_offset(u)) {
|
||||
DEBUG_PRINTF("vacuous edge in graph with min_offset!\n");
|
||||
dead.push_back(e);
|
||||
dead.emplace_back(e);
|
||||
continue;
|
||||
}
|
||||
|
||||
// If a min_length is set, vacuous edges can be removed.
|
||||
if (is_any_start(u, g) && is_any_accept(v, g) && has_min_length(u)) {
|
||||
DEBUG_PRINTF("vacuous edge in graph with min_length!\n");
|
||||
dead.push_back(e);
|
||||
dead.emplace_back(e);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
@ -825,14 +825,14 @@ void pruneUnmatchable(NGHolder &g, const vector<DepthMinMax> &depths,
|
||||
if (d.max.is_finite() && d.max < report.minLength) {
|
||||
DEBUG_PRINTF("prune, max match length %s < min_length=%llu\n",
|
||||
d.max.str().c_str(), report.minLength);
|
||||
dead.push_back(e);
|
||||
dead.emplace_back(e);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (report.maxOffset != MAX_OFFSET && d.min > report.maxOffset) {
|
||||
DEBUG_PRINTF("prune, min match length %s > max_offset=%llu\n",
|
||||
d.min.str().c_str(), report.maxOffset);
|
||||
dead.push_back(e);
|
||||
dead.emplace_back(e);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
@ -88,7 +88,7 @@ bool findMask(const NGHolder &g, vector<CharReach> *mask, bool *anchored,
|
||||
return true;
|
||||
}
|
||||
|
||||
mask->push_back(g[v].char_reach);
|
||||
mask->emplace_back(g[v].char_reach);
|
||||
|
||||
if (out_degree(v, g) != 1) {
|
||||
DEBUG_PRINTF("out_degree != 1\n");
|
||||
|
@ -194,7 +194,7 @@ public:
|
||||
const vector<StateSet> initial() {
|
||||
vector<StateSet> rv = {init};
|
||||
if (start_floating != DEAD_STATE && start_floating != start_anchored) {
|
||||
rv.push_back(initDS);
|
||||
rv.emplace_back(initDS);
|
||||
}
|
||||
return rv;
|
||||
}
|
||||
@ -354,7 +354,7 @@ public:
|
||||
|
||||
if (t.any() && t != esets[i]) {
|
||||
esets[i] &= ~t;
|
||||
esets.push_back(t);
|
||||
esets.emplace_back(t);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -380,7 +380,7 @@ public:
|
||||
const vector<StateSet> initial() {
|
||||
vector<StateSet> rv(1, as);
|
||||
if (start_floating != DEAD_STATE && start_floating != start_anchored) {
|
||||
rv.push_back(fs);
|
||||
rv.emplace_back(fs);
|
||||
}
|
||||
return rv;
|
||||
}
|
||||
@ -454,7 +454,7 @@ void haig_do_preds(const NGHolder &g, const stateset &nfa_states,
|
||||
DEBUG_PRINTF("d vertex %zu\n", g[v].index);
|
||||
vector<u32> &out_map = preds[slot_id];
|
||||
for (auto u : inv_adjacent_vertices_range(v, g)) {
|
||||
out_map.push_back(g[u].index);
|
||||
out_map.emplace_back(g[u].index);
|
||||
}
|
||||
|
||||
sort(out_map.begin(), out_map.end());
|
||||
@ -536,7 +536,7 @@ bool doHaig(const NGHolder &g, som_type som,
|
||||
|
||||
rdfa->state_som.reserve(rdfa->states.size());
|
||||
for (u32 i = 0; i < rdfa->states.size(); i++) {
|
||||
rdfa->state_som.push_back(dstate_som());
|
||||
rdfa->state_som.emplace_back(dstate_som());
|
||||
const StateSet &source_states = nfa_state_map[i];
|
||||
if (source_states.count() > HAIG_MAX_LIVE_SOM_SLOTS) {
|
||||
DEBUG_PRINTF("too many live states\n");
|
||||
@ -632,9 +632,9 @@ void haig_merge_do_preds(const vector<const raw_som_dfa *> &dfas,
|
||||
for (vector<u32>::const_iterator jt = it->second.begin();
|
||||
jt != it->second.end(); ++jt) {
|
||||
if (*jt < N_SPECIALS || *jt == CREATE_NEW_SOM) {
|
||||
out.push_back(*jt);
|
||||
out.emplace_back(*jt);
|
||||
} else {
|
||||
out.push_back(*jt + adj);
|
||||
out.emplace_back(*jt + adj);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -741,7 +741,7 @@ unique_ptr<raw_som_dfa> attemptToMergeHaig(const vector<const raw_som_dfa *> &df
|
||||
vector<u32> per_dfa_adj;
|
||||
u32 curr_adj = 0;
|
||||
for (const auto &haig : dfas) {
|
||||
per_dfa_adj.push_back(curr_adj);
|
||||
per_dfa_adj.emplace_back(curr_adj);
|
||||
curr_adj += total_slots_used(*haig);
|
||||
if (curr_adj < per_dfa_adj.back()) {
|
||||
/* overflowed our som slot count */
|
||||
@ -751,7 +751,7 @@ unique_ptr<raw_som_dfa> attemptToMergeHaig(const vector<const raw_som_dfa *> &df
|
||||
|
||||
rdfa->state_som.reserve(rdfa->states.size());
|
||||
for (u32 i = 0; i < rdfa->states.size(); i++) {
|
||||
rdfa->state_som.push_back(dstate_som());
|
||||
rdfa->state_som.emplace_back(dstate_som());
|
||||
const vector<dstate_id_t> &source_nfa_states = nfa_state_map[i];
|
||||
DEBUG_PRINTF("finishing state %u\n", i);
|
||||
|
||||
|
@ -391,7 +391,7 @@ void reusePredsAsStarts(const NGHolder &g, const map<u32, CharReach> &top_reach,
|
||||
vector<NFAVertex> cand_starts;
|
||||
for (NFAVertex u : unhandled_succ_tops | map_keys) {
|
||||
if (hasSelfLoop(u, g)) {
|
||||
cand_starts.push_back(u);
|
||||
cand_starts.emplace_back(u);
|
||||
}
|
||||
}
|
||||
|
||||
@ -525,7 +525,7 @@ void reverseStateOrdering(unordered_map<NFAVertex, u32> &state_ids) {
|
||||
if (e.second == NO_STATE) {
|
||||
continue;
|
||||
}
|
||||
ordering.push_back(e.first);
|
||||
ordering.emplace_back(e.first);
|
||||
}
|
||||
|
||||
// Sort in reverse order by state ID.
|
||||
|
@ -148,7 +148,7 @@ void findPaths(const NGHolder &g, NFAVertex v,
|
||||
if (v == g.accept || v == g.acceptEod) {
|
||||
paths->push_back({});
|
||||
if (!generates_callbacks(g) || v == g.acceptEod) {
|
||||
paths->back().push_back(CharReach()); /* red tape options */
|
||||
paths->back().emplace_back(CharReach()); /* red tape options */
|
||||
}
|
||||
return;
|
||||
}
|
||||
@ -181,8 +181,8 @@ void findPaths(const NGHolder &g, NFAVertex v,
|
||||
} while (new_depth-- && curr.size() >= MAGIC_TOO_WIDE_NUMBER);
|
||||
|
||||
for (auto &c : curr) {
|
||||
c.push_back(cr);
|
||||
paths->push_back(std::move(c));
|
||||
c.emplace_back(cr);
|
||||
paths->emplace_back(std::move(c));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -254,7 +254,7 @@ void findBestInternal(vector<vector<CharReach>>::const_iterator pb,
|
||||
DEBUG_PRINTF("worse\n");
|
||||
continue;
|
||||
}
|
||||
priority_path.push_back(move(as));
|
||||
priority_path.emplace_back(move(as));
|
||||
}
|
||||
|
||||
sort(priority_path.begin(), priority_path.end());
|
||||
@ -422,7 +422,7 @@ void findDoubleBest(vector<vector<CharReach> >::const_iterator pb,
|
||||
DEBUG_PRINTF("worse\n");
|
||||
continue;
|
||||
}
|
||||
priority_path.push_back(move(as));
|
||||
priority_path.emplace_back(move(as));
|
||||
}
|
||||
|
||||
sort(priority_path.begin(), priority_path.end());
|
||||
|
@ -113,7 +113,7 @@ void dumpGraph(const char *filename, const LitGraph &lg) {
|
||||
fout << "[label=\"SINK\"];";
|
||||
} else {
|
||||
ue2_literal s;
|
||||
s.push_back(lg[v].c);
|
||||
s.emplace_back(lg[v].c);
|
||||
fout << "[label=\"" << dumpString(s) << "\"];";
|
||||
}
|
||||
fout << endl;
|
||||
@ -558,12 +558,12 @@ void findMinCut(LitGraph &lg, vector<LitEdge> &cutset) {
|
||||
|
||||
if (ucolor != small_color::white && vcolor == small_color::white) {
|
||||
assert(v != lg.sink);
|
||||
white_cut.push_back(e);
|
||||
white_cut.emplace_back(e);
|
||||
white_flow += lg[e].score;
|
||||
}
|
||||
if (ucolor == small_color::black && vcolor != small_color::black) {
|
||||
assert(v != lg.sink);
|
||||
black_cut.push_back(e);
|
||||
black_cut.emplace_back(e);
|
||||
black_flow += lg[e].score;
|
||||
}
|
||||
}
|
||||
@ -657,7 +657,7 @@ u64a sanitizeAndCompressAndScore(set<ue2_literal> &lits) {
|
||||
continue;
dont_explode:
make_nocase(&s);
replacements.push_back(s);
replacements.emplace_back(s);
}
insert(&lits, replacements);
|
@ -102,8 +102,8 @@ bool findPaths(const NGHolder &g, vector<Path> &paths) {
|
||||
assert(read_count[g[u].index]);
for (const auto &p : built[g[u].index]) {
out.push_back(p);
out.back().push_back(v);
out.emplace_back(p);
out.back().emplace_back(v);
if (out.size() > MAX_PATHS) {
// All these paths should eventually end up at a sink, so
|
@ -182,7 +182,7 @@ struct PathMask {
|
||||
if (is_special(v, g)) {
continue;
}
mask.push_back(g[v].char_reach);
mask.emplace_back(g[v].char_reach);
}
// Reports are attached to the second-to-last vertex.
|
@ -238,7 +238,7 @@ bool handleDecoratedLiterals(RoseBuild &rose, const NGHolder &g,
|
||||
DEBUG_PRINTF("failed validation\n");
return false;
}
masks.push_back(move(pm));
masks.emplace_back(move(pm));
}
for (const auto &pm : masks) {
|
@ -116,7 +116,7 @@ void calculateAlphabet(const NGHolder &g, array<u16, ALPHABET_SIZE> &alpha,
|
||||
CharReach t = cr & esets[i];
if (t.any() && t != esets[i]) {
esets[i] &= ~t;
esets.push_back(t);
esets.emplace_back(t);
}
}
}
|
@ -401,7 +401,7 @@ public:
|
||||
const vector<StateSet> initial() {
vector<StateSet> rv = {init};
if (start_floating != DEAD_STATE && start_floating != start_anchored) {
rv.push_back(initDS);
rv.emplace_back(initDS);
}
return rv;
}
|
@ -112,7 +112,7 @@ void findCandidates(NGHolder &g, const vector<NFAVertex> &ordering,
|
||||
}
}
DEBUG_PRINTF("vertex %zu is a candidate\n", g[v].index);
cand->push_back(v);
cand->emplace_back(v);
next_cand:;
}
}
|
@ -143,7 +143,7 @@ void findCandidates_rev(NGHolder &g, const vector<NFAVertex> &ordering,
|
||||
}
}
DEBUG_PRINTF("vertex %zu is a candidate\n", g[v].index);
cand->push_back(v);
cand->emplace_back(v);
next_cand:;
}
}
|
@ -525,7 +525,7 @@ bool mergeCyclicDotStars(NGHolder &g) {
|
||||
add_edge_if_not_present(g.startDs, t, g);
// mark this edge for removal
deadEdges.push_back(e);
deadEdges.emplace_back(e);
}
// if the number of edges to be removed equals out degree, vertex
// needs to be removed; else, only remove the edges
|
@ -641,7 +641,7 @@ bool pruneUsingSuccessors(NGHolder &g, PrunePathsInfo &info, NFAVertex u,
|
||||
* existing in progress matches. */
continue;
}
u_succs.push_back(v);
u_succs.emplace_back(v);
}
stable_sort(u_succs.begin(), u_succs.end(),
|
@ -193,14 +193,14 @@ vector<NFAEdge> findMinCut(NGHolder &h, const vector<u64a> &scores) {
|
||||
DEBUG_PRINTF("found white cut edge %zu->%zu cap %llu\n",
h[from].index, h[to].index, ec);
observed_white_flow += ec;
picked_white.push_back(e);
picked_white.emplace_back(e);
}
if (fromColor == small_color::black && toColor != small_color::black) {
assert(ec <= INVALID_EDGE_CAP);
DEBUG_PRINTF("found black cut edge %zu->%zu cap %llu\n",
h[from].index, h[to].index, ec);
observed_black_flow += ec;
picked_black.push_back(e);
picked_black.emplace_back(e);
}
}
|
@ -183,7 +183,7 @@ map<u32, RegionInfo> findRegionInfo(const NGHolder &h,
|
||||
}
u32 id = region_map.at(v);
RegionInfo &ri = regions.emplace(id, RegionInfo(id)).first->second;
ri.vertices.push_back(v);
ri.vertices.emplace_back(v);
ri.reach |= h[v].char_reach;
}
|
@ -283,7 +283,7 @@ void replaceRegion(NGHolder &g, const RegionInfo &ri,
|
||||
if (i > 0) {
add_edge(verts.back(), v, g);
}
verts.push_back(v);
verts.emplace_back(v);
}
if (maxWidth.is_infinite()) {
|
@ -64,7 +64,7 @@ void pruneUnreachable(NGHolder &g) {
|
||||
// accept->acceptEod), so all non-specials are unreachable.
for (auto v : vertices_range(g)) {
if (!is_special(v, g)) {
dead.push_back(v);
dead.emplace_back(v);
}
}
} else {
|
@ -88,7 +88,7 @@ void pruneUnreachable(NGHolder &g) {
|
||||
continue;
}
if (!contains(colours, v)) {
dead.push_back(v);
dead.emplace_back(v);
}
}
}
|
@ -120,7 +120,7 @@ bool pruneForwardUseless(NGHolder &h, const nfag_t &g,
|
||||
if (!is_special(v, g) && get(colors, v) == small_color::white) {
DEBUG_PRINTF("vertex %zu is unreachable from %zu\n",
g[v].index, g[s].index);
dead.push_back(NFAVertex(v));
dead.emplace_back(NFAVertex(v));
}
}
|
@ -169,7 +169,7 @@ void pruneEmptyVertices(NGHolder &g) {
|
||||
const CharReach &cr = g[v].char_reach;
if (cr.none()) {
DEBUG_PRINTF("empty: %zu\n", g[v].index);
dead.push_back(v);
dead.emplace_back(v);
}
}
|
@ -207,7 +207,7 @@ void pruneHighlanderAccepts(NGHolder &g, const ReportManager &rm) {
|
||||
// We can prune any out-edges that aren't accepts
for (const auto &e : out_edges_range(u, g)) {
if (!is_any_accept(target(e, g), g)) {
dead.push_back(e);
dead.emplace_back(e);
}
}
}
|
@ -272,7 +272,7 @@ void pruneHighlanderDominated(NGHolder &g, const ReportManager &rm) {
|
||||
for (const auto &report_id : g[v].reports) {
const Report &r = rm.getReport(report_id);
if (isSimpleExhaustible(r)) {
reporters.push_back(v);
reporters.emplace_back(v);
break;
}
}
|
@ -281,7 +281,7 @@ void pruneHighlanderDominated(NGHolder &g, const ReportManager &rm) {
|
||||
for (const auto &report_id : g[v].reports) {
const Report &r = rm.getReport(report_id);
if (isSimpleExhaustible(r)) {
reporters.push_back(v);
reporters.emplace_back(v);
break;
}
}
|
@ -346,7 +346,7 @@ bool doComponent(RoseBuild &rose, ReportManager &rm, NGHolder &g, NFAVertex a,
|
||||
unbounded = true;
}
nodes.push_back(a);
nodes.emplace_back(a);
DEBUG_PRINTF("vertex %zu has in_degree %zu\n", g[a].index,
in_degree(a, g));
|
@ -379,7 +379,7 @@ bool doComponent(RoseBuild &rose, ReportManager &rm, NGHolder &g, NFAVertex a,
|
||||
if (a != g.startDs && edge(g.startDs, a, g).second
&& proper_out_degree(a, g) == 1
&& g[a].char_reach == cr) {
nodes.push_back(a);
nodes.emplace_back(a);
a = g.startDs;
}
|
@ -207,7 +207,7 @@ void succPredIntersection(const NFAVertex v, const flat_set<NFAVertex> &predSet,
|
||||
// Break out if we've reduced our intersection to [v]
if (best->size() == 1) {
assert(*(best->begin()) == v);
intersection.push_back(v);
intersection.emplace_back(v);
return;
}
}
|
@ -256,7 +256,7 @@ void predSuccIntersection(const NFAVertex v,
|
||||
// Break out if we've reduced our intersection to [v]
if (best->size() == 1) {
assert(*(best->begin()) == v);
intersection.push_back(v);
intersection.emplace_back(v);
return;
}
}
|
@ -100,7 +100,7 @@ void checkAndAddExitCandidate(const AcyclicGraph &g,
|
||||
if (!open.empty()) {
DEBUG_PRINTF("exit %zu\n", g[v].index);
exits.push_back(move(v_exit));
exits.emplace_back(move(v_exit));
}
}
|
@ -256,7 +256,7 @@ void removeRegionRedundancy(NGHolder &g, som_type som) {
|
||||
}
u32 region = region_map.at(v);
if (contains(deadRegions, region)) {
dead.push_back(v);
dead.emplace_back(v);
}
}
|
@ -320,7 +320,7 @@ void splitSubgraph(const NGHolder &g, const deque<NFAVertex> &verts,
|
||||
}
|
||||
u32 comp_id = rit->second;
|
||||
assert(comp_id < num);
|
||||
rs[comp_id].vertices.push_back(v);
|
||||
rs[comp_id].vertices.emplace_back(v);
|
||||
}
|
||||
|
||||
for (const auto &rsi : rs) {
|
||||
@ -409,7 +409,7 @@ void checkReachSubgraphs(const NGHolder &g, vector<ReachSubgraph> &rs,
|
||||
continue;
|
||||
}
|
||||
|
||||
verts.push_back(v);
|
||||
verts.emplace_back(v);
|
||||
}
|
||||
|
||||
if (recalc) {
|
||||
@ -421,7 +421,7 @@ void checkReachSubgraphs(const NGHolder &g, vector<ReachSubgraph> &rs,
|
||||
splitSubgraph(g, verts, minNumVertices, q);
|
||||
} else {
|
||||
DEBUG_PRINTF("subgraph is ok\n");
|
||||
rs_out.push_back(rsi);
|
||||
rs_out.emplace_back(rsi);
|
||||
}
|
||||
q.pop();
|
||||
}
|
||||
@ -638,7 +638,7 @@ void buildTugTrigger(NGHolder &g, NFAVertex cyclic, NFAVertex v,
|
||||
DEBUG_PRINTF("all preds in subgraph, vertex %zu becomes tug\n",
|
||||
g[v].index);
|
||||
add_edge(cyclic, v, g);
|
||||
tugs.push_back(v);
|
||||
tugs.emplace_back(v);
|
||||
return;
|
||||
}
|
||||
|
||||
@ -650,7 +650,7 @@ void buildTugTrigger(NGHolder &g, NFAVertex cyclic, NFAVertex v,
|
||||
DEBUG_PRINTF("there are other paths, cloned tug %zu from vertex %zu\n",
|
||||
g[t].index, g[v].index);
|
||||
|
||||
tugs.push_back(t);
|
||||
tugs.emplace_back(t);
|
||||
add_edge(cyclic, t, g);
|
||||
|
||||
// New vertex gets all of v's successors, including v itself if it's
|
||||
@ -738,7 +738,7 @@ void unpeelNearEnd(NGHolder &g, ReachSubgraph &rsi,
|
||||
}
|
||||
|
||||
succs->clear();
|
||||
succs->push_back(d);
|
||||
succs->emplace_back(d);
|
||||
|
||||
rsi.repeatMax -= 1;
|
||||
|
||||
@ -761,7 +761,7 @@ void getSuccessors(const NGHolder &g, const ReachSubgraph &rsi,
|
||||
if (v == last) { /* ignore self loop */
|
||||
continue;
|
||||
}
|
||||
succs->push_back(v);
|
||||
succs->emplace_back(v);
|
||||
}
|
||||
}
|
||||
|
||||
@ -837,7 +837,7 @@ void replaceSubgraphWithSpecial(NGHolder &g, ReachSubgraph &rsi,
|
||||
remove_vertices(rsi.vertices, g, false);
|
||||
erase_all(&depths, rsi.vertices);
|
||||
|
||||
repeats->push_back(BoundedRepeatData(rsi.historyType, rsi.repeatMin,
|
||||
repeats->emplace_back(BoundedRepeatData(rsi.historyType, rsi.repeatMin,
|
||||
rsi.repeatMax, rsi.minPeriod, cyclic,
|
||||
pos_trigger, tugs));
|
||||
}
|
||||
@ -905,7 +905,7 @@ void replaceSubgraphWithLazySpecial(NGHolder &g, ReachSubgraph &rsi,
|
||||
remove_vertices(rsi.vertices, g, false);
|
||||
erase_all(&depths, rsi.vertices);
|
||||
|
||||
repeats->push_back(BoundedRepeatData(rsi.historyType, rsi.repeatMin,
|
||||
repeats->emplace_back(BoundedRepeatData(rsi.historyType, rsi.repeatMin,
|
||||
rsi.repeatMax, rsi.minPeriod, cyclic,
|
||||
pos_trigger, tugs));
|
||||
}
|
||||
@ -1057,7 +1057,7 @@ void buildReachSubgraphs(const NGHolder &g, vector<ReachSubgraph> &rs,
|
||||
}
|
||||
u32 comp_id = rit->second;
|
||||
assert(comp_id < num);
|
||||
rs[comp_id].vertices.push_back(v);
|
||||
rs[comp_id].vertices.emplace_back(v);
|
||||
}
|
||||
|
||||
#ifdef DEBUG
|
||||
@ -1176,9 +1176,9 @@ void addTriggers(NGHolder &g,
|
||||
goto next_edge;
|
||||
}
|
||||
|
||||
starts_by_top[top].push_back(v);
|
||||
starts_by_top[top].emplace_back(v);
|
||||
}
|
||||
dead.push_back(e);
|
||||
dead.emplace_back(e);
|
||||
next_edge:;
|
||||
}
|
||||
|
||||
@ -1519,7 +1519,7 @@ struct StrawWalker {
|
||||
}
|
||||
|
||||
v = next;
|
||||
straw.push_back(v);
|
||||
straw.emplace_back(v);
|
||||
}
|
||||
|
||||
straw.clear();
|
||||
@ -1615,13 +1615,13 @@ vector<CharReach> getUnionedTrigger(const NGHolder &g, const NFAVertex v) {
|
||||
|
||||
if (contains(curr, g.start)) {
|
||||
DEBUG_PRINTF("start in repeat's immediate preds\n");
|
||||
trigger.push_back(CharReach::dot()); // Trigger could be anything!
|
||||
trigger.emplace_back(CharReach::dot()); // Trigger could be anything!
|
||||
return trigger;
|
||||
}
|
||||
|
||||
for (size_t num_steps = 0; num_steps < MAX_TRIGGER_STEPS; num_steps++) {
|
||||
next.clear();
|
||||
trigger.push_back(CharReach());
|
||||
trigger.emplace_back(CharReach());
|
||||
CharReach &cr = trigger.back();
|
||||
|
||||
for (auto v_c : curr) {
|
||||
@ -1664,7 +1664,7 @@ vector<vector<CharReach>> getRepeatTriggers(const NGHolder &g,
|
||||
triggers.push_back({}); // empty
|
||||
return triggers;
|
||||
}
|
||||
q.push_back(Path(1, u));
|
||||
q.emplace_back(Path(1, u));
|
||||
}
|
||||
|
||||
while (!q.empty()) {
|
||||
@ -1673,7 +1673,7 @@ vector<vector<CharReach>> getRepeatTriggers(const NGHolder &g,
|
||||
|
||||
if (path.size() >= max_len) {
|
||||
max_len = min(max_len, path.size());
|
||||
done.push_back(path);
|
||||
done.emplace_back(path);
|
||||
goto next_path;
|
||||
}
|
||||
|
||||
@ -1682,16 +1682,16 @@ vector<vector<CharReach>> getRepeatTriggers(const NGHolder &g,
|
||||
// Found an accept. There's no point expanding this path any
|
||||
// further, we're done.
|
||||
max_len = min(max_len, path.size());
|
||||
done.push_back(path);
|
||||
done.emplace_back(path);
|
||||
goto next_path;
|
||||
}
|
||||
|
||||
if (path.size() + 1 >= max_len) {
|
||||
done.push_back(path);
|
||||
done.back().push_back(u);
|
||||
done.emplace_back(path);
|
||||
done.back().emplace_back(u);
|
||||
} else {
|
||||
q.push_back(path); // copy
|
||||
q.back().push_back(u);
|
||||
q.emplace_back(path); // copy
|
||||
q.back().emplace_back(u);
|
||||
}
|
||||
}
|
||||
|
||||
@ -1703,7 +1703,7 @@ vector<vector<CharReach>> getRepeatTriggers(const NGHolder &g,
|
||||
if (q.size() + done.size() > UNIONED_FALLBACK_THRESHOLD) {
|
||||
DEBUG_PRINTF("search too large, fall back to union trigger\n");
|
||||
triggers.clear();
|
||||
triggers.push_back(getUnionedTrigger(g, sink));
|
||||
triggers.emplace_back(getUnionedTrigger(g, sink));
|
||||
return triggers;
|
||||
}
|
||||
}
|
||||
@ -1715,7 +1715,7 @@ vector<vector<CharReach>> getRepeatTriggers(const NGHolder &g,
|
||||
for (const auto &path : done) {
|
||||
vector<CharReach> reach_path;
|
||||
for (auto jt = path.rbegin(), jte = path.rend(); jt != jte; ++jt) {
|
||||
reach_path.push_back(g[*jt].char_reach);
|
||||
reach_path.emplace_back(g[*jt].char_reach);
|
||||
}
|
||||
unique_triggers.insert(reach_path);
|
||||
}
|
||||
@ -1960,7 +1960,7 @@ vector<NFAVertex> makeOwnStraw(NGHolder &g, BoundedRepeatData &rd,
|
||||
if (!own_straw.empty()) {
|
||||
add_edge(own_straw.back(), v2, g);
|
||||
}
|
||||
own_straw.push_back(v2);
|
||||
own_straw.emplace_back(v2);
|
||||
}
|
||||
|
||||
// Wire our straw to start, not startDs.
|
||||
@ -2536,7 +2536,7 @@ void findRepeats(const NGHolder &h, u32 minRepeatVertices,
|
||||
repeatMax = depth::infinity(); /* will continue to pump out matches */
|
||||
}
|
||||
|
||||
repeats_out->push_back(GraphRepeatInfo());
|
||||
repeats_out->emplace_back(GraphRepeatInfo());
|
||||
GraphRepeatInfo &ri = repeats_out->back();
|
||||
ri.vertices.swap(rsi.vertices);
|
||||
ri.repeatMin = rsi.repeatMin;
|
||||
|
@ -56,7 +56,7 @@ void wireStartToTops(NGHolder &g, const flat_set<NFAVertex> &tops,
|
||||
assert(!isLeafNode(v, g));
|
||||
|
||||
const NFAEdge &e = add_edge(g.start, v, g);
|
||||
tempEdges.push_back(e);
|
||||
tempEdges.emplace_back(e);
|
||||
}
|
||||
}
|
||||
|
||||
@ -109,10 +109,10 @@ void getStateOrdering(NGHolder &g, const flat_set<NFAVertex> &tops,
|
||||
temp.erase(remove(temp.begin(), temp.end(), g.startDs));
|
||||
temp.erase(remove(temp.begin(), temp.end(), g.start));
|
||||
if (proper_out_degree(g.startDs, g)) {
|
||||
temp.push_back(g.startDs);
|
||||
temp.emplace_back(g.startDs);
|
||||
}
|
||||
if (!startIsRedundant(g)) {
|
||||
temp.push_back(g.start);
|
||||
temp.emplace_back(g.start);
|
||||
}
|
||||
|
||||
// Walk ordering, remove vertices that shouldn't be participating in state
|
||||
@ -122,7 +122,7 @@ void getStateOrdering(NGHolder &g, const flat_set<NFAVertex> &tops,
|
||||
continue; // accepts don't need states
|
||||
}
|
||||
|
||||
ordering.push_back(v);
|
||||
ordering.emplace_back(v);
|
||||
}
|
||||
|
||||
// Output of topo order was in reverse.
|
||||
@ -167,7 +167,7 @@ void optimiseTightLoops(const NGHolder &g, vector<NFAVertex> &ordering) {
|
||||
continue;
|
||||
}
|
||||
if (edge(t, v, g).second && find(start, it, t) != ite) {
|
||||
candidates.push_back(make_pair(v, t));
|
||||
candidates.emplace_back(make_pair(v, t));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -166,12 +166,12 @@ void buildRegionMapping(const NGHolder &g,
|
||||
}
|
||||
|
||||
if (isRegionEntry(g, v, regions)) {
|
||||
info[region].enters.push_back(v);
|
||||
info[region].enters.emplace_back(v);
|
||||
}
|
||||
if (isRegionExit(g, v, regions)) {
|
||||
info[region].exits.push_back(v);
|
||||
info[region].exits.emplace_back(v);
|
||||
}
|
||||
info[region].full.push_back(v);
|
||||
info[region].full.emplace_back(v);
|
||||
}
|
||||
|
||||
for (auto &m : info) {
|
||||
@ -410,7 +410,7 @@ makePrefix(const NGHolder &g, const unordered_map<NFAVertex, u32> ®ions,
|
||||
if (p_v == prefix.accept || regions.at(v) < dead_region) {
|
||||
continue;
|
||||
}
|
||||
to_clear.push_back(p_v);
|
||||
to_clear.emplace_back(p_v);
|
||||
}
|
||||
|
||||
for (auto v : to_clear) {
|
||||
@ -1045,7 +1045,7 @@ void addReporterVertices(const region_info &r, const NGHolder &g,
|
||||
for (auto v : r.exits) {
|
||||
if (edge(v, g.accept, g).second || edge(v, g.acceptEod, g).second) {
|
||||
DEBUG_PRINTF("add reporter %zu\n", g[v].index);
|
||||
reporters.push_back(v);
|
||||
reporters.emplace_back(v);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1060,7 +1060,7 @@ void addMappedReporterVertices(const region_info &r, const NGHolder &g,
|
||||
DEBUG_PRINTF("adding v=%zu\n", g[v].index);
|
||||
auto it = mapping.find(v);
|
||||
assert(it != mapping.end());
|
||||
reporters.push_back(it->second);
|
||||
reporters.emplace_back(it->second);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1109,7 +1109,7 @@ void expandGraph(NGHolder &g, unordered_map<NFAVertex, u32> ®ions,
|
||||
if (is_special(v, g) || regions.at(v) < split_region) {
|
||||
continue;
|
||||
}
|
||||
tail_vertices.push_back(v);
|
||||
tail_vertices.emplace_back(v);
|
||||
}
|
||||
|
||||
for (auto enter : enters) {
|
||||
@ -1166,7 +1166,7 @@ void expandGraph(NGHolder &g, unordered_map<NFAVertex, u32> ®ions,
|
||||
}, g);
|
||||
}
|
||||
|
||||
new_enters.push_back(orig_to_copy[enter]);
|
||||
new_enters.emplace_back(orig_to_copy[enter]);
|
||||
}
|
||||
|
||||
// Remove the original set of tail vertices.
|
||||
@ -1659,7 +1659,7 @@ void anchorStarts(NGHolder &g) {
|
||||
continue;
|
||||
}
|
||||
add_edge_if_not_present(g.start, v, g[e], g);
|
||||
dead.push_back(e);
|
||||
dead.emplace_back(e);
|
||||
}
|
||||
remove_edges(dead, g);
|
||||
}
|
||||
@ -1720,7 +1720,7 @@ void clearProperInEdges(NGHolder &g, const NFAVertex sink) {
|
||||
if (source(e, g) == g.accept) {
|
||||
continue;
|
||||
}
|
||||
dead.push_back(e);
|
||||
dead.emplace_back(e);
|
||||
}
|
||||
|
||||
if (dead.empty()) {
|
||||
@ -2214,7 +2214,7 @@ bool leadingLiterals(const NGHolder &g, set<ue2_literal> *lits,
|
||||
sds_succ.erase(g.startDs);
|
||||
|
||||
map<NFAVertex, vector<ue2_literal> > curr;
|
||||
curr[g.startDs].push_back(ue2_literal());
|
||||
curr[g.startDs].emplace_back(ue2_literal());
|
||||
|
||||
map<NFAVertex, set<NFAVertex> > seen;
|
||||
map<NFAVertex, vector<ue2_literal> > next;
|
||||
@ -2273,7 +2273,7 @@ bool leadingLiterals(const NGHolder &g, set<ue2_literal> *lits,
|
||||
goto exit;
|
||||
}
|
||||
did_expansion = true;
|
||||
out.push_back(lit);
|
||||
out.emplace_back(lit);
|
||||
out.back().push_back(c, nocase);
|
||||
count++;
|
||||
if (out.back().length() > MAX_MASK2_WIDTH
|
||||
@ -2469,7 +2469,7 @@ bool doLitHaigSom(NG &ng, NGHolder &g, som_type som) {
|
||||
dumpHolder(*rhs, 91, "lithaig_rhs", ng.cc.grey);
|
||||
|
||||
vector<vector<CharReach> > triggers;
|
||||
triggers.push_back(as_cr_seq(lit));
|
||||
triggers.emplace_back(as_cr_seq(lit));
|
||||
|
||||
assert(rhs->kind == NFA_SUFFIX);
|
||||
shared_ptr<raw_som_dfa> haig
|
||||
@ -2579,7 +2579,7 @@ bool doHaigLitHaigSom(NG &ng, NGHolder &g,
|
||||
assert(rhs->kind == NFA_SUFFIX);
|
||||
|
||||
vector<vector<CharReach> > triggers;
|
||||
triggers.push_back(as_cr_seq(lit));
|
||||
triggers.emplace_back(as_cr_seq(lit));
|
||||
|
||||
ue2_literal lit2;
|
||||
if (getTrailingLiteral(g, &lit2)
|
||||
@ -2677,7 +2677,7 @@ bool doMultiLitHaigSom(NG &ng, NGHolder &g, som_type som) {
|
||||
}
|
||||
|
||||
assert(lit.length() <= MAX_MASK2_WIDTH || !mixed_sensitivity(lit));
|
||||
triggers.push_back(as_cr_seq(lit));
|
||||
triggers.emplace_back(as_cr_seq(lit));
|
||||
}
|
||||
|
||||
bool unordered_som_triggers = true; /* TODO: check overlaps to ensure that
|
||||
@ -2791,7 +2791,7 @@ map<u32, region_info>::const_iterator tryForLaterRevNfaCut(const NGHolder &g,
|
||||
continue;
|
||||
}
|
||||
|
||||
cands.push_back(it);
|
||||
cands.emplace_back(it);
|
||||
}
|
||||
|
||||
while (!cands.empty()) {
|
||||
@ -3023,7 +3023,7 @@ sombe_rv doSom(NG &ng, NGHolder &g, const ExpressionInfo &expr, u32 comp_id,
|
||||
vector<som_plan> plan;
|
||||
retry:
|
||||
// Note: no-one should ever pay attention to the root plan's parent.
|
||||
plan.push_back(som_plan(prefix, escapes, false, 0));
|
||||
plan.emplace_back(som_plan(prefix, escapes, false, 0));
|
||||
dumpHolder(*plan.back().prefix, 12, "som_prefix", cc.grey);
|
||||
if (!prefix_by_rev) {
|
||||
if (!doSomPlanning(g, stuck, regions, info, picked, plan, cc.grey)) {
|
||||
|
@ -102,7 +102,7 @@ bool forkVertex(NFAVertex v, NGHolder &g, vector<DepthMinMax> &depths,
|
||||
for (const auto &e : in_edges_range(v, g)) {
|
||||
const DepthMinMax &d = getDepth(source(e, g), g, depths);
|
||||
assert(d.min == d.max);
|
||||
predGroups[d.min].push_back(e);
|
||||
predGroups[d.min].emplace_back(e);
|
||||
}
|
||||
|
||||
DEBUG_PRINTF("forking vertex with %zu pred groups\n", predGroups.size());
|
||||
@ -121,7 +121,7 @@ bool forkVertex(NFAVertex v, NGHolder &g, vector<DepthMinMax> &depths,
|
||||
NFAVertex clone = add_vertex(g[v], g);
|
||||
depth clone_depth = predDepth + 1;
|
||||
g[clone].index = clone_idx;
|
||||
depths.push_back(DepthMinMax(clone_depth, clone_depth));
|
||||
depths.emplace_back(DepthMinMax(clone_depth, clone_depth));
|
||||
DEBUG_PRINTF("cloned vertex %u with depth %s\n", clone_idx,
|
||||
clone_depth.str().c_str());
|
||||
|
||||
|
@ -60,10 +60,10 @@ vector<DepthMinMax> getDistancesFromSOM(const NGHolder &g_orig) {
|
||||
vector<NFAVertex> vstarts;
|
||||
for (auto v : vertices_range(g)) {
|
||||
if (is_virtual_start(v, g)) {
|
||||
vstarts.push_back(v);
|
||||
vstarts.emplace_back(v);
|
||||
}
|
||||
}
|
||||
vstarts.push_back(g.startDs);
|
||||
vstarts.emplace_back(g.startDs);
|
||||
|
||||
// wire the successors of every virtual start or startDs to g.start.
|
||||
for (auto v : vstarts) {
|
||||
|
@ -281,7 +281,7 @@ void findDerivedSquashers(const NGHolder &g, const vector<NFAVertex> &vByIndex,
|
||||
smgb_cache &cache) {
|
||||
deque<NFAVertex> remaining;
|
||||
for (const auto &m : *squash) {
|
||||
remaining.push_back(m.first);
|
||||
remaining.emplace_back(m.first);
|
||||
}
|
||||
|
||||
while (!remaining.empty()) {
|
||||
@ -313,7 +313,7 @@ void findDerivedSquashers(const NGHolder &g, const vector<NFAVertex> &vByIndex,
|
||||
DEBUG_PRINTF("%zu is an upstream squasher of %zu\n", u_index,
|
||||
g[v].index);
|
||||
(*squash)[u] = u_squash;
|
||||
remaining.push_back(u);
|
||||
remaining.emplace_back(u);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -639,7 +639,7 @@ vector<NFAVertex> findUnreachable(const NGHolder &g) {
|
||||
vector<NFAVertex> unreach;
|
||||
for (auto v : vertices_range(revg)) {
|
||||
if (!contains(colours, v)) {
|
||||
unreach.push_back(NFAVertex(v));
|
||||
unreach.emplace_back(NFAVertex(v));
|
||||
}
|
||||
}
|
||||
return unreach;
|
||||
|
@ -92,7 +92,7 @@ struct ranking_info {
|
||||
u32 add_to_tail(NFAVertex v) {
|
||||
u32 rank = size();
|
||||
to_rank[v] = rank;
|
||||
to_vertex.push_back(v);
|
||||
to_vertex.emplace_back(v);
|
||||
return rank;
|
||||
}
|
||||
|
||||
|
@ -178,7 +178,7 @@ void findSeeds(const NGHolder &h, const bool som, vector<NFAVertex> *seeds) {
|
||||
}
|
||||
|
||||
DEBUG_PRINTF("%zu is a seed\n", h[v].index);
|
||||
seeds->push_back(v);
|
||||
seeds->emplace_back(v);
|
||||
already_seeds.insert(v);
|
||||
}
|
||||
}
|
||||
|
@ -407,7 +407,7 @@ void appendLiteral(NGHolder &h, const ue2_literal &s) {
|
||||
vector<NFAVertex> tail;
|
||||
assert(in_degree(h.acceptEod, h) == 1);
|
||||
for (auto v : inv_adjacent_vertices_range(h.accept, h)) {
|
||||
tail.push_back(v);
|
||||
tail.emplace_back(v);
|
||||
}
|
||||
assert(!tail.empty());
|
||||
|
||||
@ -422,7 +422,7 @@ void appendLiteral(NGHolder &h, const ue2_literal &s) {
|
||||
add_edge(u, v, h);
|
||||
}
|
||||
tail.clear();
|
||||
tail.push_back(v);
|
||||
tail.emplace_back(v);
|
||||
}
|
||||
|
||||
for (auto v : tail) {
|
||||
|
@ -394,7 +394,7 @@ void getSimpleRoseLiterals(const NGHolder &g, bool seeking_anchored,
|
||||
|
||||
lits->reserve(lit_info.size());
|
||||
for (auto &m : lit_info) {
|
||||
lits->push_back(move(m.second));
|
||||
lits->emplace_back(move(m.second));
|
||||
}
|
||||
DEBUG_PRINTF("%zu candidate literal sets\n", lits->size());
|
||||
}
|
||||
@ -434,7 +434,7 @@ void getRegionRoseLiterals(const NGHolder &g, bool seeking_anchored,
|
||||
}
|
||||
|
||||
if (isRegionExit(g, v, regions)) {
|
||||
exits[region].push_back(v);
|
||||
exits[region].emplace_back(v);
|
||||
}
|
||||
|
||||
if (isRegionEntry(g, v, regions)) {
|
||||
@ -531,7 +531,7 @@ void getRegionRoseLiterals(const NGHolder &g, bool seeking_anchored,
|
||||
}
|
||||
|
||||
DEBUG_PRINTF("candidate is a candidate\n");
|
||||
lits->push_back(make_unique<VertLitInfo>(vv, s, anchored));
|
||||
lits->emplace_back(make_unique<VertLitInfo>(vv, s, anchored));
|
||||
}
|
||||
}
|
||||
|
||||
@ -592,7 +592,7 @@ void getCandidatePivots(const NGHolder &g, set<NFAVertex> *cand,
|
||||
assert(ait != accepts.end());
|
||||
NFAVertex curr = *ait;
|
||||
while (curr && !is_special(curr, g)) {
|
||||
dom_trace.push_back(curr);
|
||||
dom_trace.emplace_back(curr);
|
||||
curr = dominators[curr];
|
||||
}
|
||||
reverse(dom_trace.begin(), dom_trace.end());
|
||||
@ -600,7 +600,7 @@ void getCandidatePivots(const NGHolder &g, set<NFAVertex> *cand,
|
||||
curr = *ait;
|
||||
vector<NFAVertex> dom_trace2;
|
||||
while (curr && !is_special(curr, g)) {
|
||||
dom_trace2.push_back(curr);
|
||||
dom_trace2.emplace_back(curr);
|
||||
curr = dominators[curr];
|
||||
}
|
||||
reverse(dom_trace2.begin(), dom_trace2.end());
|
||||
@ -1095,7 +1095,7 @@ bool splitRoseEdge(const NGHolder &base_graph, RoseInGraph &vg,
|
||||
for (const RoseInEdge &e : ee) {
|
||||
RoseInVertex src = source(e, vg);
|
||||
RoseInVertex dest = target(e, vg);
|
||||
images[src].push_back(dest);
|
||||
images[src].emplace_back(dest);
|
||||
remove_edge(e, vg);
|
||||
}
|
||||
|
||||
@ -1149,7 +1149,7 @@ bool splitRoseEdge(const NGHolder &base_graph, RoseInGraph &vg,
|
||||
add_edge(v, dest, RoseInEdgeProps(rhs, 0U), vg);
|
||||
}
|
||||
}
|
||||
verts_by_image[image].push_back(v);
|
||||
verts_by_image[image].emplace_back(v);
|
||||
}
|
||||
}
|
||||
|
||||
@ -1598,7 +1598,7 @@ void removeRedundantLiteralsFromPrefixes(RoseInGraph &g,
|
||||
|
||||
if (delay == lit.length() && edge(h->start, h->accept, *h).second
|
||||
&& num_vertices(*h) == N_SPECIALS) {
|
||||
to_anchor.push_back(e);
|
||||
to_anchor.emplace_back(e);
|
||||
continue;
|
||||
}
|
||||
|
||||
@ -1775,7 +1775,7 @@ void removeRedundantLiteralsFromInfixes(RoseInGraph &g,
|
||||
}
|
||||
|
||||
NGHolder *h = g[e].graph.get();
|
||||
infixes[h].push_back(e);
|
||||
infixes[h].emplace_back(e);
|
||||
}
|
||||
|
||||
for (const auto &m : infixes) {
|
||||
@ -2110,7 +2110,7 @@ void findBetterPrefixes(RoseInGraph &vg, const CompileContext &cc) {
|
||||
assert(vg[target(e, vg)].type == RIV_LITERAL);
|
||||
if (vg[e].graph) {
|
||||
NGHolder *h = vg[e].graph.get();
|
||||
prefixes[h].push_back(e);
|
||||
prefixes[h].emplace_back(e);
|
||||
}
|
||||
}
|
||||
|
||||
@ -2174,7 +2174,7 @@ void extractStrongLiterals(RoseInGraph &vg, const CompileContext &cc) {
|
||||
|
||||
if (vg[ve].graph) {
|
||||
NGHolder *h = vg[ve].graph.get();
|
||||
edges_by_graph[h].push_back(ve);
|
||||
edges_by_graph[h].emplace_back(ve);
|
||||
}
|
||||
}
|
||||
|
||||
@ -2262,7 +2262,7 @@ void improveWeakInfixes(RoseInGraph &vg, const CompileContext &cc) {
|
||||
for (const RoseInEdge &ve : edges_range(vg)) {
|
||||
NGHolder *h = vg[ve].graph.get();
|
||||
if (contains(weak, h)) {
|
||||
weak_edges[h].push_back(ve);
|
||||
weak_edges[h].emplace_back(ve);
|
||||
}
|
||||
}
|
||||
|
||||
@ -2366,7 +2366,7 @@ bool replaceSuffixWithInfix(const NGHolder &h, RoseInGraph &vg,
|
||||
|
||||
VertLitInfo &vli = by_reports[make_pair(false, h[v].reports)];
|
||||
insert(&vli.lit, ss);
|
||||
vli.vv.push_back(v);
|
||||
vli.vv.emplace_back(v);
|
||||
seen.insert(v);
|
||||
}
|
||||
|
||||
@ -2384,7 +2384,7 @@ bool replaceSuffixWithInfix(const NGHolder &h, RoseInGraph &vg,
|
||||
|
||||
VertLitInfo &vli = by_reports[make_pair(true, h[v].reports)];
|
||||
insert(&vli.lit, ss);
|
||||
vli.vv.push_back(v);
|
||||
vli.vv.emplace_back(v);
|
||||
}
|
||||
|
||||
assert(!by_reports.empty());
|
||||
@ -2435,7 +2435,7 @@ void avoidSuffixes(RoseInGraph &vg, const CompileContext &cc) {
|
||||
assert(vg[e].graph); /* non suffix paths should be wired to other
|
||||
accepts */
|
||||
const NGHolder *h = vg[e].graph.get();
|
||||
suffixes[h].push_back(e);
|
||||
suffixes[h].emplace_back(e);
|
||||
}
|
||||
|
||||
/* look at suffixes and try to split */
|
||||
@ -2530,7 +2530,7 @@ void lookForDoubleCut(RoseInGraph &vg, const CompileContext &cc) {
|
||||
for (const RoseInEdge &ve : edges_range(vg)) {
|
||||
if (vg[ve].graph && vg[source(ve, vg)].type == RIV_LITERAL) {
|
||||
const NGHolder *h = vg[ve].graph.get();
|
||||
right_edges[h].push_back(ve);
|
||||
right_edges[h].emplace_back(ve);
|
||||
}
|
||||
}
|
||||
|
||||
@ -2671,7 +2671,7 @@ void decomposeLiteralChains(RoseInGraph &vg, const CompileContext &cc) {
|
||||
for (const RoseInEdge &ve : edges_range(vg)) {
|
||||
if (vg[ve].graph && vg[source(ve, vg)].type == RIV_LITERAL) {
|
||||
const NGHolder *h = vg[ve].graph.get();
|
||||
right_edges[h].push_back(ve);
|
||||
right_edges[h].emplace_back(ve);
|
||||
}
|
||||
}
|
||||
|
||||
@ -2721,7 +2721,7 @@ void lookForCleanEarlySplits(RoseInGraph &vg, const CompileContext &cc) {
|
||||
for (const RoseInEdge &e : out_edges_range(v, vg)) {
|
||||
if (vg[e].graph) {
|
||||
NGHolder *h = vg[e].graph.get();
|
||||
rightfixes[h].push_back(e);
|
||||
rightfixes[h].emplace_back(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -2757,7 +2757,7 @@ void rehomeEodSuffixes(RoseInGraph &vg) {
|
||||
continue;
|
||||
}
|
||||
|
||||
acc_edges.push_back(e);
|
||||
acc_edges.emplace_back(e);
|
||||
}
|
||||
|
||||
for (const RoseInEdge &e : acc_edges) {
|
||||
@ -2797,7 +2797,7 @@ vector<vector<CharReach>> getDfaTriggers(RoseInGraph &vg,
|
||||
for (const auto &e : edges) {
|
||||
RoseInVertex s = source(e, vg);
|
||||
if (vg[s].type == RIV_LITERAL) {
|
||||
triggers.push_back(as_cr_seq(vg[s].s));
|
||||
triggers.emplace_back(as_cr_seq(vg[s].s));
|
||||
}
|
||||
ENSURE_AT_LEAST(&max_offset, vg[s].max_offset);
|
||||
LIMIT_TO_AT_MOST(&min_offset, vg[s].min_offset);
|
||||
@ -2911,7 +2911,7 @@ bool ensureImplementable(RoseBuild &rose, RoseInGraph &vg, bool allow_changes,
|
||||
for (const RoseInEdge &ve : edges_range(vg)) {
|
||||
if (vg[ve].graph && !vg[ve].dfa) {
|
||||
auto &h = vg[ve].graph;
|
||||
edges_by_graph[h].push_back(ve);
|
||||
edges_by_graph[h].emplace_back(ve);
|
||||
}
|
||||
}
|
||||
for (auto &m : edges_by_graph) {
|
||||
|
@ -57,7 +57,7 @@ ComponentAlternation::ComponentAlternation(const ComponentAlternation &other)
|
||||
: Component(other) {
|
||||
for (const auto &c : other.children) {
|
||||
assert(c);
|
||||
children.push_back(unique_ptr<Component>(c->clone()));
|
||||
children.emplace_back(unique_ptr<Component>(c->clone()));
|
||||
}
|
||||
}
|
||||
|
||||
@ -103,7 +103,7 @@ void ComponentAlternation::accept(ConstComponentVisitor &v) const {
|
||||
}
|
||||
|
||||
void ComponentAlternation::append(unique_ptr<Component> component) {
|
||||
children.push_back(move(component));
|
||||
children.emplace_back(move(component));
|
||||
}
|
||||
|
||||
vector<PositionInfo> ComponentAlternation::first() const {
|
||||
|
@ -94,11 +94,11 @@ void ComponentBoundary::notePositions(GlushkovBuildState & bs) {
|
||||
{
|
||||
PositionInfo epsilon(GlushkovBuildState::POS_EPSILON);
|
||||
epsilon.flags = POS_FLAG_NOFLOAT;
|
||||
m_first.push_back(epsilon);
|
||||
m_first.emplace_back(epsilon);
|
||||
|
||||
// We have the start vertex in firsts so that we can discourage
|
||||
// the mid-pattern use of boundaries.
|
||||
m_first.push_back(startState);
|
||||
m_first.emplace_back(startState);
|
||||
|
||||
break;
|
||||
}
|
||||
@ -106,11 +106,11 @@ void ComponentBoundary::notePositions(GlushkovBuildState & bs) {
|
||||
{
|
||||
PositionInfo epsilon(GlushkovBuildState::POS_EPSILON);
|
||||
epsilon.flags = POS_FLAG_NOFLOAT;
|
||||
m_first.push_back(epsilon);
|
||||
m_first.emplace_back(epsilon);
|
||||
|
||||
// We have the start vertex in firsts so that we can discourage
|
||||
// the mid-pattern use of boundaries.
|
||||
m_first.push_back(startState);
|
||||
m_first.emplace_back(startState);
|
||||
|
||||
// Newline
|
||||
m_newline = makeNewline(bs);
|
||||
@ -118,8 +118,8 @@ void ComponentBoundary::notePositions(GlushkovBuildState & bs) {
|
||||
builder.setAssertFlag(m_newline, POS_FLAG_VIRTUAL_START);
|
||||
PositionInfo nl(m_newline);
|
||||
nl.flags = POS_FLAG_MUST_FLOAT | POS_FLAG_FIDDLE_ACCEPT;
|
||||
m_first.push_back(nl);
|
||||
m_last.push_back(nl);
|
||||
m_first.emplace_back(nl);
|
||||
m_last.emplace_back(nl);
|
||||
recordPosBounds(m_newline, m_newline + 1);
|
||||
break;
|
||||
}
|
||||
@ -128,7 +128,7 @@ void ComponentBoundary::notePositions(GlushkovBuildState & bs) {
|
||||
PositionInfo epsilon(GlushkovBuildState::POS_EPSILON);
|
||||
epsilon.flags = POS_FLAG_WIRE_EOD | POS_FLAG_NO_NL_EOD |
|
||||
POS_FLAG_NO_NL_ACCEPT | POS_FLAG_ONLY_ENDS;
|
||||
m_first.push_back(epsilon);
|
||||
m_first.emplace_back(epsilon);
|
||||
break;
|
||||
}
|
||||
case END_STRING_OPTIONAL_LF: // end of data with optional LF ('$')
|
||||
@ -136,7 +136,7 @@ void ComponentBoundary::notePositions(GlushkovBuildState & bs) {
|
||||
PositionInfo epsilon(GlushkovBuildState::POS_EPSILON);
|
||||
epsilon.flags = POS_FLAG_WIRE_EOD | POS_FLAG_WIRE_NL_EOD |
|
||||
POS_FLAG_NO_NL_ACCEPT | POS_FLAG_ONLY_ENDS;
|
||||
m_first.push_back(epsilon);
|
||||
m_first.emplace_back(epsilon);
|
||||
break;
|
||||
}
|
||||
case END_LINE: // multiline anchor: end of data or a newline
|
||||
@ -144,7 +144,7 @@ void ComponentBoundary::notePositions(GlushkovBuildState & bs) {
|
||||
PositionInfo epsilon(GlushkovBuildState::POS_EPSILON);
|
||||
epsilon.flags = POS_FLAG_WIRE_EOD | POS_FLAG_WIRE_NL_EOD |
|
||||
POS_FLAG_WIRE_NL_ACCEPT | POS_FLAG_ONLY_ENDS;
|
||||
m_first.push_back(epsilon);
|
||||
m_first.emplace_back(epsilon);
|
||||
break;
|
||||
}
|
||||
default:
|
||||
|
@ -177,7 +177,7 @@ void ComponentRepeat::notePositions(GlushkovBuildState &bs) {
|
||||
|
||||
// Each optional repeat has an epsilon at the end of its firsts list.
|
||||
for (u32 i = m_min; i < m_firsts.size(); i++) {
|
||||
m_firsts[i].push_back(GlushkovBuildState::POS_EPSILON);
|
||||
m_firsts[i].emplace_back(GlushkovBuildState::POS_EPSILON);
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -61,7 +61,7 @@ ComponentSequence::ComponentSequence(const ComponentSequence &other)
|
||||
// Deep copy children.
|
||||
for (const auto &c : other.children) {
|
||||
assert(c);
|
||||
children.push_back(unique_ptr<Component>(c->clone()));
|
||||
children.emplace_back(unique_ptr<Component>(c->clone()));
|
||||
}
|
||||
if (other.alternation) {
|
||||
const ComponentAlternation &c = *other.alternation;
|
||||
@ -117,7 +117,7 @@ void ComponentSequence::accept(ConstComponentVisitor &v) const {
|
||||
}
|
||||
|
||||
void ComponentSequence::addComponent(unique_ptr<Component> comp) {
|
||||
children.push_back(move(comp));
|
||||
children.emplace_back(move(comp));
|
||||
}
|
||||
|
||||
bool ComponentSequence::addRepeat(u32 min, u32 max,
|
||||
@ -152,7 +152,7 @@ void ComponentSequence::finalize() {
|
||||
if (alternation) {
|
||||
addAlternation();
|
||||
assert(children.empty());
|
||||
children.push_back(move(alternation));
|
||||
children.emplace_back(move(alternation));
|
||||
alternation = nullptr;
|
||||
}
|
||||
}
|
||||
@ -171,7 +171,7 @@ vector<PositionInfo> ComponentSequence::first() const {
|
||||
if (firsts.empty()) {
|
||||
DEBUG_PRINTF("trivial empty sequence %zu\n", firsts.size());
|
||||
assert(children.empty());
|
||||
firsts.push_back(GlushkovBuildState::POS_EPSILON);
|
||||
firsts.emplace_back(GlushkovBuildState::POS_EPSILON);
|
||||
}
|
||||
|
||||
DEBUG_PRINTF("%zu firsts\n", firsts.size());
|
||||
@ -202,7 +202,7 @@ void epsilonVisit(vector<eps_info> *info, const vector<PositionInfo> &f) {
|
||||
continue;
|
||||
}
|
||||
|
||||
out.push_back(*it);
|
||||
out.emplace_back(*it);
|
||||
out.back().flags = flags;
|
||||
seen_flags.insert(flags);
|
||||
}
|
||||
@ -220,7 +220,7 @@ void applyEpsilonVisits(vector<PositionInfo> &lasts,
|
||||
|
||||
for (const auto &last : lasts) {
|
||||
for (const auto &e : eps_visits) {
|
||||
out.push_back(last);
|
||||
out.emplace_back(last);
|
||||
out.back().flags |= e.flags;
|
||||
}
|
||||
}
|
||||
|
@ -55,7 +55,7 @@ ComponentWordBoundary * ComponentWordBoundary::clone() const {
|
||||
|
||||
vector<PositionInfo> ComponentWordBoundary::first() const {
|
||||
vector<PositionInfo> firsts;
|
||||
firsts.push_back(position);
|
||||
firsts.emplace_back(position);
|
||||
return firsts;
|
||||
}
|
||||
|
||||
|
@ -1145,20 +1145,20 @@ void UTF8ComponentClass::buildFollowSet(GlushkovBuildState &,
|
||||
vector<PositionInfo> UTF8ComponentClass::first(void) const {
|
||||
vector<PositionInfo> rv;
|
||||
if (single_pos != GlushkovBuildState::POS_UNINITIALIZED) {
|
||||
rv.push_back(single_pos);
|
||||
rv.emplace_back(single_pos);
|
||||
}
|
||||
if (two_char_dot_head != GlushkovBuildState::POS_UNINITIALIZED) {
|
||||
rv.push_back(two_char_dot_head);
|
||||
rv.emplace_back(two_char_dot_head);
|
||||
}
|
||||
if (three_char_dot_head != GlushkovBuildState::POS_UNINITIALIZED) {
|
||||
rv.push_back(three_char_dot_head);
|
||||
rv.emplace_back(three_char_dot_head);
|
||||
}
|
||||
if (four_char_dot_head != GlushkovBuildState::POS_UNINITIALIZED) {
|
||||
rv.push_back(four_char_dot_head);
|
||||
rv.emplace_back(four_char_dot_head);
|
||||
}
|
||||
|
||||
for (auto it = heads.begin(); it != heads.end(); ++it) {
|
||||
rv.push_back(it->second);
|
||||
rv.emplace_back(it->second);
|
||||
}
|
||||
return rv;
|
||||
}
|
||||
|
@ -155,9 +155,9 @@ GlushkovBuildStateImpl::GlushkovBuildStateImpl(NFABuilder &b,
|
||||
vector<PositionInfo> lasts, firsts;
|
||||
|
||||
// start->startDs and startDs self-loop.
|
||||
lasts.push_back(startState);
|
||||
lasts.push_back(startDotstarState);
|
||||
firsts.push_back(startDotstarState);
|
||||
lasts.emplace_back(startState);
|
||||
lasts.emplace_back(startDotstarState);
|
||||
firsts.emplace_back(startDotstarState);
|
||||
connectRegions(lasts, firsts);
|
||||
|
||||
// accept to acceptEod edges already wired
|
||||
@ -255,7 +255,7 @@ void generateAccepts(GlushkovBuildStateImpl &bs, const PositionInfo &from,
|
||||
bool require_accept = !(flags & POS_FLAG_ONLY_ENDS);
|
||||
|
||||
if (require_eod) {
|
||||
tolist->push_back(bs.acceptEodState);
|
||||
tolist->emplace_back(bs.acceptEodState);
|
||||
}
|
||||
|
||||
if (require_nl_accept) {
|
||||
@ -264,7 +264,7 @@ void generateAccepts(GlushkovBuildStateImpl &bs, const PositionInfo &from,
|
||||
bs.addSuccessor(newline, builder.getAccept());
|
||||
bs.acceptNlState = newline;
|
||||
}
|
||||
tolist->push_back(bs.acceptNlState);
|
||||
tolist->emplace_back(bs.acceptNlState);
|
||||
}
|
||||
|
||||
if (require_nl_eod) {
|
||||
@ -273,11 +273,11 @@ void generateAccepts(GlushkovBuildStateImpl &bs, const PositionInfo &from,
|
||||
bs.addSuccessor(newline, builder.getAcceptEOD());
|
||||
bs.acceptNlEodState = newline;
|
||||
}
|
||||
tolist->push_back(bs.acceptNlEodState);
|
||||
tolist->emplace_back(bs.acceptNlEodState);
|
||||
}
|
||||
|
||||
if (require_accept) {
|
||||
tolist->push_back(bs.acceptState);
|
||||
tolist->emplace_back(bs.acceptState);
|
||||
}
|
||||
}
|
||||
|
||||
@ -458,7 +458,7 @@ void cleanupPositions(vector<PositionInfo> &a) {
|
||||
|
||||
for (const auto &p : a) {
|
||||
if (seen.emplace(p.pos, p.flags).second) {
|
||||
out.push_back(p); // first encounter
|
||||
out.emplace_back(p); // first encounter
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -92,7 +92,7 @@ u32 ParsedLogical::logicalTreeAdd(u32 op, u32 left, u32 right) {
|
||||
lop.op = op;
|
||||
lop.lo = left;
|
||||
lop.ro = right;
|
||||
logicalTree.push_back(lop);
|
||||
logicalTree.emplace_back(lop);
|
||||
return lop.id;
|
||||
}
|
||||
|
||||
@ -107,7 +107,7 @@ void ParsedLogical::combinationInfoAdd(UNUSED u32 ckey, u32 id, u32 ekey,
|
||||
ci.result = lkey_result;
|
||||
ci.min_offset = min_offset;
|
||||
ci.max_offset = max_offset;
|
||||
combInfoMap.push_back(ci);
|
||||
combInfoMap.emplace_back(ci);
|
||||
|
||||
DEBUG_PRINTF("ckey %u (id %u) -> lkey %u..%u, ekey=0x%x\n", ckey, ci.id,
|
||||
ci.start, ci.result, ci.ekey);
|
||||
@ -251,7 +251,7 @@ void popOperator(vector<LogicalOperator> &op_stack, vector<u32> &subid_stack,
|
||||
left = subid_stack.back();
|
||||
subid_stack.pop_back();
|
||||
}
|
||||
subid_stack.push_back(pl.logicalTreeAdd(op_stack.back().op, left, right));
|
||||
subid_stack.emplace_back(pl.logicalTreeAdd(op_stack.back().op, left, right));
|
||||
op_stack.pop_back();
|
||||
}
|
||||
|
||||
@ -274,7 +274,7 @@ void ParsedLogical::parseLogicalCombination(unsigned id, const char *logical,
|
||||
}
|
||||
} else {
|
||||
if ((subid = fetchSubID(logical, digit, i)) != (u32)-1) {
|
||||
subid_stack.push_back(getLogicalKey(subid));
|
||||
subid_stack.emplace_back(getLogicalKey(subid));
|
||||
addRelateCKey(subid_stack.back(), ckey);
|
||||
}
|
||||
if (logical[i] == ' ') { // skip whitespace
|
||||
@ -298,7 +298,7 @@ void ParsedLogical::parseLogicalCombination(unsigned id, const char *logical,
|
||||
lkey_start = subid_stack.back();
|
||||
}
|
||||
}
|
||||
op_stack.push_back(op);
|
||||
op_stack.emplace_back(op);
|
||||
} else {
|
||||
throw LocatedParseError("Unknown character");
|
||||
}
|
||||
@ -309,7 +309,7 @@ void ParsedLogical::parseLogicalCombination(unsigned id, const char *logical,
|
||||
throw LocatedParseError("Not enough right parentheses");
|
||||
}
|
||||
if ((subid = fetchSubID(logical, digit, i)) != (u32)-1) {
|
||||
subid_stack.push_back(getLogicalKey(subid));
|
||||
subid_stack.emplace_back(getLogicalKey(subid));
|
||||
addRelateCKey(subid_stack.back(), ckey);
|
||||
}
|
||||
while (!op_stack.empty()) {
|
||||
|
@ -301,7 +301,7 @@ void createVertices(RoseBuildImpl *tbi,
|
||||
}
|
||||
|
||||
DEBUG_PRINTF(" adding new vertex index=%zu\n", tbi->g[w].index);
|
||||
vertex_map[iv].push_back(w);
|
||||
vertex_map[iv].emplace_back(w);
|
||||
} else {
|
||||
w = created[key];
|
||||
}
|
||||
@ -612,7 +612,7 @@ void doRoseLiteralVertex(RoseBuildImpl *tbi, bool use_eod_table,
|
||||
RoseVertex v = tryForAnchoredVertex(tbi, iv_info, ep);
|
||||
if (v != RoseGraph::null_vertex()) {
|
||||
DEBUG_PRINTF("add anchored literal vertex\n");
|
||||
vertex_map[iv].push_back(v);
|
||||
vertex_map[iv].emplace_back(v);
|
||||
return;
|
||||
}
|
||||
}
|
||||
@ -656,7 +656,7 @@ unique_ptr<NGHolder> makeRoseEodPrefix(const NGHolder &h, RoseBuildImpl &build,
|
||||
continue;
|
||||
}
|
||||
add_edge_if_not_present(u, g.accept, g);
|
||||
dead.push_back(e);
|
||||
dead.emplace_back(e);
|
||||
|
||||
if (!contains(remap, g[u].reports)) {
|
||||
remap[g[u].reports] = build.getNewNfaReport();
|
||||
@ -967,11 +967,11 @@ void populateRoseGraph(RoseBuildImpl *tbi, RoseBuildData &bd) {
|
||||
|
||||
if (ig[iv].type == RIV_START) {
|
||||
DEBUG_PRINTF("is root\n");
|
||||
vertex_map[iv].push_back(tbi->root);
|
||||
vertex_map[iv].emplace_back(tbi->root);
|
||||
continue;
|
||||
} else if (ig[iv].type == RIV_ANCHORED_START) {
|
||||
DEBUG_PRINTF("is anchored root\n");
|
||||
vertex_map[iv].push_back(tbi->anchored_root);
|
||||
vertex_map[iv].emplace_back(tbi->anchored_root);
|
||||
continue;
|
||||
}
|
||||
|
||||
@ -1544,7 +1544,7 @@ bool RoseBuildImpl::addRose(const RoseInGraph &ig, bool prefilter) {
|
||||
NGHolder *h = in[e].graph.get();
|
||||
|
||||
assert(isCorrectlyTopped(*h));
|
||||
graphs[h].push_back(e);
|
||||
graphs[h].emplace_back(e);
|
||||
}
|
||||
|
||||
vector<RoseInEdge> graph_edges;
|
||||
@ -1624,7 +1624,7 @@ bool roseCheckRose(const RoseInGraph &ig, bool prefilter,
|
||||
continue;
|
||||
}
|
||||
|
||||
graphs.push_back(ig[e].graph.get());
|
||||
graphs.emplace_back(ig[e].graph.get());
|
||||
}
|
||||
|
||||
for (const auto &g : graphs) {
|
||||
@ -1781,9 +1781,9 @@ bool RoseBuildImpl::addOutfix(const NGHolder &h) {
|
||||
}
|
||||
|
||||
if (rdfa) {
|
||||
outfixes.push_back(OutfixInfo(move(rdfa)));
|
||||
outfixes.emplace_back(OutfixInfo(move(rdfa)));
|
||||
} else {
|
||||
outfixes.push_back(OutfixInfo(cloneHolder(h)));
|
||||
outfixes.emplace_back(OutfixInfo(cloneHolder(h)));
|
||||
}
|
||||
|
||||
populateOutfixInfo(outfixes.back(), h, *this);
|
||||
@ -1794,7 +1794,7 @@ bool RoseBuildImpl::addOutfix(const NGHolder &h) {
|
||||
bool RoseBuildImpl::addOutfix(const NGHolder &h, const raw_som_dfa &haig) {
|
||||
DEBUG_PRINTF("haig with %zu states\n", haig.states.size());
|
||||
|
||||
outfixes.push_back(OutfixInfo(ue2::make_unique<raw_som_dfa>(haig)));
|
||||
outfixes.emplace_back(OutfixInfo(ue2::make_unique<raw_som_dfa>(haig)));
|
||||
populateOutfixInfo(outfixes.back(), h, *this);
|
||||
|
||||
return true; /* failure is not yet an option */
|
||||
@ -1807,7 +1807,7 @@ bool RoseBuildImpl::addOutfix(const raw_puff &rp) {
|
||||
|
||||
auto *mpv = mpv_outfix->mpv();
|
||||
assert(mpv);
|
||||
mpv->puffettes.push_back(rp);
|
||||
mpv->puffettes.emplace_back(rp);
|
||||
|
||||
mpv_outfix->maxBAWidth = ROSE_BOUND_INF; /* not ba */
|
||||
mpv_outfix->minWidth = min(mpv_outfix->minWidth, depth(rp.repeats));
|
||||
@ -1832,7 +1832,7 @@ bool RoseBuildImpl::addChainTail(const raw_puff &rp, u32 *queue_out,
|
||||
|
||||
auto *mpv = mpv_outfix->mpv();
|
||||
assert(mpv);
|
||||
mpv->triggered_puffettes.push_back(rp);
|
||||
mpv->triggered_puffettes.emplace_back(rp);
|
||||
|
||||
mpv_outfix->maxBAWidth = ROSE_BOUND_INF; /* not ba */
|
||||
mpv_outfix->minWidth = min(mpv_outfix->minWidth, depth(rp.repeats));
|
||||
|
@ -185,7 +185,7 @@ bool expandFmlCandidates(const CharReach &cr, vector<ue2_literal> &curr,
|
||||
return false;
|
||||
}
|
||||
|
||||
curr.push_back(lit);
|
||||
curr.emplace_back(lit);
|
||||
curr.back().push_back(c, nocase);
|
||||
}
|
||||
}
|
||||
@ -335,8 +335,8 @@ void buildLiteralMask(const vector<CharReach> &mask, vector<u8> &msk,
|
||||
auto it = ite - min(size_t{HWLM_MASKLEN}, mask.size() - delay);
|
||||
|
||||
for (; it != ite; ++it) {
|
||||
msk.push_back(0);
|
||||
cmp.push_back(0);
|
||||
msk.emplace_back(0);
|
||||
cmp.emplace_back(0);
|
||||
make_and_cmp_mask(*it, &msk.back(), &cmp.back());
|
||||
}
|
||||
|
||||
|
@ -145,9 +145,9 @@ void mergeAnchoredDfas(vector<unique_ptr<raw_dfa>> &dfas,
|
||||
for (auto &rdfa : dfas) {
|
||||
u32 start_size = mcclellanStartReachSize(rdfa.get());
|
||||
if (start_size <= MAX_SMALL_START_REACH) {
|
||||
small_starts.push_back(move(rdfa));
|
||||
small_starts.emplace_back(move(rdfa));
|
||||
} else {
|
||||
big_starts.push_back(move(rdfa));
|
||||
big_starts.emplace_back(move(rdfa));
|
||||
}
|
||||
}
|
||||
dfas.clear();
|
||||
@ -159,10 +159,10 @@ void mergeAnchoredDfas(vector<unique_ptr<raw_dfa>> &dfas,
|
||||
|
||||
// Rehome our groups into one vector.
|
||||
for (auto &rdfa : small_starts) {
|
||||
dfas.push_back(move(rdfa));
|
||||
dfas.emplace_back(move(rdfa));
|
||||
}
|
||||
for (auto &rdfa : big_starts) {
|
||||
dfas.push_back(move(rdfa));
|
||||
dfas.emplace_back(move(rdfa));
|
||||
}
|
||||
|
||||
// Final test: if we've built two DFAs here that are small enough, we can
|
||||
@ -300,7 +300,7 @@ public:
|
||||
explicit Automaton_Holder(const NGHolder &g_in) : g(g_in) {
|
||||
for (auto v : vertices_range(g)) {
|
||||
vertexToIndex[v] = indexToVertex.size();
|
||||
indexToVertex.push_back(v);
|
||||
indexToVertex.emplace_back(v);
|
||||
}
|
||||
|
||||
assert(indexToVertex.size() <= ANCHORED_NFA_STATE_LIMIT);
|
||||
@ -331,7 +331,7 @@ private:
|
||||
|
||||
if (t.any() && t != esets[i]) {
|
||||
esets[i] &= ~t;
|
||||
esets.push_back(t);
|
||||
esets.emplace_back(t);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -511,7 +511,7 @@ NFAVertex extractLiteral(const NGHolder &h, ue2_literal *lit) {
|
||||
if (cr.count() > 1 && !cr.isCaselessChar()) {
|
||||
break;
|
||||
}
|
||||
lit_verts.push_back(v);
|
||||
lit_verts.emplace_back(v);
|
||||
}
|
||||
|
||||
if (lit_verts.empty()) {
|
||||
@ -686,7 +686,7 @@ int finalise_out(RoseBuildImpl &build, const NGHolder &h,
|
||||
if (check_dupe(*out_dfa, build.anchored_nfas[hash], remap)) {
|
||||
return ANCHORED_REMAP;
|
||||
}
|
||||
build.anchored_nfas[hash].push_back(move(out_dfa));
|
||||
build.anchored_nfas[hash].emplace_back(move(out_dfa));
|
||||
return ANCHORED_SUCCESS;
|
||||
}
|
||||
|
||||
@ -768,7 +768,7 @@ void buildSimpleDfas(const RoseBuildImpl &build, const vector<u32> &frag_map,
|
||||
rdfa->start_floating = DEAD_STATE;
|
||||
rdfa->alpha_size = autom.alphasize;
|
||||
rdfa->alpha_remap = autom.alpha;
|
||||
anchored_dfas->push_back(move(rdfa));
|
||||
anchored_dfas->emplace_back(move(rdfa));
|
||||
}
|
||||
}
|
||||
|
||||
@ -785,7 +785,7 @@ vector<unique_ptr<raw_dfa>> getAnchoredDfas(RoseBuildImpl &build,
|
||||
// DFAs that already exist as raw_dfas.
|
||||
for (auto &anch_dfas : build.anchored_nfas) {
|
||||
for (auto &rdfa : anch_dfas.second) {
|
||||
dfas.push_back(move(rdfa));
|
||||
dfas.emplace_back(move(rdfa));
|
||||
}
|
||||
}
|
||||
build.anchored_nfas.clear();
|
||||
@ -823,7 +823,7 @@ size_t buildNfas(vector<raw_dfa> &anchored_dfas,
|
||||
|
||||
for (auto &rdfa : anchored_dfas) {
|
||||
u32 removed_dots = remove_leading_dots(rdfa);
|
||||
start_offset->push_back(removed_dots);
|
||||
start_offset->emplace_back(removed_dots);
|
||||
|
||||
minimize_hopcroft(rdfa, cc.grey);
|
||||
|
||||
@ -835,7 +835,7 @@ size_t buildNfas(vector<raw_dfa> &anchored_dfas,
|
||||
|
||||
assert(nfa->length);
|
||||
total_size += ROUNDUP_CL(sizeof(anchored_matcher_info) + nfa->length);
|
||||
nfas->push_back(move(nfa));
|
||||
nfas->emplace_back(move(nfa));
|
||||
}
|
||||
|
||||
// We no longer need to keep the raw_dfa structures around.
|
||||
@ -862,7 +862,7 @@ vector<raw_dfa> buildAnchoredDfas(RoseBuildImpl &build,
|
||||
dfas.reserve(anch_dfas.size());
|
||||
for (auto &rdfa : anch_dfas) {
|
||||
assert(rdfa);
|
||||
dfas.push_back(move(*rdfa));
|
||||
dfas.emplace_back(move(*rdfa));
|
||||
}
|
||||
return dfas;
|
||||
}
|
||||
|
@ -750,7 +750,7 @@ static
|
||||
vector<CharReach> as_cr_seq(const rose_literal_id &lit) {
|
||||
vector<CharReach> rv = as_cr_seq(lit.s);
|
||||
for (u32 i = 0; i < lit.delay; i++) {
|
||||
rv.push_back(CharReach::dot());
|
||||
rv.emplace_back(CharReach::dot());
|
||||
}
|
||||
|
||||
/* TODO: take into account cmp/msk */
|
||||
@ -776,7 +776,7 @@ void findTriggerSequences(const RoseBuildImpl &tbi,
|
||||
|
||||
for (u32 id : lit_ids) {
|
||||
const rose_literal_id &lit = tbi.literals.at(id);
|
||||
(*trigger_lits)[top].push_back(as_cr_seq(lit));
|
||||
(*trigger_lits)[top].emplace_back(as_cr_seq(lit));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -914,7 +914,7 @@ void appendTailToHolder(NGHolder &h, const vector<CharReach> &tail) {
|
||||
|
||||
map<flat_set<ReportID>, vector<NFAVertex> > reporters;
|
||||
for (auto v : inv_adjacent_vertices_range(h.accept, h)) {
|
||||
reporters[h[v].reports].push_back(v);
|
||||
reporters[h[v].reports].emplace_back(v);
|
||||
}
|
||||
|
||||
for (const auto &e : reporters) {
|
||||
@ -1425,10 +1425,10 @@ void buildExclusiveInfixes(RoseBuildImpl &build, build_context &bc,
|
||||
ExclusiveSubengine engine;
|
||||
engine.nfa = move(n);
|
||||
engine.vertices = verts;
|
||||
info.subengines.push_back(move(engine));
|
||||
info.subengines.emplace_back(move(engine));
|
||||
}
|
||||
info.queue = qif.get_queue();
|
||||
exclusive_info.push_back(move(info));
|
||||
exclusive_info.emplace_back(move(info));
|
||||
}
|
||||
updateExclusiveInfixProperties(build, exclusive_info, bc.leftfix_info,
|
||||
no_retrigger_queues);
|
||||
@ -1462,7 +1462,7 @@ void findExclusiveInfixes(RoseBuildImpl &build, build_context &bc,
|
||||
// NFA already built.
|
||||
u32 id = leftfixes[leftfix];
|
||||
if (contains(vertex_map, id)) {
|
||||
vertex_map[id].push_back(v);
|
||||
vertex_map[id].emplace_back(v);
|
||||
}
|
||||
DEBUG_PRINTF("sharing leftfix, id=%u\n", id);
|
||||
continue;
|
||||
@ -1474,7 +1474,7 @@ void findExclusiveInfixes(RoseBuildImpl &build, build_context &bc,
|
||||
|
||||
if (leftfix.graph() || leftfix.castle()) {
|
||||
leftfixes.emplace(leftfix, role_id);
|
||||
vertex_map[role_id].push_back(v);
|
||||
vertex_map[role_id].emplace_back(v);
|
||||
|
||||
map<u32, vector<vector<CharReach>>> triggers;
|
||||
findTriggerSequences(build, infixTriggers.at(leftfix), &triggers);
@ -1545,7 +1545,7 @@ bool buildLeftfixes(RoseBuildImpl &tbi, build_context &bc,
}
}

succs[leftfix].push_back(v);
succs[leftfix].emplace_back(v);
}

rose_group initial_groups = tbi.getInitialGroups();
@ -1867,13 +1867,13 @@ void buildExclusiveSuffixes(RoseBuildImpl &build, build_context &bc,
ExclusiveSubengine engine;
engine.nfa = move(n);
engine.vertices = verts;
info.subengines.push_back(move(engine));
info.subengines.emplace_back(move(engine));

const auto &reports = all_reports(s);
info.reports.insert(reports.begin(), reports.end());
}
info.queue = qif.get_queue();
exclusive_info.push_back(move(info));
exclusive_info.emplace_back(move(info));
}
updateExclusiveSuffixProperties(build, exclusive_info,
no_retrigger_queues);
@ -1904,7 +1904,7 @@ void findExclusiveSuffixes(RoseBuildImpl &tbi, build_context &bc,
if (contains(suffixes, s)) {
u32 id = suffixes[s];
if (!tbi.isInETable(v)) {
vertex_map[id].push_back(v);
vertex_map[id].emplace_back(v);
}
continue;
}
@ -1918,7 +1918,7 @@ void findExclusiveSuffixes(RoseBuildImpl &tbi, build_context &bc,
DEBUG_PRINTF("assigning %p to id %u\n", s.graph(), role_id);
suffixes.emplace(s, role_id);

vertex_map[role_id].push_back(v);
vertex_map[role_id].emplace_back(v);
const set<PredTopPair> &s_triggers = suffixTriggers.at(s);
map<u32, vector<vector<CharReach>>> triggers;
findTriggerSequences(tbi, s_triggers, &triggers);
@ -2191,7 +2191,7 @@ u32 buildLastByteIter(const RoseGraph &g, build_context &bc) {
// Eager EOD reporters won't have state indices.
auto it = bc.roleStateIndices.find(v);
if (it != end(bc.roleStateIndices)) {
lb_roles.push_back(it->second);
lb_roles.emplace_back(it->second);
DEBUG_PRINTF("last byte %u\n", it->second);
}
}
@ -2264,7 +2264,7 @@ vector<u32> buildSuffixEkeyLists(const RoseBuildImpl &build, build_context &bc,
u32 qi = e.first;
auto &ekeys = e.second;
assert(!ekeys.empty());
ekeys.push_back(INVALID_EKEY); /* terminator */
ekeys.emplace_back(INVALID_EKEY); /* terminator */
out[qi] = bc.engine_blob.add_range(ekeys);
}

@ -2279,7 +2279,7 @@ u32 buildEodNfaIterator(build_context &bc, const u32 activeQueueCount) {
const auto &eng_info = bc.engine_info_by_queue.at(qi);
if (eng_info.accepts_eod) {
DEBUG_PRINTF("nfa qi=%u accepts eod\n", qi);
keys.push_back(qi);
keys.emplace_back(qi);
}
}

@ -2354,7 +2354,7 @@ void addSomRevNfas(build_context &bc, RoseEngine &proto,
u32 offset = bc.engine_blob.add(*nfa, nfa->length);
DEBUG_PRINTF("wrote SOM rev NFA %zu (len %u) to offset %u\n",
nfa_offsets.size(), nfa->length, offset);
nfa_offsets.push_back(offset);
nfa_offsets.emplace_back(offset);
/* note: som rev nfas don't need a queue assigned as only run in block
* mode reverse */
}
@ -2428,7 +2428,7 @@ u32 writeActiveLeftIter(RoseEngineBlob &engine_blob,
for (size_t i = 0; i < leftInfoTable.size(); i++) {
if (!leftInfoTable[i].transient) {
DEBUG_PRINTF("leftfix %zu is active\n", i);
keys.push_back(verify_u32(i));
keys.emplace_back(verify_u32(i));
}
}

@ -2753,7 +2753,7 @@ RoseProgram makeFragmentProgram(const RoseBuildImpl &build, build_context &bc,
for (const auto &lit_id : lit_ids) {
auto prog = makeLiteralProgram(build, bc, prog_build, lit_id,
lit_edge_map, false);
blocks.push_back(move(prog));
blocks.emplace_back(move(prog));
}

return assembleProgramBlocks(move(blocks));
@ -2857,7 +2857,7 @@ vector<LitFragment> groupByFragment(const RoseBuildImpl &build) {
DEBUG_PRINTF("fragment candidate: lit_id=%u %s\n", lit_id,
dumpString(lit.s).c_str());
auto &fi = frag_info[getFragment(lit)];
fi.lit_ids.push_back(lit_id);
fi.lit_ids.emplace_back(lit_id);
fi.groups |= groups;
}

@ -2919,7 +2919,7 @@ void findInclusionGroups(vector<LitFragment> &fragments,
u32 id = j;
if (contains(includedIdMap, id) ||
contains(includedDelayIdMap, id)) {
candidates.push_back(j);
candidates.emplace_back(j);
DEBUG_PRINTF("find candidate\n");
}
}
@ -3101,7 +3101,7 @@ pair<u32, u32> writeDelayPrograms(const RoseBuildImpl &build,
delay_id, offset);
} else {
delay_id = verify_u32(programs.size());
programs.push_back(offset);
programs.emplace_back(offset);
cache.emplace(offset, delay_id);
DEBUG_PRINTF("assigned new delay_id %u for offset %u\n",
delay_id, offset);
@ -3162,7 +3162,7 @@ pair<u32, u32> writeAnchoredPrograms(const RoseBuildImpl &build,
offset);
} else {
anch_id = verify_u32(programs.size());
programs.push_back(offset);
programs.emplace_back(offset);
cache.emplace(offset, anch_id);
DEBUG_PRINTF("assigned new anch_id %u for offset %u\n", anch_id,
offset);
@ -3212,7 +3212,7 @@ pair<u32, u32> buildReportPrograms(const RoseBuildImpl &build,
for (ReportID id : reports) {
auto program = makeReportProgram(build, bc.needs_mpv_catchup, id);
u32 offset = writeProgram(bc, move(program));
programs.push_back(offset);
programs.emplace_back(offset);
build.rm.setProgramOffset(id, offset);
DEBUG_PRINTF("program for report %u @ %u (%zu instructions)\n", id,
programs.back(), program.size());
@ -3278,7 +3278,7 @@ void addEodAnchorProgram(const RoseBuildImpl &build, const build_context &bc,
g[u].index);
continue;
}
edge_list.push_back(e);
edge_list.emplace_back(e);
}

const bool multiple_preds = edge_list.size() > 1;
@ -3311,7 +3311,7 @@ void addEodEventProgram(const RoseBuildImpl &build, build_context &bc,
vector<RoseEdge> edge_list;
for (const auto &v : lit_info.vertices) {
for (const auto &e : in_edges_range(v, g)) {
edge_list.push_back(e);
edge_list.emplace_back(e);
}
}

@ -3478,7 +3478,7 @@ u32 writeEagerQueueIter(const set<u32> &eager, u32 leftfixBeginQueue,
vector<u32> vec;
for (u32 q : eager) {
assert(q >= leftfixBeginQueue);
vec.push_back(q - leftfixBeginQueue);
vec.emplace_back(q - leftfixBeginQueue);
}

auto iter = mmbBuildSparseIterator(vec, queue_count - leftfixBeginQueue);

@ -130,7 +130,7 @@ vector<rose_literal_id> literals_for_vertex(const RoseBuildImpl &tbi,
vector<rose_literal_id> rv;

for (const u32 id : tbi.g[v].literals) {
rv.push_back(tbi.literals.at(id));
rv.emplace_back(tbi.literals.at(id));
}

return rv;
@ -227,7 +227,7 @@ void makeCastles(RoseBuildImpl &tbi) {
if (g[v].left && !tbi.isRootSuccessor(v)) {
makeCastle(g[v].left, left_cache);
if (g[v].left.castle) {
rev[g[v].left.castle.get()].push_back(v);
rev[g[v].left.castle.get()].emplace_back(v);
}
}

@ -253,11 +253,11 @@ bool unmakeCastles(RoseBuildImpl &tbi) {
for (auto v : vertices_range(g)) {
const LeftEngInfo &left = g[v].left;
if (left.castle && left.castle->repeats.size() > 1) {
left_castles[left].push_back(v);
left_castles[left].emplace_back(v);
}
const RoseSuffixInfo &suffix = g[v].suffix;
if (suffix.castle && suffix.castle->repeats.size() > 1) {
suffix_castles[suffix].push_back(v);
suffix_castles[suffix].emplace_back(v);
}
}

@ -303,10 +303,10 @@ void remapCastleTops(RoseBuildImpl &tbi) {
RoseGraph &g = tbi.g;
for (auto v : vertices_range(g)) {
if (g[v].left.castle) {
rose_castles[g[v].left.castle.get()].push_back(v);
rose_castles[g[v].left.castle.get()].emplace_back(v);
}
if (g[v].suffix.castle) {
suffix_castles[g[v].suffix.castle.get()].push_back(v);
suffix_castles[g[v].suffix.castle.get()].emplace_back(v);
}
}

@ -194,7 +194,7 @@ void RoseBuildImpl::handleMixedSensitivity(void) {
limited_explosion(lit.s) && literal_info[id].delayed_ids.empty()) {
DEBUG_PRINTF("need to explode existing string '%s'\n",
dumpString(lit.s).c_str());
explode.push_back(id);
explode.emplace_back(id);
} else {
literal_info[id].requires_benefits = true;
}
@ -734,9 +734,9 @@ void stealEodVertices(RoseBuildImpl &tbi) {

if (lit.table == ROSE_EOD_ANCHORED) {
if (suitableForAnchored(tbi, lit, info)) {
eodLiteralsForAnchored.push_back(i);
eodLiteralsForAnchored.emplace_back(i);
} else {
eodLiteralsForFloating.push_back(i);
eodLiteralsForFloating.emplace_back(i);
}
} else if (lit.table == ROSE_FLOATING) {
numFloatingLiterals++;
@ -863,7 +863,7 @@ map<left_id, vector<RoseVertex>> findLeftSucc(const RoseBuildImpl &build) {
for (auto v : vertices_range(build.g)) {
if (build.g[v].left) {
const LeftEngInfo &lei = build.g[v].left;
leftfixes[lei].push_back(v);
leftfixes[lei].emplace_back(v);
}
}
return leftfixes;
@ -1046,7 +1046,7 @@ void packInfixTops(NGHolder &h, RoseGraph &g,
h[e].tops = std::move(updated_tops);
if (h[e].tops.empty()) {
DEBUG_PRINTF("edge (start,%zu) has only unused tops\n", h[v].index);
dead.push_back(e);
dead.emplace_back(e);
}
}

@ -1481,7 +1481,7 @@ bool extractSEPLiterals(const raw_dfa &rdfa,
if (!stateIsSEPLiteral(next, i, rdfa)) {
return false;
}
lits[rdfa.states[next].reports].push_back(i);
lits[rdfa.states[next].reports].emplace_back(i);
}

// Map from symbols back to character reachability.
@ -1577,7 +1577,7 @@ void addAnchoredSmallBlockLiterals(RoseBuildImpl &tbi) {
dumpString(sai.literal).c_str(), sai.min_bound);
}

anchored_lits.push_back(make_pair(sai, lit_ids));
anchored_lits.emplace_back(make_pair(sai, lit_ids));
if (sai.literal.length() == 1) {
oneByteLiterals++;
}
@ -1588,7 +1588,7 @@ void addAnchoredSmallBlockLiterals(RoseBuildImpl &tbi) {
map<ue2_literal, flat_set<ReportID>> sep_literals;
for (OutfixInfo &oi : tbi.outfixes) {
if (extractSEPLiterals(oi, tbi.rm, sep_literals)) {
sep_outfixes.push_back(&oi);
sep_outfixes.emplace_back(&oi);
}
}

@ -1782,7 +1782,7 @@ bytecode_ptr<RoseEngine> RoseBuildImpl::buildRose(u32 minWidth) {

/* transfer mpv outfix to main queue */
if (mpv_outfix) {
outfixes.push_back(move(*mpv_outfix));
outfixes.emplace_back(move(*mpv_outfix));
mpv_outfix = nullptr;
}

@ -413,7 +413,7 @@ bool handleStartPrefixCliche(const NGHolder &h, RoseGraph &g, RoseVertex v,
} else {
RoseEdge e_new = add_edge(ar, v, g);
setEdgeBounds(g, e_new, bound_min, bound_max);
to_delete->push_back(e_old);
to_delete->emplace_back(e_old);
}

g[v].left.reset(); /* clear the prefix info */
@ -605,7 +605,7 @@ bool handleMixedPrefixCliche(const NGHolder &h, RoseGraph &g, RoseVertex v,
} else {
RoseEdge e_new = add_edge(ar, v, g);
setEdgeBounds(g, e_new, ri.repeatMin + width, ri.repeatMax + width);
to_delete->push_back(e_old);
to_delete->emplace_back(e_old);
}

} else {

@ -177,7 +177,7 @@ static
vector<CharReach> makePath(const rose_literal_id &lit) {
vector<CharReach> path(begin(lit.s), end(lit.s));
for (u32 i = 0; i < lit.delay; i++) {
path.push_back(CharReach::dot());
path.emplace_back(CharReach::dot());
}
return path;
}

@ -696,7 +696,7 @@ vector<u32> sparseIterValues(const mmbit_sparse_iter *it, u32 num_bits) {
u32 idx = 0;
u32 i = mmbit_sparse_iter_begin(b, num_bits, &idx, it, s);
while (i != MMB_INVALID) {
keys.push_back(i);
keys.emplace_back(i);
i = mmbit_sparse_iter_next(b, num_bits, i, &idx, it, s);
}

@ -1575,10 +1575,10 @@ void dumpRoseLitPrograms(const vector<LitFragment> &fragments,
vector<u32> programs;
for (const auto &frag : fragments) {
if (frag.lit_program_offset) {
programs.push_back(frag.lit_program_offset);
programs.emplace_back(frag.lit_program_offset);
}
if (frag.delay_program_offset) {
programs.push_back(frag.delay_program_offset);
programs.emplace_back(frag.delay_program_offset);
}
}
sort_and_unique(programs);

@ -72,15 +72,15 @@ vector<RoleChunk<role_id>> divideIntoChunks(const RoseBuildImpl &build,
for (const auto &roleInfo : roleInfoSet) {
if (cnt == chunkSize) {
cnt -= chunkSize;
chunks.push_back(roleChunk);
chunks.emplace_back(roleChunk);
roleChunk.roles.clear();
}
roleChunk.roles.push_back(roleInfo);
roleChunk.roles.emplace_back(roleInfo);
cnt++;
}

if (cnt > 1) {
chunks.push_back(roleChunk);
chunks.emplace_back(roleChunk);
}

return chunks;
@ -106,14 +106,14 @@ bool addPrefixLiterals(NGHolder &h, unordered_set<u32> &tailId,
NFAVertex u = add_vertex(h);
h[u].char_reach = c;
if (!i++) {
heads.push_back(u);
heads.emplace_back(u);
last = u;
continue;
}
add_edge(last, u, h);
last = u;
}
tails.push_back(last);
tails.emplace_back(last);
tailId.insert(h[last].index);
}

@ -309,7 +309,7 @@ void findCliques(const map<u32, set<u32>> &exclusiveGroups,
for (const auto &i : clique) {
DEBUG_PRINTF("cliq:%zu\n", i.size());
if (i.size() > 1) {
exclusive_roles.push_back(i);
exclusive_roles.emplace_back(i);
}
}
DEBUG_PRINTF("Clique graph size:%zu\n", exclusive_roles.size());
@ -359,7 +359,7 @@ bool setTriggerLiterals(RoleInfo<role_id> &roleInfo,
for (const auto &c : lit) {
roleInfo.prefix_cr |= c;
}
roleInfo.literals.push_back(lit);
roleInfo.literals.emplace_back(lit);
}
}

@ -326,7 +326,7 @@ void assignGroupsToLiterals(RoseBuildImpl &build) {
/* long literals will either be stuck in a mega group or spread around
* depending on availability */
if (superStrong(lit)) {
long_lits.push_back(id);
long_lits.emplace_back(id);
continue;
}

@ -343,7 +343,7 @@ public:
return {it->second, false};
}
u32 id = verify_u32(lits.size());
lits.push_back(lit);
lits.emplace_back(lit);
lits_index.emplace(lit, id);
return {id, true};
}

@ -163,7 +163,7 @@ u32 findMaxLiteralMatches(const NGHolder &h, const set<ue2_literal> &lits) {
}

contractVertex(g, v, all_edges);
dead.push_back(v);
dead.emplace_back(v);
}

remove_vertices(dead, g);

@ -131,8 +131,8 @@ void RoseInstrCheckLookaround::write(void *dest, RoseEngineBlob &blob,
vector<s8> look_offsets;
vector<CharReach> reaches;
for (const auto &le : look) {
look_offsets.push_back(le.offset);
reaches.push_back(le.reach);
look_offsets.emplace_back(le.offset);
reaches.emplace_back(le.reach);
}
inst->look_index = blob.lookaround_cache.get_offset_of(look_offsets, blob);
inst->reach_index = blob.lookaround_cache.get_offset_of(reaches, blob);
@ -486,9 +486,9 @@ void RoseInstrSparseIterBegin::write(void *dest, RoseEngineBlob &blob,
vector<u32> keys;
vector<u32> jump_offsets;
for (const auto &jump : jump_table) {
keys.push_back(jump.first);
keys.emplace_back(jump.first);
assert(contains(offset_map, jump.second));
jump_offsets.push_back(offset_map.at(jump.second));
jump_offsets.emplace_back(offset_map.at(jump.second));
}

auto iter = mmbBuildSparseIterator(keys, num_keys);
@ -589,11 +589,11 @@ void RoseInstrMultipathLookaround::write(void *dest, RoseEngineBlob &blob,
bool done_offset = false;

for (const auto &le : vle) {
reaches.back().push_back(le.reach);
reaches.back().emplace_back(le.reach);

/* empty reaches don't have valid offsets */
if (!done_offset && le.reach.any()) {
look_offsets.push_back(le.offset);
look_offsets.emplace_back(le.offset);
done_offset = true;
}
}

@ -346,7 +346,7 @@ void filterLits(const vector<AccelString> &lits, hwlm_group_t expected_groups,
DEBUG_PRINTF("lit: '%s', nocase=%d, groups=0x%llx\n",
escapeString(lit.s).c_str(), lit.nocase ? 1 : 0,
lit.groups);
filtered_lits->push_back(&lit);
filtered_lits->emplace_back(&lit);
}
}

@ -279,13 +279,13 @@ void findForwardReach(const RoseGraph &g, const RoseVertex v,
DEBUG_PRINTF("successor %zu has no leftfix\n", g[t].index);
return;
}
rose_look.push_back(map<s32, CharReach>());
rose_look.emplace_back(map<s32, CharReach>());
getRoseForwardReach(g[t].left, g[e].rose_top, rose_look.back());
}

if (g[v].suffix) {
DEBUG_PRINTF("suffix engine\n");
rose_look.push_back(map<s32, CharReach>());
rose_look.emplace_back(map<s32, CharReach>());
getSuffixForwardReach(g[v].suffix, g[v].suffix.top, rose_look.back());
}

@ -319,7 +319,7 @@ void normalise(map<s32, CharReach> &look) {
vector<s32> dead;
for (const auto &m : look) {
if (m.second.all()) {
dead.push_back(m.first);
dead.emplace_back(m.first);
}
}
erase_all(&look, dead);
@ -569,7 +569,7 @@ void normaliseLeftfix(map<s32, CharReach> &look) {
vector<s32> dead;
for (const auto &m : look) {
if (m.second.all() && m.first != earliest) {
dead.push_back(m.first);
dead.emplace_back(m.first);
}
}
erase_all(&look, dead);
@ -617,7 +617,7 @@ void transToLookaround(const vector<map<s32, CharReach>> &looks,
s8 offset = verify_s8(m.first);
lookaround.emplace_back(offset, m.second);
}
lookarounds.push_back(lookaround);
lookarounds.emplace_back(lookaround);
}
}

@ -711,7 +711,7 @@ bool getTransientPrefixReach(const NGHolder &g, ReportID report, u32 lag,
return true;
}
if (contains(g[v].reports, report)) {
curr.push_back(v);
curr.emplace_back(v);
}
}

@ -765,8 +765,8 @@ bool getTransientPrefixReach(const NGHolder &g, ReportID report, u32 lag,
looks[idx][0 - i] = g[u].char_reach;
total_len++;
} else {
curr.push_back(u);
looks.push_back(looks[idx]);
curr.emplace_back(u);
looks.emplace_back(looks[idx]);
(looks.back())[0 - i] = g[u].char_reach;
total_len += looks.back().size();
}

@ -353,7 +353,7 @@ void findMoreLiteralMasks(RoseBuildImpl &build) {
continue;
}

candidates.push_back(id);
candidates.emplace_back(id);
}

for (const u32 &id : candidates) {
@ -827,7 +827,7 @@ MatcherProto makeMatcherProto(const RoseBuildImpl &build,
}
}

used_lit_ids.push_back(id);
used_lit_ids.emplace_back(id);
}

if (used_lit_ids.empty()) {

@ -239,7 +239,7 @@ bool dedupeLeftfixes(RoseBuildImpl &tbi) {
continue;
}

roses[RoseGroup(tbi, v)].push_back(v);
roses[RoseGroup(tbi, v)].emplace_back(v);
}

DEBUG_PRINTF("collected %zu rose groups\n", roses.size());
@ -338,7 +338,7 @@ void dedupeSuffixes(RoseBuildImpl &tbi) {

set<RoseVertex> &verts = suffix_map[s];
if (verts.empty()) {
part[make_pair(suffix_size_key(s), all_reports(s))].push_back(s);
part[make_pair(suffix_size_key(s), all_reports(s))].emplace_back(s);
}
verts.insert(v);
}
@ -393,17 +393,17 @@ public:
void insert(const EngineRef &h, RoseVertex v) {
typename BouquetMap::iterator f = bouquet.find(h);
if (f == bouquet.end()) {
ordering.push_back(h);
bouquet[h].push_back(v);
ordering.emplace_back(h);
bouquet[h].emplace_back(v);
} else {
f->second.push_back(v);
f->second.emplace_back(v);
}
}

void insert(const EngineRef &h, const deque<RoseVertex> &verts) {
typename BouquetMap::iterator f = bouquet.find(h);
if (f == bouquet.end()) {
ordering.push_back(h);
ordering.emplace_back(h);
bouquet.insert(make_pair(h, verts));
} else {
f->second.insert(f->second.end(), verts.begin(), verts.end());
@ -472,14 +472,14 @@ static void chunkBouquets(const Bouquet<EngineRef> &in,
deque<Bouquet<EngineRef>> &out,
const size_t chunk_size) {
if (in.size() <= chunk_size) {
out.push_back(in);
out.emplace_back(in);
return;
}

out.push_back(Bouquet<EngineRef>());
out.emplace_back(Bouquet<EngineRef>());
for (const auto &engine : in) {
if (out.back().size() >= chunk_size) {
out.push_back(Bouquet<EngineRef>());
out.emplace_back(Bouquet<EngineRef>());
}
out.back().insert(engine, in.vertices(engine));
}
@ -820,7 +820,7 @@ bool checkPredDelays(const RoseBuildImpl &build, const VertexCont &v1,
vector<const rose_literal_id *> pred_rose_lits;
pred_rose_lits.reserve(pred_lits.size());
for (const auto &p : pred_lits) {
pred_rose_lits.push_back(&build.literals.at(p));
pred_rose_lits.emplace_back(&build.literals.at(p));
}

for (auto v : v2) {
@ -1322,18 +1322,18 @@ template <typename T>
static
void chunk(vector<T> in, vector<vector<T>> *out, size_t chunk_size) {
if (in.size() <= chunk_size) {
out->push_back(std::move(in));
out->emplace_back(std::move(in));
return;
}

out->push_back(vector<T>());
out->emplace_back(vector<T>());
out->back().reserve(chunk_size);
for (const auto &t : in) {
if (out->back().size() >= chunk_size) {
out->push_back(vector<T>());
out->emplace_back(vector<T>());
out->back().reserve(chunk_size);
}
out->back().push_back(std::move(t));
out->back().emplace_back(std::move(t));
}
}

@ -1346,7 +1346,7 @@ insertion_ordered_map<left_id, vector<RoseVertex>> get_eng_verts(RoseGraph &g) {
continue;
}
assert(contains(all_reports(left), left.leftfix_report));
eng_verts[left].push_back(v);
eng_verts[left].emplace_back(v);
}

return eng_verts;
@ -1438,7 +1438,7 @@ void mergeLeftfixesVariableLag(RoseBuildImpl &build) {
assert(!parents.empty());

#ifndef _WIN32
engine_groups[MergeKey(left, parents)].push_back(left);
engine_groups[MergeKey(left, parents)].emplace_back(left);
#else
// On windows, when passing MergeKey object into map 'engine_groups',
// it will not be copied, but will be freed along with
@ -1448,7 +1448,7 @@ void mergeLeftfixesVariableLag(RoseBuildImpl &build) {
// will cause is_block_type_valid() assertion error in MergeKey
// destructor.
MergeKey *mk = new MergeKey(left, parents);
engine_groups[*mk].push_back(left);
engine_groups[*mk].emplace_back(left);
#endif
}

@ -1611,7 +1611,7 @@ void dedupeLeftfixesVariableLag(RoseBuildImpl &build) {
continue;
}
}
engine_groups[DedupeLeftKey(build, move(preds), left)].push_back(left);
engine_groups[DedupeLeftKey(build, move(preds), left)].emplace_back(left);
}

/* We don't bother chunking as we expect deduping to be successful if the
@ -1871,7 +1871,7 @@ void mergeNfaLeftfixes(RoseBuildImpl &tbi, LeftfixBouquet &roses) {
}
roses.insert(r1, verts2);

merged.push_back(r2);
merged.emplace_back(r2);

if (num_vertices(*winner) >= small_merge_max_vertices(tbi.cc)) {
DEBUG_PRINTF("h1 now has %zu vertices, proceeding to next\n",
@ -2050,12 +2050,12 @@ void mergeCastleLeftfixes(RoseBuildImpl &build) {
continue;
}

eng_verts[g[v].left].push_back(v);
eng_verts[g[v].left].emplace_back(v);
}

map<CharReach, vector<left_id>> by_reach;
for (const auto &left : eng_verts | map_keys) {
by_reach[left.castle()->reach()].push_back(left);
by_reach[left.castle()->reach()].emplace_back(left);
}

vector<vector<left_id>> chunks;
@ -2151,7 +2151,7 @@ void mergeSuffixes(RoseBuildImpl &tbi, SuffixBouquet &suffixes,
g[v].suffix.graph = winner;
}
suffixes.insert(s1, verts2);
merged.push_back(s2);
merged.emplace_back(s2);

if (num_vertices(*s1.graph()) >= small_merge_max_vertices(tbi.cc)) {
DEBUG_PRINTF("h1 now has %zu vertices, proceeding to next\n",
@ -2324,7 +2324,7 @@ map<NGHolder *, NGHolder *> chunkedNfaMerge(RoseBuildImpl &build,

vector<NGHolder *> batch;
for (auto it = begin(nfas), ite = end(nfas); it != ite; ++it) {
batch.push_back(*it);
batch.emplace_back(*it);
assert((*it)->kind == NFA_OUTFIX);
if (batch.size() == MERGE_GROUP_SIZE_MAX || next(it) == ite) {
auto batch_merged = mergeNfaCluster(batch, &build.rm, build.cc);
@ -2463,7 +2463,7 @@ void chunkedDfaMerge(vector<RawDfa *> &dfas,
vector<RawDfa *> out_dfas;
vector<RawDfa *> chunk;
for (auto it = begin(dfas), ite = end(dfas); it != ite; ++it) {
chunk.push_back(*it);
chunk.emplace_back(*it);
if (chunk.size() >= DFA_CHUNK_SIZE_MAX || next(it) == ite) {
pairwiseDfaMerge(chunk, dfa_mapping, outfixes, merge_func);
out_dfas.insert(end(out_dfas), begin(chunk), end(chunk));
@ -2542,7 +2542,7 @@ void mergeOutfixCombo(RoseBuildImpl &tbi, const ReportManager &rm,

if (outfix.rdfa()) {
auto *rdfa = outfix.rdfa();
dfas.push_back(rdfa);
dfas.emplace_back(rdfa);
dfa_mapping[rdfa] = it - tbi.outfixes.begin();
continue;
}
@ -2557,7 +2557,7 @@ void mergeOutfixCombo(RoseBuildImpl &tbi, const ReportManager &rm,
if (rdfa) {
// Transform this outfix into a DFA and add it to the merge set.
dfa_mapping[rdfa.get()] = it - tbi.outfixes.begin();
dfas.push_back(rdfa.get());
dfas.emplace_back(rdfa.get());
outfix.proto = move(rdfa);
new_dfas++;
}
@ -2615,11 +2615,11 @@ void mergeOutfixes(RoseBuildImpl &tbi) {

for (auto &outfix : tbi.outfixes) {
if (outfix.rdfa()) {
dfas.push_back(outfix.rdfa());
dfas.emplace_back(outfix.rdfa());
} else if (outfix.holder()) {
nfas.push_back(outfix.holder());
nfas.emplace_back(outfix.holder());
} else if (outfix.haig()) {
som_dfas.push_back(outfix.haig());
som_dfas.emplace_back(outfix.haig());
}
}

@ -2805,9 +2805,9 @@ void mergeCastleSuffixes(RoseBuildImpl &build) {
}

if (!contains(eng_verts, c)) {
by_reach[c->reach()].push_back(c);
by_reach[c->reach()].emplace_back(c);
}
eng_verts[c].push_back(v);
eng_verts[c].emplace_back(v);
}

for (auto &chunk : by_reach | map_values) {

@ -375,7 +375,7 @@ u32 RoseBuildImpl::getLiteralId(const ue2_literal &s, u32 delay,
bool inserted = m.second;

if (inserted) {
literal_info.push_back(rose_literal_info());
literal_info.emplace_back(rose_literal_info());
assert(literal_info.size() == id + 1);

if (delay) {
@ -465,7 +465,7 @@ u32 RoseBuildImpl::getLiteralId(const ue2_literal &s, const vector<u8> &msk,
bool inserted = m.second;

if (inserted) {
literal_info.push_back(rose_literal_info());
literal_info.emplace_back(rose_literal_info());
assert(literal_info.size() == id + 1);

if (delay) {
@ -488,7 +488,7 @@ u32 RoseBuildImpl::getNewLiteralId() {
assert(m.second);
u32 id = m.first;

literal_info.push_back(rose_literal_info());
literal_info.emplace_back(rose_literal_info());
assert(literal_info.size() == id + 1);

literal_info[id].undelayed_id = id;

@ -95,7 +95,7 @@ OffsetMap makeOffsetMap(const RoseProgram &program, u32 *total_len) {
}

RoseProgram::RoseProgram() {
prog.push_back(make_unique<RoseInstrEnd>());
prog.emplace_back(make_unique<RoseInstrEnd>());
}

RoseProgram::~RoseProgram() = default;
@ -1142,7 +1142,7 @@ void getAllBuckets(const vector<LookEntry> &look,
}
for (const auto &it : lo2hi) {
u32 hi_lo = (it.second << 16) | it.first;
buckets[hi_lo].push_back(entry.offset);
buckets[hi_lo].emplace_back(entry.offset);
}
}
}
@ -2195,7 +2195,7 @@ RoseProgram assembleProgramBlocks(vector<RoseProgram> &&blocks_in) {
continue;
}

blocks.push_back(move(block));
blocks.emplace_back(move(block));
seen.emplace(blocks.back());
}

@ -2322,7 +2322,7 @@ RoseProgram makeDelayRebuildProgram(const RoseBuildImpl &build,
makePushDelayedInstructions(build.literals, prog_build,
build.literal_info.at(lit_id).delayed_ids,
prog);
blocks.push_back(move(prog));
blocks.emplace_back(move(prog));
}

return assembleProgramBlocks(move(blocks));
@ -2424,7 +2424,7 @@ void addPredBlocksAny(map<u32, RoseProgram> &pred_blocks, u32 num_states,

vector<u32> keys;
for (const u32 &key : pred_blocks | map_keys) {
keys.push_back(key);
keys.emplace_back(key);
}

const RoseInstruction *end_inst = sparse_program.end_instruction();

@ -846,7 +846,7 @@ void pruneUnusedTops(NGHolder &h, const RoseGraph &g,
h[e].tops = std::move(pruned_tops);
if (h[e].tops.empty()) {
DEBUG_PRINTF("edge (start,%zu) has only unused tops\n", h[v].index);
dead.push_back(e);
dead.emplace_back(e);
}
}

@ -1457,7 +1457,7 @@ void splitAndFilterBuckets(vector<vector<RoseVertex>> &buckets,
out.emplace_back();
}
auto out_bucket = p.first->second;
out[out_bucket].push_back(v);
out[out_bucket].emplace_back(v);
}
}

@ -1511,7 +1511,7 @@ void splitByNeighbour(const RoseGraph &g, vector<vector<RoseVertex>> &buckets,
for (RoseVertex v : adjacent_vertices_range(u, g)) {
auto it = inv.find(v);
if (it != end(inv)) {
neighbours_by_bucket[it->second].push_back(v);
neighbours_by_bucket[it->second].emplace_back(v);
}
}
} else {
@ -1519,7 +1519,7 @@ void splitByNeighbour(const RoseGraph &g, vector<vector<RoseVertex>> &buckets,
for (RoseVertex v : inv_adjacent_vertices_range(u, g)) {
auto it = inv.find(v);
if (it != end(inv)) {
neighbours_by_bucket[it->second].push_back(v);
neighbours_by_bucket[it->second].emplace_back(v);
}
}
}
@ -1540,14 +1540,14 @@ void splitByNeighbour(const RoseGraph &g, vector<vector<RoseVertex>> &buckets,
if (contains(picked, v)) {
inv[v] = new_key;
} else {
leftovers.push_back(v);
leftovers.emplace_back(v);
}
}

assert(!leftovers.empty());
assert(e.second.size() + leftovers.size()
== buckets[old_key].size());
extras.push_back(e.second);
extras.emplace_back(e.second);
buckets[old_key].swap(leftovers);
}
insert(&buckets, buckets.end(), extras);
@ -1650,7 +1650,7 @@ void diamondMergePass(CandidateSet &candidates, RoseBuildImpl &build,
}

mergeVerticesDiamond(a, b, build, rai);
dead->push_back(a);
dead->emplace_back(a);
candidates.erase(a);
break; // next a
}
@ -1758,7 +1758,7 @@ void leftMergePass(CandidateSet &candidates, RoseBuildImpl &build,
RoseVertex b = *jt;
if (attemptRoseMerge(build, true, a, b, false, rai)) {
mergeVerticesLeft(a, b, build, rai);
dead->push_back(a);
dead->emplace_back(a);
candidates.erase(ait);
break; // consider next a
}
@ -1918,7 +1918,7 @@ void rightMergePass(CandidateSet &candidates, RoseBuildImpl &build,
RoseVertex b = *jt;
if (attemptRoseMerge(build, false, a, b, !mergeRoses, rai)) {
mergeVerticesRight(a, b, build, rai);
dead->push_back(a);
dead->emplace_back(a);
candidates.erase(a);
break; // consider next a
}
@ -1978,7 +1978,7 @@ void filterDiamondCandidates(RoseGraph &g, CandidateSet &candidates) {
vector<RoseVertex> dead;
for (const auto &v : candidates) {
if (hasNoDiamondSiblings(g, v)) {
dead.push_back(v);
dead.emplace_back(v);
}
}

@ -2145,13 +2145,13 @@ void mergeDupeLeaves(RoseBuildImpl &build) {
if (g[et].minBound <= g[e].minBound
&& g[et].maxBound >= g[e].maxBound) {
DEBUG_PRINTF("remove more constrained edge\n");
deadEdges.push_back(e);
deadEdges.emplace_back(e);
}
} else {
DEBUG_PRINTF("rehome edge: add %zu->%zu\n", g[u].index,
g[t].index);
add_edge(u, t, g[e], g);
deadEdges.push_back(e);
deadEdges.emplace_back(e);
}
}

@ -2159,7 +2159,7 @@ void mergeDupeLeaves(RoseBuildImpl &build) {
for (auto &e : deadEdges) {
remove_edge(e, g);
}
changed.push_back(v);
changed.emplace_back(v);
g[t].min_offset = min(g[t].min_offset, g[v].min_offset);
g[t].max_offset = max(g[t].max_offset, g[v].max_offset);
}
@ -2212,7 +2212,7 @@ void mergeCluster(RoseGraph &g, const ReportManager &rm,
NGHolder *h = g[v].suffix.graph.get();
assert(!g[v].suffix.haig); /* should not be here if haig */
rev[h] = v;
cluster.push_back(h);
cluster.emplace_back(h);
}
it = it2;

@ -2230,7 +2230,7 @@ void mergeCluster(RoseGraph &g, const ReportManager &rm,
ENSURE_AT_LEAST(&g[winner].max_offset, g[victim].max_offset);
insert(&g[winner].reports, g[victim].reports);

dead.push_back(victim);
dead.emplace_back(victim);
}
}
}
@ -2263,7 +2263,7 @@ void findUncalcLeavesCandidates(RoseBuildImpl &build,
continue;
}

suffix_vertices.push_back(v);
suffix_vertices.emplace_back(v);
}
}

@ -2289,9 +2289,9 @@ void findUncalcLeavesCandidates(RoseBuildImpl &build,
vector<RoseVertex> &vec = clusters[key];
if (vec.empty()) {

ordered.push_back(key);
ordered.emplace_back(key);
}
vec.push_back(v);
vec.emplace_back(v);
}

DEBUG_PRINTF("find loop done\n");

@ -67,7 +67,7 @@ u32 findMinWidth(const RoseBuildImpl &tbi, enum rose_literal_table table) {

for (auto v : vertices_range(g)) {
if (tbi.hasLiteralInTable(v, table)) {
table_verts.push_back(v);
table_verts.emplace_back(v);
}
}

@ -193,7 +193,7 @@ u32 findMaxBAWidth(const RoseBuildImpl &tbi, enum rose_literal_table table) {
for (auto v : vertices_range(g)) {
if ((table == ROSE_FLOATING && tbi.isFloating(v))
|| (table == ROSE_ANCHORED && tbi.isAnchored(v))) {
table_verts.push_back(v);
table_verts.emplace_back(v);
}
}

@ -170,7 +170,7 @@ bool pruneOverlongReports(NFAVertex v, NGHolder &g, const depth &max_depth,
for (ReportID id : g[v].reports) {
const auto &report = rm.getReport(id);
if (report.minOffset > max_depth) {
bad_reports.push_back(id);
bad_reports.emplace_back(id);
}
}

@ -242,7 +242,7 @@ bool mergeDfas(vector<unique_ptr<raw_dfa>> &dfas, const ReportManager &rm,
vector<const raw_dfa *> dfa_ptrs;
dfa_ptrs.reserve(dfas.size());
for (auto &d : dfas) {
dfa_ptrs.push_back(d.get());
dfa_ptrs.emplace_back(d.get());
}

auto merged = mergeAllDfas(dfa_ptrs, DFA_MERGE_MAX_STATES, &rm, cc.grey);
@ -254,7 +254,7 @@ bool mergeDfas(vector<unique_ptr<raw_dfa>> &dfas, const ReportManager &rm,
DEBUG_PRINTF("merge succeeded, result has %zu states\n",
merged->states.size());
dfas.clear();
dfas.push_back(std::move(merged));
dfas.emplace_back(std::move(merged));
return true;
}

@ -315,7 +315,7 @@ void SmallWriteBuildImpl::add(const NGHolder &g, const ExpressionInfo &expr) {
minimize_hopcroft(*r, cc.grey);
}

dfas.push_back(std::move(r));
dfas.emplace_back(std::move(r));

if (dfas.size() >= cc.grey.smallWriteMergeBatchSize) {
if (!mergeDfas(dfas, rm, cc)) {
@ -426,7 +426,7 @@ struct ACVisitor : public boost::default_bfs_visitor {
auto v = target(e, trie);
DEBUG_PRINTF("bfs (%zu, %zu) on '%c'\n", trie[u].index, trie[v].index,
trie[v].c);
ordering.push_back(v);
ordering.emplace_back(v);

auto f = find_failure_target(u, v, trie);

@ -524,7 +524,7 @@ vector<u32> findDistToAccept(const LitTrie &trie) {
deque<LitTrieVertex> q;
for (auto v : vertices_range(trie)) {
if (!trie[v].reports.empty()) {
q.push_back(v);
q.emplace_back(v);
dist[trie[v].index] = 0;
}
}
@ -538,7 +538,7 @@ vector<u32> findDistToAccept(const LitTrie &trie) {
for (auto u : inv_adjacent_vertices_range(v, trie)) {
auto &u_dist = dist[trie[u].index];
if (u_dist == UINT32_MAX) {
q.push_back(u);
q.emplace_back(u);
u_dist = d + 1;
}
}
@ -573,7 +573,7 @@ void pruneTrie(LitTrie &trie, u32 max_depth) {
DEBUG_PRINTF("pruning vertex %zu (min path len %u)\n",
trie[v].index, min_path_len);
clear_vertex(v, trie);
dead.push_back(v);
dead.emplace_back(v);
}
}

@ -615,7 +615,7 @@ vector<CharReach> getAlphabet(const LitTrie &trie, bool nocase) {
CharReach t = cr & esets[i];
if (t.any() && t != esets[i]) {
esets[i] &= ~t;
esets.push_back(t);
esets.emplace_back(t);
}
}
}
@ -892,12 +892,12 @@ bytecode_ptr<SmallWriteEngine> SmallWriteBuildImpl::build(u32 roseQuality) {
}

if (!is_empty(lit_trie)) {
dfas.push_back(buildDfa(lit_trie, false));
dfas.emplace_back(buildDfa(lit_trie, false));
DEBUG_PRINTF("caseful literal dfa with %zu states\n",
dfas.back()->states.size());
}
if (!is_empty(lit_trie_nocase)) {
dfas.push_back(buildDfa(lit_trie_nocase, true));
dfas.emplace_back(buildDfa(lit_trie_nocase, true));
DEBUG_PRINTF("nocase literal dfa with %zu states\n",
dfas.back()->states.size());
}

@ -243,7 +243,7 @@ u32 SomSlotManager::numSomSlots() const {

u32 SomSlotManager::addRevNfa(bytecode_ptr<NFA> nfa, u32 maxWidth) {
u32 rv = verify_u32(rev_nfas.size());
rev_nfas.push_back(move(nfa));
rev_nfas.emplace_back(move(nfa));

// A rev nfa commits us to having enough history around to handle its
// max width.

@ -51,7 +51,7 @@ vector<u32> getNeighborInfo(const CliqueGraph &g,
// find neighbors for cv
for (const auto &v : adjacent_vertices_range(cv, g)) {
if (g[v].stateId != id && contains(group, g[v].stateId)){
neighbor.push_back(g[v].stateId);
neighbor.emplace_back(g[v].stateId);
DEBUG_PRINTF("Neighbor:%u\n", g[v].stateId);
}
}
@ -68,7 +68,7 @@ vector<u32> findCliqueGroup(CliqueGraph &cg) {
vector<u32> init;
for (const auto &v : vertices_range(cg)) {
vertexMap[cg[v].stateId] = v;
init.push_back(cg[v].stateId);
init.emplace_back(cg[v].stateId);
}
gStack.push(init);

@ -81,7 +81,7 @@ vector<u32> findCliqueGroup(CliqueGraph &cg) {
// Choose a vertex from the graph
u32 id = g[0];
CliqueVertex &n = vertexMap.at(id);
clique.push_back(id);
clique.emplace_back(id);
// Corresponding vertex in the original graph
set<u32> subgraphId(g.begin(), g.end());
auto neighbor = getNeighborInfo(cg, n, subgraphId);
@ -110,7 +110,7 @@ vector<vector<u32>> removeClique(CliqueGraph &cg) {
for (const auto &v : vertices_range(cg)) {
u32 id = cg[v].stateId;
if (find(c.begin(), c.end(), id) != c.end()) {
dead.push_back(v);
dead.emplace_back(v);
}
}
for (const auto &v : dead) {
@ -121,7 +121,7 @@ vector<vector<u32>> removeClique(CliqueGraph &cg) {
break;
}
auto clique = findCliqueGroup(cg);
cliquesVec.push_back(clique);
cliquesVec.emplace_back(clique);
}

return cliquesVec;

@ -88,7 +88,7 @@ bool determinise(Auto &n, std::vector<ds> &dstates, size_t state_limit,
dstates.reserve(state_limit);

dstate_ids.emplace(n.dead, DEAD_STATE);
dstates.push_back(ds(alphabet_size));
dstates.emplace_back(ds(alphabet_size));
std::fill_n(dstates[0].next.begin(), alphabet_size, DEAD_STATE);

std::queue<std::pair<StateSet, dstate_id_t>> q;
@ -99,7 +99,7 @@ bool determinise(Auto &n, std::vector<ds> &dstates, size_t state_limit,
q.emplace(init[i], dstates.size());
assert(!contains(dstate_ids, init[i]));
dstate_ids.emplace(init[i], dstates.size());
dstates.push_back(ds(alphabet_size));
dstates.emplace_back(ds(alphabet_size));
}

std::vector<StateSet> succs(alphabet_size, n.dead);
@ -149,7 +149,7 @@ bool determinise(Auto &n, std::vector<ds> &dstates, size_t state_limit,
} else {
succ_id = dstate_ids.size();
dstate_ids.emplace(succs[s], succ_id);
dstates.push_back(ds(alphabet_size));
dstates.emplace_back(ds(alphabet_size));
dstates.back().daddy = n.unalpha[s] < N_CHARS ? curr_id : 0;
q.emplace(succs[s], succ_id);
}

@ -157,7 +157,7 @@ find_vertices_in_cycles(const Graph &g) {
std::map<size_t, std::vector<vertex_descriptor>> comps;

for (const auto &e : comp_map) {
comps[e.second].push_back(e.first);
comps[e.second].emplace_back(e.first);
}

flat_set<vertex_descriptor> rv;

@ -163,7 +163,7 @@ public:
std::pair<iterator, bool> insert(const Key &key, const Element &element) {
const auto idx = data.size();
if (map.emplace(key, idx).second) {
data.push_back(element);
data.emplace_back(element);
return {begin() + idx, true};
}
return {end(), false};

@ -112,13 +112,13 @@ void bfs(vector<mmbit_sparse_iter> &out, const TreeNode &tree) {

if (depth != t->depth) {
depth = t->depth;
levels.push_back(out.size());
levels.emplace_back(out.size());
}

DEBUG_PRINTF("pop: mask=0x%08llx, depth=%u, children.size()=%zu\n",
t->mask, t->depth, t->children.size());

out.push_back(mmbit_sparse_iter());
out.emplace_back(mmbit_sparse_iter());
memset(&out.back(), 0, sizeof(mmbit_sparse_iter));
mmbit_sparse_iter &record = out.back();
record.mask = t->mask;

@ -139,9 +139,9 @@ public:
}

if (*sp_it > member) {
split_temp_diff.push_back(member);
split_temp_diff.emplace_back(member);
} else {
split_temp_inter.push_back(member);
split_temp_inter.emplace_back(member);
}
}

@ -177,7 +177,7 @@ public:

/* smaller subset is placed in the new subset */
size_t new_index = subsets.size();
subsets.push_back(subset());
subsets.emplace_back(subset());
insert(&subsets.back().members, subsets.back().members.end(), *small);

for (const auto &e : *small) {
@ -203,7 +203,7 @@ public:

for (size_t i = seen.find_first(); i != seen.npos;
i = seen.find_next(i)) {
containing->push_back(i);
containing->emplace_back(i);
}
}

@ -240,7 +240,7 @@ public:
assert(sub < subsets.size());

member_to_subset[i] = sub;
subsets[sub].members.push_back(i);
subsets[sub].members.emplace_back(i);
}

/* none of the subsets should be empty */

@ -66,7 +66,7 @@ u32 ReportManager::getInternalId(const Report &ir) {
}

u32 size = reportIds.size();
reportIds.push_back(ir);
reportIds.emplace_back(ir);
reportIdToInternalMap.emplace(ir, size);
DEBUG_PRINTF("new report %u\n", size);
return size;
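Not part of the commit above — a minimal, self-contained sketch of the push_back / emplace_back distinction this change relies on. The container and values here are hypothetical examples, not code from the vectorscan sources.

#include <string>
#include <utility>
#include <vector>

int main() {
    std::vector<std::pair<int, std::string>> v;

    // push_back takes an already-constructed element of the vector's
    // value_type...
    v.push_back(std::make_pair(1, std::string("one")));

    // ...while emplace_back forwards its arguments to the value_type's
    // constructor and builds the element in place, avoiding the temporary.
    v.emplace_back(2, "two");

    // For single-argument calls like most of those in this diff, the two
    // calls have the same effect; emplace_back simply forwards the argument
    // without requiring the element type to be spelled out.
    std::vector<int> keys;
    keys.emplace_back(42); // same result as keys.push_back(42)

    return static_cast<int>(v.size() + keys.size());
}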