Mirror of https://github.com/openappsec/openappsec.git (synced 2025-11-16 01:12:18 +03:00)
Compare commits
30 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 448991ef75 | |
| | 2b1ee84280 | |
| | 77dd288eee | |
| | 3cb4def82e | |
| | a0dd7dd614 | |
| | 88eed946ec | |
| | 3e1ad8b0f7 | |
| | bd35c421c6 | |
| | 9d6e883724 | |
| | cd020a7ddd | |
| | bb35eaf657 | |
| | 648f9ae2b1 | |
| | 47e47d706a | |
| | b852809d1a | |
| | a77732f84c | |
| | a1a8e28019 | |
| | a99c2ec4a3 | |
| | f1303c1703 | |
| | bd8174ead3 | |
| | 4ddcd2462a | |
| | 81433bac25 | |
| | 8d03b49176 | |
| | 84f9624c00 | |
| | 3ecda7b979 | |
| | 8f05508e02 | |
| | f5b9c93fbe | |
| | 62b74c9a10 | |
| | e3163cd4fa | |
| | 1e98fc8c66 | |
| | 6fbe272378 | |
@@ -95,6 +95,18 @@ getFailOpenHoldTimeout()
return conf_data.getNumericalValue("fail_open_hold_timeout");
}

unsigned int
getHoldVerdictPollingTime()
{
return conf_data.getNumericalValue("hold_verdict_polling_time");
}

unsigned int
getHoldVerdictRetries()
{
return conf_data.getNumericalValue("hold_verdict_retries");
}

unsigned int
getMaxSessionsPerMinute()
{
@@ -66,6 +66,8 @@ TEST_F(HttpAttachmentUtilTest, GetValidAttachmentConfiguration)
"\"static_resources_path\": \"" + static_resources_path + "\",\n"
"\"min_retries_for_verdict\": 1,\n"
"\"max_retries_for_verdict\": 3,\n"
"\"hold_verdict_retries\": 3,\n"
"\"hold_verdict_polling_time\": 1,\n"
"\"body_size_trigger\": 777,\n"
"\"remove_server_header\": 1\n"
"}\n";
@@ -97,6 +99,8 @@ TEST_F(HttpAttachmentUtilTest, GetValidAttachmentConfiguration)
EXPECT_EQ(getWaitingForVerdictThreadTimeout(), 75u);
EXPECT_EQ(getInspectionMode(), ngx_http_inspection_mode::BLOCKING_THREAD);
EXPECT_EQ(getRemoveResServerHeader(), 1u);
EXPECT_EQ(getHoldVerdictRetries(), 3u);
EXPECT_EQ(getHoldVerdictPollingTime(), 1u);

EXPECT_EQ(isDebugContext("1.2.3.4", "5.6.7.8", 80, "GET", "test", "/abc"), 1);
EXPECT_EQ(isDebugContext("1.2.3.9", "5.6.7.8", 80, "GET", "test", "/abc"), 0);
@@ -98,19 +98,19 @@ while true; do
init=true
/etc/cp/watchdog/cp-nano-watchdog >/dev/null 2>&1 &
sleep 5
active_watchdog_pid=$(pgrep -f -x -o "/bin/bash /etc/cp/watchdog/cp-nano-watchdog")
active_watchdog_pid=$(pgrep -f -x -o "/bin/(bash|sh) /etc/cp/watchdog/cp-nano-watchdog")
fi

current_watchdog_pid=$(pgrep -f -x -o "/bin/bash /etc/cp/watchdog/cp-nano-watchdog")
current_watchdog_pid=$(pgrep -f -x -o "/bin/(bash|sh) /etc/cp/watchdog/cp-nano-watchdog")
if [ ! -f /tmp/restart_watchdog ] && [ "$current_watchdog_pid" != "$active_watchdog_pid" ]; then
echo "Error: Watchdog exited abnormally"
exit 1
elif [ -f /tmp/restart_watchdog ]; then
rm -f /tmp/restart_watchdog
kill -9 "$(pgrep -f -x -o "/bin/bash /etc/cp/watchdog/cp-nano-watchdog")"
kill -9 "$(pgrep -f -x -o "/bin/(bash|sh) /etc/cp/watchdog/cp-nano-watchdog")"
/etc/cp/watchdog/cp-nano-watchdog >/dev/null 2>&1 &
sleep 5
active_watchdog_pid=$(pgrep -f -x -o "/bin/bash /etc/cp/watchdog/cp-nano-watchdog")
active_watchdog_pid=$(pgrep -f -x -o "/bin/(bash|sh) /etc/cp/watchdog/cp-nano-watchdog")
fi

sleep 5
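The hunk above broadens the watchdog lookup so it also matches a watchdog started with /bin/sh. A minimal sketch of the matching behavior, assuming a procps-style pgrep that interprets the pattern as an extended regular expression:

```sh
# -f matches the full command line, -x requires the pattern to match it exactly,
# -o returns the oldest matching PID. The alternation accepts either interpreter,
# so the health check still finds the watchdog on images that launch it with /bin/sh.
pgrep -f -x -o "/bin/(bash|sh) /etc/cp/watchdog/cp-nano-watchdog"
```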
@@ -31,6 +31,7 @@
#include <stdarg.h>

#include <boost/range/iterator_range.hpp>
#include <boost/algorithm/string.hpp>
#include <boost/regex.hpp>

#include "nginx_attachment_config.h"
@@ -260,6 +261,22 @@ public:
);
}

const char* ignored_headers_env = getenv("SAAS_IGNORED_UPSTREAM_HEADERS");
if (ignored_headers_env) {
string ignored_headers_str = ignored_headers_env;
ignored_headers_str = NGEN::Strings::trim(ignored_headers_str);

if (!ignored_headers_str.empty()) {
dbgInfo(D_HTTP_MANAGER)
<< "Ignoring SAAS_IGNORED_UPSTREAM_HEADERS environment variable: "
<< ignored_headers_str;

vector<string> ignored_headers_vec;
boost::split(ignored_headers_vec, ignored_headers_str, boost::is_any_of(";"));
for (const string &header : ignored_headers_vec) ignored_headers.insert(header);
}
}

dbgInfo(D_NGINX_ATTACHMENT) << "Successfully initialized NGINX Attachment";
}
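For reference, a hedged usage sketch of the environment variable read above. Only the variable name and the ';' separator come from the code; the header names are placeholders for illustration:

```sh
# Headers listed here are skipped when request headers are handed to inspection.
# X-Internal-Trace and X-Debug-Id are hypothetical names, not part of the product.
export SAAS_IGNORED_UPSTREAM_HEADERS="X-Internal-Trace;X-Debug-Id"
```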
@@ -1034,7 +1051,11 @@ private:
case ChunkType::REQUEST_START:
return handleStartTransaction(data, opaque);
case ChunkType::REQUEST_HEADER:
return handleMultiModifiableChunks(NginxParser::parseRequestHeaders(data), "request header", true);
return handleMultiModifiableChunks(
NginxParser::parseRequestHeaders(data, ignored_headers),
"request header",
true
);
case ChunkType::REQUEST_BODY:
return handleModifiableChunk(NginxParser::parseRequestBody(data), "request body", true);
case ChunkType::REQUEST_END: {
@@ -1814,6 +1835,7 @@ private:
HttpAttachmentConfig attachment_config;
I_MainLoop::RoutineID attachment_routine_id = 0;
bool traffic_indicator = false;
unordered_set<string> ignored_headers;

// Interfaces
I_Socket *i_socket = nullptr;
@@ -240,6 +240,21 @@ HttpAttachmentConfig::setRetriesForVerdict()
"Max retries for verdict"
));

conf_data.setNumericalValue("hold_verdict_retries", getAttachmentConf<uint>(
3,
"agent.retriesForHoldVerdict.nginxModule",
"HTTP manager",
"Retries for hold verdict"
));

conf_data.setNumericalValue("hold_verdict_polling_time", getAttachmentConf<uint>(
1,
"agent.holdVerdictPollingInterval.nginxModule",
"HTTP manager",
"Hold verdict polling interval seconds"
));

conf_data.setNumericalValue("body_size_trigger", getAttachmentConf<uint>(
200000,
"agent.reqBodySizeTrigger.nginxModule",
@@ -19,12 +19,15 @@

#include "config.h"
#include "virtual_modifiers.h"
#include "agent_core_utilities.h"

using namespace std;
using namespace boost::uuids;

USE_DEBUG_FLAG(D_HTTP_MANAGER);

extern bool is_keep_alive_ctx;

NginxAttachmentOpaque::NginxAttachmentOpaque(HttpTransactionData _transaction_data)
:
TableOpaqueSerialize<NginxAttachmentOpaque>(this),
@@ -119,3 +122,47 @@ NginxAttachmentOpaque::setSavedData(const string &name, const string &data, EnvK
saved_data[name] = data;
ctx.registerValue(name, data, log_ctx);
}

bool
NginxAttachmentOpaque::setKeepAliveCtx(const string &hdr_key, const string &hdr_val)
{
if (!is_keep_alive_ctx) return false;

static pair<string, string> keep_alive_hdr;
static bool keep_alive_hdr_initialized = false;

if (keep_alive_hdr_initialized) {
if (!keep_alive_hdr.first.empty() && hdr_key == keep_alive_hdr.first && hdr_val == keep_alive_hdr.second) {
dbgTrace(D_HTTP_MANAGER) << "Registering keep alive context";
ctx.registerValue("keep_alive_request_ctx", true);
return true;
}
return false;
}

const char* saas_keep_alive_hdr_name_env = getenv("SAAS_KEEP_ALIVE_HDR_NAME");
if (saas_keep_alive_hdr_name_env) {
keep_alive_hdr.first = NGEN::Strings::trim(saas_keep_alive_hdr_name_env);
dbgInfo(D_HTTP_MANAGER) << "Using SAAS_KEEP_ALIVE_HDR_NAME environment variable: " << keep_alive_hdr.first;
}

if (!keep_alive_hdr.first.empty()) {
const char* saas_keep_alive_hdr_value_env = getenv("SAAS_KEEP_ALIVE_HDR_VALUE");
if (saas_keep_alive_hdr_value_env) {
keep_alive_hdr.second = NGEN::Strings::trim(saas_keep_alive_hdr_value_env);
dbgInfo(D_HTTP_MANAGER)
<< "Using SAAS_KEEP_ALIVE_HDR_VALUE environment variable: "
<< keep_alive_hdr.second;
}

if (!keep_alive_hdr.second.empty() && (hdr_key == keep_alive_hdr.first && hdr_val == keep_alive_hdr.second)) {
dbgTrace(D_HTTP_MANAGER) << "Registering keep alive context";
ctx.registerValue("keep_alive_request_ctx", true);
keep_alive_hdr_initialized = true;
return true;
}
}

keep_alive_hdr_initialized = true;
return false;
}
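A usage sketch for the two environment variables consumed by setKeepAliveCtx() above. The variable names and the trimming behavior come from the code; the values are hypothetical:

```sh
# A request carrying a header with this exact name and value gets tagged with the
# keep_alive_request_ctx context value, and the header itself is removed before inspection.
export SAAS_KEEP_ALIVE_HDR_NAME="X-Keep-Alive-Probe"
export SAAS_KEEP_ALIVE_HDR_VALUE="health-check"
```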
@@ -85,6 +85,7 @@ public:
EnvKeyAttr::LogSection log_ctx = EnvKeyAttr::LogSection::NONE
);
void setApplicationState(const ApplicationState &app_state) { application_state = app_state; }
bool setKeepAliveCtx(const std::string &hdr_key, const std::string &hdr_val);

private:
CompressionStream *response_compression_stream;
@@ -29,6 +29,7 @@ USE_DEBUG_FLAG(D_NGINX_ATTACHMENT_PARSER);
Buffer NginxParser::tenant_header_key = Buffer();
static const Buffer proxy_ip_header_key("X-Forwarded-For", 15, Buffer::MemoryType::STATIC);
static const Buffer source_ip("sourceip", 8, Buffer::MemoryType::STATIC);
bool is_keep_alive_ctx = getenv("SAAS_KEEP_ALIVE_HDR_NAME") != nullptr;

map<Buffer, CompressionType> NginxParser::content_encodings = {
{Buffer("identity"), CompressionType::NO_COMPRESSION},
@@ -177,22 +178,54 @@ getActivetenantAndProfile(const string &str, const string &deli = ",")
}

Maybe<vector<HttpHeader>>
NginxParser::parseRequestHeaders(const Buffer &data)
NginxParser::parseRequestHeaders(const Buffer &data, const unordered_set<string> &ignored_headers)
{
auto parsed_headers = genHeaders(data);
if (!parsed_headers.ok()) return parsed_headers.passErr();
auto maybe_parsed_headers = genHeaders(data);
if (!maybe_parsed_headers.ok()) return maybe_parsed_headers.passErr();

auto i_transaction_table = Singleton::Consume<I_TableSpecific<SessionID>>::by<NginxAttachment>();
auto parsed_headers = maybe_parsed_headers.unpack();
NginxAttachmentOpaque &opaque = i_transaction_table->getState<NginxAttachmentOpaque>();

for (const HttpHeader &header : *parsed_headers) {
if (is_keep_alive_ctx || !ignored_headers.empty()) {
bool is_last_header_removed = false;
parsed_headers.erase(
remove_if(
parsed_headers.begin(),
parsed_headers.end(),
[&opaque, &is_last_header_removed, &ignored_headers](const HttpHeader &header)
{
string hdr_key = static_cast<string>(header.getKey());
string hdr_val = static_cast<string>(header.getValue());
if (
opaque.setKeepAliveCtx(hdr_key, hdr_val)
|| ignored_headers.find(hdr_key) != ignored_headers.end()
) {
dbgTrace(D_NGINX_ATTACHMENT_PARSER) << "Header was removed from headers list: " << hdr_key;
if (header.isLastHeader()) {
dbgTrace(D_NGINX_ATTACHMENT_PARSER) << "Last header was removed from headers list";
is_last_header_removed = true;
}
return true;
}
return false;
}
),
parsed_headers.end()
);
if (is_last_header_removed) {
dbgTrace(D_NGINX_ATTACHMENT_PARSER) << "Adjusting last header flag";
if (!parsed_headers.empty()) parsed_headers.back().setIsLastHeader();
}
}

for (const HttpHeader &header : parsed_headers) {
auto source_identifiers = getConfigurationWithDefault<UsersAllIdentifiersConfig>(
UsersAllIdentifiersConfig(),
"rulebase",
"usersIdentifiers"
);
source_identifiers.parseRequestHeaders(header);

NginxAttachmentOpaque &opaque = i_transaction_table->getState<NginxAttachmentOpaque>();
opaque.addToSavedData(
HttpTransactionData::req_headers,
static_cast<string>(header.getKey()) + ": " + static_cast<string>(header.getValue()) + "\r\n"
@@ -28,7 +28,10 @@ public:
static Maybe<HttpTransactionData> parseStartTrasaction(const Buffer &data);
static Maybe<ResponseCode> parseResponseCode(const Buffer &data);
static Maybe<uint64_t> parseContentLength(const Buffer &data);
static Maybe<std::vector<HttpHeader>> parseRequestHeaders(const Buffer &data);
static Maybe<std::vector<HttpHeader>> parseRequestHeaders(
const Buffer &data,
const std::unordered_set<std::string> &ignored_headers
);
static Maybe<std::vector<HttpHeader>> parseResponseHeaders(const Buffer &data);
static Maybe<HttpBody> parseRequestBody(const Buffer &data);
static Maybe<HttpBody> parseResponseBody(const Buffer &raw_response_body, CompressionStream *compression_stream);
@@ -285,17 +285,21 @@ Maybe<string>
UsersAllIdentifiersConfig::parseXForwardedFor(const string &str, ExtractType type) const
{
vector<string> header_values = split(str);

if (header_values.empty()) return genError("No IP found in the xff header list");

vector<string> xff_values = getHeaderValuesFromConfig("x-forwarded-for");
vector<CIDRSData> cidr_values(xff_values.begin(), xff_values.end());
string last_valid_ip;

for (auto it = header_values.rbegin(); it != header_values.rend() - 1; ++it) {
if (!IPAddr::createIPAddr(*it).ok()) {
dbgWarning(D_NGINX_ATTACHMENT_PARSER) << "Invalid IP address found in the xff header IPs list: " << *it;
return genError("Invalid IP address");
if (last_valid_ip.empty()) {
return genError("Invalid IP address");
}
return last_valid_ip;
}
last_valid_ip = *it;
if (type == ExtractType::PROXYIP) continue;
if (!isIpTrusted(*it, cidr_values)) {
dbgDebug(D_NGINX_ATTACHMENT_PARSER) << "Found untrusted IP in the xff header IPs list: " << *it;
@@ -307,7 +311,10 @@ UsersAllIdentifiersConfig::parseXForwardedFor(const string &str, ExtractType typ
dbgWarning(D_NGINX_ATTACHMENT_PARSER)
<< "Invalid IP address found in the xff header IPs list: "
<< header_values[0];
return genError("Invalid IP address");
if (last_valid_ip.empty()) {
return genError("No Valid Ip address was found");
}
return last_valid_ip;
}

return header_values[0];
@@ -15,18 +15,14 @@

#include <string>
#include <map>
#include <sys/stat.h>
#include <climits>
#include <unordered_map>
#include <unordered_set>
#include <boost/range/iterator_range.hpp>
#include <boost/algorithm/string.hpp>
#include <fstream>
#include <algorithm>

#include "common.h"
#include "config.h"
#include "table_opaque.h"
#include "http_manager_opaque.h"
#include "log_generator.h"
#include "http_inspection_events.h"
@@ -69,22 +65,6 @@ public:
i_transaction_table = Singleton::Consume<I_Table>::by<HttpManager>();

Singleton::Consume<I_Logging>::by<HttpManager>()->addGeneralModifier(compressAppSecLogs);

const char* ignored_headers_env = getenv("SAAS_IGNORED_UPSTREAM_HEADERS");
if (ignored_headers_env) {
string ignored_headers_str = ignored_headers_env;
ignored_headers_str = NGEN::Strings::removeTrailingWhitespaces(ignored_headers_str);

if (!ignored_headers_str.empty()) {
dbgInfo(D_HTTP_MANAGER)
<< "Ignoring SAAS_IGNORED_UPSTREAM_HEADERS environment variable: "
<< ignored_headers_str;

vector<string> ignored_headers_vec;
boost::split(ignored_headers_vec, ignored_headers_str, boost::is_any_of(";"));
for (const string &header : ignored_headers_vec) ignored_headers.insert(header);
}
}
}

FilterVerdict
@@ -109,19 +89,12 @@ public:
return FilterVerdict(default_verdict);
}

if (is_request && ignored_headers.find(static_cast<string>(event.getKey())) != ignored_headers.end()) {
dbgTrace(D_HTTP_MANAGER)
<< "Ignoring header key - "
<< static_cast<string>(event.getKey())
<< " - as it is in the ignored headers list";
return FilterVerdict(ngx_http_cp_verdict_e::TRAFFIC_VERDICT_INSPECT);
}

ScopedContext ctx;
ctx.registerValue(app_sec_marker_key, i_transaction_table->keyToString(), EnvKeyAttr::LogSection::MARKER);

HttpManagerOpaque &state = i_transaction_table->getState<HttpManagerOpaque>();
string event_key = static_cast<string>(event.getKey());

if (event_key == getProfileAgentSettingWithDefault<string>("", "agent.customHeaderValueLogging")) {
string event_value = static_cast<string>(event.getValue());
dbgTrace(D_HTTP_MANAGER)
@@ -421,7 +394,6 @@ private:
I_Table *i_transaction_table;
static const ngx_http_cp_verdict_e default_verdict;
static const string app_sec_marker_key;
unordered_set<string> ignored_headers;
};

const ngx_http_cp_verdict_e HttpManager::Impl::default_verdict(ngx_http_cp_verdict_e::TRAFFIC_VERDICT_DROP);
@@ -239,6 +239,7 @@ public:
const Buffer & getValue() const { return value; }

bool isLastHeader() const { return is_last_header; }
void setIsLastHeader() { is_last_header = true; }
uint8_t getHeaderIndex() const { return header_index; }

private:
@@ -62,6 +62,7 @@ public:

private:
Maybe<std::string> downloadPackage(const Package &package, bool is_clean_installation);
std::string getCurrentTimestamp();

std::string manifest_file_path;
std::string temp_ext;
@@ -30,6 +30,7 @@
#include "generic_metric.h"

#define LOGGING_INTERVAL_IN_MINUTES 10
USE_DEBUG_FLAG(D_WAAP);
enum class AssetType { API, WEB, ALL, COUNT };

class WaapTelemetryEvent : public Event<WaapTelemetryEvent>
@@ -132,6 +133,7 @@ private:
std::map<std::string, std::shared_ptr<T>>& telemetryMap
) {
if (!telemetryMap.count(asset_id)) {
dbgTrace(D_WAAP) << "creating telemetry data for asset: " << data.assetName;
telemetryMap.emplace(asset_id, std::make_shared<T>());
telemetryMap[asset_id]->init(
telemetryName,
@@ -139,7 +141,9 @@ private:
ReportIS::IssuingEngine::AGENT_CORE,
std::chrono::minutes(LOGGING_INTERVAL_IN_MINUTES),
true,
ReportIS::Audience::SECURITY
ReportIS::Audience::SECURITY,
false,
asset_id
);

telemetryMap[asset_id]->template registerContext<std::string>(
@@ -152,29 +156,30 @@ private:
std::string("Web Application"),
EnvKeyAttr::LogSection::SOURCE
);
telemetryMap[asset_id]->template registerContext<std::string>(
"assetId",
asset_id,
EnvKeyAttr::LogSection::SOURCE
);
telemetryMap[asset_id]->template registerContext<std::string>(
"assetName",
data.assetName,
EnvKeyAttr::LogSection::SOURCE
);
telemetryMap[asset_id]->template registerContext<std::string>(
"practiceId",
data.practiceId,
EnvKeyAttr::LogSection::SOURCE
);
telemetryMap[asset_id]->template registerContext<std::string>(
"practiceName",
data.practiceName,
EnvKeyAttr::LogSection::SOURCE
);

telemetryMap[asset_id]->registerListener();
}
dbgTrace(D_WAAP) << "updating telemetry data for asset: " << data.assetName;

telemetryMap[asset_id]->template registerContext<std::string>(
"assetId",
asset_id,
EnvKeyAttr::LogSection::SOURCE
);
telemetryMap[asset_id]->template registerContext<std::string>(
"assetName",
data.assetName,
EnvKeyAttr::LogSection::SOURCE
);
telemetryMap[asset_id]->template registerContext<std::string>(
"practiceId",
data.practiceId,
EnvKeyAttr::LogSection::SOURCE
);
telemetryMap[asset_id]->template registerContext<std::string>(
"practiceName",
data.practiceName,
EnvKeyAttr::LogSection::SOURCE
);
}
};
@@ -33,6 +33,7 @@ class I_WaapAssetStatesManager;
class I_Messaging;
class I_AgentDetails;
class I_Encryptor;
class I_WaapModelResultLogger;

const std::string WAAP_APPLICATION_NAME = "waap application";

@@ -50,7 +51,8 @@ class WaapComponent
Singleton::Consume<I_AgentDetails>,
Singleton::Consume<I_Messaging>,
Singleton::Consume<I_Encryptor>,
Singleton::Consume<I_Environment>
Singleton::Consume<I_Environment>,
Singleton::Consume<I_WaapModelResultLogger>
{
public:
WaapComponent();
@@ -179,7 +179,7 @@ private:
Maybe<void>
configureSyslog()
{
if (!getProfileAgentSettingWithDefault<bool>(true, "centralNginxManagement.syslogEnabled")) {
if (!getProfileAgentSettingWithDefault<bool>(false, "centralNginxManagement.syslogEnabled")) {
dbgTrace(D_NGINX_MANAGER) << "Syslog is disabled via settings";
return {};
}
@@ -331,6 +331,8 @@ public:
logError("Could not reload central NGINX configuration. Error: " + reload_result.getErr());
return;
}

logInfo("Central NGINX configuration has been successfully reloaded");
}

void
@@ -351,11 +353,37 @@ private:
{
LogGen log(
error,
ReportIS::Level::ACTION,
ReportIS::Audience::SECURITY,
ReportIS::Severity::CRITICAL,
ReportIS::Priority::HIGH,
ReportIS::Priority::URGENT,
ReportIS::Tags::POLICY_INSTALLATION
);

log.addToOrigin(LogField("eventTopic", "Central NGINX Management"));
log << LogField("notificationId", "4165c3b1-e9bc-44c3-888b-863e204c1bfb");
log << LogField(
"eventRemediation",
"Please verify your NGINX configuration and enforce policy again. "
"Contact Check Point support if the issue persists."
);
}

void
logInfo(const string &info)
{
LogGen log(
info,
ReportIS::Level::ACTION,
ReportIS::Audience::SECURITY,
ReportIS::Severity::INFO,
ReportIS::Priority::LOW,
ReportIS::Tags::POLICY_INSTALLATION
);

log.addToOrigin(LogField("eventTopic", "Central NGINX Management"));
log << LogField("notificationId", "4165c3b1-e9bc-44c3-888b-863e204c1bfb");
log << LogField("eventRemediation", "No action required");
}

I_MainLoop *i_mainloop = nullptr;
@@ -497,7 +497,8 @@ WebAppSection::WebAppSection(
const AppsecPracticeAntiBotSection &_anti_bots,
const LogTriggerSection &parsed_log_trigger,
const AppSecTrustedSources &parsed_trusted_sources,
const NewAppSecWebAttackProtections &protections)
const NewAppSecWebAttackProtections &protections,
const vector<InnerException> &exceptions)
:
application_urls(_application_urls),
asset_id(_asset_id),
@@ -541,6 +542,10 @@ WebAppSection::WebAppSection(
overrides.push_back(AppSecOverride(source_ident));
}

for (const auto &exception : exceptions) {
overrides.push_back(AppSecOverride(exception));
}

}

// LCOV_EXCL_STOP
@@ -298,7 +298,8 @@ public:
const AppsecPracticeAntiBotSection &_anti_bots,
const LogTriggerSection &parsed_log_trigger,
const AppSecTrustedSources &parsed_trusted_sources,
const NewAppSecWebAttackProtections &protections);
const NewAppSecWebAttackProtections &protections,
const std::vector<InnerException> &exceptions);

void save(cereal::JSONOutputArchive &out_ar) const;
@@ -45,7 +45,6 @@ public:

private:
std::string name;
std::string mode;
std::vector<std::string> hosts;
};
@@ -206,7 +206,8 @@ private:
const RulesConfigRulebase& rule_config,
const std::string &practice_id, const std::string &full_url,
const std::string &default_mode,
std::map<AnnotationTypes, std::string> &rule_annotations
std::map<AnnotationTypes, std::string> &rule_annotations,
std::vector<InnerException>
);

void
@@ -698,8 +698,12 @@ K8sPolicyUtils::createAppsecPolicies()
}
}

auto maybe_policy_activation =
getObjectFromCluster<PolicyActivationData>("/apis/openappsec.io/v1beta2/policyactivations");

string ns_suffix = getAppSecScopeType() == "namespaced" ? "ns" : "";
string ns = getAppSecScopeType() == "namespaced" ? "namespaces/" : "";
auto maybe_policy_activation = getObjectFromCluster<PolicyActivationData>(
"/apis/openappsec.io/v1beta2/" + ns + agent_ns + "policyactivations" + ns_suffix
);

if (!maybe_policy_activation.ok()) {
dbgWarning(D_LOCAL_POLICY)
@@ -69,7 +69,7 @@ Identifier::load(cereal::JSONInputArchive &archive_in)
dbgWarning(D_LOCAL_POLICY) << "AppSec identifier invalid: " << identifier;
identifier = "sourceip";
}
parseMandatoryAppsecJSONKey<vector<string>>("value", value, archive_in);
parseAppsecJSONKey<vector<string>>("value", value, archive_in);
}

const string &
@@ -18,14 +18,6 @@ using namespace std;

USE_DEBUG_FLAG(D_LOCAL_POLICY);

static const set<string> valid_modes = {
"prevent-learn",
"detect-learn",
"prevent",
"detect",
"inactive"
};

void
PolicyActivationMetadata::load(cereal::JSONInputArchive &archive_in)
{
@@ -39,11 +31,6 @@ EnabledPolicy::load(cereal::JSONInputArchive &archive_in)
dbgTrace(D_LOCAL_POLICY) << "Loading policyActivation enabled policy";
parseMandatoryAppsecJSONKey<vector<string>>("hosts", hosts, archive_in);
parseAppsecJSONKey<string>("name", name, archive_in);
parseAppsecJSONKey<string>("mode", mode, archive_in, "detect");
if (valid_modes.count(mode) == 0) {
dbgWarning(D_LOCAL_POLICY) << "AppSec policy activation mode invalid: " << mode;
mode = "detect";
}
}

const string &
@@ -928,7 +928,6 @@ createMultiRulesSections(
PracticeSection practice = PracticeSection(practice_id, practice_type, practice_name);
vector<ParametersSection> exceptions_result;
for (auto exception : exceptions) {

const auto &exception_name = exception.first;
for (const auto &inner_exception : exception.second) {
exceptions_result.push_back(ParametersSection(inner_exception.getBehaviorId(), exception_name));
@@ -1220,7 +1219,8 @@ PolicyMakerUtils::createWebAppSection(
const string &practice_id,
const string &full_url,
const string &default_mode,
map<AnnotationTypes, string> &rule_annotations)
map<AnnotationTypes, string> &rule_annotations,
vector<InnerException> rule_inner_exceptions)
{
auto apssec_practice =
getAppsecPracticeSpec<V1beta2AppsecLinuxPolicy, NewAppSecPracticeSpec>(
@@ -1255,7 +1255,8 @@ PolicyMakerUtils::createWebAppSection(
apssec_practice.getAntiBot(),
log_triggers[rule_annotations[AnnotationTypes::TRIGGER]],
trusted_sources[rule_annotations[AnnotationTypes::TRUSTED_SOURCES]],
apssec_practice.getWebAttacks().getProtections()
apssec_practice.getWebAttacks().getProtections(),
rule_inner_exceptions
);
web_apps[rule_config.getAssetName()] = web_app;
}
@@ -1366,7 +1367,8 @@ PolicyMakerUtils::createThreatPreventionPracticeSections(
practice_id,
asset_name,
default_mode,
rule_annotations);
rule_annotations,
inner_exceptions[rule_annotations[AnnotationTypes::EXCEPTION]]);
}

}
@@ -80,7 +80,9 @@ DetailsResolver::Impl::getHostname()
Maybe<string>
DetailsResolver::Impl::getPlatform()
{
#if defined(gaia)
#if defined(gaia_arm)
return string("gaia_arm");
#elif defined(gaia)
return string("gaia");
#elif defined(arm32_rpi)
return string("glibc");
@@ -71,7 +71,18 @@ checkPepIdaIdnStatus(const string &command_output)
Maybe<string>
getRequiredNanoServices(const string &command_output)
{
return command_output;
string idaRequiredServices[2] = {"idaSaml", "idaIdn"};
string platform_str = "gaia";
#if defined(gaia_arm)
platform_str = "gaia_arm";
#endif // gaia_arm
string result = "";
for(const string &serv : idaRequiredServices) {
string add_service = serv + "_" + platform_str;
result = result + add_service + ";";
}
command_output.empty(); // overcome unused variable
return result;
}

Maybe<string>
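The rewritten getRequiredNanoServices() above assembles the service list from a fixed array plus a platform suffix instead of echoing the shell command output. An equivalent shell sketch of the value it produces (gaia build shown; gaia_arm builds substitute the gaia_arm suffix):

```sh
platform=gaia    # becomes gaia_arm when the agent is compiled with the gaia_arm flag
for serv in idaSaml idaIdn; do
    printf '%s_%s;' "$serv" "$platform"
done
# -> idaSaml_gaia;idaIdn_gaia;
```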
@@ -342,6 +353,28 @@ getSMCBasedMgmtName(const string &command_output)
return getAttr(command_output, "Mgmt object Name was not found");
}

Maybe<string>
getSmbObjectUid(const string &command_output)
{
static const char centrally_managed_comd_output = '0';

if (command_output.empty() || command_output[0] != centrally_managed_comd_output) {
return genError("Object UUID was not found");
}

Maybe<string> obj_uuid = getAttrFromCpsdwanGetDataJson("uuid");
if (obj_uuid.ok()) {
return obj_uuid.unpack();
}

static const string obj_path = (getenv("FWDIR") ? string(getenv("FWDIR")) : "") + "/database/myown.C";
auto file_stream = std::make_shared<std::ifstream>(obj_path);
if (!file_stream->is_open()) {
return genError("Failed to open the object file");
}
return getMgmtObjAttr(file_stream, "uuid ");
}

Maybe<string>
getSmbObjectName(const string &command_output)
{
@@ -42,11 +42,6 @@ SHELL_PRE_CMD("gunzip local.cfg", "gunzip -c $FWDIR/state/local/FW1/local.cfg.gz
#ifdef SHELL_CMD_HANDLER
#if defined(gaia) || defined(smb) || defined(smb_thx_v3) || defined(smb_sve_v2) || defined(smb_mrv_v1)
SHELL_CMD_HANDLER("cpProductIntegrationMgmtObjectType", "cpprod_util CPPROD_IsMgmtMachine", getMgmtObjType)
SHELL_CMD_HANDLER(
"cpProductIntegrationMgmtObjectUid",
"mgmt_cli --format json -r true show-session | jq -r '.[\"connected-server\"].uid'",
getMgmtObjUid
)
SHELL_CMD_HANDLER("prerequisitesForHorizonTelemetry",
"FS_PATH=<FILESYSTEM-PREFIX>; [ -f ${FS_PATH}/cp-nano-horizon-telemetry-prerequisites.log ] "
"&& head -1 ${FS_PATH}/cp-nano-horizon-telemetry-prerequisites.log || echo ''",
@@ -150,12 +145,17 @@ SHELL_CMD_HANDLER("hasSAMLSupportedBlade", "enabled_blades", checkSAMLSupportedB
SHELL_CMD_HANDLER("hasIDABlade", "enabled_blades", checkIDABlade)
SHELL_CMD_HANDLER("hasSAMLPortal", "mpclient status nac", checkSAMLPortal)
SHELL_CMD_HANDLER("hasIdaIdnEnabled", "fw ctl get int nac_pep_identity_next_enabled", checkPepIdaIdnStatus)
SHELL_CMD_HANDLER("requiredNanoServices", "echo 'idaSaml_gaia;idaIdn_gaia;'", getRequiredNanoServices)
SHELL_CMD_HANDLER("requiredNanoServices", "echo ida", getRequiredNanoServices)
SHELL_CMD_HANDLER(
"cpProductIntegrationMgmtObjectName",
"mgmt_cli --format json -r true show-session | jq -r '.[\"connected-server\"].name'",
getMgmtObjName
)
SHELL_CMD_HANDLER(
"cpProductIntegrationMgmtObjectUid",
"mgmt_cli --format json -r true show-session | jq -r '.[\"connected-server\"].uid'",
getMgmtObjUid
)
SHELL_CMD_HANDLER(
"cpProductIntegrationMgmtParentObjectName",
"cat $FWDIR/database/myself_objects.C "
@@ -227,6 +227,11 @@ SHELL_CMD_HANDLER(
"cpprod_util FwIsLocalMgmt",
getSmbObjectName
)
SHELL_CMD_HANDLER(
"cpProductIntegrationMgmtObjectUid",
"cpprod_util FwIsLocalMgmt",
getSmbObjectUid
)
SHELL_CMD_HANDLER(
"Application Control",
"cat $FWDIR/conf/active_blades.txt | grep -o 'APCL [01]' | cut -d ' ' -f2",
@@ -100,6 +100,7 @@ private:
string packages_dir;
string orch_service_name;
set<string> ignore_packages;
Maybe<string> forbidden_versions = genError("Forbidden versions file does not exist");
};

void
@@ -135,7 +136,8 @@ ManifestController::Impl::init()
"Ignore packages list file path"
);

if (Singleton::Consume<I_OrchestrationTools>::by<ManifestController>()->doesFileExist(ignore_packages_path)) {
auto orchestration_tools = Singleton::Consume<I_OrchestrationTools>::by<ManifestController>();
if (orchestration_tools->doesFileExist(ignore_packages_path)) {
try {
ifstream input_stream(ignore_packages_path);
if (!input_stream) {
@@ -156,6 +158,9 @@ ManifestController::Impl::init()
<< " Error: " << f.what();
}
}

const string forbidden_versions_path = getFilesystemPathConfig() + "/revert/forbidden_versions";
forbidden_versions = orchestration_tools->readFile(forbidden_versions_path);
}

bool
@@ -271,6 +276,17 @@ ManifestController::Impl::updateManifest(const string &new_manifest_file)
}

map<string, Package> new_packages = parsed_manifest.unpack();
if (!new_packages.empty()) {
const Package &package = new_packages.begin()->second;
if (forbidden_versions.ok() &&
forbidden_versions.unpack().find(package.getVersion()) != string::npos
) {
dbgWarning(D_ORCHESTRATOR)
<< "Packages version is in the forbidden versions list. No upgrade will be performed.";
return true;
}
}

map<string, Package> all_packages = parsed_manifest.unpack();
map<string, Package> current_packages;
parsed_manifest = orchestration_tools->loadPackagesFromJson(manifest_file_path);
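The forbidden-versions check above reads a plain file and does a substring match against the incoming package version. A hedged sketch of what such a file could look like; the path comes from the code and its unit tests, while the version strings here are placeholders:

```sh
# One version string per line (the tests use "a1\na2"); updateManifest() skips the upgrade
# when the first new package's version appears anywhere in this file.
cat > /etc/cp/revert/forbidden_versions <<'EOF'
1.2446.123456
1.2447.000001
EOF
```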
@@ -58,6 +58,9 @@ public:
Debug::setUnitTestFlag(D_ORCHESTRATOR, Debug::DebugLevel::TRACE);
const string ignore_packages_file = "/etc/cp/conf/ignore-packages.txt";
EXPECT_CALL(mock_orchestration_tools, doesFileExist(ignore_packages_file)).WillOnce(Return(false));
Maybe<string> forbidden_versions(string("a1\na2"));
EXPECT_CALL(mock_orchestration_tools, readFile("/etc/cp/revert/forbidden_versions"))
.WillOnce(Return(forbidden_versions));
manifest_controller.init();
manifest_file_path = getConfigurationWithDefault<string>(
"/etc/cp/conf/manifest.json",
@@ -224,6 +227,10 @@ TEST_F(ManifestControllerTest, createNewManifest)
EXPECT_CALL(mock_orchestration_tools, copyFile(file_name, manifest_file_path)).WillOnce(Return(true));
EXPECT_CALL(mock_orchestration_tools, isNonEmptyFile(manifest_file_path)).WillOnce(Return(true));
EXPECT_CALL(mock_orchestration_tools, removeFile(file_name)).WillOnce(Return(true));
EXPECT_CALL(mock_details_resolver, getAgentVersion()).WillOnce(Return("b"));
EXPECT_CALL(mock_orchestration_tools, doesFileExist("/etc/cp/revert/upgrade_status")).WillOnce(Return(false));
EXPECT_CALL(mock_orchestration_tools, writeFile(_, "/etc/cp/revert/upgrade_status", false))
.WillOnce(Return(true));
EXPECT_TRUE(i_manifest_controller->updateManifest(file_name));
}

@@ -363,6 +370,11 @@ TEST_F(ManifestControllerTest, updateManifest)
EXPECT_CALL(mock_orchestration_tools, isNonEmptyFile(manifest_file_path)).Times(2).WillRepeatedly(Return(true));
EXPECT_CALL(mock_orchestration_tools, removeFile(file_name)).Times(2).WillRepeatedly(Return(true));

EXPECT_CALL(mock_details_resolver, getAgentVersion()).WillOnce(Return("b"));
EXPECT_CALL(mock_orchestration_tools, doesFileExist("/etc/cp/revert/upgrade_status")).WillOnce(Return(false));
EXPECT_CALL(mock_orchestration_tools, writeFile(_, "/etc/cp/revert/upgrade_status", false))
.WillOnce(Return(true));

EXPECT_TRUE(i_manifest_controller->updateManifest(file_name));

manifest =
@@ -417,6 +429,9 @@ TEST_F(ManifestControllerTest, updateManifest)
EXPECT_CALL(mock_orchestration_tools, loadPackagesFromJson(file_name)).WillOnce(Return(new_services));
EXPECT_CALL(mock_orchestration_tools,
loadPackagesFromJson(manifest_file_path)).WillOnce(Return(old_services));

EXPECT_CALL(mock_details_resolver, getAgentVersion()).WillOnce(Return("b"));
EXPECT_CALL(mock_orchestration_tools, doesFileExist("/etc/cp/revert/upgrade_status")).WillRepeatedly(Return(true));
EXPECT_TRUE(i_manifest_controller->updateManifest(file_name));
}

@@ -478,6 +493,11 @@ TEST_F(ManifestControllerTest, selfUpdate)

EXPECT_CALL(mock_orchestration_tools, copyFile("/tmp/temp_file", path +
temp_ext)).WillOnce(Return(true));

EXPECT_CALL(mock_details_resolver, getAgentVersion()).WillOnce(Return("b"));
EXPECT_CALL(mock_orchestration_tools, doesFileExist("/etc/cp/revert/upgrade_status")).WillOnce(Return(false));
EXPECT_CALL(mock_orchestration_tools, writeFile(_, "/etc/cp/revert/upgrade_status", false))
.WillOnce(Return(true));
EXPECT_TRUE(i_manifest_controller->updateManifest(file_name));
}

@@ -607,6 +627,10 @@ TEST_F(ManifestControllerTest, removeCurrentErrorPackage)
EXPECT_CALL(mock_orchestration_tools, isNonEmptyFile(manifest_file_path)).WillOnce(Return(true));
EXPECT_CALL(mock_orchestration_tools, removeFile(file_name)).WillOnce(Return(true));

EXPECT_CALL(mock_details_resolver, getAgentVersion()).WillOnce(Return("b"));
EXPECT_CALL(mock_orchestration_tools, doesFileExist("/etc/cp/revert/upgrade_status")).WillOnce(Return(false));
EXPECT_CALL(mock_orchestration_tools, writeFile(_, "/etc/cp/revert/upgrade_status", false))
.WillOnce(Return(true));
corrupted_packages.clear();
EXPECT_TRUE(i_manifest_controller->updateManifest(file_name));
}
@@ -666,6 +690,10 @@ TEST_F(ManifestControllerTest, selfUpdateWithOldCopy)

EXPECT_CALL(mock_orchestration_tools, copyFile("/tmp/temp_file", path +
temp_ext)).WillOnce(Return(true));
EXPECT_CALL(mock_details_resolver, getAgentVersion()).WillOnce(Return("b"));
EXPECT_CALL(mock_orchestration_tools, doesFileExist("/etc/cp/revert/upgrade_status")).WillOnce(Return(false));
EXPECT_CALL(mock_orchestration_tools, writeFile(_, "/etc/cp/revert/upgrade_status", false))
.WillOnce(Return(true));
EXPECT_TRUE(i_manifest_controller->updateManifest(file_name));
}

@@ -722,6 +750,10 @@ TEST_F(ManifestControllerTest, selfUpdateWithOldCopyWithError)
EXPECT_CALL(mock_orchestration_tools, doesFileExist(path)).WillOnce(Return(false)).WillOnce(Return(true));
EXPECT_CALL(mock_orchestration_tools, copyFile(path, path + backup_ext + temp_ext)).WillOnce(Return(false));
EXPECT_CALL(mock_details_resolver, getHostname()).WillOnce(Return(hostname));
EXPECT_CALL(mock_details_resolver, getAgentVersion()).WillOnce(Return("b"));
EXPECT_CALL(mock_orchestration_tools, doesFileExist("/etc/cp/revert/upgrade_status")).WillOnce(Return(false));
EXPECT_CALL(mock_orchestration_tools, writeFile(_, "/etc/cp/revert/upgrade_status", false))
.WillOnce(Return(true));
EXPECT_FALSE(i_manifest_controller->updateManifest(file_name));
}

@@ -798,6 +830,10 @@ TEST_F(ManifestControllerTest, installAndRemove)
EXPECT_CALL(mock_orchestration_tools, isNonEmptyFile(manifest_file_path)).Times(2).WillRepeatedly(Return(true));
EXPECT_CALL(mock_orchestration_tools, removeFile(file_name)).Times(2).WillRepeatedly(Return(true));

EXPECT_CALL(mock_details_resolver, getAgentVersion()).WillOnce(Return("b"));
EXPECT_CALL(mock_orchestration_tools, doesFileExist("/etc/cp/revert/upgrade_status")).WillOnce(Return(false));
EXPECT_CALL(mock_orchestration_tools, writeFile(_, "/etc/cp/revert/upgrade_status", false))
.WillOnce(Return(true));
EXPECT_TRUE(i_manifest_controller->updateManifest(file_name));

string new_manifest =
@@ -858,6 +894,63 @@ TEST_F(ManifestControllerTest, installAndRemove)
.WillOnce(Return(true));
EXPECT_CALL(mock_orchestration_tools, doesFileExist("/etc/cp/packages/my1/my1")).Times(2)
.WillOnce(Return(false));
EXPECT_CALL(mock_details_resolver, getAgentVersion()).WillOnce(Return("b"));
EXPECT_CALL(mock_orchestration_tools, doesFileExist("/etc/cp/revert/upgrade_status")).WillRepeatedly(Return(true));
EXPECT_TRUE(i_manifest_controller->updateManifest(file_name));
}

TEST_F(ManifestControllerTest, manifestWithForbiddenVersion)
{
new_services.clear();
old_services.clear();

string manifest =
"{"
" \"packages\": ["
" {"
" \"download-path\": \"http://172.23.92.135/my.sh\","
" \"relative-path\": \"\","
" \"name\": \"my\","
" \"version\": \"a1\","
" \"checksum-type\": \"sha1sum\","
" \"checksum\": \"a58bbab8020b0e6d08568714b5e582a3adf9c805\","
" \"package-type\": \"service\","
" \"require\": []"
" },"
" {"
" \"download-path\": \"http://172.23.92.135/my.sh\","
" \"relative-path\": \"\","
" \"name\": \"orchestration\","
" \"version\": \"a1\","
" \"checksum-type\": \"sha1sum\","
" \"checksum\": \"a58bbab8020b0e6d08568714b5e582a3adf9c805\","
" \"package-type\": \"service\","
" \"require\": []"
" },"
" {"
" \"download-path\": \"\","
" \"relative-path\": \"\","
" \"name\": \"waap\","
" \"version\": \"a1\","
" \"checksum-type\": \"sha1sum\","
" \"checksum\": \"\","
" \"package-type\": \"service\","
" \"status\": false,\n"
" \"message\": \"This security app isn't valid for this agent\"\n"
" }"
" ]"
"}";

map<string, Package> manifest_services;
load(manifest, manifest_services);
checkIfFileExistsCall(manifest_services.at("my"));

load(manifest, new_services);
load(old_manifest, old_services);

EXPECT_CALL(mock_orchestration_tools, loadPackagesFromJson(file_name)).WillOnce(Return(new_services));

EXPECT_TRUE(i_manifest_controller->updateManifest(file_name));
}

@@ -947,6 +1040,10 @@ TEST_F(ManifestControllerTest, badInstall)
EXPECT_CALL(mock_orchestration_tools,
packagesToJsonFile(corrupted_packages, corrupted_file_list)).WillOnce(Return(true));

EXPECT_CALL(mock_details_resolver, getAgentVersion()).WillOnce(Return("b"));
EXPECT_CALL(mock_orchestration_tools, doesFileExist("/etc/cp/revert/upgrade_status")).WillOnce(Return(false));
EXPECT_CALL(mock_orchestration_tools, writeFile(_, "/etc/cp/revert/upgrade_status", false))
.WillOnce(Return(true));
EXPECT_FALSE(i_manifest_controller->updateManifest(file_name));
}
@@ -1112,6 +1209,12 @@ TEST_F(ManifestControllerTest, requireUpdate)
.WillOnce(Return(true));
EXPECT_CALL(mock_orchestration_tools, isNonEmptyFile(manifest_file_path)).WillOnce(Return(true));
EXPECT_CALL(mock_orchestration_tools, removeFile("new_manifest.json")).WillOnce(Return(true));
EXPECT_CALL(mock_details_resolver, getAgentVersion()).WillRepeatedly(Return("b"));
EXPECT_CALL(mock_orchestration_tools, doesFileExist("/etc/cp/revert/upgrade_status"))
.WillOnce(Return(false))
.WillRepeatedly(Return(true));;
EXPECT_CALL(mock_orchestration_tools, writeFile(_, "/etc/cp/revert/upgrade_status", false))
.WillOnce(Return(true));
EXPECT_TRUE(i_manifest_controller->updateManifest(file_name));
}

@@ -1212,6 +1315,10 @@ TEST_F(ManifestControllerTest, sharedObjectNotInstalled)
).WillOnce(Return(true));
EXPECT_CALL(mock_orchestration_tools, copyFile("/tmp/temp_file1", path +
temp_ext)).WillOnce(Return(true));
EXPECT_CALL(mock_details_resolver, getAgentVersion()).WillOnce(Return("b"));
EXPECT_CALL(mock_orchestration_tools, doesFileExist("/etc/cp/revert/upgrade_status")).WillOnce(Return(false));
EXPECT_CALL(mock_orchestration_tools, writeFile(_, "/etc/cp/revert/upgrade_status", false))
.WillOnce(Return(true));
EXPECT_TRUE(i_manifest_controller->updateManifest(file_name));
}

@@ -1313,6 +1420,12 @@ TEST_F(ManifestControllerTest, requireSharedObjectUpdate)
.WillOnce(Return(true));
EXPECT_CALL(mock_orchestration_tools, removeFile("new_manifest.json"))
.WillOnce(Return(true));
EXPECT_CALL(mock_details_resolver, getAgentVersion()).WillRepeatedly(Return("b"));
EXPECT_CALL(mock_orchestration_tools, doesFileExist("/etc/cp/revert/upgrade_status"))
.WillOnce(Return(false))
.WillRepeatedly(Return(true));;
EXPECT_CALL(mock_orchestration_tools, writeFile(_, "/etc/cp/revert/upgrade_status", false))
.WillOnce(Return(true));
EXPECT_TRUE(i_manifest_controller->updateManifest(file_name));
}

@@ -1389,6 +1502,7 @@ TEST_F(ManifestControllerTest, failureOnDownloadSharedObject)
EXPECT_CALL(mock_details_resolver, getHostname()).WillOnce(Return(string("hostname")));
EXPECT_CALL(mock_orchestration_tools, removeFile("/tmp/temp_file1")).WillOnce(Return(true));

EXPECT_CALL(mock_details_resolver, getAgentVersion()).WillRepeatedly(Return("b"));
EXPECT_FALSE(i_manifest_controller->updateManifest(file_name));
}

@@ -1524,6 +1638,12 @@ TEST_F(ManifestControllerTest, multiRequireUpdate)
.WillOnce(Return(true));
EXPECT_CALL(mock_orchestration_tools, removeFile("new_manifest.json"))
.WillOnce(Return(true));
EXPECT_CALL(mock_details_resolver, getAgentVersion()).WillRepeatedly(Return("b"));
EXPECT_CALL(mock_orchestration_tools, doesFileExist("/etc/cp/revert/upgrade_status"))
.WillOnce(Return(false))
.WillRepeatedly(Return(true));;
EXPECT_CALL(mock_orchestration_tools, writeFile(_, "/etc/cp/revert/upgrade_status", false))
.WillOnce(Return(true));
EXPECT_TRUE(i_manifest_controller->updateManifest(file_name));
}

@@ -1610,6 +1730,12 @@ TEST_F(ManifestControllerTest, createNewManifestWithUninstallablePackage)
EXPECT_CALL(mock_orchestration_tools, isNonEmptyFile(manifest_file_path)).WillOnce(Return(true));
EXPECT_CALL(mock_orchestration_tools, removeFile(file_name)).WillOnce(Return(true));

EXPECT_CALL(mock_details_resolver, getAgentVersion()).WillRepeatedly(Return("b"));
EXPECT_CALL(mock_orchestration_tools, doesFileExist("/etc/cp/revert/upgrade_status"))
.WillOnce(Return(false))
.WillRepeatedly(Return(true));;
EXPECT_CALL(mock_orchestration_tools, writeFile(_, "/etc/cp/revert/upgrade_status", false))
.WillOnce(Return(true));
EXPECT_TRUE(i_manifest_controller->updateManifest(file_name));
}

@@ -1624,7 +1750,7 @@ TEST_F(ManifestControllerTest, updateUninstallPackage)
" \"download-path\": \"\","
" \"relative-path\": \"\","
" \"name\": \"my\","
" \"version\": \"\","
" \"version\": \"c\","
" \"checksum-type\": \"sha1sum\","
" \"checksum\": \"\","
" \"package-type\": \"service\","
@@ -1721,6 +1847,11 @@ TEST_F(ManifestControllerTest, updateUninstallPackage)
EXPECT_CALL(mock_orchestration_tools, loadPackagesFromJson(file_name)).WillOnce(Return(new_services));
EXPECT_CALL(mock_orchestration_tools,
loadPackagesFromJson(manifest_file_path)).WillOnce(Return(old_services));

EXPECT_CALL(mock_details_resolver, getAgentVersion()).WillOnce(Return("b"));
EXPECT_CALL(mock_orchestration_tools, doesFileExist("/etc/cp/revert/upgrade_status")).WillOnce(Return(false));
EXPECT_CALL(mock_orchestration_tools, writeFile(_, "/etc/cp/revert/upgrade_status", false))
.WillOnce(Return(true));
EXPECT_TRUE(i_manifest_controller->updateManifest(file_name));
}

@@ -1744,6 +1875,9 @@ public:
setConfiguration<string>(ignore_packages_file, "orchestration", "Ignore packages list file path");
writeIgnoreList(ignore_packages_file, ignore_services);
EXPECT_CALL(mock_orchestration_tools, doesFileExist(ignore_packages_file)).WillOnce(Return(true));
Maybe<string> forbidden_versions(string("a1\na2"));
EXPECT_CALL(mock_orchestration_tools, readFile("/etc/cp/revert/forbidden_versions"))
.WillOnce(Return(forbidden_versions));
manifest_controller.init();
manifest_file_path = getConfigurationWithDefault<string>(
"/etc/cp/conf/manifest.json",
@@ -1839,6 +1973,7 @@ public:
StrictMock<MockOrchestrationStatus> mock_status;
StrictMock<MockDownloader> mock_downloader;
StrictMock<MockOrchestrationTools> mock_orchestration_tools;
StrictMock<MockDetailsResolver> mock_details_resolver;
NiceMock<MockShellCmd> mock_shell_cmd;

ManifestController manifest_controller;
@@ -2122,6 +2257,12 @@ TEST_F(ManifestControllerIgnorePakckgeTest, addIgnorePackageAndUpdateNormal)
EXPECT_CALL(mock_orchestration_tools, isNonEmptyFile(manifest_file_path)).WillOnce(Return(true));
EXPECT_CALL(mock_orchestration_tools, removeFile(file_name)).WillOnce(Return(true));

EXPECT_CALL(mock_details_resolver, getAgentVersion()).WillRepeatedly(Return("b"));
EXPECT_CALL(mock_orchestration_tools, doesFileExist("/etc/cp/revert/upgrade_status"))
.WillOnce(Return(false))
.WillRepeatedly(Return(true));;
EXPECT_CALL(mock_orchestration_tools, writeFile(_, "/etc/cp/revert/upgrade_status", false))
.WillOnce(Return(true));
EXPECT_TRUE(i_manifest_controller->updateManifest(file_name));
}

@@ -2387,6 +2528,12 @@ TEST_F(ManifestControllerIgnorePakckgeTest, overrideIgnoredPackageFromProfileSet
EXPECT_CALL(mock_orchestration_tools, isNonEmptyFile(manifest_file_path)).WillOnce(Return(true));
EXPECT_CALL(mock_orchestration_tools, removeFile(file_name)).WillOnce(Return(true));

EXPECT_CALL(mock_details_resolver, getAgentVersion()).WillRepeatedly(Return("b"));
EXPECT_CALL(mock_orchestration_tools, doesFileExist("/etc/cp/revert/upgrade_status"))
.WillOnce(Return(false))
.WillRepeatedly(Return(true));;
EXPECT_CALL(mock_orchestration_tools, writeFile(_, "/etc/cp/revert/upgrade_status", false))
.WillOnce(Return(true));
EXPECT_TRUE(i_manifest_controller->updateManifest(file_name));

EXPECT_THAT(capture_debug.str(), Not(HasSubstr("Ignoring a package from the manifest. Package name: my")));
@@ -2411,6 +2558,9 @@ public:
doesFileExist("/etc/cp/conf/ignore-packages.txt")
).WillOnce(Return(false));

Maybe<string> forbidden_versions(string("a1\na2"));
EXPECT_CALL(mock_orchestration_tools, readFile("/etc/cp/revert/forbidden_versions"))
.WillOnce(Return(forbidden_versions));
manifest_controller.init();
}
@@ -14,6 +14,7 @@
#include "manifest_handler.h"

#include <algorithm>
#include <ctime>

#include "debug.h"
#include "config.h"
@@ -201,18 +202,29 @@ ManifestHandler::installPackage(
auto span_scope = i_env->startNewSpanScope(Span::ContextType::CHILD_OF);
auto orchestration_status = Singleton::Consume<I_OrchestrationStatus>::by<ManifestHandler>();

auto details_resolver = Singleton::Consume<I_DetailsResolver>::by<ManifestHandler>();
auto orchestration_tools = Singleton::Consume<I_OrchestrationTools>::by<ManifestHandler>();

auto &package = package_downloaded_file.first;
auto &package_name = package.getName();
auto &package_handler_path = package_downloaded_file.second;

dbgInfo(D_ORCHESTRATOR) << "Handling package installation. Package: " << package_name;

string upgrade_info =
details_resolver->getAgentVersion() + " " + package.getVersion() + " " + getCurrentTimestamp();
if (!orchestration_tools->doesFileExist(getFilesystemPathConfig() + "/revert/upgrade_status") &&
!orchestration_tools->writeFile(upgrade_info, getFilesystemPathConfig() + "/revert/upgrade_status")
) {
dbgWarning(D_ORCHESTRATOR) << "Failed to write to " + getFilesystemPathConfig() + "/revert/upgrade_status";
}

if (package_name.compare(orch_service_name) == 0) {
orchestration_status->writeStatusToFile();
bool self_update_status = selfUpdate(package, current_packages, package_handler_path);
if (!self_update_status) {
auto details = Singleton::Consume<I_AgentDetails>::by<ManifestHandler>();
auto hostname = Singleton::Consume<I_DetailsResolver>::by<ManifestHandler>()->getHostname();
auto hostname = details_resolver->getHostname();
string err_hostname = (hostname.ok() ? "on host '" + *hostname : "'" + details->getAgentId()) + "'";
string install_error =
"Warning: Agent/Gateway " +
@@ -246,7 +258,6 @@ ManifestHandler::installPackage(
return true;
}
string current_installation_file = packages_dir + "/" + package_name + "/" + package_name;
auto orchestration_tools = Singleton::Consume<I_OrchestrationTools>::by<ManifestHandler>();
bool is_clean_installation = !orchestration_tools->doesFileExist(current_installation_file);
@@ -368,3 +379,13 @@ ManifestHandler::selfUpdate(
package_handler->preInstallPackage(orch_service_name, current_installation_file) &&
package_handler->installPackage(orch_service_name, current_installation_file, false);
}

string
ManifestHandler::getCurrentTimestamp()
{
time_t now = time(nullptr);
tm* now_tm = localtime(&now);
char timestamp[20];
strftime(timestamp, sizeof(timestamp), "%Y-%m-%d %H:%M:%S", now_tm);
return string(timestamp);
}
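installPackage() above records the upgrade as a single whitespace-separated line (current agent version, target package version, timestamp), and the orchestration component in the next hunk reads the first two tokens back with stream extraction. A sketch of what the resulting file might contain, using the default /etc/cp prefix from the unit tests; the version numbers are illustrative only:

```sh
# Written to <filesystem-prefix>/revert/upgrade_status before installation starts.
cat /etc/cp/revert/upgrade_status
# 1.2440.123 1.2450.456 2025-11-16 01:12:18
```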
@@ -55,6 +55,8 @@ USE_DEBUG_FLAG(D_ORCHESTRATOR);
static string fw_last_update_time = "";
#endif // gaia || smb

static const size_t MAX_SERVER_NAME_LENGTH = 253;

class SetAgentUninstall
:
public ServerRest,
@@ -103,6 +105,19 @@ public:
<< "Initializing Orchestration component, file system path prefix: "
<< filesystem_prefix;

int check_upgrade_success_interval = getSettingWithDefault<uint>(10, "successUpgradeInterval");
Singleton::Consume<I_MainLoop>::by<OrchestrationComp>()->addOneTimeRoutine(
I_MainLoop::RoutineType::Timer,
[this, check_upgrade_success_interval]()
{
Singleton::Consume<I_MainLoop>::by<OrchestrationComp>()->yield(
std::chrono::minutes(check_upgrade_success_interval)
);
processUpgradeCompletion();
},
"Orchestration successfully updated (One-Time After Interval)",
true
);
auto orch_policy = loadDefaultOrchestrationPolicy();
if (!orch_policy.ok()) {
dbgWarning(D_ORCHESTRATOR) << "Failed to load Orchestration Policy. Error: " << orch_policy.getErr();
@@ -141,6 +156,113 @@ public:
}

private:
void
saveLastKnownOrchInfo(string curr_agent_version)
{
static const string upgrades_dir = filesystem_prefix + "/revert";
static const string last_known_orchestrator = upgrades_dir + "/last_known_working_orchestrator";
static const string current_orchestration_package =
filesystem_prefix + "/packages/orchestration/orchestration";
static const string last_known_manifest = upgrades_dir + "/last_known_manifest";
static const string current_manifest_file = getConfigurationWithDefault<string>(
filesystem_prefix + "/conf/manifest.json",
"orchestration",
"Manifest file path"
);

if (!i_orchestration_tools->copyFile(current_orchestration_package, last_known_orchestrator)) {
dbgWarning(D_ORCHESTRATOR) << "Failed to copy the orchestration package to " << upgrades_dir;
} else {
dbgInfo(D_ORCHESTRATOR) << "last known orchestrator version updated to: " << curr_agent_version;
}

if (!i_orchestration_tools->copyFile(current_manifest_file, last_known_manifest)) {
dbgWarning(D_ORCHESTRATOR) << "Failed to copy " << current_manifest_file << " to " << upgrades_dir;
} else {
dbgInfo(D_ORCHESTRATOR) << "last known manifest updated";
}
return;
}

void
processUpgradeCompletion()
{
if (!is_first_check_update_success) {
int check_upgrade_success_interval = getSettingWithDefault<uint>(10, "successUpgradeInterval");
// LCOV_EXCL_START
Singleton::Consume<I_MainLoop>::by<OrchestrationComp>()->addOneTimeRoutine(
I_MainLoop::RoutineType::Timer,
[this, check_upgrade_success_interval]()
{
Singleton::Consume<I_MainLoop>::by<OrchestrationComp>()->yield(
std::chrono::minutes(check_upgrade_success_interval)
);
processUpgradeCompletion();
},
"Orchestration successfully updated",
true
);
// LCOV_EXCL_STOP
return;
}

static const string upgrades_dir = filesystem_prefix + "/revert";
static const string upgrade_status = upgrades_dir + "/upgrade_status";
static const string last_known_orchestrator = upgrades_dir + "/last_known_working_orchestrator";
static const string upgrade_failure_info_path = upgrades_dir + "/failed_upgrade_info";

I_DetailsResolver *i_details_resolver = Singleton::Consume<I_DetailsResolver>::by<OrchestrationComp>();

bool is_upgrade_status_exist = i_orchestration_tools->doesFileExist(upgrade_status);
bool is_last_known_orchestrator_exist = i_orchestration_tools->doesFileExist(last_known_orchestrator);

if (!is_upgrade_status_exist) {
if (!is_last_known_orchestrator_exist) {
saveLastKnownOrchInfo(i_details_resolver->getAgentVersion());
}
return;
}

auto maybe_upgrade_data = i_orchestration_tools->readFile(upgrade_status);
string upgrade_data, from_version, to_version;
if (maybe_upgrade_data.ok()) {
upgrade_data = maybe_upgrade_data.unpack();
istringstream stream(upgrade_data);
stream >> from_version >> to_version;
}
i_orchestration_tools->removeFile(upgrade_status);

if (i_orchestration_tools->doesFileExist(upgrade_failure_info_path)) {
string info = "Orchestration revert. ";
auto failure_info = i_orchestration_tools->readFile(upgrade_failure_info_path);
if (failure_info.ok()) info.append(failure_info.unpack());
LogGen(
info,
ReportIS::Level::ACTION,
ReportIS::Audience::INTERNAL,
ReportIS::Severity::CRITICAL,
|
||||
ReportIS::Priority::URGENT,
|
||||
ReportIS::Tags::ORCHESTRATOR
|
||||
);
|
||||
dbgError(D_ORCHESTRATOR) <<
|
||||
"Error in orchestration version: " << to_version <<
|
||||
". Orchestration reverted to version: " << i_details_resolver->getAgentVersion();
|
||||
i_orchestration_tools->removeFile(upgrade_failure_info_path);
|
||||
return;
|
||||
}
|
||||
|
||||
saveLastKnownOrchInfo(i_details_resolver->getAgentVersion());
|
||||
i_orchestration_tools->writeFile(
|
||||
upgrade_data + "\n",
|
||||
getLogFilesPathConfig() + "/nano_agent/prev_upgrades",
|
||||
true
|
||||
);
|
||||
dbgWarning(D_ORCHESTRATOR) <<
|
||||
"Upgrade process from version: " << from_version <<
|
||||
" to version: " << to_version <<
|
||||
" completed successfully";
|
||||
}
|
||||
|
||||
Maybe<void>
|
||||
registerToTheFog()
|
||||
{
|
||||
@@ -1022,6 +1144,7 @@ private:
|
||||
UpdatesProcessResult::SUCCESS,
|
||||
UpdatesConfigType::GENERAL
|
||||
).notify();
|
||||
if (!is_first_check_update_success) is_first_check_update_success = true;
|
||||
return Maybe<void>();
|
||||
}
|
||||
|
||||
@@ -1347,9 +1470,10 @@ private:
|
||||
string cc_opt;
|
||||
tie(config_opt, cc_opt, nginx_version) = nginx_data.unpack();
|
||||
agent_data_report
|
||||
<< make_pair("nginxVersion", nginx_version)
|
||||
<< make_pair("configureOpt", config_opt)
|
||||
<< make_pair("extraCompilerOpt", cc_opt);
|
||||
<< make_pair("attachmentVersion", "Legacy")
|
||||
<< make_pair("nginxVersion", nginx_version)
|
||||
<< make_pair("configureOpt", config_opt)
|
||||
<< make_pair("extraCompilerOpt", cc_opt);
|
||||
} else {
|
||||
dbgDebug(D_ORCHESTRATOR) << nginx_data.getErr();
|
||||
}
|
||||
@@ -1389,6 +1513,8 @@ private:
|
||||
|
||||
agent_data_report << AgentReportFieldWithLabel("userEdition", FogCommunication::getUserEdition());
|
||||
|
||||
agent_data_report << make_pair("registeredServer", i_agent_details->getRegisteredServer());
|
||||
|
||||
#if defined(gaia) || defined(smb)
|
||||
if (i_details_resolver->compareCheckpointVersion(8100, greater_equal<int>())) {
|
||||
agent_data_report << AgentReportFieldWithLabel("isCheckpointVersionGER81", "true");
|
||||
@@ -1403,6 +1529,7 @@ private:
|
||||
} else {
|
||||
curr_agent_data_report = agent_data_report;
|
||||
curr_agent_data_report.disableReportSending();
|
||||
agent_data_report << AgentReportFieldWithLabel("timestamp", i_time->getWalltimeStr());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1549,6 +1676,11 @@ private:
|
||||
<< LogField("agentType", "Orchestration")
|
||||
<< LogField("agentVersion", Version::get());
|
||||
|
||||
string registered_server = getAttribute("registered-server", "registered_server");
|
||||
dbgTrace(D_ORCHESTRATOR) << "Registered server: " << registered_server;
|
||||
if (!registered_server.empty()) {
|
||||
i_agent_details->setRegisteredServer(registered_server.substr(0, MAX_SERVER_NAME_LENGTH));
|
||||
}
|
||||
auto mainloop = Singleton::Consume<I_MainLoop>::by<OrchestrationComp>();
|
||||
mainloop->addOneTimeRoutine(
|
||||
I_MainLoop::RoutineType::Offline,
|
||||
@@ -1629,7 +1761,7 @@ private:
|
||||
}
|
||||
}
|
||||
|
||||
string server_name = getAttribute("registered-server", "registered_server");
|
||||
string server_name = Singleton::Consume<I_AgentDetails>::by<OrchestrationComp>()->getRegisteredServer();
|
||||
auto server = TagAndEnumManagement::convertStringToTag(server_name);
|
||||
if (server_name == "'SWAG'" || server_name == "'SWAG Server'") server = Tags::WEB_SERVER_SWAG;
|
||||
if (server.ok()) tags.insert(*server);
|
||||
@@ -1653,7 +1785,7 @@ private:
|
||||
tags
|
||||
);
|
||||
|
||||
if (server_name != "") registration_report.addToOrigin(LogField("eventCategory", server_name));
|
||||
registration_report.addToOrigin(LogField("eventCategory", server_name));
|
||||
|
||||
auto email = getAttribute("email-address", "user_email");
|
||||
if (email != "") registration_report << LogField("userDefinedId", email);
|
||||
@@ -2065,6 +2197,7 @@ private:
|
||||
int failure_count = 0;
|
||||
unsigned int sleep_interval = 0;
|
||||
bool is_new_success = false;
|
||||
bool is_first_check_update_success = false;
|
||||
OrchestrationPolicy policy;
|
||||
UpdatesProcessReporter updates_process_reporter_listener;
|
||||
HybridModeMetric hybrid_mode_metric;
|
||||
@@ -2130,6 +2263,7 @@ OrchestrationComp::preload()
|
||||
registerExpectedSetting<vector<string>>("upgradeDay");
|
||||
registerExpectedSetting<string>("email-address");
|
||||
registerExpectedSetting<string>("registered-server");
|
||||
registerExpectedSetting<uint>("successUpgradeInterval");
|
||||
registerExpectedConfigFile("orchestration", Config::ConfigFileType::Policy);
|
||||
registerExpectedConfigFile("registration-data", Config::ConfigFileType::Policy);
|
||||
}
|
||||
|
||||
@@ -89,6 +89,11 @@ public:

    EXPECT_CALL(mock_service_controller, isServiceInstalled("Access Control")).WillRepeatedly(Return(false));

    EXPECT_CALL(
        mock_ml,
        addOneTimeRoutine(_, _, "Orchestration successfully updated (One-Time After Interval)", true)
    ).WillOnce(DoAll(SaveArg<1>(&upgrade_routine), Return(0)));

    // This Holding the Main Routine of the Orchestration.
    EXPECT_CALL(
        mock_ml,

@@ -156,6 +161,7 @@ public:
    runRoutine()
    {
        routine();
        upgrade_routine();
    }

    void

@@ -235,6 +241,7 @@ private:
    }

    I_MainLoop::Routine routine;
    I_MainLoop::Routine upgrade_routine;
    I_MainLoop::Routine status_routine;
};

@@ -83,6 +83,12 @@ public:
    EXPECT_CALL(mock_orchestration_tools, readFile(orchestration_policy_file_path)).WillOnce(Return(response));
    EXPECT_CALL(mock_status, setFogAddress(host_url)).WillRepeatedly(Return());
    EXPECT_CALL(mock_orchestration_tools, setClusterId());

    EXPECT_CALL(
        mock_ml,
        addOneTimeRoutine(_, _, "Orchestration successfully updated (One-Time After Interval)", true)
    ).WillOnce(DoAll(SaveArg<1>(&upgrade_routine), Return(0)));

    EXPECT_CALL(
        mock_ml,
        addOneTimeRoutine(I_MainLoop::RoutineType::System, _, "Orchestration runner", true)

@@ -281,6 +287,12 @@ public:
        status_routine();
    }

    void
    runUpgradeRoutine()
    {
        upgrade_routine();
    }

    void
    preload()
    {

@@ -359,6 +371,7 @@ private:

    I_MainLoop::Routine routine;
    I_MainLoop::Routine status_routine;
    I_MainLoop::Routine upgrade_routine;
};

@@ -601,14 +614,6 @@ TEST_F(OrchestrationTest, check_sending_registration_data)

    string version = "1";
    EXPECT_CALL(mock_service_controller, getUpdatePolicyVersion()).WillOnce(ReturnRef(version));

    EXPECT_CALL(mock_ml, yield(A<chrono::microseconds>()))
        .WillOnce(Return())
        .WillOnce(Invoke([] (chrono::microseconds) { throw invalid_argument("stop while loop"); }));
    try {
        runRoutine();
    } catch (const invalid_argument& e) {}

    string config_json =
        "{\n"
        " \"email-address\": \"fake@example.com\",\n"

@@ -617,9 +622,19 @@ TEST_F(OrchestrationTest, check_sending_registration_data)

    istringstream ss(config_json);
    Singleton::Consume<Config::I_Config>::from(config_comp)->loadConfiguration(ss);
    EXPECT_CALL(mock_ml, yield(A<chrono::microseconds>()))
        .WillOnce(Return())
        .WillOnce(Invoke([] (chrono::microseconds) { throw invalid_argument("stop while loop"); }));
    try {
        runRoutine();
    } catch (const invalid_argument& e) {}

    sending_routine();

    EXPECT_THAT(message_body, HasSubstr("\"userDefinedId\": \"fake@example.com\""));
    EXPECT_THAT(message_body, HasSubstr("\"eventCategory\""));

    EXPECT_THAT(message_body, AnyOf(HasSubstr("\"Embedded Deployment\""), HasSubstr("\"Kubernetes Deployment\"")));
    EXPECT_THAT(message_body, HasSubstr("\"NGINX Server\""));
}

@@ -1004,6 +1019,11 @@ TEST_F(OrchestrationTest, loadOrchestrationPolicyFromBackup)
    );
    waitForRestCall();

    EXPECT_CALL(
        mock_ml,
        addOneTimeRoutine(_, _, "Orchestration successfully updated (One-Time After Interval)", true)
    );

    EXPECT_CALL(
        mock_ml,
        addOneTimeRoutine(I_MainLoop::RoutineType::System, _, "Orchestration runner", true)

@@ -1170,6 +1190,29 @@ TEST_F(OrchestrationTest, manifestUpdate)
    try {
        runRoutine();
    } catch (const invalid_argument& e) {}

    EXPECT_CALL(mock_orchestration_tools, doesFileExist("/etc/cp/revert/upgrade_status")).WillOnce(Return(true));
    EXPECT_CALL(mock_orchestration_tools, doesFileExist("/etc/cp/revert/last_known_working_orchestrator"))
        .WillOnce(Return(true));

    Maybe<string> upgrade_status(string("1.1.1 1.1.2 2025-01-28 07:53:23"));
    EXPECT_CALL(mock_orchestration_tools, readFile("/etc/cp/revert/upgrade_status"))
        .WillOnce(Return(upgrade_status));
    EXPECT_CALL(mock_orchestration_tools, removeFile("/etc/cp/revert/upgrade_status")).WillOnce(Return(true));

    EXPECT_CALL(mock_orchestration_tools, doesFileExist("/etc/cp/revert/failed_upgrade_info"))
        .WillOnce(Return(false));

    EXPECT_CALL(mock_details_resolver, getAgentVersion()).WillRepeatedly(Return("1.1.2"));
    EXPECT_CALL(mock_orchestration_tools, copyFile(_, "/etc/cp/revert/last_known_working_orchestrator"))
        .WillOnce(Return(true));
    EXPECT_CALL(mock_orchestration_tools, copyFile(_, "/etc/cp/revert/last_known_manifest")).WillOnce(Return(true));
    EXPECT_CALL(
        mock_orchestration_tools,
        writeFile("1.1.1 1.1.2 2025-01-28 07:53:23\n", "/var/log/nano_agent/prev_upgrades", true)
    ).WillOnce(Return(true));
    EXPECT_CALL(mock_ml, yield(A<chrono::microseconds>())).WillOnce(Return());
    runUpgradeRoutine();
}

TEST_F(OrchestrationTest, getBadPolicyUpdate)
@@ -208,6 +208,7 @@ ServiceDetails::sendNewConfigurations(int configuration_id, const string &policy
    MessageMetadata new_config_req_md("127.0.0.1", service_port);
    new_config_req_md.setConnectioFlag(MessageConnectionConfig::ONE_TIME_CONN);
    new_config_req_md.setConnectioFlag(MessageConnectionConfig::UNSECURE_CONN);
    new_config_req_md.setSuspension(false);
    auto res = messaging->sendSyncMessage(
        HTTPMethod::POST,
        "/set-new-configuration",

@@ -377,9 +377,13 @@ FogAuthenticator::registerLocalAgentToFog()
{
    auto local_reg_token = getRegistrationToken();
    if (!local_reg_token.ok()) return;

    string reg_token = local_reg_token.unpack().getData();
    if (reg_token.empty()) return;

    dbgInfo(D_ORCHESTRATOR) << "Start local agent registration to the fog";

    string exec_command = "open-appsec-ctl --set-mode --online_mode --token " + local_reg_token.unpack().getData();
    string exec_command = "open-appsec-ctl --set-mode --online_mode --token " + reg_token;

    auto i_agent_details = Singleton::Consume<I_AgentDetails>::by<FogAuthenticator>();
    auto fog_address = i_agent_details->getFogDomain();

@@ -137,9 +137,13 @@ public:
    void setRemoteSyncEnabled(bool enabled);
protected:
    void mergeProcessedFromRemote();
    std::string getWindowId();
    void waitSync();
    std::string getPostDataUrl();
    std::string getUri();
    size_t getIntervalsCount();
    void incrementIntervalsCount();
    bool isBase();

    template<typename T>
    bool sendObject(T &obj, HTTPMethod method, std::string uri)

@@ -252,14 +256,13 @@ protected:
    const std::string m_remotePath; // Created from tenentId + / + assetId + / + class
    std::chrono::seconds m_interval;
    std::string m_owner;
    const std::string m_assetId;

private:
    bool localSyncAndProcess();
    void updateStateFromRemoteService();
    RemoteFilesList getProcessedFilesList();
    RemoteFilesList getRemoteProcessedFilesList();
    std::string getWindowId();
    bool isBase();
    std::string getLearningHost();
    std::string getSharedStorageHost();

@@ -270,7 +273,6 @@ private:
    size_t m_windowsCount;
    size_t m_intervalsCounter;
    bool m_remoteSyncEnabled;
    const std::string m_assetId;
    const bool m_isAssetIdUuid;
    std::string m_type;
    std::string m_lastProcessedModified;

@@ -19,12 +19,14 @@
#include "../waap_clib/WaapParameters.h"
#include "../waap_clib/WaapOpenRedirectPolicy.h"
#include "../waap_clib/WaapErrorDisclosurePolicy.h"
#include "../waap_clib/DecisionType.h"
#include "../waap_clib/CsrfPolicy.h"
#include "../waap_clib/UserLimitsPolicy.h"
#include "../waap_clib/RateLimiting.h"
#include "../waap_clib/SecurityHeadersPolicy.h"
#include <memory>

enum class BlockingLevel {
    NO_BLOCKING = 0,
    LOW_BLOCKING_LEVEL,

@@ -44,8 +46,8 @@ public:
    virtual const std::string& get_AssetId() const = 0;
    virtual const std::string& get_AssetName() const = 0;
    virtual const BlockingLevel& get_BlockingLevel() const = 0;
    virtual const std::string& get_PracticeId() const = 0;
    virtual const std::string& get_PracticeName() const = 0;
    virtual const std::string& get_PracticeIdByPactice(DecisionType practiceType) const = 0;
    virtual const std::string& get_PracticeNameByPactice(DecisionType practiceType) const = 0;
    virtual const std::string& get_PracticeSubType() const = 0;
    virtual const std::string& get_RuleId() const = 0;
    virtual const std::string& get_RuleName() const = 0;

@@ -91,6 +91,7 @@ add_library(waap_clib
    ParserScreenedJson.cc
    ParserBinaryFile.cc
    RegexComparator.cc
    RequestsMonitor.cc
)

add_definitions("-Wno-unused-function")
@@ -113,6 +113,9 @@ DeepParser::onKv(const char *k, size_t k_len, const char *v, size_t v_len, int f
        << parser_depth
        << " v_len = "
        << v_len;

    dbgTrace(D_WAAP_DEEP_PARSER) << m_key;

    // Decide whether to push/pop the value in the keystack.
    bool shouldUpdateKeyStack = (flags & BUFFERED_RECEIVER_F_UNNAMED) == 0;

@@ -275,13 +278,23 @@ DeepParser::onKv(const char *k, size_t k_len, const char *v, size_t v_len, int f
    // Detect and decode potential base64 chunks in the value before further processing

    bool base64ParamFound = false;
    size_t base64_offset = 0;
    Waap::Util::BinaryFileType base64BinaryFileType = Waap::Util::BinaryFileType::FILE_TYPE_NONE;
    if (m_depth == 1 && flags == BUFFERED_RECEIVER_F_MIDDLE && m_key.depth() == 1 && m_key.first() != "#base64"){
        dbgTrace(D_WAAP_DEEP_PARSER) << " === will not check base64 since prev data block was not b64-encoded ===";
    } else {
        dbgTrace(D_WAAP_DEEP_PARSER) << " ===Processing potential base64===";
        if (isUrlPayload && m_depth == 1 && cur_val[0] == '/') {
            dbgTrace(D_WAAP_DEEP_PARSER) << "removing leading '/' from URL param value";
            base64_offset = 1;
        }
        std::string decoded_val, decoded_key;
        base64_variants base64_status = Waap::Util::b64Test(cur_val, decoded_key, decoded_val, base64BinaryFileType);
        base64_variants base64_status = Waap::Util::b64Test(
            cur_val,
            decoded_key,
            decoded_val,
            base64BinaryFileType,
            base64_offset);

        dbgTrace(D_WAAP_DEEP_PARSER)
            << " status = "

@@ -289,16 +302,50 @@ DeepParser::onKv(const char *k, size_t k_len, const char *v, size_t v_len, int f
            << " key = "
            << decoded_key
            << " value = "
            << decoded_val;
            << decoded_val
            << "m_depth = "
            << m_depth;

        switch (base64_status) {
        case SINGLE_B64_CHUNK_CONVERT:
            cur_val = decoded_val;
            if (base64_offset) {
                cur_val = "/" + decoded_val;
            } else {
                cur_val = decoded_val;
            }
            base64ParamFound = true;
            break;
        case CONTINUE_DUAL_SCAN:
            if (decoded_val.size() > 0) {
                decoded_key = "#base64";
                base64ParamFound = false;
                if (base64_offset) {
                    decoded_val = "/" + decoded_val;
                }
                dbgTrace(D_WAAP_DEEP_PARSER) << m_key;
                rc = onKv(
                    decoded_key.c_str(),
                    decoded_key.size(),
                    decoded_val.data(),
                    decoded_val.size(),
                    flags,
                    parser_depth
                );
                dbgTrace(D_WAAP_DEEP_PARSER) << "After call to onKv with suspected value rc = " << rc;
                dbgTrace(D_WAAP_DEEP_PARSER) << m_key;
                break;
            } else {
                dbgTrace(D_WAAP) << "base64 decode suspected and empty value. Skipping.";
                base64ParamFound = false;
                break;
            }
            break;
        case KEY_VALUE_B64_PAIR:
            // going deep with new pair in case value is not empty
            if (decoded_val.size() > 0) {
                if (base64_offset) {
                    decoded_key = "/" + decoded_key;
                }
                cur_val = decoded_val;
                base64ParamFound = true;
                rc = onKv(

@@ -309,9 +356,13 @@ DeepParser::onKv(const char *k, size_t k_len, const char *v, size_t v_len, int f
                    flags,
                    parser_depth
                );

                dbgTrace(D_WAAP_DEEP_PARSER) << " rc = " << rc;
                dbgTrace(D_WAAP_DEEP_PARSER) << "After call to onKv with suspected value rc = " << rc;
                dbgTrace(D_WAAP_DEEP_PARSER) << m_key;
                if (rc != CONTINUE_PARSING) {
                    if (shouldUpdateKeyStack) {
                        m_key.pop("deep parser key");
                    }
                    m_depth--;
                    return rc;
                }
            }

@@ -323,7 +374,7 @@ DeepParser::onKv(const char *k, size_t k_len, const char *v, size_t v_len, int f
        }

        if (base64ParamFound) {
            dbgTrace(D_WAAP_DEEP_PARSER) << "DeepParser::onKv(): pushing #base64 prefix to the key.";
            dbgTrace(D_WAAP_DEEP_PARSER) << "pushing #base64 prefix to the key.";
            m_key.push("#base64", 7, false);
        }
    }

@@ -437,7 +488,6 @@ DeepParser::onKv(const char *k, size_t k_len, const char *v, size_t v_len, int f
    if (shouldUpdateKeyStack) {
        m_key.pop("deep parser key");
    }

    m_depth--;
    return rc;
}

@@ -587,7 +637,6 @@ DeepParser::parseBuffer(
    if (shouldUpdateKeyStack) {
        m_key.pop("deep parser key");
    }

    m_depth--;
    return DONE_PARSING;
}

@@ -909,7 +958,6 @@ DeepParser::parseAfterMisleadingMultipartBoundaryCleaned(
        return rc;
    }
}

    return rc;
}

@@ -1081,7 +1129,7 @@ DeepParser::createInternalParser(
        << " isBodyPayload = "
        << isBodyPayload;
    //Detect sensor_data format in body and just use dedicated filter for it
    if (m_depth == 1
    if ((m_depth == 1)
        && isBodyPayload
        && Waap::Util::detectKnownSource(cur_val) == Waap::Util::SOURCE_TYPE_SENSOR_DATA) {
        m_parsersDeque.push_back(
@@ -37,14 +37,24 @@ void KeyStack::push(const char* subkey, size_t subkeySize, bool countDepth) {
        m_nameDepth++;
    }

    dbgTrace(D_WAAP) << "KeyStack(" << m_name << ")::push(): '" << std::string(subkey, subkeySize) <<
        "' => full_key='" << std::string(m_key.data(), m_key.size()) << "'";
    dbgTrace(D_WAAP)
        << "KeyStack("
        << m_name
        << ")::push(): '"
        << std::string(subkey, subkeySize)
        << "' => full_key='"
        << std::string(m_key.data(), m_key.size())
        << "'";
}

void KeyStack::pop(const char* log, bool countDepth) {
    // Keep depth balanced even if m_key[] buffer is full
    if (m_key.empty() || m_stack.empty()) {
        dbgDebug(D_WAAP) << "KeyStack(" << m_name << ")::pop(): [ERROR] ATTEMPT TO POP FROM EMPTY KEY STACK! " << log;
        dbgDebug(D_WAAP)
            << "KeyStack("
            << m_name
            << ")::pop(): [ERROR] ATTEMPT TO POP FROM EMPTY KEY STACK! "
            << log;
        return;
    }

@@ -55,6 +65,22 @@ void KeyStack::pop(const char* log, bool countDepth) {
    // Remove last subkey.
    m_key.erase(m_stack.back());
    m_stack.pop_back();
    dbgTrace(D_WAAP) << "KeyStack(" << m_name << ")::pop(): full_key='" <<
        std::string(m_key.data(), (int)m_key.size()) << "': pop_key=" << log << "'";
    dbgTrace(D_WAAP)
        << "KeyStack("
        << m_name
        << ")::pop(): full_key='"
        << std::string(m_key.data(), (int)m_key.size())
        << "': pop_key="
        << log
        << "'";
}

void KeyStack::print(std::ostream &os) const
{
    os
        << "KeyStack("
        << m_name
        << ")::show(): full_key='"
        << std::string(m_key.data(), (int)m_key.size())
        << "'";
}

@@ -28,6 +28,7 @@ public:
    void pop(const char* log, bool countDepth=true);
    bool empty() const { return m_key.empty(); }
    void clear() { m_key.clear(); m_stack.clear(); }
    void print(std::ostream &os) const;
    size_t depth() const { return m_nameDepth; }
    size_t size() const {
        return str().size();

@@ -111,8 +111,7 @@ int BufferedReceiver::onKvDone()
    // This must be called even if m_value is empty in order to signal the BUFFERED_RECEIVER_F_LAST flag to the
    // receiver!
    dbgTrace(D_WAAP_PARSER)
        << " Call onKv on the remainder of the buffer not yet pushed to the receiver "
        << "calling onKv()";
        << " Call onKv on the remainder of the buffer not yet pushed to the receiver calling onKv()";
    int rc = onKv(m_key.data(), m_key.size(), m_value.data(), m_value.size(), m_flags, m_parser_depth);

    // Reset the object's state to allow reuse for other parsers
@@ -21,6 +21,7 @@ USE_DEBUG_FLAG(D_WAAP);

const std::string ParserPDF::m_parserName = "ParserPDF";
const char* PDF_TAIL = "%%EOF";
const size_t PDF_TAIL_LEN = 5;

ParserPDF::ParserPDF(
    IParserStreamReceiver &receiver,

@@ -44,16 +45,21 @@ ParserPDF::push(const char *buf, size_t len)
        << "' len="
        << len;

    const char *c;

    if (m_state == s_error) {
        return 0;
    }
    if (len == 0)
    {
        dbgTrace(D_WAAP_PARSER_PDF) << "ParserPDF::push(): end of stream. m_state=" << m_state;

        if (m_state == s_end) {
    if (len == 0) {
        dbgTrace(D_WAAP_PARSER_PDF) << "ParserPDF::push(): end of stream. m_state=" << m_state;
        if (m_state == s_body && m_tailOffset >= PDF_TAIL_LEN) {
            if (m_receiver.onKey("PDF", 3) != 0) {
                m_state = s_error;
                return 0;
            }
            if (m_receiver.onValue("", 0) != 0) {
                m_state = s_error;
                return 0;
            }
            m_receiver.onKvDone();
        } else {
            m_state = s_error;

@@ -61,38 +67,43 @@ ParserPDF::push(const char *buf, size_t len)
        return 0;
    }

    size_t start = (len > MAX_PDF_TAIL_LOOKUP) ? len - MAX_PDF_TAIL_LOOKUP : 0;
    switch (m_state) {
    case s_start:
        m_state = s_body;
        CP_FALL_THROUGH;
    case s_body:
    {
        size_t tail_lookup_offset = (len > MAX_PDF_TAIL_LOOKUP) ? len - MAX_PDF_TAIL_LOOKUP : 0;
        c = strstr(buf + tail_lookup_offset, PDF_TAIL);
        for (size_t i = start; i < len; i++) {
            dbgTrace(D_WAAP_PARSER_PDF)
                << "string to search: " << std::string(buf + tail_lookup_offset)
                << " c=" << c;
        if (c) {
            m_state = s_end;
            CP_FALL_THROUGH;
                << "ParserPDF::push(): m_tailOffset="
                << m_tailOffset
                << " buf[i]="
                << buf[i];
            if (m_tailOffset <= PDF_TAIL_LEN - 1) {
                if (buf[i] == PDF_TAIL[m_tailOffset]) {
                    m_tailOffset++;
                } else {
                    m_tailOffset = 0;
                }
            } else {
                break;
                if (buf[i] == '\r' || buf[i] == '\n' || buf[i] == ' ' || buf[i] == 0) {
                    m_tailOffset++;
                } else {
                    m_tailOffset = 0;
                    i--;
                }
            }
        }
    }
    case s_end:
        if (m_receiver.onKey("PDF", 3) != 0) {
            m_state = s_error;
            return 0;
        }
        if (m_receiver.onValue("", 0) != 0) {
            m_state = s_error;
            return 0;
        }
        dbgTrace(D_WAAP_PARSER_PDF)
            << "ParserPDF::push()->s_body: m_tailOffset="
            << m_tailOffset;
        break;
    case s_error:
        break;
    default:
        dbgTrace(D_WAAP_PARSER_PDF) << "ParserPDF::push(): unknown state: " << m_state;
        dbgTrace(D_WAAP_PARSER_PDF)
            << "ParserPDF::push(): unknown state: "
            << m_state;
        m_state = s_error;
        return 0;
    }

@@ -34,7 +34,6 @@ private:
    enum state {
        s_start,
        s_body,
        s_end,
        s_error
    };

@@ -42,6 +41,7 @@ private:
    enum state m_state;
    static const std::string m_parserName;
    size_t m_parser_depth;
    size_t m_tailOffset = 0;
};

#endif // __PARSER_PDF_H__
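
The rewritten ParserPDF::push() above replaces the old per-chunk strstr() lookup with an incremental scan: m_tailOffset records how much of the "%%EOF" marker (plus trailing whitespace) has already been matched, so the tail is still recognized when it is split across two push() calls. A minimal, self-contained sketch of that pattern (illustration only, not the project code; a production matcher would also need whitespace tolerance and proper failure handling for markers with repeated prefixes):

#include <cstddef>
#include <string>

// Remember how many characters of a tail marker (e.g. "%%EOF") have been
// matched so far, so the marker is found even when it straddles chunk edges.
struct StreamTailMatcher {
    std::string tail;      // marker to find
    size_t matched = 0;    // marker characters matched so far

    void feed(const char *buf, size_t len) {
        for (size_t i = 0; i < len && matched < tail.size(); ++i) {
            if (buf[i] == tail[matched]) {
                ++matched;                              // extend the partial match
            } else {
                matched = (buf[i] == tail[0]) ? 1 : 0;  // restart the match
            }
        }
    }

    bool found() const { return matched >= tail.size(); }
};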
components/security_apps/waap/waap_clib/RequestsMonitor.cc (new file, 158 lines)
@@ -0,0 +1,158 @@
#include "RequestsMonitor.h"
#include "waap.h"
#include "SyncLearningNotification.h"
#include "report_messaging.h"
#include "customized_cereal_map.h"

USE_DEBUG_FLAG(D_WAAP_CONFIDENCE_CALCULATOR);
using namespace std;

SourcesRequestMonitor::SourcesRequestMonitor(
    const string& filePath,
    const string& remotePath,
    const string& assetId,
    const string& owner) :
    SerializeToLocalAndRemoteSyncBase(
        chrono::minutes(10),
        chrono::seconds(30),
        filePath,
        remotePath != "" ? remotePath + "/Monitor" : remotePath,
        assetId,
        owner
    ), m_sourcesRequests()
{
}

SourcesRequestMonitor::~SourcesRequestMonitor()
{
}

void SourcesRequestMonitor::syncWorker()
{
    dbgInfo(D_WAAP_CONFIDENCE_CALCULATOR) << "Running the sync worker for assetId='" << m_assetId << "', owner='" <<
        m_owner << "'";
    incrementIntervalsCount();
    OrchestrationMode mode = Singleton::exists<I_AgentDetails>() ?
        Singleton::Consume<I_AgentDetails>::by<WaapComponent>()->getOrchestrationMode() : OrchestrationMode::ONLINE;

    bool enabled = getProfileAgentSettingWithDefault<bool>(false, "appsec.sourceRequestsMonitor.enabled");

    if (mode == OrchestrationMode::OFFLINE || !enabled || isBase() || !postData()) {
        dbgInfo(D_WAAP_CONFIDENCE_CALCULATOR)
            << "Did not report data. for asset: "
            << m_assetId
            << " Remote URL: "
            << m_remotePath
            << " is enabled: "
            << to_string(enabled)
            << ", mode: " << int(mode);
        return;
    }

    dbgTrace(D_WAAP_CONFIDENCE_CALCULATOR) << "Waiting for all agents to post their data";
    waitSync();

    if (mode == OrchestrationMode::HYBRID) {
        dbgDebug(D_WAAP_CONFIDENCE_CALCULATOR) << "detected running in standalone mode. not sending sync notification";
    } else {
        SyncLearningNotificationObject syncNotification(m_assetId, "Monitor", getWindowId());

        dbgDebug(D_WAAP_CONFIDENCE_CALCULATOR) << "sending sync notification: " << syncNotification;

        ReportMessaging(
            "sync notification for '" + m_assetId + "'",
            ReportIS::AudienceTeam::WAAP,
            syncNotification,
            MessageCategory::GENERIC,
            ReportIS::Tags::WAF,
            ReportIS::Notification::SYNC_LEARNING
        );
    }
}

void SourcesRequestMonitor::logSourceHit(const string& source)
{
    m_sourcesRequests[chrono::duration_cast<chrono::minutes>(
        Singleton::Consume<I_TimeGet>::by<WaapComponent>()->getWalltime()
    ).count()][source]++;
}

// LCOV_EXCL_START Reason: internal functions not used

void SourcesRequestMonitor::pullData(const vector<string> &data)
{
    // not used. report only
}

void SourcesRequestMonitor::processData()
{
    // not used. report only
}

void SourcesRequestMonitor::postProcessedData()
{
    // not used. report only
}

void SourcesRequestMonitor::pullProcessedData(const vector<string> &data)
{
    // not used. report only
}

void SourcesRequestMonitor::updateState(const vector<string> &data)
{
    // not used. report only
}

// LCOV_EXCL_STOP

typedef map<string, map<string, size_t>> MonitorJsonData;

class SourcesRequestsReport : public RestGetFile
{
public:
    SourcesRequestsReport(MonitorData& _sourcesRequests, const string& _agentId)
        : sourcesRequests(), agentId(_agentId)
    {
        MonitorJsonData montiorData;
        for (const auto& window : _sourcesRequests) {
            for (const auto& source : window.second) {
                montiorData[to_string(window.first)][source.first] = source.second;
            }
        }
        sourcesRequests = montiorData;
    }
private:
    C2S_PARAM(MonitorJsonData, sourcesRequests);
    C2S_PARAM(string, agentId);
};

bool SourcesRequestMonitor::postData()
{
    dbgInfo(D_WAAP_CONFIDENCE_CALCULATOR) << "Sending the data to remote";
    // send collected data to remote and clear the local data
    string url = getPostDataUrl();
    string agentId = Singleton::Consume<I_AgentDetails>::by<WaapComponent>()->getAgentId();
    SourcesRequestsReport currentWindow(m_sourcesRequests, agentId);
    bool ok = sendNoReplyObjectWithRetry(currentWindow,
        HTTPMethod::PUT,
        url);
    if (!ok) {
        dbgError(D_WAAP_CONFIDENCE_CALCULATOR) << "Failed to post collected data to: " << url;
    }
    dbgInfo(D_WAAP_CONFIDENCE_CALCULATOR) << "Data sent to remote: " << ok;
    m_sourcesRequests.clear();
    return ok;
}

void SourcesRequestMonitor::serialize(ostream& stream)
{
    cereal::JSONOutputArchive archive(stream);
    archive(m_sourcesRequests);
}

void SourcesRequestMonitor::deserialize(istream& stream)
{
    cereal::JSONInputArchive archive(stream);
    archive(m_sourcesRequests);
}
components/security_apps/waap/waap_clib/RequestsMonitor.h (new file, 33 lines)
@@ -0,0 +1,33 @@
#ifndef __REQUESTS_MONITOR_H__
#define __REQUESTS_MONITOR_H__
#include "i_serialize.h"

typedef std::map<uint64_t, std::map<std::string, size_t>> MonitorData;

class SourcesRequestMonitor : public SerializeToLocalAndRemoteSyncBase
{
public:
    SourcesRequestMonitor(
        const std::string& filePath,
        const std::string& remotePath,
        const std::string& assetId,
        const std::string& owner);
    virtual ~SourcesRequestMonitor();
    virtual void syncWorker() override;
    void logSourceHit(const std::string& source);
protected:
    virtual void pullData(const std::vector<std::string> &data) override;
    virtual void processData() override;
    virtual void postProcessedData() override;
    virtual void pullProcessedData(const std::vector<std::string> &data) override;
    virtual void updateState(const std::vector<std::string> &data) override;
    virtual bool postData() override;

    void serialize(std::ostream& stream);
    void deserialize(std::istream& stream);
private:
    // map of sources and their requests per minute (UNIX)
    MonitorData m_sourcesRequests;
};

#endif // __REQUESTS_MONITOR_H__
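
For orientation, MonitorData is keyed first by the wall-clock minute (the value recorded by logSourceHit() in RequestsMonitor.cc above) and then by source identifier, with the number of requests seen for that source in that minute as the value. A minimal standalone illustration of the aggregation (hypothetical values, not project code):

#include <cstddef>
#include <cstdint>
#include <iostream>
#include <map>
#include <string>

using MonitorData = std::map<uint64_t, std::map<std::string, size_t>>;

int main() {
    MonitorData requests;
    uint64_t minute = 29000000;        // hypothetical wall-clock minute since epoch
    requests[minute]["10.0.0.1"]++;    // first request from this source
    requests[minute]["10.0.0.1"]++;    // second request, same minute
    requests[minute]["10.0.0.2"]++;    // a different source
    for (const auto &window : requests) {
        for (const auto &source : window.second) {
            std::cout << window.first << " " << source.first << " " << source.second << "\n";
        }
    }
    return 0;
}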
@@ -407,6 +407,7 @@ SerializeToLocalAndRemoteSyncBase::SerializeToLocalAndRemoteSyncBase(
    m_remotePath(replaceAllCopy(remotePath, "//", "/")),
    m_interval(0),
    m_owner(owner),
    m_assetId(replaceAllCopy(assetId, "/", "")),
    m_pMainLoop(nullptr),
    m_waitForSync(waitForSync),
    m_workerRoutineId(0),

@@ -414,7 +415,6 @@ SerializeToLocalAndRemoteSyncBase::SerializeToLocalAndRemoteSyncBase(
    m_windowsCount(0),
    m_intervalsCounter(0),
    m_remoteSyncEnabled(true),
    m_assetId(replaceAllCopy(assetId, "/", "")),
    m_isAssetIdUuid(Waap::Util::isUuid(assetId)),
    m_shared_storage_host(genError("not set")),
    m_learning_host(genError("not set"))

@@ -469,6 +469,15 @@ bool SerializeToLocalAndRemoteSyncBase::isBase()
    return m_remotePath == "";
}

void SerializeToLocalAndRemoteSyncBase::waitSync()
{
    if (m_pMainLoop == nullptr)
    {
        return;
    }
    m_pMainLoop->yield(m_waitForSync);
}

string SerializeToLocalAndRemoteSyncBase::getUri()
{
    static const string hybridModeUri = "/api";

@@ -484,6 +493,11 @@ size_t SerializeToLocalAndRemoteSyncBase::getIntervalsCount()
    return m_intervalsCounter;
}

void SerializeToLocalAndRemoteSyncBase::incrementIntervalsCount()
{
    m_intervalsCounter++;
}

SerializeToLocalAndRemoteSyncBase::~SerializeToLocalAndRemoteSyncBase()
{

@@ -603,6 +617,17 @@ void SerializeToLocalAndRemoteSyncBase::setInterval(ch::seconds newInterval)

bool SerializeToLocalAndRemoteSyncBase::localSyncAndProcess()
{
    bool isBackupSyncEnabled = getProfileAgentSettingWithDefault<bool>(
        true,
        "appsecLearningSettings.backupLocalSync");

    if (!isBackupSyncEnabled) {
        dbgInfo(D_WAAP_CONFIDENCE_CALCULATOR) << "Local sync is disabled";
        processData();
        saveData();
        return true;
    }

    RemoteFilesList rawDataFiles;

    dbgTrace(D_WAAP_CONFIDENCE_CALCULATOR) << "Getting files of all agents";

@@ -659,7 +684,7 @@ void SerializeToLocalAndRemoteSyncBase::syncWorker()
{
    dbgInfo(D_WAAP_CONFIDENCE_CALCULATOR) << "Running the sync worker for assetId='" << m_assetId << "', owner='" <<
        m_owner << "'" << " last modified state: " << m_lastProcessedModified;
    m_intervalsCounter++;
    incrementIntervalsCount();
    OrchestrationMode mode = Singleton::exists<I_AgentDetails>() ?
        Singleton::Consume<I_AgentDetails>::by<WaapComponent>()->getOrchestrationMode() : OrchestrationMode::ONLINE;

@@ -678,7 +703,7 @@ void SerializeToLocalAndRemoteSyncBase::syncWorker()
    }

    dbgTrace(D_WAAP_CONFIDENCE_CALCULATOR) << "Waiting for all agents to post their data";
    m_pMainLoop->yield(m_waitForSync);
    waitSync();
    // check if learning service is operational
    if (m_lastProcessedModified == "")
    {
@@ -33,6 +33,7 @@ WaapTelemetryBase::sendLog(const LogRest &metric_client_rest) const
    OrchestrationMode mode = Singleton::Consume<I_AgentDetails>::by<GenericMetric>()->getOrchestrationMode();

    GenericMetric::sendLog(metric_client_rest);
    dbgTrace(D_WAAP) << "Waap telemetry log sent: " << metric_client_rest.genJson().unpack();

    if (mode == OrchestrationMode::ONLINE) {
        return;

@@ -79,7 +80,16 @@ void
WaapTelemetrics::updateMetrics(const string &asset_id, const DecisionTelemetryData &data)
{
    initMetrics();
    requests.report(1);

    auto is_keep_alive_ctx = Singleton::Consume<I_Environment>::by<GenericMetric>()->get<bool>(
        "keep_alive_request_ctx"
    );
    if (!is_keep_alive_ctx.ok() || !*is_keep_alive_ctx) {
        requests.report(1);
    } else {
        dbgTrace(D_WAAP) << "Not increasing the number of requests due to keep alive";
    }

    if (sources_seen.find(data.source) == sources_seen.end()) {
        if (sources.getCounter() == 0) sources_seen.clear();
        sources_seen.insert(data.source);

@@ -274,7 +284,9 @@ WaapMetricWrapper::upon(const WaapTelemetryEvent &event)
        ReportIS::IssuingEngine::AGENT_CORE,
        chrono::minutes(LOGGING_INTERVAL_IN_MINUTES),
        true,
        ReportIS::Audience::INTERNAL
        ReportIS::Audience::INTERNAL,
        false,
        asset_id
    );
    metrics[asset_id]->registerListener();
}

@@ -286,7 +298,9 @@ WaapMetricWrapper::upon(const WaapTelemetryEvent &event)
        ReportIS::IssuingEngine::AGENT_CORE,
        chrono::minutes(LOGGING_INTERVAL_IN_MINUTES),
        true,
        ReportIS::Audience::INTERNAL
        ReportIS::Audience::INTERNAL,
        false,
        asset_id
    );
    attack_types[asset_id]->registerListener();
}
@@ -135,6 +135,7 @@ WaapAssetState::WaapAssetState(std::shared_ptr<Signatures> signatures,
    m_Signatures(signatures),
    m_waapDataFileName(waapDataFileName),
    m_assetId(assetId),
    m_requestsMonitor(nullptr),
    scoreBuilder(this),
    m_rateLimitingState(nullptr),
    m_errorLimitingState(nullptr),

@@ -152,10 +153,14 @@ WaapAssetState::WaapAssetState(std::shared_ptr<Signatures> signatures,
        I_AgentDetails* agentDetails = Singleton::Consume<I_AgentDetails>::by<WaapComponent>();
        std::string path = agentDetails->getTenantId() + "/" + assetId;
        m_filtersMngr = std::make_shared<IndicatorsFiltersManager>(path, assetId, this);
        m_requestsMonitor = std::make_shared<SourcesRequestMonitor>
            (getWaapDataDir() + "/monitor.data", path, assetId, "State");
    }
    else
    {
        m_filtersMngr = std::make_shared<IndicatorsFiltersManager>("", "", this);
        m_requestsMonitor = std::make_shared<SourcesRequestMonitor>
            (getWaapDataDir() + "/monitor.data", "", assetId, "State");
    }
    // Load keyword scores - copy from ScoreBuilder
    updateScores();

@@ -33,6 +33,7 @@
#include "KeywordTypeValidator.h"
#include "ScanResult.h"
#include "WaapSampleValue.h"
#include "RequestsMonitor.h"

enum space_stage {SPACE_SYNBOL, BR_SYMBOL, BN_SYMBOL, BRN_SEQUENCE, BNR_SEQUENCE, NO_SPACES};

@@ -67,6 +68,8 @@ public:

    const std::string m_assetId;

    std::shared_ptr<SourcesRequestMonitor> m_requestsMonitor;

    ScoreBuilder scoreBuilder;
    std::shared_ptr<Waap::RateLimiting::State> m_rateLimitingState;
    std::shared_ptr<Waap::RateLimiting::State> m_errorLimitingState;

@@ -90,6 +93,7 @@ public:
    void logIndicatorsInFilters(const std::string &param, Waap::Keywords::KeywordsSet& keywords,
        IWaf2Transaction* pTransaction);
    void logParamHit(Waf2ScanResult& res, IWaf2Transaction* pTransaction);
    void logSourceHit(const std::string& source);
    void filterKeywords(const std::string &param, Waap::Keywords::KeywordsSet& keywords,
        std::vector<std::string>& filteredKeywords);
    void clearFilterVerbose();
@@ -329,14 +329,37 @@ const std::string& WaapConfigBase::get_AssetName() const
    return m_assetName;
}

const std::string& WaapConfigBase::get_PracticeId() const
const std::string& WaapConfigBase::get_PracticeIdByPactice(DecisionType practiceType) const
{
    return m_practiceId;

    switch (practiceType)
    {
        case DecisionType::AUTONOMOUS_SECURITY_DECISION:
            return m_practiceId;
        default:
            dbgError(D_WAAP)
                << "Can't find practice type for practice ID by practice: "
                << practiceType
                << ", return web app practice ID";
            return m_practiceId;
    }

}

const std::string& WaapConfigBase::get_PracticeName() const
const std::string& WaapConfigBase::get_PracticeNameByPactice(DecisionType practiceType) const
{
    return m_practiceName;
    switch (practiceType)
    {
        case DecisionType::AUTONOMOUS_SECURITY_DECISION:
            return m_practiceName;
        default:
            dbgError(D_WAAP)
                << "Can't find practice type for practice name by practice: "
                << practiceType
                << ", return web app practice name";
            return m_practiceName;
    }

}

const std::string& WaapConfigBase::get_RuleId() const

@@ -39,8 +39,8 @@ public:
    virtual const std::string& get_AssetId() const;
    virtual const std::string& get_AssetName() const;
    virtual const BlockingLevel& get_BlockingLevel() const;
    virtual const std::string& get_PracticeId() const;
    virtual const std::string& get_PracticeName() const;
    virtual const std::string& get_PracticeIdByPactice(DecisionType practiceType) const;
    virtual const std::string& get_PracticeNameByPactice(DecisionType practiceType) const;
    virtual const std::string& get_RuleId() const;
    virtual const std::string& get_RuleName() const;
    virtual const bool& get_WebAttackMitigation() const;
@@ -89,7 +89,7 @@ bool WaapOverrideFunctor::operator()(
    }
    else if (tagLower == "url") {
        for (const auto &rx : rxes) {
            if (W2T_REGX_MATCH(getUriStr)) return true;
            if (W2T_REGX_MATCH(getUri)) return true;
        }
        return false;
    }

@@ -23,6 +23,7 @@ ResponseInjectReasons::ResponseInjectReasons()
    :
    csrf(false),
    antibot(false),
    captcha(false),
    securityHeaders(false)
{
}

@@ -53,6 +54,13 @@ ResponseInjectReasons::setAntibot(bool flag)
    antibot = flag;
}

void
ResponseInjectReasons::setCaptcha(bool flag)
{
    dbgTrace(D_WAAP) << "Change ResponseInjectReasons(Captcha) " << captcha << " to " << flag;
    captcha = flag;
}

void
ResponseInjectReasons::setCsrf(bool flag)
{

@@ -74,6 +82,13 @@ ResponseInjectReasons::shouldInjectAntibot() const
    return antibot;
}

bool
ResponseInjectReasons::shouldInjectCaptcha() const
{
    dbgTrace(D_WAAP) << "shouldInjectCaptcha():: " << captcha;
    return captcha;
}

bool
ResponseInjectReasons::shouldInjectCsrf() const
{

@@ -21,14 +21,17 @@ public:
    void clear();
    bool shouldInject() const;
    void setAntibot(bool flag);
    void setCaptcha(bool flag);
    void setCsrf(bool flag);
    void setSecurityHeaders(bool flag);
    bool shouldInjectAntibot() const;
    bool shouldInjectCaptcha() const;
    bool shouldInjectCsrf() const;
    bool shouldInjectSecurityHeaders() const;
private:
    bool csrf;
    bool antibot;
    bool captcha;
    bool securityHeaders;
};
@@ -97,7 +97,9 @@ calcIndividualKeywords(
    std::sort(keywords.begin(), keywords.end());

    for (auto pKeyword = keywords.begin(); pKeyword != keywords.end(); ++pKeyword) {
        addKeywordScore(scoreBuilder, poolName, *pKeyword, 2.0f, 0.3f, scoresArray, coefArray);
        addKeywordScore(
            scoreBuilder, poolName, *pKeyword, DEFAULT_KEYWORD_SCORE, DEFAULT_KEYWORD_COEF, scoresArray, coefArray
        );
    }
}

@@ -112,8 +114,6 @@ calcCombinations(
    std::vector<std::string>& keyword_combinations)
{
    keyword_combinations.clear();
    static const double max_combi_score = 1.0f;
    double default_coef = 0.8f;

    for (size_t i = 0; i < keyword_matches.size(); ++i) {
        std::vector<std::string> combinations;

@@ -137,8 +137,10 @@ calcCombinations(
            default_score += scoreBuilder.getSnapshotKeywordScore(*it, 0.0f, poolName);
        }
        // set default combination score to be the sum of its keywords, bounded by 1
        default_score = std::min(default_score, max_combi_score);
        addKeywordScore(scoreBuilder, poolName, combination, default_score, default_coef, scoresArray, coefArray);
        default_score = std::min(default_score, DEFAULT_COMBI_SCORE);
        addKeywordScore(
            scoreBuilder, poolName, combination, default_score, DEFAULT_COMBI_COEF, scoresArray, coefArray
        );
        keyword_combinations.push_back(combination);
    }
}
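
As a worked example with illustrative numbers: two keywords whose snapshot scores are 0.4 and 0.8 give a summed default of 1.2, which std::min clamps to DEFAULT_COMBI_SCORE = 1.0 before addKeywordScore() records the combination with DEFAULT_COMBI_COEF = 0.8.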
@@ -155,7 +157,7 @@ calcArrayScore(std::vector<double>& scoreArray)
        // *pScore is always positive and there's a +10 offset
        score = 10.0f - left * 10.0f / divisor;
    }
    dbgTrace(D_WAAP_SCORE_BUILDER) << "calculated score: " << score;
    dbgDebug(D_WAAP_SCORE_BUILDER) << "calculated score: " << score;
    return score;
}

@@ -171,7 +173,9 @@ calcLogisticRegressionScore(std::vector<double> &coefArray, double intercept, do
    }
    // Apply the expit function to the log-odds to obtain the probability,
    // and multiply by 10 to obtain a 'score' in the range [0, 10]
    return 1.0f / (1.0f + exp(-log_odds)) * 10.0f;
    double score = 1.0f / (1.0f + exp(-log_odds)) * 10.0f;
    dbgDebug(D_WAAP_SCORE_BUILDER) << "calculated score (log_odds): " << score << " (" << log_odds << ")";
    return score;
}

}
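
For reference, the returned value is score = 10 / (1 + e^(-log_odds)): log_odds = 0 maps to 5.0, log_odds = 4 to roughly 9.8, and strongly negative log-odds push the score toward 0.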
@@ -32,6 +32,11 @@ struct ModelLoggingSettings {
    bool logToStream;
};

static const double DEFAULT_KEYWORD_COEF = 0.3f;
static const double DEFAULT_KEYWORD_SCORE = 2.0f;
static const double DEFAULT_COMBI_COEF = 0.8f;
static const double DEFAULT_COMBI_SCORE = 1.0f;

std::string getScorePoolNameByLocation(const std::string &location);
std::string getOtherScorePoolName();
ModelLoggingSettings getModelLoggingSettings();

@@ -40,6 +40,7 @@
#include "WaapOpenRedirectPolicy.h"
#include "WaapErrorDisclosurePolicy.h"
#include <boost/algorithm/string.hpp>
#include <boost/regex.hpp>
#include "generic_rulebase/parameters_config.h"
#include <iostream>
#include "ParserDelimiter.h"

@@ -1098,6 +1099,7 @@ void Waf2Transaction::end_request_hdrs() {
        // but the State itself is not needed now
        Waap::Override::State overrideState = getOverrideState(m_siteConfig);
    }
    m_pWaapAssetState->m_requestsMonitor->logSourceHit(m_source_identifier);
    IdentifiersEvent ids(m_source_identifier, m_pWaapAssetState->m_assetId);
    ids.notify();
    // Read relevant headers and extract meta information such as host name

@@ -1389,6 +1391,20 @@ Waf2Transaction::findHtmlTagToInject(const char* data, int data_len, int& pos)
        size_t tagHistPosCheck = m_tagHistPos;
        for (size_t i=0; i < tagSize; ++i) {
            if (tag[i] != ::tolower(m_tagHist[tagHistPosCheck])) {
                if (i == tagSize - 1 && m_tagHist[tagHistPosCheck] == ' ') {
                    // match regex on head element with attributes
                    string dataStr = Waap::Util::charToString(data + pos, data_len - pos);
                    dataStr = dataStr.substr(0, dataStr.find('>')+1);
                    tagMatches = NGEN::Regex::regexMatch(
                        __FILE__,
                        __LINE__,
                        dataStr,
                        boost::regex("(?:\\s+[a-zA-Z_:][-a-zA-Z0-9_:.]*(?:\\s*=\\s*(\"[^\"]*\"|'[^']*'|[^\\s\"'>]*))?)*\\s*>")
                    );
                    pos += dataStr.length() - 1;
                    dbgTrace(D_WAAP_BOT_PROTECTION) << "matching head element with attributes: " << dataStr << ". match: " << tagMatches;
                    break;
                }
                tagMatches = false;
                break;
            }

@@ -1402,12 +1418,8 @@ Waf2Transaction::findHtmlTagToInject(const char* data, int data_len, int& pos)
        }
    }

    if(!headFound)
    {
        return false;
    }

    return true;
    dbgTrace(D_WAAP_BOT_PROTECTION) << "head element tag found: " << headFound;
    return headFound;
}

void

@@ -1421,6 +1433,15 @@ Waf2Transaction::completeInjectionResponseBody(std::string& strInjection)
        m_responseInjectReasons.setAntibot(false);
    }

    if(m_responseInjectReasons.shouldInjectCaptcha()) {
        dbgTrace(D_WAAP_BOT_PROTECTION) <<
            "Waf2Transaction::completeInjectionResponseBody(): Injecting data (captcha)";
        //todo add captcha script
        strInjection += "<script src=\"cp-cp.js\"></script>";
        // No need to inject more than once
        m_responseInjectReasons.setCaptcha(false);
    }

    if (m_responseInjectReasons.shouldInjectCsrf()) {
        dbgTrace(D_WAAP) << "Waf2Transaction::completeInjectionResponseBody(): Injecting data (csrf)";
        strInjection += "<script src=\"cp-csrf.js\"></script>";

@@ -1568,6 +1589,8 @@ Waf2Transaction::decideFinal(
        sitePolicy = &ngenAPIConfig;
        m_overrideState = getOverrideState(sitePolicy);

        // User limits
        shouldBlock = (getUserLimitVerdict() == ngx_http_cp_verdict_e::TRAFFIC_VERDICT_DROP);
    }
    else if (WaapConfigApplication::getWaapSiteConfig(ngenSiteConfig)) {
        dbgTrace(D_WAAP) << "Waf2Transaction::decideFinal(): got relevant Application configuration from the I/S";

@@ -1646,7 +1669,9 @@ void Waf2Transaction::appendCommonLogFields(LogGen& waapLog,
    const std::shared_ptr<Waap::Trigger::Log> &triggerLog,
    bool shouldBlock,
    const std::string& logOverride,
    const std::string& incidentType) const
    const std::string& incidentType,
    const std::string& practiceID,
    const std::string& practiceName) const
{
    auto env = Singleton::Consume<I_Environment>::by<WaapComponent>();
    auto active_id = env->get<std::string>("ActiveTenantId");

@@ -1737,8 +1762,8 @@ void Waf2Transaction::appendCommonLogFields(LogGen& waapLog,
    waapLog << LogField("practiceType", "Threat Prevention");
    waapLog << LogField("practiceSubType", m_siteConfig->get_PracticeSubType());
    waapLog << LogField("ruleName", m_siteConfig->get_RuleName());
    waapLog << LogField("practiceId", m_siteConfig->get_PracticeId());
    waapLog << LogField("practiceName", m_siteConfig->get_PracticeName());
    waapLog << LogField("practiceId", practiceID);
    waapLog << LogField("practiceName", practiceName);
    waapLog << LogField("waapIncidentType", incidentType);

    // Registering this value would append the list of matched override IDs to the unified log

@@ -1805,8 +1830,8 @@ Waf2Transaction::sendLog()

    telemetryData.source = getSourceIdentifier();
    telemetryData.assetName = m_siteConfig->get_AssetName();
    telemetryData.practiceId = m_siteConfig->get_PracticeId();
    telemetryData.practiceName = m_siteConfig->get_PracticeName();
    telemetryData.practiceId = m_siteConfig->get_PracticeIdByPactice(AUTONOMOUS_SECURITY_DECISION);
    telemetryData.practiceName = m_siteConfig->get_PracticeNameByPactice(AUTONOMOUS_SECURITY_DECISION);
    if (m_scanResult) {
        telemetryData.attackTypes = m_scanResult->attack_types;
    }

@@ -1947,7 +1972,11 @@ Waf2Transaction::sendLog()
            shouldBlock);

        LogGen& waap_log = logGenWrapper.getLogGen();
        appendCommonLogFields(waap_log, triggerLog, shouldBlock, logOverride, incidentType);
        appendCommonLogFields(
            waap_log, triggerLog, shouldBlock, logOverride, incidentType,
            m_siteConfig->get_PracticeIdByPactice(AUTONOMOUS_SECURITY_DECISION),
            m_siteConfig->get_PracticeNameByPactice(AUTONOMOUS_SECURITY_DECISION)
        );
        waap_log << LogField("waapIncidentDetails", incidentDetails);
        waap_log << LogField("eventConfidence", "High");
        break;

@@ -1980,7 +2009,11 @@ Waf2Transaction::sendLog()
            waap_log << LogField("waapFoundIndicators", getKeywordMatchesStr(), LogFieldOption::XORANDB64);
        }

        appendCommonLogFields(waap_log, triggerLog, shouldBlock, logOverride, incidentType);
        appendCommonLogFields(
            waap_log, triggerLog, shouldBlock, logOverride, incidentType,
            m_siteConfig->get_PracticeIdByPactice(AUTONOMOUS_SECURITY_DECISION),
            m_siteConfig->get_PracticeNameByPactice(AUTONOMOUS_SECURITY_DECISION)
        );

        waap_log << LogField("waapIncidentDetails", incidentDetails);
        break;

@@ -1996,7 +2029,11 @@ Waf2Transaction::sendLog()
            shouldBlock);

        LogGen& waap_log = logGenWrapper.getLogGen();
        appendCommonLogFields(waap_log, triggerLog, shouldBlock, logOverride, "Cross Site Request Forgery");
        appendCommonLogFields(
            waap_log, triggerLog, shouldBlock, logOverride, "Cross Site Request Forgery",
            m_siteConfig->get_PracticeIdByPactice(AUTONOMOUS_SECURITY_DECISION),
            m_siteConfig->get_PracticeNameByPactice(AUTONOMOUS_SECURITY_DECISION)
        );
        waap_log << LogField("waapIncidentDetails", "CSRF Attack discovered.");
        break;
    }

@@ -2177,14 +2214,13 @@ Waf2Transaction::decideAutonomousSecurity(
        " effective overrides count: " << m_effectiveOverrideIds.size() <<
        " learned overrides count: " << m_exceptionLearned.size();

    bool log_all = false;
    const std::shared_ptr<Waap::Trigger::Policy> triggerPolicy = sitePolicy.get_TriggerPolicy();
    if (triggerPolicy) {
        const std::shared_ptr<Waap::Trigger::Log> triggerLog = getTriggerLog(triggerPolicy);
        if (triggerLog && triggerLog->webRequests) log_all = true;
    }

    if(decision->getThreatLevel() <= ThreatLevel::THREAT_INFO && !log_all) {
        decision->setLog(false);
    } else {

@@ -2299,10 +2335,11 @@ bool Waf2Transaction::decideResponse()

bool
Waf2Transaction::reportScanResult(const Waf2ScanResult &res) {
    if (get_ignoreScore() || (res.score >= SCORE_THRESHOLD &&
        (m_scanResult == nullptr || res.score > m_scanResult->score)))
    if ((get_ignoreScore() || res.score >= SCORE_THRESHOLD) &&
        (m_scanResult == nullptr || res.score > m_scanResult->score))
    {
        // Forget any previous scan result and replace with new
        dbgTrace(D_WAAP) << "Setting scan result. New score: " << res.score;
        delete m_scanResult;
        m_scanResult = new Waf2ScanResult(res);
        return true;

@@ -247,7 +247,9 @@ private:
        const std::shared_ptr<Waap::Trigger::Log> &triggerLog,
        bool shouldBlock,
        const std::string& logOverride,
        const std::string& incidentType) const;
        const std::string& incidentType,
        const std::string& practiceID,
        const std::string& practiceName) const;
    std::string getUserReputationStr(double relativeReputation) const;
    bool isTrustedSource() const;

@@ -381,7 +381,11 @@ void Waf2Transaction::sendAutonomousSecurityLog(
        waap_log << LogField("eventConfidence", confidence);
    }

    appendCommonLogFields(waap_log, triggerLog, shouldBlock, logOverride, attackTypes);
    appendCommonLogFields(
        waap_log, triggerLog, shouldBlock, logOverride, attackTypes,
        m_siteConfig->get_PracticeIdByPactice(AUTONOMOUS_SECURITY_DECISION),
        m_siteConfig->get_PracticeNameByPactice(AUTONOMOUS_SECURITY_DECISION)
    );

    std::string sampleString = getSample();
    if (sampleString.length() > MAX_LOG_FIELD_SIZE) {
@@ -952,6 +952,145 @@ string filterUTF7(const string& text) {
return result;
}

// Decides the status of a Base64 decoded string based on various parameters.
// @param decoded The decoded string.
// @param entropy The entropy of the original encoded string.
// @param decoded_entropy The entropy of the decoded string.
// @param spacer_count The number of spacer characters in the decoded string.
// @param nonPrintableCharsCount The count of non-printable characters in the decoded string.
// @param clear_on_error Flag indicating whether to clear the decoded string on error.
// @param terminatorCharsSeen The number of terminator characters seen.
// @param called_with_prefix Flag indicating if the function was called with a prefix.
// @return The status of the Base64 decoding process.
//
// Idea:
// Decide whether the input chunk should be replaced by its decoded form, marked as suspected (and then
// inspected both as encoded and as decoded), or cleaned as binary data. The remaining case is to classify
// it as not base64 encoded at all.
// - if the decoded size is less than 5, return invalid
// - check the entropy delta, relying on base64-encoded data having higher entropy than its decoded form
//   (the delta is usually 0.25); this check can raise suspicion but cannot work the other way around
// - check whether the decoded chunk has more than 10% non-printable characters, which suggests encoded binary data
// - if binary data is not suspected but the entropy is suspicious, apply empiric conditions to decide between
//   binary data and an invalid decoding
// - if binary data is suspected, first check whether the entropy is suspicious
// - if the entropy is suspicious, the chunk is short, and more than 25% of it is non-printable, return invalid,
//   since this is not base64-encoded data
// - if the entropy is not suspicious, the chunk is short, and more than 50% of it is non-printable, return invalid,
//   since this is not base64-encoded data
// - if the entropy is suspicious and the chunk size is between 64 and 1024, perform an additional empiric test
//   that decides whether the return value should be treated as suspected or as binary data (cleared)
base64_decode_status decideStatusBase64Decoded(
string& decoded,
double entropy,
double decoded_entropy,
size_t spacer_count,
size_t nonPrintableCharsCount,
bool clear_on_error,
double terminatorCharsSeen,
bool called_with_prefix
)
{
base64_decode_status tmp_status = B64_DECODE_OK;
if (entropy - decoded_entropy + terminatorCharsSeen < BASE64_ENTROPY_THRESHOLD_DELTA) {
dbgTrace(D_WAAP_BASE64)
<< "The chunk is under suspect to be base64,"
<< "use dual processing because entropy delta is too low";
tmp_status = B64_DECODE_SUSPECTED;
}

bool empiric_condition = false;
if (decoded.size() >= 5) {
if (spacer_count > 1) {
nonPrintableCharsCount = nonPrintableCharsCount - spacer_count + 1;
}
dbgTrace(D_WAAP_BASE64)
<< "(before test for unprintables): decoded.size="
<< decoded.size()
<< ", nonPrintableCharsCount="
<< nonPrintableCharsCount
<< ", clear_on_error="
<< clear_on_error
<< ", called_with_prefix="
<< called_with_prefix;
if (nonPrintableCharsCount * 10 < decoded.size()) {
dbgTrace(D_WAAP_BASE64)
<< "(decode/replace due to small amount of nonprintables): will decide based on entropy values";
} else { // more than 10% of non-printable characters
dbgTrace(D_WAAP_BASE64) << "large amount of nonporintables";
if (tmp_status == B64_DECODE_SUSPECTED) {
// entropy - decoded_entropy + terminatorCharsSeen < 0.25
if (decoded.size() < 16 && nonPrintableCharsCount * 4 > decoded.size()) {
decoded.clear();
return B64_DECODE_INVALID;
}
dbgTrace(D_WAAP_BASE64)
<< "(large amount of nonporintables + entropy suspect), check emprirics because decoded."
<< " terminatorCharsSeen="
<< terminatorCharsSeen;
// empiric test based on investigation of real payloads
empiric_condition = entropy < decoded_entropy
&& entropy > BASE64_ENTROPY_BASE_THRESHOLD
&& decoded_entropy > BASE64_ENTROPY_DECODED_THRESHOLD
&& !called_with_prefix
&& decoded.size() > BASE64_MIN_SIZE_LIMIT
&& decoded.size() < BASE64_MAX_SIZE_LIMIT
&& terminatorCharsSeen != 0;
if (!empiric_condition) {
if (clear_on_error) decoded.clear();
return B64_DECODE_SUSPECTED;
} else {
if (clear_on_error) decoded.clear();
tmp_status = B64_DECODE_OK;
}
} else { // entropy - decoded_entropy + terminatorCharsSeen >= 0.25
// one more empiric based on uT and real payloads
if (decoded.size() < 16
&& nonPrintableCharsCount * 2 > decoded.size()
&& terminatorCharsSeen == 0) {
decoded.clear();
return B64_DECODE_INVALID;
}
dbgTrace(D_WAAP_BASE64)
<< "(delete as binary content) because decoded. Return B64_DECODE_INCOMPLETE";
if (clear_on_error) decoded.clear();
return B64_DECODE_INCOMPLETE;
}
} // less than 10% of non-printable characters
dbgTrace(D_WAAP_BASE64)
<< "After handling unprintables checking status";
if (tmp_status == B64_DECODE_OK) {
dbgTrace(D_WAAP_BASE64) << "replacing with decoded data, return B64_DECODE_OK";
return B64_DECODE_OK;
} else { // tmp_status == B64_DECODE_SUSPECTED, entropy - decoded_entropy + terminatorCharsSeen < 0.25
dbgTrace(D_WAAP_BASE64) << "Suspected due to entropy, making empiric test";
// and one more empiric test based on investigation of real payloads
empiric_condition = entropy < decoded_entropy
&& entropy > BASE64_ENTROPY_BASE_THRESHOLD
&& decoded_entropy > BASE64_ENTROPY_DECODED_THRESHOLD
&& !called_with_prefix
&& decoded.size() > BASE64_MIN_SIZE_LIMIT
&& decoded.size() < BASE64_MAX_SIZE_LIMIT;
if (empiric_condition) {
dbgTrace(D_WAAP_BASE64) << "Empiric test failed, non-base64 chunk, return B64_DECODE_INVALID";
decoded.clear();
return B64_DECODE_INVALID;
}
dbgTrace(D_WAAP_BASE64) << "Empiric test passed, return B64_DECODE_SUSPECTED";
return B64_DECODE_SUSPECTED;
}
return B64_DECODE_OK; // successfully decoded. Returns decoded data in "decoded" parameter
}

// If decoded size is too small - leave the encoded value (return false)
decoded.clear(); // discard partial data
dbgTrace(D_WAAP_BASE64)
<< "(leave as-is) because decoded too small. decoded.size="
<< decoded.size();
return B64_DECODE_INVALID;
}
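To make the rules above easier to follow, here is a minimal standalone sketch of the classification logic, assuming only the thresholds quoted in the comment (the 0.25 entropy delta and the 10%/25%/50% non-printable ratios); the name classifyDecodedChunk and the simplified enum are illustrative, and the additional empiric size/entropy tests are deliberately omitted.

// Standalone illustration of the heuristic described above; not the repository's code.
#include <cstddef>
#include <string>

enum class ChunkStatus { Invalid, Ok, Incomplete, Suspected };

ChunkStatus
classifyDecodedChunk(
    const std::string &decoded,
    double encoded_entropy,
    double decoded_entropy,
    std::size_t non_printable,
    double terminators_seen)
{
    if (decoded.size() < 5) return ChunkStatus::Invalid;               // too short to judge
    bool entropy_suspect = encoded_entropy - decoded_entropy + terminators_seen < 0.25;
    bool binary_suspect = non_printable * 10 >= decoded.size();        // more than ~10% non-printable
    if (!binary_suspect) return entropy_suspect ? ChunkStatus::Suspected : ChunkStatus::Ok;
    if (entropy_suspect) {
        // a short chunk with more than 25% non-printables cannot be a real base64 payload
        if (decoded.size() < 16 && non_printable * 4 > decoded.size()) return ChunkStatus::Invalid;
        return ChunkStatus::Suspected;                                 // keep both representations
    }
    // entropy looks like genuine base64: a short chunk with more than 50% non-printables is
    // rejected, otherwise the decoded bytes are treated as binary content
    if (decoded.size() < 16 && non_printable * 2 > decoded.size()) return ChunkStatus::Invalid;
    return ChunkStatus::Incomplete;
}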
// Attempts to validate and decode base64-encoded chunk.
|
||||
// Value is the full value inside which potential base64-encoded chunk was found,
|
||||
// it and end point to start and end of that chunk.
|
||||
@@ -980,18 +1119,28 @@ base64_decode_status decodeBase64Chunk(
|
||||
uint32_t spacer_count = 0;
|
||||
uint32_t length = end - it;
|
||||
|
||||
dbgTrace(D_WAAP) << "decodeBase64Chunk: value='" << value << "' match='" << string(it, end) << "'";
|
||||
dbgTrace(D_WAAP)
|
||||
<< "value='"
|
||||
<< value
|
||||
<< "' match='"
|
||||
<< string(it, end)
|
||||
<< "' clear_on_error='"
|
||||
<< clear_on_error
|
||||
<< "' called_with_prefix='"
|
||||
<< called_with_prefix
|
||||
<< "'";
|
||||
string::const_iterator begin = it;
|
||||
|
||||
// The encoded data length (without the "base64," prefix) should be exactly divisible by 4
|
||||
// len % 4 is not 0 i.e. this is not base64
|
||||
if ((end - it) % 4 != 0) {
|
||||
dbgTrace(D_WAAP_BASE64) <<
|
||||
"b64DecodeChunk: (leave as-is) because encoded data length should be exactly divisible by 4.";
|
||||
if ((end - it) % 4 == 1) {
|
||||
dbgTrace(D_WAAP_BASE64)
|
||||
<< "(leave as-is) because encoded data length should not be <4*x + 1>.";
|
||||
return B64_DECODE_INVALID;
|
||||
}
|
||||
|
||||
std::unordered_map<char, double> frequency;
|
||||
std::unordered_map<char, double> original_occurences_counter;
|
||||
std::unordered_map<char, double> decoded_occurences_counter;
|
||||
|
||||
while (it != end) {
|
||||
unsigned char c = *it;
|
||||
@@ -999,9 +1148,8 @@ base64_decode_status decodeBase64Chunk(
|
||||
if (terminatorCharsSeen) {
|
||||
// terminator characters must all be '=', until end of match.
|
||||
if (c != '=') {
|
||||
dbgTrace(D_WAAP_BASE64) <<
|
||||
"decodeBase64Chunk: (leave as-is) because terminator characters must all be '='," <<
|
||||
"until end of match.";
|
||||
dbgTrace(D_WAAP_BASE64)
|
||||
<< "(leave as-is) because terminator characters must all be '=' until end of match.";
|
||||
return B64_DECODE_INVALID;
|
||||
}
|
||||
|
||||
@@ -1009,13 +1157,13 @@ base64_decode_status decodeBase64Chunk(
|
||||
terminatorCharsSeen++;
|
||||
|
||||
if (terminatorCharsSeen > 2) {
|
||||
dbgTrace(D_WAAP_BASE64) << "decodeBase64Chunk: (leave as-is) because terminatorCharsSeen > 2";
|
||||
dbgTrace(D_WAAP_BASE64) << "(leave as-is) because terminatorCharsSeen > 2";
|
||||
return B64_DECODE_INVALID;
|
||||
}
|
||||
|
||||
// allow for more terminator characters
|
||||
it++;
|
||||
frequency[c]++;
|
||||
original_occurences_counter[c]++;
|
||||
continue;
|
||||
}
|
||||
|
||||
@@ -1040,12 +1188,18 @@ base64_decode_status decodeBase64Chunk(
|
||||
// Start tracking terminator characters
|
||||
terminatorCharsSeen++;
|
||||
it++;
|
||||
frequency[c]++;
|
||||
original_occurences_counter[c]++;
|
||||
continue;
|
||||
}
|
||||
else {
|
||||
dbgTrace(D_WAAP_BASE64) << "decodeBase64Chunk: (leave as-is) because of non-base64 character ('" <<
|
||||
c << "', ASCII " << (unsigned int)c << ", offset " << (it-begin) << ")";
|
||||
dbgTrace(D_WAAP_BASE64)
|
||||
<< "(leave as-is) because of non-base64 character ('"
|
||||
<< c
|
||||
<< "', ASCII "
|
||||
<< (unsigned int)c
|
||||
<< ", offset "
|
||||
<< (it-begin)
|
||||
<< ")";
|
||||
return B64_DECODE_INVALID; // non-base64 character
|
||||
}
|
||||
|
||||
@@ -1068,18 +1222,19 @@ base64_decode_status decodeBase64Chunk(
|
||||
}
|
||||
|
||||
decoded += (char)code;
|
||||
decoded_occurences_counter[(char)code]++;
|
||||
}
|
||||
|
||||
it++;
|
||||
frequency[c]++;
|
||||
original_occurences_counter[c]++;
|
||||
}
|
||||
|
||||
// end of encoded sequence decoded.
|
||||
|
||||
dbgTrace(D_WAAP_BASE64)
|
||||
<< "decodeBase64Chunk: decoded.size="
|
||||
<< "decoding done: decoded.size="
|
||||
<< decoded.size()
|
||||
<< ", nonPrintableCharsCount="
|
||||
<< ", uncorrected nonPrintableCharsCount="
|
||||
<< nonPrintableCharsCount
|
||||
<< ", spacer_count = "
|
||||
<< spacer_count
|
||||
@@ -1088,56 +1243,42 @@ base64_decode_status decodeBase64Chunk(
|
||||
<< "; decoded='"
|
||||
<< decoded << "'";
|
||||
|
||||
// Check if entropy is correlates with b64 threshold (initially > 4.5)
|
||||
if (!called_with_prefix) {
|
||||
double entropy = 0;
|
||||
double p = 0;
|
||||
for (const auto& pair : frequency) {
|
||||
p = pair.second / length;
|
||||
entropy -= p * std::log2(p);
|
||||
}
|
||||
dbgTrace(D_WAAP_BASE64) << " ===b64Test===: base entropy = " << entropy << "length = " << length;
|
||||
// Add short payload factor
|
||||
if (length < 16)
|
||||
entropy = entropy * 16 / length;
|
||||
// Enforce tailoring '=' characters
|
||||
entropy+=terminatorCharsSeen;
|
||||
|
||||
dbgTrace(D_WAAP_BASE64) << " ===b64Test===: corrected entropy = " << entropy << "length = " << length;
|
||||
if (entropy <= base64_entropy_threshold) {
|
||||
return B64_DECODE_INVALID;
|
||||
}
|
||||
double entropy = 0;
|
||||
double p = 0;
|
||||
double decoded_entropy = 0;
|
||||
for (const auto& pair : original_occurences_counter) {
|
||||
p = pair.second / length;
|
||||
entropy -= p * std::log2(p);
|
||||
}
|
||||
|
||||
// Return success only if decoded.size>=5 and there are less than 10% of non-printable
|
||||
// characters in output.
|
||||
if (decoded.size() >= 5) {
|
||||
if (spacer_count > 1) {
|
||||
nonPrintableCharsCount = nonPrintableCharsCount - spacer_count + 1;
|
||||
}
|
||||
if (nonPrintableCharsCount * 10 < decoded.size()) {
|
||||
dbgTrace(D_WAAP_BASE64) << "decodeBase64Chunk: (decode/replace) decoded.size=" << decoded.size() <<
|
||||
", nonPrintableCharsCount=" << nonPrintableCharsCount << ": replacing with decoded data";
|
||||
}
|
||||
else {
|
||||
dbgTrace(D_WAAP_BASE64) << "decodeBase64Chunk: (delete) because decoded.size=" << decoded.size() <<
|
||||
", nonPrintableCharsCount=" << nonPrintableCharsCount <<
|
||||
", clear_on_error=" << clear_on_error;
|
||||
if (clear_on_error) decoded.clear();
|
||||
return B64_DECODE_INCOMPLETE;
|
||||
}
|
||||
dbgTrace(D_WAAP_BASE64) << "returning true: successfully decoded."
|
||||
<< " Returns decoded data in \"decoded\" parameter";
|
||||
return B64_DECODE_OK; // successfully decoded. Returns decoded data in "decoded" parameter
|
||||
for (const auto &pair : decoded_occurences_counter) {
|
||||
p = pair.second / decoded.size();
|
||||
decoded_entropy -= p * std::log2(p);
|
||||
}
|
||||
dbgTrace(D_WAAP_BASE64)
|
||||
<< "Base entropy = "
|
||||
<< entropy
|
||||
<< " Decoded_entropy = "
|
||||
<< decoded_entropy
|
||||
<< "length = "
|
||||
<< length;
|
||||
|
||||
base64_decode_status return_status = decideStatusBase64Decoded(
|
||||
decoded,
|
||||
entropy,
|
||||
decoded_entropy,
|
||||
spacer_count,
|
||||
nonPrintableCharsCount,
|
||||
clear_on_error,
|
||||
terminatorCharsSeen,
|
||||
called_with_prefix
|
||||
);
|
||||
|
||||
dbgTrace(D_WAAP_BASE64)
|
||||
<< "After decideStatusBase64Decoded return_status="
|
||||
<< return_status;
|
||||
|
||||
return return_status;
|
||||
|
||||
// If decoded size is too small - leave the encoded value (return false)
|
||||
decoded.clear(); // discard partial data
|
||||
dbgTrace(D_WAAP_BASE64) << "decodeBase64Chunk: (leave as-is) because decoded too small. decoded.size=" <<
|
||||
decoded.size() <<
|
||||
", nonPrintableCharsCount=" << nonPrintableCharsCount <<
|
||||
", clear_on_error=" << clear_on_error;
|
||||
return B64_DECODE_INVALID;
|
||||
}
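The entropy and decoded_entropy values fed into the decision above are plain Shannon entropies over character frequencies, exactly the -sum(p * log2(p)) loops shown in this hunk. A self-contained sketch of that computation (the helper name shannonEntropy is illustrative):

#include <cmath>
#include <string>
#include <unordered_map>

// Shannon entropy in bits per symbol: H = -sum over observed characters of p * log2(p).
double shannonEntropy(const std::string &data)
{
    if (data.empty()) return 0.0;
    std::unordered_map<char, double> counts;
    for (char c : data) counts[c] += 1.0;
    double entropy = 0.0;
    for (const auto &kv : counts) {
        double p = kv.second / data.size();
        entropy -= p * std::log2(p);
    }
    return entropy;
}

Uniformly distributed base64 text can approach 6 bits per character (64 symbols), while its decoded plaintext usually has a narrower character distribution; that asymmetry is what the 0.25 delta check relies on.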
|
||||
|
||||
// Attempts to detect and validate base64 chunk.
|
||||
@@ -1180,8 +1321,9 @@ b64DecodeChunk(
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return decodeBase64Chunk(value, it, end, decoded) != B64_DECODE_INVALID;
|
||||
base64_decode_status status = decodeBase64Chunk(value, it, end, decoded);
|
||||
dbgTrace(D_WAAP_BASE64) << "b64DecodeChunk: status = " << status;
|
||||
return status != B64_DECODE_INVALID;
|
||||
}
|
||||
|
||||
vector<string> split(const string& s, char delim) {
|
||||
@@ -1281,6 +1423,7 @@ static void b64TestChunk(const string &s,
|
||||
int &deletedCount,
|
||||
string &outStr)
|
||||
{
|
||||
dbgTrace(D_WAAP_BASE64) << " ===b64TestChunk===: starting with = '" << s << "'";
|
||||
size_t chunkLen = (chunkEnd - chunkStart);
|
||||
|
||||
if ((chunkEnd - chunkStart) > static_cast<int>(b64_prefix.size()) &&
|
||||
@@ -1289,11 +1432,9 @@ static void b64TestChunk(const string &s,
|
||||
chunkLen -= b64_prefix.size();
|
||||
}
|
||||
|
||||
size_t chunkRem = chunkLen % 4;
|
||||
|
||||
// Only match chunk whose length is divisible by 4
|
||||
string repl;
|
||||
if (chunkRem == 0 && cb(s, chunkStart, chunkEnd, repl)) {
|
||||
dbgTrace(D_WAAP_BASE64) << " ===b64TestChunk===: chunkLen = " << chunkLen;
|
||||
if (cb(s, chunkStart, chunkEnd, repl)) {
|
||||
// Successfully matched b64 chunk
|
||||
if (!repl.empty()) {
|
||||
outStr += repl;
|
||||
@@ -1340,9 +1481,7 @@ bool detectBase64Chunk(
|
||||
dbgTrace(D_WAAP_BASE64) << " ===detectBase64Chunk===: isB64AlphaChar = true, '" << *it << "'";
|
||||
start = it;
|
||||
end = s.end();
|
||||
if ((end - start) % 4 == 0) {
|
||||
return true;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
// non base64 before supposed chunk - will not process
|
||||
return false;
|
||||
@@ -1381,17 +1520,31 @@ bool isBase64PrefixProcessingOK (
|
||||
if (detectBase64Chunk(s, start, end)) {
|
||||
dbgTrace(D_WAAP_BASE64) << " ===isBase64PrefixProcessingOK===: chunk detected";
|
||||
if ((start != s.end()) && (end == s.end())) {
|
||||
dbgTrace(D_WAAP_BASE64) << " ===isBase64PrefixProcessingOK===: chunk detected but not complete";
|
||||
retVal = processDecodedChunk(s, start, end, value, binaryFileType, true);
|
||||
dbgTrace(D_WAAP_BASE64)
|
||||
<< " ===isBase64PrefixProcessingOK===: after processDecodedChunk retVal = "
|
||||
<< retVal
|
||||
<< " binaryFileType = "
|
||||
<< binaryFileType;
|
||||
}
|
||||
} else if (start != s.end()) {
|
||||
dbgTrace(D_WAAP_BASE64) << " ===isBase64PrefixProcessingOK===: chunk not detected."
|
||||
" searching for known file header only";
|
||||
dbgTrace(D_WAAP_BASE64)
|
||||
<< " ===isBase64PrefixProcessingOK===: chunk not detected. searching for known file header only";
|
||||
end = (start + MAX_HEADER_LOOKUP < s.end()) ? start + MAX_HEADER_LOOKUP : s.end();
|
||||
processDecodedChunk(s, start, end, value, binaryFileType);
|
||||
value.clear();
|
||||
dbgTrace(D_WAAP_BASE64)
|
||||
<< " ===isBase64PrefixProcessingOK===: after processDecodedChunk binaryFileType = "
|
||||
<< binaryFileType;
|
||||
return binaryFileType != Waap::Util::BinaryFileType::FILE_TYPE_NONE;
|
||||
}
|
||||
}
|
||||
dbgTrace(D_WAAP_BASE64)
|
||||
<< " ===isBase64PrefixProcessingOK===: retVal = "
|
||||
<< retVal
|
||||
<< " binaryFileType = "
|
||||
<< binaryFileType;
|
||||
return retVal != B64_DECODE_INVALID;
|
||||
}
|
||||
|
||||
@@ -1399,23 +1552,31 @@ base64_variants b64Test (
|
||||
const string &s,
|
||||
string &key,
|
||||
string &value,
|
||||
BinaryFileType &binaryFileType)
|
||||
BinaryFileType &binaryFileType,
|
||||
const size_t offset)
|
||||
{
|
||||
|
||||
key.clear();
|
||||
bool retVal;
|
||||
binaryFileType = Waap::Util::BinaryFileType::FILE_TYPE_NONE;
|
||||
auto begin = s.begin() + offset;
|
||||
dbgTrace(D_WAAP_BASE64)
|
||||
<< " ===b64Test===: string = "
|
||||
<< s
|
||||
<< " key = "
|
||||
<< key
|
||||
<< " value = "
|
||||
<< value
|
||||
<< " offset = "
|
||||
<< offset;
|
||||
|
||||
dbgTrace(D_WAAP_BASE64) << " ===b64Test===: string = " << s
|
||||
<< " key = " << key << " value = " << value;
|
||||
// Minimal length
|
||||
if (s.size() < 8) {
|
||||
if (s.size() < 8 + offset) {
|
||||
return CONTINUE_AS_IS;
|
||||
}
|
||||
dbgTrace(D_WAAP_BASE64) << " ===b64Test===: minimal lenght test passed";
|
||||
|
||||
std::string prefix_decoded_val;
|
||||
string::const_iterator it = s.begin();
|
||||
auto it = begin;
|
||||
|
||||
// 1st check if we have key candidate
|
||||
if (base64_key_value_detector_re.hasMatch(s)) {
|
||||
@@ -1433,7 +1594,7 @@ base64_variants b64Test (
|
||||
break;
|
||||
case EQUAL:
|
||||
if (*it == '=') {
|
||||
it = s.begin();
|
||||
it = begin;
|
||||
state=MISDETECT;
|
||||
continue;
|
||||
}
|
||||
@@ -1455,7 +1616,7 @@ base64_variants b64Test (
|
||||
if (it == s.end() || state == MISDETECT) {
|
||||
dbgTrace(D_WAAP_BASE64) << " ===b64Test===: detected *it = s.end()" << *it;
|
||||
if (key.size() > 0) {
|
||||
it = s.begin();
|
||||
it = begin;
|
||||
key.clear();
|
||||
}
|
||||
} else {
|
||||
@@ -1479,7 +1640,7 @@ base64_variants b64Test (
|
||||
}
|
||||
}
|
||||
|
||||
string::const_iterator start = s.end();
|
||||
auto start = s.end();
|
||||
dbgTrace(D_WAAP_BASE64) << " ===b64Test===: B64 itself = " << *it << " =======";
|
||||
bool isB64AlphaChar = Waap::Util::isAlphaAsciiFast(*it) || isdigit(*it) || *it=='/' || *it=='+';
|
||||
if (isB64AlphaChar) {
|
||||
@@ -1487,11 +1648,6 @@ base64_variants b64Test (
|
||||
dbgTrace(D_WAAP_BASE64) <<
|
||||
" ===b64Test===: Start tracking potential b64 chunk = " << *it << " =======";
|
||||
start = it;
|
||||
if ((s.end() - start) % 4 != 0) {
|
||||
key.clear();
|
||||
value.clear();
|
||||
return CONTINUE_AS_IS;
|
||||
}
|
||||
}
|
||||
else {
|
||||
dbgTrace(D_WAAP_BASE64) <<
|
||||
@@ -1512,17 +1668,37 @@ base64_variants b64Test (
|
||||
key.pop_back();
|
||||
dbgTrace(D_WAAP_BASE64) << " ===b64Test===: FINAL key = '" << key << "'";
|
||||
}
|
||||
retVal = decodeBase64Chunk(s, start, s.end(), value) != B64_DECODE_INVALID;
|
||||
base64_decode_status decode_chunk_status = decodeBase64Chunk(s, start, s.end(), value);
|
||||
|
||||
dbgTrace(D_WAAP_BASE64) << " ===b64Test===: After testing and conversion value = "
|
||||
<< value << "retVal = '" << retVal <<"'";
|
||||
if (!retVal) {
|
||||
dbgTrace(D_WAAP_BASE64)
|
||||
<< " ===b64Test===: After testing and conversion value = "
|
||||
<< value
|
||||
<< "decode_chunk_status = '"
|
||||
<< decode_chunk_status
|
||||
<<"'";
|
||||
if (decode_chunk_status == B64_DECODE_INVALID) {
|
||||
key.clear();
|
||||
value.clear();
|
||||
return CONTINUE_AS_IS;
|
||||
}
|
||||
dbgTrace(D_WAAP_BASE64) << " ===b64Test===: After tpassed retVal check = "
|
||||
<< value << "retVal = '" << retVal <<"'" << "key = '" << key << "'";
|
||||
|
||||
if (decode_chunk_status == B64_DECODE_INCOMPLETE) {
|
||||
value.clear();
|
||||
}
|
||||
|
||||
if (decode_chunk_status == B64_DECODE_SUSPECTED) {
|
||||
return CONTINUE_DUAL_SCAN;
|
||||
}
|
||||
|
||||
dbgTrace(D_WAAP_BASE64)
|
||||
<< " ===b64Test===: After tpassed retVal check = "
|
||||
<< value
|
||||
<< "decode_chunk_status = '"
|
||||
<< decode_chunk_status
|
||||
<<"'"
|
||||
<< "key = '"
|
||||
<< key
|
||||
<< "'";
|
||||
if (key.empty()) {
|
||||
return SINGLE_B64_CHUNK_CONVERT;
|
||||
} else {
|
||||
@@ -1548,7 +1724,7 @@ void b64Decode(
|
||||
deletedCount = 0;
|
||||
outStr = "";
|
||||
int offsetFix = 0;
|
||||
|
||||
dbgTrace(D_WAAP_BASE64) << " ===b64Decode===: starting with = '" << s << "'";
|
||||
string::const_iterator it = s.begin();
|
||||
|
||||
// Minimal length
|
||||
@@ -1596,6 +1772,11 @@ void b64Decode(
|
||||
}
|
||||
|
||||
// Decode and add chunk
|
||||
dbgTrace(D_WAAP_BASE64)
|
||||
<< " ===b64Decode===: chunkStart = "
|
||||
<< *chunkStart
|
||||
<< " it = "
|
||||
<< *it;
|
||||
b64TestChunk(s, chunkStart, it, cb, decodedCount, deletedCount, outStr);
|
||||
|
||||
// stop tracking b64 chunk
|
||||
@@ -1607,6 +1788,7 @@ void b64Decode(
|
||||
}
|
||||
|
||||
if (chunkStart != s.end()) {
|
||||
dbgTrace(D_WAAP_BASE64) << " ===b64Decode===: chunkStart = " << *chunkStart;
|
||||
b64TestChunk(s, chunkStart, it, cb, decodedCount, deletedCount, outStr);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -32,9 +32,15 @@
|
||||
|
||||
#define GCC_VERSION (__GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__)
|
||||
|
||||
enum base64_variants {SINGLE_B64_CHUNK_CONVERT, KEY_VALUE_B64_PAIR, CONTINUE_AS_IS};
|
||||
enum base64_variants {SINGLE_B64_CHUNK_CONVERT, KEY_VALUE_B64_PAIR, CONTINUE_AS_IS, CONTINUE_DUAL_SCAN};
|
||||
enum base64_stage {BEFORE_EQUAL, EQUAL, DONE, MISDETECT};
|
||||
enum base64_decode_status {B64_DECODE_INVALID, B64_DECODE_OK, B64_DECODE_INCOMPLETE};
|
||||
enum base64_decode_status {B64_DECODE_INVALID, B64_DECODE_OK, B64_DECODE_INCOMPLETE, B64_DECODE_SUSPECTED};
|
||||
|
||||
#define BASE64_ENTROPY_BASE_THRESHOLD 5.0
|
||||
#define BASE64_ENTROPY_DECODED_THRESHOLD 5.4
|
||||
#define BASE64_ENTROPY_THRESHOLD_DELTA 0.25
|
||||
#define BASE64_MIN_SIZE_LIMIT 16
|
||||
#define BASE64_MAX_SIZE_LIMIT 1024
|
||||
|
||||
// This is portable version of stricmp(), which is non-standard function (not even in C).
|
||||
// Contrary to stricmp(), for a slight optimization, s2 is ASSUMED to be already in lowercase.
|
||||
@@ -865,6 +871,17 @@ void unescapeUnicode(std::string &text);
|
||||
// Try to find and decode UTF7 chunks
|
||||
std::string filterUTF7(const std::string &text);
|
||||
|
||||
base64_decode_status
|
||||
decideStatusBase64Decoded(
|
||||
std::string& decoded,
|
||||
double entropy,
|
||||
double decoded_entropy,
|
||||
size_t spacer_count,
|
||||
size_t nonPrintableCharsCount,
|
||||
bool clear_on_error,
|
||||
double terminatorCharsSeen,
|
||||
bool called_with_prefix);
|
||||
|
||||
base64_decode_status
|
||||
decodeBase64Chunk(
|
||||
const std::string &value,
|
||||
@@ -926,7 +943,8 @@ namespace Util {
|
||||
const std::string &s,
|
||||
std::string &key,
|
||||
std::string &value,
|
||||
BinaryFileType &binaryFileType);
|
||||
BinaryFileType &binaryFileType,
|
||||
size_t offset = 0);
|
||||
|
||||
// The original stdlib implementation of isalpha() supports locale settings which we do not really need.
|
||||
// It is also proven to contribute to slow performance in some of the algorithms using it.
|
||||
|
||||
@@ -20,6 +20,7 @@
|
||||
#include <vector>
|
||||
#include <dirent.h>
|
||||
#include <boost/regex.hpp>
|
||||
#include <algorithm>
|
||||
|
||||
#include "debug.h"
|
||||
#include "maybe_res.h"
|
||||
@@ -75,13 +76,13 @@ NginxConfCollector::expandIncludes(const string &include_pattern) const {
|
||||
struct dirent *entry;
|
||||
while ((entry = readdir(dir)) != nullptr) {
|
||||
if (strcmp(entry->d_name, ".") == 0 || strcmp(entry->d_name, "..") == 0) continue;
|
||||
|
||||
if (NGEN::Regex::regexMatch(__FILE__, __LINE__, entry->d_name, pattern)) {
|
||||
matching_files.push_back(maybe_directory + "/" + entry->d_name);
|
||||
dbgTrace(D_NGINX_MANAGER) << "Matched file: " << maybe_directory << '/' << entry->d_name;
|
||||
}
|
||||
}
|
||||
closedir(dir);
|
||||
sort(matching_files.begin(), matching_files.end());
|
||||
|
||||
return matching_files;
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
Enter file contents hereapiVersion: apiextensions.k8s.io/v1
|
||||
apiVersion: apiextensions.k8s.io/v1
|
||||
kind: CustomResourceDefinition
|
||||
metadata :
|
||||
name : customresponses.openappsec.io
|
||||
|
||||
@@ -1173,3 +1173,46 @@ spec:
|
||||
kind: TrustedSource
|
||||
shortNames:
|
||||
- trustedsource
|
||||
|
||||
---
|
||||
apiVersion: apiextensions.k8s.io/v1
|
||||
kind: CustomResourceDefinition
|
||||
metadata :
|
||||
name: policyactivations.openappsec.io
|
||||
spec:
|
||||
group: openappsec.io
|
||||
versions:
|
||||
- name: v1beta2
|
||||
served: true
|
||||
storage: true
|
||||
schema:
|
||||
openAPIV3Schema:
|
||||
type: object
|
||||
properties:
|
||||
spec:
|
||||
type: object
|
||||
properties:
|
||||
appsecClassName:
|
||||
type: string
|
||||
enabledPolicies:
|
||||
type: array
|
||||
items:
|
||||
type: object
|
||||
properties:
|
||||
name:
|
||||
type: string
|
||||
hosts:
|
||||
type: array
|
||||
items:
|
||||
type: string
|
||||
required:
|
||||
- hosts
|
||||
required:
|
||||
- enabledPolicies
|
||||
scope: Cluster
|
||||
names:
|
||||
plural: policyactivations
|
||||
singular: policyactivation
|
||||
kind: PolicyActivation
|
||||
shortNames:
|
||||
- policyactivation
|
||||
|
||||
@@ -11,3 +11,58 @@ spec:
|
||||
source-identifiers: ""
|
||||
trusted-sources: ""
|
||||
exceptions: []
|
||||
---
|
||||
apiVersion: openappsec.io/v1beta1
|
||||
kind: LogTrigger
|
||||
metadata:
|
||||
name: appsec-log-trigger
|
||||
spec:
|
||||
access-control-logging:
|
||||
allow-events: false
|
||||
drop-events: true
|
||||
appsec-logging:
|
||||
detect-events: false
|
||||
prevent-events: true
|
||||
all-web-requests: false
|
||||
additional-suspicious-events-logging:
|
||||
enabled: true
|
||||
minimum-severity: high
|
||||
response-body: false
|
||||
extended-logging:
|
||||
url-path: false
|
||||
url-query: false
|
||||
http-headers: false
|
||||
request-body: false
|
||||
log-destination:
|
||||
cloud: true
|
||||
syslog-service: []
|
||||
file: ""
|
||||
stdout:
|
||||
format: json
|
||||
cef-service: []
|
||||
--
|
||||
apiVersion: openappsec.io/v1beta1
|
||||
kind: Practice
|
||||
metadata:
|
||||
name: appsec-best-practice
|
||||
spec:
|
||||
anti-bot:
|
||||
injected-URIs: []
|
||||
validated-URIs: []
|
||||
openapi-schema-validation:
|
||||
configmap: []
|
||||
snort-signatures:
|
||||
configmap: []
|
||||
web-attacks:
|
||||
minimum-confidence: high
|
||||
override-mode: detect-learn
|
||||
--
|
||||
apiVersion: openappsec.io/v1beta1
|
||||
kind: CustomResponse
|
||||
metadata:
|
||||
name: 403-forbidden
|
||||
spec:
|
||||
http-response-code: 403
|
||||
message-body: ""
|
||||
message-title: ""
|
||||
mode: response-code-only
|
||||
|
||||
@@ -11,3 +11,58 @@ spec:
|
||||
source-identifiers: ""
|
||||
trusted-sources: ""
|
||||
exceptions: []
|
||||
---
|
||||
apiVersion: openappsec.io/v1beta1
|
||||
kind: LogTrigger
|
||||
metadata:
|
||||
name: appsec-log-trigger
|
||||
spec:
|
||||
access-control-logging:
|
||||
allow-events: false
|
||||
drop-events: true
|
||||
appsec-logging:
|
||||
detect-events: false
|
||||
prevent-events: true
|
||||
all-web-requests: false
|
||||
additional-suspicious-events-logging:
|
||||
enabled: true
|
||||
minimum-severity: high
|
||||
response-body: false
|
||||
extended-logging:
|
||||
url-path: false
|
||||
url-query: false
|
||||
http-headers: false
|
||||
request-body: false
|
||||
log-destination:
|
||||
cloud: true
|
||||
syslog-service: []
|
||||
file: ""
|
||||
stdout:
|
||||
format: json
|
||||
cef-service: []
|
||||
--
|
||||
apiVersion: openappsec.io/v1beta1
|
||||
kind: Practice
|
||||
metadata:
|
||||
name: appsec-best-practice
|
||||
spec:
|
||||
anti-bot:
|
||||
injected-URIs: []
|
||||
validated-URIs: []
|
||||
openapi-schema-validation:
|
||||
configmap: []
|
||||
snort-signatures:
|
||||
configmap: []
|
||||
web-attacks:
|
||||
minimum-confidence: high
|
||||
override-mode: prevent-learn
|
||||
--
|
||||
apiVersion: openappsec.io/v1beta1
|
||||
kind: CustomResponse
|
||||
metadata:
|
||||
name: 403-forbidden
|
||||
spec:
|
||||
http-response-code: 403
|
||||
message-body: ""
|
||||
message-title: ""
|
||||
mode: response-code-only
|
||||
|
||||
@@ -17,16 +17,6 @@ spec:
|
||||
customResponse: default-web-user-response
|
||||
triggers:
|
||||
- default-log-trigger
|
||||
specificRules:
|
||||
- host: www.example.com
|
||||
# this is an example for specific rule, adjust the values as required for the protected app
|
||||
mode: detect-learn
|
||||
threatPreventionPractices:
|
||||
- default-threat-prevention-practice
|
||||
accessControlPractices:
|
||||
- default-access-control-practice
|
||||
triggers:
|
||||
- default-log-trigger
|
||||
---
|
||||
apiVersion: openappsec.io/v1beta2
|
||||
kind: ThreatPreventionPractice
|
||||
@@ -112,7 +102,7 @@ spec:
|
||||
responseCode: true
|
||||
logDestination:
|
||||
cloud: true
|
||||
logToAgent: false
|
||||
logToAgent: true
|
||||
stdout:
|
||||
format: json
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@ kind: AccessControlPractice
|
||||
metadata:
|
||||
name: access-control-practice-example
|
||||
spec:
|
||||
practiceMode: prevent
|
||||
practiceMode: inherited
|
||||
rateLimit:
|
||||
overrideMode: inherited
|
||||
rules:
|
||||
@@ -80,15 +80,26 @@ metadata:
|
||||
name: policy-example
|
||||
spec:
|
||||
default:
|
||||
mode: prevent-learn
|
||||
mode: detect-learn
|
||||
accessControlPractices: [access-control-practice-example]
|
||||
threatPreventionPractices: [threat-prevention-practice-example]
|
||||
triggers: [log-trigger-example]
|
||||
customResponse: custom-response-block-page-example
|
||||
sourceIdentifiers: sources-identifier-example
|
||||
trustedSources: trusted-sources-example
|
||||
customResponse: custom-response-code-example
|
||||
sourceIdentifiers: ""
|
||||
trustedSources: ""
|
||||
exceptions:
|
||||
- exception-example
|
||||
specificRules:
|
||||
- host: "example.com"
|
||||
mode: prevent-learn
|
||||
threatPreventionPractices: [threat-prevention-practice-example]
|
||||
accessControlPractices: [access-control-practice-example]
|
||||
triggers: [log-trigger-example]
|
||||
customResponse: custom-response-code-example
|
||||
sourceIdentifiers: ""
|
||||
trustedSources: ""
|
||||
exceptions:
|
||||
- exception-example
|
||||
---
|
||||
apiVersion: openappsec.io/v1beta2
|
||||
kind: ThreatPreventionPractice
|
||||
|
||||
@@ -17,16 +17,6 @@ spec:
|
||||
customResponse: default-web-user-response
|
||||
triggers:
|
||||
- default-log-trigger
|
||||
specificRules:
|
||||
- host: www.example.com
|
||||
# this is an example for specific rule, adjust the values as required for the protected app
|
||||
mode: prevent-learn
|
||||
threatPreventionPractices:
|
||||
- default-threat-prevention-practice
|
||||
accessControlPractices:
|
||||
- default-access-control-practice
|
||||
triggers:
|
||||
- default-log-trigger
|
||||
---
|
||||
apiVersion: openappsec.io/v1beta2
|
||||
kind: ThreatPreventionPractice
|
||||
|
||||
113
config/linux/v1beta2/example/local_policy.yaml
Normal file
@@ -0,0 +1,113 @@
|
||||
apiVersion: v1beta2
|
||||
|
||||
policies:
|
||||
default:
|
||||
mode: detect-learn
|
||||
accessControlPractices: [access-control-practice-example]
|
||||
threatPreventionPractices: [threat-prevention-practice-example]
|
||||
triggers: [log-trigger-example]
|
||||
customResponse: web-user-response-exmaple
|
||||
sourceIdentifiers: ""
|
||||
trustedSources: ""
|
||||
exceptions:
|
||||
- exception-example
|
||||
specificRules:
|
||||
- host: "example.com"
|
||||
mode: prevent-learn
|
||||
threatPreventionPractices: [threat-prevention-practice-example]
|
||||
accessControlPractices: [access-control-practice-example]
|
||||
triggers: [log-trigger-example]
|
||||
customResponse: web-user-response-exmaple
|
||||
sourceIdentifiers: ""
|
||||
trustedSources: ""
|
||||
exceptions:
|
||||
- exception-example
|
||||
|
||||
threatPreventionPractices:
|
||||
- name: threat-prevention-practice-example
|
||||
practiceMode: inherited
|
||||
webAttacks:
|
||||
overrideMode: inherited
|
||||
minimumConfidence: high
|
||||
intrusionPrevention:
|
||||
# intrusion prevention (IPS) requires "Premium Edition"
|
||||
overrideMode: inherited
|
||||
maxPerformanceImpact: medium
|
||||
minSeverityLevel: medium
|
||||
minCveYear: 2016
|
||||
highConfidenceEventAction: inherited
|
||||
mediumConfidenceEventAction: inherited
|
||||
lowConfidenceEventAction: detect
|
||||
fileSecurity:
|
||||
# file security requires "Premium Edition"
|
||||
overrideMode: inherited
|
||||
minSeverityLevel: medium
|
||||
highConfidenceEventAction: inherited
|
||||
mediumConfidenceEventAction: inherited
|
||||
lowConfidenceEventAction: detect
|
||||
snortSignatures:
|
||||
# you must specify snort signatures in configmap or file to activate snort inspection
|
||||
overrideMode: inherited
|
||||
configmap: []
|
||||
# relevant for deployments on kubernetes
|
||||
# 0 or 1 configmaps supported in array
|
||||
files: []
|
||||
# relevant for docker and linux embedded deployments
|
||||
# 0 or 1 files supported in array
|
||||
schemaValidation: # schema validation requires "Premium Edition"
|
||||
overrideMode: inherited
|
||||
configmap: []
|
||||
# relevant for deployments on kubernetes
|
||||
# 0 or 1 configmaps supported in array
|
||||
files: []
|
||||
# relevant for docker and linux embedded deployments
|
||||
# 0 or 1 files supported in array
|
||||
antiBot: # antibot requires "Premium Edition"
|
||||
overrideMode: inherited
|
||||
injectedUris: []
|
||||
validatedUris: []
|
||||
|
||||
accessControlPractices:
|
||||
- name: access-control-practice-example
|
||||
practiceMode: inherited
|
||||
rateLimit:
|
||||
# specify one or more rules below to use rate limiting
|
||||
overrideMode: inherited
|
||||
rules: []
|
||||
|
||||
customResponses:
|
||||
- name: web-user-response-exmaple
|
||||
mode: response-code-only
|
||||
httpResponseCode: 403
|
||||
|
||||
logTriggers:
|
||||
- name: log-trigger-example
|
||||
accessControlLogging:
|
||||
allowEvents: false
|
||||
dropEvents: true
|
||||
appsecLogging:
|
||||
detectEvents: true
|
||||
preventEvents: true
|
||||
allWebRequests: false
|
||||
extendedLogging:
|
||||
urlPath: true
|
||||
urlQuery: true
|
||||
httpHeaders: false
|
||||
requestBody: false
|
||||
additionalSuspiciousEventsLogging:
|
||||
enabled: true
|
||||
minSeverity: high
|
||||
responseBody: false
|
||||
responseCode: true
|
||||
logDestination:
|
||||
cloud: true
|
||||
logToAgent: false
|
||||
stdout:
|
||||
format: json
|
||||
|
||||
exceptions:
|
||||
- name: exception-example
|
||||
action: "accept"
|
||||
condition:
|
||||
- key: "countryCode"
|
||||
value: "US"
|
||||
@@ -237,6 +237,12 @@ AgentDetails::getAgentId() const
|
||||
return agent_id;
|
||||
}
|
||||
|
||||
string
|
||||
AgentDetails::getRegisteredServer() const
|
||||
{
|
||||
return server;
|
||||
}
|
||||
|
||||
Maybe<string>
|
||||
AgentDetails::getProxy() const
|
||||
{
|
||||
@@ -430,6 +436,7 @@ AgentDetails::loadProxyType(const string &proxy_type)
|
||||
}
|
||||
|
||||
#ifdef gaia
|
||||
(void)proxy_type;
|
||||
I_ShellCmd *shell_cmd = Singleton::Consume<I_ShellCmd>::by<AgentDetails>();
|
||||
auto proxy_ip = shell_cmd->getExecOutput("dbget proxy:ip-address| tr -d '\n'");
|
||||
if (!proxy_ip.ok()) return proxy_ip;
|
||||
|
||||
@@ -111,6 +111,8 @@ HttpAttachmentConfiguration::save(cereal::JSONOutputArchive &archive) const
cereal::make_nvp("keep_alive_interval_msec", getNumericalValue("keep_alive_interval_msec")),
cereal::make_nvp("min_retries_for_verdict", getNumericalValue("min_retries_for_verdict")),
cereal::make_nvp("max_retries_for_verdict", getNumericalValue("max_retries_for_verdict")),
cereal::make_nvp("hold_verdict_retries", getNumericalValue("hold_verdict_retries")),
cereal::make_nvp("hold_verdict_polling_time", getNumericalValue("hold_verdict_polling_time")),
cereal::make_nvp("body_size_trigger", getNumericalValue("body_size_trigger")),
cereal::make_nvp("remove_server_header", getNumericalValue("remove_server_header"))
);
@@ -167,6 +169,8 @@ HttpAttachmentConfiguration::load(cereal::JSONInputArchive &archive)
loadNumericalValue(archive, "keep_alive_interval_msec", DEFAULT_KEEP_ALIVE_INTERVAL_MSEC);
loadNumericalValue(archive, "min_retries_for_verdict", 3);
loadNumericalValue(archive, "max_retries_for_verdict", 15);
loadNumericalValue(archive, "hold_verdict_retries", 3);
loadNumericalValue(archive, "hold_verdict_polling_time", 1);
loadNumericalValue(archive, "body_size_trigger", 200000);
loadNumericalValue(archive, "remove_server_header", 0);
}
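The two new knobs are naturally consumed together on the attachment side: one bounds the number of polls while a verdict is held, the other sets the pause between polls. A rough sketch of such a loop follows; is_verdict_still_on_hold() is a hypothetical placeholder, the seconds unit is an assumption, and this is not the attachment's actual code path.

// Illustrative only: how hold_verdict_retries and hold_verdict_polling_time could drive a polling loop.
#include <unistd.h>

unsigned int getHoldVerdictRetries();        // declared in the attachment utility header
unsigned int getHoldVerdictPollingTime();    // polling interval (assumed seconds)
int is_verdict_still_on_hold();              // hypothetical query, for illustration only

int wait_for_final_verdict()
{
    for (unsigned int attempt = 0; attempt < getHoldVerdictRetries(); ++attempt) {
        if (!is_verdict_still_on_hold()) return 1;   // a final verdict arrived
        sleep(getHoldVerdictPollingTime());          // wait before polling again
    }
    return 0;                                        // still on hold after all retries
}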
|
||||
|
||||
@@ -203,6 +203,7 @@ private:
|
||||
MessageMetadata service_config_req_md("127.0.0.1", 7777);
|
||||
service_config_req_md.setConnectioFlag(MessageConnectionConfig::ONE_TIME_CONN);
|
||||
service_config_req_md.setConnectioFlag(MessageConnectionConfig::UNSECURE_CONN);
|
||||
service_config_req_md.setSuspension(false);
|
||||
auto service_config_status = messaging->sendSyncMessage(
|
||||
HTTPMethod::POST,
|
||||
"/set-nano-service-config",
|
||||
@@ -214,6 +215,7 @@ private:
|
||||
MessageMetadata secondary_port_req_md("127.0.0.1", 7778);
|
||||
secondary_port_req_md.setConnectioFlag(MessageConnectionConfig::ONE_TIME_CONN);
|
||||
secondary_port_req_md.setConnectioFlag(MessageConnectionConfig::UNSECURE_CONN);
|
||||
secondary_port_req_md.setSuspension(false);
|
||||
service_config_status = messaging->sendSyncMessage(
|
||||
HTTPMethod::POST,
|
||||
"/set-nano-service-config",
|
||||
@@ -251,6 +253,7 @@ private:
|
||||
MessageMetadata service_config_req_md("127.0.0.1", 7777);
|
||||
service_config_req_md.setConnectioFlag(MessageConnectionConfig::ONE_TIME_CONN);
|
||||
service_config_req_md.setConnectioFlag(MessageConnectionConfig::UNSECURE_CONN);
|
||||
service_config_req_md.setSuspension(false);
|
||||
bool service_config_status = messaging->sendSyncMessageWithoutResponse(
|
||||
HTTPMethod::POST,
|
||||
"/set-reconf-status",
|
||||
@@ -262,6 +265,7 @@ private:
|
||||
MessageMetadata secondary_port_req_md("127.0.0.1", 7778);
|
||||
secondary_port_req_md.setConnectioFlag(MessageConnectionConfig::ONE_TIME_CONN);
|
||||
secondary_port_req_md.setConnectioFlag(MessageConnectionConfig::UNSECURE_CONN);
|
||||
secondary_port_req_md.setSuspension(false);
|
||||
service_config_status = messaging->sendSyncMessageWithoutResponse(
|
||||
HTTPMethod::POST,
|
||||
"/set-reconf-status",
|
||||
|
||||
@@ -42,6 +42,9 @@ unsigned int getFailOpenTimeout();
|
||||
int isFailOpenHoldMode();
|
||||
unsigned int getFailOpenHoldTimeout();
|
||||
|
||||
unsigned int getHoldVerdictPollingTime();
|
||||
unsigned int getHoldVerdictRetries();
|
||||
|
||||
unsigned int getMaxSessionsPerMinute();
|
||||
int isFailOpenOnSessionLimit();
|
||||
|
||||
|
||||
@@ -36,6 +36,7 @@ public:
|
||||
virtual Maybe<std::string> getFogDomain() const = 0;
|
||||
virtual std::string getTenantId() const = 0;
|
||||
virtual std::string getProfileId() const = 0;
|
||||
virtual std::string getRegisteredServer() const = 0;
|
||||
|
||||
// Agent Details
|
||||
virtual Maybe<std::string> getProxy() const = 0;
|
||||
@@ -43,6 +44,7 @@ public:
|
||||
virtual void setAgentId(const std::string &_agent_id) = 0;
|
||||
virtual std::string getAgentId() const = 0;
|
||||
virtual void setOrchestrationMode(OrchestrationMode _orchstration_mode) = 0;
|
||||
virtual void setRegisteredServer(const std::string &_server) = 0;
|
||||
virtual OrchestrationMode getOrchestrationMode() const = 0;
|
||||
virtual std::string getAccessToken() const = 0;
|
||||
virtual void loadAccessToken() = 0;
|
||||
|
||||
@@ -69,15 +69,24 @@ public:
|
||||
uint16_t _port_num,
|
||||
Flags<MessageConnectionConfig> _conn_flags,
|
||||
bool _should_buffer = false,
|
||||
bool _is_to_fog = false
|
||||
bool _is_to_fog = false,
|
||||
bool _should_suspend = true
|
||||
) :
|
||||
host_name(_host_name),
|
||||
port_num(_port_num),
|
||||
conn_flags(_conn_flags),
|
||||
should_buffer(_should_buffer),
|
||||
is_to_fog(_is_to_fog)
|
||||
is_to_fog(_is_to_fog),
|
||||
should_send_access_token(true),
|
||||
should_suspend(_should_suspend)
|
||||
{}
|
||||
|
||||
const bool &
|
||||
shouldSendAccessToken() const
|
||||
{
|
||||
return should_send_access_token;
|
||||
}
|
||||
|
||||
const std::string &
|
||||
getHostName() const
|
||||
{
|
||||
@@ -90,6 +99,12 @@ public:
|
||||
return port_num;
|
||||
}
|
||||
|
||||
void
|
||||
setShouldSendAccessToken(const bool &_should_send_access_token)
|
||||
{
|
||||
should_send_access_token = _should_send_access_token;
|
||||
}
|
||||
|
||||
void
|
||||
setConnectioFlag(MessageConnectionConfig flag)
|
||||
{
|
||||
@@ -180,6 +195,12 @@ public:
|
||||
is_dual_auth = true;
|
||||
}
|
||||
|
||||
void
|
||||
setSuspension(bool _should_suspend)
|
||||
{
|
||||
should_suspend = _should_suspend;
|
||||
}
|
||||
|
||||
void
|
||||
setExternalCertificate(const std::string &_external_certificate)
|
||||
{
|
||||
@@ -198,6 +219,12 @@ public:
|
||||
return should_buffer;
|
||||
}
|
||||
|
||||
bool
|
||||
shouldSuspend() const
|
||||
{
|
||||
return should_suspend;
|
||||
}
|
||||
|
||||
bool
|
||||
isProxySet() const
|
||||
{
|
||||
@@ -300,6 +327,8 @@ private:
|
||||
bool is_to_fog = false;
|
||||
bool is_rate_limit_block = false;
|
||||
uint rate_limit_block_time = 0;
|
||||
bool should_send_access_token = true;
|
||||
bool should_suspend = true;
|
||||
};
|
||||
|
||||
#endif // __MESSAGING_METADATA_H__
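For orientation, a short usage sketch of the new suspension and access-token knobs on MessageMetadata, patterned on the local-service calls elsewhere in this change set; the include path, host, and port are placeholders.

#include "messaging/messaging_metadata.h"   // assumed include path for MessageMetadata

// Sketch: metadata for a local, one-shot, plain-HTTP request that should neither be skipped
// when the connection is marked suspended nor carry the agent access token.
MessageMetadata buildLocalServiceMetadata()
{
    MessageMetadata md("127.0.0.1", 7777);                            // placeholder host and port
    md.setConnectioFlag(MessageConnectionConfig::ONE_TIME_CONN);
    md.setConnectioFlag(MessageConnectionConfig::UNSECURE_CONN);
    md.setSuspension(false);                                          // bypass the isSuspended() short-circuit
    md.setShouldSendAccessToken(false);                               // do not attach the access token
    return md;
}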
|
||||
|
||||
@@ -20,11 +20,13 @@ public:
|
||||
MOCK_CONST_METHOD0(getFogDomain, Maybe<std::string>());
|
||||
MOCK_CONST_METHOD0(getTenantId, std::string());
|
||||
MOCK_CONST_METHOD0(getProfileId, std::string());
|
||||
MOCK_CONST_METHOD0(getRegisteredServer, std::string());
|
||||
|
||||
// Agent Details
|
||||
MOCK_CONST_METHOD0(getProxy, Maybe<std::string>());
|
||||
MOCK_METHOD1(setProxy, void(const std::string&));
|
||||
MOCK_METHOD1(setAgentId, void(const std::string&));
|
||||
MOCK_METHOD1(setRegisteredServer, void(const std::string&));
|
||||
MOCK_CONST_METHOD0(getAgentId, std::string());
|
||||
MOCK_METHOD0(loadAccessToken, void());
|
||||
MOCK_CONST_METHOD0(getAccessToken, std::string());
|
||||
|
||||
@@ -73,6 +73,7 @@ public:
|
||||
Maybe<std::string> getOpenSSLDir() const;
|
||||
std::string getClusterId() const;
|
||||
OrchestrationMode getOrchestrationMode() const;
|
||||
std::string getRegisteredServer() const;
|
||||
bool isOpenAppsecAgent() const;
|
||||
std::string getAccessToken() const;
|
||||
void loadAccessToken();
|
||||
@@ -86,6 +87,7 @@ public:
|
||||
void setOpenSSLDir(const std::string &_openssl_dir) { openssl_dir = _openssl_dir; }
|
||||
void setSSLFlag(const bool _encrypted_connection) { encrypted_connection = _encrypted_connection; }
|
||||
void setOrchestrationMode(OrchestrationMode _orchstration_mode) { orchestration_mode = _orchstration_mode; }
|
||||
void setRegisteredServer(const std::string &_server) { server = _server; }
|
||||
bool getSSLFlag() const { return encrypted_connection; }
|
||||
|
||||
bool readAgentDetails();
|
||||
@@ -117,6 +119,7 @@ private:
|
||||
uint16_t fog_port = 0;
|
||||
bool encrypted_connection = false;
|
||||
OrchestrationMode orchestration_mode = OrchestrationMode::ONLINE;
|
||||
std::string server = "Unknown";
|
||||
bool is_proxy_configured_via_settings = false;
|
||||
std::map<ProxyProtocol, ProxyData> proxies;
|
||||
|
||||
|
||||
@@ -48,6 +48,7 @@
|
||||
#include "intelligence_comp_v2.h"
|
||||
#include "messaging.h"
|
||||
#include "env_details.h"
|
||||
#include "metric/metric_scraper.h"
|
||||
|
||||
USE_DEBUG_FLAG(D_COMP_IS);
|
||||
|
||||
@@ -216,6 +217,7 @@ class ComponentListCore
|
||||
Version,
|
||||
Buffer,
|
||||
ShellCmd,
|
||||
MetricScraper,
|
||||
GenericMetric,
|
||||
Messaging,
|
||||
MainloopComponent,
|
||||
|
||||
@@ -153,6 +153,7 @@ DEFINE_FLAG(D_COMPONENT, D_ALL)
|
||||
DEFINE_FLAG(D_SDWAN, D_COMPONENT)
|
||||
DEFINE_FLAG(D_SDWAN_POLICY, D_SDWAN)
|
||||
DEFINE_FLAG(D_SDWAN_DATA, D_SDWAN)
|
||||
DEFINE_FLAG(D_SDWAN_FEATURE_FLAG, D_SDWAN)
|
||||
DEFINE_FLAG(D_LOGGER_SDWAN, D_SDWAN)
|
||||
DEFINE_FLAG(D_SDWAN_API, D_SDWAN)
|
||||
DEFINE_FLAG(D_REVERSE_PROXY, D_COMPONENT)
|
||||
|
||||
@@ -59,10 +59,11 @@ class GenericMetric
|
||||
Singleton::Consume<I_Messaging>,
|
||||
Singleton::Consume<I_RestApi>,
|
||||
Singleton::Consume<I_Encryptor>,
|
||||
public Listener<AllMetricEvent>
|
||||
public Listener<AllMetricEvent>,
|
||||
public Listener<MetricScrapeEvent>
|
||||
{
|
||||
public:
|
||||
enum class Stream { FOG, DEBUG, PROMETHEUS, AIOPS, COUNT };
|
||||
enum class Stream { FOG, DEBUG, AIOPS, COUNT };
|
||||
|
||||
void
|
||||
init(
|
||||
@@ -72,7 +73,8 @@ public:
|
||||
std::chrono::seconds _report_interval,
|
||||
bool _reset,
|
||||
ReportIS::Audience _audience = ReportIS::Audience::INTERNAL,
|
||||
bool _force_buffering = false
|
||||
bool _force_buffering = false,
|
||||
const std::string &_asset_id = ""
|
||||
);
|
||||
|
||||
template <typename Value>
|
||||
@@ -96,6 +98,7 @@ public:
|
||||
void resetMetrics();
|
||||
void upon(const AllMetricEvent &) override;
|
||||
std::string respond(const AllMetricEvent &event) override;
|
||||
std::vector<PrometheusData> respond(const MetricScrapeEvent &event) override;
|
||||
std::string getListenerName() const override;
|
||||
|
||||
std::string getMetricName() const;
|
||||
@@ -113,9 +116,10 @@ private:
|
||||
friend class MetricCalc;
|
||||
void addCalc(MetricCalc *calc);
|
||||
|
||||
std::vector<PrometheusData> getPromMetricsData();
|
||||
|
||||
void handleMetricStreamSending();
|
||||
void generateLog();
|
||||
void generatePrometheus();
|
||||
void generateDebug();
|
||||
void generateAiopsLog();
|
||||
|
||||
@@ -127,10 +131,12 @@ private:
|
||||
ReportIS::Audience audience;
|
||||
std::chrono::seconds report_interval;
|
||||
std::vector<MetricCalc *> calcs;
|
||||
std::vector<MetricCalc *> prometheus_calcs;
|
||||
Flags<Stream> active_streams;
|
||||
bool reset;
|
||||
bool force_buffering = false;
|
||||
Context ctx;
|
||||
std::string asset_id;
|
||||
};
|
||||
|
||||
#include "metric/counter.h"
|
||||
|
||||
@@ -25,6 +25,9 @@
|
||||
#include "customized_cereal_map.h"
|
||||
#include "compression_utils.h"
|
||||
#include "i_encryptor.h"
|
||||
#include "event.h"
|
||||
|
||||
USE_DEBUG_FLAG(D_METRICS);
|
||||
|
||||
class GenericMetric;
|
||||
|
||||
@@ -32,13 +35,35 @@ enum class MetricType { GAUGE, COUNTER };

struct PrometheusData
{
template <typename Archive>
void
serialize(Archive &ar)
{
try {
ar(cereal::make_nvp("metric_name", name));
ar(cereal::make_nvp("metric_type", type));
ar(cereal::make_nvp("metric_description", description));
ar(cereal::make_nvp("labels", label));
ar(cereal::make_nvp("value", value));
} catch (const cereal::Exception &e) {
dbgTrace(D_METRICS) << "Error in serialize Prometheus data: " << e.what();
}
}

std::string name;
std::string type;
std::string desc;
std::string description;
std::string label;
std::string value;
};

class MetricScrapeEvent : public Event<MetricScrapeEvent, std::vector<PrometheusData>>
{
public:
MetricScrapeEvent() {}
};
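The fields above map directly onto the Prometheus text exposition format; the small illustrative formatter below (not part of this change) makes the correspondence explicit, assuming label already holds a comma-separated key="value" list as built in metric_map.h.

#include <sstream>
#include <string>

// Illustration only: render one scraped sample as Prometheus exposition text.
std::string toExpositionText(const PrometheusData &sample)
{
    std::stringstream out;
    out << "# HELP " << sample.name << " " << sample.description << "\n";
    out << "# TYPE " << sample.name << " " << sample.type << "\n";
    out << sample.name << "{" << sample.label << "} " << sample.value << "\n";
    return out.str();
}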
|
||||
|
||||
class AiopsMetricData
|
||||
{
|
||||
public:
|
||||
@@ -228,7 +253,10 @@ public:
|
||||
std::string getMetircDescription() const { return getMetadata("Description"); }
|
||||
std::string getMetadata(const std::string &metadata) const;
|
||||
virtual MetricType getMetricType() const { return MetricType::GAUGE; }
|
||||
virtual std::vector<PrometheusData> getPrometheusMetrics() const;
|
||||
virtual std::vector<PrometheusData> getPrometheusMetrics(
|
||||
const std::string &metric_name,
|
||||
const std::string &asset_id = ""
|
||||
) const;
|
||||
virtual float getValue() const = 0;
|
||||
virtual std::vector<AiopsMetricData> getAiopsMetrics() const;
|
||||
|
||||
@@ -240,7 +268,10 @@ public:
|
||||
|
||||
protected:
|
||||
void addMetric(GenericMetric *metric);
|
||||
std::map<std::string, std::string> getBasicLabels() const;
|
||||
std::map<std::string, std::string> getBasicLabels(
|
||||
const std::string &metric_name,
|
||||
const std::string &asset_id = ""
|
||||
) const;
|
||||
|
||||
template <typename Metadata, typename ... OtherMetadata>
|
||||
void
|
||||
|
||||
@@ -55,12 +55,17 @@ class MetricMap : public MetricCalc
|
||||
}
|
||||
|
||||
std::vector<PrometheusData>
|
||||
getPrometheusMetrics(const std::string &label, const std::string &name) const
|
||||
getPrometheusMetrics(
|
||||
const std::string &metric_name,
|
||||
const std::string &label,
|
||||
const std::string &name,
|
||||
const std::string &asset_id
|
||||
) const
|
||||
{
|
||||
std::vector<PrometheusData> res;
|
||||
|
||||
for (auto &metric : inner_map) {
|
||||
auto sub_res = metric.second.getPrometheusMetrics();
|
||||
auto sub_res = metric.second.getPrometheusMetrics(metric_name, asset_id);
|
||||
for (auto &sub_metric : sub_res) {
|
||||
sub_metric.label += "," + label + "=\"" + metric.first + "\"";
|
||||
sub_metric.name = name;
|
||||
@@ -155,9 +160,9 @@ public:
|
||||
}
|
||||
|
||||
std::vector<PrometheusData>
|
||||
getPrometheusMetrics() const override
|
||||
getPrometheusMetrics(const std::string &metric_name, const std::string &asset_id) const override
|
||||
{
|
||||
return metric_map.getPrometheusMetrics(label, getMetricName());
|
||||
return metric_map.getPrometheusMetrics(metric_name, label, getMetricName(), asset_id);
|
||||
}
|
||||
|
||||
std::vector<AiopsMetricData>
|
||||
|
||||
45
core/include/services_sdk/resources/metric/metric_scraper.h
Normal file
@@ -0,0 +1,45 @@
|
||||
// Copyright (C) 2022 Check Point Software Technologies Ltd. All rights reserved.
|
||||
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#ifndef __METRIC_SCRAPER_H__
|
||||
#define __METRIC_SCRAPER_H__
|
||||
|
||||
#include <string>
|
||||
#include <fstream>
|
||||
#include <vector>
|
||||
#include <streambuf>
|
||||
|
||||
#include "singleton.h"
|
||||
#include "debug.h"
|
||||
#include "component.h"
|
||||
#include "event.h"
|
||||
#include "i_rest_api.h"
|
||||
#include "generic_metric.h"
|
||||
|
||||
class MetricScraper
|
||||
:
|
||||
public Component,
|
||||
Singleton::Consume<I_RestApi>
|
||||
{
|
||||
public:
|
||||
MetricScraper();
|
||||
~MetricScraper();
|
||||
|
||||
void init();
|
||||
|
||||
private:
|
||||
class Impl;
|
||||
std::unique_ptr<Impl> pimpl;
|
||||
};
|
||||
|
||||
#endif // __METRIC_SCRAPER_H__
|
||||
@@ -492,9 +492,9 @@ private:
return genError("Local intelligence server ip not configured");
}

auto res = sendLocalIntelligenceToLocalServer(rest_req, *server, primary_port_setting);
auto res = sendLocalIntelligenceToLocalServer(rest_req, *server, primary_port_setting, false);
if (res.ok()) return res;
return sendLocalIntelligenceToLocalServer(rest_req, *server, secondary_port_setting);
return sendLocalIntelligenceToLocalServer(rest_req, *server, secondary_port_setting, false);
}

template <typename IntelligenceRest>
@@ -502,8 +502,9 @@ private:
sendLocalIntelligenceToLocalServer(
const IntelligenceRest &rest_req,
const string &server,
const string &port_setting
) const
const string &port_setting,
const bool should_send_access_token = false
) const
{
auto port = getSetting<uint>("intelligence", port_setting);
if (!port.ok()) {
@@ -519,6 +520,7 @@ private:
req_md.insertHeaders(getHTTPHeaders());
req_md.setConnectioFlag(MessageConnectionConfig::UNSECURE_CONN);
req_md.setConnectioFlag(MessageConnectionConfig::ONE_TIME_CONN);
req_md.setShouldSendAccessToken(should_send_access_token);
return sendIntelligenceRequestImpl(rest_req, req_md);
}

@@ -22,7 +22,7 @@ using namespace std;

USE_DEBUG_FLAG(D_INTELLIGENCE);

static const unsigned int upper_assets_limit = 50;
static const unsigned int upper_assets_limit = 200;
static const unsigned int upper_confidence_limit = 1000;

Maybe<void>

@@ -30,7 +30,8 @@ public:
HTTPMethod method,
const std::string &uri,
const std::map<std::string, std::string> &headers,
const std::string &body
const std::string &body,
const bool should_send_access_token = true
);

Maybe<void> setConnectionHeaders(const Connection &conn, bool is_access_token_needed);

@@ -79,7 +79,8 @@ HTTPRequest::prepareRequest(
HTTPMethod method,
const string &uri,
const map<string, string> &headers,
const string &body
const string &body,
const bool should_send_access_token
)
{
HTTPRequest req(method, uri, headers, body);
@@ -94,6 +95,7 @@ HTTPRequest::prepareRequest(
dont_add_access_token = true;
dbgTrace(D_MESSAGING) << "Request is for agent authentication";
}
if (!should_send_access_token) dont_add_access_token = true;
auto res = req.addAccessToken(conn, dont_add_access_token);
if (!res.ok()) return res.passErr();

@@ -125,7 +125,9 @@ MessagingComp::sendMessage(
}

Connection conn = maybe_conn.unpack();
if (conn.isSuspended()) return suspendMessage(body, method, uri, category, message_metadata);
if (message_metadata.shouldSuspend() && conn.isSuspended()) {
return suspendMessage(body, method, uri, category, message_metadata);
}

bool is_to_fog = isMessageToFog(message_metadata);
auto metadata = message_metadata;
@@ -142,7 +144,13 @@ MessagingComp::sendMessage(
metadata.insertHeaders(i_env->getCurrentHeadersMap());
}

auto req = HTTPRequest::prepareRequest(conn, method, uri, metadata.getHeaders(), body);
auto req = HTTPRequest::prepareRequest(
conn,
method,
uri,
metadata.getHeaders(),
body,
metadata.shouldSendAccessToken());
if (!req.ok()) return genError(HTTPResponse(HTTPStatusCode::HTTP_UNKNOWN, req.getErr()));

auto response = i_conn->sendRequest(conn, *req);

@@ -1,3 +1,3 @@
add_library(metric generic_metric.cc)
add_library(metric generic_metric.cc metric_scraper.cc)

add_subdirectory(metric_ut)

@@ -49,7 +49,7 @@ MetricCalc::getAiopsMetrics() const
string description = getMetircDescription();
string type = getMetricType() == MetricType::GAUGE ? "Gauge" : "Counter";

return { AiopsMetricData(name, type, units, description, getBasicLabels(), value) };
return { AiopsMetricData(name, type, units, description, getBasicLabels(getMetricName()), value) };
}

string
@@ -77,7 +77,7 @@ MetricCalc::addMetric(GenericMetric *metric)
}

vector<PrometheusData>
MetricCalc::getPrometheusMetrics() const
MetricCalc::getPrometheusMetrics(const std::string &metric_name, const string &asset_id) const
{
float value = getValue();
if (isnan(value)) return {};
@@ -86,10 +86,10 @@ MetricCalc::getPrometheusMetrics() const

res.name = getMetricDotName() != "" ? getMetricDotName() : getMetricName();
res.type = getMetricType() == MetricType::GAUGE ? "gauge" : "counter";
res.desc = getMetircDescription();
res.description = getMetircDescription();

stringstream labels;
const auto &label_pairs = getBasicLabels();
const auto &label_pairs = getBasicLabels(metric_name, asset_id);
bool first = true;
for (auto &pair : label_pairs) {
if (!first) labels << ',';
@@ -106,7 +106,7 @@ MetricCalc::getPrometheusMetrics() const
}

map<string, string>
MetricCalc::getBasicLabels() const
MetricCalc::getBasicLabels(const string &metric_name, const string &asset_id) const
{
map<string, string> res;

@@ -121,6 +121,9 @@ MetricCalc::getBasicLabels() const
auto executable = env->get<string>("Base Executable Name");
if (executable.ok()) res["process"] = *executable;

if (!asset_id.empty()) res["assetId"] = asset_id;
res["metricName"] = metric_name;

return res;
}
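
For readers following the change above: getBasicLabels() now also stamps each metric with an assetId label (when a non-empty asset ID is configured) and a metricName label, and getPrometheusMetrics() joins the pairs into one Prometheus-style label set. The snippet below is a standalone sketch, not code from this repository; renderLabels and the exact formatting are assumptions that simply match the label strings the updated unit tests expect.

// --- illustrative sketch (not part of the diff) ---
#include <iostream>
#include <map>
#include <sstream>
#include <string>

// Render a label map such as the one getBasicLabels() returns into {k="v",k2="v2"}.
std::string
renderLabels(const std::map<std::string, std::string> &label_pairs)
{
    std::stringstream labels;
    bool first = true;
    for (const auto &pair : label_pairs) {
        if (!first) labels << ',';
        labels << pair.first << "=\"" << pair.second << "\"";
        first = false;
    }
    return "{" + labels.str() + "}";
}

int main()
{
    std::map<std::string, std::string> labels = {
        {"agent", "Unknown"},
        {"assetId", "asset id"},      // added only when a non-empty asset ID is set
        {"id", "87"},
        {"metricName", "CPU usage"}   // always added after this change
    };
    // Prints: {agent="Unknown",assetId="asset id",id="87",metricName="CPU usage"}
    std::cout << renderLabels(labels) << "\n";
    return 0;
}
// --- end of sketch ---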
@@ -158,7 +161,8 @@ GenericMetric::init(
chrono::seconds _report_interval,
bool _reset,
Audience _audience,
bool _force_buffering
bool _force_buffering,
const string &_asset_id
)
{
turnOnStream(Stream::FOG);
@@ -173,6 +177,7 @@ GenericMetric::init(
issuing_engine = _issuing_engine;
audience = _audience;
force_buffering = _force_buffering;
asset_id = _asset_id;

i_mainloop->addRecurringRoutine(
I_MainLoop::RoutineType::System,
@@ -185,13 +190,13 @@ GenericMetric::init(
},
"Metric Fog stream messaging for " + _metric_name
);
registerListener();
}

void
GenericMetric::handleMetricStreamSending()
{
if (active_streams.isSet(Stream::DEBUG)) generateDebug();
if (active_streams.isSet(Stream::PROMETHEUS)) generatePrometheus();
if (active_streams.isSet(Stream::FOG)) generateLog();
if (active_streams.isSet(Stream::AIOPS)) generateAiopsLog();

@@ -237,6 +242,7 @@ void
GenericMetric::addCalc(MetricCalc *calc)
{
calcs.push_back(calc);
prometheus_calcs.push_back(calc);
}

void
@@ -254,6 +260,12 @@ GenericMetric::respond(const AllMetricEvent &event)
return res;
}

vector<PrometheusData>
GenericMetric::respond(const MetricScrapeEvent &)
{
return getPromMetricsData();
}
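
The new respond(const MetricScrapeEvent &) overload is what lets the scraper (added further below in core/metric/metric_scraper.cc) pull data from every registered GenericMetric via MetricScrapeEvent().query(), instead of each metric pushing to /add-metrics. A minimal, self-contained sketch of that broadcast-and-collect idea follows; the types and names are invented for illustration and are not openappsec APIs.

// --- illustrative sketch (not part of the diff) ---
#include <functional>
#include <iostream>
#include <string>
#include <vector>

struct SampleMetric { std::string name; std::string value; };

// Stand-in for the event's listener registry: each registered metric object
// contributes its own vector of samples when the scrape is queried.
static std::vector<std::function<std::vector<SampleMetric>()>> listeners;

std::vector<SampleMetric>
queryAll()
{
    std::vector<SampleMetric> all;
    for (const auto &listener : listeners) {
        auto part = listener();
        all.insert(all.end(), part.begin(), part.end());
    }
    return all;
}

int main()
{
    listeners.push_back([] { return std::vector<SampleMetric>{{"cpuMax", "89"}}; });
    listeners.push_back([] { return std::vector<SampleMetric>{{"request.total", "1"}}; });
    for (const auto &metric : queryAll()) std::cout << metric.name << "=" << metric.value << "\n";
    return 0;
}
// --- end of sketch ---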

string GenericMetric::getListenerName() const { return metric_name; }

void
@@ -316,70 +328,19 @@ GenericMetric::generateLog()
sendLog(metric_client_rest);
}

class PrometheusRest : public ClientRest
vector<PrometheusData>
GenericMetric::getPromMetricsData()
{
class Metric : public ClientRest
{
public:
Metric(const string &n, const string &t, const string &d, const string &l, const string &v)
:
metric_name(n),
metric_type(t),
metric_description(d),
labels(l),
value(v)
{}

private:
C2S_PARAM(string, metric_name);
C2S_PARAM(string, metric_type);
C2S_PARAM(string, metric_description);
C2S_PARAM(string, labels);
C2S_PARAM(string, value);
};

public:
PrometheusRest() : metrics(vector<Metric>()) {}

void
addMetric(const vector<PrometheusData> &vec)
{
auto &metric_vec = metrics.get();
metric_vec.reserve(vec.size());
for (auto &metric : vec) {
metric_vec.emplace_back(metric.name, metric.type, metric.desc, "{" + metric.label + "}", metric.value);
}
}

private:
C2S_PARAM(vector<Metric>, metrics);
};

void
GenericMetric::generatePrometheus()
{
if (!getProfileAgentSettingWithDefault(false, "prometheus")) return;
dbgTrace(D_METRICS) << "Generate prometheus metric";

vector<PrometheusData> all_metrics;
for (auto &calc : calcs) {
const auto &cal_metrics = calc->getPrometheusMetrics();
all_metrics.insert(all_metrics.end(), cal_metrics.begin(), cal_metrics.end());
if (!getProfileAgentSettingWithDefault(false, "prometheus")) return all_metrics;
dbgTrace(D_METRICS) << "Get prometheus metrics";

for (auto &calc : prometheus_calcs) {
const auto &calc_prom_metrics = calc->getPrometheusMetrics(metric_name, asset_id);
all_metrics.insert(all_metrics.end(), calc_prom_metrics.begin(), calc_prom_metrics.end());
calc->reset();
}

PrometheusRest rest;
rest.addMetric(all_metrics);

MessageMetadata new_config_req_md("127.0.0.1", 7465);
new_config_req_md.setConnectioFlag(MessageConnectionConfig::ONE_TIME_CONN);
new_config_req_md.setConnectioFlag(MessageConnectionConfig::UNSECURE_CONN);
Singleton::Consume<I_Messaging>::by<GenericMetric>()->sendSyncMessage(
HTTPMethod::POST,
"/add-metrics",
rest,
MessageCategory::GENERIC,
new_config_req_md
);
return all_metrics;
}

void

core/metric/metric_scraper.cc (new file, 50 lines)
@@ -0,0 +1,50 @@
#include "metric/metric_scraper.h"

using namespace std;

USE_DEBUG_FLAG(D_METRICS);

class MetricScraper::Impl
{
public:
void
init()
{
Singleton::Consume<I_RestApi>::by<MetricScraper>()->addGetCall(
"service-metrics",
[&] () { return getAllPrometheusMetrics(); }
);
}

string
getAllPrometheusMetrics()
{
auto all_metrics_events_res = MetricScrapeEvent().query();
for (auto metric_vec : all_metrics_events_res) {
for (PrometheusData metric : metric_vec) {
metric.label = "{" + metric.label + "}";
all_metrics.emplace_back(metric);
}
}
stringstream ss;
{
cereal::JSONOutputArchive archive(ss);
archive(cereal::make_nvp("metrics", all_metrics));
}
all_metrics.clear();
return ss.str();
}

private:
vector<PrometheusData> all_metrics;
};

MetricScraper::MetricScraper() : Component("MetricScraper"), pimpl(make_unique<MetricScraper::Impl>()) {}

MetricScraper::~MetricScraper() {}

void
MetricScraper::init()
{
pimpl->init();
}
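
The implementation above serializes whatever the scrape collected into a JSON document with a top-level "metrics" array, using cereal's JSONOutputArchive. The sketch below shows that serialization shape in isolation; it assumes the cereal library is available, and ScrapedMetric is a stand-in record, since PrometheusData's own serialize() is not part of this diff (its field names here are inferred from the unit-test expectations further below).

// --- illustrative sketch (not part of the diff) ---
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

#include <cereal/archives/json.hpp>
#include <cereal/types/string.hpp>
#include <cereal/types/vector.hpp>

struct ScrapedMetric
{
    std::string name, type, description, label, value;

    // Field names chosen to mirror the "metric_name"/"metric_type"/... keys
    // that the metric unit tests expect in the /service-metrics payload.
    template <class Archive>
    void
    serialize(Archive &ar)
    {
        ar(
            cereal::make_nvp("metric_name", name),
            cereal::make_nvp("metric_type", type),
            cereal::make_nvp("metric_description", description),
            cereal::make_nvp("labels", label),
            cereal::make_nvp("value", value)
        );
    }
};

int main()
{
    std::vector<ScrapedMetric> all_metrics = {
        {"cpuMax", "gauge", "", "{agent=\"Unknown\",id=\"87\"}", "89"}
    };
    std::stringstream ss;
    {
        // Scoped so the archive flushes before the stream is read.
        cereal::JSONOutputArchive archive(ss);
        archive(cereal::make_nvp("metrics", all_metrics));
    }
    std::cout << ss.str() << "\n";   // a JSON document: {"metrics": [ { "metric_name": "cpuMax", ... } ]}
    return 0;
}
// --- end of sketch ---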
@@ -14,6 +14,7 @@
#include "mock/mock_instance_awareness.h"
#include "config.h"
#include "config_component.h"
#include "metric/metric_scraper.h"

using namespace std;
using namespace chrono;
@@ -191,9 +192,11 @@ public:
MetricTest()
{
EXPECT_CALL(rest, mockRestCall(RestAction::ADD, "declare-boolean-variable", _)).WillOnce(Return(true));
env.init();
conf.preload();

ON_CALL(instance, getUniqueID()).WillByDefault(Return(string("87")));
ON_CALL(instance, getFamilyID()).WillByDefault(Return(string("")));
env.init();
Debug::setNewDefaultStdout(&debug_output);
Debug::setUnitTestFlag(D_METRICS, Debug::DebugLevel::TRACE);
setConfiguration<bool>(true, string("metric"), string("fogMetricSendEnable"));
@@ -531,9 +534,12 @@ TEST_F(MetricTest, printMetricsTest)
GenericMetric::fini();
}

TEST_F(MetricTest, printPromeathus)
TEST_F(MetricTest, getPromeathusMetric)
{
conf.preload();
MetricScraper metric_scraper;
function<string()> get_metrics_func;
EXPECT_CALL(rest, addGetCall("service-metrics", _)).WillOnce(DoAll(SaveArg<1>(&get_metrics_func), Return(true)));
metric_scraper.init();

stringstream configuration;
configuration << "{\"agentSettings\":[{\"key\":\"prometheus\",\"id\":\"id1\",\"value\":\"true\"}]}\n";
@@ -546,20 +552,21 @@ TEST_F(MetricTest, printPromeathus)
ReportIS::AudienceTeam::AGENT_CORE,
ReportIS::IssuingEngine::AGENT_CORE,
seconds(5),
false
false,
ReportIS::Audience::INTERNAL,
false,
"asset id"
);
cpu_mt.turnOffStream(GenericMetric::Stream::FOG);
cpu_mt.turnOffStream(GenericMetric::Stream::DEBUG);
cpu_mt.turnOnStream(GenericMetric::Stream::PROMETHEUS);
cpu_mt.registerListener();

CPUEvent cpu_event;
cpu_event.setProcessCPU(89);
cpu_event.notify();

string message_body;
EXPECT_CALL(messaging_mock, sendSyncMessage(_, "/add-metrics", _, _, _))
.WillOnce(DoAll(SaveArg<2>(&message_body), Return(HTTPResponse())));
string message_body = get_metrics_func();

routine();

string res =
@@ -569,42 +576,48 @@ TEST_F(MetricTest, printPromeathus)
" \"metric_name\": \"cpuMax\",\n"
" \"metric_type\": \"gauge\",\n"
" \"metric_description\": \"\",\n"
" \"labels\": \"{agent=\\\"Unknown\\\",id=\\\"87\\\"}\",\n"
" \"labels\": \"{agent=\\\"Unknown\\\",assetId=\\\"asset id\\\",id=\\\"87\\\","
"metricName=\\\"CPU usage\\\"}\",\n"
" \"value\": \"89\"\n"
" },\n"
" {\n"
" \"metric_name\": \"cpuMin\",\n"
" \"metric_type\": \"gauge\",\n"
" \"metric_description\": \"\",\n"
" \"labels\": \"{agent=\\\"Unknown\\\",id=\\\"87\\\"}\",\n"
" \"labels\": \"{agent=\\\"Unknown\\\",assetId=\\\"asset id\\\",id=\\\"87\\\","
"metricName=\\\"CPU usage\\\"}\",\n"
" \"value\": \"89\"\n"
" },\n"
" {\n"
" \"metric_name\": \"cpuAvg\",\n"
" \"metric_type\": \"gauge\",\n"
" \"metric_description\": \"\",\n"
" \"labels\": \"{agent=\\\"Unknown\\\",id=\\\"87\\\"}\",\n"
" \"labels\": \"{agent=\\\"Unknown\\\",assetId=\\\"asset id\\\",id=\\\"87\\\","
"metricName=\\\"CPU usage\\\"}\",\n"
" \"value\": \"89\"\n"
" },\n"
" {\n"
" \"metric_name\": \"cpuCurrent\",\n"
" \"metric_type\": \"gauge\",\n"
" \"metric_description\": \"\",\n"
" \"labels\": \"{agent=\\\"Unknown\\\",id=\\\"87\\\"}\",\n"
" \"labels\": \"{agent=\\\"Unknown\\\",assetId=\\\"asset id\\\",id=\\\"87\\\","
"metricName=\\\"CPU usage\\\"}\",\n"
" \"value\": \"89\"\n"
" },\n"
" {\n"
" \"metric_name\": \"cpuCounter\",\n"
" \"metric_type\": \"gauge\",\n"
" \"metric_description\": \"\",\n"
" \"labels\": \"{agent=\\\"Unknown\\\",id=\\\"87\\\"}\",\n"
" \"labels\": \"{agent=\\\"Unknown\\\",assetId=\\\"asset id\\\",id=\\\"87\\\","
"metricName=\\\"CPU usage\\\"}\",\n"
" \"value\": \"1\"\n"
" },\n"
" {\n"
" \"metric_name\": \"cpuTotalCounter\",\n"
" \"metric_type\": \"counter\",\n"
" \"metric_description\": \"\",\n"
" \"labels\": \"{agent=\\\"Unknown\\\",id=\\\"87\\\"}\",\n"
" \"labels\": \"{agent=\\\"Unknown\\\",assetId=\\\"asset id\\\",id=\\\"87\\\","
"metricName=\\\"CPU usage\\\"}\",\n"
" \"value\": \"1\"\n"
" }\n"
" ]\n"
@@ -613,9 +626,12 @@ TEST_F(MetricTest, printPromeathus)
EXPECT_EQ(message_body, res);
}

TEST_F(MetricTest, printPromeathusMultiMap)
TEST_F(MetricTest, getPromeathusMultiMap)
{
conf.preload();
MetricScraper metric_scraper;
function<string()> get_metrics_func;
EXPECT_CALL(rest, addGetCall("service-metrics", _)).WillOnce(DoAll(SaveArg<1>(&get_metrics_func), Return(true)));
metric_scraper.init();

stringstream configuration;
configuration << "{\"agentSettings\":[{\"key\":\"prometheus\",\"id\":\"id1\",\"value\":\"true\"}]}\n";
@@ -628,18 +644,18 @@ TEST_F(MetricTest, printPromeathusMultiMap)
ReportIS::AudienceTeam::AGENT_CORE,
ReportIS::IssuingEngine::AGENT_CORE,
seconds(5),
true
true,
ReportIS::Audience::INTERNAL,
false,
"asset id"
);
metric.turnOnStream(GenericMetric::Stream::PROMETHEUS);
metric.registerListener();

HttpTransaction("/index.html", "GET", 10).notify();
HttpTransaction("/index2.html", "GET", 20).notify();
HttpTransaction("/index.html", "POST", 40).notify();

string message_body;
EXPECT_CALL(messaging_mock, sendSyncMessage(_, "/add-metrics", _, _, _))
.WillOnce(DoAll(SaveArg<2>(&message_body), Return(HTTPResponse())));
string message_body = get_metrics_func();
routine();

string res =
@@ -649,24 +665,156 @@ TEST_F(MetricTest, printPromeathusMultiMap)
" \"metric_name\": \"request.total\",\n"
" \"metric_type\": \"counter\",\n"
" \"metric_description\": \"\",\n"
" \"labels\": \"{agent=\\\"Unknown\\\",id=\\\"87\\\","
"method=\\\"GET\\\",url=\\\"/index.html\\\"}\",\n"
" \"labels\": \"{agent=\\\"Unknown\\\",assetId=\\\"asset id\\\",id=\\\"87\\\","
"metricName=\\\"Bytes per URL\\\",method=\\\"GET\\\",url=\\\"/index.html\\\"}\",\n"
" \"value\": \"1\"\n"
" },\n"
" {\n"
" \"metric_name\": \"request.total\",\n"
" \"metric_type\": \"counter\",\n"
" \"metric_description\": \"\",\n"
" \"labels\": \"{agent=\\\"Unknown\\\",id=\\\"87\\\","
"method=\\\"POST\\\",url=\\\"/index.html\\\"}\",\n"
" \"labels\": \"{agent=\\\"Unknown\\\",assetId=\\\"asset id\\\",id=\\\"87\\\","
"metricName=\\\"Bytes per URL\\\",method=\\\"POST\\\",url=\\\"/index.html\\\"}\",\n"
" \"value\": \"1\"\n"
" },\n"
" {\n"
" \"metric_name\": \"request.total\",\n"
" \"metric_type\": \"counter\",\n"
" \"metric_description\": \"\",\n"
" \"labels\": \"{agent=\\\"Unknown\\\",id=\\\"87\\\","
"method=\\\"GET\\\",url=\\\"/index2.html\\\"}\",\n"
" \"labels\": \"{agent=\\\"Unknown\\\",assetId=\\\"asset id\\\",id=\\\"87\\\","
"metricName=\\\"Bytes per URL\\\",method=\\\"GET\\\",url=\\\"/index2.html\\\"}\",\n"
" \"value\": \"1\"\n"
" }\n"
" ]\n"
"}";

EXPECT_EQ(message_body, res);
}

TEST_F(MetricTest, getPromeathusTwoMetrics)
{
MetricScraper metric_scraper;
function<string()> get_metrics_func;
EXPECT_CALL(rest, addGetCall("service-metrics", _)).WillOnce(DoAll(SaveArg<1>(&get_metrics_func), Return(true)));
metric_scraper.init();

stringstream configuration;
configuration << "{\"agentSettings\":[{\"key\":\"prometheus\",\"id\":\"id1\",\"value\":\"true\"}]}\n";

EXPECT_TRUE(Singleton::Consume<Config::I_Config>::from(conf)->loadConfiguration(configuration));

CPUMetric cpu_mt;
cpu_mt.init(
"CPU usage",
ReportIS::AudienceTeam::AGENT_CORE,
ReportIS::IssuingEngine::AGENT_CORE,
seconds(5),
false,
ReportIS::Audience::INTERNAL,
false,
"asset id"
);
cpu_mt.turnOffStream(GenericMetric::Stream::FOG);
cpu_mt.turnOffStream(GenericMetric::Stream::DEBUG);
cpu_mt.registerListener();

CPUEvent cpu_event;
cpu_event.setProcessCPU(89);
cpu_event.notify();

UrlMetric2 metric;
metric.init(
"Bytes per URL",
ReportIS::AudienceTeam::AGENT_CORE,
ReportIS::IssuingEngine::AGENT_CORE,
seconds(5),
true,
ReportIS::Audience::INTERNAL,
false,
"asset id"
);
metric.registerListener();

HttpTransaction("/index.html", "GET", 10).notify();
HttpTransaction("/index2.html", "GET", 20).notify();
HttpTransaction("/index.html", "POST", 40).notify();

string message_body = get_metrics_func();
routine();

string res =
"{\n"
" \"metrics\": [\n"
" {\n"
" \"metric_name\": \"request.total\",\n"
" \"metric_type\": \"counter\",\n"
" \"metric_description\": \"\",\n"
" \"labels\": \"{agent=\\\"Unknown\\\",assetId=\\\"asset id\\\",id=\\\"87\\\","
"metricName=\\\"Bytes per URL\\\",method=\\\"GET\\\",url=\\\"/index.html\\\"}\",\n"
" \"value\": \"1\"\n"
" },\n"
" {\n"
" \"metric_name\": \"request.total\",\n"
" \"metric_type\": \"counter\",\n"
" \"metric_description\": \"\",\n"
" \"labels\": \"{agent=\\\"Unknown\\\",assetId=\\\"asset id\\\",id=\\\"87\\\","
"metricName=\\\"Bytes per URL\\\",method=\\\"POST\\\",url=\\\"/index.html\\\"}\",\n"
" \"value\": \"1\"\n"
" },\n"
" {\n"
" \"metric_name\": \"request.total\",\n"
" \"metric_type\": \"counter\",\n"
" \"metric_description\": \"\",\n"
" \"labels\": \"{agent=\\\"Unknown\\\",assetId=\\\"asset id\\\",id=\\\"87\\\","
"metricName=\\\"Bytes per URL\\\",method=\\\"GET\\\",url=\\\"/index2.html\\\"}\",\n"
" \"value\": \"1\"\n"
" },\n"
" {\n"
" \"metric_name\": \"cpuMax\",\n"
" \"metric_type\": \"gauge\",\n"
" \"metric_description\": \"\",\n"
" \"labels\": \"{agent=\\\"Unknown\\\",assetId=\\\"asset id\\\",id=\\\"87\\\","
"metricName=\\\"CPU usage\\\"}\",\n"
" \"value\": \"89\"\n"
" },\n"
" {\n"
" \"metric_name\": \"cpuMin\",\n"
" \"metric_type\": \"gauge\",\n"
" \"metric_description\": \"\",\n"
" \"labels\": \"{agent=\\\"Unknown\\\",assetId=\\\"asset id\\\",id=\\\"87\\\","
"metricName=\\\"CPU usage\\\"}\",\n"
" \"value\": \"89\"\n"
" },\n"
" {\n"
" \"metric_name\": \"cpuAvg\",\n"
" \"metric_type\": \"gauge\",\n"
" \"metric_description\": \"\",\n"
" \"labels\": \"{agent=\\\"Unknown\\\",assetId=\\\"asset id\\\",id=\\\"87\\\","
"metricName=\\\"CPU usage\\\"}\",\n"
" \"value\": \"89\"\n"
" },\n"
" {\n"
" \"metric_name\": \"cpuCurrent\",\n"
" \"metric_type\": \"gauge\",\n"
" \"metric_description\": \"\",\n"
" \"labels\": \"{agent=\\\"Unknown\\\",assetId=\\\"asset id\\\",id=\\\"87\\\","
"metricName=\\\"CPU usage\\\"}\",\n"
" \"value\": \"89\"\n"
" },\n"
" {\n"
" \"metric_name\": \"cpuCounter\",\n"
" \"metric_type\": \"gauge\",\n"
" \"metric_description\": \"\",\n"
" \"labels\": \"{agent=\\\"Unknown\\\",assetId=\\\"asset id\\\",id=\\\"87\\\","
"metricName=\\\"CPU usage\\\"}\",\n"
" \"value\": \"1\"\n"
" },\n"
" {\n"
" \"metric_name\": \"cpuTotalCounter\",\n"
" \"metric_type\": \"counter\",\n"
" \"metric_description\": \"\",\n"
" \"labels\": \"{agent=\\\"Unknown\\\",assetId=\\\"asset id\\\",id=\\\"87\\\","
"metricName=\\\"CPU usage\\\"}\",\n"
" \"value\": \"1\"\n"
" }\n"
" ]\n"
@@ -37,7 +37,7 @@ services:
environment:
# adjust with your own email below
- user_email=user@email.com
- registered_server="APISIX Server"
- registered_server="APISIX"
- AGENT_TOKEN=<TOKEN>
volumes:
- ./appsec-config:/etc/cp/conf

@@ -28,7 +28,7 @@ services:
- user_email=${APPSEC_USER_EMAIL}
- AGENT_TOKEN=${APPSEC_AGENT_TOKEN}
- autoPolicyLoad=${APPSEC_AUTO_POLICY_LOAD}
- registered_server=APISIX Server
- registered_server=APISIX
ipc: shareable
restart: unless-stopped
volumes:
@@ -128,4 +128,4 @@ services:
# driver_opts:
# type: nfs
# o: addr=fs-abcdef.efs.eu-west-1.amazonaws.com,rw,nfsvers=4.1,rsize=1048576,wsize=1048576,hard,timeo=600,retrans=2,noresvport
# device: ":/"
# device: ":/"

deployment/docker-compose/envoy/.env (new file, 64 lines)
@@ -0,0 +1,64 @@
## .env file for docker-compose deployments of open-appsec integrated with Envoy
## for more info see https://docs.openappsec.io

APPSEC_VERSION=latest
APPSEC_CONFIG=./appsec-config
APPSEC_DATA=./appsec-data
APPSEC_LOGS=./appsec-logs
APPSEC_LOCALCONFIG=./appsec-localconfig

## Make sure the parameter APPSEC_AUTO_POLICY_LOAD is set to false when centrally managing
## open-appsec configuration via open-appsec Web UI.
## You can optionally set it to true when using local, declarative management for open-appsec,
## declarative configuration will then get applied automatically when changed.
APPSEC_AUTO_POLICY_LOAD=false

## Example for configuring HTTPS Proxy:
## APPSEC_HTTPS_PROXY=user:password@proxy_address:port
APPSEC_HTTPS_PROXY=

APPSEC_SMART_SYNC_STORAGE=./appsec-smartsync-storage
APPSEC_USER_EMAIL=user@email.com
APPSEC_DB_PASSWORD=pass
APPSEC_DB_USER=postgres
APPSEC_DB_HOST=appsec-db
APPSEC_POSTGRES_STORAGE=./appsec-postgres-data

## Make sure to have a valid envoy.yaml Envoy configuration file present in the path below.
## For deployment of a simple lab testing environment, you can deploy the example configuration provided
## for the vulnerable juice-shop container, see instructions further below.
ENVOY_CONFIG=./envoy-config/envoy.yaml

## The following two parameters are only relevant if you made a custom configuration for
## the amount of Envoy worker threads using the optional ENVOY_CONCURRENCY parameter (see also explanation in docker-compose.yaml),
## these are then required to make sure that the open-appsec attachment will create the right amount of transaction handlers.
## In this case you must set ENVOY_CONCURRENCY_CALC to "custom" and provide the specified amount of Envoy worker
## threads via "ENVOY_CONCURRENCY_NUMBER".
## Possible values for ENVOY_CONCURRENCY_CALC: "numOfCores" (default), "custom" (allows to set the configured Envoy worker
## thread amount using the ENVOY_CONCURRENCY_NUMBER parameter)
ENVOY_CONCURRENCY_CALC=numOfCores
ENVOY_CONCURRENCY_NUMBER=""

## To connect your deployment to central open-appsec WebUI provide the token for a profile
## which you created in open-appsec WebUI at https://my.openappsec.io
## Example: APPSEC_AGENT_TOKEN=111-22222-111
APPSEC_AGENT_TOKEN=

## Important: When not providing token for connection to central WebUI:
## Make sure to add the value "standalone" to the COMPOSE_PROFILES value, this will enable
## sharing of learning between processes and allow you to perform tuning locally on CLI
COMPOSE_PROFILES=

## JUICE SHOP DEMO CONTAINER:
## In order to deploy the optional, additional, vulnerable juiceshop container (for demo and testing purposes only!):
## Add the value "juiceshop" to the COMPOSE_PROFILES value above.

## Make sure to also adjust the envoy.yaml file in ENVOY_CONFIG path
## to add a routing configuration for forwarding external traffic on e.g. port 80 to the juiceshop-backend container
## you can use the example file available here:
## https://raw.githubusercontent.com/openappsec/openappsec/examples/juiceshop/envoy/envoy.yaml
## place the file above in ENVOY_CONFIG path
## note that juiceshop container listens on HTTP port 3000 by default

## Note that COMPOSE_PROFILES can also receive multiple values, e.g. as shown here:
## COMPOSE_PROFILES=standalone,juiceshop

@@ -0,0 +1,62 @@
policies:
default:
triggers:
- appsec-default-log-trigger
mode: prevent-learn
practices:
- webapp-default-practice
custom-response: appsec-default-web-user-response
specific-rules: []

practices:
- name: webapp-default-practice
openapi-schema-validation:
configmap: []
override-mode: as-top-level
snort-signatures:
configmap: []
override-mode: as-top-level
web-attacks:
max-body-size-kb: 1000000
max-header-size-bytes: 102400
max-object-depth: 40
max-url-size-bytes: 32768
minimum-confidence: critical
override-mode: as-top-level
protections:
csrf-protection: inactive
error-disclosure: inactive
non-valid-http-methods: false
open-redirect: inactive
anti-bot:
injected-URIs: []
validated-URIs: []
override-mode: as-top-level

log-triggers:
- name: appsec-default-log-trigger
access-control-logging:
allow-events: false
drop-events: true
additional-suspicious-events-logging:
enabled: true
minimum-severity: high
response-body: false
appsec-logging:
all-web-requests: false
detect-events: true
prevent-events: true
extended-logging:
http-headers: false
request-body: false
url-path: false
url-query: false
log-destination:
cloud: true
stdout:
format: json

custom-responses:
- name: appsec-default-web-user-response
mode: response-code-only
http-response-code: 403

@@ -28,7 +28,7 @@ services:
- user_email=${APPSEC_USER_EMAIL}
- AGENT_TOKEN=${APPSEC_AGENT_TOKEN}
- autoPolicyLoad=${APPSEC_AUTO_POLICY_LOAD}
- registered_server="Envoy Server"
- registered_server="Envoy"
ipc: shareable
restart: unless-stopped
volumes:
@@ -39,8 +39,7 @@ services:
command: /cp-nano-agent

appsec-envoy:
image: openappsec-envoy:${APPSEC_VERSION}
# for docs: image: ghcr.io/openappsec/envoy-attachment:${APPSEC_VERSION}
image: ghcr.io/openappsec/envoy-attachment:${APPSEC_VERSION}
container_name: appsec-envoy
ipc: service:appsec-agent
restart: unless-stopped
@@ -51,7 +50,9 @@ services:
volumes:
- ${ENVOY_CONFIG}:/envoy.yaml
command: -c /envoy.yaml
## adjustment of threads is possible as follows:
## If required you can adjust the amount of worker threads envoy will run by commenting out the line above and uncommenting the line below
## then specify ENVOY_CONCURRENCY parameter with the desired thread amount in the .env file.
## By default there's one worker thread per virtual CPU (vCPU) core available on the machine.
# command: -c /envoy.yaml --concurrency ${ENVOY_CONCURRENCY}

ports:
@@ -119,9 +120,12 @@ services:

## example juice-shop backend container (vulnerable webserver, USE ONLY FOR TESTING AND IN LAB ENV)
juiceshop-backend:
profiles:
- juiceshop
image: bkimminich/juice-shop:latest
container_name: juiceshop-backend

## advanced configuration: learning_nfs volume for nfs storage in shared_storage container
##
## when configuring nfs storage in shared_storage container configuration above, make sure to also specify learning_nfs volume (see example below for using AWS EFS storage)

@@ -53,4 +53,4 @@ static_resources:
address:
socket_address:
address: juiceshop-backend
port_value: 3000
port_value: 3000
Some files were not shown because too many files have changed in this diff.