Mirror of https://github.com/openappsec/openappsec.git (synced 2025-11-19 10:34:26 +03:00)
Compare commits
24 Commits
orianelou-...orianelou-
| Author | SHA1 | Date |
|---|---|---|
|  | 161b6dd180 |  |
|  | 84327e0b19 |  |
|  | b9723ba6ce |  |
|  | 00e183b8c6 |  |
|  | e859c167ed |  |
|  | 384b59cc87 |  |
|  | 805e958cb9 |  |
|  | 5bcd7cfcf1 |  |
|  | ae6f2faeec |  |
|  | 705a5e6061 |  |
|  | c33b74a970 |  |
|  | 2da9fbc385 |  |
|  | f58e9a6128 |  |
|  | 57ea5c72c5 |  |
|  | 962bd31d46 |  |
|  | 01770475ec |  |
|  | 78b114a274 |  |
|  | 81b1aec487 |  |
|  | fd5d093b24 |  |
|  | d6debf8d8d |  |
|  | 395b754575 |  |
|  | dc000372c4 |  |
|  | 941c641174 |  |
|  | fdc148aa9b |  |
@@ -1,7 +1,7 @@
 cmake_minimum_required (VERSION 2.8.4)
 project (ngen)
 
-set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fPIC -Wall -Wno-terminate")
+set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -O2 -fPIC -Wall -Wno-terminate")
 
 execute_process(COMMAND grep -c "Alpine Linux" /etc/os-release OUTPUT_VARIABLE IS_ALPINE)
 if(NOT IS_ALPINE EQUAL "0")
@@ -155,6 +155,24 @@ getWaitingForVerdictThreadTimeout()
     return conf_data.getNumericalValue("waiting_for_verdict_thread_timeout_msec");
 }
 
+unsigned int
+getMinRetriesForVerdict()
+{
+    return conf_data.getNumericalValue("min_retries_for_verdict");
+}
+
+unsigned int
+getMaxRetriesForVerdict()
+{
+    return conf_data.getNumericalValue("max_retries_for_verdict");
+}
+
+unsigned int
+getReqBodySizeTrigger()
+{
+    return conf_data.getNumericalValue("body_size_trigger");
+}
+
 int
 isIPAddress(c_str ip_str)
 {
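The three new getters only expose numbers from the attachment's JSON configuration; this diff does not show how the nginx attachment consumes them. As a rough illustration of the intended knobs (minimum/maximum verdict-polling attempts plus a request-body size threshold), here is a small standalone sketch — the loop shape, the `fetch_verdict` callback, and the hard-coded values are assumptions for the example, not the attachment's real code.

```cpp
#include <cstddef>
#include <functional>
#include <iostream>

enum class Verdict { Inspect, Accept, Drop };

// Stand-ins for the values normally read via getMinRetriesForVerdict(),
// getMaxRetriesForVerdict() and getReqBodySizeTrigger().
constexpr unsigned int kMinRetries = 1;
constexpr unsigned int kMaxRetries = 3;
constexpr std::size_t kBodySizeTrigger = 777;

// Poll for a verdict: always allow at least kMinRetries attempts, and only keep
// polling up to kMaxRetries when the request body is large enough to matter.
Verdict
pollVerdict(std::size_t body_size, const std::function<Verdict()> &fetch_verdict)
{
    unsigned int max_attempts = (body_size >= kBodySizeTrigger) ? kMaxRetries : kMinRetries;
    Verdict verdict = Verdict::Inspect;
    for (unsigned int attempt = 0; attempt < max_attempts; ++attempt) {
        verdict = fetch_verdict();
        if (verdict != Verdict::Inspect) break;   // final verdict reached
    }
    return verdict;
}

int
main()
{
    int calls = 0;
    auto fake_fetch = [&calls]() {
        // Pretend the agent answers on the second attempt.
        return (++calls >= 2) ? Verdict::Accept : Verdict::Inspect;
    };
    Verdict v = pollVerdict(1000, fake_fetch);
    std::cout << "verdict after " << calls << " attempts: "
              << (v == Verdict::Accept ? "accept" : "other") << "\n";
    return 0;
}
```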
@@ -63,7 +63,10 @@ TEST_F(HttpAttachmentUtilTest, GetValidAttachmentConfiguration)
         "\"waiting_for_verdict_thread_timeout_msec\": 75,\n"
         "\"req_header_thread_timeout_msec\": 10,\n"
         "\"ip_ranges\": " + createIPRangesString(ip_ranges) + ",\n"
-        "\"static_resources_path\": \"" + static_resources_path + "\""
+        "\"static_resources_path\": \"" + static_resources_path + "\",\n"
+        "\"min_retries_for_verdict\": 1,\n"
+        "\"max_retries_for_verdict\": 3,\n"
+        "\"body_size_trigger\": 777\n"
         "}\n";
     ofstream valid_configuration_file(attachment_configuration_file_name);
     valid_configuration_file << valid_configuration;
@@ -87,6 +90,9 @@ TEST_F(HttpAttachmentUtilTest, GetValidAttachmentConfiguration)
     EXPECT_EQ(getReqBodyThreadTimeout(), 155);
     EXPECT_EQ(getResHeaderThreadTimeout(), 1);
     EXPECT_EQ(getResBodyThreadTimeout(), 0);
+    EXPECT_EQ(getMinRetriesForVerdict(), 1);
+    EXPECT_EQ(getMaxRetriesForVerdict(), 3);
+    EXPECT_EQ(getReqBodySizeTrigger(), 777);
     EXPECT_EQ(getWaitingForVerdictThreadTimeout(), 75);
     EXPECT_EQ(getInspectionMode(), ngx_http_inspection_mode::BLOCKING_THREAD);
 
@@ -11,6 +11,7 @@ var_fog_address=
 var_proxy=
 var_mode=
 var_token=
+var_ignore=
 init=
 
 if [ ! -f /nano-service-installers/$ORCHESTRATION_INSTALLATION_SCRIPT ]; then
@@ -33,6 +34,8 @@ while true; do
         var_proxy="$1"
     elif [ "$1" == "--hybrid-mode" ] || [ "$1" == "--standalone" ]; then
         var_mode="--hybrid_mode"
+    elif [ "$1" == "--no-upgrade" ]; then
+        var_ignore="--ignore all"
     elif [ "$1" == "--token" ]; then
         shift
         var_token="$1"
@@ -60,6 +63,9 @@ fi
 if [ ! -z $var_mode ]; then
     orchestration_service_installation_flags="$orchestration_service_installation_flags $var_mode"
 fi
+if [ ! -z "$var_ignore" ]; then
+    orchestration_service_installation_flags="$orchestration_service_installation_flags $var_ignore"
+fi
 
 
 /nano-service-installers/$ORCHESTRATION_INSTALLATION_SCRIPT --install $orchestration_service_installation_flags
File diff suppressed because one or more lines are too long
@@ -4,7 +4,6 @@ add_subdirectory(signal_handler)
 add_subdirectory(gradual_deployment)
 add_subdirectory(packet)
 add_subdirectory(pending_key)
-add_subdirectory(health_check_manager)
 
 add_subdirectory(utils)
 add_subdirectory(attachment-intakers)
@@ -1698,7 +1698,7 @@ private:
             }
         };
         mainloop->addFileRoutine(
-            I_MainLoop::RoutineType::RealTime,
+            I_MainLoop::RoutineType::System,
             server_sock,
             accept_attachment_routine,
             "Nginx Attachment registration listener",
@@ -42,6 +42,7 @@ HttpAttachmentConfig::init()
     setNumOfNginxIpcElements();
     setDebugByContextValues();
     setKeepAliveIntervalMsec();
+    setRetriesForVerdict();
 }
 
 bool
@@ -215,6 +216,31 @@ HttpAttachmentConfig::setFailOpenTimeout()
     conf_data.setNumericalValue("nginx_inspection_mode", inspection_mode);
 }
 
+void
+HttpAttachmentConfig::setRetriesForVerdict()
+{
+    conf_data.setNumericalValue("min_retries_for_verdict", getAttachmentConf<uint>(
+        3,
+        "agent.minRetriesForVerdict.nginxModule",
+        "HTTP manager",
+        "Min retries for verdict"
+    ));
+
+    conf_data.setNumericalValue("max_retries_for_verdict", getAttachmentConf<uint>(
+        15,
+        "agent.maxRetriesForVerdict.nginxModule",
+        "HTTP manager",
+        "Max retries for verdict"
+    ));
+
+    conf_data.setNumericalValue("body_size_trigger", getAttachmentConf<uint>(
+        200000,
+        "agent.reqBodySizeTrigger.nginxModule",
+        "HTTP manager",
+        "Request body size trigger"
+    ));
+}
+
 void
 HttpAttachmentConfig::setFailOpenWaitMode()
 {
@@ -70,6 +70,8 @@ private:
 
     void setDebugByContextValues();
 
+    void setRetriesForVerdict();
+
     WebTriggerConf web_trigger_conf;
     HttpAttachmentConfiguration conf_data;
 };
@@ -306,17 +306,20 @@ UsersAllIdentifiersConfig::parseXForwardedFor(const string &str) const
 void
 UsersAllIdentifiersConfig::setXFFValuesToOpaqueCtx(const HttpHeader &header, ExtractType type) const
 {
+    auto i_transaction_table = Singleton::Consume<I_TableSpecific<SessionID>>::by<NginxAttachment>();
+    if (!i_transaction_table || !i_transaction_table->hasState<NginxAttachmentOpaque>()) {
+        dbgTrace(D_NGINX_ATTACHMENT_PARSER) << "Can't get the transaction table";
+        return;
+    }
+    NginxAttachmentOpaque &opaque = i_transaction_table->getState<NginxAttachmentOpaque>();
+    opaque.setSavedData(HttpTransactionData::xff_vals_ctx, header.getValue());
+    dbgTrace(D_NGINX_ATTACHMENT_PARSER) << "xff found, value from header: " << static_cast<string>(header.getValue());
     auto value = parseXForwardedFor(header.getValue());
     if (!value.ok()) {
         dbgTrace(D_NGINX_ATTACHMENT_PARSER) << "Could not extract source identifier from X-Forwarded-For header";
         return;
     };
-    auto i_transaction_table = Singleton::Consume<I_TableSpecific<SessionID>>::by<NginxAttachment>();
-    if (!i_transaction_table || !i_transaction_table->hasState<NginxAttachmentOpaque>()) {
-        dbgDebug(D_NGINX_ATTACHMENT_PARSER) << "Can't get the transaction table";
-        return;
-    }
-    NginxAttachmentOpaque &opaque = i_transaction_table->getState<NginxAttachmentOpaque>();
+
     if (type == ExtractType::SOURCEIDENTIFIER) {
         opaque.setSourceIdentifier(header.getKey(), value.unpack());
         dbgDebug(D_NGINX_ATTACHMENT_PARSER)
@@ -1,8 +0,0 @@
-include_directories(${CMAKE_SOURCE_DIR}/components/include)
-link_directories(${BOOST_ROOT}/lib)
-
-add_unit_test(
-    health_check_manager_ut
-    "health_check_manager_ut.cc"
-    "singleton;messaging;mainloop;health_check_manager;event_is;metric;-lboost_regex"
-)
@@ -24,7 +24,8 @@ class ExternalSdkServer
         :
         public Component,
         Singleton::Provide<I_ExternalSdkServer>,
-        Singleton::Consume<I_RestApi>
+        Singleton::Consume<I_RestApi>,
+        Singleton::Consume<I_Messaging>
 {
 public:
     ExternalSdkServer();
@@ -15,7 +15,8 @@ class HttpGeoFilter
         public Component,
         Singleton::Consume<I_MainLoop>,
         Singleton::Consume<I_GeoLocation>,
-        Singleton::Consume<I_GenericRulebase>
+        Singleton::Consume<I_GenericRulebase>,
+        Singleton::Consume<I_Environment>
 {
 public:
     HttpGeoFilter();
@@ -136,6 +136,7 @@ public:
     static const std::string req_body;
     static const std::string source_identifier;
     static const std::string proxy_ip_ctx;
+    static const std::string xff_vals_ctx;
 
     static const CompressionType default_response_content_encoding;
 
@@ -31,6 +31,7 @@ public:
     virtual bool isReverseProxy() = 0;
     virtual bool isCloudStorageEnabled() = 0;
    virtual Maybe<std::tuple<std::string, std::string, std::string>> parseNginxMetadata() = 0;
+    virtual Maybe<std::tuple<std::string, std::string, std::string, std::string, std::string>> readCloudMetadata() = 0;
     virtual std::map<std::string, std::string> getResolvedDetails() = 0;
 #if defined(gaia) || defined(smb)
     virtual bool compareCheckpointVersion(int cp_version, std::function<bool(int, int)> compare_operator) const = 0;
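`readCloudMetadata()` returns a `Maybe` wrapping a tuple of five strings; the diff does not say what the five fields mean, so the names in the sketch below are placeholders. A standalone example of unpacking such a tuple with `std::tie`, using `std::optional` in place of the repository's `Maybe`:

```cpp
#include <iostream>
#include <optional>
#include <string>
#include <tuple>

using CloudMetadata = std::tuple<std::string, std::string, std::string, std::string, std::string>;

// Stand-in for I_DetailsResolver::readCloudMetadata(); the field meanings are hypothetical.
std::optional<CloudMetadata>
readCloudMetadata()
{
    return CloudMetadata{"aws", "us-east-1", "i-0123456789", "vpc-abc", "10.0.0.12"};
}

int
main()
{
    auto metadata = readCloudMetadata();
    if (!metadata) {
        std::cout << "no cloud metadata available\n";
        return 0;
    }
    std::string provider, region, instance_id, network, address;
    std::tie(provider, region, instance_id, network, address) = *metadata;
    std::cout << provider << " / " << region << " / " << instance_id << "\n";
    return 0;
}
```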
@@ -64,7 +64,7 @@ public:
         const std::string &service_id
     ) = 0;
 
-    virtual std::map<std::string, PortNumber> getServiceToPortMap() = 0;
+    virtual std::map<std::string, std::vector<PortNumber>> getServiceToPortMap() = 0;
 
 protected:
     virtual ~I_ServiceController() {}
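The `getServiceToPortMap()` change means a service can now be bound to several ports instead of exactly one, so callers that previously read a single `PortNumber` per service have to iterate a vector. A minimal standalone sketch, with `PortNumber` assumed to be an integer alias and the service names invented:

```cpp
#include <cstdint>
#include <iostream>
#include <map>
#include <string>
#include <vector>

using PortNumber = uint16_t;   // assumption: the real alias is an integral port type

int
main()
{
    // Shape returned by the revised getServiceToPortMap().
    std::map<std::string, std::vector<PortNumber>> service_to_ports = {
        {"http-transaction-handler", {7777, 7778}},
        {"orchestration", {7779}},
    };

    for (const auto &entry : service_to_ports) {
        std::cout << entry.first << ":";
        for (PortNumber port : entry.second) std::cout << " " << port;
        std::cout << "\n";
    }
    return 0;
}
```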
@@ -36,7 +36,6 @@ public:
             title,
             audience_team,
             obj,
-            false,
             MessageCategory::GENERIC,
             std::forward<Args>(args)...
         )
@@ -48,26 +47,6 @@ public:
         const std::string &title,
         const ReportIS::AudienceTeam &audience_team,
         const T &obj,
-        bool is_async_message,
-        Args ...args)
-            :
-        ReportMessaging(
-            title,
-            audience_team,
-            obj,
-            is_async_message,
-            MessageCategory::GENERIC,
-            std::forward<Args>(args)...
-        )
-    {
-    }
-
-    template <typename ...Args, typename T>
-    ReportMessaging(
-        const std::string &title,
-        const ReportIS::AudienceTeam &audience_team,
-        const T &obj,
-        bool is_async_message,
         const MessageCategory &message_type,
         Args ...args)
             :
@@ -77,7 +56,6 @@ public:
             ReportIS::Severity::INFO,
             ReportIS::Priority::LOW,
             obj,
-            is_async_message,
             message_type,
             std::forward<Args>(args)...
         )
@@ -99,7 +77,6 @@ public:
             severity,
             priority,
             obj,
-            false,
             MessageCategory::GENERIC,
             std::forward<Args>(args)...
         )
@@ -114,7 +91,6 @@ public:
         const ReportIS::Severity &severity,
         const ReportIS::Priority &priority,
         const T &obj,
-        bool _is_async_message,
         const MessageCategory &message_type,
         Args ...args)
             :
@@ -131,7 +107,6 @@ public:
             std::chrono::seconds(0),
             std::forward<Args>(args)...
         ),
-        is_async_message(_is_async_message),
         message_type_tag(message_type)
     {
         report << LogField("eventObject", obj);
@@ -141,11 +116,13 @@ public:
 
     ReportMessaging & operator<<(const LogField &field);
 
+    Maybe<void, HTTPResponse> sendReportSynchronously();
+
     void setForceBuffering(bool _force_buffering);
 
 private:
     Report report;
-    bool is_async_message;
+    bool is_async_message = true;
     bool force_buffering = false;
     MessageCategory message_type_tag;
 };
@@ -24,6 +24,7 @@ static const string url = "/api/v1/agents/events";
 ReportMessaging::~ReportMessaging()
 {
     if (!Singleton::exists<I_Messaging>()) return;
+    if (!is_async_message) return;
 
     LogRest log_rest(report);
 
@@ -47,6 +48,25 @@ ReportMessaging::operator<<(const LogField &field)
     return *this;
 }
 
+class LogRestWithReply : public LogRest
+{
+public:
+    LogRestWithReply(const Report &report) : LogRest(report) {}
+
+    bool loadJson(const string &) const { return true; }
+};
+
+Maybe<void, HTTPResponse>
+ReportMessaging::sendReportSynchronously()
+{
+    is_async_message = false;
+
+    LogRestWithReply log_rest(report);
+
+    auto messaging = Singleton::Consume<I_Messaging>::by<ReportMessaging>();
+    return messaging->sendSyncMessage(HTTPMethod::POST, url, log_rest, message_type_tag);
+}
+
 void
 ReportMessaging::setForceBuffering(bool _force_buffering)
 {
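With the constructor's `is_async_message` flag removed, a report is asynchronous by default and callers opt into synchronous delivery by calling `sendReportSynchronously()` and inspecting the returned `Maybe<void, HTTPResponse>`. The standalone sketch below imitates that calling pattern with `std::optional<std::string>` standing in for the error side of `Maybe` and a trivial fake sender; it is not the agent's real messaging stack.

```cpp
#include <iostream>
#include <optional>
#include <string>

// Simplified stand-in: an empty optional means success, otherwise the HTTP error text.
using SendError = std::optional<std::string>;

struct FakeReport {
    std::string title;

    // Mirrors the shape of ReportMessaging::sendReportSynchronously():
    // mark the report as no longer async and return the delivery outcome.
    SendError sendReportSynchronously(bool server_reachable) {
        async = false;
        if (!server_reachable) return std::string("HTTP 503 Service Unavailable");
        return std::nullopt;
    }

    bool async = true;   // matches the new default `is_async_message = true`
};

int
main()
{
    FakeReport report{"test"};
    auto result = report.sendReportSynchronously(/*server_reachable=*/false);
    if (result) {
        std::cout << "synchronous send failed: " << *result << "\n";
        // A caller could fall back to buffering or asynchronous delivery here.
    } else {
        std::cout << "report delivered\n";
    }
    return 0;
}
```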
@@ -103,7 +103,48 @@ TEST_F(ReportMessagingTest, title_only)
             _
         )
     ).Times(1);
-    ReportMessaging("test", ReportIS::AudienceTeam::AGENT_CORE, 1, true, ReportIS::Tags::ACCESS_CONTROL);
+    ReportMessaging("test", ReportIS::AudienceTeam::AGENT_CORE, 1, ReportIS::Tags::ACCESS_CONTROL);
+}
+
+TEST_F(ReportMessagingTest, sync_sending)
+{
+    EXPECT_CALL(
+        mock_messaging,
+        sendSyncMessage(
+            _,
+            _,
+            "{\n"
+            " \"log\": {\n"
+            " \"eventTime\": \"Best Time ever\",\n"
+            " \"eventName\": \"test\",\n"
+            " \"eventSeverity\": \"Info\",\n"
+            " \"eventPriority\": \"Low\",\n"
+            " \"eventType\": \"Event Driven\",\n"
+            " \"eventLevel\": \"Log\",\n"
+            " \"eventLogLevel\": \"info\",\n"
+            " \"eventAudience\": \"Internal\",\n"
+            " \"eventAudienceTeam\": \"Agent Core\",\n"
+            " \"eventFrequency\": 0,\n"
+            " \"eventTags\": [\n"
+            " \"Access Control\"\n"
+            " ],\n"
+            " \"eventSource\": {\n"
+            " \"eventTraceId\": \"\",\n"
+            " \"eventSpanId\": \"\",\n"
+            " \"issuingEngineVersion\": \"\",\n"
+            " \"serviceName\": \"Unnamed Nano Service\"\n"
+            " },\n"
+            " \"eventData\": {\n"
+            " \"eventObject\": 1\n"
+            " }\n"
+            " }\n"
+            "}",
+            _,
+            _
+        )
+    ).WillOnce(Return(HTTPResponse(HTTPStatusCode::HTTP_OK, "response!!")));
+    ReportMessaging report("test", ReportIS::AudienceTeam::AGENT_CORE, 1, ReportIS::Tags::ACCESS_CONTROL);
+    EXPECT_TRUE(report.sendReportSynchronously().ok());
 }
 
 TEST_F(ReportMessagingTest, with_buffering)
@@ -144,7 +185,7 @@ TEST_F(ReportMessagingTest, with_buffering)
             true
         )
     ).Times(1);
-    ReportMessaging report("test", ReportIS::AudienceTeam::AGENT_CORE, 1, true, ReportIS::Tags::ACCESS_CONTROL);
+    ReportMessaging report("test", ReportIS::AudienceTeam::AGENT_CORE, 1, ReportIS::Tags::ACCESS_CONTROL);
     report.setForceBuffering(true);
 }
 
@@ -1 +1,5 @@
+include_directories(../waap/include)
+include_directories(../waap/waap_clib)
+include_directories(../../attachment-intakers/nginx_attachment)
+
 add_library(http_geo_filter http_geo_filter.cc)
@@ -4,10 +4,16 @@
 #include <unistd.h>
 #include <stddef.h>
 #include <algorithm>
+#include <sstream>
+#include <string>
+#include <vector>
+#include <boost/algorithm/string.hpp>
 
+#include "cidrs_data.h"
 #include "generic_rulebase/generic_rulebase.h"
 #include "generic_rulebase/parameters_config.h"
 #include "generic_rulebase/triggers_config.h"
+#include "user_identifiers_config.h"
 #include "debug.h"
 #include "config.h"
 #include "rest.h"
@@ -21,9 +27,10 @@ USE_DEBUG_FLAG(D_GEO_FILTER);
 
 static const LogTriggerConf default_triger;
 
-class HttpGeoFilter::Impl : public Listener<NewHttpTransactionEvent>
+class HttpGeoFilter::Impl : public Listener<HttpRequestHeaderEvent>
 {
 public:
 
     void
     init()
     {
@@ -55,32 +62,42 @@ public:
     }
 
     EventVerdict
-    respond(const NewHttpTransactionEvent &event) override
+    respond(const HttpRequestHeaderEvent &event) override
     {
         dbgTrace(D_GEO_FILTER) << getListenerName() << " new transaction event";
 
-        if (!ParameterException::isGeoLocationExceptionExists() &&
-            !getConfiguration<GeoConfig>("rulebase", "httpGeoFilter").ok()
-        ) {
-            dbgTrace(D_GEO_FILTER) << "No geo location practice nor exception was found. Returning default verdict";
+        if (!event.isLastHeader()) return EventVerdict(ngx_http_cp_verdict_e::TRAFFIC_VERDICT_INSPECT);
+        std::set<std::string> xff_set;
+        auto env = Singleton::Consume<I_Environment>::by<HttpGeoFilter>();
+        auto maybe_xff = env->get<std::string>(HttpTransactionData::xff_vals_ctx);
+        if (!maybe_xff.ok()) {
+            dbgTrace(D_GEO_FILTER) << "failed to get xff vals from env";
+        } else {
+            xff_set = split(maybe_xff.unpack(), ',');
+        }
+        dbgDebug(D_GEO_FILTER) << getListenerName() << " last header, start lookup";
+
+        if (xff_set.size() > 0) {
+            removeTrustedIpsFromXff(xff_set);
+        } else {
+            dbgDebug(D_GEO_FILTER) << "xff not found in headers";
+        }
+
+        auto maybe_source_ip = env->get<IPAddr>(HttpTransactionData::client_ip_ctx);
+        if (!maybe_source_ip.ok()) {
+            dbgWarning(D_GEO_FILTER) << "failed to get source ip from env";
             return EventVerdict(default_action);
         }
 
-        I_GeoLocation *i_geo_location = Singleton::Consume<I_GeoLocation>::by<HttpGeoFilter>();
-        auto asset_location = i_geo_location->lookupLocation(event.getSourceIP());
-        if (!asset_location.ok()) {
-            dbgTrace(D_GEO_FILTER) << "Lookup location failed, Error: " << asset_location.getErr();
-            return EventVerdict(default_action);
-        }
-
-        EnumArray<I_GeoLocation::GeoLocationField, std::string> geo_location_data = asset_location.unpack();
-
-        ngx_http_cp_verdict_e exception_verdict = getExceptionVerdict(event, geo_location_data);
+        auto source_ip = convertIpAddrToString(maybe_source_ip.unpack());
+        xff_set.insert(source_ip);
+
+        ngx_http_cp_verdict_e exception_verdict = getExceptionVerdict(xff_set);
         if (exception_verdict != ngx_http_cp_verdict_e::TRAFFIC_VERDICT_IRRELEVANT) {
             return EventVerdict(exception_verdict);
         }
 
-        ngx_http_cp_verdict_e geo_lookup_verdict = getGeoLookupVerdict(event, geo_location_data);
+        ngx_http_cp_verdict_e geo_lookup_verdict = getGeoLookupVerdict(xff_set);
         if (geo_lookup_verdict != ngx_http_cp_verdict_e::TRAFFIC_VERDICT_IRRELEVANT) {
             return EventVerdict(geo_lookup_verdict);
         }
@@ -88,6 +105,73 @@ public:
     }
 
 private:
+    std::set<std::string>
+    split(const std::string& s, char delim) {
+        std::set<std::string> elems;
+        std::stringstream ss(s);
+        std::string value;
+        while (std::getline(ss, value, delim)) {
+            elems.insert(trim(value));
+        }
+        return elems;
+    }
+
+    static inline std::string &ltrim(std::string &s) {
+        s.erase(s.begin(), std::find_if(s.begin(), s.end(),
+            [] (char c) { return !std::isspace(c); }));
+        return s;
+    }
+
+    // trim from end
+    static inline std::string &rtrim(std::string &s) {
+        s.erase(std::find_if(s.rbegin(), s.rend(),
+            [] (char c) { return !std::isspace(c); }).base(), s.end());
+        return s;
+    }
+
+    // trim from both ends
+    static inline std::string &trim(std::string &s) {
+        return ltrim(rtrim(s));
+    }
+
+    void
+    removeTrustedIpsFromXff(std::set<std::string> &xff_set)
+    {
+        auto identify_config = getConfiguration<UsersAllIdentifiersConfig>(
+            "rulebase",
+            "usersIdentifiers"
+        );
+        if (!identify_config.ok()) {
+            dbgDebug(D_GEO_FILTER) << "did not find users identifiers definition in policy";
+        } else {
+            auto trusted_ips = (*identify_config).getHeaderValuesFromConfig("x-forwarded-for");
+            for (auto it = xff_set.begin(); it != xff_set.end();) {
+                if (isIpTrusted(*it, trusted_ips)) {
+                    dbgTrace(D_GEO_FILTER) << "xff value is in trusted ips: " << *it;
+                    it = xff_set.erase(it);
+                } else {
+                    dbgTrace(D_GEO_FILTER) << "xff value is not in trusted ips: " << *it;
+                    ++it;
+                }
+            }
+        }
+    }
+
+    bool
+    isIpTrusted(const string &ip, const vector<string> &trusted_ips)
+    {
+        for (const auto &trusted_ip : trusted_ips) {
+            CIDRSData cidr_data(trusted_ip);
+            if (
+                ip == trusted_ip ||
+                (cidr_data.contains(ip))
+            ) {
+                return true;
+            }
+        }
+        return false;
+    }
+
     string
     convertIpAddrToString(const IPAddr &ip_to_convert)
     {
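The `split()`/`trim()` helpers above turn a comma-separated X-Forwarded-For value into a de-duplicated set of whitespace-trimmed tokens. This standalone snippet reproduces that behavior with the same standard-library calls so it can be run on its own (the header value is made up):

```cpp
#include <algorithm>
#include <cctype>
#include <iostream>
#include <set>
#include <sstream>
#include <string>

// Trim leading and trailing whitespace, same approach as the ltrim/rtrim helpers.
static std::string trim(std::string s)
{
    s.erase(s.begin(), std::find_if(s.begin(), s.end(), [](unsigned char c) { return !std::isspace(c); }));
    s.erase(std::find_if(s.rbegin(), s.rend(), [](unsigned char c) { return !std::isspace(c); }).base(), s.end());
    return s;
}

// Split on a delimiter into a set, trimming every element.
static std::set<std::string> split(const std::string &s, char delim)
{
    std::set<std::string> elems;
    std::stringstream ss(s);
    std::string value;
    while (std::getline(ss, value, delim)) elems.insert(trim(value));
    return elems;
}

int
main()
{
    // Example X-Forwarded-For chain (addresses are illustrative).
    std::string xff = "203.0.113.7, 10.0.0.1 , 203.0.113.7";
    for (const auto &ip : split(xff, ',')) std::cout << "'" << ip << "'\n";
    // Prints two unique, trimmed entries: 10.0.0.1 and 203.0.113.7.
    return 0;
}
```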
@@ -117,54 +201,75 @@ private:
     }
 
     ngx_http_cp_verdict_e
-    getGeoLookupVerdict(
-        const NewHttpTransactionEvent &event,
-        const EnumArray<I_GeoLocation::GeoLocationField, std::string> &geo_location_data)
+    getGeoLookupVerdict(const std::set<std::string> &sources)
     {
         auto maybe_geo_config = getConfiguration<GeoConfig>("rulebase", "httpGeoFilter");
         if (!maybe_geo_config.ok()) {
-            dbgWarning(D_GEO_FILTER) << "Failed to load HTTP Geo Filter config. Error:" << maybe_geo_config.getErr();
+            dbgTrace(D_GEO_FILTER) << "Failed to load HTTP Geo Filter config. Error:" << maybe_geo_config.getErr();
             return ngx_http_cp_verdict_e::TRAFFIC_VERDICT_IRRELEVANT;
         }
         GeoConfig geo_config = maybe_geo_config.unpack();
-        string country_code = geo_location_data[I_GeoLocation::GeoLocationField::COUNTRY_CODE];
-
-        if (geo_config.isAllowedCountry(country_code)) {
-            dbgTrace(D_GEO_FILTER)
-                << "geo verdict ACCEPT, practice id: "
-                << geo_config.getId()
-                << ", country code: "
-                << country_code;
-            generateVerdictLog(
-                ngx_http_cp_verdict_e::TRAFFIC_VERDICT_ACCEPT,
-                event,
-                geo_config.getId(),
-                true,
-                geo_location_data
-            );
-            return ngx_http_cp_verdict_e::TRAFFIC_VERDICT_ACCEPT;
-        }
-        if (geo_config.isBlockedCountry(country_code)) {
-            dbgTrace(D_GEO_FILTER)
-                << "geo verdict DROP, practice id: "
-                << geo_config.getId()
-                << ", country code: "
-                << country_code;
-            generateVerdictLog(
-                ngx_http_cp_verdict_e::TRAFFIC_VERDICT_DROP,
-                event,
-                geo_config.getId(),
-                true,
-                geo_location_data
-            );
-            return ngx_http_cp_verdict_e::TRAFFIC_VERDICT_DROP;
+        EnumArray<I_GeoLocation::GeoLocationField, std::string> geo_location_data;
+        I_GeoLocation *i_geo_location = Singleton::Consume<I_GeoLocation>::by<HttpGeoFilter>();
+
+        for (const std::string& source : sources) {
+            Maybe<IPAddr> maybe_source_ip = IPAddr::createIPAddr(source);
+            if (!maybe_source_ip.ok()){
+                dbgWarning(D_GEO_FILTER) <<
+                    "create ip address failed for source: " <<
+                    source <<
+                    ", Error: " <<
+                    maybe_source_ip.getErr();
+                continue;
+            }
+            auto asset_location = i_geo_location->lookupLocation(maybe_source_ip.unpack());
+            if (!asset_location.ok()) {
+                dbgWarning(D_GEO_FILTER) <<
+                    "Lookup location failed for source: " <<
+                    source <<
+                    ", Error: " <<
+                    asset_location.getErr();
+                continue;
+            }
+
+            geo_location_data = asset_location.unpack();
+
+            string country_code = geo_location_data[I_GeoLocation::GeoLocationField::COUNTRY_CODE];
+
+            if (geo_config.isAllowedCountry(country_code)) {
+                dbgTrace(D_GEO_FILTER)
+                    << "geo verdict ACCEPT, practice id: "
+                    << geo_config.getId()
+                    << ", country code: "
+                    << country_code;
+                generateVerdictLog(
+                    ngx_http_cp_verdict_e::TRAFFIC_VERDICT_ACCEPT,
+                    geo_config.getId(),
+                    true,
+                    geo_location_data
+                );
+                return ngx_http_cp_verdict_e::TRAFFIC_VERDICT_ACCEPT;
+            }
+            if (geo_config.isBlockedCountry(country_code)) {
+                dbgTrace(D_GEO_FILTER)
+                    << "geo verdict DROP, practice id: "
+                    << geo_config.getId()
+                    << ", country code: "
+                    << country_code;
+                generateVerdictLog(
+                    ngx_http_cp_verdict_e::TRAFFIC_VERDICT_DROP,
+                    geo_config.getId(),
+                    true,
+                    geo_location_data
+                );
+                return ngx_http_cp_verdict_e::TRAFFIC_VERDICT_DROP;
+            }
         }
         dbgTrace(D_GEO_FILTER)
             << "No matched practice. Returned default action: "
             << geo_config.getDefaultAction();
         generateVerdictLog(
             convertActionToVerdict(geo_config.getDefaultAction()),
-            event,
             geo_config.getId(),
             true,
             geo_location_data,
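The reworked `getGeoLookupVerdict()` evaluates every candidate source address and returns as soon as one of them resolves to an allowed or blocked country, falling back to the practice's default action otherwise. The standalone sketch below models just that precedence with plain standard-library types; the country lookup and all the data are invented, so it illustrates the control flow rather than the real geo database:

```cpp
#include <iostream>
#include <map>
#include <set>
#include <string>

enum class Verdict { Irrelevant, Accept, Drop };

// Fake "geo lookup": maps an address to a country code (data is invented).
static std::string lookupCountry(const std::string &ip)
{
    static const std::map<std::string, std::string> db = {
        {"203.0.113.7", "XB"}, {"10.0.0.1", "XA"}
    };
    auto it = db.find(ip);
    return it == db.end() ? "" : it->second;
}

// The first source that resolves to an allowed/blocked country decides the verdict.
static Verdict geoVerdict(const std::set<std::string> &sources,
                          const std::set<std::string> &allowed,
                          const std::set<std::string> &blocked)
{
    for (const auto &source : sources) {
        std::string code = lookupCountry(source);
        if (code.empty()) continue;            // lookup failed: try the next source
        if (allowed.count(code)) return Verdict::Accept;
        if (blocked.count(code)) return Verdict::Drop;
    }
    return Verdict::Irrelevant;                // caller applies the default action
}

int
main()
{
    Verdict v = geoVerdict({"10.0.0.1", "203.0.113.7"}, {}, {"XB"});
    std::cout << (v == Verdict::Drop ? "drop" : "other") << "\n";
    return 0;
}
```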
@@ -176,7 +281,6 @@ private:
     Maybe<pair<ngx_http_cp_verdict_e, string>>
     getBehaviorsVerdict(
         const unordered_map<string, set<string>> &behaviors_map_to_search,
-        const NewHttpTransactionEvent &event,
         EnumArray<I_GeoLocation::GeoLocationField, std::string> geo_location_data)
     {
         bool is_matched = false;
@@ -193,7 +297,6 @@ private:
                 dbgTrace(D_GEO_FILTER) << "behavior verdict: DROP, exception id: " << behavior.getId();
                 generateVerdictLog(
                     matched_verdict,
-                    event,
                     behavior.getId(),
                     false,
                     geo_location_data
@@ -218,63 +321,74 @@ private:
     }
 
     ngx_http_cp_verdict_e
-    getExceptionVerdict(
-        const NewHttpTransactionEvent &event,
-        EnumArray<I_GeoLocation::GeoLocationField, std::string> geo_location_data
-    ){
-        string country_code = geo_location_data[I_GeoLocation::GeoLocationField::COUNTRY_CODE];
-        string country_name = geo_location_data[I_GeoLocation::GeoLocationField::COUNTRY_NAME];
-        string source_ip = convertIpAddrToString(event.getSourceIP());
+    getExceptionVerdict(const std::set<std::string> &sources) {
 
         pair<ngx_http_cp_verdict_e, string> curr_matched_behavior;
         ngx_http_cp_verdict_e verdict = ngx_http_cp_verdict_e::TRAFFIC_VERDICT_IRRELEVANT;
+        I_GeoLocation *i_geo_location = Singleton::Consume<I_GeoLocation>::by<HttpGeoFilter>();
+        EnumArray<I_GeoLocation::GeoLocationField, std::string> geo_location_data;
 
-        dbgTrace(D_GEO_FILTER)
+        for (const std::string& source : sources) {
+
+            Maybe<IPAddr> maybe_source_ip = IPAddr::createIPAddr(source);
+            if (!maybe_source_ip.ok()){
+                dbgWarning(D_GEO_FILTER) <<
+                    "create ip address failed for source: " <<
+                    source <<
+                    ", Error: " <<
+                    maybe_source_ip.getErr();
+                continue;
+            }
+
+            auto asset_location = i_geo_location->lookupLocation(maybe_source_ip.unpack());
+            if (!asset_location.ok()) {
+                dbgWarning(D_GEO_FILTER) << "Lookup location failed for source: " <<
+                    source <<
+                    ", Error: " <<
+                    asset_location.getErr();
+                continue;
+            }
+            geo_location_data = asset_location.unpack();
+            string country_code = geo_location_data[I_GeoLocation::GeoLocationField::COUNTRY_CODE];
+            string country_name = geo_location_data[I_GeoLocation::GeoLocationField::COUNTRY_NAME];
+            dbgTrace(D_GEO_FILTER)
                 << "Get exception verdict. "
                 << "country code: "
                 << country_code
                 << ", country name: "
                 << country_name
                 << ", source ip address: "
-            << source_ip;
+                << source;
 
-        unordered_map<string, set<string>> exception_value_source_ip = {{"sourceIP", {source_ip}}};
-        auto matched_behavior_maybe = getBehaviorsVerdict(exception_value_source_ip, event, geo_location_data);
-        if (matched_behavior_maybe.ok()) {
-            curr_matched_behavior = matched_behavior_maybe.unpack();
-            verdict = curr_matched_behavior.first;
-            if (verdict == ngx_http_cp_verdict_e::TRAFFIC_VERDICT_DROP) {
-                return verdict;
+            unordered_map<string, set<string>> exception_value_country_code = {
+                {"countryCode", {country_code}}
+            };
+            auto matched_behavior_maybe = getBehaviorsVerdict(exception_value_country_code, geo_location_data);
+            if (matched_behavior_maybe.ok()) {
+                curr_matched_behavior = matched_behavior_maybe.unpack();
+                verdict = curr_matched_behavior.first;
+                if (verdict == ngx_http_cp_verdict_e::TRAFFIC_VERDICT_DROP) {
+                    return verdict;
+                }
+            }
+
+            unordered_map<string, set<string>> exception_value_country_name = {
+                {"countryName", {country_name}}
+            };
+            matched_behavior_maybe = getBehaviorsVerdict(exception_value_country_name, geo_location_data);
+            if (matched_behavior_maybe.ok()) {
+                curr_matched_behavior = matched_behavior_maybe.unpack();
+                verdict = curr_matched_behavior.first;
+                if (verdict == ngx_http_cp_verdict_e::TRAFFIC_VERDICT_DROP) {
+                    return verdict;
+                }
             }
         }
 
-        unordered_map<string, set<string>> exception_value_country_code = {
-            {"countryCode", {country_code}}
-        };
-        matched_behavior_maybe = getBehaviorsVerdict(exception_value_country_code, event, geo_location_data);
-        if (matched_behavior_maybe.ok()) {
-            curr_matched_behavior = matched_behavior_maybe.unpack();
-            verdict = curr_matched_behavior.first;
-            if (verdict == ngx_http_cp_verdict_e::TRAFFIC_VERDICT_DROP) {
-                return verdict;
-            }
-        }
-
-        unordered_map<string, set<string>> exception_value_country_name = {
-            {"countryName", {country_name}}
-        };
-        matched_behavior_maybe = getBehaviorsVerdict(exception_value_country_name, event, geo_location_data);
-        if (matched_behavior_maybe.ok()) {
-            curr_matched_behavior = matched_behavior_maybe.unpack();
-            verdict = curr_matched_behavior.first;
-            if (verdict == ngx_http_cp_verdict_e::TRAFFIC_VERDICT_DROP) {
-                return verdict;
-            }
-        }
         if (verdict == ngx_http_cp_verdict_e::TRAFFIC_VERDICT_ACCEPT) {
             generateVerdictLog(
                 verdict,
-                event,
                 curr_matched_behavior.second,
                 false,
                 geo_location_data
@@ -286,7 +400,6 @@ private:
     void
     generateVerdictLog(
         const ngx_http_cp_verdict_e &verdict,
-        const NewHttpTransactionEvent &event,
         const string &matched_id,
         bool is_geo_filter,
         const EnumArray<I_GeoLocation::GeoLocationField, std::string> geo_location_data,
@@ -307,14 +420,27 @@ private:
             LogField(matched_on, matched_id),
             ReportIS::Tags::HTTP_GEO_FILTER
         );
-        log
-            << LogField("sourceIP", convertIpAddrToString(event.getSourceIP()))
-            << LogField("sourcePort", event.getSourcePort())
-            << LogField("hostName", event.getDestinationHost())
-            << LogField("httpMethod", event.getHttpMethod())
-            << LogField("securityAction", is_prevent ? "Prevent" : "Detect");
+        auto env = Singleton::Consume<I_Environment>::by<HttpGeoFilter>();
+        auto source_ip = env->get<IPAddr>(HttpTransactionData::client_ip_ctx);
+        if (source_ip.ok()) log << LogField("sourceIP", convertIpAddrToString(source_ip.unpack()));
+
+        auto source_identifier = env->get<string>(HttpTransactionData::source_identifier);
+        if (source_identifier.ok()) log << LogField("httpSourceId", source_identifier.unpack());
+
+        auto source_port = env->get<string>(HttpTransactionData::client_port_ctx);
+        if (source_port.ok()) log << LogField("sourcePort", source_port.unpack());
+
+        auto host_name = env->get<string>(HttpTransactionData::host_name_ctx);
+        if (host_name.ok()) log << LogField("hostName", host_name.unpack());
+
+        auto method = env->get<string>(HttpTransactionData::method_ctx);
+        if (method.ok()) log << LogField("httpMethod", method.unpack());
+
+        log << LogField("securityAction", is_prevent ? "Prevent" : "Detect");
 
         if (is_default_action) log << LogField("isDefaultSecurityAction", true);
+        auto xff = env->get<string>(HttpTransactionData::xff_vals_ctx);
+        if (xff.ok()) log << LogField("proxyIP", xff.unpack());
 
         log
             << LogField("sourceCountryCode", geo_location_data[I_GeoLocation::GeoLocationField::COUNTRY_CODE])
@@ -142,6 +142,13 @@ string disabled_settings =
     "}"
     "],\n";
 
+string local_intelligence =
+    "\"intelligence\":{"
+    " \"local intelligence server ip\":\"127.0.0.1\","
+    " \"local intelligence server primary port\":9090"
+    "}\n,";
+
 string policy =
     "\"rulebase\": {"
     "\"usersIdentifiers\": ["
@@ -259,7 +266,7 @@ Layer7AccessControlTest::verifyReport(
 
 TEST_F(Layer7AccessControlTest, ReturnAcceptVerdict)
 {
-    stringstream ss_conf(prevent_settings + policy);
+    stringstream ss_conf(prevent_settings + local_intelligence + policy);
     Singleton::Consume<Config::I_Config>::from(config)->loadConfiguration(ss_conf);
 
     string intelligence_response_ok = loadIntelligenceResponse("data/ok_intelligence_response.json");
@@ -305,7 +312,7 @@ TEST_F(Layer7AccessControlTest, ReturnAcceptVerdict)
 
 TEST_F(Layer7AccessControlTest, ReturnDropVerdictOnMaliciousReputation)
 {
-    stringstream ss_conf(prevent_settings + policy);
+    stringstream ss_conf(prevent_settings + local_intelligence + policy);
     Singleton::Consume<Config::I_Config>::from(config)->loadConfiguration(ss_conf);
 
     string malicious_intelligence_response = loadIntelligenceResponse("data/malicious_intelligence_response.json");
@@ -351,7 +358,7 @@ TEST_F(Layer7AccessControlTest, ReturnDropVerdictOnMaliciousReputation)
 
 TEST_F(Layer7AccessControlTest, ReturnDropVerdictCacheBased)
 {
-    stringstream ss_conf(prevent_settings + policy);
+    stringstream ss_conf(prevent_settings + local_intelligence + policy);
     Singleton::Consume<Config::I_Config>::from(config)->loadConfiguration(ss_conf);
 
     string malicious_intelligence_response = loadIntelligenceResponse("data/malicious_intelligence_response.json");
@@ -403,7 +410,7 @@ TEST_F(Layer7AccessControlTest, ReturnDropVerdictCacheBased)
 
 TEST_F(Layer7AccessControlTest, AcceptOnDetect)
 {
-    stringstream ss_conf(detect_settings + policy);
+    stringstream ss_conf(detect_settings + local_intelligence + policy);
     Singleton::Consume<Config::I_Config>::from(config)->loadConfiguration(ss_conf);
 
     string malicious_intelligence_response = loadIntelligenceResponse("data/malicious_intelligence_response.json");
@@ -449,7 +456,7 @@ TEST_F(Layer7AccessControlTest, AcceptOnDetect)
 
 TEST_F(Layer7AccessControlTest, FallbackToSourceIPAndDrop)
 {
-    stringstream ss_conf(prevent_settings + policy);
+    stringstream ss_conf(prevent_settings + local_intelligence + policy);
     Singleton::Consume<Config::I_Config>::from(config)->loadConfiguration(ss_conf);
 
     string malicious_intelligence_response = loadIntelligenceResponse("data/malicious_intelligence_response.json");
@@ -79,6 +79,7 @@ class DefaultBackend
 {
 public:
     void load(cereal::JSONInputArchive &);
+    bool doesExist() const;
 
 private:
     bool is_exists = false;
@@ -90,6 +91,7 @@ public:
     void load(cereal::JSONInputArchive &archive_in);
 
     const std::vector<IngressDefinedRule> & getRules() const;
+    bool doesDefaultBackendExist() const;
 
 private:
     std::string ingress_class_name;
@@ -129,7 +129,7 @@ public:
     bool shouldBeautifyLogs() const;
 
     bool getCloud() const;
-    bool isK8SNeeded() const;
+    bool isContainerNeeded() const;
     bool isCefNeeded() const;
     bool isSyslogNeeded() const;
     const std::string & getSyslogServerIpv4Address() const;
@@ -140,7 +140,7 @@ private:
     const NewLoggingService & getCefServiceData() const;
 
     bool cloud = false;
-    bool k8s_service = false;
+    bool container_service = false;
     bool agent_local = true;
     bool beautify_logs = true;
     NewLoggingService syslog_service;
@@ -111,7 +111,7 @@ private:
     SecurityAppsWrapper security_apps;
 };
 
-class PolicyMakerUtils
+class PolicyMakerUtils : Singleton::Consume<I_EnvDetails>
 {
 public:
     std::string proccesSingleAppsecPolicy(
@@ -39,7 +39,7 @@ public:
         bool _logToAgent,
         bool _logToCef,
         bool _logToCloud,
-        bool _logToK8sService,
+        bool _logToContainerService,
         bool _logToSyslog,
         bool _responseBody,
         bool _tpDetect,
@@ -73,7 +73,7 @@ private:
     bool logToAgent;
     bool logToCef;
     bool logToCloud;
-    bool logToK8sService;
+    bool logToContainerService;
     bool logToSyslog;
     bool responseBody;
     bool tpDetect;
@@ -258,7 +258,7 @@ public:
     bool shouldBeautifyLogs() const;
 
     bool getCloud() const;
-    bool isK8SNeeded() const;
+    bool isContainerNeeded() const;
     bool isCefNeeded() const;
     bool isSyslogNeeded() const;
     const std::string & getSyslogServerIpv4Address() const;
@@ -269,7 +269,7 @@ private:
     const LoggingService & getCefServiceData() const;
 
     bool cloud = false;
-    bool k8s_service = false;
+    bool container_service = false;
     bool agent_local = true;
     bool beautify_logs = true;
     LoggingService syslog_service;
@@ -86,6 +86,12 @@ DefaultBackend::load(cereal::JSONInputArchive &)
     is_exists = true;
 }
 
+bool
+DefaultBackend::doesExist() const
+{
+    return is_exists;
+}
+
 void
 IngressSpec::load(cereal::JSONInputArchive &archive_in)
 {
@@ -101,6 +107,12 @@ IngressSpec::getRules() const
     return rules;
 }
 
+bool
+IngressSpec::doesDefaultBackendExist() const
+{
+    return default_backend.doesExist();
+}
+
 void
 SingleIngressData::load(cereal::JSONInputArchive &archive_in)
 {
@@ -532,6 +532,16 @@ K8sPolicyUtils::createPolicy(
     map<AnnotationKeys, string> &annotations_values,
     const SingleIngressData &item) const
 {
+    if (policies.find(annotations_values[AnnotationKeys::PolicyKey]) == policies.end()) {
+        policies[annotations_values[AnnotationKeys::PolicyKey]] = appsec_policy;
+    }
+    if (item.getSpec().doesDefaultBackendExist()) {
+        dbgTrace(D_LOCAL_POLICY)
+            << "Inserting Any host rule to the specific asset set";
+        K ingress_rule = K("*");
+        policies[annotations_values[AnnotationKeys::PolicyKey]].addSpecificRule(ingress_rule);
+    }
+
     for (const IngressDefinedRule &rule : item.getSpec().getRules()) {
         string url = rule.getHost();
         for (const IngressRulePath &uri : rule.getPathsWrapper().getRulePaths()) {
@@ -544,14 +554,12 @@ K8sPolicyUtils::createPolicy(
                 << uri.getPath()
                 << "'";
             K ingress_rule = K(url + uri.getPath());
-            appsec_policy.addSpecificRule(ingress_rule);
+            policies[annotations_values[AnnotationKeys::PolicyKey]].addSpecificRule(ingress_rule);
             }
         }
     }
-    policies[annotations_values[AnnotationKeys::PolicyKey]] = appsec_policy;
 }
 
 
 std::tuple<map<string, AppsecLinuxPolicy>, map<string, V1beta2AppsecLinuxPolicy>>
 K8sPolicyUtils::createAppsecPoliciesFromIngresses()
 {
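The `createPolicy()` change adds a wildcard (`*`) asset rule whenever the Ingress declares a default backend, and accumulates rules into the shared `policies` map instead of overwriting the entry at the end. A standalone sketch of that bookkeeping with ordinary containers (the policy and rule types are simplified stand-ins, not the repository's classes):

```cpp
#include <iostream>
#include <map>
#include <set>
#include <string>
#include <vector>

struct Policy {
    std::set<std::string> specific_rules;   // host/path rules gathered so far
};

// Mimics the revised createPolicy(): insert the policy once, add "*" when the
// ingress has a default backend, then add one rule per host+path.
static void addIngressToPolicy(std::map<std::string, Policy> &policies,
                               const std::string &policy_key,
                               bool has_default_backend,
                               const std::vector<std::string> &host_paths)
{
    if (policies.find(policy_key) == policies.end()) policies[policy_key] = Policy{};
    if (has_default_backend) policies[policy_key].specific_rules.insert("*");
    for (const auto &rule : host_paths) policies[policy_key].specific_rules.insert(rule);
}

int
main()
{
    std::map<std::string, Policy> policies;
    addIngressToPolicy(policies, "default-policy", true, {"example.com/api"});
    addIngressToPolicy(policies, "default-policy", false, {"example.com/login"});
    for (const auto &rule : policies["default-policy"].specific_rules) std::cout << rule << "\n";
    // Prints: *, example.com/api, example.com/login — rules from both ingresses are kept.
    return 0;
}
```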
@@ -126,6 +126,7 @@ NewAppsecPolicySpec::load(cereal::JSONInputArchive &archive_in)
     dbgTrace(D_LOCAL_POLICY) << "Loading AppSec policy spec";
     parseAppsecJSONKey<string>("appsecClassName", appsec_class_name, archive_in);
     parseAppsecJSONKey<NewParsedRule>("default", default_rule, archive_in);
+    default_rule.setHost("*");
     parseAppsecJSONKey<vector<NewParsedRule>>("specificRules", specific_rules, archive_in);
 }
 
@@ -132,7 +132,7 @@ void
 NewLoggingService::load(cereal::JSONInputArchive &archive_in)
 {
     parseAppsecJSONKey<string>("address", address, archive_in);
-    parseAppsecJSONKey<string>("proto", proto, archive_in);
+    parseAppsecJSONKey<string>("proto", proto, archive_in, "tcp");
     if (valid_protocols.count(proto) == 0) {
         dbgWarning(D_LOCAL_POLICY) << "AppSec Logging Service - proto invalid: " << proto;
         throw PolicyGenException("AppSec Logging Service - proto invalid: " + proto);
@@ -183,7 +183,9 @@ NewAppsecTriggerLogDestination::load(cereal::JSONInputArchive &archive_in)
     auto mode = Singleton::Consume<I_AgentDetails>::by<NewAppsecTriggerLogDestination>()->getOrchestrationMode();
     auto env_type = Singleton::Consume<I_EnvDetails>::by<NewAppsecTriggerLogDestination>()->getEnvType();
     bool k8s_service_default = (mode == OrchestrationMode::HYBRID && env_type == EnvType::K8S);
-    parseAppsecJSONKey<bool>("k8s-service", k8s_service, archive_in, k8s_service_default);
+    // BC try load previous name. TODO: update CRD
+    parseAppsecJSONKey<bool>("k8s-service", container_service, archive_in, k8s_service_default);
+    parseAppsecJSONKey<bool>("container-service", container_service, archive_in, container_service);
 
     NewStdoutLogging stdout_log;
     parseAppsecJSONKey<NewStdoutLogging>("stdout", stdout_log, archive_in);
@@ -224,9 +226,9 @@ NewAppsecTriggerLogDestination::getCloud() const
 }
 
 bool
-NewAppsecTriggerLogDestination::isK8SNeeded() const
+NewAppsecTriggerLogDestination::isContainerNeeded() const
 {
-    return k8s_service;
+    return container_service;
 }
 
 bool
@@ -538,7 +538,7 @@ extractLogTriggerData(const string &trigger_annotation_name, const T &trigger_sp
     bool webHeaders = trigger_spec.getAppsecTriggerExtendedLogging().isHttpHeaders();
     bool webBody = trigger_spec.getAppsecTriggerExtendedLogging().isRequestBody();
     bool logToCloud = trigger_spec.getAppsecTriggerLogDestination().getCloud();
-    bool logToK8sService = trigger_spec.getAppsecTriggerLogDestination().isK8SNeeded();
+    bool logToContainerService = trigger_spec.getAppsecTriggerLogDestination().isContainerNeeded();
     bool logToAgent = trigger_spec.getAppsecTriggerLogDestination().isAgentLocal();
     bool beautify_logs = trigger_spec.getAppsecTriggerLogDestination().shouldBeautifyLogs();
     bool logToCef = trigger_spec.getAppsecTriggerLogDestination().isCefNeeded();
@@ -565,7 +565,7 @@ extractLogTriggerData(const string &trigger_annotation_name, const T &trigger_sp
         logToAgent,
         logToCef,
         logToCloud,
-        logToK8sService,
+        logToContainerService,
         logToSyslog,
         responseBody,
         tpDetect,
@@ -1636,7 +1636,9 @@ PolicyMakerUtils::createAgentPolicyFromAppsecPolicy(const string &policy_name, c
     createPolicyElements<T, R>(specific_rules, default_rule, appsec_policy, policy_name);
 
     // add default rule to policy
-    createPolicyElementsByRule<T, R>(default_rule, default_rule, appsec_policy, policy_name);
+    if (Singleton::Consume<I_EnvDetails>::by<PolicyMakerUtils>()->getEnvType() != EnvType::K8S) {
+        createPolicyElementsByRule<T, R>(default_rule, default_rule, appsec_policy, policy_name);
+    }
 }
 
 // LCOV_EXCL_START Reason: no test exist
@@ -1659,11 +1661,13 @@ PolicyMakerUtils::createAgentPolicyFromAppsecPolicy<V1beta2AppsecLinuxPolicy, Ne
     );
 
     // add default rule to policy
-    createPolicyElementsByRule<V1beta2AppsecLinuxPolicy, NewParsedRule>(
-        default_rule,
-        default_rule,
-        appsec_policy,
-        policy_name);
+    if (Singleton::Consume<I_EnvDetails>::by<PolicyMakerUtils>()->getEnvType() != EnvType::K8S) {
+        createPolicyElementsByRule<V1beta2AppsecLinuxPolicy, NewParsedRule>(
+            default_rule,
+            default_rule,
+            appsec_policy,
+            policy_name);
+    }
 }
 // LCOV_EXCL_STOP
 
@@ -30,7 +30,7 @@ LogTriggerSection::LogTriggerSection(
     bool _logToAgent,
     bool _logToCef,
     bool _logToCloud,
-    bool _logToK8sService,
+    bool _logToContainerService,
     bool _logToSyslog,
     bool _responseBody,
     bool _tpDetect,
@@ -55,7 +55,7 @@ LogTriggerSection::LogTriggerSection(
         logToAgent(_logToAgent),
         logToCef(_logToCef),
         logToCloud(_logToCloud),
-        logToK8sService(_logToK8sService),
+        logToContainerService(_logToContainerService),
         logToSyslog(_logToSyslog),
         responseBody(_responseBody),
         tpDetect(_tpDetect),
@@ -96,12 +96,12 @@ LogTriggerSection::save(cereal::JSONOutputArchive &out_ar) const
         cereal::make_nvp("acDrop", acDrop),
         cereal::make_nvp("complianceViolations", false),
         cereal::make_nvp("complianceWarnings", false),
-        cereal::make_nvp("extendloggingMinSeverity", extendloggingMinSeverity),
-        cereal::make_nvp("extendlogging", extendlogging),
+        cereal::make_nvp("extendLoggingMinSeverity", extendloggingMinSeverity),
+        cereal::make_nvp("extendLogging", extendlogging),
         cereal::make_nvp("logToAgent", logToAgent),
         cereal::make_nvp("logToCef", logToCef),
         cereal::make_nvp("logToCloud", logToCloud),
-        cereal::make_nvp("logToK8sService", logToK8sService),
+        cereal::make_nvp("logToContainerService", logToContainerService),
         cereal::make_nvp("logToSyslog", logToSyslog),
         cereal::make_nvp("responseBody", responseBody),
         cereal::make_nvp("responseCode", false),
@@ -396,7 +396,9 @@ AppsecTriggerLogDestination::load(cereal::JSONInputArchive &archive_in)
     auto mode = Singleton::Consume<I_AgentDetails>::by<AppsecTriggerLogDestination>()->getOrchestrationMode();
     auto env_type = Singleton::Consume<I_EnvDetails>::by<AppsecTriggerLogDestination>()->getEnvType();
     bool k8s_service_default = (mode == OrchestrationMode::HYBRID && env_type == EnvType::K8S);
-    parseAppsecJSONKey<bool>("k8s-service", k8s_service, archive_in, k8s_service_default);
+    // BC try load previous name. TODO: update CRD
+    parseAppsecJSONKey<bool>("k8s-service", container_service, archive_in, k8s_service_default);
+    parseAppsecJSONKey<bool>("container-service", container_service, archive_in, container_service);
 
     StdoutLogging stdout_log;
     parseAppsecJSONKey<StdoutLogging>("stdout", stdout_log, archive_in);
@@ -437,9 +439,9 @@ AppsecTriggerLogDestination::getCloud() const
 }
 
 bool
-AppsecTriggerLogDestination::isK8SNeeded() const
+AppsecTriggerLogDestination::isContainerNeeded() const
 {
-    return k8s_service;
+    return container_service;
 }
 
 bool
@@ -12,6 +12,9 @@ add_subdirectory(manifest_controller)
 add_subdirectory(update_communication)
 add_subdirectory(details_resolver)
 add_subdirectory(health_check)
+add_subdirectory(health_check_manager)
+add_subdirectory(updates_process_reporter)
 add_subdirectory(env_details)
+add_subdirectory(external_sdk_server)
 
 #add_subdirectory(orchestration_ut)
@@ -45,6 +45,7 @@ public:
     bool isVersionAboveR8110() override;
     bool isReverseProxy() override;
     bool isCloudStorageEnabled() override;
+    Maybe<tuple<string, string, string, string, string>> readCloudMetadata() override;
     Maybe<tuple<string, string, string>> parseNginxMetadata() override;
 #if defined(gaia) || defined(smb)
     bool compareCheckpointVersion(int cp_version, std::function<bool(int, int)> compare_operator) const override;
@@ -141,7 +142,7 @@ DetailsResolver::Impl::isCloudStorageEnabled()
 {
     auto cloud_storage_mode_override = getProfileAgentSetting<bool>("agent.cloudStorage.enabled");
     if (cloud_storage_mode_override.ok()) {
-        dbgInfo(D_ORCHESTRATOR) << "Received cloud-storage mode override: " << *cloud_storage_mode_override;
+        dbgDebug(D_ORCHESTRATOR) << "Received cloud-storage mode override: " << *cloud_storage_mode_override;
         return *cloud_storage_mode_override;
     }
 
@@ -151,6 +152,7 @@ DetailsResolver::Impl::isCloudStorageEnabled()
 bool
 DetailsResolver::Impl::isKernelVersion3OrHigher()
 {
+#if defined(gaia) || defined(smb)
     static const string cmd =
         "clish -c 'show version os kernel' | awk '{print $4}' "
         "| cut -d '.' -f 1 | awk -F: '{ if ( $1 >= 3 ) {print 1} else {print 0}}'";
@@ -159,12 +161,14 @@ DetailsResolver::Impl::isKernelVersion3OrHigher()
     if (is_gogo.ok() && !is_gogo.unpack().empty()) {
         return is_gogo.unpack().front() == '1';
     }
+#endif
     return false;
 }
 
 bool
 DetailsResolver::Impl::isGwNotVsx()
 {
+#if defined(gaia) || defined(smb)
     static const string is_gw_cmd = "cpprod_util FwIsFirewallModule";
     static const string is_vsx_cmd = "cpprod_util FWisVSX";
     auto is_gw = DetailsResolvingHanlder::getCommandOutput(is_gw_cmd);
@@ -172,6 +176,7 @@ DetailsResolver::Impl::isGwNotVsx()
     if (is_gw.ok() && is_vsx.ok() && !is_gw.unpack().empty() && !is_vsx.unpack().empty()) {
         return is_gw.unpack().front() == '1' && is_vsx.unpack().front() == '0';
     }
+#endif
     return false;
 }
 
@@ -188,17 +193,16 @@ DetailsResolver::Impl::getCheckpointVersion() const
 {
 #ifdef gaia
     static const string cmd =
-        "echo $CPDIR | awk -F'-' '{print $NF}' | cut -c 2- |"
-        " awk -F'.' '{ if( NF == 1 ) {print $1\"00\"} else {print $1$2} }'";
+        "echo $CPDIR | awk '{sub(/.*-R/,\"\"); sub(/\\/.*/,\"\")}/^[0-9]*$/{$0=$0\".00\"}{sub(/\\./, \"\"); print}'";
 #else // smb
     static const string cmd = "sqlcmd 'select major,minor from cpver' |"
         "awk '{if ($1 == \"major\") v += (substr($3,2) * 100);"
         " if ($1 == \"minor\") v += $3; } END { print v}'";
 
 #endif // gaia
     auto version_out = DetailsResolvingHanlder::getCommandOutput(cmd);
     int cp_version = 0;
     if (version_out.ok()) {
+        dbgTrace(D_ORCHESTRATOR) << "Identified version " << version_out.unpack();
         stringstream version_stream(version_out.unpack());
         version_stream >> cp_version;
     }
@@ -300,6 +304,67 @@ DetailsResolver::Impl::parseNginxMetadata()
     return make_tuple(config_opt, cc_opt, nginx_version);
 }
 
+Maybe<tuple<string, string, string, string, string>>
+DetailsResolver::Impl::readCloudMetadata()
+{
+    auto env_read_cloud_metadata = []() -> Maybe<tuple<string, string, string, string, string>> {
+        string account_id = getenv("CLOUD_ACCOUNT_ID") ? getenv("CLOUD_ACCOUNT_ID") : "";
+        string vpc_id = getenv("CLOUD_VPC_ID") ? getenv("CLOUD_VPC_ID") : "";
+        string instance_id = getenv("CLOUD_INSTANCE_ID") ? getenv("CLOUD_INSTANCE_ID") : "";
+        string instance_local_ip = getenv("CLOUD_INSTANCE_LOCAL_IP") ? getenv("CLOUD_INSTANCE_LOCAL_IP") : "";
+        string region = getenv("CLOUD_REGION") ? getenv("CLOUD_REGION") : "";
+
+        if (
+            account_id.empty() ||
+            vpc_id.empty() ||
+            instance_id.empty() ||
+            instance_local_ip.empty() ||
+            region.empty()) {
+            return genError("Could not read cloud metadata");
+        }
+
+        return make_tuple(account_id, vpc_id, instance_id, instance_local_ip, region);
+    };
+
+    auto cloud_metadata = env_read_cloud_metadata();
+    if (!cloud_metadata.ok()) {
+        const string cmd = getFilesystemPathConfig() + "/scripts/get-cloud-metadata.sh";
+        dbgTrace(D_ORCHESTRATOR) << cloud_metadata.getErr() << ", trying to fetch it via cmd: " << cmd;
+
+        auto result = DetailsResolvingHanlder::getCommandOutput(cmd);
+        if (result.ok()) {
+            istringstream iss(result.unpack());
+            string line;
+            while (getline(iss, line)) {
+                size_t pos = line.find('=');
+                if (pos != string::npos) {
+                    string key = line.substr(0, pos);
+                    string value = line.substr(pos + 1);
+                    if (!key.empty() && !value.empty()) setenv(key.c_str(), value.c_str(), 1);
+                }
+            }
+            cloud_metadata = env_read_cloud_metadata();
+        } else {
+            dbgWarning(D_ORCHESTRATOR) << "Could not fetch cloud metadata from cmd: " << result.getErr();
+        }
+    }
+
+    if (!cloud_metadata.ok()) {
+        dbgWarning(D_ORCHESTRATOR) << cloud_metadata.getErr();
+        return genError("Failed to fetch cloud metadata");
+    }
+
+    dbgTrace(D_ORCHESTRATOR)
+        << "Successfully fetched cloud metadata: "
+        << ::get<0>(cloud_metadata.unpack()) << ", "
+        << ::get<1>(cloud_metadata.unpack()) << ", "
+        << ::get<2>(cloud_metadata.unpack()) << ", "
+        << ::get<3>(cloud_metadata.unpack()) << ", "
+        << ::get<4>(cloud_metadata.unpack());
+
+    return cloud_metadata;
+}
+
 DetailsResolver::DetailsResolver() : Component("DetailsResolver"), pimpl(make_unique<Impl>()) {}
 
 DetailsResolver::~DetailsResolver() {}
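Note: the new readCloudMetadata() above is strictly env-first; it only shells out to get-cloud-metadata.sh when one of the CLOUD_* variables is missing, and it re-exports whatever KEY=value pairs the script prints before retrying the environment read. A minimal stand-alone sketch of that env-first step, using std::optional instead of the agent's Maybe type (variable names here are illustrative):

#include <cstdlib>
#include <iostream>
#include <optional>
#include <string>
#include <tuple>

using CloudMetadata = std::tuple<std::string, std::string, std::string, std::string, std::string>;

// Mirrors the env_read_cloud_metadata lambda: fail when any CLOUD_* variable is unset or empty.
std::optional<CloudMetadata>
readCloudMetadataFromEnv()
{
    auto get = [](const char *name) -> std::string {
        const char *val = std::getenv(name);
        return val ? val : "";
    };
    std::string account_id = get("CLOUD_ACCOUNT_ID");
    std::string vpc_id = get("CLOUD_VPC_ID");
    std::string instance_id = get("CLOUD_INSTANCE_ID");
    std::string instance_local_ip = get("CLOUD_INSTANCE_LOCAL_IP");
    std::string region = get("CLOUD_REGION");
    if (account_id.empty() || vpc_id.empty() || instance_id.empty() ||
        instance_local_ip.empty() || region.empty()) {
        return std::nullopt;    // caller falls back to the metadata script
    }
    return CloudMetadata{account_id, vpc_id, instance_id, instance_local_ip, region};
}

int main()
{
    std::cout << (readCloudMetadataFromEnv() ? "metadata found" : "fall back to script") << std::endl;
}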
@@ -15,21 +15,25 @@
 #define __CHECKPOINT_PRODUCT_HANDLERS_H__
 
 #include <algorithm>
+#include <regex>
 #include <boost/regex.hpp>
+#include <boost/algorithm/string.hpp>
 
 #if defined(gaia)
 
 Maybe<string>
 checkSAMLSupportedBlade(const string &command_output)
 {
-    string supportedBlades[3] = {"identityServer", "vpn", "cvpn"};
+    // uncomment when vpn will support SAML authentication
+    // string supportedBlades[3] = {"identityServer", "vpn", "cvpn"};
+    string supportedBlades[1] = {"identityServer"};
     for(const string &blade : supportedBlades) {
         if (command_output.find(blade) != string::npos) {
             return string("true");
         }
     }
 
-    return genError("Current host does not have SAML capability");
+    return string("false");
 }
 
 Maybe<string>
@@ -40,7 +44,7 @@ checkIDABlade(const string &command_output)
         return string("true");
     }
 
-    return genError("Current host does not have IDA installed");
+    return string("false");
 }
 
 Maybe<string>
@@ -50,23 +54,26 @@ checkSAMLPortal(const string &command_output)
         return string("true");
     }
 
-    return genError("Current host does not have SAML Portal configured");
+    return string("false");
 }
 
 Maybe<string>
 checkPepIdaIdnStatus(const string &command_output)
 {
-    if (command_output.find("ida_idn_nano_service_enabled=1") != string::npos) {
+    if (command_output.find("nac_pep_scaled_sharing_enabled = 1") != string::npos) {
         return string("true");
     }
-    return genError("Current host does not have PEP control IDA IDN enabled");
+    return string("false");
 }
 
 Maybe<string>
 getIDAGaiaPackages(const string &command_output)
 {
-    return string("idaSaml_gaia;idaIdn_gaia;idaIdnBg_gaia;");
+    string result = "idaSaml_gaia;idaIdn_gaia;idaIdnBg_gaia;";
+    if (command_output.find("nac_pep_scaled_sharing_enabled = 1") != string::npos) {
+        result += "agentIntelligenceService_gaia;";
+    }
+    return result;
 }
 
 Maybe<string>
@@ -82,7 +89,7 @@ checkIDP(shared_ptr<istream> file_stream)
         }
     }
 
-    return genError("Identity Provider was not found");
+    return string("false");
 }
 
 #endif // gaia
@@ -324,6 +331,34 @@ getSmbGWIPSecVPNBlade(const string &command_output)
 {
     return getSmbBlade(command_output, "IPSec VPN Blade was not found");
 }
 
+Maybe<string>
+extractManagements(const string &command_output)
+{
+    size_t start_pos = command_output.find(":masters(");
+    if (start_pos == string::npos) {
+        return genError("Starting pattern \":masters(\" not found.");
+    }
+    size_t end_pos = command_output.find("))):", start_pos);
+    if (end_pos == string::npos) {
+        return genError("Ending pattern \"))):\" not found.");
+    }
+    string input_string = command_output.substr(start_pos, end_pos - start_pos + 3);
+    string json_output = "[";
+    regex pattern("\\(ReferenceObject\\:Uid\\(\"\\{([\\w-]+)\\}\"\\)\\:Name\\(([^\\)]+)\\)\\:Table\\(([^\\)]+)\\)\\)");
+    smatch matches;
+    auto words_begin = sregex_iterator(input_string.begin(), input_string.end(), pattern);
+    auto words_end = sregex_iterator();
+    for (sregex_iterator i = words_begin; i != words_end; ++i) {
+        const smatch& match = *i;
+        string uid = boost::algorithm::to_lower_copy(match[1].str());
+        string name = match[2].str();
+        if (json_output.back() != '[') json_output += ",";
+        json_output += "{\"Uid\":\"" + uid + "\",\"Name\":\"" + name + "\"}";
+    }
+    json_output += "]";
+    return json_output;
+}
 #endif // gaia || smb
 
 #if defined(gaia)
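For reference, extractManagements() condenses the ":masters(...)" block of the objects file into a JSON array of Uid/Name pairs, lower-casing each Uid. A tiny sketch of the output shape it builds for one entry (the Uid and Name values below are made up):

#include <iostream>
#include <string>

int main()
{
    // Same string-building steps as extractManagements(), for a single hypothetical match.
    std::string json_output = "[";
    std::string uid = "11111111-2222-3333-4444-555555555555";    // sample, already lower-cased
    std::string name = "mgmt-server-1";                          // sample management object name
    if (json_output.back() != '[') json_output += ",";
    json_output += "{\"Uid\":\"" + uid + "\",\"Name\":\"" + name + "\"}";
    json_output += "]";
    std::cout << json_output << std::endl;    // prints [{"Uid":"...","Name":"mgmt-server-1"}]
}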
@@ -43,12 +43,15 @@ SHELL_PRE_CMD("gunzip local.cfg", "gunzip -c $FWDIR/state/local/FW1/local.cfg.gz
 #if defined(gaia) || defined(smb)
 SHELL_CMD_HANDLER("cpProductIntegrationMgmtObjectType", "cpprod_util CPPROD_IsMgmtMachine", getMgmtObjType)
 SHELL_CMD_HANDLER("prerequisitesForHorizonTelemetry",
-    "[ -f /var/log/nano_agent/cp-nano-horizon-telemetry-prerequisites.log ] "
-    "&& head -1 /var/log/nano_agent/cp-nano-horizon-telemetry-prerequisites.log || echo ''",
+    "FS_PATH=<FILESYSTEM-PREFIX>; [ -f ${FS_PATH}/cp-nano-horizon-telemetry-prerequisites.log ] "
+    "&& head -1 ${FS_PATH}/cp-nano-horizon-telemetry-prerequisites.log || echo ''",
     checkIsInstallHorizonTelemetrySucceeded)
 SHELL_CMD_HANDLER("QUID", "[ -d /opt/CPquid ] "
     "&& python3 /opt/CPquid/Quid_Api.py -i /opt/CPotelcol/quid_api/get_global_id.json | jq -r .message || echo ''",
     getQUID)
+SHELL_CMD_HANDLER("SMO_QUID", "[ -d /opt/CPquid ] "
+    "&& python3 /opt/CPquid/Quid_Api.py -i /opt/CPotelcol/quid_api/get_smo_quid.json | jq -r .message || echo ''",
+    getQUID)
 SHELL_CMD_HANDLER("hasSDWan", "[ -f $FWDIR/bin/sdwan_steering ] && echo '1' || echo '0'", checkHasSDWan)
 SHELL_CMD_HANDLER(
     "canUpdateSDWanData",
@@ -99,8 +102,8 @@ SHELL_CMD_HANDLER(
 SHELL_CMD_HANDLER("hasSAMLSupportedBlade", "enabled_blades", checkSAMLSupportedBlade)
 SHELL_CMD_HANDLER("hasIDABlade", "enabled_blades", checkIDABlade)
 SHELL_CMD_HANDLER("hasSAMLPortal", "mpclient status nac", checkSAMLPortal)
-SHELL_CMD_HANDLER("hasIdaIdnEnabled", "pep control IDN_nano_Srv_support status", checkPepIdaIdnStatus)
-SHELL_CMD_HANDLER("requiredNanoServices", "ida_packages", getIDAGaiaPackages)
+SHELL_CMD_HANDLER("hasIdaIdnEnabled", "fw ctl get int nac_pep_scaled_sharing_enabled", checkPepIdaIdnStatus)
+SHELL_CMD_HANDLER("requiredNanoServices", "fw ctl get int nac_pep_scaled_sharing_enabled", getIDAGaiaPackages)
 SHELL_CMD_HANDLER(
     "cpProductIntegrationMgmtParentObjectName",
     "cat $FWDIR/database/myself_objects.C "
@@ -149,6 +152,12 @@ SHELL_CMD_HANDLER(
     "| awk -F '[:()]' '/:masters/ {found=1; next} found && /:Name/ {print $3; exit}'",
     getSMCBasedMgmtName
 )
+SHELL_CMD_HANDLER(
+    "managements",
+    "sed -n '/:masters (/,$p' $FWDIR/database/myself_objects.C |"
+    " sed -e ':a' -e 'N' -e '$!ba' -e 's/\\n//g' -e 's/\t//g' -e 's/ //g' | sed 's/))):.*/)))):/'",
+    extractManagements
+)
 #endif //gaia
 
 #if defined(smb)
@@ -199,6 +208,13 @@ SHELL_CMD_HANDLER(
     "| awk -F '[:()]' '/:masters/ {found=1; next} found && /:Name/ {print $3; exit}'",
     getSMCBasedMgmtName
 )
+
+SHELL_CMD_HANDLER(
+    "managements",
+    "sed -n '/:masters (/,$p' /tmp/local.cfg |"
+    " sed -e ':a' -e 'N' -e '$!ba' -e 's/\\n//g' -e 's/\t//g' -e 's/ //g' | sed 's/))):.*/)))):/'",
+    extractManagements
+)
 #endif//smb
 
 SHELL_CMD_OUTPUT("kernel_version", "uname -r")
@@ -77,20 +77,29 @@ void
 DetailsResolvingHanlder::Impl::init()
 {
     string actual_filesystem_prefix = getFilesystemPathConfig();
+    size_t place_holder_size = filesystem_place_holder.size();
+
     for (auto &file_handler : file_content_handlers) {
         string &path = file_handler.second.first;
-        size_t place_holder_size = filesystem_place_holder.size();
         if (path.substr(0, place_holder_size) == filesystem_place_holder) {
             path = actual_filesystem_prefix + path.substr(place_holder_size);
         }
     }
 
+    for (auto &cmd_handler_pair : shell_command_handlers) {
+        string &cmd_str = cmd_handler_pair.second.first;
+        size_t fs_pos = cmd_str.find(filesystem_place_holder);
+        if (fs_pos != string::npos) {
+            cmd_str.replace(fs_pos, place_holder_size, actual_filesystem_prefix);
+        }
+    }
 }
 
 map<string, string>
 DetailsResolvingHanlder::Impl::getResolvedDetails() const
 {
     I_ShellCmd *shell = Singleton::Consume<I_ShellCmd>::by<DetailsResolvingHanlder>();
+    I_AgentDetailsReporter *reporter = Singleton::Consume<I_AgentDetailsReporter>::by<DetailsResolvingHanlder>();
     uint32_t timeout = getConfigurationWithDefault<uint32_t>(5000, "orchestration", "Details resolver time out");
 
     for (auto &shell_pre_command : shell_pre_commands) {
@@ -114,7 +123,15 @@ DetailsResolvingHanlder::Impl::getResolvedDetails() const
         Maybe<string> shell_command_output = getCommandOutput(command);
         if (!shell_command_output.ok()) continue;
         Maybe<string> handler_ret = handler(*shell_command_output);
-        if (handler_ret.ok()) resolved_details[attr] = *handler_ret;
+
+        if (handler_ret.ok()) {
+            resolved_details[attr] = *handler_ret;
+        } else {
+            if (reporter->isPersistantAttr(attr)) {
+                dbgTrace(D_AGENT_DETAILS)<< "Persistent attribute changed, removing old value";
+                reporter->deleteAttr(attr);
+            }
+        }
     }
 
     for (auto file_handler : file_content_handlers) {
@@ -125,7 +142,7 @@ DetailsResolvingHanlder::Impl::getResolvedDetails() const
         shared_ptr<ifstream> in_file =
             Singleton::Consume<I_OrchestrationTools>::by<DetailsResolvingHanlder>()->fileStreamWrapper(path);
         if (!in_file->is_open()) {
-            dbgWarning(D_AGENT_DETAILS) << "Could not open file for processing. Path: " << path;
+            dbgDebug(D_AGENT_DETAILS) << "Could not open file for processing. Path: " << path;
             continue;
         }
 
@@ -149,7 +166,6 @@ DetailsResolvingHanlder::Impl::getResolvedDetails() const
         }
     }
 
-    I_AgentDetailsReporter *reporter = Singleton::Consume<I_AgentDetailsReporter>::by<DetailsResolvingHanlder>();
     reporter->addAttr(resolved_details, true);
 
     return resolved_details;
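init() now rewrites the <FILESYSTEM-PREFIX> placeholder in shell command strings (such as the FS_PATH handler above) in addition to file paths. A small stand-alone sketch of that substitution; the prefix value here is only an example of what getFilesystemPathConfig() might return:

#include <iostream>
#include <string>

int main()
{
    const std::string filesystem_place_holder = "<FILESYSTEM-PREFIX>";
    const std::string actual_filesystem_prefix = "/etc/cp";    // example prefix

    std::string cmd_str =
        "FS_PATH=<FILESYSTEM-PREFIX>; [ -f ${FS_PATH}/cp-nano-horizon-telemetry-prerequisites.log ] "
        "&& head -1 ${FS_PATH}/cp-nano-horizon-telemetry-prerequisites.log || echo ''";

    // Same replacement performed by DetailsResolvingHanlder::Impl::init().
    size_t fs_pos = cmd_str.find(filesystem_place_holder);
    if (fs_pos != std::string::npos) {
        cmd_str.replace(fs_pos, filesystem_place_holder.size(), actual_filesystem_prefix);
    }
    std::cout << cmd_str << std::endl;
}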
@@ -0,0 +1,4 @@
+include_directories(${PROJECT_SOURCE_DIR}/core/external_sdk/)
+
+add_library(external_sdk_server external_sdk_server.cc)
+add_subdirectory(external_sdk_server_ut)
@@ -0,0 +1,348 @@
+#include "external_sdk_server.h"
+
+#include "external_agent_sdk.h"
+#include "log_generator.h"
+#include "rest_server.h"
+#include "generic_metric.h"
+#include "customized_cereal_map.h"
+#include "report/log_rest.h"
+
+using namespace std;
+
+USE_DEBUG_FLAG(D_EXTERNAL_SDK_USER);
+USE_DEBUG_FLAG(D_EXTERNAL_SDK_SERVER);
+
+class ExternalSdkRest : public ServerRest
+{
+public:
+    void
+    doCall() override
+    {
+        dbgFlow(D_EXTERNAL_SDK_SERVER);
+        Maybe<SdkApiType> sdk_event_type = convertToEnum<SdkApiType>(event_type.get());
+        if (!sdk_event_type.ok()) {
+            dbgWarning(D_EXTERNAL_SDK_SERVER) << "Received illegal event type. Type : " << event_type.get();
+            throw JsonError("Illegal event type provided");
+        }
+        dbgDebug(D_EXTERNAL_SDK_SERVER)
+            << "Handling a new external sdk api call event. Type : "
+            << convertApiTypeToString(sdk_event_type.unpack());
+
+        I_ExternalSdkServer *sdk_server = Singleton::Consume<I_ExternalSdkServer>::from<ExternalSdkServer>();
+        switch(sdk_event_type.unpack()) {
+            case SdkApiType::SendCodeEvent: {
+                if (!file.isActive()) {
+                    throw JsonError("File was not provided for code event");
+                }
+                if (!func.isActive()) {
+                    throw JsonError("Function was not provided for code event");
+                }
+                if (!line.isActive()) {
+                    throw JsonError("Line path was not provided for code event");
+                }
+                if (!trace_id.isActive()) {
+                    throw JsonError("Trace ID was not provided for code event");
+                }
+                if (!span_id.isActive()) {
+                    throw JsonError("Span ID was not provided for code event");
+                }
+                if (!message.isActive()) {
+                    throw JsonError("Message was not provided for code event");
+                }
+                sdk_server->sendDebug(
+                    file.get(),
+                    func.get(),
+                    line.get(),
+                    getDebugLevel(),
+                    trace_id.get(),
+                    span_id.get(),
+                    message.get(),
+                    additional_fields.isActive() ? additional_fields.get() : map<string, string>()
+                );
+                return;
+            }
+            case SdkApiType::SendEventDrivenEvent: {
+                if (!event_name.isActive()) {
+                    throw JsonError("Event name was not provided for event");
+                }
+                sdk_server->sendLog(
+                    event_name.get(),
+                    getAudience(),
+                    getSeverity(),
+                    getPriority(),
+                    tag.get(),
+                    additional_fields.isActive() ? additional_fields.get() : map<string, string>()
+                );
+                return;
+            }
+            case SdkApiType::SendGetConfigRequest: {
+                if (!config_path.isActive()) {
+                    throw JsonError("Config path was not provided for get configuration event");
+                }
+                Maybe<string> config_val = sdk_server->getConfigValue(config_path.get());
+                config_value = config_val.ok() ? config_val.unpack() : "";
+                return;
+            }
+            case SdkApiType::SendPeriodicEvent: {
+                if (!event_name.isActive()) {
+                    throw JsonError("Event name was not provided for periodic event");
+                }
+                if (!service_name.isActive()) {
+                    throw JsonError("Service name was not provided for periodic event");
+                }
+                sdk_server->sendMetric(
+                    event_name,
+                    service_name,
+                    getAudienceTeam(),
+                    ReportIS::IssuingEngine::AGENT_CORE,
+                    additional_fields.isActive() ? additional_fields.get() : map<string, string>()
+                );
+                return;
+            }
+            default: {
+                dbgError(D_EXTERNAL_SDK_SERVER) << "Received illegal event type. Type : " << event_type.get();
+            }
+        }
+    }
+
+private:
+    static string
+    convertApiTypeToString(SdkApiType type)
+    {
+        static const EnumArray<SdkApiType, string> api_type_string {
+            "Code Event",
+            "Periodic Event",
+            "Event Driven",
+            "Get Configuration",
+        };
+        return api_type_string[type];
+    }
+
+    Debug::DebugLevel
+    getDebugLevel()
+    {
+        static const map<int, Debug::DebugLevel> debug_levels = {
+            {0, Debug::DebugLevel::TRACE},
+            {1, Debug::DebugLevel::DEBUG},
+            {2, Debug::DebugLevel::INFO},
+            {3, Debug::DebugLevel::WARNING},
+            {4, Debug::DebugLevel::ERROR}
+        };
+        if (!debug_level.isActive()) {
+            throw JsonError("Debug level was not provided for code event");
+        }
+        auto level = debug_levels.find(debug_level.get());
+        if(level == debug_levels.end()) {
+            throw JsonError("Illegal debug level provided");
+        }
+
+        return level->second;
+    }
+
+    ReportIS::Severity
+    getSeverity()
+    {
+        if (!severity.isActive()) {
+            throw JsonError("Event severity was not provided for periodic event");
+        }
+        switch (severity.get()) {
+            case EventSeverity::SeverityCritical: return ReportIS::Severity::CRITICAL;
+            case EventSeverity::SeverityHigh: return ReportIS::Severity::HIGH;
+            case EventSeverity::SeverityMedium: return ReportIS::Severity::MEDIUM;
+            case EventSeverity::SeverityLow: return ReportIS::Severity::LOW;
+            case EventSeverity::SeverityInfo: return ReportIS::Severity::INFO;
+            default:
+                throw JsonError("Illegal event severity provided");
+        }
+    }
+
+    ReportIS::Priority
+    getPriority()
+    {
+        if (!priority.isActive()) {
+            throw JsonError("Event priority was not provided");
+        }
+        switch (priority.get()) {
+            case EventPriority::PriorityUrgent: return ReportIS::Priority::URGENT;
+            case EventPriority::PriorityHigh: return ReportIS::Priority::HIGH;
+            case EventPriority::PriorityMedium: return ReportIS::Priority::MEDIUM;
+            case EventPriority::PriorityLow: return ReportIS::Priority::LOW;
+            default:
+                throw JsonError("Illegal event priority provided");
+        }
+    }
+
+    ReportIS::Audience
+    getAudience()
+    {
+        if (!audience.isActive()) {
+            throw JsonError("Event audience was not provided");
+        }
+        switch (audience.get()) {
+            case EventAudience::AudienceSecurity: return ReportIS::Audience::SECURITY;
+            case EventAudience::AudienceInternal: return ReportIS::Audience::INTERNAL;
+            default:
+                throw JsonError("Illegal event audience provided");
+        }
+    }
+
+    ReportIS::AudienceTeam
+    getAudienceTeam()
+    {
+        if (!team.isActive()) {
+            throw JsonError("Event audience team was not provided");
+        }
+        switch (team.get()) {
+            case EventAudienceTeam::AudienceTeamAgentCore: return ReportIS::AudienceTeam::AGENT_CORE;
+            case EventAudienceTeam::AudienceTeamIot: return ReportIS::AudienceTeam::IOT_NEXT;
+            case EventAudienceTeam::AudienceTeamWaap: return ReportIS::AudienceTeam::WAAP;
+            case EventAudienceTeam::AudienceTeamAgentIntelligence: return ReportIS::AudienceTeam::AGENT_INTELLIGENCE;
+            default:
+                throw JsonError("Illegal event audience team provided");
+        }
+    }
+
+    using additional_fields_map = map<string, string>;
+    C2S_LABEL_PARAM(int, event_type, "eventType");
+    C2S_LABEL_OPTIONAL_PARAM(additional_fields_map, additional_fields, "additionalFields");
+    C2S_LABEL_OPTIONAL_PARAM(string, event_name, "eventName");
+    C2S_LABEL_OPTIONAL_PARAM(string, service_name, "serviceName");
+    C2S_OPTIONAL_PARAM(int, team);
+    C2S_OPTIONAL_PARAM(int, audience);
+    C2S_OPTIONAL_PARAM(int, severity);
+    C2S_OPTIONAL_PARAM(int, priority);
+    C2S_OPTIONAL_PARAM(string, tag);
+    C2S_OPTIONAL_PARAM(string, file);
+    C2S_OPTIONAL_PARAM(string, func);
+    C2S_OPTIONAL_PARAM(int, line);
+    C2S_LABEL_OPTIONAL_PARAM(int, debug_level, "debugLevel");
+    C2S_LABEL_OPTIONAL_PARAM(string, trace_id, "traceId");
+    C2S_LABEL_OPTIONAL_PARAM(string, span_id, "spanId");
+    C2S_OPTIONAL_PARAM(string, message);
+    C2S_LABEL_OPTIONAL_PARAM(string, config_path, "configPath");
+    S2C_LABEL_OPTIONAL_PARAM(string, config_value, "configValue");
+};
+
+class ExternalSdkServer::Impl
+        :
+        public Singleton::Provide<I_ExternalSdkServer>::From<ExternalSdkServer>
+{
+public:
+    void
+    init()
+    {
+        auto rest = Singleton::Consume<I_RestApi>::by<ExternalSdkServer>();
+        rest->addRestCall<ExternalSdkRest>(RestAction::ADD, "sdk-call");
+    }
+
+    void
+    sendLog(
+        const string &event_name,
+        ReportIS::Audience audience,
+        ReportIS::Severity severity,
+        ReportIS::Priority priority,
+        const string &tag_string,
+        const map<string, string> &additional_fields)
+    {
+        Maybe<ReportIS::Tags> tag = TagAndEnumManagement::convertStringToTag(tag_string);
+        set<ReportIS::Tags> tags;
+        if (tag.ok()) tags.insert(tag.unpack());
+        LogGen log(event_name, audience, severity, priority, tags);
+        for (const auto &field : additional_fields) {
+            log << LogField(field.first, field.second);
+        }
+    }
+
+    void
+    sendDebug(
+        const string &file_name,
+        const string &function_name,
+        unsigned int line_number,
+        Debug::DebugLevel debug_level,
+        const string &trace_id,
+        const string &span_id,
+        const string &message,
+        const map<string, string> &additional_fields)
+    {
+        (void)trace_id;
+        (void)span_id;
+        Debug debug(file_name, function_name, line_number, debug_level, D_EXTERNAL_SDK_USER);
+        debug.getStreamAggr() << message;
+        bool is_first_key = true;
+        for (const auto &field : additional_fields) {
+            if (is_first_key) {
+                is_first_key = false;
+                debug.getStreamAggr() << ". ";
+            } else {
+                debug.getStreamAggr() << ", ";
+            }
+            debug.getStreamAggr() << "\"" << field.first << "\": \"" << field.second << "\"";
+        }
+    }
+
+    void
+    sendMetric(
+        const string &event_title,
+        const string &service_name,
+        ReportIS::AudienceTeam team,
+        ReportIS::IssuingEngine issuing_engine,
+        const map<string, string> &additional_fields)
+    {
+        ScopedContext ctx;
+        ctx.registerValue("Service Name", service_name);
+
+        set<ReportIS::Tags> tags;
+        Report metric_to_fog(
+            event_title,
+            Singleton::Consume<I_TimeGet>::by<GenericMetric>()->getWalltime(),
+            ReportIS::Type::PERIODIC,
+            ReportIS::Level::LOG,
+            ReportIS::LogLevel::INFO,
+            ReportIS::Audience::INTERNAL,
+            team,
+            ReportIS::Severity::INFO,
+            ReportIS::Priority::LOW,
+            chrono::seconds(0),
+            LogField("agentId", Singleton::Consume<I_AgentDetails>::by<GenericMetric>()->getAgentId()),
+            tags,
+            ReportIS::Tags::INFORMATIONAL,
+            issuing_engine
+        );
+
+        for (const auto &field : additional_fields) {
+            metric_to_fog << LogField(field.first, field.second);
+        }
+
+        LogRest metric_client_rest(metric_to_fog);
+
+        string fog_metric_uri = getConfigurationWithDefault<string>("/api/v1/agents/events", "metric", "fogMetricUri");
+        Singleton::Consume<I_Messaging>::by<ExternalSdkServer>()->sendAsyncMessage(
+            HTTPMethod::POST,
+            fog_metric_uri,
+            metric_client_rest,
+            MessageCategory::METRIC,
+            MessageMetadata(),
+            false
+        );
+    }
+
+    Maybe<string>
+    getConfigValue(const string &config_path)
+    {
+        auto config_val = getProfileAgentSetting<string>(config_path);
+        if (!config_val.ok()) {
+            stringstream error;
+            error << "Failed to get configuration. Config path: " << config_path << ", Error: " << config_val.getErr();
+            return genError(error.str());
+        }
+        return config_val.unpack();
+    }
+};
+
+ExternalSdkServer::ExternalSdkServer() : Component("ExternalSdkServer"), pimpl(make_unique<Impl>()) {}
+ExternalSdkServer::~ExternalSdkServer() {}
+
+void ExternalSdkServer::init() { pimpl->init(); }
+void ExternalSdkServer::fini() {}
+
+void ExternalSdkServer::preload() {}
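The C2S_* parameters above define the JSON body accepted by the new "sdk-call" REST endpoint. As an illustration, an event-driven payload (eventType 2) can be composed the same way the unit tests below do; all field values here are examples only:

#include <iostream>
#include <string>

int main()
{
    // Illustrative sdk-call body for SdkApiType::SendEventDrivenEvent; field names
    // follow the C2S_* labels declared in ExternalSdkRest.
    std::string event_call_body =
        "{\n"
        "    \"eventType\": 2,\n"
        "    \"eventName\": \"my log\",\n"
        "    \"audience\": 1,\n"
        "    \"severity\": 3,\n"
        "    \"priority\": 1,\n"
        "    \"tag\": \"IPS\",\n"
        "    \"additionalFields\": { \"key1\": \"value1\" }\n"
        "}";
    std::cout << event_call_body << std::endl;
}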
@@ -0,0 +1,7 @@
+link_directories(${BOOST_ROOT}/lib)
+
+add_unit_test(
+    external_sdk_server_ut
+    "external_sdk_server_ut.cc"
+    "external_sdk_server;mainloop;singleton;rest;environment;time_proxy;logging;event_is;metric;-lboost_context;agent_details;-lboost_regex;messaging;"
+)
@@ -0,0 +1,349 @@
+#include <stdio.h>
+#include <stdarg.h>
+
+#include "external_sdk_server.h"
+
+#include "cptest.h"
+#include "mock/mock_rest_api.h"
+#include "mock/mock_messaging.h"
+#include "mock/mock_logging.h"
+#include "mock/mock_time_get.h"
+#include "config.h"
+#include "config_component.h"
+#include "agent_details.h"
+
+using namespace std;
+using namespace testing;
+
+class ExternalSdkServerTest : public Test
+{
+public:
+    ExternalSdkServerTest()
+    {
+        EXPECT_CALL(rest_mocker, mockRestCall(RestAction::ADD, "sdk-call", _)).WillOnce(
+            WithArg<2>(
+                Invoke(
+                    [this](const unique_ptr<RestInit> &rest_ptr)
+                    {
+                        mock_sdk_rest = rest_ptr->getRest();
+                        return true;
+                    }
+                )
+            )
+        );
+
+        sdk_server.preload();
+        sdk_server.init();
+        i_sdk = Singleton::Consume<I_ExternalSdkServer>::from(sdk_server);
+    }
+
+    ~ExternalSdkServerTest()
+    {
+        sdk_server.fini();
+    }
+
+    ExternalSdkServer sdk_server;
+    NiceMock<MockTimeGet> mock_timer;
+    StrictMock<MockMessaging> messaging_mocker;
+    StrictMock<MockRestApi> rest_mocker;
+    StrictMock<MockLogging> log_mocker;
+    unique_ptr<ServerRest> mock_sdk_rest;
+    I_ExternalSdkServer *i_sdk;
+    ConfigComponent conf;
+    AgentDetails agent_details;
+    ::Environment env;
+};
+
+TEST_F(ExternalSdkServerTest, initTest)
+{
+}
+
+TEST_F(ExternalSdkServerTest, configCall)
+{
+    Maybe<string> no_conf = i_sdk->getConfigValue("key1");
+    EXPECT_FALSE(no_conf.ok());
+    string config_json =
+        "{\n"
+        "\"agentSettings\": [\n"
+        "{\n"
+        "\"id\": \"id1\",\n"
+        "\"key\": \"key1\",\n"
+        "\"value\": \"value1\"\n"
+        "},\n"
+        "{\n"
+        "\"id\": \"id1\",\n"
+        "\"key\": \"key2\",\n"
+        "\"value\": \"value2\"\n"
+        "}\n"
+        "]\n"
+        "}\n";
+    conf.preload();
+    istringstream conf_stream(config_json);
+    ASSERT_TRUE(Singleton::Consume<Config::I_Config>::from(conf)->loadConfiguration(conf_stream));
+
+    Maybe<string> conf_found = i_sdk->getConfigValue("key1");
+    ASSERT_TRUE(conf_found.ok());
+    EXPECT_EQ(conf_found.unpack(), "value1");
+
+    conf_found = i_sdk->getConfigValue("key2");
+    ASSERT_TRUE(conf_found.ok());
+    EXPECT_EQ(conf_found.unpack(), "value2");
+
+    stringstream config_call_body;
+    config_call_body << "{ \"eventType\": 3, \"configPath\": \"key1\" }";
+
+    Maybe<string> sdk_conf = mock_sdk_rest->performRestCall(config_call_body);
+    ASSERT_TRUE(sdk_conf.ok());
+    EXPECT_EQ(
+        sdk_conf.unpack(),
+        "{\n"
+        " \"configValue\": \"value1\"\n"
+        "}"
+    );
+}
+
+template <typename T>
+string
+toJson(const T &obj)
+{
+    stringstream ss;
+    {
+        cereal::JSONOutputArchive ar(ss);
+        obj.serialize(ar);
+    }
+    return ss.str();
+}
+
+TEST_F(ExternalSdkServerTest, eventDrivenCall)
+{
+    string generated_log;
+    EXPECT_CALL(log_mocker, getCurrentLogId()).Times(2).WillRepeatedly(Return(0));
+    EXPECT_CALL(log_mocker, sendLog(_)).Times(2).WillRepeatedly(
+        WithArg<0>(
+            Invoke(
+                [&] (const Report &msg)
+                {
+                    generated_log = toJson(msg);
+                }
+            )
+        )
+    );
+
+    i_sdk->sendLog(
+        "my log",
+        ReportIS::Audience::INTERNAL,
+        ReportIS::Severity::LOW,
+        ReportIS::Priority::HIGH,
+        "IPS",
+        {{"key1", "value1"}, {"key2", "value2"}}
+    );
+    static const string expected_log =
+        "{\n"
+        " \"eventTime\": \"\",\n"
+        " \"eventName\": \"my log\",\n"
+        " \"eventSeverity\": \"Low\",\n"
+        " \"eventPriority\": \"High\",\n"
+        " \"eventType\": \"Event Driven\",\n"
+        " \"eventLevel\": \"Log\",\n"
+        " \"eventLogLevel\": \"info\",\n"
+        " \"eventAudience\": \"Internal\",\n"
+        " \"eventAudienceTeam\": \"\",\n"
+        " \"eventFrequency\": 0,\n"
+        " \"eventTags\": [\n"
+        " \"IPS\"\n"
+        " ],\n"
+        " \"eventSource\": {\n"
+        " \"agentId\": \"Unknown\",\n"
+        " \"eventTraceId\": \"\",\n"
+        " \"eventSpanId\": \"\",\n"
+        " \"issuingEngineVersion\": \"\",\n"
+        " \"serviceName\": \"Unnamed Nano Service\"\n"
+        " },\n"
+        " \"eventData\": {\n"
+        " \"logIndex\": 0,\n"
+        " \"key1\": \"value1\",\n"
+        " \"key2\": \"value2\"\n"
+        " }\n"
+        "}";
+
+    EXPECT_EQ(generated_log, expected_log);
+
+    string event_call_body =
+        "{\n"
+        " \"eventType\": 2,\n"
+        " \"eventName\": \"my log\",\n"
+        " \"audience\": 1,\n"
+        " \"severity\": 3,\n"
+        " \"priority\": 1,\n"
+        " \"tag\": \"IPS\",\n"
+        " \"team\": 3,\n"
+        " \"additionalFields\": {\n"
+        " \"key1\": \"value1\",\n"
+        " \"key2\": \"value2\"\n"
+        " }\n"
+        "}";
+
+    generated_log = "";
+    stringstream event_call_stream;
+    event_call_stream << event_call_body;
+    EXPECT_TRUE(mock_sdk_rest->performRestCall(event_call_stream).ok());
+    EXPECT_EQ(generated_log, expected_log);
+}
+
+TEST_F(ExternalSdkServerTest, periodicEventCall)
+{
+    string message_body;
+    EXPECT_CALL(
+        messaging_mocker,
+        sendAsyncMessage(
+            HTTPMethod::POST,
+            "/api/v1/agents/events",
+            _,
+            MessageCategory::METRIC,
+            _,
+            false
+        )
+    ).Times(2).WillRepeatedly(SaveArg<2>(&message_body));
+
+    i_sdk->sendMetric(
+        "my metric",
+        "matrix",
+        ReportIS::AudienceTeam::AGENT_INTELLIGENCE,
+        ReportIS::IssuingEngine::AGENT_CORE,
+        {{"key", "value"}}
+    );
+
+    static const string expected_message =
+        "{\n"
+        " \"log\": {\n"
+        " \"eventTime\": \"\",\n"
+        " \"eventName\": \"my metric\",\n"
+        " \"eventSeverity\": \"Info\",\n"
+        " \"eventPriority\": \"Low\",\n"
+        " \"eventType\": \"Periodic\",\n"
+        " \"eventLevel\": \"Log\",\n"
+        " \"eventLogLevel\": \"info\",\n"
+        " \"eventAudience\": \"Internal\",\n"
+        " \"eventAudienceTeam\": \"Agent Intelligence\",\n"
+        " \"eventFrequency\": 0,\n"
+        " \"eventTags\": [\n"
+        " \"Informational\"\n"
+        " ],\n"
+        " \"eventSource\": {\n"
+        " \"agentId\": \"Unknown\",\n"
+        " \"issuingEngine\": \"Agent Core\",\n"
+        " \"eventTraceId\": \"\",\n"
+        " \"eventSpanId\": \"\",\n"
+        " \"issuingEngineVersion\": \"\",\n"
+        " \"serviceName\": \"matrix\"\n"
+        " },\n"
+        " \"eventData\": {\n"
+        " \"key\": \"value\"\n"
+        " }\n"
+        " }\n"
+        "}";
+
+    EXPECT_EQ(message_body, expected_message);
+
+    string event_call_body =
+        "{\n"
+        " \"eventType\": 1,\n"
+        " \"eventName\": \"my metric\",\n"
+        " \"serviceName\": \"matrix\",\n"
+        " \"team\": 3,\n"
+        " \"additionalFields\": {\n"
+        " \"key\": \"value\"\n"
+        " }\n"
+        "}";
+
+    stringstream event_call_stream;
+    event_call_stream << event_call_body;
+
+    message_body = "";
+    EXPECT_TRUE(mock_sdk_rest->performRestCall(event_call_stream).ok());
+    EXPECT_EQ(message_body, expected_message);
+}
+
+USE_DEBUG_FLAG(D_EXTERNAL_SDK_USER);
+USE_DEBUG_FLAG(D_EXTERNAL_SDK_SERVER);
+
+TEST_F(ExternalSdkServerTest, codeEventCall)
+{
+    ostringstream capture_debug;
+    Debug::setUnitTestFlag(D_EXTERNAL_SDK_SERVER, Debug::DebugLevel::TRACE);
+    Debug::setUnitTestFlag(D_EXTERNAL_SDK_USER, Debug::DebugLevel::TRACE);
+    Debug::setNewDefaultStdout(&capture_debug);
+
+    i_sdk->sendDebug(
+        "file.cc",
+        "myFunc2",
+        42,
+        Debug::DebugLevel::TRACE,
+        "123",
+        "abc",
+        "h#l1ow w0r!d",
+        {{"hi", "universe"}}
+    );
+
+    EXPECT_THAT(
+        capture_debug.str(),
+        HasSubstr(
+            "[myFunc2@file.cc:42 | >>>] "
+            "h#l1ow w0r!d. \"hi\": \"universe\"\n"
+        )
+    );
+
+
+    string debug_event =
+        "{\n"
+        " \"eventType\": 0,\n"
+        " \"file\": \"my file\",\n"
+        " \"func\": \"function_name\",\n"
+        " \"line\": 42,\n"
+        " \"debugLevel\": 0,\n"
+        " \"traceId\": \"\",\n"
+        " \"spanId\": \"span2323\",\n"
+        " \"message\": \"some short debug\",\n"
+        " \"team\": 1,\n"
+        " \"additionalFields\": {\n"
+        " \"name\": \"moshe\",\n"
+        " \"food\": \"bamba\"\n"
+        " }\n"
+        "}";
+
+    stringstream event_call_stream;
+    event_call_stream << debug_event;
+
+    EXPECT_TRUE(mock_sdk_rest->performRestCall(event_call_stream).ok());
+
+    EXPECT_THAT(
+        capture_debug.str(),
+        HasSubstr(
+            "[function_name@my file:42 | >>>] "
+            "some short debug. \"food\": \"bamba\", \"name\": \"moshe\"\n"
+        )
+    );
+
+    Debug::setNewDefaultStdout(&cout);
+}
+
+TEST_F(ExternalSdkServerTest, ilegalEventCall)
+{
+    string event_call_body =
+        "{\n"
+        " \"eventType\": 7,\n"
+        " \"eventName\": \"my metric\",\n"
+        " \"serviceName\": \"matrix\",\n"
+        " \"team\": 3,\n"
+        " \"additionalFields\": {\n"
+        " \"key\": \"value\"\n"
+        " }\n"
+        "}";
+
+    stringstream event_call_stream;
+    event_call_stream << event_call_body;
+
+    Maybe<string> failed_respond = mock_sdk_rest->performRestCall(event_call_stream);
+    EXPECT_FALSE(failed_respond.ok());
+    EXPECT_EQ(failed_respond.getErr(), "Illegal event type provided");
+}
@@ -246,7 +246,7 @@ private:
         }
     }
     routine_id = i_mainloop->addFileRoutine(
-        I_MainLoop::RoutineType::RealTime,
+        I_MainLoop::RoutineType::System,
        server_sock,
        [this] () { handleConnection(); },
        "Health check probe server",
@@ -344,7 +344,7 @@ private:
    dbgDebug(D_HEALTH_CHECK) << "Successfully accepted client, client fd: " << new_client_socket;
    open_connections_counter++;
    auto curr_routine = i_mainloop->addOneTimeRoutine(
-        I_MainLoop::RoutineType::RealTime,
+        I_MainLoop::RoutineType::System,
        [this] ()
        {
            auto curr_routine_id = i_mainloop->getCurrentRoutineId().unpack();
@@ -3,5 +3,5 @@ link_directories(${BOOST_ROOT}/lib)
 add_unit_test(
     health_check_ut
     "health_check_ut.cc"
-    "health_check;messaging;mainloop;singleton;agent_details;config;logging;metric;event_is;health_check_manager;-lboost_regex;-lboost_system"
+    "health_check;updates_process_reporter;messaging;mainloop;singleton;agent_details;config;logging;metric;event_is;health_check_manager;-lboost_regex;-lboost_system"
 )
|||||||
@@ -111,12 +111,12 @@ TEST_F(HealthCheckerTest, clientConnection)

    EXPECT_CALL(
        mock_mainloop,
-       addFileRoutine(I_MainLoop::RoutineType::RealTime, _, _, _, true)
+       addFileRoutine(I_MainLoop::RoutineType::System, _, _, _, true)
    ).WillRepeatedly(DoAll(SaveArg<2>(&connection_handler_routine), Return(0)));

    EXPECT_CALL(
        mock_mainloop,
-       addOneTimeRoutine(I_MainLoop::RoutineType::RealTime, _, "Health check probe connection handler", true)
+       addOneTimeRoutine(I_MainLoop::RoutineType::System, _, "Health check probe connection handler", true)
    ).WillOnce(DoAll(SaveArg<1>(&connection_handler_routine), Return(0)));

    int socket = 1;
@@ -145,7 +145,7 @@ TEST_F(HealthCheckerTest, loadFromDynamicConfiguration)

    EXPECT_CALL(
        mock_mainloop,
-       addFileRoutine(I_MainLoop::RoutineType::RealTime, _, _, _, true)
+       addFileRoutine(I_MainLoop::RoutineType::System, _, _, _, true)
    ).WillRepeatedly(DoAll(SaveArg<2>(&connection_handler_routine), Return(0)));

    health_checker.init();
@@ -183,7 +183,7 @@ TEST_F(HealthCheckerTest, connectionsLimit)

    EXPECT_CALL(
        mock_mainloop,
-       addFileRoutine(I_MainLoop::RoutineType::RealTime, _, _, _, true)
+       addFileRoutine(I_MainLoop::RoutineType::System, _, _, _, true)
    ).WillRepeatedly(DoAll(SaveArg<2>(&connection_handler_routine), Return(0)));

    EXPECT_CALL(mock_mainloop, doesRoutineExist(_)).WillRepeatedly(Return(false));
@@ -218,12 +218,12 @@ TEST_F(HealthCheckerTest, disablingAfterEnabled)

    EXPECT_CALL(
        mock_mainloop,
-       addFileRoutine(I_MainLoop::RoutineType::RealTime, _, _, _, true)
+       addFileRoutine(I_MainLoop::RoutineType::System, _, _, _, true)
    ).WillRepeatedly(DoAll(SaveArg<2>(&connection_handler_routine), Return(0)));

    EXPECT_CALL(
        mock_mainloop,
-       addOneTimeRoutine(I_MainLoop::RoutineType::RealTime, _, "Health check probe connection handler", true)
+       addOneTimeRoutine(I_MainLoop::RoutineType::System, _, "Health check probe connection handler", true)
    ).WillOnce(DoAll(SaveArg<1>(&connection_handler_routine), Return(0)));

    int socket = 1;
@@ -273,12 +273,12 @@ TEST_F(HealthCheckerTest, changePortIpConfig)

    EXPECT_CALL(
        mock_mainloop,
-       addFileRoutine(I_MainLoop::RoutineType::RealTime, _, _, _, true)
+       addFileRoutine(I_MainLoop::RoutineType::System, _, _, _, true)
    ).WillRepeatedly(DoAll(SaveArg<2>(&connection_handler_routine), Return(0)));

    EXPECT_CALL(
        mock_mainloop,
-       addOneTimeRoutine(I_MainLoop::RoutineType::RealTime, _, "Health check probe connection handler", true)
+       addOneTimeRoutine(I_MainLoop::RoutineType::System, _, "Health check probe connection handler", true)
    ).WillOnce(DoAll(SaveArg<1>(&connection_handler_routine), Return(0)));

    int socket = 1;
@@ -321,12 +321,12 @@ TEST_F(HealthCheckerTest, FailedHealthCheck)

    EXPECT_CALL(
        mock_mainloop,
-       addFileRoutine(I_MainLoop::RoutineType::RealTime, _, _, _, true)
+       addFileRoutine(I_MainLoop::RoutineType::System, _, _, _, true)
    ).WillRepeatedly(DoAll(SaveArg<2>(&connection_handler_routine), Return(0)));

    EXPECT_CALL(
        mock_mainloop,
-       addOneTimeRoutine(I_MainLoop::RoutineType::RealTime, _, "Health check probe connection handler", true)
+       addOneTimeRoutine(I_MainLoop::RoutineType::System, _, "Health check probe connection handler", true)
    ).WillOnce(DoAll(SaveArg<1>(&connection_handler_routine), Return(0)));

    int socket = 1;
@@ -1,3 +1 @@
add_library(health_check_manager health_check_manager.cc)
-
-add_subdirectory(health_check_manager_ut)
@@ -21,6 +21,7 @@
#include "config.h"
#include "cereal/archives/json.hpp"
#include "customized_cereal_map.h"
+#include "updates_process_event.h"

using namespace std;

@@ -79,19 +80,22 @@ class HealthCheckValue
public:
    HealthCheckValue() = default;

-   HealthCheckValue(HealthCheckStatus raw_status, const map<string, HealthCheckStatusReply> &descriptions)
+   HealthCheckValue(HealthCheckStatus raw_status, const HealthCheckStatusReply &description)
        :
        status(raw_status)
    {
-       for (const auto &single_stat : descriptions) {
-           if (single_stat.second.getStatus() == HealthCheckStatus::HEALTHY) {
-               dbgTrace(D_HEALTH_CHECK_MANAGER) << "Ignoring healthy status reply. Comp name: " << single_stat.first;
-               continue;
-           }
+       if (description.getStatus() == HealthCheckStatus::HEALTHY) {
+           dbgTrace(D_HEALTH_CHECK_MANAGER)
+               << "Ignoring healthy status reply. Comp name: "
+               << description.getCompName();
+           return;
+       }

-           for (const auto &status : single_stat.second.getExtendedStatus()) {
-               errors.push_back(HealthCheckError(single_stat.first + " " + status.first, status.second));
-           }
+       for (const auto &extended_status : description.getExtendedStatus()) {
+           errors.push_back(
+               HealthCheckError(description.getCompName() + " " + extended_status.first,
+               extended_status.second
+           ));
        }
    }

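A minimal, self-contained sketch of the flattening rule the rewritten constructor applies (a healthy reply is skipped entirely; every other extendedStatus entry becomes one error keyed "<component name> <field name>"). The names Error and flatten below are illustrative stand-ins for this note, not the repository's classes:

#include <iostream>
#include <map>
#include <string>
#include <vector>

struct Error { std::string code; std::string message; };

// Mirrors the constructor above: skip healthy replies, otherwise turn each
// extended-status entry into one error named "<component name> <field name>".
std::vector<Error>
flatten(const std::string &comp, bool healthy, const std::map<std::string, std::string> &extended)
{
    std::vector<Error> errors;
    if (healthy) return errors;
    for (const auto &entry : extended) {
        errors.push_back(Error{comp + " " + entry.first, entry.second});
    }
    return errors;
}

int main()
{
    for (const auto &err : flatten("Orchestration", false, {{"Manifest", "download failed"}})) {
        std::cout << err.code << ": " << err.message << "\n";
    }
    return 0;
}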
@@ -113,9 +117,9 @@ private:
class HealthCheckPatch : public ClientRest
{
public:
-   HealthCheckPatch(HealthCheckStatus raw_status, const map<string, HealthCheckStatusReply> &descriptions)
+   HealthCheckPatch(HealthCheckStatus raw_status, const HealthCheckStatusReply &description)
    {
-       health_check = HealthCheckValue(raw_status, descriptions);
+       health_check = HealthCheckValue(raw_status, description);
    }

    C2S_LABEL_PARAM(HealthCheckValue, health_check, "healthCheck");
@@ -123,7 +127,8 @@ public:

class HealthCheckManager::Impl
    :
-   Singleton::Provide<I_Health_Check_Manager>::From<HealthCheckManager>
+   Singleton::Provide<I_Health_Check_Manager>::From<HealthCheckManager>,
+   public Listener<UpdatesProcessEvent>
{
public:
    void
@@ -132,6 +137,7 @@ public:
        auto rest = Singleton::Consume<I_RestApi>::by<HealthCheckManager>();
        rest->addRestCall<HealthCheckOnDemand>(RestAction::SHOW, "health-check-on-demand");

+       registerListener();
        int interval_in_seconds =
            getProfileAgentSettingWithDefault<int>(30, "agent.healthCheck.intervalInSeconds");

@@ -157,9 +163,62 @@ public:
    void
    printRepliesHealthStatus(ofstream &oputput_file)
    {
-       getRegisteredComponentsHealthStatus();
        cereal::JSONOutputArchive ar(oputput_file);
-       ar(cereal::make_nvp("allComponentsHealthCheckReplies", all_comps_health_status));
+       ar(cereal::make_nvp(health_check_reply.getCompName(), health_check_reply));
+   }
+
+   void
+   upon(const UpdatesProcessEvent &event)
+   {
+       OrchestrationStatusFieldType status_field_type = event.getStatusFieldType();
+       HealthCheckStatus _status = convertResultToHealthCheckStatus(event.getResult());
+       string status_field_type_str = convertOrchestrationStatusFieldTypeToStr(status_field_type);
+
+       extended_status[status_field_type_str] =
+           _status == HealthCheckStatus::HEALTHY ?
+               "Success" :
+               event.parseDescription();
+       field_types_status[status_field_type_str] = _status;
+
+       switch(_status) {
+           case HealthCheckStatus::UNHEALTHY: {
+               general_health_aggregated_status = HealthCheckStatus::UNHEALTHY;
+               break;
+           }
+           case HealthCheckStatus::DEGRADED: {
+               for (const auto &type_status : field_types_status) {
+                   if ((type_status.first != status_field_type_str)
+                       && (type_status.second == HealthCheckStatus::UNHEALTHY))
+                   {
+                       break;
+                   }
+               }
+               general_health_aggregated_status = HealthCheckStatus::DEGRADED;
+               break;
+           }
+           case HealthCheckStatus::HEALTHY: {
+               for (const auto &type_status : field_types_status) {
+                   if ((type_status.first != status_field_type_str)
+                       && (type_status.second == HealthCheckStatus::UNHEALTHY
+                           || type_status.second == HealthCheckStatus::DEGRADED)
+                   )
+                   {
+                       break;
+                   }
+                   general_health_aggregated_status = HealthCheckStatus::HEALTHY;
+               }
+               break;
+           }
+           case HealthCheckStatus::IGNORED: {
+               break;
+           }
+       }
+       health_check_reply = HealthCheckStatusReply(
+           "Orchestration",
+           general_health_aggregated_status,
+           extended_status
+       );
    }

private:
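The switch in upon() above implements a worst-status-wins aggregation across the per-field statuses (UNHEALTHY beats DEGRADED, DEGRADED beats HEALTHY, IGNORED leaves the aggregate untouched). A compact standalone sketch of the same rule, using illustrative names rather than the project's types:

#include <algorithm>
#include <iostream>
#include <map>
#include <string>

// Severity grows with the enum value, so "worst wins" reduces to a max().
enum class Status { Healthy = 0, Degraded = 1, Unhealthy = 2 };

Status aggregate(const std::map<std::string, Status> &per_field)
{
    Status worst = Status::Healthy;
    for (const auto &field : per_field) worst = std::max(worst, field.second);
    return worst;
}

int main()
{
    std::map<std::string, Status> fields = {{"Manifest", Status::Degraded}, {"Last Update", Status::Healthy}};
    std::cout << static_cast<int>(aggregate(fields)) << "\n";  // prints 1 (Degraded)
    return 0;
}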
@@ -168,9 +227,10 @@ private:
    {
        dbgFlow(D_HEALTH_CHECK_MANAGER) << "Sending a health check patch";

-       HealthCheckPatch patch_to_send(general_health_aggregated_status, all_comps_health_status);
-       auto messaging = Singleton::Consume<I_Messaging>::by<HealthCheckManager>();
-       return messaging->sendSyncMessageWithoutResponse(
+       HealthCheckPatch patch_to_send(general_health_aggregated_status, health_check_reply);
+       extended_status.clear();
+       field_types_status.clear();
+       return Singleton::Consume<I_Messaging>::by<HealthCheckManager>()->sendSyncMessageWithoutResponse(
            HTTPMethod::PATCH,
            "/agents",
            patch_to_send,
@@ -178,59 +238,11 @@ private:
        );
    }

-   void
-   getRegisteredComponentsHealthStatus()
-   {
-       vector<HealthCheckStatusReply> health_check_event_reply = HealthCheckStatusEvent().query();
-       all_comps_health_status.clear();
-       for (const auto &reply : health_check_event_reply) {
-           if (reply.getStatus() != HealthCheckStatus::IGNORED) {
-               all_comps_health_status.emplace(reply.getCompName(), reply);
-           }
-       }
-   }
-
-   void
-   calcGeneralHealthAggregatedStatus()
-   {
-       general_health_aggregated_status = HealthCheckStatus::HEALTHY;
-
-       for (const auto &reply : all_comps_health_status) {
-           HealthCheckStatus status = reply.second.getStatus();
-
-           dbgTrace(D_HEALTH_CHECK_MANAGER)
-               << "Current aggregated status is: "
-               << HealthCheckStatusReply::convertHealthCheckStatusToStr(
-                   general_health_aggregated_status
-               )
-               << ". Got health status: "
-               << HealthCheckStatusReply::convertHealthCheckStatusToStr(status)
-               << "for component: "
-               << reply.first;
-
-           switch (status) {
-               case HealthCheckStatus::UNHEALTHY : {
-                   general_health_aggregated_status = HealthCheckStatus::UNHEALTHY;
-                   return;
-               }
-               case HealthCheckStatus::DEGRADED : {
-                   general_health_aggregated_status = HealthCheckStatus::DEGRADED;
-                   break;
-               }
-               case HealthCheckStatus::IGNORED : break;
-               case HealthCheckStatus::HEALTHY : break;
-           }
-       }
-   }
-
    void
    executeHealthCheck()
    {
        dbgFlow(D_HEALTH_CHECK_MANAGER) << "Collecting health status from all registered components.";

-       getRegisteredComponentsHealthStatus();
-       calcGeneralHealthAggregatedStatus();

        dbgTrace(D_HEALTH_CHECK_MANAGER)
            << "Aggregated status: "
            << HealthCheckStatusReply::convertHealthCheckStatusToStr(general_health_aggregated_status);
@@ -244,9 +256,43 @@ private:
        };
    }

-   HealthCheckStatus general_health_aggregated_status;
-   map<string, HealthCheckStatusReply> all_comps_health_status;
+   string
+   convertOrchestrationStatusFieldTypeToStr(OrchestrationStatusFieldType type)
+   {
+       switch (type) {
+           case OrchestrationStatusFieldType::REGISTRATION : return "Registration";
+           case OrchestrationStatusFieldType::MANIFEST : return "Manifest";
+           case OrchestrationStatusFieldType::LAST_UPDATE : return "Last Update";
+           case OrchestrationStatusFieldType::COUNT : return "Count";
+       }
+
+       dbgAssert(false) << "Trying to convert unknown orchestration status field to string.";
+       return "";
+   }
+
+   HealthCheckStatus
+   convertResultToHealthCheckStatus(UpdatesProcessResult result)
+   {
+       switch (result) {
+           case UpdatesProcessResult::SUCCESS : return HealthCheckStatus::HEALTHY;
+           case UpdatesProcessResult::UNSET : return HealthCheckStatus::IGNORED;
+           case UpdatesProcessResult::FAILED : return HealthCheckStatus::UNHEALTHY;
+           case UpdatesProcessResult::DEGRADED : return HealthCheckStatus::DEGRADED;
+       }
+
+       dbgAssert(false) << "Trying to convert unknown update process result field to health check status.";
+       return HealthCheckStatus::IGNORED;
+   }
+
+   HealthCheckStatus general_health_aggregated_status = HealthCheckStatus::HEALTHY;
+   HealthCheckStatusReply health_check_reply = HealthCheckStatusReply(
+       "Orchestration",
+       HealthCheckStatus::HEALTHY,
+       {}
+   );
    bool should_patch_report;
+   map<string, string> extended_status;
+   map<string, HealthCheckStatus> field_types_status;
};

HealthCheckManager::HealthCheckManager() : Component("HealthCheckManager"), pimpl(make_unique<Impl>()) {}
@@ -13,42 +13,13 @@
#include "mock/mock_mainloop.h"
#include "mock/mock_messaging.h"
#include "mock/mock_rest_api.h"
+#include "updates_process_event.h"

using namespace std;
using namespace testing;

USE_DEBUG_FLAG(D_HEALTH_CHECK);

-class TestHealthCheckStatusListener : public Listener<HealthCheckStatusEvent>
-{
-public:
-   void upon(const HealthCheckStatusEvent &) override {}
-
-   HealthCheckStatusReply
-   respond(const HealthCheckStatusEvent &) override
-   {
-       map<string, string> extended_status;
-       extended_status["team"] = team;
-       extended_status["city"] = city;
-       HealthCheckStatusReply reply(comp_name, status, extended_status);
-       return reply;
-   }
-
-   void setStatus(HealthCheckStatus new_status) { status = new_status; }
-
-   string getListenerName() const { return "TestHealthCheckStatusListener"; }
-
-private:
-   static const string comp_name;
-   HealthCheckStatus status = HealthCheckStatus::HEALTHY;
-   static const string team;
-   static const string city;
-};
-
-const string TestHealthCheckStatusListener::comp_name = "Test";
-const string TestHealthCheckStatusListener::team = "Hapoel";
-const string TestHealthCheckStatusListener::city = "Tel-Aviv";

class TestEnd {};

class HealthCheckManagerTest : public Test
@@ -56,8 +27,7 @@ class HealthCheckManagerTest : public Test
public:
    HealthCheckManagerTest()
    {
-       Debug::setNewDefaultStdout(&debug_output);
-       Debug::setUnitTestFlag(D_HEALTH_CHECK, Debug::DebugLevel::INFO);
+       Debug::setUnitTestFlag(D_HEALTH_CHECK, Debug::DebugLevel::NOISE);

        EXPECT_CALL(mock_ml, addRecurringRoutine(_, _, _, _, _)).WillRepeatedly(
            DoAll(SaveArg<2>(&health_check_periodic_routine), Return(1))
@@ -70,7 +40,6 @@ public:
        );

        env.preload();
-       event_listener.registerListener();

        env.init();

@@ -98,14 +67,12 @@ public:
    StrictMock<MockMainLoop> mock_ml;
    StrictMock<MockRestApi> mock_rest;
    StrictMock<MockMessaging> mock_message;
-   stringstream debug_output;
    ConfigComponent config;
    Config::I_Config *i_config = nullptr;
    ::Environment env;
    HealthCheckManager health_check_manager;
    I_Health_Check_Manager *i_health_check_manager;
    unique_ptr<ServerRest> health_check_server;
-   TestHealthCheckStatusListener event_listener;
};

TEST_F(HealthCheckManagerTest, runPeriodicHealthCheckTest)
@@ -142,7 +109,20 @@ TEST_F(HealthCheckManagerTest, runPeriodicHealthCheckTest)
    EXPECT_EQ(actual_body, expected_healthy_body);
    EXPECT_EQ("Healthy", aggregated_status_str);

-   event_listener.setStatus(HealthCheckStatus::DEGRADED);
+   UpdatesProcessEvent(
+       UpdatesProcessResult::DEGRADED,
+       UpdatesConfigType::SETTINGS,
+       UpdatesFailureReason::DOWNLOAD_FILE,
+       "setting.json",
+       "File not found"
+   ).notify();
+   UpdatesProcessEvent(
+       UpdatesProcessResult::DEGRADED,
+       UpdatesConfigType::MANIFEST,
+       UpdatesFailureReason::DOWNLOAD_FILE,
+       "manifest.json",
+       "File not found"
+   ).notify();
    try {
        health_check_periodic_routine();
    } catch (const TestEnd &t) {}
@@ -156,16 +136,16 @@ TEST_F(HealthCheckManagerTest, runPeriodicHealthCheckTest)
    " \"status\": \"Degraded\",\n"
    " \"errors\": [\n"
    " {\n"
-   " \"code\": \"Test city\",\n"
+   " \"code\": \"Orchestration Last Update\",\n"
    " \"message\": [\n"
-   " \"Tel-Aviv\"\n"
+   " \"Failed to download the file setting.json. Error: File not found\"\n"
    " ],\n"
    " \"internal\": true\n"
    " },\n"
    " {\n"
-   " \"code\": \"Test team\",\n"
+   " \"code\": \"Orchestration Manifest\",\n"
    " \"message\": [\n"
-   " \"Hapoel\"\n"
+   " \"Failed to download the file manifest.json. Error: File not found\"\n"
    " ],\n"
    " \"internal\": true\n"
    " }\n"
@@ -196,19 +176,24 @@ TEST_F(HealthCheckManagerTest, runOnDemandHealthCheckTest)
    config.preload();
    Singleton::Consume<Config::I_Config>::from(config)->loadConfiguration(ss);

+   UpdatesProcessEvent(
+       UpdatesProcessResult::FAILED,
+       UpdatesConfigType::MANIFEST,
+       UpdatesFailureReason::DOWNLOAD_FILE,
+       "manifest.json",
+       "File not found"
+   ).notify();
+
    stringstream is;
    is << "{}";
    health_check_server->performRestCall(is);

    string expected_status =
        "{\n"
-       " \"allComponentsHealthCheckReplies\": {\n"
-       " \"Test\": {\n"
-       " \"status\": \"Healthy\",\n"
-       " \"extendedStatus\": {\n"
-       " \"city\": \"Tel-Aviv\",\n"
-       " \"team\": \"Hapoel\"\n"
-       " }\n"
+       " \"Orchestration\": {\n"
+       " \"status\": \"Unhealthy\",\n"
+       " \"extendedStatus\": {\n"
+       " \"Manifest\": \"Failed to download the file manifest.json. Error: File not found\"\n"
        " }\n"
        " }\n"
        "}";
@@ -51,6 +51,7 @@ public:

private:
    I_DeclarativePolicy *i_declarative_policy = nullptr;
+   std::string profile_mode;
};

#endif // __FOG_COMMUNICATION_H__
@@ -26,6 +26,13 @@ operator<<(std::ostream &os, const Maybe<std::tuple<std::string, std::string, st
    return os;
}

+std::ostream &
+operator<<(
+    std::ostream &os, const Maybe<std::tuple<std::string, std::string, std::string, std::string, std::string>> &)
+{
+    return os;
+}
+
class MockDetailsResolver
    :
    public Singleton::Provide<I_DetailsResolver>::From<MockProvider<I_DetailsResolver>>
@@ -42,6 +49,8 @@ public:
    MOCK_METHOD0(getResolvedDetails, std::map<std::string, std::string>());
    MOCK_METHOD0(isVersionAboveR8110, bool());
    MOCK_METHOD0(parseNginxMetadata, Maybe<std::tuple<std::string, std::string, std::string>>());
+   MOCK_METHOD0(
+       readCloudMetadata, Maybe<std::tuple<std::string, std::string, std::string, std::string, std::string>>());
};

#endif // __MOCK_DETAILS_RESOLVER_H__
@@ -64,7 +64,7 @@ public:
        )
    );

-   typedef std::map<std::string, PortNumber> ServicePortMap;
+   typedef std::map<std::string, std::vector<PortNumber>> ServicePortMap;
    MOCK_METHOD0(getServiceToPortMap, ServicePortMap());
    MOCK_METHOD3(updateReconfStatus, void(int id, const std::string &service_name, ReconfStatus status));
    MOCK_METHOD4(
@@ -0,0 +1,130 @@
// Copyright (C) 2022 Check Point Software Technologies Ltd. All rights reserved.

// Licensed under the Apache License, Version 2.0 (the "License");
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifndef __UPDATES_PROCESS_EVENT_H__
#define __UPDATES_PROCESS_EVENT_H__

#include "event.h"
#include "singleton.h"
#include "config.h"
#include "debug.h"
#include "i_orchestration_status.h"
#include "health_check_status/health_check_status.h"
#include "customized_cereal_map.h"

USE_DEBUG_FLAG(D_UPDATES_PROCESS_REPORTER);

enum class UpdatesFailureReason {
    CHECK_UPDATE,
    REGISTRATION,
    ORCHESTRATION_SELF_UPDATE,
    GET_UPDATE_REQUEST,
    DOWNLOAD_FILE,
    HANDLE_FILE,
    INSTALLATION_QUEUE,
    INSTALL_PACKAGE,
    CHECKSUM_UNMATCHED,
    POLICY_CONFIGURATION,
    POLICY_FOG_CONFIGURATION,
    NONE

};

enum class UpdatesConfigType { MANIFEST, POLICY, SETTINGS, DATA, GENERAL };
enum class UpdatesProcessResult { UNSET, SUCCESS, FAILED, DEGRADED };

static inline std::string
convertUpdatesFailureReasonToStr(UpdatesFailureReason reason)
{
    switch (reason) {
        case UpdatesFailureReason::CHECK_UPDATE : return "CHECK_UPDATE";
        case UpdatesFailureReason::REGISTRATION : return "REGISTRATION";
        case UpdatesFailureReason::ORCHESTRATION_SELF_UPDATE : return "ORCHESTRATION_SELF_UPDATE";
        case UpdatesFailureReason::GET_UPDATE_REQUEST : return "GET_UPDATE_REQUEST";
        case UpdatesFailureReason::DOWNLOAD_FILE : return "DOWNLOAD_FILE";
        case UpdatesFailureReason::HANDLE_FILE : return "HANDLE_FILE";
        case UpdatesFailureReason::INSTALLATION_QUEUE : return "INSTALLATION_QUEUE";
        case UpdatesFailureReason::INSTALL_PACKAGE : return "INSTALL_PACKAGE";
        case UpdatesFailureReason::CHECKSUM_UNMATCHED : return "CHECKSUM_UNMATCHED";
        case UpdatesFailureReason::POLICY_CONFIGURATION : return "POLICY_CONFIGURATION";
        case UpdatesFailureReason::POLICY_FOG_CONFIGURATION : return "POLICY_FOG_CONFIGURATION";
        case UpdatesFailureReason::NONE : return "NONE";
    }

    dbgWarning(D_UPDATES_PROCESS_REPORTER) << "Trying to convert unknown updates failure reason to string.";
    return "";
}

static inline std::string
convertUpdatesConfigTypeToStr(UpdatesConfigType type)
{
    switch (type) {
        case UpdatesConfigType::MANIFEST : return "MANIFEST";
        case UpdatesConfigType::POLICY : return "POLICY";
        case UpdatesConfigType::SETTINGS : return "SETTINGS";
        case UpdatesConfigType::DATA : return "DATA";
        case UpdatesConfigType::GENERAL : return "GENERAL";
    }

    dbgWarning(D_UPDATES_PROCESS_REPORTER) << "Trying to convert unknown updates failure reason to string.";
    return "";
}

static inline std::string
convertUpdateProcessResultToStr(UpdatesProcessResult result)
{
    switch (result) {
        case UpdatesProcessResult::SUCCESS : return "SUCCESS";
        case UpdatesProcessResult::UNSET : return "UNSET";
        case UpdatesProcessResult::FAILED : return "FAILURE";
        case UpdatesProcessResult::DEGRADED : return "DEGRADED";
    }

    dbgWarning(D_UPDATES_PROCESS_REPORTER) << "Trying to convert unknown updates failure reason to string.";
    return "";
}

class UpdatesProcessEvent : public Event<UpdatesProcessEvent>
{
public:
    UpdatesProcessEvent() {}
    UpdatesProcessEvent(
        UpdatesProcessResult _result,
        UpdatesConfigType _type,
        UpdatesFailureReason _reason = UpdatesFailureReason::NONE,
        const std::string &_detail = "",
        const std::string &_description = "");

    ~UpdatesProcessEvent() {}

    UpdatesProcessResult getResult() const { return result; }
    UpdatesConfigType getType() const { return type; }
    UpdatesFailureReason getReason() const { return reason; }
    std::string getDetail() const { return detail; }
    std::string getDescription() const { return description; }

    OrchestrationStatusFieldType getStatusFieldType() const;
    OrchestrationStatusResult getOrchestrationStatusResult() const;

    std::string parseDescription() const;

private:
    UpdatesProcessResult result;
    UpdatesConfigType type;
    UpdatesFailureReason reason;
    std::string detail;
    std::string description;

};

#endif // __UPDATES_PROCESS_EVENT_H__
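The event is raised fire-and-forget; the calling pattern used throughout this change set (for example in the manifest-handling code and the unit tests) looks like the following, where the file name and error text are illustrative values and the header introduced above is assumed to be included:

UpdatesProcessEvent(
    UpdatesProcessResult::FAILED,
    UpdatesConfigType::MANIFEST,
    UpdatesFailureReason::DOWNLOAD_FILE,
    "manifest.json",     // detail: illustrative file name
    "File not found"     // description: illustrative error text
).notify();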
@@ -0,0 +1,63 @@
// Copyright (C) 2022 Check Point Software Technologies Ltd. All rights reserved.

// Licensed under the Apache License, Version 2.0 (the "License");
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifndef __UPDATES_PROCESS_REPORT_H__
#define __UPDATES_PROCESS_REPORT_H__

#include <sstream>
#include <string>

#include "singleton.h"
#include "i_time_get.h"
#include "updates_process_event.h"

class UpdatesProcessReport : Singleton::Consume<I_TimeGet>
{
public:
    UpdatesProcessReport(
        UpdatesProcessResult result,
        UpdatesConfigType type,
        UpdatesFailureReason reason,
        const std::string &description)
            :
        result(result), type(type), reason(reason), description(description)
    {
        time_stamp = Singleton::Consume<I_TimeGet>::by<UpdatesProcessReport>()->getWalltimeStr();
    }

    std::string
    toString() const
    {
        std::stringstream report;
        report
            << "["
            << time_stamp << "] - "
            << convertUpdateProcessResultToStr(result) << " | "
            << convertUpdatesConfigTypeToStr(type) << " | "
            << convertUpdatesFailureReasonToStr(reason) << " | "
            << description;

        return report.str();
    }

    UpdatesFailureReason getReason() const { return reason; }

private:
    UpdatesProcessResult result;
    UpdatesConfigType type;
    UpdatesFailureReason reason;
    std::string description;
    std::string time_stamp;
};

#endif // __UPDATES_PROCESS_EVENT_H__
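Given that constructor and toString(), a report for a failed manifest download renders roughly as the single line below; the timestamp format depends on I_TimeGet::getWalltimeStr(), and the exact text is an assumed illustration rather than captured output:

// [2024-01-01T00:00:00] - FAILURE | MANIFEST | DOWNLOAD_FILE | Failed to download the file manifest.json. Error: File not found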
@@ -0,0 +1,40 @@
// Copyright (C) 2022 Check Point Software Technologies Ltd. All rights reserved.

// Licensed under the Apache License, Version 2.0 (the "License");
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifndef __UPDATES_PROCESS_REPORTER_H__
#define __UPDATES_PROCESS_REPORTER_H__

#include <string>

#include "event.h"
#include "singleton.h"
#include "config.h"
#include "debug.h"
#include "i_orchestration_status.h"
#include "health_check_status/health_check_status.h"
#include "updates_process_event.h"
#include "updates_process_report.h"

class UpdatesProcessReporter : public Listener<UpdatesProcessEvent>
{
public:
    void upon(const UpdatesProcessEvent &event) override;

private:
    void sendReoprt();

    static std::vector<UpdatesProcessReport> reports;
    uint report_failure_count = 0;
};

#endif // __UPDATES_PROCESS_REPORTER_H__
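For the reporter to see these events it has to subscribe itself, using the same registerListener() call the other Listener<> implementations in this comparison use. A hedged wiring sketch (the surrounding initialization code is assumed and not part of this diff):

// Assumed wiring: subscribe the reporter so that every
// UpdatesProcessEvent(...).notify() reaches UpdatesProcessReporter::upon(),
// which accumulates UpdatesProcessReport entries until sendReoprt() flushes them.
UpdatesProcessReporter updates_reporter;
updates_reporter.registerListener();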
@@ -21,6 +21,7 @@
#include "version.h"
#include "log_generator.h"
#include "orchestration_comp.h"
+#include "updates_process_event.h"

using namespace std;
using namespace ReportIS;
@@ -219,6 +220,13 @@ ManifestController::Impl::updateManifest(const string &new_manifest_file)
    if (isIgnoreFile(new_manifest_file)) {
        if (!orchestration_tools->copyFile(new_manifest_file, manifest_file_path)) {
            dbgWarning(D_ORCHESTRATOR) << "Failed to copy a new manifest file";
+           UpdatesProcessEvent(
+               UpdatesProcessResult::FAILED,
+               UpdatesConfigType::MANIFEST,
+               UpdatesFailureReason::HANDLE_FILE,
+               new_manifest_file,
+               "Failed to copy a new manifest file"
+           ).notify();
            return false;
        }
        return true;
@@ -237,6 +245,13 @@ ManifestController::Impl::updateManifest(const string &new_manifest_file)

    if (!orchestration_tools->copyFile(new_manifest_file, manifest_file_path)) {
        dbgWarning(D_ORCHESTRATOR) << "Failed to copy a new manifest file";
+       UpdatesProcessEvent(
+           UpdatesProcessResult::FAILED,
+           UpdatesConfigType::MANIFEST,
+           UpdatesFailureReason::HANDLE_FILE,
+           new_manifest_file,
+           "Failed to copy a new manifest file"
+       ).notify();
        return false;
    }
    return true;
@@ -245,6 +260,13 @@ ManifestController::Impl::updateManifest(const string &new_manifest_file)
    Maybe<map<string, Package>> parsed_manifest = orchestration_tools->loadPackagesFromJson(new_manifest_file);
    if (!parsed_manifest.ok()) {
        dbgWarning(D_ORCHESTRATOR) << "Failed to parse the new manifest file. File: " << new_manifest_file;
+       UpdatesProcessEvent(
+           UpdatesProcessResult::FAILED,
+           UpdatesConfigType::MANIFEST,
+           UpdatesFailureReason::HANDLE_FILE,
+           new_manifest_file,
+           "Failed to parse the new manifest file"
+       ).notify();
        return false;
    }

@@ -332,6 +354,13 @@ ManifestController::Impl::updateManifest(const string &new_manifest_file)
        dbgWarning(D_ORCHESTRATOR)
            << "Failed building installation queue. Error: "
            << installation_queue_res.getErr();
+       UpdatesProcessEvent(
+           UpdatesProcessResult::FAILED,
+           UpdatesConfigType::MANIFEST,
+           UpdatesFailureReason::INSTALLATION_QUEUE,
+           "",
+           installation_queue_res.getErr()
+       ).notify();
        return false;
    }
    const vector<Package> &installation_queue = installation_queue_res.unpack();
@@ -447,11 +476,25 @@ ManifestController::Impl::changeManifestFile(const string &new_manifest_file)
    dbgDebug(D_ORCHESTRATOR) << "Writing new manifest to file";
    if (!orchestration_tools->copyFile(new_manifest_file, manifest_file_path)) {
        dbgWarning(D_ORCHESTRATOR) << "Failed write new manifest to file";
+       UpdatesProcessEvent(
+           UpdatesProcessResult::FAILED,
+           UpdatesConfigType::MANIFEST,
+           UpdatesFailureReason::HANDLE_FILE,
+           new_manifest_file,
+           "Failed write new manifest to file"
+       ).notify();
        return false;
    }

    if (!orchestration_tools->isNonEmptyFile(manifest_file_path)) {
        dbgWarning(D_ORCHESTRATOR) << "Failed to get manifest file data";
+       UpdatesProcessEvent(
+           UpdatesProcessResult::FAILED,
+           UpdatesConfigType::MANIFEST,
+           UpdatesFailureReason::HANDLE_FILE,
+           manifest_file_path,
+           "Failed to get manifest file data"
+       ).notify();
        return false;
    }

@@ -281,13 +281,7 @@ TEST_F(ManifestControllerTest, badChecksum)
    EXPECT_CALL(mock_orchestration_tools, doesFileExist("/etc/cp/packages/my/my")).WillOnce(Return(false));

    string hostname = "hostname";
-   string empty_err;
-   EXPECT_CALL(mock_status, getManifestError()).WillOnce(ReturnRef(empty_err));
    EXPECT_CALL(mock_details_resolver, getHostname()).WillOnce(Return( Maybe<string>(hostname)));
-   EXPECT_CALL(
-       mock_status,
-       setFieldStatus(OrchestrationStatusFieldType::MANIFEST, OrchestrationStatusResult::FAILED, _)
-   );
    EXPECT_FALSE(i_manifest_controller->updateManifest(file_name));
}

@@ -710,10 +704,6 @@ TEST_F(ManifestControllerTest, selfUpdateWithOldCopyWithError)
    string hostname = "hostname";
    string empty_err;
    EXPECT_CALL(mock_status, getManifestError()).WillOnce(ReturnRef(empty_err));
-   EXPECT_CALL(
-       mock_status,
-       setFieldStatus(OrchestrationStatusFieldType::MANIFEST, OrchestrationStatusResult::FAILED, _)
-   );
    load(manifest, new_services);

    EXPECT_CALL(mock_orchestration_tools,
@@ -932,10 +922,6 @@ TEST_F(ManifestControllerTest, badInstall)
    string empty_err;
    EXPECT_CALL(mock_status, getManifestError()).WillOnce(ReturnRef(empty_err));
    EXPECT_CALL(mock_details_resolver, getHostname()).WillOnce(Return( Maybe<string>(hostname)));
-   EXPECT_CALL(
-       mock_status,
-       setFieldStatus(OrchestrationStatusFieldType::MANIFEST, OrchestrationStatusResult::FAILED, _)
-   );

    string corrupted_packages_manifest =
        "{"
@@ -1008,12 +994,6 @@ TEST_F(ManifestControllerTest, failToDownloadWithselfUpdate)
        doesFileExist("/etc/cp/packages/orchestration/orchestration")
    ).WillOnce(Return(false));
    EXPECT_CALL(mock_details_resolver, getHostname()).WillOnce(Return(string("hostname")));
-   EXPECT_CALL(
-       mock_status,
-       setFieldStatus(OrchestrationStatusFieldType::MANIFEST, OrchestrationStatusResult::FAILED, _)
-   );
-   string not_error;
-   EXPECT_CALL(mock_status, getManifestError()).WillOnce(ReturnRef(not_error));
    EXPECT_FALSE(i_manifest_controller->updateManifest(file_name));
}

@@ -1404,12 +1384,6 @@ TEST_F(ManifestControllerTest, failureOnDownloadSharedObject)
    ).WillOnce(Return(false));
    EXPECT_CALL(mock_details_resolver, getHostname()).WillOnce(Return(string("hostname")));
    EXPECT_CALL(mock_orchestration_tools, removeFile("/tmp/temp_file1")).WillOnce(Return(true));
-   EXPECT_CALL(
-       mock_status,
-       setFieldStatus(OrchestrationStatusFieldType::MANIFEST, OrchestrationStatusResult::FAILED, _)
-   );
-   string not_error;
-   EXPECT_CALL(mock_status, getManifestError()).WillOnce(ReturnRef(not_error));

    EXPECT_FALSE(i_manifest_controller->updateManifest(file_name));
}
@@ -2538,12 +2512,6 @@ TEST_F(ManifestDownloadTest, download_relative_path)
        doesFileExist("/etc/cp/packages/orchestration/orchestration")
    ).WillOnce(Return(false));
    EXPECT_CALL(mock_details_resolver, getHostname()).WillOnce(Return(string("hostname")));
-   EXPECT_CALL(
-       mock_status,
-       setFieldStatus(OrchestrationStatusFieldType::MANIFEST, OrchestrationStatusResult::FAILED, _)
-   );
-   string not_error;
-   EXPECT_CALL(mock_status, getManifestError()).WillOnce(ReturnRef(not_error));

    EXPECT_FALSE(i_manifest_controller->updateManifest(manifest_file.fname));
}
@@ -2589,8 +2557,6 @@ TEST_F(ManifestDownloadTest, download_relative_path_no_fog_domain)
        mock_orchestration_tools,
        doesFileExist("/etc/cp/packages/orchestration/orchestration")
    ).WillOnce(Return(false));
-   string not_error;
-   EXPECT_CALL(mock_status, getManifestError()).WillOnce(ReturnRef(not_error));

    checkIfFileExistsCall(new_packages.at("orchestration"));

@@ -2604,10 +2570,6 @@ TEST_F(ManifestDownloadTest, download_relative_path_no_fog_domain)
        )
    ).WillOnce(Return(downloaded_package));
    EXPECT_CALL(mock_details_resolver, getHostname()).WillOnce(Return(string("hostname")));
-   EXPECT_CALL(
-       mock_status,
-       setFieldStatus(OrchestrationStatusFieldType::MANIFEST, OrchestrationStatusResult::FAILED, _)
-   );

    EXPECT_FALSE(i_manifest_controller->updateManifest(manifest_file.fname));
}
@@ -19,6 +19,7 @@
#include "config.h"
#include "agent_details.h"
#include "orchestration_comp.h"
+#include "updates_process_event.h"

using namespace std;

@@ -174,14 +175,13 @@ ManifestHandler::downloadPackages(const map<string, Package> &new_packages_to_do
            " software update failed. Agent is running previous software. Contact Check Point support.";
        }

-       auto orchestration_status = Singleton::Consume<I_OrchestrationStatus>::by<ManifestHandler>();
-       if (orchestration_status->getManifestError().find("Gateway was not fully deployed") == string::npos) {
-           orchestration_status->setFieldStatus(
-               OrchestrationStatusFieldType::MANIFEST,
-               OrchestrationStatusResult::FAILED,
+       UpdatesProcessEvent(
+           UpdatesProcessResult::FAILED,
+           UpdatesConfigType::MANIFEST,
+           UpdatesFailureReason::DOWNLOAD_FILE,
+           package.getName(),
                install_error
-           );
-       }
+       ).notify();
        return genError(
            "Failed to download installation package. Package: " +
            package.getName() +
@@ -219,11 +219,13 @@ ManifestHandler::installPackage(
            err_hostname +
            " software update failed. Agent is running previous software. Contact Check Point support.";
        if (orchestration_status->getManifestError().find("Gateway was not fully deployed") == string::npos) {
-           orchestration_status->setFieldStatus(
-               OrchestrationStatusFieldType::MANIFEST,
-               OrchestrationStatusResult::FAILED,
+           UpdatesProcessEvent(
+               UpdatesProcessResult::FAILED,
+               UpdatesConfigType::MANIFEST,
+               UpdatesFailureReason::INSTALL_PACKAGE,
+               package_name,
                install_error
-           );
+           ).notify();
        }
    }
    return self_update_status;
@@ -289,11 +291,13 @@ ManifestHandler::installPackage(

    auto orchestration_status = Singleton::Consume<I_OrchestrationStatus>::by<ManifestHandler>();
    if (orchestration_status->getManifestError().find("Gateway was not fully deployed") == string::npos) {
-       orchestration_status->setFieldStatus(
-           OrchestrationStatusFieldType::MANIFEST,
-           OrchestrationStatusResult::FAILED,
+       UpdatesProcessEvent(
+           UpdatesProcessResult::FAILED,
+           UpdatesConfigType::MANIFEST,
+           UpdatesFailureReason::INSTALL_PACKAGE,
+           package_name,
            install_error
-       );
+       ).notify();
    }
    return false;
}
@@ -13,6 +13,7 @@
#include "mock/mock_agent_details.h"
#include "mock/mock_mainloop.h"
#include "mock/mock_rest_api.h"
+#include "updates_process_event.h"

using namespace testing;
using namespace std;
@@ -200,6 +201,19 @@ TEST_F(OrchestrationStatusTest, checkUpdateStatus)
    auto result = orchestrationStatusFileToString();
    EXPECT_EQ(buildOrchestrationStatusJSON("attempt time", "Succeeded ", "current time"), result);
}
+
+TEST_F(OrchestrationStatusTest, checkUpdateStatusByRaiseEvent)
+{
+    init();
+    EXPECT_CALL(time, getLocalTimeStr())
+        .WillOnce(Return(string("attempt time")))
+        .WillOnce(Return(string("current time")));
+
+    i_orchestration_status->setLastUpdateAttempt();
+
+    UpdatesProcessEvent(UpdatesProcessResult::SUCCESS, UpdatesConfigType::GENERAL).notify();
+    auto result = orchestrationStatusFileToString();
+    EXPECT_EQ(buildOrchestrationStatusJSON("attempt time", "Succeeded ", "current time"), result);
+}

TEST_F(OrchestrationStatusTest, recoveryFields)
{
@@ -482,3 +496,69 @@ TEST_F(OrchestrationStatusTest, setAllFields)
    EXPECT_EQ(i_orchestration_status->getServiceSettings(), service_map_a);
    EXPECT_EQ(i_orchestration_status->getRegistrationDetails(), agent_details);
}
+
+TEST_F(OrchestrationStatusTest, checkErrorByRaiseEvent)
+{
+    init();
+    string fog_address = "http://fog.address";
+    string registar_error = "Fail to registar";
+    string manifest_error = "Fail to achieve manifest";
+    string last_update_error = "Fail to update";
+
+    EXPECT_CALL(time, getLocalTimeStr()).Times(3).WillRepeatedly(Return(string("Time")));
+
+    UpdatesProcessEvent(UpdatesProcessResult::SUCCESS, UpdatesConfigType::GENERAL).notify();
+    i_orchestration_status->setIsConfigurationUpdated(
+        EnumArray<OrchestrationStatusConfigType, bool>(true, true, true)
+    );
+    UpdatesProcessEvent(
+        UpdatesProcessResult::FAILED,
+        UpdatesConfigType::GENERAL,
+        UpdatesFailureReason::NONE,
+        "",
+        last_update_error
+    ).notify();
+    i_orchestration_status->setIsConfigurationUpdated(
+        EnumArray<OrchestrationStatusConfigType, bool>(false, false, false)
+    );
+
+    i_orchestration_status->setUpgradeMode("Online upgrades");
+    i_orchestration_status->setFogAddress(fog_address);
+
+    i_orchestration_status->setUpgradeMode("Online upgrades");
+    i_orchestration_status->setFogAddress(fog_address);
+
+    UpdatesProcessEvent(
+        UpdatesProcessResult::FAILED,
+        UpdatesConfigType::GENERAL,
+        UpdatesFailureReason::REGISTRATION,
+        "",
+        registar_error
+    ).notify();
+    UpdatesProcessEvent(
+        UpdatesProcessResult::FAILED,
+        UpdatesConfigType::MANIFEST,
+        UpdatesFailureReason::NONE,
+        "",
+        manifest_error
+    ).notify();
+    EXPECT_EQ(i_orchestration_status->getManifestError(), manifest_error);
+
+    auto result = orchestrationStatusFileToString();
+    EXPECT_EQ(
+        buildOrchestrationStatusJSON(
+            "None",
+            "Failed. Reason: " + last_update_error,
+            "Time",
+            "Time",
+            "",
+            "Time",
+            "Time",
+            "Online upgrades",
+            fog_address,
+            "Failed. Reason: Registration failed. Error: " + registar_error,
+            "Failed. Reason: " + manifest_error
+        ),
+        result
+    );
+}
@@ -19,6 +19,8 @@

#include "debug.h"
#include "config.h"
+#include "updates_process_event.h"
+#include "health_check_status/health_check_status.h"

using namespace cereal;
using namespace std;
@@ -383,7 +385,10 @@ private:
    map<string, string> service_settings;
};

-class OrchestrationStatus::Impl : Singleton::Provide<I_OrchestrationStatus>::From<OrchestrationStatus>
+class OrchestrationStatus::Impl
+    :
+    Singleton::Provide<I_OrchestrationStatus>::From<OrchestrationStatus>,
+    public Listener<UpdatesProcessEvent>
{
public:
    void
@@ -462,6 +467,13 @@ public:
            },
            "Write Orchestration status file"
        );
+       registerListener();
+   }
+
+   void
+   upon(const UpdatesProcessEvent &event) override
+   {
+       setFieldStatus(event.getStatusFieldType(), event.getOrchestrationStatusResult(), event.parseDescription());
    }

private:
@@ -42,6 +42,8 @@
|
|||||||
#include "hybrid_communication.h"
|
#include "hybrid_communication.h"
|
||||||
#include "agent_core_utilities.h"
|
#include "agent_core_utilities.h"
|
||||||
#include "fog_communication.h"
|
#include "fog_communication.h"
|
||||||
|
#include "updates_process_event.h"
|
||||||
|
#include "updates_process_reporter.h"
|
||||||
|
|
||||||
using namespace std;
|
using namespace std;
|
||||||
using namespace chrono;
|
using namespace chrono;
|
||||||
@@ -53,85 +55,6 @@ USE_DEBUG_FLAG(D_ORCHESTRATOR);
|
|||||||
static string fw_last_update_time = "";
|
static string fw_last_update_time = "";
|
||||||
#endif // gaia || smb
|
#endif // gaia || smb
|
||||||
|
|
||||||
class HealthCheckStatusListener : public Listener<HealthCheckStatusEvent>
|
|
||||||
{
|
|
||||||
public:
|
|
||||||
void upon(const HealthCheckStatusEvent &) override {}
|
|
||||||
|
|
||||||
HealthCheckStatusReply
|
|
||||||
respond(const HealthCheckStatusEvent &) override
|
|
||||||
{
|
|
||||||
return HealthCheckStatusReply(comp_name, status, extended_status);
|
|
||||||
}
|
|
||||||
|
|
||||||
string getListenerName() const override { return "HealthCheckStatusListener"; }
|
|
||||||
|
|
||||||
void
|
|
||||||
setStatus(
|
|
||||||
HealthCheckStatus _status,
|
|
||||||
OrchestrationStatusFieldType _status_field_type,
|
|
||||||
const string &_status_description = "Success")
|
|
||||||
{
|
|
||||||
string status_field_type_str = convertOrchestrationStatusFieldTypeToStr(_status_field_type);
|
|
||||||
extended_status[status_field_type_str] = _status_description;
|
|
||||||
field_types_status[status_field_type_str] = _status;
|
|
||||||
|
|
||||||
switch(_status) {
|
|
||||||
case HealthCheckStatus::UNHEALTHY: {
|
|
||||||
status = HealthCheckStatus::UNHEALTHY;
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
case HealthCheckStatus::DEGRADED: {
|
|
||||||
for (const auto &type_status : field_types_status) {
|
|
||||||
if ((type_status.first != status_field_type_str)
|
|
||||||
&& (type_status.second == HealthCheckStatus::UNHEALTHY))
|
|
||||||
{
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
status = HealthCheckStatus::DEGRADED;
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
case HealthCheckStatus::HEALTHY: {
|
|
||||||
for (const auto &type_status : field_types_status) {
|
|
||||||
if ((type_status.first != status_field_type_str)
|
|
||||||
&& (type_status.second == HealthCheckStatus::UNHEALTHY
|
|
||||||
|| type_status.second == HealthCheckStatus::DEGRADED)
|
|
||||||
)
|
|
||||||
{
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
status = HealthCheckStatus::HEALTHY;
|
|
||||||
}
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
case HealthCheckStatus::IGNORED: {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private:
|
|
||||||
string
|
|
||||||
convertOrchestrationStatusFieldTypeToStr(OrchestrationStatusFieldType type)
|
|
||||||
{
|
|
||||||
switch (type) {
|
|
||||||
case OrchestrationStatusFieldType::REGISTRATION : return "Registration";
|
|
||||||
case OrchestrationStatusFieldType::MANIFEST : return "Manifest";
|
|
||||||
case OrchestrationStatusFieldType::LAST_UPDATE : return "Last Update";
|
|
||||||
case OrchestrationStatusFieldType::COUNT : return "Count";
|
|
||||||
}
|
|
||||||
|
|
||||||
dbgError(D_ORCHESTRATOR) << "Trying to convert unknown orchestration status field to string.";
|
|
||||||
return "";
|
|
||||||
}
|
|
||||||
|
|
||||||
string comp_name = "Orchestration";
|
|
||||||
HealthCheckStatus status = HealthCheckStatus::IGNORED;
|
|
||||||
map<string, string> extended_status;
|
|
||||||
map<string, HealthCheckStatus> field_types_status;
|
|
||||||
};
|
|
||||||
|
|
||||||
class SetAgentUninstall
|
class SetAgentUninstall
|
||||||
:
|
:
|
||||||
public ServerRest,
|
public ServerRest,
|
||||||
@@ -203,7 +126,7 @@ public:
|
|||||||
loadFogAddress();
|
loadFogAddress();
|
||||||
|
|
||||||
Singleton::Consume<I_MainLoop>::by<OrchestrationComp>()->addOneTimeRoutine(
|
Singleton::Consume<I_MainLoop>::by<OrchestrationComp>()->addOneTimeRoutine(
|
||||||
I_MainLoop::RoutineType::RealTime,
|
I_MainLoop::RoutineType::System,
|
||||||
[this] () { run(); },
|
[this] () { run(); },
|
||||||
"Orchestration runner",
|
"Orchestration runner",
|
||||||
true
|
true
|
||||||
@@ -257,6 +180,13 @@ private:
|
|||||||
<< "Failed to load Orchestration Policy. Error: "
|
<< "Failed to load Orchestration Policy. Error: "
|
||||||
<< maybe_policy.getErr()
|
<< maybe_policy.getErr()
|
||||||
<< "Trying to load from backup.";
|
<< "Trying to load from backup.";
|
||||||
|
UpdatesProcessEvent(
|
||||||
|
UpdatesProcessResult::FAILED,
|
||||||
|
UpdatesConfigType::POLICY,
|
||||||
|
UpdatesFailureReason::POLICY_CONFIGURATION,
|
||||||
|
orchestration_policy_file,
|
||||||
|
maybe_policy.getErr()
|
||||||
|
).notify();
|
||||||
return loadOrchestrationPolicyFromBackup();
|
return loadOrchestrationPolicyFromBackup();
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -280,6 +210,13 @@ private:
|
|||||||
return maybe_policy;
|
return maybe_policy;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
UpdatesProcessEvent(
|
||||||
|
UpdatesProcessResult::FAILED,
|
||||||
|
UpdatesConfigType::POLICY,
|
||||||
|
UpdatesFailureReason::POLICY_CONFIGURATION,
|
||||||
|
orchestration_policy_file + backup_ext,
|
||||||
|
maybe_policy.getErr()
|
||||||
|
).notify();
|
||||||
return genError("Failed to load Orchestration policy from backup.");
|
return genError("Failed to load Orchestration policy from backup.");
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -337,17 +274,13 @@ private:
|
|||||||
<< new_manifest_file.getErr()
|
<< new_manifest_file.getErr()
|
||||||
<< " Presenting the next message to the user: "
|
<< " Presenting the next message to the user: "
|
||||||
<< install_error;
|
<< install_error;
|
||||||
i_orchestration_status->setFieldStatus(
|
UpdatesProcessEvent(
|
||||||
OrchestrationStatusFieldType::MANIFEST,
|
UpdatesProcessResult::FAILED,
|
||||||
OrchestrationStatusResult::FAILED,
|
UpdatesConfigType::MANIFEST,
|
||||||
install_error
|
UpdatesFailureReason::DOWNLOAD_FILE,
|
||||||
);
|
resource_file.getFileName(),
|
||||||
|
new_manifest_file.getErr()
|
||||||
health_check_status_listener.setStatus(
|
).notify();
|
||||||
HealthCheckStatus::UNHEALTHY,
|
|
||||||
OrchestrationStatusFieldType::MANIFEST,
|
|
||||||
install_error
|
|
||||||
);
|
|
||||||
|
|
||||||
return genError(install_error);
|
return genError(install_error);
|
||||||
}
|
}
|
||||||
@@ -372,23 +305,12 @@ private:
|
|||||||
<< "Manifest failed to be updated. Presenting the next message to the user: "
|
<< "Manifest failed to be updated. Presenting the next message to the user: "
|
||||||
<< install_error;
|
<< install_error;
|
||||||
|
|
||||||
health_check_status_listener.setStatus(
|
|
||||||
HealthCheckStatus::UNHEALTHY,
|
|
||||||
OrchestrationStatusFieldType::MANIFEST,
|
|
||||||
install_error
|
|
||||||
);
|
|
||||||
|
|
||||||
return genError(install_error);
|
return genError(install_error);
|
||||||
}
|
}
|
||||||
|
UpdatesProcessEvent(
|
||||||
i_orchestration_status->setFieldStatus(
|
UpdatesProcessResult::SUCCESS,
|
||||||
OrchestrationStatusFieldType::MANIFEST,
|
UpdatesConfigType::MANIFEST
|
||||||
OrchestrationStatusResult::SUCCESS
|
).notify();
|
||||||
);
|
|
||||||
health_check_status_listener.setStatus(
|
|
||||||
HealthCheckStatus::HEALTHY,
|
|
||||||
OrchestrationStatusFieldType::MANIFEST
|
|
||||||
);
|
|
||||||
|
|
||||||
ifstream restart_watchdog_orch(filesystem_prefix + "/orchestration/restart_watchdog");
|
ifstream restart_watchdog_orch(filesystem_prefix + "/orchestration/restart_watchdog");
|
||||||
if (restart_watchdog_orch.good()) {
|
if (restart_watchdog_orch.good()) {
|
||||||
@@ -473,6 +395,13 @@ private:
|
|||||||
if (!updateFogAddress(policy.getFogAddress())) {
|
if (!updateFogAddress(policy.getFogAddress())) {
|
||||||
dbgWarning(D_ORCHESTRATOR) << "Failed to restore the old Fog address.";
|
dbgWarning(D_ORCHESTRATOR) << "Failed to restore the old Fog address.";
|
||||||
}
|
}
|
||||||
|
UpdatesProcessEvent(
|
||||||
|
UpdatesProcessResult::FAILED,
|
||||||
|
UpdatesConfigType::POLICY,
|
||||||
|
UpdatesFailureReason::POLICY_FOG_CONFIGURATION,
|
||||||
|
orchestration_policy.getFogAddress(),
|
||||||
|
"Failed to update the new Fog address."
|
||||||
|
).notify();
|
||||||
return "";
|
return "";
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -499,13 +428,19 @@ private:
|
|||||||
// Handling policy update.
|
// Handling policy update.
|
||||||
dbgInfo(D_ORCHESTRATOR) << "There is a new policy file.";
|
dbgInfo(D_ORCHESTRATOR) << "There is a new policy file.";
|
||||||
GetResourceFile resource_file(GetResourceFile::ResourceFileType::POLICY);
|
GetResourceFile resource_file(GetResourceFile::ResourceFileType::POLICY);
|
||||||
Maybe<string> new_policy_file =
|
Maybe<string> new_policy_file = Singleton::Consume<I_Downloader>::by<OrchestrationComp>()->downloadFile(
|
||||||
Singleton::Consume<I_Downloader>::by<OrchestrationComp>()->downloadFile(
|
|
||||||
new_policy.unpack(),
|
new_policy.unpack(),
|
||||||
I_OrchestrationTools::SELECTED_CHECKSUM_TYPE,
|
I_OrchestrationTools::SELECTED_CHECKSUM_TYPE,
|
||||||
resource_file
|
resource_file
|
||||||
);
|
);
|
||||||
if (!new_policy_file.ok()) {
|
if (!new_policy_file.ok()) {
|
||||||
|
UpdatesProcessEvent(
|
||||||
|
UpdatesProcessResult::FAILED,
|
||||||
|
UpdatesConfigType::POLICY,
|
||||||
|
UpdatesFailureReason::DOWNLOAD_FILE,
|
||||||
|
resource_file.getFileName(),
|
||||||
|
new_policy_file.getErr()
|
||||||
|
).notify();
|
||||||
return genError("Failed to download the new policy file. Error: " + new_policy_file.getErr());
|
return genError("Failed to download the new policy file. Error: " + new_policy_file.getErr());
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -564,6 +499,13 @@ private:
|
|||||||
<< LogField("policyVersion", updated_policy_version)
|
<< LogField("policyVersion", updated_policy_version)
|
||||||
<< LogField("previousPolicyVersion", old_policy_version);
|
<< LogField("previousPolicyVersion", old_policy_version);
|
||||||
|
|
||||||
|
UpdatesProcessEvent(
|
||||||
|
UpdatesProcessResult::FAILED,
|
||||||
|
UpdatesConfigType::POLICY,
|
||||||
|
UpdatesFailureReason::POLICY_CONFIGURATION,
|
||||||
|
updated_policy_version,
|
||||||
|
res.getErr()
|
||||||
|
).notify();
|
||||||
return genError(error_str);
|
return genError(error_str);
|
||||||
}
|
}
|
||||||
i_service_controller->moveChangedPolicies();
|
i_service_controller->moveChangedPolicies();
|
||||||
@@ -648,6 +590,11 @@ private:
|
|||||||
"Send policy update report"
|
"Send policy update report"
|
||||||
);
|
);
|
||||||
|
|
||||||
|
UpdatesProcessEvent(
|
||||||
|
UpdatesProcessResult::SUCCESS,
|
||||||
|
UpdatesConfigType::POLICY
|
||||||
|
).notify();
|
||||||
|
|
||||||
dbgInfo(D_ORCHESTRATOR) << "Policy update report was successfully sent to fog";
|
dbgInfo(D_ORCHESTRATOR) << "Policy update report was successfully sent to fog";
|
||||||
|
|
||||||
return Maybe<void>();
|
return Maybe<void>();
|
||||||
@@ -683,10 +630,24 @@ private:
|
|||||||
);
|
);
|
||||||
|
|
||||||
if (!new_data_files.ok()) {
|
if (!new_data_files.ok()) {
|
||||||
|
UpdatesProcessEvent(
|
||||||
|
UpdatesProcessResult::FAILED,
|
||||||
|
UpdatesConfigType::DATA,
|
||||||
|
UpdatesFailureReason::DOWNLOAD_FILE,
|
||||||
|
resource_file.getFileName(),
|
||||||
|
new_data_files.getErr()
|
||||||
|
).notify();
|
||||||
return genError("Failed to download new data file, Error: " + new_data_files.getErr());
|
return genError("Failed to download new data file, Error: " + new_data_files.getErr());
|
||||||
}
|
}
|
||||||
auto new_data_file_input = i_orchestration_tools->readFile(new_data_files.unpack());
|
auto new_data_file_input = i_orchestration_tools->readFile(new_data_files.unpack());
|
||||||
if (!new_data_file_input.ok()) {
|
if (!new_data_file_input.ok()) {
|
||||||
|
UpdatesProcessEvent(
|
||||||
|
UpdatesProcessResult::FAILED,
|
||||||
|
UpdatesConfigType::DATA,
|
||||||
|
UpdatesFailureReason::HANDLE_FILE,
|
||||||
|
resource_file.getFileName(),
|
||||||
|
"Failed to read new data file, Error: " + new_data_file_input.getErr()
|
||||||
|
).notify();
|
||||||
return genError("Failed to read new data file, Error: " + new_data_file_input.getErr());
|
return genError("Failed to read new data file, Error: " + new_data_file_input.getErr());
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -702,21 +663,35 @@ private:
|
|||||||
<< e.what()
|
<< e.what()
|
||||||
<< ". Content: "
|
<< ". Content: "
|
||||||
<< new_data_files.unpack();
|
<< new_data_files.unpack();
|
||||||
|
UpdatesProcessEvent(
|
||||||
|
UpdatesProcessResult::FAILED,
|
||||||
|
UpdatesConfigType::DATA,
|
||||||
|
UpdatesFailureReason::HANDLE_FILE,
|
||||||
|
new_data_files.unpack(),
|
||||||
|
string("Failed to load data from JSON file, Error: ") + e.what()
|
||||||
|
).notify();
|
||||||
return genError(e.what());
|
return genError(e.what());
|
||||||
}
|
}
|
||||||
|
|
||||||
for (const auto &data_file : parsed_data) {
|
for (const auto &data_file : parsed_data) {
|
||||||
const string data_file_save_path = getPolicyConfigPath(data_file.first, Config::ConfigFileType::Data);
|
const string data_file_save_path = getPolicyConfigPath(data_file.first, Config::ConfigFileType::Data);
|
||||||
Maybe<string> new_data_file =
|
Maybe<string> new_data_file =
|
||||||
Singleton::Consume<I_Downloader>::by<OrchestrationComp>()->downloadFileFromURL(
|
Singleton::Consume<I_Downloader>::by<OrchestrationComp>()->downloadFileFromURL(
|
||||||
data_file.second.getDownloadPath(),
|
data_file.second.getDownloadPath(),
|
||||||
data_file.second.getChecksum(),
|
data_file.second.getChecksum(),
|
||||||
I_OrchestrationTools::SELECTED_CHECKSUM_TYPE,
|
I_OrchestrationTools::SELECTED_CHECKSUM_TYPE,
|
||||||
"data_" + data_file.first
|
"data_" + data_file.first
|
||||||
);
|
);
|
||||||
|
|
||||||
if (!new_data_file.ok()) {
|
if (!new_data_file.ok()) {
|
||||||
dbgWarning(D_ORCHESTRATOR) << "Failed to download the " << data_file.first << " data file.";
|
dbgWarning(D_ORCHESTRATOR) << "Failed to download the " << data_file.first << " data file.";
|
||||||
|
UpdatesProcessEvent(
|
||||||
|
UpdatesProcessResult::FAILED,
|
||||||
|
UpdatesConfigType::DATA,
|
||||||
|
UpdatesFailureReason::DOWNLOAD_FILE,
|
||||||
|
data_file.first,
|
||||||
|
new_data_file.getErr()
|
||||||
|
).notify();
|
||||||
return new_data_file.passErr();
|
return new_data_file.passErr();
|
||||||
}
|
}
|
||||||
auto data_new_checksum = getChecksum(new_data_file.unpack());
|
auto data_new_checksum = getChecksum(new_data_file.unpack());
|
||||||
@@ -729,6 +704,16 @@ private:
|
|||||||
<< data_new_checksum;
|
<< data_new_checksum;
|
||||||
|
|
||||||
dbgWarning(D_ORCHESTRATOR) << current_error.str();
|
dbgWarning(D_ORCHESTRATOR) << current_error.str();
|
||||||
|
UpdatesProcessEvent(
|
||||||
|
UpdatesProcessResult::FAILED,
|
||||||
|
UpdatesConfigType::DATA,
|
||||||
|
UpdatesFailureReason::CHECKSUM_UNMATCHED,
|
||||||
|
data_file.first,
|
||||||
|
" Expected checksum: " +
|
||||||
|
data_file.second.getChecksum() +
|
||||||
|
". Downloaded checksum: " +
|
||||||
|
data_new_checksum
|
||||||
|
).notify();
|
||||||
return genError(current_error.str());
|
return genError(current_error.str());
|
||||||
}
|
}
|
||||||
if (!i_orchestration_tools->copyFile(new_data_file.unpack(), data_file_save_path)) {
|
if (!i_orchestration_tools->copyFile(new_data_file.unpack(), data_file_save_path)) {
|
||||||
@@ -741,6 +726,10 @@ private:
|
|||||||
dbgWarning(D_ORCHESTRATOR) << "Failed to copy a new agents' data file to " << data_file_path;
|
dbgWarning(D_ORCHESTRATOR) << "Failed to copy a new agents' data file to " << data_file_path;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
UpdatesProcessEvent(
|
||||||
|
UpdatesProcessResult::SUCCESS,
|
||||||
|
UpdatesConfigType::DATA
|
||||||
|
).notify();
|
||||||
return Maybe<void>();
|
return Maybe<void>();
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -751,8 +740,7 @@ private:
|
|||||||
|
|
||||||
dbgInfo(D_ORCHESTRATOR) << "There is a new settings file.";
|
dbgInfo(D_ORCHESTRATOR) << "There is a new settings file.";
|
||||||
GetResourceFile resource_file(GetResourceFile::ResourceFileType::SETTINGS);
|
GetResourceFile resource_file(GetResourceFile::ResourceFileType::SETTINGS);
|
||||||
Maybe<string> new_settings_file =
|
Maybe<string> new_settings_file = Singleton::Consume<I_Downloader>::by<OrchestrationComp>()->downloadFile(
|
||||||
Singleton::Consume<I_Downloader>::by<OrchestrationComp>()->downloadFile(
|
|
||||||
orch_settings.unpack(),
|
orch_settings.unpack(),
|
||||||
I_OrchestrationTools::SELECTED_CHECKSUM_TYPE,
|
I_OrchestrationTools::SELECTED_CHECKSUM_TYPE,
|
||||||
resource_file
|
resource_file
|
||||||
@@ -762,6 +750,13 @@ private:
|
|||||||
dbgWarning(D_ORCHESTRATOR)
|
dbgWarning(D_ORCHESTRATOR)
|
||||||
<< "Failed to download the new settings file. Error: "
|
<< "Failed to download the new settings file. Error: "
|
||||||
<< new_settings_file.getErr();
|
<< new_settings_file.getErr();
|
||||||
|
UpdatesProcessEvent(
|
||||||
|
UpdatesProcessResult::FAILED,
|
||||||
|
UpdatesConfigType::SETTINGS,
|
||||||
|
UpdatesFailureReason::DOWNLOAD_FILE,
|
||||||
|
resource_file.getFileName(),
|
||||||
|
new_settings_file.getErr()
|
||||||
|
).notify();
|
||||||
return genError("Failed to download the new settings file. Error: " + new_settings_file.getErr());
|
return genError("Failed to download the new settings file. Error: " + new_settings_file.getErr());
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -769,6 +764,10 @@ private:
|
|||||||
if (res.ok()) {
|
if (res.ok()) {
|
||||||
settings_file_path = *res;
|
settings_file_path = *res;
|
||||||
reloadConfiguration();
|
reloadConfiguration();
|
||||||
|
UpdatesProcessEvent(
|
||||||
|
UpdatesProcessResult::SUCCESS,
|
||||||
|
UpdatesConfigType::SETTINGS
|
||||||
|
).notify();
|
||||||
return Maybe<void>();
|
return Maybe<void>();
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -877,11 +876,13 @@ private:
|
|||||||
|
|
||||||
if (!response.ok()) {
|
if (!response.ok()) {
|
||||||
dbgWarning(D_ORCHESTRATOR) << "Failed to get the update. Error: " << response.getErr();
|
dbgWarning(D_ORCHESTRATOR) << "Failed to get the update. Error: " << response.getErr();
|
||||||
i_orchestration_status->setFieldStatus(
|
UpdatesProcessEvent(
|
||||||
OrchestrationStatusFieldType::LAST_UPDATE,
|
UpdatesProcessResult::FAILED,
|
||||||
OrchestrationStatusResult::FAILED,
|
UpdatesConfigType::GENERAL,
|
||||||
|
UpdatesFailureReason::GET_UPDATE_REQUEST,
|
||||||
|
"",
|
||||||
"Warning: Agent/Gateway failed during the update process. Contact Check Point support."
|
"Warning: Agent/Gateway failed during the update process. Contact Check Point support."
|
||||||
);
|
).notify();
|
||||||
|
|
||||||
return genError(response.getErr());
|
return genError(response.getErr());
|
||||||
}
|
}
|
||||||
@@ -924,10 +925,10 @@ private:
|
|||||||
OrchSettings orch_settings = response.getSettings();
|
OrchSettings orch_settings = response.getSettings();
|
||||||
OrchData orch_data = response.getData();
|
OrchData orch_data = response.getData();
|
||||||
|
|
||||||
i_orchestration_status->setFieldStatus(
|
UpdatesProcessEvent(
|
||||||
OrchestrationStatusFieldType::LAST_UPDATE,
|
UpdatesProcessResult::SUCCESS,
|
||||||
OrchestrationStatusResult::SUCCESS
|
UpdatesConfigType::GENERAL
|
||||||
);
|
).notify();
|
||||||
i_orchestration_status->setIsConfigurationUpdated(
|
i_orchestration_status->setIsConfigurationUpdated(
|
||||||
EnumArray<OrchestrationStatusConfigType, bool>(
|
EnumArray<OrchestrationStatusConfigType, bool>(
|
||||||
orch_manifest.ok(), orch_policy.ok(), orch_settings.ok(), orch_data.ok()
|
orch_manifest.ok(), orch_policy.ok(), orch_settings.ok(), orch_data.ok()
|
||||||
@@ -1017,6 +1018,10 @@ private:
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (maybe_errors != "") return genError(maybe_errors);
|
if (maybe_errors != "") return genError(maybe_errors);
|
||||||
|
UpdatesProcessEvent(
|
||||||
|
UpdatesProcessResult::SUCCESS,
|
||||||
|
UpdatesConfigType::GENERAL
|
||||||
|
).notify();
|
||||||
return Maybe<void>();
|
return Maybe<void>();
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1196,6 +1201,13 @@ private:
|
|||||||
dbgTrace(D_ORCHESTRATOR) << "The settings directory is " << settings_file_path;
|
dbgTrace(D_ORCHESTRATOR) << "The settings directory is " << settings_file_path;
|
||||||
if (!i_orchestration_tools->copyFile(new_settings_file, settings_file_path)) {
|
if (!i_orchestration_tools->copyFile(new_settings_file, settings_file_path)) {
|
||||||
dbgWarning(D_ORCHESTRATOR) << "Failed to update the settings.";
|
dbgWarning(D_ORCHESTRATOR) << "Failed to update the settings.";
|
||||||
|
UpdatesProcessEvent(
|
||||||
|
UpdatesProcessResult::FAILED,
|
||||||
|
UpdatesConfigType::SETTINGS,
|
||||||
|
UpdatesFailureReason::HANDLE_FILE,
|
||||||
|
settings_file_path,
|
||||||
|
"Failed to update the settings"
|
||||||
|
).notify();
|
||||||
return genError("Failed to update the settings");
|
return genError("Failed to update the settings");
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1290,6 +1302,23 @@ private:
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
void
|
||||||
|
reportCloudMetadata(AgentDataReport &report)
|
||||||
|
{
|
||||||
|
I_DetailsResolver *i_details_resolver = Singleton::Consume<I_DetailsResolver>::by<OrchestrationComp>();
|
||||||
|
auto cloud_metadata = i_details_resolver->readCloudMetadata();
|
||||||
|
if (!cloud_metadata.ok()) {
|
||||||
|
dbgDebug(D_ORCHESTRATOR) << cloud_metadata.getErr();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
report << make_pair("cloudAccountId", ::get<0>(cloud_metadata.unpack()));
|
||||||
|
report << make_pair("cloudVpcId", ::get<1>(cloud_metadata.unpack()));
|
||||||
|
report << make_pair("cloudInstanceId", ::get<2>(cloud_metadata.unpack()));
|
||||||
|
report << make_pair("cloudInstanceLocalIp", ::get<3>(cloud_metadata.unpack()));
|
||||||
|
report << make_pair("cloudRegion", ::get<4>(cloud_metadata.unpack()));
|
||||||
|
}
|
||||||
|
|
||||||
void
|
void
|
||||||
reportAgentDetailsMetaData()
|
reportAgentDetailsMetaData()
|
||||||
{
|
{
|
||||||
@@ -1335,6 +1364,8 @@ private:
|
|||||||
agent_data_report << AgentReportFieldWithLabel("cloud_storage_service", "false");
|
agent_data_report << AgentReportFieldWithLabel("cloud_storage_service", "false");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
reportCloudMetadata(agent_data_report);
|
||||||
|
|
||||||
if (i_details_resolver->isKernelVersion3OrHigher()) {
|
if (i_details_resolver->isKernelVersion3OrHigher()) {
|
||||||
agent_data_report << AgentReportFieldWithLabel("isKernelVersion3OrHigher", "true");
|
agent_data_report << AgentReportFieldWithLabel("isKernelVersion3OrHigher", "true");
|
||||||
}
|
}
|
||||||
@@ -1426,20 +1457,24 @@ private:
|
|||||||
<< check_update_result.getErr()
|
<< check_update_result.getErr()
|
||||||
<< ", new check will be every: "
|
<< ", new check will be every: "
|
||||||
<< sleep_interval << " seconds";
|
<< sleep_interval << " seconds";
|
||||||
|
UpdatesProcessEvent(
|
||||||
health_check_status_listener.setStatus(
|
UpdatesProcessResult::FAILED,
|
||||||
HealthCheckStatus::UNHEALTHY,
|
UpdatesConfigType::GENERAL,
|
||||||
OrchestrationStatusFieldType::LAST_UPDATE,
|
UpdatesFailureReason::CHECK_UPDATE,
|
||||||
|
"",
|
||||||
"Failed during check update. Error: " + check_update_result.getErr()
|
"Failed during check update. Error: " + check_update_result.getErr()
|
||||||
);
|
).notify();
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
failure_count = 0;
|
failure_count = 0;
|
||||||
dbgDebug(D_ORCHESTRATOR) << "Check update process completed successfully";
|
dbgDebug(D_ORCHESTRATOR) << "Check update process completed successfully";
|
||||||
health_check_status_listener.setStatus(
|
UpdatesProcessEvent(
|
||||||
HealthCheckStatus::HEALTHY,
|
UpdatesProcessResult::SUCCESS,
|
||||||
OrchestrationStatusFieldType::LAST_UPDATE
|
UpdatesConfigType::GENERAL,
|
||||||
);
|
UpdatesFailureReason::CHECK_UPDATE,
|
||||||
|
"",
|
||||||
|
"Check update procces succeeded!"
|
||||||
|
).notify();
|
||||||
sleep_interval = policy.getSleepInterval();
|
sleep_interval = policy.getSleepInterval();
|
||||||
if (!is_new_success) {
|
if (!is_new_success) {
|
||||||
dbgInfo(D_ORCHESTRATOR)
|
dbgInfo(D_ORCHESTRATOR)
|
||||||
@@ -1464,7 +1499,7 @@ private:
|
|||||||
<< " minutes from now.";
|
<< " minutes from now.";
|
||||||
upgrade_delay_time += chrono::minutes(upgrade_delay_interval);
|
upgrade_delay_time += chrono::minutes(upgrade_delay_interval);
|
||||||
} catch (const exception& err) {
|
} catch (const exception& err) {
|
||||||
dbgInfo(D_ORCHESTRATOR) << "Failed to parse upgrade delay interval.";
|
dbgWarning(D_ORCHESTRATOR) << "Failed to parse upgrade delay interval.";
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1474,11 +1509,13 @@ private:
|
|||||||
sleep_interval = policy.getErrorSleepInterval();
|
sleep_interval = policy.getErrorSleepInterval();
|
||||||
Maybe<void> registration_status(genError("Not running yet."));
|
Maybe<void> registration_status(genError("Not running yet."));
|
||||||
while (!(registration_status = registerToTheFog()).ok()) {
|
while (!(registration_status = registerToTheFog()).ok()) {
|
||||||
health_check_status_listener.setStatus(
|
UpdatesProcessEvent(
|
||||||
HealthCheckStatus::UNHEALTHY,
|
UpdatesProcessResult::FAILED,
|
||||||
OrchestrationStatusFieldType::REGISTRATION,
|
UpdatesConfigType::GENERAL,
|
||||||
|
UpdatesFailureReason::REGISTRATION,
|
||||||
|
"",
|
||||||
registration_status.getErr()
|
registration_status.getErr()
|
||||||
);
|
).notify();
|
||||||
sleep_interval = getConfigurationWithDefault<int>(
|
sleep_interval = getConfigurationWithDefault<int>(
|
||||||
30,
|
30,
|
||||||
"orchestration",
|
"orchestration",
|
||||||
@@ -1498,10 +1535,11 @@ private:
|
|||||||
|
|
||||||
Singleton::Consume<I_MainLoop>::by<OrchestrationComp>()->yield(chrono::seconds(1));
|
Singleton::Consume<I_MainLoop>::by<OrchestrationComp>()->yield(chrono::seconds(1));
|
||||||
|
|
||||||
health_check_status_listener.setStatus(
|
UpdatesProcessEvent(
|
||||||
HealthCheckStatus::HEALTHY,
|
UpdatesProcessResult::SUCCESS,
|
||||||
OrchestrationStatusFieldType::REGISTRATION
|
UpdatesConfigType::GENERAL,
|
||||||
);
|
UpdatesFailureReason::REGISTRATION
|
||||||
|
).notify();
|
||||||
|
|
||||||
LogGen(
|
LogGen(
|
||||||
"Check Point Orchestration nano service successfully started",
|
"Check Point Orchestration nano service successfully started",
|
||||||
@@ -1535,16 +1573,18 @@ private:
|
|||||||
if (!Singleton::Consume<I_ManifestController>::by<OrchestrationComp>()->loadAfterSelfUpdate()) {
|
if (!Singleton::Consume<I_ManifestController>::by<OrchestrationComp>()->loadAfterSelfUpdate()) {
|
||||||
// Should restore from backup
|
// Should restore from backup
|
||||||
dbgWarning(D_ORCHESTRATOR) << "Failed to load Orchestration after self-update";
|
dbgWarning(D_ORCHESTRATOR) << "Failed to load Orchestration after self-update";
|
||||||
health_check_status_listener.setStatus(
|
UpdatesProcessEvent(
|
||||||
HealthCheckStatus::UNHEALTHY,
|
UpdatesProcessResult::FAILED,
|
||||||
OrchestrationStatusFieldType::LAST_UPDATE,
|
UpdatesConfigType::GENERAL,
|
||||||
|
UpdatesFailureReason::ORCHESTRATION_SELF_UPDATE,
|
||||||
|
"",
|
||||||
"Failed to load Orchestration after self-update"
|
"Failed to load Orchestration after self-update"
|
||||||
);
|
).notify();
|
||||||
} else {
|
} else {
|
||||||
health_check_status_listener.setStatus(
|
UpdatesProcessEvent(
|
||||||
HealthCheckStatus::HEALTHY,
|
UpdatesProcessResult::SUCCESS,
|
||||||
OrchestrationStatusFieldType::MANIFEST
|
UpdatesConfigType::MANIFEST
|
||||||
);
|
).notify();
|
||||||
}
|
}
|
||||||
|
|
||||||
setUpgradeTime();
|
setUpgradeTime();
|
||||||
@@ -1894,7 +1934,7 @@ private:
|
|||||||
ReportIS::Audience::INTERNAL
|
ReportIS::Audience::INTERNAL
|
||||||
);
|
);
|
||||||
hybrid_mode_metric.registerListener();
|
hybrid_mode_metric.registerListener();
|
||||||
health_check_status_listener.registerListener();
|
updates_process_reporter_listener.registerListener();
|
||||||
}
|
}
|
||||||
|
|
||||||
void
|
void
|
||||||
@@ -2007,7 +2047,7 @@ private:
|
|||||||
unsigned int sleep_interval = 0;
|
unsigned int sleep_interval = 0;
|
||||||
bool is_new_success = false;
|
bool is_new_success = false;
|
||||||
OrchestrationPolicy policy;
|
OrchestrationPolicy policy;
|
||||||
HealthCheckStatusListener health_check_status_listener;
|
UpdatesProcessReporter updates_process_reporter_listener;
|
||||||
HybridModeMetric hybrid_mode_metric;
|
HybridModeMetric hybrid_mode_metric;
|
||||||
EnvDetails env_details;
|
EnvDetails env_details;
|
||||||
chrono::minutes upgrade_delay_time;
|
chrono::minutes upgrade_delay_time;
|
||||||
|
|||||||
@@ -20,6 +20,7 @@
|
|||||||
#include "cereal/types/set.hpp"
|
#include "cereal/types/set.hpp"
|
||||||
#include "agent_core_utilities.h"
|
#include "agent_core_utilities.h"
|
||||||
#include "namespace_data.h"
|
#include "namespace_data.h"
|
||||||
|
#include "updates_process_event.h"
|
||||||
|
|
||||||
#include <netdb.h>
|
#include <netdb.h>
|
||||||
#include <arpa/inet.h>
|
#include <arpa/inet.h>
|
||||||
@@ -469,6 +470,13 @@ OrchestrationTools::Impl::packagesToJsonFile(const map<packageName, Package> &pa
|
|||||||
archive_out(cereal::make_nvp("packages", packges_vector));
|
archive_out(cereal::make_nvp("packages", packges_vector));
|
||||||
} catch (cereal::Exception &e) {
|
} catch (cereal::Exception &e) {
|
||||||
dbgDebug(D_ORCHESTRATOR) << "Failed to write vector of packages to JSON file " << path << ", " << e.what();
|
dbgDebug(D_ORCHESTRATOR) << "Failed to write vector of packages to JSON file " << path << ", " << e.what();
|
||||||
|
UpdatesProcessEvent(
|
||||||
|
UpdatesProcessResult::FAILED,
|
||||||
|
UpdatesConfigType::MANIFEST,
|
||||||
|
UpdatesFailureReason::HANDLE_FILE,
|
||||||
|
path,
|
||||||
|
string("Failed to write vector of packages to JSON file. Error: ") + e.what()
|
||||||
|
).notify();
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
return true;
|
return true;
|
||||||
|
|||||||
@@ -88,7 +88,7 @@ public:
|
|||||||
// This Holding the Main Routine of the Orchestration.
|
// This Holding the Main Routine of the Orchestration.
|
||||||
EXPECT_CALL(
|
EXPECT_CALL(
|
||||||
mock_ml,
|
mock_ml,
|
||||||
addOneTimeRoutine(I_MainLoop::RoutineType::RealTime, _, "Orchestration runner", true)
|
addOneTimeRoutine(I_MainLoop::RoutineType::System, _, "Orchestration runner", true)
|
||||||
).WillOnce(DoAll(SaveArg<1>(&routine), Return(1)));
|
).WillOnce(DoAll(SaveArg<1>(&routine), Return(1)));
|
||||||
|
|
||||||
EXPECT_CALL(mock_shell_cmd, getExecOutput("openssl version -d | cut -d\" \" -f2 | cut -d\"\\\"\" -f2", _, _))
|
EXPECT_CALL(mock_shell_cmd, getExecOutput("openssl version -d | cut -d\" \" -f2 | cut -d\"\\\"\" -f2", _, _))
|
||||||
@@ -143,6 +143,9 @@ public:
|
|||||||
|
|
||||||
map<string, string> resolved_mgmt_details({{"kernel_version", "4.4.0-87-generic"}});
|
map<string, string> resolved_mgmt_details({{"kernel_version", "4.4.0-87-generic"}});
|
||||||
EXPECT_CALL(mock_details_resolver, getResolvedDetails()).WillRepeatedly(Return(resolved_mgmt_details));
|
EXPECT_CALL(mock_details_resolver, getResolvedDetails()).WillRepeatedly(Return(resolved_mgmt_details));
|
||||||
|
EXPECT_CALL(mock_details_resolver, readCloudMetadata()).WillRepeatedly(
|
||||||
|
Return(Maybe<tuple<string, string, string, string, string>>(genError("No cloud metadata")))
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
void
|
void
|
||||||
@@ -281,7 +284,7 @@ TEST_F(OrchestrationMultitenancyTest, handle_virtual_resource)
|
|||||||
EXPECT_CALL(mock_service_controller, getPolicyVersion())
|
EXPECT_CALL(mock_service_controller, getPolicyVersion())
|
||||||
.Times(3).WillRepeatedly(ReturnRef(first_policy_version));
|
.Times(3).WillRepeatedly(ReturnRef(first_policy_version));
|
||||||
|
|
||||||
map<string, PortNumber> empty_service_to_port_map;
|
map<string, vector<PortNumber>> empty_service_to_port_map;
|
||||||
EXPECT_CALL(mock_service_controller, getServiceToPortMap()).WillRepeatedly(Return(empty_service_to_port_map));
|
EXPECT_CALL(mock_service_controller, getServiceToPortMap()).WillRepeatedly(Return(empty_service_to_port_map));
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -22,6 +22,7 @@
|
|||||||
#include "agent_details.h"
|
#include "agent_details.h"
|
||||||
#include "customized_cereal_map.h"
|
#include "customized_cereal_map.h"
|
||||||
#include "health_check_status/health_check_status.h"
|
#include "health_check_status/health_check_status.h"
|
||||||
|
#include "updates_process_event.h"
|
||||||
#include "declarative_policy_utils.h"
|
#include "declarative_policy_utils.h"
|
||||||
|
|
||||||
using namespace testing;
|
using namespace testing;
|
||||||
@@ -79,7 +80,7 @@ public:
|
|||||||
EXPECT_CALL(mock_orchestration_tools, setClusterId());
|
EXPECT_CALL(mock_orchestration_tools, setClusterId());
|
||||||
EXPECT_CALL(
|
EXPECT_CALL(
|
||||||
mock_ml,
|
mock_ml,
|
||||||
addOneTimeRoutine(I_MainLoop::RoutineType::RealTime, _, "Orchestration runner", true)
|
addOneTimeRoutine(I_MainLoop::RoutineType::System, _, "Orchestration runner", true)
|
||||||
).WillOnce(DoAll(SaveArg<1>(&routine), Return(1)));
|
).WillOnce(DoAll(SaveArg<1>(&routine), Return(1)));
|
||||||
|
|
||||||
EXPECT_CALL(
|
EXPECT_CALL(
|
||||||
@@ -99,7 +100,7 @@ public:
|
|||||||
)
|
)
|
||||||
);
|
);
|
||||||
|
|
||||||
map<string, PortNumber> empty_service_to_port_map;
|
map<string, vector<PortNumber>> empty_service_to_port_map;
|
||||||
EXPECT_CALL(mock_service_controller, getServiceToPortMap()).WillRepeatedly(Return(empty_service_to_port_map));
|
EXPECT_CALL(mock_service_controller, getServiceToPortMap()).WillRepeatedly(Return(empty_service_to_port_map));
|
||||||
|
|
||||||
EXPECT_CALL(rest, mockRestCall(RestAction::SHOW, "orchestration-status", _)).WillOnce(
|
EXPECT_CALL(rest, mockRestCall(RestAction::SHOW, "orchestration-status", _)).WillOnce(
|
||||||
@@ -170,6 +171,9 @@ public:
|
|||||||
|
|
||||||
map<string, string> resolved_mgmt_details({{"kernel_version", "4.4.0-87-generic"}});
|
map<string, string> resolved_mgmt_details({{"kernel_version", "4.4.0-87-generic"}});
|
||||||
EXPECT_CALL(mock_details_resolver, getResolvedDetails()).WillRepeatedly(Return(resolved_mgmt_details));
|
EXPECT_CALL(mock_details_resolver, getResolvedDetails()).WillRepeatedly(Return(resolved_mgmt_details));
|
||||||
|
EXPECT_CALL(mock_details_resolver, readCloudMetadata()).WillRepeatedly(
|
||||||
|
Return(Maybe<tuple<string, string, string, string, string>>(genError("No cloud metadata")))
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
string
|
string
|
||||||
@@ -355,6 +359,7 @@ private:
|
|||||||
TEST_F(OrchestrationTest, hybridModeRegisterLocalAgentRoutine)
|
TEST_F(OrchestrationTest, hybridModeRegisterLocalAgentRoutine)
|
||||||
{
|
{
|
||||||
EXPECT_CALL(rest, mockRestCall(_, _, _)).WillRepeatedly(Return(true));
|
EXPECT_CALL(rest, mockRestCall(_, _, _)).WillRepeatedly(Return(true));
|
||||||
|
|
||||||
Singleton::Consume<Config::I_Config>::from(config_comp)->loadConfiguration(
|
Singleton::Consume<Config::I_Config>::from(config_comp)->loadConfiguration(
|
||||||
vector<string>{"--orchestration-mode=hybrid_mode"}
|
vector<string>{"--orchestration-mode=hybrid_mode"}
|
||||||
);
|
);
|
||||||
@@ -373,7 +378,6 @@ TEST_F(OrchestrationTest, hybridModeRegisterLocalAgentRoutine)
|
|||||||
expectDetailsResolver();
|
expectDetailsResolver();
|
||||||
EXPECT_CALL(mock_update_communication, getUpdate(_));
|
EXPECT_CALL(mock_update_communication, getUpdate(_));
|
||||||
EXPECT_CALL(mock_status, setLastUpdateAttempt());
|
EXPECT_CALL(mock_status, setLastUpdateAttempt());
|
||||||
EXPECT_CALL(mock_status, setFieldStatus(_, _, _));
|
|
||||||
EXPECT_CALL(mock_status, setIsConfigurationUpdated(_));
|
EXPECT_CALL(mock_status, setIsConfigurationUpdated(_));
|
||||||
|
|
||||||
EXPECT_CALL(mock_ml, yield(A<chrono::microseconds>()))
|
EXPECT_CALL(mock_ml, yield(A<chrono::microseconds>()))
|
||||||
@@ -581,7 +585,6 @@ TEST_F(OrchestrationTest, check_sending_registration_data)
|
|||||||
expectDetailsResolver();
|
expectDetailsResolver();
|
||||||
EXPECT_CALL(mock_update_communication, getUpdate(_));
|
EXPECT_CALL(mock_update_communication, getUpdate(_));
|
||||||
EXPECT_CALL(mock_status, setLastUpdateAttempt());
|
EXPECT_CALL(mock_status, setLastUpdateAttempt());
|
||||||
EXPECT_CALL(mock_status, setFieldStatus(_, _, _));
|
|
||||||
EXPECT_CALL(mock_status, setIsConfigurationUpdated(_));
|
EXPECT_CALL(mock_status, setIsConfigurationUpdated(_));
|
||||||
|
|
||||||
EXPECT_CALL(mock_ml, yield(A<chrono::microseconds>()))
|
EXPECT_CALL(mock_ml, yield(A<chrono::microseconds>()))
|
||||||
@@ -758,10 +761,6 @@ TEST_F(OrchestrationTest, orchestrationPolicyUpdatRollback)
|
|||||||
).WillOnce(Return(true));
|
).WillOnce(Return(true));
|
||||||
EXPECT_CALL(mock_update_communication, setAddressExtenesion("/test"));
|
EXPECT_CALL(mock_update_communication, setAddressExtenesion("/test"));
|
||||||
EXPECT_CALL(mock_status, setLastUpdateAttempt());
|
EXPECT_CALL(mock_status, setLastUpdateAttempt());
|
||||||
EXPECT_CALL(
|
|
||||||
mock_status,
|
|
||||||
setFieldStatus(OrchestrationStatusFieldType::LAST_UPDATE, OrchestrationStatusResult::SUCCESS, "")
|
|
||||||
);
|
|
||||||
EXPECT_CALL(mock_status, setIsConfigurationUpdated(A<EnumArray<OrchestrationStatusConfigType, bool>>())
|
EXPECT_CALL(mock_status, setIsConfigurationUpdated(A<EnumArray<OrchestrationStatusConfigType, bool>>())
|
||||||
).WillOnce(
|
).WillOnce(
|
||||||
Invoke(
|
Invoke(
|
||||||
@@ -937,10 +936,6 @@ TEST_F(OrchestrationTest, orchestrationPolicyUpdate)
|
|||||||
).WillOnce(Return(true));
|
).WillOnce(Return(true));
|
||||||
EXPECT_CALL(mock_update_communication, setAddressExtenesion("/test"));
|
EXPECT_CALL(mock_update_communication, setAddressExtenesion("/test"));
|
||||||
EXPECT_CALL(mock_status, setLastUpdateAttempt());
|
EXPECT_CALL(mock_status, setLastUpdateAttempt());
|
||||||
EXPECT_CALL(
|
|
||||||
mock_status,
|
|
||||||
setFieldStatus(OrchestrationStatusFieldType::LAST_UPDATE, OrchestrationStatusResult::SUCCESS, "")
|
|
||||||
);
|
|
||||||
EXPECT_CALL(mock_status, setIsConfigurationUpdated(A<EnumArray<OrchestrationStatusConfigType, bool>>())
|
EXPECT_CALL(mock_status, setIsConfigurationUpdated(A<EnumArray<OrchestrationStatusConfigType, bool>>())
|
||||||
).WillOnce(
|
).WillOnce(
|
||||||
Invoke(
|
Invoke(
|
||||||
@@ -990,7 +985,7 @@ TEST_F(OrchestrationTest, loadOrchestrationPolicyFromBackup)
|
|||||||
|
|
||||||
EXPECT_CALL(
|
EXPECT_CALL(
|
||||||
mock_ml,
|
mock_ml,
|
||||||
addOneTimeRoutine(I_MainLoop::RoutineType::RealTime, _, "Orchestration runner", true)
|
addOneTimeRoutine(I_MainLoop::RoutineType::System, _, "Orchestration runner", true)
|
||||||
);
|
);
|
||||||
|
|
||||||
EXPECT_CALL(
|
EXPECT_CALL(
|
||||||
@@ -1010,7 +1005,7 @@ TEST_F(OrchestrationTest, loadOrchestrationPolicyFromBackup)
|
|||||||
)
|
)
|
||||||
);
|
);
|
||||||
|
|
||||||
map<string, PortNumber> empty_service_to_port_map;
|
map<string, vector<PortNumber>> empty_service_to_port_map;
|
||||||
EXPECT_CALL(mock_service_controller, getServiceToPortMap()).WillRepeatedly(Return(empty_service_to_port_map));
|
EXPECT_CALL(mock_service_controller, getServiceToPortMap()).WillRepeatedly(Return(empty_service_to_port_map));
|
||||||
|
|
||||||
EXPECT_CALL(rest, mockRestCall(RestAction::SHOW, "orchestration-status", _));
|
EXPECT_CALL(rest, mockRestCall(RestAction::SHOW, "orchestration-status", _));
|
||||||
@@ -1105,14 +1100,6 @@ TEST_F(OrchestrationTest, manifestUpdate)
|
|||||||
);
|
);
|
||||||
|
|
||||||
EXPECT_CALL(mock_status, setLastUpdateAttempt());
|
EXPECT_CALL(mock_status, setLastUpdateAttempt());
|
||||||
EXPECT_CALL(
|
|
||||||
mock_status,
|
|
||||||
setFieldStatus(OrchestrationStatusFieldType::LAST_UPDATE, OrchestrationStatusResult::SUCCESS, "")
|
|
||||||
);
|
|
||||||
EXPECT_CALL(
|
|
||||||
mock_status,
|
|
||||||
setFieldStatus(OrchestrationStatusFieldType::MANIFEST, OrchestrationStatusResult::SUCCESS, "")
|
|
||||||
);
|
|
||||||
EXPECT_CALL(mock_status, setIsConfigurationUpdated(A<EnumArray<OrchestrationStatusConfigType, bool>>())
|
EXPECT_CALL(mock_status, setIsConfigurationUpdated(A<EnumArray<OrchestrationStatusConfigType, bool>>())
|
||||||
).WillOnce(
|
).WillOnce(
|
||||||
Invoke(
|
Invoke(
|
||||||
@@ -1234,10 +1221,6 @@ TEST_F(OrchestrationTest, getBadPolicyUpdate)
|
|||||||
.WillOnce(ReturnRef(second_val)
|
.WillOnce(ReturnRef(second_val)
|
||||||
);
|
);
|
||||||
EXPECT_CALL(mock_status, setLastUpdateAttempt());
|
EXPECT_CALL(mock_status, setLastUpdateAttempt());
|
||||||
EXPECT_CALL(
|
|
||||||
mock_status,
|
|
||||||
setFieldStatus(OrchestrationStatusFieldType::LAST_UPDATE, OrchestrationStatusResult::SUCCESS, "")
|
|
||||||
);
|
|
||||||
EXPECT_CALL(mock_status, setIsConfigurationUpdated(A<EnumArray<OrchestrationStatusConfigType, bool>>())
|
EXPECT_CALL(mock_status, setIsConfigurationUpdated(A<EnumArray<OrchestrationStatusConfigType, bool>>())
|
||||||
).WillOnce(
|
).WillOnce(
|
||||||
Invoke(
|
Invoke(
|
||||||
@@ -1268,7 +1251,7 @@ TEST_F(OrchestrationTest, getBadPolicyUpdate)
|
|||||||
EXPECT_CALL(
|
EXPECT_CALL(
|
||||||
mock_service_controller,
|
mock_service_controller,
|
||||||
updateServiceConfiguration(string("policy path"), "", expected_data_types, "", "", _)
|
updateServiceConfiguration(string("policy path"), "", expected_data_types, "", "", _)
|
||||||
).WillOnce(Return(Maybe<void>(genError(string("")))));
|
).WillOnce(Return(Maybe<void>(genError(string("Fail to load policy")))));
|
||||||
|
|
||||||
EXPECT_CALL(mock_ml, yield(A<chrono::microseconds>()))
|
EXPECT_CALL(mock_ml, yield(A<chrono::microseconds>()))
|
||||||
.WillOnce(
|
.WillOnce(
|
||||||
@@ -1325,6 +1308,7 @@ TEST_F(OrchestrationTest, failedDownloadSettings)
|
|||||||
|
|
||||||
EXPECT_CALL(mock_update_communication, authenticateAgent()).WillOnce(Return(Maybe<void>()));
|
EXPECT_CALL(mock_update_communication, authenticateAgent()).WillOnce(Return(Maybe<void>()));
|
||||||
expectDetailsResolver();
|
expectDetailsResolver();
|
||||||
|
|
||||||
EXPECT_CALL(mock_manifest_controller, loadAfterSelfUpdate()).WillOnce(Return(false));
|
EXPECT_CALL(mock_manifest_controller, loadAfterSelfUpdate()).WillOnce(Return(false));
|
||||||
EXPECT_CALL(mock_orchestration_tools, calculateChecksum(Package::ChecksumTypes::SHA256, manifest_file_path))
|
EXPECT_CALL(mock_orchestration_tools, calculateChecksum(Package::ChecksumTypes::SHA256, manifest_file_path))
|
||||||
.WillOnce(Return(manifest_checksum));
|
.WillOnce(Return(manifest_checksum));
|
||||||
@@ -1356,22 +1340,10 @@ TEST_F(OrchestrationTest, failedDownloadSettings)
|
|||||||
);
|
);
|
||||||
|
|
||||||
EXPECT_CALL(mock_status, setLastUpdateAttempt());
|
EXPECT_CALL(mock_status, setLastUpdateAttempt());
|
||||||
EXPECT_CALL(
|
|
||||||
mock_status,
|
|
||||||
setFieldStatus(OrchestrationStatusFieldType::LAST_UPDATE, OrchestrationStatusResult::SUCCESS, "")
|
|
||||||
).Times(1);
|
|
||||||
|
|
||||||
string manifest_err =
|
string manifest_err =
|
||||||
"Critical Error: Agent/Gateway was not fully deployed on host 'hostname' "
|
"Critical Error: Agent/Gateway was not fully deployed on host 'hostname' "
|
||||||
"and is not enforcing a security policy. Retry installation or contact Check Point support.";
|
"and is not enforcing a security policy. Retry installation or contact Check Point support.";
|
||||||
EXPECT_CALL(
|
|
||||||
mock_status,
|
|
||||||
setFieldStatus(
|
|
||||||
OrchestrationStatusFieldType::MANIFEST,
|
|
||||||
OrchestrationStatusResult::FAILED,
|
|
||||||
manifest_err
|
|
||||||
)
|
|
||||||
).Times(1);
|
|
||||||
EXPECT_CALL(mock_status, getManifestError()).WillOnce(ReturnRef(manifest_err));
|
EXPECT_CALL(mock_status, getManifestError()).WillOnce(ReturnRef(manifest_err));
|
||||||
|
|
||||||
EXPECT_CALL(mock_status, setIsConfigurationUpdated(A<EnumArray<OrchestrationStatusConfigType, bool>>())
|
EXPECT_CALL(mock_status, setIsConfigurationUpdated(A<EnumArray<OrchestrationStatusConfigType, bool>>())
|
||||||
@@ -1472,10 +1444,6 @@ TEST_P(OrchestrationTest, orchestrationFirstRun)
|
|||||||
.WillOnce(Return(data_checksum));
|
.WillOnce(Return(data_checksum));
|
||||||
|
|
||||||
EXPECT_CALL(mock_status, setLastUpdateAttempt());
|
EXPECT_CALL(mock_status, setLastUpdateAttempt());
|
||||||
EXPECT_CALL(
|
|
||||||
mock_status,
|
|
||||||
setFieldStatus(OrchestrationStatusFieldType::LAST_UPDATE, OrchestrationStatusResult::SUCCESS, "")
|
|
||||||
);
|
|
||||||
EXPECT_CALL(mock_status, setIsConfigurationUpdated(A<EnumArray<OrchestrationStatusConfigType, bool>>())
|
EXPECT_CALL(mock_status, setIsConfigurationUpdated(A<EnumArray<OrchestrationStatusConfigType, bool>>())
|
||||||
).WillOnce(
|
).WillOnce(
|
||||||
Invoke(
|
Invoke(
|
||||||
@@ -1534,23 +1502,6 @@ TEST_P(OrchestrationTest, orchestrationFirstRun)
|
|||||||
} catch (const invalid_argument& e) {}
|
} catch (const invalid_argument& e) {}
|
||||||
EXPECT_CALL(mock_status, writeStatusToFile());
|
EXPECT_CALL(mock_status, writeStatusToFile());
|
||||||
|
|
||||||
vector<HealthCheckStatusReply> reply;
|
|
||||||
bool is_named_query = GetParam();
|
|
||||||
if (is_named_query) {
|
|
||||||
auto all_comps_status_reply = HealthCheckStatusEvent().performNamedQuery();
|
|
||||||
for (auto &elem : all_comps_status_reply) {
|
|
||||||
reply.push_back(elem.second);
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
reply = HealthCheckStatusEvent().query();
|
|
||||||
}
|
|
||||||
|
|
||||||
ASSERT_EQ(reply.size(), 1);
|
|
||||||
EXPECT_EQ(reply[0].getCompName(), "Orchestration");
|
|
||||||
EXPECT_EQ(reply[0].getStatus(), HealthCheckStatus::HEALTHY);
|
|
||||||
|
|
||||||
HealthCheckStatusEvent().notify();
|
|
||||||
|
|
||||||
orchestration_comp.fini();
|
orchestration_comp.fini();
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1718,10 +1669,6 @@ TEST_F(OrchestrationTest, dataUpdate)
|
|||||||
);
|
);
|
||||||
|
|
||||||
EXPECT_CALL(mock_status, setLastUpdateAttempt());
|
EXPECT_CALL(mock_status, setLastUpdateAttempt());
|
||||||
EXPECT_CALL(
|
|
||||||
mock_status,
|
|
||||||
setFieldStatus(OrchestrationStatusFieldType::LAST_UPDATE, OrchestrationStatusResult::SUCCESS, "")
|
|
||||||
);
|
|
||||||
EXPECT_CALL(mock_status, setIsConfigurationUpdated(A<EnumArray<OrchestrationStatusConfigType, bool>>())
|
EXPECT_CALL(mock_status, setIsConfigurationUpdated(A<EnumArray<OrchestrationStatusConfigType, bool>>())
|
||||||
).WillOnce(
|
).WillOnce(
|
||||||
Invoke(
|
Invoke(
|
||||||
|
|||||||
@@ -333,7 +333,7 @@ private:
|
|||||||
|
|
||||||
ReconfStatus getUpdatedReconfStatus();
|
ReconfStatus getUpdatedReconfStatus();
|
||||||
Maybe<ServiceDetails> getServiceDetails(const string &service_name);
|
Maybe<ServiceDetails> getServiceDetails(const string &service_name);
|
||||||
map<string, PortNumber> getServiceToPortMap();
|
map<string, vector<PortNumber>> getServiceToPortMap();
|
||||||
|
|
||||||
template<class Archive>
|
template<class Archive>
|
||||||
void serializeRegisterServices(Archive &ar) { ar(pending_services); }
|
void serializeRegisterServices(Archive &ar) { ar(pending_services); }
|
||||||
@@ -358,6 +358,7 @@ private:
|
|||||||
string filesystem_prefix;
|
string filesystem_prefix;
|
||||||
bool is_multi_tenant_env = false;
|
bool is_multi_tenant_env = false;
|
||||||
set<string> changed_policy_files;
|
set<string> changed_policy_files;
|
||||||
|
ServiceDetails orchestration_service_details;
|
||||||
|
|
||||||
I_OrchestrationTools *orchestration_tools = nullptr;
|
I_OrchestrationTools *orchestration_tools = nullptr;
|
||||||
I_MainLoop *mainloop = nullptr;
|
I_MainLoop *mainloop = nullptr;
|
||||||
@@ -374,8 +375,13 @@ public:
|
|||||||
for (auto const& entry: ports_map) {
|
for (auto const& entry: ports_map) {
|
||||||
string service = entry.first;
|
string service = entry.first;
|
||||||
replace(service.begin(), service.end(), ' ', '-');
|
replace(service.begin(), service.end(), ' ', '-');
|
||||||
output << service << ":";
|
output << service;
|
||||||
output << entry.second << ",";
|
char delim = ':';
|
||||||
|
for (PortNumber port : entry.second) {
|
||||||
|
output << delim << port;
|
||||||
|
delim = ',';
|
||||||
|
}
|
||||||
|
output << ";";
|
||||||
}
|
}
|
||||||
ports_list = output.str();
|
ports_list = output.str();
|
||||||
}
|
}
|
||||||
@@ -407,7 +413,7 @@ ServiceController::Impl::getUpdatedReconfStatus()
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (!maybe_service.unpack().isServiceActive()) {
|
if (!maybe_service.unpack().isServiceActive()) {
|
||||||
dbgInfo(D_SERVICE_CONTROLLER)
|
dbgDebug(D_SERVICE_CONTROLLER)
|
||||||
<< "Service is not active, removing from registered services list. Service: "
|
<< "Service is not active, removing from registered services list. Service: "
|
||||||
<< services_reconf_names[service_and_reconf_status.first]
|
<< services_reconf_names[service_and_reconf_status.first]
|
||||||
<< "ID: "
|
<< "ID: "
|
||||||
@@ -500,8 +506,9 @@ ServiceController::Impl::loadRegisteredServicesFromFile()
|
|||||||
stringstream ss(maybe_registered_services_str.unpack());
|
stringstream ss(maybe_registered_services_str.unpack());
|
||||||
cereal::JSONInputArchive ar(ss);
|
cereal::JSONInputArchive ar(ss);
|
||||||
ar(cereal::make_nvp("Registered Services", pending_services));
|
ar(cereal::make_nvp("Registered Services", pending_services));
|
||||||
|
pending_services.erase("cp-nano-orchestration");
|
||||||
|
|
||||||
dbgInfo(D_SERVICE_CONTROLLER)
|
dbgDebug(D_SERVICE_CONTROLLER)
|
||||||
<< "Orchestration pending services loaded from file."
|
<< "Orchestration pending services loaded from file."
|
||||||
<< " File: "
|
<< " File: "
|
||||||
<< registered_services_file
|
<< registered_services_file
|
||||||
@@ -509,7 +516,7 @@ ServiceController::Impl::loadRegisteredServicesFromFile()
|
|||||||
|
|
||||||
for (const auto &id_service_pair : pending_services) {
|
for (const auto &id_service_pair : pending_services) {
|
||||||
const auto &service = id_service_pair.second;
|
const auto &service = id_service_pair.second;
|
||||||
dbgInfo(D_SERVICE_CONTROLLER)
|
dbgDebug(D_SERVICE_CONTROLLER)
|
||||||
<< "Service name: "
|
<< "Service name: "
|
||||||
<< service.getServiceName()
|
<< service.getServiceName()
|
||||||
<< ", Service ID: "
|
<< ", Service ID: "
|
||||||
@@ -529,18 +536,26 @@ ServiceController::Impl::writeRegisteredServicesToFile()
|
|||||||
"Orchestration registered services"
|
"Orchestration registered services"
|
||||||
);
|
);
|
||||||
|
|
||||||
|
map<string, ServiceDetails> registered_services_with_orch = registered_services;
|
||||||
|
if (orchestration_service_details.getServiceID() != "") {
|
||||||
|
registered_services_with_orch.emplace(
|
||||||
|
orchestration_service_details.getServiceID(),
|
||||||
|
orchestration_service_details
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
ofstream ss(registered_services_file);
|
ofstream ss(registered_services_file);
|
||||||
cereal::JSONOutputArchive ar(ss);
|
cereal::JSONOutputArchive ar(ss);
|
||||||
ar(cereal::make_nvp("Registered Services", registered_services));
|
ar(cereal::make_nvp("Registered Services", registered_services_with_orch));
|
||||||
|
|
||||||
dbgInfo(D_SERVICE_CONTROLLER)
|
dbgDebug(D_SERVICE_CONTROLLER)
|
||||||
<< "Orchestration registered services file has been updated. File: "
|
<< "Orchestration registered services file has been updated. File: "
|
||||||
<< registered_services_file
|
<< registered_services_file
|
||||||
<< ". Registered Services:";
|
<< ". Registered Services:";
|
||||||
|
|
||||||
for (const auto &id_service_pair : registered_services) {
|
for (const auto &id_service_pair : registered_services_with_orch) {
|
||||||
const auto &service = id_service_pair.second;
|
const auto &service = id_service_pair.second;
|
||||||
dbgInfo(D_SERVICE_CONTROLLER)
|
dbgDebug(D_SERVICE_CONTROLLER)
|
||||||
<< "Service name: "
|
<< "Service name: "
|
||||||
<< service.getServiceName()
|
<< service.getServiceName()
|
||||||
<< ", Service ID: "
|
<< ", Service ID: "
|
||||||
@@ -591,20 +606,20 @@ ServiceController::Impl::cleanUpVirtualFiles()
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
map<string, PortNumber>
|
map<string, vector<PortNumber>>
|
||||||
ServiceController::Impl::getServiceToPortMap()
|
ServiceController::Impl::getServiceToPortMap()
|
||||||
{
|
{
|
||||||
map<string, PortNumber> ports_map;
|
map<string, vector<PortNumber>> ports_map;
|
||||||
for (auto const& entry: registered_services) {
|
for (auto const& entry: registered_services) {
|
||||||
const string &service = entry.first;
|
const string &service = entry.second.getServiceName();
|
||||||
PortNumber port = entry.second.getPort();
|
PortNumber port = entry.second.getPort();
|
||||||
ports_map[service] = port;
|
ports_map[service].push_back(port);
|
||||||
}
|
}
|
||||||
|
|
||||||
for (auto const& entry: pending_services) {
|
for (auto const& entry: pending_services) {
|
||||||
const string &service = entry.first;
|
const string &service = entry.second.getServiceName();
|
||||||
PortNumber port = entry.second.getPort();
|
PortNumber port = entry.second.getPort();
|
||||||
ports_map[service] = port;
|
ports_map[service].push_back(port);
|
||||||
}
|
}
|
||||||
|
|
||||||
return ports_map;
|
return ports_map;
|
||||||
@@ -624,6 +639,12 @@ ServiceController::Impl::registerServiceConfig(
|
|||||||
service_id
|
service_id
|
||||||
);
|
);
|
||||||
|
|
||||||
|
if (service_name == "cp-nano-orchestration") {
|
||||||
|
dbgTrace(D_SERVICE_CONTROLLER) << "Save the orchestration service details";
|
||||||
|
orchestration_service_details = service_config;
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
pending_services.erase(service_config.getServiceID());
|
pending_services.erase(service_config.getServiceID());
|
||||||
pending_services.insert({service_config.getServiceID(), service_config});
|
pending_services.insert({service_config.getServiceID(), service_config});
|
||||||
refreshPendingServices();
|
refreshPendingServices();
|
||||||
|
|||||||
@@ -178,16 +178,17 @@ public:
|
|||||||
void
|
void
|
||||||
expectNewConfigRequest(const string &response)
|
expectNewConfigRequest(const string &response)
|
||||||
{
|
{
|
||||||
|
Maybe<HTTPResponse, HTTPResponse> res = HTTPResponse(HTTPStatusCode::HTTP_OK, response);
|
||||||
EXPECT_CALL(
|
EXPECT_CALL(
|
||||||
mock_message,
|
mock_message,
|
||||||
sendSyncMessage(
|
sendSyncMessage(
|
||||||
HTTPMethod::POST,
|
HTTPMethod::POST,
|
||||||
"/set-new-configuration",
|
"/set-new-configuration",
|
||||||
HasSubstr("1.0.2"),
|
_,
|
||||||
_,
|
_,
|
||||||
_
|
_
|
||||||
)
|
)
|
||||||
).WillOnce(Return(HTTPResponse(HTTPStatusCode::HTTP_OK, response)));
|
).WillOnce(DoAll(SaveArg<2>(&version_body), Return(res)));
|
||||||
}
|
}
|
||||||
|
|
||||||
CPTestTempfile status_file;
|
CPTestTempfile status_file;
|
||||||
@@ -196,6 +197,7 @@ public:
 ::Environment env;
 ConfigComponent config;
 DeclarativePolicyUtils declarative_policy_utils;
+string version_body;
 string configuration_dir;
 string policy_extension;
 string settings_extension;
@@ -229,19 +231,21 @@ public:
 string old_version = "1.0.1";

 string versions =
-"["
-" {"
-" \"id\": \"d8c3cc3c-f9df-83c8-f875-322dd8a0c161\","
-" \"name\": \"Linux Embedded Agents\","
-" \"version\": \"1.0.2\""
-" }"
+"[\n"
+" {\n"
+" \"id\": \"d8c3cc3c-f9df-83c8-f875-322dd8a0c161\",\n"
+" \"name\": \"Linux Embedded Agents\",\n"
+" \"version\": \"1.0.2\",\n"
+" \"profileType\": \"Embedded\"\n"
+" }\n"
 "]";
 string old_versions =
 "["
 " {"
 " \"id\": \"d8c3cc3c-f9df-83c8-f875-322dd8a0c161\","
 " \"name\": \"Linux Embedded Agents\","
-" \"version\": \"1.0.1\""
+" \"version\": \"1.0.1\","
+" \"profileType\": \"Embedded\""
 " }"
 "]";

@@ -338,6 +342,23 @@ TEST_F(ServiceControllerTest, UpdateConfiguration)
 EXPECT_EQ(i_service_controller->getPolicyVersion(), version_value);
 EXPECT_EQ(i_service_controller->getPolicyVersions(), versions);
 EXPECT_EQ(i_service_controller->getUpdatePolicyVersion(), version_value);

+stringstream ver_ss;
+ver_ss
+<< "{\n"
+<< " \"id\": 1,\n"
+<< " \"policy_version\": \"1.0.2,[\\n"
+<< " {\\n"
+<< " \\\"id\\\": \\\"d8c3cc3c-f9df-83c8-f875-322dd8a0c161\\\",\\n"
+<< " \\\"name\\\": \\\"Linux Embedded Agents\\\",\\n"
+<< " \\\"version\\\": \\\"1.0.2\\\",\\n"
+<< " \\\"profileType\\\": \\\"Embedded\\\"\\n"
+<< " }\\n"
+<< "]\"\n}";
+EXPECT_EQ(
+version_body,
+ver_ss.str()
+);
 }

 TEST_F(ServiceControllerTest, supportVersions)
@@ -527,13 +548,13 @@ TEST_F(ServiceControllerTest, TimeOutUpdateConfiguration)
 TEST_F(ServiceControllerTest, readRegisteredServicesFromFile)
 {
 init();
-int family1_id3_port = 1111;
+uint16_t family1_id3_port = 1111;
 string registered_services_json = "{\n"
 " \"Registered Services\": {\n"
 " \"family1_id3\": {\n"
 " \"Service name\": \"mock access control\",\n"
 " \"Service ID\": \"family1_id3\",\n"
-" \"Service port\": 1111,\n"
+" \"Service port\": " + to_string(family1_id3_port) + ",\n"
 " \"Relevant configs\": [\n"
 " \"non updated capability\",\n"
 " \"l4_firewall\"\n"
@@ -573,7 +594,8 @@ TEST_F(ServiceControllerTest, readRegisteredServicesFromFile)
 service_controller.init();

 auto services_to_port_map = i_service_controller->getServiceToPortMap();
-EXPECT_EQ(services_to_port_map.find("family1_id3")->second, family1_id3_port);
+vector<PortNumber> ports = {l4_firewall_service_port, family1_id3_port};
+EXPECT_EQ(services_to_port_map.find("mock access control")->second, ports);
 }

 TEST_F(ServiceControllerTest, noPolicyUpdate)
@@ -1589,7 +1611,7 @@ TEST_F(ServiceControllerTest, testPortsRest)
 empty_json << "{}";
 auto res = get_services_ports->performRestCall(empty_json);
 ASSERT_TRUE(res.ok());
-EXPECT_THAT(res.unpack(), HasSubstr("family1_id2:8888"));
+EXPECT_THAT(res.unpack(), HasSubstr("mock-access-control:8888;"));
 }

 TEST_F(ServiceControllerTest, testMultitenantConfFiles)
@@ -141,7 +141,7 @@ DeclarativePolicyUtils::sendUpdatesToFog(
 auto shell_cmd = Singleton::Consume<I_ShellCmd>::by<DeclarativePolicyUtils>();
 string exec_command =
 getFilesystemPathConfig()
-+ "/scripts/open-appsec-cloud-mgmt --upload_policy_only"
++ "/scripts/open-appsec-cloud-mgmt --config-upload-only"
 + " --access_token " + access_token
 + " --tenant_id " + tenant_id
 + " --profile_id " + profile_id;
@@ -179,6 +179,17 @@ FogAuthenticator::registerAgent(
 dbgDebug(D_ORCHESTRATOR) << nginx_data.getErr();
 }

+auto cloud_metadata = details_resolver->readCloudMetadata();
+if (cloud_metadata.ok()) {
+request << make_pair("cloudAccountId", ::get<0>(cloud_metadata.unpack()));
+request << make_pair("cloudVpcId", ::get<1>(cloud_metadata.unpack()));
+request << make_pair("cloudInstanceId", ::get<2>(cloud_metadata.unpack()));
+request << make_pair("cloudInstanceLocalIp", ::get<3>(cloud_metadata.unpack()));
+request << make_pair("cloudRegion", ::get<4>(cloud_metadata.unpack()));
+} else {
+dbgDebug(D_ORCHESTRATOR) << cloud_metadata.getErr();
+}
+
 for (const pair<string, string> details : details_resolver->getResolvedDetails()) {
 request << details;
 }
@@ -450,9 +461,9 @@ getDeplymentType()
 auto deplyment_type = Singleton::Consume<I_EnvDetails>::by<FogAuthenticator>()->getEnvType();
 switch (deplyment_type) {
 case EnvType::LINUX: return "Embedded";
-case EnvType::DOCKER: return "Embedded";
+case EnvType::DOCKER: return "Docker";
 case EnvType::NON_CRD_K8S:
-case EnvType::K8S: return "Embedded";
+case EnvType::K8S: return "K8S";
 case EnvType::COUNT: break;
 }

@@ -579,7 +590,7 @@ FogAuthenticator::authenticateAgent()
 auto mainloop = Singleton::Consume<I_MainLoop>::by<FogAuthenticator>();
 if (!mainloop->doesRoutineExist(routine)) {
 routine = mainloop->addOneTimeRoutine(
-I_MainLoop::RoutineType::RealTime,
+I_MainLoop::RoutineType::System,
 [this, min_expiration_time] ()
 {
 uint expiration_time;
@@ -32,6 +32,7 @@ FogCommunication::init()
 {
 FogAuthenticator::init();
 i_declarative_policy = Singleton::Consume<I_DeclarativePolicy>::from<DeclarativePolicyUtils>();
+profile_mode = getSettingWithDefault<string>("management", "profileManagedMode");
 }

 Maybe<void>
@@ -66,6 +67,16 @@ FogCommunication::getUpdate(CheckUpdateRequest &request)
 Maybe<string> maybe_new_data = request.getData();
 string data_checksum = maybe_new_data.ok() ? maybe_new_data.unpack() : "";

+if (profile_mode != policy_mgmt_mode) {
+dbgTrace(D_ORCHESTRATOR)
+<< "The profile managed mode was changed from: "
+<< profile_mode
+<< " to: "
+<< policy_mgmt_mode;
+profile_mode = policy_mgmt_mode;
+i_declarative_policy->turnOnApplyPolicyFlag();
+}
+
 if (i_declarative_policy->shouldApplyPolicy()) {
 string policy_response = i_declarative_policy->getUpdate(request);
 if (!policy_response.empty()) {
@@ -32,6 +32,7 @@
 using namespace std;

 USE_DEBUG_FLAG(D_ORCHESTRATOR);

 class UpdateCommunication::Impl
 :
 public ServerRest,
@@ -0,0 +1 @@
+add_library(updates_process_reporter updates_process_event.cc updates_process_reporter.cc)
@@ -0,0 +1,124 @@
+// Copyright (C) 2022 Check Point Software Technologies Ltd. All rights reserved.
+
+// Licensed under the Apache License, Version 2.0 (the "License");
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "updates_process_event.h"
+
+#include <sstream>
+#include <string>
+
+#include "debug.h"
+
+using namespace std;
+
+USE_DEBUG_FLAG(D_UPDATES_PROCESS_REPORTER);
+
+UpdatesProcessEvent::UpdatesProcessEvent(
+UpdatesProcessResult _result,
+UpdatesConfigType _type,
+UpdatesFailureReason _reason,
+const std::string &_detail,
+const std::string &_description)
+:
+result(_result),
+type(_type),
+reason(_reason),
+detail(_detail),
+description(_description)
+{
+string report =
+"Result: " + convertUpdateProcessResultToStr(result) +
+", Reason: " + convertUpdatesFailureReasonToStr(reason) +
+", Type: " + convertUpdatesConfigTypeToStr(type) +
+", Detail: " + detail +
+", Description: " + description;
+dbgTrace(D_UPDATES_PROCESS_REPORTER) << "Updates process event: " << report;
+}
+
+OrchestrationStatusFieldType
+UpdatesProcessEvent::getStatusFieldType() const
+{
+if (reason == UpdatesFailureReason::REGISTRATION) {
+return OrchestrationStatusFieldType::REGISTRATION;
+}
+if (type == UpdatesConfigType::MANIFEST) {
+return OrchestrationStatusFieldType::MANIFEST;
+}
+return OrchestrationStatusFieldType::LAST_UPDATE;
+}
+
+OrchestrationStatusResult
+UpdatesProcessEvent::getOrchestrationStatusResult() const
+{
+return result == UpdatesProcessResult::SUCCESS ?
+OrchestrationStatusResult::SUCCESS :
+OrchestrationStatusResult::FAILED;
+}
+
+string
+UpdatesProcessEvent::parseDescription() const
+{
+stringstream err;
+if (description.empty() || result == UpdatesProcessResult::SUCCESS) return "";
+
+switch (reason) {
+case UpdatesFailureReason::CHECK_UPDATE: {
+err << description;
+break;
+}
+case UpdatesFailureReason::REGISTRATION: {
+err << "Registration failed. Error: " << description;
+break;
+}
+case UpdatesFailureReason::GET_UPDATE_REQUEST: {
+err << "Failed to get update request. Error: " << description;
+break;
+}
+case UpdatesFailureReason::DOWNLOAD_FILE : {
+err << "Failed to download the file " << detail << ". Error: " << description;
+break;
+}
+case UpdatesFailureReason::HANDLE_FILE : {
+err << "Failed to handle the file " << detail << ". " << description;
+break;
+}
+case UpdatesFailureReason::INSTALLATION_QUEUE : {
+err << "Installation queue creation failed. Error: " << description;
+break;
+}
+case UpdatesFailureReason::INSTALL_PACKAGE : {
+err << "Failed to install the package " << detail << ". Error: " << description;
+break;
+}
+case UpdatesFailureReason::CHECKSUM_UNMATCHED : {
+err << "Checksums do not match for the file: " << detail << ". " << description;
+break;
+}
+case UpdatesFailureReason::POLICY_CONFIGURATION : {
+err << "Failed to configure policy version: " << detail << ". Error: " << description;
+break;
+}
+case UpdatesFailureReason::POLICY_FOG_CONFIGURATION : {
+err << "Failed to configure the fog address: " << detail << ". Error: " << description;
+break;
+}
+case UpdatesFailureReason::ORCHESTRATION_SELF_UPDATE : {
+err << description;
+break;
+}
+case UpdatesFailureReason::NONE : {
+err << description;
+break;
+}
+}
+return err.str();
+}
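The new event class above essentially maps a failure reason plus free-text detail to a single human-readable error string. A reduced standalone sketch of that mapping; the enum values and messages here are an illustrative subset, not the orchestration code's real definitions.

#include <iostream>
#include <sstream>
#include <string>

// Illustrative subset of the failure reasons used above.
enum class FailureReason { NONE, REGISTRATION, DOWNLOAD_FILE, INSTALL_PACKAGE };

std::string
describeFailure(FailureReason reason, const std::string &detail, const std::string &description)
{
    if (description.empty()) return "";
    std::stringstream err;
    switch (reason) {
        case FailureReason::REGISTRATION:
            err << "Registration failed. Error: " << description;
            break;
        case FailureReason::DOWNLOAD_FILE:
            err << "Failed to download the file " << detail << ". Error: " << description;
            break;
        case FailureReason::INSTALL_PACKAGE:
            err << "Failed to install the package " << detail << ". Error: " << description;
            break;
        case FailureReason::NONE:
            err << description;
            break;
    }
    return err.str();
}

int main()
{
    std::cout << describeFailure(FailureReason::DOWNLOAD_FILE, "manifest.json", "timeout") << "\n";
    return 0;
}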
@@ -0,0 +1,86 @@
+// Copyright (C) 2022 Check Point Software Technologies Ltd. All rights reserved.
+
+// Licensed under the Apache License, Version 2.0 (the "License");
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "updates_process_reporter.h"
+
+#include <sstream>
+#include <string>
+
+#include "debug.h"
+#include "log_generator.h"
+
+using namespace std;
+
+USE_DEBUG_FLAG(D_UPDATES_PROCESS_REPORTER);
+
+vector<UpdatesProcessReport> UpdatesProcessReporter::reports;
+
+void
+UpdatesProcessReporter::upon(const UpdatesProcessEvent &event)
+{
+if (event.getReason() == UpdatesFailureReason::CHECK_UPDATE) {
+if (event.getResult() == UpdatesProcessResult::SUCCESS && reports.empty()) {
+dbgTrace(D_UPDATES_PROCESS_REPORTER) << "Update proccess finished successfully";
+report_failure_count = 0;
+return;
+}
+dbgTrace(D_UPDATES_PROCESS_REPORTER) << "Update proccess finished with errors";
+report_failure_count++;
+if (report_failure_count <= 1) {
+reports.clear();
+return;
+}
+reports.emplace_back(
+UpdatesProcessReport(
+event.getResult(),
+event.getType(),
+event.getReason(),
+event.parseDescription()
+)
+);
+sendReoprt();
+return;
+}
+if (event.getResult() == UpdatesProcessResult::SUCCESS || event.getResult() == UpdatesProcessResult::UNSET) return;
+reports.emplace_back(
+UpdatesProcessReport(event.getResult(), event.getType(), event.getReason(), event.parseDescription())
+);
+}
+
+void
+UpdatesProcessReporter::sendReoprt()
+{
+stringstream full_reports;
+UpdatesFailureReason failure_reason = UpdatesFailureReason::NONE;
+full_reports << "Updates process reports:" << endl;
+full_reports << "report failure count:" << report_failure_count << endl;
+for (const auto &report : reports) {
+if (report.getReason() != UpdatesFailureReason::CHECK_UPDATE) {
+failure_reason = report.getReason();
+}
+full_reports << report.toString() << endl;
+}
+reports.clear();
+dbgTrace(D_UPDATES_PROCESS_REPORTER) << "Sending updates process report: " << endl << full_reports.str();
+LogGen log (
+"Updates process report",
+ReportIS::Audience::INTERNAL,
+ReportIS::Severity::HIGH,
+ReportIS::Priority::HIGH,
+ReportIS::Tags::ORCHESTRATOR
+);
+log << LogField("eventMessage", full_reports.str());
+if (failure_reason != UpdatesFailureReason::NONE) {
+log.addToOrigin(LogField("eventCategory", convertUpdatesFailureReasonToStr(failure_reason)));
+}
+}
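The reporter above accumulates per-step reports and only emits a combined log once a check-update cycle has failed more than once. A small standalone sketch of that accumulate-then-flush idea; the class, names, and threshold are illustrative only.

#include <iostream>
#include <string>
#include <vector>

// Illustrative accumulator: collect failure lines, flush them as one
// combined report only after the second consecutive failed cycle.
class FailureAccumulator
{
public:
    void
    onCycleEnd(bool success, const std::vector<std::string> &failures)
    {
        if (success && failures.empty()) {
            failure_cycles = 0;
            return;
        }
        failure_cycles++;
        if (failure_cycles <= 1) {
            pending.clear();              // first failure: wait for a repeat
            return;
        }
        pending.insert(pending.end(), failures.begin(), failures.end());
        flush();
    }

private:
    void
    flush()
    {
        std::cout << "Updates process report (" << failure_cycles << " failed cycles):\n";
        for (const auto &line : pending) std::cout << "  " << line << "\n";
        pending.clear();
    }

    int failure_cycles = 0;
    std::vector<std::string> pending;
};

int main()
{
    FailureAccumulator acc;
    acc.onCycleEnd(false, {"Failed to download the file manifest.json"});
    acc.onCycleEnd(false, {"Failed to download the file manifest.json"});
    return 0;
}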
@@ -87,6 +87,7 @@ add_library(waap_clib
 ParserPairs.cc
 Waf2Util2.cc
 ParserPDF.cc
+ParserBinaryFile.cc
 )

 add_definitions("-Wno-unused-function")
@@ -27,6 +27,7 @@
 #include "ParserPairs.h"
 #include "ParserDelimiter.h"
 #include "ParserPDF.h"
+#include "ParserBinaryFile.h"
 #include "WaapAssetState.h"
 #include "Waf2Regex.h"
 #include "Waf2Util.h"
@@ -272,54 +273,58 @@ DeepParser::onKv(const char *k, size_t k_len, const char *v, size_t v_len, int f
 // Detect and decode potential base64 chunks in the value before further processing

 bool base64ParamFound = false;
-dbgTrace(D_WAAP_DEEP_PARSER) << " ===Processing potential base64===";
-std::string decoded_val, decoded_key;
-base64_variants base64_status = Waap::Util::b64Test(cur_val, decoded_key, decoded_val);
+Waap::Util::BinaryFileType base64BinaryFileType = Waap::Util::BinaryFileType::FILE_TYPE_NONE;
+if (m_depth == 1 && flags == BUFFERED_RECEIVER_F_MIDDLE && m_key.depth() == 1 && m_key.first() != "#base64"){
+dbgTrace(D_WAAP_DEEP_PARSER) << " === will not check base64 since prev data block was not b64-encoded ===";
+} else {
+dbgTrace(D_WAAP_DEEP_PARSER) << " ===Processing potential base64===";
+std::string decoded_val, decoded_key;
+base64_variants base64_status = Waap::Util::b64Test(cur_val, decoded_key, decoded_val, base64BinaryFileType);

 dbgTrace(D_WAAP_DEEP_PARSER)
 << " status = "
 << base64_status
 << " key = "
 << decoded_key
 << " value = "
 << decoded_val;

 switch (base64_status) {
 case SINGLE_B64_CHUNK_CONVERT:
-cur_val = decoded_val;
-base64ParamFound = true;
-break;
-case KEY_VALUE_B64_PAIR:
-// going deep with new pair in case value is not empty
-if (decoded_val.size() > 0) {
 cur_val = decoded_val;
 base64ParamFound = true;
-rc = onKv(
-decoded_key.c_str(),
-decoded_key.size(),
-cur_val.data(),
-cur_val.size(),
-flags,
-parser_depth
+break;
+case KEY_VALUE_B64_PAIR:
+// going deep with new pair in case value is not empty
+if (decoded_val.size() > 0) {
+cur_val = decoded_val;
+base64ParamFound = true;
+rc = onKv(
+decoded_key.c_str(),
+decoded_key.size(),
+cur_val.data(),
+cur_val.size(),
+flags,
+parser_depth
 );

 dbgTrace(D_WAAP_DEEP_PARSER) << " rc = " << rc;
 if (rc != CONTINUE_PARSING) {
 return rc;
+}
 }
-}
-break;
-case CONTINUE_AS_IS:
-break;
-default:
-break;
-}
+break;
+case CONTINUE_AS_IS:
+break;
+default:
+break;
+}

 if (base64ParamFound) {
 dbgTrace(D_WAAP_DEEP_PARSER) << "DeepParser::onKv(): pushing #base64 prefix to the key.";
 m_key.push("#base64", 7, false);
+}
 }

 // cur_val is later passed through some filters (such as urldecode) before JSON, XML or HTML is detected/decoded
 std::string orig_val = cur_val;

@@ -355,7 +360,8 @@ DeepParser::onKv(const char *k, size_t k_len, const char *v, size_t v_len, int f
 isUrlPayload,
 isUrlParamPayload,
 flags,
-parser_depth
+parser_depth,
+base64BinaryFileType
 );
 } else {
 offset = 0;
@@ -425,7 +431,8 @@ DeepParser::onKv(const char *k, size_t k_len, const char *v, size_t v_len, int f
 isUrlParamPayload,
 flags,
 parser_depth,
-base64ParamFound
+base64ParamFound,
+base64BinaryFileType
 );
 if (rc != CONTINUE_PARSING) {
 return rc;
@@ -468,19 +475,19 @@ DeepParser::onKv(const char *k, size_t k_len, const char *v, size_t v_len, int f
 if (rc != CONTINUE_PARSING) {
 return rc;
 }
+std::string json_decoded_val, json_decoded_key;
-if (Waap::Util::detectJSONasParameter(cur_val, decoded_key, decoded_val)) {
+if (Waap::Util::detectJSONasParameter(cur_val, json_decoded_key, json_decoded_val)) {
 dbgTrace(D_WAAP_DEEP_PARSER)
 << " detectJSONasParameter was true: key = "
-<< decoded_key
+<< json_decoded_key
 << " value = "
-<< decoded_val;
+<< json_decoded_val;

 rc = onKv(
-decoded_key.c_str(),
-decoded_key.size(),
-decoded_val.data(),
-decoded_val.size(),
+json_decoded_key.c_str(),
+json_decoded_key.size(),
+json_decoded_val.data(),
+json_decoded_val.size(),
 flags,
 parser_depth
 );
@@ -798,7 +805,8 @@ DeepParser::parseAfterMisleadingMultipartBoundaryCleaned(
 bool isUrlParamPayload,
 int flags,
 size_t parser_depth,
-bool base64ParamFound)
+bool base64ParamFound,
+Waap::Util::BinaryFileType b64FileType)
 {
 int offset = -1;
 int rc = 0;
@@ -815,7 +823,8 @@ DeepParser::parseAfterMisleadingMultipartBoundaryCleaned(
 isUrlPayload,
 isUrlParamPayload,
 flags,
-parser_depth
+parser_depth,
+b64FileType
 );
 } else {
 offset = 0;
@@ -919,7 +928,8 @@ DeepParser::createInternalParser(
 bool isUrlPayload,
 bool isUrlParamPayload,
 int flags,
-size_t parser_depth
+size_t parser_depth,
+Waap::Util::BinaryFileType b64FileType
 )
 {
 dbgTrace(D_WAAP_DEEP_PARSER)
@@ -1152,10 +1162,25 @@ DeepParser::createInternalParser(
 m_parsersDeque.push_back(std::make_shared<BufferedParser<ParserPDF>>(*this, parser_depth + 1));
 offset = 0;
 } else {
-dbgTrace(D_WAAP_DEEP_PARSER) << "Starting to parse a binary file";
-m_parsersDeque.push_back(std::make_shared<BufferedParser<ParserBinary>>(*this, parser_depth + 1));
-offset = 0;
+Waap::Util::BinaryFileType fileType = ParserBinaryFile::detectBinaryFileHeader(cur_val);
+if (fileType != Waap::Util::BinaryFileType::FILE_TYPE_NONE) {
+dbgTrace(D_WAAP_DEEP_PARSER) << "Starting to parse a known binary file (type=" << fileType << ")";
+m_parsersDeque.push_back(
+std::make_shared<BufferedParser<ParserBinaryFile>>(*this, parser_depth + 1, false, fileType)
+);
+offset = 0;
+} else {
+dbgTrace(D_WAAP_DEEP_PARSER) << "Starting to parse a binary file";
+m_parsersDeque.push_back(std::make_shared<BufferedParser<ParserBinary>>(*this, parser_depth + 1));
+offset = 0;
+}
 }
+} else if (b64FileType != Waap::Util::BinaryFileType::FILE_TYPE_NONE) {
+dbgTrace(D_WAAP_DEEP_PARSER) << "Starting to parse a known binary file, base64 encoded";
+m_parsersDeque.push_back(
+std::make_shared<BufferedParser<ParserBinaryFile>>(*this, parser_depth + 1, true, b64FileType)
+);
+offset = 0;
 }
 }
 if (offset < 0) {
@@ -18,6 +18,7 @@
 #include "KeyStack.h"
 #include "WaapAssetState.h"
 #include "Waf2Regex.h"
+#include "Waf2Util.h"
 #include "maybe_res.h"
 #include <deque>

@@ -129,7 +130,8 @@ private:
 bool isUrlPayload,
 bool isUrlParamPayload,
 int flags,
-size_t parser_depth
+size_t parser_depth,
+Waap::Util::BinaryFileType b64FileType
 );

 int createUrlParserForJson(
@@ -160,7 +162,8 @@ private:
 bool isUrlParamPayload,
 int flags,
 size_t parser_depth,
-bool base64ParamFound
+bool base64ParamFound,
+Waap::Util::BinaryFileType b64FileType
 );
 int pushValueToTopParser(std::string &cur_val, int flags, bool base64ParamFound, int offset, size_t parser_depth);
 int parseBuffer(
@@ -22,6 +22,7 @@
 #define BUFFERED_RECEIVER_F_LAST 0x02
 #define BUFFERED_RECEIVER_F_BOTH (BUFFERED_RECEIVER_F_FIRST | BUFFERED_RECEIVER_F_LAST)
 #define BUFFERED_RECEIVER_F_UNNAMED 0x04
+#define BUFFERED_RECEIVER_F_MIDDLE 0x00

 #if (DISTRO_centos6)
 // pre c++11 compiler doesn' support the "final" keyword
components/security_apps/waap/waap_clib/ParserBinaryFile.cc (new file, 199 lines)
@@ -0,0 +1,199 @@
+// Copyright (C) 2022 Check Point Software Technologies Ltd. All rights reserved.
+
+// Licensed under the Apache License, Version 2.0 (the "License");
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "ParserBinaryFile.h"
+#include "Waf2Util.h"
+#include "debug.h"
+#include <string.h>
+#include <map>
+#include <tuple>
+
+using namespace std;
+using Waap::Util::BinaryFileType;
+
+USE_DEBUG_FLAG(D_WAAP_PARSER_BINARY_FILE);
+USE_DEBUG_FLAG(D_WAAP);
+
+const string ParserBinaryFile::m_parserName = "ParserBinaryFile";
+
+static const map<BinaryFileType, pair<string, string>> m_head_tail_map = {
+{BinaryFileType::FILE_TYPE_PNG,
+{string("\x89\x50\x4E\x47\x0D\x0A\x1A\x0A"), // PNG
+string("\x49\x45\x4e\x44\xae\x42\x60\x82")}}, // IEND
+{BinaryFileType::FILE_TYPE_JPEG,
+{string("\xff\xd8\xff"),
+string("\xff\xd9")}},
+{BinaryFileType::FILE_TYPE_PDF,
+{string("%PDF-"),
+string("%%EOF")}}
+};
+
+ParserBinaryFile::ParserBinaryFile(
+IParserStreamReceiver &receiver,
+size_t parser_depth,
+bool is_base64,
+BinaryFileType file_type
+) :
+m_receiver(receiver),
+m_state(s_start),
+m_parser_depth(parser_depth),
+m_is_base64(is_base64),
+m_file_type(file_type)
+{}
+
+ParserBinaryFile::~ParserBinaryFile()
+{}
+
+BinaryFileType
+ParserBinaryFile::detectBinaryFileHeader(const string &buf)
+{
+if (buf.size() < MIN_HEADER_LOOKUP) {
+dbgTrace(D_WAAP_PARSER_BINARY_FILE) << "Buffer size too small (" << buf.size() << ")";
+return BinaryFileType::FILE_TYPE_NONE;
+}
+const string searchStr = buf.substr(0, MAX_HEADER_LOOKUP);
+for (const auto &entry : m_head_tail_map) {
+const string &head = entry.second.first;
+size_t pos = searchStr.find(head);
+if (pos != string::npos) {
+if (buf.size() - pos >= MIN_HEADER_LOOKUP) {
+dbgTrace(D_WAAP_PARSER_BINARY_FILE) << "Found. type=" << entry.first;
+return entry.first;
+} else {
+dbgTrace(D_WAAP_PARSER_BINARY_FILE) << "Remaining size after header is too small";
+return BinaryFileType::FILE_TYPE_NONE;
+}
+}
+}
+return BinaryFileType::FILE_TYPE_NONE;
+}
+
+
+size_t
+ParserBinaryFile::push(const char *buf, size_t len)
+{
+dbgTrace(D_WAAP_PARSER_BINARY_FILE)
+<< "buf="
+<< buf
+<< "len="
+<< len;
+
+const char *c;
+
+if (m_state == s_error) {
+return 0;
+}
+if (len == 0) {
+dbgTrace(D_WAAP_PARSER_BINARY_FILE) << "end of stream. m_state=" << m_state;
+
+if (m_state == s_end) {
+m_receiver.onKvDone();
+} else if (m_is_base64) {
+dbgTrace(D_WAAP_PARSER_BINARY_FILE) << "finished parsing";
+if (m_receiver.onKey("BinaryFileSkip", 14) != 0) {
+m_state = s_error;
+return 0;
+}
+if (m_receiver.onValue("", 0) != 0) {
+m_state = s_error;
+return 0;
+}
+m_receiver.onKvDone();
+} else {
+m_state = s_error;
+}
+return 0;
+}
+if (m_head_tail_map.find(m_file_type) == m_head_tail_map.end()) {
+dbgTrace(D_WAAP_PARSER_BINARY_FILE) << "unknown file type: " << m_file_type;
+m_state = s_error;
+return 0;
+}
+const string tail = m_head_tail_map.at(m_file_type).second;
+
+switch (m_state) {
+case s_start:
+m_state = s_body;
+CP_FALL_THROUGH;
+case s_body:
+if (m_is_base64) {
+dbgTrace(D_WAAP_PARSER_BINARY_FILE) << "parsing base64";
+bool keepParsing = true;
+for (size_t i = 0; i < len; i++) {
+bool isB64AlphaChar =
+Waap::Util::isAlphaAsciiFast(buf[i]) || isdigit(buf[i]) || buf[i] == '/' || buf[i] == '+';
+if (buf[i] == '=') {
+dbgTrace(D_WAAP_PARSER_BINARY_FILE)
+<< "base64 padding found (offset=" << i << "). end of stream.";
+m_state = s_end;
+keepParsing = false;
+break;
+} else if (!isB64AlphaChar) {
+dbgTrace(D_WAAP_PARSER_BINARY_FILE)
+<< "non-base64 char found (c=" << buf[i] << ",offset=" << i << "). return error";
+m_state = s_error;
+return 0;
+}
+}
+if (keepParsing) { // keep "parsing" on next call to push()
+break;
+}
+} else {
+dbgTrace(D_WAAP_PARSER_BINARY_FILE) << "parsing binary. Searching for tail: " << tail;
+c = strstr(buf + len - tail.size(), tail.c_str());
+dbgTrace(D_WAAP_PARSER_BINARY_FILE) << "search result: c=" << c;
+if (c) {
+m_state = s_end;
+} else { // keep "parsing" on next call to push()
+break;
+}
+}
+CP_FALL_THROUGH;
+case s_end:
+dbgTrace(D_WAAP_PARSER_BINARY_FILE) << "finished parsing";
+if (m_receiver.onKey("BinaryFileSkip", 14) != 0) {
+m_state = s_error;
+return 0;
+}
+if (m_receiver.onValue("", 0) != 0) {
+m_state = s_error;
+return 0;
+}
+break;
+case s_error:
+dbgTrace(D_WAAP_PARSER_BINARY_FILE) << "error detected";
+break;
+default:
+dbgTrace(D_WAAP_PARSER_BINARY_FILE) << "unknown state: " << m_state;
+m_state = s_error;
+return 0;
+}
+
+return len;
+}
+
+
+void ParserBinaryFile::finish()
+{
+push(NULL, 0);
+}
+
+const string& ParserBinaryFile::name() const
+{
+return m_parserName;
+}
+
+bool ParserBinaryFile::error() const
+{
+return m_state == s_error;
+}
components/security_apps/waap/waap_clib/ParserBinaryFile.h (new file, 57 lines)
@@ -0,0 +1,57 @@
+// Copyright (C) 2022 Check Point Software Technologies Ltd. All rights reserved.
+
+// Licensed under the Apache License, Version 2.0 (the "License");
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef __PARSER_BINARY_FILE_H__
+#define __PARSER_BINARY_FILE_H__
+
+#include "ParserBase.h"
+#include "Waf2Util.h"
+#include <string.h>
+
+#define MIN_HEADER_LOOKUP 16
+#define MAX_HEADER_LOOKUP 64
+#define MAX_TAIL_LOOKUP 5
+
+class ParserBinaryFile : public ParserBase {
+public:
+static Waap::Util::BinaryFileType detectBinaryFileHeader(const std::string &buf);
+
+ParserBinaryFile(
+IParserStreamReceiver &receiver,
+size_t parser_depth,
+bool is_base64,
+Waap::Util::BinaryFileType file_type);
+virtual ~ParserBinaryFile();
+virtual size_t push(const char *buf, size_t len);
+virtual void finish();
+virtual const std::string &name() const;
+virtual bool error() const;
+virtual size_t depth() { return 1; }
+
+private:
+enum state {
+s_start,
+s_body,
+s_end,
+s_error
+};
+
+IParserStreamReceiver &m_receiver;
+enum state m_state;
+static const std::string m_parserName;
+size_t m_parser_depth;
+bool m_is_base64;
+Waap::Util::BinaryFileType m_file_type;
+};
+
+#endif // __PARSER_BINARY_FILE_H__
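ParserBinaryFile identifies well-known binary payloads by their magic bytes and then skips their body. A standalone sketch of the same header-signature check; the signatures mirror the table in the new file above, while the enum and function names here are illustrative, not the repository's API.

#include <iostream>
#include <string>
#include <utility>
#include <vector>

enum class FileKind { NONE, PNG, JPEG, PDF };

// Head signatures, mirroring the m_head_tail_map heads above.
static const std::vector<std::pair<FileKind, std::string>> kSignatures = {
    {FileKind::PNG,  std::string("\x89PNG\r\n\x1a\n", 8)},
    {FileKind::JPEG, std::string("\xff\xd8\xff", 3)},
    {FileKind::PDF,  "%PDF-"},
};

FileKind
detectFileKind(const std::string &buf, size_t max_lookup = 64)
{
    // Only scan a bounded prefix, like MAX_HEADER_LOOKUP above.
    const std::string prefix = buf.substr(0, max_lookup);
    for (const auto &sig : kSignatures) {
        if (prefix.find(sig.second) != std::string::npos) return sig.first;
    }
    return FileKind::NONE;
}

int main()
{
    std::string png_like = std::string("\x89PNG\r\n\x1a\n", 8) + "binary payload ...";
    std::cout << (detectFileKind(png_like) == FileKind::PNG ? "PNG detected" : "unknown") << "\n";
    return 0;
}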
@@ -304,6 +304,7 @@ ParserJson::ParserJson(
 m_key.push("json", 4);
 }

+
 ParserJson::~ParserJson()
 {
 // Cleanup JSON
@@ -23,6 +23,7 @@ unescaped_line(),
 param_name(),
 location(),
 score(0.0f),
+scoreNoFilter(0.0f),
 scoreArray(),
 keywordCombinations(),
 attack_types(),
@@ -40,6 +41,7 @@ void Waf2ScanResult::clear()
 param_name.clear();
 location.clear();
 score = 0;
+scoreNoFilter = 0;
 scoreArray.clear();
 keywordCombinations.clear();
 attack_types.clear();
@@ -29,6 +29,7 @@ struct Waf2ScanResult {
 std::string param_name;
 std::string location;
 double score;
+double scoreNoFilter;
 std::vector<double> scoreArray;
 std::vector<std::string> keywordCombinations;
 std::set<std::string> attack_types;
@@ -727,7 +727,6 @@ void SerializeToLocalAndRemoteSyncBase::syncWorker()
 "sync notification for '" + m_assetId + "'",
 ReportIS::AudienceTeam::WAAP,
 syncNotification,
-false,
 MessageCategory::GENERIC,
 ReportIS::Tags::WAF,
 ReportIS::Notification::SYNC_LEARNING
@@ -39,7 +39,7 @@ namespace Conversions {
 return HIGH_THREAT;
 }

-bool shouldDoWafBlocking(const IWaapConfig* pWaapConfig, ThreatLevel threatLevel)
+bool shouldDoWafBlocking(const IWaapConfig* const pWaapConfig, ThreatLevel threatLevel)
 {
 if (pWaapConfig == NULL)
 {
@@ -20,7 +20,7 @@
 namespace Waap {
 namespace Conversions {
 ThreatLevel convertFinalScoreToThreatLevel(double finalScore);
-bool shouldDoWafBlocking(const IWaapConfig* pSitePolicy, ThreatLevel threatLevel);
+bool shouldDoWafBlocking(const IWaapConfig* const pSitePolicy, ThreatLevel threatLevel);
 }
 }

@@ -39,12 +39,19 @@ public:
 m_op = to_lower_copy(m_op);
 m_isCidr = false;
 m_value = "";
+m_isValid = true;

 if (m_op == "basic") {
 // If op == "BASIC" - read numeric value
 ar(cereal::make_nvp("tag", m_tag));
 m_tag = to_lower_copy(m_tag);

+if (m_tag != "sourceip" && m_tag != "sourceidentifier" && m_tag != "url" && m_tag != "hostname" &&
+m_tag != "keyword" && m_tag != "paramname" && m_tag != "paramvalue" && m_tag != "paramlocation" &&
+m_tag != "responsebody" && m_tag != "headername" && m_tag != "headervalue" ) {
+m_isValid = false;
+dbgDebug(D_WAAP_OVERRIDE) << "Invalid override tag: " << m_tag;
+}
 // The name "value" here is misleading. The real meaning is "regex pattern string"
 ar(cereal::make_nvp("value", m_value));

@@ -73,12 +80,14 @@ public:
 m_operand2 = std::make_shared<Match>();
 ar(cereal::make_nvp("operand2", *m_operand2));
 m_isOverrideResponse = m_operand1->m_isOverrideResponse || m_operand2->m_isOverrideResponse;
+m_isValid = m_operand1->m_isValid && m_operand2->m_isValid;
 }
 else if (m_op == "not") {
 // If op is "NOT" get one operand
 m_operand1 = std::make_shared<Match>();
 ar(cereal::make_nvp("operand1", *m_operand1));
 m_isOverrideResponse = m_operand1->m_isOverrideResponse;
+m_isValid = m_operand1->m_isValid;
 }
 }
 }
@@ -120,6 +129,10 @@ public:
 return m_isOverrideResponse;
 }

+bool isValidMatch() const{
+return m_isValid;
+}
+
 private:
 std::string m_op;
 std::shared_ptr<Match> m_operand1;
@@ -130,6 +143,7 @@ private:
 Waap::Util::CIDRData m_cidr;
 bool m_isCidr;
 bool m_isOverrideResponse;
+bool m_isValid;
 };

 class Behavior
@@ -189,6 +203,9 @@ private:

 class Rule {
 public:
+
+Rule(): m_match(), m_isChangingRequestData(false), isValid(true){}
+
 bool operator==(const Rule &other) const;

 template <typename _A>
@@ -202,6 +219,11 @@ public:
 m_id.clear();
 }
 ar(cereal::make_nvp("parsedMatch", m_match));
+if (!m_match.isValidMatch()) {
+dbgDebug(D_WAAP_OVERRIDE) << "An override rule was not load";
+isValid = false;
+}
+
 ar(cereal::make_nvp("parsedBehavior", m_behaviors));

 m_isChangingRequestData = false;
@@ -242,6 +264,7 @@ public:
 dbgTrace(D_WAAP_OVERRIDE) << "Rule not matched";
 }

+
 bool isChangingRequestData() const {
 return m_isChangingRequestData;
 }
@@ -253,11 +276,16 @@ public:
 return m_id;
 }

+bool isValidRule() const {
+return isValid;
+}
+
 private:
 Match m_match;
 bool m_isChangingRequestData;
 std::vector<Behavior> m_behaviors;
 std::string m_id;
+bool isValid;
 };

 class Policy {
@@ -270,6 +298,10 @@ public:

 for (std::vector<Waap::Override::Rule>::const_iterator it = rules.begin(); it != rules.end(); ++it) {
 const Waap::Override::Rule& rule = *it;
+if (!rule.isValidRule()) {
+dbgWarning(D_WAAP_OVERRIDE) << "rule is not valid";
+continue;
+}
 if (rule.isChangingRequestData())
 {
 m_RequestOverrides.push_back(rule);
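The override changes above tag a match as invalid when it uses an unknown tag and then skip the whole rule at policy-load time. A standalone sketch of that validate-then-skip pattern; the Rule struct and tag list here are illustrative only, not the repository's serialization classes.

#include <iostream>
#include <set>
#include <string>
#include <vector>

// Illustrative rule: valid only if its tag is one of the known match tags.
struct Rule
{
    std::string tag;
    bool
    isValid() const
    {
        static const std::set<std::string> known = {
            "sourceip", "sourceidentifier", "url", "hostname", "keyword",
            "paramname", "paramvalue", "paramlocation", "responsebody",
            "headername", "headervalue"
        };
        return known.count(tag) > 0;
    }
};

int main()
{
    std::vector<Rule> rules = {{"sourceip"}, {"countrycode"}, {"url"}};
    std::vector<Rule> loaded;
    for (const auto &rule : rules) {
        if (!rule.isValid()) {
            std::cerr << "skipping invalid override rule (tag=" << rule.tag << ")\n";
            continue;                      // invalid rules never reach the engine
        }
        loaded.push_back(rule);
    }
    std::cout << "loaded " << loaded.size() << " rules\n";
    return 0;
}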
@@ -145,6 +145,6 @@ bool WaapOverrideFunctor::operator()(const std::string& tag, const boost::regex&
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Unknown tag: should not occur
|
// Unknown tag: should not occur
|
||||||
dbgWarning(D_WAAP) << "Invalid override tag: " << tag;
|
dbgDebug(D_WAAP) << "Invalid override tag: " << tag;
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -25,7 +25,7 @@ USE_DEBUG_FLAG(D_OA_SCHEMA_UPDATER);
|
|||||||
// id generated by xml parser for an entity attribute
|
// id generated by xml parser for an entity attribute
|
||||||
const std::string Waap::Scanner::xmlEntityAttributeId = "08a80340-06d3-11ea-9f87-0242ac11000f";
|
const std::string Waap::Scanner::xmlEntityAttributeId = "08a80340-06d3-11ea-9f87-0242ac11000f";
|
||||||
|
|
||||||
double Waap::Scanner::getScoreData(Waf2ScanResult& res, const std::string &poolName)
|
double Waap::Scanner::getScoreData(Waf2ScanResult& res, const std::string &poolName, bool applyLearning)
|
||||||
{
|
{
|
||||||
std::string source = m_transaction->getSourceIdentifier();
|
std::string source = m_transaction->getSourceIdentifier();
|
||||||
|
|
||||||
@@ -33,21 +33,24 @@ double Waap::Scanner::getScoreData(Waf2ScanResult& res, const std::string &poolN
|
|||||||
Waap::Keywords::KeywordsSet keywordsSet;
|
Waap::Keywords::KeywordsSet keywordsSet;
|
||||||
Waap::Keywords::computeKeywordsSet(keywordsSet, res.keyword_matches, res.found_patterns);
|
Waap::Keywords::computeKeywordsSet(keywordsSet, res.keyword_matches, res.found_patterns);
|
||||||
|
|
||||||
std::string param_name = IndicatorsFiltersManager::generateKey(res.location, res.param_name, m_transaction);
|
if (applyLearning) {
|
||||||
dbgTrace(D_WAAP_SCANNER) << "filter processing for parameter: " << param_name;
|
std::string param_name = IndicatorsFiltersManager::generateKey(res.location, res.param_name, m_transaction);
|
||||||
m_transaction->getAssetState()->logIndicatorsInFilters(param_name, keywordsSet, m_transaction);
|
dbgTrace(D_WAAP_SCANNER) << "filter processing for parameter: " << param_name <<
|
||||||
|
", indicators count: " << keywordsSet.size();
|
||||||
|
m_transaction->getAssetState()->logIndicatorsInFilters(param_name, keywordsSet, m_transaction);
|
||||||
|
|
||||||
m_transaction->getAssetState()->filterKeywords(param_name, keywordsSet, res.filtered_keywords);
|
m_transaction->getAssetState()->filterKeywords(param_name, keywordsSet, res.filtered_keywords);
|
||||||
if (m_transaction->getSiteConfig() != nullptr)
|
if (m_transaction->getSiteConfig() != nullptr)
|
||||||
{
|
{
|
||||||
auto waapParams = m_transaction->getSiteConfig()->get_WaapParametersPolicy();
|
auto waapParams = m_transaction->getSiteConfig()->get_WaapParametersPolicy();
|
||||||
if (waapParams != nullptr && waapParams->getParamVal("filtersVerbose", "false") == "true") {
|
if (waapParams != nullptr && waapParams->getParamVal("filtersVerbose", "false") == "true") {
|
||||||
m_transaction->getAssetState()->filterVerbose(param_name, res.filtered_keywords);
|
m_transaction->getAssetState()->filterVerbose(param_name, res.filtered_keywords);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
m_transaction->getAssetState()->filterKeywordsByParameters(res.param_name, keywordsSet);
|
||||||
|
|
||||||
|
dbgTrace(D_WAAP_SCANNER) << "post filtering indicators count: " << keywordsSet.size();
|
||||||
}
|
}
|
||||||
m_transaction->getAssetState()->filterKeywordsByParameters(res.param_name, keywordsSet);
|
|
||||||
|
|
||||||
|
|
||||||
// The keywords are only removed in production, they are still used while building scores
|
// The keywords are only removed in production, they are still used while building scores
|
||||||
if (!m_transaction->get_ignoreScore()) {
|
if (!m_transaction->get_ignoreScore()) {
|
||||||
m_transaction->getAssetState()->removeKeywords(keywordsSet);
|
m_transaction->getAssetState()->removeKeywords(keywordsSet);
|
||||||
@@ -148,9 +151,16 @@ bool Waap::Scanner::suspiciousHit(Waf2ScanResult& res, DeepParser &dp,
|
|||||||
// Select scores pool by location
|
// Select scores pool by location
|
||||||
std::string poolName = Waap::Scores::getScorePoolNameByLocation(location);
|
std::string poolName = Waap::Scores::getScorePoolNameByLocation(location);
|
||||||
|
|
||||||
|
Waf2ScanResult nonFilterRes = res;
|
||||||
|
res.scoreNoFilter = getScoreData(nonFilterRes, poolName, false);
|
||||||
|
|
||||||
double score = getScoreData(res, poolName);
|
double score = getScoreData(res, poolName);
|
||||||
|
|
||||||
dbgTrace(D_WAAP_SCANNER) << "score: " << score;
|
// call shouldIgnoreOverride post score calculation and filtering to evaluate ignore override effectivness
|
||||||
|
res.score = score;
|
||||||
|
m_transaction->shouldIgnoreOverride(res);
|
||||||
|
|
||||||
|
dbgTrace(D_WAAP_SCANNER) << "score: " << score << " should ignore: " << ignoreOverride;
|
||||||
// Add record about scores to the notes[] log (also reported in logs)
|
// Add record about scores to the notes[] log (also reported in logs)
|
||||||
if (score > 1.0f) {
|
if (score > 1.0f) {
|
||||||
DetectionEvent(location, res.keyword_matches).notify();
|
DetectionEvent(location, res.keyword_matches).notify();
|
||||||
@@ -166,6 +176,7 @@ bool Waap::Scanner::suspiciousHit(Waf2ScanResult& res, DeepParser &dp,
|
|||||||
if (isKeyCspReport(key, res, dp) || ignoreOverride) {
|
if (isKeyCspReport(key, res, dp) || ignoreOverride) {
|
||||||
dbgTrace(D_WAAP_SCANNER) << "Ignoring parameter key/value " << res.param_name <<
|
dbgTrace(D_WAAP_SCANNER) << "Ignoring parameter key/value " << res.param_name <<
|
||||||
" due to ignore action in override";
|
" due to ignore action in override";
|
||||||
|
res.score = 0;
|
||||||
m_bIgnoreOverride = true;
|
m_bIgnoreOverride = true;
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -43,7 +43,7 @@ namespace Waap {
|
|||||||
|
|
||||||
static const std::string xmlEntityAttributeId;
|
static const std::string xmlEntityAttributeId;
|
||||||
private:
|
private:
|
||||||
double getScoreData(Waf2ScanResult& res, const std::string &poolName);
|
double getScoreData(Waf2ScanResult& res, const std::string &poolName, bool applyLearning = true);
|
||||||
bool shouldIgnoreOverride(const Waf2ScanResult &res);
|
bool shouldIgnoreOverride(const Waf2ScanResult &res);
|
||||||
bool isKeyCspReport(const std::string &key, Waf2ScanResult &res, DeepParser &dp);
|
bool isKeyCspReport(const std::string &key, Waf2ScanResult &res, DeepParser &dp);
|
||||||
|
|
||||||
|
|||||||
@@ -329,6 +329,7 @@ Waf2Transaction::Waf2Transaction() :
|
|||||||
is_schema_validation(false),
|
is_schema_validation(false),
|
||||||
m_waf2TransactionFlags()
|
m_waf2TransactionFlags()
|
||||||
{
|
{
|
||||||
|
m_overrideOriginalMaxScore[OVERRIDE_ACCEPT] = 0;
|
||||||
I_TimeGet *timeGet = Singleton::Consume<I_TimeGet>::by<Waf2Transaction>();
|
I_TimeGet *timeGet = Singleton::Consume<I_TimeGet>::by<Waf2Transaction>();
|
||||||
m_entry_time = chrono::duration_cast<chrono::milliseconds>(timeGet->getMonotonicTime());
|
m_entry_time = chrono::duration_cast<chrono::milliseconds>(timeGet->getMonotonicTime());
|
||||||
}
|
}
|
||||||
@@ -1516,6 +1517,7 @@ Waf2Transaction::decideAfterHeaders()
|
|||||||
return finalizeDecision(sitePolicy, shouldBlock);
|
return finalizeDecision(sitePolicy, shouldBlock);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
// Note: the only user of the transactionResult structure filled by this method is waap_automation.
|
// Note: the only user of the transactionResult structure filled by this method is waap_automation.
|
||||||
// TODO: Consider removing this parameter (and provide access to this information by other means)
|
// TODO: Consider removing this parameter (and provide access to this information by other means)
|
||||||
int
|
int
|
||||||
@@ -1728,6 +1730,11 @@ void Waf2Transaction::appendCommonLogFields(LogGen& waapLog,
|
|||||||
std::copy(m_effectiveOverrideIds.begin(), m_effectiveOverrideIds.end(), vEffectiveOverrideIds.begin());
|
std::copy(m_effectiveOverrideIds.begin(), m_effectiveOverrideIds.end(), vEffectiveOverrideIds.begin());
|
||||||
waapLog.addToOrigin(LogField("effectiveExceptionIdList", vEffectiveOverrideIds));
|
waapLog.addToOrigin(LogField("effectiveExceptionIdList", vEffectiveOverrideIds));
|
||||||
}
|
}
|
||||||
|
if (!m_exceptionLearned.empty()) {
|
||||||
|
std::vector<std::string> vLearningAffected(m_exceptionLearned.size());
|
||||||
|
std::copy(m_exceptionLearned.begin(), m_exceptionLearned.end(), vLearningAffected.begin());
|
||||||
|
waapLog.addToOrigin(LogField("redundantExceptionIdList", vLearningAffected));
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1808,12 +1815,6 @@ Waf2Transaction::sendLog()
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
dbgTrace(D_WAAP) << "force exception: " << m_overrideState.bForceException <<
|
|
||||||
" force block: " << m_overrideState.bForceBlock <<
|
|
||||||
" matched overrides count: " << m_matchedOverrideIds.size() <<
|
|
||||||
" effective overrides count: " << m_effectiveOverrideIds.size();
|
|
||||||
|
|
||||||
|
|
||||||
bool shouldBlock = false;
|
bool shouldBlock = false;
|
||||||
if (m_overrideState.bForceBlock) {
|
if (m_overrideState.bForceBlock) {
|
||||||
// If override forces "reject" decision, mention it in the "override" log field.
|
// If override forces "reject" decision, mention it in the "override" log field.
|
||||||
@@ -2090,7 +2091,30 @@ Waf2Transaction::decideAutonomousSecurity(
         transactionResult.threatLevel = threat;
     }

+    dbgTrace(D_WAAP_OVERRIDE) << "override ids count: " << m_matchedOverrideIds.size();
     // Apply overrides
+    for (auto it = m_overridePostFilterMaxScore.begin(); it != m_overridePostFilterMaxScore.end(); it++) {
+        const string id = it->first;
+        if (m_overrideState.forceBlockIds.find(id) != m_overrideState.forceBlockIds.end()) {
+            // blocked effectivness is calculates later from the force block exception ids list
+            continue;
+        }
+        ThreatLevel threat = Waap::Conversions::convertFinalScoreToThreatLevel(it->second);
+        bool shouldBlock = Waap::Conversions::shouldDoWafBlocking(m_siteConfig, threat);
+        dbgTrace(D_WAAP_OVERRIDE) << "checking effectivness of override: " << id << ", should have blocked: " << shouldBlock
+            << ", scores: " << m_overridePostFilterMaxScore[id] << ", " << m_overrideOriginalMaxScore[id];
+        if (shouldBlock) {
+            m_effectiveOverrideIds.insert(id);
+        } else {
+            ThreatLevel threatNoFilter = Waap::Conversions::convertFinalScoreToThreatLevel(
+                m_overrideOriginalMaxScore[id]
+            );
+            if (Waap::Conversions::shouldDoWafBlocking(m_siteConfig, threatNoFilter)) {
+                m_exceptionLearned.insert(id);
+            }
+        }
+    }
+
     if (m_overrideState.bForceBlock) {
         dbgTrace(D_WAAP) << "decideAutonomousSecurity(): decision was " << decision->shouldBlock() <<
             " and override forces REJECT ...";
@@ -2104,25 +2128,25 @@ Waf2Transaction::decideAutonomousSecurity(
         }
     }
     else if (m_overrideState.bForceException) {
        dbgTrace(D_WAAP) << "decideAutonomousSecurity(): decision was " << decision->shouldBlock() <<
            " and override forces ALLOW ...";
-        if (m_scanResult) {
-            // on accept exception the decision is not set and needs to be calculated to determine effectivness
-            ThreatLevel threat = Waap::Conversions::convertFinalScoreToThreatLevel(m_scanResult->score);
-            bool shouldBlock = Waap::Conversions::shouldDoWafBlocking(&sitePolicy, threat);
-            if (shouldBlock) {
-                m_effectiveOverrideIds.insert(
-                    m_overrideState.forceExceptionIds.begin(), m_overrideState.forceExceptionIds.end()
-                );
-            }
-        }
-
        decision->setBlock(false);
        if (!m_overrideState.bIgnoreLog)
        {
            decision->setOverridesLog(true);
        }
+    } else if (!m_matchedOverrideIds.empty()) {
+        if (!m_overrideState.bIgnoreLog)
+        {
+            decision->setOverridesLog(true);
+        }
     }
+    dbgTrace(D_WAAP_OVERRIDE) << "force exception: " << m_overrideState.bForceException <<
+        " force block: " << m_overrideState.bForceBlock <<
+        " matched overrides count: " << m_matchedOverrideIds.size() <<
+        " effective overrides count: " << m_effectiveOverrideIds.size() <<
+        " learned overrides count: " << m_exceptionLearned.size();
+

     bool log_all = false;
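The loop added in the hunks above classifies each matched exception id by comparing two recorded scores: the maximal score observed after the exception filtered its indicators (m_overridePostFilterMaxScore) and the maximal unfiltered score (m_overrideOriginalMaxScore). Ids whose filtered score still reaches a blocking threat level are reported under effectiveExceptionIdList, while ids where only the unfiltered score would have blocked are reported under redundantExceptionIdList. The following standalone sketch (not repository code) restates that comparison; the numeric threshold merely stands in for Waap::Conversions::shouldDoWafBlocking() and is an assumption for illustration.

// Sketch only: classify an exception id the way the added loop does, given the
// two recorded scores. The 3.0 blocking threshold is hypothetical.
#include <iostream>

enum class ExceptionClass { NoEffect, Effective, Redundant };

static bool wouldBlock(double score) { return score >= 3.0; }  // stand-in for shouldDoWafBlocking()

static ExceptionClass classifyException(double postFilterMaxScore, double originalMaxScore)
{
    if (wouldBlock(postFilterMaxScore)) return ExceptionClass::Effective;  // -> effectiveExceptionIdList
    if (wouldBlock(originalMaxScore)) return ExceptionClass::Redundant;    // -> redundantExceptionIdList
    return ExceptionClass::NoEffect;
}

int main()
{
    std::cout << static_cast<int>(classifyException(4.0, 6.0)) << "\n"; // 1 (Effective)
    std::cout << static_cast<int>(classifyException(1.0, 6.0)) << "\n"; // 2 (Redundant)
    std::cout << static_cast<int>(classifyException(0.5, 1.0)) << "\n"; // 0 (NoEffect)
    return 0;
}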
@@ -2261,7 +2285,7 @@ bool
 Waf2Transaction::shouldIgnoreOverride(const Waf2ScanResult &res) {
     auto exceptions = getConfiguration<ParameterException>("rulebase", "exception");
     if (!exceptions.ok()) {
-        dbgTrace(D_WAAP_OVERRIDE) << "matching exceptions error:" << exceptions.getErr();
+        dbgTrace(D_WAAP_OVERRIDE) << "matching exceptions error: " << exceptions.getErr();
         return false;
     }
     dbgTrace(D_WAAP_OVERRIDE) << "matching exceptions";
@@ -2304,6 +2328,24 @@ Waf2Transaction::shouldIgnoreOverride(const Waf2ScanResult &res) {
         auto behaviors = exceptions.unpack().getBehavior(exceptions_dict,
                 getAssetState()->m_filtersMngr->getMatchedOverrideKeywords());
         for (const auto &behavior : behaviors) {
+            if (!res.filtered_keywords.empty() || res.score > 0) {
+                dbgTrace(D_WAAP_OVERRIDE) << "matched exceptions for " << res.param_name << " with filtered indicators";
+                std::string overrideId = behavior.getId();
+                if (m_overrideOriginalMaxScore.find(overrideId) == m_overrideOriginalMaxScore.end()){
+                    m_overrideOriginalMaxScore[overrideId] = res.scoreNoFilter;
+                    m_overridePostFilterMaxScore[overrideId] = res.score;
+                } else {
+                    if (res.scoreNoFilter > m_overrideOriginalMaxScore[overrideId]) {
+                        m_overrideOriginalMaxScore[overrideId] = res.scoreNoFilter;
+                    }
+                    if (res.score > m_overridePostFilterMaxScore[overrideId]) {
+                        m_overridePostFilterMaxScore[overrideId] = res.score;
+                    }
+                }
+                if (res.scoreNoFilter > m_overrideOriginalMaxScore[OVERRIDE_ACCEPT]) {
+                    m_overrideOriginalMaxScore[OVERRIDE_ACCEPT] = res.scoreNoFilter;
+                }
+            }
             if (behavior == action_ignore)
             {
                 dbgTrace(D_WAAP_OVERRIDE) << "matched exceptions for " << res.param_name << " should ignore.";
@@ -2311,12 +2353,6 @@ Waf2Transaction::shouldIgnoreOverride(const Waf2ScanResult &res) {
                 if (!overrideId.empty()) {
                     m_matchedOverrideIds.insert(overrideId);
                 }
-                if (!res.keyword_matches.empty() || res.unescaped_line == Waap::Scanner::xmlEntityAttributeId)
-                {
-                    if (!overrideId.empty()) {
-                        m_effectiveOverrideIds.insert(overrideId);
-                    }
-                }
                 return true;
             }
         }
@@ -293,6 +293,9 @@ private:
     // Matched override IDs
     std::set<std::string> m_matchedOverrideIds;
     std::set<std::string> m_effectiveOverrideIds;
+    std::set<std::string> m_exceptionLearned;
+    std::map<std::string, double> m_overrideOriginalMaxScore;
+    std::map<std::string, double> m_overridePostFilterMaxScore;

     //csrf state
     Waap::CSRF::State m_csrfState;
@@ -459,9 +459,15 @@ Waf2Transaction::getUserLimitVerdict()
     }
     else if (mode == AttackMitigationMode::PREVENT) {
         decision->setLog(true);
-        decision->setBlock(true);
-        dbgInfo(D_WAAP_ULIMITS) << msg << "BLOCK" << reason;
-        verdict = ngx_http_cp_verdict_e::TRAFFIC_VERDICT_DROP;
+        if (!m_overrideState.bForceException) {
+            decision->setBlock(true);
+            dbgInfo(D_WAAP_ULIMITS) << msg << "BLOCK" << reason;
+            verdict = ngx_http_cp_verdict_e::TRAFFIC_VERDICT_DROP;
+        } else {
+            decision->setBlock(true);
+            dbgInfo(D_WAAP_ULIMITS) << msg << "Override Accept" << reason;
+            verdict = ngx_http_cp_verdict_e::TRAFFIC_VERDICT_ACCEPT;
+        }
     }

     return verdict;
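The change above makes the PREVENT branch of the user-limits verdict respect a force-accept exception: the event is still logged and marked as blocked for reporting, but the verdict handed back to the attachment becomes ACCEPT instead of DROP when m_overrideState.bForceException is set. A small standalone sketch of that selection (verdict names mirror the ngx_http_cp_verdict_e values used above; this is an illustration, not repository code):

#include <iostream>

// Sketch only: pick the traffic verdict for a user-limit violation in PREVENT
// mode, honouring a force-accept exception the way the hunk above does.
enum class Verdict { DROP, ACCEPT };

static Verdict userLimitPreventVerdict(bool forceException)
{
    // Logging and decision->setBlock(true) happen in both branches of the
    // original code; only the verdict returned to the attachment differs.
    return forceException ? Verdict::ACCEPT : Verdict::DROP;
}

int main()
{
    std::cout << (userLimitPreventVerdict(false) == Verdict::DROP) << "\n";  // 1
    std::cout << (userLimitPreventVerdict(true) == Verdict::ACCEPT) << "\n"; // 1
    return 0;
}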
@@ -567,12 +573,11 @@ Waap::Override::State Waf2Transaction::getOverrideState(IWaapConfig* sitePolicy)

     extractEnvSourceIdentifier();

-    Waap::Override::State overrideStateResponse;
     if (overridePolicy) { // later we will run response overrides
-        overrideStateResponse.applyOverride(*overridePolicy, WaapOverrideFunctor(*this), m_matchedOverrideIds, false);
+        m_overrideState.applyOverride(*overridePolicy, WaapOverrideFunctor(*this), m_matchedOverrideIds, false);
     }
     m_isHeaderOverrideScanRequired = false;
-    return overrideStateResponse;
+    return m_overrideState;
 }

 Waf2TransactionFlags &Waf2Transaction::getTransactionFlags()
@@ -34,6 +34,7 @@
 #include "generic_rulebase/rulebase_config.h"
 #include "user_identifiers_config.h"
 #include "Waf2Regex.h"
+#include "ParserBinaryFile.h"

 using boost::algorithm::to_lower_copy;
 using namespace std;
@@ -960,11 +961,12 @@ string filterUTF7(const string& text) {
 // 4. percent of non-printable characters (!isprint())
 //    in decoded data is less than 10% (statistical garbage detection).
 // Returns false above checks fail.
-bool decodeBase64Chunk(
+base64_decode_status decodeBase64Chunk(
     const string& value,
     string::const_iterator it,
     string::const_iterator end,
-    string& decoded)
+    string& decoded,
+    bool clear_on_error)
 {
     decoded.clear();
     uint32_t acc = 0;
@@ -974,13 +976,14 @@ bool decodeBase64Chunk(
     uint32_t spacer_count = 0;

     dbgTrace(D_WAAP) << "decodeBase64Chunk: value='" << value << "' match='" << string(it, end) << "'";
+    string::const_iterator begin = it;

     // The encoded data length (without the "base64," prefix) should be exactly divisible by 4
     // len % 4 is not 0 i.e. this is not base64
     if ((end - it) % 4 != 0) {
         dbgTrace(D_WAAP_BASE64) <<
             "b64DecodeChunk: (leave as-is) because encoded data length should be exactly divisible by 4.";
-        return false;
+        return B64_DECODE_INVALID;
     }

     while (it != end) {
@@ -992,7 +995,7 @@
             dbgTrace(D_WAAP_BASE64) <<
                 "decodeBase64Chunk: (leave as-is) because terminator characters must all be '='," <<
                 "until end of match.";
-            return false;
+            return B64_DECODE_INVALID;
         }

         // We should see 0, 1 or 2 (no more) terminator characters
@@ -1000,7 +1003,7 @@

         if (terminatorCharsSeen > 2) {
             dbgTrace(D_WAAP_BASE64) << "decodeBase64Chunk: (leave as-is) because terminatorCharsSeen > 2";
-            return false;
+            return B64_DECODE_INVALID;
         }

         // allow for more terminator characters
@@ -1033,8 +1036,8 @@
         }
         else {
             dbgTrace(D_WAAP_BASE64) << "decodeBase64Chunk: (leave as-is) because of non-base64 character ('" <<
-                c << "', ASCII " << (unsigned int)c << ")";
-            return false; // non-base64 character
+                c << "', ASCII " << (unsigned int)c << ", offset " << (it-begin) << ")";
+            return B64_DECODE_INVALID; // non-base64 character
         }

         acc = (acc << 6) | val;
@@ -1087,20 +1090,23 @@
         }
         else {
             dbgTrace(D_WAAP_BASE64) << "decodeBase64Chunk: (delete) because decoded.size=" << decoded.size() <<
-                ", nonPrintableCharsCount=" << nonPrintableCharsCount;
-            decoded.clear();
+                ", nonPrintableCharsCount=" << nonPrintableCharsCount <<
+                ", clear_on_error=" << clear_on_error;
+            if (clear_on_error) decoded.clear();
+            return B64_DECODE_INCOMPLETE;
         }
         dbgTrace(D_WAAP_BASE64) << "returning true: successfully decoded."
             << " Returns decoded data in \"decoded\" parameter";
-        return true; // successfully decoded. Returns decoded data in "decoded" parameter
+        return B64_DECODE_OK; // successfully decoded. Returns decoded data in "decoded" parameter
     }

     // If decoded size is too small - leave the encoded value (return false)
     decoded.clear(); // discard partial data
     dbgTrace(D_WAAP_BASE64) << "decodeBase64Chunk: (leave as-is) because decoded too small. decoded.size=" <<
         decoded.size() <<
-        ", nonPrintableCharsCount=" << nonPrintableCharsCount;
-    return false;
+        ", nonPrintableCharsCount=" << nonPrintableCharsCount <<
+        ", clear_on_error=" << clear_on_error;
+    return B64_DECODE_INVALID;
 }

 // Attempts to detect and validate base64 chunk.
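With this change decodeBase64Chunk() reports three outcomes instead of a boolean: B64_DECODE_OK (valid, mostly printable payload), B64_DECODE_INCOMPLETE (shaped like base64 but failing the printability check, previously the "(delete)" path), and B64_DECODE_INVALID (not base64, the "(leave as-is)" path), with clear_on_error controlling whether the partially decoded buffer is discarded. A minimal, self-contained sketch of how a caller could map such a tri-state result onto the keep/replace/delete behaviour described by the debug messages (local mirror of the enum; the handling policy is an assumption, not the repository's calling code):

#include <iostream>
#include <string>

// Local mirror of the tri-state result introduced above (sketch only).
enum base64_decode_status { B64_DECODE_INVALID, B64_DECODE_OK, B64_DECODE_INCOMPLETE };

// Maps the decode status onto the behaviour hinted at by the debug messages:
// OK -> use the decoded text, INCOMPLETE -> drop the value, INVALID -> keep it as-is.
static std::string applyDecodeResult(const std::string &original,
                                     const std::string &decoded,
                                     base64_decode_status status)
{
    switch (status) {
        case B64_DECODE_OK:         return decoded;
        case B64_DECODE_INCOMPLETE: return std::string();
        case B64_DECODE_INVALID:    return original;
    }
    return original;
}

int main()
{
    std::cout << applyDecodeResult("SGVsbG8=", "Hello", B64_DECODE_OK) << "\n";             // Hello
    std::cout << applyDecodeResult("not base64", "", B64_DECODE_INVALID) << "\n";           // not base64
    std::cout << applyDecodeResult("AAAA", "\x01\x02\x03", B64_DECODE_INCOMPLETE).size() << "\n"; // 0
    return 0;
}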
@@ -1144,7 +1150,7 @@ b64DecodeChunk(
         }
     }

-    return decodeBase64Chunk(value, it, end, decoded);
+    return decodeBase64Chunk(value, it, end, decoded) != B64_DECODE_INVALID;
 }

 vector<string> split(const string& s, char delim) {
@@ -1281,6 +1287,8 @@ bool detectBase64Chunk(
 {
     dbgTrace(D_WAAP_BASE64) << " ===detectBase64Chunk===: starting with = '" << s << "'";
     string::const_iterator it = s.begin();
+    start = s.end();
+    end = s.end();

     //detect "base64," prefix to start search after this
     for (; it != s.end()-7; it++) {
@@ -1309,33 +1317,62 @@ bool detectBase64Chunk(
     return false;
 }

+base64_decode_status
+processDecodedChunk(
+    const string &s,
+    string::const_iterator start,
+    string::const_iterator end,
+    string &value,
+    BinaryFileType &binaryFileType
+)
+{
+    base64_decode_status retVal = decodeBase64Chunk(s, start, end, value, false);
+    dbgTrace(D_WAAP_BASE64) << " ===isBase64PrefixProcessingOK===: after decode. retVal=" << retVal
+        << " value.size()=" << value.size();
+    if (retVal != B64_DECODE_INVALID && !value.empty()) {
+        binaryFileType = ParserBinaryFile::detectBinaryFileHeader(value);
+        if (retVal == B64_DECODE_INCOMPLETE) value.clear();
+    }
+    return retVal;
+}
+
 bool isBase64PrefixProcessingOK (
     const string &s,
-    string &value)
+    string &value,
+    BinaryFileType &binaryFileType)
 {
     string::const_iterator start, end;
-    bool retVal = false;
+    base64_decode_status retVal = B64_DECODE_INVALID;
     dbgTrace(D_WAAP_BASE64) << " ===isBase64PrefixProcessingOK===: before regex for prefix for string '" << s << "'";
     if (base64_prefix_detector_re.hasMatch(s)) {
         dbgTrace(D_WAAP_BASE64) << " ===isBase64PrefixProcessingOK===: prefix detected on string '" << s << "'";
         if (detectBase64Chunk(s, start, end)) {
             dbgTrace(D_WAAP_BASE64) << " ===isBase64PrefixProcessingOK===: chunk detected";
             if ((start != s.end()) && (end == s.end())) {
-                retVal = decodeBase64Chunk(s, start, end, value);
+                retVal = processDecodedChunk(s, start, end, value, binaryFileType);
             }
+        } else if (start != s.end()) {
+            dbgTrace(D_WAAP_BASE64) << " ===isBase64PrefixProcessingOK===: chunk not detected."
+                " searching for known file header only";
+            end = (start + MAX_HEADER_LOOKUP < s.end()) ? start + MAX_HEADER_LOOKUP : s.end();
+            processDecodedChunk(s, start, end, value, binaryFileType);
+            value.clear();
+            return binaryFileType != Waap::Util::BinaryFileType::FILE_TYPE_NONE;
         }
     }
-    return retVal;
+    return retVal != B64_DECODE_INVALID;
 }

 base64_variants b64Test (
     const string &s,
     string &key,
-    string &value)
+    string &value,
+    BinaryFileType &binaryFileType)
 {

     key.clear();
     bool retVal;
+    binaryFileType = Waap::Util::BinaryFileType::FILE_TYPE_NONE;

     dbgTrace(D_WAAP_BASE64) << " ===b64Test===: string = " << s
         << " key = " << key << " value = " << value;
@@ -1397,7 +1434,7 @@ base64_variants b64Test (
     }

     dbgTrace(D_WAAP_BASE64) << " ===b64Test===: after processing key = '" << key << "'";
-    bool found = isBase64PrefixProcessingOK(s, prefix_decoded_val);
+    bool found = isBase64PrefixProcessingOK(s, prefix_decoded_val, binaryFileType);
     dbgTrace(D_WAAP_BASE64) << " ===b64Test===: after prefix test found = "
         << found << " new value is '" << prefix_decoded_val << "' - done";
     if (found) {
@@ -1421,7 +1458,7 @@ base64_variants b64Test (
         if ((s.end() - start) % 4 != 0) {
             key.clear();
             value.clear();
-            return CONTINUE_AS_IS;;
+            return CONTINUE_AS_IS;
         }
     }
     else {
@@ -1443,7 +1480,7 @@ base64_variants b64Test (
         key.pop_back();
         dbgTrace(D_WAAP_BASE64) << " ===b64Test===: FINAL key = '" << key << "'";
     }
-    retVal = decodeBase64Chunk(s, start, s.end(), value);
+    retVal = decodeBase64Chunk(s, start, s.end(), value) != B64_DECODE_INVALID;

     dbgTrace(D_WAAP_BASE64) << " ===b64Test===: After testing and conversion value = "
         << value << "retVal = '" << retVal <<"'";
@@ -34,6 +34,7 @@

 enum base64_variants {SINGLE_B64_CHUNK_CONVERT, KEY_VALUE_B64_PAIR, CONTINUE_AS_IS};
 enum base64_stage {BEFORE_EQUAL, EQUAL, DONE, MISDETECT};
+enum base64_decode_status {B64_DECODE_INVALID, B64_DECODE_OK, B64_DECODE_INCOMPLETE};

 // This is portable version of stricmp(), which is non-standard function (not even in C).
 // Contrary to stricmp(), for a slight optimization, s2 is ASSUMED to be already in lowercase.
@@ -858,12 +859,13 @@ void unescapeUnicode(std::string &text);
 // Try to find and decode UTF7 chunks
 std::string filterUTF7(const std::string &text);

-bool
+base64_decode_status
 decodeBase64Chunk(
     const std::string &value,
     std::string::const_iterator it,
     std::string::const_iterator end,
-    std::string &decoded);
+    std::string &decoded,
+    bool clear_on_error = true);

 bool
 b64DecodeChunk(
@@ -889,6 +891,13 @@ namespace Util {
     std::string &key,
     std::string &value);

+enum BinaryFileType {
+    FILE_TYPE_NONE,
+    FILE_TYPE_PNG,
+    FILE_TYPE_JPEG,
+    FILE_TYPE_PDF
+};
+
 void b64Decode(
     const std::string &s,
     RegexSubCallback_f cb,
@@ -899,7 +908,8 @@ namespace Util {
 base64_variants b64Test (
     const std::string &s,
     std::string &key,
-    std::string &value);
+    std::string &value,
+    BinaryFileType &binaryFileType);

 // The original stdlib implementation of isalpha() supports locale settings which we do not really need.
 // It is also proven to contribute to slow performance in some of the algorithms using it.
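b64Test() now also reports a Waap::Util::BinaryFileType for content that decodes to a known binary format, delegating the detection to ParserBinaryFile::detectBinaryFileHeader() as shown in the earlier hunk. Below is a self-contained sketch of the same idea for the three file types listed in the enum, based on their well-known leading magic bytes (an illustration of the technique, not the ParserBinaryFile implementation):

#include <iostream>
#include <string>

enum BinaryFileType { FILE_TYPE_NONE, FILE_TYPE_PNG, FILE_TYPE_JPEG, FILE_TYPE_PDF };

// Checks the leading "magic" bytes of a decoded buffer. PNG starts with
// \x89PNG, JPEG with FF D8 FF, and PDF with "%PDF-".
static BinaryFileType detectBinaryFileHeaderSketch(const std::string &buf)
{
    if (buf.size() >= 4 && buf.compare(0, 4, "\x89PNG") == 0) return FILE_TYPE_PNG;
    if (buf.size() >= 3 &&
        static_cast<unsigned char>(buf[0]) == 0xFF &&
        static_cast<unsigned char>(buf[1]) == 0xD8 &&
        static_cast<unsigned char>(buf[2]) == 0xFF) return FILE_TYPE_JPEG;
    if (buf.size() >= 5 && buf.compare(0, 5, "%PDF-") == 0) return FILE_TYPE_PDF;
    return FILE_TYPE_NONE;
}

int main()
{
    std::cout << (detectBinaryFileHeaderSketch("%PDF-1.7 example") == FILE_TYPE_PDF) << "\n";  // 1
    std::cout << (detectBinaryFileHeaderSketch("hello world") == FILE_TYPE_NONE) << "\n";      // 1
    return 0;
}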
@@ -14,7 +14,8 @@ enum SchemaKeyType
     StartObjectKeyName,
     StartArrayKeyName,
     EndArrayKeyName,
-    OtherKey
+    OtherKey,
+    JsonFailure
 };

 #endif // __OA_SCHEMA_UPDATER_KEYS_H__
@@ -42,6 +42,7 @@ using namespace std;
 USE_DEBUG_FLAG(D_WAAP);
 USE_DEBUG_FLAG(D_WAAP_ULIMITS);
 USE_DEBUG_FLAG(D_OA_SCHEMA_UPDATER);
+USE_DEBUG_FLAG(D_NGINX_EVENTS);

 WaapComponent::Impl::Impl() :
     pending_response(ngx_http_cp_verdict_e::TRAFFIC_VERDICT_INSPECT),
@@ -124,7 +125,7 @@ WaapComponent::Impl::getListenerName() const
 EventVerdict
 WaapComponent::Impl::respond(const NewHttpTransactionEvent &event)
 {
-    dbgTrace(D_WAAP) << " * \e[32mNGEN_EVENT: NewTransactionEvent\e[0m";
+    dbgTrace(D_NGINX_EVENTS) << " * \e[32mNGEN_EVENT: NewTransactionEvent\e[0m";

     if (waapStateTable->hasState<Waf2Transaction>()) {
         dbgWarning(D_WAAP) << " * \e[31 -- NewTransactionEvent called twice on same entry \e[0m";
@@ -202,7 +203,7 @@ WaapComponent::Impl::respond(const HttpRequestHeaderEvent &event)
     auto &header_name = event.getKey();
     auto &header_value = event.getValue();

-    dbgTrace(D_WAAP)
+    dbgTrace(D_NGINX_EVENTS)
         << " * \e[32mNGEN_EVENT: HttpHeaderRequest event: "
         << string(header_name)
         << ": "
@@ -210,7 +211,7 @@ WaapComponent::Impl::respond(const HttpRequestHeaderEvent &event)
         << "\e[0m";

     if (!waapStateTable->hasState<Waf2Transaction>()) {
-        dbgWarning(D_WAAP)
+        dbgWarning(D_NGINX_EVENTS)
             << " * \e[31mNGEN_EVENT: http_header - "
             << "failed to get waf2 transaction, state not exist\e[0m";

@@ -257,10 +258,10 @@ WaapComponent::Impl::respond(const HttpRequestHeaderEvent &event)
 EventVerdict
 WaapComponent::Impl::respond(const HttpRequestBodyEvent &event)
 {
-    dbgTrace(D_WAAP) << " * \e[32mNGEN_EVENT: HttpBodyRequest data buffer event\e[0m";
+    dbgTrace(D_NGINX_EVENTS) << " * \e[32mNGEN_EVENT: HttpBodyRequest data buffer event\e[0m";

     if (!waapStateTable->hasState<Waf2Transaction>()) {
-        dbgWarning(D_WAAP) <<
+        dbgWarning(D_NGINX_EVENTS) <<
             " * \e[31mNGEN_EVENT: data buffer - failed to get waf2 transaction, state not exist\e[0m";
         return drop_response;
     }
@@ -295,10 +296,10 @@ WaapComponent::Impl::respond(const HttpRequestBodyEvent &event)
 EventVerdict
 WaapComponent::Impl::respond(const EndRequestEvent &)
 {
-    dbgTrace(D_WAAP) << " * \e[32mNGEN_EVENT: endRequest event\e[0m";
+    dbgTrace(D_NGINX_EVENTS) << " * \e[32mNGEN_EVENT: endRequest event\e[0m";

     if (!waapStateTable->hasState<Waf2Transaction>()) {
-        dbgWarning(D_WAAP)
+        dbgWarning(D_NGINX_EVENTS)
             << "* \e[31mNGEN_EVENT: endRequest - failed to get waf2 transaction, state does not exist\e[0m";
         return drop_response;
     }
@@ -333,13 +334,13 @@ WaapComponent::Impl::respond(const EndRequestEvent &)
 EventVerdict
 WaapComponent::Impl::respond(const ResponseCodeEvent &event)
 {
-    dbgTrace(D_WAAP)
+    dbgTrace(D_NGINX_EVENTS)
         << " * \e[32mNGEN_EVENT: ResponseCodeTransactionEvent event code = "
         << event.getResponseCode()
         << "\e[0m";

     if (!waapStateTable->hasState<Waf2Transaction>()) {
-        dbgWarning(D_WAAP)
+        dbgWarning(D_NGINX_EVENTS)
             << " * \e[31mNGEN_EVENT: ResponseCodeTransactionEvent - failed to get waf2 transaction, "
             << "state does not exist\e[0m";
         return drop_response;
@@ -385,7 +386,7 @@ WaapComponent::Impl::respond(const HttpResponseHeaderEvent &event)
     auto &header_name = event.getKey();
     auto &header_value = event.getValue();

-    dbgTrace(D_WAAP)
+    dbgTrace(D_NGINX_EVENTS)
         << " * \e[32mNGEN_EVENT: HttpHeaderResponse event: "
         << string(header_name)
         << ": "
@@ -393,7 +394,7 @@ WaapComponent::Impl::respond(const HttpResponseHeaderEvent &event)
         << "\e[0m";

     if (!waapStateTable->hasState<Waf2Transaction>()) {
-        dbgWarning(D_WAAP)
+        dbgWarning(D_NGINX_EVENTS)
             << " * \e[31mNGEN_EVENT: HttpHeaderResponse - "
             << "failed to get waf2 transaction, state does not exist\e[0m";
         return drop_response;
@@ -491,10 +492,10 @@ WaapComponent::Impl::respond(const HttpResponseHeaderEvent &event)
 EventVerdict
 WaapComponent::Impl::respond(const HttpResponseBodyEvent &event)
 {
-    dbgTrace(D_WAAP) << " * \e[32mNGEN_EVENT: HttpBodyResponse data buffer event\e[0m";
+    dbgTrace(D_NGINX_EVENTS) << " * \e[32mNGEN_EVENT: HttpBodyResponse data buffer event\e[0m";

     if (!waapStateTable->hasState<Waf2Transaction>()) {
-        dbgWarning(D_WAAP) <<
+        dbgWarning(D_NGINX_EVENTS) <<
             " * \e[31mNGEN_EVENT: HttpBodyResponse - failed to get waf2 transaction, state does not exist\e[0m";
         return drop_response;
     }
@@ -591,7 +592,7 @@ EventVerdict
 WaapComponent::Impl::respond(const EndTransactionEvent &)
 {
     if (!waapStateTable->hasState<Waf2Transaction>()) {
-        dbgWarning(D_WAAP) <<
+        dbgWarning(D_NGINX_EVENTS) <<
             " * \e[31mNGEN_EVENT: endTransaction - failed to get waf2 transaction, state does not exist\e[0m";
         return EventVerdict(drop_response);
     }
@@ -22,6 +22,8 @@

 using namespace std;

+USE_DEBUG_FLAG(D_RULEBASE_CONFIG);
+
 string ParameterMatcher::ctx_key = "parameters";

 ParameterMatcher::ParameterMatcher(const vector<string> &params)
@@ -33,6 +35,17 @@ ParameterMatcher::ParameterMatcher(const vector<string> &params)
 Maybe<bool, Context::Error>
 ParameterMatcher::evalVariable() const
 {
+    I_Environment *env = Singleton::Consume<I_Environment>::by<ParameterMatcher>();
+    auto bc_param_id_ctx = env->get<set<GenericConfigId>>(ParameterMatcher::ctx_key);
+    dbgTrace(D_RULEBASE_CONFIG)
+        << "Trying to match parameter. ID: "
+        << parameter_id << ", Current set IDs: "
+        << makeSeparatedStr(bc_param_id_ctx.ok() ? *bc_param_id_ctx : set<GenericConfigId>(), ", ");
+    if (bc_param_id_ctx.ok()) return bc_param_id_ctx.unpack().count(parameter_id) > 0;
+
+    dbgTrace(D_RULEBASE_CONFIG)
+        << "Did not find current parameter in context."
+        << " Match parameter from current rule";
     auto rule = getConfiguration<BasicRuleConfig>("rulebase", "rulesConfig");
     return rule.ok() && rule.unpack().isParameterActive(parameter_id);
 }
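The lines added to ParameterMatcher::evalVariable() introduce a two-step lookup: first check the set of parameter IDs published in the environment context under the "parameters" key, and only fall back to the active rule configuration when no such set is present. A self-contained sketch of that lookup order (std::optional stands in for the Maybe<> wrapper used by the repository; names here are illustrative):

#include <iostream>
#include <optional>
#include <set>
#include <string>

// Sketch only: match a parameter id against an optional context set first,
// then against a fallback predicate (the "current rule" check in the original).
static bool matchParameter(
    const std::string &parameter_id,
    const std::optional<std::set<std::string>> &ctx_ids,
    bool active_in_current_rule)
{
    if (ctx_ids.has_value()) return ctx_ids->count(parameter_id) > 0;
    return active_in_current_rule;
}

int main()
{
    std::set<std::string> ids = {"param-1", "param-7"};
    std::cout << matchParameter("param-7", ids, false) << "\n";          // 1: found in context
    std::cout << matchParameter("param-9", ids, true) << "\n";           // 0: context present, id absent
    std::cout << matchParameter("param-9", std::nullopt, true) << "\n";  // 1: fallback to rule config
    return 0;
}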
@@ -173,7 +173,7 @@ LogTriggerConf::load(cereal::JSONInputArchive& archive_in)
     setTriggersFlag("webUrlQuery", archive_in, WebLogFields::webUrlQuery, log_web_fields);
     setTriggersFlag("logToAgent", archive_in, ReportIS::StreamType::JSON_LOG_FILE, active_streams);
     setTriggersFlag("logToCloud", archive_in, ReportIS::StreamType::JSON_FOG, active_streams);
-    setTriggersFlag("logToK8sService", archive_in, ReportIS::StreamType::JSON_K8S_SVC, active_streams);
+    setTriggersFlag("logToContainerService", archive_in, ReportIS::StreamType::JSON_CONTAINER_SVC, active_streams);
     setTriggersFlag("logToSyslog", archive_in, ReportIS::StreamType::SYSLOG, active_streams);
     setTriggersFlag("logToCef", archive_in, ReportIS::StreamType::CEF, active_streams);
     setTriggersFlag("acAllow", archive_in, SecurityType::AccessControl, should_log_on_detect);
@@ -221,8 +221,8 @@ LogTriggerConf::load(cereal::JSONInputArchive& archive_in)
         case ReportIS::StreamType::JSON_LOG_FILE:
             setLogConfiguration(ReportIS::StreamType::JSON_LOG_FILE);
             break;
-        case ReportIS::StreamType::JSON_K8S_SVC:
-            setLogConfiguration(ReportIS::StreamType::JSON_K8S_SVC);
+        case ReportIS::StreamType::JSON_CONTAINER_SVC:
+            setLogConfiguration(ReportIS::StreamType::JSON_CONTAINER_SVC);
             break;
         case ReportIS::StreamType::SYSLOG:
             setLogConfiguration(ReportIS::StreamType::SYSLOG, getUrlForSyslog(), syslog_protocol);
@@ -52,6 +52,7 @@ const string HttpTransactionData::req_headers = "transaction_request_head
 const string HttpTransactionData::req_body = "transaction_request_body";
 const string HttpTransactionData::source_identifier = "sourceIdentifiers";
 const string HttpTransactionData::proxy_ip_ctx = "proxy_ip";
+const string HttpTransactionData::xff_vals_ctx = "xff_vals";

 const CompressionType HttpTransactionData::default_response_content_encoding = CompressionType::NO_COMPRESSION;

 1321  config/crds/open-appsec-crd-latest.yaml   (new file; diff suppressed because it is too large)
 525   config/crds/open-appsec-crd-v1beta1.yaml  (new file)
@@ -0,0 +1,525 @@
apiVersion: apiextensions.k8s.io/v1
kind: CustomResourceDefinition
metadata:
  name: customresponses.openappsec.io

spec:
  group: openappsec.io
  versions:
    - name: v1beta1
      served: true
      storage: true
      schema:
        openAPIV3Schema:
          type: object
          properties:
            spec:
              type: object
              properties:
                mode:
                  type: string
                  enum:
                    - block-page
                    #- redirect
                    - response-code-only
                message-title:
                  type: string
                message-body:
                  type: string
                http-response-code:
                  type: integer
                  minimum: 100
                  maximum: 599

  scope: Cluster
  names:
    plural: customresponses
    singular: customresponse
    kind: CustomResponse
    shortNames:
      - customresponse
---
apiVersion: apiextensions.k8s.io/v1
kind: CustomResourceDefinition
metadata:
  name: exceptions.openappsec.io

spec:
  group: openappsec.io
  versions:
    - name: v1beta1
      served: true
      storage: true
      schema:
        openAPIV3Schema:
          type: object
          properties:
            spec:
              type: array
              items:
                type: object
                required:
                  - action
                properties:
                  action:
                    type: string
                    enum:
                      - skip
                      - accept
                      - drop
                      - suppressLog
                  sourceIp:
                    type: array
                    items:
                      type: string
                  url:
                    type: array
                    items:
                      type: string
                  sourceIdentifier:
                    type: array
                    items:
                      type: string
                  protectionName:
                    type: array
                    items:
                      type: string
                  paramValue:
                    type: array
                    items:
                      type: string
                  paramName:
                    type: array
                    items:
                      type: string
                  hostName:
                    type: array
                    items:
                      type: string
                  countryCode:
                    type: array
                    items:
                      type: string
                  countryName:
                    type: array
                    items:
                      type: string
                  comment:
                    type: string

  scope: Cluster
  names:
    plural: exceptions
    singular: exception
    kind: Exception
    shortNames:
      - exception
---
apiVersion: apiextensions.k8s.io/v1
kind: CustomResourceDefinition
metadata:
  name: logtriggers.openappsec.io

spec:
  group: openappsec.io
  versions:
    - name: v1beta1
      # Each version can be enabled/disabled by Served flag.
      served: true
      # One and only one version must be marked as the storage version.
      storage: true
      schema:
        openAPIV3Schema:
          type: object
          properties:
            spec:
              type: object
              properties:
                access-control-logging:
                  type: object
                  properties:
                    allow-events:
                      type: boolean
                    drop-events:
                      type: boolean
                appsec-logging:
                  type: object
                  properties:
                    detect-events:
                      type: boolean
                    prevent-events:
                      type: boolean
                    all-web-requests:
                      type: boolean
                additional-suspicious-events-logging:
                  type: object
                  properties:
                    enabled:
                      type: boolean
                    minimum-severity:
                      type: string
                      enum:
                        - high
                        - critical
                    response-body:
                      type: boolean
                    response-code:
                      type: boolean
                extended-logging:
                  type: object
                  properties:
                    url-path:
                      type: boolean
                    url-query:
                      type: boolean
                    http-headers:
                      type: boolean
                    request-body:
                      type: boolean
                log-destination:
                  type: object
                  properties:
                    cloud:
                      type: boolean
                    syslog-service: #change to object array
                      type: array
                      items:
                        type: object
                        properties:
                          address:
                            type: string
                          port:
                            type: integer
                    file:
                      type: string
                    stdout:
                      type: object
                      properties:
                        format:
                          type: string
                          enum:
                            - json
                            - json-formatted
                    cef-service:
                      type: array
                      items:
                        type: object
                        properties:
                          address:
                            type: string
                          port:
                            type: integer
                          proto:
                            type: string
                            enum:
                              - tcp
                              - udp

  scope: Cluster
  names:
    plural: logtriggers
    singular: logtrigger
    kind: LogTrigger
    shortNames:
      - logtrigger
---
apiVersion: apiextensions.k8s.io/v1
kind: CustomResourceDefinition
metadata:
  name: policies.openappsec.io

spec:
  group: openappsec.io
  versions:
    - name: v1beta1
      # Each version can be enabled/disabled by Served flag.
      served: true
      # One and only one version must be marked as the storage version.
      storage: true
      schema:
        openAPIV3Schema:
          type: object
          properties:
            spec:
              type: object
              properties:
                default:
                  type: object
                  properties:
                    mode:
                      type: string
                      enum:
                        - prevent-learn
                        - detect-learn
                        - prevent
                        - detect
                        - inactive
                    practices:
                      type: array
                      items:
                        type: string
                    triggers:
                      type: array
                      items:
                        type: string
                    custom-response:
                      type: string
                    source-identifiers:
                      type: string
                    trusted-sources:
                      type: string
                    exceptions:
                      type: array
                      items:
                        type: string
                specific-rules:
                  type: array
                  items:
                    type: object
                    properties:
                      host:
                        type: string
                      mode:
                        type: string
                        enum:
                          - prevent-learn
                          - detect-learn
                          - prevent
                          - detect
                          - inactive
                      practices:
                        type: array
                        items:
                          type: string
                      triggers:
                        type: array
                        items:
                          type: string
                      custom-response:
                        type: string
                      source-identifiers:
                        type: string
                      trusted-sources:
                        type: string
                      exceptions:
                        type: array
                        items:
                          type: string

  scope: Cluster
  names:
    plural: policies
    singular: policy
    kind: Policy
    shortNames:
      - policy
---
apiVersion: apiextensions.k8s.io/v1
kind: CustomResourceDefinition
metadata:
  name: practices.openappsec.io

spec:
  group: openappsec.io
  versions:
    - name: v1beta1
      served: true
      storage: true
      schema:
        openAPIV3Schema:
          type: object
          properties:
            spec:
              type: object
              properties:
                web-attacks:
                  type: object
                  properties:
                    override-mode:
                      type: string
                      enum:
                        - prevent-learn
                        - detect-learn
                        - prevent
                        - detect
                        - inactive
                    minimum-confidence:
                      type: string
                      enum:
                        - medium
                        - high
                        - critical
                    max-url-size-bytes:
                      type: integer
                    max-object-depth:
                      type: integer
                    max-body-size-kb:
                      type: integer
                    max-header-size-bytes:
                      type: integer
                    protections:
                      type: object
                      properties:
                        csrf-enabled:
                          type: string
                          enum:
                            - prevent-learn
                            - detect-learn
                            - prevent
                            - detect
                            - inactive
                        error-disclosure-enabled:
                          type: string
                          enum:
                            - prevent-learn
                            - detect-learn
                            - prevent
                            - detect
                            - inactive
                        open-redirect-enabled:
                          type: string
                          enum:
                            - prevent-learn
                            - detect-learn
                            - prevent
                            - detect
                            - inactive
                        non-valid-http-methods:
                          type: boolean
                anti-bot:
                  type: object
                  properties:
                    override-mode:
                      type: string
                      enum:
                        - prevent-learn
                        - detect-learn
                        - prevent
                        - detect
                        - inactive
                    injected-URIs:
                      type: array
                      items:
                        type: object
                        properties:
                          uri:
                            type: string
                    validated-URIs:
                      type: array
                      items:
                        type: object
                        properties:
                          uri:
                            type: string
                snort-signatures:
                  type: object
                  properties:
                    override-mode:
                      type: string
                      enum:
                        - prevent-learn
                        - detect-learn
                        - prevent
                        - detect
                        - inactive
                    configmap:
                      type: array
                      items:
                        type: string
                openapi-schema-validation:
                  type: object
                  properties:
                    override-mode:
                      type: string
                      enum:
                        - prevent-learn
                        - detect-learn
                        - prevent
                        - detect
                        - inactive
                    configmap:
                      type: array
                      items:
                        type: string

  scope: Cluster
  names:
    plural: practices
    singular: practice
    kind: Practice
    shortNames:
      - practice
---
apiVersion: apiextensions.k8s.io/v1
kind: CustomResourceDefinition
metadata:
  name: sourcesidentifiers.openappsec.io

spec:
  group: openappsec.io
  versions:
    - name: v1beta1
      served: true
      storage: true
      schema:
        openAPIV3Schema:
          type: object
          properties:
            spec:
              type: array
              items:
                type: object
                properties:
                  sourceIdentifier:
                    type: string
                    enum:
                      - headerkey
                      - JWTKey
                      - cookie
                      - sourceip
                      - x-forwarded-for
                  value:
                    type: array
                    items:
                      type: string

  scope: Cluster
  names:
    plural: sourcesidentifiers
    singular: sourcesidentifier
    kind: SourcesIdentifier
    shortNames:
      - sourcesidentifier
---
apiVersion: apiextensions.k8s.io/v1
kind: CustomResourceDefinition
metadata:
  name: trustedsources.openappsec.io

spec:
  group: openappsec.io
  versions:
    - name: v1beta1
      served: true
      storage: true
      schema:
        openAPIV3Schema:
          type: object
          properties:
            spec:
              type: object
              properties:
                minNumOfSources:
                  type: integer
                sourcesIdentifiers:
                  type: array
                  items:
                    type: string

  scope: Cluster
  names:
    plural: trustedsources
    singular: trustedsource
    kind: TrustedSource
    shortNames:
      - trustedsource
 1321  config/crds/open-appsec-crd-v1beta2.yaml  (new file; diff suppressed because it is too large)

Some files were not shown because too many files have changed in this diff.