Mirror of https://github.com/openappsec/openappsec.git (synced 2025-06-28 16:41:02 +03:00)

Merge pull request #141 from openappsec/May_27_2024-Dev: May 27 2024 dev
This commit is contained in: commit d6debf8d8d
@@ -1,7 +1,7 @@
cmake_minimum_required (VERSION 2.8.4)
project (ngen)

set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fPIC -Wall -Wno-terminate")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -O2 -fPIC -Wall -Wno-terminate")

execute_process(COMMAND grep -c "Alpine Linux" /etc/os-release OUTPUT_VARIABLE IS_ALPINE)
if(NOT IS_ALPINE EQUAL "0")

@@ -1698,7 +1698,7 @@ private:
    }
};
mainloop->addFileRoutine(
    I_MainLoop::RoutineType::RealTime,
    I_MainLoop::RoutineType::System,
    server_sock,
    accept_attachment_routine,
    "Nginx Attachment registration listener",
@@ -306,17 +306,20 @@ UsersAllIdentifiersConfig::parseXForwardedFor(const string &str) const
void
UsersAllIdentifiersConfig::setXFFValuesToOpaqueCtx(const HttpHeader &header, ExtractType type) const
{
    auto i_transaction_table = Singleton::Consume<I_TableSpecific<SessionID>>::by<NginxAttachment>();
    if (!i_transaction_table || !i_transaction_table->hasState<NginxAttachmentOpaque>()) {
        dbgTrace(D_NGINX_ATTACHMENT_PARSER) << "Can't get the transaction table";
        return;
    }
    NginxAttachmentOpaque &opaque = i_transaction_table->getState<NginxAttachmentOpaque>();
    opaque.setSavedData(HttpTransactionData::xff_vals_ctx, header.getValue());
    dbgTrace(D_NGINX_ATTACHMENT_PARSER) << "xff found, value from header: " << static_cast<string>(header.getValue());
    auto value = parseXForwardedFor(header.getValue());
    if (!value.ok()) {
        dbgTrace(D_NGINX_ATTACHMENT_PARSER) << "Could not extract source identifier from X-Forwarded-For header";
        return;
    };
    auto i_transaction_table = Singleton::Consume<I_TableSpecific<SessionID>>::by<NginxAttachment>();
    if (!i_transaction_table || !i_transaction_table->hasState<NginxAttachmentOpaque>()) {
        dbgDebug(D_NGINX_ATTACHMENT_PARSER) << "Can't get the transaction table";
        return;
    }
    NginxAttachmentOpaque &opaque = i_transaction_table->getState<NginxAttachmentOpaque>();

    if (type == ExtractType::SOURCEIDENTIFIER) {
        opaque.setSourceIdentifier(header.getKey(), value.unpack());
        dbgDebug(D_NGINX_ATTACHMENT_PARSER)
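The hunk above now saves the raw X-Forwarded-For value into the transaction context before a source identifier is derived from it. As a rough standalone illustration of the idea (this is not the attachment's parseXForwardedFor), splitting an XFF value into its individual hops could look like this:

#include <sstream>
#include <string>
#include <vector>

// Illustrative only: break "203.0.113.7, 198.51.100.2" into individual hop strings.
std::vector<std::string> splitXffHops(const std::string &xff) {
    std::vector<std::string> hops;
    std::stringstream ss(xff);
    std::string hop;
    while (std::getline(ss, hop, ',')) {
        hop.erase(0, hop.find_first_not_of(" \t"));   // trim leading whitespace
        hop.erase(hop.find_last_not_of(" \t") + 1);   // trim trailing whitespace
        if (!hop.empty()) hops.push_back(hop);
    }
    return hops;
}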
@@ -15,7 +15,8 @@ class HttpGeoFilter
        public Component,
        Singleton::Consume<I_MainLoop>,
        Singleton::Consume<I_GeoLocation>,
        Singleton::Consume<I_GenericRulebase>
        Singleton::Consume<I_GenericRulebase>,
        Singleton::Consume<I_Environment>
{
public:
    HttpGeoFilter();

@@ -136,6 +136,7 @@ public:
    static const std::string req_body;
    static const std::string source_identifier;
    static const std::string proxy_ip_ctx;
    static const std::string xff_vals_ctx;

    static const CompressionType default_response_content_encoding;

@@ -31,6 +31,7 @@ public:
    virtual bool isReverseProxy() = 0;
    virtual bool isCloudStorageEnabled() = 0;
    virtual Maybe<std::tuple<std::string, std::string, std::string>> parseNginxMetadata() = 0;
    virtual Maybe<std::tuple<std::string, std::string, std::string>> readCloudMetadata() = 0;
    virtual std::map<std::string, std::string> getResolvedDetails() = 0;
#if defined(gaia) || defined(smb)
    virtual bool compareCheckpointVersion(int cp_version, std::function<bool(int, int)> compare_operator) const = 0;

@@ -1 +1,5 @@
include_directories(../waap/include)
include_directories(../waap/waap_clib)
include_directories(../../attachment-intakers/nginx_attachment)

add_library(http_geo_filter http_geo_filter.cc)
@@ -4,10 +4,16 @@
#include <unistd.h>
#include <stddef.h>
#include <algorithm>
#include <sstream>
#include <string>
#include <vector>
#include <boost/algorithm/string.hpp>

#include "cidrs_data.h"
#include "generic_rulebase/generic_rulebase.h"
#include "generic_rulebase/parameters_config.h"
#include "generic_rulebase/triggers_config.h"
#include "user_identifiers_config.h"
#include "debug.h"
#include "config.h"
#include "rest.h"

@@ -21,9 +27,10 @@ USE_DEBUG_FLAG(D_GEO_FILTER);

static const LogTriggerConf default_triger;

class HttpGeoFilter::Impl : public Listener<NewHttpTransactionEvent>
class HttpGeoFilter::Impl : public Listener<HttpRequestHeaderEvent>
{
public:

    void
    init()
    {
@@ -55,32 +62,42 @@ public:
    }

    EventVerdict
    respond(const NewHttpTransactionEvent &event) override
    respond(const HttpRequestHeaderEvent &event) override
    {
        dbgTrace(D_GEO_FILTER) << getListenerName() << " new transaction event";

        if (!ParameterException::isGeoLocationExceptionExists() &&
            !getConfiguration<GeoConfig>("rulebase", "httpGeoFilter").ok()
        ) {
            dbgTrace(D_GEO_FILTER) << "No geo location practice nor exception was found. Returning default verdict";
        if (!event.isLastHeader()) return EventVerdict(ngx_http_cp_verdict_e::TRAFFIC_VERDICT_INSPECT);
        std::set<std::string> xff_set;
        auto env = Singleton::Consume<I_Environment>::by<HttpGeoFilter>();
        auto maybe_xff = env->get<std::string>(HttpTransactionData::xff_vals_ctx);
        if (!maybe_xff.ok()) {
            dbgTrace(D_GEO_FILTER) << "failed to get xff vals from env";
        } else {
            xff_set = split(maybe_xff.unpack(), ',');
        }
        dbgDebug(D_GEO_FILTER) << getListenerName() << " last header, start lookup";

        if (xff_set.size() > 0) {
            removeTrustedIpsFromXff(xff_set);
        } else {
            dbgDebug(D_GEO_FILTER) << "xff not found in headers";
        }

        auto maybe_source_ip = env->get<IPAddr>(HttpTransactionData::client_ip_ctx);
        if (!maybe_source_ip.ok()) {
            dbgWarning(D_GEO_FILTER) << "failed to get source ip from env";
            return EventVerdict(default_action);
        }

        I_GeoLocation *i_geo_location = Singleton::Consume<I_GeoLocation>::by<HttpGeoFilter>();
        auto asset_location = i_geo_location->lookupLocation(event.getSourceIP());
        if (!asset_location.ok()) {
            dbgTrace(D_GEO_FILTER) << "Lookup location failed, Error: " << asset_location.getErr();
            return EventVerdict(default_action);
        }
        auto source_ip = convertIpAddrToString(maybe_source_ip.unpack());
        xff_set.insert(source_ip);

        EnumArray<I_GeoLocation::GeoLocationField, std::string> geo_location_data = asset_location.unpack();

        ngx_http_cp_verdict_e exception_verdict = getExceptionVerdict(event, geo_location_data);
        ngx_http_cp_verdict_e exception_verdict = getExceptionVerdict(xff_set);
        if (exception_verdict != ngx_http_cp_verdict_e::TRAFFIC_VERDICT_IRRELEVANT) {
            return EventVerdict(exception_verdict);
        }

        ngx_http_cp_verdict_e geo_lookup_verdict = getGeoLookupVerdict(event, geo_location_data);
        ngx_http_cp_verdict_e geo_lookup_verdict = getGeoLookupVerdict(xff_set);
        if (geo_lookup_verdict != ngx_http_cp_verdict_e::TRAFFIC_VERDICT_IRRELEVANT) {
            return EventVerdict(geo_lookup_verdict);
        }
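The reworked respond() above evaluates a whole set of candidate sources instead of a single event IP. A rough sketch (not the component's code) of how that set is assembled:

#include <set>
#include <string>

// The X-Forwarded-For hops that survived trusted-proxy filtering, plus the peer address,
// are all candidates for the geo lookup.
std::set<std::string> buildSources(std::set<std::string> xff_hops, const std::string &peer_ip) {
    xff_hops.insert(peer_ip);   // the direct client IP is always evaluated
    return xff_hops;
}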
@@ -88,6 +105,73 @@ public:
    }

private:
    std::set<std::string>
    split(const std::string& s, char delim) {
        std::set<std::string> elems;
        std::stringstream ss(s);
        std::string value;
        while (std::getline(ss, value, delim)) {
            elems.insert(trim(value));
        }
        return elems;
    }

    static inline std::string &ltrim(std::string &s) {
        s.erase(s.begin(), std::find_if(s.begin(), s.end(),
            [] (char c) { return !std::isspace(c); }));
        return s;
    }

    // trim from end
    static inline std::string &rtrim(std::string &s) {
        s.erase(std::find_if(s.rbegin(), s.rend(),
            [] (char c) { return !std::isspace(c); }).base(), s.end());
        return s;
    }

    // trim from both ends
    static inline std::string &trim(std::string &s) {
        return ltrim(rtrim(s));
    }

    void
    removeTrustedIpsFromXff(std::set<std::string> &xff_set)
    {
        auto identify_config = getConfiguration<UsersAllIdentifiersConfig>(
            "rulebase",
            "usersIdentifiers"
        );
        if (!identify_config.ok()) {
            dbgDebug(D_GEO_FILTER) << "did not find users identifiers definition in policy";
        } else {
            auto trusted_ips = (*identify_config).getHeaderValuesFromConfig("x-forwarded-for");
            for (auto it = xff_set.begin(); it != xff_set.end();) {
                if (isIpTrusted(*it, trusted_ips)) {
                    dbgTrace(D_GEO_FILTER) << "xff value is in trusted ips: " << *it;
                    it = xff_set.erase(it);
                } else {
                    dbgTrace(D_GEO_FILTER) << "xff value is not in trusted ips: " << *it;
                    ++it;
                }
            }
        }
    }

    bool
    isIpTrusted(const string &ip, const vector<string> &trusted_ips)
    {
        for (const auto &trusted_ip : trusted_ips) {
            CIDRSData cidr_data(trusted_ip);
            if (
                ip == trusted_ip ||
                (cidr_data.contains(ip))
            ) {
                return true;
            }
        }
        return false;
    }

    string
    convertIpAddrToString(const IPAddr &ip_to_convert)
    {
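A quick usage note for the split()/trim() helpers introduced above, restated here as a standalone program with the same semantics (an assumption, since the originals are private members): whitespace is trimmed per element and duplicates collapse because the result is a std::set.

#include <iostream>
#include <set>
#include <sstream>
#include <string>

static std::string trimCopy(std::string s) {
    s.erase(0, s.find_first_not_of(" \t"));
    s.erase(s.find_last_not_of(" \t") + 1);
    return s;
}

int main() {
    std::set<std::string> elems;
    std::stringstream ss("203.0.113.7 , 198.51.100.2,203.0.113.7");
    std::string value;
    while (std::getline(ss, value, ',')) elems.insert(trimCopy(value));
    std::cout << elems.size() << "\n";   // prints 2: the duplicate hop is folded away
}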
@@ -117,54 +201,75 @@ private:
    }

    ngx_http_cp_verdict_e
    getGeoLookupVerdict(
        const NewHttpTransactionEvent &event,
        const EnumArray<I_GeoLocation::GeoLocationField, std::string> &geo_location_data)
    getGeoLookupVerdict(const std::set<std::string> &sources)
    {
        auto maybe_geo_config = getConfiguration<GeoConfig>("rulebase", "httpGeoFilter");
        if (!maybe_geo_config.ok()) {
            dbgWarning(D_GEO_FILTER) << "Failed to load HTTP Geo Filter config. Error:" << maybe_geo_config.getErr();
            dbgTrace(D_GEO_FILTER) << "Failed to load HTTP Geo Filter config. Error:" << maybe_geo_config.getErr();
            return ngx_http_cp_verdict_e::TRAFFIC_VERDICT_IRRELEVANT;
        }
        GeoConfig geo_config = maybe_geo_config.unpack();
        string country_code = geo_location_data[I_GeoLocation::GeoLocationField::COUNTRY_CODE];
        EnumArray<I_GeoLocation::GeoLocationField, std::string> geo_location_data;
        I_GeoLocation *i_geo_location = Singleton::Consume<I_GeoLocation>::by<HttpGeoFilter>();

        if (geo_config.isAllowedCountry(country_code)) {
            dbgTrace(D_GEO_FILTER)
                << "geo verdict ACCEPT, practice id: "
                << geo_config.getId()
                << ", country code: "
                << country_code;
            generateVerdictLog(
                ngx_http_cp_verdict_e::TRAFFIC_VERDICT_ACCEPT,
                event,
                geo_config.getId(),
                true,
                geo_location_data
            );
            return ngx_http_cp_verdict_e::TRAFFIC_VERDICT_ACCEPT;
        }
        if (geo_config.isBlockedCountry(country_code)) {
            dbgTrace(D_GEO_FILTER)
                << "geo verdict DROP, practice id: "
                << geo_config.getId()
                << ", country code: "
                << country_code;
            generateVerdictLog(
                ngx_http_cp_verdict_e::TRAFFIC_VERDICT_DROP,
                event,
                geo_config.getId(),
                true,
                geo_location_data
            );
            return ngx_http_cp_verdict_e::TRAFFIC_VERDICT_DROP;
        for (const std::string& source : sources) {
            Maybe<IPAddr> maybe_source_ip = IPAddr::createIPAddr(source);
            if (!maybe_source_ip.ok()){
                dbgWarning(D_GEO_FILTER) <<
                    "create ip address failed for source: " <<
                    source <<
                    ", Error: " <<
                    maybe_source_ip.getErr();
                continue;
            }
            auto asset_location = i_geo_location->lookupLocation(maybe_source_ip.unpack());
            if (!asset_location.ok()) {
                dbgWarning(D_GEO_FILTER) <<
                    "Lookup location failed for source: " <<
                    source <<
                    ", Error: " <<
                    asset_location.getErr();
                continue;
            }

            geo_location_data = asset_location.unpack();

            string country_code = geo_location_data[I_GeoLocation::GeoLocationField::COUNTRY_CODE];

            if (geo_config.isAllowedCountry(country_code)) {
                dbgTrace(D_GEO_FILTER)
                    << "geo verdict ACCEPT, practice id: "
                    << geo_config.getId()
                    << ", country code: "
                    << country_code;
                generateVerdictLog(
                    ngx_http_cp_verdict_e::TRAFFIC_VERDICT_ACCEPT,
                    geo_config.getId(),
                    true,
                    geo_location_data
                );
                return ngx_http_cp_verdict_e::TRAFFIC_VERDICT_ACCEPT;
            }
            if (geo_config.isBlockedCountry(country_code)) {
                dbgTrace(D_GEO_FILTER)
                    << "geo verdict DROP, practice id: "
                    << geo_config.getId()
                    << ", country code: "
                    << country_code;
                generateVerdictLog(
                    ngx_http_cp_verdict_e::TRAFFIC_VERDICT_DROP,
                    geo_config.getId(),
                    true,
                    geo_location_data
                );
                return ngx_http_cp_verdict_e::TRAFFIC_VERDICT_DROP;
            }
        }
        dbgTrace(D_GEO_FILTER)
            << "No matched practice. Returned default action: "
            << geo_config.getDefaultAction();
        generateVerdictLog(
            convertActionToVerdict(geo_config.getDefaultAction()),
            event,
            geo_config.getId(),
            true,
            geo_location_data,
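Condensed view (not the component's code) of the new control flow above: every candidate source is looked up in turn, the first ACCEPT or DROP match wins, and sources that fail IP parsing or geo lookup are skipped rather than failing the whole request.

#include <set>
#include <string>

enum class Verdict { Irrelevant, Accept, Drop };

template <typename LookupFn>
Verdict firstDecisiveVerdict(const std::set<std::string> &sources, LookupFn lookup) {
    for (const auto &source : sources) {
        Verdict v = lookup(source);              // e.g. a country allow/block decision
        if (v != Verdict::Irrelevant) return v;  // first decisive answer wins
    }
    return Verdict::Irrelevant;                  // caller falls back to the default action
}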
@@ -176,7 +281,6 @@ private:
    Maybe<pair<ngx_http_cp_verdict_e, string>>
    getBehaviorsVerdict(
        const unordered_map<string, set<string>> &behaviors_map_to_search,
        const NewHttpTransactionEvent &event,
        EnumArray<I_GeoLocation::GeoLocationField, std::string> geo_location_data)
    {
        bool is_matched = false;

@@ -193,7 +297,6 @@ private:
            dbgTrace(D_GEO_FILTER) << "behavior verdict: DROP, exception id: " << behavior.getId();
            generateVerdictLog(
                matched_verdict,
                event,
                behavior.getId(),
                false,
                geo_location_data
@@ -218,63 +321,83 @@ private:
    }

    ngx_http_cp_verdict_e
    getExceptionVerdict(
        const NewHttpTransactionEvent &event,
        EnumArray<I_GeoLocation::GeoLocationField, std::string> geo_location_data
    ){
        string country_code = geo_location_data[I_GeoLocation::GeoLocationField::COUNTRY_CODE];
        string country_name = geo_location_data[I_GeoLocation::GeoLocationField::COUNTRY_NAME];
        string source_ip = convertIpAddrToString(event.getSourceIP());
    getExceptionVerdict(const std::set<std::string> &sources) {

        pair<ngx_http_cp_verdict_e, string> curr_matched_behavior;
        ngx_http_cp_verdict_e verdict = ngx_http_cp_verdict_e::TRAFFIC_VERDICT_IRRELEVANT;
        I_GeoLocation *i_geo_location = Singleton::Consume<I_GeoLocation>::by<HttpGeoFilter>();
        EnumArray<I_GeoLocation::GeoLocationField, std::string> geo_location_data;

        dbgTrace(D_GEO_FILTER)
        for (const std::string& source : sources) {

            Maybe<IPAddr> maybe_source_ip = IPAddr::createIPAddr(source);
            if (!maybe_source_ip.ok()){
                dbgWarning(D_GEO_FILTER) <<
                    "create ip address failed for source: " <<
                    source <<
                    ", Error: " <<
                    maybe_source_ip.getErr();
                continue;
            }

            auto asset_location = i_geo_location->lookupLocation(maybe_source_ip.unpack());
            if (!asset_location.ok()) {
                dbgWarning(D_GEO_FILTER) << "Lookup location failed for source: " <<
                    source <<
                    ", Error: " <<
                    asset_location.getErr();
                continue;
            }
            geo_location_data = asset_location.unpack();
            string country_code = geo_location_data[I_GeoLocation::GeoLocationField::COUNTRY_CODE];
            string country_name = geo_location_data[I_GeoLocation::GeoLocationField::COUNTRY_NAME];
            dbgTrace(D_GEO_FILTER)
                << "Get exception verdict. "
                << "country code: "
                << country_code
                << ", country name: "
                << country_name
                << ", source ip address: "
                << source_ip;
                << source;

            unordered_map<string, set<string>> exception_value_source_ip = {{"sourceIP", {source_ip}}};
            auto matched_behavior_maybe = getBehaviorsVerdict(exception_value_source_ip, event, geo_location_data);
            if (matched_behavior_maybe.ok()) {
                curr_matched_behavior = matched_behavior_maybe.unpack();
                verdict = curr_matched_behavior.first;
                if (verdict == ngx_http_cp_verdict_e::TRAFFIC_VERDICT_DROP) {
                    return verdict;
            unordered_map<string, set<string>> exception_value_source_ip = {{"sourceIP", {source}}};
            auto matched_behavior_maybe = getBehaviorsVerdict(exception_value_source_ip, geo_location_data);
            if (matched_behavior_maybe.ok()) {
                curr_matched_behavior = matched_behavior_maybe.unpack();
                verdict = curr_matched_behavior.first;
                dbgDebug(D_GEO_FILTER) << "found sourceIP exception, return verdict";
                break;
            }

            unordered_map<string, set<string>> exception_value_country_code = {
                {"countryCode", {country_code}}
            };
            matched_behavior_maybe = getBehaviorsVerdict(exception_value_country_code, geo_location_data);
            if (matched_behavior_maybe.ok()) {
                curr_matched_behavior = matched_behavior_maybe.unpack();
                verdict = curr_matched_behavior.first;
                if (verdict == ngx_http_cp_verdict_e::TRAFFIC_VERDICT_DROP) {
                    return verdict;
                }
            }

            unordered_map<string, set<string>> exception_value_country_name = {
                {"countryName", {country_name}}
            };
            matched_behavior_maybe = getBehaviorsVerdict(exception_value_country_name, geo_location_data);
            if (matched_behavior_maybe.ok()) {
                curr_matched_behavior = matched_behavior_maybe.unpack();
                verdict = curr_matched_behavior.first;
                if (verdict == ngx_http_cp_verdict_e::TRAFFIC_VERDICT_DROP) {
                    return verdict;
                }
            }
        }

        unordered_map<string, set<string>> exception_value_country_code = {
            {"countryCode", {country_code}}
        };
        matched_behavior_maybe = getBehaviorsVerdict(exception_value_country_code, event, geo_location_data);
        if (matched_behavior_maybe.ok()) {
            curr_matched_behavior = matched_behavior_maybe.unpack();
            verdict = curr_matched_behavior.first;
            if (verdict == ngx_http_cp_verdict_e::TRAFFIC_VERDICT_DROP) {
                return verdict;
            }
        }

        unordered_map<string, set<string>> exception_value_country_name = {
            {"countryName", {country_name}}
        };
        matched_behavior_maybe = getBehaviorsVerdict(exception_value_country_name, event, geo_location_data);
        if (matched_behavior_maybe.ok()) {
            curr_matched_behavior = matched_behavior_maybe.unpack();
            verdict = curr_matched_behavior.first;
            if (verdict == ngx_http_cp_verdict_e::TRAFFIC_VERDICT_DROP) {
                return verdict;
            }
        }
        if (verdict == ngx_http_cp_verdict_e::TRAFFIC_VERDICT_ACCEPT) {
            generateVerdictLog(
                verdict,
                event,
                curr_matched_behavior.second,
                false,
                geo_location_data
@@ -286,7 +409,6 @@ private:
    void
    generateVerdictLog(
        const ngx_http_cp_verdict_e &verdict,
        const NewHttpTransactionEvent &event,
        const string &matched_id,
        bool is_geo_filter,
        const EnumArray<I_GeoLocation::GeoLocationField, std::string> geo_location_data,

@@ -307,14 +429,24 @@ private:
            LogField(matched_on, matched_id),
            ReportIS::Tags::HTTP_GEO_FILTER
        );
        log
            << LogField("sourceIP", convertIpAddrToString(event.getSourceIP()))
            << LogField("sourcePort", event.getSourcePort())
            << LogField("hostName", event.getDestinationHost())
            << LogField("httpMethod", event.getHttpMethod())
            << LogField("securityAction", is_prevent ? "Prevent" : "Detect");
        auto env = Singleton::Consume<I_Environment>::by<HttpGeoFilter>();
        auto source_ip = env->get<string>(HttpTransactionData::client_ip_ctx);
        if (source_ip.ok()) log << LogField("sourceIP", source_ip.unpack());

        auto source_port = env->get<string>(HttpTransactionData::client_port_ctx);
        if (source_port.ok()) log << LogField("sourcePort", source_port.unpack());

        auto host_name = env->get<string>(HttpTransactionData::host_name_ctx);
        if (host_name.ok()) log << LogField("hostName", host_name.unpack());

        auto method = env->get<string>(HttpTransactionData::method_ctx);
        if (method.ok()) log << LogField("httpMethod", method.unpack());

        log << LogField("securityAction", is_prevent ? "Prevent" : "Detect");

        if (is_default_action) log << LogField("isDefaultSecurityAction", true);
        auto xff = env->get<std::string>(HttpTransactionData::xff_vals_ctx);
        if (xff.ok()) log << LogField("proxyIP", xff.unpack());

        log
            << LogField("sourceCountryCode", geo_location_data[I_GeoLocation::GeoLocationField::COUNTRY_CODE])
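The logging rework above pulls each transaction field from the environment and emits it only when present. A minimal sketch of that pattern, with std::optional standing in for the project's Maybe type:

#include <iostream>
#include <optional>
#include <string>

void logIfPresent(const char *field, const std::optional<std::string> &value) {
    if (value) std::cout << field << "=" << *value << " ";   // silently skipped otherwise
}

int main() {
    logIfPresent("sourceIP", std::string("198.51.100.2"));
    logIfPresent("hostName", std::nullopt);   // behaves like a missing env key
    std::cout << "\n";
}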
@@ -142,6 +142,13 @@ string disabled_settings =
    "}"
    "],\n";


string local_intelligence =
    "\"intelligence\":{"
    " \"local intelligence server ip\":\"127.0.0.1\","
    " \"local intelligence server primary port\":9090"
    "}\n,";

string policy =
    "\"rulebase\": {"
    "\"usersIdentifiers\": ["

@@ -259,7 +266,7 @@ Layer7AccessControlTest::verifyReport(

TEST_F(Layer7AccessControlTest, ReturnAcceptVerdict)
{
    stringstream ss_conf(prevent_settings + policy);
    stringstream ss_conf(prevent_settings + local_intelligence + policy);
    Singleton::Consume<Config::I_Config>::from(config)->loadConfiguration(ss_conf);

    string intelligence_response_ok = loadIntelligenceResponse("data/ok_intelligence_response.json");
@@ -305,7 +312,7 @@ TEST_F(Layer7AccessControlTest, ReturnAcceptVerdict)

TEST_F(Layer7AccessControlTest, ReturnDropVerdictOnMaliciousReputation)
{
    stringstream ss_conf(prevent_settings + policy);
    stringstream ss_conf(prevent_settings + local_intelligence + policy);
    Singleton::Consume<Config::I_Config>::from(config)->loadConfiguration(ss_conf);

    string malicious_intelligence_response = loadIntelligenceResponse("data/malicious_intelligence_response.json");

@@ -351,7 +358,7 @@ TEST_F(Layer7AccessControlTest, ReturnDropVerdictOnMaliciousReputation)

TEST_F(Layer7AccessControlTest, ReturnDropVerdictCacheBased)
{
    stringstream ss_conf(prevent_settings + policy);
    stringstream ss_conf(prevent_settings + local_intelligence + policy);
    Singleton::Consume<Config::I_Config>::from(config)->loadConfiguration(ss_conf);

    string malicious_intelligence_response = loadIntelligenceResponse("data/malicious_intelligence_response.json");

@@ -403,7 +410,7 @@ TEST_F(Layer7AccessControlTest, ReturnDropVerdictCacheBased)

TEST_F(Layer7AccessControlTest, AcceptOnDetect)
{
    stringstream ss_conf(detect_settings + policy);
    stringstream ss_conf(detect_settings + local_intelligence + policy);
    Singleton::Consume<Config::I_Config>::from(config)->loadConfiguration(ss_conf);

    string malicious_intelligence_response = loadIntelligenceResponse("data/malicious_intelligence_response.json");

@@ -449,7 +456,7 @@ TEST_F(Layer7AccessControlTest, AcceptOnDetect)

TEST_F(Layer7AccessControlTest, FallbackToSourceIPAndDrop)
{
    stringstream ss_conf(prevent_settings + policy);
    stringstream ss_conf(prevent_settings + local_intelligence + policy);
    Singleton::Consume<Config::I_Config>::from(config)->loadConfiguration(ss_conf);

    string malicious_intelligence_response = loadIntelligenceResponse("data/malicious_intelligence_response.json");
@@ -132,7 +132,7 @@ void
NewLoggingService::load(cereal::JSONInputArchive &archive_in)
{
    parseAppsecJSONKey<string>("address", address, archive_in);
    parseAppsecJSONKey<string>("proto", proto, archive_in);
    parseAppsecJSONKey<string>("proto", proto, archive_in, "tcp");
    if (valid_protocols.count(proto) == 0) {
        dbgWarning(D_LOCAL_POLICY) << "AppSec Logging Service - proto invalid: " << proto;
        throw PolicyGenException("AppSec Logging Service - proto invalid: " + proto);

@@ -45,6 +45,7 @@ public:
    bool isVersionAboveR8110() override;
    bool isReverseProxy() override;
    bool isCloudStorageEnabled() override;
    Maybe<tuple<string, string, string>> readCloudMetadata() override;
    Maybe<tuple<string, string, string>> parseNginxMetadata() override;
#if defined(gaia) || defined(smb)
    bool compareCheckpointVersion(int cp_version, std::function<bool(int, int)> compare_operator) const override;
@@ -188,17 +189,16 @@ DetailsResolver::Impl::getCheckpointVersion() const
{
#ifdef gaia
    static const string cmd =
        "echo $CPDIR | awk -F'-' '{print $NF}' | cut -c 2- |"
        " awk -F'.' '{ if( NF == 1 ) {print $1\"00\"} else {print $1$2} }'";
        "echo $CPDIR | awk '{sub(/.*-R/,\"\"); sub(/\\/.*/,\"\")}/^[0-9]*$/{$0=$0\".00\"}{sub(/\\./, \"\"); print}'";
#else // smb
    static const string cmd = "sqlcmd 'select major,minor from cpver' |"
        "awk '{if ($1 == \"major\") v += (substr($3,2) * 100);"
        " if ($1 == \"minor\") v += $3; } END { print v}'";

#endif // gaia
    auto version_out = DetailsResolvingHanlder::getCommandOutput(cmd);
    int cp_version = 0;
    if (version_out.ok()) {
        dbgTrace(D_ORCHESTRATOR) << "Identified version " << version_out.unpack();
        stringstream version_stream(version_out.unpack());
        version_stream >> cp_version;
    }
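Hedged sketch of what the reworked $CPDIR pipeline above computes: the release suffix after "-R" ("81.20", "82", ...) is normalized to an integer such as 8120 or 8200 (a bare major first gets ".00" appended).

#include <iostream>
#include <string>

int normalizeCpVersion(std::string suffix) {                         // e.g. "81.20" (text after "-R")
    if (suffix.find('.') == std::string::npos) suffix += ".00";      // "82" -> "82.00"
    suffix.erase(suffix.find('.'), 1);                               // drop the dot: "81.20" -> "8120"
    return std::stoi(suffix);
}

int main() {
    std::cout << normalizeCpVersion("81.20") << " " << normalizeCpVersion("82") << "\n";   // 8120 8200
}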
@@ -300,6 +300,58 @@ DetailsResolver::Impl::parseNginxMetadata()
    return make_tuple(config_opt, cc_opt, nginx_version);
}

Maybe<tuple<string, string, string>>
DetailsResolver::Impl::readCloudMetadata()
{
    auto env_read_cloud_metadata = []() -> Maybe<tuple<string, string, string>> {
        string account_id = getenv("CLOUD_ACCOUNT_ID") ? getenv("CLOUD_ACCOUNT_ID") : "";
        string vpc_id = getenv("CLOUD_VPC_ID") ? getenv("CLOUD_VPC_ID") : "";
        string instance_id = getenv("CLOUD_INSTANCE_ID") ? getenv("CLOUD_INSTANCE_ID") : "";

        if (account_id.empty() || vpc_id.empty() || instance_id.empty()) {
            return genError("Could not read cloud metadata");
        }

        return make_tuple(account_id, vpc_id, instance_id);
    };

    auto cloud_metadata = env_read_cloud_metadata();
    if (!cloud_metadata.ok()) {
        const string cmd = getFilesystemPathConfig() + "/scripts/get-cloud-metadata.sh";
        dbgTrace(D_ORCHESTRATOR) << cloud_metadata.getErr() << ", trying to fetch it via cmd: " << cmd;

        auto result = DetailsResolvingHanlder::getCommandOutput(cmd);
        if (result.ok()) {
            istringstream iss(result.unpack());
            string line;
            while (getline(iss, line)) {
                size_t pos = line.find('=');
                if (pos != string::npos) {
                    string key = line.substr(0, pos);
                    string value = line.substr(pos + 1);
                    if (!key.empty() && !value.empty()) setenv(key.c_str(), value.c_str(), 1);
                }
            }
            cloud_metadata = env_read_cloud_metadata();
        } else {
            dbgWarning(D_ORCHESTRATOR) << "Could not fetch cloud metadata from cmd: " << result.getErr();
        }
    }

    if (!cloud_metadata.ok()) {
        dbgWarning(D_ORCHESTRATOR) << cloud_metadata.getErr();
        return genError("Failed to fetch cloud metadata");
    }

    dbgTrace(D_ORCHESTRATOR)
        << "Successfully fetched cloud metadata: "
        << ::get<0>(cloud_metadata.unpack()) << ", "
        << ::get<1>(cloud_metadata.unpack()) << ", "
        << ::get<2>(cloud_metadata.unpack());

    return cloud_metadata.unpack();
}

DetailsResolver::DetailsResolver() : Component("DetailsResolver"), pimpl(make_unique<Impl>()) {}

DetailsResolver::~DetailsResolver() {}
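Hedged illustration of the contract readCloudMetadata() above relies on: all three CLOUD_* variables must be set (either already in the environment or exported by get-cloud-metadata.sh) before the metadata tuple is considered valid. Values below are placeholders.

#include <cstdlib>
#include <iostream>

int main() {
    setenv("CLOUD_ACCOUNT_ID", "123456789012", 1);
    setenv("CLOUD_VPC_ID", "vpc-0abc1234", 1);
    setenv("CLOUD_INSTANCE_ID", "i-0def5678", 1);
    const char *vars[] = {"CLOUD_ACCOUNT_ID", "CLOUD_VPC_ID", "CLOUD_INSTANCE_ID"};
    for (const char *v : vars) {
        const char *val = getenv(v);
        std::cout << v << "=" << (val && *val ? val : "<missing>") << "\n";
    }
}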
@@ -15,7 +15,9 @@
#define __CHECKPOINT_PRODUCT_HANDLERS_H__

#include <algorithm>
#include <regex>
#include <boost/regex.hpp>
#include <boost/algorithm/string.hpp>

#if defined(gaia)

@@ -63,6 +65,16 @@ checkPepIdaIdnStatus(const string &command_output)
    return genError("Current host does not have PEP control IDA IDN enabled");
}

Maybe<string>
checkAgentIntelligence(const string &command_output)
{
    if (command_output.find("is registered") != string::npos) {
        return string("true");
    }

    return genError("Current host does not have agent intelligence installed");
}

Maybe<string>
getIDAGaiaPackages(const string &command_output)
{

@@ -324,6 +336,34 @@ getSmbGWIPSecVPNBlade(const string &command_output)
{
    return getSmbBlade(command_output, "IPSec VPN Blade was not found");
}

Maybe<string>
extractManagements(const string &command_output)
{
    size_t start_pos = command_output.find(":masters(");
    if (start_pos == string::npos) {
        return genError("Starting pattern \":masters(\" not found.");
    }
    size_t end_pos = command_output.find("))):", start_pos);
    if (end_pos == string::npos) {
        return genError("Ending pattern \"))):\" not found.");
    }
    string input_string = command_output.substr(start_pos, end_pos - start_pos + 3);
    string json_output = "[";
    regex pattern("\\(ReferenceObject\\:Uid\\(\"\\{([\\w-]+)\\}\"\\)\\:Name\\(([^\\)]+)\\)\\:Table\\(([^\\)]+)\\)\\)");
    smatch matches;
    auto words_begin = sregex_iterator(input_string.begin(), input_string.end(), pattern);
    auto words_end = sregex_iterator();
    for (sregex_iterator i = words_begin; i != words_end; ++i) {
        const smatch& match = *i;
        string uid = boost::algorithm::to_lower_copy(match[1].str());
        string name = match[2].str();
        if (json_output.back() != '[') json_output += ",";
        json_output += "{\"Uid\":\"" + uid + "\",\"Name\":\"" + name + "\"}";
    }
    json_output += "]";
    return json_output;
}
#endif // gaia || smb

#if defined(gaia)
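A small standalone sketch of the extraction extractManagements() performs, run against a made-up myself_objects.C-style fragment (the UID and name here are invented; the real code additionally lowercases the UID and wraps the entries in a JSON array):

#include <iostream>
#include <regex>
#include <string>

int main() {
    std::string in = ":masters((ReferenceObject:Uid(\"{ABC-123}\"):Name(mgmt1):Table(network_objects)))):";
    std::regex pattern("\\(ReferenceObject\\:Uid\\(\"\\{([\\w-]+)\\}\"\\)\\:Name\\(([^\\)]+)\\)\\:Table\\(([^\\)]+)\\)\\)");
    std::smatch m;
    if (std::regex_search(in, m, pattern)) {
        std::cout << "{\"Uid\":\"" << m[1] << "\",\"Name\":\"" << m[2] << "\"}\n";
    }
}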
@@ -43,8 +43,8 @@ SHELL_PRE_CMD("gunzip local.cfg", "gunzip -c $FWDIR/state/local/FW1/local.cfg.gz
#if defined(gaia) || defined(smb)
SHELL_CMD_HANDLER("cpProductIntegrationMgmtObjectType", "cpprod_util CPPROD_IsMgmtMachine", getMgmtObjType)
SHELL_CMD_HANDLER("prerequisitesForHorizonTelemetry",
    "[ -f /var/log/nano_agent/cp-nano-horizon-telemetry-prerequisites.log ] "
    "&& head -1 /var/log/nano_agent/cp-nano-horizon-telemetry-prerequisites.log || echo ''",
    "FS_PATH=<FILESYSTEM-PREFIX>; [ -f ${FS_PATH}/cp-nano-horizon-telemetry-prerequisites.log ] "
    "&& head -1 ${FS_PATH}/cp-nano-horizon-telemetry-prerequisites.log || echo ''",
    checkIsInstallHorizonTelemetrySucceeded)
SHELL_CMD_HANDLER("QUID", "[ -d /opt/CPquid ] "
    "&& python3 /opt/CPquid/Quid_Api.py -i /opt/CPotelcol/quid_api/get_global_id.json | jq -r .message || echo ''",

@@ -99,6 +99,12 @@ SHELL_CMD_HANDLER(
SHELL_CMD_HANDLER("hasSAMLSupportedBlade", "enabled_blades", checkSAMLSupportedBlade)
SHELL_CMD_HANDLER("hasIDABlade", "enabled_blades", checkIDABlade)
SHELL_CMD_HANDLER("hasSAMLPortal", "mpclient status nac", checkSAMLPortal)
SHELL_CMD_HANDLER(
    "hasAgentIntelligenceInstalled",
    "<FILESYSTEM-PREFIX>/watchdog/cp-nano-watchdog "
    "--status --service <FILESYSTEM-PREFIX>/agentIntelligence/cp-nano-agent-intelligence-service",
    checkAgentIntelligence
)
SHELL_CMD_HANDLER("hasIdaIdnEnabled", "pep control IDN_nano_Srv_support status", checkPepIdaIdnStatus)
SHELL_CMD_HANDLER("requiredNanoServices", "ida_packages", getIDAGaiaPackages)
SHELL_CMD_HANDLER(

@@ -149,6 +155,12 @@ SHELL_CMD_HANDLER(
    "| awk -F '[:()]' '/:masters/ {found=1; next} found && /:Name/ {print $3; exit}'",
    getSMCBasedMgmtName
)
SHELL_CMD_HANDLER(
    "managements",
    "sed -n '/:masters (/,$p' $FWDIR/database/myself_objects.C |"
    " sed -e ':a' -e 'N' -e '$!ba' -e 's/\\n//g' -e 's/\t//g' -e 's/ //g' | sed 's/))):.*/)))):/'",
    extractManagements
)
#endif //gaia

#if defined(smb)

@@ -199,6 +211,13 @@ SHELL_CMD_HANDLER(
    "| awk -F '[:()]' '/:masters/ {found=1; next} found && /:Name/ {print $3; exit}'",
    getSMCBasedMgmtName
)

SHELL_CMD_HANDLER(
    "managements",
    "sed -n '/:masters (/,$p' /tmp/local.cfg |"
    " sed -e ':a' -e 'N' -e '$!ba' -e 's/\\n//g' -e 's/\t//g' -e 's/ //g' | sed 's/))):.*/)))):/'",
    extractManagements
)
#endif//smb

SHELL_CMD_OUTPUT("kernel_version", "uname -r")
@@ -77,14 +77,22 @@ void
DetailsResolvingHanlder::Impl::init()
{
    string actual_filesystem_prefix = getFilesystemPathConfig();
    size_t place_holder_size = filesystem_place_holder.size();

    for (auto &file_handler : file_content_handlers) {
        string &path = file_handler.second.first;
        size_t place_holder_size = filesystem_place_holder.size();
        if (path.substr(0, place_holder_size) == filesystem_place_holder) {
            path = actual_filesystem_prefix + path.substr(place_holder_size);
        }
    }

    for (auto &cmd_handler_pair : shell_command_handlers) {
        string &cmd_str = cmd_handler_pair.second.first;
        size_t fs_pos = cmd_str.find(filesystem_place_holder);
        if (fs_pos != string::npos) {
            cmd_str.replace(fs_pos, place_holder_size, actual_filesystem_prefix);
        }
    }
}

map<string, string>
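Minimal sketch of the placeholder substitution init() now applies to the shell command table above; the real code takes the prefix from getFilesystemPathConfig(), the value here is a placeholder.

#include <iostream>
#include <string>

std::string expandPrefix(std::string cmd, const std::string &prefix) {
    const std::string placeholder = "<FILESYSTEM-PREFIX>";
    size_t pos = cmd.find(placeholder);
    if (pos != std::string::npos) cmd.replace(pos, placeholder.size(), prefix);   // first occurrence only
    return cmd;
}

int main() {
    std::cout << expandPrefix("<FILESYSTEM-PREFIX>/watchdog/cp-nano-watchdog --status", "/etc/cp") << "\n";
}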
@@ -246,7 +246,7 @@ private:
        }
    }
    routine_id = i_mainloop->addFileRoutine(
        I_MainLoop::RoutineType::RealTime,
        I_MainLoop::RoutineType::System,
        server_sock,
        [this] () { handleConnection(); },
        "Health check probe server",

@@ -344,7 +344,7 @@ private:
    dbgDebug(D_HEALTH_CHECK) << "Successfully accepted client, client fd: " << new_client_socket;
    open_connections_counter++;
    auto curr_routine = i_mainloop->addOneTimeRoutine(
        I_MainLoop::RoutineType::RealTime,
        I_MainLoop::RoutineType::System,
        [this] ()
        {
            auto curr_routine_id = i_mainloop->getCurrentRoutineId().unpack();

@@ -111,12 +111,12 @@ TEST_F(HealthCheckerTest, clientConnection)

    EXPECT_CALL(
        mock_mainloop,
        addFileRoutine(I_MainLoop::RoutineType::RealTime, _, _, _, true)
        addFileRoutine(I_MainLoop::RoutineType::System, _, _, _, true)
    ).WillRepeatedly(DoAll(SaveArg<2>(&connection_handler_routine), Return(0)));

    EXPECT_CALL(
        mock_mainloop,
        addOneTimeRoutine(I_MainLoop::RoutineType::RealTime, _, "Health check probe connection handler", true)
        addOneTimeRoutine(I_MainLoop::RoutineType::System, _, "Health check probe connection handler", true)
    ).WillOnce(DoAll(SaveArg<1>(&connection_handler_routine), Return(0)));

    int socket = 1;

@@ -145,7 +145,7 @@ TEST_F(HealthCheckerTest, loadFromDynamicConfiguration)

    EXPECT_CALL(
        mock_mainloop,
        addFileRoutine(I_MainLoop::RoutineType::RealTime, _, _, _, true)
        addFileRoutine(I_MainLoop::RoutineType::System, _, _, _, true)
    ).WillRepeatedly(DoAll(SaveArg<2>(&connection_handler_routine), Return(0)));

    health_checker.init();

@@ -183,7 +183,7 @@ TEST_F(HealthCheckerTest, connectionsLimit)

    EXPECT_CALL(
        mock_mainloop,
        addFileRoutine(I_MainLoop::RoutineType::RealTime, _, _, _, true)
        addFileRoutine(I_MainLoop::RoutineType::System, _, _, _, true)
    ).WillRepeatedly(DoAll(SaveArg<2>(&connection_handler_routine), Return(0)));

    EXPECT_CALL(mock_mainloop, doesRoutineExist(_)).WillRepeatedly(Return(false));

@@ -218,12 +218,12 @@ TEST_F(HealthCheckerTest, disablingAfterEnabled)

    EXPECT_CALL(
        mock_mainloop,
        addFileRoutine(I_MainLoop::RoutineType::RealTime, _, _, _, true)
        addFileRoutine(I_MainLoop::RoutineType::System, _, _, _, true)
    ).WillRepeatedly(DoAll(SaveArg<2>(&connection_handler_routine), Return(0)));

    EXPECT_CALL(
        mock_mainloop,
        addOneTimeRoutine(I_MainLoop::RoutineType::RealTime, _, "Health check probe connection handler", true)
        addOneTimeRoutine(I_MainLoop::RoutineType::System, _, "Health check probe connection handler", true)
    ).WillOnce(DoAll(SaveArg<1>(&connection_handler_routine), Return(0)));

    int socket = 1;

@@ -273,12 +273,12 @@ TEST_F(HealthCheckerTest, changePortIpConfig)

    EXPECT_CALL(
        mock_mainloop,
        addFileRoutine(I_MainLoop::RoutineType::RealTime, _, _, _, true)
        addFileRoutine(I_MainLoop::RoutineType::System, _, _, _, true)
    ).WillRepeatedly(DoAll(SaveArg<2>(&connection_handler_routine), Return(0)));

    EXPECT_CALL(
        mock_mainloop,
        addOneTimeRoutine(I_MainLoop::RoutineType::RealTime, _, "Health check probe connection handler", true)
        addOneTimeRoutine(I_MainLoop::RoutineType::System, _, "Health check probe connection handler", true)
    ).WillOnce(DoAll(SaveArg<1>(&connection_handler_routine), Return(0)));

    int socket = 1;

@@ -321,12 +321,12 @@ TEST_F(HealthCheckerTest, FailedHealthCheck)

    EXPECT_CALL(
        mock_mainloop,
        addFileRoutine(I_MainLoop::RoutineType::RealTime, _, _, _, true)
        addFileRoutine(I_MainLoop::RoutineType::System, _, _, _, true)
    ).WillRepeatedly(DoAll(SaveArg<2>(&connection_handler_routine), Return(0)));

    EXPECT_CALL(
        mock_mainloop,
        addOneTimeRoutine(I_MainLoop::RoutineType::RealTime, _, "Health check probe connection handler", true)
        addOneTimeRoutine(I_MainLoop::RoutineType::System, _, "Health check probe connection handler", true)
    ).WillOnce(DoAll(SaveArg<1>(&connection_handler_routine), Return(0)));

    int socket = 1;

@@ -42,6 +42,7 @@ public:
    MOCK_METHOD0(getResolvedDetails, std::map<std::string, std::string>());
    MOCK_METHOD0(isVersionAboveR8110, bool());
    MOCK_METHOD0(parseNginxMetadata, Maybe<std::tuple<std::string, std::string, std::string>>());
    MOCK_METHOD0(readCloudMetadata, Maybe<std::tuple<std::string, std::string, std::string>>());
};

#endif // __MOCK_DETAILS_RESOLVER_H__

@@ -203,7 +203,7 @@ public:
    loadFogAddress();

    Singleton::Consume<I_MainLoop>::by<OrchestrationComp>()->addOneTimeRoutine(
        I_MainLoop::RoutineType::RealTime,
        I_MainLoop::RoutineType::System,
        [this] () { run(); },
        "Orchestration runner",
        true

@@ -1290,6 +1290,21 @@ private:
        }
    }

    void
    reportCloudMetadata(AgentDataReport &report)
    {
        I_DetailsResolver *i_details_resolver = Singleton::Consume<I_DetailsResolver>::by<OrchestrationComp>();
        auto cloud_metadata = i_details_resolver->readCloudMetadata();
        if (!cloud_metadata.ok()) {
            dbgDebug(D_ORCHESTRATOR) << cloud_metadata.getErr();
            return;
        }

        report << make_pair("cloudAccountId", ::get<0>(cloud_metadata.unpack()));
        report << make_pair("cloudVpcId", ::get<1>(cloud_metadata.unpack()));
        report << make_pair("cloudInstanceId", ::get<2>(cloud_metadata.unpack()));
    }

    void
    reportAgentDetailsMetaData()
    {

@@ -1335,6 +1350,8 @@ private:
        agent_data_report << AgentReportFieldWithLabel("cloud_storage_service", "false");
    }

    reportCloudMetadata(agent_data_report);

    if (i_details_resolver->isKernelVersion3OrHigher()) {
        agent_data_report << AgentReportFieldWithLabel("isKernelVersion3OrHigher", "true");
    }
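The cloud metadata travels as an (accountId, vpcId, instanceId) tuple, and the report fields added above are plain ::get<N> projections of it. A tiny sketch with made-up values:

#include <string>
#include <tuple>

int main() {
    std::tuple<std::string, std::string, std::string> md{"123456789012", "vpc-0abc", "i-0def"};
    std::string account_id  = std::get<0>(md);   // -> "cloudAccountId"
    std::string vpc_id      = std::get<1>(md);   // -> "cloudVpcId"
    std::string instance_id = std::get<2>(md);   // -> "cloudInstanceId"
    return account_id.empty() || vpc_id.empty() || instance_id.empty();
}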
@@ -88,7 +88,7 @@ public:
    // This Holding the Main Routine of the Orchestration.
    EXPECT_CALL(
        mock_ml,
        addOneTimeRoutine(I_MainLoop::RoutineType::RealTime, _, "Orchestration runner", true)
        addOneTimeRoutine(I_MainLoop::RoutineType::System, _, "Orchestration runner", true)
    ).WillOnce(DoAll(SaveArg<1>(&routine), Return(1)));

    EXPECT_CALL(mock_shell_cmd, getExecOutput("openssl version -d | cut -d\" \" -f2 | cut -d\"\\\"\" -f2", _, _))

@@ -143,6 +143,9 @@ public:

    map<string, string> resolved_mgmt_details({{"kernel_version", "4.4.0-87-generic"}});
    EXPECT_CALL(mock_details_resolver, getResolvedDetails()).WillRepeatedly(Return(resolved_mgmt_details));
    EXPECT_CALL(mock_details_resolver, readCloudMetadata()).WillRepeatedly(
        Return(Maybe<tuple<string, string, string>>(genError("No cloud metadata")))
    );
}

void

@@ -79,7 +79,7 @@ public:
    EXPECT_CALL(mock_orchestration_tools, setClusterId());
    EXPECT_CALL(
        mock_ml,
        addOneTimeRoutine(I_MainLoop::RoutineType::RealTime, _, "Orchestration runner", true)
        addOneTimeRoutine(I_MainLoop::RoutineType::System, _, "Orchestration runner", true)
    ).WillOnce(DoAll(SaveArg<1>(&routine), Return(1)));

    EXPECT_CALL(

@@ -170,6 +170,9 @@ public:

    map<string, string> resolved_mgmt_details({{"kernel_version", "4.4.0-87-generic"}});
    EXPECT_CALL(mock_details_resolver, getResolvedDetails()).WillRepeatedly(Return(resolved_mgmt_details));
    EXPECT_CALL(mock_details_resolver, readCloudMetadata()).WillRepeatedly(
        Return(Maybe<tuple<string, string, string>>(genError("No cloud metadata")))
    );
}

string

@@ -990,7 +993,7 @@ TEST_F(OrchestrationTest, loadOrchestrationPolicyFromBackup)

    EXPECT_CALL(
        mock_ml,
        addOneTimeRoutine(I_MainLoop::RoutineType::RealTime, _, "Orchestration runner", true)
        addOneTimeRoutine(I_MainLoop::RoutineType::System, _, "Orchestration runner", true)
    );

    EXPECT_CALL(

@@ -179,6 +179,15 @@ FogAuthenticator::registerAgent(
        dbgDebug(D_ORCHESTRATOR) << nginx_data.getErr();
    }

    auto cloud_metadata = details_resolver->readCloudMetadata();
    if (cloud_metadata.ok()) {
        request << make_pair("cloudAccountId", ::get<0>(cloud_metadata.unpack()));
        request << make_pair("cloudVpcId", ::get<1>(cloud_metadata.unpack()));
        request << make_pair("cloudInstanceId", ::get<2>(cloud_metadata.unpack()));
    } else {
        dbgDebug(D_ORCHESTRATOR) << cloud_metadata.getErr();
    }

    for (const pair<string, string> details : details_resolver->getResolvedDetails()) {
        request << details;
    }

@@ -450,9 +459,9 @@ getDeplymentType()
    auto deplyment_type = Singleton::Consume<I_EnvDetails>::by<FogAuthenticator>()->getEnvType();
    switch (deplyment_type) {
        case EnvType::LINUX: return "Embedded";
        case EnvType::DOCKER: return "Embedded";
        case EnvType::DOCKER: return "Docker";
        case EnvType::NON_CRD_K8S:
        case EnvType::K8S: return "Embedded";
        case EnvType::K8S: return "K8S";
        case EnvType::COUNT: break;
    }

@@ -579,7 +588,7 @@ FogAuthenticator::authenticateAgent()
    auto mainloop = Singleton::Consume<I_MainLoop>::by<FogAuthenticator>();
    if (!mainloop->doesRoutineExist(routine)) {
        routine = mainloop->addOneTimeRoutine(
            I_MainLoop::RoutineType::RealTime,
            I_MainLoop::RoutineType::System,
            [this, min_expiration_time] ()
            {
                uint expiration_time;
@@ -87,6 +87,7 @@ add_library(waap_clib
    ParserPairs.cc
    Waf2Util2.cc
    ParserPDF.cc
    ParserBinaryFile.cc
)

add_definitions("-Wno-unused-function")

@@ -27,6 +27,7 @@
#include "ParserPairs.h"
#include "ParserDelimiter.h"
#include "ParserPDF.h"
#include "ParserBinaryFile.h"
#include "WaapAssetState.h"
#include "Waf2Regex.h"
#include "Waf2Util.h"

@@ -274,7 +275,8 @@ DeepParser::onKv(const char *k, size_t k_len, const char *v, size_t v_len, int f
    bool base64ParamFound = false;
    dbgTrace(D_WAAP_DEEP_PARSER) << " ===Processing potential base64===";
    std::string decoded_val, decoded_key;
    base64_variants base64_status = Waap::Util::b64Test(cur_val, decoded_key, decoded_val);
    Waap::Util::BinaryFileType base64BinaryFileType = Waap::Util::BinaryFileType::FILE_TYPE_NONE;
    base64_variants base64_status = Waap::Util::b64Test(cur_val, decoded_key, decoded_val, base64BinaryFileType);

    dbgTrace(D_WAAP_DEEP_PARSER)
        << " status = "

@@ -355,7 +357,8 @@ DeepParser::onKv(const char *k, size_t k_len, const char *v, size_t v_len, int f
        isUrlPayload,
        isUrlParamPayload,
        flags,
        parser_depth
        parser_depth,
        base64BinaryFileType
    );
} else {
    offset = 0;

@@ -425,7 +428,8 @@ DeepParser::onKv(const char *k, size_t k_len, const char *v, size_t v_len, int f
        isUrlParamPayload,
        flags,
        parser_depth,
        base64ParamFound
        base64ParamFound,
        base64BinaryFileType
    );
    if (rc != CONTINUE_PARSING) {
        return rc;

@@ -798,7 +802,8 @@ DeepParser::parseAfterMisleadingMultipartBoundaryCleaned(
    bool isUrlParamPayload,
    int flags,
    size_t parser_depth,
    bool base64ParamFound)
    bool base64ParamFound,
    Waap::Util::BinaryFileType b64FileType)
{
    int offset = -1;
    int rc = 0;

@@ -815,7 +820,8 @@ DeepParser::parseAfterMisleadingMultipartBoundaryCleaned(
        isUrlPayload,
        isUrlParamPayload,
        flags,
        parser_depth
        parser_depth,
        b64FileType
    );
} else {
    offset = 0;

@@ -919,7 +925,8 @@ DeepParser::createInternalParser(
    bool isUrlPayload,
    bool isUrlParamPayload,
    int flags,
    size_t parser_depth
    size_t parser_depth,
    Waap::Util::BinaryFileType b64FileType
)
{
    dbgTrace(D_WAAP_DEEP_PARSER)

@@ -1152,10 +1159,25 @@ DeepParser::createInternalParser(
            m_parsersDeque.push_back(std::make_shared<BufferedParser<ParserPDF>>(*this, parser_depth + 1));
            offset = 0;
        } else {
            dbgTrace(D_WAAP_DEEP_PARSER) << "Starting to parse a binary file";
            m_parsersDeque.push_back(std::make_shared<BufferedParser<ParserBinary>>(*this, parser_depth + 1));
            offset = 0;
            Waap::Util::BinaryFileType fileType = ParserBinaryFile::detectBinaryFileHeader(cur_val);
            if (fileType != Waap::Util::BinaryFileType::FILE_TYPE_NONE) {
                dbgTrace(D_WAAP_DEEP_PARSER) << "Starting to parse a known binary file (type=" << fileType << ")";
                m_parsersDeque.push_back(
                    std::make_shared<BufferedParser<ParserBinaryFile>>(*this, parser_depth + 1, false, fileType)
                );
                offset = 0;
            } else {
                dbgTrace(D_WAAP_DEEP_PARSER) << "Starting to parse a binary file";
                m_parsersDeque.push_back(std::make_shared<BufferedParser<ParserBinary>>(*this, parser_depth + 1));
                offset = 0;
            }
        }
    } else if (b64FileType != Waap::Util::BinaryFileType::FILE_TYPE_NONE) {
        dbgTrace(D_WAAP_DEEP_PARSER) << "Starting to parse a known binary file, base64 encoded";
        m_parsersDeque.push_back(
            std::make_shared<BufferedParser<ParserBinaryFile>>(*this, parser_depth + 1, true, b64FileType)
        );
        offset = 0;
    }
}
if (offset < 0) {

@@ -18,6 +18,7 @@
#include "KeyStack.h"
#include "WaapAssetState.h"
#include "Waf2Regex.h"
#include "Waf2Util.h"
#include "maybe_res.h"
#include <deque>

@@ -129,7 +130,8 @@ private:
    bool isUrlPayload,
    bool isUrlParamPayload,
    int flags,
    size_t parser_depth
    size_t parser_depth,
    Waap::Util::BinaryFileType b64FileType
);

int createUrlParserForJson(

@@ -160,7 +162,8 @@ private:
    bool isUrlParamPayload,
    int flags,
    size_t parser_depth,
    bool base64ParamFound
    bool base64ParamFound,
    Waap::Util::BinaryFileType b64FileType
);
int pushValueToTopParser(std::string &cur_val, int flags, bool base64ParamFound, int offset, size_t parser_depth);
int parseBuffer(
components/security_apps/waap/waap_clib/ParserBinaryFile.cc (new file, 199 lines)
@@ -0,0 +1,199 @@
// Copyright (C) 2022 Check Point Software Technologies Ltd. All rights reserved.

// Licensed under the Apache License, Version 2.0 (the "License");
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "ParserBinaryFile.h"
#include "Waf2Util.h"
#include "debug.h"
#include <string.h>
#include <map>
#include <tuple>

using namespace std;
using Waap::Util::BinaryFileType;

USE_DEBUG_FLAG(D_WAAP_PARSER_BINARY_FILE);
USE_DEBUG_FLAG(D_WAAP);

const string ParserBinaryFile::m_parserName = "ParserBinaryFile";

static const map<BinaryFileType, pair<string, string>> m_head_tail_map = {
    {BinaryFileType::FILE_TYPE_PNG,
        {string("\x89\x50\x4E\x47\x0D\x0A\x1A\x0A"), // PNG
        string("\x49\x45\x4e\x44\xae\x42\x60\x82")}}, // IEND
    {BinaryFileType::FILE_TYPE_JPEG,
        {string("\xff\xd8\xff"),
        string("\xff\xd9")}},
    {BinaryFileType::FILE_TYPE_PDF,
        {string("%PDF-"),
        string("%%EOF")}}
};

ParserBinaryFile::ParserBinaryFile(
    IParserStreamReceiver &receiver,
    size_t parser_depth,
    bool is_base64,
    BinaryFileType file_type
) :
    m_receiver(receiver),
    m_state(s_start),
    m_parser_depth(parser_depth),
    m_is_base64(is_base64),
    m_file_type(file_type)
{}

ParserBinaryFile::~ParserBinaryFile()
{}

BinaryFileType
ParserBinaryFile::detectBinaryFileHeader(const string &buf)
{
    if (buf.size() < MIN_HEADER_LOOKUP) {
        dbgTrace(D_WAAP_PARSER_BINARY_FILE) << "Buffer size too small (" << buf.size() << ")";
        return BinaryFileType::FILE_TYPE_NONE;
    }
    const string searchStr = buf.substr(0, MAX_HEADER_LOOKUP);
    for (const auto &entry : m_head_tail_map) {
        const string &head = entry.second.first;
        size_t pos = searchStr.find(head);
        if (pos != string::npos) {
            if (buf.size() - pos >= MIN_HEADER_LOOKUP) {
                dbgTrace(D_WAAP_PARSER_BINARY_FILE) << "Found. type=" << entry.first;
                return entry.first;
            } else {
                dbgTrace(D_WAAP_PARSER_BINARY_FILE) << "Remaining size after header is too small";
                return BinaryFileType::FILE_TYPE_NONE;
            }
        }
    }
    return BinaryFileType::FILE_TYPE_NONE;
}


size_t
ParserBinaryFile::push(const char *buf, size_t len)
{
    dbgTrace(D_WAAP_PARSER_BINARY_FILE)
        << "buf="
        << buf
        << "len="
        << len;

    const char *c;

    if (m_state == s_error) {
        return 0;
    }
    if (len == 0) {
        dbgTrace(D_WAAP_PARSER_BINARY_FILE) << "end of stream. m_state=" << m_state;

        if (m_state == s_end) {
            m_receiver.onKvDone();
        } else if (m_is_base64) {
            dbgTrace(D_WAAP_PARSER_BINARY_FILE) << "finished parsing";
            if (m_receiver.onKey("BinaryFileSkip", 14) != 0) {
                m_state = s_error;
                return 0;
            }
            if (m_receiver.onValue("", 0) != 0) {
                m_state = s_error;
                return 0;
            }
            m_receiver.onKvDone();
        } else {
            m_state = s_error;
        }
        return 0;
    }
    if (m_head_tail_map.find(m_file_type) == m_head_tail_map.end()) {
        dbgTrace(D_WAAP_PARSER_BINARY_FILE) << "unknown file type: " << m_file_type;
        m_state = s_error;
        return 0;
    }
    const string tail = m_head_tail_map.at(m_file_type).second;

    switch (m_state) {
    case s_start:
        m_state = s_body;
        CP_FALL_THROUGH;
    case s_body:
        if (m_is_base64) {
            dbgTrace(D_WAAP_PARSER_BINARY_FILE) << "parsing base64";
            bool keepParsing = true;
            for (size_t i = 0; i < len; i++) {
                bool isB64AlphaChar =
                    Waap::Util::isAlphaAsciiFast(buf[i]) || isdigit(buf[i]) || buf[i] == '/' || buf[i] == '+';
                if (buf[i] == '=') {
                    dbgTrace(D_WAAP_PARSER_BINARY_FILE)
                        << "base64 padding found (offset=" << i << "). end of stream.";
                    m_state = s_end;
                    keepParsing = false;
                    break;
                } else if (!isB64AlphaChar) {
                    dbgTrace(D_WAAP_PARSER_BINARY_FILE)
                        << "non-base64 char found (c=" << buf[i] << ",offset=" << i << "). return error";
                    m_state = s_error;
                    return 0;
                }
            }
            if (keepParsing) { // keep "parsing" on next call to push()
                break;
            }
        } else {
            dbgTrace(D_WAAP_PARSER_BINARY_FILE) << "parsing binary. Searching for tail: " << tail;
            c = strstr(buf + len - tail.size(), tail.c_str());
            dbgTrace(D_WAAP_PARSER_BINARY_FILE) << "search result: c=" << c;
            if (c) {
                m_state = s_end;
            } else { // keep "parsing" on next call to push()
                break;
            }
        }
        CP_FALL_THROUGH;
    case s_end:
        dbgTrace(D_WAAP_PARSER_BINARY_FILE) << "finished parsing";
        if (m_receiver.onKey("BinaryFileSkip", 14) != 0) {
            m_state = s_error;
            return 0;
        }
        if (m_receiver.onValue("", 0) != 0) {
            m_state = s_error;
            return 0;
        }
        break;
    case s_error:
        dbgTrace(D_WAAP_PARSER_BINARY_FILE) << "error detected";
        break;
    default:
        dbgTrace(D_WAAP_PARSER_BINARY_FILE) << "unknown state: " << m_state;
        m_state = s_error;
        return 0;
    }

    return len;
}


void ParserBinaryFile::finish()
{
    push(NULL, 0);
}

const string& ParserBinaryFile::name() const
{
    return m_parserName;
}

bool ParserBinaryFile::error() const
{
    return m_state == s_error;
}
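Hedged usage sketch for the header detector added above: the first bytes of a PNG, padded past MIN_HEADER_LOOKUP so the size gate passes, should map to FILE_TYPE_PNG (constants and enum values as declared in ParserBinaryFile.h and Waf2Util.h).

#include <iostream>
#include <string>
#include "ParserBinaryFile.h"

int main() {
    std::string png_start("\x89PNG\r\n\x1a\n", 8);   // PNG signature
    png_start.append(16, '\0');                      // padding so buf.size() >= MIN_HEADER_LOOKUP
    auto type = ParserBinaryFile::detectBinaryFileHeader(png_start);
    std::cout << (type == Waap::Util::BinaryFileType::FILE_TYPE_PNG) << "\n";   // expect 1
}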
57
components/security_apps/waap/waap_clib/ParserBinaryFile.h
Normal file
57
components/security_apps/waap/waap_clib/ParserBinaryFile.h
Normal file
@ -0,0 +1,57 @@
// Copyright (C) 2022 Check Point Software Technologies Ltd. All rights reserved.

// Licensed under the Apache License, Version 2.0 (the "License");
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifndef __PARSER_BINARY_FILE_H__
#define __PARSER_BINARY_FILE_H__

#include "ParserBase.h"
#include "Waf2Util.h"
#include <string.h>

#define MIN_HEADER_LOOKUP 16
#define MAX_HEADER_LOOKUP 64
#define MAX_TAIL_LOOKUP 5

class ParserBinaryFile : public ParserBase {
public:
    static Waap::Util::BinaryFileType detectBinaryFileHeader(const std::string &buf);

    ParserBinaryFile(
        IParserStreamReceiver &receiver,
        size_t parser_depth,
        bool is_base64,
        Waap::Util::BinaryFileType file_type);
    virtual ~ParserBinaryFile();
    virtual size_t push(const char *buf, size_t len);
    virtual void finish();
    virtual const std::string &name() const;
    virtual bool error() const;
    virtual size_t depth() { return 1; }

private:
    enum state {
        s_start,
        s_body,
        s_end,
        s_error
    };

    IParserStreamReceiver &m_receiver;
    enum state m_state;
    static const std::string m_parserName;
    size_t m_parser_depth;
    bool m_is_base64;
    Waap::Util::BinaryFileType m_file_type;
};

#endif // __PARSER_BINARY_FILE_H__
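
The header above declares detectBinaryFileHeader() together with MIN_HEADER_LOOKUP/MAX_HEADER_LOOKUP, which suggests magic-byte matching over the first decoded bytes. The sketch below shows one plausible shape of such a check using the standard PNG/JPEG/PDF signatures; the byte patterns and the matching policy are assumptions for illustration, not the commit's actual implementation:

    // Plausible magic-byte detection (sketch; the real detectBinaryFileHeader may differ).
    Waap::Util::BinaryFileType
    detectHeaderSketch(const std::string &buf)
    {
        static const std::string png("\x89PNG\r\n\x1a\n", 8);   // PNG signature
        static const std::string jpeg("\xFF\xD8\xFF", 3);       // JPEG SOI marker
        static const std::string pdf("%PDF-");                  // PDF header
        const std::string head = buf.substr(0, MAX_HEADER_LOOKUP);
        if (head.compare(0, png.size(), png) == 0) return Waap::Util::FILE_TYPE_PNG;
        if (head.compare(0, jpeg.size(), jpeg) == 0) return Waap::Util::FILE_TYPE_JPEG;
        if (head.find(pdf) != std::string::npos) return Waap::Util::FILE_TYPE_PDF;
        return Waap::Util::FILE_TYPE_NONE;
    }
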
@ -304,6 +304,7 @@ ParserJson::ParserJson(
|
||||
m_key.push("json", 4);
|
||||
}
|
||||
|
||||
|
||||
ParserJson::~ParserJson()
|
||||
{
|
||||
// Cleanup JSON
|
||||
|
@ -34,6 +34,7 @@
|
||||
#include "generic_rulebase/rulebase_config.h"
|
||||
#include "user_identifiers_config.h"
|
||||
#include "Waf2Regex.h"
|
||||
#include "ParserBinaryFile.h"
|
||||
|
||||
using boost::algorithm::to_lower_copy;
|
||||
using namespace std;
|
||||
@ -960,11 +961,12 @@ string filterUTF7(const string& text) {
|
||||
// 4. percent of non-printable characters (!isprint())
|
||||
// in decoded data is less than 10% (statistical garbage detection).
|
||||
// Returns false if the above checks fail.
|
||||
bool decodeBase64Chunk(
|
||||
base64_decode_status decodeBase64Chunk(
|
||||
const string& value,
|
||||
string::const_iterator it,
|
||||
string::const_iterator end,
|
||||
string& decoded)
|
||||
string& decoded,
|
||||
bool clear_on_error)
|
||||
{
|
||||
decoded.clear();
|
||||
uint32_t acc = 0;
|
||||
@ -974,13 +976,14 @@ bool decodeBase64Chunk(
|
||||
uint32_t spacer_count = 0;
|
||||
|
||||
dbgTrace(D_WAAP) << "decodeBase64Chunk: value='" << value << "' match='" << string(it, end) << "'";
|
||||
string::const_iterator begin = it;
|
||||
|
||||
// The encoded data length (without the "base64," prefix) should be exactly divisible by 4
|
||||
// len % 4 is not 0 i.e. this is not base64
|
||||
if ((end - it) % 4 != 0) {
|
||||
dbgTrace(D_WAAP_BASE64) <<
|
||||
"b64DecodeChunk: (leave as-is) because encoded data length should be exactly divisible by 4.";
|
||||
return false;
|
||||
return B64_DECODE_INVALID;
|
||||
}
|
||||
|
||||
while (it != end) {
|
||||
@ -992,7 +995,7 @@ bool decodeBase64Chunk(
|
||||
dbgTrace(D_WAAP_BASE64) <<
|
||||
"decodeBase64Chunk: (leave as-is) because terminator characters must all be '='," <<
|
||||
"until end of match.";
|
||||
return false;
|
||||
return B64_DECODE_INVALID;
|
||||
}
|
||||
|
||||
// We should see 0, 1 or 2 (no more) terminator characters
|
||||
@ -1000,7 +1003,7 @@ bool decodeBase64Chunk(
|
||||
|
||||
if (terminatorCharsSeen > 2) {
|
||||
dbgTrace(D_WAAP_BASE64) << "decodeBase64Chunk: (leave as-is) because terminatorCharsSeen > 2";
|
||||
return false;
|
||||
return B64_DECODE_INVALID;
|
||||
}
|
||||
|
||||
// allow for more terminator characters
|
||||
@ -1033,8 +1036,8 @@ bool decodeBase64Chunk(
|
||||
}
|
||||
else {
|
||||
dbgTrace(D_WAAP_BASE64) << "decodeBase64Chunk: (leave as-is) because of non-base64 character ('" <<
|
||||
c << "', ASCII " << (unsigned int)c << ")";
|
||||
return false; // non-base64 character
|
||||
c << "', ASCII " << (unsigned int)c << ", offset " << (it-begin) << ")";
|
||||
return B64_DECODE_INVALID; // non-base64 character
|
||||
}
|
||||
|
||||
acc = (acc << 6) | val;
|
||||
@ -1087,20 +1090,23 @@ bool decodeBase64Chunk(
|
||||
}
|
||||
else {
|
||||
dbgTrace(D_WAAP_BASE64) << "decodeBase64Chunk: (delete) because decoded.size=" << decoded.size() <<
|
||||
", nonPrintableCharsCount=" << nonPrintableCharsCount;
|
||||
decoded.clear();
|
||||
", nonPrintableCharsCount=" << nonPrintableCharsCount <<
|
||||
", clear_on_error=" << clear_on_error;
|
||||
if (clear_on_error) decoded.clear();
|
||||
return B64_DECODE_INCOMPLETE;
|
||||
}
|
||||
dbgTrace(D_WAAP_BASE64) << "returning true: successfully decoded."
|
||||
<< " Returns decoded data in \"decoded\" parameter";
|
||||
return true; // successfully decoded. Returns decoded data in "decoded" parameter
|
||||
return B64_DECODE_OK; // successfully decoded. Returns decoded data in "decoded" parameter
|
||||
}
|
||||
|
||||
// If decoded size is too small - leave the encoded value (return false)
|
||||
decoded.clear(); // discard partial data
|
||||
dbgTrace(D_WAAP_BASE64) << "decodeBase64Chunk: (leave as-is) because decoded too small. decoded.size=" <<
|
||||
decoded.size() <<
|
||||
", nonPrintableCharsCount=" << nonPrintableCharsCount;
|
||||
return false;
|
||||
", nonPrintableCharsCount=" << nonPrintableCharsCount <<
|
||||
", clear_on_error=" << clear_on_error;
|
||||
return B64_DECODE_INVALID;
|
||||
}
|
||||
|
||||
// Attempts to detect and validate base64 chunk.
|
||||
@ -1144,7 +1150,7 @@ b64DecodeChunk(
|
||||
}
|
||||
}
|
||||
|
||||
return decodeBase64Chunk(value, it, end, decoded);
|
||||
return decodeBase64Chunk(value, it, end, decoded) != B64_DECODE_INVALID;
|
||||
}
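
With this change decodeBase64Chunk() returns a tri-state base64_decode_status instead of a bool, and the new clear_on_error flag lets a caller keep the partially decoded bytes when the printable-ratio heuristic rejects the chunk. A short caller-side sketch of the intended distinctions; "s" is the candidate std::string under inspection and the handling policy in the comments is illustrative:

    // Sketch: reacting to the tri-state result (names come from the diff above).
    std::string decoded;
    base64_decode_status st = decodeBase64Chunk(s, s.begin(), s.end(), decoded, false /*clear_on_error*/);
    switch (st) {
    case B64_DECODE_OK:         // printable text decoded: scan "decoded" instead of the raw value
        break;
    case B64_DECODE_INCOMPLETE: // looked like base64 but mostly non-printable; bytes kept for header sniffing
        break;
    case B64_DECODE_INVALID:    // not base64 at all: leave the original value untouched
        break;
    }
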
|
||||
|
||||
vector<string> split(const string& s, char delim) {
|
||||
@ -1281,6 +1287,8 @@ bool detectBase64Chunk(
|
||||
{
|
||||
dbgTrace(D_WAAP_BASE64) << " ===detectBase64Chunk===: starting with = '" << s << "'";
|
||||
string::const_iterator it = s.begin();
|
||||
start = s.end();
|
||||
end = s.end();
|
||||
|
||||
//detect "base64," prefix to start search after this
|
||||
for (; it != s.end()-7; it++) {
|
||||
@ -1309,33 +1317,62 @@ bool detectBase64Chunk(
|
||||
return false;
|
||||
}
|
||||
|
||||
base64_decode_status
|
||||
processDecodedChunk(
|
||||
const string &s,
|
||||
string::const_iterator start,
|
||||
string::const_iterator end,
|
||||
string &value,
|
||||
BinaryFileType &binaryFileType
|
||||
)
|
||||
{
|
||||
base64_decode_status retVal = decodeBase64Chunk(s, start, end, value, false);
|
||||
dbgTrace(D_WAAP_BASE64) << " ===isBase64PrefixProcessingOK===: after decode. retVal=" << retVal
|
||||
<< " value.size()=" << value.size();
|
||||
if (retVal != B64_DECODE_INVALID && !value.empty()) {
|
||||
binaryFileType = ParserBinaryFile::detectBinaryFileHeader(value);
|
||||
if (retVal == B64_DECODE_INCOMPLETE) value.clear();
|
||||
}
|
||||
return retVal;
|
||||
}
|
||||
|
||||
bool isBase64PrefixProcessingOK (
|
||||
const string &s,
|
||||
string &value)
|
||||
string &value,
|
||||
BinaryFileType &binaryFileType)
|
||||
{
|
||||
string::const_iterator start, end;
|
||||
bool retVal = false;
|
||||
base64_decode_status retVal = B64_DECODE_INVALID;
|
||||
dbgTrace(D_WAAP_BASE64) << " ===isBase64PrefixProcessingOK===: before regex for prefix for string '" << s << "'";
|
||||
if (base64_prefix_detector_re.hasMatch(s)) {
|
||||
dbgTrace(D_WAAP_BASE64) << " ===isBase64PrefixProcessingOK===: prefix detected on string '" << s << "'";
|
||||
if (detectBase64Chunk(s, start, end)) {
|
||||
dbgTrace(D_WAAP_BASE64) << " ===isBase64PrefixProcessingOK===: chunk detected";
|
||||
if ((start != s.end()) && (end == s.end())) {
|
||||
retVal = decodeBase64Chunk(s, start, end, value);
|
||||
retVal = processDecodedChunk(s, start, end, value, binaryFileType);
|
||||
}
|
||||
} else if (start != s.end()) {
|
||||
dbgTrace(D_WAAP_BASE64) << " ===isBase64PrefixProcessingOK===: chunk not detected."
|
||||
" searching for known file header only";
|
||||
end = (start + MAX_HEADER_LOOKUP < s.end()) ? start + MAX_HEADER_LOOKUP : s.end();
|
||||
processDecodedChunk(s, start, end, value, binaryFileType);
|
||||
value.clear();
|
||||
return binaryFileType != Waap::Util::BinaryFileType::FILE_TYPE_NONE;
|
||||
}
|
||||
}
|
||||
return retVal;
|
||||
return retVal != B64_DECODE_INVALID;
|
||||
}
|
||||
|
||||
base64_variants b64Test (
|
||||
const string &s,
|
||||
string &key,
|
||||
string &value)
|
||||
string &value,
|
||||
BinaryFileType &binaryFileType)
|
||||
{
|
||||
|
||||
key.clear();
|
||||
bool retVal;
|
||||
binaryFileType = Waap::Util::BinaryFileType::FILE_TYPE_NONE;
|
||||
|
||||
dbgTrace(D_WAAP_BASE64) << " ===b64Test===: string = " << s
|
||||
<< " key = " << key << " value = " << value;
|
||||
@ -1397,7 +1434,7 @@ base64_variants b64Test (
|
||||
}
|
||||
|
||||
dbgTrace(D_WAAP_BASE64) << " ===b64Test===: after processing key = '" << key << "'";
|
||||
bool found = isBase64PrefixProcessingOK(s, prefix_decoded_val);
|
||||
bool found = isBase64PrefixProcessingOK(s, prefix_decoded_val, binaryFileType);
|
||||
dbgTrace(D_WAAP_BASE64) << " ===b64Test===: after prefix test found = "
|
||||
<< found << " new value is '" << prefix_decoded_val << "' - done";
|
||||
if (found) {
|
||||
@ -1421,7 +1458,7 @@ base64_variants b64Test (
|
||||
if ((s.end() - start) % 4 != 0) {
|
||||
key.clear();
|
||||
value.clear();
|
||||
return CONTINUE_AS_IS;;
|
||||
return CONTINUE_AS_IS;
|
||||
}
|
||||
}
|
||||
else {
|
||||
@ -1443,7 +1480,7 @@ base64_variants b64Test (
|
||||
key.pop_back();
|
||||
dbgTrace(D_WAAP_BASE64) << " ===b64Test===: FINAL key = '" << key << "'";
|
||||
}
|
||||
retVal = decodeBase64Chunk(s, start, s.end(), value);
|
||||
retVal = decodeBase64Chunk(s, start, s.end(), value) != B64_DECODE_INVALID;
|
||||
|
||||
dbgTrace(D_WAAP_BASE64) << " ===b64Test===: After testing and conversion value = "
|
||||
<< value << "retVal = '" << retVal <<"'";
|
||||
|
@ -34,6 +34,7 @@
enum base64_variants {SINGLE_B64_CHUNK_CONVERT, KEY_VALUE_B64_PAIR, CONTINUE_AS_IS};
enum base64_stage {BEFORE_EQUAL, EQUAL, DONE, MISDETECT};
enum base64_decode_status {B64_DECODE_INVALID, B64_DECODE_OK, B64_DECODE_INCOMPLETE};

// This is portable version of stricmp(), which is non-standard function (not even in C).
// Contrary to stricmp(), for a slight optimization, s2 is ASSUMED to be already in lowercase.
@ -858,12 +859,13 @@ void unescapeUnicode(std::string &text);
// Try to find and decode UTF7 chunks
std::string filterUTF7(const std::string &text);

bool
base64_decode_status
decodeBase64Chunk(
    const std::string &value,
    std::string::const_iterator it,
    std::string::const_iterator end,
    std::string &decoded);
    std::string &decoded,
    bool clear_on_error = true);

bool
b64DecodeChunk(
@ -889,6 +891,13 @@ namespace Util {
    std::string &key,
    std::string &value);

enum BinaryFileType {
    FILE_TYPE_NONE,
    FILE_TYPE_PNG,
    FILE_TYPE_JPEG,
    FILE_TYPE_PDF
};

void b64Decode(
    const std::string &s,
    RegexSubCallback_f cb,
@ -899,7 +908,8 @@ namespace Util {
base64_variants b64Test (
    const std::string &s,
    std::string &key,
    std::string &value);
    std::string &value,
    BinaryFileType &binaryFileType);

// The original stdlib implementation of isalpha() supports locale settings which we do not really need.
// It is also proven to contribute to slow performance in some of the algorithms using it.
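
b64Test() now also reports a BinaryFileType through the extra out-parameter, so a caller can stop treating a recognized base64-embedded binary as text. A hedged sketch of caller-side branching; the dispatch policy shown is illustrative and the real call site is not part of this hunk:

    // Illustrative caller for the extended signature.
    std::string raw_param;   // the parameter value under inspection (assumed input)
    std::string key, value;
    Waap::Util::BinaryFileType file_type = Waap::Util::FILE_TYPE_NONE;
    base64_variants variant = Waap::Util::b64Test(raw_param, key, value, file_type);
    if (variant == CONTINUE_AS_IS && file_type != Waap::Util::FILE_TYPE_NONE) {
        // base64 blob with a known binary header: skip textual scanning of the payload
    } else if (variant == KEY_VALUE_B64_PAIR) {
        // inspect "key" and the decoded "value" separately
    } else if (variant == SINGLE_B64_CHUNK_CONVERT) {
        // the whole parameter decoded into "value"
    }
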
@ -14,7 +14,8 @@ enum SchemaKeyType
|
||||
StartObjectKeyName,
|
||||
StartArrayKeyName,
|
||||
EndArrayKeyName,
|
||||
OtherKey
|
||||
OtherKey,
|
||||
JsonFailure
|
||||
};
|
||||
|
||||
#endif // __OA_SCHEMA_UPDATER_KEYS_H__
|
||||
|
@ -42,6 +42,7 @@ using namespace std;
|
||||
USE_DEBUG_FLAG(D_WAAP);
|
||||
USE_DEBUG_FLAG(D_WAAP_ULIMITS);
|
||||
USE_DEBUG_FLAG(D_OA_SCHEMA_UPDATER);
|
||||
USE_DEBUG_FLAG(D_NGINX_EVENTS);
|
||||
|
||||
WaapComponent::Impl::Impl() :
|
||||
pending_response(ngx_http_cp_verdict_e::TRAFFIC_VERDICT_INSPECT),
|
||||
@ -124,7 +125,7 @@ WaapComponent::Impl::getListenerName() const
|
||||
EventVerdict
|
||||
WaapComponent::Impl::respond(const NewHttpTransactionEvent &event)
|
||||
{
|
||||
dbgTrace(D_WAAP) << " * \e[32mNGEN_EVENT: NewTransactionEvent\e[0m";
|
||||
dbgTrace(D_NGINX_EVENTS) << " * \e[32mNGEN_EVENT: NewTransactionEvent\e[0m";
|
||||
|
||||
if (waapStateTable->hasState<Waf2Transaction>()) {
|
||||
dbgWarning(D_WAAP) << " * \e[31 -- NewTransactionEvent called twice on same entry \e[0m";
|
||||
@ -202,7 +203,7 @@ WaapComponent::Impl::respond(const HttpRequestHeaderEvent &event)
|
||||
auto &header_name = event.getKey();
|
||||
auto &header_value = event.getValue();
|
||||
|
||||
dbgTrace(D_WAAP)
|
||||
dbgTrace(D_NGINX_EVENTS)
|
||||
<< " * \e[32mNGEN_EVENT: HttpHeaderRequest event: "
|
||||
<< string(header_name)
|
||||
<< ": "
|
||||
@ -210,7 +211,7 @@ WaapComponent::Impl::respond(const HttpRequestHeaderEvent &event)
|
||||
<< "\e[0m";
|
||||
|
||||
if (!waapStateTable->hasState<Waf2Transaction>()) {
|
||||
dbgWarning(D_WAAP)
|
||||
dbgWarning(D_NGINX_EVENTS)
|
||||
<< " * \e[31mNGEN_EVENT: http_header - "
|
||||
<< "failed to get waf2 transaction, state not exist\e[0m";
|
||||
|
||||
@ -257,10 +258,10 @@ WaapComponent::Impl::respond(const HttpRequestHeaderEvent &event)
|
||||
EventVerdict
|
||||
WaapComponent::Impl::respond(const HttpRequestBodyEvent &event)
|
||||
{
|
||||
dbgTrace(D_WAAP) << " * \e[32mNGEN_EVENT: HttpBodyRequest data buffer event\e[0m";
|
||||
dbgTrace(D_NGINX_EVENTS) << " * \e[32mNGEN_EVENT: HttpBodyRequest data buffer event\e[0m";
|
||||
|
||||
if (!waapStateTable->hasState<Waf2Transaction>()) {
|
||||
dbgWarning(D_WAAP) <<
|
||||
dbgWarning(D_NGINX_EVENTS) <<
|
||||
" * \e[31mNGEN_EVENT: data buffer - failed to get waf2 transaction, state not exist\e[0m";
|
||||
return drop_response;
|
||||
}
|
||||
@ -295,10 +296,10 @@ WaapComponent::Impl::respond(const HttpRequestBodyEvent &event)
|
||||
EventVerdict
|
||||
WaapComponent::Impl::respond(const EndRequestEvent &)
|
||||
{
|
||||
dbgTrace(D_WAAP) << " * \e[32mNGEN_EVENT: endRequest event\e[0m";
|
||||
dbgTrace(D_NGINX_EVENTS) << " * \e[32mNGEN_EVENT: endRequest event\e[0m";
|
||||
|
||||
if (!waapStateTable->hasState<Waf2Transaction>()) {
|
||||
dbgWarning(D_WAAP)
|
||||
dbgWarning(D_NGINX_EVENTS)
|
||||
<< "* \e[31mNGEN_EVENT: endRequest - failed to get waf2 transaction, state does not exist\e[0m";
|
||||
return drop_response;
|
||||
}
|
||||
@ -333,13 +334,13 @@ WaapComponent::Impl::respond(const EndRequestEvent &)
|
||||
EventVerdict
|
||||
WaapComponent::Impl::respond(const ResponseCodeEvent &event)
|
||||
{
|
||||
dbgTrace(D_WAAP)
|
||||
dbgTrace(D_NGINX_EVENTS)
|
||||
<< " * \e[32mNGEN_EVENT: ResponseCodeTransactionEvent event code = "
|
||||
<< event.getResponseCode()
|
||||
<< "\e[0m";
|
||||
|
||||
if (!waapStateTable->hasState<Waf2Transaction>()) {
|
||||
dbgWarning(D_WAAP)
|
||||
dbgWarning(D_NGINX_EVENTS)
|
||||
<< " * \e[31mNGEN_EVENT: ResponseCodeTransactionEvent - failed to get waf2 transaction, "
|
||||
<< "state does not exist\e[0m";
|
||||
return drop_response;
|
||||
@ -385,7 +386,7 @@ WaapComponent::Impl::respond(const HttpResponseHeaderEvent &event)
|
||||
auto &header_name = event.getKey();
|
||||
auto &header_value = event.getValue();
|
||||
|
||||
dbgTrace(D_WAAP)
|
||||
dbgTrace(D_NGINX_EVENTS)
|
||||
<< " * \e[32mNGEN_EVENT: HttpHeaderResponse event: "
|
||||
<< string(header_name)
|
||||
<< ": "
|
||||
@ -393,7 +394,7 @@ WaapComponent::Impl::respond(const HttpResponseHeaderEvent &event)
|
||||
<< "\e[0m";
|
||||
|
||||
if (!waapStateTable->hasState<Waf2Transaction>()) {
|
||||
dbgWarning(D_WAAP)
|
||||
dbgWarning(D_NGINX_EVENTS)
|
||||
<< " * \e[31mNGEN_EVENT: HttpHeaderResponse - "
|
||||
<< "failed to get waf2 transaction, state does not exist\e[0m";
|
||||
return drop_response;
|
||||
@ -491,10 +492,10 @@ WaapComponent::Impl::respond(const HttpResponseHeaderEvent &event)
|
||||
EventVerdict
|
||||
WaapComponent::Impl::respond(const HttpResponseBodyEvent &event)
|
||||
{
|
||||
dbgTrace(D_WAAP) << " * \e[32mNGEN_EVENT: HttpBodyResponse data buffer event\e[0m";
|
||||
dbgTrace(D_NGINX_EVENTS) << " * \e[32mNGEN_EVENT: HttpBodyResponse data buffer event\e[0m";
|
||||
|
||||
if (!waapStateTable->hasState<Waf2Transaction>()) {
|
||||
dbgWarning(D_WAAP) <<
|
||||
dbgWarning(D_NGINX_EVENTS) <<
|
||||
" * \e[31mNGEN_EVENT: HttpBodyResponse - failed to get waf2 transaction, state does not exist\e[0m";
|
||||
return drop_response;
|
||||
}
|
||||
@ -591,7 +592,7 @@ EventVerdict
|
||||
WaapComponent::Impl::respond(const EndTransactionEvent &)
|
||||
{
|
||||
if (!waapStateTable->hasState<Waf2Transaction>()) {
|
||||
dbgWarning(D_WAAP) <<
|
||||
dbgWarning(D_NGINX_EVENTS) <<
|
||||
" * \e[31mNGEN_EVENT: endTransaction - failed to get waf2 transaction, state does not exist\e[0m";
|
||||
return EventVerdict(drop_response);
|
||||
}
|
||||
|
@ -52,6 +52,7 @@ const string HttpTransactionData::req_headers = "transaction_request_head
|
||||
const string HttpTransactionData::req_body = "transaction_request_body";
|
||||
const string HttpTransactionData::source_identifier = "sourceIdentifiers";
|
||||
const string HttpTransactionData::proxy_ip_ctx = "proxy_ip";
|
||||
const string HttpTransactionData::xff_vals_ctx = "xff_vals";
|
||||
|
||||
const CompressionType HttpTransactionData::default_response_content_encoding = CompressionType::NO_COMPRESSION;
|
||||
|
||||
|
@ -124,7 +124,7 @@ cptestGenerateHex(const std::vector<u_char> &vec, bool print_offsets)
|
||||
std::string
|
||||
cptestFnameInExeDir(const std::string &name)
|
||||
{
|
||||
auto const &bin_path = ::testing::internal::GetArgvs()[0]; // Internal ugly API.
|
||||
auto bin_path = ::testing::internal::GetArgvs()[0]; // Internal ugly API.
|
||||
auto slash = bin_path.rfind('/');
|
||||
if (slash==string::npos) {
|
||||
// bin_path contains no dir. So return name with no dir
|
||||
|
@ -36,7 +36,9 @@ I_Intelligence_IS_V2::queryIntelligence(
|
||||
query_request.setCursor(Intelligence_IS_V2::CursorState::DONE, "");
|
||||
} else {
|
||||
query_request.setCursor(Intelligence_IS_V2::CursorState::IN_PROGRESS, response->getCursor());
|
||||
if (ignore_in_progress) return genError("Query intelligence response with InProgress status");
|
||||
if (ignore_in_progress && response->getResponseStatus() == Intelligence_IS_V2::ResponseStatus::IN_PROGRESS) {
|
||||
return genError("Query intelligence response with InProgress status");
|
||||
}
|
||||
}
|
||||
|
||||
return serializable_response.getData();
|
||||
|
@ -61,27 +61,34 @@ DEFINE_FLAG(D_COMPONENT, D_ALL)
|
||||
DEFINE_FLAG(D_STREAMING, D_COMPONENT)
|
||||
DEFINE_FLAG(D_STREAMING_DATA, D_STREAMING)
|
||||
DEFINE_FLAG(D_CHECKSUM, D_STREAMING)
|
||||
DEFINE_FLAG(D_WAAP, D_COMPONENT)
|
||||
DEFINE_FLAG(D_OA_SCHEMA_UPDATER, D_WAAP)
|
||||
DEFINE_FLAG(D_WAAP_API, D_WAAP)
|
||||
DEFINE_FLAG(D_WAAP_AUTOMATION, D_WAAP)
|
||||
DEFINE_FLAG(D_WAAP_REGEX, D_WAAP)
|
||||
DEFINE_FLAG(D_WAAP_SAMPLE_PREPROCESS, D_WAAP)
|
||||
DEFINE_FLAG(D_WAAP_SAMPLE_SCAN, D_WAAP)
|
||||
DEFINE_FLAG(D_WAAP_EVASIONS, D_WAAP)
|
||||
DEFINE_FLAG(D_WAAP_ASSET_STATE, D_WAAP)
|
||||
DEFINE_FLAG(D_WAAP_CONFIDENCE_CALCULATOR, D_WAAP)
|
||||
DEFINE_FLAG(D_WAAP_REPUTATION, D_WAAP)
|
||||
DEFINE_FLAG(D_WAAP_SCORE_BUILDER, D_WAAP)
|
||||
DEFINE_FLAG(D_WAAP_ULIMITS, D_WAAP)
|
||||
DEFINE_FLAG(D_WAAP_SCANNER, D_WAAP)
|
||||
DEFINE_FLAG(D_WAAP_DEEP_PARSER, D_WAAP)
|
||||
DEFINE_FLAG(D_WAAP_BASE64, D_WAAP)
|
||||
DEFINE_FLAG(D_WAAP_JSON, D_WAAP)
|
||||
DEFINE_FLAG(D_WAAP_BOT_PROTECTION, D_WAAP)
|
||||
DEFINE_FLAG(D_WAAP_STREAMING_PARSING, D_WAAP)
|
||||
DEFINE_FLAG(D_WAAP_HEADERS, D_WAAP)
|
||||
DEFINE_FLAG(D_WAAP_PARSER, D_WAAP)
|
||||
|
||||
DEFINE_FLAG(D_WAAP_GLOBAL, D_COMPONENT)
|
||||
DEFINE_FLAG(D_WAAP, D_WAAP_GLOBAL)
|
||||
DEFINE_FLAG(D_NGINX_EVENTS, D_WAAP)
|
||||
DEFINE_FLAG(D_OA_SCHEMA_UPDATER, D_WAAP)
|
||||
DEFINE_FLAG(D_WAAP_API, D_WAAP)
|
||||
DEFINE_FLAG(D_WAAP_AUTOMATION, D_WAAP)
|
||||
DEFINE_FLAG(D_WAAP_REGEX, D_WAAP)
|
||||
DEFINE_FLAG(D_WAAP_SAMPLE_SCAN, D_WAAP)
|
||||
DEFINE_FLAG(D_WAAP_ASSET_STATE, D_WAAP)
|
||||
DEFINE_FLAG(D_WAAP_CONFIDENCE_CALCULATOR, D_WAAP)
|
||||
DEFINE_FLAG(D_WAAP_REPUTATION, D_WAAP)
|
||||
DEFINE_FLAG(D_WAAP_SCORE_BUILDER, D_WAAP)
|
||||
DEFINE_FLAG(D_WAAP_ULIMITS, D_WAAP)
|
||||
DEFINE_FLAG(D_WAAP_SCANNER, D_WAAP)
|
||||
DEFINE_FLAG(D_WAAP_DEEP_PARSER, D_WAAP)
|
||||
DEFINE_FLAG(D_WAAP_BASE64, D_WAAP)
|
||||
DEFINE_FLAG(D_WAAP_JSON, D_WAAP)
|
||||
DEFINE_FLAG(D_WAAP_BOT_PROTECTION, D_WAAP)
|
||||
DEFINE_FLAG(D_WAAP_STREAMING_PARSING, D_WAAP)
|
||||
DEFINE_FLAG(D_WAAP_HEADERS, D_WAAP)
|
||||
DEFINE_FLAG(D_WAAP_OVERRIDE, D_WAAP)
|
||||
|
||||
DEFINE_FLAG(D_WAAP_SAMPLE_HANDLING, D_WAAP_GLOBAL)
|
||||
DEFINE_FLAG(D_WAAP_SAMPLE_PREPROCESS, D_WAAP_SAMPLE_HANDLING)
|
||||
DEFINE_FLAG(D_WAAP_EVASIONS, D_WAAP_SAMPLE_HANDLING)
|
||||
|
||||
DEFINE_FLAG(D_WAAP_PARSER, D_WAAP_GLOBAL)
|
||||
DEFINE_FLAG(D_WAAP_PARSER_XML, D_WAAP_PARSER)
|
||||
DEFINE_FLAG(D_WAAP_PARSER_HTML, D_WAAP_PARSER)
|
||||
DEFINE_FLAG(D_WAAP_PARSER_BINARY, D_WAAP_PARSER)
|
||||
@ -98,7 +105,7 @@ DEFINE_FLAG(D_COMPONENT, D_ALL)
|
||||
DEFINE_FLAG(D_WAAP_PARSER_PERCENT, D_WAAP_PARSER)
|
||||
DEFINE_FLAG(D_WAAP_PARSER_PAIRS, D_WAAP_PARSER)
|
||||
DEFINE_FLAG(D_WAAP_PARSER_PDF, D_WAAP_PARSER)
|
||||
DEFINE_FLAG(D_WAAP_OVERRIDE, D_WAAP)
|
||||
DEFINE_FLAG(D_WAAP_PARSER_BINARY_FILE, D_WAAP_PARSER)
|
||||
|
||||
DEFINE_FLAG(D_IPS, D_COMPONENT)
|
||||
DEFINE_FLAG(D_FILE_UPLOAD, D_COMPONENT)
|
||||
|
@ -20,7 +20,6 @@
|
||||
#include "intelligence_invalidation.h"
|
||||
#include "intelligence_is_v2/intelligence_response.h"
|
||||
#include "intelligence_request.h"
|
||||
#include "intelligence_server.h"
|
||||
|
||||
using namespace std;
|
||||
using namespace chrono;
|
||||
@ -33,6 +32,8 @@ static const string primary_port_setting = "local intelligence server primary po
|
||||
static const string secondary_port_setting = "local intelligence server secondary port";
|
||||
static const string invalidation_uri = "/api/v2/intelligence/invalidation";
|
||||
static const string registration_uri = "/api/v2/intelligence/invalidation/register";
|
||||
static const string query_uri = "/api/v2/intelligence/assets/query";
|
||||
static const string queries_uri = "/api/v2/intelligence/assets/queries";
|
||||
|
||||
class I_InvalidationCallBack
|
||||
{
|
||||
@ -245,6 +246,51 @@ private:
|
||||
C2S_OPTIONAL_PARAM(string, invalidationType);
|
||||
};
|
||||
|
||||
class PagingController
|
||||
{
|
||||
public:
|
||||
PagingController()
|
||||
{
|
||||
uint request_overall_timeout_conf = getConfigurationWithDefault<uint>(
|
||||
20,
|
||||
"intelligence",
|
||||
"request overall timeout"
|
||||
);
|
||||
|
||||
timer = Singleton::Consume<I_TimeGet>::by<IntelligenceComponentV2>();
|
||||
mainloop = Singleton::Consume<I_MainLoop>::by<IntelligenceComponentV2>();
|
||||
|
||||
paging_timeout = timer->getMonotonicTime() + chrono::microseconds(request_overall_timeout_conf * 1000000);
|
||||
}
|
||||
|
||||
bool
|
||||
isMoreResponses(const Maybe<Response> &res, const IntelligenceRequest &req)
|
||||
{
|
||||
response = res;
|
||||
if (!res.ok() || req.getPagingStatus().ok()) return false;
|
||||
if (res->getResponseStatus() != ResponseStatus::IN_PROGRESS) return false;
|
||||
dbgTrace(D_INTELLIGENCE) << "Intelligence paging response is in progress";
|
||||
mainloop->yield(true);
|
||||
return hasTimeoutRemaining();
|
||||
}
|
||||
|
||||
Maybe<Response> getResponse() const { return response; }
|
||||
|
||||
private:
|
||||
bool
|
||||
hasTimeoutRemaining() const
|
||||
{
|
||||
if (timer->getMonotonicTime() < paging_timeout) return true;
|
||||
dbgDebug(D_INTELLIGENCE) << "Intelligence paging response reached timeout";
|
||||
return false;
|
||||
}
|
||||
|
||||
chrono::microseconds paging_timeout;
|
||||
Maybe<Response> response = genError("Paging response is uninitialized");
|
||||
I_TimeGet *timer;
|
||||
I_MainLoop *mainloop;
|
||||
};
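
PagingController turns the "request overall timeout" configuration (default 20) into an absolute deadline on the monotonic clock, and isMoreResponses() yields back to the mainloop between pages until either the response is no longer IN_PROGRESS or that deadline passes. The deadline arithmetic spelled out as a sketch; the duration_cast form is just an alternative spelling of the multiplication used above:

    // With the default of 20: deadline = now + 20 * 1,000,000 microseconds = now + 20 seconds.
    uint timeout_sec = 20;
    std::chrono::microseconds now = timer->getMonotonicTime();
    std::chrono::microseconds deadline =
        now + std::chrono::duration_cast<std::chrono::microseconds>(std::chrono::seconds(timeout_sec));
    bool expired = timer->getMonotonicTime() >= deadline;   // mirrors hasTimeoutRemaining()
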
|
||||
|
||||
class IntelligenceComponentV2::Impl
|
||||
:
|
||||
Singleton::Provide<I_Intelligence_IS_V2>::From<IntelligenceComponentV2>
|
||||
@ -255,13 +301,12 @@ public:
|
||||
init()
|
||||
{
|
||||
message = Singleton::Consume<I_Messaging>::by<IntelligenceComponentV2>();
|
||||
timer = Singleton::Consume<I_TimeGet>::by<IntelligenceComponentV2>();
|
||||
mainloop = Singleton::Consume<I_MainLoop>::by<IntelligenceComponentV2>();
|
||||
|
||||
mainloop->addRecurringRoutine(
|
||||
I_MainLoop::RoutineType::System,
|
||||
chrono::minutes(12),
|
||||
[this] () { sendReccurringInvalidationRegistration(); },
|
||||
[this] () { sendRecurringInvalidationRegistration(); },
|
||||
"Sending intelligence invalidation"
|
||||
);
|
||||
|
||||
@ -272,12 +317,7 @@ public:
|
||||
bool
|
||||
sendInvalidation(const Invalidation &invalidation) const override
|
||||
{
|
||||
if (hasLocalInvalidationSupport()) {
|
||||
return sendLocalInvalidation(invalidation);
|
||||
}
|
||||
else {
|
||||
return sendGlobalInvalidation(invalidation);
|
||||
}
|
||||
return sendIntelligence(invalidation).ok();
|
||||
}
|
||||
|
||||
Maybe<uint>
|
||||
@ -285,7 +325,7 @@ public:
|
||||
{
|
||||
if (!invalidation.isLegalInvalidation()) return genError("Attempting to register invalid invalidation");
|
||||
auto res = invalidations.emplace(invalidation, cb);
|
||||
sendReccurringInvalidationRegistration();
|
||||
sendRecurringInvalidationRegistration();
|
||||
return res;
|
||||
}
|
||||
|
||||
@ -312,8 +352,7 @@ public:
|
||||
return genError("Paging is activated and already finished. No need for more queries.");
|
||||
}
|
||||
}
|
||||
Sender intelligence_server(intelligence_req);
|
||||
auto response = intelligence_server.sendIntelligenceRequest();
|
||||
auto response = sendIntelligenceRequest(intelligence_req);
|
||||
return response;
|
||||
}
|
||||
|
||||
@ -326,86 +365,163 @@ public:
|
||||
|
||||
private:
|
||||
bool
|
||||
hasLocalInvalidationSupport() const
|
||||
hasLocalIntelligenceSupport() const
|
||||
{
|
||||
auto is_supported = getProfileAgentSettingWithDefault<bool>(false, "agent.config.useLocalIntelligence");
|
||||
if (getProfileAgentSettingWithDefault<bool>(false, "agent.config.useLocalIntelligence")) return true;
|
||||
|
||||
if (!is_supported) {
|
||||
is_supported = getProfileAgentSettingWithDefault<bool>(false, "agent.config.supportInvalidation");
|
||||
auto crowsec_env = getenv("CROWDSEC_ENABLED");
|
||||
bool crowdsec_enabled = crowsec_env != nullptr && string(crowsec_env) == "true";
|
||||
|
||||
if (getProfileAgentSettingWithDefault<bool>(crowdsec_enabled, "layer7AccessControl.crowdsec.enabled")) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (!is_supported) {
|
||||
is_supported = getConfigurationWithDefault(false, "intelligence", "support Invalidation");
|
||||
if (getProfileAgentSettingWithDefault<bool>(false, "agent.config.supportInvalidation")) return true;
|
||||
dbgTrace(D_INTELLIGENCE) << "Local intelligence not supported";
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
template <typename IntelligenceRest>
|
||||
Maybe<Response>
|
||||
sendIntelligence(const IntelligenceRest &rest_req) const
|
||||
{
|
||||
dbgFlow(D_INTELLIGENCE) << "Sending intelligence request";
|
||||
auto res = sendLocalIntelligenceToLocalServer(rest_req);
|
||||
if (res.ok()) return res;
|
||||
return sendGlobalIntelligence(rest_req);
|
||||
}
|
||||
|
||||
template <typename IntelligenceRest>
|
||||
Maybe<Response>
|
||||
sendLocalIntelligenceToLocalServer(const IntelligenceRest &rest_req) const
|
||||
{
|
||||
dbgFlow(D_INTELLIGENCE) << "Sending local intelligence request";
|
||||
if (!hasLocalIntelligenceSupport()) {
|
||||
dbgDebug(D_INTELLIGENCE) << "Local intelligence not supported";
|
||||
return genError("Local intelligence not configured");
|
||||
}
|
||||
|
||||
return is_supported;
|
||||
}
|
||||
|
||||
bool
|
||||
sendLocalInvalidation(const Invalidation &invalidation) const
|
||||
{
|
||||
dbgFlow(D_INTELLIGENCE) << "Starting local invalidation";
|
||||
return sendLocalInvalidationImpl(invalidation) || sendGlobalInvalidation(invalidation);
|
||||
}
|
||||
|
||||
bool
|
||||
sendLocalInvalidationImpl(const Invalidation &invalidation) const
|
||||
{
|
||||
auto server = getSetting<string>("intelligence", "local intelligence server ip");
|
||||
if (!server.ok()) {
|
||||
dbgWarning(D_INTELLIGENCE) << "Local intelligence server not configured";
|
||||
return false;
|
||||
dbgWarning(D_INTELLIGENCE) << "Local intelligence server ip not configured";
|
||||
return genError("Local intelligence server ip not configured");
|
||||
}
|
||||
|
||||
return
|
||||
sendLocalInvalidationImpl(invalidation, *server, primary_port_setting) ||
|
||||
sendLocalInvalidationImpl(invalidation, *server, secondary_port_setting);
|
||||
auto res = sendLocalIntelligenceToLocalServer(rest_req, *server, primary_port_setting);
|
||||
if (res.ok()) return res;
|
||||
return sendLocalIntelligenceToLocalServer(rest_req, *server, secondary_port_setting);
|
||||
}
|
||||
|
||||
bool
|
||||
sendLocalInvalidationImpl(const Invalidation &invalidation, const string &server, const string &port_setting) const
|
||||
template <typename IntelligenceRest>
|
||||
Maybe<Response>
|
||||
sendLocalIntelligenceToLocalServer(
|
||||
const IntelligenceRest &rest_req,
|
||||
const string &server,
|
||||
const string &port_setting
|
||||
) const
|
||||
{
|
||||
dbgFlow(D_INTELLIGENCE) << "Sending to local intelligence";
|
||||
|
||||
auto port = getSetting<uint>("intelligence", port_setting);
|
||||
if (!port.ok()) {
|
||||
dbgWarning(D_INTELLIGENCE) << "Could not resolve port for " << port_setting;
|
||||
return false;
|
||||
dbgWarning(D_INTELLIGENCE) << "Could not resolve port for " + port_setting;
|
||||
return genError("Could not resolve port for " + port_setting);
|
||||
}
|
||||
|
||||
dbgTrace(D_INTELLIGENCE)
|
||||
<< "Invalidation value: "
|
||||
<< (invalidation.genJson().ok() ? invalidation.genJson().unpack() : invalidation.genJson().getErr());
|
||||
<< "Intelligence rest request value: "
|
||||
<< (rest_req.genJson().ok() ? rest_req.genJson().unpack() : rest_req.genJson().getErr());
|
||||
|
||||
MessageMetadata invalidation_req_md(server, *port);
|
||||
invalidation_req_md.insertHeaders(getHTTPHeaders());
|
||||
invalidation_req_md.setConnectioFlag(MessageConnectionConfig::UNSECURE_CONN);
|
||||
return message->sendSyncMessageWithoutResponse(
|
||||
HTTPMethod::POST,
|
||||
invalidation_uri,
|
||||
invalidation,
|
||||
MessageCategory::INTELLIGENCE,
|
||||
invalidation_req_md
|
||||
);
|
||||
MessageMetadata req_md(server, *port);
|
||||
req_md.insertHeaders(getHTTPHeaders());
|
||||
req_md.setConnectioFlag(MessageConnectionConfig::UNSECURE_CONN);
|
||||
return sendIntelligenceRequestImpl(rest_req, req_md);
|
||||
}
|
||||
|
||||
bool
|
||||
sendGlobalInvalidation(const Invalidation &invalidation) const
|
||||
template <typename IntelligenceRest>
|
||||
Maybe<Response>
|
||||
sendGlobalIntelligence(const IntelligenceRest &rest_req) const
|
||||
{
|
||||
dbgFlow(D_INTELLIGENCE) << "Starting global invalidation";
|
||||
dbgFlow(D_INTELLIGENCE) << "Sending global intelligence request";
|
||||
|
||||
dbgTrace(D_INTELLIGENCE)
|
||||
<< "Invalidation value: "
|
||||
<< (invalidation.genJson().ok() ? invalidation.genJson().unpack() : invalidation.genJson().getErr());
|
||||
MessageMetadata global_invalidation_req_md;
|
||||
global_invalidation_req_md.insertHeaders(getHTTPHeaders());
|
||||
return message->sendSyncMessageWithoutResponse(
|
||||
<< "Intelligence rest value: "
|
||||
<< (rest_req.genJson().ok() ? rest_req.genJson().unpack() : rest_req.genJson().getErr());
|
||||
MessageMetadata global_req_md;
|
||||
global_req_md.insertHeaders(getHTTPHeaders());
|
||||
return sendIntelligenceRequestImpl(rest_req, global_req_md);
|
||||
}
|
||||
|
||||
Maybe<Response>
|
||||
createResponse(const string &response_body, const IntelligenceRequest &query_request) const
|
||||
{
|
||||
Response response(response_body, query_request.getSize(), query_request.isBulk());
|
||||
auto load_status = response.load();
|
||||
if (load_status.ok()) return response;
|
||||
dbgWarning(D_INTELLIGENCE) << "Could not create intelligence response.";
|
||||
return load_status.passErr();
|
||||
}
|
||||
|
||||
Maybe<Response>
|
||||
sendIntelligenceRequestImpl(const Invalidation &invalidation, const MessageMetadata &local_req_md) const
|
||||
{
|
||||
dbgFlow(D_INTELLIGENCE) << "Sending intelligence invalidation";
|
||||
auto res = message->sendSyncMessageWithoutResponse(
|
||||
HTTPMethod::POST,
|
||||
invalidation_uri,
|
||||
invalidation,
|
||||
MessageCategory::INTELLIGENCE,
|
||||
global_invalidation_req_md
|
||||
local_req_md
|
||||
);
|
||||
if (res) return Response();
|
||||
dbgWarning(D_INTELLIGENCE) << "Could not send local intelligence invalidation.";
|
||||
return genError("Could not send local intelligence invalidation");
|
||||
}
|
||||
|
||||
Maybe<Response>
|
||||
sendIntelligenceRequestImpl(
|
||||
const InvalidationRegistration::RestCall ®istration,
|
||||
const MessageMetadata ®istration_req_md
|
||||
) const
|
||||
{
|
||||
dbgFlow(D_INTELLIGENCE) << "Sending intelligence invalidation registration";
|
||||
auto res = message->sendSyncMessageWithoutResponse(
|
||||
HTTPMethod::POST,
|
||||
registration_uri,
|
||||
registration,
|
||||
MessageCategory::INTELLIGENCE,
|
||||
registration_req_md
|
||||
);
|
||||
if (res) return Response();
|
||||
dbgWarning(D_INTELLIGENCE) << "Could not send intelligence invalidation registration.";
|
||||
return genError("Could not send intelligence invalidation registration");
|
||||
}
|
||||
|
||||
Maybe<Response>
|
||||
sendIntelligenceRequestImpl(const IntelligenceRequest &query_request, const MessageMetadata &global_req_md) const
|
||||
{
|
||||
dbgFlow(D_INTELLIGENCE) << "Sending intelligence query";
|
||||
auto json_body = query_request.genJson();
|
||||
if (!json_body.ok()) return json_body.passErr();
|
||||
auto req_data = message->sendSyncMessage(
|
||||
HTTPMethod::POST,
|
||||
query_request.isBulk() ? queries_uri : query_uri,
|
||||
*json_body,
|
||||
MessageCategory::INTELLIGENCE,
|
||||
global_req_md
|
||||
);
|
||||
if (!req_data.ok()) {
|
||||
auto response_error = req_data.getErr().toString();
|
||||
dbgWarning(D_INTELLIGENCE)
|
||||
<< "Could not send intelligence query. "
|
||||
<< req_data.getErr().getBody()
|
||||
<< " "
|
||||
<< req_data.getErr().toString();
|
||||
return genError("Could not send intelligence query.");
|
||||
} else if (req_data->getHTTPStatusCode() != HTTPStatusCode::HTTP_OK) {
|
||||
dbgWarning(D_INTELLIGENCE) << "Invalid intelligence response: " << req_data->toString();
|
||||
return genError(req_data->toString());
|
||||
}
|
||||
|
||||
return createResponse(req_data->getBody(), query_request);
|
||||
}
|
||||
|
||||
map<string, string>
|
||||
@ -423,66 +539,26 @@ private:
|
||||
return headers;
|
||||
}
|
||||
|
||||
bool
|
||||
sendRegistration(const Invalidation &invalidation) const
|
||||
{
|
||||
InvalidationRegistration registration;
|
||||
registration.addInvalidation(invalidation);
|
||||
|
||||
return sendLocalRegistrationImpl(registration.genJson());
|
||||
}
|
||||
|
||||
bool
|
||||
sendLocalRegistrationImpl(const InvalidationRegistration::RestCall ®istration) const
|
||||
{
|
||||
auto server = getSetting<string>("intelligence", "local intelligence server ip");
|
||||
if (!server.ok()) {
|
||||
dbgWarning(D_INTELLIGENCE) << "Local intelligence server not configured";
|
||||
return false;
|
||||
}
|
||||
return
|
||||
sendLocalRegistrationImpl(registration, *server, primary_port_setting) ||
|
||||
sendLocalRegistrationImpl(registration, *server, secondary_port_setting);
|
||||
}
|
||||
|
||||
bool
|
||||
sendLocalRegistrationImpl(
|
||||
const InvalidationRegistration::RestCall ®istration,
|
||||
const string &server,
|
||||
const string &port_setting
|
||||
) const
|
||||
{
|
||||
dbgFlow(D_INTELLIGENCE) << "Sending to local registration";
|
||||
|
||||
auto port = getSetting<uint>("intelligence", port_setting);
|
||||
if (!port.ok()) {
|
||||
dbgWarning(D_INTELLIGENCE) << "Could not resolve port for " << port_setting;
|
||||
return false;
|
||||
}
|
||||
|
||||
dbgTrace(D_INTELLIGENCE) << "Invalidation value: " << registration.genJson();
|
||||
MessageMetadata registration_req_md(server, *port);
|
||||
registration_req_md.setConnectioFlag(MessageConnectionConfig::UNSECURE_CONN);
|
||||
return message->sendSyncMessageWithoutResponse(
|
||||
HTTPMethod::POST,
|
||||
registration_uri,
|
||||
registration,
|
||||
MessageCategory::INTELLIGENCE,
|
||||
registration_req_md
|
||||
);
|
||||
}
|
||||
|
||||
void
|
||||
sendReccurringInvalidationRegistration() const
|
||||
sendRecurringInvalidationRegistration() const
|
||||
{
|
||||
if (!hasLocalInvalidationSupport() || invalidations.empty()) return;
|
||||
if (invalidations.empty()) return;
|
||||
|
||||
sendLocalRegistrationImpl(invalidations.getRegistration());
|
||||
sendLocalIntelligenceToLocalServer(invalidations.getRegistration());
|
||||
}
|
||||
|
||||
Maybe<Response>
|
||||
sendIntelligenceRequest(const IntelligenceRequest& req) const
|
||||
{
|
||||
PagingController paging;
|
||||
|
||||
while (paging.isMoreResponses(sendIntelligence(req), req));
|
||||
|
||||
return paging.getResponse();
|
||||
}
|
||||
|
||||
InvalidationCallBack invalidations;
|
||||
I_Messaging *message = nullptr;
|
||||
I_TimeGet *timer = nullptr;
|
||||
I_MainLoop *mainloop = nullptr;
|
||||
};
|
||||
|
||||
|
@ -8,6 +8,7 @@
|
||||
#include "mock/mock_messaging.h"
|
||||
#include "mock/mock_rest_api.h"
|
||||
#include "mock/mock_time_get.h"
|
||||
#include "mock/mock_agent_details.h"
|
||||
#include "read_attribute_v2.h"
|
||||
#include "singleton.h"
|
||||
|
||||
@ -44,6 +45,9 @@ public:
|
||||
addRecurringRoutine(I_MainLoop::RoutineType::System, chrono::microseconds(720000000), _, _, _)
|
||||
).WillRepeatedly(Return(0));
|
||||
|
||||
EXPECT_CALL(mock_agent_details, getAgentId()).WillRepeatedly(Return("dummy_agent_id"));
|
||||
EXPECT_CALL(mock_agent_details, getTenantId()).WillRepeatedly(Return("dummy_tenant_id"));
|
||||
|
||||
EXPECT_CALL(
|
||||
mock_rest,
|
||||
mockRestCall(_, "new-invalidation/source/invalidation", _)
|
||||
@ -62,6 +66,7 @@ public:
|
||||
stringstream debug_output;
|
||||
StrictMock<MockMainLoop> mock_ml;
|
||||
StrictMock<MockRestApi> mock_rest;
|
||||
StrictMock<MockAgentDetails> mock_agent_details;
|
||||
NiceMock<MockTimeGet> mock_time;
|
||||
::Environment env;
|
||||
ConfigComponent conf;
|
||||
@ -411,6 +416,8 @@ TEST_F(IntelligenceComponentTestV2, fakeOnlineIntelligenceTest)
|
||||
"}\n"
|
||||
);
|
||||
|
||||
EXPECT_CALL(mock_rest, getListeningPort()).WillOnce(Return(8888));
|
||||
|
||||
MessageMetadata md;
|
||||
EXPECT_CALL(messaging_mock, sendSyncMessage(HTTPMethod::POST, _, _, MessageCategory::INTELLIGENCE, _)
|
||||
).WillOnce(DoAll(SaveArg<4>(&md), Return(HTTPResponse(HTTPStatusCode::HTTP_OK, response_str))));
|
||||
@ -485,6 +492,8 @@ TEST_F(IntelligenceComponentTestV2, fakeLocalIntelligenceTest)
|
||||
|
||||
MessageMetadata md;
|
||||
|
||||
EXPECT_CALL(mock_rest, getListeningPort()).WillOnce(Return(8888));
|
||||
|
||||
EXPECT_CALL(messaging_mock, sendSyncMessage(HTTPMethod::POST, _, _, MessageCategory::INTELLIGENCE, _)
|
||||
).WillOnce(DoAll(SaveArg<4>(&md), Return(HTTPResponse(HTTPStatusCode::HTTP_OK, response_str))));
|
||||
|
||||
@ -625,6 +634,8 @@ TEST_F(IntelligenceComponentTestV2, multiAssetsIntelligenceTest)
|
||||
"}\n"
|
||||
);
|
||||
|
||||
EXPECT_CALL(mock_rest, getListeningPort()).WillOnce(Return(8888));
|
||||
|
||||
EXPECT_CALL(messaging_mock, sendSyncMessage(HTTPMethod::POST, _, _, MessageCategory::INTELLIGENCE, _)
|
||||
).WillOnce(Return(HTTPResponse(HTTPStatusCode::HTTP_OK, response_str1)));
|
||||
|
||||
@ -754,6 +765,8 @@ TEST_F(IntelligenceComponentTestV2, inProgressQueryTest)
|
||||
"}\n"
|
||||
);
|
||||
|
||||
EXPECT_CALL(mock_rest, getListeningPort()).Times(2).WillRepeatedly(Return(8888));
|
||||
|
||||
EXPECT_CALL(messaging_mock, sendSyncMessage(HTTPMethod::POST, _, _, MessageCategory::INTELLIGENCE, _)
|
||||
).WillOnce(Return(HTTPResponse(HTTPStatusCode::HTTP_OK, in_progress_response_str))
|
||||
).WillOnce(Return(HTTPResponse(HTTPStatusCode::HTTP_OK, done_response_str)));
|
||||
@ -943,6 +956,8 @@ TEST_F(IntelligenceComponentTestV2, pagingQueryTest)
|
||||
"}\n"
|
||||
);
|
||||
|
||||
EXPECT_CALL(mock_rest, getListeningPort()).Times(3).WillRepeatedly(Return(8888));
|
||||
|
||||
EXPECT_CALL(messaging_mock, sendSyncMessage(HTTPMethod::POST, _, _, MessageCategory::INTELLIGENCE, _)
|
||||
).WillOnce(Return(HTTPResponse(HTTPStatusCode::HTTP_OK, paging_in_progress_response_str1)));
|
||||
|
||||
@ -1126,6 +1141,8 @@ TEST_F(IntelligenceComponentTestV2, bulkOnlineIntelligenceTest)
|
||||
"}\n"
|
||||
);
|
||||
Debug::setNewDefaultStdout(&cout);
|
||||
|
||||
EXPECT_CALL(mock_rest, getListeningPort()).WillOnce(Return(8888));
|
||||
EXPECT_CALL(messaging_mock, sendSyncMessage(HTTPMethod::POST, _, _, MessageCategory::INTELLIGENCE, _)
|
||||
).WillOnce(Return(HTTPResponse(HTTPStatusCode::HTTP_OK, response_str)));
|
||||
|
||||
@ -1289,6 +1306,8 @@ TEST_F(IntelligenceComponentTestV2, ignoreInProgressQueryTest_2)
|
||||
"}\n"
|
||||
);
|
||||
|
||||
EXPECT_CALL(mock_rest, getListeningPort()).Times(2).WillRepeatedly(Return(8888));
|
||||
|
||||
EXPECT_CALL(messaging_mock, sendSyncMessage(HTTPMethod::POST, _, _, MessageCategory::INTELLIGENCE, _))
|
||||
.WillOnce(Return(HTTPResponse(HTTPStatusCode::HTTP_OK, paging_in_progress_response_str)))
|
||||
.WillOnce(Return(HTTPResponse(HTTPStatusCode::HTTP_OK, paging_done_response_str)));
|
||||
|
@ -86,6 +86,12 @@ public:
|
||||
|
||||
void resume(RoutineID id) override;
|
||||
|
||||
void
|
||||
reloadConfigurationCb()
|
||||
{
|
||||
reload_configuration = true;
|
||||
}
|
||||
|
||||
void
|
||||
init()
|
||||
{
|
||||
@ -120,7 +126,7 @@ public:
|
||||
private:
|
||||
void reportStartupEvent();
|
||||
void stop(const RoutineMap::iterator &iter);
|
||||
uint32_t getCurrentTimeSlice(uint32_t current_stress);
|
||||
uint32_t getCurrentTimeSlice(uint32_t current_stress, int idle_time_slice, int busy_time_slice);
|
||||
RoutineID getNextID();
|
||||
|
||||
I_TimeGet *
|
||||
@ -130,7 +136,14 @@ private:
|
||||
return timer;
|
||||
}
|
||||
|
||||
I_Environment *
|
||||
getEnvironment() {
|
||||
if (env == nullptr) env = Singleton::Consume<I_Environment>::by<MainloopComponent>();
|
||||
return env;
|
||||
}
|
||||
|
||||
I_TimeGet *timer = nullptr;
|
||||
I_Environment *env = nullptr;
|
||||
|
||||
RoutineMap routines;
|
||||
RoutineMap::iterator curr_iter = routines.end();
|
||||
@ -144,6 +157,7 @@ private:
|
||||
chrono::seconds metric_report_interval;
|
||||
MainloopEvent mainloop_event;
|
||||
MainloopMetric mainloop_metric;
|
||||
bool reload_configuration = false;
|
||||
};
|
||||
|
||||
static I_MainLoop::RoutineType rounds[] = {
|
||||
@ -171,7 +185,7 @@ MainloopComponent::Impl::reportStartupEvent()
|
||||
chrono::microseconds curr_time = Singleton::Consume<I_TimeGet>::by<MainloopComponent>()->getWalltime();
|
||||
|
||||
ReportIS::AudienceTeam audience_team = ReportIS::AudienceTeam::NONE;
|
||||
auto i_env = Singleton::Consume<I_Environment>::by<MainloopComponent>();
|
||||
auto i_env = getEnvironment();
|
||||
auto team = i_env->get<ReportIS::AudienceTeam>("Audience Team");
|
||||
if (team.ok()) audience_team = *team;
|
||||
|
||||
@ -222,18 +236,27 @@ MainloopComponent::Impl::run()
|
||||
const chrono::seconds one_sec(1);
|
||||
|
||||
string service_name = "Unnamed Nano Service";
|
||||
auto name = Singleton::Consume<I_Environment>::by<MainloopComponent>()->get<string>("Service Name");
|
||||
auto name = getEnvironment()->get<string>("Service Name");
|
||||
if (name.ok()) service_name = *name;
|
||||
|
||||
string error_prefix = "Service " + service_name + " crashed. Error details: ";
|
||||
string error;
|
||||
int idle_time_slice = getConfigurationWithDefault<int>(1500, "Mainloop", "Idle routine time slice");
|
||||
int busy_time_slice = getConfigurationWithDefault<int>(100, "Mainloop", "Busy routine time slice");
|
||||
int exceed_warning_slice = getConfigurationWithDefault(100, "Mainloop", "Exceed Warning");
|
||||
|
||||
while (has_primary_routines) {
|
||||
mainloop_event.setStressValue(current_stress);
|
||||
int time_slice_to_use = getCurrentTimeSlice(current_stress);
|
||||
if (reload_configuration) {
|
||||
idle_time_slice = getConfigurationWithDefault<int>(1500, "Mainloop", "Idle routine time slice");
|
||||
busy_time_slice = getConfigurationWithDefault<int>(100, "Mainloop", "Busy routine time slice");
|
||||
exceed_warning_slice = getConfigurationWithDefault(100, "Mainloop", "Exceed Warning");
|
||||
reload_configuration = false;
|
||||
}
|
||||
int time_slice_to_use = getCurrentTimeSlice(current_stress, idle_time_slice, busy_time_slice);
|
||||
mainloop_event.setTimeSlice(time_slice_to_use);
|
||||
chrono::microseconds basic_time_slice(time_slice_to_use);
|
||||
chrono::milliseconds large_exceeding(getConfigurationWithDefault(100u, "Mainloop", "Exceed Warning"));
|
||||
chrono::milliseconds large_exceeding(exceed_warning_slice);
|
||||
auto start_time = getTimer()->getMonotonicTime();
|
||||
has_primary_routines = false;
|
||||
|
||||
@ -351,9 +374,9 @@ MainloopComponent::Impl::addOneTimeRoutine(
|
||||
auto id = getNextID();
|
||||
|
||||
string routine_name = _routine_name.empty() ? string("Generic routine, id: " + to_string(id)) : _routine_name;
|
||||
auto env = Singleton::Consume<I_Environment>::by<MainloopComponent>()->createEnvironment();
|
||||
auto env = getEnvironment()->createEnvironment();
|
||||
Routine func_wrapper = [this, env, func, routine_name] () mutable {
|
||||
Singleton::Consume<I_Environment>::by<MainloopComponent>()->loadEnvironment(move(env));
|
||||
getEnvironment()->loadEnvironment(move(env));
|
||||
|
||||
try {
|
||||
if (this->do_stop) return;
|
||||
@ -446,9 +469,9 @@ MainloopComponent::Impl::yield(bool force)
|
||||
if (do_stop) throw MainloopStop();
|
||||
if (!force && getTimer()->getMonotonicTime() < stop_time) return;
|
||||
|
||||
auto env = Singleton::Consume<I_Environment>::by<MainloopComponent>()->saveEnvironment();
|
||||
auto env = getEnvironment()->saveEnvironment();
|
||||
curr_iter->second.yield();
|
||||
Singleton::Consume<I_Environment>::by<MainloopComponent>()->loadEnvironment(move(env));
|
||||
getEnvironment()->loadEnvironment(move(env));
|
||||
if (do_stop) throw MainloopStop();
|
||||
}
|
||||
|
||||
@ -533,7 +556,7 @@ MainloopComponent::Impl::stop(const RoutineMap::iterator &iter)
|
||||
if (iter->second.isActive()) {
|
||||
dbgDebug(D_MAINLOOP) << "Stoping the routine " << iter->first;
|
||||
do_stop = true;
|
||||
auto env = Singleton::Consume<I_Environment>::by<MainloopComponent>()->saveEnvironment();
|
||||
auto env = getEnvironment()->saveEnvironment();
|
||||
RoutineMap::iterator save_routine = curr_iter;
|
||||
curr_iter = iter;
|
||||
// We are going to let the routine run one last time, so it can throw an exception which will cause the stack
|
||||
@ -560,10 +583,13 @@ MainloopComponent::Impl::getNextID()
|
||||
void
|
||||
MainloopComponent::Impl::updateCurrentStress(bool is_busy)
|
||||
{
|
||||
const int stress_factor = 6; // calculated by trial and error, should be revisited
|
||||
const int ramp_stress_factor = 10; // calculated by trial and error, should be revisited
|
||||
const int steady_stress_factor = 6; // calculated by trial and error, should be revisited
|
||||
if (is_busy) {
|
||||
if (current_stress < 95) {
|
||||
current_stress += stress_factor;
|
||||
if (current_stress < 50) {
|
||||
current_stress += ramp_stress_factor;
|
||||
} else if (current_stress < 95) {
|
||||
current_stress += steady_stress_factor;
|
||||
} else {
|
||||
current_stress = 100;
|
||||
}
|
||||
@ -573,10 +599,8 @@ MainloopComponent::Impl::updateCurrentStress(bool is_busy)
|
||||
}
|
||||
|
||||
uint32_t
|
||||
MainloopComponent::Impl::getCurrentTimeSlice(uint32_t current_stress)
|
||||
MainloopComponent::Impl::getCurrentTimeSlice(uint32_t current_stress, int idle_time_slice, int busy_time_slice)
|
||||
{
|
||||
int idle_time_slice = getConfigurationWithDefault<int>(1000, "Mainloop", "Idle routine time slice");
|
||||
int busy_time_slice = getConfigurationWithDefault<int>(1, "Mainloop", "Busy routine time slice");
|
||||
return idle_time_slice - (((idle_time_slice - busy_time_slice) * current_stress) / 100);
|
||||
}
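
getCurrentTimeSlice() now takes the idle and busy slice values as parameters (refreshed only when the configuration reload callback fires) and linearly interpolates between them by the stress percentage. With the defaults used in run() (1500 µs idle, 100 µs busy) the interpolation works out as follows; this is a worked example of the existing formula, not new behavior:

    // slice = idle - ((idle - busy) * stress) / 100, with idle = 1500 and busy = 100:
    //   stress =   0  ->  1500 - (1400 *   0) / 100 = 1500 microseconds
    //   stress =  50  ->  1500 - (1400 *  50) / 100 =  800 microseconds
    //   stress = 100  ->  1500 - (1400 * 100) / 100 =  100 microseconds
    int slice_at(uint32_t stress) { return 1500 - ((1500 - 100) * static_cast<int>(stress)) / 100; }
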
|
||||
|
||||
@ -598,4 +622,5 @@ MainloopComponent::preload()
|
||||
registerExpectedConfiguration<int>("Mainloop", "Busy routine time slice");
|
||||
registerExpectedConfiguration<uint>("Mainloop", "metric reporting interval");
|
||||
registerExpectedConfiguration<uint>("Mainloop", "Exceed Warning");
|
||||
registerConfigLoadCb([&] () { pimpl->reloadConfigurationCb(); });
|
||||
}
|
||||
|
@ -131,12 +131,12 @@ TEST_F(MainloopTest, basic_metrics_check)
|
||||
"{\n"
|
||||
" \"Metric\": \"Mainloop sleep time data\",\n"
|
||||
" \"Reporting interval\": 600,\n"
|
||||
" \"mainloopMaxTimeSliceSample\": 1000,\n"
|
||||
" \"mainloopAvgTimeSliceSample\": 1000.0,\n"
|
||||
" \"mainloopLastTimeSliceSample\": 1000,\n"
|
||||
" \"mainloopMaxSleepTimeSample\": 1000,\n"
|
||||
" \"mainloopAvgSleepTimeSample\": 1000.0,\n"
|
||||
" \"mainloopLastSleepTimeSample\": 1000,\n"
|
||||
" \"mainloopMaxTimeSliceSample\": 1500,\n"
|
||||
" \"mainloopAvgTimeSliceSample\": 1500.0,\n"
|
||||
" \"mainloopLastTimeSliceSample\": 1500,\n"
|
||||
" \"mainloopMaxSleepTimeSample\": 1500,\n"
|
||||
" \"mainloopAvgSleepTimeSample\": 1500.0,\n"
|
||||
" \"mainloopLastSleepTimeSample\": 1500,\n"
|
||||
" \"mainloopMaxStressValueSample\": 0,\n"
|
||||
" \"mainloopAvgStressValueSample\": 0.0,\n"
|
||||
" \"mainloopLastStressValueSample\": 0\n"
|
||||
|
@ -91,8 +91,9 @@ public:
|
||||
}
|
||||
|
||||
char buffer[128];
|
||||
if (fgets(buffer, sizeof(buffer)-1, pipe) != nullptr) result += buffer;
|
||||
if (do_yield && mainloop != nullptr) mainloop->yield();
|
||||
bool did_get = fgets(buffer, sizeof(buffer)-1, pipe) != nullptr;
|
||||
if (did_get) result += buffer;
|
||||
if (do_yield && mainloop != nullptr) mainloop->yield(!did_get);
|
||||
}
|
||||
|
||||
auto code = pclose(pipe) / 256;
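
The loop above now yields on every iteration but passes force = !did_get, so the routine gives up its time slice only when fgets() returned no data; while output is flowing it keeps reading. A standalone sketch of the same pattern, assuming a cooperative mainloop where yield(true) always suspends and yield(false) suspends only when the slice is exhausted (the command string is a placeholder):

    // Cooperative popen reader (sketch of the pattern, not the test helper itself).
    FILE *pipe = popen("some-command", "r");   // placeholder command
    std::string result;
    if (pipe != nullptr) {
        char buffer[128];
        while (!feof(pipe)) {
            bool did_get = fgets(buffer, sizeof(buffer) - 1, pipe) != nullptr;
            if (did_get) result += buffer;
            mainloop->yield(!did_get);         // force a context switch only when no data was read
        }
        pclose(pipe);
    }
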
|
||||
|
@ -8,7 +8,7 @@ install(FILES orchestration_package.sh DESTINATION ./orchestration/ PERMISSIONS
|
||||
install(FILES cp-agent-info.sh DESTINATION ./orchestration/ PERMISSIONS OWNER_READ OWNER_WRITE OWNER_EXECUTE GROUP_READ GROUP_EXECUTE WORLD_READ)
|
||||
install(FILES k8s-check-update-listener.sh DESTINATION ./orchestration/ PERMISSIONS OWNER_READ OWNER_WRITE OWNER_EXECUTE GROUP_READ GROUP_EXECUTE WORLD_READ)
|
||||
install(FILES k8s-check-update-trigger.sh DESTINATION ./orchestration/ PERMISSIONS OWNER_READ OWNER_WRITE OWNER_EXECUTE GROUP_READ GROUP_EXECUTE WORLD_READ)
|
||||
|
||||
install(FILES get-cloud-metadata.sh DESTINATION ./orchestration/scripts/ PERMISSIONS OWNER_READ OWNER_WRITE OWNER_EXECUTE GROUP_READ GROUP_EXECUTE WORLD_READ)
|
||||
|
||||
install(FILES cp-agent-uninstall.sh DESTINATION ./orchestration/ PERMISSIONS OWNER_READ OWNER_WRITE OWNER_EXECUTE GROUP_READ GROUP_EXECUTE WORLD_READ)
|
||||
install(FILES cp-nano-cli.sh DESTINATION ./orchestration/ PERMISSIONS OWNER_READ OWNER_WRITE OWNER_EXECUTE GROUP_READ GROUP_EXECUTE WORLD_READ)
|
||||
|
nodes/orchestration/package/get-cloud-metadata.sh (new file, 2 lines)
@ -0,0 +1,2 @@
|
||||
#!/bin/sh
|
||||
|
@ -52,7 +52,7 @@ log-triggers:
|
||||
url-path: false
|
||||
url-query: false
|
||||
log-destination:
|
||||
cloud: false
|
||||
cloud: true
|
||||
stdout:
|
||||
format: json
|
||||
|
||||
|
@ -31,6 +31,7 @@ ORCHESTRATION_EXE_SOURCE_PATH="./bin/orchestration_comp"
|
||||
NGINX_METADAT_EXTRACTOR_PATH="./scripts/cp-nano-makefile-generator.sh"
|
||||
ORCHESTRATION_FILE_NAME="cp-nano-orchestration"
|
||||
NGINX_METADDATA_EXTRACTOR_NAME="cp-nano-makefile-generator.sh"
|
||||
GET_CLOUD_METADATA_PATH="get-cloud-metadata.sh"
|
||||
AGENT_UNINSTALL="cp-agent-uninstall.sh"
|
||||
ORCHESTRATION_NAME="orchestration"
|
||||
CP_AGENT_INFO_NAME="cp-agent-info"
|
||||
@ -197,7 +198,7 @@ save_local_policy_config()
|
||||
|
||||
[ -f /etc/environment ] && . "/etc/environment"
|
||||
if [ -n "${CP_ENV_FILESYSTEM}" ] ; then
|
||||
FILESYSTEM_PATH=$CP_ENV_FILESYSTEM
|
||||
export FILESYSTEM_PATH=$CP_ENV_FILESYSTEM
|
||||
fi
|
||||
if [ -n "${CP_ENV_LOG_FILE}" ] ; then
|
||||
LOG_FILE_PATH=$CP_ENV_LOG_FILE
|
||||
@ -294,15 +295,15 @@ while true; do
|
||||
last_char=${var##${var%%?}}
|
||||
echo $var
|
||||
if [ "$last_char" = "/" ]; then
|
||||
FILESYSTEM_PATH=${var%?}
|
||||
export FILESYSTEM_PATH=${var%?}
|
||||
else
|
||||
FILESYSTEM_PATH=$1
|
||||
export FILESYSTEM_PATH=$1
|
||||
fi
|
||||
echo "Filesystem paths: ${FILESYSTEM_PATH}"
|
||||
elif [ "$1" = "--vs_id" ]; then
|
||||
shift
|
||||
VS_ID=$1
|
||||
FILESYSTEM_PATH="/etc/cp/vs${VS_ID}"
|
||||
export FILESYSTEM_PATH="/etc/cp/vs${VS_ID}"
|
||||
NANO_AGENT_SERVICE_NAME="nano_agent_${VS_ID}"
|
||||
NANO_AGENT_SERVICE_FILE="${NANO_AGENT_SERVICE_NAME}.service"
|
||||
VS_LIB_SUB_FOLDER="/vs${VS_ID}"
|
||||
@ -350,7 +351,7 @@ if [ -z "$VS_ID" ]; then
|
||||
vs_folder=$(dirname "$packages_folder")
|
||||
VS_ID=`echo ${vs_folder} | grep -oE '/etc/cp/vs[0-9]+$' | grep -oE '[0-9]+$'`
|
||||
if [ -n "$VS_ID" ]; then
|
||||
FILESYSTEM_PATH="/etc/cp/vs${VS_ID}"
|
||||
export FILESYSTEM_PATH="/etc/cp/vs${VS_ID}"
|
||||
NANO_AGENT_SERVICE_NAME="nano_agent_${VS_ID}"
|
||||
NANO_AGENT_SERVICE_FILE="${NANO_AGENT_SERVICE_NAME}.service"
|
||||
VS_LIB_SUB_FOLDER="/vs${VS_ID}"
|
||||
@ -828,6 +829,13 @@ copy_nginx_metadata_script()
|
||||
cp_exec "chmod +x ${FILESYSTEM_PATH}/${SCRIPTS_PATH}/${NGINX_METADDATA_EXTRACTOR_NAME}"
|
||||
}
|
||||
|
||||
copy_and_run_cloud_metadata_script()
|
||||
{
|
||||
cp_copy "${SCRIPTS_PATH}/$GET_CLOUD_METADATA_PATH" ${FILESYSTEM_PATH}/${SCRIPTS_PATH}/${GET_CLOUD_METADATA_PATH}
|
||||
cp_exec "chmod +x ${FILESYSTEM_PATH}/${SCRIPTS_PATH}/${GET_CLOUD_METADATA_PATH}"
|
||||
cp_exec "${FILESYSTEM_PATH}/${SCRIPTS_PATH}/${GET_CLOUD_METADATA_PATH}"
|
||||
}
|
||||
|
||||
install_public_key()
|
||||
{
|
||||
return
|
||||
@ -887,6 +895,13 @@ uninstall_messaging_proxy_if_needed()
|
||||
fi
|
||||
}
|
||||
|
||||
get_status_content()
|
||||
{
|
||||
gsc_temp_old_status=$(sed -e 's/{\|}\|,\|"//g' -e '/^\s*$/d' -e 's/^ //' ${FILESYSTEM_PATH}/conf/orchestration_status.json)
|
||||
|
||||
echo ${gsc_temp_old_status}
|
||||
}
|
||||
|
||||
install_orchestration()
|
||||
{
|
||||
INSTALLATION_TIME=$(date)
|
||||
@ -991,6 +1006,7 @@ install_orchestration()
|
||||
copy_orchestration_executable
|
||||
copy_k8s_executable
|
||||
copy_nginx_metadata_script
|
||||
copy_and_run_cloud_metadata_script
|
||||
|
||||
install_watchdog "--upgrade"
|
||||
|
||||
@ -1078,6 +1094,7 @@ install_orchestration()
|
||||
copy_orchestration_executable
|
||||
copy_k8s_executable
|
||||
copy_nginx_metadata_script
|
||||
copy_and_run_cloud_metadata_script
|
||||
|
||||
install_cp_nano_ctl
|
||||
|
||||
@ -1171,7 +1188,7 @@ install_orchestration()
|
||||
time_sleep=2
|
||||
time_out=60
|
||||
cp_print "Registering open-appsec Nano Agent to Fog.." ${FORCE_STDOUT}
|
||||
until $USR_SBIN_PATH/${CP_NANO_CTL} -s 2> /dev/null | grep -q "Registration status: Succeeded"; do
|
||||
until get_status_content | grep -q "Registration status: Succeeded"; do
|
||||
time_out=$(( time_out - time_sleep ))
|
||||
if [ $time_out -le 0 ]; then
|
||||
cp_print "open-appsec Nano Agent registration failed. Failed to register to Fog: $var_fog_address" ${FORCE_STDOUT}
|