Mirror of https://github.com/openappsec/openappsec.git (synced 2025-11-16 09:21:54 +03:00)

Compare commits

91 Commits (SHA1):

- 7b3320ce10
- 25cc2d66e7
- 66e2112afb
- ba7c9afd52
- 2aa0993d7e
- 0cdfc9df90
- 010814d656
- 3779dd360d
- 0e7dc2133d
- c9095acbef
- e47e29321d
- 25a66e77df
- 6eea40f165
- cee6ed511a
- 4f145fd74f
- 3fe5c5b36f
- 7542a85ddb
- fae4534e5c
- 923a8a804b
- b1731237d1
- 3d3d6e73b9
- 3f80127ec5
- abdee954bb
- 9a516899e8
- 4fd2aa6c6b
- 0db666ac4f
- 493d9a6627
- 6db87fc7fe
- d2b9bc8c9c
- 886a5befe1
- 1f2502f9e4
- 9e4c5014ce
- 024423cce9
- dc4b546bd1
- a86aca13b4
- 87b34590d4
- e0198a1a95
- d024ad5845
- 46d42c8fa3
- f6c36f3363
- 63541a4c3c
- d14fa7a468
- ae0de5bf14
- d39919f348
- 4f215e1409
- f05b5f8cee
- 949b656b13
- bbe293d215
- 35b2df729f
- 7600b6218f
- 20e8e65e14
- 414130a789
- 9d704455e8
- 602442fed4
- 4e9a90db01
- 20f92afbc2
- ee7adc37d0
- c0b3e9c0d0
- f1f4b13327
- 4354a98d37
- 09fa11516c
- 446b043128
- 91bcadf930
- 0824cf4b23
- 108abdb35e
- 64ebf013eb
- 2c91793f08
- 72a263d25a
- 4e14ff9a58
- 1fb28e14d6
- e38bb9525c
- 63b8bb22c2
- 11c97330f5
- e56fb0bc1a
- 4571d563f4
- 02c1db01f6
- c557affd9b
- 8889c3c054
- f67eff87bc
- fa6a2e4233
- b7e2efbf7e
- 96ce290e5f
- de8e2d9970
- 0048708af1
- 4fe0f44e88
- 5f139d13d7
- 919d775a73
- ac8e353598
- 0663f20691
- 2dda6231f6
- 1c1f0b7e29
@@ -74,7 +74,7 @@ For Linux, if you’ve built your own package use the following commands:

```bash
$ install-cp-nano-agent.sh --install --hybrid_mode
-$ install-cp-nano-service-http-transaction-handler.sh –install
+$ install-cp-nano-service-http-transaction-handler.sh --install
$ install-cp-nano-attachment-registration-manager.sh --install
```
You can add the ```--token <token>``` and ```--email <email address>``` options to the first command, to get a token follow [documentation](https://docs.openappsec.io/getting-started/using-the-web-ui-saas/connect-deployed-agents-to-saas-management-k8s-and-linux).
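For reference, a sketch of the same install sequence with the optional token and email flags mentioned above (the token and email values are placeholders):

```bash
# Placeholder token and email; obtain a real token via the linked documentation.
$ install-cp-nano-agent.sh --install --hybrid_mode --token <your-token> --email admin@example.com
$ install-cp-nano-service-http-transaction-handler.sh --install
$ install-cp-nano-attachment-registration-manager.sh --install
```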
@@ -173,6 +173,12 @@ getReqBodySizeTrigger()
    return conf_data.getNumericalValue("body_size_trigger");
}

+unsigned int
+getRemoveResServerHeader()
+{
+    return conf_data.getNumericalValue("remove_server_header");
+}
+
int
isIPAddress(c_str ip_str)
{
@@ -66,7 +66,8 @@ TEST_F(HttpAttachmentUtilTest, GetValidAttachmentConfiguration)
        "\"static_resources_path\": \"" + static_resources_path + "\",\n"
        "\"min_retries_for_verdict\": 1,\n"
        "\"max_retries_for_verdict\": 3,\n"
-       "\"body_size_trigger\": 777\n"
+       "\"body_size_trigger\": 777,\n"
+       "\"remove_server_header\": 1\n"
        "}\n";
    ofstream valid_configuration_file(attachment_configuration_file_name);
    valid_configuration_file << valid_configuration;

@@ -95,6 +96,7 @@ TEST_F(HttpAttachmentUtilTest, GetValidAttachmentConfiguration)
    EXPECT_EQ(getReqBodySizeTrigger(), 777u);
    EXPECT_EQ(getWaitingForVerdictThreadTimeout(), 75u);
    EXPECT_EQ(getInspectionMode(), ngx_http_inspection_mode::BLOCKING_THREAD);
+   EXPECT_EQ(getRemoveResServerHeader(), 1u);

    EXPECT_EQ(isDebugContext("1.2.3.4", "5.6.7.8", 80, "GET", "test", "/abc"), 1);
    EXPECT_EQ(isDebugContext("1.2.3.9", "5.6.7.8", 80, "GET", "test", "/abc"), 0);
@@ -7,3 +7,4 @@ add_subdirectory(pending_key)
add_subdirectory(utils)
add_subdirectory(attachment-intakers)
add_subdirectory(security_apps)
+add_subdirectory(nginx_message_reader)
@@ -1135,7 +1135,11 @@ private:
            "webUserResponse"
        );

+       bool remove_event_id_param =
+           getProfileAgentSettingWithDefault<string>("false", "nginxAttachment.removeRedirectEventId") == "true";
+
        string uuid;
+       string redirectUrl;
        if (i_transaction_table->hasState<NginxAttachmentOpaque>()) {
            NginxAttachmentOpaque &opaque = i_transaction_table->getState<NginxAttachmentOpaque>();
            uuid = opaque.getSessionUUID();

@@ -1145,7 +1149,12 @@ private:
        if (web_trigger_conf.getDetailsLevel() == "Redirect") {
            web_response_data.response_data.redirect_data.redirect_location_size =
                web_trigger_conf.getRedirectURL().size();
-           web_response_data.response_data.redirect_data.add_event_id = web_trigger_conf.getAddEventId() ? 1 : 0;
+           bool add_event = web_trigger_conf.getAddEventId();
+           if (add_event && !remove_event_id_param) {
+               web_response_data.response_data.redirect_data.redirect_location_size +=
+                   strlen("?event_id=") + uuid.size();
+           }
+           web_response_data.response_data.redirect_data.add_event_id = add_event ? 1 : 0;
            web_response_data.web_repsonse_type = static_cast<uint8_t>(ngx_web_response_type_e::REDIRECT_WEB_RESPONSE);
        } else {
            web_response_data.response_data.custom_response_data.title_size =

@@ -1159,8 +1168,13 @@ private:
        verdict_data_sizes.push_back(sizeof(ngx_http_cp_web_response_data_t));

        if (web_trigger_conf.getDetailsLevel() == "Redirect") {
-           verdict_data.push_back(reinterpret_cast<const char *>(web_trigger_conf.getRedirectURL().data()));
-           verdict_data_sizes.push_back(web_trigger_conf.getRedirectURL().size());
+           redirectUrl = web_trigger_conf.getRedirectURL();
+           if (!remove_event_id_param && web_trigger_conf.getAddEventId()) {
+               redirectUrl += "?event-id=" + uuid;
+           }
+
+           verdict_data.push_back(reinterpret_cast<const char *>(redirectUrl.data()));
+           verdict_data_sizes.push_back(redirectUrl.size());
        } else {
            verdict_data.push_back(reinterpret_cast<const char *>(web_trigger_conf.getResponseTitle().data()));
            verdict_data_sizes.push_back(web_trigger_conf.getResponseTitle().size());
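As a rough illustration of the new `nginxAttachment.removeRedirectEventId` profile agent setting (the hostnames and UUID below are hypothetical placeholders, not values from the repository):

```bash
# Hypothetical hosts; only the presence of the event-id parameter matters here.
curl -sI https://protected.example.com/blocked-request | grep -i '^Location:'
# Default (setting unset or "false") with "add event ID" enabled in the web trigger:
#   Location: https://redirect.example.com/?event-id=<session-uuid>
# With nginxAttachment.removeRedirectEventId set to "true":
#   Location: https://redirect.example.com/
```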
@@ -203,6 +203,13 @@ HttpAttachmentConfig::setFailOpenTimeout()
        "NGINX wait thread timeout msec"
    ));

+   conf_data.setNumericalValue("remove_server_header", getAttachmentConf<uint>(
+       0,
+       "agent.removeServerHeader.nginxModule",
+       "HTTP manager",
+       "Response server header removal"
+   ));
+
    uint inspection_mode = getAttachmentConf<uint>(
        static_cast<uint>(ngx_http_inspection_mode_e::NON_BLOCKING_THREAD),
        "agent.inspectionMode.nginxModule",
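A quick, assumed way to observe the new `remove_server_header` option from a client (the hostname is a placeholder; the exact header strings depend on your NGINX build):

```bash
# Placeholder host. With remove_server_header left at its default of 0 the upstream
# "Server:" response header is passed through; with the flag set to 1 the attachment
# is expected to strip it from responses.
curl -sI https://protected.example.com/ | grep -i '^Server:'
```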
@@ -282,7 +282,7 @@ isIpTrusted(const string &value, const vector<CIDRSData> &cidr_values)
}

Maybe<string>
-UsersAllIdentifiersConfig::parseXForwardedFor(const string &str) const
+UsersAllIdentifiersConfig::parseXForwardedFor(const string &str, ExtractType type) const
{
    vector<string> header_values = split(str);

@@ -291,12 +291,23 @@ UsersAllIdentifiersConfig::parseXForwardedFor(const string &str) const
    vector<string> xff_values = getHeaderValuesFromConfig("x-forwarded-for");
    vector<CIDRSData> cidr_values(xff_values.begin(), xff_values.end());

-   for (const string &value : header_values) {
-       if (!IPAddr::createIPAddr(value).ok()) {
-           dbgWarning(D_NGINX_ATTACHMENT_PARSER) << "Invalid IP address found in the xff header IPs list: " << value;
+   for (auto it = header_values.rbegin(); it != header_values.rend() - 1; ++it) {
+       if (!IPAddr::createIPAddr(*it).ok()) {
+           dbgWarning(D_NGINX_ATTACHMENT_PARSER) << "Invalid IP address found in the xff header IPs list: " << *it;
            return genError("Invalid IP address");
        }
-       if (!isIpTrusted(value, cidr_values)) return genError("Untrusted Ip found");
+       if (type == ExtractType::PROXYIP) continue;
+       if (!isIpTrusted(*it, cidr_values)) {
+           dbgDebug(D_NGINX_ATTACHMENT_PARSER) << "Found untrusted IP in the xff header IPs list: " << *it;
+           return *it;
+       }
    }

+   if (!IPAddr::createIPAddr(header_values[0]).ok()) {
+       dbgWarning(D_NGINX_ATTACHMENT_PARSER)
+           << "Invalid IP address found in the xff header IPs list: "
+           << header_values[0];
+       return genError("Invalid IP address");
+   }
+
    return header_values[0];
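For intuition, a sketch of how the reworked X-Forwarded-For parsing treats a forwarded request; the addresses are documentation-range placeholders and the trusted ranges are assumed to come from the configured `x-forwarded-for` values:

```bash
# TEST-NET placeholder addresses. Assume 198.51.100.0/24 is configured as a trusted
# proxy range and 203.0.113.7 is the real client.
curl -H 'X-Forwarded-For: 203.0.113.7, 198.51.100.10, 198.51.100.11' https://protected.example.com/
# Scanning right to left, the trusted proxy hops are skipped and the left-most
# address (203.0.113.7) becomes the source identifier; an untrusted intermediate
# hop would be returned instead. For PROXYIP extraction the trust check is skipped
# and the left-most address is always used.
```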
@@ -312,7 +323,7 @@ UsersAllIdentifiersConfig::setXFFValuesToOpaqueCtx(const HttpHeader &header, Ext
        return;
    }
    NginxAttachmentOpaque &opaque = i_transaction_table->getState<NginxAttachmentOpaque>();
-   auto value = parseXForwardedFor(header.getValue());
+   auto value = parseXForwardedFor(header.getValue(), type);
    if (!value.ok()) {
        dbgTrace(D_NGINX_ATTACHMENT_PARSER) << "Could not extract source identifier from X-Forwarded-For header";
        return;

@@ -321,12 +332,13 @@ UsersAllIdentifiersConfig::setXFFValuesToOpaqueCtx(const HttpHeader &header, Ext
    if (type == ExtractType::SOURCEIDENTIFIER) {
        opaque.setSourceIdentifier(header.getKey(), value.unpack());
        dbgDebug(D_NGINX_ATTACHMENT_PARSER)
-           << "Added source identifir to XFF "
+           << "Added source identifier from XFF header"
            << value.unpack();
+       opaque.setSavedData(HttpTransactionData::xff_vals_ctx, header.getValue());
        opaque.setSavedData(HttpTransactionData::source_identifier, value.unpack());
        dbgTrace(D_NGINX_ATTACHMENT_PARSER)
-           << "XFF found, set ctx with value from header: "
-           << static_cast<string>(header.getValue());
+           << "XFF found, set ctx with value from header: "
+           << static_cast<string>(header.getValue());
    } else {
        opaque.setSavedData(HttpTransactionData::proxy_ip_ctx, value.unpack());
    }
@@ -18,7 +18,9 @@
#include <sys/stat.h>
#include <climits>
#include <unordered_map>
+#include <unordered_set>
#include <boost/range/iterator_range.hpp>
+#include <boost/algorithm/string.hpp>
#include <fstream>
#include <algorithm>

@@ -28,6 +30,7 @@
#include "http_manager_opaque.h"
#include "log_generator.h"
#include "http_inspection_events.h"
+#include "agent_core_utilities.h"

USE_DEBUG_FLAG(D_HTTP_MANAGER);

@@ -66,6 +69,22 @@ public:
        i_transaction_table = Singleton::Consume<I_Table>::by<HttpManager>();

        Singleton::Consume<I_Logging>::by<HttpManager>()->addGeneralModifier(compressAppSecLogs);
+
+       const char* ignored_headers_env = getenv("SAAS_IGNORED_UPSTREAM_HEADERS");
+       if (ignored_headers_env) {
+           string ignored_headers_str = ignored_headers_env;
+           ignored_headers_str = NGEN::Strings::removeTrailingWhitespaces(ignored_headers_str);
+
+           if (!ignored_headers_str.empty()) {
+               dbgInfo(D_HTTP_MANAGER)
+                   << "Ignoring SAAS_IGNORED_UPSTREAM_HEADERS environment variable: "
+                   << ignored_headers_str;
+
+               vector<string> ignored_headers_vec;
+               boost::split(ignored_headers_vec, ignored_headers_str, boost::is_any_of(";"));
+               for (const string &header : ignored_headers_vec) ignored_headers.insert(header);
+           }
+       }
    }

    FilterVerdict

@@ -90,6 +109,14 @@ public:
            return FilterVerdict(default_verdict);
        }

+       if (is_request && ignored_headers.find(static_cast<string>(event.getKey())) != ignored_headers.end()) {
+           dbgTrace(D_HTTP_MANAGER)
+               << "Ignoring header key - "
+               << static_cast<string>(event.getKey())
+               << " - as it is in the ignored headers list";
+           return FilterVerdict(ngx_http_cp_verdict_e::TRAFFIC_VERDICT_INSPECT);
+       }
+
        ScopedContext ctx;
        ctx.registerValue(app_sec_marker_key, i_transaction_table->keyToString(), EnvKeyAttr::LogSection::MARKER);

@@ -394,6 +421,7 @@ private:
    I_Table *i_transaction_table;
    static const ngx_http_cp_verdict_e default_verdict;
    static const string app_sec_marker_key;
+   unordered_set<string> ignored_headers;
};

const ngx_http_cp_verdict_e HttpManager::Impl::default_verdict(ngx_http_cp_verdict_e::TRAFFIC_VERDICT_DROP);
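A minimal sketch of the new ignore list; the header names are made-up examples, and the semicolon separator follows the `boost::split` call above:

```bash
# Made-up header names; list the upstream request headers the HTTP manager should skip.
export SAAS_IGNORED_UPSTREAM_HEADERS="X-Internal-Trace;X-Upstream-Debug"
# The value is read once via getenv() at init; matching request-header keys get an
# immediate INSPECT verdict instead of being passed to the security apps.
```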
components/include/central_nginx_manager.h (new executable file, 45 lines)
@@ -0,0 +1,45 @@
// Copyright (C) 2022 Check Point Software Technologies Ltd. All rights reserved.

// Licensed under the Apache License, Version 2.0 (the "License");
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifndef __CENTRAL_NGINX_MANAGER_H__
#define __CENTRAL_NGINX_MANAGER_H__

#include "component.h"
#include "singleton.h"
#include "i_messaging.h"
#include "i_rest_api.h"
#include "i_mainloop.h"
#include "i_agent_details.h"

class CentralNginxManager
        :
        public Component,
        Singleton::Consume<I_RestApi>,
        Singleton::Consume<I_Messaging>,
        Singleton::Consume<I_MainLoop>,
        Singleton::Consume<I_AgentDetails>
{
public:
    CentralNginxManager();
    ~CentralNginxManager();

    void preload() override;
    void init() override;
    void fini() override;

private:
    class Impl;
    std::unique_ptr<Impl> pimpl;
};

#endif // __CENTRAL_NGINX_MANAGER_H__
components/include/nginx_message_reader.h (new executable file, 28 lines)
@@ -0,0 +1,28 @@
#ifndef __NGINX_MESSAGE_READER_H__
#define __NGINX_MESSAGE_READER_H__

#include "singleton.h"
#include "i_mainloop.h"
#include "i_socket_is.h"
#include "component.h"

class NginxMessageReader
        :
        public Component,
        Singleton::Consume<I_MainLoop>,
        Singleton::Consume<I_Socket>
{
public:
    NginxMessageReader();
    ~NginxMessageReader();

    void init() override;
    void fini() override;
    void preload() override;

private:
    class Impl;
    std::unique_ptr<Impl> pimpl;
};

#endif //__NGINX_MESSAGE_READER_H__
components/include/nginx_utils.h (new executable file, 51 lines)
@@ -0,0 +1,51 @@
// Copyright (C) 2022 Check Point Software Technologies Ltd. All rights reserved.

// Licensed under the Apache License, Version 2.0 (the "License");
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifndef __NGINX_UTILS_H__
#define __NGINX_UTILS_H__

#include <string>

#include "maybe_res.h"
#include "singleton.h"
#include "i_shell_cmd.h"

class NginxConfCollector
{
public:
    NginxConfCollector(const std::string &nginx_conf_input_path, const std::string &nginx_conf_output_path);
    Maybe<std::string> generateFullNginxConf() const;

private:
    std::vector<std::string> expandIncludes(const std::string &includePattern) const;
    void processConfigFile(
        const std::string &path,
        std::ostringstream &conf_output,
        std::vector<std::string> &errors
    ) const;

    std::string main_conf_input_path;
    std::string main_conf_output_path;
    std::string main_conf_directory_path;
};

class NginxUtils : Singleton::Consume<I_ShellCmd>
{
public:
    static std::string getModulesPath();
    static std::string getMainNginxConfPath();
    static Maybe<void> validateNginxConf(const std::string &nginx_conf_path);
    static Maybe<void> reloadNginx(const std::string &nginx_conf_path);
};

#endif // __NGINX_UTILS_H__
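`NginxUtils` consumes `I_ShellCmd`, so `validateNginxConf` and `reloadNginx` presumably shell out to the stock nginx binary, roughly along these lines (an assumption, not shown in this diff; the path matches the default `centralNginxManagement.confDownloadPath` used later in this compare):

```bash
# Validate a candidate configuration without touching the running server,
# then ask the running master process to reload with it.
nginx -t -c /tmp/central_nginx.conf
nginx -c /tmp/central_nginx.conf -s reload
```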
@@ -7,15 +7,21 @@
#include "singleton.h"
#include "i_mainloop.h"
#include "i_environment.h"
#include "i_geo_location.h"
#include "i_generic_rulebase.h"
+#include "i_shell_cmd.h"
+#include "i_env_details.h"

class RateLimit
        :
        public Component,
        Singleton::Consume<I_MainLoop>,
        Singleton::Consume<I_TimeGet>,
        Singleton::Consume<I_GeoLocation>,
        Singleton::Consume<I_Environment>,
-       Singleton::Consume<I_GenericRulebase>
+       Singleton::Consume<I_GenericRulebase>,
+       Singleton::Consume<I_ShellCmd>,
+       Singleton::Consume<I_EnvDetails>
{
public:
    RateLimit();
@@ -28,7 +28,7 @@ static const std::string default_nginx_config_file = "/etc/cp/conf/rpmanager/ngi
static const std::string default_prepare_nginx_config_file = "/etc/cp/conf/rpmanager/nginx_prepare.conf";
static const std::string default_global_conf_template = "/etc/cp/conf/rpmanager/nginx-conf-template";
static const std::string default_nginx_config_include_file =
-   "/etc/cp/conf/rpmanager/servers/nginx_conf_include.conf";
+   "/etc/cp/conf/rpmanager/servers/00_nginx_conf_include.conf";
static const std::string default_global_conf_include_template =
    "/etc/cp/conf/rpmanager/nginx-conf-include-template";
static const std::string default_global_conf_include_template_no_responses =
@@ -58,7 +58,7 @@ private:
        const std::string::const_iterator &end,
        const std::string &key) const;
    Buffer extractKeyValueFromCookie(const std::string &cookie_value, const std::string &key) const;
-   Maybe<std::string> parseXForwardedFor(const std::string &str) const;
+   Maybe<std::string> parseXForwardedFor(const std::string &str, ExtractType type) const;

    std::vector<UsersIdentifiersConfig> user_identifiers;
};
@@ -33,7 +33,6 @@ class I_WaapAssetStatesManager;
class I_Messaging;
class I_AgentDetails;
class I_Encryptor;
-class I_WaapModelResultLogger;

const std::string WAAP_APPLICATION_NAME = "waap application";

@@ -51,8 +50,7 @@ class WaapComponent
        Singleton::Consume<I_AgentDetails>,
        Singleton::Consume<I_Messaging>,
        Singleton::Consume<I_Encryptor>,
-       Singleton::Consume<I_Environment>,
-       Singleton::Consume<I_WaapModelResultLogger>
+       Singleton::Consume<I_Environment>
{
public:
    WaapComponent();
components/nginx_message_reader/CMakeLists.txt (new executable file, 3 lines)
@@ -0,0 +1,3 @@
link_directories(${BOOST_ROOT}/lib)

add_library(nginx_message_reader nginx_message_reader.cc)
components/nginx_message_reader/nginx_message_reader.cc (new executable file, 735 lines)
@@ -0,0 +1,735 @@
#include "nginx_message_reader.h"
|
||||
|
||||
#include <string>
|
||||
#include <boost/regex.hpp>
|
||||
#include <boost/algorithm/string.hpp>
|
||||
#include <boost/algorithm/string/regex.hpp>
|
||||
|
||||
#include "config.h"
|
||||
#include "singleton.h"
|
||||
#include "i_mainloop.h"
|
||||
#include "enum_array.h"
|
||||
#include "log_generator.h"
|
||||
#include "maybe_res.h"
|
||||
#include "http_transaction_data.h"
|
||||
#include "generic_rulebase/rulebase_config.h"
|
||||
#include "generic_rulebase/evaluators/asset_eval.h"
|
||||
#include "generic_rulebase/triggers_config.h"
|
||||
#include "agent_core_utilities.h"
|
||||
#include "rate_limit_config.h"
|
||||
|
||||
USE_DEBUG_FLAG(D_NGINX_MESSAGE_READER);
|
||||
|
||||
using namespace std;
|
||||
|
||||
static const string syslog_regex_string = (
|
||||
"<[0-9]+>([A-Z][a-z][a-z]\\s{1,2}\\d{1,2}\\s\\d{2}"
|
||||
"[:]\\d{2}[:]\\d{2})\\s([\\w][\\w\\d\\.@-]*)\\s(nginx:)"
|
||||
);
|
||||
|
||||
static const boost::regex socket_address_regex("(\\d+\\.\\d+\\.\\d+\\.\\d+):(\\d+)");
|
||||
static const boost::regex syslog_regex(syslog_regex_string);
|
||||
static const boost::regex alert_log_regex(
|
||||
"("
|
||||
+ syslog_regex_string + ") "
|
||||
+ "(.+?\\[alert\\] )(.+?)"
|
||||
", (client: .+?)"
|
||||
", (server: .+?)"
|
||||
", (request: \".+?\")"
|
||||
", (upstream: \".+?\")"
|
||||
", (host: \".+?\")$"
|
||||
);
|
||||
|
||||
static const boost::regex error_log_regex(
|
||||
"("
|
||||
+ syslog_regex_string + ") "
|
||||
+ "(.+?\\[error\\] )(.+?)"
|
||||
", (client: .+?)"
|
||||
", (server: .+?)"
|
||||
", (request: \".+?\")"
|
||||
", (upstream: \".+?\")"
|
||||
", (host: \".+?\")$"
|
||||
);
|
||||
|
||||
static const boost::regex server_regex("(\\d+\\.\\d+\\.\\d+\\.\\d+)|(\\w+\\.\\w+)");
|
||||
static const boost::regex uri_regex("^/");
|
||||
static const boost::regex port_regex("\\d+");
|
||||
static const boost::regex response_code_regex("[0-9]{3}");
|
||||
static const boost::regex http_method_regex("[A-Za-z]+");
|
||||
|
||||
class NginxMessageReader::Impl
|
||||
{
|
||||
public:
|
||||
void
|
||||
init()
|
||||
{
|
||||
dbgFlow(D_NGINX_MESSAGE_READER);
|
||||
I_MainLoop *mainloop = Singleton::Consume<I_MainLoop>::by<NginxMessageReader>();
|
||||
mainloop->addOneTimeRoutine(
|
||||
I_MainLoop::RoutineType::System,
|
||||
[this] ()
|
||||
{
|
||||
initSyslogServerSocket();
|
||||
handleNginxLogs();
|
||||
},
|
||||
"Initialize nginx syslog",
|
||||
true
|
||||
);
|
||||
}
|
||||
|
||||
void
|
||||
preload()
|
||||
{
|
||||
registerConfigLoadCb([this]() { loadNginxMessageReaderConfig(); });
|
||||
}
|
||||
|
||||
void
|
||||
fini()
|
||||
{
|
||||
I_Socket *i_socket = Singleton::Consume<I_Socket>::by<NginxMessageReader>();
|
||||
i_socket->closeSocket(syslog_server_socket);
|
||||
}
|
||||
|
||||
void
|
||||
loadNginxMessageReaderConfig()
|
||||
{
|
||||
rate_limit_status_code = getProfileAgentSettingWithDefault<string>(
|
||||
"429",
|
||||
"accessControl.rateLimit.returnCode"
|
||||
);
|
||||
dbgTrace(D_NGINX_MESSAGE_READER) << "Selected rate-limit status code: " << rate_limit_status_code;
|
||||
}
|
||||
|
||||
private:
|
||||
enum class LogInfo {
|
||||
HTTP_METHOD,
|
||||
URI,
|
||||
RESPONSE_CODE,
|
||||
HOST,
|
||||
SOURCE,
|
||||
DESTINATION_IP,
|
||||
DESTINATION_PORT,
|
||||
EVENT_MESSAGE,
|
||||
ASSET_ID,
|
||||
ASSET_NAME,
|
||||
RULE_NAME,
|
||||
RULE_ID,
|
||||
COUNT
|
||||
};
|
||||
|
||||
void
|
||||
initSyslogServerSocket()
|
||||
{
|
||||
dbgFlow(D_NGINX_MESSAGE_READER);
|
||||
I_MainLoop *mainloop = Singleton::Consume<I_MainLoop>::by<NginxMessageReader>();
|
||||
I_Socket *i_socket = Singleton::Consume<I_Socket>::by<NginxMessageReader>();
|
||||
string nginx_syslog_server_address = getProfileAgentSettingWithDefault<string>(
|
||||
"127.0.0.1:1514",
|
||||
"reverseProxy.nginx.syslogAddress"
|
||||
);
|
||||
dbgInfo(D_NGINX_MESSAGE_READER) << "Attempting to open a socket: " << nginx_syslog_server_address;
|
||||
do {
|
||||
Maybe<I_Socket::socketFd> new_socket = i_socket->genSocket(
|
||||
I_Socket::SocketType::UDP,
|
||||
false,
|
||||
true,
|
||||
nginx_syslog_server_address
|
||||
);
|
||||
if (!new_socket.ok()) {
|
||||
dbgError(D_NGINX_MESSAGE_READER) << "Failed to open a socket. Error: " << new_socket.getErr();
|
||||
mainloop->yield(chrono::milliseconds(500));
|
||||
continue;
|
||||
}
|
||||
|
||||
if (new_socket.unpack() < 0) {
|
||||
dbgError(D_NGINX_MESSAGE_READER)<< "Generated socket is OK yet negative";
|
||||
mainloop->yield(chrono::milliseconds(500));
|
||||
continue;
|
||||
}
|
||||
syslog_server_socket = new_socket.unpack();
|
||||
dbgInfo(D_NGINX_MESSAGE_READER)
|
||||
<< "Opened socket for nginx logs over syslog. Socket: "
|
||||
<< syslog_server_socket;
|
||||
} while (syslog_server_socket < 0);
|
||||
}
|
||||
|
||||
void
|
||||
handleNginxLogs()
|
||||
{
|
||||
dbgFlow(D_NGINX_MESSAGE_READER);
|
||||
I_MainLoop::Routine read_logs =
|
||||
[this] ()
|
||||
{
|
||||
Maybe<string> logs = getLogsFromSocket(syslog_server_socket);
|
||||
|
||||
if (!logs.ok()) {
|
||||
dbgWarning(D_NGINX_MESSAGE_READER)
|
||||
<< "Failed to get NGINX logs from the socket. Error: "
|
||||
<< logs.getErr();
|
||||
return;
|
||||
}
|
||||
string raw_logs_to_parse = logs.unpackMove();
|
||||
vector<string> logs_to_parse = separateLogs(raw_logs_to_parse);
|
||||
|
||||
for (auto const &log: logs_to_parse) {
|
||||
bool log_sent;
|
||||
if (isAccessLog(log)) {
|
||||
log_sent = sendAccessLog(log);
|
||||
} else if (isAlertErrorLog(log) || isErrorLog(log)) {
|
||||
log_sent = sendErrorLog(log);
|
||||
} else {
|
||||
dbgWarning(D_NGINX_MESSAGE_READER) << "Unexpected nginx log format";
|
||||
continue;
|
||||
}
|
||||
if (!log_sent) {
|
||||
dbgWarning(D_NGINX_MESSAGE_READER) << "Failed to send Log to Infinity Portal";
|
||||
} else {
|
||||
dbgTrace(D_NGINX_MESSAGE_READER) << "Succesfully sent nginx log to Infinity Portal";
|
||||
}
|
||||
}
|
||||
};
|
||||
I_MainLoop *mainloop = Singleton::Consume<I_MainLoop>::by<NginxMessageReader>();
|
||||
mainloop->addFileRoutine(
|
||||
I_MainLoop::RoutineType::RealTime,
|
||||
syslog_server_socket,
|
||||
read_logs,
|
||||
"Process nginx logs",
|
||||
true
|
||||
);
|
||||
}
|
||||
|
||||
bool
|
||||
sendAccessLog(const string &log)
|
||||
{
|
||||
dbgFlow(D_NGINX_MESSAGE_READER) << "Access log" << log;
|
||||
Maybe<EnumArray<LogInfo, string>> log_info = parseAccessLog(log);
|
||||
if (!log_info.ok()) {
|
||||
dbgWarning(D_NGINX_MESSAGE_READER)
|
||||
<< "Failed parsing the NGINX logs. Error: "
|
||||
<< log_info.getErr();
|
||||
return false;
|
||||
}
|
||||
auto unpacked_log_info = log_info.unpack();
|
||||
|
||||
if (unpacked_log_info[LogInfo::RESPONSE_CODE] == rate_limit_status_code) {
|
||||
return sendRateLimitLog(unpacked_log_info);
|
||||
}
|
||||
return sendLog(unpacked_log_info);
|
||||
}
|
||||
|
||||
bool
|
||||
sendErrorLog(const string &log)
|
||||
{
|
||||
dbgFlow(D_NGINX_MESSAGE_READER) << "Error log" << log;
|
||||
Maybe<EnumArray<LogInfo, string>> log_info = parseErrorLog(log);
|
||||
if (!log_info.ok()) {
|
||||
dbgWarning(D_NGINX_MESSAGE_READER)
|
||||
<< "Failed parsing the NGINX logs. Error: "
|
||||
<< log_info.getErr();
|
||||
return false;
|
||||
}
|
||||
return sendLog(log_info.unpack());
|
||||
}
|
||||
|
||||
bool
|
||||
isAccessLog(const string &log) const
|
||||
{
|
||||
dbgFlow(D_NGINX_MESSAGE_READER) << "Chekck if string contains \"accessLog\"" << log;
|
||||
return log.find("accessLog") != string::npos;
|
||||
}
|
||||
|
||||
bool
|
||||
isAlertErrorLog(const string &log) const
|
||||
{
|
||||
dbgFlow(D_NGINX_MESSAGE_READER) << "Check if log is of type 'error log'. Log: " << log;
|
||||
return log.find("[alert]") != string::npos;
|
||||
}
|
||||
|
||||
bool
|
||||
isErrorLog(const string &log) const
|
||||
{
|
||||
dbgFlow(D_NGINX_MESSAGE_READER) << "Check if log is of type 'error log'. Log: " << log;
|
||||
return log.find("[error]") != string::npos;
|
||||
}
|
||||
|
||||
bool
|
||||
sendLog(const EnumArray<LogInfo, string> &log_info)
|
||||
{
|
||||
dbgFlow(D_NGINX_MESSAGE_READER);
|
||||
string event_name;
|
||||
switch (log_info[LogInfo::RESPONSE_CODE][0]) {
|
||||
case '4': {
|
||||
event_name = "Invalid request or incorrect reverse proxy configuration - Request dropped."
|
||||
" Please check the reverse proxy configuration of your relevant assets";
|
||||
break;
|
||||
}
|
||||
case '5': {
|
||||
event_name = "AppSec Gateway reverse proxy error - Request dropped. "
|
||||
"Please verify the reverse proxy configuration of your relevant assets. "
|
||||
"If the issue persists please contact Check Point Support";
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
dbgError(D_NGINX_MESSAGE_READER) << "Irrelevant status code";
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
dbgTrace(D_NGINX_MESSAGE_READER)
|
||||
<< "Nginx log's event name and response code: "
|
||||
<< event_name
|
||||
<< ", "
|
||||
<< log_info[LogInfo::RESPONSE_CODE];
|
||||
LogGen log(
|
||||
event_name,
|
||||
ReportIS::Audience::SECURITY,
|
||||
ReportIS::Severity::INFO,
|
||||
ReportIS::Priority::LOW,
|
||||
ReportIS::Tags::REVERSE_PROXY
|
||||
);
|
||||
log << LogField("eventConfidence", "High");
|
||||
|
||||
for (LogInfo field : makeRange<LogInfo>()) {
|
||||
Maybe<string> string_field = convertLogFieldToString(field);
|
||||
if (!string_field.ok()) {
|
||||
dbgDebug(D_NGINX_MESSAGE_READER) << "Enum field was not converted: " << string_field.getErr();
|
||||
return false;
|
||||
}
|
||||
|
||||
if (field != LogInfo::DESTINATION_PORT) {
|
||||
log << LogField(string_field.unpack(), log_info[field]);
|
||||
continue;
|
||||
}
|
||||
|
||||
try {
|
||||
log << LogField(string_field.unpack(), stoi(log_info[field]));
|
||||
} catch (const exception &e) {
|
||||
dbgError(D_NGINX_MESSAGE_READER)
|
||||
<< "Unable to convert port to numeric value: "
|
||||
<< e.what();
|
||||
log << LogField(string_field.unpack(), 0);
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
bool
|
||||
sendRateLimitLog(const EnumArray<LogInfo, string> &log_info)
|
||||
{
|
||||
dbgFlow(D_NGINX_MESSAGE_READER) << "Getting rate-limit rules of asset ID: " << log_info[LogInfo::ASSET_ID];
|
||||
|
||||
ScopedContext rate_limit_ctx;
|
||||
|
||||
rate_limit_ctx.registerValue<GenericConfigId>(AssetMatcher::ctx_key, log_info[LogInfo::ASSET_ID]);
|
||||
auto rate_limit_config = getConfiguration<RateLimitConfig>("rulebase", "rateLimit");
|
||||
if (!rate_limit_config.ok()) {
|
||||
dbgTrace(D_NGINX_MESSAGE_READER)
|
||||
<< "Rate limit context does not match asset ID: " << log_info[LogInfo::ASSET_ID];
|
||||
return false;
|
||||
}
|
||||
RateLimitConfig unpacked_rate_limit_config = rate_limit_config.unpack();
|
||||
|
||||
string nginx_uri = log_info[LogInfo::URI];
|
||||
const LogTriggerConf &rate_limit_trigger = unpacked_rate_limit_config.getRateLimitTrigger(nginx_uri);
|
||||
|
||||
dbgTrace(D_NGINX_MESSAGE_READER)<< "About to generate NGINX rate-limit log";
|
||||
|
||||
string event_name = "Rate limit";
|
||||
string security_action = "Drop";
|
||||
bool is_log_required = false;
|
||||
|
||||
// Prevent events checkbox (in triggers)
|
||||
if (rate_limit_trigger.isPreventLogActive(LogTriggerConf::SecurityType::AccessControl)) {
|
||||
is_log_required = true;
|
||||
}
|
||||
|
||||
if (!is_log_required) {
|
||||
dbgTrace(D_NGINX_MESSAGE_READER) << "Not sending NGINX rate-limit log as it is not required";
|
||||
return false;
|
||||
}
|
||||
|
||||
ostringstream src_ip;
|
||||
ostringstream dst_ip;
|
||||
src_ip << log_info[LogInfo::SOURCE];
|
||||
dst_ip << log_info[LogInfo::DESTINATION_IP];
|
||||
|
||||
ReportIS::Severity log_severity = ReportIS::Severity::MEDIUM;
|
||||
ReportIS::Priority log_priority = ReportIS::Priority::MEDIUM;
|
||||
|
||||
LogGen log = rate_limit_trigger(
|
||||
event_name,
|
||||
LogTriggerConf::SecurityType::AccessControl,
|
||||
log_severity,
|
||||
log_priority,
|
||||
true, // is drop
|
||||
LogField("practiceType", "Rate Limit"),
|
||||
ReportIS::Tags::RATE_LIMIT
|
||||
);
|
||||
|
||||
for (LogInfo field : makeRange<LogInfo>()) {
|
||||
Maybe<string> string_field = convertLogFieldToString(field);
|
||||
if (!string_field.ok()) {
|
||||
dbgDebug(D_NGINX_MESSAGE_READER) << "Enum field was not converted: " << string_field.getErr();
|
||||
return false;
|
||||
}
|
||||
|
||||
if (
|
||||
field == LogInfo::HOST ||
|
||||
field == LogInfo::URI ||
|
||||
field == LogInfo::HTTP_METHOD ||
|
||||
field == LogInfo::SOURCE ||
|
||||
field == LogInfo::DESTINATION_IP ||
|
||||
field == LogInfo::ASSET_ID ||
|
||||
field == LogInfo::ASSET_NAME ||
|
||||
field == LogInfo::RESPONSE_CODE
|
||||
) {
|
||||
if (!log_info[field].empty()) {
|
||||
log << LogField(string_field.unpack(), log_info[field]);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
if (field == LogInfo::DESTINATION_PORT) {
|
||||
try {
|
||||
int numeric_dst_port = stoi(log_info[field]);
|
||||
log << LogField(string_field.unpack(), numeric_dst_port);
|
||||
} catch (const exception &e) {
|
||||
dbgWarning(D_NGINX_MESSAGE_READER)
|
||||
<< "Unable to convert dst port: "
|
||||
<< log_info[field]
|
||||
<< " to numberic value. Error: "
|
||||
<< e.what();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
Maybe<string>
|
||||
convertLogFieldToString(LogInfo field)
|
||||
{
|
||||
dbgFlow(D_NGINX_MESSAGE_READER);
|
||||
switch (field) {
|
||||
case LogInfo::HTTP_METHOD:
|
||||
return string("httpMethod");
|
||||
case LogInfo::URI:
|
||||
return string("httpUriPath");
|
||||
case LogInfo::RESPONSE_CODE:
|
||||
return string("httpResponseCode");
|
||||
case LogInfo::HOST:
|
||||
return string("httpHostName");
|
||||
case LogInfo::SOURCE:
|
||||
return string("httpSourceId");
|
||||
case LogInfo::DESTINATION_IP:
|
||||
return string("destinationIp");
|
||||
case LogInfo::DESTINATION_PORT:
|
||||
return string("destinationPort");
|
||||
case LogInfo::ASSET_ID:
|
||||
return string("assetId");
|
||||
case LogInfo::ASSET_NAME:
|
||||
return string("assetName");
|
||||
case LogInfo::EVENT_MESSAGE:
|
||||
return string("httpResponseBody");
|
||||
case LogInfo::RULE_ID:
|
||||
return string("ruleId");
|
||||
case LogInfo::RULE_NAME:
|
||||
return string("ruleName");
|
||||
case LogInfo::COUNT:
|
||||
dbgError(D_NGINX_MESSAGE_READER) << "LogInfo::COUNT is not allowed";
|
||||
return genError("LogInfo::COUNT is not allowed");
|
||||
}
|
||||
dbgError(D_NGINX_MESSAGE_READER) << "No Enum found, int value: " << static_cast<int>(field);
|
||||
return genError("No Enum found");
|
||||
}
|
||||
|
||||
static vector<string>
|
||||
separateLogs(const string &raw_logs_to_parse)
|
||||
{
|
||||
dbgFlow(D_NGINX_MESSAGE_READER) << "separating logs. logs: " << raw_logs_to_parse;
|
||||
dbgTrace(D_NGINX_MESSAGE_READER) << "separateLogs start of function. Logs to parse: " << raw_logs_to_parse;
|
||||
boost::smatch matcher;
|
||||
vector<string> logs;
|
||||
|
||||
if (raw_logs_to_parse.empty()) return logs;
|
||||
|
||||
size_t pos = 0;
|
||||
while (NGEN::Regex::regexSearch(__FILE__, __LINE__, raw_logs_to_parse.substr(pos), matcher, syslog_regex)) {
|
||||
if (pos == 0) {
|
||||
dbgTrace(D_NGINX_MESSAGE_READER) << "separateLogs pos = 0";
|
||||
pos++;
|
||||
continue;
|
||||
}
|
||||
auto log_length = matcher.position();
|
||||
logs.push_back(raw_logs_to_parse.substr(pos - 1, log_length));
|
||||
|
||||
pos += log_length + 1;
|
||||
}
|
||||
logs.push_back(raw_logs_to_parse.substr(pos - 1));
|
||||
dbgTrace(D_NGINX_MESSAGE_READER) << "separateLogs end of function";
|
||||
|
||||
return logs;
|
||||
}
|
||||
|
||||
static pair<string, string>
|
||||
parseErrorLogRequestField(const string &request)
|
||||
{
|
||||
dbgFlow(D_NGINX_MESSAGE_READER) << "parsing request field. request: " << request;
|
||||
string formatted_request = request;
|
||||
vector<string> result;
|
||||
boost::erase_all(formatted_request, "\"");
|
||||
boost::erase_all(formatted_request, "\n");
|
||||
boost::split(result, formatted_request, boost::is_any_of(" "), boost::token_compress_on);
|
||||
|
||||
const int http_method_index = 1;
|
||||
const int uri_index = 2;
|
||||
return pair<string, string>(result[http_method_index], result[uri_index]);
|
||||
}
|
||||
|
||||
static string
|
||||
parseErrorLogField(const string &field)
|
||||
{
|
||||
dbgFlow(D_NGINX_MESSAGE_READER) << "parsing error log field " << field;
|
||||
string formatted_field = field;
|
||||
vector<string> result;
|
||||
boost::erase_all(formatted_field, "\"");
|
||||
boost::erase_all(formatted_field, "\n");
|
||||
boost::split(result, formatted_field, boost::is_any_of(" "), boost::token_compress_on);
|
||||
|
||||
const int field_index = 1;
|
||||
return result[field_index];
|
||||
}
|
||||
|
||||
void
|
||||
addContextFieldsToLogInfo(EnumArray<LogInfo, string> &log_info)
|
||||
{
|
||||
dbgFlow(D_NGINX_MESSAGE_READER);
|
||||
ScopedContext ctx;
|
||||
|
||||
try {
|
||||
ctx.registerValue<uint16_t>(
|
||||
HttpTransactionData::listening_port_ctx,
|
||||
static_cast<uint16_t>(stoi(log_info[LogInfo::DESTINATION_PORT]))
|
||||
);
|
||||
} catch (const exception &e) {
|
||||
dbgError(D_NGINX_MESSAGE_READER) << "Failed register values for context " << e.what();
|
||||
}
|
||||
ctx.registerValue<string>(HttpTransactionData::host_name_ctx, log_info[LogInfo::HOST]);
|
||||
ctx.registerValue<string>(HttpTransactionData::uri_ctx, log_info[LogInfo::URI]);
|
||||
auto rule_by_ctx = getConfiguration<BasicRuleConfig>("rulebase", "rulesConfig");
|
||||
if (!rule_by_ctx.ok()) {
|
||||
dbgWarning(D_NGINX_MESSAGE_READER)
|
||||
<< "AssetId was not found by the given context. Reason: "
|
||||
<< rule_by_ctx.getErr();
|
||||
return;
|
||||
}
|
||||
|
||||
BasicRuleConfig context = rule_by_ctx.unpack();
|
||||
log_info[LogInfo::ASSET_ID] = context.getAssetId();
|
||||
log_info[LogInfo::ASSET_NAME] = context.getAssetName();
|
||||
log_info[LogInfo::RULE_ID] = context.getRuleId();
|
||||
log_info[LogInfo::RULE_NAME] = context.getRuleName();
|
||||
}
|
||||
|
||||
Maybe<EnumArray<LogInfo, string>>
|
||||
parseErrorLog(const string &log_line)
|
||||
{
|
||||
dbgFlow(D_NGINX_MESSAGE_READER) << "Handling log line:" << log_line;
|
||||
string port;
|
||||
EnumArray<LogInfo, string> log_info(EnumArray<LogInfo, string>::Fill(), string(""));
|
||||
|
||||
boost::smatch matcher;
|
||||
vector<string> result;
|
||||
if (
|
||||
!NGEN::Regex::regexSearch(
|
||||
__FILE__,
|
||||
__LINE__,
|
||||
log_line,
|
||||
matcher,
|
||||
isAlertErrorLog(log_line) ? alert_log_regex : error_log_regex
|
||||
)
|
||||
) {
|
||||
dbgWarning(D_NGINX_MESSAGE_READER) << "Unexpected nginx log format";
|
||||
return genError("Unexpected nginx log format");
|
||||
}
|
||||
|
||||
const int event_message_index = 6;
|
||||
const int source_index = 7;
|
||||
const int request_index = 9;
|
||||
const int host_index = 11;
|
||||
string host = string(matcher[host_index].first, matcher[host_index].second);
|
||||
string source = string(matcher[source_index].first, matcher[source_index].second);
|
||||
string event_message = string(matcher[event_message_index].first, matcher[event_message_index].second);
|
||||
string request = string(matcher[request_index].first, matcher[request_index].second);
|
||||
|
||||
host = parseErrorLogField(host);
|
||||
source = parseErrorLogField(source);
|
||||
pair<string, string> parsed_request = parseErrorLogRequestField(request);
|
||||
string http_method = parsed_request.first;
|
||||
string uri = parsed_request.second;
|
||||
|
||||
if (NGEN::Regex::regexSearch(__FILE__, __LINE__, host, matcher, socket_address_regex)) {
|
||||
int host_index = 1;
|
||||
int port_index = 2;
|
||||
host = string(matcher[host_index].first, matcher[host_index].second);
|
||||
port = string(matcher[port_index].first, matcher[port_index].second);
|
||||
} else if (NGEN::Regex::regexSearch(__FILE__, __LINE__, host, matcher, boost::regex("https://"))) {
|
||||
port = "443";
|
||||
} else {
|
||||
port = "80";
|
||||
}
|
||||
|
||||
log_info[LogInfo::HOST] = host;
|
||||
log_info[LogInfo::URI] = uri;
|
||||
log_info[LogInfo::RESPONSE_CODE] = "500";
|
||||
log_info[LogInfo::HTTP_METHOD] = http_method;
|
||||
log_info[LogInfo::SOURCE] = source;
|
||||
log_info[LogInfo::DESTINATION_IP] = host;
|
||||
log_info[LogInfo::DESTINATION_PORT] = port;
|
||||
log_info[LogInfo::EVENT_MESSAGE] = event_message;
|
||||
|
||||
addContextFieldsToLogInfo(log_info);
|
||||
|
||||
if (!validateLog(log_info)) {
|
||||
dbgWarning(D_NGINX_MESSAGE_READER) << "Unexpected nginx log format";
|
||||
return genError("Unexpected nginx log format");
|
||||
}
|
||||
|
||||
return log_info;
|
||||
}
|
||||
|
||||
Maybe<EnumArray<LogInfo, string>>
|
||||
parseAccessLog(const string &log_line)
|
||||
{
|
||||
dbgFlow(D_NGINX_MESSAGE_READER) << "Parsing log line: " << log_line;
|
||||
string formatted_log = log_line;
|
||||
EnumArray<LogInfo, string> log_info(EnumArray<LogInfo, string>::Fill(), string(""));
|
||||
vector<string> result;
|
||||
boost::erase_all(formatted_log, "\"");
|
||||
boost::erase_all(formatted_log, "\n");
|
||||
boost::split(result, formatted_log, boost::is_any_of(" "), boost::token_compress_on);
|
||||
|
||||
const int valid_log_size = 20;
|
||||
|
||||
if (result.size() < valid_log_size) {
|
||||
dbgWarning(D_NGINX_MESSAGE_READER) << "Unexpected nginx log format";
|
||||
return genError("Unexpected nginx log format");
|
||||
}
|
||||
|
||||
const int host_index = 6;
|
||||
const int host_port_index = 7;
|
||||
const int http_method_index = 13;
|
||||
const int uri_index = 14;
|
||||
const int response_cod_index = 16;
|
||||
const int source_index = 8;
|
||||
|
||||
log_info[LogInfo::HOST] = result[host_index];
|
||||
log_info[LogInfo::URI] = result[uri_index];
|
||||
log_info[LogInfo::RESPONSE_CODE] = result[response_cod_index];
|
||||
log_info[LogInfo::HTTP_METHOD] = result[http_method_index];
|
||||
log_info[LogInfo::SOURCE] = result[source_index];
|
||||
log_info[LogInfo::DESTINATION_IP] = result[host_index];
|
||||
log_info[LogInfo::DESTINATION_PORT] = result[host_port_index];
|
||||
log_info[LogInfo::EVENT_MESSAGE] = "Invalid request or incorrect reverse proxy configuration - "
|
||||
"Request dropped. Please check the reverse proxy configuration of your relevant assets";
|
||||
|
||||
addContextFieldsToLogInfo(log_info);
|
||||
|
||||
if (!validateLog(log_info)) {
|
||||
dbgWarning(D_NGINX_MESSAGE_READER) << "Unexpected nginx log format";
|
||||
return genError("Unexpected nginx log format");
|
||||
}
|
||||
return log_info;
|
||||
}
|
||||
|
||||
static bool
|
||||
validateLog(const EnumArray<LogInfo, string> &log_info)
|
||||
{
|
||||
dbgFlow(D_NGINX_MESSAGE_READER);
|
||||
|
||||
boost::smatch matcher;
|
||||
if (!NGEN::Regex::regexSearch(__FILE__, __LINE__, log_info[LogInfo::HOST], matcher, server_regex)) {
|
||||
dbgTrace(D_NGINX_MESSAGE_READER) << "Could not validate server (Host): " << log_info[LogInfo::HOST];
|
||||
return false;
|
||||
}
|
||||
if (!NGEN::Regex::regexSearch(__FILE__, __LINE__, log_info[LogInfo::URI], matcher, uri_regex)) {
|
||||
dbgTrace(D_NGINX_MESSAGE_READER) << "Could not validate Uri: " << log_info[LogInfo::URI];
|
||||
return false;
|
||||
}
|
||||
|
||||
if (
|
||||
!NGEN::Regex::regexSearch(
|
||||
__FILE__,
|
||||
__LINE__,
|
||||
log_info[LogInfo::RESPONSE_CODE],
|
||||
matcher, response_code_regex
|
||||
)
|
||||
) {
|
||||
dbgTrace(D_NGINX_MESSAGE_READER)
|
||||
<< "Could not validate response code: "
|
||||
<< log_info[LogInfo::RESPONSE_CODE];
|
||||
return false;
|
||||
}
|
||||
|
||||
if (
|
||||
!NGEN::Regex::regexSearch(__FILE__, __LINE__, log_info[LogInfo::HTTP_METHOD], matcher, http_method_regex)
|
||||
) {
|
||||
dbgTrace(D_NGINX_MESSAGE_READER) << "Could not validate HTTP method: " << log_info[LogInfo::HTTP_METHOD];
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!NGEN::Regex::regexSearch(__FILE__, __LINE__, log_info[LogInfo::DESTINATION_PORT], matcher, port_regex)) {
|
||||
dbgTrace(D_NGINX_MESSAGE_READER)
|
||||
<< "Could not validate destination port : "
|
||||
<< log_info[LogInfo::DESTINATION_PORT];
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!NGEN::Regex::regexSearch(__FILE__, __LINE__, log_info[LogInfo::SOURCE], matcher, server_regex)) {
|
||||
dbgTrace(D_NGINX_MESSAGE_READER) << "Could not validate source : " << log_info[LogInfo::SOURCE];
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
Maybe<string>
|
||||
getLogsFromSocket(const I_Socket::socketFd &client_socket) const
|
||||
{
|
||||
dbgFlow(D_NGINX_MESSAGE_READER) << "Reading logs from socket. fd: " << client_socket;
|
||||
I_Socket *i_socket = Singleton::Consume<I_Socket>::by<NginxMessageReader>();
|
||||
Maybe<vector<char>> raw_log_data = i_socket->receiveData(client_socket, 0, false);
|
||||
if (!raw_log_data.ok()) {
|
||||
dbgWarning(D_NGINX_MESSAGE_READER) << "Error receiving data from socket";
|
||||
return genError("Error receiving data from socket");
|
||||
}
|
||||
|
||||
string raw_log(raw_log_data.unpack().begin(), raw_log_data.unpack().end());
|
||||
return move(raw_log);
|
||||
}
|
||||
|
||||
I_Socket::socketFd syslog_server_socket = -1;
|
||||
string rate_limit_status_code = "429";
|
||||
};
|
||||
|
||||
NginxMessageReader::NginxMessageReader() : Component("NginxMessageReader"), pimpl(make_unique<Impl>()) {}
|
||||
|
||||
NginxMessageReader::~NginxMessageReader() {}
|
||||
|
||||
void
|
||||
NginxMessageReader::init()
|
||||
{
|
||||
pimpl->init();
|
||||
}
|
||||
|
||||
void
|
||||
NginxMessageReader::preload()
|
||||
{
|
||||
pimpl->preload();
|
||||
}
|
||||
|
||||
void
|
||||
NginxMessageReader::fini()
|
||||
{
|
||||
pimpl->fini();
|
||||
}
|
||||
@@ -5,3 +5,4 @@ add_subdirectory(local_policy_mgmt_gen)
add_subdirectory(orchestration)
add_subdirectory(rate_limit)
add_subdirectory(waap)
+add_subdirectory(central_nginx_manager)
components/security_apps/central_nginx_manager/CMakeLists.txt (new executable file, 3 lines)
@@ -0,0 +1,3 @@
include_directories(include)

add_library(central_nginx_manager central_nginx_manager.cc lets_encrypt_listener.cc)
components/security_apps/central_nginx_manager/central_nginx_manager.cc (new executable file, 390 lines)
@@ -0,0 +1,390 @@
// Copyright (C) 2022 Check Point Software Technologies Ltd. All rights reserved.
|
||||
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#include "central_nginx_manager.h"
|
||||
#include "lets_encrypt_listener.h"
|
||||
|
||||
#include <string>
|
||||
#include <vector>
|
||||
#include <cereal/external/base64.hpp>
|
||||
|
||||
#include "debug.h"
|
||||
#include "config.h"
|
||||
#include "rest.h"
|
||||
#include "log_generator.h"
|
||||
#include "nginx_utils.h"
|
||||
#include "agent_core_utilities.h"
|
||||
|
||||
using namespace std;
|
||||
|
||||
USE_DEBUG_FLAG(D_NGINX_MANAGER);
|
||||
|
||||
class CentralNginxConfig
|
||||
{
|
||||
public:
|
||||
void load(cereal::JSONInputArchive &ar)
|
||||
{
|
||||
try {
|
||||
string nginx_conf_base64;
|
||||
ar(cereal::make_nvp("id", file_id));
|
||||
ar(cereal::make_nvp("name", file_name));
|
||||
ar(cereal::make_nvp("data", nginx_conf_base64));
|
||||
nginx_conf_content = cereal::base64::decode(nginx_conf_base64);
|
||||
central_nginx_conf_path = getCentralNginxConfPath();
|
||||
shared_config_path = getSharedConfigPath();
|
||||
if (!nginx_conf_content.empty()) configureCentralNginx();
|
||||
} catch (const cereal::Exception &e) {
|
||||
dbgDebug(D_NGINX_MANAGER) << "Could not load Central Management Config JSON. Error: " << e.what();
|
||||
ar.setNextName(nullptr);
|
||||
}
|
||||
}
|
||||
|
||||
const string & getFileId() const { return file_id; }
|
||||
const string & getFileName() const { return file_name; }
|
||||
const string & getFileContent() const { return nginx_conf_content; }
|
||||
|
||||
static string
|
||||
getCentralNginxConfPath()
|
||||
{
|
||||
string central_nginx_conf_path = getProfileAgentSettingWithDefault<string>(
|
||||
string("/tmp/central_nginx.conf"),
|
||||
"centralNginxManagement.confDownloadPath"
|
||||
);
|
||||
dbgInfo(D_NGINX_MANAGER) << "Central NGINX configuration path: " << central_nginx_conf_path;
|
||||
|
||||
return central_nginx_conf_path;
|
||||
}
|
||||
|
||||
static string
|
||||
getSharedConfigPath()
|
||||
{
|
||||
string central_shared_conf_path = getConfigurationWithDefault<string>(
|
||||
"/etc/cp/conf",
|
||||
"Config Component",
|
||||
"configuration path"
|
||||
);
|
||||
central_shared_conf_path += "/centralNginxManager/shared/central_nginx_shared.conf";
|
||||
dbgInfo(D_NGINX_MANAGER) << "Shared NGINX configuration path: " << central_shared_conf_path;
|
||||
|
||||
return central_shared_conf_path;
|
||||
}
|
||||
|
||||
private:
|
||||
void
|
||||
loadAttachmentModule()
|
||||
{
|
||||
string attachment_module_path = NginxUtils::getModulesPath() + "/ngx_cp_attachment_module.so";
|
||||
if (!NGEN::Filesystem::exists(attachment_module_path)) {
|
||||
dbgTrace(D_NGINX_MANAGER) << "Attachment module " << attachment_module_path << " does not exist";
|
||||
return;
|
||||
}
|
||||
|
||||
string attachment_module_conf = "load_module " + attachment_module_path + ";";
|
||||
if (nginx_conf_content.find(attachment_module_conf) != string::npos) {
|
||||
dbgTrace(D_NGINX_MANAGER) << "Attachment module " << attachment_module_path << " already loaded";
|
||||
return;
|
||||
}
|
||||
|
||||
nginx_conf_content = attachment_module_conf + "\n" + nginx_conf_content;
|
||||
}
|
||||
|
||||
Maybe<void>
|
||||
loadSharedDirective(const string &directive)
|
||||
{
|
||||
dbgFlow(D_NGINX_MANAGER) << "Loading shared directive into the servers " << directive;
|
||||
|
||||
if (!NGEN::Filesystem::copyFile(shared_config_path, shared_config_path + ".bak", true)) {
|
||||
return genError("Could not create a backup of the shared NGINX configuration file");
|
||||
}
|
||||
|
||||
ifstream shared_config(shared_config_path);
|
||||
if (!shared_config.is_open()) {
|
||||
return genError("Could not open shared NGINX configuration file");
|
||||
}
|
||||
|
||||
string shared_config_content((istreambuf_iterator<char>(shared_config)), istreambuf_iterator<char>());
|
||||
shared_config.close();
|
||||
|
||||
if (shared_config_content.find(directive) != string::npos) {
|
||||
dbgTrace(D_NGINX_MANAGER) << "Shared directive " << directive << " already loaded";
|
||||
return {};
|
||||
}
|
||||
|
||||
ofstream new_shared_config(shared_config_path, ios::app);
|
||||
if (!new_shared_config.is_open()) {
|
||||
return genError("Could not open shared NGINX configuration file");
|
||||
}
|
||||
|
||||
dbgTrace(D_NGINX_MANAGER) << "Adding shared directive " << directive;
|
||||
new_shared_config << directive << "\n";
|
||||
new_shared_config.close();
|
||||
|
||||
auto validation = NginxUtils::validateNginxConf(central_nginx_conf_path);
|
||||
if (!validation.ok()) {
|
||||
if (!NGEN::Filesystem::copyFile(shared_config_path + ".bak", shared_config_path, true)) {
|
||||
return genError("Could not restore the shared NGINX configuration file");
|
||||
}
|
||||
return genError("Could not validate shared NGINX configuration file. Error: " + validation.getErr());
|
||||
}
|
||||
|
||||
return {};
|
||||
}
|
||||
|
||||
Maybe<void>
|
||||
loadSharedConfig()
|
||||
{
|
||||
dbgFlow(D_NGINX_MANAGER) << "Loading shared configuration into the servers";
|
||||
|
||||
ofstream shared_config(shared_config_path);
|
||||
if (!shared_config.is_open()) {
|
||||
return genError("Could not create shared NGINX configuration file");
|
||||
}
|
||||
shared_config.close();
|
||||
|
||||
string shared_config_directive = "include " + shared_config_path + ";\n";
|
||||
boost::regex server_regex("server\\s*\\{");
|
||||
nginx_conf_content = NGEN::Regex::regexReplace(
|
||||
__FILE__,
|
||||
__LINE__,
|
||||
nginx_conf_content,
|
||||
server_regex,
|
||||
"server {\n" + shared_config_directive
|
||||
);
|
||||
|
||||
ofstream nginx_conf_file(central_nginx_conf_path);
|
||||
if (!nginx_conf_file.is_open()) {
|
||||
return genError("Could not open a temporary central NGINX configuration file");
|
||||
}
|
||||
nginx_conf_file << nginx_conf_content;
|
||||
nginx_conf_file.close();
|
||||
|
||||
auto validation = NginxUtils::validateNginxConf(central_nginx_conf_path);
|
||||
if (!validation.ok()) {
|
||||
return genError("Could not validate central NGINX configuration file. Error: " + validation.getErr());
|
||||
}
|
||||
|
||||
return {};
|
||||
}
|
||||
|
||||
Maybe<void>
|
||||
configureSyslog()
|
||||
{
|
||||
if (!getProfileAgentSettingWithDefault<bool>(true, "centralNginxManagement.syslogEnabled")) {
|
||||
dbgTrace(D_NGINX_MANAGER) << "Syslog is disabled via settings";
|
||||
return {};
|
||||
}
|
||||
|
||||
string syslog_directive = "error_log syslog:server=127.0.0.1:1514 warn;";
|
||||
auto load_shared_directive_result = loadSharedDirective(syslog_directive);
|
||||
if (!load_shared_directive_result.ok()) {
|
||||
return genError("Could not configure syslog directive, error: " + load_shared_directive_result.getErr());
|
||||
}
|
||||
|
||||
return {};
|
||||
}
|
||||
|
||||
Maybe<void>
|
||||
saveBaseCentralNginxConf()
|
||||
{
|
||||
ofstream central_nginx_conf_base_file(central_nginx_conf_path + ".base");
|
||||
if (!central_nginx_conf_base_file.is_open()) {
|
||||
return genError("Could not open a temporary central NGINX configuration file");
|
||||
}
|
||||
central_nginx_conf_base_file << nginx_conf_content;
|
||||
central_nginx_conf_base_file.close();
|
||||
|
||||
return {};
|
||||
}
|
||||
|
||||
void
|
||||
configureCentralNginx()
|
||||
{
|
||||
loadAttachmentModule();
|
||||
auto save_base_nginx_conf = saveBaseCentralNginxConf();
|
||||
if (!save_base_nginx_conf.ok()) {
|
||||
dbgWarning(D_NGINX_MANAGER)
|
||||
<< "Could not save base NGINX configuration. Error: "
|
||||
<< save_base_nginx_conf.getErr();
|
||||
return;
|
||||
}
|
||||
|
||||
string nginx_conf_content_backup = nginx_conf_content;
|
||||
auto shared_config_result = loadSharedConfig();
|
||||
if (!shared_config_result.ok()) {
|
||||
dbgWarning(D_NGINX_MANAGER)
|
||||
<< "Could not load shared configuration. Error: "
|
||||
<< shared_config_result.getErr();
|
||||
nginx_conf_content = nginx_conf_content_backup;
|
||||
return;
|
||||
}
|
||||
|
||||
auto syslog_result = configureSyslog();
|
||||
if (!syslog_result.ok()) {
|
||||
dbgWarning(D_NGINX_MANAGER) << "Could not configure syslog. Error: " << syslog_result.getErr();
|
||||
}
|
||||
}
|
||||
|
||||
string file_id;
|
||||
string file_name;
|
||||
string nginx_conf_content;
|
||||
string central_nginx_conf_path;
|
||||
string shared_config_path;
|
||||
};
|
||||
|
||||
class CentralNginxManager::Impl
|
||||
{
|
||||
public:
|
||||
void
|
||||
init()
|
||||
{
|
||||
dbgInfo(D_NGINX_MANAGER) << "Starting Central NGINX Manager";
|
||||
|
||||
string main_nginx_conf_path = NginxUtils::getMainNginxConfPath();
|
||||
if (
|
||||
NGEN::Filesystem::exists(main_nginx_conf_path)
|
||||
&& !NGEN::Filesystem::exists(main_nginx_conf_path + ".orig")
|
||||
) {
|
||||
dbgInfo(D_NGINX_MANAGER) << "Creating a backup of the original main NGINX configuration file";
|
||||
NGEN::Filesystem::copyFile(main_nginx_conf_path, main_nginx_conf_path + ".orig", true);
|
||||
}
|
||||
|
||||
i_mainloop = Singleton::Consume<I_MainLoop>::by<CentralNginxManager>();
|
||||
if (!lets_encrypt_listener.init()) {
|
||||
dbgWarning(D_NGINX_MANAGER) << "Could not start Lets Encrypt Listener, scheduling retry";
|
||||
i_mainloop->addOneTimeRoutine(
|
||||
I_MainLoop::RoutineType::System,
|
||||
[this] ()
|
||||
{
|
||||
while(!lets_encrypt_listener.init()) {
|
||||
dbgWarning(D_NGINX_MANAGER) << "Could not start Lets Encrypt Listener, will retry";
|
||||
i_mainloop->yield(chrono::seconds(5));
|
||||
}
|
||||
},
|
||||
"Lets Encrypt Listener initializer",
|
||||
false
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
void
|
||||
loadPolicy()
|
||||
{
|
||||
auto central_nginx_config = getSetting<vector<CentralNginxConfig>>("centralNginxManagement");
|
||||
if (!central_nginx_config.ok() || central_nginx_config.unpack().empty()) {
|
||||
dbgWarning(D_NGINX_MANAGER)
|
||||
<< "Could not load Central NGINX Management settings. Error: "
|
||||
<< central_nginx_config.getErr();
|
||||
return;
|
||||
}
|
||||
|
||||
auto &config = central_nginx_config.unpack().front();
|
||||
if (config.getFileContent().empty()) {
|
||||
dbgWarning(D_NGINX_MANAGER) << "Empty NGINX configuration file";
|
||||
return;
|
||||
}
|
||||
|
||||
dbgTrace(D_NGINX_MANAGER)
|
||||
<< "Handling Central NGINX Management settings: "
|
||||
<< config.getFileId()
|
||||
<< ", "
|
||||
<< config.getFileName()
|
||||
<< ", "
|
||||
<< config.getFileContent();
|
||||
|
||||
string central_nginx_conf_path = config.getCentralNginxConfPath();
|
||||
ofstream central_nginx_conf_file(central_nginx_conf_path);
|
||||
if (!central_nginx_conf_file.is_open()) {
|
||||
dbgWarning(D_NGINX_MANAGER)
|
||||
<< "Could not open central NGINX configuration file: "
|
||||
<< central_nginx_conf_path;
|
||||
return;
|
||||
}
|
||||
central_nginx_conf_file << config.getFileContent();
|
||||
central_nginx_conf_file.close();
|
||||
|
||||
auto validation_result = NginxUtils::validateNginxConf(central_nginx_conf_path);
|
||||
if (!validation_result.ok()) {
|
||||
dbgWarning(D_NGINX_MANAGER)
|
||||
<< "Could not validate central NGINX configuration file. Error: "
|
||||
<< validation_result.getErr();
|
||||
logError(validation_result.getErr());
|
||||
return;
|
||||
}
|
||||
|
||||
dbgTrace(D_NGINX_MANAGER) << "Validated central NGINX configuration file";
|
||||
|
||||
auto reload_result = NginxUtils::reloadNginx(central_nginx_conf_path);
|
||||
if (!reload_result.ok()) {
|
||||
dbgWarning(D_NGINX_MANAGER)
|
||||
<< "Could not reload central NGINX configuration. Error: "
|
||||
<< reload_result.getErr();
|
||||
logError("Could not reload central NGINX configuration. Error: " + reload_result.getErr());
|
||||
return;
|
||||
}
|
||||
}
|
||||
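loadPolicy above follows a strict write-validate-reload order: the managed configuration is written to disk, tested, and only a configuration that passed the test triggers an NGINX reload. The sketch below illustrates that order in isolation; applyNginxConf is an illustrative helper, and shelling out to the nginx binary is only an assumption about what NginxUtils::validateNginxConf and NginxUtils::reloadNginx do internally.

```cpp
#include <cstdlib>
#include <fstream>
#include <string>

// Write the configuration, test it, and reload only if the test passed.
static bool
applyNginxConf(const std::string &conf_path, const std::string &content)
{
    std::ofstream out(conf_path);
    if (!out.is_open()) return false;
    out << content;
    out.close();

    // "nginx -t -c <file>" only tests the configuration; nothing is reloaded yet.
    if (std::system(("nginx -t -c " + conf_path).c_str()) != 0) return false;

    // A configuration that passed the test can be loaded into the running server.
    return std::system(("nginx -c " + conf_path + " -s reload").c_str()) == 0;
}

int main()
{
    // On a host without nginx installed this simply reports failure.
    return applyNginxConf("/tmp/central_nginx_example.conf", "events {}\n") ? 0 : 1;
}
```

Keeping validation as a separate step is why loadPolicy logs and returns on a failed validateNginxConf instead of ever calling reloadNginx with a broken file.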
|
||||
void
|
||||
fini()
|
||||
{
|
||||
string central_nginx_base_path = CentralNginxConfig::getCentralNginxConfPath() + ".base";
|
||||
if (!NGEN::Filesystem::exists(central_nginx_base_path)) {
|
||||
dbgWarning(D_NGINX_MANAGER) << "Could not find base NGINX configuration file: " << central_nginx_base_path;
|
||||
return;
|
||||
}
|
||||
|
||||
NginxUtils::reloadNginx(central_nginx_base_path);
|
||||
}
|
||||
|
||||
private:
|
||||
void
|
||||
logError(const string &error)
|
||||
{
|
||||
LogGen log(
|
||||
error,
|
||||
ReportIS::Audience::SECURITY,
|
||||
ReportIS::Severity::CRITICAL,
|
||||
ReportIS::Priority::HIGH,
|
||||
ReportIS::Tags::POLICY_INSTALLATION
|
||||
);
|
||||
}
|
||||
|
||||
I_MainLoop *i_mainloop = nullptr;
|
||||
LetsEncryptListener lets_encrypt_listener;
|
||||
};
|
||||
|
||||
CentralNginxManager::CentralNginxManager()
|
||||
:
|
||||
Component("Central NGINX Manager"),
|
||||
pimpl(make_unique<CentralNginxManager::Impl>()) {}
|
||||
|
||||
CentralNginxManager::~CentralNginxManager() {}
|
||||
|
||||
void
|
||||
CentralNginxManager::init()
|
||||
{
|
||||
pimpl->init();
|
||||
}
|
||||
|
||||
void
|
||||
CentralNginxManager::fini()
|
||||
{
|
||||
pimpl->fini();
|
||||
}
|
||||
|
||||
void
|
||||
CentralNginxManager::preload()
|
||||
{
|
||||
registerExpectedSetting<vector<CentralNginxConfig>>("centralNginxManagement");
|
||||
registerExpectedConfiguration<string>("Config Component", "configuration path");
|
||||
registerConfigLoadCb([this]() { pimpl->loadPolicy(); });
|
||||
}
|
||||
@@ -0,0 +1,30 @@
// Copyright (C) 2022 Check Point Software Technologies Ltd. All rights reserved.

// Licensed under the Apache License, Version 2.0 (the "License");
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifndef __LETS_ENCRYPT_HANDLER_H__
#define __LETS_ENCRYPT_HANDLER_H__

#include <string>

#include "maybe_res.h"

class LetsEncryptListener
{
public:
    bool init();

private:
    Maybe<std::string> getChallengeValue(const std::string &uri) const;
};

#endif // __LETS_ENCRYPT_HANDLER_H__
components/security_apps/central_nginx_manager/lets_encrypt_listener.cc (new executable file, 76 lines)
@@ -0,0 +1,76 @@
// Copyright (C) 2022 Check Point Software Technologies Ltd. All rights reserved.

// Licensed under the Apache License, Version 2.0 (the "License");
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "lets_encrypt_listener.h"

#include <string>

#include "central_nginx_manager.h"
#include "debug.h"

using namespace std;

USE_DEBUG_FLAG(D_NGINX_MANAGER);

bool
LetsEncryptListener::init()
{
    dbgInfo(D_NGINX_MANAGER) << "Starting Lets Encrypt Listener";
    return Singleton::Consume<I_RestApi>::by<CentralNginxManager>()->addWildcardGetCall(
        ".well-known/acme-challenge/",
        [&] (const string &uri) -> string
        {
            Maybe<string> maybe_challenge_value = getChallengeValue(uri);
            if (!maybe_challenge_value.ok()) {
                dbgWarning(D_NGINX_MANAGER)
                    << "Could not get challenge value for uri: "
                    << uri
                    << ", error: "
                    << maybe_challenge_value.getErr();
                return string{""};
            };

            dbgTrace(D_NGINX_MANAGER) << "Got challenge value: " << maybe_challenge_value.unpack();
            return maybe_challenge_value.unpack();
        }
    );
}

Maybe<string>
LetsEncryptListener::getChallengeValue(const string &uri) const
{
    string challenge_key = uri.substr(uri.find_last_of('/') + 1);
    string api_query = "/api/lets-encrypt-challenge?http_challenge_key=" + challenge_key;

    dbgInfo(D_NGINX_MANAGER) << "Getting challenge value via: " << api_query;

    MessageMetadata md;
    md.insertHeader("X-Tenant-Id", Singleton::Consume<I_AgentDetails>::by<CentralNginxManager>()->getTenantId());
    Maybe<HTTPResponse, HTTPResponse> maybe_http_challenge_value =
        Singleton::Consume<I_Messaging>::by<CentralNginxManager>()->sendSyncMessage(
            HTTPMethod::GET,
            api_query,
            string("{}"),
            MessageCategory::GENERIC,
            md
        );

    if (!maybe_http_challenge_value.ok()) return genError(maybe_http_challenge_value.getErr().getBody());

    string challenge_value = maybe_http_challenge_value.unpack().getBody();
    if (!challenge_value.empty() && challenge_value.front() == '"' && challenge_value.back() == '"') {
        challenge_value = challenge_value.substr(1, challenge_value.size() - 2);
    }

    return challenge_value;
}
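The two string manipulations used above (taking the token after the last '/' of the ACME challenge URI, and stripping the JSON quotes around the returned challenge value) can be exercised on their own. A minimal sketch; extractChallengeKey and stripJsonQuotes are illustrative names, not functions from the repository.

```cpp
#include <cassert>
#include <string>

// Take the path segment after the last '/', e.g. the ACME token in
// "/.well-known/acme-challenge/<token>".
static std::string
extractChallengeKey(const std::string &uri)
{
    return uri.substr(uri.find_last_of('/') + 1);
}

// The challenge value arrives as a JSON string body; drop the wrapping quotes
// when present, otherwise return the body unchanged.
static std::string
stripJsonQuotes(const std::string &body)
{
    if (body.size() >= 2 && body.front() == '"' && body.back() == '"') {
        return body.substr(1, body.size() - 2);
    }
    return body;
}

int main()
{
    assert(extractChallengeKey("/.well-known/acme-challenge/abc123") == "abc123");
    assert(stripJsonQuotes("\"token.value\"") == "token.value");
    assert(stripJsonQuotes("unquoted") == "unquoted");
    return 0;
}
```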
@@ -88,9 +88,17 @@ public:
|
||||
dbgWarning(D_GEO_FILTER) << "failed to get source ip from env";
|
||||
return EventVerdict(default_action);
|
||||
}
|
||||
|
||||
auto source_ip = convertIpAddrToString(maybe_source_ip.unpack());
|
||||
ip_set.insert(source_ip);
|
||||
|
||||
// saas profile setting
|
||||
bool ignore_source_ip =
|
||||
getProfileAgentSettingWithDefault<bool>(false, "agent.geoProtaction.ignoreSourceIP");
|
||||
if (ignore_source_ip){
|
||||
dbgDebug(D_GEO_FILTER) << "Geo protection ignoring source ip: " << source_ip;
|
||||
} else {
|
||||
ip_set.insert(convertIpAddrToString(maybe_source_ip.unpack()));
|
||||
}
|
||||
|
||||
|
||||
ngx_http_cp_verdict_e exception_verdict = getExceptionVerdict(ip_set);
|
||||
if (exception_verdict != ngx_http_cp_verdict_e::TRAFFIC_VERDICT_IRRELEVANT) {
|
||||
@@ -343,7 +351,7 @@ private:
|
||||
|
||||
auto asset_location = i_geo_location->lookupLocation(maybe_source_ip.unpack());
|
||||
if (!asset_location.ok()) {
|
||||
dbgWarning(D_GEO_FILTER) << "Lookup location failed for source: " <<
|
||||
dbgDebug(D_GEO_FILTER) << "Lookup location failed for source: " <<
|
||||
source <<
|
||||
", Error: " <<
|
||||
asset_location.getErr();
|
||||
|
||||
@@ -336,9 +336,16 @@ public:
|
||||
return metadata.getYear();
|
||||
}
|
||||
|
||||
bool
|
||||
isOk() const
|
||||
{
|
||||
return is_loaded;
|
||||
}
|
||||
|
||||
private:
|
||||
IPSSignatureMetaData metadata;
|
||||
std::shared_ptr<BaseSignature> rule;
|
||||
bool is_loaded;
|
||||
};
|
||||
|
||||
/// \class SignatureAndAction
|
||||
|
||||
@@ -219,10 +219,16 @@ IPSSignatureMetaData::getYear() const
 void
 CompleteSignature::load(cereal::JSONInputArchive &ar)
 {
-    ar(cereal::make_nvp("protectionMetadata", metadata));
-    RuleDetection rule_detection(metadata.getName());
-    ar(cereal::make_nvp("detectionRules", rule_detection));
-    rule = rule_detection.getRule();
+    try {
+        ar(cereal::make_nvp("protectionMetadata", metadata));
+        RuleDetection rule_detection(metadata.getName());
+        ar(cereal::make_nvp("detectionRules", rule_detection));
+        rule = rule_detection.getRule();
+        is_loaded = true;
+    } catch (cereal::Exception &e) {
+        is_loaded = false;
+        dbgWarning(D_IPS) << "Failed to load signature: " << e.what();
+    }
 }
 
 MatchType
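The change above wraps the cereal deserialization in a try/catch so that one malformed signature only marks itself as not loaded instead of failing the whole resource. A minimal sketch of the same load-or-mark-broken pattern, using a stub record rather than the real CompleteSignature and RuleDetection types:

```cpp
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

#include <cereal/archives/json.hpp>
#include <cereal/types/string.hpp>

// Stub with just enough state to show the pattern: a failed load flips
// is_loaded to false instead of propagating the exception.
struct SignatureStub
{
    std::string name;
    bool is_loaded = false;

    void
    load(cereal::JSONInputArchive &ar)
    {
        try {
            ar(cereal::make_nvp("protectionName", name));
            is_loaded = true;
        } catch (const cereal::Exception &e) {
            is_loaded = false;
            std::cerr << "Failed to load signature: " << e.what() << '\n';
        }
    }
};

int main()
{
    std::stringstream good(R"({"protectionName": "Test1"})");
    std::stringstream bad(R"({"wrongKey": "Test2"})");

    SignatureStub ok_sig, broken_sig;
    { cereal::JSONInputArchive ar(good); ok_sig.load(ar); }
    { cereal::JSONInputArchive ar(bad); broken_sig.load(ar); }

    // Broken entries are simply skipped, mirroring the isOk() filter added to
    // IPSSignaturesResource::load further down.
    std::vector<SignatureStub> loaded;
    for (const auto &sig : {ok_sig, broken_sig}) {
        if (sig.is_loaded) loaded.push_back(sig);
    }
    std::cout << "loaded " << loaded.size() << " of 2 signatures\n";
    return 0;
}
```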
@@ -367,7 +373,16 @@ SignatureAndAction::matchSilent(const Buffer &sample) const
|
||||
if (method.ok()) log << LogField("httpMethod", method.unpack());
|
||||
|
||||
auto path = env->get<Buffer>("HTTP_PATH_DECODED");
|
||||
if (path.ok()) log << LogField("httpUriPath", getSubString(path, 1536), LogFieldOption::XORANDB64);
|
||||
if (path.ok()) {
|
||||
log << LogField("httpUriPath", getSubString(path, 1536), LogFieldOption::XORANDB64);
|
||||
} else {
|
||||
auto transaction_path = env->get<string>(HttpTransactionData::uri_path_decoded);
|
||||
if (transaction_path.ok()) {
|
||||
auto uri_path = transaction_path.unpack();
|
||||
auto question_mark = uri_path.find('?');
|
||||
log << LogField("httpUriPath", uri_path.substr(0, question_mark), LogFieldOption::XORANDB64);
|
||||
}
|
||||
}
|
||||
|
||||
auto req_header = ips_state.getTransactionData(IPSCommonTypes::requests_header_for_log);
|
||||
if (req_header.ok()) log << LogField("httpRequestHeaders", getSubString(req_header), LogFieldOption::XORANDB64);
|
||||
@@ -485,13 +500,30 @@ SignatureAndAction::isMatchedPrevent(const Buffer &context_buffer, const set<PMP
|
||||
auto method = env->get<string>(HttpTransactionData::method_ctx);
|
||||
if (method.ok()) log << LogField("httpMethod", method.unpack());
|
||||
uint max_size = getConfigurationWithDefault<uint>(1536, "IPS", "Max Field Size");
|
||||
auto path = env->get<Buffer>("HTTP_PATH_DECODED");
|
||||
if (path.ok() && trigger.isWebLogFieldActive(url_path)) {
|
||||
log << LogField("httpUriPath", getSubString(path, max_size), LogFieldOption::XORANDB64);
|
||||
|
||||
if (trigger.isWebLogFieldActive(url_path)) {
|
||||
auto path = env->get<Buffer>("HTTP_PATH_DECODED");
|
||||
if (path.ok()) {
|
||||
log << LogField("httpUriPath", getSubString(path, max_size), LogFieldOption::XORANDB64);
|
||||
} else {
|
||||
auto transaction_path = env->get<string>(HttpTransactionData::uri_path_decoded);
|
||||
if (transaction_path.ok()) {
|
||||
auto uri_path = transaction_path.unpack();
|
||||
auto question_mark = uri_path.find('?');
|
||||
log << LogField("httpUriPath", uri_path.substr(0, question_mark), LogFieldOption::XORANDB64);
|
||||
}
|
||||
}
|
||||
}
|
||||
auto query = env->get<Buffer>("HTTP_QUERY_DECODED");
|
||||
if (query.ok() && trigger.isWebLogFieldActive(url_query)) {
|
||||
log << LogField("httpUriQuery", getSubString(query, max_size), LogFieldOption::XORANDB64);
|
||||
if (trigger.isWebLogFieldActive(url_query)) {
|
||||
auto query = env->get<Buffer>("HTTP_QUERY_DECODED");
|
||||
if (query.ok()) {
|
||||
log << LogField("httpUriQuery", getSubString(query, max_size), LogFieldOption::XORANDB64);
|
||||
} else {
|
||||
auto transaction_query = env->get<string>(HttpTransactionData::uri_query_decoded);
|
||||
if (transaction_query.ok()) {
|
||||
log << LogField("httpUriQuery", transaction_query.unpack());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
auto res_code = env->get<Buffer>("HTTP_RESPONSE_CODE");
|
||||
@@ -533,7 +565,9 @@ IPSSignaturesResource::load(cereal::JSONInputArchive &ar)
|
||||
|
||||
all_signatures.reserve(sigs.size());
|
||||
for (auto &sig : sigs) {
|
||||
all_signatures.emplace_back(make_shared<CompleteSignature>(move(sig)));
|
||||
if (sig.isOk()) {
|
||||
all_signatures.emplace_back(make_shared<CompleteSignature>(move(sig)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -104,6 +104,12 @@ public:
|
||||
cereal::JSONInputArchive ar(ss);
|
||||
high_medium_confidance_signatures.load(ar);
|
||||
}
|
||||
{
|
||||
stringstream ss;
|
||||
ss << "[" << signature_performance_high << ", " << signature_broken << "]";
|
||||
cereal::JSONInputArchive ar(ss);
|
||||
single_broken_signature.load(ar);
|
||||
}
|
||||
}
|
||||
|
||||
~SignatureTest()
|
||||
@@ -250,6 +256,7 @@ public:
|
||||
IPSSignaturesResource performance_signatures1;
|
||||
IPSSignaturesResource performance_signatures2;
|
||||
IPSSignaturesResource performance_signatures3;
|
||||
IPSSignaturesResource single_broken_signature;
|
||||
NiceMock<MockTable> table;
|
||||
MockAgg mock_agg;
|
||||
|
||||
@@ -483,6 +490,26 @@ private:
|
||||
"\"context\": [\"HTTP_REQUEST_BODY\", \"HTTP_RESPONSE_BODY\"]"
|
||||
"}"
|
||||
"}";
|
||||
|
||||
string signature_broken =
|
||||
"{"
|
||||
"\"protectionMetadata\": {"
|
||||
"\"protectionName\": \"BrokenTest\","
|
||||
"\"maintrainId\": \"101\","
|
||||
"\"severity\": \"Medium High\","
|
||||
"\"confidenceLevel\": \"Low\","
|
||||
"\"performanceImpact\": \"High\","
|
||||
"\"lastUpdate\": \"20210420\","
|
||||
"\"tags\": [],"
|
||||
"\"cveList\": []"
|
||||
"},"
|
||||
"\"detectionRules\": {"
|
||||
"\"type\": \"simple\","
|
||||
"\"SSM\": \"\","
|
||||
"\"keywosrds\": \"data: \\\"www\\\";\","
|
||||
"\"context\": [\"HTTP_REQUEST_BODY\", \"HTTP_RESPONSE_BODY\"]"
|
||||
"}"
|
||||
"}";
|
||||
};
|
||||
|
||||
TEST_F(SignatureTest, basic_load_of_signatures)
|
||||
@@ -665,3 +692,14 @@ TEST_F(SignatureTest, high_confidance_signatures_matching)
|
||||
expectLog("\"protectionId\": \"Test4\"", "\"matchedSignatureConfidence\": \"Medium\"");
|
||||
EXPECT_FALSE(checkData("mmm"));
|
||||
}
|
||||
|
||||
TEST_F(SignatureTest, broken_signature)
|
||||
{
|
||||
load(single_broken_signature, "Low or above", "Low");
|
||||
EXPECT_FALSE(checkData("ggg"));
|
||||
|
||||
expectLog("\"matchedSignaturePerformance\": \"High\"");
|
||||
EXPECT_TRUE(checkData("fff"));
|
||||
|
||||
EXPECT_FALSE(checkData("www"));
|
||||
}
|
||||
|
||||
@@ -22,4 +22,5 @@ add_library(local_policy_mgmt_gen
|
||||
access_control_practice.cc
|
||||
configmaps.cc
|
||||
reverse_proxy_section.cc
|
||||
policy_activation_data.cc
|
||||
)
|
||||
|
||||
@@ -48,7 +48,7 @@ public:
|
||||
void init();
|
||||
|
||||
std::tuple<std::map<std::string, AppsecLinuxPolicy>, std::map<std::string, V1beta2AppsecLinuxPolicy>>
|
||||
createAppsecPoliciesFromIngresses();
|
||||
createAppsecPolicies();
|
||||
void getClusterId() const;
|
||||
|
||||
private:
|
||||
@@ -101,12 +101,18 @@ private:
|
||||
) const;
|
||||
|
||||
template<class T, class K>
|
||||
void createPolicy(
|
||||
void createPolicyFromIngress(
|
||||
T &appsec_policy,
|
||||
std::map<std::string, T> &policies,
|
||||
std::map<AnnotationKeys, std::string> &annotations_values,
|
||||
const SingleIngressData &item) const;
|
||||
|
||||
template<class T, class K>
|
||||
void createPolicyFromActivation(
|
||||
T &appsec_policy,
|
||||
std::map<std::string, T> &policies,
|
||||
const EnabledPolicy &policy) const;
|
||||
|
||||
std::tuple<Maybe<AppsecLinuxPolicy>, Maybe<V1beta2AppsecLinuxPolicy>> createAppsecPolicyK8s(
|
||||
const std::string &policy_name,
|
||||
const std::string &ingress_mode
|
||||
|
||||
@@ -0,0 +1,90 @@
|
||||
// Copyright (C) 2022 Check Point Software Technologies Ltd. All rights reserved.
|
||||
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#ifndef __POLICY_ACTIVATION_DATA_H__
|
||||
#define __POLICY_ACTIVATION_DATA_H__
|
||||
|
||||
#include <vector>
|
||||
#include <map>
|
||||
|
||||
#include "config.h"
|
||||
#include "debug.h"
|
||||
#include "rest.h"
|
||||
#include "cereal/archives/json.hpp"
|
||||
#include <cereal/types/map.hpp>
|
||||
#include "customized_cereal_map.h"
|
||||
|
||||
#include "local_policy_common.h"
|
||||
|
||||
class PolicyActivationMetadata
|
||||
{
|
||||
public:
|
||||
void load(cereal::JSONInputArchive &archive_in);
|
||||
|
||||
private:
|
||||
std::string name;
|
||||
};
|
||||
|
||||
class EnabledPolicy
|
||||
{
|
||||
public:
|
||||
void load(cereal::JSONInputArchive &archive_in);
|
||||
|
||||
const std::string & getName() const;
|
||||
const std::vector<std::string> & getHosts() const;
|
||||
|
||||
private:
|
||||
std::string name;
|
||||
std::string mode;
|
||||
std::vector<std::string> hosts;
|
||||
};
|
||||
|
||||
class PolicyActivationSpec
|
||||
{
|
||||
public:
|
||||
void load(cereal::JSONInputArchive &archive_in);
|
||||
|
||||
const std::vector<EnabledPolicy> & getPolicies() const;
|
||||
|
||||
private:
|
||||
std::string appsec_class_name;
|
||||
std::vector<EnabledPolicy> policies;
|
||||
};
|
||||
|
||||
class SinglePolicyActivationData
|
||||
{
|
||||
public:
|
||||
void load(cereal::JSONInputArchive &archive_in);
|
||||
|
||||
const PolicyActivationSpec & getSpec() const;
|
||||
|
||||
private:
|
||||
std::string api_version;
|
||||
std::string kind;
|
||||
PolicyActivationMetadata metadata;
|
||||
PolicyActivationSpec spec;
|
||||
};
|
||||
|
||||
class PolicyActivationData : public ClientRest
|
||||
{
|
||||
public:
|
||||
bool loadJson(const std::string &json);
|
||||
|
||||
const std::vector<SinglePolicyActivationData> & getItems() const;
|
||||
|
||||
private:
|
||||
std::string api_version;
|
||||
std::vector<SinglePolicyActivationData> items;
|
||||
};
|
||||
|
||||
#endif // __POLICY_ACTIVATION_DATA_H__
|
||||
@@ -32,6 +32,7 @@
|
||||
#include "i_messaging.h"
|
||||
#include "appsec_practice_section.h"
|
||||
#include "ingress_data.h"
|
||||
#include "policy_activation_data.h"
|
||||
#include "settings_section.h"
|
||||
#include "triggers_section.h"
|
||||
#include "local_policy_common.h"
|
||||
|
||||
@@ -577,7 +577,7 @@ K8sPolicyUtils::createAppsecPolicyK8s(const string &policy_name, const string &i
|
||||
|
||||
template<class T, class K>
|
||||
void
|
||||
K8sPolicyUtils::createPolicy(
|
||||
K8sPolicyUtils::createPolicyFromIngress(
|
||||
T &appsec_policy,
|
||||
map<std::string, T> &policies,
|
||||
map<AnnotationKeys, string> &annotations_values,
|
||||
@@ -615,10 +615,35 @@ K8sPolicyUtils::createPolicy(
|
||||
}
|
||||
}
|
||||
|
||||
std::tuple<map<string, AppsecLinuxPolicy>, map<string, V1beta2AppsecLinuxPolicy>>
|
||||
K8sPolicyUtils::createAppsecPoliciesFromIngresses()
|
||||
template<class T, class K>
|
||||
void
|
||||
K8sPolicyUtils::createPolicyFromActivation(
|
||||
T &appsec_policy,
|
||||
map<std::string, T> &policies,
|
||||
const EnabledPolicy &policy) const
|
||||
{
|
||||
dbgFlow(D_LOCAL_POLICY) << "Getting all policy object from Ingresses";
|
||||
if (policies.find(policy.getName()) == policies.end()) {
|
||||
policies[policy.getName()] = appsec_policy;
|
||||
}
|
||||
auto default_mode = appsec_policy.getAppsecPolicySpec().getDefaultRule().getMode();
|
||||
|
||||
for (const string &host : policy.getHosts()) {
|
||||
if (!appsec_policy.getAppsecPolicySpec().isAssetHostExist(host)) {
|
||||
dbgTrace(D_LOCAL_POLICY)
|
||||
<< "Inserting Host data to the specific asset set:"
|
||||
<< "URL: '"
|
||||
<< host
|
||||
<< "'";
|
||||
K ingress_rule = K(host, default_mode);
|
||||
policies[policy.getName()].addSpecificRule(ingress_rule);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
std::tuple<map<string, AppsecLinuxPolicy>, map<string, V1beta2AppsecLinuxPolicy>>
|
||||
K8sPolicyUtils::createAppsecPolicies()
|
||||
{
|
||||
dbgFlow(D_LOCAL_POLICY) << "Getting all policy object from Ingresses and PolicyActivation";
|
||||
map<string, AppsecLinuxPolicy> v1bet1_policies;
|
||||
map<string, V1beta2AppsecLinuxPolicy> v1bet2_policies;
|
||||
auto maybe_ingress = getObjectFromCluster<IngressData>("/apis/networking.k8s.io/v1/ingresses");
|
||||
@@ -628,7 +653,7 @@ K8sPolicyUtils::createAppsecPoliciesFromIngresses()
|
||||
dbgWarning(D_LOCAL_POLICY)
|
||||
<< "Failed to retrieve K8S Ingress configurations. Error: "
|
||||
<< maybe_ingress.getErr();
|
||||
return make_tuple(v1bet1_policies, v1bet2_policies);
|
||||
maybe_ingress = IngressData{};
|
||||
}
|
||||
|
||||
|
||||
@@ -658,19 +683,50 @@ K8sPolicyUtils::createAppsecPoliciesFromIngresses()
|
||||
|
||||
if (!std::get<0>(maybe_appsec_policy).ok()) {
|
||||
auto appsec_policy=std::get<1>(maybe_appsec_policy).unpack();
|
||||
createPolicy<V1beta2AppsecLinuxPolicy, NewParsedRule>(
|
||||
createPolicyFromIngress<V1beta2AppsecLinuxPolicy, NewParsedRule>(
|
||||
appsec_policy,
|
||||
v1bet2_policies,
|
||||
annotations_values,
|
||||
item);
|
||||
} else {
|
||||
auto appsec_policy=std::get<0>(maybe_appsec_policy).unpack();
|
||||
createPolicy<AppsecLinuxPolicy, ParsedRule>(
|
||||
createPolicyFromIngress<AppsecLinuxPolicy, ParsedRule>(
|
||||
appsec_policy,
|
||||
v1bet1_policies,
|
||||
annotations_values,
|
||||
item);
|
||||
}
|
||||
}
|
||||
|
||||
auto maybe_policy_activation =
|
||||
getObjectFromCluster<PolicyActivationData>("/apis/openappsec.io/v1beta2/policyactivations");
|
||||
|
||||
if (!maybe_policy_activation.ok()) {
|
||||
dbgWarning(D_LOCAL_POLICY)
|
||||
<< "Failed to retrieve K8S PolicyActivation configurations. Error: "
|
||||
<< maybe_policy_activation.getErr();
|
||||
return make_tuple(v1bet1_policies, v1bet2_policies);
|
||||
}
|
||||
|
||||
PolicyActivationData policy_activation = maybe_policy_activation.unpack();
|
||||
for (const SinglePolicyActivationData &item : policy_activation.getItems()) {
|
||||
for (const auto &policy : item.getSpec().getPolicies()) {
|
||||
auto maybe_appsec_policy = createAppsecPolicyK8s(policy.getName(), "");
|
||||
|
||||
if (!std::get<1>(maybe_appsec_policy).ok()) {
|
||||
dbgWarning(D_LOCAL_POLICY)
|
||||
<< "Failed to create appsec policy. v1beta2 Error: "
|
||||
<< std::get<1>(maybe_appsec_policy).getErr();
|
||||
continue;
|
||||
} else {
|
||||
auto appsec_policy=std::get<1>(maybe_appsec_policy).unpack();
|
||||
createPolicyFromActivation<V1beta2AppsecLinuxPolicy, NewParsedRule>(
|
||||
appsec_policy,
|
||||
v1bet2_policies,
|
||||
policy);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return make_tuple(v1bet1_policies, v1bet2_policies);
|
||||
}
|
||||
|
||||
@@ -36,6 +36,7 @@
|
||||
#include "customized_cereal_map.h"
|
||||
#include "include/appsec_practice_section.h"
|
||||
#include "include/ingress_data.h"
|
||||
#include "include/policy_activation_data.h"
|
||||
#include "include/settings_section.h"
|
||||
#include "include/triggers_section.h"
|
||||
#include "include/local_policy_common.h"
|
||||
@@ -85,7 +86,7 @@ public:
|
||||
K8sPolicyUtils k8s_policy_utils;
|
||||
k8s_policy_utils.init();
|
||||
|
||||
auto appsec_policies = k8s_policy_utils.createAppsecPoliciesFromIngresses();
|
||||
auto appsec_policies = k8s_policy_utils.createAppsecPolicies();
|
||||
if (!std::get<0>(appsec_policies).empty()) {
|
||||
return policy_maker_utils.proccesMultipleAppsecPolicies<AppsecLinuxPolicy, ParsedRule>(
|
||||
std::get<0>(appsec_policies),
|
||||
|
||||
components/security_apps/local_policy_mgmt_gen/policy_activation_data.cc (new executable file, 116 lines)
@@ -0,0 +1,116 @@
|
||||
// Copyright (C) 2022 Check Point Software Technologies Ltd. All rights reserved.
|
||||
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#include "policy_activation_data.h"
|
||||
#include "customized_cereal_map.h"
|
||||
|
||||
using namespace std;
|
||||
|
||||
USE_DEBUG_FLAG(D_LOCAL_POLICY);
|
||||
|
||||
static const set<string> valid_modes = {
|
||||
"prevent-learn",
|
||||
"detect-learn",
|
||||
"prevent",
|
||||
"detect",
|
||||
"inactive"
|
||||
};
|
||||
|
||||
void
|
||||
PolicyActivationMetadata::load(cereal::JSONInputArchive &archive_in)
|
||||
{
|
||||
dbgTrace(D_LOCAL_POLICY) << "PolicyActivationMetadata load";
|
||||
parseAppsecJSONKey<string>("name", name, archive_in);
|
||||
}
|
||||
|
||||
void
|
||||
EnabledPolicy::load(cereal::JSONInputArchive &archive_in)
|
||||
{
|
||||
dbgTrace(D_LOCAL_POLICY) << "Loading policyActivation enabled policy";
|
||||
parseMandatoryAppsecJSONKey<vector<string>>("hosts", hosts, archive_in);
|
||||
parseAppsecJSONKey<string>("name", name, archive_in);
|
||||
parseAppsecJSONKey<string>("mode", mode, archive_in, "detect");
|
||||
if (valid_modes.count(mode) == 0) {
|
||||
dbgWarning(D_LOCAL_POLICY) << "AppSec policy activation mode invalid: " << mode;
|
||||
mode = "detect";
|
||||
}
|
||||
}
|
||||
|
||||
const string &
|
||||
EnabledPolicy::getName() const
|
||||
{
|
||||
return name;
|
||||
}
|
||||
|
||||
const vector<string> &
|
||||
EnabledPolicy::getHosts() const
|
||||
{
|
||||
return hosts;
|
||||
}
|
||||
|
||||
void
|
||||
PolicyActivationSpec::load(cereal::JSONInputArchive &archive_in)
|
||||
{
|
||||
dbgTrace(D_LOCAL_POLICY) << "PolicyActivationSpec load";
|
||||
parseAppsecJSONKey<string>("appsecClassName", appsec_class_name, archive_in);
|
||||
parseMandatoryAppsecJSONKey<vector<EnabledPolicy>>("enabledPolicies", policies, archive_in);
|
||||
}
|
||||
|
||||
const vector<EnabledPolicy> &
|
||||
PolicyActivationSpec::getPolicies() const
|
||||
{
|
||||
return policies;
|
||||
}
|
||||
|
||||
void
|
||||
SinglePolicyActivationData::load(cereal::JSONInputArchive &archive_in)
|
||||
{
|
||||
dbgTrace(D_LOCAL_POLICY) << "Loading single policy activation data";
|
||||
parseAppsecJSONKey<string>("apiVersion", api_version, archive_in);
|
||||
parseAppsecJSONKey<string>("kind", kind, archive_in);
|
||||
parseAppsecJSONKey<PolicyActivationMetadata>("metadata", metadata, archive_in);
|
||||
parseAppsecJSONKey<PolicyActivationSpec>("spec", spec, archive_in);
|
||||
}
|
||||
|
||||
const PolicyActivationSpec &
|
||||
SinglePolicyActivationData::getSpec() const
|
||||
{
|
||||
return spec;
|
||||
}
|
||||
|
||||
bool
|
||||
PolicyActivationData::loadJson(const string &json)
|
||||
{
|
||||
string modified_json = json;
|
||||
modified_json.pop_back();
|
||||
stringstream in;
|
||||
in.str(modified_json);
|
||||
dbgTrace(D_LOCAL_POLICY) << "Loading policy activations data";
|
||||
try {
|
||||
cereal::JSONInputArchive in_ar(in);
|
||||
in_ar(
|
||||
cereal::make_nvp("apiVersion", api_version),
|
||||
cereal::make_nvp("items", items)
|
||||
);
|
||||
} catch (cereal::Exception &e) {
|
||||
dbgError(D_LOCAL_POLICY) << "Failed to load policy activations data JSON. Error: " << e.what();
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
const vector<SinglePolicyActivationData> &
|
||||
PolicyActivationData::getItems() const
|
||||
{
|
||||
return items;
|
||||
}
|
||||
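EnabledPolicy::load above accepts only a fixed set of enforcement modes and silently falls back to "detect" for anything else. The same validation in isolation; normalizeMode is an illustrative name.

```cpp
#include <cassert>
#include <set>
#include <string>

// Mirror of the mode check in EnabledPolicy::load: unknown modes fall back to "detect".
static const std::set<std::string> valid_modes = {
    "prevent-learn", "detect-learn", "prevent", "detect", "inactive"
};

static std::string
normalizeMode(const std::string &mode)
{
    return valid_modes.count(mode) ? mode : "detect";
}

int main()
{
    assert(normalizeMode("prevent") == "prevent");
    assert(normalizeMode("block") == "detect");  // invalid input falls back
    return 0;
}
```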
@@ -14,7 +14,6 @@ add_subdirectory(details_resolver)
|
||||
add_subdirectory(health_check)
|
||||
add_subdirectory(health_check_manager)
|
||||
add_subdirectory(updates_process_reporter)
|
||||
add_subdirectory(env_details)
|
||||
add_subdirectory(external_sdk_server)
|
||||
|
||||
#add_subdirectory(orchestration_ut)
|
||||
|
||||
@@ -350,7 +350,7 @@ DetailsResolver::Impl::readCloudMetadata()
|
||||
}
|
||||
|
||||
if (!cloud_metadata.ok()) {
|
||||
dbgWarning(D_ORCHESTRATOR) << cloud_metadata.getErr();
|
||||
dbgDebug(D_ORCHESTRATOR) << cloud_metadata.getErr();
|
||||
return genError("Failed to fetch cloud metadata");
|
||||
}
|
||||
|
||||
|
||||
@@ -18,6 +18,8 @@
|
||||
#include <regex>
|
||||
#include <boost/regex.hpp>
|
||||
#include <boost/algorithm/string.hpp>
|
||||
#include <cereal/external/rapidjson/document.h>
|
||||
#include <cereal/external/rapidjson/filereadstream.h>
|
||||
|
||||
#if defined(gaia)
|
||||
|
||||
@@ -100,6 +102,14 @@ checkIsInstallHorizonTelemetrySucceeded(const string &command_output)
|
||||
return command_output;
|
||||
}
|
||||
|
||||
Maybe<string>
|
||||
getOtlpAgentGaiaOsRole(const string &command_output)
|
||||
{
|
||||
if (command_output == "" ) return string("-1");
|
||||
|
||||
return command_output;
|
||||
}
|
||||
|
||||
Maybe<string>
|
||||
getQUID(const string &command_output)
|
||||
{
|
||||
@@ -111,6 +121,13 @@ getQUID(const string &command_output)
|
||||
return command_output;
|
||||
}
|
||||
|
||||
Maybe<string>
|
||||
getIsAiopsRunning(const string &command_output)
|
||||
{
|
||||
if (command_output == "" ) return string("false");
|
||||
|
||||
return command_output;
|
||||
}
|
||||
|
||||
Maybe<string>
|
||||
checkHasSDWan(const string &command_output)
|
||||
@@ -186,6 +203,24 @@ getMgmtObjAttr(shared_ptr<istream> file_stream, const string &attr)
|
||||
return genError("Object attribute was not found. Attr: " + attr);
|
||||
}
|
||||
|
||||
Maybe<string>
|
||||
getAttrFromCpsdwanGetDataJson(const string &attr)
|
||||
{
|
||||
static const std::string get_data_json_path = "/tmp/cpsdwan_getdata_orch.json";
|
||||
std::ifstream ifs(get_data_json_path);
|
||||
if (ifs.is_open()) {
|
||||
rapidjson::IStreamWrapper isw(ifs);
|
||||
rapidjson::Document document;
|
||||
document.ParseStream(isw);
|
||||
|
||||
if (!document.HasParseError() && document.HasMember(attr.c_str()) && document[attr.c_str()].IsString()) {
|
||||
return string(document[attr.c_str()].GetString());
|
||||
}
|
||||
}
|
||||
|
||||
return genError("Attribute " + attr + " was not found in " + get_data_json_path);
|
||||
}
|
||||
|
||||
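getAttrFromCpsdwanGetDataJson above is a thin "parse the JSON, return the attribute if it is a string" helper. The sketch below shows the same rapidjson lookup against an in-memory document; it uses the standalone rapidjson headers rather than the cereal-bundled copy the agent includes, and getStringAttr is an illustrative name.

```cpp
#include <iostream>
#include <string>

#include <rapidjson/document.h>

// Return the string attribute if present and of the right type, else a fallback.
static std::string
getStringAttr(const std::string &json, const std::string &attr, const std::string &fallback)
{
    rapidjson::Document doc;
    doc.Parse(json.c_str());
    if (!doc.HasParseError() && doc.HasMember(attr.c_str()) && doc[attr.c_str()].IsString()) {
        return doc[attr.c_str()].GetString();
    }
    return fallback;
}

int main()
{
    std::string data = R"({"uuid": "1234-abcd", "name": "gw-1"})";
    std::cout << getStringAttr(data, "uuid", "") << '\n';        // 1234-abcd
    std::cout << getStringAttr(data, "missing", "n/a") << '\n';  // n/a
    return 0;
}
```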
Maybe<string>
|
||||
getMgmtObjUid(const string &command_output)
|
||||
{
|
||||
@@ -193,6 +228,11 @@ getMgmtObjUid(const string &command_output)
|
||||
return command_output;
|
||||
}
|
||||
|
||||
Maybe<string> obj_uuid = getAttrFromCpsdwanGetDataJson("uuid");
|
||||
if (obj_uuid.ok()) {
|
||||
return obj_uuid.unpack();
|
||||
}
|
||||
|
||||
static const string obj_path = (getenv("FWDIR") ? string(getenv("FWDIR")) : "") + "/database/myown.C";
|
||||
auto file_stream = std::make_shared<std::ifstream>(obj_path);
|
||||
if (!file_stream->is_open()) {
|
||||
@@ -310,7 +350,12 @@ getSmbObjectName(const string &command_output)
|
||||
if (command_output.empty() || command_output[0] != centrally_managed_comd_output) {
|
||||
return genError("Object name was not found");
|
||||
}
|
||||
|
||||
|
||||
Maybe<string> obj_name = getAttrFromCpsdwanGetDataJson("name");
|
||||
if (obj_name.ok()) {
|
||||
return obj_name.unpack();
|
||||
}
|
||||
|
||||
static const string obj_path = (getenv("FWDIR") ? string(getenv("FWDIR")) : "") + "/database/myown.C";
|
||||
auto ifs = std::make_shared<std::ifstream>(obj_path);
|
||||
if (!ifs->is_open()) {
|
||||
|
||||
@@ -29,7 +29,7 @@
|
||||
// shell command execution output as its input
|
||||
|
||||
#ifdef SHELL_PRE_CMD
|
||||
#if defined(gaia) || defined(smb)
|
||||
#if defined(gaia) || defined(smb) || defined(smb_thx_v3) || defined(smb_sve_v2) || defined(smb_mrv_v1)
|
||||
SHELL_PRE_CMD("read sdwan data",
|
||||
"(cpsdwan get_data > /tmp/cpsdwan_getdata_orch.json~) "
|
||||
"&& (mv /tmp/cpsdwan_getdata_orch.json~ /tmp/cpsdwan_getdata_orch.json)")
|
||||
@@ -40,7 +40,7 @@ SHELL_PRE_CMD("gunzip local.cfg", "gunzip -c $FWDIR/state/local/FW1/local.cfg.gz
|
||||
#endif
|
||||
|
||||
#ifdef SHELL_CMD_HANDLER
|
||||
#if defined(gaia) || defined(smb)
|
||||
#if defined(gaia) || defined(smb) || defined(smb_thx_v3) || defined(smb_sve_v2) || defined(smb_mrv_v1)
|
||||
SHELL_CMD_HANDLER("cpProductIntegrationMgmtObjectType", "cpprod_util CPPROD_IsMgmtMachine", getMgmtObjType)
|
||||
SHELL_CMD_HANDLER(
|
||||
"cpProductIntegrationMgmtObjectUid",
|
||||
@@ -51,6 +51,14 @@ SHELL_CMD_HANDLER("prerequisitesForHorizonTelemetry",
|
||||
"FS_PATH=<FILESYSTEM-PREFIX>; [ -f ${FS_PATH}/cp-nano-horizon-telemetry-prerequisites.log ] "
|
||||
"&& head -1 ${FS_PATH}/cp-nano-horizon-telemetry-prerequisites.log || echo ''",
|
||||
checkIsInstallHorizonTelemetrySucceeded)
|
||||
SHELL_CMD_HANDLER(
|
||||
"IS_AIOPS_RUNNING",
|
||||
"FS_PATH=<FILESYSTEM-PREFIX>; "
|
||||
"PID=$(ps auxf | grep -v grep | grep -E ${FS_PATH}.*cp-nano-horizon-telemetry | awk -F' ' '{printf $2}'); "
|
||||
"[ -z \"${PID}\" ] && echo 'false' || echo 'true'",
|
||||
getIsAiopsRunning)
|
||||
#endif
|
||||
#if defined(gaia)
|
||||
SHELL_CMD_HANDLER("GLOBAL_QUID", "[ -d /opt/CPquid ] "
|
||||
"&& python3 /opt/CPquid/Quid_Api.py -i /opt/CPotelcol/quid_api/get_global_id.json | jq -r .message || echo ''",
|
||||
getQUID)
|
||||
@@ -73,6 +81,24 @@ SHELL_CMD_HANDLER("MGMT_QUID", "[ -d /opt/CPquid ] "
|
||||
"&& python3 /opt/CPquid/Quid_Api.py -i "
|
||||
"/opt/CPotelcol/quid_api/get_mgmt_quid.json | jq -r .message[0].MGMT_QUID || echo ''",
|
||||
getQUID)
|
||||
SHELL_CMD_HANDLER("AIOPS_AGENT_ROLE", "[ -d /opt/CPOtlpAgent/custom_scripts ] "
|
||||
"&& ENV_NO_FORMAT=1 /opt/CPOtlpAgent/custom_scripts/agent_role.sh",
|
||||
getOtlpAgentGaiaOsRole)
|
||||
#endif
|
||||
#if defined(smb) || defined(smb_thx_v3) || defined(smb_sve_v2) || defined(smb_mrv_v1)
|
||||
SHELL_CMD_HANDLER("GLOBAL_QUID",
|
||||
"cat $FWDIR/database/myown.C "
|
||||
"| awk -F'[()]' '/:name/ { found=1; next } found && /:uuid/ { uid=tolower($2); print uid; exit }'",
|
||||
getQUID)
|
||||
SHELL_CMD_HANDLER("QUID",
|
||||
"cat $FWDIR/database/myown.C "
|
||||
"| awk -F'[()]' '/:name/ { found=1; next } found && /:uuid/ { uid=tolower($2); print uid; exit }'",
|
||||
getQUID)
|
||||
SHELL_CMD_HANDLER("SMO_QUID", "echo ''", getQUID)
|
||||
SHELL_CMD_HANDLER("MGMT_QUID", "echo ''", getQUID)
|
||||
SHELL_CMD_HANDLER("AIOPS_AGENT_ROLE", "echo 'SMB'", getOtlpAgentGaiaOsRole)
|
||||
#endif
|
||||
#if defined(gaia) || defined(smb) || defined(smb_thx_v3) || defined(smb_sve_v2) || defined(smb_mrv_v1)
|
||||
SHELL_CMD_HANDLER("hasSDWan", "[ -f $FWDIR/bin/sdwan_steering ] && echo '1' || echo '0'", checkHasSDWan)
|
||||
SHELL_CMD_HANDLER(
|
||||
"canUpdateSDWanData",
|
||||
@@ -180,13 +206,12 @@ SHELL_CMD_HANDLER(
|
||||
)
|
||||
SHELL_CMD_HANDLER(
|
||||
"managements",
|
||||
"sed -n '/:masters (/,$p' $FWDIR/database/myself_objects.C |"
|
||||
" sed -e ':a' -e 'N' -e '$!ba' -e 's/\\n//g' -e 's/\t//g' -e 's/ //g' | sed 's/))):.*/)))):/'",
|
||||
"echo 1",
|
||||
extractManagements
|
||||
)
|
||||
#endif //gaia
|
||||
|
||||
#if defined(smb)
|
||||
#if defined(smb) || defined(smb_thx_v3) || defined(smb_sve_v2) || defined(smb_mrv_v1)
|
||||
SHELL_CMD_HANDLER(
|
||||
"cpProductIntegrationMgmtParentObjectName",
|
||||
"jq -r .cluster_name /tmp/cpsdwan_getdata_orch.json",
|
||||
@@ -237,15 +262,13 @@ SHELL_CMD_HANDLER(
|
||||
|
||||
SHELL_CMD_HANDLER(
|
||||
"managements",
|
||||
"sed -n '/:masters (/,$p' /tmp/local.cfg |"
|
||||
" sed -e ':a' -e 'N' -e '$!ba' -e 's/\\n//g' -e 's/\t//g' -e 's/ //g' | sed 's/))):.*/)))):/'",
|
||||
"echo 1",
|
||||
extractManagements
|
||||
)
|
||||
#endif//smb
|
||||
|
||||
SHELL_CMD_OUTPUT("kernel_version", "uname -r")
|
||||
SHELL_CMD_OUTPUT("helloWorld", "cat /tmp/agentHelloWorld 2>/dev/null")
|
||||
SHELL_CMD_OUTPUT("report_timestamp", "date -u +\%s")
|
||||
#endif // SHELL_CMD_OUTPUT
|
||||
|
||||
|
||||
@@ -275,7 +298,7 @@ FILE_CONTENT_HANDLER("AppSecModelVersion", "<FILESYSTEM-PREFIX>/conf/waap/waap.d
|
||||
#endif // FILE_CONTENT_HANDLER
|
||||
|
||||
#ifdef SHELL_POST_CMD
|
||||
#if defined(smb)
|
||||
#if defined(smb) || defined(smb_thx_v3) || defined(smb_sve_v2) || defined(smb_mrv_v1)
|
||||
SHELL_POST_CMD("remove local.cfg", "rm -rf /tmp/local.cfg")
|
||||
#endif //smb
|
||||
#endif
|
||||
|
||||
@@ -266,10 +266,10 @@ private:
|
||||
case OrchestrationStatusFieldType::COUNT : return "Count";
|
||||
}
|
||||
|
||||
dbgAssert(false)
|
||||
dbgAssertOpt(false)
|
||||
<< AlertInfo(AlertTeam::CORE, "orchestration health")
|
||||
<< "Trying to convert unknown orchestration status field to string.";
|
||||
return "";
|
||||
return "Unknown Field";
|
||||
}
|
||||
|
||||
HealthCheckStatus
|
||||
@@ -282,7 +282,7 @@ private:
|
||||
case UpdatesProcessResult::DEGRADED : return HealthCheckStatus::DEGRADED;
|
||||
}
|
||||
|
||||
dbgAssert(false)
|
||||
dbgAssertOpt(false)
|
||||
<< AlertInfo(AlertTeam::CORE, "orchestration health")
|
||||
<< "Trying to convert unknown update process result field to health check status.";
|
||||
return HealthCheckStatus::IGNORED;
|
||||
|
||||
@@ -34,7 +34,9 @@ HybridModeMetric::upon(const HybridModeMetricEvent &)
|
||||
{
|
||||
auto shell_cmd = Singleton::Consume<I_ShellCmd>::by<OrchestrationComp>();
|
||||
auto maybe_cmd_output = shell_cmd->getExecOutput(
|
||||
getFilesystemPathConfig() + "/watchdog/cp-nano-watchdog --restart_count"
|
||||
getFilesystemPathConfig() + "/watchdog/cp-nano-watchdog --restart_count",
|
||||
1000,
|
||||
false
|
||||
);
|
||||
|
||||
// get wd process restart count
|
||||
|
||||
@@ -79,8 +79,8 @@ public:
|
||||
) override;
|
||||
std::string getUpdate(CheckUpdateRequest &request) override;
|
||||
bool shouldApplyPolicy() override;
|
||||
void turnOffApplyPolicyFlag() override;
|
||||
void turnOnApplyPolicyFlag() override;
|
||||
void turnOffApplyLocalPolicyFlag() override;
|
||||
void turnOnApplyLocalPolicyFlag() override;
|
||||
|
||||
std::string getCurrPolicy() override { return curr_policy; }
|
||||
|
||||
@@ -94,7 +94,7 @@ private:
|
||||
std::string curr_version;
|
||||
std::string curr_policy;
|
||||
std::string curr_checksum;
|
||||
bool should_apply_policy;
|
||||
bool should_apply_local_policy;
|
||||
};
|
||||
|
||||
#endif // __DECLARATIVE_POLICY_UTILS_H__
|
||||
|
||||
@@ -22,8 +22,8 @@ public:
|
||||
|
||||
virtual std::string getCurrPolicy() = 0;
|
||||
|
||||
virtual void turnOffApplyPolicyFlag() = 0;
|
||||
virtual void turnOnApplyPolicyFlag() = 0;
|
||||
virtual void turnOffApplyLocalPolicyFlag() = 0;
|
||||
virtual void turnOnApplyLocalPolicyFlag() = 0;
|
||||
|
||||
protected:
|
||||
virtual ~I_DeclarativePolicy() {}
|
||||
|
||||
@@ -429,7 +429,7 @@ public:
|
||||
status.insertServiceSetting(service_name, path);
|
||||
return;
|
||||
case OrchestrationStatusConfigType::MANIFEST:
|
||||
dbgAssert(false)
|
||||
dbgAssertOpt(false)
|
||||
<< AlertInfo(AlertTeam::CORE, "sesrvice configuration")
|
||||
<< "Manifest is not a service configuration file type";
|
||||
break;
|
||||
@@ -438,7 +438,9 @@ public:
|
||||
case OrchestrationStatusConfigType::COUNT:
|
||||
break;
|
||||
}
|
||||
dbgAssert(false) << AlertInfo(AlertTeam::CORE, "sesrvice configuration") << "Unknown configuration file type";
|
||||
dbgAssertOpt(false)
|
||||
<< AlertInfo(AlertTeam::CORE, "service configuration")
|
||||
<< "Unknown configuration file type";
|
||||
}
|
||||
|
||||
void
|
||||
|
||||
@@ -1587,6 +1587,7 @@ private:
|
||||
}
|
||||
|
||||
setDelayedUpgradeTime();
|
||||
|
||||
while (true) {
|
||||
Singleton::Consume<I_Environment>::by<OrchestrationComp>()->startNewTrace(false);
|
||||
if (shouldReportAgentDetailsMetadata()) {
|
||||
@@ -1630,7 +1631,7 @@ private:
|
||||
|
||||
string server_name = getAttribute("registered-server", "registered_server");
|
||||
auto server = TagAndEnumManagement::convertStringToTag(server_name);
|
||||
if (server_name == "'SWAG'") server = Tags::WEB_SERVER_SWAG;
|
||||
if (server_name == "'SWAG'" || server_name == "'SWAG Server'") server = Tags::WEB_SERVER_SWAG;
|
||||
if (server.ok()) tags.insert(*server);
|
||||
|
||||
if (getAttribute("no-setting", "CROWDSEC_ENABLED") == "true") tags.insert(Tags::CROWDSEC);
|
||||
@@ -1695,13 +1696,19 @@ private:
|
||||
auto backup_installation_file = current_installation_file + backup_ext;
|
||||
auto temp_ext = getConfigurationWithDefault<string>("_temp", "orchestration", "Temp file extension");
|
||||
|
||||
dbgAssert(i_orchestration_tools->doesFileExist(backup_installation_file))
|
||||
<< AlertInfo(AlertTeam::CORE, "orchestration backup")
|
||||
<< "There is no backup installation package";
|
||||
if (!i_orchestration_tools->doesFileExist(backup_installation_file)) {
|
||||
dbgAssertOpt(false)
|
||||
<< AlertInfo(AlertTeam::CORE, "orchestration backup")
|
||||
<< "There is no backup installation package";
|
||||
return;
|
||||
}
|
||||
|
||||
dbgAssert(i_orchestration_tools->copyFile(backup_installation_file, current_installation_file))
|
||||
<< AlertInfo(AlertTeam::CORE, "orchestration backup")
|
||||
<< "Failed to copy backup installation package";
|
||||
if (!i_orchestration_tools->copyFile(backup_installation_file, current_installation_file)) {
|
||||
dbgAssertOpt(false)
|
||||
<< AlertInfo(AlertTeam::CORE, "orchestration backup")
|
||||
<< "Failed to copy backup installation package";
|
||||
return;
|
||||
}
|
||||
|
||||
// Copy the backup manifest file to the default manifest file path.
|
||||
auto manifest_file_path = getConfigurationWithDefault<string>(
|
||||
@@ -1716,12 +1723,18 @@ private:
|
||||
|
||||
auto package_handler = Singleton::Consume<I_PackageHandler>::by<OrchestrationComp>();
|
||||
// Install the backup orchestration service installation package.
|
||||
dbgAssert(package_handler->preInstallPackage(service_name, current_installation_file))
|
||||
<< AlertInfo(AlertTeam::CORE, "orchestration backup")
|
||||
<< "Failed to restore from backup, pre install test failed";
|
||||
dbgAssert(package_handler->installPackage(service_name, current_installation_file, true))
|
||||
<< AlertInfo(AlertTeam::CORE, "orchestration backup")
|
||||
<< "Failed to restore from backup, installation failed";
|
||||
if (!package_handler->preInstallPackage(service_name, current_installation_file)) {
|
||||
dbgAssertOpt(false)
|
||||
<< AlertInfo(AlertTeam::CORE, "orchestration backup")
|
||||
<< "Failed to restore from backup, pre install test failed";
|
||||
return;
|
||||
}
|
||||
if (!package_handler->installPackage(service_name, current_installation_file, true)) {
|
||||
dbgAssertOpt(false)
|
||||
<< AlertInfo(AlertTeam::CORE, "orchestration backup")
|
||||
<< "Failed to restore from backup, installation failed";
|
||||
return;
|
||||
}
|
||||
}
|
||||
// LCOV_EXCL_STOP
|
||||
|
||||
@@ -2033,7 +2046,7 @@ private:
|
||||
}
|
||||
auto policy_mgmt_mode = getSettingWithDefault<string>("management", "profileManagedMode");
|
||||
if (getOrchestrationMode() == OrchestrationMode::HYBRID || policy_mgmt_mode == "declarative") {
|
||||
Singleton::Consume<I_DeclarativePolicy>::from<DeclarativePolicyUtils>()->turnOnApplyPolicyFlag();
|
||||
Singleton::Consume<I_DeclarativePolicy>::from<DeclarativePolicyUtils>()->turnOnApplyLocalPolicyFlag();
|
||||
}
|
||||
|
||||
auto policy_version = i_service_controller->getPolicyVersion();
|
||||
@@ -2055,7 +2068,6 @@ private:
|
||||
OrchestrationPolicy policy;
|
||||
UpdatesProcessReporter updates_process_reporter_listener;
|
||||
HybridModeMetric hybrid_mode_metric;
|
||||
EnvDetails env_details;
|
||||
chrono::minutes upgrade_delay_time;
|
||||
|
||||
string filesystem_prefix = "";
|
||||
|
||||
@@ -386,7 +386,7 @@ OrchestrationTools::Impl::calculateChecksum(Package::ChecksumTypes checksum_type
|
||||
return genError("Error while reading file " + path + ", " + e.what());
|
||||
}
|
||||
|
||||
dbgAssert(false)
|
||||
dbgAssertOpt(false)
|
||||
<< AlertInfo(AlertTeam::CORE, "service configuration")
|
||||
<< "Checksum type is not supported. Checksum type: "
|
||||
<< static_cast<unsigned int>(checksum_type);
|
||||
|
||||
@@ -28,6 +28,7 @@ std::ostream & operator<<(std::ostream &os, const Package &) { return os; }
|
||||
#include "health_check_status/health_check_status.h"
|
||||
#include "updates_process_event.h"
|
||||
#include "declarative_policy_utils.h"
|
||||
#include "mock/mock_env_details.h"
|
||||
|
||||
using namespace testing;
|
||||
using namespace std;
|
||||
@@ -324,6 +325,7 @@ public:
|
||||
StrictMock<MockOrchestrationTools> mock_orchestration_tools;
|
||||
StrictMock<MockDownloader> mock_downloader;
|
||||
StrictMock<MockShellCmd> mock_shell_cmd;
|
||||
StrictMock<EnvDetailsMocker> mock_env_details;
|
||||
StrictMock<MockMessaging> mock_message;
|
||||
StrictMock<MockRestApi> rest;
|
||||
StrictMock<MockServiceController> mock_service_controller;
|
||||
@@ -583,6 +585,8 @@ TEST_F(OrchestrationTest, check_sending_registration_data)
|
||||
env.init();
|
||||
init();
|
||||
|
||||
EXPECT_CALL(mock_env_details, getEnvType()).WillRepeatedly(Return(EnvType::LINUX));
|
||||
|
||||
EXPECT_CALL(mock_service_controller, updateServiceConfiguration(_, _, _, _, _, _))
|
||||
.WillOnce(Return(Maybe<void>()));
|
||||
EXPECT_CALL(mock_orchestration_tools, calculateChecksum(_, _)).WillRepeatedly(Return(string()));
|
||||
|
||||
@@ -141,11 +141,11 @@ packageHandlerActionsToString(PackageHandlerActions action)
|
||||
}
|
||||
}
|
||||
|
||||
dbgAssert(false)
|
||||
dbgAssertOpt(false)
|
||||
<< AlertInfo(AlertTeam::CORE, "service configuration")
|
||||
<< "Package handler action is not supported. Action: "
|
||||
<< static_cast<unsigned int>(action);
|
||||
return string();
|
||||
return string("--UNSUPPORTED");
|
||||
}
|
||||
|
||||
void
|
||||
|
||||
@@ -793,7 +793,7 @@ ServiceController::Impl::updateServiceConfiguration(
|
||||
<< "Policy file was not updated. Sending reload command regarding settings and data";
|
||||
auto signal_services = sendSignalForServices(nano_services_to_update, "");
|
||||
if (!signal_services.ok()) return signal_services.passErr();
|
||||
Singleton::Consume<I_DeclarativePolicy>::from<DeclarativePolicyUtils>()->turnOffApplyPolicyFlag();
|
||||
Singleton::Consume<I_DeclarativePolicy>::from<DeclarativePolicyUtils>()->turnOffApplyLocalPolicyFlag();
|
||||
return Maybe<void>();
|
||||
}
|
||||
|
||||
@@ -940,7 +940,7 @@ ServiceController::Impl::updateServiceConfiguration(
|
||||
if (new_policy_path.compare(config_file_path) == 0) {
|
||||
dbgDebug(D_SERVICE_CONTROLLER) << "Enforcing the default policy file";
|
||||
policy_version = version_value;
|
||||
Singleton::Consume<I_DeclarativePolicy>::from<DeclarativePolicyUtils>()->turnOffApplyPolicyFlag();
|
||||
Singleton::Consume<I_DeclarativePolicy>::from<DeclarativePolicyUtils>()->turnOffApplyLocalPolicyFlag();
|
||||
return Maybe<void>();
|
||||
}
|
||||
|
||||
@@ -959,7 +959,7 @@ ServiceController::Impl::updateServiceConfiguration(
|
||||
}
|
||||
|
||||
if (!was_policy_updated && !send_signal_for_services_err.empty()) return genError(send_signal_for_services_err);
|
||||
Singleton::Consume<I_DeclarativePolicy>::from<DeclarativePolicyUtils>()->turnOffApplyPolicyFlag();
|
||||
Singleton::Consume<I_DeclarativePolicy>::from<DeclarativePolicyUtils>()->turnOffApplyLocalPolicyFlag();
|
||||
return Maybe<void>();
|
||||
}
|
||||
|
||||
|
||||
@@ -17,7 +17,7 @@ void
|
||||
DeclarativePolicyUtils::init()
|
||||
{
|
||||
local_policy_path = getFilesystemPathConfig() + "/conf/local_policy.yaml";
|
||||
should_apply_policy = true;
|
||||
should_apply_local_policy = true;
|
||||
Singleton::Consume<I_RestApi>::by<DeclarativePolicyUtils>()->addRestCall<ApplyPolicyRest>(
|
||||
RestAction::SET, "apply-policy"
|
||||
);
|
||||
@@ -40,7 +40,7 @@ DeclarativePolicyUtils::upon(const ApplyPolicyEvent &event)
|
||||
{
|
||||
dbgTrace(D_ORCHESTRATOR) << "Apply policy event";
|
||||
local_policy_path = event.getPolicyPath();
|
||||
should_apply_policy = true;
|
||||
should_apply_local_policy = true;
|
||||
}
|
||||
// LCOV_EXCL_STOP
|
||||
|
||||
@@ -48,19 +48,24 @@ bool
 DeclarativePolicyUtils::shouldApplyPolicy()
 {
     auto env_type = Singleton::Consume<I_EnvDetails>::by<DeclarativePolicyUtils>()->getEnvType();
-    return env_type == EnvType::K8S ? true : should_apply_policy;
+    if (env_type == EnvType::K8S) {
+        I_OrchestrationTools *orch_tools = Singleton::Consume<I_OrchestrationTools>::by<DeclarativePolicyUtils>();
+        auto maybe_new_version = orch_tools->readFile("/etc/cp/conf/k8s-policy-check.trigger");
+        return maybe_new_version != curr_version;
+    }
+    return should_apply_local_policy;
 }
 
 void
-DeclarativePolicyUtils::turnOffApplyPolicyFlag()
+DeclarativePolicyUtils::turnOffApplyLocalPolicyFlag()
 {
-    should_apply_policy = false;
+    should_apply_local_policy = false;
 }
 
 void
-DeclarativePolicyUtils::turnOnApplyPolicyFlag()
+DeclarativePolicyUtils::turnOnApplyLocalPolicyFlag()
 {
-    should_apply_policy = true;
+    should_apply_local_policy = true;
 }
 
 Maybe<string>
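In the K8s branch above, shouldApplyPolicy no longer returns true unconditionally; the policy is re-applied only when the content of the trigger file differs from the cached version. A simplified sketch of that comparison, where readFile is a plain std::ifstream stand-in for I_OrchestrationTools::readFile:

```cpp
#include <fstream>
#include <sstream>
#include <string>

// Read the whole trigger file; a missing file yields an empty string.
static std::string
readFile(const std::string &path)
{
    std::ifstream in(path);
    std::stringstream buf;
    buf << in.rdbuf();
    return buf.str();
}

// Re-apply the policy only when the trigger content changed since the last check.
static bool
shouldApplyK8sPolicy(const std::string &curr_version)
{
    return readFile("/etc/cp/conf/k8s-policy-check.trigger") != curr_version;
}

int main()
{
    // With an empty cached version, any non-empty trigger file forces a reload.
    return shouldApplyK8sPolicy("") ? 0 : 1;
}
```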
@@ -211,6 +216,6 @@ DeclarativePolicyUtils::periodicPolicyLoad()
|
||||
|
||||
if (*new_checksum == curr_checksum) return;
|
||||
|
||||
should_apply_policy = true;
|
||||
should_apply_local_policy = true;
|
||||
curr_checksum = *new_checksum;
|
||||
}
|
||||
|
||||
@@ -467,9 +467,9 @@ getDeplymentType()
|
||||
case EnvType::COUNT: break;
|
||||
}
|
||||
|
||||
dbgAssert(false)
|
||||
dbgAssertOpt(false)
|
||||
<< AlertInfo(AlertTeam::CORE, "fog communication")
|
||||
<< "Failed to get a legitimate deplyment type: "
|
||||
<< "Failed to get a legitimate deployment type: "
|
||||
<< static_cast<uint>(deplyment_type);
|
||||
return "Embedded";
|
||||
}
|
||||
|
||||
@@ -74,7 +74,7 @@ FogCommunication::getUpdate(CheckUpdateRequest &request)
|
||||
<< " to: "
|
||||
<< policy_mgmt_mode;
|
||||
profile_mode = policy_mgmt_mode;
|
||||
i_declarative_policy->turnOnApplyPolicyFlag();
|
||||
i_declarative_policy->turnOnApplyLocalPolicyFlag();
|
||||
}
|
||||
|
||||
if (i_declarative_policy->shouldApplyPolicy()) {
|
||||
|
||||
@@ -246,6 +246,27 @@ public:
         return matched_rule;
     }
 
+    void
+    fetchReplicaCount()
+    {
+        string curl_cmd =
+            "curl -H \"Authorization: Bearer " + kubernetes_token + "\" "
+            "https://kubernetes.default.svc.cluster.local/apis/apps/v1/namespaces/" + kubernetes_namespace +
+            "/deployments/${AGENT_DEPLOYMENT_NAME} -k -s | jq .status.replicas";
+        auto maybe_replicas = i_shell_cmd->getExecOutput(curl_cmd);
+        if (maybe_replicas.ok()) {
+            try {
+                replicas = std::stoi(maybe_replicas.unpack());
+            } catch (const std::exception &e) {
+                dbgWarning(D_RATE_LIMIT) << "error while converting replicas: " << e.what();
+            }
+        }
+        if (replicas == 0) {
+            dbgWarning(D_RATE_LIMIT) << "replicas is set to 0, setting replicas to 1";
+            replicas = 1;
+        }
+    }
+
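fetchReplicaCount above asks the Kubernetes API for the deployment's replica count, and the hunk further down divides the configured burst and limit by it, so each pod enforces only its share of the budget. A minimal sketch of the parsing guard and the per-pod division; parseReplicaCount and the literal values are illustrative.

```cpp
#include <iostream>
#include <string>

// Same guard as fetchReplicaCount: unparsable or zero output counts as one replica.
static int
parseReplicaCount(const std::string &cmd_output)
{
    int replicas = 0;
    try {
        replicas = std::stoi(cmd_output);
    } catch (const std::exception &e) {
        std::cerr << "error while converting replicas: " << e.what() << '\n';
    }
    return replicas > 0 ? replicas : 1;
}

int main()
{
    int configured_limit = 100;              // requests allowed by the rate limit rule
    int replicas = parseReplicaCount("4");   // e.g. .status.replicas from the API query
    float per_pod_limit = static_cast<float>(configured_limit) / replicas;
    std::cout << "per-pod limit: " << per_pod_limit << '\n';  // 25
    return 0;
}
```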
EventVerdict
|
||||
respond(const HttpRequestHeaderEvent &event) override
|
||||
{
|
||||
@@ -271,10 +292,72 @@ public:
|
||||
dbgDebug(D_RATE_LIMIT) << "source identifier value: " << source_identifier;
|
||||
|
||||
auto maybe_source_ip = env->get<IPAddr>(HttpTransactionData::client_ip_ctx);
|
||||
set<string> ip_set;
|
||||
string source_ip = "";
|
||||
if (maybe_source_ip.ok()) source_ip = ipAddrToStr(maybe_source_ip.unpack());
|
||||
if (maybe_source_ip.ok()) {
|
||||
source_ip = ipAddrToStr(maybe_source_ip.unpack());
|
||||
|
||||
unordered_map<string, set<string>> condition_map = createConditionMap(uri, source_ip, source_identifier);
|
||||
if (getProfileAgentSettingWithDefault<bool>(false, "agent.rateLimit.ignoreSourceIP")) {
|
||||
dbgDebug(D_RATE_LIMIT) << "Rate limit ignoring source ip: " << source_ip;
|
||||
} else {
|
||||
ip_set.insert(source_ip);
|
||||
}
|
||||
}
|
||||
|
||||
auto maybe_xff = env->get<string>(HttpTransactionData::xff_vals_ctx);
|
||||
if (!maybe_xff.ok()) {
|
||||
dbgTrace(D_RATE_LIMIT) << "Rate limit failed to get xff vals from env";
|
||||
} else {
|
||||
auto ips = split(maybe_xff.unpack(), ',');
|
||||
ip_set.insert(ips.begin(), ips.end());
|
||||
}
|
||||
|
||||
EnumArray<I_GeoLocation::GeoLocationField, string> geo_location_data;
|
||||
set<string> country_codes;
|
||||
set<string> country_names;
|
||||
for (const string& source : ip_set) {
|
||||
Maybe<IPAddr> maybe_source_ip = IPAddr::createIPAddr(source);
|
||||
if (!maybe_source_ip.ok()){
|
||||
dbgWarning(D_RATE_LIMIT)
|
||||
<< "Rate limit failed to create ip address from source: "
|
||||
<< source
|
||||
<< ", Error: "
|
||||
<< maybe_source_ip.getErr();
|
||||
continue;
|
||||
}
|
||||
auto asset_location =
|
||||
Singleton::Consume<I_GeoLocation>::by<RateLimit>()->lookupLocation(maybe_source_ip.unpack());
|
||||
if (!asset_location.ok()) {
|
||||
dbgWarning(D_RATE_LIMIT)
|
||||
<< "Rate limit lookup location failed for source: "
|
||||
<< source_ip
|
||||
<< ", Error: "
|
||||
<< asset_location.getErr();
|
||||
continue;
|
||||
}
|
||||
geo_location_data = asset_location.unpack();
|
||||
auto code = geo_location_data[I_GeoLocation::GeoLocationField::COUNTRY_CODE];
|
||||
auto name = geo_location_data[I_GeoLocation::GeoLocationField::COUNTRY_NAME];
|
||||
country_codes.insert(code);
|
||||
country_names.insert(name);
|
||||
dbgTrace(D_RATE_LIMIT)
|
||||
<< "Rate limit found "
|
||||
<< "country code: "
|
||||
<< code
|
||||
<< ", country name: "
|
||||
<< name
|
||||
<< ", source ip address: "
|
||||
<< source;
|
||||
}
|
||||
|
||||
|
||||
unordered_map<string, set<string>> condition_map = createConditionMap(
|
||||
uri,
|
||||
source_ip,
|
||||
source_identifier,
|
||||
country_codes,
|
||||
country_names
|
||||
);
|
||||
if (shouldApplyException(condition_map)) {
|
||||
dbgDebug(D_RATE_LIMIT) << "found accept exception, not enforcing rate limit on this URI: " << uri;
|
||||
return ACCEPT;
|
||||
@@ -293,8 +376,8 @@ public:
|
||||
return ACCEPT;
|
||||
}
|
||||
|
||||
burst = rule.getRateLimit();
|
||||
limit = calcRuleLimit(rule);
|
||||
burst = static_cast<float>(rule.getRateLimit()) / replicas;
|
||||
limit = static_cast<float>(calcRuleLimit(rule)) / replicas;
|
||||
|
||||
dbgTrace(D_RATE_LIMIT)
|
||||
<< "found rate limit rule with: "
|
||||
@@ -471,10 +554,18 @@ public:
    }

    unordered_map<string, set<string>>
    createConditionMap(const string &uri, const string &source_ip, const string &source_identifier)
    createConditionMap(
        const string &uri,
        const string &source_ip,
        const string &source_identifier,
        const set<string> &country_codes,
        const set<string> &country_names
    )
    {
        unordered_map<string, set<string>> condition_map;
        if (!source_ip.empty()) condition_map["sourceIP"].insert(source_ip);
        if (!country_codes.empty()) condition_map["countryCode"].insert(country_codes.begin(), country_codes.end());
        if (!country_names.empty()) condition_map["countryName"].insert(country_names.begin(), country_names.end());
        condition_map["sourceIdentifier"].insert(source_identifier);
        condition_map["url"].insert(uri);

@@ -611,6 +702,21 @@ public:
            "Initialize rate limit component",
            false
        );

        i_shell_cmd = Singleton::Consume<I_ShellCmd>::by<RateLimit>();
        i_env_details = Singleton::Consume<I_EnvDetails>::by<RateLimit>();
        env_type = i_env_details->getEnvType();
        if (env_type == EnvType::K8S) {
            kubernetes_token = i_env_details->getToken();
            kubernetes_namespace = i_env_details->getNameSpace();
            fetchReplicaCount();
            Singleton::Consume<I_MainLoop>::by<RateLimit>()->addRecurringRoutine(
                I_MainLoop::RoutineType::Offline,
                chrono::seconds(120),
                [this]() { fetchReplicaCount(); },
                "Fetch current replica count from the Kubernetes cluster"
            );
        }
    }

    void
@@ -619,6 +725,9 @@ public:
        disconnectRedis();
    }

    I_ShellCmd *i_shell_cmd = nullptr;
    I_EnvDetails* i_env_details = nullptr;

private:
    static constexpr auto DROP = ngx_http_cp_verdict_e::TRAFFIC_VERDICT_DROP;
    static constexpr auto ACCEPT = ngx_http_cp_verdict_e::TRAFFIC_VERDICT_ACCEPT;
@@ -629,6 +738,10 @@ private:
    int burst;
    float limit;
    redisContext* redis = nullptr;
    int replicas = 1;
    EnvType env_type;
    string kubernetes_namespace = "";
    string kubernetes_token = "";
};

RateLimit::RateLimit() : Component("RateLimit"), pimpl(make_unique<Impl>()) {}

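For context, createConditionMap now also carries the country codes and names resolved from the source IPs, so a rate-limit exception can match on geo data in addition to source and URL attributes. A self-contained sketch of how such a condition map could be consulted; this is a simplification for illustration, not the project's exception/MatchQuery engine:

```cpp
#include <iostream>
#include <set>
#include <string>
#include <unordered_map>

using ConditionMap = std::unordered_map<std::string, std::set<std::string>>;

// Simplified illustration: true when every attribute requested by the exception
// has at least one value present in the request's condition map (the real
// matcher also supports regex values, negation, and nested AND/OR operators).
static bool matchesAll(const ConditionMap &conditions, const ConditionMap &exception)
{
    for (const auto &attr : exception) {
        auto it = conditions.find(attr.first);
        if (it == conditions.end()) return false;
        bool any = false;
        for (const auto &val : attr.second) {
            if (it->second.count(val)) { any = true; break; }
        }
        if (!any) return false;
    }
    return true;
}

int main()
{
    ConditionMap request = {
        {"url", {"/login"}},
        {"sourceIdentifier", {"10.0.0.7"}},
        {"countryCode", {"DE"}}
    };
    ConditionMap skip_rate_limit_for = {{"countryCode", {"DE", "AT"}}};
    std::cout << std::boolalpha << matchesAll(request, skip_rate_limit_for) << "\n"; // true
    return 0;
}
```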
@@ -1,37 +0,0 @@
|
||||
// Copyright (C) 2024 Check Point Software Technologies Ltd. All rights reserved.
|
||||
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#pragma once
|
||||
|
||||
class IWaf2Transaction;
|
||||
struct Waf2ScanResult;
|
||||
namespace Waap {
|
||||
namespace Scores {
|
||||
struct ModelLoggingSettings;
|
||||
}
|
||||
}
|
||||
|
||||
class I_WaapModelResultLogger {
|
||||
public:
|
||||
virtual ~I_WaapModelResultLogger() {}
|
||||
|
||||
virtual void
|
||||
logModelResult(
|
||||
Waap::Scores::ModelLoggingSettings &settings,
|
||||
IWaf2Transaction* transaction,
|
||||
Waf2ScanResult &res,
|
||||
std::string modelName,
|
||||
std::string otherModelName,
|
||||
double newScore,
|
||||
double baseScore) = 0;
|
||||
};
|
||||
@@ -87,9 +87,10 @@ add_library(waap_clib
|
||||
ParserPairs.cc
|
||||
Waf2Util2.cc
|
||||
ParserPDF.cc
|
||||
ParserKnownBenignSkipper.cc
|
||||
ParserScreenedJson.cc
|
||||
ParserBinaryFile.cc
|
||||
RegexComparator.cc
|
||||
WaapModelResultLogger.cc
|
||||
)
|
||||
|
||||
add_definitions("-Wno-unused-function")
|
||||
|
||||
@@ -28,6 +28,8 @@
|
||||
#include "ParserDelimiter.h"
|
||||
#include "ParserPDF.h"
|
||||
#include "ParserBinaryFile.h"
|
||||
#include "ParserKnownBenignSkipper.h"
|
||||
#include "ParserScreenedJson.h"
|
||||
#include "WaapAssetState.h"
|
||||
#include "Waf2Regex.h"
|
||||
#include "Waf2Util.h"
|
||||
@@ -359,6 +361,7 @@ DeepParser::onKv(const char *k, size_t k_len, const char *v, size_t v_len, int f
|
||||
isRefererParamPayload,
|
||||
isUrlPayload,
|
||||
isUrlParamPayload,
|
||||
isCookiePayload,
|
||||
flags,
|
||||
parser_depth,
|
||||
base64BinaryFileType
|
||||
@@ -410,6 +413,7 @@ DeepParser::onKv(const char *k, size_t k_len, const char *v, size_t v_len, int f
|
||||
isRefererParamPayload,
|
||||
isUrlPayload,
|
||||
isUrlParamPayload,
|
||||
isCookiePayload,
|
||||
flags,
|
||||
parser_depth,
|
||||
base64BinaryFileType
|
||||
@@ -461,6 +465,7 @@ DeepParser::onKv(const char *k, size_t k_len, const char *v, size_t v_len, int f
|
||||
isRefererParamPayload,
|
||||
isUrlPayload,
|
||||
isUrlParamPayload,
|
||||
isCookiePayload,
|
||||
flags,
|
||||
parser_depth,
|
||||
base64ParamFound,
|
||||
@@ -835,6 +840,7 @@ DeepParser::parseAfterMisleadingMultipartBoundaryCleaned(
|
||||
bool isRefererParamPayload,
|
||||
bool isUrlPayload,
|
||||
bool isUrlParamPayload,
|
||||
bool isCookiePayload,
|
||||
int flags,
|
||||
size_t parser_depth,
|
||||
bool base64ParamFound,
|
||||
@@ -854,6 +860,7 @@ DeepParser::parseAfterMisleadingMultipartBoundaryCleaned(
|
||||
isRefererParamPayload,
|
||||
isUrlPayload,
|
||||
isUrlParamPayload,
|
||||
isCookiePayload,
|
||||
flags,
|
||||
parser_depth,
|
||||
b64FileType
|
||||
@@ -918,6 +925,7 @@ bool isRefererPayload,
|
||||
bool isRefererParamPayload,
|
||||
bool isUrlPayload,
|
||||
bool isUrlParamPayload,
|
||||
bool isCookiePayload,
|
||||
int flags,
|
||||
size_t parser_depth
|
||||
) {
|
||||
@@ -959,6 +967,7 @@ DeepParser::createInternalParser(
|
||||
bool isRefererParamPayload,
|
||||
bool isUrlPayload,
|
||||
bool isUrlParamPayload,
|
||||
bool isCookiePayload,
|
||||
int flags,
|
||||
size_t parser_depth,
|
||||
Waap::Util::BinaryFileType b64FileType
|
||||
@@ -978,7 +987,19 @@ DeepParser::createInternalParser(
|
||||
<< "\n\tflags: "
|
||||
<< flags
|
||||
<< "\n\tparser_depth: "
|
||||
<< parser_depth;
|
||||
<< parser_depth
|
||||
<< "\n\tisBodyPayload: "
|
||||
<< isBodyPayload
|
||||
<< "\n\tisRefererPayload: "
|
||||
<< isRefererPayload
|
||||
<< "\n\tisRefererParamPayload: "
|
||||
<< isRefererParamPayload
|
||||
<< "\n\tisUrlPayload: "
|
||||
<< isUrlPayload
|
||||
<< "\n\tisUrlParamPayload: "
|
||||
<< isUrlParamPayload
|
||||
<< "\n\tisCookiePayload: "
|
||||
<< isCookiePayload;
|
||||
bool isPipesType = false, isSemicolonType = false, isAsteriskType = false, isCommaType = false,
|
||||
isAmperType = false;
|
||||
bool isKeyValDelimited = false;
|
||||
@@ -1045,6 +1066,53 @@ DeepParser::createInternalParser(
|
||||
}
|
||||
}
|
||||
|
||||
if (Waap::Util::isScreenedJson(cur_val)) {
|
||||
dbgTrace(D_WAAP_DEEP_PARSER) << "Starting to parse screened JSON";
|
||||
m_parsersDeque.push_back(std::make_shared<BufferedParser<ParserScreenedJson>>(*this, parser_depth + 1));
|
||||
offset = 0;
|
||||
return offset;
|
||||
}
|
||||
|
||||
dbgTrace(D_WAAP_DEEP_PARSER)
|
||||
<< "Offset = "
|
||||
<< offset
|
||||
<< " depth = "
|
||||
<< m_depth
|
||||
<< " isBodyPayload = "
|
||||
<< isBodyPayload;
|
||||
//Detect sensor_data format in body and just use dedicated filter for it
|
||||
if (m_depth == 1
|
||||
&& isBodyPayload
|
||||
&& Waap::Util::detectKnownSource(cur_val) == Waap::Util::SOURCE_TYPE_SENSOR_DATA) {
|
||||
m_parsersDeque.push_back(
|
||||
std::make_shared<BufferedParser<ParserKnownBenignSkipper>>(
|
||||
*this,
|
||||
parser_depth + 1,
|
||||
Waap::Util::SOURCE_TYPE_SENSOR_DATA
|
||||
)
|
||||
);
|
||||
offset = 0;
|
||||
dbgTrace(D_WAAP_DEEP_PARSER) << "Starting to parse data_sensor data - skipping it";
|
||||
return offset;
|
||||
}
|
||||
// Detect cookie parameter sensorsdata2015jssdkcross
|
||||
// and causes false positives due to malformed JSON. Make preprocessing to parse it correctly
|
||||
if (m_depth == 2
|
||||
&& isCookiePayload) {
|
||||
offset = Waap::Util::definePrefixedJson(cur_val);
|
||||
if (offset >= 0) {
|
||||
m_parsersDeque.push_back(
|
||||
std::make_shared<BufferedParser<ParserJson>>(
|
||||
*this,
|
||||
parser_depth + 1,
|
||||
m_pTransaction
|
||||
)
|
||||
);
|
||||
dbgTrace(D_WAAP_DEEP_PARSER) << "Starting to parse JSON data";
|
||||
return offset;
|
||||
}
|
||||
}
|
||||
|
||||
// Detect wbxml (binary XML) data type
|
||||
if (m_depth == 1 && isBodyPayload && !valueStats.isUTF16 && m_pWaapAssetState->isWBXMLSampleType(cur_val)) {
|
||||
m_is_wbxml = true;
|
||||
@@ -1374,6 +1442,7 @@ DeepParser::createInternalParser(
|
||||
isRefererParamPayload,
|
||||
isUrlPayload,
|
||||
isUrlParamPayload,
|
||||
isCookiePayload,
|
||||
flags,
|
||||
parser_depth
|
||||
);
|
||||
|
||||
@@ -129,6 +129,7 @@ private:
|
||||
bool isRefererParamPayload,
|
||||
bool isUrlPayload,
|
||||
bool isUrlParamPayload,
|
||||
bool isCookiePayload,
|
||||
int flags,
|
||||
size_t parser_depth,
|
||||
Waap::Util::BinaryFileType b64FileType
|
||||
@@ -144,6 +145,7 @@ private:
|
||||
bool isRefererParamPayload,
|
||||
bool isUrlPayload,
|
||||
bool isUrlParamPayload,
|
||||
bool isCookiePayload,
|
||||
int flags,
|
||||
size_t parser_depth
|
||||
);
|
||||
@@ -160,6 +162,7 @@ private:
|
||||
bool isRefererParamPayload,
|
||||
bool isUrlPayload,
|
||||
bool isUrlParamPayload,
|
||||
bool isCookiePayload,
|
||||
int flags,
|
||||
size_t parser_depth,
|
||||
bool base64ParamFound,
|
||||
|
||||
@@ -0,0 +1,139 @@
|
||||
// Copyright (C) 2022 Check Point Software Technologies Ltd. All rights reserved.
|
||||
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#include "ParserKnownBenignSkipper.h"
|
||||
#include "Waf2Util.h"
|
||||
#include "debug.h"
|
||||
#include <string.h>
|
||||
USE_DEBUG_FLAG(D_WAAP_PARSER_KNOWN_SOURCE_SKIPPER);
|
||||
USE_DEBUG_FLAG(D_WAAP);
|
||||
|
||||
const std::string ParserKnownBenignSkipper::m_parserName = "ParserKnownBenignSkipper";
|
||||
const char* DATA_SENSOR_TAIL = "\"}";
|
||||
|
||||
ParserKnownBenignSkipper::ParserKnownBenignSkipper(
|
||||
IParserStreamReceiver &receiver,
|
||||
size_t parser_depth,
|
||||
Waap::Util::KnownSourceType source_type
|
||||
) :
|
||||
m_receiver(receiver),
|
||||
m_state(s_start),
|
||||
m_parser_depth(parser_depth),
|
||||
m_source_type(source_type)
|
||||
{}
|
||||
|
||||
ParserKnownBenignSkipper::~ParserKnownBenignSkipper()
|
||||
{}
|
||||
|
||||
size_t
|
||||
ParserKnownBenignSkipper::push(const char *buf, size_t len)
|
||||
{
|
||||
dbgTrace(D_WAAP_PARSER_KNOWN_SOURCE_SKIPPER)
|
||||
<< "buf='"
|
||||
<< std::string(buf, std::min((size_t)200, len))
|
||||
<< (len > 200 ? "..." : "")
|
||||
<< "' len="
|
||||
<< len
|
||||
<< " depth="
|
||||
<< depth();
|
||||
|
||||
const char *c;
|
||||
|
||||
if (m_state == s_error) {
|
||||
return 0;
|
||||
}
|
||||
if (len == 0)
|
||||
{
|
||||
dbgTrace(D_WAAP_PARSER_KNOWN_SOURCE_SKIPPER)
|
||||
<< "ParserKnownBenignSkipper::push(): end of stream. m_state="
|
||||
<< m_state;
|
||||
|
||||
if (m_state == s_end) {
|
||||
m_receiver.onKvDone();
|
||||
} else {
|
||||
m_state = s_error;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
size_t tail_lookup_offset = 0;
|
||||
|
||||
switch (m_state) {
|
||||
case s_start:
|
||||
m_state = s_body;
|
||||
CP_FALL_THROUGH;
|
||||
case s_body:
|
||||
{
|
||||
if (m_source_type == Waap::Util::SOURCE_TYPE_SENSOR_DATA) {
|
||||
tail_lookup_offset =
|
||||
(len > MAX_DATA_SENSOR_TAIL_LOOKUP) ? len - MAX_DATA_SENSOR_TAIL_LOOKUP : 0;
|
||||
c = strstr(buf + tail_lookup_offset, DATA_SENSOR_TAIL);
|
||||
if (c) {
|
||||
dbgTrace(D_WAAP_PARSER_KNOWN_SOURCE_SKIPPER)
|
||||
<< "ParserKnownBenignSkipper::push(): found end of sensor data";
|
||||
m_state = s_end;
|
||||
CP_FALL_THROUGH;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
} else {
|
||||
dbgTrace(D_WAAP_PARSER_KNOWN_SOURCE_SKIPPER)
|
||||
<< "ParserKnownBenignSkipper::push(): unknown source type";
|
||||
m_state = s_error;
|
||||
break;
|
||||
}
|
||||
}
|
||||
case s_end:
|
||||
dbgTrace(D_WAAP_PARSER_KNOWN_SOURCE_SKIPPER) << "state = end";
|
||||
if (m_receiver.onKey("SENSOR_DATA", 11) != 0) {
|
||||
dbgTrace(D_WAAP_PARSER_KNOWN_SOURCE_SKIPPER) << "state moving to error onKey";
|
||||
m_state = s_error;
|
||||
return 0;
|
||||
}
|
||||
if (m_receiver.onValue("", 0) != 0) {
|
||||
dbgTrace(D_WAAP_PARSER_KNOWN_SOURCE_SKIPPER) << "state moving to error onValue";
|
||||
m_state = s_error;
|
||||
return 0;
|
||||
}
|
||||
break;
|
||||
case s_error:
|
||||
dbgTrace(D_WAAP_PARSER_KNOWN_SOURCE_SKIPPER) << "state = error";
|
||||
break;
|
||||
default:
|
||||
dbgTrace(D_WAAP_PARSER_KNOWN_SOURCE_SKIPPER)
|
||||
<< "ParserKnownBenignSkipper::push(): unknown state: "
|
||||
<< m_state;
|
||||
m_state = s_error;
|
||||
return 0;
|
||||
}
|
||||
dbgTrace(D_WAAP_PARSER_KNOWN_SOURCE_SKIPPER)
|
||||
<< "ParserKnownBenignSkipper::push(): final state: "
|
||||
<< m_state;
|
||||
return len;
|
||||
}
|
||||
|
||||
|
||||
void ParserKnownBenignSkipper::finish()
|
||||
{
|
||||
push(NULL, 0);
|
||||
}
|
||||
|
||||
const std::string& ParserKnownBenignSkipper::name() const
|
||||
{
|
||||
return m_parserName;
|
||||
}
|
||||
|
||||
bool ParserKnownBenignSkipper::error() const
|
||||
{
|
||||
return m_state == s_error;
|
||||
}
|
||||
@@ -0,0 +1,52 @@
|
||||
// Copyright (C) 2022 Check Point Software Technologies Ltd. All rights reserved.
|
||||
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#ifndef __PARSER_BENIGN_SKIPPER_H__
|
||||
#define __PARSER_BENIGN_SKIPPER_H__
|
||||
|
||||
#include "ParserBase.h"
|
||||
#include "Waf2Util.h"
|
||||
#include <string.h>
|
||||
#include "Waf2Util.h"
|
||||
|
||||
#define MAX_DATA_SENSOR_TAIL_LOOKUP 5
|
||||
|
||||
class ParserKnownBenignSkipper : public ParserBase {
|
||||
public:
|
||||
ParserKnownBenignSkipper(
|
||||
IParserStreamReceiver &receiver,
|
||||
size_t parser_depth,
|
||||
Waap::Util::KnownSourceType source_type=Waap::Util::SOURCE_TYPE_UNKNOWN);
|
||||
virtual ~ParserKnownBenignSkipper();
|
||||
virtual size_t push(const char *buf, size_t len);
|
||||
virtual void finish();
|
||||
virtual const std::string &name() const;
|
||||
virtual bool error() const;
|
||||
virtual size_t depth() { return 1; }
|
||||
|
||||
private:
|
||||
enum state {
|
||||
s_start,
|
||||
s_body,
|
||||
s_end,
|
||||
s_error
|
||||
};
|
||||
|
||||
IParserStreamReceiver &m_receiver;
|
||||
enum state m_state;
|
||||
static const std::string m_parserName;
|
||||
size_t m_parser_depth;
|
||||
Waap::Util::KnownSourceType m_source_type;
|
||||
};
|
||||
|
||||
#endif // __PARSER_BENIGN_SKIPPER_H__
|
||||
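ParserKnownBenignSkipper above does not parse the sensor_data blob at all; it only verifies that the stream ends with the expected `"}` tail (searched within the last few bytes) and then reports a single empty SENSOR_DATA key downstream. A reduced, stand-alone illustration of that tail check, not the parser itself:

```cpp
#include <string>

// Illustrative: mirrors the bounded search for the closing "\"}" of a
// {"sensor_data":"<huge opaque blob>"} body, as done in push() above.
static bool looksLikeSensorDataTail(const std::string &chunk)
{
    static const std::string tail = "\"}";
    const size_t lookup = 5; // mirrors MAX_DATA_SENSOR_TAIL_LOOKUP
    const size_t from = chunk.size() > lookup ? chunk.size() - lookup : 0;
    return chunk.find(tail, from) != std::string::npos;
}

// Usage: looksLikeSensorDataTail("{\"sensor_data\":\"....\"}") returns true.
```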
187 components/security_apps/waap/waap_clib/ParserScreenedJson.cc Normal file
@@ -0,0 +1,187 @@
|
||||
#include "ParserScreenedJson.h"
|
||||
#include "debug.h"
|
||||
|
||||
USE_DEBUG_FLAG(D_WAAP_PARSER_SCREENED_JSON);
|
||||
|
||||
const std::string ParserScreenedJson::m_parserName = "ParserScreenedJson";
|
||||
|
||||
ParserScreenedJson::ParserScreenedJson(IParserStreamReceiver &receiver, size_t parser_depth) :
|
||||
m_receiver(receiver),
|
||||
m_state(s_start),
|
||||
m_unscreenedLen(0),
|
||||
m_leftoverLen(0),
|
||||
m_parser_depth(parser_depth)
|
||||
{
|
||||
dbgTrace(D_WAAP_PARSER_SCREENED_JSON)
|
||||
<< "parser_depth="
|
||||
<< parser_depth;
|
||||
|
||||
memset(m_unscreened, 0, sizeof(m_unscreened));
|
||||
}
|
||||
|
||||
ParserScreenedJson::~ParserScreenedJson()
|
||||
{}
|
||||
|
||||
size_t
|
||||
ParserScreenedJson::push(const char *buf, size_t len)
|
||||
{
|
||||
size_t i = 0;
|
||||
char c;
|
||||
|
||||
dbgTrace(D_WAAP_PARSER_SCREENED_JSON) << "ParserScreenedJson::push(): starting (len=" << len << ")";
|
||||
|
||||
if (len == 0) {
|
||||
dbgTrace(D_WAAP_PARSER_SCREENED_JSON) << "ParserScreenedJson::push(): end of data signal! m_state=" << m_state;
|
||||
// flush unescaped data collected (if any)
|
||||
if (m_leftoverLen > 0) {
|
||||
// No need any processing for leftover data - last char must be doublequote, else - error
|
||||
m_state = s_error;
|
||||
dbgTrace(D_WAAP_PARSER_SCREENED_JSON)
|
||||
<< "ParserScreenedJson::push(): end of data and leftover detected m_state="
|
||||
<< m_state;
|
||||
return i;
|
||||
}
|
||||
dbgTrace(D_WAAP_PARSER_SCREENED_JSON)
|
||||
<< "ParserScreenedJson::push(): s_value, pushing m_unscreened = "
|
||||
<< m_unscreened
|
||||
<< ", m_leftoverLen = "
|
||||
<< m_leftoverLen
|
||||
<< ", m_unscreenedLen = "
|
||||
<< m_unscreenedLen;
|
||||
|
||||
if (m_receiver.onKey("json_unscreened", 15) != 0) {
|
||||
m_state = s_error;
|
||||
return i;
|
||||
}
|
||||
|
||||
if (m_receiver.onValue(m_unscreened, m_unscreenedLen) != 0) {
|
||||
m_state = s_error;
|
||||
return i;
|
||||
}
|
||||
|
||||
if (m_receiver.onKvDone() != 0)
|
||||
{
|
||||
m_state = s_error;
|
||||
return i;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
while (i < len)
|
||||
{
|
||||
c = buf[i];
|
||||
|
||||
dbgTrace(D_WAAP_PARSER_SCREENED_JSON)
|
||||
<< "ParserScreenedJson::push(): state="
|
||||
<< m_state
|
||||
<< "; c='"
|
||||
<< c
|
||||
<< "'"
|
||||
<< "; i="
|
||||
<< i
|
||||
<< ", m_leftoverLen = "
|
||||
<< m_leftoverLen
|
||||
<< ", m_unscreenedLen = "
|
||||
<< m_unscreenedLen
|
||||
<< ", m_unscreened = "
|
||||
<< m_unscreened;
|
||||
|
||||
switch (m_state)
|
||||
{
|
||||
case s_start:
|
||||
{
|
||||
dbgTrace(D_WAAP_PARSER_SCREENED_JSON)
|
||||
<< "ParserScreenedJson::push(): s_start";
|
||||
m_state = s_value;
|
||||
|
||||
// fallthrough not required, removing 1st doublequote, it denoted by regex //
|
||||
//CP_FALL_THROUGH;
|
||||
break;
|
||||
}
|
||||
case s_value:
|
||||
{
|
||||
if (c == '\\') {
|
||||
if (m_leftoverLen > 0) {
|
||||
m_unscreened[m_unscreenedLen] = '\\';
|
||||
m_leftoverLen = 0;
|
||||
m_unscreenedLen++;
|
||||
} else {
|
||||
m_leftoverLen++;
|
||||
}
|
||||
} else if (c =='\"') {
|
||||
if (m_leftoverLen > 0) {
|
||||
m_unscreened[m_unscreenedLen] = '\"';
|
||||
m_unscreenedLen++;
|
||||
if (m_leftoverLen > 0) {
|
||||
m_leftoverLen = 0;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if (m_leftoverLen > 0) {
|
||||
m_unscreened[m_unscreenedLen] = '\\';
|
||||
m_unscreenedLen++;
|
||||
m_leftoverLen = 0;
|
||||
}
|
||||
m_unscreened[m_unscreenedLen] = c;
|
||||
m_unscreenedLen++;
|
||||
}
|
||||
if (m_unscreenedLen >= MAX_UNSCREENED_JSON_SIZE) {
|
||||
if (m_receiver.onKey("json_unscreened", 15) != 0) {
|
||||
m_state = s_error;
|
||||
return i;
|
||||
}
|
||||
dbgTrace(D_WAAP_PARSER_SCREENED_JSON)
|
||||
<< "ParserScreenedJson::push(): s_value, pushing m_unscreened = "
|
||||
<< m_unscreened
|
||||
<< ", m_leftoverLen = "
|
||||
<< m_leftoverLen
|
||||
<< ", m_unscreenedLen = "
|
||||
<< m_unscreenedLen;
|
||||
if (m_receiver.onValue(m_unscreened, m_unscreenedLen) != 0) {
|
||||
m_state = s_error;
|
||||
return i;
|
||||
}
|
||||
m_unscreenedLen = 0;
|
||||
}
|
||||
break;
|
||||
}
|
||||
case s_error:
|
||||
{
|
||||
dbgTrace(D_WAAP_PARSER_SCREENED_JSON)
|
||||
<< "ParserScreenedJson::push(): s_error";
|
||||
return 0;
|
||||
}
|
||||
default:
|
||||
{
|
||||
dbgTrace(D_WAAP_PARSER_SCREENED_JSON)
|
||||
<< "ParserScreenedJson::push(): JSON parser unrecoverable error";
|
||||
m_state = s_error;
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
++i;
|
||||
}
|
||||
|
||||
dbgTrace(D_WAAP_PARSER_SCREENED_JSON)
|
||||
<< "ParserScreenedJson::push(): finished: len="
|
||||
<< len;
|
||||
return len;
|
||||
}
|
||||
|
||||
void
|
||||
ParserScreenedJson::finish()
|
||||
{
|
||||
push(NULL, 0);
|
||||
}
|
||||
|
||||
const std::string &
|
||||
ParserScreenedJson::name() const
|
||||
{
|
||||
return m_parserName;
|
||||
}
|
||||
|
||||
bool
|
||||
ParserScreenedJson::error() const
|
||||
{
|
||||
return m_state == s_error;
|
||||
}
|
||||
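The streaming parser above strips the backslashes that "screen" a JSON document embedded as a JSON string value, then forwards the unescaped text under the json_unscreened key. A stand-alone, simplified version of that unescaping for illustration (the real parser works incrementally and is bounded by MAX_UNSCREENED_JSON_SIZE):

```cpp
#include <iostream>
#include <string>

// Illustrative stand-alone sketch: drop the backslashes that screen an embedded
// JSON document (e.g. {\"key\":\"val\"}) so the inner JSON can be parsed normally.
static std::string unscreen(const std::string &in)
{
    std::string out;
    out.reserve(in.size());
    for (size_t i = 0; i < in.size(); ++i) {
        if (in[i] == '\\' && i + 1 < in.size() && (in[i + 1] == '"' || in[i + 1] == '\\')) {
            continue; // skip the screening backslash, keep the escaped character
        }
        out.push_back(in[i]);
    }
    return out;
}

int main()
{
    std::string screened = R"("{\"user\":\"alice\",\"role\":\"admin\"}")";
    // prints "{"user":"alice","role":"admin"}" (outer quotes kept in this simplified sketch)
    std::cout << unscreen(screened) << "\n";
    return 0;
}
```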
52 components/security_apps/waap/waap_clib/ParserScreenedJson.h Normal file
@@ -0,0 +1,52 @@
|
||||
// Copyright (C) 2022 Check Point Software Technologies Ltd. All rights reserved.
|
||||
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#ifndef __PARSER_SCREENED_JSON_H_
|
||||
#define __PARSER_SCREENED_JSON_H_
|
||||
|
||||
#include "ParserBase.h"
|
||||
#include <string.h>
|
||||
|
||||
#define MAX_UNSCREENED_JSON_SIZE 4095
|
||||
|
||||
class ParserScreenedJson : public ParserBase {
|
||||
public:
|
||||
ParserScreenedJson(IParserStreamReceiver &receiver, size_t parser_depth);
|
||||
virtual ~ParserScreenedJson();
|
||||
size_t push(const char *data, size_t data_len);
|
||||
void finish();
|
||||
virtual const std::string &name() const;
|
||||
bool error() const;
|
||||
// LCOV_EXCL_START Reason: The function not in use, compliance with the interface
|
||||
virtual size_t depth() { return 1; }
|
||||
// LCOV_EXCL_STOP
|
||||
|
||||
private:
|
||||
enum state
|
||||
{
|
||||
s_start,
|
||||
s_value,
|
||||
s_error
|
||||
};
|
||||
|
||||
IParserStreamReceiver &m_receiver;
|
||||
enum state m_state;
|
||||
size_t m_unscreenedLen;
|
||||
char m_unscreened[MAX_UNSCREENED_JSON_SIZE];
|
||||
size_t m_leftoverLen;
|
||||
static const std::string m_parserName;
|
||||
size_t m_parser_depth;
|
||||
};
|
||||
|
||||
#endif
|
||||
|
||||
@@ -194,6 +194,10 @@ void SerializeToFileBase::saveData()
|
||||
dbgWarning(D_WAAP_CONFIDENCE_CALCULATOR) << "Failed to gzip data";
|
||||
} else {
|
||||
ss.str(string((const char *)res.output, res.num_output_bytes));
|
||||
// free the memory allocated by compressData
|
||||
if (res.output) free(res.output);
|
||||
res.output = nullptr;
|
||||
res.num_output_bytes = 0;
|
||||
}
|
||||
if (res.output) free(res.output);
|
||||
res.output = nullptr;
|
||||
|
||||
@@ -783,6 +783,55 @@ WaapAssetState::filterKeywordsDueToLongText(Waf2ScanResult &res) const
|
||||
#endif
|
||||
}
|
||||
|
||||
// std::string nicePrint() - is a function used to create std::string that will represent all data that is
|
||||
// collected inside Waf2ScanResult object. This function is used for debugging purposes. it should make deep-dive
|
||||
// into the object easier.
|
||||
|
||||
std::string
|
||||
WaapAssetState::nicePrint(Waf2ScanResult &res) const
|
||||
{
|
||||
std::string result = "Waf2ScanResult:\n";
|
||||
result += "keyword_matches:\n";
|
||||
for (const auto &keyword : res.keyword_matches) {
|
||||
result += keyword + "\n";
|
||||
}
|
||||
result += "regex_matches:\n";
|
||||
for (const auto ®ex : res.regex_matches) {
|
||||
result += regex + "\n";
|
||||
}
|
||||
result += "filtered_keywords:\n";
|
||||
for (const auto &filtered : res.filtered_keywords) {
|
||||
result += filtered + "\n";
|
||||
}
|
||||
result += "found_patterns:\n";
|
||||
for (const auto &pattern : res.found_patterns) {
|
||||
result += pattern.first + ":\n";
|
||||
for (const auto &value : pattern.second) {
|
||||
result += value + "\n";
|
||||
}
|
||||
}
|
||||
result += "unescaped_line: " + res.unescaped_line + "\n";
|
||||
result += "param_name: " + res.param_name + "\n";
|
||||
result += "location: " + res.location + "\n";
|
||||
result += "score: " + std::to_string(res.score) + "\n";
|
||||
result += "scoreNoFilter: " + std::to_string(res.scoreNoFilter) + "\n";
|
||||
result += "scoreArray:\n";
|
||||
for (const auto &score : res.scoreArray) {
|
||||
result += std::to_string(score) + "\n";
|
||||
}
|
||||
result += "keywordCombinations:\n";
|
||||
for (const auto &combination : res.keywordCombinations) {
|
||||
result += combination + "\n";
|
||||
}
|
||||
result += "attack_types:\n";
|
||||
for (const auto &attack : res.attack_types) {
|
||||
result += attack + "\n";
|
||||
}
|
||||
result += "m_isAttackInParam: " + std::to_string(res.m_isAttackInParam) + "\n";
|
||||
return result;
|
||||
}
|
||||
|
||||
|
||||
bool
|
||||
checkBinaryData(const std::string &line, bool binaryDataFound)
|
||||
{
|
||||
@@ -1033,7 +1082,7 @@ WaapAssetState::apply(
|
||||
// Scan unescaped_line with aho-corasick once, and reuse it in multiple calls to checkRegex below
|
||||
// This is done to improve performance of regex matching.
|
||||
SampleValue unescapedLineSample(res.unescaped_line, m_Signatures->m_regexPreconditions);
|
||||
|
||||
dbgTrace(D_WAAP_SAMPLE_SCAN) << "after doing second set of checkRegex calls..." << nicePrint(res);
|
||||
checkRegex(
|
||||
unescapedLineSample,
|
||||
m_Signatures->specific_acuracy_keywords_regex,
|
||||
@@ -1111,7 +1160,7 @@ WaapAssetState::apply(
|
||||
}
|
||||
|
||||
bool os_cmd_ev = Waap::Util::find_in_map_of_stringlists_keys("os_cmd_ev", res.found_patterns);
|
||||
|
||||
dbgTrace(D_WAAP_SAMPLE_SCAN) << "before evasion checking " << nicePrint(res);
|
||||
if (os_cmd_ev) {
|
||||
dbgTrace(D_WAAP_EVASIONS) << "os command evasion found";
|
||||
|
||||
@@ -1295,6 +1344,47 @@ WaapAssetState::apply(
|
||||
}
|
||||
}
|
||||
|
||||
bool path_traversal_ev = Waap::Util::find_in_map_of_stringlists_keys("path_traversal", res.found_patterns);
|
||||
dbgTrace(D_WAAP_EVASIONS)
|
||||
<< "path_traversal_ev = " << path_traversal_ev
|
||||
<< " sample = " << res.unescaped_line
|
||||
<< " res.unescaped_line.find(2f) = " << res.unescaped_line.find("2f");
|
||||
if ((path_traversal_ev) && (res.unescaped_line.find("2f") != std::string::npos)) {
|
||||
// Possible path traversal evasion .2f. detected: - clean up and scan with regexes again.
|
||||
dbgTrace(D_WAAP_EVASIONS) << "comment evasion .2f. found" << res.unescaped_line
|
||||
<< "Status beroe evasion checking " << nicePrint(res);
|
||||
|
||||
std::string unescaped = line;
|
||||
replaceAll(unescaped, "2f", "/");
|
||||
size_t kwCount = res.keyword_matches.size();
|
||||
|
||||
if (res.unescaped_line != unescaped) {
|
||||
SampleValue unescapedSample(unescaped, m_Signatures->m_regexPreconditions);
|
||||
checkRegex(unescapedSample, m_Signatures->specific_acuracy_keywords_regex, res.keyword_matches,
|
||||
res.found_patterns, longTextFound, binaryDataFound);
|
||||
checkRegex(unescapedSample, m_Signatures->words_regex, res.keyword_matches, res.found_patterns,
|
||||
longTextFound, binaryDataFound);
|
||||
checkRegex(unescapedSample, m_Signatures->pattern_regex, res.regex_matches, res.found_patterns,
|
||||
longTextFound, binaryDataFound);
|
||||
}
|
||||
|
||||
if (kwCount == res.keyword_matches.size()) {
|
||||
// Remove the evasion keyword if no real evasion found
|
||||
keywordsToRemove.push_back("path_traversal");
|
||||
path_traversal_ev = false;
|
||||
}
|
||||
else if (!binaryDataFound) {
|
||||
// Recalculate repetition and/or probing indicators
|
||||
unsigned int newWordsCount = 0;
|
||||
calcRepetitionAndProbing(res, ignored_keywords, unescaped, detectedRepetition, detectedProbing,
|
||||
newWordsCount);
|
||||
// Take minimal words count because empirically it means evasion was probably succesfully decoded
|
||||
wordsCount = std::min(wordsCount, newWordsCount);
|
||||
}
|
||||
dbgTrace(D_WAAP_EVASIONS) << "status after evasion checking " << nicePrint(res);
|
||||
}
|
||||
|
||||
|
||||
bool quoutes_space_evasion = Waap::Util::find_in_map_of_stringlists_keys(
|
||||
"quotes_space_ev_fast_reg",
|
||||
res.found_patterns
|
||||
@@ -1726,7 +1816,7 @@ WaapAssetState::apply(
|
||||
wordsCount = std::min(wordsCount, newWordsCount);
|
||||
}
|
||||
}
|
||||
|
||||
dbgTrace(D_WAAP_SAMPLE_SCAN) << "after evasions..." << nicePrint(res);
|
||||
// Remove evasion keywords that should not be reported because there's no real evasion found
|
||||
if (!keywordsToRemove.empty()) {
|
||||
dbgTrace(D_WAAP_SAMPLE_SCAN)
|
||||
|
||||
@@ -49,6 +49,7 @@ private: //ugly but needed for build
|
||||
Waap::Util::map_of_stringlists_t & found_patterns, bool longTextFound, bool binaryDataFound) const;
|
||||
|
||||
void filterKeywordsDueToLongText(Waf2ScanResult &res) const;
|
||||
std::string nicePrint(Waf2ScanResult &res) const;
|
||||
|
||||
public:
|
||||
// Load and compile signatures from file
|
||||
|
||||
@@ -1,250 +0,0 @@
|
||||
// Copyright (C) 2024 Check Point Software Technologies Ltd. All rights reserved.
|
||||
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#include "WaapModelResultLogger.h"
|
||||
#include "Waf2Engine.h"
|
||||
#include "i_time_get.h"
|
||||
#include "i_messaging.h"
|
||||
#include "i_instance_awareness.h"
|
||||
#include "http_manager.h"
|
||||
#include "LogGenWrapper.h"
|
||||
#include "rest.h"
|
||||
#include "debug.h"
|
||||
|
||||
USE_DEBUG_FLAG(D_WAAP_MODEL_LOGGER);
|
||||
|
||||
using namespace std;
|
||||
|
||||
static const unsigned int MAX_FILES_PER_WINDOW = 5;
|
||||
static const unsigned int MAX_LOGS_PER_WINDOW = 1800;
|
||||
static constexpr std::chrono::minutes RATE_LIMIT_WINDOW_MINUTES = std::chrono::minutes(30);
|
||||
|
||||
class WaapModelReport : public RestGetFile
|
||||
{
|
||||
public:
|
||||
WaapModelReport(const vector<WaapModelResult> &_data) : data(_data) {}
|
||||
|
||||
private:
|
||||
C2S_PARAM(vector<WaapModelResult>, data);
|
||||
};
|
||||
|
||||
class WaapModelResultLogger::Impl
|
||||
:
|
||||
Singleton::Provide<I_WaapModelResultLogger>::From<WaapModelResultLogger>
|
||||
{
|
||||
public:
|
||||
Impl(size_t maxLogs) : max_logs(maxLogs), sent_files_count(0), sent_logs_count(0),
|
||||
last_sent_s3(std::chrono::minutes::zero()),
|
||||
last_kusto_log_window(std::chrono::minutes::zero()) {}
|
||||
virtual ~Impl();
|
||||
void
|
||||
logModelResult(
|
||||
Waap::Scores::ModelLoggingSettings &settings,
|
||||
IWaf2Transaction* transaction,
|
||||
Waf2ScanResult &res,
|
||||
string modelName,
|
||||
string otherModelName,
|
||||
double score,
|
||||
double otherScore) override;
|
||||
|
||||
private:
|
||||
void logToStream(WaapModelResult &result, chrono::minutes now);
|
||||
void logToS3(WaapModelResult &result, IWaf2Transaction* transaction, chrono::minutes now);
|
||||
bool shouldSendLogsToS3(chrono::minutes now);
|
||||
void sendLogsToS3();
|
||||
size_t max_logs;
|
||||
unsigned int sent_files_count;
|
||||
unsigned int sent_logs_count;
|
||||
std::chrono::minutes last_sent_s3;
|
||||
std::chrono::minutes last_kusto_log_window;
|
||||
std::map<std::string, vector<WaapModelResult>> logs;
|
||||
};
|
||||
|
||||
WaapModelResultLogger::WaapModelResultLogger(size_t maxLogs) : pimpl(make_unique<WaapModelResultLogger::Impl>(maxLogs))
|
||||
{
|
||||
}
|
||||
|
||||
WaapModelResultLogger::~WaapModelResultLogger()
|
||||
{
|
||||
}
|
||||
|
||||
void
|
||||
WaapModelResultLogger::logModelResult(
|
||||
Waap::Scores::ModelLoggingSettings &settings,
|
||||
IWaf2Transaction* transaction,
|
||||
Waf2ScanResult &res,
|
||||
std::string modelName,
|
||||
std::string otherModelName,
|
||||
double score,
|
||||
double otherScore
|
||||
)
|
||||
{
|
||||
pimpl->logModelResult(settings, transaction, res, modelName, otherModelName, score, otherScore);
|
||||
}
|
||||
|
||||
void
|
||||
WaapModelResultLogger::Impl::logModelResult(
|
||||
Waap::Scores::ModelLoggingSettings &settings,
|
||||
IWaf2Transaction* transaction,
|
||||
Waf2ScanResult &res,
|
||||
string modelName,
|
||||
string otherModelName,
|
||||
double score,
|
||||
double otherScore)
|
||||
{
|
||||
if (transaction == NULL) return;
|
||||
if (!Singleton::exists<I_Messaging>()) {
|
||||
dbgError(D_WAAP_MODEL_LOGGER) << "Messaging service is not available, will not log";
|
||||
return;
|
||||
}
|
||||
|
||||
double score_diff = score - otherScore;
|
||||
if (settings.logLevel == Waap::Scores::ModelLogLevel::DIFF &&
|
||||
! ((score_diff > 0 && score >= 1.5f && otherScore < 4.0f) ||
|
||||
(score_diff < 0 && score < 4.0f && otherScore >= 1.5f))) {
|
||||
return;
|
||||
}
|
||||
|
||||
auto current_time = Singleton::Consume<I_TimeGet>::by<WaapComponent>()->getWalltime();
|
||||
auto now = chrono::duration_cast<chrono::minutes>(current_time);
|
||||
|
||||
WaapModelResult result = WaapModelResult(
|
||||
*transaction,
|
||||
res,
|
||||
modelName,
|
||||
otherModelName,
|
||||
score,
|
||||
otherScore,
|
||||
now.count()
|
||||
);
|
||||
|
||||
if (settings.logToStream) logToStream(result, now);
|
||||
if (settings.logToS3) logToS3(result, transaction, now);
|
||||
}
|
||||
|
||||
void WaapModelResultLogger::Impl::logToS3(WaapModelResult &result, IWaf2Transaction* transaction, chrono::minutes now)
|
||||
{
|
||||
auto asset_state = transaction->getAssetState();
|
||||
string asset_id = (asset_state != nullptr) ? asset_state->m_assetId : "";
|
||||
auto asset_logs = logs.find(asset_id);
|
||||
if (asset_logs == logs.end()) {
|
||||
logs.emplace(asset_id, vector<WaapModelResult>());
|
||||
}
|
||||
logs.at(asset_id).push_back(result);
|
||||
if (shouldSendLogsToS3(now)) {
|
||||
sendLogsToS3();
|
||||
}
|
||||
}
|
||||
|
||||
void WaapModelResultLogger::Impl::logToStream(WaapModelResult &result, chrono::minutes now)
|
||||
{
|
||||
if (now - last_kusto_log_window > RATE_LIMIT_WINDOW_MINUTES) {
|
||||
last_kusto_log_window = now;
|
||||
sent_logs_count = 0;
|
||||
}
|
||||
else if (sent_logs_count > MAX_LOGS_PER_WINDOW) {
|
||||
return;
|
||||
}
|
||||
sent_logs_count++;
|
||||
dbgTrace(D_WAAP_MODEL_LOGGER) << "Logging WAAP model telemetry";
|
||||
|
||||
auto maybeLogTriggerConf = getConfiguration<LogTriggerConf>("rulebase", "log");
|
||||
LogGenWrapper logGenWrapper(
|
||||
maybeLogTriggerConf,
|
||||
"WAAP Model Telemetry",
|
||||
ReportIS::Audience::SECURITY,
|
||||
LogTriggerConf::SecurityType::ThreatPrevention,
|
||||
ReportIS::Severity::CRITICAL,
|
||||
ReportIS::Priority::HIGH,
|
||||
false);
|
||||
|
||||
LogGen& waap_log = logGenWrapper.getLogGen();
|
||||
waap_log.addMarkerSuffix(result.location);
|
||||
waap_log << LogField("httpuripath", result.uri);
|
||||
waap_log << LogField("matchedlocation", result.location);
|
||||
waap_log << LogField("matchedparameter", result.param);
|
||||
waap_log << LogField("matchedindicators", Waap::Util::vecToString(result.keywords), LogFieldOption::XORANDB64);
|
||||
waap_log << LogField("matchedsample", result.sample, LogFieldOption::XORANDB64);
|
||||
waap_log << LogField("waapkeywordsscore", (int)(result.otherScore * 100));
|
||||
waap_log << LogField("waapfinalscore", (int)(result.score * 100));
|
||||
waap_log << LogField("indicatorssource", result.modelName);
|
||||
waap_log << LogField("indicatorsversion", result.otherModelName);
|
||||
}
|
||||
|
||||
bool WaapModelResultLogger::Impl::shouldSendLogsToS3(chrono::minutes now)
|
||||
{
|
||||
if (now - last_sent_s3 > RATE_LIMIT_WINDOW_MINUTES) return true;
|
||||
for (const auto &asset_logs : logs) {
|
||||
if (asset_logs.second.size() >= max_logs) return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
void WaapModelResultLogger::Impl::sendLogsToS3()
|
||||
{
|
||||
dbgFlow(D_WAAP_MODEL_LOGGER) << "Sending logs to fog";
|
||||
|
||||
I_Messaging *msg = Singleton::Consume<I_Messaging>::by<WaapComponent>();
|
||||
|
||||
for (auto &asset_logs : logs) {
|
||||
if (asset_logs.second.empty()) {
|
||||
continue;
|
||||
}
|
||||
if (sent_files_count >= MAX_FILES_PER_WINDOW) {
|
||||
dbgInfo(D_WAAP_MODEL_LOGGER) << "Reached max files per window, will wait for next window";
|
||||
asset_logs.second.clear();
|
||||
continue;
|
||||
}
|
||||
I_AgentDetails *agentDetails = Singleton::Consume<I_AgentDetails>::by<WaapComponent>();
|
||||
string tenant_id = agentDetails->getTenantId();
|
||||
string agent_id = agentDetails->getAgentId();
|
||||
string asset_id = asset_logs.first;
|
||||
if (Singleton::exists<I_InstanceAwareness>()) {
|
||||
I_InstanceAwareness* instance = Singleton::Consume<I_InstanceAwareness>::by<WaapComponent>();
|
||||
Maybe<string> uniqueId = instance->getUniqueID();
|
||||
if (uniqueId.ok())
|
||||
{
|
||||
agent_id += "/" + uniqueId.unpack();
|
||||
}
|
||||
}
|
||||
string uri = "/storage/waap/" +
|
||||
tenant_id + "/" + asset_id + "/waap_model_results/window_" +
|
||||
to_string(last_sent_s3.count()) + "-" + to_string(sent_files_count) +
|
||||
"/" + agent_id + "/data.data";
|
||||
WaapModelReport report = WaapModelReport(asset_logs.second);
|
||||
|
||||
dbgInfo(D_WAAP_MODEL_LOGGER) << "Sending logs for asset " << asset_logs.first <<
|
||||
", length " << asset_logs.second.size() <<
|
||||
", uri " << uri;
|
||||
msg->sendAsyncMessage(
|
||||
HTTPMethod::PUT,
|
||||
uri,
|
||||
report,
|
||||
MessageCategory::LOG
|
||||
);
|
||||
|
||||
asset_logs.second.clear();
|
||||
}
|
||||
|
||||
auto current_time = Singleton::Consume<I_TimeGet>::by<WaapComponent>()->getWalltime();
|
||||
auto now = chrono::duration_cast<chrono::minutes>(current_time);
|
||||
if (now - last_sent_s3 > RATE_LIMIT_WINDOW_MINUTES) {
|
||||
last_sent_s3 = now;
|
||||
sent_files_count = 0;
|
||||
} else {
|
||||
sent_files_count++;
|
||||
}
|
||||
}
|
||||
|
||||
WaapModelResultLogger::Impl::~Impl()
|
||||
{}
|
||||
@@ -1,109 +0,0 @@
|
||||
// Copyright (C) 2024 Check Point Software Technologies Ltd. All rights reserved.
|
||||
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <memory>
|
||||
#include <string>
|
||||
#include <map>
|
||||
#include <chrono>
|
||||
#include <vector>
|
||||
#include <ostream>
|
||||
#include "cereal/archives/json.hpp"
|
||||
|
||||
#include "i_waap_model_result_logger.h"
|
||||
#include "DeepAnalyzer.h"
|
||||
#include "i_transaction.h"
|
||||
#include "ScanResult.h"
|
||||
#include "WaapAssetState.h"
|
||||
#include "WaapScores.h"
|
||||
|
||||
class WaapModelResultLogger
|
||||
:
|
||||
Singleton::Provide<I_WaapModelResultLogger>
|
||||
{
|
||||
public:
|
||||
WaapModelResultLogger(size_t maxLogs = MAX_WAAP_MODEL_LOGS);
|
||||
virtual ~WaapModelResultLogger();
|
||||
virtual void logModelResult(
|
||||
Waap::Scores::ModelLoggingSettings &settings,
|
||||
IWaf2Transaction* transaction,
|
||||
Waf2ScanResult &res,
|
||||
std::string modelName,
|
||||
std::string otherModelName,
|
||||
double score,
|
||||
double otherScore
|
||||
);
|
||||
class Impl;
|
||||
|
||||
protected:
|
||||
std::unique_ptr<Impl> pimpl;
|
||||
static const size_t MAX_WAAP_MODEL_LOGS = 20000;
|
||||
};
|
||||
|
||||
class WaapModelResult
|
||||
{
|
||||
public:
|
||||
WaapModelResult(
|
||||
IWaf2Transaction &transaction,
|
||||
Waf2ScanResult &res,
|
||||
const std::string &modelName,
|
||||
const std::string &otherModelName,
|
||||
double score,
|
||||
double otherScore,
|
||||
uint64_t time
|
||||
) : uri(transaction.getUri()), location(res.location), param(res.param_name),
|
||||
modelName(modelName), otherModelName(otherModelName),
|
||||
score(score), otherScore(otherScore), keywords(res.keywordsAfterFilter),
|
||||
sample(res.unescaped_line.substr(0, 100)), id(transaction.getIndex()), time(time)
|
||||
{
|
||||
}
|
||||
|
||||
template<class Archive>
|
||||
void serialize(Archive &ar) const
|
||||
{
|
||||
ar(cereal::make_nvp("uri", uri));
|
||||
ar(cereal::make_nvp("location", location));
|
||||
ar(cereal::make_nvp("param", param));
|
||||
ar(cereal::make_nvp("modelName", modelName));
|
||||
ar(cereal::make_nvp("otherModelName", otherModelName));
|
||||
ar(cereal::make_nvp("score", score));
|
||||
ar(cereal::make_nvp("otherScore", otherScore));
|
||||
ar(cereal::make_nvp("keywords", keywords));
|
||||
ar(cereal::make_nvp("sample", sample));
|
||||
ar(cereal::make_nvp("id", id));
|
||||
ar(cereal::make_nvp("time", time));
|
||||
}
|
||||
|
||||
std::string toString() const
|
||||
{
|
||||
std::stringstream message_stream;
|
||||
{
|
||||
cereal::JSONOutputArchive ar(message_stream);
|
||||
serialize(ar);
|
||||
}
|
||||
return message_stream.str();
|
||||
}
|
||||
|
||||
std::string uri;
|
||||
std::string location;
|
||||
std::string param;
|
||||
std::string modelName;
|
||||
std::string otherModelName;
|
||||
double score;
|
||||
double otherScore;
|
||||
std::vector<std::string> keywords;
|
||||
std::string sample;
|
||||
uint64_t id;
|
||||
uint64_t time;
|
||||
};
|
||||
@@ -48,8 +48,9 @@ public:
    m_tag = to_lower_copy(m_tag);

    if (m_tag != "sourceip" && m_tag != "sourceidentifier" && m_tag != "url" && m_tag != "hostname" &&
        m_tag != "keyword" && m_tag != "paramname" && m_tag != "paramvalue" && m_tag != "paramlocation" &&
        m_tag != "responsebody" && m_tag != "headername" && m_tag != "headervalue" && m_tag != "method") {
        m_tag != "keyword" && m_tag != "indicator" && m_tag != "paramname" && m_tag != "paramvalue" &&
        m_tag != "paramlocation" && m_tag != "responsebody" && m_tag != "headername" &&
        m_tag != "headervalue" && m_tag != "method") {
        m_isValid = false;
        dbgDebug(D_WAAP_OVERRIDE) << "Invalid override tag: " << m_tag;
    }

@@ -105,7 +105,7 @@ bool WaapOverrideFunctor::operator()(
        }
        return false;
    }
    else if (tagLower == "keyword") {
    else if (tagLower == "keyword" || tagLower == "indicator") {
        for (const auto &rx : rxes) {
            for (const std::string& keywordStr : waf2Transaction.getKeywordMatches()) {
                if (REGX_MATCH(keywordStr)) {

@@ -15,7 +15,6 @@
|
||||
#include "WaapScores.h"
|
||||
#include "Waf2Engine.h"
|
||||
#include "i_transaction.h"
|
||||
#include "WaapModelResultLogger.h"
|
||||
#include <string>
|
||||
#include "debug.h"
|
||||
#include "reputation_features_events.h"
|
||||
@@ -111,30 +110,8 @@ double Waap::Scanner::getScoreData(Waf2ScanResult& res, const std::string &poolN
|
||||
for (auto keyword : newKeywords) {
|
||||
res.keywordsAfterFilter.push_back(keyword);
|
||||
}
|
||||
res.scoreArray.clear();
|
||||
res.coefArray.clear();
|
||||
res.keywordCombinations.clear();
|
||||
|
||||
double res_score = getScoreFromPool(res, newKeywords, poolName);
|
||||
|
||||
std::string other_pool_name = Waap::Scores::getOtherScorePoolName();
|
||||
Waap::Scores::ModelLoggingSettings modelLoggingSettings = Waap::Scores::getModelLoggingSettings();
|
||||
|
||||
if (applyLearning && poolName != other_pool_name &&
|
||||
modelLoggingSettings.logLevel != Waap::Scores::ModelLogLevel::OFF) {
|
||||
double other_score = getScoreFromPool(res, newKeywords, other_pool_name);
|
||||
|
||||
dbgDebug(D_WAAP_SCANNER) << "Comparing score from pool " << poolName << ": " << res_score
|
||||
<< ", vs. pool " << other_pool_name << ": " << other_score
|
||||
<< ", score difference: " << res_score - other_score
|
||||
<< ", sample: " << res.unescaped_line;
|
||||
Singleton::Consume<I_WaapModelResultLogger>::by<WaapComponent>()->logModelResult(
|
||||
modelLoggingSettings, m_transaction, res, poolName, other_pool_name, res_score, other_score
|
||||
);
|
||||
res.other_model_score = other_score;
|
||||
} else {
|
||||
res.other_model_score = res_score;
|
||||
}
|
||||
return res_score;
|
||||
}
|
||||
|
||||
@@ -142,6 +119,9 @@ double Waap::Scanner::getScoreFromPool(
|
||||
Waf2ScanResult &res, const std::vector<std::string> &newKeywords, const std::string &poolName
|
||||
)
|
||||
{
|
||||
res.scoreArray.clear();
|
||||
res.coefArray.clear();
|
||||
res.keywordCombinations.clear();
|
||||
KeywordsStats stats = m_transaction->getAssetState()->scoreBuilder.getSnapshotStats(poolName);
|
||||
|
||||
if (!newKeywords.empty()) {
|
||||
|
||||
@@ -1358,7 +1358,7 @@ Waf2Transaction::isHtmlType(const char* data, int data_len){
|
||||
dbgTrace(D_WAAP) << "Waf2Transaction::isHtmlType: false";
|
||||
return false;
|
||||
}
|
||||
std::string body(data);
|
||||
std::string body(data, data_len);
|
||||
if(!m_pWaapAssetState->getSignatures()->html_regex.hasMatch(body))
|
||||
{
|
||||
dbgTrace(D_WAAP) << "Waf2Transaction::isHtmlType: false";
|
||||
@@ -1661,6 +1661,9 @@ void Waf2Transaction::appendCommonLogFields(LogGen& waapLog,
|
||||
waapLog << LogField("sourcePort", m_remote_port);
|
||||
waapLog << LogField("httpHostName", m_hostStr);
|
||||
waapLog << LogField("httpMethod", m_methodStr);
|
||||
if (!m_siteConfig->get_AssetId().empty()) waapLog << LogField("assetId", m_siteConfig->get_AssetId());
|
||||
if (!m_siteConfig->get_AssetName().empty()) waapLog << LogField("assetName", m_siteConfig->get_AssetName());
|
||||
|
||||
const auto& autonomousSecurityDecision = std::dynamic_pointer_cast<AutonomousSecurityDecision>(
|
||||
m_waapDecision.getDecision(AUTONOMOUS_SECURITY_DECISION));
|
||||
bool send_extended_log = shouldSendExtendedLog(triggerLog);
|
||||
@@ -2343,6 +2346,7 @@ Waf2Transaction::shouldIgnoreOverride(const Waf2ScanResult &res) {
|
||||
exceptions_dict["sourceIdentifier"].insert(m_source_identifier);
|
||||
exceptions_dict["url"].insert(getUriStr());
|
||||
exceptions_dict["hostName"].insert(m_hostStr);
|
||||
exceptions_dict["method"].insert(m_methodStr);
|
||||
|
||||
for (auto &keyword : res.keyword_matches) {
|
||||
exceptions_dict["indicator"].insert(keyword);
|
||||
@@ -2355,8 +2359,9 @@ Waf2Transaction::shouldIgnoreOverride(const Waf2ScanResult &res) {
|
||||
auto behaviors = exceptions.unpack().getBehavior(exceptions_dict,
|
||||
getAssetState()->m_filtersMngr->getMatchedOverrideKeywords());
|
||||
for (const auto &behavior : behaviors) {
|
||||
dbgTrace(D_WAAP_OVERRIDE) << "got behavior: " << behavior.getId();
|
||||
if (!res.filtered_keywords.empty() || res.score > 0) {
|
||||
dbgTrace(D_WAAP_OVERRIDE) << "matched exceptions for " << res.param_name << " with filtered indicators";
|
||||
dbgTrace(D_WAAP_OVERRIDE) << "matched exceptions for param '" << res.param_name << "' with filtered indicators";
|
||||
std::string overrideId = behavior.getId();
|
||||
if (m_overrideOriginalMaxScore.find(overrideId) == m_overrideOriginalMaxScore.end()){
|
||||
m_overrideOriginalMaxScore[overrideId] = res.scoreNoFilter;
|
||||
@@ -2375,7 +2380,7 @@ Waf2Transaction::shouldIgnoreOverride(const Waf2ScanResult &res) {
|
||||
}
|
||||
if (behavior == action_ignore)
|
||||
{
|
||||
dbgTrace(D_WAAP_OVERRIDE) << "matched exceptions for " << res.param_name << " should ignore.";
|
||||
dbgTrace(D_WAAP_OVERRIDE) << "matched exceptions for param '" << res.param_name << "': should ignore.";
|
||||
std::string overrideId = behavior.getId();
|
||||
if (!overrideId.empty()) {
|
||||
m_matchedOverrideIds.insert(overrideId);
|
||||
|
||||
@@ -41,7 +41,6 @@
|
||||
#include "i_waap_telemetry.h"
|
||||
#include "i_deepAnalyzer.h"
|
||||
#include "i_time_get.h"
|
||||
#include "i_waap_model_result_logger.h"
|
||||
#include "table_opaque.h"
|
||||
#include "WaapResponseInspectReasons.h"
|
||||
#include "WaapResponseInjectReasons.h"
|
||||
|
||||
@@ -35,6 +35,7 @@
|
||||
#include "user_identifiers_config.h"
|
||||
#include "Waf2Regex.h"
|
||||
#include "ParserBinaryFile.h"
|
||||
#include "ParserKnownBenignSkipper.h"
|
||||
|
||||
using boost::algorithm::to_lower_copy;
|
||||
using namespace std;
|
||||
@@ -1218,21 +1219,21 @@ static const SingleRegex csp_report_policy_re(
|
||||
"csp_report_policy"
|
||||
);
|
||||
static const SingleRegex base64_key_value_detector_re(
|
||||
"^[^<>{};,&\\?|=\\s]+={1}\\s*.+",
|
||||
err,
|
||||
"base64_key_value");
|
||||
"^[^<>{};,&\\?|=\\s]+={1}\\s*.+",
|
||||
err,
|
||||
"base64_key_value");
|
||||
static const SingleRegex json_key_value_detector_re(
|
||||
"\\A[^<>{};,&\\?|=\\s]+=[{\\[][^;\",}\\]]*[,:\"].+[\\s\\S]",
|
||||
err,
|
||||
"json_key_value");
|
||||
err,
|
||||
"json_key_value");
|
||||
static const SingleRegex base64_key_detector_re(
|
||||
"^[^<>{};,&\\?|=\\s]+={1}",
|
||||
err,
|
||||
"base64_key");
|
||||
"^[^<>{};,&\\?|=\\s]+={1}",
|
||||
err,
|
||||
"base64_key");
|
||||
static const SingleRegex base64_prefix_detector_re(
|
||||
"data:\\S*;base64,\\S+|base64,\\S+",
|
||||
err,
|
||||
"base64_prefix");
|
||||
"data:\\S*;base64,\\S+|base64,\\S+",
|
||||
err,
|
||||
"base64_prefix");
|
||||
|
||||
// looks for combination <param>={<some text>*:<some text>*}
|
||||
//used to allow parsing param=JSON to reduce false positives
|
||||
|
||||
@@ -894,6 +894,16 @@ namespace Util {
|
||||
|
||||
bool isValidJson(const std::string &input);
|
||||
|
||||
enum KnownSourceType {
|
||||
SOURCE_TYPE_UNKNOWN = 0,
|
||||
SOURCE_TYPE_SENSOR_DATA = 1
|
||||
};
|
||||
|
||||
KnownSourceType detectKnownSource(const std::string &input);
|
||||
bool isScreenedJson(const std::string &input);
|
||||
|
||||
int definePrefixedJson(const std::string &input);
|
||||
|
||||
bool detectJSONasParameter(const std::string &s,
|
||||
std::string &key,
|
||||
std::string &value);
|
||||
|
||||
@@ -1,5 +1,7 @@
#include "Waf2Util.h"
#include "Waf2Regex.h"
#include <string>
#include "debug.h"

namespace Waap {
namespace Util {
@@ -628,5 +630,52 @@ isValidJson(const std::string &input)
    return false;
}

KnownSourceType
detectKnownSource(const std::string &input)
{
    static bool err = false;
    static const SingleRegex known_source_sensor_data_re(
        "^\\{\\\"sensor_data\\\":\\\"",
        err,
        "known_source_sensor_data"
    );
    if (known_source_sensor_data_re.hasMatch(input)) {
        return SOURCE_TYPE_SENSOR_DATA;
    }
    return SOURCE_TYPE_UNKNOWN;
}

int
definePrefixedJson(const std::string &input)
{
    static const size_t MAX_JSON_PREFIX_LEN = 32;
    static const size_t MIN_PARAMETER_LEN = 4;
    if (input.size() < MIN_PARAMETER_LEN) {
        return -1;
    }

    for (size_t i = 0; i < std::min(input.size(), MAX_JSON_PREFIX_LEN) - 2 ; ++i) {
        if (input[i] == '-' && input[i+1] == '{') return i + 1;
    }

    return -1;
}

bool
isScreenedJson(const std::string &input)
{
    static bool err = false;
    static const SingleRegex screened_json_re(
        R"(^"{\s*\\"\w+\\"\s*:\s*\\"["\w])",
        err,
        "screened_json"
    );

    if (screened_json_re.hasMatch(input)) {
        return true;
    }
    return false;
}

} // namespace Util
} // namespace Waap

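A hedged usage sketch of the three helpers introduced in this hunk, to illustrate the inputs they are meant to recognize. It assumes the agent's Waap headers and build environment; the literals are made-up examples, not fixtures from the repository:

```cpp
// Illustrative usage only; not a unit test taken from the repository.
#include "Waf2Util.h"
#include <cassert>

void waf2UtilAdditionsExample()
{
    using namespace Waap::Util;

    // sensor_data payloads are recognized so they can be skipped as benign
    assert(detectKnownSource(R"({"sensor_data":"....")") == SOURCE_TYPE_SENSOR_DATA);
    assert(detectKnownSource(R"({"user":"alice"})") == SOURCE_TYPE_UNKNOWN);

    // "screened" JSON: a JSON document escaped inside another JSON string
    assert(isScreenedJson(R"("{\"a\":\"b\"}")"));

    // prefixed JSON (e.g. a cookie value "<prefix>-{...}"): returns the offset of '{'
    assert(definePrefixedJson("abc-{\"k\":1}") == 4);
    assert(definePrefixedJson("{}") == -1); // too short, no prefix marker found
}
```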
@@ -50,8 +50,7 @@ WaapComponent::Impl::Impl() :
|
||||
drop_response(ngx_http_cp_verdict_e::TRAFFIC_VERDICT_DROP),
|
||||
waapStateTable(NULL),
|
||||
transactionsCount(0),
|
||||
deepAnalyzer(),
|
||||
waapModelResultLogger()
|
||||
deepAnalyzer()
|
||||
{
|
||||
}
|
||||
|
||||
@@ -536,9 +535,15 @@ WaapComponent::Impl::respond(const HttpResponseBodyEvent &event)
|
||||
verdict = drop_response.getVerdict();
|
||||
}
|
||||
|
||||
bool sould_inject_response = waf2Transaction.shouldInjectResponse();
|
||||
// in Chunked transfer encoding the last chunk is always empty - and we leave it empty
|
||||
bool should_stay_empty_chunk = event.isLastChunk() && dataBufLen == 0;
|
||||
dbgTrace(D_WAAP)
|
||||
<< (sould_inject_response ? "should Inject Response" : "should not Inject Response")
|
||||
<< (should_stay_empty_chunk ? " empty last chunk will stay empty" : "");
|
||||
if (verdict == pending_response.getVerdict() &&
|
||||
waf2Transaction.shouldInjectResponse() &&
|
||||
!event.isLastChunk()
|
||||
sould_inject_response &&
|
||||
!should_stay_empty_chunk
|
||||
) {
|
||||
// Inject if needed. Note that this is only reasonable to do if there was no DROP decision above
|
||||
|
||||
|
||||
@@ -19,7 +19,6 @@
|
||||
#include "table_opaque.h"
|
||||
#include "i_transaction.h"
|
||||
#include "waap_clib/DeepAnalyzer.h"
|
||||
#include "waap_clib/WaapModelResultLogger.h"
|
||||
#include "waap_clib/WaapAssetState.h"
|
||||
#include "waap_clib/WaapAssetStatesManager.h"
|
||||
#include "reputation_features_agg.h"
|
||||
@@ -81,7 +80,6 @@ private:
|
||||
uint64_t transactionsCount;
|
||||
// instance of singleton classes
|
||||
DeepAnalyzer deepAnalyzer;
|
||||
WaapModelResultLogger waapModelResultLogger;
|
||||
WaapAssetStatesManager waapAssetStatesManager;
|
||||
std::unordered_set<std::string> m_seen_assets_id;
|
||||
};
|
||||
|
||||
@@ -5,3 +5,5 @@ add_subdirectory(ip_utilities)
|
||||
add_subdirectory(keywords)
|
||||
add_subdirectory(pm)
|
||||
add_subdirectory(service_health_status)
|
||||
add_subdirectory(nginx_utils)
|
||||
add_subdirectory(utilities)
|
||||
|
||||
@@ -111,7 +111,7 @@ MatchQuery::load(cereal::JSONInputArchive &archive_in)
|
||||
is_specific_label = false;
|
||||
}
|
||||
}
|
||||
is_ignore_keyword = (key == "indicator");
|
||||
is_ignore_keyword = (key == "indicator" || key == "keyword");
|
||||
|
||||
if (condition_type != Conditions::Exist) {
|
||||
archive_in(cereal::make_nvp("value", value));
@@ -244,9 +244,10 @@ MatchQuery::getAllKeys() const
bool
MatchQuery::matchAttributes(
    const unordered_map<string, set<string>> &key_value_pairs,
    set<string> &matched_override_keywords ) const
    set<string> &matched_override_keywords) const
{
    dbgTrace(D_RULEBASE_CONFIG) << "Start matching attributes";
    if (type == MatchType::Condition) {
        auto key_value_pair = key_value_pairs.find(key);
        if (key_value_pair == key_value_pairs.end()) {
@@ -257,9 +258,11 @@ MatchQuery::matchAttributes(
    } else if (type == MatchType::Operator && operator_type == Operators::And) {
        for (const MatchQuery &inner_match: items) {
            if (!inner_match.matchAttributes(key_value_pairs, matched_override_keywords)) {
                dbgTrace(D_RULEBASE_CONFIG) << "Failed to match attributes for AND operator";
                return false;
            }
        }
        dbgTrace(D_RULEBASE_CONFIG) << "Successfully matched all inner matches for AND operator";
        return true;
    } else if (type == MatchType::Operator && operator_type == Operators::Or) {
        // With 'or' condition, evaluate matched override keywords first and add the ones that were fully matched
@@ -272,6 +275,7 @@ MatchQuery::matchAttributes(
                res = true;
            }
        }
        dbgTrace(D_RULEBASE_CONFIG) << "Match result for OR operator is: " << res;
        return res;
    } else {
        dbgWarning(D_RULEBASE_CONFIG) << "Unsupported match query type";
@@ -285,6 +289,7 @@ MatchQuery::getMatch( const unordered_map<string, set<string>> &key_value_pairs)
    MatchQuery::MatchResult matches;
    matches.matched_keywords = make_shared<set<string>>();
    matches.is_match = matchAttributes(key_value_pairs, *matches.matched_keywords);
    dbgTrace(D_RULEBASE_CONFIG) << "Match result: " << matches.is_match;
    return matches;
}

@@ -306,10 +311,13 @@ MatchQuery::matchAttributes(

    if (isKeyTypeIp()) {
        match = matchAttributesIp(values);
        dbgTrace(D_RULEBASE_CONFIG) << "Match result for IP address: " << match;
    } else if (isRegEx()) {
        match = matchAttributesRegEx(values, matched_override_keywords);
        dbgTrace(D_RULEBASE_CONFIG) << "Match result for regex: " << match;
    } else {
        match = matchAttributesString(values);
        dbgTrace(D_RULEBASE_CONFIG) << "Match result for string: " << match;
    }

    return negate ? !match : match;
@@ -324,6 +332,8 @@ MatchQuery::matchAttributesRegEx(
    boost::cmatch value_matcher;
    for (const boost::regex &val_regex : regex_values) {
        for (const string &requested_match_value : values) {
            dbgTrace(D_RULEBASE_CONFIG) << "Matching value: '" << requested_match_value
                << "' with regex: '" << val_regex << "'";
            if (NGEN::Regex::regexMatch(
                __FILE__,
                __LINE__,

@@ -119,10 +119,13 @@ ParameterException::getBehavior(
    for (const MatchBehaviorPair &match_behavior_pair: match_queries) {
        MatchQuery::MatchResult match_res = match_behavior_pair.match.getMatch(key_value_pairs);
        if (match_res.is_match) {
            dbgTrace(D_RULEBASE_CONFIG) << "Successfully matched an exception from a list of matches.";
            dbgTrace(D_RULEBASE_CONFIG)
                << "Successfully matched an exception from a list of matches, behavior: "
                << match_behavior_pair.behavior.getId();
            // When matching indicators with action=ignore, we expect no behavior override.
            // Instead, a matched keywords list should be returned which will be later removed from score calculation
            if (match_res.matched_keywords->size() > 0 && match_behavior_pair.behavior == action_ignore) {
                dbgTrace(D_RULEBASE_CONFIG) << "Got action ignore";
                matched_override_keywords.insert(match_res.matched_keywords->begin(),
                                                 match_res.matched_keywords->end());
            } else {
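The hunks above mostly add trace logging around the existing matching flow; the public entry point remains `MatchQuery::getMatch()`. As a quick orientation, a minimal, hypothetical usage sketch (the `query` object and the attribute keys and values are illustrative and not taken from the diff):

```cpp
// Hypothetical sketch: evaluating an already-loaded MatchQuery against request
// attributes, using the getMatch()/MatchResult API shown in the hunks above.
// Requires the component's MatchQuery declaration.
#include <set>
#include <string>
#include <unordered_map>

void
evaluateExample(const MatchQuery &query)
{
    std::unordered_map<std::string, std::set<std::string>> key_value_pairs = {
        {"sourceIdentifier", {"10.0.0.1"}},      // illustrative attribute
        {"url",              {"/api/resource"}}  // illustrative attribute
    };

    MatchQuery::MatchResult res = query.getMatch(key_value_pairs);
    if (res.is_match) {
        // For action=ignore exceptions the behavior is not overridden; instead the
        // matched keywords are collected and later removed from score calculation.
        for (const std::string &keyword : *res.matched_keywords) {
            (void)keyword;  // e.g. exclude this keyword from the score
        }
    }
}
```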
1 components/utils/nginx_utils/CMakeLists.txt Executable file
@@ -0,0 +1 @@
add_library(nginx_utils nginx_utils.cc)
281 components/utils/nginx_utils/nginx_utils.cc Executable file
@@ -0,0 +1,281 @@
// Copyright (C) 2022 Check Point Software Technologies Ltd. All rights reserved.

// Licensed under the Apache License, Version 2.0 (the "License");
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "nginx_utils.h"

#include <iostream>
#include <fstream>
#include <sstream>
#include <string>
#include <vector>
#include <dirent.h>
#include <boost/regex.hpp>

#include "debug.h"
#include "maybe_res.h"
#include "config.h"
#include "agent_core_utilities.h"

using namespace std;

USE_DEBUG_FLAG(D_NGINX_MANAGER);

NginxConfCollector::NginxConfCollector(const string &input_path, const string &output_path)
        :
    main_conf_input_path(input_path),
    main_conf_output_path(output_path)
{
    main_conf_directory_path = main_conf_input_path.substr(0, main_conf_input_path.find_last_of('/'));
}

vector<string>
NginxConfCollector::expandIncludes(const string &include_pattern) const {
    vector<string> matching_files;
    string absolute_include_pattern = include_pattern;
    string maybe_directory = include_pattern.substr(0, include_pattern.find_last_of('/'));
    if (!maybe_directory.empty() && maybe_directory.front() != '/') {
        dbgTrace(D_NGINX_MANAGER) << "Include pattern is a relative path: " << include_pattern;
        maybe_directory = main_conf_directory_path + '/' + maybe_directory;
        absolute_include_pattern = main_conf_directory_path + '/' + include_pattern;
    }

    if (!NGEN::Filesystem::exists(maybe_directory)) {
        dbgTrace(D_NGINX_MANAGER) << "Include pattern directory/file does not exist: " << maybe_directory;
        return matching_files;
    }

    string filename_pattern = absolute_include_pattern.substr(absolute_include_pattern.find_last_of('/') + 1);
    boost::regex wildcard_regex("\\*");
    boost::regex pattern(
        NGEN::Regex::regexReplace(__FILE__, __LINE__, filename_pattern, wildcard_regex, string("[^/]*"))
    );

    if (!NGEN::Filesystem::isDirectory(maybe_directory)) {
        dbgTrace(D_NGINX_MANAGER) << "Include pattern is a file: " << absolute_include_pattern;
        matching_files.push_back(absolute_include_pattern);
        return matching_files;
    }

    DIR* dir = opendir(maybe_directory.c_str());
    if (!dir) {
        dbgTrace(D_NGINX_MANAGER) << "Could not open directory: " << maybe_directory;
        return matching_files;
    }

    struct dirent *entry;
    while ((entry = readdir(dir)) != nullptr) {
        if (strcmp(entry->d_name, ".") == 0 || strcmp(entry->d_name, "..") == 0) continue;

        if (NGEN::Regex::regexMatch(__FILE__, __LINE__, entry->d_name, pattern)) {
            matching_files.push_back(maybe_directory + "/" + entry->d_name);
            dbgTrace(D_NGINX_MANAGER) << "Matched file: " << maybe_directory << '/' << entry->d_name;
        }
    }
    closedir(dir);

    return matching_files;
}

void
NginxConfCollector::processConfigFile(const string &path, ostringstream &conf_output, vector<string> &errors) const
{
    ifstream file(path);
    if (!file.is_open()) return;

    string content((istreambuf_iterator<char>(file)), istreambuf_iterator<char>());
    file.close();

    dbgTrace(D_NGINX_MANAGER) << "Processing file: " << path;

    if (content.empty()) return;

    try {
        boost::regex include_regex(R"(^\s*include\s+([^;]+);)");
        boost::smatch match;

        while (NGEN::Regex::regexSearch(__FILE__, __LINE__, content, match, include_regex)) {
            string include_pattern = match[1].str();
            include_pattern = NGEN::Strings::trim(include_pattern);
            dbgTrace(D_NGINX_MANAGER) << "Include pattern: " << include_pattern;

            vector<string> included_files = expandIncludes(include_pattern);
            if (included_files.empty()) {
                dbgTrace(D_NGINX_MANAGER) << "No files matched the include pattern: " << include_pattern;
                content.replace(match.position(), match.length(), "");
                continue;
            }

            ostringstream included_content;
            for (const string &included_file : included_files) {
                dbgTrace(D_NGINX_MANAGER) << "Processing included file: " << included_file;
                processConfigFile(included_file, included_content, errors);
            }
            content.replace(match.position(), match.length(), included_content.str());
        }
    } catch (const boost::regex_error &e) {
        errors.emplace_back(e.what());
        return;
    } catch (const exception &e) {
        errors.emplace_back(e.what());
        return;
    }

    conf_output << content;
}

Maybe<string>
NginxConfCollector::generateFullNginxConf() const
{
    if (!NGEN::Filesystem::exists(main_conf_input_path)) {
        return genError("Input file does not exist: " + main_conf_input_path);
    }

    ostringstream conf_output;
    vector<string> errors;
    processConfigFile(main_conf_input_path, conf_output, errors);

    if (!errors.empty()) {
        for (const string &error : errors) dbgWarning(D_NGINX_MANAGER) << error;
        return genError("Errors occurred while processing configuration files");
    }

    ofstream single_nginx_conf_file(main_conf_output_path);
    if (!single_nginx_conf_file.is_open()) return genError("Could not create output file: " + main_conf_output_path);

    single_nginx_conf_file << conf_output.str();
    single_nginx_conf_file.close();

    return NGEN::Filesystem::resolveFullPath(main_conf_output_path);
}

string
NginxUtils::getMainNginxConfPath()
{
    static string main_nginx_conf_path;
    if (!main_nginx_conf_path.empty()) return main_nginx_conf_path;

    auto main_nginx_conf_path_setting = getProfileAgentSetting<string>("centralNginxManagement.mainConfPath");
    if (main_nginx_conf_path_setting.ok()) {
        main_nginx_conf_path = main_nginx_conf_path_setting.unpack();
        return main_nginx_conf_path;
    }

    string default_main_nginx_conf_path = "/etc/nginx/nginx.conf";
    string command = "nginx -V 2>&1";
    auto result = Singleton::Consume<I_ShellCmd>::by<NginxUtils>()->getExecOutputAndCode(command);
    if (!result.ok()) return default_main_nginx_conf_path;

    string output = result.unpack().first;
    boost::regex conf_regex(R"(--conf-path=([^ ]+))");
    boost::smatch match;
    if (!NGEN::Regex::regexSearch(__FILE__, __LINE__, output, match, conf_regex)) {
        main_nginx_conf_path = default_main_nginx_conf_path;
        return main_nginx_conf_path;
    }

    string conf_path = match[1].str();
    conf_path = NGEN::Strings::trim(conf_path);
    if (conf_path.empty()) {
        main_nginx_conf_path = default_main_nginx_conf_path;
        return main_nginx_conf_path;
    }

    main_nginx_conf_path = conf_path;
    return main_nginx_conf_path;
}

string
NginxUtils::getModulesPath()
{
    static string main_modules_path;
    if (!main_modules_path.empty()) return main_modules_path;

    auto modules_path_setting = getProfileAgentSetting<string>("centralNginxManagement.modulesPath");
    if (modules_path_setting.ok()) {
        main_modules_path = modules_path_setting.unpack();
        return main_modules_path;
    }

    string default_modules_path = "/usr/share/nginx/modules";
    string command = "nginx -V 2>&1";
    auto result = Singleton::Consume<I_ShellCmd>::by<NginxUtils>()->getExecOutputAndCode(command);
    if (!result.ok()) return default_modules_path;

    string output = result.unpack().first;
    boost::regex modules_regex(R"(--modules-path=([^ ]+))");
    boost::smatch match;
    if (!NGEN::Regex::regexSearch(__FILE__, __LINE__, output, match, modules_regex)) {
        main_modules_path = default_modules_path;
        return main_modules_path;
    }

    string modules_path = match[1].str();
    modules_path = NGEN::Strings::trim(modules_path);
    if (modules_path.empty()) {
        main_modules_path = default_modules_path;
        return main_modules_path;
    }

    main_modules_path = modules_path;
    return modules_path;
}

Maybe<void>
NginxUtils::validateNginxConf(const string &nginx_conf_path)
{
    dbgTrace(D_NGINX_MANAGER) << "Validating NGINX configuration file: " << nginx_conf_path;
    if (!NGEN::Filesystem::exists(nginx_conf_path)) return genError("Nginx configuration file does not exist");

    string command = "nginx -t -c " + nginx_conf_path + " 2>&1";
    auto result = Singleton::Consume<I_ShellCmd>::by<NginxUtils>()->getExecOutputAndCode(command);
    if (!result.ok()) return genError(result.getErr());
    if (result.unpack().second != 0) return genError(result.unpack().first);

    dbgTrace(D_NGINX_MANAGER) << "NGINX configuration file is valid";

    return {};
}

Maybe<void>
NginxUtils::reloadNginx(const string &nginx_conf_path)
{
    dbgTrace(D_NGINX_MANAGER) << "Applying and reloading new NGINX configuration file: " << nginx_conf_path;
    string main_nginx_conf_path = getMainNginxConfPath();

    string backup_conf_path = main_nginx_conf_path + ".bak";
    if (
        NGEN::Filesystem::exists(main_nginx_conf_path)
        && !NGEN::Filesystem::copyFile(main_nginx_conf_path, backup_conf_path, true)
    ) {
        return genError("Could not create backup of NGINX configuration file");
    }

    dbgTrace(D_NGINX_MANAGER) << "Copying new NGINX configuration file to: " << main_nginx_conf_path;
    if (!NGEN::Filesystem::copyFile(nginx_conf_path, main_nginx_conf_path, true)) {
        return genError("Could not copy new NGINX configuration file");
    }

    string command = "nginx -s reload 2>&1";
    auto result = Singleton::Consume<I_ShellCmd>::by<NginxUtils>()->getExecOutputAndCode(command);
    if (!result.ok() || result.unpack().second != 0) {
        if (!NGEN::Filesystem::copyFile(backup_conf_path, main_nginx_conf_path, true)) {
            return genError("Could not restore backup of NGINX configuration file");
        }
        dbgTrace(D_NGINX_MANAGER) << "Successfully restored backup of NGINX configuration file";
        return result.ok() ? genError(result.unpack().first) : genError(result.getErr());
    }

    dbgInfo(D_NGINX_MANAGER) << "Successfully reloaded NGINX configuration file";

    return {};
}
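For context, a minimal usage sketch of the new classes (not part of the diff): it assumes the paths shown and, like the standalone collector utility added later in this commit, an initialized ShellCmd/Mainloop environment, since `NginxUtils` runs `nginx -t` / `nginx -s reload` through the `I_ShellCmd` singleton.

```cpp
// Hypothetical sketch built on nginx_utils.h from this commit; paths are examples only.
#include <iostream>

void
flattenValidateAndReload()
{
    // Recursively inlines `include` directives into one configuration file.
    NginxConfCollector collector("/etc/nginx/nginx.conf", "/tmp/full_nginx.conf");
    auto flattened = collector.generateFullNginxConf();
    if (!flattened.ok()) {
        std::cerr << flattened.getErr() << '\n';
        return;
    }

    // `nginx -t`-style validation before anything is applied.
    auto valid = NginxUtils::validateNginxConf(flattened.unpack());
    if (!valid.ok()) {
        std::cerr << valid.getErr() << '\n';
        return;
    }

    // Backs up the current main configuration, installs the new one, reloads NGINX,
    // and restores the backup if the reload fails.
    auto reloaded = NginxUtils::reloadNginx(flattened.unpack());
    if (!reloaded.ok()) std::cerr << reloaded.getErr() << '\n';
}
```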
@@ -46,7 +46,7 @@ panicCFmt(const string &func, uint line, const char *fmt, ...)
{
    va_list va;
    va_start(va, fmt);
    Debug("PM", func, line).getStreamAggr() << CFmtPrinter(fmt, va);
    Debug("PM", func, line, true).getStreamAggr() << CFmtPrinter(fmt, va);
    va_end(va);
}
1 components/utils/utilities/CMakeLists.txt Executable file
@@ -0,0 +1 @@
add_subdirectory(nginx_conf_collector)
37 components/utils/utilities/nginx_conf_collector/CMakeLists.txt Executable file
@@ -0,0 +1,37 @@
include_directories(${PROJECT_SOURCE_DIR}/core/include/)

link_directories(${Boost_LIBRARY_DIRS})
link_directories(${ZLIB_ROOT}/lib)

link_directories(${ZLIB_ROOT}/lib)
link_directories(${CMAKE_BINARY_DIR}/core)
link_directories(${CMAKE_BINARY_DIR}/core/compression)

SET(EXECUTABLE_NAME "nginx_conf_collector_bin")
add_executable(${EXECUTABLE_NAME} nginx_conf_collector.cc)
target_compile_definitions(${EXECUTABLE_NAME} PRIVATE "NGINX_CONF_COLLECTOR_VERSION=\"$ENV{CI_PIPELINE_ID}\"")

target_link_libraries(${EXECUTABLE_NAME}
    shell_cmd
    mainloop
    messaging
    event_is
    metric
    compression_utils
    z
    nginx_utils
    time_proxy
    debug_is
    version
    report
    config
    environment
    singleton
    rest
    boost_context
    boost_regex
    pthread
)

install(TARGETS ${EXECUTABLE_NAME} DESTINATION bin)
install(PROGRAMS ${EXECUTABLE_NAME} DESTINATION central_nginx_manager/bin RENAME cp-nano-nginx-conf-collector)
148 components/utils/utilities/nginx_conf_collector/nginx_conf_collector.cc Executable file
@@ -0,0 +1,148 @@
// Copyright (C) 2022 Check Point Software Technologies Ltd. All rights reserved.

// Licensed under the Apache License, Version 2.0 (the "License");
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include <iostream>
#include <unistd.h>

#include "agent_core_utilities.h"
#include "debug.h"
#include "internal/shell_cmd.h"
#include "mainloop.h"
#include "nginx_utils.h"
#include "time_proxy.h"

using namespace std;

USE_DEBUG_FLAG(D_NGINX_MANAGER);

class MainComponent
{
public:
    MainComponent()
    {
        time_proxy.init();
        environment.init();
        mainloop.init();
        shell_cmd.init();
    }

    ~MainComponent()
    {
        shell_cmd.fini();
        mainloop.fini();
        environment.fini();
        time_proxy.fini();
    }
private:
    ShellCmd shell_cmd;
    MainloopComponent mainloop;
    Environment environment;
    TimeProxyComponent time_proxy;
};

void
printVersion()
{
#ifdef NGINX_CONF_COLLECTOR_VERSION
    cout << "Check Point NGINX configuration collector version: " << NGINX_CONF_COLLECTOR_VERSION << '\n';
#else
    cout << "Check Point NGINX configuration collector version: Private" << '\n';
#endif
}

void
printUsage(const char *prog_name)
{
    cout << "Usage: " << prog_name << " [-v] [-i /path/to/nginx.conf] [-o /path/to/output.conf]" << '\n';
    cout << "  -V              Print version" << '\n';
    cout << "  -v              Enable verbose output" << '\n';
    cout << "  -i input_file   Specify input file (default is /etc/nginx/nginx.conf)" << '\n';
    cout << "  -o output_file  Specify output file (default is ./full_nginx.conf)" << '\n';
    cout << "  -h              Print this help message" << '\n';
}

int
main(int argc, char *argv[])
{
    string nginx_input_file = "/etc/nginx/nginx.conf";
    string nginx_output_file = "full_nginx.conf";

    int opt;
    while ((opt = getopt(argc, argv, "Vvhi:o:h")) != -1) {
        switch (opt) {
            case 'V':
                printVersion();
                return 0;
            case 'v':
                Debug::setUnitTestFlag(D_NGINX_MANAGER, Debug::DebugLevel::TRACE);
                break;
            case 'i':
                nginx_input_file = optarg;
                break;
            case 'o':
                nginx_output_file = optarg;
                break;
            case 'h':
                printUsage(argv[0]);
                return 0;
            default:
                printUsage(argv[0]);
                return 1;
        }
    }

    for (int i = optind; i < argc;) {
        cerr << "Unknown argument: " << argv[i] << '\n';
        printUsage(argv[0]);
        return 1;
    }

    dbgTrace(D_NGINX_MANAGER) << "Starting nginx configuration collector";

    MainComponent main_component;
    auto validation_result = NginxUtils::validateNginxConf(nginx_input_file);
    if (!validation_result.ok()) {
        cerr
            << "Could not validate nginx configuration file: "
            << nginx_input_file
            << '\n'
            << validation_result.getErr();
        return 1;
    }

    NginxConfCollector nginx_collector(nginx_input_file, nginx_output_file);
    auto result = nginx_collector.generateFullNginxConf();
    if (!result.ok()) {
        cerr << "Could not generate full nginx configuration file, error: " << result.getErr() << '\n';
        return 1;
    }

    if (result.unpack().empty() || !NGEN::Filesystem::exists(result.unpack())) {
        cerr << "Generated nginx configuration file does not exist: " << result.unpack() << '\n';
        return 1;
    }

    validation_result = NginxUtils::validateNginxConf(result.unpack());
    if (!validation_result.ok()) {
        cerr
            << "Could not validate generated nginx configuration file: "
            << nginx_output_file
            << '\n'
            << validation_result.getErr();
        return 1;
    }

    cout << "Full nginx configuration file was successfully generated: " << result.unpack() << '\n';

    return 0;
}
@@ -138,6 +138,8 @@ spec:
|
||||
items:
|
||||
type: object
|
||||
properties:
|
||||
name:
|
||||
type: string
|
||||
host:
|
||||
type: string
|
||||
mode:
|
||||
@@ -182,142 +184,6 @@ spec:
|
||||
---
|
||||
apiVersion: apiextensions.k8s.io/v1
|
||||
kind: CustomResourceDefinition
|
||||
metadata :
|
||||
name : practices.openappsec.io
|
||||
|
||||
spec:
|
||||
group: openappsec.io
|
||||
versions:
|
||||
- name: v1beta1
|
||||
served: true
|
||||
storage: true
|
||||
schema:
|
||||
openAPIV3Schema:
|
||||
type: object
|
||||
properties:
|
||||
spec:
|
||||
type: object
|
||||
properties:
|
||||
web-attacks:
|
||||
type: object
|
||||
properties:
|
||||
override-mode:
|
||||
type: string
|
||||
enum:
|
||||
- prevent-learn
|
||||
- detect-learn
|
||||
- prevent
|
||||
- detect
|
||||
- inactive
|
||||
minimum-confidence:
|
||||
type: string
|
||||
enum:
|
||||
- medium
|
||||
- high
|
||||
- critical
|
||||
max-url-size-bytes:
|
||||
type: integer
|
||||
max-object-depth:
|
||||
type: integer
|
||||
max-body-size-kb:
|
||||
type: integer
|
||||
max-header-size-bytes:
|
||||
type: integer
|
||||
protections:
|
||||
type: object
|
||||
properties:
|
||||
csrf-enabled:
|
||||
type: string
|
||||
enum:
|
||||
- prevent-learn
|
||||
- detect-learn
|
||||
- prevent
|
||||
- detect
|
||||
- inactive
|
||||
error-disclosure-enabled:
|
||||
type: string
|
||||
enum:
|
||||
- prevent-learn
|
||||
- detect-learn
|
||||
- prevent
|
||||
- detect
|
||||
- inactive
|
||||
open-redirect-enabled:
|
||||
type: string
|
||||
enum:
|
||||
- prevent-learn
|
||||
- detect-learn
|
||||
- prevent
|
||||
- detect
|
||||
- inactive
|
||||
non-valid-http-methods:
|
||||
type: boolean
|
||||
anti-bot:
|
||||
type: object
|
||||
properties:
|
||||
override-mode:
|
||||
type: string
|
||||
enum:
|
||||
- prevent-learn
|
||||
- detect-learn
|
||||
- prevent
|
||||
- detect
|
||||
- inactive
|
||||
injected-URIs:
|
||||
type: array
|
||||
items:
|
||||
type: object
|
||||
properties:
|
||||
uri:
|
||||
type: string
|
||||
validated-URIs:
|
||||
type: array
|
||||
items:
|
||||
type: object
|
||||
properties:
|
||||
uri:
|
||||
type: string
|
||||
snort-signatures:
|
||||
type: object
|
||||
properties:
|
||||
override-mode:
|
||||
type: string
|
||||
enum:
|
||||
- prevent-learn
|
||||
- detect-learn
|
||||
- prevent
|
||||
- detect
|
||||
- inactive
|
||||
configmap:
|
||||
type: array
|
||||
items:
|
||||
type: string
|
||||
openapi-schema-validation:
|
||||
type: object
|
||||
properties:
|
||||
override-mode:
|
||||
type: string
|
||||
enum:
|
||||
- prevent-learn
|
||||
- detect-learn
|
||||
- prevent
|
||||
- detect
|
||||
- inactive
|
||||
configmap:
|
||||
type: array
|
||||
items:
|
||||
type: string
|
||||
|
||||
scope: Cluster
|
||||
names:
|
||||
plural: practices
|
||||
singular: practice
|
||||
kind: Practice
|
||||
shortNames:
|
||||
- practice
|
||||
---
|
||||
apiVersion: apiextensions.k8s.io/v1
|
||||
kind: CustomResourceDefinition
|
||||
metadata :
|
||||
name : accesscontrolpractices.openappsec.io
|
||||
creationTimestamp: null
|
||||
@@ -338,8 +204,6 @@ spec:
|
||||
properties:
|
||||
appsecClassName:
|
||||
type: string
|
||||
name:
|
||||
type: string
|
||||
practiceMode:
|
||||
type: string
|
||||
enum:
|
||||
@@ -431,7 +295,7 @@ spec:
|
||||
type: string
|
||||
enum:
|
||||
- block-page
|
||||
#- redirect
|
||||
- redirect
|
||||
- response-code-only
|
||||
message-title:
|
||||
type: string
|
||||
@@ -455,8 +319,6 @@ spec:
|
||||
properties:
|
||||
appsecClassName:
|
||||
type: string
|
||||
name:
|
||||
type: string
|
||||
mode:
|
||||
type: string
|
||||
enum:
|
||||
@@ -569,8 +431,6 @@ spec:
|
||||
properties:
|
||||
appsecClassName:
|
||||
type: string
|
||||
name:
|
||||
type: string
|
||||
action:
|
||||
type: string
|
||||
enum:
|
||||
@@ -718,8 +578,6 @@ spec:
|
||||
properties:
|
||||
appsecClassName:
|
||||
type: string
|
||||
name:
|
||||
type: string
|
||||
accessControlLogging:
|
||||
type: object
|
||||
properties:
|
||||
@@ -801,8 +659,8 @@ spec:
|
||||
- json
|
||||
- json-formatted
|
||||
default: json
|
||||
k8s-service:
|
||||
type: boolean # Default value is dependant on the environment type
|
||||
local-tuning:
|
||||
type: boolean
|
||||
cefService:
|
||||
type: array
|
||||
items:
|
||||
@@ -873,8 +731,6 @@ spec:
|
||||
properties:
|
||||
appsecClassName:
|
||||
type: string
|
||||
name:
|
||||
type: string
|
||||
sourcesIdentifiers: # required, minItems: 1
|
||||
type: array
|
||||
items:
|
||||
@@ -929,8 +785,6 @@ spec:
|
||||
properties:
|
||||
appsecClassName:
|
||||
type: string
|
||||
name:
|
||||
type: string
|
||||
practiceMode:
|
||||
type: string
|
||||
enum:
|
||||
@@ -1078,6 +932,8 @@ spec:
|
||||
- inactive
|
||||
- inherited #inherited from threatPreventionPractice mode set in policy
|
||||
default: inactive
|
||||
enforcementLevel:
|
||||
type: string
|
||||
configmap:
|
||||
type: array
|
||||
items:
|
||||
@@ -1303,8 +1159,6 @@ spec:
|
||||
properties:
|
||||
appsecClassName:
|
||||
type: string
|
||||
name:
|
||||
type: string
|
||||
minNumOfSources:
|
||||
type: integer
|
||||
default: 3
|
||||
|
||||
@@ -14,7 +14,7 @@ spec:
|
||||
- default-threat-prevention-practice
|
||||
accessControlPractices:
|
||||
- default-access-control-practice
|
||||
customResponses: default-web-user-response
|
||||
customResponse: default-web-user-response
|
||||
triggers:
|
||||
- default-log-trigger
|
||||
specificRules:
|
||||
@@ -62,7 +62,7 @@ spec:
|
||||
files: []
|
||||
# relevant for docker and linux embedded deployments
|
||||
# 0 or 1 files supported in array
|
||||
openapiSchemaValidation: # schema validation requires "Premium Edition"
|
||||
schemaValidation: # schema validation requires "Premium Edition"
|
||||
overrideMode: inherited
|
||||
configmap: []
|
||||
# relevant for deployments on kubernetes
|
||||
|
||||
@@ -0,0 +1,163 @@
|
||||
apiVersion: openappsec.io/v1beta2
|
||||
kind: AccessControlPractice
|
||||
metadata:
|
||||
name: access-control-practice-example
|
||||
spec:
|
||||
practiceMode: prevent
|
||||
rateLimit:
|
||||
overrideMode: inherited
|
||||
rules:
|
||||
- action: prevent
|
||||
comment: Limiting access to the resource
|
||||
limit: 100
|
||||
triggers:
|
||||
- log-trigger-example
|
||||
unit: minute
|
||||
uri: /api/resource
|
||||
- action: inherited
|
||||
comment: Rate limiting for authentication requests
|
||||
limit: 50
|
||||
triggers:
|
||||
- log-trigger-example
|
||||
unit: second
|
||||
uri: /api/auth
|
||||
|
||||
---
|
||||
apiVersion: openappsec.io/v1beta2
|
||||
kind: CustomResponse
|
||||
metadata:
|
||||
name: custom-response-block-page-example
|
||||
spec:
|
||||
mode: block-page
|
||||
messageTitle: "Access Denied"
|
||||
messageBody: "Your request was blocked for security reasons."
|
||||
httpResponseCode: 403
|
||||
|
||||
---
|
||||
apiVersion: openappsec.io/v1beta2
|
||||
kind: Exception
|
||||
metadata:
|
||||
name: exception-example
|
||||
spec:
|
||||
action: accept
|
||||
condition:
|
||||
- key: countryCode
|
||||
value: US
|
||||
|
||||
---
|
||||
apiVersion: openappsec.io/v1beta2
|
||||
kind: LogTrigger
|
||||
metadata:
|
||||
name: log-trigger-example
|
||||
spec:
|
||||
accessControlLogging:
|
||||
allowEvents: false
|
||||
dropEvents: true
|
||||
appsecLogging:
|
||||
detectEvents: true
|
||||
preventEvents: true
|
||||
allWebRequests: false
|
||||
additionalSuspiciousEventsLogging:
|
||||
enabled: true
|
||||
minSeverity: high # {high|critical}
|
||||
responseBody: false
|
||||
responseCode: true
|
||||
extendedLogging:
|
||||
urlPath: true
|
||||
urlQuery: true
|
||||
httpHeaders: false
|
||||
requestBody: false
|
||||
logDestination:
|
||||
cloud: true
|
||||
logToAgent: true
|
||||
stdout:
|
||||
format: json-formatted
|
||||
|
||||
---
|
||||
apiVersion: openappsec.io/v1beta2
|
||||
kind: Policy
|
||||
metadata:
|
||||
name: policy-example
|
||||
spec:
|
||||
default:
|
||||
mode: prevent-learn
|
||||
accessControlPractices: [access-control-practice-example]
|
||||
threatPreventionPractices: [threat-prevention-practice-example]
|
||||
triggers: [log-trigger-example]
|
||||
customResponse: custom-response-block-page-example
|
||||
sourceIdentifiers: sources-identifier-example
|
||||
trustedSources: trusted-sources-example
|
||||
exceptions:
|
||||
- exception-example
|
||||
---
|
||||
apiVersion: openappsec.io/v1beta2
|
||||
kind: ThreatPreventionPractice
|
||||
metadata:
|
||||
name: threat-prevention-practice-example
|
||||
spec:
|
||||
practiceMode: inherited
|
||||
webAttacks:
|
||||
overrideMode: inherited
|
||||
minimumConfidence: high
|
||||
intrusionPrevention:
|
||||
# intrusion prevention (IPS) requires "Premium Edition"
|
||||
overrideMode: inherited
|
||||
maxPerformanceImpact: medium
|
||||
minSeverityLevel: medium
|
||||
minCveYear: 2016
|
||||
highConfidenceEventAction: inherited
|
||||
mediumConfidenceEventAction: inherited
|
||||
lowConfidenceEventAction: detect
|
||||
fileSecurity:
|
||||
# file security requires "Premium Edition"
|
||||
overrideMode: inherited
|
||||
minSeverityLevel: medium
|
||||
highConfidenceEventAction: inherited
|
||||
mediumConfidenceEventAction: inherited
|
||||
lowConfidenceEventAction: detect
|
||||
snortSignatures:
|
||||
# you must specify snort signatures in configmap or file to activate snort inspection
|
||||
overrideMode: inherited
|
||||
configmap: []
|
||||
# relevant for deployments on kubernetes
|
||||
# 0 or 1 configmaps supported in array
|
||||
files: []
|
||||
# relevant for docker and linux embedded deployments
|
||||
# 0 or 1 files supported in array
|
||||
schemaValidation: # schema validation requires "Premium Edition"
|
||||
overrideMode: inherited
|
||||
configmap: []
|
||||
# relevant for deployments on kubernetes
|
||||
# 0 or 1 configmaps supported in array
|
||||
files: []
|
||||
# relevant for docker and linux embedded deployments
|
||||
# 0 or 1 files supported in array
|
||||
antiBot: # antibot requires "Premium Edition"
|
||||
overrideMode: inherited
|
||||
injectedUris: []
|
||||
validatedUris: []
|
||||
|
||||
---
|
||||
apiVersion: openappsec.io/v1beta2
|
||||
kind: TrustedSource
|
||||
metadata:
|
||||
name: trusted-sources-example
|
||||
spec:
|
||||
minNumOfSources: 3
|
||||
sourcesIdentifiers:
|
||||
- 1.0.0.27
|
||||
- 1.0.0.28
|
||||
- 1.0.0.29
|
||||
|
||||
---
|
||||
apiVersion: openappsec.io/v1beta2
|
||||
kind: SourcesIdentifier
|
||||
metadata:
|
||||
name: sources-identifier-example
|
||||
spec:
|
||||
sourcesIdentifiers:
|
||||
- identifier: sourceip
|
||||
value:
|
||||
- "192.168.1.1"
|
||||
- "10.0.0.1"
|
||||
|
||||
@@ -14,7 +14,7 @@ spec:
|
||||
- default-threat-prevention-practice
|
||||
accessControlPractices:
|
||||
- default-access-control-practice
|
||||
customResponses: default-web-user-response
|
||||
customResponse: default-web-user-response
|
||||
triggers:
|
||||
- default-log-trigger
|
||||
specificRules:
|
||||
@@ -62,7 +62,7 @@ spec:
|
||||
files: []
|
||||
# relevant for docker and linux embedded deployments
|
||||
# 0 or 1 files supported in array
|
||||
openapiSchemaValidation: # schema validation requires "Premium Edition"
|
||||
schemaValidation: # schema validation requires "Premium Edition"
|
||||
overrideMode: inherited
|
||||
configmap: []
|
||||
# relevant for deployments on kubernetes
|
||||
|
||||
@@ -12,17 +12,17 @@ practices:
|
||||
- name: webapp-default-practice
|
||||
openapi-schema-validation:
|
||||
configmap: []
|
||||
override-mode: detect-learn
|
||||
override-mode: as-top-level
|
||||
snort-signatures:
|
||||
configmap: []
|
||||
override-mode: detect-learn
|
||||
override-mode: as-top-level
|
||||
web-attacks:
|
||||
max-body-size-kb: 1000000
|
||||
max-header-size-bytes: 102400
|
||||
max-object-depth: 40
|
||||
max-url-size-bytes: 32768
|
||||
minimum-confidence: critical
|
||||
override-mode: detect-learn
|
||||
override-mode: as-top-level
|
||||
protections:
|
||||
csrf-protection: inactive
|
||||
error-disclosure: inactive
|
||||
@@ -31,7 +31,7 @@ practices:
|
||||
anti-bot:
|
||||
injected-URIs: []
|
||||
validated-URIs: []
|
||||
override-mode: detect-learn
|
||||
override-mode: as-top-level
|
||||
|
||||
log-triggers:
|
||||
- name: appsec-default-log-trigger
|
||||
|
||||
@@ -12,17 +12,17 @@ practices:
|
||||
- name: webapp-default-practice
|
||||
openapi-schema-validation:
|
||||
configmap: []
|
||||
override-mode: prevent-learn
|
||||
override-mode: as-top-level
|
||||
snort-signatures:
|
||||
configmap: []
|
||||
override-mode: prevent-learn
|
||||
override-mode: as-top-level
|
||||
web-attacks:
|
||||
max-body-size-kb: 1000000
|
||||
max-header-size-bytes: 102400
|
||||
max-object-depth: 40
|
||||
max-url-size-bytes: 32768
|
||||
minimum-confidence: critical
|
||||
override-mode: prevent-learn
|
||||
override-mode: as-top-level
|
||||
protections:
|
||||
csrf-protection: inactive
|
||||
error-disclosure: inactive
|
||||
@@ -31,7 +31,7 @@ practices:
|
||||
anti-bot:
|
||||
injected-URIs: []
|
||||
validated-URIs: []
|
||||
override-mode: prevent-learn
|
||||
override-mode: as-top-level
|
||||
|
||||
log-triggers:
|
||||
- name: appsec-default-log-trigger
@@ -31,15 +31,16 @@ add_subdirectory(tenant_manager)
add_subdirectory(compression)
add_subdirectory(attachments)
add_subdirectory(report_messaging)
add_subdirectory(env_details)

add_library(ngen_core SHARED ".")
target_link_libraries(
    ngen_core
    -Wl,-whole-archive
    "table;debug_is;shell_cmd;metric;tenant_manager;messaging;encryptor;time_proxy;singleton;mainloop;environment;logging;report;rest"
    "config;intelligence_is_v2;event_is;memory_consumption;connkey"
    "compression_utils;-lz;config;intelligence_is_v2;event_is;memory_consumption;connkey"
    "instance_awareness;socket_is;agent_details;agent_details_reporter;buffers;cpu;agent_core_utilities"
    "report_messaging"
    "report_messaging;env_details;version"
    -Wl,-no-whole-archive
)
@@ -203,6 +203,18 @@ deleteFile(const string &path)
    return true;
}

string
resolveFullPath(const string &input_path) {
    dbgTrace(D_INFRA_UTILS) << "Resolving absolute path: " << input_path;
    char resolved_path[PATH_MAX];
    if (!realpath(input_path.c_str(), resolved_path)) {
        dbgWarning(D_INFRA_UTILS) << "Error resolving path: " << input_path << ", errno: " << errno;
        return "";
    }

    return string(resolved_path);
}

bool
deleteDirectory(const string &path, bool delete_content)
{
@@ -510,6 +522,23 @@ removeTrailingWhitespaces(string str)
    return str;
}

string
removeLeadingWhitespaces(string str)
{
    str.erase(
        str.begin(),
        find_if(str.begin(), str.end(), [] (char c) { return !isspace(c); })
    );

    return str;
}

string
trim(string str)
{
    return removeLeadingWhitespaces(removeTrailingWhitespaces(str));
}

} // namespace Strings

} // namespace NGEN
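The unit-test hunk that follows exercises these helpers; as a compact illustration of the intended behavior, a hypothetical sketch (the paths and strings are examples, not from the diff):

```cpp
// Hypothetical sketch of the new NGEN helpers added above.
#include <iostream>
#include <string>

void
helperExamples()
{
    // resolveFullPath() expands a relative path via realpath() and returns an
    // empty string when resolution fails.
    std::string absolute = NGEN::Filesystem::resolveFullPath("conf/nginx.conf");
    if (absolute.empty()) std::cerr << "could not resolve path" << '\n';

    // trim() composes the existing removeTrailingWhitespaces() with the new
    // removeLeadingWhitespaces(), e.g. for tokens parsed out of nginx `include` lines.
    std::string pattern = NGEN::Strings::trim("  conf.d/*.conf \n");
    // pattern == "conf.d/*.conf"
}
```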
@@ -184,3 +184,27 @@ TEST_F(AgentCoreUtilUT, removeTrailingWhitespacesTest)
    string str_with_trailing_whitespace = "str_with_trailing_whitespace\n\n\n\r \n\n\r";
    EXPECT_EQ(NGEN::Strings::removeTrailingWhitespaces(str_with_trailing_whitespace), "str_with_trailing_whitespace");
}

TEST_F(AgentCoreUtilUT, removeLeadingWhitespacesTest)
{
    string str_with_leading_whitespace = "\n\n\n\r \n\n\rstr_with_leading_whitespace";
    EXPECT_EQ(NGEN::Strings::removeLeadingWhitespaces(str_with_leading_whitespace), "str_with_leading_whitespace");
}

TEST_F(AgentCoreUtilUT, trimTest)
{
    string str_with_leading_and_trailing_whitespace = "\n\n \r \rstr_with_whitespace\n\r \n\n\r";
    EXPECT_EQ(NGEN::Strings::trim(str_with_leading_and_trailing_whitespace), "str_with_whitespace");
}

TEST_F(AgentCoreUtilUT, resolveFullPathTest)
{
    string working_dir = cptestFnameInExeDir("");
    ofstream file(working_dir + "test.txt");
    ASSERT_TRUE(file.is_open());
    file.close();
    string relative_path = "test.txt";
    string full_path = NGEN::Filesystem::resolveFullPath(relative_path);
    EXPECT_EQ(full_path, working_dir + "test.txt");
    ASSERT_TRUE(NGEN::Filesystem::deleteFile(working_dir + "test.txt"));
}
@@ -388,8 +388,9 @@ AgentDetails::convertProxyProtocolToString(ProxyProtocol proto) const
        case ProxyProtocol::HTTP: return "http";
        case ProxyProtocol::HTTPS: return "https";
    }
    dbgAssert(false) << alert << "Unsupported Proxy Protocol " << static_cast<int>(proto);
    return "";
    dbgAssertOpt(false) << alert << "Unsupported Proxy Protocol " << static_cast<int>(proto);
    dbgWarning(D_ORCHESTRATOR) << "Using https proxy as default";
    return "https";
}

Maybe<void>
@@ -475,11 +476,14 @@ Maybe<void>
AgentDetails::loadProxyType(ProxyProtocol protocol)
{
    dbgFlow(D_ORCHESTRATOR) << "Loading proxy type: " << convertProxyProtocolToString(protocol);
    dbgAssert(protocol == ProxyProtocol::HTTP || protocol == ProxyProtocol::HTTPS)
        << alert
        << "Unsupported Proxy Protocol "
        << static_cast<int>(protocol);

    if (!(protocol == ProxyProtocol::HTTP || protocol == ProxyProtocol::HTTPS)) {
        dbgAssertOpt(false)
            << alert
            << "Unsupported Proxy Protocol "
            << static_cast<int>(protocol);
        protocol = ProxyProtocol::HTTPS;
        dbgWarning(D_ORCHESTRATOR) << "Using https proxy as default";
    }
    static const map<ProxyProtocol, string> env_var_name = {
        {ProxyProtocol::HTTPS, "https_proxy"},
        {ProxyProtocol::HTTP, "http_proxy"}
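These hunks, like the Buffer and ConnKey changes further down, replace hard `dbgAssert` aborts with `dbgAssertOpt` plus a safe fallback. A minimal, hypothetical sketch of the pattern (the `byteAt` helper is illustrative and not part of the diff):

```cpp
// Hypothetical sketch: validate, report once via dbgAssertOpt, then fall back
// gracefully (here with a Maybe error) instead of aborting the agent.
Maybe<char>
byteAt(const Buffer &buf, uint index)
{
    if (index >= buf.size()) {
        dbgAssertOpt(index < buf.size()) << "Index is past the buffer's end";
        return genError("Index is past the buffer's end");
    }
    return buf[index];
}
```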
@@ -117,7 +117,7 @@ private:
        return;
    }

    dbgInfo(D_AGENT_DETAILS)
    dbgDebug(D_AGENT_DETAILS)
        << "Successfully handled attributes persistence. Operation: "
        << operation
        << ", Path "
@@ -111,7 +111,8 @@ HttpAttachmentConfiguration::save(cereal::JSONOutputArchive &archive) const
        cereal::make_nvp("keep_alive_interval_msec", getNumericalValue("keep_alive_interval_msec")),
        cereal::make_nvp("min_retries_for_verdict", getNumericalValue("min_retries_for_verdict")),
        cereal::make_nvp("max_retries_for_verdict", getNumericalValue("max_retries_for_verdict")),
        cereal::make_nvp("body_size_trigger", getNumericalValue("body_size_trigger"))
        cereal::make_nvp("body_size_trigger", getNumericalValue("body_size_trigger")),
        cereal::make_nvp("remove_server_header", getNumericalValue("remove_server_header"))
    );
}

@@ -167,6 +168,7 @@ HttpAttachmentConfiguration::load(cereal::JSONInputArchive &archive)
    loadNumericalValue(archive, "min_retries_for_verdict", 3);
    loadNumericalValue(archive, "max_retries_for_verdict", 15);
    loadNumericalValue(archive, "body_size_trigger", 200000);
    loadNumericalValue(archive, "remove_server_header", 0);
}

bool
@@ -63,7 +63,8 @@ TEST_F(HttpAttachmentUtilTest, GetValidAttachmentConfiguration)
        "\"waiting_for_verdict_thread_timeout_msec\": 60,\n"
        "\"req_header_thread_timeout_msec\": 10,\n"
        "\"ip_ranges\": " + createIPRangesString(ip_ranges) + ",\n"
        "\"static_resources_path\": \"" + static_resources_path + "\""
        "\"static_resources_path\": \"" + static_resources_path + "\",\n"
        "\"remove_server_header\": 0"
        "}\n";
    ofstream valid_configuration_file(attachment_configuration_file_name);
    valid_configuration_file << valid_configuration;
@@ -89,6 +90,7 @@ TEST_F(HttpAttachmentUtilTest, GetValidAttachmentConfiguration)
    EXPECT_EQ(conf_data_out.getNumericalValue("res_body_thread_timeout_msec"), 80u);
    EXPECT_EQ(conf_data_out.getNumericalValue("waiting_for_verdict_thread_timeout_msec"), 60u);
    EXPECT_EQ(conf_data_out.getNumericalValue("nginx_inspection_mode"), 1u);
    EXPECT_EQ(conf_data_out.getNumericalValue("remove_server_header"), 0u);
}

TEST_F(HttpAttachmentUtilTest, GetMalformedAttachmentConfiguration)
@@ -144,8 +144,8 @@ Buffer::operator+(const Buffer &other) const
|
||||
Buffer
|
||||
Buffer::getSubBuffer(uint start, uint end) const
|
||||
{
|
||||
dbgAssert(start<=end && end<=len) << alert << "Buffer::getSubBuffer() returned: Illegal scoping of buffer";
|
||||
if (start == end) return Buffer();
|
||||
dbgAssertOpt(start<=end && end<=len) << alert << "Buffer::getSubBuffer() returned: Illegal scoping of buffer";
|
||||
if (start >= end || end > len) return Buffer();
|
||||
|
||||
Buffer res;
|
||||
uint offset = 0;
|
||||
@@ -178,8 +178,12 @@ Buffer::getSubBuffer(uint start, uint end) const
|
||||
Maybe<uint>
|
||||
Buffer::findFirstOf(char ch, uint start) const
|
||||
{
|
||||
dbgAssert(start <= len) << alert << "Buffer::findFirstOf() returned: Cannot set a start point after buffer's end";
|
||||
|
||||
if (start > len) {
|
||||
dbgAssertOpt(start <= len)
|
||||
<< alert
|
||||
<< "Buffer::findFirstOf() returned: Cannot set a start point after buffer's end";
|
||||
return genError("Cannot set a start point after buffer's end");
|
||||
}
|
||||
for (; start < len; ++start) {
|
||||
if ((*this)[start] == ch) return start;
|
||||
}
|
||||
@@ -189,8 +193,12 @@ Buffer::findFirstOf(char ch, uint start) const
|
||||
Maybe<uint>
|
||||
Buffer::findFirstOf(const Buffer &buf, uint start) const
|
||||
{
|
||||
dbgAssert(start <= len) << alert << "Buffer::findFirstOf() returned: Cannot set a start point after buffer's end";
|
||||
|
||||
if (start > len) {
|
||||
dbgAssertOpt(start <= len)
|
||||
<< alert
|
||||
<< "Buffer::findFirstOf() returned: Cannot set a start point after buffer's end";
|
||||
return genError("Cannot set a start point after buffer's end");
|
||||
}
|
||||
for (; start + buf.size() <= len; ++start) {
|
||||
auto sub_buffer = getSubBuffer(start, start + buf.size());
|
||||
if (sub_buffer == buf) return start;
|
||||
@@ -201,9 +209,13 @@ Buffer::findFirstOf(const Buffer &buf, uint start) const
|
||||
Maybe<uint>
|
||||
Buffer::findFirstNotOf(char ch, uint start) const
|
||||
{
|
||||
dbgAssert(start <= len)
|
||||
<< alert
|
||||
<< "Buffer::findFirstNotOf() returned: Cannot set a start point after buffer's end";
|
||||
if (start > len) {
|
||||
dbgAssertOpt(start <= len)
|
||||
<< alert
|
||||
<< "Buffer::findFirstNotOf() returned: Cannot set a start point after buffer's end";
|
||||
return genError("Cannot set a start point after buffer's end");
|
||||
}
|
||||
|
||||
for (; start < len; ++start) {
|
||||
if ((*this)[start] != ch) return start;
|
||||
}
|
||||
@@ -213,7 +225,12 @@ Buffer::findFirstNotOf(char ch, uint start) const
|
||||
Maybe<uint>
|
||||
Buffer::findLastOf(char ch, uint start) const
|
||||
{
|
||||
dbgAssert(start <= len) << alert << "Buffer::findLastOf() returned: Cannot set a start point after buffer's end";
|
||||
if (start > len) {
|
||||
dbgAssertOpt(start <= len)
|
||||
<< alert
|
||||
<< "Buffer::findLastOf() returned: Cannot set a start point after buffer's end";
|
||||
return genError("Cannot set a start point after buffer's end");
|
||||
}
|
||||
for (; 0 < start; --start) {
|
||||
if ((*this)[start - 1] == ch) return start - 1;
|
||||
}
|
||||
@@ -223,9 +240,12 @@ Buffer::findLastOf(char ch, uint start) const
|
||||
Maybe<uint>
|
||||
Buffer::findLastNotOf(char ch, uint start) const
|
||||
{
|
||||
dbgAssert(start <= len)
|
||||
<< alert
|
||||
<< "Buffer::findLastNotOf() returned: Cannot set a start point after buffer's end";
|
||||
if (start > len) {
|
||||
dbgAssertOpt(start <= len)
|
||||
<< alert
|
||||
<< "Buffer::findLastNotOf() returned: Cannot set a start point after buffer's end";
|
||||
return genError("Cannot set a start point after buffer's end");
|
||||
}
|
||||
for (; 0 < start; --start) {
|
||||
if ((*this)[start - 1] != ch) return start - 1;
|
||||
}
|
||||
@@ -235,8 +255,8 @@ Buffer::findLastNotOf(char ch, uint start) const
|
||||
void
|
||||
Buffer::truncateHead(uint size)
|
||||
{
|
||||
dbgAssert(size <= len) << alert << "Cannot set a new start of buffer after the buffer's end";
|
||||
if (size == 0) return;
|
||||
dbgAssertOpt(size <= len) << alert << "Cannot set a new start of buffer after the buffer's end";
|
||||
if (size == 0 || size > len) return;
|
||||
if (size == len) {
|
||||
clear();
|
||||
return;
|
||||
@@ -261,8 +281,8 @@ Buffer::truncateHead(uint size)
|
||||
void
|
||||
Buffer::truncateTail(uint size)
|
||||
{
|
||||
dbgAssert(size <= len) << alert << "Cannot set a new end of buffer after the buffer's end";
|
||||
if (size == 0) return;
|
||||
dbgAssertOpt(size <= len) << alert << "Cannot set a new end of buffer after the buffer's end";
|
||||
if (size == 0 || size > len) return;
|
||||
if (size == len) {
|
||||
clear();
|
||||
return;
|
||||
@@ -285,14 +305,20 @@ Buffer::truncateTail(uint size)
|
||||
void
|
||||
Buffer::keepHead(uint size)
|
||||
{
|
||||
dbgAssert(size <= len) << alert << "Cannot set a new end of buffer before the buffer's start";
|
||||
if (size > len) {
|
||||
dbgAssertOpt(size <= len) << alert << "Cannot set a new end of buffer before the buffer's start";
|
||||
return;
|
||||
}
|
||||
truncateTail(len - size);
|
||||
}
|
||||
|
||||
void
|
||||
Buffer::keepTail(uint size)
|
||||
{
|
||||
dbgAssert(size <= len) << alert << "Cannot set a new start of buffer after the buffer's end";
|
||||
if (size > len) {
|
||||
dbgAssertOpt(size <= len) << alert << "Cannot set a new start of buffer after the buffer's end";
|
||||
return;
|
||||
}
|
||||
truncateHead(len - size);
|
||||
}
|
||||
|
||||
|
||||
@@ -2,8 +2,11 @@ include_directories(${ng_module_osrc_zlib_path}/include)
add_definitions(-DZLIB_CONST)

add_library(compression_utils SHARED compression_utils.cc)
add_library(static_compression_utils compression_utils.cc)

add_subdirectory(compression_utils_ut)

install(TARGETS compression_utils DESTINATION lib)
install(TARGETS compression_utils DESTINATION http_transaction_handler_service/lib)

install(TARGETS static_compression_utils DESTINATION lib)
@@ -64,12 +64,12 @@ IPAddr::print(ostream &os) const
    switch (type) {
        case IPType::V4: {
            formatted_addr = inet_ntop(AF_INET, &v4, buf, sizeof(buf));
            dbgAssert(formatted_addr == buf) << alert("conversion error") << "Failed to convert an IPv4 address";
            dbgAssertOpt(formatted_addr == buf) << alert("conversion error") << "Failed to convert an IPv4 address";
            break;
        }
        case IPType::V6: {
            formatted_addr = inet_ntop(AF_INET6, &v6, buf, sizeof(buf));
            dbgAssert(formatted_addr == buf) << alert("conversion error") << "Failed to convert an IPv6 address";
            dbgAssertOpt(formatted_addr == buf) << alert("conversion error") << "Failed to convert an IPv6 address";
            break;
        }
        case IPType::UNINITIALIZED: {
@@ -116,7 +116,7 @@ ConnKey::reverse()
size_t
ConnKey::hash() const
{
    dbgAssert(src.type != IPType::UNINITIALIZED)
    dbgAssertOpt(src.type != IPType::UNINITIALIZED)
        << alert("hashing")
        << "ConnKey::hash was called on an uninitialized object";
    size_t seed = 0;
@@ -27,6 +27,7 @@
#include "i_instance_awareness.h"
#include "i_signal_handler.h"
#include "hash_combine.h"
#include "version.h"

using namespace std;

@@ -298,14 +299,19 @@ AlertInfo::evalParams()
Debug::Debug(
    const string &file_name,
    const string &func_name,
    const uint &line)
    const uint &line,
    bool force_assert)
{
    if (Singleton::exists<Config::I_Config>()) {
        do_assert = getConfigurationWithDefault<bool>(true, "Debug I/S", "Abort on assertion");
    if (!force_assert && !should_assert_optional) {
        do_assert = false;
    } else {
        do_assert = true;
    }

    if (Singleton::exists<Config::I_Config>()) {
        do_assert = getConfigurationWithDefault<bool>(do_assert, "Debug I/S", "Abort on assertion");
    }

    auto current_configuration =
        Singleton::exists<Config::I_Config>() ? getConfigurationWithDefault(default_config, "Debug") : default_config;

@@ -519,6 +525,13 @@ Debug::preload()

    active_streams["STDOUT"] = make_shared<Debug::DebugStream>(&cout);
    active_streams["FOG"] = make_shared<DebugFogStream>();

    string branch = Version::getBranch();
    if (branch == "open-source" || branch == "master" || branch.substr(0, 6) == "hotfix") {
        should_assert_optional = false;
    } else {
        should_assert_optional = true;
    }
}

void
@@ -844,3 +857,4 @@ bool Debug::is_fail_open_mode = false;
bool Debug::debug_override_exist = false;
string Debug::default_debug_file_stream_path = "";
vector<string> Debug::streams_from_mgmt;
bool Debug::should_assert_optional = true;
Some files were not shown because too many files have changed in this diff.