Mirror of https://github.com/openappsec/openappsec.git (synced 2025-06-28 16:41:02 +03:00)

Commit 6d67818a94 ("Jan_31_2024-Dev"), parent 752a5785f0
@@ -33,5 +33,6 @@ DEFINE_KDEBUG_FLAG(kernelMetric)
 DEFINE_KDEBUG_FLAG(tproxy)
 DEFINE_KDEBUG_FLAG(tenantStats)
 DEFINE_KDEBUG_FLAG(uuidTranslation)
+DEFINE_KDEBUG_FLAG(antibotResolver)
 
 #endif // DEFINE_KDEBUG_FLAG
@@ -810,7 +810,7 @@ private:
     }
 
     FilterVerdict
-    handleStartTransaction(const Buffer &data)
+    handleStartTransaction(const Buffer &data, NginxAttachmentOpaque &opaque)
     {
         if (data.size() == 0) {
             dbgWarning(D_NGINX_ATTACHMENT)
@@ -819,7 +819,6 @@ private:
             return default_verdict;
         }
 
-        NginxAttachmentOpaque &opaque = i_transaction_table->getState<NginxAttachmentOpaque>();
         auto rule_by_ctx = getConfiguration<BasicRuleConfig>("rulebase", "rulesConfig");
         if (rule_by_ctx.ok()) {
             BasicRuleConfig rule = rule_by_ctx.unpack();
@@ -928,13 +927,12 @@ private:
     }
 
     void
-    setResponseContentEncoding(const CompressionType content_encoding)
+    setResponseContentEncoding(const CompressionType content_encoding, NginxAttachmentOpaque &opaque)
    {
        if (content_encoding == HttpTransactionData::default_response_content_encoding) {
            dbgDebug(D_NGINX_ATTACHMENT) << "New content encoding is the default. Skipping change of currect state";
            return;
        }
-       auto &opaque = i_transaction_table->getState<NginxAttachmentOpaque>();
        auto &transaction_data = opaque.getTransactionData();
 
        transaction_data.setResponseContentEncoding(content_encoding);
@@ -951,7 +949,7 @@ private:
     }
 
     FilterVerdict
-    handleResponseHeaders(const Buffer &headers_data)
+    handleResponseHeaders(const Buffer &headers_data, NginxAttachmentOpaque &opaque)
     {
         dbgFlow(D_NGINX_ATTACHMENT) << "Handling response headers";
         bool did_fail_on_purpose = false;
@@ -985,16 +983,15 @@ private:
         dbgTrace(D_NGINX_ATTACHMENT) << "Successfully parsed response's content encoding";
 
         auto parsed_content_encoding = parsed_content_encoding_maybe.unpack();
-        setResponseContentEncoding(parsed_content_encoding);
+        setResponseContentEncoding(parsed_content_encoding, opaque);
         updateMetrics(parsed_content_encoding);
 
         return handleMultiModifiableChunks(response_headers, false);
     }
 
     FilterVerdict
-    handleResponseBody(const Buffer &data)
+    handleResponseBody(const Buffer &data, NginxAttachmentOpaque &opaque)
     {
-        auto &opaque = i_transaction_table->getState<NginxAttachmentOpaque>();
         auto &transaction_data = opaque.getTransactionData();
 
         CompressionType content_encoding = transaction_data.getResponseContentEncoding();
@@ -1007,13 +1004,18 @@ private:
     }
 
     FilterVerdict
-    handleChunkedData(ChunkType chunk_type, const Buffer &data)
+    handleChunkedData(ChunkType chunk_type, const Buffer &data, NginxAttachmentOpaque &opaque)
     {
         ScopedContext event_type;
         event_type.registerValue<ngx_http_chunk_type_e>("HTTP Chunk type", chunk_type);
 
-        auto rule_by_ctx = getConfiguration<BasicRuleConfig>("rulebase", "rulesConfig");
-        if (!rule_by_ctx.ok() && chunk_type > ChunkType::REQUEST_HEADER) {
+        if (chunk_type > ChunkType::REQUEST_HEADER && opaque.getApplicationState() == ApplicationState::UNKOWN) {
+            auto rule_by_ctx = getConfiguration<BasicRuleConfig>("rulebase", "rulesConfig");
+            ApplicationState state = rule_by_ctx.ok() ? ApplicationState::DEFINED : ApplicationState::UNDEFINED;
+            opaque.setApplicationState(state);
+        }
 
+        if (opaque.getApplicationState() == ApplicationState::UNDEFINED) {
             ngx_http_cp_verdict_e verdict_action =
                 getSettingWithDefault<bool>(false, "allowOnlyDefinedApplications") ? DROP : ACCEPT;
 
@@ -1026,7 +1028,7 @@ private:
 
         switch (chunk_type) {
             case ChunkType::REQUEST_START:
-                return handleStartTransaction(data);
+                return handleStartTransaction(data, opaque);
             case ChunkType::REQUEST_HEADER:
                 return handleMultiModifiableChunks(NginxParser::parseRequestHeaders(data), "request header", true);
             case ChunkType::REQUEST_BODY:
@@ -1043,10 +1045,10 @@ private:
                     return handleContentLength(data);
                 }
             case ChunkType::RESPONSE_HEADER:
-                return handleResponseHeaders(data);
+                return handleResponseHeaders(data, opaque);
             case ChunkType::RESPONSE_BODY:
                 nginx_attachment_event.addResponseInspectionCounter(1);
-                return handleResponseBody(data);
+                return handleResponseBody(data, opaque);
             case ChunkType::RESPONSE_END:
                 return FilterVerdict(http_manager->inspectEndTransaction());
             case ChunkType::METRIC_DATA_FROM_PLUGIN:
@@ -1456,7 +1458,7 @@ private:
     NginxAttachmentOpaque &opaque = i_transaction_table->getState<NginxAttachmentOpaque>();
     opaque.activateContext();
 
-    FilterVerdict verdict = handleChunkedData(*chunked_data_type, inspection_data);
+    FilterVerdict verdict = handleChunkedData(*chunked_data_type, inspection_data, opaque);
 
     bool is_header =
         *chunked_data_type == ChunkType::REQUEST_HEADER ||
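Note: the hunks above stop re-fetching the transaction state inside every handler; the caller pulls NginxAttachmentOpaque out of the transaction table once and passes it down, and the opaque now caches whether an application (rule base) is defined for the transaction. A minimal standalone sketch of that caching pattern, with placeholder stand-ins instead of the real agent types:

#include <iostream>

// Placeholder stand-ins for the real openappsec types, for illustration only.
enum class ApplicationState { UNDEFINED, DEFINED, UNKOWN };  // spelling follows the patch

class NginxAttachmentOpaque
{
public:
    const ApplicationState & getApplicationState() const { return application_state; }
    void setApplicationState(const ApplicationState &state) { application_state = state; }

private:
    ApplicationState application_state = ApplicationState::UNKOWN;
};

// Stand-in for getConfiguration<BasicRuleConfig>(...).ok(): true when a rule base exists.
static bool ruleConfigExists() { return false; }

// The handler resolves the application state once per transaction and caches it in the opaque,
// so later chunks reuse the cached answer instead of re-querying the configuration.
void handleChunk(NginxAttachmentOpaque &opaque)
{
    if (opaque.getApplicationState() == ApplicationState::UNKOWN) {
        auto state = ruleConfigExists() ? ApplicationState::DEFINED : ApplicationState::UNDEFINED;
        opaque.setApplicationState(state);
    }
    if (opaque.getApplicationState() == ApplicationState::UNDEFINED) {
        std::cout << "no application defined for this transaction\n";
    }
}

int main()
{
    NginxAttachmentOpaque opaque;  // in the real code this lives in the transaction table
    handleChunk(opaque);           // first chunk resolves and caches the state
    handleChunk(opaque);           // later chunks reuse the cached value
}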
@@ -26,6 +26,8 @@
 #include "i_environment.h"
 #include "buffer.h"
 
+enum class ApplicationState { UNDEFINED, DEFINED, UNKOWN };
+
 class NginxAttachmentOpaque : public TableOpaqueSerialize<NginxAttachmentOpaque>, Singleton::Consume<I_Environment>
 {
 public:
@@ -58,6 +60,7 @@ public:
 
     CompressionStream * getResponseCompressionStream() { return response_compression_stream; }
     HttpTransactionData & getTransactionData() { return transaction_data; }
+    const ApplicationState & getApplicationState() const { return application_state; }
 
     // LCOV_EXCL_START - sync functions, can only be tested once the sync module exists
     template <typename T> void serialize(T &, uint) {}
@@ -81,6 +84,7 @@ public:
         const std::string &data,
         EnvKeyAttr::LogSection log_ctx = EnvKeyAttr::LogSection::NONE
     );
+    void setApplicationState(const ApplicationState &app_state) { application_state = app_state; }
 
 private:
     CompressionStream *response_compression_stream;
@@ -93,6 +97,7 @@ private:
     std::string source_identifier;
     std::string identifier_type;
     std::map<std::string, std::string> saved_data;
+    ApplicationState application_state = ApplicationState::UNKOWN;
 };
 
 #endif // __NGINX_ATTACHMENT_OPAQUE_H__
@@ -60,6 +60,22 @@ static const unordered_map<string, MatchQuery::StaticKeys> string_to_key = {
     { "domain", MatchQuery::StaticKeys::Domain }
 };
 
+MatchQuery::MatchQuery(const string &match) : is_specific_label(false), is_ignore_keyword(false)
+{
+    try {
+        stringstream ss;
+        ss.str(match);
+        cereal::JSONInputArchive archive_in(ss);
+        load(archive_in);
+    } catch (const exception &e) {
+        dbgWarning(D_RULEBASE_CONFIG)
+            << "Unable to load match query JSON. JSON content: "
+            << match
+            << ", Error: "
+            << e.what();
+    }
+}
+
 void
 MatchQuery::load(cereal::JSONInputArchive &archive_in)
 {
@@ -49,7 +49,7 @@ ZonesConfig::load(cereal::JSONInputArchive &archive_in)
             return;
         }
 
-        dbgWarning(D_RULEBASE_CONFIG)
+        dbgDebug(D_RULEBASE_CONFIG)
             << "Adding specific zone to cache. Zone ID: "
             << single_zone.getId()
             << ", name: "
@@ -93,7 +93,7 @@ ZonesConfig::load(cereal::JSONInputArchive &archive_in)
         }
         for (GenericConfigId &implied_id: implied_zones) {
             if (all_zones.find(implied_id) != all_zones.end()) {
-                dbgWarning(D_RULEBASE_CONFIG) << "Adding implied zone to cache. Zone ID: " << implied_id;
+                dbgDebug(D_RULEBASE_CONFIG) << "Adding implied zone to cache. Zone ID: " << implied_id;
                 active_zones_set.emplace(implied_id, all_zones[implied_id]);
                 if (any_zone_id != "" && active_zones_set.count(any_zone_id) == 0) {
                     active_zones_set.emplace(any_zone_id, all_zones[any_zone_id]);
@@ -166,11 +166,16 @@ private:
     bool
     sendHealthCheckPatch()
     {
-        dbgFlow(D_HEALTH_CHECK_MANAGER);
+        dbgFlow(D_HEALTH_CHECK_MANAGER) << "Sending a health check patch";
 
         HealthCheckPatch patch_to_send(general_health_aggregated_status, all_comps_health_status);
         auto messaging = Singleton::Consume<I_Messaging>::by<HealthCheckManager>();
-        return messaging->sendNoReplyObject(patch_to_send, I_Messaging::Method::PATCH, "/agents");
+        return messaging->sendSyncMessageWithoutResponse(
+            HTTPMethod::PATCH,
+            "/agents",
+            patch_to_send,
+            MessageCategory::GENERIC
+        );
     }
 
     void
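Note: this is the messaging-interface migration that recurs through the commit: sendNoReplyObject / sendObjectWithPersistence with I_Messaging::Method and MessageTypeTag are replaced by sendSyncMessageWithoutResponse / sendAsyncMessage with HTTPMethod and MessageCategory. A hedged sketch of the new call shapes using a toy interface; only the argument order is taken from this diff, and the log endpoint below is a placeholder, not the real URL:

#include <iostream>
#include <string>

// Placeholder enums and payload type; the real definitions live in the agent's messaging layer.
enum class HTTPMethod { POST, PATCH };
enum class MessageCategory { GENERIC };
struct Payload { std::string body; };

// Toy interface that only mirrors the two call shapes appearing in this commit.
struct ToyMessaging
{
    bool
    sendSyncMessageWithoutResponse(HTTPMethod, const std::string &uri, const Payload &, MessageCategory)
    {
        std::cout << "sync message without response to " << uri << "\n";
        return true;
    }

    void
    sendAsyncMessage(HTTPMethod, const std::string &uri, const Payload &, MessageCategory)
    {
        std::cout << "async (fire-and-forget) message to " << uri << "\n";
    }
};

int main()
{
    ToyMessaging messaging;
    Payload patch_to_send{"{}"};

    // Replaces the removed sendNoReplyObject(patch_to_send, I_Messaging::Method::PATCH, "/agents"):
    messaging.sendSyncMessageWithoutResponse(HTTPMethod::PATCH, "/agents", patch_to_send, MessageCategory::GENERIC);

    // ReportMessaging's destructor (later in this commit) uses the async variant the same way,
    // with the log object and its own URL in place of the health-check patch.
    messaging.sendAsyncMessage(HTTPMethod::POST, "/placeholder-log-endpoint", patch_to_send, MessageCategory::GENERIC);
}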
@@ -4,5 +4,5 @@ link_directories(${BOOST_ROOT}/lib)
 add_unit_test(
     health_check_manager_ut
     "health_check_manager_ut.cc"
-    "singleton;mainloop;health_check_manager;event_is;metric;-lboost_regex"
+    "singleton;messaging;mainloop;health_check_manager;event_is;metric;-lboost_regex"
 )
@@ -111,19 +111,18 @@ public:
 TEST_F(HealthCheckManagerTest, runPeriodicHealthCheckTest)
 {
     string actual_body;
-    EXPECT_CALL(
-        mock_message,
-        sendMessage(
-            false,
-            _,
-            I_Messaging::Method::PATCH,
-            "/agents",
-            "",
-            _,
-            _,
-            MessageTypeTag::GENERIC
-        )
-    ).Times(4).WillRepeatedly(DoAll(SaveArg<1>(&actual_body), Return(string())));
+    EXPECT_CALL(mock_message, sendSyncMessage(
+        HTTPMethod::PATCH,
+        "/agents",
+        _,
+        _,
+        _
+    )).Times(4).WillRepeatedly(
+        DoAll(
+            SaveArg<2>(&actual_body),
+            Return(HTTPResponse(HTTPStatusCode::HTTP_OK, ""))
+        )
+    );
 
     try {
         health_check_periodic_routine();
@@ -24,6 +24,15 @@
 #define MAX_FINAL_SCORE 10.0
 #define ATTACK_IN_PARAM "attack_in_param"
 
+enum TrafficMethod {
+    POST,
+    GET,
+    PUT,
+    PATCH,
+    DELETE,
+    OTHER
+};
+
 enum ThreatLevel {
     NO_THREAT = 0,
     THREAT_INFO,
@@ -54,6 +54,8 @@ public:
 
     MatchQuery(): is_specific_label(false), is_ignore_keyword(false) {}
 
+    MatchQuery(const std::string &match);
+
     void load(cereal::JSONInputArchive &archive_in);
 
     MatchType getType() const { return type; }
components/include/geo_location.h (new file, 24 lines)
@@ -0,0 +1,24 @@
+#ifndef __GEO_LOCATION_H__
+#define __GEO_LOCATION_H__
+
+#include "i_geo_location.h"
+#include "singleton.h"
+#include "component.h"
+
+class GeoLocation : public Component, Singleton::Provide<I_GeoLocation>
+{
+public:
+    GeoLocation();
+    ~GeoLocation();
+
+    void preload();
+
+    void init();
+    void fini();
+
+private:
+    class Impl;
+    std::unique_ptr<Impl> pimpl;
+};
+
+#endif // __GEO_LOCATION_H__
components/include/http_geo_filter.h (new file, 35 lines)
@@ -0,0 +1,35 @@
+#ifndef __HTTP_GEO_FILTER_H__
+#define __HTTP_GEO_FILTER_H__
+
+#include <memory>
+
+#include "singleton.h"
+#include "i_mainloop.h"
+#include "component.h"
+#include "http_inspection_events.h"
+#include "i_geo_location.h"
+#include "i_generic_rulebase.h"
+
+class HttpGeoFilter
+        :
+        public Component,
+        Singleton::Consume<I_MainLoop>,
+        Singleton::Consume<I_GeoLocation>,
+        Singleton::Consume<I_GenericRulebase>
+{
+public:
+    HttpGeoFilter();
+    ~HttpGeoFilter();
+
+    void preload() override;
+
+    void init() override;
+    void fini() override;
+
+
+private:
+    class Impl;
+    std::unique_ptr<Impl> pimpl;
+};
+
+#endif // __HTTP_GEO_FILTER_H__
@@ -27,7 +27,7 @@ public:
     virtual std::string getAgentVersion() = 0;
     virtual bool isKernelVersion3OrHigher() = 0;
     virtual bool isGwNotVsx() = 0;
-    virtual bool isVersionEqualOrAboveR8110() = 0;
+    virtual bool isVersionAboveR8110() = 0;
     virtual bool isReverseProxy() = 0;
     virtual Maybe<std::tuple<std::string, std::string, std::string>> parseNginxMetadata() = 0;
     virtual std::map<std::string, std::string> getResolvedDetails() = 0;
@@ -40,6 +40,10 @@ public:
         const std::string &service_name
     ) const = 0;
 
+    virtual Maybe<std::string> checkIfFileExists(const Package &package) const = 0;
+
+    virtual void removeDownloadFile(const std::string &file_name) const = 0;
+
     virtual std::string getProfileFromMap(const std::string &tenant_id) const = 0;
 };
 
components/include/i_geo_location.h (new file, 22 lines)
@@ -0,0 +1,22 @@
+#ifndef __I_GEO_LOCATION_H__
+#define __I_GEO_LOCATION_H__
+
+#include <string>
+
+#include "connkey.h"
+#include "enum_array.h"
+
+
+class I_GeoLocation
+{
+public:
+    enum class GeoLocationField { COUNTRY_NAME, COUNTRY_CODE, CONTINENT_NAME, CONTINENT_CODE, COUNT };
+
+    virtual Maybe<EnumArray<GeoLocationField, std::string>> lookupLocation(const std::string &ip) = 0;
+    virtual Maybe<EnumArray<GeoLocationField, std::string>> lookupLocation(const IPAddr &ip) = 0;
+
+protected:
+    virtual ~I_GeoLocation() {}
+};
+
+#endif // __I_GEO_LOCATION_H__
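Note: consumers index the EnumArray returned by lookupLocation with GeoLocationField values (the geo filter added later in this commit reads COUNTRY_CODE). A small sketch of that lookup-and-index pattern, with std::optional and std::array standing in for the agent's Maybe and EnumArray templates:

#include <array>
#include <iostream>
#include <optional>
#include <string>

// Simplified stand-ins: the agent's Maybe<> is approximated by std::optional,
// and EnumArray<> by a std::array indexed with the enum value.
enum class GeoLocationField { COUNTRY_NAME, COUNTRY_CODE, CONTINENT_NAME, CONTINENT_CODE, COUNT };
using GeoData = std::array<std::string, static_cast<size_t>(GeoLocationField::COUNT)>;

std::optional<GeoData> lookupLocation(const std::string &ip)
{
    if (ip.empty()) return std::nullopt;  // a failed lookup maps to an empty Maybe
    GeoData data;
    data[static_cast<size_t>(GeoLocationField::COUNTRY_CODE)] = "IL";  // illustrative value
    return data;
}

int main()
{
    auto location = lookupLocation("203.0.113.7");
    if (!location) {
        std::cout << "lookup failed, fall back to the default verdict\n";
        return 0;
    }
    // The HTTP geo filter keys its allow/block decision off the country code field.
    std::cout << "country code: "
              << (*location)[static_cast<size_t>(GeoLocationField::COUNTRY_CODE)] << "\n";
}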
components/include/i_oa_schema_updater.h (new file, 37 lines)
@@ -0,0 +1,37 @@
+// Copyright (C) 2022 Check Point Software Technologies Ltd. All rights reserved.
+
+// Licensed under the Apache License, Version 2.0 (the "License");
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef __I_OA_SCHEMA_UPDATER_H__
+#define __I_OA_SCHEMA_UPDATER_H__
+
+#include <string>
+#include "i_transaction.h"
+#include "../security_apps/waap/waap_clib/oasu_key_types.h"
+#include "../security_apps/waap/waap_clib/events_for_oa_schema.h"
+
+class I_OASUpdater
+{
+public:
+    virtual void onKvt(const std::string &value, SchemaKeyType type, IWaf2Transaction &waf2Transaction) = 0;
+    virtual void addOperationField(
+        const std::string &operation_name,
+        const std::string &operation_type,
+        const std::string &field_name,
+        IWaf2Transaction &waf2Transaction) = 0;
+    virtual void removeGraphQLData(IWaf2Transaction &waf2Transaction) = 0;
+    virtual void addActiveOperationName(
+        const std::string &operation_name,
+        IWaf2Transaction &waf2Transaction) = 0;
+};
+
+#endif // __I_OA_SCHEMA_UPDATER_H__
@@ -33,7 +33,10 @@ public:
     ) const = 0;
     virtual Maybe<void> authenticateAgent() = 0;
     virtual Maybe<void> getUpdate(CheckUpdateRequest &request) = 0;
-    virtual Maybe<std::string> downloadAttributeFile(const GetResourceFile &resourse_file) = 0;
+    virtual Maybe<std::string> downloadAttributeFile(
+        const GetResourceFile &resourse_file,
+        const std::string &file_path
+    ) = 0;
     virtual void setAddressExtenesion(const std::string &extension) = 0;
 };
 
@@ -23,6 +23,8 @@ struct DecisionTelemetryData
     std::string practiceId;
     std::string practiceName;
     std::string source;
+    TrafficMethod method;
+    int responseCode;
     std::set<std::string> attackTypes;
 
     DecisionTelemetryData() :
@@ -32,6 +34,8 @@ struct DecisionTelemetryData
         practiceId(),
         practiceName(),
         source(),
+        method(POST),
+        responseCode(0),
         attackTypes()
     {
     }
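Note: DecisionTelemetryData now records the request method and response status code so the per-asset traffic counters added later in this commit can be updated. A hedged sketch of filling in a record; the struct fields mirror this header, while toTrafficMethod is a hypothetical helper, not something defined in the diff:

#include <set>
#include <string>

// Mirrors the fields visible in this hunk; other members of the real struct are omitted.
enum TrafficMethod { POST, GET, PUT, PATCH, DELETE, OTHER };

struct DecisionTelemetryData {
    std::string practiceId;
    std::string practiceName;
    std::string source;
    TrafficMethod method = POST;
    int responseCode = 0;
    std::set<std::string> attackTypes;
};

// Hypothetical helper: map the textual HTTP method onto the telemetry enum.
TrafficMethod toTrafficMethod(const std::string &m)
{
    if (m == "POST") return POST;
    if (m == "GET") return GET;
    if (m == "PUT") return PUT;
    if (m == "PATCH") return PATCH;
    if (m == "DELETE") return DELETE;
    return OTHER;
}

int main()
{
    DecisionTelemetryData data;
    data.method = toTrafficMethod("GET");
    data.responseCode = 403;  // e.g. a blocked request; would feed a 4xx response counter
    data.attackTypes.insert("SQL Injection");
    return data.method == GET ? 0 : 1;
}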
@@ -36,15 +36,22 @@ public:
         std::map<std::string, Package> &corrupted_packages
     );
 
-    bool
+    Maybe<std::vector<Package>>
     buildInstallationQueue(
-        const Package &updated_package,
+        const std::map<std::string, Package> &current_packages,
+        const std::map<std::string, Package> &new_packages
+    );
+
+
+private:
+    Maybe<void>
+    buildRecInstallationQueue(
+        const Package &package,
         std::vector<Package> &installation_queue,
         const std::map<std::string, Package> &current_packages,
         const std::map<std::string, Package> &new_packages
     );
 
-private:
     std::string corrupted_file_path;
 };
 #endif // __MANIFEST_DIFF_CALCULATOR_H__
@@ -24,6 +24,8 @@
 #include "i_details_resolver.h"
 #include "i_time_get.h"
 
+using packageFilePath = std::string;
+
 class ManifestHandler
         :
         Singleton::Consume<I_MainLoop>,
@@ -36,28 +38,23 @@ class ManifestHandler
         Singleton::Consume<I_DetailsResolver>
 {
 public:
-    using packageFilePath = std::string;
 
     ManifestHandler() = default;
     void init();
 
-    bool
-    downloadPackages(
-        const std::vector<Package> &updated_packages,
-        std::vector<std::pair<Package, packageFilePath>> &downloaded_packages
+    Maybe<std::vector<std::pair<Package, packageFilePath>>> downloadPackages(
+        const std::map<std::string, Package> &new_packages_to_download
     );
 
-    bool
-    installPackages(
-        const std::vector<std::pair<Package, packageFilePath>> &downloaded_packages_files,
+    bool installPackage(
+        const std::pair<Package, std::string> &package_downloaded_file,
         std::map<packageFilePath, Package> &current_packages,
         std::map<packageFilePath, Package> &corrupted_packages
     );
 
     bool uninstallPackage(Package &removed_package);
 
-    bool
-    selfUpdate(
+    bool selfUpdate(
         const Package &updated_package,
         std::map<packageFilePath, Package> &current_packages,
         const packageFilePath &installation_file
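Note: the manifest flow changes shape here: buildInstallationQueue returns Maybe<std::vector<Package>> instead of filling an out-parameter, downloadPackages returns the downloaded (package, file path) pairs, and installation happens one package at a time through installPackage. A rough sketch of the resulting control flow under those signatures, with std::optional standing in for Maybe and trivial placeholder types:

#include <iostream>
#include <map>
#include <optional>
#include <string>
#include <utility>
#include <vector>

// Trivial placeholders for the real orchestration types.
struct Package { std::string name; };
using packageFilePath = std::string;

std::optional<std::vector<Package>>
buildInstallationQueue(const std::map<std::string, Package> &current, const std::map<std::string, Package> &fresh)
{
    std::vector<Package> queue;
    for (const auto &p : fresh) {
        if (!current.count(p.first)) queue.push_back(p.second);  // only genuinely new packages
    }
    return queue;
}

std::optional<std::vector<std::pair<Package, packageFilePath>>>
downloadPackages(const std::vector<Package> &queue)
{
    std::vector<std::pair<Package, packageFilePath>> downloaded;
    for (const auto &p : queue) downloaded.emplace_back(p, "/tmp/" + p.name);  // illustrative path
    return downloaded;
}

bool installPackage(const std::pair<Package, packageFilePath> &item)
{
    std::cout << "installing " << item.first.name << " from " << item.second << "\n";
    return true;
}

int main()
{
    std::map<std::string, Package> current;
    std::map<std::string, Package> fresh = {{"waap", {"waap"}}, {"ips", {"ips"}}};

    auto queue = buildInstallationQueue(current, fresh);
    if (!queue) return 1;                     // in the real code: report the Maybe's error
    auto downloaded = downloadPackages(*queue);
    if (!downloaded) return 1;
    for (const auto &item : *downloaded) {
        if (!installPackage(item)) return 1;  // per-package failure handling replaces the old bulk call
    }
}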
|
4
components/include/oas_updater_entry_saver.h
Normal file
4
components/include/oas_updater_entry_saver.h
Normal file
@ -0,0 +1,4 @@
|
|||||||
|
#ifndef __SCHEMA_UPDATER_DATA_STRUCTURE_H__
|
||||||
|
#define __SCHEMA_UPDATER_DATA_STRUCTURE_H__
|
||||||
|
|
||||||
|
#endif // __SCHEMA_UPDATER_DATA_STRUCTURE_H__
|
@@ -120,10 +120,10 @@ public:
         return std::string();
     }
 
-    I_Messaging::Method
+    HTTPMethod
     getRequestMethod() const
     {
-        return isVirtual() ? I_Messaging::Method::POST : I_Messaging::Method::GET;
+        return isVirtual() ? HTTPMethod::POST : HTTPMethod::GET;
     }
 
 private:
@@ -35,10 +35,11 @@ public:
     const std::string & getName() const { return name; }
     const std::string & getVersion() const { return version; }
     const std::string & getChecksum() const { return checksum_value; }
+    const std::string & getErrorMessage() const { return error_message; }
     const PackageType & getType() const { return package_type; }
     const std::vector<std::string> & getRequire() const { return require_packages; }
     const ChecksumTypes & getChecksumType() const { return checksum_type; }
-    const Maybe<void> & isInstallable() const { return installable; }
+    const bool & isInstallable() const { return installable; }
 
     bool operator==(const Package &other) const;
     bool operator!=(const Package &other) const;
@@ -60,7 +61,8 @@ private:
         return std::string();
     }
 
-    Maybe<void> installable = Maybe<void>();
+    bool installable = true;
+    std::string error_message;
     std::string mirror;
     std::string name;
     std::string version;
@@ -9,9 +9,20 @@
 #include "debug.h"
 #include "generic_rulebase/rulebase_config.h"
 #include "generic_rulebase/triggers_config.h"
+#include "generic_rulebase/match_query.h"
 #include "generic_rulebase/evaluators/trigger_eval.h"
 
-USE_DEBUG_FLAG(D_REVERSE_PROXY);
+USE_DEBUG_FLAG(D_RATE_LIMIT);
 
+enum class RateLimitAction
+{
+    INACTIVE,
+    ACCORDING_TO_PRACTICE,
+    DETECT,
+    PREVENT,
+
+    UNKNOWN
+};
+
 class RateLimitTrigger
 {
@@ -34,12 +45,12 @@ public:
     operator bool() const
     {
         if (uri.empty()) {
-            dbgTrace(D_REVERSE_PROXY) << "Recived empty URI in rate-limit rule";
+            dbgTrace(D_RATE_LIMIT) << "Recived empty URI in rate-limit rule";
             return false;
         }
 
         if (uri.at(0) != '/') {
-            dbgWarning(D_REVERSE_PROXY)
+            dbgWarning(D_RATE_LIMIT)
                 << "Recived invalid rate-limit URI in rate-limit rule: "
                 << uri
                 << " rate-limit URI must start with /";
@@ -47,7 +58,7 @@ public:
         }
 
         if (limit <= 0) {
-            dbgWarning(D_REVERSE_PROXY)
+            dbgWarning(D_RATE_LIMIT)
                 << "Recived invalid rate-limit limit in rate-limit rule: "
                 << limit
                 << " rate-limit rule limit must be positive";
@@ -70,10 +81,13 @@ public:
     const std::string & getRateLimitReq() const { return limit_req_template_value; }
     const std::string & getRateLimitUri() const { return uri; }
     const std::string & getRateLimitScope() const { return scope; }
+    const RateLimitAction & getRateLimitAction() const { return action; }
+    const MatchQuery & getRateLimitMatch() const { return match; }
     const LogTriggerConf & getRateLimitTrigger() const { return trigger; }
     const std::vector<RateLimitTrigger> & getRateLimitTriggers() const { return rate_limit_triggers; }
 
     bool isRootLocation() const;
+    bool isMatchAny() const;
 
     bool operator==(const RateLimitRule &rhs) { return uri == rhs.uri; }
     bool operator<(const RateLimitRule &rhs) { return uri < rhs.uri; }
@@ -87,21 +101,27 @@ private:
     std::string limit_req_template_value;
     std::string limit_req_zone_template_value;
     std::string cache_size = "5m";
+    RateLimitAction action = RateLimitAction::ACCORDING_TO_PRACTICE;
+    MatchQuery match = MatchQuery(default_match);
     std::vector<RateLimitTrigger> rate_limit_triggers;
     LogTriggerConf trigger;
     int limit;
     bool exact_match = false;
+
+    static const std::string default_match;
 };
 
 class RateLimitConfig
 {
 public:
     void load(cereal::JSONInputArchive &ar);
-    void addSiblingRateLimitRule(RateLimitRule &rule);
+    void addSiblingRateLimitRules();
     void prepare();
 
     const std::vector<RateLimitRule> & getRateLimitRules() const { return rate_limit_rules; }
-    const std::string & getRateLimitMode() const { return mode; }
+    const RateLimitAction & getRateLimitMode() const { return mode; }
 
+    RateLimitRule generateSiblingRateLimitRule(const RateLimitRule &rule);
+
     const LogTriggerConf
     getRateLimitTrigger(const std::string &nginx_uri) const
@@ -110,7 +130,7 @@ public:
 
         std::set<std::string> rate_limit_triggers_set;
         for (const RateLimitTrigger &rate_limit_trigger : rule.getRateLimitTriggers()) {
-            dbgTrace(D_REVERSE_PROXY)
+            dbgTrace(D_RATE_LIMIT)
                 << "Adding trigger ID: "
                 << rate_limit_trigger.getTriggerId()
                 << " of rule URI: "
@@ -130,12 +150,15 @@ public:
 
     static bool isActive() { return is_active; }
 
+    static const std::map<RateLimitAction, std::string> rate_limit_action_to_string;
+    static const std::map<std::string, RateLimitAction> rate_limit_string_to_action;
+
 private:
     const RateLimitRule
     findLongestMatchingRule(const std::string &nginx_uri) const;
 
     static bool is_active;
-    std::string mode;
+    RateLimitAction mode;
     std::vector<RateLimitRule> rate_limit_rules;
 };
 
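Note: the rate-limit mode moves from a free-form std::string to the RateLimitAction enum, and static maps are declared for converting between the enum and its string form in both directions. A hedged sketch of such a conversion table; the exact policy strings are not shown in this hunk, so the keys below are placeholders:

#include <iostream>
#include <map>
#include <string>

enum class RateLimitAction { INACTIVE, ACCORDING_TO_PRACTICE, DETECT, PREVENT, UNKNOWN };

// Placeholder table: the real rate_limit_string_to_action map is defined in the component's
// .cc file and may use different policy strings than these.
static const std::map<std::string, RateLimitAction> rate_limit_string_to_action = {
    {"Inactive", RateLimitAction::INACTIVE},
    {"AccordingToPractice", RateLimitAction::ACCORDING_TO_PRACTICE},
    {"Detect", RateLimitAction::DETECT},
    {"Prevent", RateLimitAction::PREVENT},
};

// Unknown or missing strings fall back to the UNKNOWN member added by this hunk.
RateLimitAction parseMode(const std::string &mode)
{
    auto it = rate_limit_string_to_action.find(mode);
    return it == rate_limit_string_to_action.end() ? RateLimitAction::UNKNOWN : it->second;
}

int main()
{
    std::cout << (parseMode("Prevent") == RateLimitAction::PREVENT ? "prevent" : "other") << "\n";
}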
@@ -37,7 +37,7 @@ public:
             audience_team,
             obj,
             false,
-            MessageTypeTag::GENERIC,
+            MessageCategory::GENERIC,
             std::forward<Args>(args)...
         )
     {
@@ -56,7 +56,7 @@ public:
             audience_team,
             obj,
             is_async_message,
-            MessageTypeTag::GENERIC,
+            MessageCategory::GENERIC,
             std::forward<Args>(args)...
         )
     {
@@ -68,7 +68,7 @@ public:
         const ReportIS::AudienceTeam &audience_team,
         const T &obj,
         bool is_async_message,
-        const MessageTypeTag &message_type,
+        const MessageCategory &message_type,
         Args ...args)
             :
         ReportMessaging(
@@ -100,7 +100,7 @@ public:
             priority,
             obj,
             false,
-            MessageTypeTag::GENERIC,
+            MessageCategory::GENERIC,
             std::forward<Args>(args)...
         )
     {
@@ -115,7 +115,7 @@ public:
         const ReportIS::Priority &priority,
         const T &obj,
         bool _is_async_message,
-        const MessageTypeTag &message_type,
+        const MessageCategory &message_type,
         Args ...args)
             :
         report(
@@ -144,7 +144,7 @@ public:
 private:
     Report report;
     bool is_async_message;
-    MessageTypeTag message_type_tag;
+    MessageCategory message_type_tag;
 };
 
 #endif // __REPORT_MESSAGING_H__
@@ -24,6 +24,7 @@
 #include "i_shell_cmd.h"
 #include "i_rest_api.h"
 #include "i_tenant_manager.h"
+#include "i_messaging.h"
 #include "service_details.h"
 #include "i_mainloop.h"
 #include "component.h"
@@ -29,6 +29,7 @@
 #include "waap.h"
 #include "generic_metric.h"
 
+#define LOGGING_INTERVAL_IN_MINUTES 10
 enum class AssetType { API, WEB, ALL, COUNT };
 
 class WaapTelemetryEvent : public Event<WaapTelemetryEvent>
@@ -74,6 +75,24 @@ private:
     std::unordered_set<std::string> sources_seen;
 };
 
+class WaapTrafficTelemetrics : public WaapTelemetryBase
+{
+public:
+    void updateMetrics(const std::string &asset_id, const DecisionTelemetryData &data);
+    void initMetrics();
+
+private:
+    MetricCalculations::Counter post_requests{this, "reservedNgenA"};
+    MetricCalculations::Counter get_requests{this, "reservedNgenB"};
+    MetricCalculations::Counter put_requests{this, "reservedNgenC"};
+    MetricCalculations::Counter patch_requests{this, "reservedNgenD"};
+    MetricCalculations::Counter delete_requests{this, "reservedNgenE"};
+    MetricCalculations::Counter other_requests{this, "reservedNgenF"};
+    MetricCalculations::Counter response_2xx{this, "reservedNgenG"};
+    MetricCalculations::Counter response_4xx{this, "reservedNgenH"};
+    MetricCalculations::Counter response_5xx{this, "reservedNgenI"};
+};
+
 class WaapAttackTypesMetrics : public WaapTelemetryBase
 {
 public:
@@ -100,8 +119,62 @@ public:
 private:
     std::map<std::string, std::shared_ptr<WaapTelemetrics>> metrics;
     std::map<std::string, std::shared_ptr<WaapTelemetrics>> telemetries;
+    std::map<std::string, std::shared_ptr<WaapTrafficTelemetrics>> traffic_telemetries;
     std::map<std::string, std::shared_ptr<WaapAttackTypesMetrics>> attack_types;
     std::map<std::string, std::shared_ptr<WaapAttackTypesMetrics>> attack_types_telemetries;
+
+    template <typename T>
+    void initializeTelemetryData(
+        const std::string& asset_id,
+        const DecisionTelemetryData& data,
+        const std::string& telemetryName,
+        std::map<std::string, std::shared_ptr<T>>& telemetryMap
+    ) {
+        if (!telemetryMap.count(asset_id)) {
+            telemetryMap.emplace(asset_id, std::make_shared<T>());
+            telemetryMap[asset_id]->init(
+                telemetryName,
+                ReportIS::AudienceTeam::WAAP,
+                ReportIS::IssuingEngine::AGENT_CORE,
+                std::chrono::minutes(LOGGING_INTERVAL_IN_MINUTES),
+                true,
+                ReportIS::Audience::SECURITY
+            );
+
+            telemetryMap[asset_id]->template registerContext<std::string>(
+                "pracitceType",
+                std::string("Threat Prevention"),
+                EnvKeyAttr::LogSection::SOURCE
+            );
+            telemetryMap[asset_id]->template registerContext<std::string>(
+                "practiceSubType",
+                std::string("Web Application"),
+                EnvKeyAttr::LogSection::SOURCE
+            );
+            telemetryMap[asset_id]->template registerContext<std::string>(
+                "assetId",
+                asset_id,
+                EnvKeyAttr::LogSection::SOURCE
+            );
+            telemetryMap[asset_id]->template registerContext<std::string>(
+                "assetName",
+                data.assetName,
+                EnvKeyAttr::LogSection::SOURCE
+            );
+            telemetryMap[asset_id]->template registerContext<std::string>(
+                "practiceId",
+                data.practiceId,
+                EnvKeyAttr::LogSection::SOURCE
+            );
+            telemetryMap[asset_id]->template registerContext<std::string>(
+                "practiceName",
+                data.practiceName,
+                EnvKeyAttr::LogSection::SOURCE
+            );
+
+            telemetryMap[asset_id]->registerListener();
+        }
+    }
 };
 
 class AssetCountEvent : public Event<AssetCountEvent>
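Note: initializeTelemetryData<T> lazily creates one metric object per asset id, initializes it with the ten-minute logging interval, registers the practice and asset context keys, and registers the object as a listener; the new traffic_telemetries map is fed through the same helper. A minimal sketch of the lazy create-per-key pattern the template implements, stripped of the reporting framework (the metric name below is illustrative only):

#include <iostream>
#include <map>
#include <memory>
#include <string>

// Stand-in for a metric object; the real WaapTrafficTelemetrics registers counters and contexts.
struct ToyTelemetry {
    void init(const std::string &name) { std::cout << "init metric set: " << name << "\n"; }
    int requests = 0;
};

// Same shape as initializeTelemetryData<T>: create and initialize only on first sight of an asset.
template <typename T>
T & getOrCreate(const std::string &asset_id, std::map<std::string, std::shared_ptr<T>> &telemetryMap)
{
    if (!telemetryMap.count(asset_id)) {
        telemetryMap.emplace(asset_id, std::make_shared<T>());
        telemetryMap[asset_id]->init("assetTelemetry-" + asset_id);  // illustrative name only
    }
    return *telemetryMap[asset_id];
}

int main()
{
    std::map<std::string, std::shared_ptr<ToyTelemetry>> traffic_telemetries;
    getOrCreate("asset-1", traffic_telemetries).requests++;  // creates the entry and counts
    getOrCreate("asset-1", traffic_telemetries).requests++;  // reuses the cached entry
    std::cout << traffic_telemetries["asset-1"]->requests << "\n";  // prints 2
}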
@@ -3,5 +3,5 @@ link_directories(${BOOST_ROOT}/lib)
 add_unit_test(
     packet_ut
     "packet_ut.cc"
-    "packet;connkey;buffers;environment;metric;event_is;-lboost_regex"
+    "packet;messaging;connkey;buffers;environment;metric;event_is;-lboost_regex"
 )
@@ -29,17 +29,13 @@ ReportMessaging::~ReportMessaging()
 
     auto messaging = Singleton::Consume<I_Messaging>::by<ReportMessaging>();
     try {
-        messaging->sendObjectWithPersistence(
-            log_rest,
-            I_Messaging::Method::POST,
+        messaging->sendAsyncMessage(
+            HTTPMethod::POST,
             url,
-            "",
-            true,
-            message_type_tag,
-            is_async_message
+            log_rest,
+            message_type_tag
         );
-    } catch (...) {
-    }
+    } catch (...) {}
 }
 
 ReportMessaging &
@@ -1,3 +1,3 @@
 link_directories(${BOOST_ROOT}/lib)
 
-add_unit_test(report_messaging_ut "report_messaging_ut.cc" "report_messaging;report;singleton;-lboost_regex")
+add_unit_test(report_messaging_ut "report_messaging_ut.cc" "report_messaging;report;messaging;singleton;-lboost_regex")
@@ -69,7 +69,8 @@ TEST_F(ReportMessagingTest, title_only)
 {
     EXPECT_CALL(
         mock_messaging,
-        mockSendPersistentMessage(
+        sendAsyncMessage(
+            _,
             _,
             "{\n"
             " \"log\": {\n"
@@ -98,20 +99,18 @@ TEST_F(ReportMessagingTest, title_only)
             " }\n"
             "}",
             _,
-            _,
-            _,
-            _,
             _
         )
-    ).WillOnce(Return(string()));
-    ReportMessaging("test", ReportIS::AudienceTeam::AGENT_CORE, 1, ReportIS::Tags::ACCESS_CONTROL);
+    ).Times(1);
+    ReportMessaging("test", ReportIS::AudienceTeam::AGENT_CORE, 1, true, ReportIS::Tags::ACCESS_CONTROL);
 }
 
 TEST_F(ReportMessagingTest, with_dynamic_fields)
 {
     EXPECT_CALL(
         mock_messaging,
-        mockSendPersistentMessage(
+        sendAsyncMessage(
+            _,
             _,
             "{\n"
             " \"log\": {\n"
@@ -141,12 +140,9 @@ TEST_F(ReportMessagingTest, with_dynamic_fields)
             " }\n"
             "}",
             _,
-            _,
-            _,
-            _,
             _
         )
-    ).WillOnce(Return(string()));
+    ).Times(1);
     ReportMessaging("test", ReportIS::AudienceTeam::AGENT_CORE, 1, ReportIS::Tags::ACCESS_CONTROL)
         << LogField("ASD", "QWE");
 }
@@ -155,7 +151,8 @@ TEST_F(ReportMessagingTest, custom_event_object)
 {
     EXPECT_CALL(
         mock_messaging,
-        mockSendPersistentMessage(
+        sendAsyncMessage(
+            _,
             _,
             "{\n"
             " \"log\": {\n"
@@ -192,12 +189,9 @@ TEST_F(ReportMessagingTest, custom_event_object)
             " }\n"
             "}",
             _,
-            _,
-            _,
-            _,
             _
         )
-    ).WillOnce(Return(string()));
+    ).Times(1);
 
     ReportMessaging(
         "test",
@@ -211,7 +205,8 @@ TEST_F(ReportMessagingTest, custom_priority)
 {
     EXPECT_CALL(
         mock_messaging,
-        mockSendPersistentMessage(
+        sendAsyncMessage(
+            _,
             _,
             "{\n"
             " \"log\": {\n"
@@ -248,12 +243,9 @@ TEST_F(ReportMessagingTest, custom_priority)
             " }\n"
             "}",
             _,
-            _,
-            _,
-            _,
             _
         )
-    ).WillOnce(Return(string()));
+    ).Times(1);
 
     ReportMessaging(
         "test",
@@ -279,7 +271,8 @@ TEST_F(ReportMessagingTest, with_env_details)
 
     EXPECT_CALL(
         mock_messaging,
-        mockSendPersistentMessage(
+        sendAsyncMessage(
+            _,
             _,
             "{\n"
             " \"log\": {\n"
@@ -316,12 +309,9 @@ TEST_F(ReportMessagingTest, with_env_details)
             " }\n"
             "}",
             _,
-            _,
-            _,
-            _,
             _
         )
-    ).WillOnce(Return(string()));
+    ).Times(1);
 
     ReportMessaging(
         "test",
@@ -1,3 +1,4 @@
+#add_subdirectory(http_geo_filter)
 add_subdirectory(ips)
 add_subdirectory(layer_7_access_control)
 add_subdirectory(local_policy_mgmt_gen)
components/security_apps/http_geo_filter/CMakeLists.txt (new file, 1 line)
@@ -0,0 +1 @@
+add_library(http_geo_filter http_geo_filter.cc)
components/security_apps/http_geo_filter/geo_config.h (new file, 90 lines)
@@ -0,0 +1,90 @@
+#ifndef __GEO_CONFIG_H__
+#define __GEO_CONFIG_H__
+
+#include <string>
+
+#include "cereal/archives/json.hpp"
+#include "debug.h"
+
+USE_DEBUG_FLAG(D_GEO_FILTER);
+
+class GeoFilterCountry
+{
+public:
+    void
+    load(cereal::JSONInputArchive &ar)
+    {
+        try {
+            ar(cereal::make_nvp("countryName", country_name));
+            ar(cereal::make_nvp("countryCode", country_code));
+            ar(cereal::make_nvp("id", id));
+        } catch (const cereal::Exception &e) {
+            dbgDebug(D_GEO_FILTER) << "Failed to load http geo country config, error: " << e.what();
+        }
+    }
+
+    const std::string & getCountryCode() const { return country_code; }
+
+private:
+    std::string country_name;
+    std::string country_code;
+    std::string id;
+};
+
+class GeoConfig
+{
+public:
+    void
+    load(cereal::JSONInputArchive &ar)
+    {
+        try {
+            ar(cereal::make_nvp("name", name));
+            ar(cereal::make_nvp("defaultAction", default_action));
+            ar(cereal::make_nvp("practiceId", id));
+            ar(cereal::make_nvp("allowedCountries", allowed_countries));
+            ar(cereal::make_nvp("blockedCountries", blocked_countries));
+        } catch (const cereal::Exception &e) {
+            dbgDebug(D_GEO_FILTER) << "Failed to load http geo config, error: " << e.what();
+        }
+    }
+
+    const std::string & getId() const { return id; }
+    const std::string & getDefaultAction() const { return default_action; }
+
+    bool
+    isAllowedCountry(const std::string &_country_code) const
+    {
+        dbgTrace(D_GEO_FILTER) << "Check if country code: " << _country_code << " is allowed";
+        for (const GeoFilterCountry &country : allowed_countries) {
+            if (country.getCountryCode() == _country_code) {
+                dbgTrace(D_GEO_FILTER) << "County code: " << _country_code << " is allowed";
+                return true;
+            }
+        }
+        dbgTrace(D_GEO_FILTER) << "County code: " << _country_code << " not in allowed countries list";
+        return false;
+    }
+
+    bool
+    isBlockedCountry(const std::string &_country_code) const
+    {
+        dbgTrace(D_GEO_FILTER) << "Check if country code: " << _country_code << " is blocked";
+        for (const GeoFilterCountry &country : blocked_countries) {
+            if (country.getCountryCode() == _country_code) {
+                dbgTrace(D_GEO_FILTER) << "County code: " << _country_code << " is blocked";
+                return true;
+            }
+        }
+        dbgTrace(D_GEO_FILTER) << "County code: " << _country_code << " not in blocked countries list";
+        return false;
+    }
+
+private:
+    std::string name;
+    std::string default_action;
+    std::string id;
+    std::vector<GeoFilterCountry> allowed_countries;
+    std::vector<GeoFilterCountry> blocked_countries;
+};
+
+#endif //__GEO_CONFIG_H__
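Note: GeoConfig::load reads name, defaultAction, practiceId, allowedCountries and blockedCountries from a cereal JSON archive, and each country entry reads countryName, countryCode and id. A hedged sketch of feeding such an object through cereal with reduced copies of the structures above; the sample document is invented and the real policy files may wrap the practice differently (requires the header-only cereal library):

#include <iostream>
#include <sstream>
#include <string>
#include <vector>

#include <cereal/archives/json.hpp>
#include <cereal/types/string.hpp>
#include <cereal/types/vector.hpp>

// Reduced copy of GeoFilterCountry, keeping only the JSON keys it reads.
struct Country {
    std::string country_name, country_code, id;
    void load(cereal::JSONInputArchive &ar)
    {
        ar(cereal::make_nvp("countryName", country_name));
        ar(cereal::make_nvp("countryCode", country_code));
        ar(cereal::make_nvp("id", id));
    }
};

// Reduced copy of GeoConfig with the same field names.
struct Geo {
    std::string name, default_action, id;
    std::vector<Country> allowed_countries, blocked_countries;
    void load(cereal::JSONInputArchive &ar)
    {
        ar(cereal::make_nvp("name", name));
        ar(cereal::make_nvp("defaultAction", default_action));
        ar(cereal::make_nvp("practiceId", id));
        ar(cereal::make_nvp("allowedCountries", allowed_countries));
        ar(cereal::make_nvp("blockedCountries", blocked_countries));
    }
};

int main()
{
    // Invented sample document, shaped after the keys read by GeoConfig::load.
    std::stringstream ss(R"({
        "name": "geo-practice",
        "defaultAction": "accept",
        "practiceId": "p-1",
        "allowedCountries": [{"countryName": "Example", "countryCode": "EX", "id": "c-1"}],
        "blockedCountries": []
    })");
    cereal::JSONInputArchive ar(ss);
    Geo geo;
    geo.load(ar);
    std::cout << geo.name << " default=" << geo.default_action
              << " allowed=" << geo.allowed_countries.size() << "\n";
}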
components/security_apps/http_geo_filter/http_geo_filter.cc (new file, 347 lines)
@@ -0,0 +1,347 @@
+#include "http_geo_filter.h"
+
+#include <errno.h>
+#include <unistd.h>
+#include <stddef.h>
+#include <algorithm>
+
+#include "generic_rulebase/generic_rulebase.h"
+#include "generic_rulebase/parameters_config.h"
+#include "generic_rulebase/triggers_config.h"
+#include "debug.h"
+#include "config.h"
+#include "rest.h"
+#include "geo_config.h"
+#include "ip_utilities.h"
+#include "log_generator.h"
+
+using namespace std;
+
+USE_DEBUG_FLAG(D_GEO_FILTER);
+
+static const LogTriggerConf default_triger;
+
+class HttpGeoFilter::Impl : public Listener<NewHttpTransactionEvent>
+{
+public:
+    void
+    init()
+    {
+        dbgTrace(D_GEO_FILTER) << "Init Http Geo filter component";
+        registerListener();
+    }
+
+    void
+    fini()
+    {
+        unregisterListener();
+    }
+
+    string getListenerName() const override { return "HTTP geo filter"; }
+
+    void
+    loadDefaultAction()
+    {
+        auto default_action_maybe = getProfileAgentSetting<string>("httpGeoFilter.defaultAction");
+        if(default_action_maybe.ok()) {
+            default_action = convertActionToVerdict(default_action_maybe.unpack());
+            dbgTrace(D_GEO_FILTER)
+                << "Load http geo filter default action. Action: "
+                << default_action_maybe.unpack();
+        } else {
+            default_action = ngx_http_cp_verdict_e::TRAFFIC_VERDICT_IRRELEVANT;
+            dbgTrace(D_GEO_FILTER) << "No http geo filter default action. Action: Irrelevant";
+        }
+    }
+
+    EventVerdict
+    respond(const NewHttpTransactionEvent &event) override
+    {
+        dbgTrace(D_GEO_FILTER) << getListenerName() << " new transaction event";
+
+        if (!ParameterException::isGeoLocationExceptionExists() &&
+            !getConfiguration<GeoConfig>("rulebase", "httpGeoFilter").ok()
+        ) {
+            dbgTrace(D_GEO_FILTER) << "No geo location practice nor exception was found. Returning default verdict";
+            return EventVerdict(default_action);
+        }
+
+        I_GeoLocation *i_geo_location = Singleton::Consume<I_GeoLocation>::by<HttpGeoFilter>();
+        auto asset_location = i_geo_location->lookupLocation(event.getSourceIP());
+        if (!asset_location.ok()) {
+            dbgTrace(D_GEO_FILTER) << "Lookup location failed, Error: " << asset_location.getErr();
+            return EventVerdict(default_action);
+        }
+
+        EnumArray<I_GeoLocation::GeoLocationField, std::string> geo_location_data = asset_location.unpack();
+
+        ngx_http_cp_verdict_e exception_verdict = getExceptionVerdict(event, geo_location_data);
+        if (exception_verdict != ngx_http_cp_verdict_e::TRAFFIC_VERDICT_IRRELEVANT) {
+            return EventVerdict(exception_verdict);
+        }
+
+        ngx_http_cp_verdict_e geo_lookup_verdict = getGeoLookupVerdict(event, geo_location_data);
+        if (geo_lookup_verdict != ngx_http_cp_verdict_e::TRAFFIC_VERDICT_IRRELEVANT) {
+            return EventVerdict(geo_lookup_verdict);
+        }
+        return EventVerdict(default_action);
+    }
+
+private:
+    string
+    convertIpAddrToString(const IPAddr &ip_to_convert)
+    {
+        ostringstream os;
+        os << ip_to_convert;
+        return os.str();
+    }
+
+    ngx_http_cp_verdict_e
+    convertActionToVerdict(const string &action) const
+    {
+        if (action == "accept") return ngx_http_cp_verdict_e::TRAFFIC_VERDICT_ACCEPT;
+        if (action == "drop") return ngx_http_cp_verdict_e::TRAFFIC_VERDICT_DROP;
+        return ngx_http_cp_verdict_e::TRAFFIC_VERDICT_INSPECT;
+    }
+
+    ngx_http_cp_verdict_e
+    convertBehaviorValueToVerdict(const BehaviorValue &behavior_value) const
+    {
+        if (behavior_value == BehaviorValue::ACCEPT || behavior_value == BehaviorValue::IGNORE) {
+            return ngx_http_cp_verdict_e::TRAFFIC_VERDICT_ACCEPT;
+        }
+        if (behavior_value == BehaviorValue::DROP || behavior_value == BehaviorValue::REJECT) {
+            return ngx_http_cp_verdict_e::TRAFFIC_VERDICT_DROP;
+        }
+        return ngx_http_cp_verdict_e::TRAFFIC_VERDICT_IRRELEVANT;
+    }
+
+    ngx_http_cp_verdict_e
+    getGeoLookupVerdict(
+        const NewHttpTransactionEvent &event,
+        const EnumArray<I_GeoLocation::GeoLocationField, std::string> &geo_location_data)
+    {
+        auto maybe_geo_config = getConfiguration<GeoConfig>("rulebase", "httpGeoFilter");
+        if (!maybe_geo_config.ok()) {
+            dbgWarning(D_GEO_FILTER) << "Failed to load HTTP Geo Filter config. Error:" << maybe_geo_config.getErr();
+            return ngx_http_cp_verdict_e::TRAFFIC_VERDICT_IRRELEVANT;
+        }
+        GeoConfig geo_config = maybe_geo_config.unpack();
+        string country_code = geo_location_data[I_GeoLocation::GeoLocationField::COUNTRY_CODE];
+
+        if (geo_config.isAllowedCountry(country_code)) {
+            dbgTrace(D_GEO_FILTER)
+                << "geo verdict ACCEPT, practice id: "
+                << geo_config.getId()
+                << ", country code: "
+                << country_code;
+            generateVerdictLog(
+                ngx_http_cp_verdict_e::TRAFFIC_VERDICT_ACCEPT,
+                event,
+                geo_config.getId(),
+                true,
+                geo_location_data
+            );
+            return ngx_http_cp_verdict_e::TRAFFIC_VERDICT_ACCEPT;
+        }
+        if (geo_config.isBlockedCountry(country_code)) {
+            dbgTrace(D_GEO_FILTER)
+                << "geo verdict DROP, practice id: "
+                << geo_config.getId()
+                << ", country code: "
+                << country_code;
+            generateVerdictLog(
+                ngx_http_cp_verdict_e::TRAFFIC_VERDICT_DROP,
+                event,
+                geo_config.getId(),
+                true,
+                geo_location_data
+            );
+            return ngx_http_cp_verdict_e::TRAFFIC_VERDICT_DROP;
+        }
+        dbgTrace(D_GEO_FILTER)
+            << "No matched practice. Returned default action: "
+            << geo_config.getDefaultAction();
+        generateVerdictLog(
+            convertActionToVerdict(geo_config.getDefaultAction()),
+            event,
+            geo_config.getId(),
+            true,
+            geo_location_data,
+            true
+        );
+        return convertActionToVerdict(geo_config.getDefaultAction());
+    }
+
+    Maybe<pair<ngx_http_cp_verdict_e, string>>
+    getBehaviorsVerdict(
+        const unordered_map<string, set<string>> &behaviors_map_to_search,
+        const NewHttpTransactionEvent &event,
+        EnumArray<I_GeoLocation::GeoLocationField, std::string> geo_location_data)
+    {
+        bool is_matched = false;
+        ParameterBehavior matched_behavior;
+        ngx_http_cp_verdict_e matched_verdict = ngx_http_cp_verdict_e::TRAFFIC_VERDICT_IRRELEVANT;
+        I_GenericRulebase *i_rulebase = Singleton::Consume<I_GenericRulebase>::by<HttpGeoFilter>();
+        set<ParameterBehavior> behaviors_set = i_rulebase->getBehavior(behaviors_map_to_search);
+        dbgTrace(D_GEO_FILTER) << "get verdict from: " << behaviors_set.size() << " behaviors";
+        for (const ParameterBehavior &behavior : behaviors_set) {
+            matched_verdict = convertBehaviorValueToVerdict(behavior.getValue());
+            if (
+                matched_verdict == ngx_http_cp_verdict_e::TRAFFIC_VERDICT_DROP
+            ){
+                dbgTrace(D_GEO_FILTER) << "behavior verdict: DROP, exception id: " << behavior.getId();
+                generateVerdictLog(
+                    matched_verdict,
+                    event,
+                    behavior.getId(),
+                    false,
+                    geo_location_data
+                );
+                return pair<ngx_http_cp_verdict_e, string>(matched_verdict, behavior.getId());
+            }
+            else if (
+                matched_verdict == ngx_http_cp_verdict_e::TRAFFIC_VERDICT_ACCEPT
+            ){
+                dbgTrace(D_GEO_FILTER) << "behavior verdict: ACCEPT, exception id: " << behavior.getId();
+                matched_behavior = behavior;
+                is_matched = true;
+            }
+        }
+        if (is_matched) {
+            return pair<ngx_http_cp_verdict_e, string>(
|
ngx_http_cp_verdict_e::TRAFFIC_VERDICT_ACCEPT,
|
||||||
|
matched_behavior.getId()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
return genError("No exception matched to HTTP geo filter rule");
|
||||||
|
}
|
||||||
|
|
||||||
|
ngx_http_cp_verdict_e
|
||||||
|
getExceptionVerdict(
|
||||||
|
const NewHttpTransactionEvent &event,
|
||||||
|
EnumArray<I_GeoLocation::GeoLocationField, std::string> geo_location_data
|
||||||
|
){
|
||||||
|
string country_code = geo_location_data[I_GeoLocation::GeoLocationField::COUNTRY_CODE];
|
||||||
|
string country_name = geo_location_data[I_GeoLocation::GeoLocationField::COUNTRY_NAME];
|
||||||
|
string source_ip = convertIpAddrToString(event.getSourceIP());
|
||||||
|
|
||||||
|
pair<ngx_http_cp_verdict_e, string> curr_matched_behavior;
|
||||||
|
ngx_http_cp_verdict_e verdict = ngx_http_cp_verdict_e::TRAFFIC_VERDICT_IRRELEVANT;
|
||||||
|
|
||||||
|
dbgTrace(D_GEO_FILTER)
|
||||||
|
<< "Get exception verdict. "
|
||||||
|
<< "country code: "
|
||||||
|
<< country_code
|
||||||
|
<< ", country name: "
|
||||||
|
<< country_name
|
||||||
|
<< ", source ip address: "
|
||||||
|
<< source_ip;
|
||||||
|
|
||||||
|
unordered_map<string, set<string>> exception_value_source_ip = {{"sourceIP", {source_ip}}};
|
||||||
|
auto matched_behavior_maybe = getBehaviorsVerdict(exception_value_source_ip, event, geo_location_data);
|
||||||
|
if (matched_behavior_maybe.ok()) {
|
||||||
|
curr_matched_behavior = matched_behavior_maybe.unpack();
|
||||||
|
verdict = curr_matched_behavior.first;
|
||||||
|
if (verdict == ngx_http_cp_verdict_e::TRAFFIC_VERDICT_DROP) {
|
||||||
|
return verdict;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
unordered_map<string, set<string>> exception_value_country_code = {
|
||||||
|
{"countryCode", {country_code}}
|
||||||
|
};
|
||||||
|
matched_behavior_maybe = getBehaviorsVerdict(exception_value_country_code, event, geo_location_data);
|
||||||
|
if (matched_behavior_maybe.ok()) {
|
||||||
|
curr_matched_behavior = matched_behavior_maybe.unpack();
|
||||||
|
verdict = curr_matched_behavior.first;
|
||||||
|
if (verdict == ngx_http_cp_verdict_e::TRAFFIC_VERDICT_DROP) {
|
||||||
|
return verdict;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
unordered_map<string, set<string>> exception_value_country_name = {
|
||||||
|
{"countryName", {country_name}}
|
||||||
|
};
|
||||||
|
matched_behavior_maybe = getBehaviorsVerdict(exception_value_country_name, event, geo_location_data);
|
||||||
|
if (matched_behavior_maybe.ok()) {
|
||||||
|
curr_matched_behavior = matched_behavior_maybe.unpack();
|
||||||
|
verdict = curr_matched_behavior.first;
|
||||||
|
if (verdict == ngx_http_cp_verdict_e::TRAFFIC_VERDICT_DROP) {
|
||||||
|
return verdict;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (verdict == ngx_http_cp_verdict_e::TRAFFIC_VERDICT_ACCEPT) {
|
||||||
|
generateVerdictLog(
|
||||||
|
verdict,
|
||||||
|
event,
|
||||||
|
curr_matched_behavior.second,
|
||||||
|
false,
|
||||||
|
geo_location_data
|
||||||
|
);
|
||||||
|
}
|
||||||
|
return verdict;
|
||||||
|
}
|
||||||
|
|
||||||
|
void
|
||||||
|
generateVerdictLog(
|
||||||
|
const ngx_http_cp_verdict_e &verdict,
|
||||||
|
const NewHttpTransactionEvent &event,
|
||||||
|
const string &matched_id,
|
||||||
|
bool is_geo_filter,
|
||||||
|
const EnumArray<I_GeoLocation::GeoLocationField, std::string> geo_location_data,
|
||||||
|
bool is_default_action = false
|
||||||
|
)
|
||||||
|
{
|
||||||
|
dbgTrace(D_GEO_FILTER) << "Generate Log for verdict - HTTP geo filter";
|
||||||
|
auto &trigger = getConfigurationWithDefault(default_triger, "rulebase", "log");
|
||||||
|
bool is_prevent = verdict == ngx_http_cp_verdict_e::TRAFFIC_VERDICT_DROP;
|
||||||
|
string matched_on = is_geo_filter ? "geoFilterPracticeId" : "exceptionId";
|
||||||
|
LogGen log = trigger(
|
||||||
|
"Web Request - HTTP Geo Filter",
|
||||||
|
LogTriggerConf::SecurityType::ThreatPrevention,
|
||||||
|
ReportIS::Severity::MEDIUM,
|
||||||
|
ReportIS::Priority::HIGH,
|
||||||
|
is_prevent,
|
||||||
|
LogField("practiceType", "HTTP Geo Filter"),
|
||||||
|
LogField(matched_on, matched_id),
|
||||||
|
ReportIS::Tags::HTTP_GEO_FILTER
|
||||||
|
);
|
||||||
|
log
|
||||||
|
<< LogField("sourceIP", convertIpAddrToString(event.getSourceIP()))
|
||||||
|
<< LogField("sourcePort", event.getSourcePort())
|
||||||
|
<< LogField("hostName", event.getDestinationHost())
|
||||||
|
<< LogField("httpMethod", event.getHttpMethod())
|
||||||
|
<< LogField("securityAction", is_prevent ? "Prevent" : "Detect");
|
||||||
|
|
||||||
|
if (is_default_action) log << LogField("isDefaultSecurityAction", true);
|
||||||
|
|
||||||
|
log
|
||||||
|
<< LogField("sourceCountryCode", geo_location_data[I_GeoLocation::GeoLocationField::COUNTRY_CODE])
|
||||||
|
<< LogField("sourceCountryName", geo_location_data[I_GeoLocation::GeoLocationField::COUNTRY_NAME]);
|
||||||
|
}
|
||||||
|
|
||||||
|
ngx_http_cp_verdict_e default_action = ngx_http_cp_verdict_e::TRAFFIC_VERDICT_IRRELEVANT;
|
||||||
|
};
|
||||||
|
|
||||||
|
HttpGeoFilter::HttpGeoFilter() : Component("HttpGeoFilter"), pimpl(make_unique<HttpGeoFilter::Impl>()) {}
|
||||||
|
HttpGeoFilter::~HttpGeoFilter() {}
|
||||||
|
|
||||||
|
void
|
||||||
|
HttpGeoFilter::init()
|
||||||
|
{
|
||||||
|
pimpl->init();
|
||||||
|
}
|
||||||
|
|
||||||
|
void
|
||||||
|
HttpGeoFilter::fini()
|
||||||
|
{
|
||||||
|
pimpl->fini();
|
||||||
|
}
|
||||||
|
|
||||||
|
void
|
||||||
|
HttpGeoFilter::preload()
|
||||||
|
{
|
||||||
|
registerExpectedConfiguration<GeoConfig>("rulebase", "httpGeoFilter");
|
||||||
|
registerConfigLoadCb([this]() { pimpl->loadDefaultAction(); });
|
||||||
|
}
|
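For orientation, the respond() handler above applies its sources in a fixed order: an exception match (source IP, then country code, then country name) takes priority over the geo-filter country lists, which in turn take priority over the configured default action, and the first decisive source short-circuits the rest. The standalone sketch below mirrors only that precedence idea; the Verdict enum, resolve() helper, and lambdas are illustrative stand-ins and are not part of this commit or of the openappsec API.

#include <functional>
#include <iostream>
#include <vector>

// Illustrative stand-in for ngx_http_cp_verdict_e (assumption, not the real enum).
enum class Verdict { Irrelevant, Accept, Drop };

// Each source returns Irrelevant when it has no opinion, mirroring the component above.
Verdict resolve(const std::vector<std::function<Verdict()>> &sources, Verdict default_action)
{
    for (const auto &source : sources) {
        Verdict v = source();
        if (v != Verdict::Irrelevant) return v;   // first decisive source wins
    }
    return default_action;                        // nobody decided: fall back to the default
}

int main()
{
    auto exception_lookup = []() { return Verdict::Irrelevant; }; // e.g. no matching exception
    auto geo_lookup       = []() { return Verdict::Drop; };       // e.g. source country is blocked
    Verdict v = resolve({exception_lookup, geo_lookup}, Verdict::Accept);
    std::cout << (v == Verdict::Drop ? "drop" : "accept") << "\n";
    return 0;
}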
@@ -3,5 +3,5 @@ link_directories(${CMAKE_BINARY_DIR}/core/shmem_ipc)
 add_unit_test(
     ips_ut
     "signatures_ut.cc;entry_ut.cc;component_ut.cc;configuration.cc;rule_selector_ut.cc;compound_ut.cc;resource_ut.cc"
-    "ips;keywords;pcre2-8;intelligence_is_v2;logging;compression_utils;agent_details;time_proxy;event_is;table;http_transaction_data;nginx_attachment;connkey;pm;metric;encryptor;generic_rulebase;generic_rulebase_evaluators;compression_utils;ip_utilities;-lboost_regex;-lcrypto;-lz"
+    "ips;messaging;keywords;pcre2-8;intelligence_is_v2;logging;compression_utils;agent_details;time_proxy;event_is;table;http_transaction_data;nginx_attachment;connkey;pm;metric;encryptor;generic_rulebase;generic_rulebase_evaluators;compression_utils;ip_utilities;-lboost_regex;-lcrypto;-lz"
 )
@@ -3,5 +3,5 @@ file(COPY data DESTINATION .)
 add_unit_test(
     layer_7_access_control_ut
     "layer_7_access_control_ut.cc"
-    "l7_access_control;logging;agent_details;table;singleton;time_proxy;metric;event_is;connkey;http_transaction_data;generic_rulebase;generic_rulebase_evaluators;ip_utilities;intelligence_is_v2"
+    "l7_access_control;messaging;logging;agent_details;table;singleton;time_proxy;metric;event_is;connkey;http_transaction_data;generic_rulebase;generic_rulebase_evaluators;ip_utilities;intelligence_is_v2"
 )
@@ -266,8 +266,8 @@ TEST_F(Layer7AccessControlTest, ReturnAcceptVerdict)

     EXPECT_CALL(
         messaging_mock,
-        sendMessage(true, _, _, _, _, _, _, MessageTypeTag::INTELLIGENCE)
-    ).WillOnce(Return(intelligence_response_ok));
+        sendSyncMessage(_, _, _, MessageCategory::INTELLIGENCE, _)
+    ).WillOnce(Return(HTTPResponse(HTTPStatusCode::HTTP_OK, intelligence_response_ok)));

     registerTransactionData();
     ctx.registerValue<string>(HttpTransactionData::source_identifier, "1.2.3.4");
@@ -312,8 +312,8 @@ TEST_F(Layer7AccessControlTest, ReturnDropVerdictOnMaliciousReputation)

     EXPECT_CALL(
         messaging_mock,
-        sendMessage(true, _, _, _, _, _, _, MessageTypeTag::INTELLIGENCE)
-    ).WillOnce(Return(malicious_intelligence_response));
+        sendSyncMessage(_, _, _, MessageCategory::INTELLIGENCE, _)
+    ).WillOnce(Return(HTTPResponse(HTTPStatusCode::HTTP_OK, malicious_intelligence_response)));

     EXPECT_CALL(
         mock_ml,
@@ -358,8 +358,8 @@ TEST_F(Layer7AccessControlTest, ReturnDropVerdictCacheBased)

     EXPECT_CALL(
         messaging_mock,
-        sendMessage(true, _, _, _, _, _, _, MessageTypeTag::INTELLIGENCE)
-    ).WillOnce(Return(malicious_intelligence_response));
+        sendSyncMessage(_, _, _, MessageCategory::INTELLIGENCE, _)
+    ).WillOnce(Return(HTTPResponse(HTTPStatusCode::HTTP_OK, malicious_intelligence_response)));

     EXPECT_CALL(
         mock_ml,
@@ -410,8 +410,8 @@ TEST_F(Layer7AccessControlTest, AcceptOnDetect)

     EXPECT_CALL(
         messaging_mock,
-        sendMessage(true, _, _, _, _, _, _, MessageTypeTag::INTELLIGENCE)
-    ).WillOnce(Return(malicious_intelligence_response));
+        sendSyncMessage(_, _, _, MessageCategory::INTELLIGENCE, _)
+    ).WillOnce(Return(HTTPResponse(HTTPStatusCode::HTTP_OK, malicious_intelligence_response)));

     EXPECT_CALL(
         mock_ml,
@@ -456,8 +456,8 @@ TEST_F(Layer7AccessControlTest, FallbackToSourceIPAndDrop)

     EXPECT_CALL(
         messaging_mock,
-        sendMessage(true, _, _, _, _, _, _, MessageTypeTag::INTELLIGENCE)
-    ).WillOnce(Return(malicious_intelligence_response));
+        sendSyncMessage(_, _, _, MessageCategory::INTELLIGENCE, _)
+    ).WillOnce(Return(HTTPResponse(HTTPStatusCode::HTTP_OK, malicious_intelligence_response)));

     EXPECT_CALL(
         mock_ml,
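The five test updates above all make the same mechanical change: the mocked messaging call becomes the synchronous sendSyncMessage and the stubbed return value is a full HTTPResponse object rather than a bare payload string. The self-contained Google Mock sketch below shows that stubbing pattern in isolation; the Transport interface, MockTransport class, and Response struct are illustrative stand-ins, not the openappsec mocks or messaging types.

#include <gmock/gmock.h>
#include <gtest/gtest.h>
#include <string>

using ::testing::_;
using ::testing::Return;

// Illustrative stand-ins (assumptions): a rich response object and a small transport interface.
struct Response
{
    int status;
    std::string body;
};

class Transport
{
public:
    virtual ~Transport() = default;
    virtual Response sendSync(const std::string &method, const std::string &uri, const std::string &body) = 0;
};

class MockTransport : public Transport
{
public:
    MOCK_METHOD(Response, sendSync, (const std::string &, const std::string &, const std::string &), (override));
};

TEST(TransportStubbing, ReturnsRichResponseObject)
{
    MockTransport transport;
    // Stub the call once with a structured response, mirroring the HTTPResponse-based stubs above.
    EXPECT_CALL(transport, sendSync(_, _, _))
        .WillOnce(Return(Response{200, R"({"reputation": "benign"})"}));

    Response res = transport.sendSync("POST", "/api/v2/intelligence/assets/query", "{}");
    EXPECT_EQ(res.status, 200);
}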
@@ -133,11 +133,12 @@ AccessControlRateLimiteRules::load(cereal::JSONInputArchive &archive_in)
     dbgTrace(D_LOCAL_POLICY) << "Loading Access control rate limite rules";
     parseAppsecJSONKey<int>("limit", limit, archive_in);
     parseAppsecJSONKey<string>("uri", uri, archive_in);
-    parseAppsecJSONKey<string>("unit", unit, archive_in);
+    parseAppsecJSONKey<string>("unit", unit, archive_in, "minute");
     if (valid_units.count(unit) == 0) {
         dbgWarning(D_LOCAL_POLICY)
             << "Access control rate limite rules units invalid: "
             << unit;
+        throw PolicyGenException("Access control rate limite rules units invalid: " + unit);
     }
     parseAppsecJSONKey<string>("comment", comment, archive_in);
     parseAppsecJSONKey<vector<string>>("triggers", triggers, archive_in);
@@ -177,10 +178,10 @@ AccessControlRateLimit::load(cereal::JSONInputArchive &archive_in)
 {
     dbgTrace(D_LOCAL_POLICY) << "Loading Access control rate limit";
     string in_mode;
-    parseAppsecJSONKey<string>("overrideMode", in_mode, archive_in, "inactive");
+    parseAppsecJSONKey<string>("overrideMode", in_mode, archive_in, "detect");
     if (valid_modes_to_key.find(in_mode) == valid_modes_to_key.end()) {
         dbgWarning(D_LOCAL_POLICY) << "AppSec access control rate limit override mode invalid: " << in_mode;
-        mode = "Inactive";
+        throw PolicyGenException("AppSec access control rate limit override mode invalid: " + in_mode);
     } else {
         mode = valid_modes_to_key.at(in_mode);
     }
@@ -216,7 +217,7 @@ AccessControlPracticeSpec::load(cereal::JSONInputArchive &archive_in)

     parseAppsecJSONKey<string>("name", practice_name, archive_in);
     parseAppsecJSONKey<string>("appsecClassName", appsec_class_name, archive_in);
-    parseAppsecJSONKey<AccessControlRateLimit>("rateLimit", rate_limit, archive_in);
+    parseMandatoryAppsecJSONKey<AccessControlRateLimit>("rateLimit", rate_limit, archive_in);
 }

 void
@@ -111,7 +111,6 @@ private:
     I_EnvDetails* env_details = nullptr;
     I_Messaging* messaging = nullptr;
     EnvType env_type;
-    Flags<MessageConnConfig> conn_flags;
     std::string token;
 };

@@ -17,6 +17,7 @@
 #include <map>
 #include <set>
 #include <string>
+#include <exception>
 #include <cereal/archives/json.hpp>

 #include "config.h"
@@ -66,27 +67,57 @@ static const std::unordered_map<std::string, std::string> key_to_practices_val2

 static const std::string default_appsec_url = "http://*:*";

+class PolicyGenException : public std::exception
+{
+public:
+    PolicyGenException(const std::string& msg="") noexcept : m_msg(msg) {}
+
+    const char* what() const noexcept override
+    {
+        return m_msg.c_str();
+    }
+
+private:
+    std::string m_msg;
+};
+
 template <typename T>
 void
 parseAppsecJSONKey(
     const std::string &key_name,
     T &value,
     cereal::JSONInputArchive &archive_in,
-    const T &default_value = T())
+    const T &default_value = T(),
+    bool mandatory = false)
 {
     try {
         archive_in(cereal::make_nvp(key_name, value));
     } catch (const cereal::Exception &e) {
         archive_in.setNextName(nullptr);
         value = default_value;
-        dbgDebug(D_LOCAL_POLICY)
-            << "Could not parse the required key. Key: \""
-            << key_name
-            << "\", Error: "
-            << e.what();
+        if (!mandatory) {
+            dbgDebug(D_LOCAL_POLICY)
+                << "Could not parse the required key. Key: \""<< key_name
+                << "\", Error: " << e.what();
+        } else {
+            throw PolicyGenException(
+                "Could not parse a mandatory key: \"" + key_name + "\", Error: " + std::string(e.what())
+            );
+        }
     }
 }

+template <typename T>
+void
+parseMandatoryAppsecJSONKey(
+    const std::string &key_name,
+    T &value,
+    cereal::JSONInputArchive &archive_in,
+    const T &default_value = T())
+{
+    parseAppsecJSONKey(key_name, value, archive_in, default_value, true);
+}
+
 class AppsecSpecParserMetaData
 {
 public:
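The parseMandatoryAppsecJSONKey helper added above simply forwards to parseAppsecJSONKey with mandatory=true, so a missing key raises PolicyGenException instead of being silently defaulted. The self-contained sketch below reproduces that optional-versus-mandatory distinction with plain cereal; the SmallSpec type, its field names, and the parseKey helper are illustrative only and are not part of the openappsec policy schema.

#include <iostream>
#include <sstream>
#include <stdexcept>
#include <string>

#include <cereal/archives/json.hpp>
#include <cereal/types/string.hpp>

// Minimal version of the optional/mandatory key pattern shown in the diff above.
template <typename T>
void
parseKey(const std::string &key, T &value, cereal::JSONInputArchive &ar, bool mandatory, const T &fallback = T())
{
    try {
        ar(cereal::make_nvp(key, value));
    } catch (const cereal::Exception &) {
        ar.setNextName(nullptr);          // reset the archive so later keys still parse
        value = fallback;
        if (mandatory) throw std::runtime_error("missing mandatory key: " + key);
    }
}

struct SmallSpec   // illustrative stand-in for a policy spec struct
{
    std::string name;
    std::string mode;

    void load(cereal::JSONInputArchive &ar)
    {
        parseKey(std::string("name"), name, ar, true);                           // mandatory key
        parseKey(std::string("mode"), mode, ar, false, std::string("detect"));   // optional key with default
    }
};

int main()
{
    std::stringstream ss(R"({"spec": {"name": "demo"}})");
    cereal::JSONInputArchive ar(ss);
    SmallSpec spec;
    ar(cereal::make_nvp("spec", spec));   // cereal dispatches to SmallSpec::load
    std::cout << spec.name << " / " << spec.mode << "\n";   // prints: demo / detect
    return 0;
}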
@@ -43,8 +43,6 @@ K8sPolicyUtils::init()
     if (env_type == EnvType::K8S) {
         token = env_details->getToken();
         messaging = Singleton::Consume<I_Messaging>::by<K8sPolicyUtils>();
-        conn_flags.setFlag(MessageConnConfig::SECURE_CONN);
-        conn_flags.setFlag(MessageConnConfig::IGNORE_SSL_VALIDATION);
     }
 }

@@ -79,17 +77,19 @@
 K8sPolicyUtils::getObjectFromCluster(const string &path) const
 {
     T object;
-    bool res = messaging->sendObject(
-        object,
-        I_Messaging::Method::GET,
-        "kubernetes.default.svc",
-        443,
-        conn_flags,
-        path,
-        "Authorization: Bearer " + token + "\nConnection: close"
+    MessageMetadata k8s_md("kubernetes.default.svc", 443);
+    k8s_md.insertHeader("Authorization", "Bearer " + token);
+    k8s_md.insertHeader("Connection", "close");
+    k8s_md.setConnectioFlag(MessageConnectionConfig::IGNORE_SSL_VALIDATION);
+    auto res = messaging->sendSyncMessage(
+        HTTPMethod::GET,
+        path,
+        object,
+        MessageCategory::GENERIC,
+        k8s_md
     );

-    if (res) return object;
+    if (res.ok()) return object;

     return genError(string("Was not able to get object form k8s cluser in path: " + path));
 }
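The reworked getObjectFromCluster above keeps the same calling convention as before: the object is returned on success, otherwise an error string is produced with genError and the caller branches on ok()/unpack()/getErr(). The standalone sketch below illustrates only that result-handling pattern; the Result template and fetchObject function are simplified stand-ins for the framework's Maybe type, not the real openappsec classes.

#include <iostream>
#include <string>
#include <utility>

// Illustrative stand-in for the framework's value-or-error result type (assumption, not the real class).
template <typename T>
class Result
{
public:
    static Result success(T val) { return Result(std::move(val), "", true); }
    static Result failure(std::string err) { return Result(T(), std::move(err), false); }

    bool ok() const { return is_ok; }
    const T & unpack() const { return value; }
    const std::string & getErr() const { return error; }

private:
    Result(T val, std::string err, bool ok_flag) : value(std::move(val)), error(std::move(err)), is_ok(ok_flag) {}

    T value;
    std::string error;
    bool is_ok;
};

// Mirrors the calling convention above: return the object on success, otherwise an error the caller can log.
Result<std::string>
fetchObject(const std::string &path, bool simulate_failure)
{
    if (simulate_failure) return Result<std::string>::failure("Was not able to get object in path: " + path);
    return Result<std::string>::success("{\"kind\": \"Policy\"}");
}

int main()
{
    auto res = fetchObject("/apis/openappsec.io/v1beta2/policies/demo", false);
    if (res.ok()) {
        std::cout << "got: " << res.unpack() << "\n";
    } else {
        std::cout << "error: " << res.getErr() << "\n";
    }
    return 0;
}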
@@ -488,23 +488,33 @@ K8sPolicyUtils::createAppsecPolicyK8s(const string &policy_name, const string &i
     if (!maybe_appsec_policy_spec.ok() ||
         !doesVersionExist(maybe_appsec_policy_spec.unpack().getMetaData().getAnnotations(), "v1beta1")
     ) {
-        dbgWarning(D_LOCAL_POLICY)
-            << "Failed to retrieve Appsec policy with crds version: v1beta1, Trying version: v1beta2";
-        auto maybe_v1beta2_appsec_policy_spec = getObjectFromCluster<AppsecSpecParser<NewAppsecPolicySpec>>(
-            "/apis/openappsec.io/v1beta2/policies/" + policy_name
-        );
-        if(!maybe_v1beta2_appsec_policy_spec.ok()) {
-            dbgWarning(D_LOCAL_POLICY)
-                << "Failed to retrieve AppSec policy. Error: "
-                << maybe_v1beta2_appsec_policy_spec.getErr();
+        try {
+            dbgWarning(D_LOCAL_POLICY
+            ) << "Failed to retrieve Appsec policy with crds version: v1beta1, Trying version: v1beta2";
+            auto maybe_v1beta2_appsec_policy_spec = getObjectFromCluster<AppsecSpecParser<NewAppsecPolicySpec>>(
+                "/apis/openappsec.io/v1beta2/policies/" + policy_name
+            );
+            if (!maybe_v1beta2_appsec_policy_spec.ok()) {
+                dbgWarning(D_LOCAL_POLICY)
+                    << "Failed to retrieve AppSec policy. Error: " << maybe_v1beta2_appsec_policy_spec.getErr();
+                return std::make_tuple(
+                    genError("Failed to retrieve AppSec v1beta1 policy. Error: " + maybe_appsec_policy_spec.getErr()),
+                    genError(
+                        "Failed to retrieve AppSec v1beta2 policy. Error: " + maybe_v1beta2_appsec_policy_spec.getErr()
+                    )
+                );
+            }
             return std::make_tuple(
-                genError("Failed to retrieve AppSec v1beta1 policy. Error: " + maybe_appsec_policy_spec.getErr()),
-                genError(
-                    "Failed to retrieve AppSec v1beta2 policy. Error: " + maybe_v1beta2_appsec_policy_spec.getErr()));
+                genError("There is no v1beta1 policy"),
+                createAppsecPolicyK8sFromV1beta2Crds(maybe_v1beta2_appsec_policy_spec.unpack(), ingress_mode)
+            );
+        } catch (const PolicyGenException &e) {
+            dbgDebug(D_LOCAL_POLICY) << "Failed in policy generation. Error: " << e.what();
+            return std::make_tuple(
+                genError("There is no v1beta1 policy"),
+                genError("Failed to retrieve AppSec v1beta2 policy. Error: " + string(e.what()))
+            );
         }
-        return std::make_tuple(
-            genError("There is no v1beta1 policy"),
-            createAppsecPolicyK8sFromV1beta2Crds(maybe_v1beta2_appsec_policy_spec.unpack(), ingress_mode));
     }

     return std::make_tuple(
@@ -521,22 +531,22 @@ K8sPolicyUtils::createPolicy(
     const SingleIngressData &item) const
 {
     for (const IngressDefinedRule &rule : item.getSpec().getRules()) {
         string url = rule.getHost();
         for (const IngressRulePath &uri : rule.getPathsWrapper().getRulePaths()) {
             if (!appsec_policy.getAppsecPolicySpec().isAssetHostExist(url + uri.getPath())) {
                 dbgTrace(D_LOCAL_POLICY)
                     << "Inserting Host data to the specific asset set:"
                     << "URL: '"
                     << url
                     << "' uri: '"
                     << uri.getPath()
                     << "'";
                 K ingress_rule = K(url + uri.getPath());
                 appsec_policy.addSpecificRule(ingress_rule);
             }
         }
-        policies[annotations_values[AnnotationKeys::PolicyKey]] = appsec_policy;
     }
+    policies[annotations_values[AnnotationKeys::PolicyKey]] = appsec_policy;
 }

@@ -26,13 +26,16 @@ NewParsedRule::load(cereal::JSONInputArchive &archive_in)
     dbgTrace(D_LOCAL_POLICY) << "Loading AppSec NewParsedRule";
     parseAppsecJSONKey<vector<string>>("exceptions", exceptions, archive_in);
     parseAppsecJSONKey<vector<string>>("triggers", log_triggers, archive_in);
-    parseAppsecJSONKey<vector<string>>("threatPreventionPractices", threat_prevention_practices, archive_in);
-    parseAppsecJSONKey<vector<string>>("accessControlPractices", access_control_practices, archive_in);
+    parseMandatoryAppsecJSONKey<vector<string>>(
+        "threatPreventionPractices",
+        threat_prevention_practices,
+        archive_in);
+    parseMandatoryAppsecJSONKey<vector<string>>("accessControlPractices", access_control_practices, archive_in);
     parseAppsecJSONKey<string>("mode", mode, archive_in);
     if (valid_modes.count(mode) == 0) {
-        dbgWarning(D_LOCAL_POLICY) << "AppSec New Parsed Rule mode invalid: " << mode;
+        throw PolicyGenException("AppSec New Parsed Rule mode invalid: " + mode);
     }
-    parseAppsecJSONKey<string>("customResponse", custom_response, archive_in);
+    parseAppsecJSONKey<string>("customResponse", custom_response, archive_in, "403");
     parseAppsecJSONKey<string>("sourceIdentifiers", source_identifiers, archive_in);
     parseAppsecJSONKey<string>("trustedSources", trusted_sources, archive_in);
     parseAppsecJSONKey<string>("autoUpgrade", upgrade_settings, archive_in);
@@ -21,7 +21,11 @@ using namespace std;
 USE_DEBUG_FLAG(D_LOCAL_POLICY);
 // LCOV_EXCL_START Reason: no test exist

-static const set<string> valid_modes = {"block-page", "response-code-only", "redirect"};
+static const map<string, string> mode_to_appsec_mode_val = {
+    {"block-page", "Redirect"},
+    {"response-code-only", "Response Code"},
+    {"redirect", "Redirect"}
+};

 void
 NewAppSecCustomResponse::load(cereal::JSONInputArchive &archive_in)
@@ -32,13 +36,10 @@ NewAppSecCustomResponse::load(cereal::JSONInputArchive &archive_in)
     if (http_response_code < MIN_RESPONSE_CODE || http_response_code > MAX_RESPOMSE_CODE) {
         dbgWarning(D_LOCAL_POLICY) << "AppSec web user response code invalid: " << http_response_code;
     }
-    parseAppsecJSONKey<string>("mode", mode, archive_in, "block-page");
-    if (valid_modes.count(mode) == 0) {
-        dbgWarning(D_LOCAL_POLICY) << "AppSec web user response mode invalid: " << mode;
-    }
+    parseMandatoryAppsecJSONKey<string>("mode", mode, archive_in, "response-code-only");
     parseAppsecJSONKey<string>("name", name, archive_in);
     parseAppsecJSONKey<string>("redirectUrl", redirect_url, archive_in);
-    parseAppsecJSONKey<bool>("redirectAddXEventId", redirect_add_x_event_id, archive_in);
+    parseAppsecJSONKey<bool>("redirectAddXEventId", redirect_add_x_event_id, archive_in, false);
     if (mode == "block-page") {
         parseAppsecJSONKey<string>(
             "messageBody",
@@ -53,6 +54,12 @@ NewAppSecCustomResponse::load(cereal::JSONInputArchive &archive_in)
             "Attack blocked by web application protection"
         );
     }
+    if (mode_to_appsec_mode_val.find(mode) == mode_to_appsec_mode_val.end()) {
+        dbgWarning(D_LOCAL_POLICY) << "AppSec web user response mode invalid: " << mode;
+        mode = "Response Code";
+    } else {
+        mode = mode_to_appsec_mode_val.at(mode);
+    }
 }

 void
@@ -23,9 +23,9 @@ static const set<string> valid_actions = {"skip", "accept", "drop", "suppressLog
 void
 NewAppsecExceptionCondition::load(cereal::JSONInputArchive &archive_in)
 {
-    parseAppsecJSONKey<string>("key", key, archive_in);
-    parseAppsecJSONKey<string>("value", value, archive_in);
-    dbgTrace(D_LOCAL_POLICY) << "Key: " << key << " Value: " << value;
+    parseMandatoryAppsecJSONKey<string>("key", key, archive_in);
+    parseMandatoryAppsecJSONKey<string>("value", value, archive_in);
+    dbgTrace(D_LOCAL_POLICY) << "Parsed exception condition: Key: " << key << " Value: " << value;
 }

 const string &
@@ -45,12 +45,17 @@ NewAppsecException::load(cereal::JSONInputArchive &archive_in)
 {
     dbgTrace(D_LOCAL_POLICY) << "Loading New AppSec exception";
     parseAppsecJSONKey<string>("name", name, archive_in, "exception");
-    parseAppsecJSONKey<string>("action", action, archive_in);
+    parseMandatoryAppsecJSONKey<string>("action", action, archive_in, "accept");
     parseAppsecJSONKey<string>("appsecClassName", appsec_class_name, archive_in);
     if (valid_actions.count(action) == 0) {
         dbgWarning(D_LOCAL_POLICY) << "AppSec exception action invalid: " << action;
+        action = "accept";
+    }
+    parseMandatoryAppsecJSONKey<vector<NewAppsecExceptionCondition>>("condition", conditions, archive_in);
+    if (conditions.empty()) {
+        dbgWarning(D_LOCAL_POLICY) << "AppSec exception conditions empty";
+        throw PolicyGenException("AppSec exception conditions empty");
     }
-    parseAppsecJSONKey<vector<NewAppsecExceptionCondition>>("condition", conditions, archive_in);
 }

 void
@@ -43,6 +43,10 @@ NewAppsecTriggerAdditionalSuspiciousEventsLogging::load(cereal::JSONInputArchive
         dbgWarning(D_LOCAL_POLICY)
             << "AppSec AppSec Trigger - Additional Suspicious Events Logging minimum severity invalid: "
             << minimum_severity;
+        throw PolicyGenException(
+            "AppSec AppSec Trigger - Additional Suspicious Events Logging minimum severity invalid: "
+            + minimum_severity
+        );
     }
 }

@@ -132,6 +136,7 @@ NewLoggingService::load(cereal::JSONInputArchive &archive_in)
     parseAppsecJSONKey<string>("proto", proto, archive_in);
     if (valid_protocols.count(proto) == 0) {
         dbgWarning(D_LOCAL_POLICY) << "AppSec Logging Service - proto invalid: " << proto;
+        throw PolicyGenException("AppSec Logging Service - proto invalid: " + proto);
     }

     parseAppsecJSONKey<int>("port", port, archive_in, 514);
@@ -156,6 +161,7 @@ NewStdoutLogging::load(cereal::JSONInputArchive &archive_in)
     parseAppsecJSONKey<string>("format", format, archive_in, "json");
     if (valid_formats.count(format) == 0) {
         dbgWarning(D_LOCAL_POLICY) << "AppSec Stdout Logging - format invalid: " << format;
+        throw PolicyGenException("AppSec Stdout Logging - format invalid: " + format);
     }
 }

@@ -261,19 +267,19 @@ NewAppsecLogTrigger::load(cereal::JSONInputArchive &archive_in)
 {
     dbgTrace(D_LOCAL_POLICY) << "Loading AppSec log trigger";
     parseAppsecJSONKey<string>("appsecClassName", appsec_class_name, archive_in);
-    parseAppsecJSONKey<NewAppsecTriggerAccessControlLogging>(
+    parseMandatoryAppsecJSONKey<NewAppsecTriggerAccessControlLogging>(
         "accessControlLogging",
         access_control_logging,
         archive_in
     );
-    parseAppsecJSONKey<NewAppsecTriggerAdditionalSuspiciousEventsLogging>(
+    parseMandatoryAppsecJSONKey<NewAppsecTriggerAdditionalSuspiciousEventsLogging>(
         "additionalSuspiciousEventsLogging",
         additional_suspicious_events_logging,
         archive_in
     );
-    parseAppsecJSONKey<NewAppsecTriggerLogging>("appsecLogging", appsec_logging, archive_in);
-    parseAppsecJSONKey<NewAppsecTriggerExtendedLogging>("extendedLogging", extended_logging, archive_in);
-    parseAppsecJSONKey<NewAppsecTriggerLogDestination>("logDestination", log_destination, archive_in);
+    parseMandatoryAppsecJSONKey<NewAppsecTriggerLogging>("appsecLogging", appsec_logging, archive_in);
+    parseMandatoryAppsecJSONKey<NewAppsecTriggerExtendedLogging>("extendedLogging", extended_logging, archive_in);
+    parseMandatoryAppsecJSONKey<NewAppsecTriggerLogDestination>("logDestination", log_destination, archive_in);
     parseAppsecJSONKey<string>("name", name, archive_in);
 }

@@ -162,11 +162,12 @@ NewAppSecPracticeWebAttacks::load(cereal::JSONInputArchive &archive_in)
     }

     if (getMode() == "Prevent") {
-        parseAppsecJSONKey<string>("minimumConfidence", minimum_confidence, archive_in, "critical");
+        parseMandatoryAppsecJSONKey<string>("minimumConfidence", minimum_confidence, archive_in, "critical");
         if (valid_confidences.count(minimum_confidence) == 0) {
             dbgWarning(D_LOCAL_POLICY)
                 << "AppSec practice override minimum confidence invalid: "
                 << minimum_confidence;
+            throw PolicyGenException("AppSec practice override minimum confidence invalid: " + minimum_confidence);
         }
     } else {
         minimum_confidence = "Transparent";
@@ -440,11 +441,12 @@ NewSnortSignaturesAndOpenSchemaAPI::load(cereal::JSONInputArchive &archive_in)
 {
     dbgTrace(D_LOCAL_POLICY) << "Loading AppSec Snort Signatures practice";
     parseAppsecJSONKey<string>("overrideMode", override_mode, archive_in, "inactive");
-    parseAppsecJSONKey<vector<string>>("configmap", config_map, archive_in);
+    parseMandatoryAppsecJSONKey<vector<string>>("configmap", config_map, archive_in);
     parseAppsecJSONKey<vector<string>>("files", files, archive_in);
     is_temporary = false;
     if (valid_modes.count(override_mode) == 0) {
         dbgWarning(D_LOCAL_POLICY) << "AppSec Snort Signatures override mode invalid: " << override_mode;
+        throw PolicyGenException("AppSec Snort Signatures override mode invalid: " + override_mode);
     }
 }

@@ -567,12 +569,16 @@ NewIntrusionPrevention::load(cereal::JSONInputArchive &archive_in)
     parseAppsecJSONKey<string>("overrideMode", override_mode, archive_in, "inactive");
     if (valid_modes.count(override_mode) == 0) {
         dbgWarning(D_LOCAL_POLICY) << "AppSec Intrusion Prevention override mode invalid: " << override_mode;
+        throw PolicyGenException("AppSec Intrusion Prevention override mode invalid: " + override_mode);
     }
-    parseAppsecJSONKey<string>("maxPerformanceImpact", max_performance_impact, archive_in, "low");
+    parseAppsecJSONKey<string>("maxPerformanceImpact", max_performance_impact, archive_in, "medium");
     if (performance_impacts.count(max_performance_impact) == 0) {
         dbgWarning(D_LOCAL_POLICY)
             << "AppSec Intrusion Prevention max performance impact invalid: "
             << max_performance_impact;
+        throw PolicyGenException(
+            "AppSec Intrusion Prevention max performance impact invalid: " + max_performance_impact
+        );
     }
     parseAppsecJSONKey<string>("minSeverityLevel", min_severity_level, archive_in, "low");
     if (severity_levels.count(min_severity_level) == 0) {
@@ -580,23 +586,32 @@ NewIntrusionPrevention::load(cereal::JSONInputArchive &archive_in)
             << "AppSec Intrusion Prevention min severity level invalid: "
             << min_severity_level;
     }
-    parseAppsecJSONKey<string>("highConfidenceEventAction", high_confidence_event_action, archive_in, "inactive");
+    parseAppsecJSONKey<string>("highConfidenceEventAction", high_confidence_event_action, archive_in, "prevent");
     if (confidences_actions.count(high_confidence_event_action) == 0) {
         dbgWarning(D_LOCAL_POLICY)
             << "AppSec Intrusion Prevention high confidence event invalid: "
             << high_confidence_event_action;
+        throw PolicyGenException(
+            "AppSec Intrusion Prevention high confidence event invalid: " + high_confidence_event_action
+        );
     }
-    parseAppsecJSONKey<string>("mediumConfidenceEventAction", medium_confidence_event_action, archive_in, "inactive");
+    parseAppsecJSONKey<string>("mediumConfidenceEventAction", medium_confidence_event_action, archive_in, "prevent");
     if (confidences_actions.count(medium_confidence_event_action) == 0) {
         dbgWarning(D_LOCAL_POLICY)
             << "AppSec Intrusion Prevention medium confidence event invalid: "
             << medium_confidence_event_action;
+        throw PolicyGenException(
+            "AppSec Intrusion Prevention medium confidence event invalid: " + medium_confidence_event_action
+        );
     }
-    parseAppsecJSONKey<string>("lowConfidenceEventAction", low_confidence_event_action, archive_in, "inactive");
+    parseAppsecJSONKey<string>("lowConfidenceEventAction", low_confidence_event_action, archive_in, "detect");
     if (confidences_actions.count(low_confidence_event_action) == 0) {
         dbgWarning(D_LOCAL_POLICY)
             << "AppSec Intrusion Prevention low confidence event action invalid: "
             << low_confidence_event_action;
+        throw PolicyGenException(
+            "AppSec Intrusion Prevention low confidence event action invalid: " + low_confidence_event_action
+        );
     }
     parseAppsecJSONKey<int>("minCveYear", min_cve_Year, archive_in);
 }
@@ -733,29 +748,36 @@ void
 NewFileSecurityArchiveInspection::load(cereal::JSONInputArchive &archive_in)
 {
     dbgTrace(D_LOCAL_POLICY) << "Loading AppSec File Security Archive Inspection practice";
-    parseAppsecJSONKey<bool>("extractArchiveFiles", extract_archive_files, archive_in);
-    parseAppsecJSONKey<uint64_t>("scanMaxFileSize", scan_max_file_size, archive_in, 0);
-    parseAppsecJSONKey<string>("scanMaxFileSizeUnit", scan_max_file_size_unit, archive_in, "bytes");
+    parseAppsecJSONKey<bool>("extractArchiveFiles", extract_archive_files, archive_in, true);
+    parseAppsecJSONKey<uint64_t>("scanMaxFileSize", scan_max_file_size, archive_in, 10);
+    parseAppsecJSONKey<string>("scanMaxFileSizeUnit", scan_max_file_size_unit, archive_in, "MB");
     if (size_unit.count(scan_max_file_size_unit) == 0) {
         dbgWarning(D_LOCAL_POLICY)
             << "AppSec File Security Archive Inspection scan max file size unit invalid: "
             << scan_max_file_size_unit;
+        throw PolicyGenException(
+            "AppSec File Security Archive Inspection scan max file size unit invalid: " + scan_max_file_size_unit
+        );
     }
     parseAppsecJSONKey<string>(
         "archivedFilesWithinArchivedFiles",
         archived_files_within_archived_files,
         archive_in,
-        "inactive");
+        "prevent");
     if (confidences_actions.count(archived_files_within_archived_files) == 0) {
         dbgWarning(D_LOCAL_POLICY)
             << "AppSec File Security Archive Inspection archived files within archived files invalid: "
             << archived_files_within_archived_files;
+        throw PolicyGenException(
+            "AppSec File Security Archive Inspection archived files within archived files invalid: "
+            + archived_files_within_archived_files
+        );
     }
     parseAppsecJSONKey<string>(
         "archivedFilesWhereContentExtractionFailed",
         archived_files_where_content_extraction_failed,
         archive_in,
-        "inactive");
+        "prevent");
     if (confidences_actions.count(archived_files_where_content_extraction_failed) == 0) {
         dbgWarning(D_LOCAL_POLICY)
             << "AppSec File Security Archive Inspection archived files within archived file invalid: "
@@ -798,22 +820,29 @@ void
 NewFileSecurityLargeFileInspection::load(cereal::JSONInputArchive &archive_in)
 {
     dbgTrace(D_LOCAL_POLICY) << "Loading AppSec File Security large File Inspection practice";
-    parseAppsecJSONKey<uint64_t>("fileSizeLimit", file_size_limit, archive_in);
-    parseAppsecJSONKey<string>("fileSizeLimitUnit", file_size_limit_unit, archive_in, "bytes");
+    parseAppsecJSONKey<uint64_t>("fileSizeLimit", file_size_limit, archive_in, 10);
+    parseAppsecJSONKey<string>("fileSizeLimitUnit", file_size_limit_unit, archive_in, "MB");
     if (size_unit.count(file_size_limit_unit) == 0) {
         dbgWarning(D_LOCAL_POLICY)
             << "AppSec File Security large File Inspection file size limit unit invalid: "
             << file_size_limit_unit;
+        throw PolicyGenException(
+            "AppSec File Security large File Inspection file size limit unit invalid: " + file_size_limit_unit
+        );
     }
     parseAppsecJSONKey<string>(
         "filesExceedingSizeLimitAction",
         files_exceeding_size_limit_action,
         archive_in,
-        "inactive");
+        "prevent");
     if (confidences_actions.count(files_exceeding_size_limit_action) == 0) {
         dbgWarning(D_LOCAL_POLICY)
             << "AppSec File Security Archive Inspection archived files within archived files invalid: "
             << files_exceeding_size_limit_action;
+        throw PolicyGenException(
+            "AppSec File Security Archive Inspection archived files within archived files invalid: "
+            + files_exceeding_size_limit_action
+        );
     }
 }

@@ -843,38 +872,52 @@ NewFileSecurity::load(cereal::JSONInputArchive &archive_in)
     parseAppsecJSONKey<string>("overrideMode", override_mode, archive_in, "inactive");
     if (valid_modes.count(override_mode) == 0) {
         dbgWarning(D_LOCAL_POLICY) << "AppSec File Security override mode invalid: " << override_mode;
+        throw PolicyGenException("AppSec File Security override mode invalid: " + override_mode);
     }
-    parseAppsecJSONKey<string>("minSeverityLevel", min_severity_level, archive_in, "low");
+    parseMandatoryAppsecJSONKey<string>("minSeverityLevel", min_severity_level, archive_in, "low");
     if (severity_levels.count(min_severity_level) == 0) {
         dbgWarning(D_LOCAL_POLICY) << "AppSec File Security min severity level invalid: " << min_severity_level;
+        min_severity_level = "low";
     }
-    parseAppsecJSONKey<string>("highConfidenceEventAction", high_confidence_event_action, archive_in, "inactive");
+    parseMandatoryAppsecJSONKey<string>(
+        "highConfidenceEventAction", high_confidence_event_action, archive_in, "inactive"
+    );
     if (confidences_actions.count(high_confidence_event_action) == 0) {
         dbgWarning(D_LOCAL_POLICY)
             << "AppSec File Security high confidence event invalid: "
             << high_confidence_event_action;
+        high_confidence_event_action = "inactive";
     }
-    parseAppsecJSONKey<string>("mediumConfidenceEventAction", medium_confidence_event_action, archive_in, "inactive");
+    parseMandatoryAppsecJSONKey<string>(
+        "mediumConfidenceEventAction", medium_confidence_event_action, archive_in, "inactive"
+    );
     if (confidences_actions.count(medium_confidence_event_action) == 0) {
         dbgWarning(D_LOCAL_POLICY)
             << "AppSec File Security medium confidence event invalid: "
             << medium_confidence_event_action;
+        medium_confidence_event_action = "inactive";
     }
-    parseAppsecJSONKey<string>("lowConfidenceEventAction", low_confidence_event_action, archive_in, "inactive");
+    parseMandatoryAppsecJSONKey<string>(
+        "lowConfidenceEventAction", low_confidence_event_action, archive_in, "inactive"
+    );
     if (confidences_actions.count(low_confidence_event_action) == 0) {
         dbgWarning(D_LOCAL_POLICY)
             << "AppSec File Security low confidence event action invalid: "
             << low_confidence_event_action;
+        low_confidence_event_action = "inactive";
     }
-    parseAppsecJSONKey<string>("unnamedFilesAction", unnamed_files_action, archive_in, "inactive");
+    parseMandatoryAppsecJSONKey<string>("unnamedFilesAction", unnamed_files_action, archive_in, "inactive");
     if (confidences_actions.count(unnamed_files_action) == 0) {
         dbgWarning(D_LOCAL_POLICY)
             << "AppSec File Security low unnamed files action invalid: "
             << unnamed_files_action;
+        unnamed_files_action = "inactive";
     }
     parseAppsecJSONKey<bool>("threatEmulationEnabled", threat_emulation_enabled, archive_in);
-    parseAppsecJSONKey<NewFileSecurityArchiveInspection>("archiveInspection", archive_inspection, archive_in);
-    parseAppsecJSONKey<NewFileSecurityLargeFileInspection>("largeFileInspection", large_file_inspection, archive_in);
+    parseMandatoryAppsecJSONKey<NewFileSecurityArchiveInspection>("archiveInspection", archive_inspection, archive_in);
+    parseMandatoryAppsecJSONKey<NewFileSecurityLargeFileInspection>(
+        "largeFileInspection", large_file_inspection, archive_in
+    );
 }

 const string &
@@ -939,7 +982,7 @@ NewAppSecPracticeSpec::load(cereal::JSONInputArchive &archive_in)
     parseAppsecJSONKey<NewFileSecurity>("fileSecurity", file_security, archive_in);
     parseAppsecJSONKey<NewIntrusionPrevention>("intrusionPrevention", intrusion_prevention, archive_in);
     parseAppsecJSONKey<NewSnortSignaturesAndOpenSchemaAPI>("snortSignatures", snort_signatures, archive_in);
-    parseAppsecJSONKey<NewAppSecPracticeWebAttacks>("webAttacks", web_attacks, archive_in);
+    parseMandatoryAppsecJSONKey<NewAppSecPracticeWebAttacks>("webAttacks", web_attacks, archive_in);
     parseAppsecJSONKey<NewAppSecPracticeAntiBot>("antiBot", anti_bot, archive_in);
     parseAppsecJSONKey<string>("name", practice_name, archive_in);
 }
@ -25,8 +25,8 @@ NewTrustedSourcesSpec::load(cereal::JSONInputArchive &archive_in)
|
|||||||
{
|
{
|
||||||
dbgTrace(D_LOCAL_POLICY) << "Loading trusted sources spec";
|
dbgTrace(D_LOCAL_POLICY) << "Loading trusted sources spec";
|
||||||
parseAppsecJSONKey<string>("appsecClassName", appsec_class_name, archive_in);
|
parseAppsecJSONKey<string>("appsecClassName", appsec_class_name, archive_in);
|
||||||
parseAppsecJSONKey<int>("minNumOfSources", min_num_of_sources, archive_in, 3);
|
parseMandatoryAppsecJSONKey<int>("minNumOfSources", min_num_of_sources, archive_in, 3);
|
||||||
parseAppsecJSONKey<vector<string>>("sourcesIdentifiers", sources_identifiers, archive_in);
|
parseMandatoryAppsecJSONKey<vector<string>>("sourcesIdentifiers", sources_identifiers, archive_in);
|
||||||
parseAppsecJSONKey<string>("name", name, archive_in);
|
parseAppsecJSONKey<string>("name", name, archive_in);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -64,11 +64,12 @@ void
|
|||||||
Identifier::load(cereal::JSONInputArchive &archive_in)
|
Identifier::load(cereal::JSONInputArchive &archive_in)
|
||||||
{
|
{
|
||||||
dbgTrace(D_LOCAL_POLICY) << "Loading source identifiers spec";
|
dbgTrace(D_LOCAL_POLICY) << "Loading source identifiers spec";
|
||||||
parseAppsecJSONKey<string>("identifier", identifier, archive_in);
|
parseMandatoryAppsecJSONKey<string>("identifier", identifier, archive_in, "sourceip");
|
||||||
if (valid_identifiers.count(identifier) == 0) {
|
if (valid_identifiers.count(identifier) == 0) {
|
||||||
dbgWarning(D_LOCAL_POLICY) << "AppSec identifier invalid: " << identifier;
|
dbgWarning(D_LOCAL_POLICY) << "AppSec identifier invalid: " << identifier;
|
||||||
|
identifier = "sourceip";
|
||||||
}
|
}
|
||||||
parseAppsecJSONKey<vector<string>>("value", value, archive_in);
|
parseMandatoryAppsecJSONKey<vector<string>>("value", value, archive_in);
|
||||||
}
|
}
|
||||||
|
|
||||||
const string &
|
const string &
|
||||||
@ -88,7 +89,11 @@ NewSourcesIdentifiers::load(cereal::JSONInputArchive &archive_in)
|
|||||||
{
|
{
|
||||||
dbgTrace(D_LOCAL_POLICY) << "Loading Sources Identifiers";
|
dbgTrace(D_LOCAL_POLICY) << "Loading Sources Identifiers";
|
||||||
parseAppsecJSONKey<string>("appsecClassName", appsec_class_name, archive_in);
|
parseAppsecJSONKey<string>("appsecClassName", appsec_class_name, archive_in);
|
||||||
parseAppsecJSONKey<vector<Identifier>>("sourcesIdentifiers", sources_identifiers, archive_in);
|
parseMandatoryAppsecJSONKey<vector<Identifier>>("sourcesIdentifiers", sources_identifiers, archive_in);
|
||||||
|
if (sources_identifiers.empty()) {
|
||||||
|
dbgWarning(D_LOCAL_POLICY) << "AppSec sources identifiers empty";
|
||||||
|
throw PolicyGenException("AppSec sources identifiers empty");
|
||||||
|
}
|
||||||
parseAppsecJSONKey<string>("name", name, archive_in);
|
parseAppsecJSONKey<string>("name", name, archive_in);
|
||||||
}
|
}
|
||||||
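sourcesIdentifiers is now a mandatory key, and an empty list is promoted from a warning to a hard failure via PolicyGenException. A small sketch of that guard, assuming PolicyGenException behaves like a std::runtime_error carrying a message (consistent with the catch of e.what() later in this diff):

    #include <stdexcept>
    #include <string>
    #include <vector>

    // Sketch-only exception type; the real PolicyGenException is assumed to behave like this.
    struct PolicyGenException : std::runtime_error
    {
        using std::runtime_error::runtime_error;
    };

    void
    validateSourcesIdentifiers(const std::vector<std::string> &sources_identifiers)
    {
        // An empty identifiers list now aborts policy generation instead of only warning.
        if (sources_identifiers.empty()) throw PolicyGenException("AppSec sources identifiers empty");
    }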
|
|
||||||
|
@ -1649,36 +1649,39 @@ PolicyMakerUtils::proccesSingleAppsecPolicy(
|
|||||||
const string &local_appsec_policy_path)
|
const string &local_appsec_policy_path)
|
||||||
{
|
{
|
||||||
|
|
||||||
Maybe<V1beta2AppsecLinuxPolicy> maybe_policy_v1beta2 = openFileAsJson<V1beta2AppsecLinuxPolicy>(policy_path);
|
try {
|
||||||
if (maybe_policy_v1beta2.ok()) {
|
Maybe<V1beta2AppsecLinuxPolicy> maybe_policy_v1beta2 = openFileAsJson<V1beta2AppsecLinuxPolicy>(policy_path);
|
||||||
policy_version_name = "v1beta2";
|
if (maybe_policy_v1beta2.ok()) {
|
||||||
createAgentPolicyFromAppsecPolicy<V1beta2AppsecLinuxPolicy, NewParsedRule>(
|
policy_version_name = "v1beta2";
|
||||||
getPolicyName(policy_path),
|
createAgentPolicyFromAppsecPolicy<V1beta2AppsecLinuxPolicy, NewParsedRule>(
|
||||||
maybe_policy_v1beta2.unpack()
|
getPolicyName(policy_path), maybe_policy_v1beta2.unpack()
|
||||||
);
|
);
|
||||||
} else {
|
} else {
|
||||||
policy_version_name = "v1beta1";
|
policy_version_name = "v1beta1";
|
||||||
dbgInfo(D_LOCAL_POLICY)
|
dbgInfo(D_LOCAL_POLICY
|
||||||
<< "Failed to retrieve AppSec local policy with version: v1beta2, Trying version: v1beta1";
|
) << "Failed to retrieve AppSec local policy with version: v1beta2, Trying version: v1beta1";
|
||||||
|
|
||||||
Maybe<AppsecLinuxPolicy> maybe_policy_v1beta1 = openFileAsJson<AppsecLinuxPolicy>(policy_path);
|
Maybe<AppsecLinuxPolicy> maybe_policy_v1beta1 = openFileAsJson<AppsecLinuxPolicy>(policy_path);
|
||||||
if (!maybe_policy_v1beta1.ok()){
|
if (!maybe_policy_v1beta1.ok()) {
|
||||||
dbgWarning(D_LOCAL_POLICY) << maybe_policy_v1beta1.getErr();
|
dbgWarning(D_LOCAL_POLICY) << maybe_policy_v1beta1.getErr();
|
||||||
return "";
|
return "";
|
||||||
|
}
|
||||||
|
createAgentPolicyFromAppsecPolicy<AppsecLinuxPolicy, ParsedRule>(
|
||||||
|
getPolicyName(policy_path), maybe_policy_v1beta1.unpack()
|
||||||
|
);
|
||||||
|
|
||||||
|
if (getenv("OPENAPPSEC_STANDALONE")) rpmBuildNginxServers(maybe_policy_v1beta1.unpack());
|
||||||
}
|
}
|
||||||
createAgentPolicyFromAppsecPolicy<AppsecLinuxPolicy, ParsedRule>(
|
|
||||||
getPolicyName(policy_path),
|
PolicyWrapper policy_wrapper = combineElementsToPolicy(policy_version);
|
||||||
maybe_policy_v1beta1.unpack()
|
return dumpPolicyToFile(
|
||||||
|
policy_wrapper,
|
||||||
|
local_appsec_policy_path
|
||||||
);
|
);
|
||||||
|
} catch (const PolicyGenException &e) {
|
||||||
if (getenv("OPENAPPSEC_STANDALONE")) rpmBuildNginxServers(maybe_policy_v1beta1.unpack());
|
dbgDebug(D_LOCAL_POLICY) << "Policy generation failed. Error: " << e.what();
|
||||||
|
return "";
|
||||||
}
|
}
|
||||||
|
|
||||||
PolicyWrapper policy_wrapper = combineElementsToPolicy(policy_version);
|
|
||||||
return dumpPolicyToFile(
|
|
||||||
policy_wrapper,
|
|
||||||
local_appsec_policy_path
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
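proccesSingleAppsecPolicy now wraps the whole generation path in a try block: v1beta2 is attempted first, v1beta1 is the fallback, and any PolicyGenException thrown by the mandatory-key parsers aborts generation and returns an empty path instead of emitting a partial policy. A condensed, compilable sketch of that control flow with hypothetical helper names:

    #include <stdexcept>
    #include <string>

    // Hypothetical stand-ins for the real loaders and writer used in the hunk above.
    struct PolicyGenException : std::runtime_error { using std::runtime_error::runtime_error; };
    static bool loadV1beta2(const std::string &) { return false; }
    static bool loadV1beta1(const std::string &) { return true; }
    static std::string dumpToFile(const std::string &path) { return path; }

    std::string
    generatePolicyOrEmpty(const std::string &policy_path, const std::string &out_path)
    {
        try {
            // Prefer v1beta2 and fall back to v1beta1; give up if neither parses.
            if (!loadV1beta2(policy_path) && !loadV1beta1(policy_path)) return "";
            return dumpToFile(out_path);
        } catch (const PolicyGenException &) {
            // Mandatory-key failures raised while parsing land here; no partial policy is written.
            return "";
        }
    }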
|
|
||||||
void
|
void
|
||||||
|
@ -174,8 +174,12 @@ RulesConfigRulebase::RulesConfigRulebase(
|
|||||||
context ="All()";
|
context ="All()";
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
string host_check = "Any(EqualHost(" + _url + ")),";
|
bool uri_regex = false;
|
||||||
string uri_check = (_uri.empty() || _uri == "/" ) ? "" : ",BeginWithUri(" + _uri + ")";
|
if (std::find(_uri.begin(), _uri.end(), '*') != _uri.end()) {
|
||||||
|
uri_regex = true;
|
||||||
|
}
|
||||||
|
string host_check = (_url.empty() || _url == "/") ? "" : "Any(EqualHost(" + _url + ")),";
|
||||||
|
string uri_check = (_uri.empty() || _uri == "/" || uri_regex ) ? "" : ",BeginWithUri(" + _uri + ")";
|
||||||
auto ports = _port.empty() ? vector<string>({"80", "443"}) : vector<string>({_port});
|
auto ports = _port.empty() ? vector<string>({"80", "443"}) : vector<string>({_port});
|
||||||
context = "Any(";
|
context = "Any(";
|
||||||
for (auto &port : ports) {
|
for (auto &port : ports) {
|
||||||
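The rule-context hunk above changes how the match expression is assembled: a '*' anywhere in the URI marks it as a pattern and suppresses the BeginWithUri() clause, and an empty or "/" host now suppresses the EqualHost() clause as well. A sketch of just those two checks (the real code then combines them with the per-port conditions):

    #include <algorithm>
    #include <string>

    std::string
    buildHostAndUriChecks(const std::string &url, const std::string &uri)
    {
        bool uri_regex = std::find(uri.begin(), uri.end(), '*') != uri.end();
        std::string host_check = (url.empty() || url == "/") ? "" : "Any(EqualHost(" + url + ")),";
        std::string uri_check = (uri.empty() || uri == "/" || uri_regex) ? "" : ",BeginWithUri(" + uri + ")";
        return host_check + uri_check;
    }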
|
@ -52,15 +52,14 @@ void
|
|||||||
SettingsRulebase::save(cereal::JSONOutputArchive &out_ar) const
|
SettingsRulebase::save(cereal::JSONOutputArchive &out_ar) const
|
||||||
{
|
{
|
||||||
string profile_type = "Kubernetes";
|
string profile_type = "Kubernetes";
|
||||||
string upgrade_mode = "automatic";
|
|
||||||
out_ar(
|
out_ar(
|
||||||
cereal::make_nvp("agentSettings", agentSettings),
|
cereal::make_nvp("agentSettings", agentSettings),
|
||||||
cereal::make_nvp("agentType", profile_type),
|
cereal::make_nvp("agentType", profile_type),
|
||||||
cereal::make_nvp("allowOnlyDefinedApplications", false),
|
cereal::make_nvp("allowOnlyDefinedApplications", false),
|
||||||
cereal::make_nvp("anyFog", true),
|
cereal::make_nvp("anyFog", true),
|
||||||
cereal::make_nvp("maxNumberOfAgents", 10),
|
cereal::make_nvp("maxNumberOfAgents", 10)
|
||||||
cereal::make_nvp("upgradeMode", upgrade_mode)
|
|
||||||
);
|
);
|
||||||
|
upgrade_settings.save(out_ar);
|
||||||
}
|
}
|
||||||
|
|
||||||
SettingsWrapper::SettingsWrapper(SettingsRulebase _agent) : agent(_agent)
|
SettingsWrapper::SettingsWrapper(SettingsRulebase _agent) : agent(_agent)
|
||||||
|
@ -41,7 +41,7 @@ public:
|
|||||||
string getAgentVersion() override;
|
string getAgentVersion() override;
|
||||||
bool isKernelVersion3OrHigher() override;
|
bool isKernelVersion3OrHigher() override;
|
||||||
bool isGwNotVsx() override;
|
bool isGwNotVsx() override;
|
||||||
bool isVersionEqualOrAboveR8110() override;
|
bool isVersionAboveR8110() override;
|
||||||
bool isReverseProxy() override;
|
bool isReverseProxy() override;
|
||||||
Maybe<tuple<string, string, string>> parseNginxMetadata() override;
|
Maybe<tuple<string, string, string>> parseNginxMetadata() override;
|
||||||
#if defined(gaia) || defined(smb)
|
#if defined(gaia) || defined(smb)
|
||||||
@ -193,10 +193,12 @@ DetailsResolver::Impl::getCheckpointVersion() const
|
|||||||
#endif // gaia || smb
|
#endif // gaia || smb
|
||||||
|
|
||||||
bool
|
bool
|
||||||
DetailsResolver::Impl::isVersionEqualOrAboveR8110()
|
DetailsResolver::Impl::isVersionAboveR8110()
|
||||||
{
|
{
|
||||||
#if defined(gaia) || defined(smb)
|
#if defined(gaia)
|
||||||
return compareCheckpointVersion(8110, std::greater_equal<int>());
|
return compareCheckpointVersion(8110, std::greater<int>());
|
||||||
|
#elif defined(smb)
|
||||||
|
return true;
|
||||||
#endif
|
#endif
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
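The comparator swap above changes the semantics from "R81.10 or newer" to strictly newer than R81.10 on Gaia, while SMB now reports true unconditionally. A tiny illustration of the difference between the two functors (not the real compareCheckpointVersion, which reads the version from the running system):

    #include <functional>
    #include <iostream>

    int main()
    {
        int current = 8110;  // pretend the device reports exactly R81.10
        std::cout << std::greater<int>()(current, 8110) << '\n';        // 0: no longer counts as "above R81.10"
        std::cout << std::greater_equal<int>()(current, 8110) << '\n';  // 1: the old behaviour
        return 0;
    }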
|
@ -20,7 +20,7 @@
|
|||||||
#if defined(gaia)
|
#if defined(gaia)
|
||||||
|
|
||||||
Maybe<string>
|
Maybe<string>
|
||||||
checkHasSupportedBlade(const string &command_output)
|
checkSAMLSupportedBlade(const string &command_output)
|
||||||
{
|
{
|
||||||
string supportedBlades[3] = {"identityServer", "vpn", "cvpn"};
|
string supportedBlades[3] = {"identityServer", "vpn", "cvpn"};
|
||||||
for(const string &blade : supportedBlades) {
|
for(const string &blade : supportedBlades) {
|
||||||
@ -29,11 +29,11 @@ checkHasSupportedBlade(const string &command_output)
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return genError("Current host does not have IDA capability");
|
return genError("Current host does not have SAML capability");
|
||||||
}
|
}
|
||||||
|
|
||||||
Maybe<string>
|
Maybe<string>
|
||||||
checkSamlPortal(const string &command_output)
|
checkSAMLPortal(const string &command_output)
|
||||||
{
|
{
|
||||||
if (command_output.find("Portal is running") != string::npos) {
|
if (command_output.find("Portal is running") != string::npos) {
|
||||||
return string("true");
|
return string("true");
|
||||||
@ -43,9 +43,9 @@ checkSamlPortal(const string &command_output)
|
|||||||
}
|
}
|
||||||
|
|
||||||
Maybe<string>
|
Maybe<string>
|
||||||
getIDAGaia(const string &command_output)
|
getIDASSamlGaia(const string &command_output)
|
||||||
{
|
{
|
||||||
return string("ida_gaia");
|
return string("idaSaml_gaia");
|
||||||
}
|
}
|
||||||
|
|
||||||
Maybe<string>
|
Maybe<string>
|
||||||
@ -211,6 +211,15 @@ getClusterObjectIP(const string &command_output)
|
|||||||
return getAttr(command_output, "Cluster object IP was not found");
|
return getAttr(command_output, "Cluster object IP was not found");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
Maybe<string>
|
||||||
|
getFecApplicable(const string &command_output)
|
||||||
|
{
|
||||||
|
if (command_output == "0") return string("true");
|
||||||
|
if (command_output == "1") return string("false");
|
||||||
|
|
||||||
|
return genError("Could not determine if fec applicable");
|
||||||
|
}
|
||||||
|
|
||||||
Maybe<string>
|
Maybe<string>
|
||||||
getSmbObjectName(const string &command_output)
|
getSmbObjectName(const string &command_output)
|
||||||
{
|
{
|
||||||
|
@ -27,6 +27,15 @@
|
|||||||
// use SHELL_CMD_HANDLER(key as string, shell command as string, ptr to Maybe<string> handler(const string&))
|
// use SHELL_CMD_HANDLER(key as string, shell command as string, ptr to Maybe<string> handler(const string&))
|
||||||
// to return a string value for an attribute key based on a logic executed in a handler that receives
|
// to return a string value for an attribute key based on a logic executed in a handler that receives
|
||||||
// shell command execution output as its input
|
// shell command execution output as its input
|
||||||
|
|
||||||
|
#ifdef SHELL_PRE_CMD
|
||||||
|
#if defined(gaia) || defined(smb)
|
||||||
|
SHELL_PRE_CMD("read sdwan data",
|
||||||
|
"(cpsdwan get_data > /tmp/cpsdwan_getdata_orch.json~) "
|
||||||
|
"&& (mv /tmp/cpsdwan_getdata_orch.json~ /tmp/cpsdwan_getdata_orch.json)")
|
||||||
|
#endif
|
||||||
|
#endif
|
||||||
|
|
||||||
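The new SHELL_PRE_CMD entry caches the output of cpsdwan get_data into /tmp/cpsdwan_getdata_orch.json once, so the handlers below can read it with jq instead of invoking cpsdwan repeatedly. The entry is consumed with the same X-macro trick used for the handlers, roughly as in this sketch (mirroring the header change later in this diff; the real code pulls the entry in via #include rather than writing it inline):

    #include <map>
    #include <string>

    #define SHELL_PRE_CMD(NAME, COMMAND) {NAME, COMMAND},
    // One {name, command} pair per SHELL_PRE_CMD entry.
    std::map<std::string, std::string> shell_pre_commands = {
        SHELL_PRE_CMD("read sdwan data",
            "(cpsdwan get_data > /tmp/cpsdwan_getdata_orch.json~) "
            "&& (mv /tmp/cpsdwan_getdata_orch.json~ /tmp/cpsdwan_getdata_orch.json)")
    };
    #undef SHELL_PRE_CMD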
#ifdef SHELL_CMD_HANDLER
|
#ifdef SHELL_CMD_HANDLER
|
||||||
#if defined(gaia) || defined(smb)
|
#if defined(gaia) || defined(smb)
|
||||||
SHELL_CMD_HANDLER("cpProductIntegrationMgmtObjectType", "cpprod_util CPPROD_IsMgmtMachine", getMgmtObjType)
|
SHELL_CMD_HANDLER("cpProductIntegrationMgmtObjectType", "cpprod_util CPPROD_IsMgmtMachine", getMgmtObjType)
|
||||||
@ -41,7 +50,7 @@ SHELL_CMD_HANDLER("isCPotelcolGRET64",
|
|||||||
SHELL_CMD_HANDLER("hasSDWan", "[ -f $FWDIR/bin/sdwan_steering ] && echo '1' || echo '0'", checkHasSDWan)
|
SHELL_CMD_HANDLER("hasSDWan", "[ -f $FWDIR/bin/sdwan_steering ] && echo '1' || echo '0'", checkHasSDWan)
|
||||||
SHELL_CMD_HANDLER(
|
SHELL_CMD_HANDLER(
|
||||||
"canUpdateSDWanData",
|
"canUpdateSDWanData",
|
||||||
"CPSDWAN_NOLOGS=1 cpsdwan get_data -f can_update_sdwan_data | jq -r .can_update_sdwan_data",
|
"jq -r .can_update_sdwan_data /tmp/cpsdwan_getdata_orch.json",
|
||||||
checkCanUpdateSDWanData
|
checkCanUpdateSDWanData
|
||||||
)
|
)
|
||||||
SHELL_CMD_HANDLER(
|
SHELL_CMD_HANDLER(
|
||||||
@ -50,7 +59,8 @@ SHELL_CMD_HANDLER(
|
|||||||
checkIfSdwanRunning)
|
checkIfSdwanRunning)
|
||||||
SHELL_CMD_HANDLER(
|
SHELL_CMD_HANDLER(
|
||||||
"IP Address",
|
"IP Address",
|
||||||
"cpsdwan get_data | jq -r .main_ip",
|
"[ $(cpprod_util FWisDAG) -eq 1 ] && echo \"Dynamic Address\" "
|
||||||
|
"|| (jq -r .main_ip /tmp/cpsdwan_getdata_orch.json)",
|
||||||
getGWIPAddress
|
getGWIPAddress
|
||||||
)
|
)
|
||||||
SHELL_CMD_HANDLER(
|
SHELL_CMD_HANDLER(
|
||||||
@ -60,18 +70,23 @@ SHELL_CMD_HANDLER(
|
|||||||
)
|
)
|
||||||
SHELL_CMD_HANDLER(
|
SHELL_CMD_HANDLER(
|
||||||
"cpProductIntegrationMgmtParentObjectIP",
|
"cpProductIntegrationMgmtParentObjectIP",
|
||||||
"obj=\"$(cpsdwan get_data | jq -r .cluster_name)\";"
|
"obj=\"$(jq -r .cluster_name /tmp/cpsdwan_getdata_orch.json)\";"
|
||||||
" awk -v obj=\"$obj\" '$1 == \":\" && $2 == \"(\" obj, $1 == \":ip_address\" { if ($1 == \":ip_address\")"
|
" awk -v obj=\"$obj\" '$1 == \":\" && $2 == \"(\" obj, $1 == \":ip_address\" { if ($1 == \":ip_address\")"
|
||||||
" { gsub(/[()]/, \"\", $2); print $2; exit; } }'"
|
" { gsub(/[()]/, \"\", $2); print $2; exit; } }'"
|
||||||
" $FWDIR/state/local/FW1/local.gateway_cluster",
|
" $FWDIR/state/local/FW1/local.gateway_cluster",
|
||||||
getClusterObjectIP
|
getClusterObjectIP
|
||||||
)
|
)
|
||||||
|
SHELL_CMD_HANDLER(
|
||||||
|
"isFecApplicable",
|
||||||
|
"fw ctl get int support_fec |& grep -sq \"support_fec =\";echo $?",
|
||||||
|
getFecApplicable
|
||||||
|
)
|
||||||
#endif //gaia || smb
|
#endif //gaia || smb
|
||||||
|
|
||||||
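The isFecApplicable handler greps for the kernel parameter and echoes the grep exit status, so getFecApplicable only ever sees "0" or "1". A stand-alone version of that mapping (the real handler returns genError() for anything else):

    #include <iostream>
    #include <string>

    std::string
    fecApplicableFromExitCode(const std::string &command_output)
    {
        // grep exits 0 when "support_fec =" is present, so "0" means FEC is applicable.
        if (command_output == "0") return "true";
        if (command_output == "1") return "false";
        return "unknown";  // the real handler returns genError(...) here
    }

    int main()
    {
        std::cout << fecApplicableFromExitCode("0") << '\n';  // true
        std::cout << fecApplicableFromExitCode("1") << '\n';  // false
        return 0;
    }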
#if defined(gaia)
|
#if defined(gaia)
|
||||||
SHELL_CMD_HANDLER("hasSupportedBlade", "enabled_blades", checkHasSupportedBlade)
|
SHELL_CMD_HANDLER("hasSAMLSupportedBlade", "enabled_blades", checkSAMLSupportedBlade)
|
||||||
SHELL_CMD_HANDLER("hasSamlPortal", "mpclient status saml-vpn", checkSamlPortal)
|
SHELL_CMD_HANDLER("hasSAMLPortal", "mpclient status saml-vpn", checkSAMLPortal)
|
||||||
SHELL_CMD_HANDLER("requiredNanoServices", "ida_gaia", getIDAGaia)
|
SHELL_CMD_HANDLER("requiredNanoServices", "ida_saml_gaia", getIDASSamlGaia)
|
||||||
SHELL_CMD_HANDLER(
|
SHELL_CMD_HANDLER(
|
||||||
"cpProductIntegrationMgmtParentObjectName",
|
"cpProductIntegrationMgmtParentObjectName",
|
||||||
"cat $FWDIR/database/myself_objects.C "
|
"cat $FWDIR/database/myself_objects.C "
|
||||||
@ -109,12 +124,12 @@ SHELL_CMD_HANDLER(
|
|||||||
#if defined(smb)
|
#if defined(smb)
|
||||||
SHELL_CMD_HANDLER(
|
SHELL_CMD_HANDLER(
|
||||||
"cpProductIntegrationMgmtParentObjectName",
|
"cpProductIntegrationMgmtParentObjectName",
|
||||||
"cpsdwan get_data | jq -r .cluster_name",
|
"jq -r .cluster_name /tmp/cpsdwan_getdata_orch.json",
|
||||||
getSmbMgmtParentObjName
|
getSmbMgmtParentObjName
|
||||||
)
|
)
|
||||||
SHELL_CMD_HANDLER(
|
SHELL_CMD_HANDLER(
|
||||||
"cpProductIntegrationMgmtParentObjectUid",
|
"cpProductIntegrationMgmtParentObjectUid",
|
||||||
"cpsdwan get_data | jq -r .cluster_uuid",
|
"jq -r .cluster_uuid /tmp/cpsdwan_getdata_orch.json",
|
||||||
getSmbMgmtParentObjUid
|
getSmbMgmtParentObjUid
|
||||||
)
|
)
|
||||||
SHELL_CMD_HANDLER(
|
SHELL_CMD_HANDLER(
|
||||||
@ -150,7 +165,11 @@ SHELL_CMD_OUTPUT("helloWorld", "cat /tmp/agentHelloWorld 2>/dev/null")
|
|||||||
|
|
||||||
#if defined(gaia)
|
#if defined(gaia)
|
||||||
|
|
||||||
FILE_CONTENT_HANDLER("hasIdpConfigured", "/opt/CPSamlPortal/phpincs/spPortal/idpPolicy.xml", checkIDP)
|
FILE_CONTENT_HANDLER(
|
||||||
|
"hasIdpConfigured",
|
||||||
|
(getenv("SAMLPORTAL_HOME") ? string(getenv("SAMLPORTAL_HOME")) : "") + "/phpincs/spPortal/idpPolicy.xml",
|
||||||
|
checkIDP
|
||||||
|
)
|
||||||
FILE_CONTENT_HANDLER(
|
FILE_CONTENT_HANDLER(
|
||||||
"cpProductIntegrationMgmtObjectName",
|
"cpProductIntegrationMgmtObjectName",
|
||||||
(getenv("FWDIR") ? string(getenv("FWDIR")) : "") + "/database/myown.C",
|
(getenv("FWDIR") ? string(getenv("FWDIR")) : "") + "/database/myown.C",
|
||||||
|
@ -43,6 +43,12 @@ public:
|
|||||||
static Maybe<string> getCommandOutput(const string &cmd);
|
static Maybe<string> getCommandOutput(const string &cmd);
|
||||||
|
|
||||||
private:
|
private:
|
||||||
|
#define SHELL_PRE_CMD(NAME, COMMAND) {NAME, COMMAND},
|
||||||
|
map<string, string> shell_pre_commands = {
|
||||||
|
#include "details_resolver_impl.h"
|
||||||
|
};
|
||||||
|
#undef SHELL_PRE_CMD
|
||||||
|
|
||||||
#define SHELL_CMD_OUTPUT(ATTRIBUTE, COMMAND) SHELL_CMD_HANDLER(ATTRIBUTE, COMMAND, [](const string &s) { return s; })
|
#define SHELL_CMD_OUTPUT(ATTRIBUTE, COMMAND) SHELL_CMD_HANDLER(ATTRIBUTE, COMMAND, [](const string &s) { return s; })
|
||||||
#define SHELL_CMD_HANDLER(ATTRIBUTE, COMMAND, HANDLER) {ATTRIBUTE, {COMMAND, ShellCommandHandler(HANDLER)}},
|
#define SHELL_CMD_HANDLER(ATTRIBUTE, COMMAND, HANDLER) {ATTRIBUTE, {COMMAND, ShellCommandHandler(HANDLER)}},
|
||||||
map<string, pair<string, ShellCommandHandler>> shell_command_handlers = {
|
map<string, pair<string, ShellCommandHandler>> shell_command_handlers = {
|
||||||
@ -61,6 +67,21 @@ private:
|
|||||||
map<string, string>
|
map<string, string>
|
||||||
DetailsResolvingHanlder::Impl::getResolvedDetails() const
|
DetailsResolvingHanlder::Impl::getResolvedDetails() const
|
||||||
{
|
{
|
||||||
|
I_ShellCmd *shell = Singleton::Consume<I_ShellCmd>::by<DetailsResolvingHanlder>();
|
||||||
|
uint32_t timeout = getConfigurationWithDefault<uint32_t>(5000, "orchestration", "Details resolver time out");
|
||||||
|
|
||||||
|
for (auto &shell_pre_command : shell_pre_commands) {
|
||||||
|
const string &name = shell_pre_command.first;
|
||||||
|
const string &command = shell_pre_command.second;
|
||||||
|
Maybe<int> command_ret = shell->getExecReturnCode(command, timeout);
|
||||||
|
|
||||||
|
if (!command_ret.ok()) {
|
||||||
|
dbgWarning(D_AGENT_DETAILS) << "Failed to run pre-command " << name;
|
||||||
|
} else if (*command_ret) {
|
||||||
|
dbgWarning(D_AGENT_DETAILS) << "Pre-command " << name << " failed (rc: " << *command_ret << ")";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
map<string, string> resolved_details;
|
map<string, string> resolved_details;
|
||||||
for (auto shell_handler : shell_command_handlers) {
|
for (auto shell_handler : shell_command_handlers) {
|
||||||
const string &attr = shell_handler.first;
|
const string &attr = shell_handler.first;
|
||||||
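For context on the attribute table extended here: SHELL_CMD_OUTPUT is just SHELL_CMD_HANDLER with an identity lambda, and each entry expands into an {attribute, {command, handler}} row of the map. A self-contained sketch, using std::function in place of the real ShellCommandHandler (which returns a Maybe<string>):

    #include <functional>
    #include <iostream>
    #include <map>
    #include <string>
    #include <utility>

    // Simplified handler type; the real one returns a Maybe<string>.
    using ShellCommandHandler = std::function<std::string(const std::string &)>;

    #define SHELL_CMD_HANDLER(ATTRIBUTE, COMMAND, HANDLER) {ATTRIBUTE, {COMMAND, ShellCommandHandler(HANDLER)}},
    #define SHELL_CMD_OUTPUT(ATTRIBUTE, COMMAND) SHELL_CMD_HANDLER(ATTRIBUTE, COMMAND, [](const std::string &s) { return s; })

    std::map<std::string, std::pair<std::string, ShellCommandHandler>> shell_command_handlers = {
        SHELL_CMD_OUTPUT("helloWorld", "cat /tmp/agentHelloWorld 2>/dev/null")
    };

    int main()
    {
        // The identity handler just forwards whatever the command printed.
        std::cout << shell_command_handlers.at("helloWorld").second("hi from the agent") << '\n';
        return 0;
    }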
@ -116,7 +137,6 @@ DetailsResolvingHanlder::Impl::getCommandOutput(const string &cmd)
|
|||||||
DetailsResolvingHanlder::DetailsResolvingHanlder() : pimpl(make_unique<Impl>()) {}
|
DetailsResolvingHanlder::DetailsResolvingHanlder() : pimpl(make_unique<Impl>()) {}
|
||||||
DetailsResolvingHanlder::~DetailsResolvingHanlder() {}
|
DetailsResolvingHanlder::~DetailsResolvingHanlder() {}
|
||||||
|
|
||||||
|
|
||||||
map<string, string>
|
map<string, string>
|
||||||
DetailsResolvingHanlder::getResolvedDetails() const
|
DetailsResolvingHanlder::getResolvedDetails() const
|
||||||
{
|
{
|
||||||
|
@ -55,7 +55,7 @@ HttpCurl::HttpCurl(
|
|||||||
const string &_bearer,
|
const string &_bearer,
|
||||||
const Maybe<string> &proxy_url,
|
const Maybe<string> &proxy_url,
|
||||||
const Maybe<uint16_t> &proxy_port,
|
const Maybe<uint16_t> &proxy_port,
|
||||||
const Maybe<string> &proxy_auth)
|
const Maybe<string> &_proxy_auth)
|
||||||
:
|
:
|
||||||
url(_url),
|
url(_url),
|
||||||
out_file(_out_file),
|
out_file(_out_file),
|
||||||
@ -85,10 +85,10 @@ HttpCurl::HttpCurl(
|
|||||||
proxy = proxy_url.unpack() + ":" + to_string(proxy_port.unpack());
|
proxy = proxy_url.unpack() + ":" + to_string(proxy_port.unpack());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if (proxy_auth.ok())
|
if (_proxy_auth.ok())
|
||||||
{
|
{
|
||||||
I_Encryptor *encryptor = Singleton::Consume<I_Encryptor>::by<HttpCurl>();
|
I_Encryptor *encryptor = Singleton::Consume<I_Encryptor>::by<HttpCurl>();
|
||||||
proxy_credentials = "Proxy-Authorization: Basic " + encryptor->base64Encode(proxy_auth.unpack());
|
proxy_auth = "Proxy-Authorization: Basic " + encryptor->base64Encode(_proxy_auth.unpack());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -98,7 +98,7 @@ HttpCurl::HttpCurl(const HttpCurl &other)
|
|||||||
out_file(other.out_file),
|
out_file(other.out_file),
|
||||||
bearer(other.bearer),
|
bearer(other.bearer),
|
||||||
proxy(other.proxy),
|
proxy(other.proxy),
|
||||||
proxy_credentials(other.proxy_credentials),
|
proxy_auth(other.proxy_auth),
|
||||||
curl(unique_ptr<CURL, function<void(CURL *)>>(curl_easy_init(), curl_easy_cleanup))
|
curl(unique_ptr<CURL, function<void(CURL *)>>(curl_easy_init(), curl_easy_cleanup))
|
||||||
{
|
{
|
||||||
}
|
}
|
||||||
@ -133,9 +133,9 @@ HttpCurl::setCurlOpts(long timeout, HTTP_VERSION http_version)
|
|||||||
if (!proxy.empty())
|
if (!proxy.empty())
|
||||||
{
|
{
|
||||||
curl_easy_setopt(curl_handle, CURLOPT_PROXY, proxy.c_str());
|
curl_easy_setopt(curl_handle, CURLOPT_PROXY, proxy.c_str());
|
||||||
if (!proxy_credentials.empty())
|
if (!proxy_auth.empty())
|
||||||
{
|
{
|
||||||
proxy_headers = curl_slist_append(proxy_headers, proxy_credentials.c_str());
|
proxy_headers = curl_slist_append(proxy_headers, proxy_auth.c_str());
|
||||||
//Apply proxy headers
|
//Apply proxy headers
|
||||||
curl_easy_setopt(curl_handle, CURLOPT_PROXYHEADER, proxy_headers);
|
curl_easy_setopt(curl_handle, CURLOPT_PROXYHEADER, proxy_headers);
|
||||||
}
|
}
|
||||||
@ -330,9 +330,9 @@ HttpsCurl::setCurlOpts(long timeout, HTTP_VERSION http_version)
|
|||||||
if (!proxy.empty())
|
if (!proxy.empty())
|
||||||
{
|
{
|
||||||
curl_easy_setopt(curl_handle, CURLOPT_PROXY, proxy.c_str());
|
curl_easy_setopt(curl_handle, CURLOPT_PROXY, proxy.c_str());
|
||||||
if (!proxy_credentials.empty())
|
if (!proxy_auth.empty())
|
||||||
{
|
{
|
||||||
proxy_headers = curl_slist_append(proxy_headers, proxy_credentials.c_str());
|
proxy_headers = curl_slist_append(proxy_headers, proxy_auth.c_str());
|
||||||
//Apply proxy headers
|
//Apply proxy headers
|
||||||
curl_easy_setopt(curl_handle, CURLOPT_PROXYHEADER, proxy_headers);
|
curl_easy_setopt(curl_handle, CURLOPT_PROXYHEADER, proxy_headers);
|
||||||
}
|
}
|
||||||
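The rename above (proxy_credentials to proxy_auth) does not change the mechanism: the member holds a ready-made "Proxy-Authorization: Basic <base64(user:pass)>" header, appended to a curl_slist and applied with CURLOPT_PROXYHEADER. A libcurl-only sketch of that wiring (the real class builds the base64 value via I_Encryptor and keeps the header list as a member):

    #include <curl/curl.h>
    #include <string>

    void
    applyProxy(CURL *curl_handle, const std::string &proxy, const std::string &proxy_auth)
    {
        if (proxy.empty()) return;
        curl_easy_setopt(curl_handle, CURLOPT_PROXY, proxy.c_str());

        if (!proxy_auth.empty()) {
            // proxy_auth already holds "Proxy-Authorization: Basic <base64(user:pass)>".
            struct curl_slist *proxy_headers = nullptr;
            proxy_headers = curl_slist_append(proxy_headers, proxy_auth.c_str());
            curl_easy_setopt(curl_handle, CURLOPT_PROXYHEADER, proxy_headers);
            // The list must outlive the transfer; the class above keeps it as a member
            // (and is assumed to release it with curl_slist_free_all()).
        }
    }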
|
@ -83,7 +83,7 @@ protected:
|
|||||||
std::ofstream &out_file;
|
std::ofstream &out_file;
|
||||||
std::string bearer;
|
std::string bearer;
|
||||||
std::string proxy;
|
std::string proxy;
|
||||||
std::string proxy_credentials;
|
std::string proxy_auth;
|
||||||
std::unique_ptr<CURL, std::function<void(CURL *)>> curl;
|
std::unique_ptr<CURL, std::function<void(CURL *)>> curl;
|
||||||
std::string curl_url;
|
std::string curl_url;
|
||||||
};
|
};
|
||||||
|
@ -81,6 +81,8 @@ public:
|
|||||||
const string &service_name
|
const string &service_name
|
||||||
) const override;
|
) const override;
|
||||||
|
|
||||||
|
Maybe<string> checkIfFileExists(const Package &package) const override;
|
||||||
|
void removeDownloadFile(const string &file_name) const override;
|
||||||
void createTenantProfileMap();
|
void createTenantProfileMap();
|
||||||
string getProfileFromMap(const string &tenant_id) const override;
|
string getProfileFromMap(const string &tenant_id) const override;
|
||||||
|
|
||||||
@ -194,12 +196,18 @@ Downloader::Impl::downloadVirtualFileFromFog(
|
|||||||
static const string error_text = "error";
|
static const string error_text = "error";
|
||||||
|
|
||||||
map<pair<string, string>, string> res;
|
map<pair<string, string>, string> res;
|
||||||
|
|
||||||
|
string general_file_path = dir_path + "/" + resourse_file.getFileName() + "_general.download";
|
||||||
I_UpdateCommunication *update_communication = Singleton::Consume<I_UpdateCommunication>::by<Downloader>();
|
I_UpdateCommunication *update_communication = Singleton::Consume<I_UpdateCommunication>::by<Downloader>();
|
||||||
auto downloaded_data = update_communication->downloadAttributeFile(resourse_file);
|
auto downloaded_data = update_communication->downloadAttributeFile(resourse_file, general_file_path);
|
||||||
if (!downloaded_data.ok()) return downloaded_data.passErr();
|
if (!downloaded_data.ok()) return downloaded_data.passErr();
|
||||||
|
|
||||||
|
I_OrchestrationTools *orchestration_tools = Singleton::Consume<I_OrchestrationTools>::by<Downloader>();
|
||||||
|
Maybe<string> file_content = orchestration_tools->readFile(general_file_path);
|
||||||
|
if (!file_content.ok()) return file_content.passErr();
|
||||||
|
|
||||||
Document document;
|
Document document;
|
||||||
document.Parse(downloaded_data.unpack().c_str());
|
document.Parse(file_content.unpack().c_str());
|
||||||
if (document.HasParseError()) {
|
if (document.HasParseError()) {
|
||||||
dbgWarning(D_ORCHESTRATOR) << "JSON file is not valid";
|
dbgWarning(D_ORCHESTRATOR) << "JSON file is not valid";
|
||||||
return genError("JSON file is not valid.");
|
return genError("JSON file is not valid.");
|
||||||
@ -241,7 +249,6 @@ Downloader::Impl::downloadVirtualFileFromFog(
|
|||||||
rapidjson::Writer<rapidjson::StringBuffer> writer(buffer);
|
rapidjson::Writer<rapidjson::StringBuffer> writer(buffer);
|
||||||
artifact_data->value.Accept(writer);
|
artifact_data->value.Accept(writer);
|
||||||
|
|
||||||
I_OrchestrationTools *orchestration_tools = Singleton::Consume<I_OrchestrationTools>::by<Downloader>();
|
|
||||||
if (orchestration_tools->writeFile(buffer.GetString(), file_path)) {
|
if (orchestration_tools->writeFile(buffer.GetString(), file_path)) {
|
||||||
res.insert({{tenant_id, profile_id}, file_path});
|
res.insert({{tenant_id, profile_id}, file_path});
|
||||||
}
|
}
|
||||||
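downloadAttributeFile now takes a target path and writes the response straight to disk, so downloadVirtualFileFromFog reads the file back and parses it before splitting the content per tenant. A minimal sketch of that read-then-parse step, assuming RapidJSON (which the surrounding code already uses via Document/Writer):

    #include <fstream>
    #include <sstream>
    #include <string>
    #include "rapidjson/document.h"

    bool
    parseDownloadedAttributeFile(const std::string &general_file_path)
    {
        std::ifstream in(general_file_path);
        if (!in.is_open()) return false;  // mirrors the readFile() failure path

        std::stringstream content;
        content << in.rdbuf();

        rapidjson::Document document;
        document.Parse(content.str().c_str());
        return !document.HasParseError();  // "JSON file is not valid." otherwise
    }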
@ -324,6 +331,24 @@ Downloader::Impl::downloadFileFromURL(
|
|||||||
return file_path;
|
return file_path;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
Maybe<string>
|
||||||
|
Downloader::Impl::checkIfFileExists(const Package &package) const
|
||||||
|
{
|
||||||
|
string file_name = package.getName() + ".download";
|
||||||
|
Maybe<string> maybe_path = dir_path + "/" + file_name;
|
||||||
|
|
||||||
|
return validateChecksum(package.getChecksum(), package.getChecksumType(), maybe_path);
|
||||||
|
}
|
||||||
|
|
||||||
|
void
|
||||||
|
Downloader::Impl::removeDownloadFile(const string &file_name) const
|
||||||
|
{
|
||||||
|
string file_path = dir_path + "/" + file_name + ".download";
|
||||||
|
dbgInfo(D_ORCHESTRATOR) << "Removing download file " << file_path;
|
||||||
|
I_OrchestrationTools *orchestration_tools = Singleton::Consume<I_OrchestrationTools>::by<Downloader>();
|
||||||
|
orchestration_tools->removeFile(file_path);
|
||||||
|
}
|
||||||
|
|
||||||
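The two new Downloader entry points let callers reuse an already-downloaded .download file when its checksum still matches, and delete it once installation is done. An illustrative caller with minimal stand-in types; how the manifest flow actually uses the result is an assumption here (the real checkIfFileExists returns a Maybe<string> with the validated path):

    #include <string>

    // Minimal stand-ins for the sketch only.
    struct Package { std::string getName() const { return "orchestration"; } };
    struct Downloader
    {
        bool checkIfFileExists(const Package &) const { return false; }  // real API returns Maybe<std::string>
        void removeDownloadFile(const std::string &) const {}
    };

    void
    installOnePackage(Downloader &downloader, const Package &package)
    {
        if (!downloader.checkIfFileExists(package)) {
            // no valid cached <name>.download file: download the package here
        }
        // ... install ...
        downloader.removeDownloadFile(package.getName());  // drop the temp .download file afterwards
    }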
Maybe<string>
|
Maybe<string>
|
||||||
Downloader::Impl::validateChecksum(
|
Downloader::Impl::validateChecksum(
|
||||||
const string &checksum,
|
const string &checksum,
|
||||||
@ -355,13 +380,11 @@ Downloader::Impl::downloadFileFromFogByHTTP(const GetResourceFile &resourse_file
|
|||||||
dbgInfo(D_ORCHESTRATOR) << "Downloading file from fog. File: " << resourse_file.getFileName();
|
dbgInfo(D_ORCHESTRATOR) << "Downloading file from fog. File: " << resourse_file.getFileName();
|
||||||
|
|
||||||
I_UpdateCommunication *update_communication = Singleton::Consume<I_UpdateCommunication>::by<Downloader>();
|
I_UpdateCommunication *update_communication = Singleton::Consume<I_UpdateCommunication>::by<Downloader>();
|
||||||
auto downloaded_file = update_communication->downloadAttributeFile(resourse_file);
|
auto downloaded_file = update_communication->downloadAttributeFile(resourse_file, file_path);
|
||||||
if (!downloaded_file.ok()) return genError(downloaded_file.getErr());
|
if (!downloaded_file.ok()) return genError(downloaded_file.getErr());
|
||||||
dbgInfo(D_ORCHESTRATOR) << "Download completed. File: " << resourse_file.getFileName();
|
|
||||||
|
|
||||||
I_OrchestrationTools *orchestration_tools = Singleton::Consume<I_OrchestrationTools>::by<Downloader>();
|
dbgInfo(D_ORCHESTRATOR) << "Download completed. File: " << resourse_file.getFileName();
|
||||||
if (orchestration_tools->writeFile(downloaded_file.unpack(), file_path)) return file_path;
|
return file_path;
|
||||||
return genError("Failed to write the attribute file. File: " + file_name);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
Maybe<string>
|
Maybe<string>
|
||||||
|
@ -18,6 +18,7 @@ class DownloaderTest : public Test
|
|||||||
public:
|
public:
|
||||||
DownloaderTest()
|
DownloaderTest()
|
||||||
{
|
{
|
||||||
|
Debug::setUnitTestFlag(D_ORCHESTRATOR, Debug::DebugLevel::TRACE);
|
||||||
setConfiguration<string>("/tmp", "orchestration", "Default file download path");
|
setConfiguration<string>("/tmp", "orchestration", "Default file download path");
|
||||||
EXPECT_CALL(mock_orchestration_tools, createDirectory("/tmp")).WillOnce(Return(true));
|
EXPECT_CALL(mock_orchestration_tools, createDirectory("/tmp")).WillOnce(Return(true));
|
||||||
downloader.init();
|
downloader.init();
|
||||||
@ -44,15 +45,14 @@ TEST_F(DownloaderTest, downloadFileFromFog)
|
|||||||
|
|
||||||
GetResourceFile resourse_file(GetResourceFile::ResourceFileType::VIRTUAL_SETTINGS);
|
GetResourceFile resourse_file(GetResourceFile::ResourceFileType::VIRTUAL_SETTINGS);
|
||||||
|
|
||||||
EXPECT_CALL(mock_communication, downloadAttributeFile(resourse_file)).WillOnce(Return(fog_response));
|
EXPECT_CALL(mock_communication, downloadAttributeFile(resourse_file, "/tmp/virtualSettings.download"))
|
||||||
|
.WillOnce(Return(fog_response));
|
||||||
|
|
||||||
EXPECT_CALL(
|
EXPECT_CALL(
|
||||||
mock_orchestration_tools,
|
mock_orchestration_tools,
|
||||||
calculateChecksum(Package::ChecksumTypes::SHA256, "/tmp/virtualSettings.download")
|
calculateChecksum(Package::ChecksumTypes::SHA256, "/tmp/virtualSettings.download")
|
||||||
).WillOnce(Return(string("123")));
|
).WillOnce(Return(string("123")));
|
||||||
|
|
||||||
EXPECT_CALL(mock_orchestration_tools, writeFile(fog_response, "/tmp/virtualSettings.download", false))
|
|
||||||
.WillOnce(Return(true));
|
|
||||||
EXPECT_CALL(mock_orchestration_tools, isNonEmptyFile("/tmp/virtualSettings.download")).WillOnce(Return(true));
|
EXPECT_CALL(mock_orchestration_tools, isNonEmptyFile("/tmp/virtualSettings.download")).WillOnce(Return(true));
|
||||||
|
|
||||||
Maybe<string> downloaded_file = i_downloader->downloadFileFromFog(
|
Maybe<string> downloaded_file = i_downloader->downloadFileFromFog(
|
||||||
@ -71,7 +71,10 @@ TEST_F(DownloaderTest, downloadFileFromFogFailure)
|
|||||||
Maybe<string> fog_response(genError("Failed to download"));
|
Maybe<string> fog_response(genError("Failed to download"));
|
||||||
GetResourceFile resourse_file(GetResourceFile::ResourceFileType::SETTINGS);
|
GetResourceFile resourse_file(GetResourceFile::ResourceFileType::SETTINGS);
|
||||||
|
|
||||||
EXPECT_CALL(mock_communication, downloadAttributeFile(resourse_file)).WillOnce(Return(fog_response));
|
EXPECT_CALL(
|
||||||
|
mock_communication,
|
||||||
|
downloadAttributeFile(resourse_file, "/tmp/settings.download")
|
||||||
|
).WillOnce(Return(fog_response));
|
||||||
|
|
||||||
Maybe<string> downloaded_file = i_downloader->downloadFileFromFog(
|
Maybe<string> downloaded_file = i_downloader->downloadFileFromFog(
|
||||||
checksum,
|
checksum,
|
||||||
@ -124,6 +127,53 @@ TEST_F(DownloaderTest, registerConfig)
|
|||||||
env.fini();
|
env.fini();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
TEST_F(DownloaderTest, checkIfFileExists)
|
||||||
|
{
|
||||||
|
string local_file_path = "/tmp/test_file.sh";
|
||||||
|
string url = "file://" + local_file_path;
|
||||||
|
string dir_path = getConfigurationWithDefault<string>(
|
||||||
|
"/tmp/orchestration_downloads",
|
||||||
|
"orchestration",
|
||||||
|
"Default file download path"
|
||||||
|
);
|
||||||
|
string manifest =
|
||||||
|
"{"
|
||||||
|
" \"packages\": ["
|
||||||
|
" {"
|
||||||
|
" \"name\": \"test\","
|
||||||
|
" \"version\": \"c\","
|
||||||
|
" \"download-path\": \"http://172.23.92.135/my.sh\","
|
||||||
|
" \"relative-path\": \"\","
|
||||||
|
" \"checksum-type\": \"sha1sum\","
|
||||||
|
" \"checksum\": \"1234\","
|
||||||
|
" \"package-type\": \"service\","
|
||||||
|
" \"require\": []"
|
||||||
|
" }"
|
||||||
|
" ]"
|
||||||
|
"}";
|
||||||
|
|
||||||
|
vector<Package> manifest_services;
|
||||||
|
std::stringstream os(manifest);
|
||||||
|
cereal::JSONInputArchive archive_in(os);
|
||||||
|
archive_in(manifest_services);
|
||||||
|
|
||||||
|
string service_name = "test";
|
||||||
|
string file_name = service_name + ".download";
|
||||||
|
string file_path = dir_path + "/" + file_name;
|
||||||
|
string checksum = "1234";
|
||||||
|
Package::ChecksumTypes checksum_type = Package::ChecksumTypes::SHA1;
|
||||||
|
|
||||||
|
EXPECT_CALL(mock_orchestration_tools, calculateChecksum(checksum_type, file_path)).WillOnce(Return(checksum));
|
||||||
|
i_downloader->checkIfFileExists(manifest_services[0]);
|
||||||
|
}
|
||||||
|
|
||||||
|
TEST_F(DownloaderTest, removeDownloadFile)
|
||||||
|
{
|
||||||
|
string file_path = "/tmp/package.download";
|
||||||
|
EXPECT_CALL(mock_orchestration_tools, removeFile(file_path)).WillOnce(Return(true));
|
||||||
|
i_downloader->removeDownloadFile("package");
|
||||||
|
}
|
||||||
|
|
||||||
TEST_F(DownloaderTest, downloadWithBadChecksum)
|
TEST_F(DownloaderTest, downloadWithBadChecksum)
|
||||||
{
|
{
|
||||||
string local_file_path = "/tmp/test_file.sh";
|
string local_file_path = "/tmp/test_file.sh";
|
||||||
@ -181,10 +231,9 @@ TEST_F(DownloaderTest, downloadEmptyFileFromFog)
|
|||||||
|
|
||||||
GetResourceFile resourse_file(GetResourceFile::ResourceFileType::MANIFEST);
|
GetResourceFile resourse_file(GetResourceFile::ResourceFileType::MANIFEST);
|
||||||
|
|
||||||
EXPECT_CALL(mock_communication, downloadAttributeFile(resourse_file)).WillOnce(Return(fog_response));
|
EXPECT_CALL(mock_communication, downloadAttributeFile(resourse_file, "/tmp/manifest.download"))
|
||||||
|
.WillOnce(Return(fog_response));
|
||||||
|
|
||||||
EXPECT_CALL(mock_orchestration_tools, writeFile(fog_response, "/tmp/manifest.download", false))
|
|
||||||
.WillOnce(Return(true));
|
|
||||||
EXPECT_CALL(mock_orchestration_tools, isNonEmptyFile("/tmp/manifest.download")).WillOnce(Return(false));
|
EXPECT_CALL(mock_orchestration_tools, isNonEmptyFile("/tmp/manifest.download")).WillOnce(Return(false));
|
||||||
|
|
||||||
EXPECT_CALL(
|
EXPECT_CALL(
|
||||||
@ -340,7 +389,13 @@ TEST_F(DownloaderTest, download_virtual_policy)
|
|||||||
" ]\n"
|
" ]\n"
|
||||||
"}";
|
"}";
|
||||||
|
|
||||||
EXPECT_CALL(mock_communication, downloadAttributeFile(resourse_file)).WillOnce(Return(fog_response));
|
EXPECT_CALL(
|
||||||
|
mock_communication,
|
||||||
|
downloadAttributeFile(resourse_file, "/tmp/virtualPolicy_general.download"))
|
||||||
|
.WillOnce(Return(fog_response));
|
||||||
|
|
||||||
|
EXPECT_CALL(mock_orchestration_tools, readFile("/tmp/virtualPolicy_general.download"))
|
||||||
|
.WillOnce(Return(fog_response));
|
||||||
|
|
||||||
EXPECT_CALL(
|
EXPECT_CALL(
|
||||||
mock_orchestration_tools,
|
mock_orchestration_tools,
|
||||||
@ -428,7 +483,15 @@ TEST_F(DownloaderTest, download_virtual_settings)
|
|||||||
" ]\n"
|
" ]\n"
|
||||||
"}";
|
"}";
|
||||||
|
|
||||||
EXPECT_CALL(mock_communication, downloadAttributeFile(resourse_file)).WillOnce(Return(fog_response));
|
EXPECT_CALL(
|
||||||
|
mock_communication,
|
||||||
|
downloadAttributeFile(resourse_file, "/tmp/virtualSettings_general.download"))
|
||||||
|
.WillOnce(Return(fog_response));
|
||||||
|
|
||||||
|
EXPECT_CALL(
|
||||||
|
mock_orchestration_tools,
|
||||||
|
readFile("/tmp/virtualSettings_general.download")
|
||||||
|
).WillOnce(Return(fog_response));
|
||||||
|
|
||||||
stringstream tenant_0000_path;
|
stringstream tenant_0000_path;
|
||||||
tenant_0000_path << "/tmp/virtualSettings_4c721b40-85df-4364-be3d-303a10ee9789"
|
tenant_0000_path << "/tmp/virtualSettings_4c721b40-85df-4364-be3d-303a10ee9789"
|
||||||
|
@ -221,7 +221,7 @@ HTTPClient::curlGetFileOverHttp(const URLParser &url, ofstream &out_file, const
|
|||||||
token,
|
token,
|
||||||
proxy_config->getProxyDomain(ProxyProtocol::HTTPS),
|
proxy_config->getProxyDomain(ProxyProtocol::HTTPS),
|
||||||
proxy_config->getProxyPort(ProxyProtocol::HTTPS),
|
proxy_config->getProxyPort(ProxyProtocol::HTTPS),
|
||||||
proxy_config->getProxyCredentials(ProxyProtocol::HTTPS));
|
proxy_config->getProxyAuthentication(ProxyProtocol::HTTPS));
|
||||||
|
|
||||||
http_curl_client.setCurlOpts();
|
http_curl_client.setCurlOpts();
|
||||||
bool connection_ok = http_curl_client.connect();
|
bool connection_ok = http_curl_client.connect();
|
||||||
@ -251,7 +251,7 @@ HTTPClient::getFileHttp(const URLParser &url, ofstream &out_file, const string &
|
|||||||
url,
|
url,
|
||||||
proxy_config->getProxyDomain(ProxyProtocol::HTTP),
|
proxy_config->getProxyDomain(ProxyProtocol::HTTP),
|
||||||
proxy_config->getProxyPort(ProxyProtocol::HTTP),
|
proxy_config->getProxyPort(ProxyProtocol::HTTP),
|
||||||
proxy_config->getProxyCredentials(ProxyProtocol::HTTP),
|
proxy_config->getProxyAuthentication(ProxyProtocol::HTTP),
|
||||||
token
|
token
|
||||||
);
|
);
|
||||||
auto handle_connect_res = client_connection.handleConnect();
|
auto handle_connect_res = client_connection.handleConnect();
|
||||||
|
@ -17,14 +17,12 @@
|
|||||||
#include <string>
|
#include <string>
|
||||||
#include "maybe_res.h"
|
#include "maybe_res.h"
|
||||||
#include "url_parser.h"
|
#include "url_parser.h"
|
||||||
#include "i_messaging.h"
|
|
||||||
#include "i_agent_details.h"
|
#include "i_agent_details.h"
|
||||||
#include "i_proxy_configuration.h"
|
#include "i_proxy_configuration.h"
|
||||||
|
|
||||||
// LCOV_EXCL_START Reason: Depends on real download server.
|
// LCOV_EXCL_START Reason: Depends on real download server.
|
||||||
class HTTPClient
|
class HTTPClient
|
||||||
:
|
:
|
||||||
public Singleton::Consume<I_Messaging>,
|
|
||||||
public Singleton::Consume<I_AgentDetails>,
|
public Singleton::Consume<I_AgentDetails>,
|
||||||
public Singleton::Consume<I_ProxyConfiguration>
|
public Singleton::Consume<I_ProxyConfiguration>
|
||||||
{
|
{
|
||||||
|
@ -544,7 +544,7 @@ HTTPClient::getFileSSL(const URLParser &url, ofstream &out_file, const string &t
|
|||||||
url,
|
url,
|
||||||
proxy_config->getProxyDomain(ProxyProtocol::HTTPS),
|
proxy_config->getProxyDomain(ProxyProtocol::HTTPS),
|
||||||
proxy_config->getProxyPort(ProxyProtocol::HTTPS),
|
proxy_config->getProxyPort(ProxyProtocol::HTTPS),
|
||||||
proxy_config->getProxyCredentials(ProxyProtocol::HTTPS),
|
proxy_config->getProxyAuthentication(ProxyProtocol::HTTPS),
|
||||||
token
|
token
|
||||||
);
|
);
|
||||||
|
|
||||||
@ -589,7 +589,7 @@ HTTPClient::curlGetFileOverSSL(const URLParser &url, ofstream &out_file, const s
|
|||||||
token,
|
token,
|
||||||
proxy_config->getProxyDomain(ProxyProtocol::HTTPS),
|
proxy_config->getProxyDomain(ProxyProtocol::HTTPS),
|
||||||
proxy_config->getProxyPort(ProxyProtocol::HTTPS),
|
proxy_config->getProxyPort(ProxyProtocol::HTTPS),
|
||||||
proxy_config->getProxyCredentials(ProxyProtocol::HTTPS),
|
proxy_config->getProxyAuthentication(ProxyProtocol::HTTPS),
|
||||||
cert_file_path);
|
cert_file_path);
|
||||||
|
|
||||||
ssl_curl_client.setCurlOpts();
|
ssl_curl_client.setCurlOpts();
|
||||||
|
@ -3,5 +3,5 @@ link_directories(${BOOST_ROOT}/lib)
|
|||||||
add_unit_test(
|
add_unit_test(
|
||||||
health_check_ut
|
health_check_ut
|
||||||
"health_check_ut.cc"
|
"health_check_ut.cc"
|
||||||
"health_check;mainloop;singleton;agent_details;config;logging;metric;event_is;health_check_manager;-lboost_regex;-lboost_system"
|
"health_check;messaging;mainloop;singleton;agent_details;config;logging;metric;event_is;health_check_manager;-lboost_regex;-lboost_system"
|
||||||
)
|
)
|
||||||
|
@ -40,7 +40,10 @@ class FogCommunication : public FogAuthenticator
|
|||||||
public:
|
public:
|
||||||
void init() override;
|
void init() override;
|
||||||
Maybe<void> getUpdate(CheckUpdateRequest &request) override;
|
Maybe<void> getUpdate(CheckUpdateRequest &request) override;
|
||||||
Maybe<std::string> downloadAttributeFile(const GetResourceFile &resourse_file) override;
|
Maybe<std::string> downloadAttributeFile(
|
||||||
|
const GetResourceFile &resourse_file,
|
||||||
|
const std::string &file_path
|
||||||
|
) override;
|
||||||
Maybe<void> sendPolicyVersion(
|
Maybe<void> sendPolicyVersion(
|
||||||
const std::string &policy_version,
|
const std::string &policy_version,
|
||||||
const std::string &policy_versions
|
const std::string &policy_versions
|
||||||
|
@ -14,7 +14,6 @@
|
|||||||
#ifndef __GET_STATUS_RES_H__
|
#ifndef __GET_STATUS_RES_H__
|
||||||
#define __GET_STATUS_RES_H__
|
#define __GET_STATUS_RES_H__
|
||||||
|
|
||||||
#include "i_messaging.h"
|
|
||||||
#include "i_mainloop.h"
|
#include "i_mainloop.h"
|
||||||
#include "i_shell_cmd.h"
|
#include "i_shell_cmd.h"
|
||||||
#include "i_encryptor.h"
|
#include "i_encryptor.h"
|
||||||
|
@ -45,7 +45,10 @@ class HybridCommunication
|
|||||||
public:
|
public:
|
||||||
void init() override;
|
void init() override;
|
||||||
Maybe<void> getUpdate(CheckUpdateRequest &request) override;
|
Maybe<void> getUpdate(CheckUpdateRequest &request) override;
|
||||||
Maybe<std::string> downloadAttributeFile(const GetResourceFile &resourse_file) override;
|
Maybe<std::string> downloadAttributeFile(
|
||||||
|
const GetResourceFile &resourse_file,
|
||||||
|
const std::string &file_path
|
||||||
|
) override;
|
||||||
Maybe<void> sendPolicyVersion(
|
Maybe<void> sendPolicyVersion(
|
||||||
const std::string &policy_version,
|
const std::string &policy_version,
|
||||||
const std::string &policy_versions
|
const std::string &policy_versions
|
||||||
|
@ -31,7 +31,10 @@ public:
|
|||||||
Maybe<void> authenticateAgent() override;
|
Maybe<void> authenticateAgent() override;
|
||||||
Maybe<void> getUpdate(CheckUpdateRequest &request) override;
|
Maybe<void> getUpdate(CheckUpdateRequest &request) override;
|
||||||
|
|
||||||
Maybe<std::string> downloadAttributeFile(const GetResourceFile &resourse_file) override;
|
Maybe<std::string> downloadAttributeFile(
|
||||||
|
const GetResourceFile &resourse_file,
|
||||||
|
const std::string &file_path
|
||||||
|
) override;
|
||||||
void setAddressExtenesion(const std::string &extension) override;
|
void setAddressExtenesion(const std::string &extension) override;
|
||||||
Maybe<void> sendPolicyVersion(
|
Maybe<void> sendPolicyVersion(
|
||||||
const std::string &policy_version,
|
const std::string &policy_version,
|
||||||
|
@ -39,7 +39,7 @@ public:
|
|||||||
MOCK_METHOD0(isKernelVersion3OrHigher, bool());
|
MOCK_METHOD0(isKernelVersion3OrHigher, bool());
|
||||||
MOCK_METHOD0(isGwNotVsx, bool());
|
MOCK_METHOD0(isGwNotVsx, bool());
|
||||||
MOCK_METHOD0(getResolvedDetails, std::map<std::string, std::string>());
|
MOCK_METHOD0(getResolvedDetails, std::map<std::string, std::string>());
|
||||||
MOCK_METHOD0(isVersionEqualOrAboveR8110, bool());
|
MOCK_METHOD0(isVersionAboveR8110, bool());
|
||||||
MOCK_METHOD0(parseNginxMetadata, Maybe<std::tuple<std::string, std::string, std::string>>());
|
MOCK_METHOD0(parseNginxMetadata, Maybe<std::tuple<std::string, std::string, std::string>>());
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -41,6 +41,16 @@ public:
|
|||||||
Maybe<std::string>(const std::string &, const std::string &, Package::ChecksumTypes, const std::string &)
|
Maybe<std::string>(const std::string &, const std::string &, Package::ChecksumTypes, const std::string &)
|
||||||
);
|
);
|
||||||
|
|
||||||
|
MOCK_CONST_METHOD1(
|
||||||
|
checkIfFileExists,
|
||||||
|
Maybe<std::string>(const Package &)
|
||||||
|
);
|
||||||
|
|
||||||
|
MOCK_CONST_METHOD1(
|
||||||
|
removeDownloadFile,
|
||||||
|
void(const std::string &)
|
||||||
|
);
|
||||||
|
|
||||||
MOCK_CONST_METHOD1(
|
MOCK_CONST_METHOD1(
|
||||||
getProfileFromMap,
|
getProfileFromMap,
|
||||||
std::string(const std::string &)
|
std::string(const std::string &)
|
||||||
|
@ -13,6 +13,8 @@
|
|||||||
|
|
||||||
#include "manifest_controller.h"
|
#include "manifest_controller.h"
|
||||||
|
|
||||||
|
#include <algorithm>
|
||||||
|
|
||||||
#include "config.h"
|
#include "config.h"
|
||||||
#include "debug.h"
|
#include "debug.h"
|
||||||
#include "environment.h"
|
#include "environment.h"
|
||||||
@ -80,9 +82,8 @@ private:
|
|||||||
|
|
||||||
bool
|
bool
|
||||||
handlePackage(
|
handlePackage(
|
||||||
const Package &updated_package,
|
const pair<Package, string> &package_downloaded_file,
|
||||||
map<string, Package> ¤t_packages,
|
map<string, Package> ¤t_packages,
|
||||||
const map<string, Package> &new_packages,
|
|
||||||
map<string, Package> &corrupted_packages
|
map<string, Package> &corrupted_packages
|
||||||
);
|
);
|
||||||
|
|
||||||
@ -179,6 +180,34 @@ ManifestController::Impl::updateIgnoreListForNSaaS()
|
|||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
static vector<pair<Package, string>>::const_iterator
|
||||||
|
findPackage(const vector<pair<Package, string>> &packages, const string &name)
|
||||||
|
{
|
||||||
|
using Pair = pair<Package, string>;
|
||||||
|
return find_if(packages.begin(), packages.end(), [&] (const Pair &pair) { return pair.first.getName() == name; });
|
||||||
|
}
|
||||||
|
|
||||||
|
static vector<pair<Package, string>>
|
||||||
|
sortByInstallationQueue(
|
||||||
|
const vector<pair<Package, string>> &downloaded_files,
|
||||||
|
const vector<Package> &installation_queue)
|
||||||
|
{
|
||||||
|
vector<pair<Package, string>> sorted_queue;
|
||||||
|
for (auto &package_file : installation_queue) {
|
||||||
|
if (package_file.getName() == "accessControlApp" || package_file.getName() == "accessControlKernel") continue;
|
||||||
|
|
||||||
|
auto package_it = findPackage(downloaded_files, package_file.getName());
|
||||||
|
if (package_it != downloaded_files.end()) sorted_queue.push_back(*package_it);
|
||||||
|
}
|
||||||
|
|
||||||
|
auto ac_app_it = findPackage(downloaded_files, "accessControlApp");
|
||||||
|
auto ac_kernel_it = findPackage(downloaded_files, "accessControlKernel");
|
||||||
|
if (ac_app_it != downloaded_files.end()) sorted_queue.push_back(*ac_app_it);
|
||||||
|
if (ac_kernel_it != downloaded_files.end()) sorted_queue.push_back(*ac_kernel_it);
|
||||||
|
|
||||||
|
return sorted_queue;
|
||||||
|
}
|
||||||
|
|
||||||
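sortByInstallationQueue reorders the downloaded files to follow the installation queue and always defers accessControlApp and accessControlKernel to the end. A toy illustration of that ordering rule (string names stand in for the Package objects):

    #include <iostream>
    #include <string>
    #include <vector>

    int main()
    {
        std::vector<std::string> installation_queue = {"accessControlApp", "orchestration", "waap"};
        std::vector<std::string> sorted_queue;

        for (const auto &name : installation_queue) {
            if (name == "accessControlApp" || name == "accessControlKernel") continue;
            sorted_queue.push_back(name);
        }
        sorted_queue.push_back("accessControlApp");  // deferred to the end (if it was downloaded)

        for (const auto &name : sorted_queue) std::cout << name << '\n';
        // prints: orchestration, waap, accessControlApp
        return 0;
    }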
bool
|
bool
|
||||||
ManifestController::Impl::updateManifest(const string &new_manifest_file)
|
ManifestController::Impl::updateManifest(const string &new_manifest_file)
|
||||||
{
|
{
|
||||||
@ -220,6 +249,7 @@ ManifestController::Impl::updateManifest(const string &new_manifest_file)
|
|||||||
}
|
}
|
||||||
|
|
||||||
map<string, Package> new_packages = parsed_manifest.unpack();
|
map<string, Package> new_packages = parsed_manifest.unpack();
|
||||||
|
map<string, Package> all_packages = parsed_manifest.unpack();
|
||||||
map<string, Package> current_packages;
|
map<string, Package> current_packages;
|
||||||
parsed_manifest = orchestration_tools->loadPackagesFromJson(manifest_file_path);
|
parsed_manifest = orchestration_tools->loadPackagesFromJson(manifest_file_path);
|
||||||
|
|
||||||
@ -256,13 +286,14 @@ ManifestController::Impl::updateManifest(const string &new_manifest_file)
|
|||||||
auto packages_to_remove = manifest_diff_calc.filterUntrackedPackages(current_packages, new_packages);
|
auto packages_to_remove = manifest_diff_calc.filterUntrackedPackages(current_packages, new_packages);
|
||||||
for (auto remove_package = packages_to_remove.begin(); remove_package != packages_to_remove.end();) {
|
for (auto remove_package = packages_to_remove.begin(); remove_package != packages_to_remove.end();) {
|
||||||
bool uninstall_response = true;
|
bool uninstall_response = true;
|
||||||
if (remove_package->second.isInstallable().ok()) {
|
if (remove_package->second.isInstallable()) {
|
||||||
uninstall_response = manifest_handler.uninstallPackage(remove_package->second);
|
uninstall_response = manifest_handler.uninstallPackage(remove_package->second);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!uninstall_response) {
|
if (!uninstall_response) {
|
||||||
dbgWarning(D_ORCHESTRATOR)
|
dbgWarning(D_ORCHESTRATOR)
|
||||||
<< "Failed to uninstall package. Package: " << remove_package->second.getName();
|
<< "Failed to uninstall package. Package: "
|
||||||
|
<< remove_package->second.getName();
|
||||||
all_cleaned = false;
|
all_cleaned = false;
|
||||||
remove_package++;
|
remove_package++;
|
||||||
} else {
|
} else {
|
||||||
@ -284,42 +315,40 @@ ManifestController::Impl::updateManifest(const string &new_manifest_file)
|
|||||||
|
|
||||||
bool no_change = new_packages.size() == 0;
|
bool no_change = new_packages.size() == 0;
|
||||||
// Both new_packages & corrupted_packages will be updated based on updated manifest
|
// Both new_packages & corrupted_packages will be updated based on updated manifest
|
||||||
bool no_corrupted_package = manifest_diff_calc.filterCorruptedPackages(new_packages, corrupted_packages);
|
|
||||||
|
|
||||||
auto orchestration_service = new_packages.find("orchestration");
|
const auto &download_packages_res = manifest_handler.downloadPackages(new_packages);
|
||||||
if (orchestration_service != new_packages.end()) {
|
if (!download_packages_res.ok()) {
|
||||||
// Orchestration needs special handling as manifest should be backup differently
|
dbgWarning(D_ORCHESTRATOR)
|
||||||
return handlePackage(
|
<< "Failed to download required packages. Error: "
|
||||||
orchestration_service->second,
|
<< download_packages_res.getErr();
|
||||||
current_packages,
|
return false;
|
||||||
new_packages,
|
|
||||||
corrupted_packages
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
auto wlp_standalone_service = new_packages.find("wlpStandalone");
|
const vector<pair<Package, string>> &downloaded_files = download_packages_res.unpack();
|
||||||
if (wlp_standalone_service != new_packages.end()) {
|
const auto &installation_queue_res = manifest_diff_calc.buildInstallationQueue(
|
||||||
// wlpStandalone needs special handling as manifest should be backup differently
|
|
||||||
return handlePackage(
|
|
||||||
wlp_standalone_service->second,
|
|
||||||
current_packages,
|
current_packages,
|
||||||
new_packages,
|
new_packages
|
||||||
corrupted_packages
|
);
|
||||||
);
|
if (!installation_queue_res.ok()) {
|
||||||
|
dbgWarning(D_ORCHESTRATOR)
|
||||||
|
<< "Failed building installation queue. Error: "
|
||||||
|
<< installation_queue_res.getErr();
|
||||||
|
return false;
|
||||||
}
|
}
|
||||||
|
const vector<Package> &installation_queue = installation_queue_res.unpack();
|
||||||
|
|
||||||
|
const auto &sortd_downloaded_files = sortByInstallationQueue(downloaded_files, installation_queue);
|
||||||
|
|
||||||
bool all_installed = true;
|
bool all_installed = true;
|
||||||
bool any_installed = false;
|
bool any_installed = false;
|
||||||
|
|
||||||
dbgDebug(D_ORCHESTRATOR) << "Starting to handle " << new_packages.size() <<" new packages";
|
dbgDebug(D_ORCHESTRATOR) << "Starting to handle " << downloaded_files.size() << " new packages";
|
||||||
for (auto &new_package : new_packages) {
|
for (auto &package : sortd_downloaded_files) {
|
||||||
|
if (package.first.getType() != Package::PackageType::Service) continue;
|
||||||
if (new_package.second.getType() != Package::PackageType::Service) continue;
|
|
||||||
|
|
||||||
size_t prev_size = corrupted_packages.size();
|
size_t prev_size = corrupted_packages.size();
|
||||||
|
|
||||||
bool handling_response = handlePackage(
|
bool handling_response = handlePackage(
|
||||||
new_package.second,
|
package,
|
||||||
current_packages,
|
current_packages,
|
||||||
new_packages,
|
|
||||||
corrupted_packages
|
corrupted_packages
|
||||||
);
|
);
|
||||||
|
|
||||||
@ -331,7 +360,10 @@ ManifestController::Impl::updateManifest(const string &new_manifest_file)
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Orchestration needs special handling as manifest should be backup differently
|
// Orchestration needs special handling as manifest should be backup differently
|
||||||
if (new_package.first.compare(orch_service_name) == 0) {
|
if (package.first.getName().compare(orch_service_name) == 0) {
|
||||||
|
return handling_response;
|
||||||
|
}
|
||||||
|
if (package.first.getName().compare("wlpStandalone") == 0) {
|
||||||
return handling_response;
|
return handling_response;
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -341,14 +373,22 @@ ManifestController::Impl::updateManifest(const string &new_manifest_file)
|
|||||||
|
|
||||||
bool manifest_file_update = true;
|
bool manifest_file_update = true;
|
||||||
|
|
||||||
if (all_installed && (any_installed || no_change) && no_corrupted_package) {
|
if (all_installed && (any_installed || no_change)) {
|
||||||
manifest_file_update = changeManifestFile(new_manifest_file);
|
manifest_file_update = changeManifestFile(new_manifest_file);
|
||||||
// In NSaaS - set ignore packages to any
|
// In NSaaS - set ignore packages to any
|
||||||
ignore_packages_update = updateIgnoreListForNSaaS();
|
ignore_packages_update = updateIgnoreListForNSaaS();
|
||||||
} else if (any_installed) {
|
} else if (any_installed) {
|
||||||
manifest_file_update = orchestration_tools->packagesToJsonFile(current_packages, manifest_file_path);
|
manifest_file_update = orchestration_tools->packagesToJsonFile(current_packages, manifest_file_path);
|
||||||
}
|
}
|
||||||
return all_installed && manifest_file_update && no_corrupted_package && all_cleaned;
|
if (all_installed) {
|
||||||
|
auto orchestration_downloader = Singleton::Consume<I_Downloader>::by<ManifestHandler>();
|
||||||
|
for (auto &package : all_packages) {
|
||||||
|
dbgDebug(D_ORCHESTRATOR)
|
||||||
|
<< "Removing temp Download file after successfull installation : " << package.second.getName();
|
||||||
|
orchestration_downloader->removeDownloadFile(package.second.getName());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return all_installed && manifest_file_update && all_cleaned;
|
||||||
}
|
}
|
||||||
|
|
||||||
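Taken together, the updateManifest changes above move to a download-first flow: fetch every new package, build and sort the installation queue, install the services in order, and only when everything installed remove the temporary .download files. A condensed, compilable sketch with trivial stand-in helpers (the real code uses ManifestHandler and the Downloader API added earlier in this diff):

    #include <string>
    #include <utility>
    #include <vector>

    struct Pkg { std::string name; };
    using Downloaded = std::pair<Pkg, std::string>;  // package plus the path of its .download file

    // Trivial stand-ins for the sketch only.
    static std::vector<Downloaded> downloadAll(const std::vector<Pkg> &pkgs)
    {
        std::vector<Downloaded> out;
        for (const auto &p : pkgs) out.push_back({p, "/tmp/orchestration_downloads/" + p.name + ".download"});
        return out;
    }
    static bool installOne(const Downloaded &) { return true; }
    static void removeDownloadFile(const std::string &) {}

    bool
    updateFlow(const std::vector<Pkg> &new_packages)
    {
        auto downloaded = downloadAll(new_packages);   // 1. download everything up front
        bool all_installed = true;
        for (const auto &file : downloaded) {          // 2. install in (sorted) queue order
            if (!installOne(file)) all_installed = false;
        }
        if (all_installed) {                           // 3. remove temp .download files on success
            for (const auto &p : new_packages) removeDownloadFile(p.name);
        }
        return all_installed;
    }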
// Orchestration package needs a special handling. Old service will die during the upgrade
|
// Orchestration package needs a special handling. Old service will die during the upgrade
|
||||||
@ -425,35 +465,26 @@ ManifestController::Impl::changeManifestFile(const string &new_manifest_file)
|
|||||||
|
|
||||||
bool
|
bool
|
||||||
ManifestController::Impl::handlePackage(
|
ManifestController::Impl::handlePackage(
|
||||||
const Package &package,
|
const pair<Package, string> &package_downloaded_file,
|
||||||
map<string, Package> ¤t_packages,
|
map<string, Package> ¤t_packages,
|
||||||
const map<string, Package> &new_packages,
|
|
||||||
map<string, Package> &corrupted_packages)
|
map<string, Package> &corrupted_packages)
|
||||||
{
|
{
|
||||||
|
auto &package = package_downloaded_file.first;
|
||||||
|
|
||||||
auto i_env = Singleton::Consume<I_Environment>::by<ManifestController>();
|
auto i_env = Singleton::Consume<I_Environment>::by<ManifestController>();
|
||||||
auto span_scope = i_env->startNewSpanScope(Span::ContextType::CHILD_OF);
|
auto span_scope = i_env->startNewSpanScope(Span::ContextType::CHILD_OF);
|
||||||
dbgDebug(D_ORCHESTRATOR) << "Handling package. Package: " << package.getName();
|
dbgDebug(D_ORCHESTRATOR) << "Handling package. Package: " << package.getName();
|
||||||
|
|
||||||
if (!package.isInstallable().ok()) {
|
if (!package.isInstallable()) {
|
||||||
string report_msg =
|
string report_msg =
|
||||||
"Skipping installation of package: " + package.getName() + ". Reason: " + package.isInstallable().getErr();
|
"Skipping installation of package: " + package.getName() + ". Reason: " + package.getErrorMessage();
|
||||||
dbgWarning(D_ORCHESTRATOR) << report_msg;
|
dbgWarning(D_ORCHESTRATOR) << report_msg;
|
||||||
LogGen(report_msg, Audience::SECURITY, Severity::CRITICAL, Priority::HIGH, Tags::ORCHESTRATOR);
|
LogGen(report_msg, Audience::SECURITY, Severity::CRITICAL, Priority::HIGH, Tags::ORCHESTRATOR);
|
||||||
current_packages.insert(make_pair(package.getName(), package));
|
current_packages.insert(make_pair(package.getName(), package));
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
vector<Package> installation_queue;
|
if (!manifest_handler.installPackage(package_downloaded_file, current_packages, corrupted_packages)) {
|
||||||
|
|
||||||
if (!manifest_diff_calc.buildInstallationQueue(package, installation_queue, current_packages, new_packages)) {
|
|
||||||
dbgWarning(D_ORCHESTRATOR) << "Failed building installation queue. Package: " << package.getName();
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
vector<pair<Package, string>> downloaded_files;
|
|
||||||
|
|
||||||
if (!manifest_handler.downloadPackages(installation_queue, downloaded_files)) return false;
|
|
||||||
if (!manifest_handler.installPackages(downloaded_files, current_packages, corrupted_packages)) {
|
|
||||||
LogGen(
|
LogGen(
|
||||||
"Failed to install package: " + package.getName(),
|
"Failed to install package: " + package.getName(),
|
||||||
Audience::SECURITY,
|
Audience::SECURITY,
|
||||||
|
@ -91,6 +91,16 @@ public:
|
|||||||
archive_in(ret);
|
archive_in(ret);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
void checkIfFileExistsCall(const Package &package)
|
||||||
|
{
|
||||||
|
Maybe<string> checksum_validation(
|
||||||
|
genError("File /tmp/orchestration_downloads/" + package.getName() + ".download does not exist.")
|
||||||
|
);
|
||||||
|
EXPECT_CALL(
|
||||||
|
mock_downloader,
|
||||||
|
checkIfFileExists(package)).WillRepeatedly(Return(checksum_validation));
|
||||||
|
}
|
||||||
|
|
||||||
string manifest_file_path;
|
string manifest_file_path;
|
||||||
string corrupted_file_list;
|
string corrupted_file_list;
|
||||||
string temp_ext;
|
string temp_ext;
|
||||||
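The checkIfFileExistsCall helper added in the hunk above gives each test a one-line way to stub the downloader's cache lookup: the mocked checkIfFileExists returns an error ("... .download does not exist"), so every package under test is treated as not yet downloaded and goes through the full download path. Condensed from the test bodies that follow (an illustration, not new test code):

    // Parse the manifest JSON into packages, then register the downloader
    // expectation for each package the test is about to install.
    map<string, Package> manifest_services;
    load(manifest, manifest_services);                 // existing fixture helper
    checkIfFileExistsCall(manifest_services.at("my")); // helper added in this diff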
@ -171,6 +181,10 @@ TEST_F(ManifestControllerTest, createNewManifest)
|
|||||||
" ]"
|
" ]"
|
||||||
"}";
|
"}";
|
||||||
|
|
||||||
|
map<string, Package> manifest_services;
|
||||||
|
load(manifest, manifest_services);
|
||||||
|
checkIfFileExistsCall(manifest_services.at("my"));
|
||||||
|
|
||||||
//mock_downloader
|
//mock_downloader
|
||||||
EXPECT_CALL(
|
EXPECT_CALL(
|
||||||
mock_downloader,
|
mock_downloader,
|
||||||
@ -187,6 +201,8 @@ TEST_F(ManifestControllerTest, createNewManifest)
|
|||||||
EXPECT_CALL(mock_package_handler, preInstallPackage("my", "/tmp/temp_file")).WillOnce(Return(true));
|
EXPECT_CALL(mock_package_handler, preInstallPackage("my", "/tmp/temp_file")).WillOnce(Return(true));
|
||||||
EXPECT_CALL(mock_package_handler, installPackage("my", "/tmp/temp_file", _)).WillOnce(Return(true));
|
EXPECT_CALL(mock_package_handler, installPackage("my", "/tmp/temp_file", _)).WillOnce(Return(true));
|
||||||
EXPECT_CALL(mock_package_handler, postInstallPackage("my", "/tmp/temp_file")).WillOnce(Return(true));
|
EXPECT_CALL(mock_package_handler, postInstallPackage("my", "/tmp/temp_file")).WillOnce(Return(true));
|
||||||
|
EXPECT_CALL(mock_downloader, removeDownloadFile("my"));
|
||||||
|
EXPECT_CALL(mock_downloader, removeDownloadFile("orchestration"));
|
||||||
EXPECT_CALL(mock_package_handler, updateSavedPackage("my", "/tmp/temp_file")).WillOnce(Return(true));
|
EXPECT_CALL(mock_package_handler, updateSavedPackage("my", "/tmp/temp_file")).WillOnce(Return(true));
|
||||||
|
|
||||||
load(manifest, new_services);
|
load(manifest, new_services);
|
||||||
@ -237,6 +253,10 @@ TEST_F(ManifestControllerTest, badChecksum)
|
|||||||
" ]"
|
" ]"
|
||||||
"}";
|
"}";
|
||||||
|
|
||||||
|
map<string, Package> manifest_services;
|
||||||
|
load(manifest, manifest_services);
|
||||||
|
checkIfFileExistsCall(manifest_services.at("my"));
|
||||||
|
|
||||||
//mock_downloader
|
//mock_downloader
|
||||||
Maybe<string> err(genError("Empty"));
|
Maybe<string> err(genError("Empty"));
|
||||||
EXPECT_CALL(
|
EXPECT_CALL(
|
||||||
@ -300,6 +320,11 @@ TEST_F(ManifestControllerTest, updateManifest)
|
|||||||
" }"
|
" }"
|
||||||
" ]"
|
" ]"
|
||||||
"}";
|
"}";
|
||||||
|
|
||||||
|
map<string, Package> manifest_services;
|
||||||
|
load(manifest, manifest_services);
|
||||||
|
checkIfFileExistsCall(manifest_services.at("my"));
|
||||||
|
|
||||||
//mock_downloader
|
//mock_downloader
|
||||||
EXPECT_CALL(
|
EXPECT_CALL(
|
||||||
mock_downloader,
|
mock_downloader,
|
||||||
@ -316,6 +341,8 @@ TEST_F(ManifestControllerTest, updateManifest)
|
|||||||
EXPECT_CALL(mock_package_handler, preInstallPackage("my", "/tmp/temp_file")).WillOnce(Return(true));
|
EXPECT_CALL(mock_package_handler, preInstallPackage("my", "/tmp/temp_file")).WillOnce(Return(true));
|
||||||
EXPECT_CALL(mock_package_handler, installPackage("my", "/tmp/temp_file", _)).WillOnce(Return(true));
|
EXPECT_CALL(mock_package_handler, installPackage("my", "/tmp/temp_file", _)).WillOnce(Return(true));
|
||||||
EXPECT_CALL(mock_package_handler, postInstallPackage("my", "/tmp/temp_file")).WillOnce(Return(true));
|
EXPECT_CALL(mock_package_handler, postInstallPackage("my", "/tmp/temp_file")).WillOnce(Return(true));
|
||||||
|
EXPECT_CALL(mock_downloader, removeDownloadFile("my")).Times(2);
|
||||||
|
EXPECT_CALL(mock_downloader, removeDownloadFile("orchestration")).Times(2);
|
||||||
EXPECT_CALL(mock_package_handler, updateSavedPackage("my", "/tmp/temp_file")).WillOnce(Return(true));
|
EXPECT_CALL(mock_package_handler, updateSavedPackage("my", "/tmp/temp_file")).WillOnce(Return(true));
|
||||||
|
|
||||||
load(manifest, new_services);
|
load(manifest, new_services);
|
||||||
@ -366,6 +393,9 @@ TEST_F(ManifestControllerTest, updateManifest)
|
|||||||
" ]"
|
" ]"
|
||||||
"}";
|
"}";
|
||||||
|
|
||||||
|
load(manifest, manifest_services);
|
||||||
|
checkIfFileExistsCall(manifest_services.at("my"));
|
||||||
|
|
||||||
EXPECT_CALL(
|
EXPECT_CALL(
|
||||||
mock_downloader,
|
mock_downloader,
|
||||||
downloadFileFromURL(
|
downloadFileFromURL(
|
||||||
@ -412,6 +442,10 @@ TEST_F(ManifestControllerTest, selfUpdate)
|
|||||||
" ]"
|
" ]"
|
||||||
"}";
|
"}";
|
||||||
|
|
||||||
|
map<string, Package> manifest_services;
|
||||||
|
load(manifest, manifest_services);
|
||||||
|
checkIfFileExistsCall(manifest_services.at("orchestration"));
|
||||||
|
|
||||||
EXPECT_CALL(
|
EXPECT_CALL(
|
||||||
mock_downloader,
|
mock_downloader,
|
||||||
downloadFileFromURL(
|
downloadFileFromURL(
|
||||||
@ -491,63 +525,6 @@ TEST_F(ManifestControllerTest, successLoadAfteSelfUpdate)
|
|||||||
EXPECT_TRUE(i_manifest_controller->loadAfterSelfUpdate());
|
EXPECT_TRUE(i_manifest_controller->loadAfterSelfUpdate());
|
||||||
}
|
}
|
||||||
|
|
||||||
TEST_F(ManifestControllerTest, updateWhileErrorPackageExist)
|
|
||||||
{
|
|
||||||
new_services.clear();
|
|
||||||
old_services.clear();
|
|
||||||
string manifest =
|
|
||||||
"{"
|
|
||||||
" \"packages\": ["
|
|
||||||
" {"
|
|
||||||
" \"name\": \"my\","
|
|
||||||
" \"version\": \"c\","
|
|
||||||
" \"download-path\": \"http://172.23.92.135/my.sh\","
|
|
||||||
" \"relative-path\": \"\","
|
|
||||||
" \"checksum-type\": \"sha1sum\","
|
|
||||||
" \"checksum\": \"a58bbab8020b0e6d08568714b5e582a3adf9c805\","
|
|
||||||
" \"package-type\": \"service\","
|
|
||||||
" \"require\": []"
|
|
||||||
" },"
|
|
||||||
" {"
|
|
||||||
" \"name\": \"orchestration\","
|
|
||||||
" \"version\": \"c\","
|
|
||||||
" \"download-path\": \"http://172.23.92.135/my.sh\","
|
|
||||||
" \"relative-path\": \"\","
|
|
||||||
" \"checksum-type\": \"sha1sum\","
|
|
||||||
" \"checksum\": \"a58bbab8020b0e6d08568714b5e582a3adf9c805\","
|
|
||||||
" \"package-type\": \"service\","
|
|
||||||
" \"require\": []"
|
|
||||||
" }"
|
|
||||||
" ]"
|
|
||||||
"}";
|
|
||||||
|
|
||||||
string corrupted_packages_manifest =
|
|
||||||
"{"
|
|
||||||
" \"packages\": ["
|
|
||||||
" {"
|
|
||||||
" \"name\": \"my\","
|
|
||||||
" \"version\": \"c\","
|
|
||||||
" \"download-path\": \"http://172.23.92.135/my.sh\","
|
|
||||||
" \"relative-path\": \"\","
|
|
||||||
" \"checksum-type\": \"sha1sum\","
|
|
||||||
" \"checksum\": \"a58bbab8020b0e6d08568714b5e582a3adf9c805\","
|
|
||||||
" \"package-type\": \"service\","
|
|
||||||
" \"require\": []"
|
|
||||||
" }"
|
|
||||||
" ]"
|
|
||||||
"}";
|
|
||||||
|
|
||||||
load(manifest, new_services);
|
|
||||||
load(old_manifest, old_services);
|
|
||||||
load(corrupted_packages_manifest, corrupted_packages);
|
|
||||||
|
|
||||||
EXPECT_CALL(mock_orchestration_tools,
|
|
||||||
loadPackagesFromJson(corrupted_file_list)).WillOnce(Return(corrupted_packages));
|
|
||||||
EXPECT_CALL(mock_orchestration_tools, loadPackagesFromJson(file_name)).WillOnce(Return(new_services));
|
|
||||||
EXPECT_CALL(mock_orchestration_tools, loadPackagesFromJson(manifest_file_path)).WillOnce(Return(old_services));
|
|
||||||
EXPECT_FALSE(i_manifest_controller->updateManifest(file_name));
|
|
||||||
}
|
|
||||||
|
|
||||||
TEST_F(ManifestControllerTest, removeCurrentErrorPackage)
|
TEST_F(ManifestControllerTest, removeCurrentErrorPackage)
|
||||||
{
|
{
|
||||||
new_services.clear();
|
new_services.clear();
|
||||||
@ -598,6 +575,8 @@ TEST_F(ManifestControllerTest, removeCurrentErrorPackage)
|
|||||||
load(old_manifest, old_services);
|
load(old_manifest, old_services);
|
||||||
load(corrupted_packages_manifest, corrupted_packages);
|
load(corrupted_packages_manifest, corrupted_packages);
|
||||||
|
|
||||||
|
checkIfFileExistsCall(new_services.at("my"));
|
||||||
|
|
||||||
//mock_downloader
|
//mock_downloader
|
||||||
EXPECT_CALL(
|
EXPECT_CALL(
|
||||||
mock_downloader,
|
mock_downloader,
|
||||||
@ -613,6 +592,8 @@ TEST_F(ManifestControllerTest, removeCurrentErrorPackage)
|
|||||||
EXPECT_CALL(mock_package_handler, preInstallPackage("my", "/tmp/temp_file")).WillOnce(Return(true));
|
EXPECT_CALL(mock_package_handler, preInstallPackage("my", "/tmp/temp_file")).WillOnce(Return(true));
|
||||||
EXPECT_CALL(mock_package_handler, installPackage("my", "/tmp/temp_file", _)).WillOnce(Return(true));
|
EXPECT_CALL(mock_package_handler, installPackage("my", "/tmp/temp_file", _)).WillOnce(Return(true));
|
||||||
EXPECT_CALL(mock_package_handler, postInstallPackage("my", "/tmp/temp_file")).WillOnce(Return(true));
|
EXPECT_CALL(mock_package_handler, postInstallPackage("my", "/tmp/temp_file")).WillOnce(Return(true));
|
||||||
|
EXPECT_CALL(mock_downloader, removeDownloadFile("my"));
|
||||||
|
EXPECT_CALL(mock_downloader, removeDownloadFile("orchestration"));
|
||||||
EXPECT_CALL(mock_package_handler, updateSavedPackage("my", "/tmp/temp_file")).WillOnce(Return(true));
|
EXPECT_CALL(mock_package_handler, updateSavedPackage("my", "/tmp/temp_file")).WillOnce(Return(true));
|
||||||
|
|
||||||
EXPECT_CALL(mock_orchestration_tools, loadPackagesFromJson(file_name)).WillOnce(Return(new_services));
|
EXPECT_CALL(mock_orchestration_tools, loadPackagesFromJson(file_name)).WillOnce(Return(new_services));
|
||||||
@ -629,8 +610,6 @@ TEST_F(ManifestControllerTest, removeCurrentErrorPackage)
|
|||||||
EXPECT_CALL(mock_orchestration_tools, removeFile(file_name)).WillOnce(Return(true));
|
EXPECT_CALL(mock_orchestration_tools, removeFile(file_name)).WillOnce(Return(true));
|
||||||
|
|
||||||
corrupted_packages.clear();
|
corrupted_packages.clear();
|
||||||
EXPECT_CALL(mock_orchestration_tools, packagesToJsonFile(corrupted_packages,
|
|
||||||
corrupted_file_list)).WillOnce(Return(true));
|
|
||||||
EXPECT_TRUE(i_manifest_controller->updateManifest(file_name));
|
EXPECT_TRUE(i_manifest_controller->updateManifest(file_name));
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -656,6 +635,10 @@ TEST_F(ManifestControllerTest, selfUpdateWithOldCopy)
|
|||||||
" ]"
|
" ]"
|
||||||
"}";
|
"}";
|
||||||
|
|
||||||
|
map<string, Package> manifest_services;
|
||||||
|
load(manifest, manifest_services);
|
||||||
|
checkIfFileExistsCall(manifest_services.at("orchestration"));
|
||||||
|
|
||||||
EXPECT_CALL(
|
EXPECT_CALL(
|
||||||
mock_downloader,
|
mock_downloader,
|
||||||
downloadFileFromURL(
|
downloadFileFromURL(
|
||||||
@ -710,6 +693,10 @@ TEST_F(ManifestControllerTest, selfUpdateWithOldCopyWithError)
|
|||||||
" ]"
|
" ]"
|
||||||
"}";
|
"}";
|
||||||
|
|
||||||
|
map<string, Package> manifest_services;
|
||||||
|
load(manifest, manifest_services);
|
||||||
|
checkIfFileExistsCall(manifest_services.at("orchestration"));
|
||||||
|
|
||||||
EXPECT_CALL(
|
EXPECT_CALL(
|
||||||
mock_downloader,
|
mock_downloader,
|
||||||
downloadFileFromURL(
|
downloadFileFromURL(
|
||||||
@ -775,6 +762,10 @@ TEST_F(ManifestControllerTest, installAndRemove)
|
|||||||
" ]"
|
" ]"
|
||||||
"}";
|
"}";
|
||||||
|
|
||||||
|
map<string, Package> manifest_services;
|
||||||
|
load(manifest, manifest_services);
|
||||||
|
checkIfFileExistsCall(manifest_services.at("my"));
|
||||||
|
|
||||||
//mock_downloader
|
//mock_downloader
|
||||||
EXPECT_CALL(
|
EXPECT_CALL(
|
||||||
mock_downloader,
|
mock_downloader,
|
||||||
@ -791,6 +782,8 @@ TEST_F(ManifestControllerTest, installAndRemove)
|
|||||||
EXPECT_CALL(mock_package_handler, preInstallPackage("my", "/tmp/temp_file")).WillOnce(Return(true));
|
EXPECT_CALL(mock_package_handler, preInstallPackage("my", "/tmp/temp_file")).WillOnce(Return(true));
|
||||||
EXPECT_CALL(mock_package_handler, installPackage("my", "/tmp/temp_file", _)).WillOnce(Return(true));
|
EXPECT_CALL(mock_package_handler, installPackage("my", "/tmp/temp_file", _)).WillOnce(Return(true));
|
||||||
EXPECT_CALL(mock_package_handler, postInstallPackage("my", "/tmp/temp_file")).WillOnce(Return(true));
|
EXPECT_CALL(mock_package_handler, postInstallPackage("my", "/tmp/temp_file")).WillOnce(Return(true));
|
||||||
|
EXPECT_CALL(mock_downloader, removeDownloadFile("my"));
|
||||||
|
EXPECT_CALL(mock_downloader, removeDownloadFile("orchestration"));
|
||||||
EXPECT_CALL(mock_package_handler, updateSavedPackage("my", "/tmp/temp_file")).WillOnce(Return(true));
|
EXPECT_CALL(mock_package_handler, updateSavedPackage("my", "/tmp/temp_file")).WillOnce(Return(true));
|
||||||
|
|
||||||
load(manifest, new_services);
|
load(manifest, new_services);
|
||||||
@ -839,6 +832,9 @@ TEST_F(ManifestControllerTest, installAndRemove)
|
|||||||
" ]"
|
" ]"
|
||||||
"}";
|
"}";
|
||||||
|
|
||||||
|
load(new_manifest, manifest_services);
|
||||||
|
checkIfFileExistsCall(manifest_services.at("my1"));
|
||||||
|
|
||||||
EXPECT_CALL(
|
EXPECT_CALL(
|
||||||
mock_downloader,
|
mock_downloader,
|
||||||
downloadFileFromURL(
|
downloadFileFromURL(
|
||||||
@ -854,6 +850,8 @@ TEST_F(ManifestControllerTest, installAndRemove)
|
|||||||
EXPECT_CALL(mock_package_handler, preInstallPackage("my1", "/tmp/temp_file")).WillOnce(Return(true));
|
EXPECT_CALL(mock_package_handler, preInstallPackage("my1", "/tmp/temp_file")).WillOnce(Return(true));
|
||||||
EXPECT_CALL(mock_package_handler, installPackage("my1", "/tmp/temp_file", _)).WillOnce(Return(true));
|
EXPECT_CALL(mock_package_handler, installPackage("my1", "/tmp/temp_file", _)).WillOnce(Return(true));
|
||||||
EXPECT_CALL(mock_package_handler, postInstallPackage("my1", "/tmp/temp_file")).WillOnce(Return(true));
|
EXPECT_CALL(mock_package_handler, postInstallPackage("my1", "/tmp/temp_file")).WillOnce(Return(true));
|
||||||
|
EXPECT_CALL(mock_downloader, removeDownloadFile("my1"));
|
||||||
|
EXPECT_CALL(mock_downloader, removeDownloadFile("orchestration"));
|
||||||
EXPECT_CALL(mock_package_handler, updateSavedPackage("my1", "/tmp/temp_file")).WillOnce(Return(true));
|
EXPECT_CALL(mock_package_handler, updateSavedPackage("my1", "/tmp/temp_file")).WillOnce(Return(true));
|
||||||
|
|
||||||
EXPECT_CALL(mock_orchestration_tools, packagesToJsonFile(old_services, manifest_file_path)).WillOnce(Return(true));
|
EXPECT_CALL(mock_orchestration_tools, packagesToJsonFile(old_services, manifest_file_path)).WillOnce(Return(true));
|
||||||
@ -900,6 +898,10 @@ TEST_F(ManifestControllerTest, badInstall)
|
|||||||
" ]"
|
" ]"
|
||||||
"}";
|
"}";
|
||||||
|
|
||||||
|
map<string, Package> manifest_services;
|
||||||
|
load(manifest, manifest_services);
|
||||||
|
checkIfFileExistsCall(manifest_services.at("my"));
|
||||||
|
|
||||||
//mock_downloader
|
//mock_downloader
|
||||||
EXPECT_CALL(
|
EXPECT_CALL(
|
||||||
mock_downloader,
|
mock_downloader,
|
||||||
@ -979,6 +981,10 @@ TEST_F(ManifestControllerTest, failToDownloadWithselfUpdate)
|
|||||||
" ]"
|
" ]"
|
||||||
"}";
|
"}";
|
||||||
|
|
||||||
|
map<string, Package> manifest_services;
|
||||||
|
load(manifest, manifest_services);
|
||||||
|
checkIfFileExistsCall(manifest_services.at("orchestration"));
|
||||||
|
|
||||||
Maybe<string> err(genError("Empty"));
|
Maybe<string> err(genError("Empty"));
|
||||||
EXPECT_CALL(
|
EXPECT_CALL(
|
||||||
mock_downloader,
|
mock_downloader,
|
||||||
@ -1019,7 +1025,7 @@ TEST_F(ManifestControllerTest, requireUpdate)
|
|||||||
"{"
|
"{"
|
||||||
" \"packages\": ["
|
" \"packages\": ["
|
||||||
" {"
|
" {"
|
||||||
" \"name\": \"orchestration\","
|
" \"name\": \"orchestration1\","
|
||||||
" \"version\": \"c\","
|
" \"version\": \"c\","
|
||||||
" \"download-path\": \"http://172.23.92.135/my.sh\","
|
" \"download-path\": \"http://172.23.92.135/my.sh\","
|
||||||
" \"relative-path\": \"\","
|
" \"relative-path\": \"\","
|
||||||
@ -1040,16 +1046,22 @@ TEST_F(ManifestControllerTest, requireUpdate)
|
|||||||
" }"
|
" }"
|
||||||
" ]"
|
" ]"
|
||||||
"}";
|
"}";
|
||||||
EXPECT_CALL(mock_status, writeStatusToFile());
|
|
||||||
|
map<string, Package> manifest_services;
|
||||||
|
load(manifest, manifest_services);
|
||||||
|
checkIfFileExistsCall(manifest_services.at("orchestration1"));
|
||||||
|
checkIfFileExistsCall(manifest_services.at("pre_orchestration"));
|
||||||
|
|
||||||
EXPECT_CALL(
|
EXPECT_CALL(
|
||||||
mock_downloader,
|
mock_downloader,
|
||||||
downloadFileFromURL(
|
downloadFileFromURL(
|
||||||
"http://172.23.92.135/my.sh",
|
"http://172.23.92.135/my.sh",
|
||||||
"a58bbab8020b0e6d08568714b5e582a3adf9c805",
|
"a58bbab8020b0e6d08568714b5e582a3adf9c805",
|
||||||
Package::ChecksumTypes::SHA1,
|
Package::ChecksumTypes::SHA1,
|
||||||
"orchestration"
|
"orchestration1"
|
||||||
)
|
)
|
||||||
).WillOnce(Return(string("/tmp/temp_file1")));
|
).WillOnce(Return(string("/tmp/temp_file1")));
|
||||||
|
|
||||||
EXPECT_CALL(
|
EXPECT_CALL(
|
||||||
mock_downloader,
|
mock_downloader,
|
||||||
downloadFileFromURL(
|
downloadFileFromURL(
|
||||||
@ -1059,10 +1071,16 @@ TEST_F(ManifestControllerTest, requireUpdate)
|
|||||||
"pre_orchestration"
|
"pre_orchestration"
|
||||||
)
|
)
|
||||||
).WillOnce(Return(string("/tmp/temp_file2")));
|
).WillOnce(Return(string("/tmp/temp_file2")));
|
||||||
string temp_orc_file = "/etc/cp/packages/orchestration/orchestration_temp";
|
EXPECT_CALL(mock_package_handler, preInstallPackage("orchestration1", "/tmp/temp_file1"))
|
||||||
EXPECT_CALL(mock_package_handler, preInstallPackage(orch_service_name, temp_orc_file))
|
|
||||||
.WillOnce(Return(true));
|
.WillOnce(Return(true));
|
||||||
EXPECT_CALL(mock_package_handler, installPackage(orch_service_name, temp_orc_file, _))
|
EXPECT_CALL(mock_package_handler, installPackage("orchestration1", "/tmp/temp_file1", _))
|
||||||
|
.WillOnce(Return(true));
|
||||||
|
|
||||||
|
EXPECT_CALL(
|
||||||
|
mock_package_handler,
|
||||||
|
shouldInstallPackage("orchestration1", "/tmp/temp_file1")
|
||||||
|
).WillOnce(Return(true));
|
||||||
|
EXPECT_CALL(mock_package_handler, postInstallPackage("orchestration1", "/tmp/temp_file1"))
|
||||||
.WillOnce(Return(true));
|
.WillOnce(Return(true));
|
||||||
|
|
||||||
EXPECT_CALL(
|
EXPECT_CALL(
|
||||||
@ -1075,8 +1093,12 @@ TEST_F(ManifestControllerTest, requireUpdate)
|
|||||||
.WillOnce(Return(true));
|
.WillOnce(Return(true));
|
||||||
EXPECT_CALL(mock_package_handler, postInstallPackage("pre_orchestration", "/tmp/temp_file2"))
|
EXPECT_CALL(mock_package_handler, postInstallPackage("pre_orchestration", "/tmp/temp_file2"))
|
||||||
.WillOnce(Return(true));
|
.WillOnce(Return(true));
|
||||||
|
EXPECT_CALL(mock_downloader, removeDownloadFile("pre_orchestration"));
|
||||||
EXPECT_CALL(mock_package_handler, updateSavedPackage("pre_orchestration", "/tmp/temp_file2"))
|
EXPECT_CALL(mock_package_handler, updateSavedPackage("pre_orchestration", "/tmp/temp_file2"))
|
||||||
.WillOnce(Return(true));
|
.WillOnce(Return(true));
|
||||||
|
EXPECT_CALL(mock_downloader, removeDownloadFile("orchestration1"));
|
||||||
|
EXPECT_CALL(mock_package_handler, updateSavedPackage("orchestration1", "/tmp/temp_file1"))
|
||||||
|
.WillOnce(Return(true));
|
||||||
|
|
||||||
|
|
||||||
EXPECT_CALL(mock_orchestration_tools, loadPackagesFromJson(corrupted_file_list))
|
EXPECT_CALL(mock_orchestration_tools, loadPackagesFromJson(corrupted_file_list))
|
||||||
@ -1088,20 +1110,24 @@ TEST_F(ManifestControllerTest, requireUpdate)
|
|||||||
.WillOnce(Return(new_services));
|
.WillOnce(Return(new_services));
|
||||||
EXPECT_CALL(mock_orchestration_tools, loadPackagesFromJson(manifest_file_path))
|
EXPECT_CALL(mock_orchestration_tools, loadPackagesFromJson(manifest_file_path))
|
||||||
.WillOnce(Return(old_services));
|
.WillOnce(Return(old_services));
|
||||||
string temp_manifest_path = manifest_file_path + temp_ext;
|
EXPECT_CALL(mock_orchestration_tools, doesFileExist(manifest_file_path))
|
||||||
EXPECT_CALL(mock_orchestration_tools, packagesToJsonFile(new_services, temp_manifest_path))
|
|
||||||
.WillOnce(Return(true));
|
.WillOnce(Return(true));
|
||||||
|
|
||||||
string path = packages_dir + "/" + orch_service_name + "/" +
|
string temp_manifest_path = manifest_file_path + temp_ext;
|
||||||
orch_service_name;
|
|
||||||
|
string path = packages_dir + "/orchestration1/" + "orchestration1";
|
||||||
EXPECT_CALL(mock_orchestration_tools, doesFileExist(path)).Times(2).WillOnce(Return(false));
|
EXPECT_CALL(mock_orchestration_tools, doesFileExist(path)).Times(2).WillOnce(Return(false));
|
||||||
EXPECT_CALL(
|
EXPECT_CALL(
|
||||||
mock_orchestration_tools,
|
mock_orchestration_tools,
|
||||||
doesFileExist("/etc/cp/packages/pre_orchestration/pre_orchestration")
|
doesFileExist("/etc/cp/packages/pre_orchestration/pre_orchestration")
|
||||||
).Times(2).WillOnce(Return(true));
|
).Times(2).WillOnce(Return(true));
|
||||||
|
|
||||||
EXPECT_CALL(mock_orchestration_tools, copyFile("/tmp/temp_file1", path + temp_ext))
|
EXPECT_CALL(mock_orchestration_tools, copyFile(file_name, manifest_file_path))
|
||||||
|
.WillOnce(Return(true));
|
||||||
|
EXPECT_CALL(mock_orchestration_tools, copyFile(manifest_file_path, "/etc/cp/conf/manifest.json.bk"))
|
||||||
.WillOnce(Return(true));
|
.WillOnce(Return(true));
|
||||||
|
EXPECT_CALL(mock_orchestration_tools, isNonEmptyFile(manifest_file_path)).WillOnce(Return(true));
|
||||||
|
EXPECT_CALL(mock_orchestration_tools, removeFile("new_manifest.json")).WillOnce(Return(true));
|
||||||
EXPECT_TRUE(i_manifest_controller->updateManifest(file_name));
|
EXPECT_TRUE(i_manifest_controller->updateManifest(file_name));
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -1141,6 +1167,11 @@ TEST_F(ManifestControllerTest, sharedObjectNotInstalled)
|
|||||||
EXPECT_CALL(mock_orchestration_tools,
|
EXPECT_CALL(mock_orchestration_tools,
|
||||||
loadPackagesFromJson(corrupted_file_list)).WillOnce(Return(corrupted_packages));
|
loadPackagesFromJson(corrupted_file_list)).WillOnce(Return(corrupted_packages));
|
||||||
|
|
||||||
|
map<string, Package> manifest_services;
|
||||||
|
load(manifest, manifest_services);
|
||||||
|
checkIfFileExistsCall(manifest_services.at("orchestration"));
|
||||||
|
checkIfFileExistsCall(manifest_services.at("pre_orchestration"));
|
||||||
|
|
||||||
EXPECT_CALL(
|
EXPECT_CALL(
|
||||||
mock_downloader,
|
mock_downloader,
|
||||||
downloadFileFromURL(
|
downloadFileFromURL(
|
||||||
@ -1151,6 +1182,16 @@ TEST_F(ManifestControllerTest, sharedObjectNotInstalled)
|
|||||||
)
|
)
|
||||||
).WillOnce(Return(string("/tmp/temp_file1")));
|
).WillOnce(Return(string("/tmp/temp_file1")));
|
||||||
|
|
||||||
|
EXPECT_CALL(
|
||||||
|
mock_downloader,
|
||||||
|
downloadFileFromURL(
|
||||||
|
"http://172.23.92.135/my.sh",
|
||||||
|
"a58bbab8020b0e6d08568714b5e582a3adf9c806",
|
||||||
|
Package::ChecksumTypes::SHA1,
|
||||||
|
"pre_orchestration"
|
||||||
|
)
|
||||||
|
).WillOnce(Return(string("/tmp/temp_file2")));
|
||||||
|
|
||||||
string temp_manifest_path = manifest_file_path + temp_ext;
|
string temp_manifest_path = manifest_file_path + temp_ext;
|
||||||
string writen_manifest =
|
string writen_manifest =
|
||||||
"{"
|
"{"
|
||||||
@ -1181,7 +1222,10 @@ TEST_F(ManifestControllerTest, sharedObjectNotInstalled)
|
|||||||
string path = packages_dir + "/" + orch_service_name + "/" +
|
string path = packages_dir + "/" + orch_service_name + "/" +
|
||||||
orch_service_name;
|
orch_service_name;
|
||||||
EXPECT_CALL(mock_orchestration_tools, doesFileExist(path)).Times(2).WillOnce(Return(false));
|
EXPECT_CALL(mock_orchestration_tools, doesFileExist(path)).Times(2).WillOnce(Return(false));
|
||||||
|
EXPECT_CALL(
|
||||||
|
mock_orchestration_tools,
|
||||||
|
doesFileExist("/etc/cp/packages/pre_orchestration/pre_orchestration")
|
||||||
|
).WillOnce(Return(true));
|
||||||
EXPECT_CALL(mock_orchestration_tools, copyFile("/tmp/temp_file1", path +
|
EXPECT_CALL(mock_orchestration_tools, copyFile("/tmp/temp_file1", path +
|
||||||
temp_ext)).WillOnce(Return(true));
|
temp_ext)).WillOnce(Return(true));
|
||||||
EXPECT_TRUE(i_manifest_controller->updateManifest(file_name));
|
EXPECT_TRUE(i_manifest_controller->updateManifest(file_name));
|
||||||
@ -1195,7 +1239,7 @@ TEST_F(ManifestControllerTest, requireSharedObjectUpdate)
|
|||||||
"{"
|
"{"
|
||||||
" \"packages\": ["
|
" \"packages\": ["
|
||||||
" {"
|
" {"
|
||||||
" \"name\": \"orchestration\","
|
" \"name\": \"orchestration1\","
|
||||||
" \"version\": \"c\","
|
" \"version\": \"c\","
|
||||||
" \"download-path\": \"http://172.23.92.135/my.sh\","
|
" \"download-path\": \"http://172.23.92.135/my.sh\","
|
||||||
" \"relative-path\": \"\","
|
" \"relative-path\": \"\","
|
||||||
@ -1211,21 +1255,27 @@ TEST_F(ManifestControllerTest, requireSharedObjectUpdate)
|
|||||||
" \"relative-path\": \"\","
|
" \"relative-path\": \"\","
|
||||||
" \"checksum-type\": \"sha1sum\","
|
" \"checksum-type\": \"sha1sum\","
|
||||||
" \"checksum\": \"a58bbab8020b0e6d08568714b5e582a3adf9c806\","
|
" \"checksum\": \"a58bbab8020b0e6d08568714b5e582a3adf9c806\","
|
||||||
" \"package-type\": \"shared objects\","
|
" \"package-type\": \"service\","
|
||||||
" \"require\": []"
|
" \"require\": []"
|
||||||
" }"
|
" }"
|
||||||
" ]"
|
" ]"
|
||||||
"}";
|
"}";
|
||||||
|
|
||||||
|
map<string, Package> manifest_services;
|
||||||
|
load(manifest, manifest_services);
|
||||||
|
checkIfFileExistsCall(manifest_services.at("orchestration1"));
|
||||||
|
checkIfFileExistsCall(manifest_services.at("pre_orchestration"));
|
||||||
|
|
||||||
EXPECT_CALL(
|
EXPECT_CALL(
|
||||||
mock_downloader,
|
mock_downloader,
|
||||||
downloadFileFromURL(
|
downloadFileFromURL(
|
||||||
"http://172.23.92.135/my.sh",
|
"http://172.23.92.135/my.sh",
|
||||||
"a58bbab8020b0e6d08568714b5e582a3adf9c805",
|
"a58bbab8020b0e6d08568714b5e582a3adf9c805",
|
||||||
Package::ChecksumTypes::SHA1,
|
Package::ChecksumTypes::SHA1,
|
||||||
"orchestration"
|
"orchestration1"
|
||||||
)
|
)
|
||||||
).WillOnce(Return(string("/tmp/temp_file1")));
|
).WillOnce(Return(string("/tmp/temp_file1")));
|
||||||
|
|
||||||
EXPECT_CALL(
|
EXPECT_CALL(
|
||||||
mock_downloader,
|
mock_downloader,
|
||||||
downloadFileFromURL(
|
downloadFileFromURL(
|
||||||
@ -1235,15 +1285,26 @@ TEST_F(ManifestControllerTest, requireSharedObjectUpdate)
|
|||||||
"pre_orchestration"
|
"pre_orchestration"
|
||||||
)
|
)
|
||||||
).WillOnce(Return(string("/tmp/temp_file2")));
|
).WillOnce(Return(string("/tmp/temp_file2")));
|
||||||
EXPECT_CALL(mock_status, writeStatusToFile());
|
string temp_orc_file = "/etc/cp/packages/orchestration1/orchestration_temp";
|
||||||
string temp_orc_file = "/etc/cp/packages/orchestration/orchestration_temp";
|
|
||||||
EXPECT_CALL(mock_package_handler, shouldInstallPackage(_, _)).WillRepeatedly(Return(true));
|
EXPECT_CALL(mock_package_handler, shouldInstallPackage(_, _)).WillRepeatedly(Return(true));
|
||||||
EXPECT_CALL(mock_package_handler, preInstallPackage(orch_service_name,
|
EXPECT_CALL(mock_package_handler, installPackage("orchestration1", "/tmp/temp_file1", _))
|
||||||
temp_orc_file)).WillOnce(Return(true));
|
.WillOnce(Return(true));
|
||||||
EXPECT_CALL(mock_package_handler, installPackage(orch_service_name,
|
|
||||||
temp_orc_file, _)).WillOnce(Return(true));
|
|
||||||
EXPECT_CALL(mock_package_handler, installPackage("pre_orchestration",
|
EXPECT_CALL(mock_package_handler, installPackage("pre_orchestration",
|
||||||
"/tmp/temp_file2", _)).WillOnce(Return(true));
|
"/tmp/temp_file2", _)).WillOnce(Return(true));
|
||||||
|
EXPECT_CALL(mock_package_handler, preInstallPackage("orchestration1", "/tmp/temp_file1"))
|
||||||
|
.WillOnce(Return(true));
|
||||||
|
EXPECT_CALL(mock_package_handler, postInstallPackage("orchestration1", "/tmp/temp_file1"))
|
||||||
|
.WillOnce(Return(true));
|
||||||
|
EXPECT_CALL(mock_downloader, removeDownloadFile("orchestration1"));
|
||||||
|
EXPECT_CALL(mock_package_handler, updateSavedPackage("orchestration1", "/tmp/temp_file1"))
|
||||||
|
.WillOnce(Return(true));
|
||||||
|
EXPECT_CALL(mock_package_handler, preInstallPackage("pre_orchestration", "/tmp/temp_file2"))
|
||||||
|
.WillOnce(Return(true));
|
||||||
|
EXPECT_CALL(mock_package_handler, postInstallPackage("pre_orchestration", "/tmp/temp_file2"))
|
||||||
|
.WillOnce(Return(true));
|
||||||
|
EXPECT_CALL(mock_downloader, removeDownloadFile("pre_orchestration"));
|
||||||
|
EXPECT_CALL(mock_package_handler, updateSavedPackage("pre_orchestration", "/tmp/temp_file2"))
|
||||||
|
.WillOnce(Return(true));
|
||||||
EXPECT_CALL(mock_orchestration_tools,
|
EXPECT_CALL(mock_orchestration_tools,
|
||||||
loadPackagesFromJson(corrupted_file_list)).WillOnce(Return(corrupted_packages));
|
loadPackagesFromJson(corrupted_file_list)).WillOnce(Return(corrupted_packages));
|
||||||
|
|
||||||
@ -1252,18 +1313,22 @@ TEST_F(ManifestControllerTest, requireSharedObjectUpdate)
|
|||||||
EXPECT_CALL(mock_orchestration_tools, loadPackagesFromJson(file_name)).WillOnce(Return(new_services));
|
EXPECT_CALL(mock_orchestration_tools, loadPackagesFromJson(file_name)).WillOnce(Return(new_services));
|
||||||
EXPECT_CALL(mock_orchestration_tools, loadPackagesFromJson(manifest_file_path)).WillOnce(Return(old_services));
|
EXPECT_CALL(mock_orchestration_tools, loadPackagesFromJson(manifest_file_path)).WillOnce(Return(old_services));
|
||||||
string temp_manifest_path = manifest_file_path + temp_ext;
|
string temp_manifest_path = manifest_file_path + temp_ext;
|
||||||
EXPECT_CALL(mock_orchestration_tools, packagesToJsonFile(new_services, temp_manifest_path)).WillOnce(Return(true));
|
|
||||||
|
|
||||||
string path = packages_dir + "/" + orch_service_name + "/" +
|
string path = packages_dir + "/" + "orchestration1" + "/" + "orchestration1";
|
||||||
orch_service_name;
|
|
||||||
EXPECT_CALL(mock_orchestration_tools, doesFileExist(path)).Times(2).WillOnce(Return(false));
|
EXPECT_CALL(mock_orchestration_tools, doesFileExist(path)).Times(2).WillOnce(Return(false));
|
||||||
|
EXPECT_CALL(mock_orchestration_tools, doesFileExist("/etc/cp/conf/manifest.json"))
|
||||||
|
.WillOnce(Return(false));
|
||||||
EXPECT_CALL(
|
EXPECT_CALL(
|
||||||
mock_orchestration_tools,
|
mock_orchestration_tools,
|
||||||
doesFileExist("/etc/cp/packages/pre_orchestration/pre_orchestration")
|
doesFileExist("/etc/cp/packages/pre_orchestration/pre_orchestration")
|
||||||
).Times(2).WillOnce(Return(false));
|
).Times(2).WillOnce(Return(false));
|
||||||
|
|
||||||
EXPECT_CALL(mock_orchestration_tools, copyFile("/tmp/temp_file1", path +
|
EXPECT_CALL(mock_orchestration_tools, copyFile("new_manifest.json", "/etc/cp/conf/manifest.json"))
|
||||||
temp_ext)).WillOnce(Return(true));
|
.WillOnce(Return(true));
|
||||||
|
EXPECT_CALL(mock_orchestration_tools, isNonEmptyFile("/etc/cp/conf/manifest.json"))
|
||||||
|
.WillOnce(Return(true));
|
||||||
|
EXPECT_CALL(mock_orchestration_tools, removeFile("new_manifest.json"))
|
||||||
|
.WillOnce(Return(true));
|
||||||
EXPECT_TRUE(i_manifest_controller->updateManifest(file_name));
|
EXPECT_TRUE(i_manifest_controller->updateManifest(file_name));
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -1297,6 +1362,21 @@ TEST_F(ManifestControllerTest, failureOnDownloadSharedObject)
|
|||||||
" ]"
|
" ]"
|
||||||
"}";
|
"}";
|
||||||
|
|
||||||
|
map<string, Package> manifest_services;
|
||||||
|
load(manifest, manifest_services);
|
||||||
|
checkIfFileExistsCall(manifest_services.at("orchestration"));
|
||||||
|
checkIfFileExistsCall(manifest_services.at("pre_orchestration"));
|
||||||
|
|
||||||
|
EXPECT_CALL(
|
||||||
|
mock_downloader,
|
||||||
|
downloadFileFromURL(
|
||||||
|
"http://172.23.92.135/my.sh",
|
||||||
|
"a58bbab8020b0e6d08568714b5e582a3adf9c805",
|
||||||
|
Package::ChecksumTypes::SHA1,
|
||||||
|
"orchestration"
|
||||||
|
)
|
||||||
|
).WillOnce(Return(string("/tmp/temp_file1")));
|
||||||
|
|
||||||
Maybe<string> err = genError("error");
|
Maybe<string> err = genError("error");
|
||||||
EXPECT_CALL(
|
EXPECT_CALL(
|
||||||
mock_downloader,
|
mock_downloader,
|
||||||
@ -1314,11 +1394,16 @@ TEST_F(ManifestControllerTest, failureOnDownloadSharedObject)
|
|||||||
|
|
||||||
EXPECT_CALL(mock_orchestration_tools, loadPackagesFromJson(file_name)).WillOnce(Return(new_services));
|
EXPECT_CALL(mock_orchestration_tools, loadPackagesFromJson(file_name)).WillOnce(Return(new_services));
|
||||||
EXPECT_CALL(mock_orchestration_tools, loadPackagesFromJson(manifest_file_path)).WillOnce(Return(old_services));
|
EXPECT_CALL(mock_orchestration_tools, loadPackagesFromJson(manifest_file_path)).WillOnce(Return(old_services));
|
||||||
|
EXPECT_CALL(
|
||||||
|
mock_orchestration_tools,
|
||||||
|
doesFileExist("/etc/cp/packages/orchestration/orchestration")
|
||||||
|
).WillOnce(Return(false));
|
||||||
EXPECT_CALL(
|
EXPECT_CALL(
|
||||||
mock_orchestration_tools,
|
mock_orchestration_tools,
|
||||||
doesFileExist("/etc/cp/packages/pre_orchestration/pre_orchestration")
|
doesFileExist("/etc/cp/packages/pre_orchestration/pre_orchestration")
|
||||||
).WillOnce(Return(false));
|
).WillOnce(Return(false));
|
||||||
EXPECT_CALL(mock_details_resolver, getHostname()).WillOnce(Return(string("hostname")));
|
EXPECT_CALL(mock_details_resolver, getHostname()).WillOnce(Return(string("hostname")));
|
||||||
|
EXPECT_CALL(mock_orchestration_tools, removeFile("/tmp/temp_file1")).WillOnce(Return(true));
|
||||||
EXPECT_CALL(
|
EXPECT_CALL(
|
||||||
mock_status,
|
mock_status,
|
||||||
setFieldStatus(OrchestrationStatusFieldType::MANIFEST, OrchestrationStatusResult::FAILED, _)
|
setFieldStatus(OrchestrationStatusFieldType::MANIFEST, OrchestrationStatusResult::FAILED, _)
|
||||||
@ -1337,7 +1422,7 @@ TEST_F(ManifestControllerTest, multiRequireUpdate)
|
|||||||
"{"
|
"{"
|
||||||
" \"packages\": ["
|
" \"packages\": ["
|
||||||
" {"
|
" {"
|
||||||
" \"name\": \"orchestration\","
|
" \"name\": \"orchestration1\","
|
||||||
" \"version\": \"c\","
|
" \"version\": \"c\","
|
||||||
" \"download-path\": \"http://172.23.92.135/my.sh\","
|
" \"download-path\": \"http://172.23.92.135/my.sh\","
|
||||||
" \"relative-path\": \"\","
|
" \"relative-path\": \"\","
|
||||||
@ -1353,7 +1438,7 @@ TEST_F(ManifestControllerTest, multiRequireUpdate)
|
|||||||
" \"relative-path\": \"\","
|
" \"relative-path\": \"\","
|
||||||
" \"checksum-type\": \"sha1sum\","
|
" \"checksum-type\": \"sha1sum\","
|
||||||
" \"checksum\": \"a58bbab8020b0e6d08568714b5e582a3adf9c806\","
|
" \"checksum\": \"a58bbab8020b0e6d08568714b5e582a3adf9c806\","
|
||||||
" \"package-type\": \"shared objects\","
|
" \"package-type\": \"service\","
|
||||||
" \"require\": []"
|
" \"require\": []"
|
||||||
" },"
|
" },"
|
||||||
" {"
|
" {"
|
||||||
@ -1363,19 +1448,25 @@ TEST_F(ManifestControllerTest, multiRequireUpdate)
|
|||||||
" \"relative-path\": \"\","
|
" \"relative-path\": \"\","
|
||||||
" \"checksum-type\": \"sha1sum\","
|
" \"checksum-type\": \"sha1sum\","
|
||||||
" \"checksum\": \"a58bbab8020b0e6d08568714b5e582a3adf9c807\","
|
" \"checksum\": \"a58bbab8020b0e6d08568714b5e582a3adf9c807\","
|
||||||
" \"package-type\": \"shared objects\","
|
" \"package-type\": \"service\","
|
||||||
" \"require\": [ \"pre_orchestration001\" ]"
|
" \"require\": [ \"pre_orchestration001\" ]"
|
||||||
" }"
|
" }"
|
||||||
" ]"
|
" ]"
|
||||||
"}";
|
"}";
|
||||||
|
|
||||||
|
map<string, Package> manifest_services;
|
||||||
|
load(manifest, manifest_services);
|
||||||
|
checkIfFileExistsCall(manifest_services.at("orchestration1"));
|
||||||
|
checkIfFileExistsCall(manifest_services.at("pre_orchestration001"));
|
||||||
|
checkIfFileExistsCall(manifest_services.at("pre_orchestration002"));
|
||||||
|
|
||||||
EXPECT_CALL(
|
EXPECT_CALL(
|
||||||
mock_downloader,
|
mock_downloader,
|
||||||
downloadFileFromURL(
|
downloadFileFromURL(
|
||||||
"http://172.23.92.135/my.sh",
|
"http://172.23.92.135/my.sh",
|
||||||
"a58bbab8020b0e6d08568714b5e582a3adf9c805",
|
"a58bbab8020b0e6d08568714b5e582a3adf9c805",
|
||||||
Package::ChecksumTypes::SHA1,
|
Package::ChecksumTypes::SHA1,
|
||||||
"orchestration"
|
"orchestration1"
|
||||||
)
|
)
|
||||||
).WillOnce(Return(string("/tmp/temp_file1")));
|
).WillOnce(Return(string("/tmp/temp_file1")));
|
||||||
EXPECT_CALL(
|
EXPECT_CALL(
|
||||||
@ -1396,13 +1487,9 @@ TEST_F(ManifestControllerTest, multiRequireUpdate)
|
|||||||
"pre_orchestration002"
|
"pre_orchestration002"
|
||||||
)
|
)
|
||||||
).WillOnce(Return(string("/tmp/temp_file3")));
|
).WillOnce(Return(string("/tmp/temp_file3")));
|
||||||
EXPECT_CALL(mock_status, writeStatusToFile());
|
|
||||||
string temp_orc_file = "/etc/cp/packages/orchestration/orchestration_temp";
|
|
||||||
EXPECT_CALL(mock_package_handler, shouldInstallPackage(_, _)).WillRepeatedly(Return(true));
|
EXPECT_CALL(mock_package_handler, shouldInstallPackage(_, _)).WillRepeatedly(Return(true));
|
||||||
EXPECT_CALL(mock_package_handler, preInstallPackage(orch_service_name,
|
EXPECT_CALL(mock_package_handler, installPackage("orchestration1",
|
||||||
temp_orc_file)).WillOnce(Return(true));
|
"/tmp/temp_file1", _)).WillOnce(Return(true));
|
||||||
EXPECT_CALL(mock_package_handler, installPackage(orch_service_name,
|
|
||||||
temp_orc_file, _)).WillOnce(Return(true));
|
|
||||||
EXPECT_CALL(mock_package_handler, installPackage("pre_orchestration001",
|
EXPECT_CALL(mock_package_handler, installPackage("pre_orchestration001",
|
||||||
"/tmp/temp_file2", _)).WillOnce(Return(true));
|
"/tmp/temp_file2", _)).WillOnce(Return(true));
|
||||||
EXPECT_CALL(mock_package_handler, installPackage("pre_orchestration002",
|
EXPECT_CALL(mock_package_handler, installPackage("pre_orchestration002",
|
||||||
@ -1410,15 +1497,37 @@ TEST_F(ManifestControllerTest, multiRequireUpdate)
|
|||||||
EXPECT_CALL(mock_orchestration_tools,
|
EXPECT_CALL(mock_orchestration_tools,
|
||||||
loadPackagesFromJson(corrupted_file_list)).WillOnce(Return(corrupted_packages));
|
loadPackagesFromJson(corrupted_file_list)).WillOnce(Return(corrupted_packages));
|
||||||
|
|
||||||
|
EXPECT_CALL(mock_package_handler, preInstallPackage("orchestration1", "/tmp/temp_file1"))
|
||||||
|
.WillOnce(Return(true));
|
||||||
|
EXPECT_CALL(mock_package_handler, preInstallPackage("pre_orchestration001", "/tmp/temp_file2"))
|
||||||
|
.WillOnce(Return(true));
|
||||||
|
EXPECT_CALL(mock_package_handler, preInstallPackage("pre_orchestration002", "/tmp/temp_file3"))
|
||||||
|
.WillOnce(Return(true));
|
||||||
|
|
||||||
|
EXPECT_CALL(mock_package_handler, postInstallPackage("orchestration1", "/tmp/temp_file1"))
|
||||||
|
.WillOnce(Return(true));
|
||||||
|
EXPECT_CALL(mock_package_handler, postInstallPackage("pre_orchestration001", "/tmp/temp_file2"))
|
||||||
|
.WillOnce(Return(true));
|
||||||
|
EXPECT_CALL(mock_package_handler, postInstallPackage("pre_orchestration002", "/tmp/temp_file3"))
|
||||||
|
.WillOnce(Return(true));
|
||||||
|
|
||||||
|
EXPECT_CALL(mock_downloader, removeDownloadFile("orchestration1"));
|
||||||
|
EXPECT_CALL(mock_package_handler, updateSavedPackage("orchestration1", "/tmp/temp_file1"))
|
||||||
|
.WillOnce(Return(true));
|
||||||
|
EXPECT_CALL(mock_downloader, removeDownloadFile("pre_orchestration001"));
|
||||||
|
EXPECT_CALL(mock_package_handler, updateSavedPackage("pre_orchestration001", "/tmp/temp_file2"))
|
||||||
|
.WillOnce(Return(true));
|
||||||
|
EXPECT_CALL(mock_downloader, removeDownloadFile("pre_orchestration002"));
|
||||||
|
EXPECT_CALL(mock_package_handler, updateSavedPackage("pre_orchestration002", "/tmp/temp_file3"))
|
||||||
|
.WillOnce(Return(true));
|
||||||
|
|
||||||
load(manifest, new_services);
|
load(manifest, new_services);
|
||||||
|
|
||||||
EXPECT_CALL(mock_orchestration_tools, loadPackagesFromJson(file_name)).WillOnce(Return(new_services));
|
EXPECT_CALL(mock_orchestration_tools, loadPackagesFromJson(file_name)).WillOnce(Return(new_services));
|
||||||
EXPECT_CALL(mock_orchestration_tools, loadPackagesFromJson(manifest_file_path)).WillOnce(Return(old_services));
|
EXPECT_CALL(mock_orchestration_tools, loadPackagesFromJson(manifest_file_path)).WillOnce(Return(old_services));
|
||||||
string temp_manifest_path = manifest_file_path + temp_ext;
|
string temp_manifest_path = manifest_file_path + temp_ext;
|
||||||
EXPECT_CALL(mock_orchestration_tools, packagesToJsonFile(new_services, temp_manifest_path)).WillOnce(Return(true));
|
|
||||||
|
|
||||||
string path = packages_dir + "/" + orch_service_name + "/" +
|
string path = packages_dir + "/" + "orchestration1" + "/" + "orchestration1";
|
||||||
orch_service_name;
|
|
||||||
EXPECT_CALL(mock_orchestration_tools, doesFileExist(path)).Times(2).WillOnce(Return(false));
|
EXPECT_CALL(mock_orchestration_tools, doesFileExist(path)).Times(2).WillOnce(Return(false));
|
||||||
EXPECT_CALL(
|
EXPECT_CALL(
|
||||||
mock_orchestration_tools,
|
mock_orchestration_tools,
|
||||||
@ -1429,8 +1538,14 @@ TEST_F(ManifestControllerTest, multiRequireUpdate)
|
|||||||
doesFileExist("/etc/cp/packages/pre_orchestration002/pre_orchestration002")
|
doesFileExist("/etc/cp/packages/pre_orchestration002/pre_orchestration002")
|
||||||
).Times(2).WillOnce(Return(false));
|
).Times(2).WillOnce(Return(false));
|
||||||
|
|
||||||
EXPECT_CALL(mock_orchestration_tools, copyFile("/tmp/temp_file1", path +
|
EXPECT_CALL(mock_orchestration_tools, doesFileExist("/etc/cp/conf/manifest.json"))
|
||||||
temp_ext)).WillOnce(Return(true));
|
.WillOnce(Return(false));
|
||||||
|
EXPECT_CALL(mock_orchestration_tools, copyFile("new_manifest.json", "/etc/cp/conf/manifest.json"))
|
||||||
|
.WillOnce(Return(true));
|
||||||
|
EXPECT_CALL(mock_orchestration_tools, isNonEmptyFile("/etc/cp/conf/manifest.json"))
|
||||||
|
.WillOnce(Return(true));
|
||||||
|
EXPECT_CALL(mock_orchestration_tools, removeFile("new_manifest.json"))
|
||||||
|
.WillOnce(Return(true));
|
||||||
EXPECT_TRUE(i_manifest_controller->updateManifest(file_name));
|
EXPECT_TRUE(i_manifest_controller->updateManifest(file_name));
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -1476,6 +1591,10 @@ TEST_F(ManifestControllerTest, createNewManifestWithUninstallablePackage)
|
|||||||
" ]"
|
" ]"
|
||||||
"}";
|
"}";
|
||||||
|
|
||||||
|
map<string, Package> manifest_services;
|
||||||
|
load(manifest, manifest_services);
|
||||||
|
checkIfFileExistsCall(manifest_services.at("my"));
|
||||||
|
|
||||||
//mock_downloader
|
//mock_downloader
|
||||||
EXPECT_CALL(
|
EXPECT_CALL(
|
||||||
mock_downloader,
|
mock_downloader,
|
||||||
@ -1492,6 +1611,9 @@ TEST_F(ManifestControllerTest, createNewManifestWithUninstallablePackage)
|
|||||||
EXPECT_CALL(mock_package_handler, preInstallPackage("my", "/tmp/temp_file")).WillOnce(Return(true));
|
EXPECT_CALL(mock_package_handler, preInstallPackage("my", "/tmp/temp_file")).WillOnce(Return(true));
|
||||||
EXPECT_CALL(mock_package_handler, installPackage("my", "/tmp/temp_file", _)).WillOnce(Return(true));
|
EXPECT_CALL(mock_package_handler, installPackage("my", "/tmp/temp_file", _)).WillOnce(Return(true));
|
||||||
EXPECT_CALL(mock_package_handler, postInstallPackage("my", "/tmp/temp_file")).WillOnce(Return(true));
|
EXPECT_CALL(mock_package_handler, postInstallPackage("my", "/tmp/temp_file")).WillOnce(Return(true));
|
||||||
|
EXPECT_CALL(mock_downloader, removeDownloadFile("my"));
|
||||||
|
EXPECT_CALL(mock_downloader, removeDownloadFile("orchestration"));
|
||||||
|
EXPECT_CALL(mock_downloader, removeDownloadFile("waap"));
|
||||||
EXPECT_CALL(mock_package_handler, updateSavedPackage("my", "/tmp/temp_file")).WillOnce(Return(true));
|
EXPECT_CALL(mock_package_handler, updateSavedPackage("my", "/tmp/temp_file")).WillOnce(Return(true));
|
||||||
|
|
||||||
load(manifest, new_services);
|
load(manifest, new_services);
|
||||||
@ -1552,15 +1674,17 @@ TEST_F(ManifestControllerTest, updateUninstallPackage)
|
|||||||
EXPECT_CALL(mock_orchestration_tools,
|
EXPECT_CALL(mock_orchestration_tools,
|
||||||
loadPackagesFromJson(corrupted_file_list)).Times(2).WillRepeatedly(Return(corrupted_packages));
|
loadPackagesFromJson(corrupted_file_list)).Times(2).WillRepeatedly(Return(corrupted_packages));
|
||||||
|
|
||||||
EXPECT_CALL(mock_orchestration_tools, doesFileExist(manifest_file_path)).Times(2).WillRepeatedly(Return(true));
|
EXPECT_CALL(mock_orchestration_tools, doesFileExist(manifest_file_path)).WillOnce(Return(true));
|
||||||
EXPECT_CALL(mock_orchestration_tools,
|
EXPECT_CALL(mock_orchestration_tools,
|
||||||
copyFile(manifest_file_path, "/etc/cp/conf/manifest.json.bk")).Times(2).WillRepeatedly(Return(true));
|
copyFile(manifest_file_path, "/etc/cp/conf/manifest.json.bk")).WillOnce(Return(true));
|
||||||
EXPECT_CALL(mock_orchestration_tools, copyFile(file_name, manifest_file_path))
|
EXPECT_CALL(mock_orchestration_tools, copyFile(file_name, manifest_file_path)).WillOnce(Return(true));
|
||||||
|
EXPECT_CALL(mock_orchestration_tools, isNonEmptyFile(manifest_file_path)).WillOnce(Return(true));
|
||||||
|
EXPECT_CALL(mock_orchestration_tools, removeFile(file_name)).WillOnce(Return(true));
|
||||||
|
EXPECT_CALL(mock_orchestration_tools, doesFileExist("/etc/cp/packages/my/my"))
|
||||||
.Times(2).WillRepeatedly(Return(true));
|
.Times(2).WillRepeatedly(Return(true));
|
||||||
EXPECT_CALL(mock_orchestration_tools, isNonEmptyFile(manifest_file_path)).Times(2).WillRepeatedly(Return(true));
|
|
||||||
EXPECT_CALL(mock_orchestration_tools, removeFile(file_name)).Times(2).WillRepeatedly(Return(true));
|
|
||||||
EXPECT_CALL(mock_orchestration_tools, doesFileExist("/etc/cp/packages/my/my")).Times(2).WillOnce(Return(true));
|
|
||||||
string hostname = "hostname";
|
string hostname = "hostname";
|
||||||
|
EXPECT_CALL(mock_downloader, removeDownloadFile("orchestration"));
|
||||||
|
EXPECT_CALL(mock_downloader, removeDownloadFile("my"));
|
||||||
|
|
||||||
EXPECT_TRUE(i_manifest_controller->updateManifest(file_name));
|
EXPECT_TRUE(i_manifest_controller->updateManifest(file_name));
|
||||||
|
|
||||||
@ -1590,6 +1714,10 @@ TEST_F(ManifestControllerTest, updateUninstallPackage)
|
|||||||
" ]"
|
" ]"
|
||||||
"}";
|
"}";
|
||||||
|
|
||||||
|
map<string, Package> manifest_services;
|
||||||
|
load(manifest, manifest_services);
|
||||||
|
checkIfFileExistsCall(manifest_services.at("my"));
|
||||||
|
|
||||||
EXPECT_CALL(
|
EXPECT_CALL(
|
||||||
mock_downloader,
|
mock_downloader,
|
||||||
downloadFileFromURL(
|
downloadFileFromURL(
|
||||||
@ -1605,6 +1733,8 @@ TEST_F(ManifestControllerTest, updateUninstallPackage)
|
|||||||
EXPECT_CALL(mock_package_handler, preInstallPackage("my", "/tmp/temp_file")).WillOnce(Return(true));
|
EXPECT_CALL(mock_package_handler, preInstallPackage("my", "/tmp/temp_file")).WillOnce(Return(true));
|
||||||
EXPECT_CALL(mock_package_handler, installPackage("my", "/tmp/temp_file", _)).WillOnce(Return(true));
|
EXPECT_CALL(mock_package_handler, installPackage("my", "/tmp/temp_file", _)).WillOnce(Return(true));
|
||||||
EXPECT_CALL(mock_package_handler, postInstallPackage("my", "/tmp/temp_file")).WillOnce(Return(true));
|
EXPECT_CALL(mock_package_handler, postInstallPackage("my", "/tmp/temp_file")).WillOnce(Return(true));
|
||||||
|
EXPECT_CALL(mock_downloader, removeDownloadFile("orchestration"));
|
||||||
|
EXPECT_CALL(mock_downloader, removeDownloadFile("my"));
|
||||||
EXPECT_CALL(mock_package_handler, updateSavedPackage("my", "/tmp/temp_file")).WillOnce(Return(true));
|
EXPECT_CALL(mock_package_handler, updateSavedPackage("my", "/tmp/temp_file")).WillOnce(Return(true));
|
||||||
|
|
||||||
//mock_orchestration_tools
|
//mock_orchestration_tools
|
||||||
@ -1673,6 +1803,16 @@ public:
|
|||||||
archive_in(ret);
|
archive_in(ret);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
void checkIfFileExistsCall(const Package &package)
|
||||||
|
{
|
||||||
|
Maybe<string> checksum_validation(
|
||||||
|
genError("File /tmp/orchestration_downloads/" + package.getName() + ".download does not exist.")
|
||||||
|
);
|
||||||
|
EXPECT_CALL(
|
||||||
|
mock_downloader,
|
||||||
|
checkIfFileExists(package)).WillRepeatedly(Return(checksum_validation));
|
||||||
|
}
|
||||||
|
|
||||||
string manifest_file_path;
|
string manifest_file_path;
|
||||||
string corrupted_file_list;
|
string corrupted_file_list;
|
||||||
string temp_ext;
|
string temp_ext;
|
||||||
@ -1801,6 +1941,9 @@ TEST_F(ManifestControllerIgnorePakckgeTest, addAndUpdateIgnorePackage)
|
|||||||
EXPECT_CALL(mock_orchestration_tools, copyFile(file_name, manifest_file_path)).WillOnce(Return(true));
|
EXPECT_CALL(mock_orchestration_tools, copyFile(file_name, manifest_file_path)).WillOnce(Return(true));
|
||||||
EXPECT_CALL(mock_orchestration_tools, isNonEmptyFile(manifest_file_path)).WillOnce(Return(true));
|
EXPECT_CALL(mock_orchestration_tools, isNonEmptyFile(manifest_file_path)).WillOnce(Return(true));
|
||||||
EXPECT_CALL(mock_orchestration_tools, removeFile(file_name)).WillOnce(Return(true));
|
EXPECT_CALL(mock_orchestration_tools, removeFile(file_name)).WillOnce(Return(true));
|
||||||
|
EXPECT_CALL(mock_downloader, removeDownloadFile("orchestration"));
|
||||||
|
EXPECT_CALL(mock_downloader, removeDownloadFile("my"));
|
||||||
|
EXPECT_CALL(mock_downloader, removeDownloadFile("dummy_service"));
|
||||||
|
|
||||||
EXPECT_TRUE(i_manifest_controller->updateManifest(file_name));
|
EXPECT_TRUE(i_manifest_controller->updateManifest(file_name));
|
||||||
|
|
||||||
@ -1855,11 +1998,13 @@ TEST_F(ManifestControllerIgnorePakckgeTest, addAndUpdateIgnorePackage)
|
|||||||
EXPECT_CALL(mock_orchestration_tools, copyFile(file_name, manifest_file_path)).WillOnce(Return(true));
|
EXPECT_CALL(mock_orchestration_tools, copyFile(file_name, manifest_file_path)).WillOnce(Return(true));
|
||||||
EXPECT_CALL(mock_orchestration_tools, isNonEmptyFile(manifest_file_path)).WillOnce(Return(true));
|
EXPECT_CALL(mock_orchestration_tools, isNonEmptyFile(manifest_file_path)).WillOnce(Return(true));
|
||||||
EXPECT_CALL(mock_orchestration_tools, removeFile(file_name)).WillOnce(Return(true));
|
EXPECT_CALL(mock_orchestration_tools, removeFile(file_name)).WillOnce(Return(true));
|
||||||
|
EXPECT_CALL(mock_downloader, removeDownloadFile("orchestration"));
|
||||||
|
EXPECT_CALL(mock_downloader, removeDownloadFile("my"));
|
||||||
|
EXPECT_CALL(mock_downloader, removeDownloadFile("dummy_service"));
|
||||||
|
|
||||||
EXPECT_TRUE(i_manifest_controller->updateManifest(file_name));
|
EXPECT_TRUE(i_manifest_controller->updateManifest(file_name));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
TEST_F(ManifestControllerIgnorePakckgeTest, addIgnorePackageAndUpdateNormal)
|
TEST_F(ManifestControllerIgnorePakckgeTest, addIgnorePackageAndUpdateNormal)
|
||||||
{
|
{
|
||||||
init();
|
init();
|
||||||
@ -1916,6 +2061,9 @@ TEST_F(ManifestControllerIgnorePakckgeTest, addIgnorePackageAndUpdateNormal)
|
|||||||
EXPECT_CALL(mock_orchestration_tools, copyFile(file_name, manifest_file_path)).WillOnce(Return(true));
|
EXPECT_CALL(mock_orchestration_tools, copyFile(file_name, manifest_file_path)).WillOnce(Return(true));
|
||||||
EXPECT_CALL(mock_orchestration_tools, isNonEmptyFile(manifest_file_path)).WillOnce(Return(true));
|
EXPECT_CALL(mock_orchestration_tools, isNonEmptyFile(manifest_file_path)).WillOnce(Return(true));
|
||||||
EXPECT_CALL(mock_orchestration_tools, removeFile(file_name)).WillOnce(Return(true));
|
EXPECT_CALL(mock_orchestration_tools, removeFile(file_name)).WillOnce(Return(true));
|
||||||
|
EXPECT_CALL(mock_downloader, removeDownloadFile("orchestration"));
|
||||||
|
EXPECT_CALL(mock_downloader, removeDownloadFile("my"));
|
||||||
|
EXPECT_CALL(mock_downloader, removeDownloadFile("dummy_service"));
|
||||||
|
|
||||||
EXPECT_TRUE(i_manifest_controller->updateManifest(file_name));
|
EXPECT_TRUE(i_manifest_controller->updateManifest(file_name));
|
||||||
|
|
||||||
@ -1958,6 +2106,7 @@ TEST_F(ManifestControllerIgnorePakckgeTest, addIgnorePackageAndUpdateNormal)
|
|||||||
|
|
||||||
//mock_orchestration_tools
|
//mock_orchestration_tools
|
||||||
load(manifest, new_services);
|
load(manifest, new_services);
|
||||||
|
checkIfFileExistsCall(new_services.at("my"));
|
||||||
|
|
||||||
//mock_downloader
|
//mock_downloader
|
||||||
EXPECT_CALL(
|
EXPECT_CALL(
|
||||||
@ -1975,6 +2124,9 @@ TEST_F(ManifestControllerIgnorePakckgeTest, addIgnorePackageAndUpdateNormal)
|
|||||||
EXPECT_CALL(mock_package_handler, preInstallPackage("my", "/tmp/temp_file")).WillOnce(Return(true));
|
EXPECT_CALL(mock_package_handler, preInstallPackage("my", "/tmp/temp_file")).WillOnce(Return(true));
|
||||||
EXPECT_CALL(mock_package_handler, installPackage("my", "/tmp/temp_file", _)).WillOnce(Return(true));
|
EXPECT_CALL(mock_package_handler, installPackage("my", "/tmp/temp_file", _)).WillOnce(Return(true));
|
||||||
EXPECT_CALL(mock_package_handler, postInstallPackage("my", "/tmp/temp_file")).WillOnce(Return(true));
|
EXPECT_CALL(mock_package_handler, postInstallPackage("my", "/tmp/temp_file")).WillOnce(Return(true));
|
||||||
|
EXPECT_CALL(mock_downloader, removeDownloadFile("orchestration"));
|
||||||
|
EXPECT_CALL(mock_downloader, removeDownloadFile("my"));
|
||||||
|
EXPECT_CALL(mock_downloader, removeDownloadFile("dummy_service"));
|
||||||
EXPECT_CALL(mock_package_handler, updateSavedPackage("my", "/tmp/temp_file")).WillOnce(Return(true));
|
EXPECT_CALL(mock_package_handler, updateSavedPackage("my", "/tmp/temp_file")).WillOnce(Return(true));
|
||||||
|
|
||||||
load(manifest, new_services);
|
load(manifest, new_services);
|
||||||
@ -2050,6 +2202,9 @@ TEST_F(ManifestControllerIgnorePakckgeTest, removeIgnoredPackage)
|
|||||||
EXPECT_CALL(mock_orchestration_tools, copyFile(file_name, manifest_file_path)).WillOnce(Return(true));
|
EXPECT_CALL(mock_orchestration_tools, copyFile(file_name, manifest_file_path)).WillOnce(Return(true));
|
||||||
EXPECT_CALL(mock_orchestration_tools, isNonEmptyFile(manifest_file_path)).WillOnce(Return(true));
|
EXPECT_CALL(mock_orchestration_tools, isNonEmptyFile(manifest_file_path)).WillOnce(Return(true));
|
||||||
EXPECT_CALL(mock_orchestration_tools, removeFile(file_name)).WillOnce(Return(true));
|
EXPECT_CALL(mock_orchestration_tools, removeFile(file_name)).WillOnce(Return(true));
|
||||||
|
EXPECT_CALL(mock_downloader, removeDownloadFile("orchestration"));
|
||||||
|
EXPECT_CALL(mock_downloader, removeDownloadFile("my"));
|
||||||
|
EXPECT_CALL(mock_downloader, removeDownloadFile("dummy_service"));
|
||||||
|
|
||||||
EXPECT_TRUE(i_manifest_controller->updateManifest(file_name));
|
EXPECT_TRUE(i_manifest_controller->updateManifest(file_name));
|
||||||
|
|
||||||
@ -2094,6 +2249,8 @@ TEST_F(ManifestControllerIgnorePakckgeTest, removeIgnoredPackage)
|
|||||||
EXPECT_CALL(mock_orchestration_tools, copyFile(file_name, manifest_file_path)).WillOnce(Return(true));
|
EXPECT_CALL(mock_orchestration_tools, copyFile(file_name, manifest_file_path)).WillOnce(Return(true));
|
||||||
EXPECT_CALL(mock_orchestration_tools, isNonEmptyFile(manifest_file_path)).WillOnce(Return(true));
|
EXPECT_CALL(mock_orchestration_tools, isNonEmptyFile(manifest_file_path)).WillOnce(Return(true));
|
||||||
EXPECT_CALL(mock_orchestration_tools, removeFile(file_name)).WillOnce(Return(true));
|
EXPECT_CALL(mock_orchestration_tools, removeFile(file_name)).WillOnce(Return(true));
|
||||||
|
EXPECT_CALL(mock_downloader, removeDownloadFile("orchestration"));
|
||||||
|
EXPECT_CALL(mock_downloader, removeDownloadFile("my"));
|
||||||
|
|
||||||
EXPECT_TRUE(i_manifest_controller->updateManifest(file_name));
|
EXPECT_TRUE(i_manifest_controller->updateManifest(file_name));
|
||||||
}
|
}
|
||||||
@ -2147,6 +2304,8 @@ TEST_F(ManifestControllerIgnorePakckgeTest, freezeIgnoredPackage)
|
|||||||
EXPECT_CALL(mock_orchestration_tools, copyFile(file_name, manifest_file_path)).WillOnce(Return(true));
|
EXPECT_CALL(mock_orchestration_tools, copyFile(file_name, manifest_file_path)).WillOnce(Return(true));
|
||||||
EXPECT_CALL(mock_orchestration_tools, isNonEmptyFile(manifest_file_path)).WillOnce(Return(true));
|
EXPECT_CALL(mock_orchestration_tools, isNonEmptyFile(manifest_file_path)).WillOnce(Return(true));
|
||||||
EXPECT_CALL(mock_orchestration_tools, removeFile(file_name)).WillOnce(Return(true));
|
EXPECT_CALL(mock_orchestration_tools, removeFile(file_name)).WillOnce(Return(true));
|
||||||
|
EXPECT_CALL(mock_downloader, removeDownloadFile("orchestration"));
|
||||||
|
EXPECT_CALL(mock_downloader, removeDownloadFile("my"));
|
||||||
|
|
||||||
EXPECT_TRUE(i_manifest_controller->updateManifest(file_name));
|
EXPECT_TRUE(i_manifest_controller->updateManifest(file_name));
|
||||||
|
|
||||||
@ -2210,6 +2369,10 @@ TEST_F(ManifestControllerIgnorePakckgeTest, overrideIgnoredPackageFromProfileSet
" ]"
"}";

+map<string, Package> manifest_services;
+load(manifest, manifest_services);
+checkIfFileExistsCall(manifest_services.at("my"));
+
//mock_downloader
EXPECT_CALL(
mock_downloader,

@ -2226,6 +2389,8 @@ TEST_F(ManifestControllerIgnorePakckgeTest, overrideIgnoredPackageFromProfileSet
EXPECT_CALL(mock_package_handler, preInstallPackage("my", "/tmp/temp_file")).WillOnce(Return(true));
EXPECT_CALL(mock_package_handler, installPackage("my", "/tmp/temp_file", _)).WillOnce(Return(true));
EXPECT_CALL(mock_package_handler, postInstallPackage("my", "/tmp/temp_file")).WillOnce(Return(true));
+EXPECT_CALL(mock_downloader, removeDownloadFile("my"));
+EXPECT_CALL(mock_downloader, removeDownloadFile("orchestration"));
EXPECT_CALL(mock_package_handler, updateSavedPackage("my", "/tmp/temp_file")).WillOnce(Return(true));

load(manifest, new_services);
@ -2270,6 +2435,17 @@ public:

manifest_controller.init();
}
+
+void checkIfFileExistsCall(const Package &package)
+{
+Maybe<string> checksum_validation(
+genError("File /tmp/orchestration_downloads/" + package.getName() + ".download does not exist.")
+);
+EXPECT_CALL(
+mock_downloader,
+checkIfFileExists(package)).WillRepeatedly(Return(checksum_validation));
+}
+
::Environment env;
ConfigComponent config;

@ -2335,6 +2511,9 @@ TEST_F(ManifestDownloadTest, download_relative_path)
EXPECT_CALL(mock_orchestration_tools, loadPackagesFromJson("/etc/cp/conf/corrupted_packages.json"))
.WillOnce(Return(corrupted_packages));
EXPECT_CALL(agent_details, getFogDomain()).WillOnce(Return(fog_domain));
+
+checkIfFileExistsCall(new_packages.at("orchestration"));
+
EXPECT_CALL(
mock_downloader,
downloadFileFromURL(

@ -2413,6 +2592,8 @@ TEST_F(ManifestDownloadTest, download_relative_path_no_fog_domain)
string not_error;
EXPECT_CALL(mock_status, getManifestError()).WillOnce(ReturnRef(not_error));

+checkIfFileExistsCall(new_packages.at("orchestration"));
+
EXPECT_CALL(
mock_downloader,
downloadFileFromURL(
@ -13,6 +13,8 @@

#include "manifest_diff_calculator.h"

+#include <algorithm>
+
#include "debug.h"
#include "config.h"

@ -59,6 +61,8 @@ ManifestDiffCalculator::filterUntrackedPackages(
return packages_to_remove;
}

+// LCOV_EXCL_START Reason: temp disabling corrupted packages mechanism
+
// If one of the new packages is already known as corrupted, new_packages map is
// updated accordingly.
// Otherwise, corrupted_packages is updated and old corrupted package is deleted.
@ -102,38 +106,71 @@ ManifestDiffCalculator::filterCorruptedPackages(
}
return no_corrupted_package_exist;
}
+// LCOV_EXCL_STOP

-// This function build the installation queue recursively and return true if succeeded, false otherwise
-// At the beginning, installation_queue is empty and will be filled according package dependences
-bool
-ManifestDiffCalculator::buildInstallationQueue(
-const Package &updated_package,
+Maybe<void>
+ManifestDiffCalculator::buildRecInstallationQueue(
+const Package &package,
vector<Package> &installation_queue,
const map<string, Package> &current_packages,
const map<string, Package> &new_packages)
{
-vector<string> requires = updated_package.getRequire();
+const vector<string> &requires = package.getRequire();

-for (size_t i = 0; i < requires.size(); i++) {
-auto installed_package = current_packages.find(requires[i]);
-auto new_package = new_packages.find(requires[i]);
+for (const auto &require : requires) {
+auto installed_package = current_packages.find(require);
+auto new_package = new_packages.find(require);

if (installed_package == current_packages.end() ||
(new_package != new_packages.end() && *installed_package != *new_package)) {
-if(!buildInstallationQueue(new_package->second,
-installation_queue,
-current_packages,
-new_packages)) {
-return false;
-}
+auto rec_res = buildRecInstallationQueue(
+new_package->second,
+installation_queue,
+current_packages,
+new_packages
+);
+if (!rec_res.ok()) return rec_res.passErr();
} else if (installed_package != current_packages.end()) {
-dbgDebug(D_ORCHESTRATOR) << "Package is already installed. Package: " << installed_package->first;
+dbgDebug(D_ORCHESTRATOR) << "Package is already in the queue. Package: " << installed_package->first;
} else if (new_package == new_packages.end()) {
-dbgWarning(D_ORCHESTRATOR) << "One of the requested dependencies is corrupted or doesn't exist."
-<< " Package: "<< requires[i];
-return false;
+return genError(
+"One of the requested dependencies is corrupted or doesn't exist. Package: " + require
+);
}
}
-installation_queue.push_back(updated_package);
-return true;
+if (find(installation_queue.begin(), installation_queue.end(), package) == installation_queue.end()) {
+installation_queue.push_back(package);
+}
+return Maybe<void>();
+}
+
+// This function build the installation queue recursively and return true if succeeded, false otherwise
+// At the beginning, installation_queue is empty and will be filled according package dependences
+Maybe<vector<Package>>
+ManifestDiffCalculator::buildInstallationQueue(
+const map<string, Package> &current_packages,
+const map<string, Package> &new_packages)
+{
+vector<Package> installation_queue;
+installation_queue.reserve(new_packages.size());
+auto orchestration_it = new_packages.find("orchestration");
+if (orchestration_it != new_packages.end()) {
+installation_queue.push_back(orchestration_it->second);
+}
+
+auto wlp_standalone_it = new_packages.find("wlpStandalone");
+if (wlp_standalone_it != new_packages.end()){
+installation_queue.push_back(wlp_standalone_it->second);
+}
+
+for (auto &package_pair : new_packages) {
+auto build_queue_res = buildRecInstallationQueue(
+package_pair.second,
+installation_queue,
+current_packages,
+new_packages
+);
+if (!build_queue_res.ok()) return build_queue_res.passErr();
+}
+return installation_queue;
}
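Note: the hunk above splits queue construction in two. buildRecInstallationQueue recurses over a package's requirements and returns Maybe<void>, while buildInstallationQueue seeds the queue with "orchestration" and "wlpStandalone" first and returns Maybe<vector<Package>>. A minimal caller sketch, assuming only the interfaces shown in this hunk; the surrounding orchestration wiring and the object and variable names are illustrative, not taken from this change:

    // Sketch only: current_packages and new_packages are assumed to be map<string, Package>.
    ManifestDiffCalculator diff_calculator;
    auto queue_res = diff_calculator.buildInstallationQueue(current_packages, new_packages);
    if (!queue_res.ok()) {
        dbgWarning(D_ORCHESTRATOR) << "Failed to build installation queue: " << queue_res.getErr();
        return false;
    }
    const vector<Package> &installation_queue = queue_res.unpack();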
@ -13,6 +13,8 @@

#include "manifest_handler.h"

+#include <algorithm>
+
#include "debug.h"
#include "config.h"
#include "agent_details.h"

@ -57,6 +59,10 @@ ManifestHandler::downloadPackage(const Package &package, bool is_clean_installat
fog_domain = Singleton::Consume<I_AgentDetails>::by<ManifestHandler>()->getFogDomain();
}

+auto orchestration_downloader = Singleton::Consume<I_Downloader>::by<ManifestHandler>();
+auto maybe_package_exists = orchestration_downloader->checkIfFileExists(package);
+if (maybe_package_exists.ok()) return maybe_package_exists;
+
if (!is_clean_installation) {
I_MainLoop *i_mainloop = Singleton::Consume<I_MainLoop>::by<ManifestHandler>();
auto pending_time_frame_seconds = getConfigurationWithDefault<int>(
@ -76,11 +82,10 @@ ManifestHandler::downloadPackage(const Package &package, bool is_clean_installat
dbgTrace(D_ORCHESTRATOR) << "Proceeding to package downloading. Package name " << package.getName();
}

-auto orchestration_downloader = Singleton::Consume<I_Downloader>::by<ManifestHandler>();
if (!package.getRelativeDownloadPath().empty() && fog_domain.ok()) {
string download_path =
"<JWT>https://" + fog_domain.unpack() + "/download" + package.getRelativeDownloadPath();
-package_download_file= orchestration_downloader->downloadFileFromURL(
+package_download_file = orchestration_downloader->downloadFileFromURL(
download_path,
package.getChecksum(),
package.getChecksumType(),

@ -99,15 +104,22 @@ ManifestHandler::downloadPackage(const Package &package, bool is_clean_installat
return package_download_file;
}

-bool
-ManifestHandler::downloadPackages(
-const vector<Package> &packages_to_download,
-vector<pair<Package, packageFilePath>> &downloaded_packages)
+Maybe<vector<pair<Package, packageFilePath>>>
+ManifestHandler::downloadPackages(const map<string, Package> &new_packages_to_download)
{
auto i_env = Singleton::Consume<I_Environment>::by<ManifestHandler>();
auto i_orch_tools = Singleton::Consume<I_OrchestrationTools>::by<ManifestHandler>();
auto span_scope = i_env->startNewSpanScope(Span::ContextType::CHILD_OF);
-for (auto &package : packages_to_download) {
+vector<pair<Package, packageFilePath>> downloaded_packages;
+for (auto &package_pair : new_packages_to_download) {
+const Package &package = package_pair.second;
+if (!package.isInstallable()) {
+dbgTrace(D_ORCHESTRATOR)
+<< "Skipping package download, package isn't installable. Package: "
+<< package.getName() << ". Reason: " << package.getErrorMessage();
+continue;
+}
dbgInfo(D_ORCHESTRATOR) << "Downloading package file." << " Package: " << package.getName();

string packages_dir = getConfigurationWithDefault<string>(
@ -170,133 +182,42 @@ ManifestHandler::downloadPackages(
install_error
);
}
-return false;
+return genError(
+"Failed to download installation package. Package: " +
+package.getName() +
+", Error: " + package_download_file.getErr());
}
}
-return true;
+return downloaded_packages;
}

bool
-ManifestHandler::installPackages(
-const vector<pair<Package, packageFilePath>> &downloaded_package_files,
+ManifestHandler::installPackage(
+const pair<Package, string> &package_downloaded_file,
map<packageFilePath, Package> &current_packages,
map<packageFilePath, Package> &corrupted_packages)
{
auto i_env = Singleton::Consume<I_Environment>::by<ManifestHandler>();
auto span_scope = i_env->startNewSpanScope(Span::ContextType::CHILD_OF);
-// Patch - reorder packages so that accessControlApp is installed before accessControlKernel
-vector<pair<Package, packageFilePath>> patched_downloaded_package_files;
-patched_downloaded_package_files.reserve(downloaded_package_files.size());
-int ac_kernel_package_idx = -1;
-int ac_app_package_idx = -1;
-int i = 0;
-for (auto &downloaded_package : downloaded_package_files) {
-if (downloaded_package.first.getName() == "accessControlApp") {
-ac_app_package_idx = i;
-} else if (downloaded_package.first.getName() == "accessControlKernel") {
-ac_kernel_package_idx = i;
-} else {
-patched_downloaded_package_files.push_back(downloaded_package);
-}
-i++;
-}
-if (ac_app_package_idx != -1) {
-patched_downloaded_package_files.push_back(downloaded_package_files.at(ac_app_package_idx));
-}
-if (ac_kernel_package_idx != -1) {
-patched_downloaded_package_files.push_back(downloaded_package_files.at(ac_kernel_package_idx));
-}

auto orchestration_status = Singleton::Consume<I_OrchestrationStatus>::by<ManifestHandler>();
-for (auto &downloaded_package : patched_downloaded_package_files) {
-auto package = downloaded_package.first;
-auto package_name = package.getName();
-auto package_handler_path = downloaded_package.second;

-dbgInfo(D_ORCHESTRATOR) << "Handling package installation. Package: " << package_name;
+auto &package = package_downloaded_file.first;
+auto &package_name = package.getName();
+auto &package_handler_path = package_downloaded_file.second;

-if (package_name.compare(orch_service_name) == 0) {
-orchestration_status->writeStatusToFile();
-bool self_update_status = selfUpdate(package, current_packages, package_handler_path);
-if (!self_update_status) {
-auto details = Singleton::Consume<I_AgentDetails>::by<ManifestHandler>();
-auto hostname = Singleton::Consume<I_DetailsResolver>::by<ManifestHandler>()->getHostname();
-string err_hostname = (hostname.ok() ? "on host '" + *hostname : "'" + details->getAgentId()) + "'";
-string install_error =
-"Warning: Agent/Gateway " +
-err_hostname +
-" software update failed. Agent is running previous software. Contact Check Point support.";
-if (orchestration_status->getManifestError().find("Gateway was not fully deployed") == string::npos) {
-orchestration_status->setFieldStatus(
-OrchestrationStatusFieldType::MANIFEST,
-OrchestrationStatusResult::FAILED,
-install_error
-);
-}
-}
+dbgInfo(D_ORCHESTRATOR) << "Handling package installation. Package: " << package_name;

-return self_update_status;
-}
-string packages_dir = getConfigurationWithDefault<string>(
-"/etc/cp/packages",
-"orchestration",
-"Packages directory"
-);
-
-string current_installation_file = packages_dir + "/" + package_name + "/" + package_name;
-auto orchestration_tools = Singleton::Consume<I_OrchestrationTools>::by<ManifestHandler>();
-bool is_clean_installation = !orchestration_tools->doesFileExist(current_installation_file);
-
-auto package_handler = Singleton::Consume<I_PackageHandler>::by<ManifestHandler>();
-if (!package_handler->shouldInstallPackage(package_name, package_handler_path)) {
-current_packages.insert(make_pair(package_name, package));
-dbgInfo(D_ORCHESTRATOR)
-<< "Skipping installation of new package with the same version as current. Package: "
-<< package_name;
-continue;
-}
-
-bool current_result = true;
-bool is_service = package.getType() == Package::PackageType::Service;
-if (is_service) {
-current_result = package_handler->preInstallPackage(package_name, package_handler_path);
-}
-
-current_result = current_result && package_handler->installPackage(
-package_name,
-package_handler_path,
-false
-);
-
-if (current_result && is_service) {
-current_result = package_handler->postInstallPackage(package_name, package_handler_path);
-}
-
-if (current_result && is_service) {
-current_result = package_handler->updateSavedPackage(package_name, package_handler_path);
-}
-
-if (!current_result) {
-auto agent_details = Singleton::Consume<I_AgentDetails>::by<ManifestHandler>();
+if (package_name.compare(orch_service_name) == 0) {
+orchestration_status->writeStatusToFile();
+bool self_update_status = selfUpdate(package, current_packages, package_handler_path);
+if (!self_update_status) {
+auto details = Singleton::Consume<I_AgentDetails>::by<ManifestHandler>();
auto hostname = Singleton::Consume<I_DetailsResolver>::by<ManifestHandler>()->getHostname();
-string err_hostname = (hostname.ok() ? "on host '" + *hostname : "'" +agent_details->getAgentId()) + "'";
-string install_error;
-if (is_clean_installation) {
-install_error =
-"Critical Error: Agent/Gateway was not fully deployed " +
-err_hostname +
-" and is not enforcing a security policy. Retry installation or contact Check Point support.";
-} else {
-install_error =
-"Warning: Agent/Gateway " +
-err_hostname +
-" software update failed. Agent is running previous software. Contact Check Point support.";
-}
-corrupted_packages.insert(make_pair(package_name, package));
-dbgWarning(D_ORCHESTRATOR) << "Failed to install package. Package: " << package_name;
-
-auto orchestration_status = Singleton::Consume<I_OrchestrationStatus>::by<ManifestHandler>();
+string err_hostname = (hostname.ok() ? "on host '" + *hostname : "'" + details->getAgentId()) + "'";
+string install_error =
+"Warning: Agent/Gateway " +
+err_hostname +
+" software update failed. Agent is running previous software. Contact Check Point support.";
if (orchestration_status->getManifestError().find("Gateway was not fully deployed") == string::npos) {
orchestration_status->setFieldStatus(
OrchestrationStatusFieldType::MANIFEST,
@ -304,11 +225,80 @@ ManifestHandler::installPackages(
install_error
);
}
-return false;
}
-current_packages.insert(make_pair(package_name, package));
+return self_update_status;
}

+string packages_dir = getConfigurationWithDefault<string>(
+"/etc/cp/packages",
+"orchestration",
+"Packages directory"
+);
+
+auto package_handler = Singleton::Consume<I_PackageHandler>::by<ManifestHandler>();
+if (!package_handler->shouldInstallPackage(package_name, package_handler_path)) {
+current_packages.insert(make_pair(package_name, package));
+dbgInfo(D_ORCHESTRATOR)
+<< "Skipping installation of new package with the same version as current. Package: "
+<< package_name;
+return true;
+}
+string current_installation_file = packages_dir + "/" + package_name + "/" + package_name;
+auto orchestration_tools = Singleton::Consume<I_OrchestrationTools>::by<ManifestHandler>();
+bool is_clean_installation = !orchestration_tools->doesFileExist(current_installation_file);
+
+
+bool current_result = true;
+bool is_service = package.getType() == Package::PackageType::Service;
+if (is_service) {
+current_result = package_handler->preInstallPackage(package_name, package_handler_path);
+}
+
+current_result = current_result && package_handler->installPackage(
+package_name,
+package_handler_path,
+false
+);
+
+if (current_result && is_service) {
+current_result = package_handler->postInstallPackage(package_name, package_handler_path);
+}
+
+if (current_result && is_service) {
+current_result = package_handler->updateSavedPackage(package_name, package_handler_path);
+}
+
+if (!current_result) {
+auto agent_details = Singleton::Consume<I_AgentDetails>::by<ManifestHandler>();
+auto hostname = Singleton::Consume<I_DetailsResolver>::by<ManifestHandler>()->getHostname();
+string err_hostname = (hostname.ok() ? "on host '" + *hostname : "'" +agent_details->getAgentId()) + "'";
+string install_error;
+if (is_clean_installation) {
+install_error =
+"Critical Error: Agent/Gateway was not fully deployed " +
+err_hostname +
+" and is not enforcing a security policy. Retry installation or contact Check Point support.";
+} else {
+install_error =
+"Warning: Agent/Gateway " +
+err_hostname +
+" software update failed. Agent is running previous software. Contact Check Point support.";
+}
+corrupted_packages.insert(make_pair(package_name, package));
+dbgWarning(D_ORCHESTRATOR) << "Failed to install package. Package: " << package_name;
+
+auto orchestration_status = Singleton::Consume<I_OrchestrationStatus>::by<ManifestHandler>();
+if (orchestration_status->getManifestError().find("Gateway was not fully deployed") == string::npos) {
+orchestration_status->setFieldStatus(
+OrchestrationStatusFieldType::MANIFEST,
+OrchestrationStatusResult::FAILED,
+install_error
+);
+}
+return false;
+}
+
+current_packages.insert(make_pair(package_name, package));
return true;
}

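Note: with downloadPackages now returning Maybe<vector<pair<Package, packageFilePath>>> and installPackages reduced to a per-package installPackage, a caller is expected to unpack the download result and install packages one by one. A hedged sketch of that flow; the driving code is not part of these hunks, so the object and variable names are illustrative:

    // Sketch only: manifest_handler, current_packages and corrupted_packages are assumed to exist in the caller.
    auto downloaded = manifest_handler.downloadPackages(new_packages_to_install);
    if (!downloaded.ok()) {
        dbgWarning(D_ORCHESTRATOR) << "Failed to download packages: " << downloaded.getErr();
        return false;
    }
    for (const auto &package_and_path : downloaded.unpack()) {
        // installPackage() reports success or failure for a single package.
        if (!manifest_handler.installPackage(package_and_path, current_packages, corrupted_packages)) {
            return false;
        }
    }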
@ -73,7 +73,7 @@ TEST_F(PackageTest, serializationFromString)
EXPECT_EQ("orchestration", package.getName());
EXPECT_EQ("c", package.getVersion());
EXPECT_EQ(Package::PackageType::Service, package.getType());
-EXPECT_TRUE(package.isInstallable().ok());
+EXPECT_TRUE(package.isInstallable());
}

TEST_F(PackageTest, writeAsJson)

@ -86,7 +86,8 @@ TEST_F(PackageTest, writeAsJson)
" \"name\": \"orchestration\",\n"
" \"checksum-type\": \"sha1sum\",\n"
" \"checksum\": \"8d4a5709673a05b380ba7d6567e28910019118f5\",\n"
-" \"package-type\": \"service\"\n"
+" \"package-type\": \"service\",\n"
+" \"status\": true\n"
"}";
Package package;
EXPECT_EQ(true, load(string_stream, package));

@ -99,7 +100,7 @@ TEST_F(PackageTest, writeAsJson)
EXPECT_EQ("orchestration", package.getName());
EXPECT_EQ("c", package.getVersion());
EXPECT_EQ(Package::PackageType::Service, package.getType());
-EXPECT_TRUE(package.isInstallable().ok());
+EXPECT_TRUE(package.isInstallable());

write("service.json", package);
string data = readFile("service.json");

@ -232,5 +233,6 @@ TEST_F(PackageTest, uninstallablePackage)
"}";
Package package;
EXPECT_TRUE(load(string_stream, package));
-EXPECT_THAT(package.isInstallable(), IsError("This security app isn't valid for this agent"));
+EXPECT_FALSE(package.isInstallable());
+EXPECT_EQ(package.getErrorMessage(), "This security app isn't valid for this agent");
}
@ -59,9 +59,9 @@ Package::serialize(JSONOutputArchive & out_archive) const
out_archive(make_nvp("require", require_packages));
}

-if (!installable.ok()) {
-out_archive(make_nvp("status", installable.ok()));
-out_archive(make_nvp("message", installable.getErr()));
+out_archive(make_nvp("status", installable));
+if (!installable) {
+out_archive(make_nvp("message", error_message));
}
}

@ -89,21 +89,18 @@ Package::serialize(JSONInputArchive & in_archive)
in_archive.setNextName(nullptr);
}

-bool is_installable = true;
try {
-in_archive(make_nvp("status", is_installable));
+in_archive(make_nvp("status", installable));
} catch (...) {
in_archive.setNextName(nullptr);
}

-if (!is_installable) {
-string error_message;
+if (!installable) {
try {
in_archive(make_nvp("message", error_message));
} catch (...) {
in_archive.setNextName(nullptr);
}
-installable = genError(error_message);
}

for (auto &character : name) {
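Note: after this change Package keeps installability as a plain flag plus a separate error_message instead of a Maybe, so the serialized JSON always carries "status" and adds "message" only when the package is not installable. An abbreviated, illustrative fragment of the resulting JSON, consistent with the updated writeAsJson and uninstallablePackage tests; the surrounding fields are omitted here:

    // Sketch only: abbreviated JSON fragments produced/consumed by Package::serialize after this change.
    const char *installable_fragment =
        "\"package-type\": \"service\",\n"
        "\"status\": true";
    const char *uninstallable_fragment =
        "\"package-type\": \"service\",\n"
        "\"status\": false,\n"
        "\"message\": \"This security app isn't valid for this agent\"";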
@ -29,7 +29,6 @@
#include "service_controller.h"
#include "manifest_controller.h"
#include "url_parser.h"
-#include "i_messaging.h"
#include "agent_details_report.h"
#include "maybe_res.h"
#include "customized_cereal_map.h"

@ -227,7 +226,6 @@ private:
Maybe<OrchestrationPolicy> maybe_policy = genError("Empty policy");
string policy_version = "";
auto orchestration_policy_file = getPolicyConfigPath("orchestration", Config::ConfigFileType::Policy);
-
auto orchestration_tools = Singleton::Consume<I_OrchestrationTools>::by<OrchestrationComp>();
if (orchestration_tools->doesFileExist(orchestration_policy_file)) {
maybe_policy = loadOrchestrationPolicy();

@ -296,7 +294,10 @@ private:
}
}

-if (declarative) Singleton::Consume<I_DeclarativePolicy>::from<DeclarativePolicyUtils>()->turnOnApplyPolicyFlag();
+if (declarative) {
+Singleton::Consume<I_DeclarativePolicy>::from<DeclarativePolicyUtils>()->turnOnApplyPolicyFlag();
+}

return authentication_res;
}

@ -769,13 +770,11 @@ private:

LogRest policy_update_message_client_rest(policy_update_message);

-Singleton::Consume<I_Messaging>::by<OrchestrationComp>()->sendObjectWithPersistence(
-policy_update_message_client_rest,
-I_Messaging::Method::POST,
+Singleton::Consume<I_Messaging>::by<OrchestrationComp>()->sendAsyncMessage(
+HTTPMethod::POST,
"/api/v1/agents/events",
-"",
-true,
-MessageTypeTag::REPORT
+policy_update_message_client_rest,
+MessageCategory::LOG
);
},
"Send policy update report"
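Note: this hunk migrates the report transport from sendObjectWithPersistence to the newer sendAsyncMessage, which takes an HTTP method, a path, the serialized payload, and a MessageCategory. A general sketch of the new call shape, mirroring the change above; the payload name is illustrative:

    // Sketch only: shape of the new async messaging call used for agent event reports.
    LogRest report_rest(some_report);
    Singleton::Consume<I_Messaging>::by<OrchestrationComp>()->sendAsyncMessage(
        HTTPMethod::POST,          // replaces I_Messaging::Method::POST
        "/api/v1/agents/events",   // same resource path as before
        report_rest,               // payload now follows the path
        MessageCategory::LOG       // replaces the persistence flag and MessageTypeTag::REPORT
    );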
@ -1471,8 +1470,8 @@ private:
agent_data_report << AgentReportFieldWithLabel("isGwNotVsx", "true");
}

-if (i_details_resolver->isVersionEqualOrAboveR8110()) {
-agent_data_report << AgentReportFieldWithLabel("isVersionEqualOrAboveR8110", "true");
+if (i_details_resolver->isVersionAboveR8110()) {
+agent_data_report << AgentReportFieldWithLabel("isVersionAboveR8110", "true");
}

auto i_agent_details = Singleton::Consume<I_AgentDetails>::by<OrchestrationComp>();
@ -1528,25 +1527,45 @@ private:
encryptToFile(data3, data_path + data6_file_name);
}

+int
+calcSleepInterval(int sleep_interval)
+{
+failure_count++;
+int failure_multiplier = 1;
+if (failure_count >= 10) {
+failure_multiplier = 10;
+} else if (failure_count >= 3) {
+failure_multiplier = 2;
+}
+return sleep_interval * failure_multiplier;
+}
+
void
run()
{
int sleep_interval = policy.getErrorSleepInterval();
Maybe<void> start_state(genError("Not running yet."));
while (!(start_state = start()).ok()) {
-dbgDebug(D_ORCHESTRATOR) << "Orchestration not started yet. Status: " << start_state.getErr();
health_check_status_listener.setStatus(
HealthCheckStatus::UNHEALTHY,
OrchestrationStatusFieldType::REGISTRATION,
start_state.getErr()
);
sleep_interval = getConfigurationWithDefault<int>(
-20,
+30,
"orchestration",
"Default sleep interval"
);
+sleep_interval = calcSleepInterval(sleep_interval);
+dbgWarning(D_ORCHESTRATOR)
+<< "Orchestration not started yet. Status: "
+<< start_state.getErr()
+<< " Next attempt to start the orchestration will be in: "
+<< sleep_interval
+<< " seconds";
Singleton::Consume<I_MainLoop>::by<OrchestrationComp>()->yield(seconds(sleep_interval));
}
+failure_count = 0;
+
Singleton::Consume<I_MainLoop>::by<OrchestrationComp>()->yield(chrono::seconds(1));

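Note: the new calcSleepInterval helper turns consecutive start or update failures into a backoff multiplier: x1 below 3 failures, x2 from the 3rd failure, and x10 from the 10th. A small self-contained illustration of the same arithmetic, assuming the new 30-second default interval:

    // Sketch only: free-standing version of the multiplier logic introduced above.
    #include <iostream>

    static int calc_sleep_interval(int failure_count, int sleep_interval)
    {
        int failure_multiplier = 1;
        if (failure_count >= 10) failure_multiplier = 10;
        else if (failure_count >= 3) failure_multiplier = 2;
        return sleep_interval * failure_multiplier;
    }

    int main()
    {
        for (int failures = 1; failures <= 12; ++failures) {
            // Prints 30s for 1-2 failures, 60s from the 3rd, 300s from the 10th.
            std::cout << failures << " failures -> " << calc_sleep_interval(failures, 30) << "s\n";
        }
    }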
@ -1589,24 +1608,14 @@ private:

bool is_new_success = false;
while (true) {
-static int failure_count = 0;
Singleton::Consume<I_Environment>::by<OrchestrationComp>()->startNewTrace(false);
if (shouldReportAgentDetailsMetadata()) {
reportAgentDetailsMetaData();
}
auto check_update_result = checkUpdate();
if (!check_update_result.ok()) {
-failure_count++;
is_new_success = false;
-sleep_interval = policy.getErrorSleepInterval();
-int failure_multiplier = 1;
-if (failure_count >= 10) {
-failure_count = 10;
-failure_multiplier = 10;
-} else if (failure_count >= 3) {
-failure_multiplier = 2;
-}
-sleep_interval *= failure_multiplier;
+sleep_interval = calcSleepInterval(policy.getErrorSleepInterval());
dbgWarning(D_ORCHESTRATOR)
<< "Failed during check update from Fog. Error: "
<< check_update_result.getErr()

@ -1690,13 +1699,11 @@ private:
if (email != "") registration_report << LogField("userDefinedId", email);

LogRest registration_report_rest(registration_report);
-Singleton::Consume<I_Messaging>::by<OrchestrationComp>()->sendObjectWithPersistence(
-registration_report_rest,
-I_Messaging::Method::POST,
+Singleton::Consume<I_Messaging>::by<OrchestrationComp>()->sendAsyncMessage(
+HTTPMethod::POST,
"/api/v1/agents/events",
-"",
-true,
-MessageTypeTag::REPORT
+registration_report_rest,
+MessageCategory::LOG
);
}

@ -1764,6 +1771,7 @@ private:
{
auto agent_details = Singleton::Consume<I_AgentDetails>::by<OrchestrationComp>();
return
+agent_details->getAccessToken().empty() ||
agent_details->getSSLFlag() != is_secure ||
!agent_details->getFogPort().ok() || agent_details->getFogPort().unpack() != port ||
!agent_details->getFogDomain().ok() || agent_details->getFogDomain().unpack() != fog;

@ -1772,6 +1780,7 @@ private:
bool
updateFogAddress(const string &fog_addr)
{
+dbgFlow(D_ORCHESTRATOR) << "Setting a fog address: " << fog_addr;
auto orch_status = Singleton::Consume<I_OrchestrationStatus>::by<OrchestrationComp>();
auto agent_details = Singleton::Consume<I_AgentDetails>::by<OrchestrationComp>();
auto orchestration_mode = getOrchestrationMode();

@ -1783,7 +1792,7 @@ private:
if (agent_details->writeAgentDetails()) {
dbgDebug(D_ORCHESTRATOR) << "Agent details was successfully saved";
} else {
-dbgWarning(D_COMMUNICATION) << "Failed to save agent details to a file";
+dbgWarning(D_ORCHESTRATOR) << "Failed to save agent details to a file";
}
return true;
}

@ -1803,16 +1812,12 @@ private:

auto message = Singleton::Consume<I_Messaging>::by<OrchestrationComp>();

-if (!shouldReconnectToFog(
-fog_domain,
-fog_port,
-encrypted_fog_connection
-)) {
+if (!shouldReconnectToFog(fog_domain, fog_port, encrypted_fog_connection)) {
dbgDebug(D_ORCHESTRATOR) << "Skipping reconnection to the Fog - Fog details did not change";
return true;
}

-if (message->setActiveFog(fog_domain, fog_port, encrypted_fog_connection, MessageTypeTag::GENERIC)) {
+if (message->setFogConnection(fog_domain, fog_port, encrypted_fog_connection, MessageCategory::GENERIC)) {
agent_details->setFogPort(fog_port);
agent_details->setFogDomain(fog_domain);
agent_details->setSSLFlag(encrypted_fog_connection);

@ -1820,7 +1825,7 @@ private:
if (agent_details->writeAgentDetails()) {
dbgDebug(D_ORCHESTRATOR) << "Agent details was successfully saved";
} else {
-dbgWarning(D_COMMUNICATION) << "Failed to save agent details to a file";
+dbgWarning(D_ORCHESTRATOR) << "Failed to save agent details to a file";
}

auto update_communication = Singleton::Consume<I_UpdateCommunication>::by<OrchestrationComp>();

@ -1894,7 +1899,11 @@ private:
auto result = i_shell_cmd->getExecOutput(openssl_dir_cmd);
if (result.ok()) {
string val_openssl_dir = result.unpack();
-if (val_openssl_dir.empty()) return;
+if (val_openssl_dir.empty()) {
+dbgWarning(D_ORCHESTRATOR)
+<< "Failed to load OpenSSL default certificate authority. Error: no OpenSSL directory found";
+return;
+}
if (val_openssl_dir.back() == '\n') val_openssl_dir.pop_back();
dbgTrace(D_ORCHESTRATOR)
<< "Adding OpenSSL default directory to agent details. Directory: "

@ -1953,6 +1962,7 @@ private:
};

const uint16_t default_fog_dport = 443;
+int failure_count = 0;
OrchestrationPolicy policy;
HealthCheckStatusListener health_check_status_listener;
HybridModeMetric hybrid_mode_metric;
@ -147,21 +147,21 @@ getNamespaceDataFromCluster(const string &path)
{
NamespaceData name_space;
string token = Singleton::Consume<I_EnvDetails>::by<OrchestrationTools>()->getToken();
-Flags<MessageConnConfig> conn_flags;
-conn_flags.setFlag(MessageConnConfig::SECURE_CONN);
-conn_flags.setFlag(MessageConnConfig::IGNORE_SSL_VALIDATION);
auto messaging = Singleton::Consume<I_Messaging>::by<OrchestrationTools>();
-bool res = messaging->sendObject(
-name_space,
-I_Messaging::Method::GET,
-"kubernetes.default.svc",
-443,
-conn_flags,
+MessageMetadata get_ns_md("kubernetes.default.svc", 443);
+get_ns_md.insertHeader("Authorization", "Bearer " + token);
+get_ns_md.insertHeader("Connection", "close");
+get_ns_md.setConnectioFlag(MessageConnectionConfig::IGNORE_SSL_VALIDATION);
+auto res = messaging->sendSyncMessage(
+HTTPMethod::GET,
path,
-"Authorization: Bearer " + token + "\nConnection: close"
+name_space,
+MessageCategory::GENERIC,
+get_ns_md
);

-if (res) return name_space;
+if (res.ok()) return name_space;

return genError(string("Was not able to get object form k8s cluser in path: " + path));
}
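Note: getNamespaceDataFromCluster now builds a MessageMetadata (host, port, headers, connection flags) and issues the request through sendSyncMessage instead of sendObject with raw header strings. A sketch of the same pattern for an arbitrary in-cluster GET, mirroring only what the hunk above shows; the path and object names are illustrative:

    // Sketch only: general shape of the new synchronous messaging call.
    MessageMetadata req_md("kubernetes.default.svc", 443);           // host and port
    req_md.insertHeader("Authorization", "Bearer " + token);         // headers are structured, not a raw string
    req_md.insertHeader("Connection", "close");
    req_md.setConnectioFlag(MessageConnectionConfig::IGNORE_SSL_VALIDATION);
    auto res = messaging->sendSyncMessage(
        HTTPMethod::GET,
        "/api/v1/namespaces/",     // illustrative path
        response_object,           // object filled from the response body
        MessageCategory::GENERIC,
        req_md
    );
    if (!res.ok()) {
        // handle the error, e.g. return genError(...)
    }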
@ -90,19 +90,15 @@ TEST_F(OrchestrationToolsTest, getClusterId)
string namespaces = getResource("k8s_namespaces.json");
EXPECT_CALL(
mock_messaging,
-sendMessage(
-true,
-"",
-I_Messaging::Method::GET,
-"kubernetes.default.svc",
-443,
-_,
+sendSyncMessage(
+HTTPMethod::GET,
"/api/v1/namespaces/",
-"Authorization: Bearer 123\nConnection: close",
+_,
_,
_
)
-).WillRepeatedly(Return(Maybe<string>(namespaces)));
+).WillOnce(Return(HTTPResponse(HTTPStatusCode::HTTP_OK, namespaces)));

i_orchestration_tools->getClusterId();
routine();
}

@ -110,7 +110,7 @@ public:
EXPECT_CALL(mock_details_resolver, isReverseProxy()).WillRepeatedly(Return(false));
EXPECT_CALL(mock_details_resolver, isKernelVersion3OrHigher()).WillRepeatedly(Return(false));
EXPECT_CALL(mock_details_resolver, isGwNotVsx()).WillRepeatedly(Return(false));
-EXPECT_CALL(mock_details_resolver, isVersionEqualOrAboveR8110()).WillRepeatedly(Return(false));
+EXPECT_CALL(mock_details_resolver, isVersionAboveR8110()).WillRepeatedly(Return(false));
EXPECT_CALL(mock_details_resolver, parseNginxMetadata()).WillRepeatedly(Return(no_nginx));
EXPECT_CALL(mock_details_resolver, getAgentVersion()).WillRepeatedly(Return("1.1.1"));

@ -250,7 +250,8 @@ TEST_F(OrchestrationMultitenancyTest, handle_virtual_resource)

EXPECT_CALL(mock_orchestration_tools, doesFileExist(orchestration_policy_file_path)).WillOnce(Return(true));
EXPECT_CALL(mock_orchestration_tools, readFile(orchestration_policy_file_path)).WillOnce(Return(response));
-EXPECT_CALL(mock_message, setActiveFog(host_address, 443, true, MessageTypeTag::GENERIC)).WillOnce(Return(true));
+EXPECT_CALL(mock_message, setFogConnection(host_address, 443, true, MessageCategory::GENERIC))
+.WillOnce(Return(true));
EXPECT_CALL(mock_update_communication, setAddressExtenesion(""));
EXPECT_CALL(mock_update_communication, authenticateAgent()).WillOnce(Return(Maybe<void>()));
EXPECT_CALL(mock_manifest_controller, loadAfterSelfUpdate()).WillOnce(Return(false));

@ -85,15 +85,13 @@ public:
mockRestCall(RestAction::SET, "agent-uninstall", _)
).WillOnce(WithArg<2>(Invoke(this, &OrchestrationTest::restHandlerAgentUninstall)));

-EXPECT_CALL(mock_message, mockSendPersistentMessage(
-false,
-_,
-I_Messaging::Method::POST,
+EXPECT_CALL(mock_message, sendAsyncMessage(
+HTTPMethod::POST,
"/api/v1/agents/events",
_,
-_,
-MessageTypeTag::REPORT
-)).WillRepeatedly(DoAll(SaveArg<1>(&message_body), Return(Maybe<string>(string("")))));
+MessageCategory::LOG,
+_
+)).WillRepeatedly(SaveArg<2>(&message_body));

doEncrypt();
EXPECT_CALL(mock_orchestration_tools, loadTenantsFromDir(_)).Times(1);

@ -137,7 +135,7 @@ public:
EXPECT_CALL(mock_details_resolver, isReverseProxy()).WillRepeatedly(Return(false));
EXPECT_CALL(mock_details_resolver, isKernelVersion3OrHigher()).WillRepeatedly(Return(false));
EXPECT_CALL(mock_details_resolver, isGwNotVsx()).WillRepeatedly(Return(false));
-EXPECT_CALL(mock_details_resolver, isVersionEqualOrAboveR8110()).WillRepeatedly(Return(false));
+EXPECT_CALL(mock_details_resolver, isVersionAboveR8110()).WillRepeatedly(Return(false));
EXPECT_CALL(mock_details_resolver, parseNginxMetadata()).WillRepeatedly(Return(no_nginx));
EXPECT_CALL(mock_details_resolver, getAgentVersion()).WillRepeatedly(Return("1.1.1"));
EXPECT_CALL(mock_details_resolver, getHostname()).WillRepeatedly(Return(string("hostname")));

@ -505,7 +503,7 @@ TEST_F(OrchestrationTest, check_sending_registration_data)
EXPECT_CALL(mock_orchestration_tools, readFile(_)).WillOnce(Return(response));
EXPECT_CALL(mock_service_controller, updateServiceConfiguration(_, _, _, _, _, _))
.WillOnce(Return(Maybe<void>()));
-EXPECT_CALL(mock_message, setActiveFog(_, _, _, _)).WillOnce(Return(true));
+EXPECT_CALL(mock_message, setFogConnection(_, _, _, _)).WillOnce(Return(true));
EXPECT_CALL(mock_orchestration_tools, calculateChecksum(_, _)).WillRepeatedly(Return(string()));
EXPECT_CALL(mock_service_controller, getPolicyVersion()).WillRepeatedly(ReturnRef(first_policy_version));
EXPECT_CALL(mock_shell_cmd, getExecOutput(_, _, _)).WillRepeatedly(Return(string()));

@ -630,7 +628,7 @@ TEST_F(OrchestrationTest, orchestrationPolicyUpdatRollback)
.WillOnce(Return(policy_response));
EXPECT_CALL(mock_orchestration_tools, copyFile(new_policy_path, policy_file_path + ".last"))
.WillOnce(Return(true));
-EXPECT_CALL(mock_message, setActiveFog(host_address, 443, true, MessageTypeTag::GENERIC))
+EXPECT_CALL(mock_message, setFogConnection(host_address, 443, true, MessageCategory::GENERIC))
.Times(2).WillRepeatedly(Return(true));
EXPECT_CALL(mock_update_communication, setAddressExtenesion("")).Times(2);
EXPECT_CALL(mock_update_communication, authenticateAgent()).WillOnce(Return(Maybe<void>()));

@ -699,7 +697,7 @@ TEST_F(OrchestrationTest, orchestrationPolicyUpdatRollback)

EXPECT_CALL(
mock_message,
-setActiveFog(new_host_address, 443, true, MessageTypeTag::GENERIC)
+setFogConnection(new_host_address, 443, true, MessageCategory::GENERIC)
).WillOnce(Return(true));
EXPECT_CALL(mock_update_communication, setAddressExtenesion("/test"));
EXPECT_CALL(mock_status, setLastUpdateAttempt());

@ -829,7 +827,8 @@ TEST_F(OrchestrationTest, orchestrationPolicyUpdate)
.WillOnce(Return(new_policy_response));
EXPECT_CALL(mock_orchestration_tools, copyFile(new_policy_path, policy_file_path + ".last"))
.WillOnce(Return(true));
-EXPECT_CALL(mock_message, setActiveFog(host_address, 443, true, MessageTypeTag::GENERIC)).WillOnce(Return(true));
+EXPECT_CALL(mock_message, setFogConnection(host_address, 443, true, MessageCategory::GENERIC))
+.WillOnce(Return(true));
EXPECT_CALL(mock_update_communication, setAddressExtenesion(""));
EXPECT_CALL(mock_update_communication, authenticateAgent()).WillOnce(Return(Maybe<void>()));
expectDetailsResolver();

@ -892,7 +891,7 @@ TEST_F(OrchestrationTest, orchestrationPolicyUpdate)

EXPECT_CALL(
mock_message,
-setActiveFog(new_host_address, 443, true, MessageTypeTag::GENERIC)
+setFogConnection(new_host_address, 443, true, MessageCategory::GENERIC)
).WillOnce(Return(true));
EXPECT_CALL(mock_update_communication, setAddressExtenesion("/test"));
EXPECT_CALL(mock_status, setLastUpdateAttempt());

@ -989,7 +988,8 @@ TEST_F(OrchestrationTest, startOrchestrationPoliceWithFailures)
updateServiceConfiguration(policy_file_path, setting_file_path, expected_data_types, "", "", _)
).Times(2).WillRepeatedly(Return(Maybe<void>()));

-EXPECT_CALL(mock_message, setActiveFog(host_address, 443, true, MessageTypeTag::GENERIC)).WillOnce(Return(true));
+EXPECT_CALL(mock_message, setFogConnection(host_address, 443, true, MessageCategory::GENERIC))
+.WillOnce(Return(true));
EXPECT_CALL(mock_update_communication, setAddressExtenesion(""));
EXPECT_CALL(mock_update_communication, authenticateAgent()).WillOnce(Return(Maybe<void>()));
expectDetailsResolver();

@ -1117,7 +1117,8 @@ TEST_F(OrchestrationTest, loadOrchestrationPolicyFromBackup)
mock_orchestration_tools,
copyFile(orchestration_policy_file_path_bk, orchestration_policy_file_path)
).WillOnce(Return(true));
-EXPECT_CALL(mock_message, setActiveFog(host_address, 443, true, MessageTypeTag::GENERIC)).WillOnce(Return(true));
+EXPECT_CALL(mock_message, setFogConnection(host_address, 443, true, MessageCategory::GENERIC))
+.WillOnce(Return(true));
EXPECT_CALL(mock_update_communication, setAddressExtenesion(""));
EXPECT_CALL(mock_update_communication, authenticateAgent()).WillOnce(Return(Maybe<void>()));
expectDetailsResolver();

@ -1245,7 +1246,8 @@ TEST_F(OrchestrationTest, manifestUpdate)

EXPECT_CALL(mock_orchestration_tools, doesFileExist(orchestration_policy_file_path)).WillOnce(Return(true));
EXPECT_CALL(mock_orchestration_tools, readFile(orchestration_policy_file_path)).WillOnce(Return(response));
-EXPECT_CALL(mock_message, setActiveFog(host_address, 443, true, MessageTypeTag::GENERIC)).WillOnce(Return(true));
+EXPECT_CALL(mock_message, setFogConnection(host_address, 443, true, MessageCategory::GENERIC))
+.WillOnce(Return(true));
EXPECT_CALL(mock_update_communication, setAddressExtenesion(""));
EXPECT_CALL(mock_update_communication, authenticateAgent()).WillOnce(Return(Maybe<void>()));
expectDetailsResolver();

@ -1381,7 +1383,8 @@ TEST_F(OrchestrationTest, getBadPolicyUpdate)
EXPECT_CALL(mock_orchestration_tools, readFile(orchestration_policy_file_path)).WillOnce(Return(response));
EXPECT_CALL(mock_orchestration_tools, copyFile(new_policy_path, policy_file_path + ".last"))
.WillOnce(Return(true));
-EXPECT_CALL(mock_message, setActiveFog(host_address, 443, true, MessageTypeTag::GENERIC)).WillOnce(Return(true));
+EXPECT_CALL(mock_message, setFogConnection(host_address, 443, true, MessageCategory::GENERIC))
+.WillOnce(Return(true));
EXPECT_CALL(mock_update_communication, setAddressExtenesion(""));

EXPECT_CALL(mock_update_communication, authenticateAgent()).WillOnce(Return(Maybe<void>()));

@ -1527,7 +1530,8 @@ TEST_F(OrchestrationTest, failedDownloadSettings)

EXPECT_CALL(mock_orchestration_tools, doesFileExist(orchestration_policy_file_path)).WillOnce(Return(true));
EXPECT_CALL(mock_orchestration_tools, readFile(orchestration_policy_file_path)).WillOnce(Return(response));
-EXPECT_CALL(mock_message, setActiveFog(host_address, 443, true, MessageTypeTag::GENERIC)).WillOnce(Return(true));
+EXPECT_CALL(mock_message, setFogConnection(host_address, 443, true, MessageCategory::GENERIC))
+.WillOnce(Return(true));
EXPECT_CALL(mock_update_communication, setAddressExtenesion(""));

EXPECT_CALL(mock_update_communication, authenticateAgent()).WillOnce(Return(Maybe<void>()));
EXPECT_CALL(mock_update_communication, authenticateAgent()).WillOnce(Return(Maybe<void>()));
|
||||||
@ -1679,7 +1683,7 @@ TEST_P(OrchestrationTest, orchestrationFirstRun)
|
|||||||
EXPECT_CALL(mock_orchestration_tools, doesFileExist(orchestration_policy_file_path)).WillOnce(Return(false));
|
EXPECT_CALL(mock_orchestration_tools, doesFileExist(orchestration_policy_file_path)).WillOnce(Return(false));
|
||||||
EXPECT_CALL(mock_orchestration_tools, readFile(orchestration_policy_file_path))
|
EXPECT_CALL(mock_orchestration_tools, readFile(orchestration_policy_file_path))
|
||||||
.WillOnce(Return(response));
|
.WillOnce(Return(response));
|
||||||
EXPECT_CALL(mock_message, setActiveFog(host_address, 443, true, MessageTypeTag::GENERIC)).
|
EXPECT_CALL(mock_message, setFogConnection(host_address, 443, true, MessageCategory::GENERIC)).
|
||||||
Times(1).
|
Times(1).
|
||||||
WillRepeatedly(Return(true));
|
WillRepeatedly(Return(true));
|
||||||
EXPECT_CALL(mock_update_communication, setAddressExtenesion(""));
|
EXPECT_CALL(mock_update_communication, setAddressExtenesion(""));
|
||||||
@ -1929,7 +1933,8 @@ TEST_F(OrchestrationTest, dataUpdate)
|
|||||||
|
|
||||||
|
|
||||||
EXPECT_CALL(mock_orchestration_tools, doesFileExist(orchestration_policy_file_path)).WillOnce(Return(true));
|
EXPECT_CALL(mock_orchestration_tools, doesFileExist(orchestration_policy_file_path)).WillOnce(Return(true));
|
||||||
EXPECT_CALL(mock_message, setActiveFog(host_address, 443, true, MessageTypeTag::GENERIC)).WillOnce(Return(true));
|
EXPECT_CALL(mock_message, setFogConnection(host_address, 443, true, MessageCategory::GENERIC))
|
||||||
|
.WillOnce(Return(true));
|
||||||
EXPECT_CALL(mock_update_communication, setAddressExtenesion(""));
|
EXPECT_CALL(mock_update_communication, setAddressExtenesion(""));
|
||||||
EXPECT_CALL(mock_update_communication, authenticateAgent()).WillOnce(Return(Maybe<void>()));
|
EXPECT_CALL(mock_update_communication, authenticateAgent()).WillOnce(Return(Maybe<void>()));
|
||||||
EXPECT_CALL(mock_manifest_controller, loadAfterSelfUpdate()).WillOnce(Return(false));
|
EXPECT_CALL(mock_manifest_controller, loadAfterSelfUpdate()).WillOnce(Return(false));
|
||||||
|
@ -17,12 +17,13 @@
|
|||||||
#include <algorithm>
|
#include <algorithm>
|
||||||
#include <sstream>
|
#include <sstream>
|
||||||
#include <unistd.h>
|
#include <unistd.h>
|
||||||
|
#include <boost/uuid/uuid_generators.hpp>
|
||||||
|
#include <boost/uuid/uuid_io.hpp>
|
||||||
|
|
||||||
#include "config.h"
|
#include "config.h"
|
||||||
#include "debug.h"
|
#include "debug.h"
|
||||||
#include "rest.h"
|
#include "rest.h"
|
||||||
#include "connkey.h"
|
#include "connkey.h"
|
||||||
#include "i_messaging.h"
|
|
||||||
#include "common.h"
|
#include "common.h"
|
||||||
#include "log_generator.h"
|
#include "log_generator.h"
|
||||||
#include "i_orchestration_tools.h"
|
#include "i_orchestration_tools.h"
|
||||||
@ -165,7 +166,7 @@ ServiceDetails::isServiceActive() const
|
|||||||
bool is_registered = status.find("not-registered") == string::npos && status.find("registered") != string::npos;
|
bool is_registered = status.find("not-registered") == string::npos && status.find("registered") != string::npos;
|
||||||
bool is_running = status.find("not-running") == string::npos && status.find("running") != string::npos;
|
bool is_running = status.find("not-running") == string::npos && status.find("running") != string::npos;
|
||||||
|
|
||||||
dbgInfo(D_ORCHESTRATOR)
|
dbgTrace(D_ORCHESTRATOR)
|
||||||
<< "Successfully set service status. Service name: "
|
<< "Successfully set service status. Service name: "
|
||||||
<< service_name
|
<< service_name
|
||||||
<< ", Status: "
|
<< ", Status: "
|
||||||
@ -195,19 +196,25 @@ ServiceDetails::sendNewConfigurations(int configuration_id, const string &policy
|
|||||||
SendConfigurations new_config(configuration_id, policy_version);
|
SendConfigurations new_config(configuration_id, policy_version);
|
||||||
|
|
||||||
I_Messaging *messaging = Singleton::Consume<I_Messaging>::by<ServiceController>();
|
I_Messaging *messaging = Singleton::Consume<I_Messaging>::by<ServiceController>();
|
||||||
Flags<MessageConnConfig> conn_flags;
|
|
||||||
conn_flags.setFlag(MessageConnConfig::ONE_TIME_CONN);
|
MessageMetadata new_config_req_md("127.0.0.1", service_port);
|
||||||
bool res = messaging->sendObject(
|
new_config_req_md.setConnectioFlag(MessageConnectionConfig::ONE_TIME_CONN);
|
||||||
|
new_config_req_md.setConnectioFlag(MessageConnectionConfig::UNSECURE_CONN);
|
||||||
|
auto res = messaging->sendSyncMessage(
|
||||||
|
HTTPMethod::POST,
|
||||||
|
"/set-new-configuration",
|
||||||
new_config,
|
new_config,
|
||||||
I_Messaging::Method::POST,
|
MessageCategory::GENERIC,
|
||||||
"127.0.0.1",
|
new_config_req_md
|
||||||
service_port,
|
|
||||||
conn_flags,
|
|
||||||
"/set-new-configuration"
|
|
||||||
);
|
);
|
||||||
|
|
||||||
if (!res) {
|
if (!res.ok()) {
|
||||||
dbgDebug(D_ORCHESTRATOR) << "Service " << service_name << " didn't respond to new configuration request";
|
auto err = res.getErr();
|
||||||
|
dbgDebug(D_ORCHESTRATOR)
|
||||||
|
<< "Service: "
|
||||||
|
<< service_name
|
||||||
|
<< " didn't get new configuration. Error: "
|
||||||
|
<< err.getBody();
|
||||||
return ReconfStatus::FAILED;
|
return ReconfStatus::FAILED;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -322,7 +329,7 @@ private:
 Maybe<void> updateServiceConfigurationFile(
 const string &configuration_name,
 const string &configuration_file_path,
-const string &new_configuration_path);
+const string &new_configuration);
 
 ReconfStatus getUpdatedReconfStatus();
 Maybe<ServiceDetails> getServiceDetails(const string &service_name);
@@ -694,6 +701,26 @@ ServiceController::Impl::createDirectoryForChildTenant(
 return true;
 }
 
+static string
+getChecksum(const string &file_path)
+{
+auto orchestration_tools = Singleton::Consume<I_OrchestrationTools>::by<ServiceController>();
+Maybe<string> file_checksum = orchestration_tools->calculateChecksum(
+Package::ChecksumTypes::MD5,
+file_path
+);
+
+if (file_checksum.ok()) return file_checksum.unpack();
+
+string checksum = "unknown version";
+try {
+checksum = to_string(boost::uuids::random_generator()());
+} catch (const boost::uuids::entropy_error &e) {
+dbgDebug(D_ORCHESTRATOR) << "Couldn't generate random checksum";
+}
+return checksum;
+}
+
 Maybe<void>
 ServiceController::Impl::updateServiceConfiguration(
 const string &new_policy_path,
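Note on the getChecksum() helper added above: when the MD5 checksum of the new policy file cannot be computed, the code falls back to a random boost UUID so that services still receive a version token that changes on every update. A minimal, self-contained sketch of the same fallback idea, assuming only standard boost.uuid facilities; the function name and the generic exception handling here are illustrative, not part of the commit:

    #include <boost/uuid/uuid_generators.hpp>
    #include <boost/uuid/uuid_io.hpp>
    #include <exception>
    #include <string>

    // Sketch: return some always-available version token, preferring a real checksum.
    std::string
    versionTokenFor(const std::string &maybe_checksum)
    {
        if (!maybe_checksum.empty()) return maybe_checksum;          // real checksum available
        try {
            // random_generator may throw if no entropy source is available
            return boost::uuids::to_string(boost::uuids::random_generator()());
        } catch (const std::exception &) {
            return "unknown version";                                // same fallback string as the diff
        }
    }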
@@ -869,7 +896,8 @@ ServiceController::Impl::updateServiceConfiguration(
 
 // In a multi-tenant env, we send the signal to the services only on the last iteration
 if (!is_multi_tenant_env || last_iteration) {
-auto is_send_signal_for_services = sendSignalForServices(nano_services_to_update, version_value);
+auto is_send_signal_for_services =
+sendSignalForServices(nano_services_to_update, version_value + ',' + getChecksum(new_policy_path));
 was_policy_updated &= is_send_signal_for_services.ok();
 if (!is_send_signal_for_services.ok()) send_signal_for_services_err = is_send_signal_for_services.getErr();
 }
@@ -1003,21 +1031,20 @@
 ServiceController::Impl::updateServiceConfigurationFile(
 const string &configuration_name,
 const string &configuration_file_path,
-const string &new_configuration_path)
+const string &new_configuration)
 {
 
 dbgFlow(D_ORCHESTRATOR) << "Updating configuration. Config Name: " << configuration_name;
 
 if (orchestration_tools->doesFileExist(configuration_file_path)) {
 Maybe<string> old_configuration = orchestration_tools->readFile(configuration_file_path);
 if (old_configuration.ok()) {
-bool service_changed = old_configuration.unpack().compare(new_configuration_path) != 0;
+bool service_changed = old_configuration.unpack().compare(new_configuration) != 0;
 if (service_changed == false) {
 dbgDebug(D_ORCHESTRATOR) << "There is no update for policy file: " << configuration_file_path;
 return Maybe<void>();
 }
 dbgDebug(D_ORCHESTRATOR)
-<< "Starting to update " << configuration_file_path << " to " << new_configuration_path;
+<< "Starting to update " << configuration_file_path << " to " << new_configuration;
 string old_configuration_backup_path = configuration_file_path + getConfigurationWithDefault<string>(
 ".bk",
 "orchestration",
@@ -1045,7 +1072,7 @@ ServiceController::Impl::updateServiceConfigurationFile(
 }
 }
 
-if (orchestration_tools->writeFile(new_configuration_path, configuration_file_path)) {
+if (orchestration_tools->writeFile(new_configuration, configuration_file_path)) {
 dbgDebug(D_ORCHESTRATOR) << "New policy file has been saved in: " << configuration_file_path;
 } else {
 dbgWarning(D_ORCHESTRATOR) << "Failed to save new policy file";
@@ -17,6 +17,7 @@
 #include "log_generator.h"
 #include "agent_details.h"
 #include "version.h"
+#include "i_messaging.h"
 
 #include <algorithm>
 #include <map>
@@ -24,7 +25,6 @@
 
 using namespace std;
 using namespace cereal;
-using HTTPMethod = I_Messaging::Method;
 
 USE_DEBUG_FLAG(D_ORCHESTRATOR);
 
@@ -141,7 +141,7 @@ FogAuthenticator::registerAgent(
 const string &platform,
 const string &architecture) const
 {
-dbgInfo(D_ORCHESTRATOR) << "Starting agent registration to fog";
+dbgFlow(D_ORCHESTRATOR) << "Starting agent registration to fog";
 
 auto details_resolver = Singleton::Consume<I_DetailsResolver>::by<FogAuthenticator>();
 RegistrationRequest request(
@@ -201,8 +201,8 @@ FogAuthenticator::registerAgent(
 request << make_pair("isGwNotVsx", "true");
 }
 
-if (details_resolver->isVersionEqualOrAboveR8110()) {
-request << make_pair("isVersionEqualOrAboveR8110", "true");
+if (details_resolver->isVersionAboveR8110()) {
+request << make_pair("isVersionAboveR8110", "true");
 }
 
 #if defined(gaia) || defined(smb)
@@ -214,8 +214,13 @@ FogAuthenticator::registerAgent(
 }
 #endif // gaia || smb
 
-auto fog_messaging = Singleton::Consume<I_Messaging>::by<FogAuthenticator>();
-if (fog_messaging->sendObject(request, HTTPMethod::POST, fog_address_ex + "/agents")) {
+dbgDebug(D_ORCHESTRATOR) << "Sending registration request to fog";
+auto request_status = Singleton::Consume<I_Messaging>::by<FogAuthenticator>()->sendSyncMessage(
+HTTPMethod::POST,
+"/agents",
+request
+);
+if (request_status.ok()) {
 dbgDebug(D_ORCHESTRATOR) << "Agent has registered successfully.";
 i_agent_details->setAgentId(request.getAgentId());
 i_agent_details->setProfileId(request.getProfileId());
@@ -236,7 +241,12 @@ FogAuthenticator::registerAgent(
 ReportIS::Tags::ORCHESTRATOR
 );
 
-return genError("Failed to register agent with the Fog");
+return genError(
+"Failed to register agent with the Fog. " +
+request_status.getErr().getBody() +
+" " +
+request_status.getErr().toString()
+);
 }
 
 Maybe<FogAuthenticator::AccessToken>
@@ -246,15 +256,20 @@ FogAuthenticator::getAccessToken(const UserCredentials &user_credentials) const
 static const string grant_type_string = "/oauth/token?grant_type=client_credentials";
 TokenRequest request = TokenRequest();
 
-auto fog_messaging = Singleton::Consume<I_Messaging>::by<FogAuthenticator>();
-auto sending_result = fog_messaging->sendObject(
-request,
-HTTPMethod::POST,
-fog_address_ex + grant_type_string,
+MessageMetadata request_token_md;
+request_token_md.insertHeader(
+"Authorization",
 buildBasicAuthHeader(user_credentials.getClientId(), user_credentials.getSharedSecret())
 );
+auto request_token_status = Singleton::Consume<I_Messaging>::by<FogAuthenticator>()->sendSyncMessage(
+HTTPMethod::POST,
+grant_type_string,
+request,
+MessageCategory::GENERIC,
+request_token_md
+);
 
-if (sending_result) {
+if (request_token_status.ok()) {
 auto data_path = getConfigurationWithDefault<string>(
 filesystem_prefix + "/data/",
 "encryptor",
@@ -371,6 +386,7 @@ FogAuthenticator::getCredentials()
 return maybe_credentials;
 }
 
+dbgTrace(D_ORCHESTRATOR) << "Credentials were not not receoived from the file. Getting registration data.";
 auto reg_data = getRegistrationData();
 if (!reg_data.ok()) {
 return genError("Failed to load a valid registration token, Error: " + reg_data.getErr());
@@ -436,13 +452,7 @@ FogAuthenticator::buildBasicAuthHeader(const string &username, const string &pass
 {
 auto orchestration_tools = Singleton::Consume<I_OrchestrationTools>::by<FogAuthenticator>();
 auto auth_encode = orchestration_tools->base64Encode(username + ":" + pass);
-return "Authorization: Basic " + auth_encode + "\r\n";
-}
-
-string
-FogAuthenticator::buildOAuth2Header(const string &token) const
-{
-return "Authorization: Bearer " + token + "\r\n";
+return "Basic " + auth_encode;
 }
 
 void
@@ -455,6 +465,7 @@ FogAuthenticator::setAddressExtenesion(const std::string &extension)
 Maybe<void>
 FogAuthenticator::authenticateAgent()
 {
+dbgFlow(D_ORCHESTRATOR) << "Authenticating the agent";
 const int min_expiration_time = 10;
 if (!credentials.ok()) {
 dbgDebug(D_ORCHESTRATOR) << "Getting Agent credentials.";
@@ -24,7 +24,6 @@
 
 using namespace std;
 using namespace cereal;
-using HTTPMethod = I_Messaging::Method;
 
 USE_DEBUG_FLAG(D_ORCHESTRATOR);
 
@@ -43,16 +42,16 @@ FogCommunication::getUpdate(CheckUpdateRequest &request)
 
 auto unpacked_access_token = access_token.unpack().getToken();
 static const string check_update_str = "/api/v2/agents/resources";
-auto request_status = Singleton::Consume<I_Messaging>::by<FogCommunication>()->sendObject(
-request,
+auto response = Singleton::Consume<I_Messaging>::by<FogCommunication>()->sendSyncMessage(
 HTTPMethod::POST,
-fog_address_ex + check_update_str,
-buildOAuth2Header(unpacked_access_token)
+check_update_str,
+request
 );
 
-if (!request_status) {
-dbgDebug(D_ORCHESTRATOR) << "Failed to get response after check update request.";
-return genError("Failed to request updates");
+if (!response.ok()) {
+const auto &fog_err = response.getErr();
+dbgDebug(D_ORCHESTRATOR) << "Check update request fail. Error: " << fog_err.getBody();
+return genError(fog_err.getBody());
 }
 
 string policy_mgmt_mode = getSettingWithDefault<string>("management", "profileManagedMode");
@@ -93,7 +92,7 @@ FogCommunication::getUpdate(CheckUpdateRequest &request)
 }
 
 Maybe<string>
-FogCommunication::downloadAttributeFile(const GetResourceFile &resourse_file)
+FogCommunication::downloadAttributeFile(const GetResourceFile &resourse_file, const string &file_path)
 {
 if (!access_token.ok()) return genError("Acccess Token not available.");
 
@@ -105,27 +104,34 @@ FogCommunication::downloadAttributeFile(const GetResourceFile &resourse_file)
 return i_declarative_policy->getCurrPolicy();
 }
 static const string file_attribute_str = "/api/v2/agents/resources/";
-Maybe<string> attribute_file = Singleton::Consume<I_Messaging>::by<FogCommunication>()->downloadFile(
-resourse_file,
-resourse_file.getRequestMethod(),
-fog_address_ex + file_attribute_str + resourse_file.getFileName(),
-buildOAuth2Header(unpacked_access_token) // Header
-);
-
-return attribute_file;
+auto attribute_file = Singleton::Consume<I_Messaging>::by<FogCommunication>()->downloadFile(
+resourse_file.getRequestMethod(),
+file_attribute_str + resourse_file.getFileName(),
+file_path
+);
+if (!attribute_file.ok()) {
+const auto &fog_err = attribute_file.getErr();
+return genError(fog_err.getBody());
+}
+return file_path;
 }
 
 Maybe<void>
 FogCommunication::sendPolicyVersion(const string &policy_version, const string &policy_versions) const
 {
-PolicyVersionPatchRequest request(policy_version, policy_versions);
-auto fog_messaging = Singleton::Consume<I_Messaging>::by<FogCommunication>();
 dbgTrace(D_ORCHESTRATOR)
 << "Sending patch request to the fog. Policy version: "
 << policy_version
 << " , Policy versions: "
 << policy_versions;
-if (fog_messaging->sendNoReplyObject(request, HTTPMethod::PATCH, fog_address_ex + "/agents")) {
+PolicyVersionPatchRequest request(policy_version, policy_versions);
+auto request_status = Singleton::Consume<I_Messaging>::by<FogCommunication>()->sendSyncMessageWithoutResponse(
+HTTPMethod::PATCH,
+"/agents",
+request
+);
+if (request_status) {
 dbgTrace(D_ORCHESTRATOR)
 << "Patch request was sent successfully to the fog."
 << " Policy versions: "
@@ -24,7 +24,6 @@
 #include <vector>
 
 using namespace std;
-using HTTPMethod = I_Messaging::Method;
 
 USE_DEBUG_FLAG(D_ORCHESTRATOR);
 
@@ -51,16 +50,17 @@ HybridCommunication::getUpdate(CheckUpdateRequest &request)
 dbgTrace(D_ORCHESTRATOR) << "Getting updates in Hybrid Communication";
 if (access_token.ok()) {
 static const string check_update_str = "/api/v2/agents/resources";
-auto request_status = Singleton::Consume<I_Messaging>::by<HybridCommunication>()->sendObject(
-request,
+auto request_status = Singleton::Consume<I_Messaging>::by<HybridCommunication>()->sendSyncMessage(
 HTTPMethod::POST,
-fog_address_ex + check_update_str,
-buildOAuth2Header((*access_token).getToken())
+check_update_str,
+request
 );
 
-if (!request_status) {
-dbgWarning(D_ORCHESTRATOR) << "Failed to get response after check update request.";
-return genError("Failed to request updates");
+if (!request_status.ok()) {
+auto fog_err = request_status.getErr();
+dbgDebug(D_ORCHESTRATOR) << "Check update request fail. Error: " << fog_err.getBody();
+return genError(fog_err.getBody());
 }
 
 Maybe<string> maybe_new_manifest = request.getManifest();
@@ -82,14 +82,6 @@ HybridCommunication::getUpdate(CheckUpdateRequest &request)
 if (env == EnvType::K8S && !policy_response.empty()) {
 dbgDebug(D_ORCHESTRATOR) << "Policy has changes, sending notification to tuning host";
 I_AgentDetails *agentDetails = Singleton::Consume<I_AgentDetails>::by<HybridCommunication>();
-I_Messaging *messaging = Singleton::Consume<I_Messaging>::by<HybridCommunication>();
-
-UpdatePolicyCrdObject policy_change_object(policy_response);
-
-Flags<MessageConnConfig> conn_flags;
-conn_flags.setFlag(MessageConnConfig::EXTERNAL);
-
-string tenant_header = "X-Tenant-Id: " + agentDetails->getTenantId();
 
 auto get_tuning_host = []()
 {
@@ -107,18 +99,22 @@ HybridCommunication::getUpdate(CheckUpdateRequest &request)
 return tuning_host;
 };
 
-bool ok = messaging->sendNoReplyObject(
-policy_change_object,
-I_Messaging::Method::POST,
-get_tuning_host(),
-80,
-conn_flags,
+MessageMetadata update_policy_crd_md(get_tuning_host(), 80);
+update_policy_crd_md.insertHeader("X-Tenant-Id", agentDetails->getTenantId());
+UpdatePolicyCrdObject policy_change_object(policy_response);
+
+auto i_messaging = Singleton::Consume<I_Messaging>::by<HybridCommunication>();
+bool tuning_req_status = i_messaging->sendSyncMessageWithoutResponse(
+HTTPMethod::POST,
 "/api/update-policy-crd",
-tenant_header
+policy_change_object,
+MessageCategory::GENERIC,
+update_policy_crd_md
 );
-dbgDebug(D_ORCHESTRATOR) << "sent tuning policy update notification ok: " << ok;
-if (!ok) {
-dbgWarning(D_ORCHESTRATOR) << "failed to send tuning notification";
+if (!tuning_req_status) {
+dbgWarning(D_ORCHESTRATOR) << "Failed to send tuning notification";
+} else {
+dbgDebug(D_ORCHESTRATOR) << "Successfully sent tuning policy update notification";
 }
 }
 
@@ -128,14 +124,17 @@ HybridCommunication::getUpdate(CheckUpdateRequest &request)
 }
 
 Maybe<string>
-HybridCommunication::downloadAttributeFile(const GetResourceFile &resourse_file)
+HybridCommunication::downloadAttributeFile(const GetResourceFile &resourse_file, const string &file_path)
 {
 dbgTrace(D_ORCHESTRATOR)
 << "Downloading attribute file on hybrid mode, file name: "
 << resourse_file.getFileName();
 
 if (resourse_file.getFileName() =="policy") {
-return i_declarative_policy->getCurrPolicy();
+string downloaded_file = i_declarative_policy->getCurrPolicy();
+auto *orchestration_tools = Singleton::Consume<I_OrchestrationTools>::by<HybridCommunication>();
+if (orchestration_tools->writeFile(downloaded_file, file_path)) return downloaded_file;
+return genError("Failed to write the attribute file in hybrid mode. File: " + downloaded_file);
 }
 if (resourse_file.getFileName() == "manifest") {
 if (!access_token.ok()) return genError("Acccess Token not available.");
@@ -143,13 +142,16 @@ HybridCommunication::downloadAttributeFile(const GetResourceFile &resourse_file)
 auto unpacked_access_token = access_token.unpack().getToken();
 
 static const string file_attribute_str = "/api/v2/agents/resources/";
-Maybe<string> attribute_file = Singleton::Consume<I_Messaging>::by<HybridCommunication>()->downloadFile(
-resourse_file,
+auto attribute_file = Singleton::Consume<I_Messaging>::by<HybridCommunication>()->downloadFile(
 resourse_file.getRequestMethod(),
-fog_address_ex + file_attribute_str + resourse_file.getFileName(),
-buildOAuth2Header((*access_token).getToken()) // Header
+file_attribute_str + resourse_file.getFileName(),
+file_path
 );
-return attribute_file;
+if (!attribute_file.ok()) {
+auto fog_err = attribute_file.getErr();
+return genError(fog_err.getBody());
+}
+return file_path;
 }
 dbgTrace(D_ORCHESTRATOR) << "Unnecessary attribute files downloading on hybrid mode";
 return string("");
@@ -122,9 +122,14 @@ LocalCommunication::getUpdate(CheckUpdateRequest &request)
 }
 
 Maybe<string>
-LocalCommunication::downloadAttributeFile(const GetResourceFile &resource_file)
+LocalCommunication::downloadAttributeFile(const GetResourceFile &resource_file, const string &file_path)
 {
 auto file_name = resource_file.getFileName();
+dbgTrace(D_ORCHESTRATOR)
+<< "Download "
+<< file_name
+<< " file in local communication, file path is redundant: "
+<< file_path;
 
 I_OrchestrationTools *orchestration_tools = Singleton::Consume<I_OrchestrationTools>::by<LocalCommunication>();
 if (file_name.compare("policy") == 0) {
@@ -82,9 +82,9 @@ public:
 }
 
 Maybe<string>
-downloadAttributeFile(const GetResourceFile &resourse_file) override
+downloadAttributeFile(const GetResourceFile &resourse_file, const string &file_path) override
 {
-return i_update_comm_impl->downloadAttributeFile(resourse_file);
+return i_update_comm_impl->downloadAttributeFile(resourse_file, file_path);
 }
 
 void
@@ -45,9 +45,9 @@ public:
 }
 
 Maybe<string>
-downloadAttributeFile(const GetResourceFile &resourse_file)
+downloadAttributeFile(const GetResourceFile &resourse_file, const string &file_path)
 {
-return local_communication.downloadAttributeFile(resourse_file);
+return local_communication.downloadAttributeFile(resourse_file, file_path);
 }
 
 void
@@ -127,7 +127,7 @@ TEST_F(LocalCommunicationTest, downloadManifest)
 string new_manifest_string = "new manifest";
 EXPECT_CALL(mock_orc_tools, readFile("/etc/cp/conf/offline_manifest.json")).WillOnce(Return(new_manifest_string));
 GetResourceFile resourse_file(GetResourceFile::ResourceFileType::MANIFEST);
-auto downloaded_string = downloadAttributeFile(resourse_file);
+auto downloaded_string = downloadAttributeFile(resourse_file, "/tmp/orch_files");
 EXPECT_TRUE(downloaded_string.ok());
 EXPECT_EQ(downloaded_string.unpack(), new_manifest_string);
 }
@@ -37,7 +37,7 @@ USE_DEBUG_FLAG(D_RATE_LIMIT);
 
 using namespace std;
 
-enum class RateLimitVedict { ACCEPT, DROP, DROP_AND_LOG };
+enum class RateLimitVerdict { ACCEPT, DROP, DROP_AND_LOG };
 
 class RateLimit::Impl
 :
@@ -74,8 +74,100 @@ public:
 return !should_rule_be_exact_match && str_starts_with(request_uri, rule_uri);
 }
 
+bool
+isRuleMatchingUri(const string &rule_uri, const string &request_uri, const RateLimitRule &rule)
+{
+if (rule_uri == request_uri ||
+rule_uri == request_uri + "/" ||
+rule_uri + "/" == request_uri) {
+dbgDebug(D_RATE_LIMIT)
+<< "Found Exact match to request URI: "
+<< request_uri
+<< ", rule URI: "
+<< rule_uri;
+return true;
+}
+
+if (rule_uri == "/") {
+dbgDebug(D_RATE_LIMIT)
+<< "Matched new longest rule, request URI: "
+<< request_uri
+<< ", rule URI: "
+<< rule_uri;
+return true;
+}
+
+if (isRuleMatchingUri(rule_uri, request_uri, rule.isExactMatch())) {
+dbgDebug(D_RATE_LIMIT)
+<< "Matched new longest rule, request URI: "
+<< request_uri
+<< ", rule URI: "
+<< rule_uri;
+return true;
+}
+
+return false;
+}
+
+bool
+shouldUpdateBestMatchingRule(
+const RateLimitRule &rule,
+const unordered_map<string, set<string>> &condition_map,
+int full_rule_uri_length,
+int rate_limit_longest_match,
+float current_matched_rule_limit,
+RateLimitAction current_matched_rule_verdict)
+{
+if (!rule.isMatchAny() && !rule.getRateLimitMatch().matchAttributes(condition_map)) {
+dbgTrace(D_RATE_LIMIT) << "The request does not match the rule's condition";
+return false;
+}
+
+RateLimitAction rule_action = calcRuleAction(rule);
+if (current_matched_rule_verdict < rule_action) {
+dbgTrace(D_RATE_LIMIT)
+<< "Rule's action is more strict than already matched rule. current rule's action: "
+<< RateLimitConfig::rate_limit_action_to_string.at(rule_action)
+<< ", previously matched rule's action: "
+<< RateLimitConfig::rate_limit_action_to_string.at(current_matched_rule_verdict);
+return true;
+}
+
+if (rule_action < current_matched_rule_verdict) {
+dbgTrace(D_RATE_LIMIT)
+<< "Rule's action is less strict than already matched rule. current rule's action: "
+<< RateLimitConfig::rate_limit_action_to_string.at(rule_action)
+<< ", previously matched rule's action: "
+<< RateLimitConfig::rate_limit_action_to_string.at(current_matched_rule_verdict);
+return false;
+}
+
+if (full_rule_uri_length < rate_limit_longest_match) {
+dbgTrace(D_RATE_LIMIT)
+<< "rule is shorter than already matched rule. current rule length: "
+<< full_rule_uri_length
+<< ", previously longest matched rule length: "
+<< rate_limit_longest_match;
+return false;
+}
+
+if (full_rule_uri_length == rate_limit_longest_match && current_matched_rule_limit < calcRuleLimit(rule)) {
+dbgTrace(D_RATE_LIMIT)
+<< "rule limit is more permissive than already matched rule. current rule limit: "
+<< limit
+<< ", previously matched rule limit: "
+<< current_matched_rule_limit;
+return false;
+}
+
+return true;
+}
+
 Maybe<RateLimitRule>
-findRateLimitRule(const string &matched_uri, string &asset_id)
+findRateLimitRule(
+const string &matched_uri,
+string &asset_id,
+const unordered_map<string, set<string>> &condition_map)
 {
 WaapConfigAPI api_config;
 WaapConfigApplication application_config;
@@ -97,13 +189,14 @@ public:
 return genError("Failed to get rate limit configuration. Skipping rate limit check.");
 
 const auto &rate_limit_config = maybe_rate_limit_config.unpack();
-mode = rate_limit_config.getRateLimitMode();
+practice_action = rate_limit_config.getRateLimitMode();
 
-if (mode == "Inactive") return genError("Rate limit mode is Inactive in policy");
+if (practice_action == RateLimitAction::INACTIVE) return genError("Rate limit mode is Inactive in policy");
 
-set<string> rule_set;
 Maybe<RateLimitRule> matched_rule = genError("URI did not match any rate limit rule.");
 int rate_limit_longest_match = 0;
+float current_matched_rule_limit = 0;
+RateLimitAction current_matched_rule_verdict = RateLimitAction::INACTIVE;
 for (const auto &application_url : site_config->get_applicationUrls()) {
 dbgTrace(D_RATE_LIMIT) << "Application URL: " << application_url;
 
@@ -120,54 +213,30 @@ public:
 string full_rule_uri = application_uri + rule.getRateLimitUri();
 int full_rule_uri_length = full_rule_uri.length();
 
-// avoiding duplicates
-if (!rule_set.insert(full_rule_uri).second) continue;
 
 dbgTrace(D_RATE_LIMIT)
-<< "Trying to match rule uri: "
+<< "Trying to match rule URI: "
 << full_rule_uri
-<< " with request uri: "
+<< " with request URI: "
 << matched_uri;
 
-if (full_rule_uri_length < rate_limit_longest_match) {
-dbgDebug(D_RATE_LIMIT)
-<< "rule is shorter then already matched rule. current rule length: "
-<< full_rule_uri_length
-<< ", previously longest matched rule length: "
-<< rate_limit_longest_match;
+if (!isRuleMatchingUri(full_rule_uri, matched_uri, rule)) {
+dbgTrace(D_RATE_LIMIT) << "No match";
 continue;
 }
 
-if (full_rule_uri == matched_uri ||
-full_rule_uri == matched_uri + "/" ||
-full_rule_uri + "/" == matched_uri) {
-dbgDebug(D_RATE_LIMIT)
-<< "Found Exact match to request uri: "
-<< matched_uri
-<< ", rule uri: "
-<< full_rule_uri;
-return rule;
-}
-
-if (rule.getRateLimitUri() == "/") {
-dbgDebug(D_RATE_LIMIT)
-<< "Matched new longest rule, request uri: "
-<< matched_uri
-<< ", rule uri: "
-<< full_rule_uri;
-matched_rule = rule;
-rate_limit_longest_match = full_rule_uri_length;
-continue;
-}
-
-if (isRuleMatchingUri(full_rule_uri, matched_uri, rule.isExactMatch())) {
-dbgDebug(D_RATE_LIMIT)
-<< "Matched new longest rule, request uri: "
-<< matched_uri
-<< ", rule uri: "
-<< full_rule_uri;
+bool should_update_rule = shouldUpdateBestMatchingRule(
+rule,
+condition_map,
+full_rule_uri_length,
+rate_limit_longest_match,
+current_matched_rule_limit,
+current_matched_rule_verdict);
+
+if (should_update_rule) {
 matched_rule = rule;
 rate_limit_longest_match = full_rule_uri_length;
+current_matched_rule_verdict = calcRuleAction(rule);
+current_matched_rule_limit = calcRuleLimit(rule);
 }
 }
 }
@@ -203,21 +272,27 @@ public:
 string source_ip = "";
 if (maybe_source_ip.ok()) source_ip = ipAddrToStr(maybe_source_ip.unpack());
 
-if (shouldApplyException(uri, source_identifier, source_ip)) {
-dbgDebug(D_RATE_LIMIT) << "found accept exception, not enforcing rate limit on this uri: " << uri;
+unordered_map<string, set<string>> condition_map = createConditionMap(uri, source_ip, source_identifier);
+if (shouldApplyException(condition_map)) {
+dbgDebug(D_RATE_LIMIT) << "found accept exception, not enforcing rate limit on this URI: " << uri;
 return ACCEPT;
 }
 
 string asset_id;
-auto maybe_rule = findRateLimitRule(uri, asset_id);
+auto maybe_rule = findRateLimitRule(uri, asset_id, condition_map);
 if (!maybe_rule.ok()) {
 dbgDebug(D_RATE_LIMIT) << "Not Enforcing Rate Limit: " << maybe_rule.getErr();
 return ACCEPT;
 }
 
 const auto &rule = maybe_rule.unpack();
+if (rule.getRateLimitAction() == RateLimitAction::INACTIVE) {
+dbgDebug(D_RATE_LIMIT) << "Rule's action is Inactive, rate limit will not be enforced";
+return ACCEPT;
+}
+
 burst = rule.getRateLimit();
-limit = static_cast<float>(rule.getRateLimit()) / (rule.getRateLimitScope() == "Minute" ? 60 : 1);
+limit = calcRuleLimit(rule);
 
 dbgTrace(D_RATE_LIMIT)
 << "found rate limit rule with: "
@@ -226,18 +301,18 @@ public:
 << (rule.getRateLimitScope() == "Minute" ? 60 : 1)
 << " seconds";
 
-string unique_key = asset_id + ":" + source_identifier + ":" + uri;
-if (!unique_key.empty() && unique_key.back() == '/') unique_key.pop_back();
+string unique_key = asset_id + ":" + source_identifier + ":" + rule.getRateLimitUri();
+if (unique_key.back() == '/') unique_key.pop_back();
 
 auto verdict = decide(unique_key);
-if (verdict == RateLimitVedict::ACCEPT) {
+if (verdict == RateLimitVerdict::ACCEPT) {
 dbgTrace(D_RATE_LIMIT) << "Received ACCEPT verdict.";
 return ACCEPT;
 }
 
-if (verdict == RateLimitVedict::DROP_AND_LOG) sendLog(uri, source_identifier, source_ip, rule);
+if (verdict == RateLimitVerdict::DROP_AND_LOG) sendLog(uri, source_identifier, source_ip, rule);
 
-if (mode == "Active") {
+if (calcRuleAction(rule) == RateLimitAction::PREVENT) {
 dbgTrace(D_RATE_LIMIT) << "Received DROP verdict, this request will be blocked by rate limit";
 return DROP;
 }
@@ -246,19 +321,33 @@ public:
 return ACCEPT;
 }
 
+RateLimitAction
+calcRuleAction(const RateLimitRule &rule)
+{
+if (rule.getRateLimitAction() == RateLimitAction::ACCORDING_TO_PRACTICE) return practice_action;
+
+return rule.getRateLimitAction();
+}
+
+float
+calcRuleLimit(const RateLimitRule &rule)
+{
+return static_cast<float>(rule.getRateLimit()) / (rule.getRateLimitScope() == "Minute" ? 60 : 1);
+}
+
 string
 getListenerName() const override
 {
 return "rate limit";
 }
 
-RateLimitVedict
+RateLimitVerdict
 decide(const string &key) {
 if (redis == nullptr) {
 dbgDebug(D_RATE_LIMIT)
 << "there is no connection to the redis at the moment, unable to enforce rate limit";
 reconnectRedis();
-return RateLimitVedict::ACCEPT;
+return RateLimitVerdict::ACCEPT;
 }
 
 redisReply* reply = static_cast<redisReply*>(redisCommand(redis, "EVALSHA %s 1 %s %f %d",
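Note on calcRuleAction() and the comparisons in shouldUpdateBestMatchingRule() above: the code relies on the underlying ordering of the RateLimitAction enum (Inactive, AccordingToPractice, Detect, Prevent, as listed in rate_limit_action_to_string), so operator< on two actions picks the stricter one. A small self-contained sketch of that idea, using an illustrative stand-in enum rather than the project's header:

    #include <iostream>

    // Illustrative enum: mirrors the ordering implied by the diff, not the project's actual type.
    enum class Action { Inactive, AccordingToPractice, Detect, Prevent };

    // Stricter actions compare greater, so a plain comparison selects the enforcement winner.
    Action stricter(Action a, Action b) { return a < b ? b : a; }

    int main() {
        Action current = Action::Detect;
        Action candidate = Action::Prevent;
        std::cout << (stricter(current, candidate) == Action::Prevent) << "\n";  // prints 1
        return 0;
    }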
@@ -268,26 +357,26 @@ public:
 dbgDebug(D_RATE_LIMIT)
 << "Error executing Redis command: No reply received, unable to enforce rate limit";
 reconnectRedis();
-return RateLimitVedict::ACCEPT;
+return RateLimitVerdict::ACCEPT;
 }
 
 // redis's lua script returned true - accept
 if (reply->type == REDIS_REPLY_INTEGER) {
 freeReplyObject(reply);
-return RateLimitVedict::ACCEPT;
+return RateLimitVerdict::ACCEPT;
 }
 
 // redis's lua script returned false - drop, no need to log
 if (reply->type == REDIS_REPLY_NIL) {
 freeReplyObject(reply);
-return RateLimitVedict::DROP;
+return RateLimitVerdict::DROP;
 }
 
 // redis's lua script returned string - drop and send log
 const char* log_str = "BLOCK AND LOG";
 if (reply->type == REDIS_REPLY_STRING && strncmp(reply->str, log_str, strlen(log_str)) == 0) {
 freeReplyObject(reply);
-return RateLimitVedict::DROP_AND_LOG;
+return RateLimitVerdict::DROP_AND_LOG;
 }
 
 dbgDebug(D_RATE_LIMIT)
@@ -295,7 +384,7 @@ public:
 << reply->type
 << ". not enforcing rate limit for this request.";
 freeReplyObject(reply);
-return RateLimitVedict::ACCEPT;
+return RateLimitVerdict::ACCEPT;
 }
 
 void
@@ -344,7 +433,7 @@ public:
 << LogField("ruleName", rule_by_ctx.getRuleName())
 << LogField("httpUriPath", uri)
 << LogField("httpSourceId", source_identifier)
-<< LogField("securityAction", (mode == "Active" ? "Prevent" : "Detect"))
+<< LogField("securityAction", (calcRuleAction(rule) == RateLimitAction::PREVENT ? "Prevent" : "Detect"))
 << LogField("waapIncidentType", "Rate Limit");
 
 auto env = Singleton::Consume<I_Environment>::by<RateLimit>();
@@ -363,28 +452,33 @@ public:
 }
 
 bool
-shouldApplyException(const string &uri, const string &source_identifier, const string &source_ip)
+shouldApplyException(const unordered_map<string, set<string>> &exceptions_dict)
 {
 dbgTrace(D_RATE_LIMIT) << "matching exceptions";
-unordered_map<string, set<string>> exceptions_dict;
-
-// collect sourceip, sourceIdentifier, url
-if (!source_ip.empty()) exceptions_dict["sourceIP"].insert(source_ip);
-exceptions_dict["sourceIdentifier"].insert(source_identifier);
-exceptions_dict["url"].insert(uri);
 
 auto behaviors = Singleton::Consume<I_GenericRulebase>::by<RateLimit>()->getBehavior(exceptions_dict);
 for (auto const &behavior : behaviors) {
 if (behavior == action_accept) {
-dbgTrace(D_RATE_LIMIT) << "matched exceptions for " << uri << " should accept";
+dbgTrace(D_RATE_LIMIT) << "matched exceptions for current request, should accept";
 return true;
 }
 }
 
-dbgTrace(D_RATE_LIMIT) << "No accept exceptions found for this uri and source ip";
+dbgTrace(D_RATE_LIMIT) << "No accept exceptions found for this request";
 return false;
 }
 
+unordered_map<string, set<string>>
+createConditionMap(const string &uri, const string &source_ip, const string &source_identifier)
+{
+unordered_map<string, set<string>> condition_map;
+if (!source_ip.empty()) condition_map["sourceIP"].insert(source_ip);
+condition_map["sourceIdentifier"].insert(source_identifier);
+condition_map["url"].insert(uri);
+
+return condition_map;
+}
+
 string
 ipAddrToStr(const IPAddr& ip_address) const
 {
@@ -398,7 +492,7 @@ public:
 {
 disconnectRedis();
 
-const string &redis_ip = getConfigurationWithDefault<string>("127.0.0.1", "connection", "Redis IP");
+const string redis_ip = getConfigurationWithDefault<string>("127.0.0.1", "connection", "Redis IP");
 int redis_port = getConfigurationWithDefault<int>(6379, "connection", "Redis Port");
 
 timeval timeout;
@@ -476,7 +570,6 @@ public:
 false
 );
 }
-
 }
 
 void
@@ -529,7 +622,7 @@ private:
 static constexpr auto ACCEPT = ngx_http_cp_verdict_e::TRAFFIC_VERDICT_ACCEPT;
 static constexpr auto INSPECT = ngx_http_cp_verdict_e::TRAFFIC_VERDICT_INSPECT;
 
-string mode;
+RateLimitAction practice_action;
 string rate_limit_lua_script_hash;
 int burst;
 float limit;

@@ -1,15 +1,37 @@
 #include "rate_limit_config.h"

+using namespace std;
+
+const string RateLimitRule::default_match =
+    "{\"type\":\"condition\",\"op\":\"equals\",\"key\":\"any\",\"value\":[\"any\"]}";
 bool RateLimitConfig::is_active = false;

+const map<RateLimitAction, string> RateLimitConfig::rate_limit_action_to_string = {
+    {RateLimitAction::INACTIVE, "Inactive"},
+    {RateLimitAction::ACCORDING_TO_PRACTICE, "AccordingToPractice"},
+    {RateLimitAction::DETECT, "Detect"},
+    {RateLimitAction::PREVENT, "Prevent"},
+};
+
+// Actions in mgmt were changed from Active/Learn to Prevent/Detect. Active/Learn are being kept here for BC.
+const map<string, RateLimitAction> RateLimitConfig::rate_limit_string_to_action = {
+    {"Inactive", RateLimitAction::INACTIVE},
+    {"AccordingToPractice", RateLimitAction::ACCORDING_TO_PRACTICE},
+    {"Detect", RateLimitAction::DETECT},
+    {"Learn", RateLimitAction::DETECT},
+    {"Prevent", RateLimitAction::PREVENT},
+    {"Active", RateLimitAction::PREVENT}
+};
+
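The two maps above translate between the RateLimitAction enum and the management strings, keeping the legacy "Active"/"Learn" names as aliases for backward compatibility. A self-contained sketch of the same lookup pattern, with the enum and map redeclared locally because the project headers are not available here; note that map::at throws for unknown strings, so callers are expected to pass validated values:

#include <iostream>
#include <map>
#include <string>

enum class RateLimitAction { INACTIVE, ACCORDING_TO_PRACTICE, DETECT, PREVENT };

// Legacy management values map onto the new actions.
static const std::map<std::string, RateLimitAction> string_to_action = {
    {"Inactive", RateLimitAction::INACTIVE},
    {"AccordingToPractice", RateLimitAction::ACCORDING_TO_PRACTICE},
    {"Detect", RateLimitAction::DETECT},
    {"Learn", RateLimitAction::DETECT},      // old name for Detect
    {"Prevent", RateLimitAction::PREVENT},
    {"Active", RateLimitAction::PREVENT}     // old name for Prevent
};

int main()
{
    for (const std::string &mode : {"Learn", "Prevent", "Inactive"}) {
        RateLimitAction action = string_to_action.at(mode);
        std::cout << mode << " -> " << static_cast<int>(action) << "\n";
    }
    return 0;
}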
 void
 RateLimitTrigger::load(cereal::JSONInputArchive &ar)
 {
-    dbgTrace(D_REVERSE_PROXY) << "Serializing single Rate Limit rule's triggers";
+    dbgTrace(D_RATE_LIMIT) << "Serializing single Rate Limit rule's triggers";
     try {
         ar(cereal::make_nvp("id", id));
     } catch (const cereal::Exception &e) {
-        dbgWarning(D_REVERSE_PROXY)
+        dbgWarning(D_RATE_LIMIT)
             << "Failed to load single Rate Limit JSON rule's triggers. Error: " << e.what();
         ar.setNextName(nullptr);
     }

@@ -18,39 +40,43 @@ RateLimitTrigger::load(cereal::JSONInputArchive &ar)
 void
 RateLimitRule::load(cereal::JSONInputArchive &ar)
 {
-    dbgTrace(D_REVERSE_PROXY) << "Serializing single Rate Limit rule";
+    dbgTrace(D_RATE_LIMIT) << "Serializing single Rate Limit rule";
     try {
+        string _action;
         ar(cereal::make_nvp("URI", uri));
         ar(cereal::make_nvp("scope", scope));
         ar(cereal::make_nvp("limit", limit));
         ar(cereal::make_nvp("triggers", rate_limit_triggers));
+        ar(cereal::make_nvp("action", _action));
+        action = RateLimitConfig::rate_limit_string_to_action.at(_action);
+        ar(cereal::make_nvp("match", match));
     } catch (const cereal::Exception &e) {
-        dbgWarning(D_REVERSE_PROXY) << "Failed to load single Rate Limit JSON rule. Error: " << e.what();
+        dbgWarning(D_RATE_LIMIT) << "Failed to load single Rate Limit JSON rule. Error: " << e.what();
         ar.setNextName(nullptr);
     }
 }

 void
-RateLimitRule::prepare(const std::string &asset_id, int zone_id)
+RateLimitRule::prepare(const string &asset_id, int zone_id)
 {
-    std::string zone_id_s = std::to_string(zone_id);
-    std::string zone;
+    string zone_id_s = to_string(zone_id);
+    string zone;
     if (isRootLocation()) {
         zone = "root_zone_" + asset_id + "_" + zone_id_s;
     } else {
-        std::string zone_name_suffix = uri;
-        std::replace(zone_name_suffix.begin(), zone_name_suffix.end(), '/', '_');
+        string zone_name_suffix = uri;
+        replace(zone_name_suffix.begin(), zone_name_suffix.end(), '/', '_');
         zone = "zone" + zone_name_suffix + "_" + zone_id_s;
     }

-    limit_req_template_value = "zone=" + zone + " burst=" + std::to_string(limit) + " nodelay";
+    limit_req_template_value = "zone=" + zone + " burst=" + to_string(limit) + " nodelay";

     // nginx conf will look like: limit_req_zone <sourceIdentifier> zone=<location>_<id>:10m rate=<limit>r/<scope>;
-    std::string rate_unit = scope == "Minute" ? "r/m" : "r/s";
+    string rate_unit = scope == "Minute" ? "r/m" : "r/s";
     limit_req_zone_template_value =
-        "zone=" + zone + ":" + cache_size + " rate=" + std::to_string(limit) + rate_unit;
+        "zone=" + zone + ":" + cache_size + " rate=" + to_string(limit) + rate_unit;

-    dbgTrace(D_REVERSE_PROXY)
+    dbgTrace(D_RATE_LIMIT)
         << "limit_req_zone nginx template value: "
         << limit_req_zone_template_value
         << ", limit_req nginx template value: "
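prepare() only assembles the value part of the nginx rate-limiting directives; the comment in the code shows where they end up. A standalone sketch that builds the same strings and prints the resulting limit_req_zone / limit_req values for one sample rule (the asset id, zone id and cache size below are illustrative placeholders):

#include <algorithm>
#include <iostream>
#include <string>

int main()
{
    // Sample rule: 100 requests per minute on /api.
    std::string uri = "/api";
    int zone_id = 7;
    int limit = 100;
    std::string scope = "Minute";
    std::string cache_size = "10m";

    std::string zone_name_suffix = uri;
    std::replace(zone_name_suffix.begin(), zone_name_suffix.end(), '/', '_');
    std::string zone = "zone" + zone_name_suffix + "_" + std::to_string(zone_id);

    std::string rate_unit = (scope == "Minute") ? "r/m" : "r/s";
    std::string limit_req_zone_value =
        "zone=" + zone + ":" + cache_size + " rate=" + std::to_string(limit) + rate_unit;
    std::string limit_req_value = "zone=" + zone + " burst=" + std::to_string(limit) + " nodelay";

    // These values are meant to be spliced into nginx directives of the form:
    //   limit_req_zone <sourceIdentifier> zone=<location>_<id>:10m rate=<limit>r/<scope>;
    //   limit_req zone=<location>_<id> burst=<limit> nodelay;
    std::cout << "limit_req_zone ... " << limit_req_zone_value << ";\n";
    std::cout << "limit_req " << limit_req_value << ";\n";
    return 0;
}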
@@ -65,33 +91,58 @@ RateLimitRule::isRootLocation() const
     }

     auto non_root = uri.find_first_not_of("/");
-    if (non_root != std::string::npos) {
+    if (non_root != string::npos) {
         return false;
     }
     return true;
 }

+bool
+RateLimitRule::isMatchAny() const
+{
+    return
+        match.getType() == MatchQuery::MatchType::Condition &&
+        match.getKey() == "any" &&
+        match.getValue().count("any") > 0;
+}
+
 void
 RateLimitConfig::load(cereal::JSONInputArchive &ar)
 {
-    dbgTrace(D_REVERSE_PROXY) << "Serializing Rate Limit config";
+    dbgTrace(D_RATE_LIMIT) << "Serializing Rate Limit config";
     try {
+        string _mode;
         ar(cereal::make_nvp("rules", rate_limit_rules));
-        ar(cereal::make_nvp("mode", mode));
+        ar(cereal::make_nvp("mode", _mode));
+        mode = rate_limit_string_to_action.at(_mode);
         prepare();
     } catch (const cereal::Exception &e) {
-        dbgWarning(D_REVERSE_PROXY) << "Failed to load single Rate Limit JSON config. Error: " << e.what();
+        dbgWarning(D_RATE_LIMIT) << "Failed to load single Rate Limit JSON config. Error: " << e.what();
         ar.setNextName(nullptr);
     }
 }

-void
-RateLimitConfig::addSiblingRateLimitRule(RateLimitRule &rule) {
-    rule.setExactMatch();
+RateLimitRule
+RateLimitConfig::generateSiblingRateLimitRule(const RateLimitRule &rule) {
     RateLimitRule sibling_rule(rule);
     sibling_rule.appendSlash();
     sibling_rule.setExactMatch();
-    rate_limit_rules.push_back(sibling_rule);
+    return sibling_rule;
+}
+
+void
+RateLimitConfig::addSiblingRateLimitRules()
+{
+    std::vector<RateLimitRule> siblings;
+    for (auto &rule : rate_limit_rules) {
+        if (rule.isExactMatch()) {
+            siblings.push_back(generateSiblingRateLimitRule(rule));
+            rule.setExactMatch();
+        }
+    }
+
+    rate_limit_rules.insert(rate_limit_rules.end(), siblings.begin(), siblings.end());
 }

 void
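The sibling-rule logic pairs every exact-match URI with a trailing-slash variant, so "/api" and "/api/" end up governed by the same policy. A standalone sketch of that expansion over plain strings; appendSlash and setExactMatch on the real rule objects are modeled here by simple string handling:

#include <iostream>
#include <string>
#include <vector>

int main()
{
    std::vector<std::string> exact_match_uris = {"/api", "/login", "/static/"};

    // For each exact-match URI that lacks a trailing slash, add the "/..." sibling,
    // so both spellings of the location are covered by the same rate-limit rule.
    std::vector<std::string> siblings;
    for (const std::string &uri : exact_match_uris) {
        if (!uri.empty() && uri.back() != '/') siblings.push_back(uri + "/");
    }
    exact_match_uris.insert(exact_match_uris.end(), siblings.begin(), siblings.end());

    for (const std::string &uri : exact_match_uris) std::cout << uri << "\n";
    return 0;
}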
@@ -99,7 +150,7 @@ RateLimitConfig::prepare()
 {
     // Removes invalid rules
     auto last_valid_rule =
-        std::remove_if(
+        remove_if(
             rate_limit_rules.begin(),
             rate_limit_rules.end(),
             [](const RateLimitRule &rule) { return !rule; }

@@ -107,35 +158,27 @@ RateLimitConfig::prepare()

     rate_limit_rules.erase(last_valid_rule, rate_limit_rules.end());

-    // Removes duplicates
     sort(rate_limit_rules.begin(), rate_limit_rules.end());
-    rate_limit_rules.erase(std::unique(rate_limit_rules.begin(), rate_limit_rules.end()), rate_limit_rules.end());

-    std::for_each(
-        rate_limit_rules.begin(),
-        rate_limit_rules.end(),
-        [this](RateLimitRule &rule) { if (rule.isExactMatch()) { addSiblingRateLimitRule(rule); } }
-    );
+    addSiblingRateLimitRules();

-    dbgTrace(D_REVERSE_PROXY)
+    dbgTrace(D_RATE_LIMIT)
         << "Final rate-limit rules: "
-        << makeSeparatedStr(rate_limit_rules, "; ")
-        << "; Mode: "
-        << mode;
+        << makeSeparatedStr(rate_limit_rules, "; ");

-    setIsActive(mode != "Inactive");
+    setIsActive(mode != RateLimitAction::INACTIVE);
 }

 const RateLimitRule
-RateLimitConfig::findLongestMatchingRule(const std::string &nginx_uri) const
+RateLimitConfig::findLongestMatchingRule(const string &nginx_uri) const
 {
-    dbgFlow(D_REVERSE_PROXY) << "Trying to find a matching rat-limit rule for NGINX URI: " << nginx_uri;
+    dbgFlow(D_RATE_LIMIT) << "Trying to find a matching rat-limit rule for NGINX URI: " << nginx_uri;

     size_t longest_len = 0;
     RateLimitRule longest_matching_rule;
     for (const RateLimitRule &rule : rate_limit_rules) {
         if (rule.getRateLimitUri() == nginx_uri) {
-            dbgTrace(D_REVERSE_PROXY) << "Found exact rate-limit match: " << rule;
+            dbgTrace(D_RATE_LIMIT) << "Found exact rate-limit match: " << rule;
             return rule;
         }

@@ -143,15 +186,15 @@ RateLimitConfig::findLongestMatchingRule(const std::string &nginx_uri) const
             continue;
         }

-        if (std::equal(rule.getRateLimitUri().rbegin(), rule.getRateLimitUri().rend(), nginx_uri.rbegin())) {
+        if (equal(rule.getRateLimitUri().rbegin(), rule.getRateLimitUri().rend(), nginx_uri.rbegin())) {
             if (rule.getRateLimitUri().size() > longest_len) {
                 longest_matching_rule = rule;
                 longest_len = rule.getRateLimitUri().size();
-                dbgTrace(D_REVERSE_PROXY) << "Longest matching rate-limit rule so far: " << rule;
+                dbgTrace(D_RATE_LIMIT) << "Longest matching rate-limit rule so far: " << rule;
             }
         }
     }

-    dbgTrace(D_REVERSE_PROXY) << "Longest matching rate-limit rule: " << longest_matching_rule;
+    dbgTrace(D_RATE_LIMIT) << "Longest matching rate-limit rule: " << longest_matching_rule;
     return longest_matching_rule;
 }
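findLongestMatchingRule prefers an exact URI match and otherwise keeps the longest configured URI that is a suffix of the requested one (the reverse-iterator std::equal above). A standalone sketch of that selection over plain strings, with an added length guard before the reverse comparison:

#include <algorithm>
#include <iostream>
#include <string>
#include <vector>

// Returns the configured URI that best matches the request: exact match wins,
// otherwise the longest configured URI that is a suffix of the request URI.
std::string findLongestMatchingUri(const std::vector<std::string> &rule_uris, const std::string &nginx_uri)
{
    std::string best;
    for (const std::string &rule_uri : rule_uris) {
        if (rule_uri == nginx_uri) return rule_uri;
        if (rule_uri.size() > nginx_uri.size()) continue;  // guard before the reverse comparison
        bool is_suffix = std::equal(rule_uri.rbegin(), rule_uri.rend(), nginx_uri.rbegin());
        if (is_suffix && rule_uri.size() > best.size()) best = rule_uri;
    }
    return best;
}

int main()
{
    std::vector<std::string> rules = {"/api", "/api/v1", "/login"};
    std::cout << findLongestMatchingUri(rules, "/service/api/v1") << "\n";  // prints /api/v1
    return 0;
}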

@@ -143,7 +143,7 @@ protected:
     size_t getIntervalsCount();

     template<typename T>
-    bool sendObject(T &obj, I_Messaging::Method method, std::string uri)
+    bool sendObject(T &obj, HTTPMethod method, std::string uri)
     {
         I_Messaging *messaging = Singleton::Consume<I_Messaging>::by<WaapComponent>();
         I_AgentDetails *agentDetails = Singleton::Consume<I_AgentDetails>::by<WaapComponent>();

@@ -152,33 +152,28 @@ protected:
             return false;
         }
         if (agentDetails->getOrchestrationMode() == OrchestrationMode::HYBRID) {
-            Flags <MessageConnConfig> conn_flags;
-            conn_flags.setFlag(MessageConnConfig::EXTERNAL);
-            std::string tenant_header = "X-Tenant-Id: " + agentDetails->getTenantId();
-
-            return messaging->sendObject(
-                obj,
-                method,
-                getSharedStorageHost(),
-                80,
-                conn_flags,
-                uri,
-                tenant_header,
-                nullptr,
-                MessageTypeTag::WAAP_LEARNING);
+            MessageMetadata req_md(getSharedStorageHost(), 80);
+            req_md.insertHeader("X-Tenant-Id", agentDetails->getTenantId());
+            auto req_status = messaging->sendSyncMessage(
+                method,
+                uri,
+                obj,
+                MessageCategory::GENERIC,
+                req_md
+            );
+            return req_status.ok();
         }
-        return messaging->sendObject(
-            obj,
-            method,
-            uri,
-            "",
-            nullptr,
-            true,
-            MessageTypeTag::WAAP_LEARNING);
+        auto req_status = messaging->sendSyncMessage(
+            method,
+            uri,
+            obj,
+            MessageCategory::GENERIC
+        );
+        return req_status.ok();
     }

     template<typename T>
-    bool sendObjectWithRetry(T &obj, I_Messaging::Method method, std::string uri)
+    bool sendObjectWithRetry(T &obj, HTTPMethod method, std::string uri)
     {
         I_MainLoop *mainloop = Singleton::Consume<I_MainLoop>::by<WaapComponent>();
         for (uint i = 0; i < max_send_obj_retries; i++)
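The refactor replaces the long positional sendObject signature with a call that carries host, port and extra headers in a metadata object. A standalone sketch of that shape using stand-in types; RequestMetadata and sendSync below are illustrative only and are not the agent's real I_Messaging interface:

#include <iostream>
#include <map>
#include <string>

enum class HTTPMethod { GET, PUT, POST };

// Stand-in for a message-metadata object: destination plus optional headers.
struct RequestMetadata {
    std::string host;
    int port;
    std::map<std::string, std::string> headers;

    RequestMetadata(std::string h, int p) : host(std::move(h)), port(p) {}
    void insertHeader(const std::string &key, const std::string &value) { headers[key] = value; }
};

// Stand-in for a synchronous send: all connection details come from the metadata.
bool sendSync(HTTPMethod, const std::string &uri, const std::string &body, const RequestMetadata &md)
{
    std::cout << "PUT " << md.host << ":" << md.port << uri
              << " (headers: " << md.headers.size() << ", body bytes: " << body.size() << ")\n";
    return true;
}

int main()
{
    RequestMetadata req_md("shared-storage.local", 80);   // host name is a placeholder
    req_md.insertHeader("X-Tenant-Id", "tenant-1234");
    return sendSync(HTTPMethod::PUT, "/api/sync", "{}", req_md) ? 0 : 1;
}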
@@ -198,7 +193,7 @@ protected:
     }

     template<typename T>
-    bool sendNoReplyObject(T &obj, I_Messaging::Method method, std::string uri)
+    bool sendNoReplyObject(T &obj, HTTPMethod method, std::string uri)
     {
         I_Messaging *messaging = Singleton::Consume<I_Messaging>::by<WaapComponent>();
         I_AgentDetails *agentDetails = Singleton::Consume<I_AgentDetails>::by<WaapComponent>();

@@ -207,32 +202,26 @@ protected:
             return false;
         }
         if (agentDetails->getOrchestrationMode() == OrchestrationMode::HYBRID) {
-            Flags<MessageConnConfig> conn_flags;
-            conn_flags.setFlag(MessageConnConfig::EXTERNAL);
-            std::string tenant_header = "X-Tenant-Id: " + agentDetails->getTenantId();
-            return messaging->sendNoReplyObject(
-                obj,
-                method,
-                getSharedStorageHost(),
-                80,
-                conn_flags,
-                uri,
-                tenant_header,
-                nullptr,
-                MessageTypeTag::WAAP_LEARNING);
+            MessageMetadata req_md(getSharedStorageHost(), 80);
+            req_md.insertHeader("X-Tenant-Id", agentDetails->getTenantId());
+            return messaging->sendSyncMessageWithoutResponse(
+                method,
+                uri,
+                obj,
+                MessageCategory::GENERIC,
+                req_md
+            );
         }
-        return messaging->sendNoReplyObject(
-            obj,
-            method,
-            uri,
-            "",
-            nullptr,
-            true,
-            MessageTypeTag::WAAP_LEARNING);
+        return messaging->sendSyncMessageWithoutResponse(
+            method,
+            uri,
+            obj,
+            MessageCategory::GENERIC
+        );
     }

     template<typename T>
-    bool sendNoReplyObjectWithRetry(T &obj, I_Messaging::Method method, std::string uri)
+    bool sendNoReplyObjectWithRetry(T &obj, HTTPMethod method, std::string uri)
     {
         I_MainLoop *mainloop= Singleton::Consume<I_MainLoop>::by<WaapComponent>();
         for (uint i = 0; i < max_send_obj_retries; i++)

@@ -273,6 +262,7 @@ private:
     size_t m_intervalsCounter;
     bool m_remoteSyncEnabled;
     const std::string m_assetId;
+    const bool m_isAssetIdUuid;
     std::string m_type;
     std::string m_lastProcessedModified;
     Maybe<std::string> m_shared_storage_host;

@@ -375,12 +375,12 @@ ReputationFeaturesAgg::Impl::reportReputationFeatures()
         string uri = "/storage/waap/" + tenantId + "/reputation/" +
             to_string(chrono::duration_cast<chrono::hours>(currentTime).count()) +
             "/" + agentId + "/data.data";
-        msg->sendObjectWithPersistence(report,
-            I_Messaging::Method::PUT,
+        msg->sendAsyncMessage(
+            HTTPMethod::PUT,
             uri,
-            "",
-            true,
-            MessageTypeTag::WAAP_LEARNING);
+            report
+        );
     }
 }

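The reputation report is written to a per-tenant path that is bucketed by the current epoch hour, so each agent posts one object per hour. A standalone sketch of composing such a path with std::chrono; the tenant and agent identifiers are placeholders:

#include <chrono>
#include <iostream>
#include <string>

int main()
{
    std::string tenantId = "tenant-1234";   // placeholder identifiers
    std::string agentId = "agent-5678";

    auto currentTime = std::chrono::system_clock::now().time_since_epoch();
    auto epoch_hours = std::chrono::duration_cast<std::chrono::hours>(currentTime).count();

    // One object per agent per hour under the tenant's reputation folder.
    std::string uri = "/storage/waap/" + tenantId + "/reputation/" +
        std::to_string(epoch_hours) + "/" + agentId + "/data.data";

    std::cout << uri << "\n";
    return 0;
}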

@@ -22,6 +22,7 @@
 #include "i_table.h"
 #include "i_agent_details.h"
 #include "i_instance_awareness.h"
+#include "i_messaging.h"

 class ReputationFeaturesEntry : public TableOpaqueSerialize<ReputationFeaturesEntry>
 {

@@ -16,7 +16,6 @@
 #include "waap.h"
 #include "ConfidenceFile.h"
 #include "i_agent_details.h"
-#include "i_messaging.h"
 #include "i_mainloop.h"
 #include <math.h>

@@ -138,7 +137,7 @@ bool ConfidenceCalculator::postData()

     WindowLogPost currentWindow(m_time_window_logger_backup);
     bool ok = sendNoReplyObjectWithRetry(currentWindow,
-        I_Messaging::Method::PUT,
+        HTTPMethod::PUT,
         url);
     if (!ok) {
         dbgError(D_WAAP_CONFIDENCE_CALCULATOR) << "Failed to post collected data to: " << url;

@@ -164,7 +163,7 @@ void ConfidenceCalculator::pullData(const std::vector<std::string>& files)
         dbgTrace(D_WAAP_CONFIDENCE_CALCULATOR) << "Pulling the file: " << file;
         WindowLogGet getWindow;
         bool ok = sendObjectWithRetry(getWindow,
-            I_Messaging::Method::GET,
+            HTTPMethod::GET,
             getUri() + "/" + file);

         if (!ok) {

@@ -213,7 +212,7 @@ void ConfidenceCalculator::pullProcessedData(const std::vector<std::string>& fil
     {
         ConfidenceFileDecryptor getConfFile;
         bool res = sendObjectWithRetry(getConfFile,
-            I_Messaging::Method::GET,
+            HTTPMethod::GET,
             getUri() + "/" + file);
         is_ok |= res;
         if (res && getConfFile.getConfidenceSet().ok())

@@ -243,7 +242,7 @@ void ConfidenceCalculator::postProcessedData()
     dbgTrace(D_WAAP_CONFIDENCE_CALCULATOR) << "Posting the confidence set object to: " << postUrl;
     ConfidenceFileEncryptor postConfFile(m_confident_sets, m_confidence_level);
     sendNoReplyObjectWithRetry(postConfFile,
-        I_Messaging::Method::PUT,
+        HTTPMethod::PUT,
         postUrl);
 }


@@ -187,6 +187,7 @@ DeepParser::onKv(const char *k, size_t k_len, const char *v, size_t v_len, int f
     bool isCookiePayload = (m_key.first().size() == 6 && m_key.first() == "cookie");
     bool isBodyPayload = (m_key.first().size() == 4 && m_key.first() == "body");

+
     // If csrf/antibot cookie - send to Waf2Transaction for collection of cookie value.
     if (m_depth == 1 && isCookiePayload && (m_key.str() == "x-chkp-csrf-token" || m_key.str() == "__fn1522082288")) {
         std::string cur_val = std::string(v, v_len);

@@ -388,21 +389,17 @@ DeepParser::onKv(const char *k, size_t k_len, const char *v, size_t v_len, int f
         && m_parsersDeque.size() > parser_depth
         &&!m_parsersDeque.at(parser_depth)->getRecursionFlag()
     ) {
-        ScopedContext ctx;
-        ctx.registerValue<IWaf2Transaction *>("waap_transaction", m_pTransaction);
         rc = pushValueToTopParser(cur_val, flags, base64ParamFound, offset, parser_depth);
         if (rc != CONTINUE_PARSING) {
             if (shouldUpdateKeyStack) {
                 m_key.pop("deep parser key");
             }

             m_depth--;
             return rc;
         }
     }

     if (rc == CONTINUE_PARSING) {
         // Try to eliminate m_multipart_boundary to allow other parser to work instead of multipart
         if (m_depth == 1

@@ -853,21 +850,17 @@ DeepParser::parseAfterMisleadingMultipartBoundaryCleaned(
         && m_parsersDeque.size() > parser_depth
         &&!m_parsersDeque.at(parser_depth)->getRecursionFlag()
     ) {
-        ScopedContext ctx;
-        ctx.registerValue<IWaf2Transaction *>("waap_transaction", m_pTransaction);
         rc = pushValueToTopParser(cur_val, flags, base64ParamFound, offset, parser_depth);
         if (rc != CONTINUE_PARSING) {
             if (shouldUpdateKeyStack) {
                 m_key.pop("deep parser key");
             }

             m_depth--;
             return rc;
         }
     }

     return rc;
 }

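In both hunks the ScopedContext registration of the current transaction is dropped; the transaction pointer is instead handed to the parsers directly (see the ParserGql and ParserJson constructor changes further down). A standalone sketch contrasting the two styles with stand-in types, as an illustration of the design choice rather than the agent's actual classes:

#include <iostream>
#include <map>
#include <string>

struct Transaction { int id; };

// Style 1: implicit lookup through a registry keyed by name (what the scoped context provided).
std::map<std::string, Transaction *> g_context;

void parseWithContextLookup()
{
    Transaction *tx = g_context.count("waap_transaction") ? g_context["waap_transaction"] : nullptr;
    std::cout << "lookup style, tx id: " << (tx ? tx->id : -1) << "\n";
}

// Style 2: explicit injection; the dependency is visible in the constructor signature.
class Parser {
public:
    explicit Parser(Transaction *tx) : m_tx(tx) {}
    void parse() const { std::cout << "injection style, tx id: " << (m_tx ? m_tx->id : -1) << "\n"; }
private:
    Transaction *m_tx;
};

int main()
{
    Transaction tx{42};

    g_context["waap_transaction"] = &tx;
    parseWithContextLookup();

    Parser parser(&tx);
    parser.parse();
    return 0;
}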
@@ -1091,7 +1084,12 @@ DeepParser::createInternalParser(
     ) {
         // Graphql value detected
         dbgTrace(D_WAAP_DEEP_PARSER) << "Starting to parse graphql";
-        m_parsersDeque.push_back(std::make_shared<BufferedParser<ParserGql>>(*this, parser_depth + 1));
+        m_parsersDeque.push_back(std::make_shared<BufferedParser<ParserGql>>(
+            *this,
+            parser_depth + 1,
+            m_pTransaction));
+
         offset = 0;
     } else if (cur_val.length() > 0
         && (cur_val[0] == '[' || cur_val[0] == '{')

@@ -1123,12 +1121,12 @@ DeepParser::createInternalParser(
         // but only if the JSON is passed in body and on the top level.
         bool should_collect_for_oa_schema_updater = false;

-        m_parsersDeque.push_back(
-            std::make_shared<BufferedParser<ParserJson>>(
-                *this,
-                should_collect_for_oa_schema_updater,
-                parser_depth + 1
+        m_parsersDeque.push_back(std::make_shared<BufferedParser<ParserJson>>(
+            *this,
+            parser_depth + 1,
+            m_pTransaction,
+            should_collect_for_oa_schema_updater
         ));
         offset = 0;
     }
 }

@@ -1326,7 +1324,6 @@ DeepParser::createInternalParser(
     return offset;
 }


 void
 DeepParser::apiProcessKey(const char *v, size_t v_len)
 {

@@ -14,7 +14,6 @@
 #pragma once

 #include "i_indicatorsFilter.h"
-#include "i_messaging.h"
 #include "waap.h"
 #include "TrustedSources.h"
 #include "TrustedSourcesConfidence.h"

@@ -19,7 +19,6 @@
 #include "TypeIndicatorsFilter.h"
 #include "WaapParameters.h"
 #include "i_waapConfig.h"
-#include "i_messaging.h"
 #include "ScannersDetector.h"
 #include "TuningDecisions.h"
 #include <cereal/cereal.hpp>

@@ -16,15 +16,23 @@
 #include "graphqlparser/AstVisitor.h"
 #include "graphqlparser/GraphQLParser.h"
 #include "debug.h"
+#include "oas_updater_entry_saver.h"

 USE_DEBUG_FLAG(D_WAAP_PARSER_GQL);
+USE_DEBUG_FLAG(D_OA_SCHEMA_UPDATER);

 const std::string ParserGql::m_parserName = "gqlParser";

-ParserGql::ParserGql(IParserReceiver &receiver, size_t parser_depth) :
+ParserGql::ParserGql(
+    IParserReceiver &receiver,
+    size_t parser_depth,
+    IWaf2Transaction *pTransaction)
+    :
     m_receiver(receiver),
     m_error(false),
     m_curNameValues(0),
+    m_pTransaction(pTransaction),
+    field_depth(0),
     m_parser_depth(parser_depth)
 {
     dbgFlow(D_WAAP_PARSER_GQL);

@@ -93,6 +101,7 @@ bool ParserGql::visitValue(const char *value)
 bool ParserGql::visitName(const facebook::graphql::ast::Name &node)
 {
     dbgTrace(D_WAAP_PARSER_GQL) << node.getValue() << "'";

     bool ret = true;
     if (m_curNameValues == 0 && !m_curNodeName.empty()) {
         ret = m_receiver.onKv(

@@ -105,6 +114,13 @@ bool ParserGql::visitName(const facebook::graphql::ast::Name &node)
     return ret;
 }

+bool ParserGql::visitOperationDefinition(const facebook::graphql::ast::OperationDefinition &node)
+{
+    dbgFlow(D_OA_SCHEMA_UPDATER) << "getOperation()";
+    return true;
+}
+
+
 bool ParserGql::visitIntValue(const facebook::graphql::ast::IntValue &node)
 {
     dbgFlow(D_WAAP_PARSER_GQL);

@@ -22,10 +22,19 @@
 #include "graphqlparser/AstNode.h"
 #include "graphqlparser/AstVisitor.h"
 #include "KeyStack.h"
+#include "i_transaction.h"
+#include "singleton.h"
+#include "i_oa_schema_updater.h"

-class ParserGql : public ParserBase, public facebook::graphql::ast::visitor::AstVisitor {
+class ParserGql :
+    public ParserBase,
+    public facebook::graphql::ast::visitor::AstVisitor,
+    Singleton::Consume<I_OASUpdater> {
 public:
-    ParserGql(IParserReceiver &receiver, size_t parser_depth);
+    ParserGql(
+        IParserReceiver &receiver,
+        size_t parser_depth,
+        IWaf2Transaction *pTransaction=nullptr);
     virtual ~ParserGql();
     size_t push(const char *data, size_t data_len);
     void finish();

@@ -38,10 +47,12 @@ private:
     std::string m_buffer;
     std::string m_curNodeName;
     int m_curNameValues;
+    IWaf2Transaction *m_pTransaction;
+    int field_depth;
     bool visitValue(const char *value);

     // Callbacks from the parser
+    bool visitOperationDefinition(const facebook::graphql::ast::OperationDefinition &node) override;
     bool visitName(const facebook::graphql::ast::Name &node) override;
     bool visitIntValue(const facebook::graphql::ast::IntValue &node) override;
     bool visitFloatValue(const facebook::graphql::ast::FloatValue &node) override;

@@ -249,8 +249,9 @@ ParserJson::p_end_array(void *ctx)

 ParserJson::ParserJson(
     IParserReceiver &receiver,
-    bool should_collect_oas,
     size_t parser_depth,
+    IWaf2Transaction *pTransaction,
+    bool should_collect_oas,
     IParserReceiver2 *receiver2)
     :
     m_receiver(receiver),

@@ -260,10 +261,12 @@ ParserJson::ParserJson(
     m_key("json_parser"),
     m_jsonHandler(NULL),
     is_map_empty(false),
-    should_collect_for_oa_schema_updater(should_collect_oas),
-    m_parser_depth(parser_depth)
+    m_pTransaction(pTransaction),
+    m_parser_depth(parser_depth),
+    is_graphql_operation_name(false)
 {
     dbgTrace(D_WAAP_PARSER_JSON) << "parser_depth= " << parser_depth;
+    should_collect_for_oa_schema_updater = should_collect_oas;

     // TODO:: do we really want to clear this?
     memset(m_buf, 0, sizeof(m_buf));

@@ -20,17 +20,22 @@
 #include "ParserBase.h"
 #include "KeyStack.h"
 #include "yajl/yajl_parse.h"
+#include "singleton.h"
+#include "i_oa_schema_updater.h"

 #define FIRST_JSON_BUFFER_SIZE 4 // must buffer at least 4 first bytes to allow unicode autodetection (BOM).

 typedef size_t yajl_size_t;

-class ParserJson : public ParserBase {
+class ParserJson :
+    public ParserBase,
+    Singleton::Consume<I_OASUpdater> {
 public:
     ParserJson(
         IParserReceiver &receiver,
-        bool should_collect_for_oa_schema_updater=false,
         size_t parser_depth=0,
+        IWaf2Transaction *pTransaction = nullptr,
+        bool should_collect_for_oa_schema_updater=false,
         IParserReceiver2 *receiver2=NULL);
     virtual ~ParserJson();
     size_t push(const char *data, size_t data_len);

@@ -86,8 +91,9 @@ private:
     yajl_handle m_jsonHandler;
     bool is_map_empty;
     bool should_collect_for_oa_schema_updater;
+    IWaf2Transaction *m_pTransaction;
     size_t m_parser_depth;
+    bool is_graphql_operation_name;
 public:
     static const std::string m_parserName;
 };
Some files were not shown because too many files have changed in this diff.