Mirror of https://github.com/dragonflydb/dragonfly.git, synced 2024-12-14 11:58:02 +00:00
chore: let resp parser provide more useful logs (#4273)
1. More warning logs around inputs that produce BAD_ARRAYLEN errors.
2. Lift the restriction on big bulk strings and log a warning instead.
3. Pull helio.

Probably fixes #4213

Signed-off-by: Roman Gershman <roman@dragonflydb.io>
parent 03d679ac31
commit bf4f45a7c2
5 changed files with 34 additions and 13 deletions
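For orientation (an illustration of RESP framing, not taken from this diff): a header of the form *<n>\r\n announces an array of n elements, and $<n>\r\n announces a bulk string of n bytes, so a command like GET foo arrives on the wire as

    *2\r\n$3\r\nGET\r\n$3\r\nfoo\r\n

In the parser changes below, ConsumeArrayLen handles the *<n> header and ParseArg handles each $<n> header ($-1 being the RESP2 NIL); malformed or oversized values in these headers are what the new warnings report.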
helio (submodule)
@@ -1 +1 @@
-Subproject commit ff9b6cd35bf082a9d48cf0904b0e8557cf31b6d2
+Subproject commit 32e8c4ec830e8d4224d8a13a11c1607f357da80f
src/facade/redis_parser.cc
@@ -11,6 +11,7 @@
 namespace facade {
 
 using namespace std;
+constexpr static long kMaxBulkLen = 256 * (1ul << 20); // 256MB.
 
 auto RedisParser::Parse(Buffer str, uint32_t* consumed, RespExpr::Vec* res) -> Result {
   DCHECK(!str.empty());
@@ -218,7 +219,11 @@ auto RedisParser::ParseLen(Buffer str, int64_t* res) -> ResultConsumed {
   const char* s = reinterpret_cast<const char*>(str.data());
   const char* pos = reinterpret_cast<const char*>(memchr(s, '\n', str.size()));
   if (!pos) {
-    Result r = str.size() < 32 ? INPUT_PENDING : BAD_ARRAYLEN;
+    Result r = INPUT_PENDING;
+    if (str.size() >= 32) {
+      LOG(WARNING) << "Unexpected format " << string_view{s, str.size()};
+      r = BAD_ARRAYLEN;
+    }
     return {r, 0};
   }
 
@@ -227,10 +232,16 @@ auto RedisParser::ParseLen(Buffer str, int64_t* res) -> ResultConsumed {
   }
 
   // Skip the first character and 2 last ones (\r\n).
-  bool success = absl::SimpleAtoi(std::string_view{s + 1, size_t(pos - 1 - s)}, res);
-  unsigned consumed = pos - s + 1;
+  string_view len_token{s + 1, size_t(pos - 1 - s)};
+  bool success = absl::SimpleAtoi(len_token, res);
 
-  return ResultConsumed{success ? OK : BAD_ARRAYLEN, consumed};
+  unsigned consumed = pos - s + 1;
+  if (success && *res >= -1) {
+    return ResultConsumed{OK, consumed};
+  }
+
+  LOG(WARNING) << "Failed to parse len " << len_token;
+  return ResultConsumed{BAD_ARRAYLEN, consumed};
 }
 
 auto RedisParser::ConsumeArrayLen(Buffer str) -> ResultConsumed {
@@ -247,8 +258,8 @@ auto RedisParser::ConsumeArrayLen(Buffer str) -> ResultConsumed {
     len *= 2;
   }
 
-  if (len < -1 || len > max_arr_len_) {
-    LOG_IF(WARNING, len > max_arr_len_) << "Multibulk len is too large " << len;
+  if (len > max_arr_len_) {
+    LOG(WARNING) << "Multibulk len is too large " << len;
     return {BAD_ARRAYLEN, res.second};
   }
 
@@ -310,15 +321,14 @@ auto RedisParser::ParseArg(Buffer str) -> ResultConsumed {
     return res;
   }
 
-  if (len < -1 || len > kMaxBulkLen)
-    return {BAD_ARRAYLEN, res.second};
-
   if (len == -1) { // Resp2 NIL
     cached_expr_->emplace_back(RespExpr::NIL);
     cached_expr_->back().u = Buffer{};
     HandleFinishArg();
   } else {
     DVLOG(1) << "String(" << len << ")";
+    LOG_IF(WARNING, len > kMaxBulkLen) << "Large bulk len: " << len;
+
     cached_expr_->emplace_back(RespExpr::STRING);
     cached_expr_->back().u = Buffer{};
     bulk_len_ = len;
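A minimal, hypothetical sketch (not part of this commit) of inputs that exercise the new warning paths in ParseLen, assuming the RedisParserTest fixture and its Parse() helper shown in the test diff below; the returned result codes are unchanged from before, only the warning logs are new:

TEST_F(RedisParserTest, WarnOnBadLenToken) {
  // Non-numeric length token: now logged as "Failed to parse len ..." before BAD_ARRAYLEN.
  ASSERT_EQ(RedisParser::BAD_ARRAYLEN, Parse("*x\r\n"));
}

TEST_F(RedisParserTest, WarnOnNegativeLen) {
  // Lengths below -1 now fail the new *res >= -1 check inside ParseLen.
  ASSERT_EQ(RedisParser::BAD_ARRAYLEN, Parse("*-2\r\n"));
}

TEST_F(RedisParserTest, WarnOnMissingNewline) {
  // 32 or more bytes without a newline: now logged as "Unexpected format ...".
  ASSERT_EQ(RedisParser::BAD_ARRAYLEN, Parse("*0000000000000000000000000000000000"));
}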
src/facade/redis_parser.h
@@ -22,8 +22,6 @@ namespace facade {
  */
 class RedisParser {
  public:
-  constexpr static long kMaxBulkLen = 256 * (1ul << 20); // 256MB.
-
  enum Result : uint8_t {
    OK,
    INPUT_PENDING,
src/facade/redis_parser_test.cc
@@ -170,7 +170,7 @@ TEST_F(RedisParserTest, Empty) {
 }
 
 TEST_F(RedisParserTest, LargeBulk) {
-  std::string_view prefix("*1\r\n$1024\r\n");
+  string_view prefix("*1\r\n$1024\r\n");
 
   ASSERT_EQ(RedisParser::INPUT_PENDING, Parse(prefix));
   ASSERT_EQ(prefix.size(), consumed_);
@@ -191,6 +191,18 @@ TEST_F(RedisParserTest, LargeBulk) {
   ASSERT_EQ(RedisParser::INPUT_PENDING, Parse(part1));
   ASSERT_EQ(RedisParser::INPUT_PENDING, Parse(half));
   ASSERT_EQ(RedisParser::OK, Parse("\r\n"));
+
+  prefix = "*1\r\n$270000000\r\n";
+  ASSERT_EQ(RedisParser::INPUT_PENDING, Parse(prefix));
+  ASSERT_EQ(prefix.size(), consumed_);
+  string chunk(1000000, 'a');
+  for (unsigned i = 0; i < 270; ++i) {
+    ASSERT_EQ(RedisParser::INPUT_PENDING, Parse(chunk));
+    ASSERT_EQ(chunk.size(), consumed_);
+  }
+  ASSERT_EQ(RedisParser::OK, Parse("\r\n"));
+  ASSERT_THAT(args_, ElementsAre(ArgType(RespExpr::STRING)));
+  EXPECT_EQ(270000000, args_[0].GetBuf().size());
 }
 
 TEST_F(RedisParserTest, NILs) {
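For scale (numbers derived from the constants above, not stated explicitly in the diff): kMaxBulkLen is 256 * 2^20 = 268,435,456 bytes, while the extended LargeBulk test announces a $270000000 bulk string and streams it as 270 chunks of 1,000,000 bytes, roughly 1.5 MB past the old cap. Before this change ParseArg rejected such a header outright with BAD_ARRAYLEN; now it only emits the "Large bulk len" warning and parses the value.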
src/server/server_family.cc
@@ -877,6 +877,7 @@ void ServerFamily::Init(util::AcceptServer* acceptor, std::vector<facade::Listen
         absl::GetFlag(FLAGS_s3_ec2_metadata), absl::GetFlag(FLAGS_s3_sign_payload));
 #else
     LOG(ERROR) << "Compiled without AWS support";
+    exit(1);
 #endif
   } else if (IsGCSPath(flag_dir)) {
     auto gcs = std::make_shared<detail::GcsSnapshotStorage>();