
chore: add a script that parses allocator tracking logs (#3687)

Roman Gershman 2024-09-10 10:26:44 +03:00 committed by GitHub
parent 257749263b
commit 3cdc8fa128
2 changed files with 47 additions and 1 deletion

@@ -85,7 +85,8 @@ void AllocationTracker::ProcessDelete(void* ptr) {
   if (tracking_.size() == 1 && tracking_.front().sample_odds == 1) {
     size_t usable = mi_usable_size(ptr);
     if (usable <= tracking_.front().upper_bound && usable >= tracking_.front().lower_bound) {
-      LOG(INFO) << "Deallocating " << usable << " bytes (" << ptr << ")";
+      LOG(INFO) << "Deallocating " << usable << " bytes (" << ptr << ")\n"
+                << util::fb2::GetStacktrace();
     }
   }
   inside_tracker_ = false;
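
The stacktrace is emitted after a "\n", so the "Deallocating <size> bytes (<ptr>)" text stays on a single log line and remains matchable by a line-oriented parser. The sketch below (Python, with made-up addresses and an assumed glog-style prefix; the allocation-side wording is inferred from the regex in the new script, not from this hunk) checks that both message formats are picked up:

import re

# Patterns copied from parse_allocator_tracking_logs.py.
alloc_re = re.compile(r"Allocating (\d+) bytes \((0x[0-9a-f]+)\)")
dealloc_re = re.compile(r"Deallocating (\d+) bytes \((0x[0-9a-f]+)\)")

# Hypothetical log lines; the prefix, file/line numbers and addresses are illustrative only.
sample_alloc = "I20240910 10:26:44.123456 1234 allocation_tracker.cc:70] Allocating 1024 bytes (0x7f3a2c001000)"
sample_dealloc = "I20240910 10:26:44.123500 1234 allocation_tracker.cc:88] Deallocating 1024 bytes (0x7f3a2c001000)"

assert alloc_re.search(sample_alloc).groups() == ("1024", "0x7f3a2c001000")
assert dealloc_re.search(sample_dealloc).groups() == ("1024", "0x7f3a2c001000")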

@@ -0,0 +1,45 @@
#!/usr/bin/env python3
"""
Usage:
  1. First run Dragonfly with the tracking allocator enabled. It must be a single allocator
     range with a 100% sampling rate to catch both allocations and deallocations.
  2. Finish tracking.
  3. cat /tmp/dragonfly.INFO | ./parse_allocator_tracking_logs.py
"""
import re
import sys


def parse_log(log_lines):
    # Maps address -> (size, original log line) for every allocation not yet freed.
    memory_map = {}
    allocation_pattern = re.compile(r"Allocating (\d+) bytes \((0x[0-9a-f]+)\)")
    deallocation_pattern = re.compile(r"Deallocating (\d+) bytes \((0x[0-9a-f]+)\)")

    for line in log_lines:
        allocation_match = allocation_pattern.search(line)
        deallocation_match = deallocation_pattern.search(line)

        if allocation_match:
            size = int(allocation_match.group(1))
            address = allocation_match.group(2)
            assert address not in memory_map
            memory_map[address] = (size, line)
        elif deallocation_match:
            size = int(deallocation_match.group(1))
            address = deallocation_match.group(2)
            if address in memory_map:
                assert size == memory_map[address][0]
                del memory_map[address]
            else:
                print(f"Deallocating non-existing address: {address} {size}")

    # Whatever remains was allocated but never deallocated during tracking.
    return memory_map


if __name__ == "__main__":
    log_lines = sys.stdin.readlines()
    memory_map = parse_log(log_lines)

    for address, item in memory_map.items():
        print(f"Address: {address}, Size: {item[0]} bytes, original line: `{item[1]}`")