Logging update: optimized transaction insertion

This commit is contained in:
doggo 2021-06-03 16:40:08 -05:00
parent 9e056a640a
commit a2287a4d36
2 changed files with 12 additions and 25 deletions

View file

@ -7,39 +7,24 @@
struct Log
{
std::vector<Transaction> data;
void AddTrans(Transaction &&v)
void AddTrans(Transaction &&t)
{
if (data.capacity() == data.size() && data.size() < max_log_size) //if memory reserved is full and max isnt reached
{
if (data.size() + pre_log_size > max_log_size) //if prefetched memory is larger then max
{
//std::cout << "allocating " << max_log_size << '\n';
data.reserve(max_log_size); //just allocate max
}
else
{
//std::cout << "allocating " << data.size() + pre_log_size << '\n';
data.reserve(data.size() + pre_log_size); //prefetching memory
}
}
if (data.size() == max_log_size)
{
for (size_t i = data.size() - 1; i > 0; --i)
for (auto i = data.size() - 1; i > 0; i--)
{
data[i] = std::move(data[i - 1]);
data[i - 1] == std::move(data[i])
}
data[0] = std::move(v);
}
else
{
data.push_back(std::move(v));
else if (data.size() == data.capacity()) {
data.reserve(data.capacity() + pre_alloc)
}
//std::cout << "size is " << data.size() << '\n';
data[data.size() - 1] = std::move(t)
}
Json::Value Serialize() const
{
Json::Value res;
for (uint32_t i = 0; i < data.size(); ++i)
for (uint32_t i = 0; i < end; ++i)
{
res[i]["to"] = data[i].to;
res[i]["from"] = data[i].from;
@ -48,4 +33,4 @@ struct Log
}
return res;
}
};
};

View file

@ -1,3 +1,5 @@
#pragma once
// Compile-time tuning for the transaction log.
// `max_log_size` must be divisible by `pre_alloc` so chunked growth
// lands exactly on the cap.
// `inline` (C++17) gives each constant one definition across all TUs
// that include this header.
inline constexpr auto max_log_size = 100; // hard upper bound on retained transactions
inline constexpr auto pre_alloc = 10;     // number of slots reserved per growth step