Mirror of https://github.com/esphome/esphome.git, synced 2024-11-23 07:28:10 +01:00
Reserve less memory for json (#3289)
parent 0729ed538e
commit 4e4a512107
1 changed file with 43 additions and 17 deletions
@@ -16,16 +16,24 @@ static const char *const TAG = "json";
 
 static std::vector<char> global_json_build_buffer;  // NOLINT
 
 std::string build_json(const json_build_t &f) {
-  // Here we are allocating as much heap memory as available minus 2kb to be safe
+  // Here we are allocating up to 5kb of memory,
+  // with the heap size minus 2kb to be safe if less than 5kb
   // as we can not have a true dynamic sized document.
   // The excess memory is freed below with `shrinkToFit()`
 #ifdef USE_ESP8266
-  const size_t free_heap = ESP.getMaxFreeBlockSize() - 2048;  // NOLINT(readability-static-accessed-through-instance)
+  const size_t free_heap = ESP.getMaxFreeBlockSize();  // NOLINT(readability-static-accessed-through-instance)
 #elif defined(USE_ESP32)
-  const size_t free_heap = heap_caps_get_largest_free_block(MALLOC_CAP_INTERNAL) - 2048;
+  const size_t free_heap = heap_caps_get_largest_free_block(MALLOC_CAP_INTERNAL);
 #endif
 
-  DynamicJsonDocument json_document(free_heap);
+  const size_t request_size = std::min(free_heap - 2048, (size_t) 5120);
+
+  DynamicJsonDocument json_document(request_size);
+  if (json_document.memoryPool().buffer() == nullptr) {
+    ESP_LOGE(TAG, "Could not allocate memory for JSON document! Requested %u bytes, largest free heap block: %u bytes",
+             request_size, free_heap);
+    return "{}";
+  }
   JsonObject root = json_document.to<JsonObject>();
   f(root);
   json_document.shrinkToFit();
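
In build_json() the change caps the initial pool at 5 KiB (5120 bytes) instead of reserving nearly the whole heap, keeping 2 KiB of headroom when less than that is available; shrinkToFit() still returns whatever the document did not use. A minimal standalone sketch of that clamp: the 5120 and 2048 constants come from the diff, while the function name and sample heap values are illustrative only.

#include <algorithm>
#include <cstddef>
#include <cstdio>

// Same clamp as the new build_json() path: request at most 5 KiB, but never
// more than the largest free block minus 2 KiB of headroom.
// Caveat carried over from the committed code: if the largest free block is
// smaller than 2048 bytes, the unsigned subtraction wraps around.
static size_t request_size_for(size_t largest_free_block) {
  return std::min(largest_free_block - 2048, (size_t) 5120);
}

int main() {
  std::printf("%zu\n", request_size_for(30000));  // ample heap -> 5120
  std::printf("%zu\n", request_size_for(6000));   // tight heap -> 3952
  return 0;
}
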
@@ -36,27 +44,45 @@ std::string build_json(const json_build_t &f) {
 }
 
 void parse_json(const std::string &data, const json_parse_t &f) {
-  // Here we are allocating as much heap memory as available minus 2kb to be safe
+  // Here we are allocating 1.5 times the data size,
+  // with the heap size minus 2kb to be safe if less than that
   // as we can not have a true dynamic sized document.
   // The excess memory is freed below with `shrinkToFit()`
 #ifdef USE_ESP8266
-  const size_t free_heap = ESP.getMaxFreeBlockSize() - 2048;  // NOLINT(readability-static-accessed-through-instance)
+  const size_t free_heap = ESP.getMaxFreeBlockSize();  // NOLINT(readability-static-accessed-through-instance)
 #elif defined(USE_ESP32)
-  const size_t free_heap = heap_caps_get_largest_free_block(MALLOC_CAP_INTERNAL) - 2048;
+  const size_t free_heap = heap_caps_get_largest_free_block(MALLOC_CAP_INTERNAL);
 #endif
+  bool pass = false;
+  do {
+    const size_t request_size = std::min(free_heap - 2048, (size_t)(data.size() * 1.5));
 
-  DynamicJsonDocument json_document(free_heap);
-  DeserializationError err = deserializeJson(json_document, data);
-  json_document.shrinkToFit();
+    DynamicJsonDocument json_document(request_size);
+    if (json_document.memoryPool().buffer() == nullptr) {
+      ESP_LOGE(TAG, "Could not allocate memory for JSON document! Requested %u bytes, free heap: %u", request_size,
+               free_heap);
+      return;
+    }
+    DeserializationError err = deserializeJson(json_document, data);
+    json_document.shrinkToFit();
 
-  JsonObject root = json_document.as<JsonObject>();
+    JsonObject root = json_document.as<JsonObject>();
 
-  if (err) {
-    ESP_LOGW(TAG, "Parsing JSON failed.");
-    return;
-  }
-  f(root);
+    if (err == DeserializationError::Ok) {
+      pass = true;
+      f(root);
+    } else if (err == DeserializationError::NoMemory) {
+      if (request_size * 2 >= free_heap) {
+        ESP_LOGE(TAG, "Can not allocate more memory for deserialization. Consider making source string smaller");
+        return;
+      }
+      ESP_LOGW(TAG, "Increasing memory allocation.");
+      continue;
+    } else {
+      ESP_LOGE(TAG, "JSON parse error: %s", err.c_str());
+      return;
+    }
+  } while (!pass);
 }
 
 }  // namespace json
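
Worth noting about the parse_json() retry loop above: request_size is a const recomputed from the same free_heap and data.size() on every pass, so after logging "Increasing memory allocation." the continue path requests exactly the same pool size again. A grow-and-retry loop needs the size to persist and actually grow across iterations. A sketch of that shape, under stated assumptions: ArduinoJson 6 as used by the diff, a hypothetical json_parse_t alias matching how f is called, and free_heap passed in rather than queried per platform.

#include <ArduinoJson.h>

#include <algorithm>
#include <cstddef>
#include <functional>
#include <string>

// Assumed callback type, matching how `f(root)` is invoked in the diff.
using json_parse_t = std::function<void(JsonObject)>;

// Sketch: retry deserialization, doubling the pool on NoMemory until the
// next step would exceed the largest free heap block.
void parse_json_sketch(const std::string &data, const json_parse_t &f, size_t free_heap) {
  size_t request_size = std::min(free_heap - 2048, (size_t)(data.size() * 1.5));
  while (true) {
    DynamicJsonDocument json_document(request_size);
    if (json_document.memoryPool().buffer() == nullptr)
      return;  // pool allocation itself failed
    DeserializationError err = deserializeJson(json_document, data);
    if (err == DeserializationError::Ok) {
      json_document.shrinkToFit();  // release the unused part of the pool
      f(json_document.as<JsonObject>());
      return;
    }
    if (err != DeserializationError::NoMemory)
      return;  // genuine parse error; a bigger pool will not help
    if (request_size * 2 >= free_heap)
      return;  // same guard as the diff: do not outgrow the heap
    request_size *= 2;  // the growth step that makes the retry meaningful
  }
}

Every path either succeeds, stops on a non-memory parse error, or stops at the heap guard, so the loop terminates.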