Mirror of https://git.notmuchmail.org/git/notmuch — synced 2024-11-21 10:28:09 +01:00 — commit e2a3e5fa51
This change addresses two known issues with large sets of changes to the database. The first is that, as reported by Steven Allen [1], notmuch commits are not "flushed" when they complete, which means that if there is an open transaction when the database closes (or e.g. the program crashes), then all changes since the last commit will be discarded (nothing is irrecoverably lost for "notmuch new", as the indexing process just restarts the next time it is run). This does not really "fix" the issue reported in [1]; that seems rather difficult given how transactions work in Xapian. On the other hand, with the default settings, this should mean one loses less than a minute's worth of work. The second issue is the occasionally reported "storm" of disk writes when notmuch finishes. I don't yet have a test for this, but I think committing as we go should reduce the amount of work when finalizing the database. [1]: id:20151025210215.GA3754@stebalien.com
35 lines · 805 B · Bash · Executable file
#!/usr/bin/env bash

# Test for database transaction handling: verifies that periodic
# autocommit flushes preserve already-committed messages even when
# the database is never properly closed.
test_description='transactions'

. $(dirname "$0")/test-lib.sh || exit 1
# Build a shim that overrides notmuch_database_close() so the database
# is never actually closed: instead it starts another atomic section,
# leaving the current transaction pending.  Any changes not already
# flushed by an autocommit are therefore discarded when the process
# exits.  (The heredoc body is C source compiled by make_shim.)
make_shim no-close <<EOF
#include <notmuch.h>
#include <stdio.h>
notmuch_status_t
notmuch_database_close (notmuch_database_t *notmuch)
{
return notmuch_database_begin_atomic (notmuch);
}
EOF
# Generate a corpus of 1024 test messages with distinct subjects and
# bodies.  1024 is deliberately larger than the autocommit batch size
# of 1000 used later, so one full batch plus a partial one is created.
# Use $(...) instead of legacy backticks for command substitution.
for i in $(seq 1 1024)
do
    generate_message '[subject]="'"subject $i"'"' \
                     '[body]="'"body $i"'"'
done
# Index the full corpus normally and confirm all 1024 messages land in
# the database.
test_begin_subtest "initial new"
NOTMUCH_NEW > OUTPUT
echo "Added 1024 new messages to the database." > EXPECTED
test_expect_equal_file EXPECTED OUTPUT
# Re-index from scratch with the no-close shim and autocommit set to
# flush every 1000 changes.  Because the shimmed close never commits
# (it leaves an atomic section open), only the messages covered by the
# one completed autocommit batch — the first 1000 — survive; the final
# 24 of the 1024 are discarded with the open transaction.
test_begin_subtest "Some changes saved with open transaction"
notmuch config set database.autocommit 1000
rm -r ${MAIL_DIR}/.notmuch
notmuch_with_shim no-close new
output=$(notmuch count '*')
test_expect_equal "$output" "1000"

test_done