Skip to content

Commit d68495e

Browse files
committed
feat: add merge feature
1 parent ca5bf3e commit d68495e

File tree

7 files changed

+2186
-29
lines changed

7 files changed

+2186
-29
lines changed

Cargo.toml

+1
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,7 @@ serialization = ["serde", "serde_json", "chrono/serde"]
2525
totp = ["totp-lite", "url", "base32"]
2626
save_kdbx4 = []
2727
challenge_response = ["sha1", "hex"]
28+
merge = []
2829

2930
default = []
3031

src/config.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,7 @@ const SALSA_20: u32 = 2;
3030
const CHA_CHA_20: u32 = 3;
3131

3232
/// Configuration of how a database should be stored
33-
#[derive(Debug, PartialEq, Eq)]
33+
#[derive(Debug, PartialEq, Eq, Clone)]
3434
#[cfg_attr(feature = "serialization", derive(serde::Serialize))]
3535
pub struct DatabaseConfig {
3636
/// Version of the outer database file

src/db/entry.rs

+197-6
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,13 @@
11
use std::collections::HashMap;
2+
use std::{thread, time};
23

34
use chrono::NaiveDateTime;
45
use secstr::SecStr;
56
use uuid::Uuid;
67

8+
#[cfg(feature = "merge")]
9+
use crate::db::merge::{MergeError, MergeLog};
10+
711
use crate::db::{Color, CustomData, Times};
812

913
#[cfg(feature = "totp")]
@@ -41,6 +45,131 @@ impl Entry {
4145
..Default::default()
4246
}
4347
}
48+
49+
#[cfg(feature = "merge")]
/// Merges `other` (the source database's version of this entry) into `self`
/// (the destination database's version).
///
/// Returns `Ok((None, log))` when the two entries are identical and nothing
/// needs to change, or `Ok((Some(merged), log))` with the merged entry.
///
/// # Errors
/// Returns `MergeError::EntryModificationTimeNotUpdated` when both entries
/// carry the same last-modification timestamp but their contents differ —
/// i.e. one side was edited without touching its timestamp.
pub(crate) fn merge(&self, other: &Entry) -> Result<(Option<Entry>, MergeLog), MergeError> {
    let mut log = MergeLog::default();

    // A missing source timestamp falls back to the epoch (so the destination
    // wins); a missing destination timestamp falls back to now (same effect).
    let source_last_modification = match other.times.get_last_modification() {
        Some(t) => *t,
        None => {
            log.warnings.push(format!(
                "Entry {} did not have a last modification timestamp",
                other.uuid
            ));
            Times::epoch()
        }
    };
    let destination_last_modification = match self.times.get_last_modification() {
        Some(t) => *t,
        None => {
            log.warnings.push(format!(
                "Entry {} did not have a last modification timestamp",
                self.uuid
            ));
            Times::now()
        }
    };

    if destination_last_modification == source_last_modification {
        // FIX: the condition was inverted (`if !self.has_diverged_from(...)`),
        // which rejected the perfectly normal case of two identical entries.
        // The error is for entries that DIFFER despite equal timestamps.
        if self.has_diverged_from(other) {
            // This should never happen.
            // This means that an entry was updated without updating the last
            // modification timestamp.
            return Err(MergeError::EntryModificationTimeNotUpdated(
                other.uuid.to_string(),
            ));
        }
        // Identical entries — nothing to merge.
        return Ok((None, log));
    }

    // The more recently modified side provides the current fields; the older
    // side is folded into the merged entry's history. (`merge_history` takes
    // `&self`, so no clone of `other` is needed.)
    let (mut merged_entry, entry_merge_log) =
        if destination_last_modification > source_last_modification {
            self.merge_history(other)?
        } else {
            other.merge_history(self)?
        };

    // The location changed timestamp is handled separately when merging two
    // databases, so always keep the destination's value here.
    if let Some(location_changed_timestamp) = self.times.get_location_changed() {
        merged_entry
            .times
            .set_location_changed(*location_changed_timestamp);
    }

    Ok((Some(merged_entry), entry_merge_log))
}
104+
105+
#[cfg(feature = "merge")]
/// Produces a copy of `self` whose history is the union of `self`'s and
/// `other`'s histories.
///
/// If `other` has uncommitted changes, `other` itself is first snapshotted
/// into the source history so the change is not lost.
///
/// # Errors
/// Propagates `MergeError` from `History::merge_with` (e.g. duplicate
/// history entries sharing a modification timestamp).
pub(crate) fn merge_history(&self, other: &Entry) -> Result<(Entry, MergeLog), MergeError> {
    let mut log = MergeLog::default();

    let mut source_history = match &other.history {
        Some(h) => h.clone(),
        None => {
            log.warnings.push(format!(
                "Entry {} from source database had no history.",
                self.uuid
            ));
            History::default()
        }
    };
    let mut destination_history = match &self.history {
        Some(h) => h.clone(),
        None => {
            log.warnings.push(format!(
                "Entry {} from destination database had no history.",
                self.uuid
            ));
            History::default()
        }
    };

    let mut response = self.clone();

    if other.has_uncommitted_changes() {
        log.warnings.push(format!(
            "Entry {} from source database has uncommitted changes.",
            self.uuid
        ));
        source_history.add_entry(other.clone());
    }

    // TODO we should probably check for uncommitted changes in the destination
    // database here too for consistency.

    // FIX: `history_merge_log` was pre-initialized with `MergeLog::default()`
    // and immediately overwritten; bind it once here instead.
    let history_merge_log = destination_history.merge_with(&source_history)?;
    response.history = Some(destination_history);

    Ok((response, log.merge_with(&history_merge_log)))
}
149+
150+
// Convenience function used in unit tests, to make sure that:
151+
// 1. The history gets updated after changing a field
152+
// 2. We wait a second before commiting the changes so that the timestamp is not the same
153+
// as it previously was. This is necessary since the timestamps in the KDBX format
154+
// do not preserve the msecs.
155+
pub(crate) fn set_field_and_commit(&mut self, field_name: &str, field_value: &str) {
156+
self.fields.insert(
157+
field_name.to_string(),
158+
Value::Unprotected(field_value.to_string()),
159+
);
160+
thread::sleep(time::Duration::from_secs(1));
161+
self.update_history();
162+
}
163+
164+
// Convenience function used in when merging two entries
165+
pub(crate) fn has_diverged_from(&self, other_entry: &Entry) -> bool {
166+
let mut new_times = Times::default();
167+
let mut self_without_times = self.clone();
168+
self_without_times.times = new_times.clone();
169+
let mut other_without_times = other_entry.clone();
170+
other_without_times.times = new_times.clone();
171+
!self_without_times.eq(&other_without_times)
172+
}
44173
}
45174

46175
impl<'a> Entry {
@@ -148,16 +277,13 @@ impl<'a> Entry {
148277
return true;
149278
}
150279

280+
let mut new_times = Times::default();
151281
let mut sanitized_entry = self.clone();
152-
sanitized_entry
153-
.times
154-
.set_last_modification(NaiveDateTime::default());
282+
sanitized_entry.times = new_times.clone();
155283
sanitized_entry.history.take();
156284

157285
let mut last_history_entry = history.entries.get(0).unwrap().clone();
158-
last_history_entry
159-
.times
160-
.set_last_modification(NaiveDateTime::default());
286+
last_history_entry.times = new_times.clone();
161287
last_history_entry.history.take();
162288

163289
if sanitized_entry.eq(&last_history_entry) {
@@ -225,6 +351,8 @@ pub struct History {
225351
}
226352
impl History {
227353
pub fn add_entry(&mut self, mut entry: Entry) {
354+
// DISCUSS: should we make sure that the last modification time is not the same
355+
// or older than the entry at the top of the history?
228356
if entry.history.is_some() {
229357
// Remove the history from the new history entry to avoid having
230358
// an exponential number of history entries.
@@ -236,6 +364,69 @@ impl History {
236364
/// Returns a read-only view of the history entries.
/// NOTE(review): entries appear to be kept newest-first (see the descending
/// sort in `merge_with`) — confirm before relying on the ordering.
pub fn get_entries(&self) -> &Vec<Entry> {
    &self.entries
}
367+
368+
// Determines if the entries of the history are
369+
// ordered by last modification time.
370+
pub(crate) fn is_ordered(&self) -> bool {
371+
let mut last_modification_time: Option<&NaiveDateTime> = None;
372+
for entry in &self.entries {
373+
if last_modification_time.is_none() {
374+
last_modification_time = entry.times.get_last_modification();
375+
}
376+
377+
let entry_modification_time = entry.times.get_last_modification().unwrap();
378+
// FIXME should we also handle equal modification times??
379+
if last_modification_time.unwrap() < entry_modification_time {
380+
return false;
381+
}
382+
last_modification_time = Some(entry_modification_time);
383+
}
384+
true
385+
}
386+
387+
// Merge both histories together.
#[cfg(feature = "merge")]
/// Merges `other`'s history entries into `self`, deduplicating by last
/// modification timestamp and re-sorting the result newest-first.
///
/// When both histories contain an entry for the same timestamp, the entry
/// already in `self` wins; a warning is logged if the two versions differ.
///
/// # Errors
/// Returns `MergeError::DuplicateHistoryEntries` if `self` already contains
/// two entries sharing a modification timestamp.
///
/// # Panics
/// Panics if any history entry lacks a last-modification timestamp
/// (unchanged from the previous implementation).
pub(crate) fn merge_with(&mut self, other: &History) -> Result<MergeLog, MergeError> {
    let mut log = MergeLog::default();
    let mut merged_entries: HashMap<NaiveDateTime, Entry> = HashMap::new();

    for history_entry in &self.entries {
        let modification_time = history_entry.times.get_last_modification().unwrap();
        // FIX: single `insert` instead of `contains_key` + `insert`
        // (avoids hashing the key twice; clippy `map_entry`).
        if merged_entries
            .insert(*modification_time, history_entry.clone())
            .is_some()
        {
            return Err(MergeError::DuplicateHistoryEntries(
                modification_time.to_string(),
                history_entry.uuid.to_string(),
            ));
        }
    }

    for history_entry in &other.entries {
        let modification_time = history_entry.times.get_last_modification().unwrap();
        match merged_entries.get(modification_time) {
            Some(existing_history_entry) => {
                if existing_history_entry.has_diverged_from(history_entry) {
                    log.warnings.push(format!(
                        "History entries for {} have the same modification timestamp but were not the same.",
                        existing_history_entry.uuid
                    ));
                }
            }
            None => {
                merged_entries.insert(*modification_time, history_entry.clone());
            }
        }
    }

    // FIX: move entries out of the map (`into_values`) instead of cloning
    // each one again, and sort descending directly instead of sort+reverse.
    let mut new_entries: Vec<Entry> = merged_entries.into_values().collect();
    new_entries.sort_by(|a, b| {
        let a_time = a.times.get_last_modification().unwrap();
        let b_time = b.times.get_last_modification().unwrap();
        b_time.cmp(a_time)
    });

    self.entries = new_entries;
    Ok(log)
}
239430
}
240431

241432
#[cfg(test)]

0 commit comments

Comments
 (0)