
Commit 7648765: feat: add merge feature
1 parent 0902dc6

6 files changed: +2135 -9 lines

Cargo.toml (+1)

@@ -25,6 +25,7 @@ serialization = ["serde", "serde_json", "chrono/serde"]
 totp = ["totp-lite", "url", "base32"]
 save_kdbx4 = []
 challenge_response = ["sha1", "hex"]
+merge = []

 default = []
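Since the right-hand side of `merge = []` is empty, the new feature pulls in no additional dependencies; it is purely a compile-time switch. A minimal sketch of how such a switch gates code (the function name is invented for illustration, and the published crate name is assumed to be keepass; the real gated items appear in the diffs below):

// Only compiled when the crate is built with the merge feature enabled,
// e.g. `cargo build --features merge` or, from a dependent project,
// a `features = ["merge"]` entry on the keepass dependency in Cargo.toml.
#[cfg(feature = "merge")]
pub(crate) fn only_available_with_merge() {}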

src/config.rs (+1 -1)

@@ -30,7 +30,7 @@ const SALSA_20: u32 = 2;
 const CHA_CHA_20: u32 = 3;

 /// Configuration of how a database should be stored
-#[derive(Debug, PartialEq, Eq)]
+#[derive(Debug, PartialEq, Eq, Clone)]
 #[cfg_attr(feature = "serialization", derive(serde::Serialize))]
 pub struct DatabaseConfig {
     /// Version of the outer database file

src/db/entry.rs (+196 -6)

@@ -1,9 +1,13 @@
 use std::collections::HashMap;
+use std::{thread, time};

 use chrono::NaiveDateTime;
 use secstr::SecStr;
 use uuid::Uuid;

+#[cfg(feature = "merge")]
+use crate::db::merge::{MergeError, MergeLog};
+
 use crate::db::{Color, CustomData, Times};

 #[cfg(feature = "totp")]
@@ -41,6 +45,129 @@ impl Entry {
             ..Default::default()
         }
     }
+
+    #[cfg(feature = "merge")]
+    pub(crate) fn merge(&self, other: &Entry) -> Result<(Option<Entry>, MergeLog), MergeError> {
+        let mut log = MergeLog::default();
+
+        let source_last_modification = match other.times.get_last_modification() {
+            Some(t) => *t,
+            None => {
+                log.warnings.push(format!(
+                    "Entry {} did not have a last modification timestamp",
+                    other.uuid
+                ));
+                Times::epoch()
+            }
+        };
+        let destination_last_modification = match self.times.get_last_modification() {
+            Some(t) => *t,
+            None => {
+                log.warnings.push(format!(
+                    "Entry {} did not have a last modification timestamp",
+                    self.uuid
+                ));
+                Times::now()
+            }
+        };
+
+        if destination_last_modification == source_last_modification {
+            if !self.has_diverged_from(&other) {
+                // This should never happen.
+                // This means that an entry was updated without updating the last modification
+                // timestamp.
+                return Err(MergeError::EntryModificationTimeNotUpdated(
+                    other.uuid.to_string(),
+                ));
+            }
+            return Ok((None, log));
+        }
+
+        let (mut merged_entry, entry_merge_log) = match destination_last_modification > source_last_modification
+        {
+            true => self.merge_history(other)?,
+            false => other.clone().merge_history(&self)?,
+        };
+
+        // The location changed timestamp is handled separately when merging two databases.
+        if let Some(location_changed_timestamp) = self.times.get_location_changed() {
+            merged_entry
+                .times
+                .set_location_changed(*location_changed_timestamp);
+        }
+
+        return Ok((Some(merged_entry), entry_merge_log));
+    }
+
+    #[cfg(feature = "merge")]
+    pub(crate) fn merge_history(&self, other: &Entry) -> Result<(Entry, MergeLog), MergeError> {
+        let mut log = MergeLog::default();
+
+        let mut source_history = match &other.history {
+            Some(h) => h.clone(),
+            None => {
+                log.warnings.push(format!(
+                    "Entry {} from source database had no history.",
+                    self.uuid
+                ));
+                History::default()
+            }
+        };
+        let mut destination_history = match &self.history {
+            Some(h) => h.clone(),
+            None => {
+                log.warnings.push(format!(
+                    "Entry {} from destination database had no history.",
+                    self.uuid
+                ));
+                History::default()
+            }
+        };
+        let mut response = self.clone();
+
+        if other.has_uncommitted_changes() {
+            log.warnings.push(format!(
+                "Entry {} from source database has uncommitted changes.",
+                self.uuid
+            ));
+            source_history.add_entry(other.clone());
+        }
+
+        // TODO we should probably check for uncommitted changes in the destination
+        // database here too for consistency.
+
+        let history_merge_log = destination_history.merge_with(&source_history)?;
+        response.history = Some(destination_history);
+
+        Ok((response, log.merge_with(&history_merge_log)))
+    }
+
+    // Convenience function used in unit tests, to make sure that:
+    // 1. The history gets updated after changing a field
+    // 2. We wait a second before committing the changes so that the timestamp is not the same
+    //    as it previously was. This is necessary since the timestamps in the KDBX format
+    //    do not preserve the msecs.
+    pub(crate) fn set_field_and_commit(&mut self, field_name: &str, field_value: &str) {
+        self.fields.insert(
+            field_name.to_string(),
+            Value::Unprotected(field_value.to_string()),
+        );
+        thread::sleep(time::Duration::from_secs(1));
+        self.update_history();
+    }
+
+    // Convenience function used when merging two entries
+    pub(crate) fn has_diverged_from(&self, other_entry: &Entry) -> bool {
+        let new_times = Times::default();
+
+        let mut self_without_times = self.clone();
+        self_without_times.times = new_times.clone();
+
+        let mut other_without_times = other_entry.clone();
+        other_without_times.times = new_times.clone();
+
+        !self_without_times.eq(&other_without_times)
+    }
 }

 impl<'a> Entry {
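To make the intended behaviour of the new `Entry::merge` concrete, here is a minimal, hypothetical test sketch that could sit in the existing `#[cfg(test)]` module at the bottom of this file (the merge methods are `pub(crate)`, so it only works from inside the crate; it also assumes the pre-existing `Entry::new()` constructor and `get_title()` accessor):

    #[cfg(feature = "merge")]
    #[test]
    fn newer_entry_wins_when_merging() {
        let mut destination = Entry::new();
        destination.set_field_and_commit("Title", "original title");

        // Simulate the same entry being edited more recently in another database.
        let mut source = destination.clone();
        source.set_field_and_commit("Title", "updated title");

        // The source has the newer modification timestamp, so the merged entry
        // should be based on the source, with both revisions kept in its history.
        let (merged, _log) = destination.merge(&source).unwrap();
        let merged = merged.expect("differing timestamps should produce a merged entry");
        assert_eq!(merged.get_title(), Some("updated title"));
    }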
@@ -148,16 +275,14 @@ impl<'a> Entry {
             return true;
         }

+        let new_times = Times::default();
+
         let mut sanitized_entry = self.clone();
-        sanitized_entry
-            .times
-            .set_last_modification(NaiveDateTime::default());
+        sanitized_entry.times = new_times.clone();
         sanitized_entry.history.take();

         let mut last_history_entry = history.entries.get(0).unwrap().clone();
-        last_history_entry
-            .times
-            .set_last_modification(NaiveDateTime::default());
+        last_history_entry.times = new_times.clone();
         last_history_entry.history.take();

         if sanitized_entry.eq(&last_history_entry) {
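This hunk switches from resetting only the last-modification timestamp to replacing the whole `Times` struct before comparing an entry against the top of its history. A rough sketch of the behaviour that comparison backs, assuming the surrounding function is the existing uncommitted-changes check called by the new merge code (again a crate-internal test sketch, not part of the commit):

    #[test]
    fn editing_a_field_without_updating_history_is_detected() {
        let mut entry = Entry::new();
        entry.set_field_and_commit("UserName", "alice");
        // The committed state matches the top history entry: nothing pending.
        assert!(!entry.has_uncommitted_changes());

        // Change a field directly, without calling update_history().
        entry.fields.insert(
            "UserName".to_string(),
            Value::Unprotected("bob".to_string()),
        );
        assert!(entry.has_uncommitted_changes());
    }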
@@ -225,6 +350,8 @@ pub struct History {
 }
 impl History {
     pub fn add_entry(&mut self, mut entry: Entry) {
+        // DISCUSS: should we make sure that the last modification time is not the same
+        // or older than the entry at the top of the history?
         if entry.history.is_some() {
            // Remove the history from the new history entry to avoid having
            // an exponential number of history entries.
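The DISCUSS comment aside, the surrounding context shows why `add_entry` takes the entry by value and strips its nested history before archiving it. A small, hypothetical illustration of that invariant (crate-internal, using helpers from this diff):

    #[test]
    fn archived_history_entries_do_not_nest_history() {
        let mut entry = Entry::new();
        entry.set_field_and_commit("Title", "something");
        assert!(entry.history.is_some());

        // Archiving the entry drops its own history, so history never
        // grows exponentially when entries are copied into it repeatedly.
        let mut history = History::default();
        history.add_entry(entry);
        assert!(history.get_entries()[0].history.is_none());
    }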
@@ -236,6 +363,69 @@ impl History {
     pub fn get_entries(&self) -> &Vec<Entry> {
         &self.entries
     }
+
+    // Determines if the entries of the history are
+    // ordered by last modification time.
+    pub(crate) fn is_ordered(&self) -> bool {
+        let mut last_modification_time: Option<&NaiveDateTime> = None;
+        for entry in &self.entries {
+            if last_modification_time.is_none() {
+                last_modification_time = entry.times.get_last_modification();
+            }
+
+            let entry_modification_time = entry.times.get_last_modification().unwrap();
+            // FIXME should we also handle equal modification times??
+            if last_modification_time.unwrap() < entry_modification_time {
+                return false;
+            }
+            last_modification_time = Some(entry_modification_time);
+        }
+        true
+    }
+
+    // Merge both histories together.
+    #[cfg(feature = "merge")]
+    pub(crate) fn merge_with(&mut self, other: &History) -> Result<MergeLog, MergeError> {
+        let mut log = MergeLog::default();
+        let mut new_history_entries: HashMap<NaiveDateTime, Entry> = HashMap::new();
+
+        for history_entry in &self.entries {
+            let modification_time = history_entry.times.get_last_modification().unwrap();
+            if new_history_entries.contains_key(modification_time) {
+                return Err(MergeError::DuplicateHistoryEntries(
+                    modification_time.to_string(),
+                    history_entry.uuid.to_string(),
+                ));
+            }
+            new_history_entries.insert(modification_time.clone(), history_entry.clone());
+        }
+
+        for history_entry in &other.entries {
+            let modification_time = history_entry.times.get_last_modification().unwrap();
+            let existing_history_entry = new_history_entries.get(modification_time);
+            if let Some(existing_history_entry) = existing_history_entry {
+                if existing_history_entry.has_diverged_from(&history_entry) {
+                    log.warnings.push(format!(
+                        "History entries for {} have the same modification timestamp but were not the same.",
+                        existing_history_entry.uuid
+                    ));
+                }
+            } else {
+                new_history_entries.insert(modification_time.clone(), history_entry.clone());
+            }
+        }
+
+        let mut all_modification_times: Vec<&NaiveDateTime> = new_history_entries.keys().collect();
+        all_modification_times.sort();
+        all_modification_times.reverse();
+        let mut new_entries: Vec<Entry> = vec![];
+        for modification_time in &all_modification_times {
+            new_entries.push(new_history_entries.get(&modification_time).unwrap().clone());
+        }
+
+        self.entries = new_entries;
+        Ok(log)
+    }
 }

 #[cfg(test)]
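Putting the two new History methods together, a rough, hypothetical end-to-end sketch (crate-internal, and relying on set_field_and_commit's one-second sleep to give every history entry a distinct last-modification timestamp):

    #[cfg(feature = "merge")]
    #[test]
    fn merged_histories_are_deduplicated_and_ordered() {
        let mut entry = Entry::new();
        entry.set_field_and_commit("Title", "v1");
        entry.set_field_and_commit("Title", "v2");
        let mut destination_history = entry.history.clone().unwrap();

        // A diverged copy with one extra, newer revision.
        let mut diverged = entry.clone();
        diverged.set_field_and_commit("Title", "v3");
        let source_history = diverged.history.clone().unwrap();

        let _log = destination_history.merge_with(&source_history).unwrap();

        // The two shared revisions are kept once, the extra one is added,
        // and the result stays ordered newest-first.
        assert_eq!(destination_history.get_entries().len(), 3);
        assert!(destination_history.is_ordered());
    }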
