
Commit ea4a2d8

CBST2-09: Add duplicate key removal to the Lido and SSV key fetchers (#318)
1 parent 31dfa81 commit ea4a2d8

File tree: 2 files changed (+34, -8 lines)


crates/common/src/config/mux.rs

Lines changed: 7 additions & 8 deletions
@@ -16,7 +16,7 @@ use tracing::{debug, info};
 use url::Url;
 
 use super::{load_optional_env_var, PbsConfig, RelayConfig, MUX_PATH_ENV};
-use crate::{pbs::RelayClient, types::Chain};
+use crate::{config::remove_duplicate_keys, pbs::RelayClient, types::Chain};
 
 #[derive(Debug, Deserialize, Serialize)]
 pub struct PbsMuxes {
@@ -164,7 +164,7 @@ impl MuxKeysLoader {
         chain: Chain,
         rpc_url: Option<Url>,
     ) -> eyre::Result<Vec<BlsPublicKey>> {
-        match self {
+        let keys = match self {
             Self::File(config_path) => {
                 // First try loading from env
                 let path: PathBuf = load_optional_env_var(&get_mux_env(mux_id))
@@ -192,7 +192,11 @@ impl MuxKeysLoader {
                 }
                 NORegistry::SSV => fetch_ssv_pubkeys(chain, U256::from(*node_operator_id)).await,
             },
-        }
+        }?;
+
+        // Remove duplicates
+        let deduped_keys = remove_duplicate_keys(keys);
+        Ok(deduped_keys)
     }
 }
 

@@ -281,8 +285,6 @@ async fn fetch_lido_registry_keys(
     }
 
     ensure!(keys.len() == total_keys as usize, "expected {total_keys} keys, got {}", keys.len());
-    let unique = keys.iter().collect::<HashSet<_>>();
-    ensure!(unique.len() == keys.len(), "found duplicate keys in registry");
 
     Ok(keys)
 }
@@ -330,9 +332,6 @@ async fn fetch_ssv_pubkeys(
         }
     }
 
-    let unique = pubkeys.iter().collect::<HashSet<_>>();
-    ensure!(unique.len() == pubkeys.len(), "found duplicate keys in registry");
-
     Ok(pubkeys)
 }
 
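Behavioral note: before this commit, fetch_lido_registry_keys and fetch_ssv_pubkeys treated a repeated key as a hard error (the ensure! checks removed above), so the whole mux configuration failed to load; after it, duplicates are dropped once, centrally, in MuxKeysLoader::load. A minimal standalone sketch of the two behaviors, using u64 stand-ins for BLS public keys rather than the crate's own types:

use std::collections::HashSet;

// Old behavior (per fetcher): reject the whole key list if any key repeats.
fn reject_duplicates(keys: &[u64]) -> Result<(), String> {
    let unique: HashSet<_> = keys.iter().collect();
    if unique.len() != keys.len() {
        return Err("found duplicate keys in registry".to_string());
    }
    Ok(())
}

// New behavior (in the loader): keep only the first occurrence of each key.
fn dedup_keys(keys: Vec<u64>) -> Vec<u64> {
    let mut seen = HashSet::new();
    keys.into_iter().filter(|k| seen.insert(*k)).collect()
}

fn main() {
    let keys = vec![1, 2, 1];
    assert!(reject_duplicates(&keys).is_err()); // old: loading fails
    assert_eq!(dedup_keys(keys), vec![1, 2]);   // new: duplicates dropped silently
}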

crates/common/src/config/utils.rs

Lines changed: 27 additions & 0 deletions
@@ -1,5 +1,6 @@
 use std::{collections::HashMap, path::Path};
 
+use alloy::rpc::types::beacon::BlsPublicKey;
 use eyre::{bail, Context, Result};
 use serde::de::DeserializeOwned;
 
@@ -30,6 +31,20 @@ pub fn load_jwt_secrets() -> Result<HashMap<ModuleId, String>> {
     decode_string_to_map(&jwt_secrets)
 }
 
+/// Removes duplicate entries from a vector of BlsPublicKey
+pub fn remove_duplicate_keys(keys: Vec<BlsPublicKey>) -> Vec<BlsPublicKey> {
+    let mut unique_keys = Vec::new();
+    let mut key_set = std::collections::HashSet::new();
+
+    for key in keys {
+        if key_set.insert(key) {
+            unique_keys.push(key);
+        }
+    }
+
+    unique_keys
+}
+
 fn decode_string_to_map(raw: &str) -> Result<HashMap<ModuleId, String>> {
     // trim the string and split for comma
     raw.trim()
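Design note on the helper just added: collecting the keys straight into a HashSet would also deduplicate, but a HashSet's iteration order is unspecified, whereas the insert-and-push loop above keeps the first occurrence of each key in its original position. A standalone sketch of that difference, using plain [u8; 48] arrays in place of BlsPublicKey so it compiles without the alloy dependency:

use std::collections::HashSet;

fn main() {
    let key1 = [1u8; 48];
    let key2 = [2u8; 48];
    let keys = vec![key1, key2, key1];

    // Same pattern as remove_duplicate_keys: HashSet::insert returns false
    // for a value it has already seen, so later duplicates are skipped.
    let mut seen = HashSet::new();
    let mut deduped = Vec::new();
    for key in keys {
        if seen.insert(key) {
            deduped.push(key);
        }
    }
    assert_eq!(deduped, vec![key1, key2]); // first occurrences, input order kept

    // Collecting directly into a HashSet deduplicates too, but the two keys
    // could come back out in either order.
    let as_set: HashSet<[u8; 48]> = deduped.iter().copied().collect();
    assert_eq!(as_set.len(), 2);
}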
@@ -57,4 +72,16 @@ mod tests {
         assert_eq!(map.get(&ModuleId("KEY".into())), Some(&"VALUE".to_string()));
         assert_eq!(map.get(&ModuleId("KEY2".into())), Some(&"value2".to_string()));
     }
+
+    #[test]
+    fn test_remove_duplicate_keys() {
+        let key1 = BlsPublicKey::from([1; 48]);
+        let key2 = BlsPublicKey::from([2; 48]);
+        let keys = vec![key1, key2, key1];
+
+        let unique_keys = remove_duplicate_keys(keys);
+        assert_eq!(unique_keys.len(), 2);
+        assert!(unique_keys.contains(&key1));
+        assert!(unique_keys.contains(&key2));
+    }
 }
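The added unit test can be run on its own with cargo's name filter, e.g. cargo test remove_duplicate_keys from the repository root (optionally scoped with -p to the common crate; the exact package name is whatever crates/common declares in its Cargo.toml).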

0 commit comments
