
Commit 2a543d4

feat(pyth-lazer-agent) Allow deduplicating updates within each batch (#2944)
1 parent 5d8d4f2 commit 2a543d4

6 files changed: +84, -4 lines

Cargo.lock

Lines changed: 1 addition & 1 deletion
Some generated files are not rendered by default.

apps/pyth-lazer-agent/Cargo.toml

Lines changed: 1 addition & 1 deletion

@@ -1,6 +1,6 @@
 [package]
 name = "pyth-lazer-agent"
-version = "0.4.0"
+version = "0.4.1"
 edition = "2024"
 description = "Pyth Lazer Agent"
 license = "Apache-2.0"

apps/pyth-lazer-agent/README.md

Lines changed: 2 additions & 0 deletions

@@ -49,10 +49,12 @@ publish_keypair_path = "/path/to/keypair.json"
 authorization_token = "your_token"
 listen_address = "0.0.0.0:8910"
 publish_interval_duration = "25ms"
+enable_update_deduplication = false
 ```

 - `relayers_urls`: The Lazer team will provide these.
 - `publish_keypair_path`: The keypair file generated with `solana-keygen` or similar.
 - `authorization_token`: The Lazer team will provide this or instruct that it can be omitted.
 - `listen_address`: The local port the agent will be listening on; can be anything you want.
 - `publisher_interval`: The agent will batch and send transaction bundles at this interval. The Lazer team will provide guidance here.
+- `enable_update_deduplication`: The agent will deduplicate updates inside each batch before sending it to Lazer.

apps/pyth-lazer-agent/src/config.rs

Lines changed: 2 additions & 0 deletions

@@ -19,6 +19,8 @@ pub struct Config {
     #[serde(with = "humantime_serde", default = "default_publish_interval")]
     pub publish_interval_duration: Duration,
     pub history_service_url: Option<Url>,
+    #[serde(default)]
+    pub enable_update_deduplication: bool,
 }

 #[derive(Deserialize, Derivative, Clone, PartialEq)]
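Since the new field is annotated with `#[serde(default)]`, existing agent config files that omit `enable_update_deduplication` keep deserializing and the flag falls back to `false`. A minimal standalone sketch of that serde behavior, using a hypothetical trimmed-down stand-in for the agent's `Config` rather than the real struct (assumes the `serde` derive feature and the `toml` crate):

```rust
use serde::Deserialize;

// Hypothetical stand-in for the agent's Config, trimmed to the relevant fields.
#[derive(Deserialize, Debug)]
struct PartialConfig {
    listen_address: String,
    // Absent from older config files; serde falls back to bool::default(), i.e. false.
    #[serde(default)]
    enable_update_deduplication: bool,
}

fn main() {
    // An old-style config without the new key still parses, with deduplication off.
    let without_flag: PartialConfig =
        toml::from_str(r#"listen_address = "0.0.0.0:8910""#).unwrap();
    assert!(!without_flag.enable_update_deduplication);

    // Opting in is a single extra line in the TOML file.
    let with_flag: PartialConfig = toml::from_str(
        "listen_address = \"0.0.0.0:8910\"\nenable_update_deduplication = true",
    )
    .unwrap();
    assert!(with_flag.enable_update_deduplication);
}
```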

apps/pyth-lazer-agent/src/jrpc_handle.rs

Lines changed: 1 addition & 0 deletions

@@ -299,6 +299,7 @@ pub mod tests {
             publish_keypair_path: Default::default(),
             publish_interval_duration: Default::default(),
             history_service_url: None,
+            enable_update_deduplication: false,
         };

         println!("{:?}", get_metadata(config).await.unwrap());

apps/pyth-lazer-agent/src/lazer_publisher.rs

Lines changed: 77 additions & 2 deletions

@@ -132,8 +132,13 @@ impl LazerPublisherTask {
             return Ok(());
         }

+        let mut updates = self.pending_updates.drain(..).collect();
+        if self.config.enable_update_deduplication {
+            deduplicate_feed_updates(&mut updates);
+        }
+
         let publisher_update = PublisherUpdate {
-            updates: self.pending_updates.drain(..).collect(),
+            updates,
             publisher_timestamp: MessageField::some(Timestamp::now()),
             special_fields: Default::default(),
         };
@@ -173,13 +178,19 @@ impl LazerPublisherTask {
     }
 }

+fn deduplicate_feed_updates(feed_updates: &mut Vec<FeedUpdate>) {
+    // assume that feed_updates is already sorted by timestamp for each feed_update.feed_id
+    feed_updates.dedup_by_key(|feed_update| (feed_update.feed_id, feed_update.update.clone()));
+}
+
 #[cfg(test)]
 mod tests {
     use crate::config::{CHANNEL_CAPACITY, Config};
-    use crate::lazer_publisher::LazerPublisherTask;
+    use crate::lazer_publisher::{LazerPublisherTask, deduplicate_feed_updates};
     use ed25519_dalek::SigningKey;
     use protobuf::well_known_types::timestamp::Timestamp;
     use protobuf::{Message, MessageField};
+    use pyth_lazer_protocol::time::TimestampUs;
     use pyth_lazer_publisher_sdk::publisher_update::feed_update::Update;
     use pyth_lazer_publisher_sdk::publisher_update::{FeedUpdate, PriceUpdate};
     use pyth_lazer_publisher_sdk::transaction::{LazerTransaction, lazer_transaction};
@@ -212,6 +223,18 @@ mod tests {
         temp_file
     }

+    fn test_feed_update(feed_id: u32, timestamp: TimestampUs, price: i64) -> FeedUpdate {
+        FeedUpdate {
+            feed_id: Some(feed_id),
+            source_timestamp: MessageField::some(timestamp.into()),
+            update: Some(Update::PriceUpdate(PriceUpdate {
+                price: Some(price),
+                ..PriceUpdate::default()
+            })),
+            special_fields: Default::default(),
+        }
+    }
+
     #[tokio::test]
     async fn test_lazer_exporter_task() {
         let signing_key_file = get_private_key_file();
@@ -224,6 +247,7 @@ mod tests {
             publish_keypair_path: PathBuf::from(signing_key_file.path()),
             publish_interval_duration: Duration::from_millis(25),
             history_service_url: None,
+            enable_update_deduplication: false,
         };

         let (relayer_sender, mut relayer_receiver) = broadcast::channel(CHANNEL_CAPACITY);
@@ -274,4 +298,55 @@ mod tests {
             _ => panic!("channel should have a transaction waiting"),
         }
     }
+
+    #[test]
+    fn test_deduplicate_feed_updates() {
+        // let's consider a batch containing updates for a single feed. the updates are (ts, price):
+        // - (1, 10)
+        // - (2, 10)
+        // - (3, 10)
+        // - (4, 15)
+        // - (5, 15)
+        // - (6, 10)
+        // we should only return (1, 10), (4, 15), (6, 10)

+        let updates = &mut vec![
+            test_feed_update(1, TimestampUs::from_millis(1).unwrap(), 10),
+            test_feed_update(1, TimestampUs::from_millis(2).unwrap(), 10),
+            test_feed_update(1, TimestampUs::from_millis(3).unwrap(), 10),
+            test_feed_update(1, TimestampUs::from_millis(4).unwrap(), 15),
+            test_feed_update(1, TimestampUs::from_millis(5).unwrap(), 15),
+            test_feed_update(1, TimestampUs::from_millis(6).unwrap(), 10),
+        ];
+
+        let expected_updates = vec![
+            test_feed_update(1, TimestampUs::from_millis(1).unwrap(), 10),
+            test_feed_update(1, TimestampUs::from_millis(4).unwrap(), 15),
+            test_feed_update(1, TimestampUs::from_millis(6).unwrap(), 10),
+        ];
+
+        deduplicate_feed_updates(updates);
+        assert_eq!(updates.to_vec(), expected_updates);
+    }
+
+    #[test]
+    fn test_deduplicate_feed_updates_multiple_feeds() {
+        let updates = &mut vec![
+            test_feed_update(1, TimestampUs::from_millis(1).unwrap(), 10),
+            test_feed_update(1, TimestampUs::from_millis(2).unwrap(), 10),
+            test_feed_update(1, TimestampUs::from_millis(3).unwrap(), 10),
+            test_feed_update(2, TimestampUs::from_millis(4).unwrap(), 15),
+            test_feed_update(2, TimestampUs::from_millis(5).unwrap(), 15),
+            test_feed_update(2, TimestampUs::from_millis(6).unwrap(), 10),
+        ];
+
+        let expected_updates = vec![
+            test_feed_update(1, TimestampUs::from_millis(1).unwrap(), 10),
+            test_feed_update(2, TimestampUs::from_millis(4).unwrap(), 15),
+            test_feed_update(2, TimestampUs::from_millis(6).unwrap(), 10),
+        ];
+
+        deduplicate_feed_updates(updates);
+        assert_eq!(updates.to_vec(), expected_updates);
+    }
 }
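For context, `Vec::dedup_by_key` only collapses runs of consecutive elements whose key compares equal, which is why `deduplicate_feed_updates` assumes each feed's updates are already ordered by timestamp, and why the final (ts = 6, price = 10) update in the test above survives even though an earlier update carried the same price. A minimal standalone sketch of that behavior, using plain `(feed_id, timestamp, price)` tuples in place of `FeedUpdate`:

```rust
fn main() {
    // (feed_id, source_timestamp, price) stand-ins for the FeedUpdate messages.
    let mut batch = vec![
        (1u32, 1u64, 10i64),
        (1, 2, 10),
        (1, 3, 10),
        (1, 4, 15),
        (1, 5, 15),
        (1, 6, 10),
    ];

    // Keep only the first element of each consecutive run with an equal (feed_id, price) key.
    batch.dedup_by_key(|&mut (feed_id, _ts, price)| (feed_id, price));

    // The trailing (6, 10) survives because it is not adjacent to the earlier run of 10s.
    assert_eq!(batch, vec![(1, 1, 10), (1, 4, 15), (1, 6, 10)]);
}
```

Because only adjacent duplicates are dropped, a single `dedup_by_key` pass is sufficient only under the stated assumption that the pending updates hold each feed's entries in timestamp order.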
