Skip to content
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.

Commit 1179739

Browse files
Authored Sep 17, 2024
refactor: remove mapping account usage (#145)
The Solana getProgramAccounts RPC call is a great way to track price and product accounts, instead of using the mapping account. The mapping account is not efficient and gets in the way if we change its structure. This PR removes all usages of the mapping account and cleans up some legacy tests/code in the codebase.
1 parent 557a79e commit 1179739

File tree

11 files changed

+120
-454
lines changed

11 files changed

+120
-454
lines changed
 

‎Cargo.lock

Lines changed: 1 addition & 1 deletion
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

‎Cargo.toml

Lines changed: 1 addition & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,16 +1,12 @@
11
[package]
22
name = "pyth-agent"
3-
version = "2.11.1"
3+
version = "2.12.0"
44
edition = "2021"
55

66
[[bin]]
77
name = "agent"
88
path = "src/bin/agent.rs"
99

10-
[[bin]]
11-
name = "agent-migrate-config"
12-
path = "src/bin/agent_migrate_config.rs"
13-
1410
[dependencies]
1511
anyhow = "1.0.81"
1612
serde = { version = "1.0.197", features = ["derive"] }

‎config/config.sample.pythnet.toml

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -17,9 +17,6 @@ key_store.publish_keypair_path = "/path/to/keypair.json"
1717
# Oracle program pubkey
1818
key_store.program_key = "FsJ3A3u2vn5cTVofAjvy6y5kwABJAqYWpe4975bi2epH"
1919

20-
# Oracle mapping pubkey
21-
key_store.mapping_key = "AHtgzX45WTKfkPG53L6WYhGEXwQkN1BVknET3sVsLL8J"
22-
2320
# Compute unit per price update.
2421
exporter.compute_unit_limit = 5000
2522

‎config/config.sample.pythtest.toml

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -18,10 +18,6 @@ key_store.publish_keypair_path = "/path/to/keypair.json"
1818
key_store.program_key = "8tfDNiaEyrV6Q1U4DEXrEigs9DoDtkugzFbybENEbCDz" # conformance
1919
# key_store.program_key = "gSbePebfvPy7tRqimPoVecS2UsBvYv46ynrzWocc92s" # cross-chain
2020

21-
# Oracle mapping pubkey
22-
key_store.mapping_key = "AFmdnt9ng1uVxqCmqwQJDAYC5cKTkw8gJKSM5PnzuF6z" # conformance
23-
# key_store.mapping_key = "BmA9Z6FjioHJPpjT39QazZyhDRUdZy2ezwx4GiDdE2u2" # cross-chain
24-
2521
# Pythtest accumulator key (only for the cross-chain oracle)
2622
# key_store.accumulator_key = "7Vbmv1jt4vyuqBZcpYPpnVhrqVe5e6ZPb6JxDcffRHUM"
2723

‎config/config.toml

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -47,9 +47,6 @@ key_store.publish_keypair_path = "/path/to/keypair.json"
4747
# Public key of the oracle program
4848
key_store.program_key = "RelevantOracleProgramAddress"
4949

50-
# Public key of the root mapping account
51-
key_store.mapping_key = "RelevantOracleMappingAddress"
52-
5350
### Optional fields of primary/secondary network config ###
5451

5552
# Pubkey of accumulator message buffer program ID. Setting this

‎integration-tests/tests/test_integration.py

Lines changed: 12 additions & 95 deletions
Original file line numberDiff line numberDiff line change
@@ -20,12 +20,10 @@
2020
from contextlib import contextmanager
2121
import shutil
2222
from solana.keypair import Keypair
23-
from solders.system_program import ID as SYSTEM_PROGRAM_ID
2423
from solana.rpc.async_api import AsyncClient
2524
from solana.rpc import commitment
26-
from solana.transaction import AccountMeta, Transaction, TransactionInstruction
25+
from solana.transaction import AccountMeta, Transaction
2726
from anchorpy import Provider, Wallet
28-
from construct import Bytes, Int32sl, Int32ul, Struct
2927
from solana.publickey import PublicKey
3028
from message_buffer_client_codegen.instructions import initialize, set_allowed_programs, create_buffer
3129
from message_buffer_client_codegen.accounts.message_buffer import MessageBuffer
@@ -359,21 +357,6 @@ def agent_publish_keypair(self, agent_keystore_path, sync_accounts):
359357
LOGGER.debug(f"Publisher {address.stdout.strip()} balance: {balance.stdout.strip()}")
360358
time.sleep(8)
361359

362-
@pytest.fixture
363-
def agent_keystore(self, agent_keystore_path, agent_publish_keypair):
364-
self.run(
365-
f"../scripts/init_key_store.sh localnet {agent_keystore_path}")
366-
367-
if USE_ACCUMULATOR:
368-
path = os.path.join(agent_keystore_path, "accumulator_program_key.json")
369-
370-
with open(path, 'w') as f:
371-
f.write(MESSAGE_BUFFER_PROGRAM)
372-
373-
if os.path.exists("keystore"):
374-
os.remove("keystore")
375-
os.symlink(agent_keystore_path, "keystore")
376-
377360
@pytest_asyncio.fixture
378361
async def initialize_message_buffer_program(self, funding_keypair, sync_key_path, sync_accounts):
379362

@@ -429,18 +412,15 @@ async def initialize_message_buffer_program(self, funding_keypair, sync_key_path
429412
await provider.send(tx, [parsed_funding_keypair])
430413

431414
@pytest.fixture
432-
def agent_config(self, agent_keystore, agent_keystore_path, tmp_path):
415+
def agent_config(self, agent_keystore_path, agent_publish_keypair, tmp_path):
433416
with open("agent_conf.toml") as config_file:
434417
agent_config = config_file.read()
435418

436419
publish_keypair_path = os.path.join(agent_keystore_path, "publish_key_pair.json")
437420

438-
mapping_keypair = Keypair.from_secret_key(MAPPING_KEYPAIR)
439-
440421
agent_config += f"""
441422
key_store.publish_keypair_path = "{publish_keypair_path}"
442423
key_store.program_key = "{ORACLE_PROGRAM}"
443-
key_store.mapping_key = "{mapping_keypair.public_key}"
444424
"""
445425

446426
# Add accumulator setting if option is enabled
@@ -457,32 +437,7 @@ def agent_config(self, agent_keystore, agent_keystore_path, tmp_path):
457437
return path
458438

459439
@pytest.fixture
460-
def agent_legacy_config(self, agent_keystore, agent_keystore_path, tmp_path):
461-
"""
462-
Prepares a legacy v1.x.x config for testing agent-migrate-config
463-
"""
464-
with open("agent_conf.toml") as config_file:
465-
agent_config = config_file.read()
466-
467-
agent_config += f'\nkey_store.root_path = "{agent_keystore_path}"'
468-
469-
if USE_ACCUMULATOR:
470-
# Add accumulator setting to verify that it is inlined as well
471-
agent_config += f'\nkey_store.accumulator_key_path = "accumulator_program_key.json"'
472-
473-
LOGGER.debug(f"Built legacy agent config:\n{agent_config}")
474-
475-
path = os.path.join(tmp_path, "agent_conf_legacy.toml")
476-
477-
with open(path, 'w') as f:
478-
f.write(agent_config)
479-
480-
return path
481-
482-
483-
484-
@pytest.fixture
485-
def agent(self, sync_accounts, agent_keystore, tmp_path, initialize_message_buffer_program, agent_config):
440+
def agent(self, sync_accounts, agent_keystore_path, agent_publish_keypair, tmp_path, initialize_message_buffer_program, agent_config):
486441
LOGGER.debug("Building agent binary")
487442
self.run("cargo build --release --bin agent")
488443

@@ -496,7 +451,7 @@ def agent(self, sync_accounts, agent_keystore, tmp_path, initialize_message_buff
496451
yield
497452

498453
@pytest.fixture
499-
def agent_hotload(self, sync_accounts, agent_keystore, agent_keystore_path, tmp_path, initialize_message_buffer_program, agent_config):
454+
def agent_hotload(self, sync_accounts, agent_keystore_path, tmp_path, initialize_message_buffer_program, agent_config):
500455
"""
501456
Spawns an agent without a publish keypair, used for keypair hotloading testing
502457
"""
@@ -560,11 +515,11 @@ async def test_update_price_simple(self, client: PythAgentClient):
560515

561516
# Send an "update_price" request
562517
await client.update_price(price_account, 42, 2, "trading")
563-
time.sleep(2)
518+
time.sleep(5)
564519

565520
# Send another "update_price" request to trigger aggregation
566521
await client.update_price(price_account, 81, 1, "trading")
567-
time.sleep(2)
522+
time.sleep(5)
568523

569524
# Confirm that the price account has been updated with the values from the first "update_price" request
570525
final_product_state = await client.get_product(product_account)
@@ -726,44 +681,6 @@ async def test_publish_forever(self, client: PythAgentClient, tmp_path):
726681
await client.update_price(price_account, 47, 2, "trading")
727682
time.sleep(1)
728683

729-
@pytest.mark.asyncio
730-
async def test_agent_migrate_config(self,
731-
agent_keystore,
732-
agent_legacy_config,
733-
agent_migrate_config_binary,
734-
client_no_spawn: PythAgentClient,
735-
initialize_message_buffer_program,
736-
sync_accounts,
737-
tmp_path,
738-
):
739-
os.environ["RUST_BACKTRACE"] = "full"
740-
os.environ["RUST_LOG"] = "debug"
741-
742-
# Migrator must run successfully (run() raises on error)
743-
new_config = self.run(f"{agent_migrate_config_binary} -c {agent_legacy_config}").stdout.strip()
744-
745-
LOGGER.debug(f"Successfully migrated legacy config to:\n{new_config}")
746-
747-
# Overwrite legacy config with the migrated version.
748-
#
749-
# NOTE: assumes 'w' erases the file before access)
750-
with open(agent_legacy_config, 'w') as f:
751-
f.write(new_config)
752-
f.flush()
753-
754-
self.run("cargo build --release --bin agent")
755-
756-
log_dir = os.path.join(tmp_path, "agent_logs")
757-
758-
# We start the agent manually to pass it the updated legacy config
759-
with self.spawn(f"../target/release/agent --config {agent_legacy_config}", log_dir=log_dir):
760-
time.sleep(3)
761-
await client_no_spawn.connect()
762-
763-
# Continue with the simple test case, which must succeed
764-
await self.test_update_price_simple(client_no_spawn)
765-
await client_no_spawn.close()
766-
767684
@pytest.mark.asyncio
768685
async def test_agent_respects_market_hours(self, client: PythAgentClient):
769686
'''
@@ -784,13 +701,13 @@ async def test_agent_respects_market_hours(self, client: PythAgentClient):
784701

785702
# Send an "update_price" request
786703
await client.update_price(price_account, 42, 2, "trading")
787-
time.sleep(2)
704+
time.sleep(5)
788705

789706
# Send another update_price request to "trigger" aggregation
790707
# (aggregation would happen if market hours were to fail, but
791708
# we want to catch that happening if there's a problem)
792709
await client.update_price(price_account, 81, 1, "trading")
793-
time.sleep(2)
710+
time.sleep(5)
794711

795712
# Confirm that the price account has not been updated
796713
final_product_state = await client.get_product(product_account)
@@ -819,13 +736,13 @@ async def test_agent_respects_holiday_hours(self, client: PythAgentClient):
819736

820737
# Send an "update_price" request
821738
await client.update_price(price_account, 42, 2, "trading")
822-
time.sleep(2)
739+
time.sleep(5)
823740

824741
# Send another update_price request to "trigger" aggregation
825742
# (aggregation would happen if market hours were to fail, but
826743
# we want to catch that happening if there's a problem)
827744
await client.update_price(price_account, 81, 1, "trading")
828-
time.sleep(2)
745+
time.sleep(5)
829746

830747
# Confirm that the price account has not been updated
831748
final_product_state = await client.get_product(product_account)
@@ -861,7 +778,7 @@ async def test_agent_respects_publish_interval(self, client: PythAgentClient):
861778
# (aggregation would happen if publish interval were to fail, but
862779
# we want to catch that happening if there's a problem)
863780
await client.update_price(price_account, 81, 1, "trading")
864-
time.sleep(2)
781+
time.sleep(5)
865782

866783
# Confirm that the price account has not been updated
867784
final_product_state = await client.get_product(product_account)
@@ -875,7 +792,7 @@ async def test_agent_respects_publish_interval(self, client: PythAgentClient):
875792
# Send another update_price request to "trigger" aggregation
876793
# Now it is after the publish interval, so the price should be updated
877794
await client.update_price(price_account, 81, 1, "trading")
878-
time.sleep(2)
795+
time.sleep(5)
879796

880797
# Confirm that the price account has been updated
881798
final_product_state = await client.get_product(product_account)

‎scripts/init_key_store.sh

Lines changed: 0 additions & 45 deletions
This file was deleted.

‎src/agent/services/oracle.rs

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -60,7 +60,7 @@ where
6060
config.clone(),
6161
network,
6262
state.clone(),
63-
key_store.mapping_key,
63+
key_store.pyth_oracle_program_key,
6464
key_store.publish_keypair,
6565
key_store.pyth_price_store_program_key,
6666
config.oracle.max_lookup_batch_size,
@@ -152,13 +152,13 @@ where
152152
Ok(())
153153
}
154154

155-
/// On poll lookup all Pyth Mapping/Product/Price accounts and sync.
155+
/// On poll lookup all Pyth Product/Price accounts and sync.
156156
#[instrument(skip(config, publish_keypair, state))]
157157
async fn poller<S>(
158158
config: Config,
159159
network: Network,
160160
state: Arc<S>,
161-
mapping_key: Pubkey,
161+
oracle_program_key: Pubkey,
162162
publish_keypair: Option<Keypair>,
163163
pyth_price_store_program_key: Option<Pubkey>,
164164
max_lookup_batch_size: usize,
@@ -183,7 +183,7 @@ async fn poller<S>(
183183
Oracle::poll_updates(
184184
&*state,
185185
network,
186-
mapping_key,
186+
oracle_program_key,
187187
publish_keypair.as_ref(),
188188
pyth_price_store_program_key,
189189
&client,

‎src/agent/solana.rs

Lines changed: 0 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -103,12 +103,6 @@ pub mod key_store {
103103
default
104104
)]
105105
pub pyth_price_store_program_key: Option<Pubkey>,
106-
/// The public key of the root mapping account
107-
#[serde(
108-
serialize_with = "pubkey_string_ser",
109-
deserialize_with = "pubkey_string_de"
110-
)]
111-
pub mapping_key: Pubkey,
112106
/// The public key of the accumulator program.
113107
#[serde(
114108
serialize_with = "opt_pubkey_string_ser",
@@ -127,8 +121,6 @@ pub mod key_store {
127121
pub pyth_oracle_program_key: Pubkey,
128122
/// Public key of the pyth-price-store program
129123
pub pyth_price_store_program_key: Option<Pubkey>,
130-
/// Public key of the root mapping account
131-
pub mapping_key: Pubkey,
132124
/// Public key of the accumulator program (if provided)
133125
pub accumulator_key: Option<Pubkey>,
134126
}
@@ -151,7 +143,6 @@ pub mod key_store {
151143
publish_keypair,
152144
pyth_oracle_program_key: config.pyth_oracle_program_key,
153145
pyth_price_store_program_key: config.pyth_price_store_program_key,
154-
mapping_key: config.mapping_key,
155146
accumulator_key: config.accumulator_key,
156147
})
157148
}

‎src/agent/state/oracle.rs

Lines changed: 102 additions & 72 deletions
Original file line numberDiff line numberDiff line change
@@ -20,10 +20,8 @@ use {
2020
},
2121
pyth_price_store::instruction::PUBLISHER_CONFIG_SEED,
2222
pyth_sdk_solana::state::{
23-
load_mapping_account,
2423
load_product_account,
2524
GenericPriceAccount,
26-
MappingAccount,
2725
PriceComp,
2826
PythnetPriceAccount,
2927
SolanaPriceAccount,
@@ -132,7 +130,6 @@ impl std::ops::Deref for PriceEntry {
132130

133131
#[derive(Default, Debug, Clone)]
134132
pub struct Data {
135-
pub mapping_accounts: HashMap<Pubkey, MappingAccount>,
136133
pub product_accounts: HashMap<Pubkey, ProductEntry>,
137134
pub price_accounts: HashMap<Pubkey, PriceEntry>,
138135
/// publisher => {their permissioned price accounts => price publishing metadata}
@@ -194,7 +191,7 @@ pub trait Oracle {
194191
async fn poll_updates(
195192
&self,
196193
network: Network,
197-
mapping_key: Pubkey,
194+
oracle_program_key: Pubkey,
198195
publish_keypair: Option<&Keypair>,
199196
pyth_price_store_program_key: Option<Pubkey>,
200197
rpc_client: &RpcClient,
@@ -269,20 +266,16 @@ where
269266
async fn poll_updates(
270267
&self,
271268
network: Network,
272-
mapping_key: Pubkey,
269+
oracle_program_key: Pubkey,
273270
publish_keypair: Option<&Keypair>,
274271
pyth_price_store_program_key: Option<Pubkey>,
275272
rpc_client: &RpcClient,
276273
max_lookup_batch_size: usize,
277274
) -> Result<()> {
278275
let mut publisher_permissions = HashMap::new();
279-
let mapping_accounts = fetch_mapping_accounts(rpc_client, mapping_key).await?;
280-
let (product_accounts, price_accounts) = fetch_product_and_price_accounts(
281-
rpc_client,
282-
max_lookup_batch_size,
283-
mapping_accounts.values(),
284-
)
285-
.await?;
276+
let (product_accounts, price_accounts) =
277+
fetch_product_and_price_accounts(rpc_client, oracle_program_key, max_lookup_batch_size)
278+
.await?;
286279

287280
for (price_key, price_entry) in price_accounts.iter() {
288281
for component in price_entry.comp {
@@ -337,7 +330,6 @@ where
337330
}
338331

339332
let new_data = Data {
340-
mapping_accounts,
341333
product_accounts,
342334
price_accounts,
343335
publisher_permissions,
@@ -412,57 +404,109 @@ async fn fetch_publisher_buffer_key(
412404
}
413405

414406
#[instrument(skip(rpc_client))]
415-
async fn fetch_mapping_accounts(
416-
rpc_client: &RpcClient,
417-
mapping_account_key: Pubkey,
418-
) -> Result<HashMap<Pubkey, MappingAccount>> {
419-
let mut accounts = HashMap::new();
420-
let mut account_key = mapping_account_key;
421-
while account_key != Pubkey::default() {
422-
let account = *load_mapping_account(
423-
&rpc_client
424-
.get_account_data(&account_key)
425-
.await
426-
.with_context(|| format!("load mapping account {}", account_key))?,
427-
)?;
428-
accounts.insert(account_key, account);
429-
account_key = account.next;
430-
}
431-
Ok(accounts)
432-
}
433-
434-
#[instrument(skip(rpc_client, mapping_accounts))]
435-
async fn fetch_product_and_price_accounts<'a, A>(
407+
async fn fetch_product_and_price_accounts(
436408
rpc_client: &RpcClient,
409+
oracle_program_key: Pubkey,
437410
max_lookup_batch_size: usize,
438-
mapping_accounts: A,
439-
) -> Result<(HashMap<Pubkey, ProductEntry>, HashMap<Pubkey, PriceEntry>)>
440-
where
441-
A: IntoIterator<Item = &'a MappingAccount>,
442-
{
443-
let mut product_keys = vec![];
444-
445-
// Get all product keys
446-
for mapping_account in mapping_accounts {
447-
for account_key in mapping_account
448-
.products
449-
.iter()
450-
.filter(|pubkey| **pubkey != Pubkey::default())
451-
{
452-
product_keys.push(*account_key);
453-
}
454-
}
455-
411+
) -> Result<(HashMap<Pubkey, ProductEntry>, HashMap<Pubkey, PriceEntry>)> {
456412
let mut product_entries = HashMap::new();
457413
let mut price_entries = HashMap::new();
458414

459-
// Lookup products and their prices using the configured batch size
460-
for product_key_batch in product_keys.as_slice().chunks(max_lookup_batch_size) {
461-
let (mut batch_products, mut batch_prices) =
462-
fetch_batch_of_product_and_price_accounts(rpc_client, product_key_batch).await?;
415+
let oracle_accounts = rpc_client.get_program_accounts(&oracle_program_key).await?;
416+
417+
// Go over all the product accounts and partially fill the product entries. The product
418+
// entries need to have prices inside them, which get filled by going over all the
419+
// price accounts.
420+
for (product_key, product) in oracle_accounts.iter().filter_map(|(pubkey, account)| {
421+
load_product_account(&account.data)
422+
.ok()
423+
.map(|product| (pubkey, product))
424+
}) {
425+
#[allow(deprecated)]
426+
let legacy_schedule: LegacySchedule = if let Some((_wsched_key, wsched_val)) =
427+
product.iter().find(|(k, _v)| *k == "weekly_schedule")
428+
{
429+
wsched_val.parse().unwrap_or_else(|err| {
430+
tracing::warn!(
431+
product_key = product_key.to_string(),
432+
weekly_schedule = wsched_val,
433+
"Oracle: Product has weekly_schedule defined but it could not be parsed. Falling back to 24/7 publishing.",
434+
);
435+
tracing::debug!(err = ?err, "Parsing error context.");
436+
Default::default()
437+
})
438+
} else {
439+
Default::default() // No market hours specified, meaning 24/7 publishing
440+
};
441+
442+
let market_schedule: Option<MarketSchedule> = if let Some((_msched_key, msched_val)) =
443+
product.iter().find(|(k, _v)| *k == "schedule")
444+
{
445+
match msched_val.parse::<MarketSchedule>() {
446+
Ok(schedule) => Some(schedule),
447+
Err(err) => {
448+
tracing::warn!(
449+
product_key = product_key.to_string(),
450+
schedule = msched_val,
451+
"Oracle: Product has schedule defined but it could not be parsed. Falling back to legacy schedule.",
452+
);
453+
tracing::debug!(err = ?err, "Parsing error context.");
454+
None
455+
}
456+
}
457+
} else {
458+
None
459+
};
460+
461+
let publish_interval: Option<Duration> = if let Some((
462+
_publish_interval_key,
463+
publish_interval_val,
464+
)) =
465+
product.iter().find(|(k, _v)| *k == "publish_interval")
466+
{
467+
match publish_interval_val.parse::<f64>() {
468+
Ok(interval) => Some(Duration::from_secs_f64(interval)),
469+
Err(err) => {
470+
tracing::warn!(
471+
product_key = product_key.to_string(),
472+
publish_interval = publish_interval_val,
473+
"Oracle: Product has publish_interval defined but it could not be parsed. Falling back to None.",
474+
);
475+
tracing::debug!(err = ?err, "parsing error context");
476+
None
477+
}
478+
}
479+
} else {
480+
None
481+
};
463482

464-
product_entries.extend(batch_products.drain());
465-
price_entries.extend(batch_prices.drain());
483+
product_entries.insert(
484+
*product_key,
485+
ProductEntry {
486+
account_data: *product,
487+
schedule: market_schedule.unwrap_or_else(|| legacy_schedule.into()),
488+
price_accounts: vec![],
489+
publish_interval,
490+
},
491+
);
492+
}
493+
494+
// Load the price accounts into price entry and also fill the product entires
495+
for (price_key, price) in oracle_accounts.iter().filter_map(|(pubkey, account)| {
496+
PriceEntry::load_from_account(&account.data).map(|product| (pubkey, product))
497+
}) {
498+
if let Some(prod) = product_entries.get_mut(&price.prod) {
499+
prod.price_accounts.push(*price_key);
500+
price_entries.insert(*price_key, price);
501+
} else {
502+
tracing::warn!(
503+
missing_product = price.prod.to_string(),
504+
price_key = price_key.to_string(),
505+
"Could not find product entry for price, listed in its prod field, skipping",
506+
);
507+
508+
continue;
509+
}
466510
}
467511

468512
Ok((product_entries, price_entries))
@@ -625,20 +669,6 @@ async fn fetch_batch_of_product_and_price_accounts(
625669
#[instrument(skip(data, new_data))]
626670
fn log_data_diff(data: &Data, new_data: &Data) {
627671
// Log new accounts which have been found
628-
let previous_mapping_accounts = data
629-
.mapping_accounts
630-
.keys()
631-
.cloned()
632-
.collect::<HashSet<_>>();
633-
tracing::info!(
634-
new = ?new_data
635-
.mapping_accounts
636-
.keys()
637-
.cloned()
638-
.collect::<HashSet<_>>().difference(&previous_mapping_accounts),
639-
total = data.mapping_accounts.len(),
640-
"Fetched mapping accounts."
641-
);
642672
let previous_product_accounts = data
643673
.product_accounts
644674
.keys()

‎src/bin/agent_migrate_config.rs

Lines changed: 0 additions & 213 deletions
This file was deleted.

0 commit comments

Comments
 (0)
Please sign in to comment.