Skip to content

Commit 7e64cff

Browse files
committed
add docker config for redis and postgres
1 parent 0dceab7 commit 7e64cff

File tree

8 files changed

+220
-74
lines changed

8 files changed

+220
-74
lines changed

Diff for: .gitignore

+2-1
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
.env
22

33
node_modules/
4-
dist/
4+
dist/
5+
postgres_data/

Diff for: Makefile

+19
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,19 @@
1+
# Define variables
2+
COMPOSE=docker-compose
3+
4+
# Default target
5+
.PHONY: help
6+
help:
7+
@echo "Usage:"
8+
@echo " make up - Run docker-compose up"
9+
@echo " make down - Stop and remove containers"
10+
11+
# Target to run docker-compose up
12+
.PHONY: up
13+
up:
14+
$(COMPOSE) up -d
15+
16+
# Target to stop and remove containers
17+
.PHONY: down
18+
down:
19+
$(COMPOSE) down

Diff for: docker-compose.yaml

+26
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,26 @@
1+
version: "3.8"
2+
3+
services:
4+
redis:
5+
image: redis:latest
6+
container_name: redis_server
7+
restart: always
8+
ports:
9+
- "63799:6379"
10+
11+
postgres:
12+
image: postgres:latest
13+
container_name: postgres_server
14+
restart: always
15+
ports:
16+
- "5433:5432"
17+
environment:
18+
POSTGRES_USER: dbuser
19+
POSTGRES_PASSWORD: dbpassword
20+
POSTGRES_DB: evm
21+
22+
volumes:
23+
- "${PWD}/postgres_data:/var/lib/postgresql/data"
24+
25+
volumes:
26+
postgres_data:
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,2 @@
1+
-- AlterTable
2+
ALTER TABLE "Block" ALTER COLUMN "difficulty" SET DATA TYPE BIGINT;

Diff for: prisma/schema.prisma

+4-6
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@ model Block {
1919
hash String @unique
2020
parentHash String
2121
nonce String?
22-
difficulty Int
22+
difficulty BigInt
2323
gasLimit String
2424
gasUsed String
2525
miner String
@@ -69,7 +69,7 @@ model InternalTransaction {
6969
}
7070

7171
model Log {
72-
id Int @id @default(autoincrement())
72+
id Int @id @default(autoincrement())
7373
transactionHash String
7474
logIndex Int
7575
blockNumber Int
@@ -78,10 +78,8 @@ model Log {
7878
data String
7979
topics String[]
8080
removed Boolean
81-
82-
// Define a relation to the Transaction model
83-
transaction Transaction @relation(fields: [transactionHash], references: [hash])
84-
block Block @relation(fields: [blockNumber], references: [number])
81+
Block Block @relation(fields: [blockNumber], references: [number])
82+
Transaction Transaction @relation(fields: [transactionHash], references: [hash])
8583
8684
@@unique([transactionHash, logIndex])
8785
@@index([transactionHash, logIndex])

Diff for: readme.md

+64
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,64 @@
1+
# Ethereum Data Ingester and API Development
2+
3+
## Overview
4+
5+
The Ethereum Data Ingester and API Development project aims to develop a system for ingesting historical data from the Ethereum blockchain and exposing it through a RESTful API. The system consists of multiple components including a data ingester, PostgreSQL database for storage, Redis for message queuing, and a Node.js server for hosting the API endpoints.
6+
7+
## Technical Stack
8+
9+
- **Ethers.js and TypeScript**: Used for connecting to the Ethereum blockchain, fetching transactions, and internal transactions.
10+
- **Prisma**: Database ORM for PostgreSQL, providing a convenient way to interact with the database.
11+
- **Redis**: Used as a message queue service to implement the pub-sub model for asynchronous processing.
12+
- **Express.js**: Framework for building the RESTful API.
13+
- **Docker**: Used for containerization to simplify deployment and manage dependencies.
14+
15+
## Running the Project Locally
16+
17+
To run the project locally, follow these steps:
18+
19+
1. Clone the repository: `git clone [email protected]:gsofter/evm-data-ingester.git`
20+
2. Navigate to the project directory: `cd evm-data-ingester`
21+
3. Create a `.env` file in the root directory and set the necessary environment variables.
22+
23+
Example:
24+
25+
```
26+
DATABASE_URL=<your-postgres-db-url>
27+
RPC_PROVIDER_URL=<eth-rpc-provider-url>
28+
REDIS_URL=<your-redis-url>
29+
ETHERSCAN_API_KEY=<your-etherscan-api-key>
30+
```
31+
32+
4. Run `docker-compose up -d` to start the Redis and PostgreSQL services.
33+
5. Run `npx prisma migrate dev` to run migrations for the PostgreSQL database.
34+
6. Finally, run `npm run dev` to start the Node.js API server.
35+
36+
## How to start ingesting data from the EVM
37+
38+
```
39+
npx ts-node src/injest.ts <from_block> <to_block>
40+
41+
```
42+
43+
## How to test endpoint to get logs
44+
45+
### /logs
46+
47+
The `/logs` endpoint allows you to query logs from the Ethereum blockchain. It supports filter parameters similar to the JSON-RPC `eth_getLogs` method.
48+
49+
To test the `/logs` endpoint:
50+
51+
1. Make a GET request to the `/logs` endpoint with the desired query parameters.
52+
2. Example query parameters include:
53+
54+
- `fromBlock`: Specify the starting block number.
55+
- `toBlock`: Specify the ending block number.
56+
- `address`: Specify the address of the contract.
57+
- `topics`: Specify log topics.
58+
59+
60+
61+
3. Example request:
62+
63+
`<API_HOST>/logs?address=0x0Df581a7afC09d0A0a55BF864baEf2A4559Bbfe2&topics=0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef&fromBlock=19562372`
64+

Diff for: src/controllers/ethereum-data-ingestor.ts

+65-52
Original file line numberDiff line numberDiff line change
@@ -89,6 +89,8 @@ export class EthereumDataIngester {
8989
console.log(`Tx for ${tx.hash} queued.`);
9090
}
9191
console.log(`Transactions for block ${blockNumber} queued successfully.`);
92+
93+
this.fetchLogs(blockNumber);
9294
} catch (error) {
9395
console.error(
9496
`Error storing transactions for block ${blockNumber}:`,
@@ -119,70 +121,82 @@ export class EthereumDataIngester {
119121
}
120122

121123
async storeBlock(block: IBlock) {
122-
const { number, ...updates } = block;
123-
// create new block or update existing one
124-
await this.prisma.block.upsert({
125-
where: {
126-
number: block.number,
127-
},
128-
create: {
129-
number: block.number,
130-
...updates,
131-
},
132-
update: {
133-
...updates,
134-
},
135-
});
124+
try {
125+
const { number, ...updates } = block;
126+
// create new block or update existing one
127+
await this.prisma.block.upsert({
128+
where: {
129+
number: block.number,
130+
},
131+
create: {
132+
number: block.number,
133+
...updates,
134+
},
135+
update: {
136+
...updates,
137+
},
138+
});
136139

137-
console.log(`Block stored for ${block.number}`);
140+
console.log(`Block stored for ${block.number}`);
141+
} catch (error) {
142+
console.error(`Error storing block for ${block.number}`, error);
143+
}
138144
}
139145

140146
async storeTransaction(tx: ITransaction) {
141147
const { hash, ...updates } = tx;
142-
await this.prisma.transaction.upsert({
143-
where: {
144-
hash,
145-
},
146-
create: {
147-
hash,
148-
...updates,
149-
},
150-
update: {
151-
...updates,
152-
},
153-
});
148+
try {
149+
await this.prisma.transaction.upsert({
150+
where: {
151+
hash,
152+
},
153+
create: {
154+
hash,
155+
...updates,
156+
},
157+
update: {
158+
...updates,
159+
},
160+
});
154161

155-
console.log(`Tx stored for ${tx.hash}`);
162+
this.publishToRedis("internal-transaction", { hash: tx.hash });
156163

157-
this.publishToRedis("internal-transaction", { hash: tx.hash });
164+
console.log(`Tx stored for ${tx.hash}`);
165+
} catch (error) {
166+
console.error(`Error storing transaction for ${hash}`, error);
167+
}
158168
}
159169

160170
async storeLog(log: ethers.providers.Log) {
161-
const logFound = await this.prisma.log.findFirst({
162-
where: {
163-
transactionHash: log.transactionHash,
164-
},
165-
});
171+
try {
172+
const logFound = await this.prisma.log.findFirst({
173+
where: {
174+
transactionHash: log.transactionHash,
175+
},
176+
});
166177

167-
if (logFound) {
168-
console.log(`Log for ${log.transactionHash} existing`);
169-
return;
170-
}
178+
if (logFound) {
179+
console.log(`Log for ${log.transactionHash} existing`);
180+
return;
181+
}
171182

172-
await this.prisma.log.create({
173-
data: {
174-
transactionHash: log.transactionHash,
175-
logIndex: log.logIndex,
176-
blockNumber: log.blockNumber,
177-
blockHash: log.blockHash,
178-
address: log.address,
179-
data: log.data,
180-
topics: log.topics,
181-
removed: log.removed,
182-
},
183-
});
183+
await this.prisma.log.create({
184+
data: {
185+
transactionHash: log.transactionHash,
186+
logIndex: log.logIndex,
187+
blockNumber: log.blockNumber,
188+
blockHash: log.blockHash,
189+
address: log.address,
190+
data: log.data,
191+
topics: log.topics,
192+
removed: log.removed,
193+
},
194+
});
184195

185-
console.log(`Log stored for ${log.transactionHash}`);
196+
console.log(`Log stored for ${log.transactionHash}`);
197+
} catch (error) {
198+
console.error(`Error storing log for ${log.transactionHash}`, error);
199+
}
186200
}
187201

188202
async fetchAndStoreInternalTransactions(txData: any): Promise<void> {
@@ -249,7 +263,6 @@ export class EthereumDataIngester {
249263

250264
async startFetchingBlockData(blockNumber: number): Promise<void> {
251265
await this.fetchTransactions(blockNumber);
252-
await this.fetchLogs(blockNumber);
253266
}
254267

255268
async disconnectDB(): Promise<void> {

Diff for: src/script.ts renamed to src/injest.ts

+38-15
Original file line numberDiff line numberDiff line change
@@ -1,13 +1,32 @@
11
import { Redis } from "ioredis";
22
import { EthereumDataIngester } from "./controllers/ethereum-data-ingestor";
3-
// Specify Ethereum provider URL and PostgreSQL database URL
3+
4+
// Parse command line arguments
5+
const args = process.argv.slice(2);
6+
const fromBlock = parseInt(args[0]);
7+
const toBlock = parseInt(args[1]);
8+
9+
// Check if fromBlock and toBlock are valid numbers
10+
if (
11+
isNaN(fromBlock) ||
12+
isNaN(toBlock) ||
13+
fromBlock > toBlock ||
14+
fromBlock < 0
15+
) {
16+
console.error(
17+
"Invalid block numbers. Please provide valid block numbers for <from> and <to>."
18+
);
19+
process.exit(1);
20+
}
21+
22+
console.log(`Fetching data for blocks from ${fromBlock} to ${toBlock}`);
23+
424
const providerUrl = process.env.RPC_PROVIDER_URL || "https://eth.llamarpc.com";
5-
const redisUrl = "redis://localhost:6379";
25+
const redisUrl = process.env.REDIS_URL || "redis://localhost:6379";
626

727
// Create an instance of EthereumDataIngester class
828
const dataIngester = new EthereumDataIngester(providerUrl, redisUrl);
929

10-
console.log("redisUrl => ", redisUrl);
1130
const sub = new Redis(redisUrl);
1231
sub.subscribe(
1332
"transaction",
@@ -51,16 +70,20 @@ sub.on("error", function (error) {
5170
console.dir(error);
5271
});
5372

54-
// Random block number for trial purpose
55-
const blockNumber = 19562371;
56-
57-
dataIngester
58-
.startFetchingBlockData(blockNumber)
59-
.catch((error) => console.error("Error fetching and storing data:", error))
60-
.finally(async () => {
61-
// sub.disconnect();
62-
// dataIngester.disconnectDB();
63-
// dataIngester.disconnectRedis();
73+
// Fetch and process data for each block within the specified range
74+
for (let blockNumber = fromBlock; blockNumber <= toBlock; blockNumber++) {
75+
dataIngester
76+
.startFetchingBlockData(blockNumber)
77+
.catch((error) =>
78+
console.error(
79+
`Error fetching and storing data for block ${blockNumber}:`,
80+
error
81+
)
82+
);
83+
}
6484

65-
console.log("Done");
66-
});
85+
// Disconnect from database and Redis after processing
86+
process.on("exit", async () => {
87+
await dataIngester.disconnectDB();
88+
await dataIngester.disconnectRedis();
89+
});

0 commit comments

Comments
 (0)