Commit 679aeca

Merge pull request #523 from macrocosm-os/staging

v2.16.0

2 parents: 2b9a7ce + 76fb522

File tree: 17 files changed, +307 −250 lines

.github/workflows/python-package.yml

Lines changed: 1 addition & 1 deletion
```diff
@@ -12,7 +12,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: ["3.9", "3.10"]
+        python-version: ["3.10"]
 
     steps:
     - uses: actions/checkout@v3
```
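CI now tests only Python 3.10. A hypothetical runtime guard mirroring that floor (not part of this commit, which only narrows the test matrix) could look like:

```python
import sys

# Hypothetical guard mirroring the new CI floor (3.9 dropped from the
# test matrix); illustrative only, not from this commit.
if sys.version_info < (3, 10):
    raise RuntimeError(f"Python >= 3.10 required, found {sys.version.split()[0]}")
```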

neurons/miners/epistula_miner/miner.py

Lines changed: 17 additions & 17 deletions
```diff
@@ -1,8 +1,8 @@
 # ruff: noqa: E402
-from prompting import settings
+from shared import settings
 
-settings.settings = settings.Settings.load(mode="miner")
-settings = settings.settings
+settings.shared_settings = settings.SharedSettings.load(mode="miner")
+shared_settings = settings.shared_settings
 
 import asyncio
 import json
@@ -39,7 +39,7 @@ def __init__(self):
         self.client = httpx.AsyncClient(
             base_url="https://api.openai.com/v1",
             headers={
-                "Authorization": f"Bearer {settings.OPENAI_API_KEY}",
+                "Authorization": f"Bearer {shared_settings.OPENAI_API_KEY}",
                 "Content-Type": "application/json",
             },
         )
@@ -107,14 +107,14 @@ async def verify_request(
 
         signed_by = request.headers.get("Epistula-Signed-By")
         signed_for = request.headers.get("Epistula-Signed-For")
-        if signed_for != settings.WALLET.hotkey.ss58_address:
+        if signed_for != shared_settings.WALLET.hotkey.ss58_address:
             raise HTTPException(status_code=400, detail="Bad Request, message is not intended for self")
-        if signed_by not in settings.METAGRAPH.hotkeys:
+        if signed_by not in shared_settings.METAGRAPH.hotkeys:
             raise HTTPException(status_code=401, detail="Signer not in metagraph")
 
-        uid = settings.METAGRAPH.hotkeys.index(signed_by)
-        stake = settings.METAGRAPH.S[uid].item()
-        if not settings.NETUID == 61 and stake < 10000:
+        uid = shared_settings.METAGRAPH.hotkeys.index(signed_by)
+        stake = shared_settings.METAGRAPH.S[uid].item()
+        if not shared_settings.NETUID == 61 and stake < 10000:
             logger.warning(f"Blacklisting request from {signed_by} [uid={uid}], not enough stake -- {stake}")
             raise HTTPException(status_code=401, detail="Stake below minimum: {stake}")
 
@@ -133,7 +133,7 @@ async def verify_request(
             raise HTTPException(status_code=400, detail=err)
 
     def run(self):
-        external_ip = None  # settings.EXTERNAL_IP
+        external_ip = None  # shared_settings.EXTERNAL_IP
         if not external_ip or external_ip == "[::]":
             try:
                 external_ip = requests.get("https://checkip.amazonaws.com").text.strip()
@@ -142,16 +142,16 @@ def run(self):
                 logger.error("Failed to get external IP")
 
         logger.info(
-            f"Serving miner endpoint {external_ip}:{settings.AXON_PORT} on network: {settings.SUBTENSOR_NETWORK} with netuid: {settings.NETUID}"
+            f"Serving miner endpoint {external_ip}:{shared_settings.AXON_PORT} on network: {shared_settings.SUBTENSOR_NETWORK} with netuid: {shared_settings.NETUID}"
         )
 
         serve_success = serve_extrinsic(
-            subtensor=settings.SUBTENSOR,
-            wallet=settings.WALLET,
+            subtensor=shared_settings.SUBTENSOR,
+            wallet=shared_settings.WALLET,
             ip=external_ip,
-            port=settings.AXON_PORT,
+            port=shared_settings.AXON_PORT,
             protocol=4,
-            netuid=settings.NETUID,
+            netuid=shared_settings.NETUID,
         )
         if not serve_success:
             logger.error("Failed to serve endpoint")
@@ -174,15 +174,15 @@ def run(self):
         fast_config = uvicorn.Config(
             app,
             host="0.0.0.0",
-            port=settings.AXON_PORT,
+            port=shared_settings.AXON_PORT,
             log_level="info",
             loop="asyncio",
             workers=4,
         )
         self.fast_api = FastAPIThreadedServer(config=fast_config)
         self.fast_api.start()
 
-        logger.info(f"Miner starting at block: {settings.SUBTENSOR.block}")
+        logger.info(f"Miner starting at block: {shared_settings.SUBTENSOR.block}")
 
         # Main execution loop.
         try:
```
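The miner now bootstraps its configuration from `shared.settings` instead of `prompting.settings`: the first importer loads the singleton, and everything downstream reads `shared_settings`. A minimal sketch of what that pattern implies for a hypothetical `shared/settings.py`; the fields and the pydantic-settings base are assumptions, and the real module has many more fields:

```python
# Sketch of the settings singleton the miner bootstraps; assumes the
# pydantic-settings package. Fields shown are placeholders.
from pydantic_settings import BaseSettings


class SharedSettings(BaseSettings):
    OPENAI_API_KEY: str = ""
    AXON_PORT: int = 8091
    NETUID: int = 61

    @classmethod
    def load(cls, mode: str) -> "SharedSettings":
        # `mode` plausibly gates which variables are required
        # ("miner" vs. "validator"); an assumption, not repo behavior.
        return cls()


# Module-level singleton, populated once by the first importer:
#   settings.shared_settings = settings.SharedSettings.load(mode="miner")
shared_settings: SharedSettings | None = None
```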

neurons/test_vanilla_post.py

Lines changed: 3 additions & 3 deletions
```diff
@@ -1,10 +1,10 @@
 import openai
 from httpx import Timeout
 
-from prompting import settings
+from shared import settings
 
-settings.settings = settings.Settings.load(mode="validator")
-settings = settings.settings
+settings.shared_settings = settings.SharedSettings.load(mode="validator")
+shared_settings = settings.shared_settings
 
 from shared.epistula import create_header_hook
```

neurons/validator.py

Lines changed: 4 additions & 3 deletions
```diff
@@ -81,11 +81,12 @@ async def spawn_loops(task_queue, scoring_queue, reward_events):
     asyncio.run(spawn_loops(task_queue, scoring_queue, reward_events))
 
 
-def start_api():
+def start_api(scoring_queue, reward_events):
     async def start():
         from prompting.api.api import start_scoring_api  # noqa: F401
 
-        await start_scoring_api()
+        await start_scoring_api(scoring_queue, reward_events)
+
         while True:
             await asyncio.sleep(10)
             logger.debug("Running API...")
@@ -125,7 +126,7 @@ async def main():
 
     if shared_settings.DEPLOY_SCORING_API:
         # Use multiprocessing to bypass API blocking issue
-        api_process = mp.Process(target=start_api, name="API_Process")
+        api_process = mp.Process(target=start_api, args=(scoring_queue, reward_events), name="API_Process")
         api_process.start()
         processes.append(api_process)
```
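`start_api` now receives `scoring_queue` and `reward_events` explicitly, since a child process does not see module globals mutated in the parent; shared state has to travel through `args=(...)`. A minimal, self-contained sketch of the pattern, with illustrative queue types and payloads:

```python
import multiprocessing as mp
import time


def start_api(scoring_queue, reward_events):
    # The child re-binds whatever shared state it is handed via args=(...);
    # globals set in the parent are not visible here under "spawn".
    while True:
        while not scoring_queue.empty():
            print("would score:", scoring_queue.get())
        time.sleep(1)


if __name__ == "__main__":
    scoring_queue = mp.Queue()  # queue types are an assumption
    reward_events = mp.Queue()
    api_process = mp.Process(target=start_api, args=(scoring_queue, reward_events), name="API_Process")
    api_process.start()
    scoring_queue.put({"uid": 1, "chunks": ["hello"]})
    time.sleep(3)
    api_process.terminate()
    api_process.join()
```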

prompting/api/api.py

Lines changed: 4 additions & 1 deletion
```diff
@@ -4,6 +4,7 @@
 
 from prompting.api.miner_availabilities.api import router as miner_availabilities_router
 from prompting.api.scoring.api import router as scoring_router
+from prompting.rewards.scoring import task_scorer
 from shared.settings import shared_settings
 
 app = FastAPI()
@@ -17,7 +18,9 @@ def health():
     return {"status": "healthy"}
 
 
-async def start_scoring_api():
+async def start_scoring_api(scoring_queue, reward_events):
+    task_scorer.scoring_queue = scoring_queue
+    task_scorer.reward_events = reward_events
     logger.info(f"Starting Scoring API on https://0.0.0.0:{shared_settings.SCORING_API_PORT}")
     uvicorn.run(
         "prompting.api.api:app", host="0.0.0.0", port=shared_settings.SCORING_API_PORT, loop="asyncio", reload=False
```

prompting/api/scoring/api.py

Lines changed: 53 additions & 19 deletions
```diff
@@ -4,9 +4,11 @@
 from fastapi import APIRouter, Depends, Header, HTTPException, Request
 from loguru import logger
 
+from prompting.datasets.random_website import DDGDatasetEntry
 from prompting.llms.model_zoo import ModelZoo
 from prompting.rewards.scoring import task_scorer
 from prompting.tasks.inference import InferenceTask
+from prompting.tasks.web_retrieval import WebRetrievalTask
 from shared.base import DatasetEntry
 from shared.dendrite import DendriteResponseEvent
 from shared.epistula import SynapseStreamResult
@@ -37,22 +39,54 @@ async def score_response(request: Request, api_key_data: dict = Depends(validate
     uid = int(payload.get("uid"))
     chunks = payload.get("chunks")
     llm_model = ModelZoo.get_model_by_id(model) if (model := body.get("model")) else None
-    task_scorer.add_to_queue(
-        task=InferenceTask(
-            messages=[msg["content"] for msg in body.get("messages")],
-            llm_model=llm_model,
-            llm_model_id=body.get("model"),
-            seed=int(body.get("seed", 0)),
-            sampling_params=body.get("sampling_params", {}),
-        ),
-        response=DendriteResponseEvent(
-            uids=[uid],
-            stream_results=[SynapseStreamResult(accumulated_chunks=[chunk for chunk in chunks if chunk is not None])],
-            timeout=shared_settings.NEURON_TIMEOUT,
-        ),
-        dataset_entry=DatasetEntry(),
-        block=shared_settings.METAGRAPH.block,
-        step=-1,
-        task_id=str(uuid.uuid4()),
-    )
-    logger.info("Organic tas appended to scoring queue")
+    task = body.get("task")
+    if task == "InferenceTask":
+        logger.info(f"Received Organic InferenceTask with body: {body}")
+        task_scorer.add_to_queue(
+            task=InferenceTask(
+                messages=[msg["content"] for msg in body.get("messages")],
+                llm_model=llm_model,
+                llm_model_id=body.get("model"),
+                seed=int(body.get("seed", 0)),
+                sampling_params=body.get("sampling_params", {}),
+            ),
+            response=DendriteResponseEvent(
+                uids=[uid],
+                stream_results=[
+                    SynapseStreamResult(accumulated_chunks=[chunk for chunk in chunks if chunk is not None])
+                ],
+                timeout=shared_settings.NEURON_TIMEOUT,
+            ),
+            dataset_entry=DatasetEntry(),
+            block=shared_settings.METAGRAPH.block,
+            step=-1,
+            task_id=str(uuid.uuid4()),
+        )
+    elif task == "WebRetrievalTask":
+        logger.info(f"Received Organic WebRetrievalTask with body: {body}")
+        try:
+            search_term = body.get("messages")[0].get("content")
+        except Exception as ex:
+            logger.error(f"Failed to get search term from messages: {ex}, can't score WebRetrievalTask")
+            return
+
+        task_scorer.add_to_queue(
+            task=WebRetrievalTask(
+                messages=[msg["content"] for msg in body.get("messages")],
+                seed=int(body.get("seed", 0)),
+                sampling_params=body.get("sampling_params", {}),
+                query=search_term,
+            ),
+            response=DendriteResponseEvent(
+                uids=[uid],
+                stream_results=[
+                    SynapseStreamResult(accumulated_chunks=[chunk for chunk in chunks if chunk is not None])
+                ],
+                timeout=shared_settings.NEURON_TIMEOUT,
+            ),
+            dataset_entry=DDGDatasetEntry(search_term=search_term),
+            block=shared_settings.METAGRAPH.block,
+            step=-1,
+            task_id=str(uuid.uuid4()),
+        )
+    logger.info("Organic task appended to scoring queue")
```

prompting/datasets/random_website.py

Lines changed: 2 additions & 2 deletions
```diff
@@ -15,8 +15,8 @@
 
 class DDGDatasetEntry(DatasetEntry):
     search_term: str
-    website_url: str
-    website_content: str
+    website_url: str = None
+    website_content: str = None
 
 
 class DDGDataset(BaseDataset):
```
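Defaulting `website_url` and `website_content` to `None` lets an entry be constructed from a search term alone, as the scoring endpoint above does with `DDGDatasetEntry(search_term=search_term)`. Assuming `DatasetEntry` is a pydantic model, the more explicit spelling is `str | None`, which also type-checks when `None` is passed explicitly; a minimal sketch:

```python
from pydantic import BaseModel


class DDGDatasetEntry(BaseModel):  # BaseModel stands in for DatasetEntry
    search_term: str
    website_url: str | None = None
    website_content: str | None = None


# Now valid: an entry built from the search term alone.
entry = DDGDatasetEntry(search_term="bittensor subnet scoring")
```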

prompting/settings.py

Lines changed: 0 additions & 134 deletions
This file was deleted.

prompting/weight_setting/weight_setter.py

Lines changed: 1 addition & 1 deletion
```diff
@@ -102,7 +102,7 @@ def set_weights(
             "weights": processed_weights.flatten(),
             "raw_weights": str(list(weights.flatten())),
             "averaged_weights": str(list(averaged_weights.flatten())),
-            "block": ttl_get_block(),
+            "block": ttl_get_block(subtensor=subtensor),
         }
     )
     step_filename = "weights.csv"
```
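`ttl_get_block` now takes the `subtensor` handle explicitly instead of resolving it from shared state. A hypothetical sketch of such a TTL-cached block lookup; the real helper lives in `shared` and may differ:

```python
# Hypothetical TTL-cached block lookup; assumes the handle exposes
# get_current_block(), as bittensor's Subtensor does.
import time

_BLOCK_TTL_SECONDS = 12.0  # roughly one block
_cache: dict[str, tuple[float, int]] = {}


def ttl_get_block(subtensor) -> int:
    # Return a cached block number while it is fresh; otherwise ask the
    # chain. Passing `subtensor` in keeps the helper free of globals.
    now = time.monotonic()
    hit = _cache.get("block")
    if hit is not None and now - hit[0] < _BLOCK_TTL_SECONDS:
        return hit[1]
    block = subtensor.get_current_block()
    _cache["block"] = (now, block)
    return block
```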
