
Commit

result
femto committed Jan 18, 2025
1 parent 89bc2b4 commit 8b7e70e
Showing 4 changed files with 8 additions and 7 deletions.
2 changes: 1 addition & 1 deletion minion/actions/lmp_action_node.py
@@ -36,7 +36,7 @@ async def execute(self, messages: Union[str, Message, List[Message]], response_f

        # Get the configuration from llm.config
        api_params = {
-            "temperature": self.llm.config.temperature + random.random() * 0.01,
+            "temperature": self.llm.config.temperature + random.random() * 0.01,  # add a small random offset to avoid prompt caching
            "model": self.llm.config.model,
        }

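Note: the offset added above is at most 0.01, so sampling behaviour is essentially unchanged while the request parameters differ on every call, which is usually enough to sidestep provider-side prompt/response caching. A minimal sketch of the same idea with stand-in configuration values (the values are illustrative, not taken from the repository):

    import random

    # Stand-in configuration values, for illustration only.
    base_temperature = 0.7
    model_name = "gpt-4o"

    # Perturb the temperature by less than 0.01 on each call so that identical
    # prompts still produce slightly different request parameters.
    api_params = {
        "temperature": base_temperature + random.random() * 0.01,
        "model": model_name,
    }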
4 changes: 2 additions & 2 deletions minion/main/ldb_worker.py
@@ -191,7 +191,7 @@ async def execute(self):
            entry=self.input.entry_point,
            model=self.model,
            messages=messages,
-            dataset_type=self.input.dataset,
+            dataset_type=self.input.dataset,  # TODO: this dataset type is just for the prompt
            level="block"
        )
        self.input.metadata["messages"] = messages
@@ -226,7 +226,7 @@ async def execute(self):
            failed_tests=self.input.metadata["ldb_format_test"],
            num_comps=1,
            temperature=self.brain.llm.config.temperature,
-            dataset_type=self.input.dataset
+            dataset_type=self.input.dataset  # TODO: this dataset type is just for the prompt
        )

        self.answer = answer
8 changes: 4 additions & 4 deletions minion/main/worker.py
@@ -622,7 +622,7 @@ async def choose_minion_and_run(self):
        return await self.execute_single()

    async def execute_ensemble(self):
-        if 'workers_config' not in self.input.execution_config:
+        if 'workers' not in self.input.execution_config:
            return await self.execute_single()

        # Get the result strategy
@@ -632,7 +632,7 @@ async def execute_ensemble(self):

        workers = []  # List to store actual worker instances

-        for worker_config in self.input.execution_config["workers_config"]:
+        for worker_config in self.input.execution_config["workers"]:
            minion_name = worker_config["name"]
            count = worker_config["count"]
            post_processing = worker_config.get("post_processing")
@@ -663,7 +663,7 @@ async def execute(self):

        if self.input.execution_state.current_minion:
            # Resume from the previous state; assume pre_processing has already been done
-            if hasattr(self.input, 'type') and self.input.type == "ensemble":
+            if hasattr(self.input, 'execution_config') and self.input.execution_config['type'] == "ensemble":
                await self.execute_ensemble()
            else:
                await self.execute_single()
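Note: the two worker.py hunks above read the same execution_config: the renamed "workers" list drives execute_ensemble, and the "type" field decides whether a resumed run re-enters the ensemble path. A hypothetical config matching the keys the code reads (the worker names, counts, and strategy value are assumptions, not values from the repository):

    # Hypothetical example; only the key names ("type", "workers", "name",
    # "count", "post_processing") come from the code shown above.
    execution_config = {
        "type": "ensemble",
        "result_strategy": "majority_voting",  # assumed strategy name
        "workers": [
            {"name": "cot", "count": 3, "post_processing": "extract_python"},
            {"name": "python", "count": 2},  # post_processing is optional
        ],
    }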
@@ -768,7 +768,7 @@ async def get_minion_class_and_name(self):
        else:
            # Smart selection logic
            choose_template = Template(SMART_PROMPT_TEMPLATE)
-            filtered_registry = {key: value for key, value in MINION_REGISTRY.items()}
+            filtered_registry = {key: value for key, value in WORKER_MINIONS.items()}
            filled_template = choose_template.render(minions=filtered_registry, input=self.input)

            # If brain.llms has a route configuration, try each LLM in turn
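Note: WORKER_MINIONS appears to be a name-to-class registry of the worker minions; the comprehension copies it before it is rendered into SMART_PROMPT_TEMPLATE so a routing LLM can pick a worker by name. A rough, self-contained sketch of that shape (the class and worker names are placeholders, not the repository's actual entries):

    # Placeholder minion classes, for illustration only.
    class CotMinion: ...
    class PythonMinion: ...

    WORKER_MINIONS = {"cot": CotMinion, "python": PythonMinion}

    # Copy the registry and hand it to the prompt template as `minions`,
    # so the routing model can choose one of the registered workers.
    filtered_registry = {key: value for key, value in WORKER_MINIONS.items()}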
1 change: 1 addition & 0 deletions requirements.txt
@@ -38,5 +38,6 @@ astor
graphviz
#vllm
astroid
+pysnooper
#ldb dependencies
#llmdebugger
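Note: pysnooper is a line-by-line tracing library; it is presumably added here to support the ldb (LLM debugger) dependencies listed above. A minimal usage sketch (the traced function is illustrative):

    import pysnooper

    # Decorating a function logs every executed line and each local-variable
    # change to stderr, which helps when tracing generated code.
    @pysnooper.snoop()
    def add(a, b):
        result = a + b
        return result

    add(1, 2)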
