Skip to content

Commit 2a7b20b

Browse files
committed
update moa example
1 parent 1b4de00 commit 2a7b20b

File tree

5 files changed

+59
-33
lines changed

5 files changed

+59
-33
lines changed

examples/descript/generate.ts

+9-2
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
#!/usr/bin/env -S npx ts-node --transpileOnly
22
import fs from "fs";
3-
import { Substrate, TranscribeSpeech } from "substrate";
3+
import { ComputeText, sb, Substrate, TranscribeSpeech } from "substrate";
44
import { currentDir } from "./util";
55

66
/**
@@ -9,7 +9,8 @@ import { currentDir } from "./util";
99
* https://media.substrate.run/kaufman-bafta-short.mp3
1010
* https://media.substrate.run/dfw-clip.m4a
1111
*/
12-
const sample = "https://media.substrate.run/my-dinner-andre.m4a"; // NB: this is a ~2hr long file
12+
// const sample = "https://media.substrate.run/my-dinner-andre.m4a"; // NB: this is a ~2hr long file
13+
const sample = "https://media.substrate.run/federer-dartmouth.m4a";
1314
const substrate = new Substrate({ apiKey: process.env["SUBSTRATE_API_KEY"] });
1415

1516
const audio_uri = process.argv[2] || sample;
@@ -19,6 +20,12 @@ async function main() {
1920
{ audio_uri, segment: true, align: true },
2021
{ cache_age: 60 * 60 * 24 * 7 },
2122
);
23+
// const summarize = new ComputeText({
24+
// model: "Llama3Instruct70B",
25+
// prompt: sb.interpolate`summarize this transcript: <TRANSCRIPT>${transcribe.future.text}</TRANSCRIPT>`,
26+
// max_tokens: 800,
27+
// });
28+
2229
const res = await substrate.run(transcribe);
2330
const transcript = res.get(transcribe);
2431

examples/image-generation.ts

+2-4
Original file line numberDiff line numberDiff line change
@@ -3,13 +3,11 @@
33
import { Substrate, ComputeText, GenerateImage } from "substrate";
44

55
async function main() {
6-
const SUBSTRATE_API_KEY = process.env["SUBSTRATE_API_KEY"];
7-
8-
const substrate = new Substrate({ apiKey: SUBSTRATE_API_KEY });
6+
const substrate = new Substrate({ apiKey: process.env["SUBSTRATE_API_KEY"] });
97

108
const scene = new ComputeText({
119
prompt:
12-
"describe a highly detailed forest scene with something suprising happening in one sentence, be concise, like hemmingway would write it.",
10+
"describe a highly detailed forest scene with something surprising happening in one sentence, be concise, like Hemingway would write it.",
1311
});
1412

1513
const styles = [

examples/mixture-of-agents/ask.ts

+37-19
Original file line numberDiff line numberDiff line change
@@ -4,46 +4,64 @@ import { Substrate, Box, sb, ComputeText } from "substrate";
44
import fs from "fs";
55
import { currentDir, sampleQuestion, aggregate, jqList } from "./util";
66

7+
const SUBSTRATE_API_KEY = process.env["SUBSTRATE_API_KEY"];
8+
const substrate = new Substrate({ apiKey: SUBSTRATE_API_KEY });
9+
710
const models = [
8-
"Mistral7BInstruct",
9-
"Mixtral8x7BInstruct",
10-
"Llama3Instruct8B",
11+
"Llama3Instruct405B",
12+
"claude-3-5-sonnet-20240620",
1113
"Llama3Instruct70B",
14+
"gpt-4o-mini",
15+
"Llama3Instruct8B",
16+
"Mixtral8x7BInstruct",
1217
];
13-
const max_tokens = 800;
18+
const aggregatorModel = "claude-3-5-sonnet-20240620";
19+
const max_tokens = 400;
20+
const temperature = 0.4;
1421
const opts = { cache_age: 60 * 60 * 24 * 7 };
1522

1623
const numLayers = 3;
1724
const question = process.argv[2] || sampleQuestion;
1825

19-
function getMixture(q: string, prev: any = null) {
20-
const prompt = prev
21-
? sb.concat(aggregate, "\n\nquestion: ", q, "\n\nprevious:\n\n", prev)
22-
: q;
26+
function getPrompt(prev: any = null) {
27+
return prev
28+
? sb.concat(
29+
aggregate,
30+
"\n\nuser query: ",
31+
question,
32+
"\n\nprevious responses:\n\n",
33+
prev,
34+
)
35+
: question;
36+
}
37+
38+
function getMixture(prev: any = null) {
2339
return new Box({
2440
value: models.map(
2541
(model) =>
26-
new ComputeText({ prompt, model, max_tokens }, opts).future.text,
42+
new ComputeText(
43+
{ prompt: getPrompt(prev), model, max_tokens, temperature },
44+
opts,
45+
).future.text,
2746
),
2847
});
2948
}
3049

50+
function getLastLayer(layers: Box[]) {
51+
return sb.jq<"string">(layers[layers.length - 1]!.future.value, jqList);
52+
}
53+
3154
async function main() {
32-
const SUBSTRATE_API_KEY = process.env["SUBSTRATE_API_KEY"];
33-
const substrate = new Substrate({ apiKey: SUBSTRATE_API_KEY });
3455
const layers: Box[] = [getMixture(question)];
35-
const lastLayer = () =>
36-
sb.jq<"string">(layers[layers.length - 1]!.future.value, jqList);
37-
3856
for (let i = 0; i < numLayers - 1; i++) {
39-
layers.push(getMixture(question, lastLayer()));
57+
layers.push(getMixture(getLastLayer(layers)));
4058
}
41-
4259
const final = new ComputeText(
4360
{
44-
prompt: sb.concat(aggregate, "\n\n", lastLayer()),
45-
model: "Llama3Instruct70B",
46-
max_tokens,
61+
prompt: getPrompt(getLastLayer(layers)),
62+
model: aggregatorModel,
63+
max_tokens: 800,
64+
temperature,
4765
},
4866
opts,
4967
);

examples/mixture-of-agents/index.html

+9-6
Original file line numberDiff line numberDiff line change
@@ -159,11 +159,13 @@
159159
<script>
160160
const question = "{{ question }}";
161161
const modelNames = [
162-
"Mistral7BInstruct",
163-
"Mixtral8x7BInstruct",
164-
"Llama3Instruct8B",
165-
"Llama3Instruct70B",
166-
];
162+
"Llama 3.1 405B",
163+
"Claude 3.5",
164+
"Llama 3.1 70B",
165+
"GPT-4o Mini",
166+
"Llama 3.1 8B",
167+
"Mixtral 8x7B",
168+
]
167169
const individualResults = "{{ individual }}";
168170
const aggResults = "{{ summaries }}";
169171

@@ -187,7 +189,8 @@
187189
if (showingIndividual) {
188190
contentArea.textContent =
189191
individualResults[currentLayer][currentIndex].trim();
190-
cardTitle.textContent = modelNames[currentIndex];
192+
193+
cardTitle.textContent = `${modelNames[currentIndex]} - Layer ${ currentLayer + 1 }`;
191194
} else {
192195
contentArea.textContent = aggResults[currentLayer].trim();
193196
cardTitle.textContent = `MoA Layer ${currentLayer + 1}`;

examples/mixture-of-agents/util.ts

+2-2
Original file line numberDiff line numberDiff line change
@@ -2,8 +2,8 @@ import { fileURLToPath } from "url";
22
import { dirname } from "path";
33

44
export const sampleQuestion =
5-
"What was Arendt's notion of Freedom? How did she distinguish it from Action?";
6-
export const aggregate = `You have been provided with a set of responses from various open-source models to the latest user query. Your task is to synthesize these responses into a single, high-quality response. It is crucial to critically evaluate the information provided in these responses, recognizing that some of it may be biased or incorrect. Your response should not simply replicate the given answers but should offer a refined, accurate, and comprehensive reply to the instruction. Ensure your response is well-structured, well-considered, and adheres to the highest standards of accuracy and reliability. Do not respond as if we're having a conversation, just output an objective response.`;
5+
"The following is a hypothetical short story written by Asimov after seeing the world in 2024. Go beyond the obvious, and come up with a creative story that is incisive, allegorical, and relevant. Respond starting with the title on the first line, followed by two newlines, and then the story.";
6+
export const aggregate = `You have been provided with a set of responses to a user query. Your task is to synthesize these responses into a single, high-quality response. It is crucial to critically evaluate the information provided in these responses, recognizing that some of it may be biased or incorrect. Your response should not simply replicate the given answers but should offer a refined, accurate, and comprehensive reply to the original user query. Ensure your response is well-structured, well-considered, and adheres to the highest standards of accuracy and reliability. Do not respond conversationally or acknowledge the asking of the query, just output an objective response.`;
77
export const jqList = `to_entries | map(((.key + 1) | tostring) + ". " + .value) | join("\n")`;
88

99
// @ts-ignore

0 commit comments

Comments (0)