Adding slider for max tokens for generating the podcast script (#20)
LeonB87 authored Feb 2, 2025
1 parent 4a38cba commit caea820
Showing 2 changed files with 12 additions and 1 deletion.
app/app.py (10 additions, 0 deletions)
@@ -83,6 +83,15 @@
         else 1,
     )
 
+    # Max tokens slider
+    max_tokens = st.slider(
+        "Max Tokens",
+        min_value=1000,
+        max_value=32000,
+        value=8000,
+        step=500,
+        help="Select the maximum number of tokens to be used for generating the podcast script. Adjust this according to your OpenAI quota.",
+    )
     # Submit button
     generate_podcast = form_container.button(
         "Generate Podcast", type="primary", disabled=not uploaded_file
@@ -131,6 +140,7 @@
         title=podcast_title,
         voice_1=voice_1,
         voice_2=voice_2,
+        max_tokens=max_tokens,
     )
 
     podcast_script = podcast_response.podcast["script"]
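
Note: as a quick way to try the new control in isolation, the following is a minimal standalone Streamlit sketch of the slider added above. It is illustrative only; the form container, file upload, and submit button of the real app are omitted.

# Standalone sketch (save as sketch.py and run `streamlit run sketch.py`).
# Mirrors the slider introduced in this commit; everything else is omitted.
import streamlit as st

max_tokens = st.slider(
    "Max Tokens",
    min_value=1000,
    max_value=32000,
    value=8000,
    step=500,
    help="Select the maximum number of tokens to be used for generating the podcast script.",
)
# st.slider returns the currently selected integer on every rerun.
st.write(f"Selected token limit: {max_tokens}")
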
app/utils/llm.py (2 additions, 1 deletion)
@@ -103,6 +103,7 @@ def document_to_podcast_script(
title: str = "AI in Action",
voice_1: str = "Andrew",
voice_2: str = "Emma",
max_tokens: int = 8000,
) -> PodcastScriptResponse:
"""Get LLM response."""

@@ -136,7 +137,7 @@ def document_to_podcast_script(
         model=os.getenv("AZURE_OPENAI_MODEL_DEPLOYMENT", "gpt-4o"),
         temperature=0.7,
         response_format={"type": "json_schema", "json_schema": JSON_SCHEMA},
-        max_tokens=8000,
+        max_tokens=max_tokens,
     )
 
     message = chat_completion.choices[0].message.content
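
Note: the slider value ultimately caps the completion length of the Azure OpenAI call shown above. The following is a minimal sketch of that pattern; the client construction, endpoint/key environment variable names, and API version are assumptions for illustration and are not part of this commit (only AZURE_OPENAI_MODEL_DEPLOYMENT appears in the diff).

# Sketch: forward a user-selected token cap to an Azure OpenAI chat completion.
import os

from openai import AzureOpenAI

client = AzureOpenAI(
    azure_endpoint=os.getenv("AZURE_OPENAI_ENDPOINT"),  # assumed variable name
    api_key=os.getenv("AZURE_OPENAI_API_KEY"),  # assumed variable name
    api_version="2024-08-01-preview",  # assumed API version
)

max_tokens = 8000  # in the app this value comes from the st.slider above

chat_completion = client.chat.completions.create(
    model=os.getenv("AZURE_OPENAI_MODEL_DEPLOYMENT", "gpt-4o"),
    messages=[{"role": "user", "content": "Write a short two-host podcast script."}],
    temperature=0.7,
    max_tokens=max_tokens,  # caps the number of tokens generated in the reply
)
print(chat_completion.choices[0].message.content)
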
