Skip to content

Added MAUI usage example (Android) #1217

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 5 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -351,4 +351,5 @@ site/
**/appsettings.Local.json
/LLama/runtimes/deps
/LLama/runtimes/deps.zip
/LLama/runtimes/release_id.txt
/LLama/runtimes/release_id.txt
/Llama.Mobile/Resources/Raw/Llama-3.2-1B-Instruct-Q4_0.gguf
21 changes: 21 additions & 0 deletions LLamaSharp.sln
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,8 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "LLama.Experimental", "LLama
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "LLama.Benchmark", "LLama.Benchmark\LLama.Benchmark.csproj", "{90D38FEE-68EA-459E-A4EE-268B9DFA1CD5}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Llama.Mobile", "Llama.Mobile\Llama.Mobile.csproj", "{0E058BB0-83C6-4FBE-BC80-E8C5F7E29651}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
Expand Down Expand Up @@ -196,6 +198,25 @@ Global
{90D38FEE-68EA-459E-A4EE-268B9DFA1CD5}.Release|Arm64.Build.0 = Release|Any CPU
{90D38FEE-68EA-459E-A4EE-268B9DFA1CD5}.Release|x64.ActiveCfg = Release|Any CPU
{90D38FEE-68EA-459E-A4EE-268B9DFA1CD5}.Release|x64.Build.0 = Release|Any CPU
{0E058BB0-83C6-4FBE-BC80-E8C5F7E29651}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{0E058BB0-83C6-4FBE-BC80-E8C5F7E29651}.Debug|Any CPU.Build.0 = Debug|Any CPU
{0E058BB0-83C6-4FBE-BC80-E8C5F7E29651}.Debug|Any CPU.Deploy.0 = Debug|Any CPU
{0E058BB0-83C6-4FBE-BC80-E8C5F7E29651}.Debug|Arm64.ActiveCfg = Debug|Any CPU
{0E058BB0-83C6-4FBE-BC80-E8C5F7E29651}.Debug|Arm64.Build.0 = Debug|Any CPU
{0E058BB0-83C6-4FBE-BC80-E8C5F7E29651}.Debug|x64.ActiveCfg = Debug|Any CPU
{0E058BB0-83C6-4FBE-BC80-E8C5F7E29651}.Debug|x64.Build.0 = Debug|Any CPU
{0E058BB0-83C6-4FBE-BC80-E8C5F7E29651}.GPU|Any CPU.ActiveCfg = Debug|Any CPU
{0E058BB0-83C6-4FBE-BC80-E8C5F7E29651}.GPU|Any CPU.Build.0 = Debug|Any CPU
{0E058BB0-83C6-4FBE-BC80-E8C5F7E29651}.GPU|Arm64.ActiveCfg = Debug|Any CPU
{0E058BB0-83C6-4FBE-BC80-E8C5F7E29651}.GPU|Arm64.Build.0 = Debug|Any CPU
{0E058BB0-83C6-4FBE-BC80-E8C5F7E29651}.GPU|x64.ActiveCfg = Debug|Any CPU
{0E058BB0-83C6-4FBE-BC80-E8C5F7E29651}.GPU|x64.Build.0 = Debug|Any CPU
{0E058BB0-83C6-4FBE-BC80-E8C5F7E29651}.Release|Any CPU.ActiveCfg = Release|Any CPU
{0E058BB0-83C6-4FBE-BC80-E8C5F7E29651}.Release|Any CPU.Build.0 = Release|Any CPU
{0E058BB0-83C6-4FBE-BC80-E8C5F7E29651}.Release|Arm64.ActiveCfg = Release|Any CPU
{0E058BB0-83C6-4FBE-BC80-E8C5F7E29651}.Release|Arm64.Build.0 = Release|Any CPU
{0E058BB0-83C6-4FBE-BC80-E8C5F7E29651}.Release|x64.ActiveCfg = Release|Any CPU
{0E058BB0-83C6-4FBE-BC80-E8C5F7E29651}.Release|x64.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
Expand Down
61 changes: 59 additions & 2 deletions Llama.Mobile/Llama.Mobile.csproj
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@

<PropertyGroup>
<TargetFrameworks>net8.0-android</TargetFrameworks>

<!--Temporarily Disable iOS and MacCatalyst until native lib support is added-->
<!--<TargetFrameworks>$(TargetFrameworks);net8.0-ios;net8.0-maccatalyst</TargetFrameworks>-->

Expand Down Expand Up @@ -51,7 +51,64 @@
<SupportedOSPlatformVersion Condition="$([MSBuild]::GetTargetPlatformIdentifier('$(TargetFramework)')) == 'tizen'">6.5</SupportedOSPlatformVersion>
</PropertyGroup>

<Target Name="EnsureFolders">
<!-- Make sure Resources\Raw exists before any model file is downloaded/moved into it. -->
<MakeDir Directories="Resources\Raw" Condition="!Exists('Resources\Raw')" />
</Target>

<!-- Download a single file:
- Computes the full target file name (DesiredFile).
- If DesiredFile already exists, the download is skipped.
- Otherwise, creates a temporary folder (TempDownload),
downloads the file there using DownloadFile, and then moves it
to DesiredFile. Finally, cleans up the temporary folder. -->
<Target Name="DownloadSingleFile" DependsOnTargets="EnsureFolders">
  <!-- SourceUrl, DestinationFolder and LocalFileName arrive as global properties
       from the <MSBuild> invocation in DownloadAllFiles. -->
  <PropertyGroup>
    <DesiredFile>$([System.IO.Path]::Combine($(DestinationFolder), $(LocalFileName)))</DesiredFile>
  </PropertyGroup>

  <Message Text="Processing file: $(DesiredFile)" Importance="high" />

  <!-- Compute once whether a download is required (file not already present). -->
  <PropertyGroup>
    <DownloadNeeded Condition="!Exists('$(DesiredFile)')">true</DownloadNeeded>
    <DownloadNeeded Condition="Exists('$(DesiredFile)')">false</DownloadNeeded>
  </PropertyGroup>
  <Message Text="Download needed: $(DownloadNeeded)" Importance="high" />

  <!-- If the file is already present, report it and perform no work. -->
  <Message Text="File $(DesiredFile) already exists; skipping download." Importance="high" Condition=" '$(DownloadNeeded)'=='false' " />

  <!-- Download into a temporary folder; the DownloadFile task names the file
       after the URL, so it is moved/renamed to DesiredFile afterwards. -->
  <DownloadFile SourceUrl="$(SourceUrl)" DestinationFolder="TempDownload" SkipUnchangedFiles="true" Condition=" '$(DownloadNeeded)'=='true' " />

  <!-- Collect whatever landed in TempDownload. NOTE(review): this assumes the
       folder contains exactly one file; refine if multiple files are possible. -->
  <ItemGroup Condition=" '$(DownloadNeeded)'=='true' ">
    <TempFile Include="TempDownload/*.*" />
  </ItemGroup>
  <Message Text="Downloaded file (temp): @(TempFile)" Importance="high" Condition=" '$(DownloadNeeded)'=='true' " />
  <!-- Item-list references in conditions must be quoted ('@(TempFile)'), otherwise
       MSBuild rejects or misparses the expression. -->
  <Move SourceFiles="@(TempFile)" DestinationFiles="$(DesiredFile)" Condition=" '$(DownloadNeeded)'=='true' and '@(TempFile)' != '' " />
  <Message Text="Renamed downloaded file to $(DesiredFile)" Importance="high" Condition=" '$(DownloadNeeded)'=='true' and '@(TempFile)' != '' " />

  <!-- Clean up the temporary download folder in every case. -->
  <RemoveDir Directories="TempDownload" Condition="Exists('TempDownload')" />
</Target>

<!-- Main target to process each file by calling the DownloadSingleFile target for each item.
The MSBuild task will batch over the DownloadFileItem items, passing in each file’s metadata. -->
<Target Name="DownloadAllFiles" BeforeTargets="DispatchToInnerBuilds;BeforeBuild">
<!-- Batches over DownloadFileItem: one DownloadSingleFile invocation per item.
     NOTE(review): "TargetFramework=once" appears to give every invocation the same
     distinct global-property set so a multi-targeted build runs the download only
     once instead of per-TFM - TODO confirm. -->
<MSBuild Projects="$(MSBuildProjectFile)" Targets="DownloadSingleFile" Properties="SourceUrl=%(DownloadFileItem.SourceUrl);DestinationFolder=%(DownloadFileItem.DestinationFolder);LocalFileName=%(DownloadFileItem.LocalFileName);TargetFramework=once" />
</Target>

<ItemGroup>
<DownloadFileItem Include="Llama-3.2-1B-Instruct-Q4_0">
<SourceUrl>https://huggingface.co/bartowski/Llama-3.2-1B-Instruct-GGUF/resolve/main/Llama-3.2-1B-Instruct-Q4_0.gguf</SourceUrl>
<DestinationFolder>Resources/Raw</DestinationFolder>
<LocalFileName>Llama-3.2-1B-Instruct-Q4_0.gguf</LocalFileName>
</DownloadFileItem>

<!-- App Icon -->
<MauiIcon Include="Resources\AppIcon\appicon.svg" ForegroundFile="Resources\AppIcon\appiconfg.svg" Color="#512BD4" />

Expand All @@ -76,7 +133,7 @@
</ItemGroup>

<ItemGroup>
<ProjectReference Include="..\LLama\LLamaSharp.csproj" />
<ProjectReference Include="..\LLama\LLamaSharp.csproj" />
</ItemGroup>

</Project>
116 changes: 106 additions & 10 deletions Llama.Mobile/MainPage.xaml
Original file line number Diff line number Diff line change
@@ -1,14 +1,110 @@
<?xml version="1.0" encoding="utf-8" ?>

<ContentPage xmlns="http://schemas.microsoft.com/dotnet/2021/maui"
xmlns:x="http://schemas.microsoft.com/winfx/2009/xaml"
x:Class="Llama.Mobile.MainPage">
<Label
x:Name="label1"
Text="Hello, World!"
Style="{StaticResource Headline}"
HorizontalOptions="Center"
VerticalOptions="Center"
SemanticProperties.HeadingLevel="Level1" />
<ContentPage xmlns="http://schemas.microsoft.com/dotnet/2021/maui"
xmlns:x="http://schemas.microsoft.com/winfx/2009/xaml"
x:Class="Llama.Mobile.MainPage"
Title="Bob, the AI assistant"
xmlns:src="clr-namespace:Llama.Mobile.Src"
>

<ContentPage.Resources>
<src:InverseBooleanConverter x:Key="InverseBooleanConverter"/>

<DataTemplate x:Key="UserMessageTemplate">
<Grid ColumnDefinitions="*,auto">
<Frame BackgroundColor="LightBlue"
Padding="10"
CornerRadius="16"
HasShadow="False"
Margin="40,4,4,4"
HorizontalOptions="End"
BorderColor="#B0C4DE">
<Label Text="{Binding Text}" TextColor="Black"/>
</Frame>
<BoxView Grid.Column="1"
WidthRequest="10"
HeightRequest="10"
BackgroundColor="LightBlue"
CornerRadius="10,10,10,10"
Margin="0,0,0,8"
VerticalOptions="End"
HorizontalOptions="End"/>
</Grid>
</DataTemplate>
<DataTemplate x:Key="OtherMessageTemplate">
<Grid ColumnDefinitions="auto,*">
<BoxView Grid.Column="0"
WidthRequest="10"
HeightRequest="10"
BackgroundColor="LightGray"
CornerRadius="10,10,10,10"
Margin="0,0,0,8"
VerticalOptions="End"
HorizontalOptions="Start"/>
<Frame Grid.Column="1"
BackgroundColor="LightGray"
Padding="10"
CornerRadius="16"
HasShadow="False"
Margin="4,4,40,4"
HorizontalOptions="Start"
BorderColor="#D3D3D3">
<Grid>
<Label Text="{Binding Text}" TextColor="Black" IsVisible="{Binding IsPreparing, Converter={StaticResource InverseBooleanConverter}}" />
<ActivityIndicator IsRunning="True" IsVisible="{Binding IsPreparing}" WidthRequest="20" HeightRequest="20" />
</Grid>
</Frame>
</Grid>
</DataTemplate>

<src:ChatMessageTemplateSelector x:Key="ChatTemplateSelector"
UserTemplate="{StaticResource UserMessageTemplate}"
OtherTemplate="{StaticResource OtherMessageTemplate}" />
</ContentPage.Resources>

<Grid RowDefinitions="Auto,*,Auto" Margin="10,0,10,0">
<HorizontalStackLayout Grid.Row="0" x:Name="pnl_loading">
<Label
Text="Loading the model"
VerticalOptions="Center"
HorizontalOptions="StartAndExpand"/>
<ActivityIndicator
Margin="5,0,0,0"
IsRunning="True"
IsVisible="True"
WidthRequest="25"
HeightRequest="25"
VerticalOptions="Center"
HorizontalOptions="EndAndExpand"/>
</HorizontalStackLayout>

<CollectionView
ItemsSource="{Binding Messages}"
ItemTemplate="{StaticResource ChatTemplateSelector}"
x:Name="chat"
Grid.Row="1"
VerticalOptions="FillAndExpand">
<CollectionView.ItemsLayout>
<LinearItemsLayout Orientation="Vertical"/>
</CollectionView.ItemsLayout>
</CollectionView>

<Grid Grid.Row="2" ColumnDefinitions="*,Auto" Margin="0,15,0,5">
<Entry
x:Name="tx_userPrompt"
Placeholder="Ask to the model"
Margin="0,0,10,0"
Grid.Column="0"/>

<Button
x:Name="btn_ask"
Text="Ask"
Clicked="OnAskClicked"
IsEnabled="False"
Grid.Column="1"/>

</Grid>

</Grid>

</ContentPage>
111 changes: 107 additions & 4 deletions Llama.Mobile/MainPage.xaml.cs
Original file line number Diff line number Diff line change
@@ -1,16 +1,119 @@
namespace Llama.Mobile;

using Android.Icu.Text;
using Java.Lang;
using Javax.Annotation;
using Llama.Mobile.Src;
using LLama;
using LLama.Common;
using LLama.Native;
using LLama.Sampling;
using System.Collections.ObjectModel;
using System.Text;
using System.Xml.Linq;
using Xamarin.Google.Crypto.Tink.Subtle;
using Xamarin.KotlinX.Coroutines;
using static System.Net.Mime.MediaTypeNames;
using StringBuilder = System.Text.StringBuilder;

public partial class MainPage : ContentPage
{
public MainPage()

public ObservableCollection<Message> Messages { get; } = new();

//Put the gguf model in the directory Resources/Raw and write its file name in the following string
private const string modelName = "Llama-3.2-1B-Instruct-Q4_0.gguf";

private ChatSession? _session;
ChatSession Session
{
get
{
if (_session is null) throw new NullReferenceException("_session can't be null");
return _session;
}

set
{
_session = value;
}
}

private static InferenceParams InferenceParams = new InferenceParams()
{
MaxTokens = 256, // No more than 256 tokens should appear in answer. Remove it if antiprompt is enough for control.
AntiPrompts = new List<string> { "User:" }, // Stop generation once antiprompts appear.

SamplingPipeline = new DefaultSamplingPipeline(),

};

public MainPage()
{
InitializeComponent();
chat.BindingContext = this;
}
protected override async void OnAppearing()
{
base.OnAppearing();
string modelPath = Path.Combine(FileSystem.Current.AppDataDirectory, modelName);

if (!File.Exists(modelPath))
{
//get the data stream of the model stored in the apk
using Stream inputStream = await FileSystem.Current.OpenAppPackageFileAsync(modelName);

//copy the data from the inputStream into a new file, with the same name, in the app data directory
using FileStream outputStream = File.Create(modelPath);
await inputStream.CopyToAsync(outputStream);
outputStream.Close();
inputStream.Close();
}


var parameters = new ModelParams(modelPath)
{
ContextSize = 1024, // The longest length of chat as memory.
GpuLayerCount = 5 // How many layers to offload to GPU. Please adjust it according to your GPU memory.
};
var model = LLamaWeights.LoadFromFile(parameters);
var context = model.CreateContext(parameters);
var executor = new InteractiveExecutor(context);

// Add chat histories as prompt to tell AI how to act.
var chatHistory = new ChatHistory();
chatHistory.AddMessage(AuthorRole.System, "Transcript of a dialog, where the User interacts with an Assistant named Bob. Bob is helpful, kind, honest, good at writing, and never fails to answer the User's requests immediately and with precision.");
chatHistory.AddMessage(AuthorRole.User, "Hello, Bob.");
chatHistory.AddMessage(AuthorRole.Assistant, "Hello. How may I help you today?");

//Load the native library
NativeApi.llama_empty_call();
Session = new(executor, chatHistory);
Session.WithOutputTransform(new LLamaTransforms.KeywordTextOutputStreamTransform(
new string[] { "\n\nUser:", "\nUser:", "User:", "Assistant: " },
redundancyLength: 8));

label1.Text = "llama.cpp loaded successfully";
pnl_loading.IsVisible = false;
btn_ask.IsEnabled = true;

await Task.Delay(100); //on the emulator without this little delay the popup isn't shown

await DisplayAlert("Loaded", "model correctly Loaded", "OK");
}

private async void OnAskClicked(object sender, EventArgs e)
{
btn_ask.IsEnabled = false;
Messages.Add(new Message { Type = messageType.User, Text = tx_userPrompt.Text, IsPreparing = false });
string userPrompt = tx_userPrompt.Text;
tx_userPrompt.Text="";
Message response = new Message { Type = messageType.other, Text = "", IsPreparing = true };
Messages.Add(response);
chat.ScrollTo(Messages.Last(), position: ScrollToPosition.End, animate: false);
await foreach (string text in Session.ChatAsync(new ChatHistory.Message(AuthorRole.User, userPrompt), InferenceParams))
{
response.IsPreparing = false;
response.AppendText(text);
chat.ScrollTo(Messages.Last(), position: ScrollToPosition.End, animate: false);
}
btn_ask.IsEnabled = true;
}
}
16 changes: 16 additions & 0 deletions Llama.Mobile/Src/ChatMessageTemplateSelector.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
using Microsoft.Maui.Controls;
namespace Llama.Mobile.Src
{
    /// <summary>
    /// Picks the chat-bubble template for an item in the conversation list:
    /// <see cref="UserTemplate"/> for messages typed by the user,
    /// <see cref="OtherTemplate"/> for everything else (i.e. model responses).
    /// </summary>
    public class ChatMessageTemplateSelector : DataTemplateSelector
    {
        /// <summary>Template applied to messages whose Type is messageType.User.</summary>
        public DataTemplate UserTemplate { get; set; }

        /// <summary>Template applied to all other messages.</summary>
        public DataTemplate OtherTemplate { get; set; }

        /// <inheritdoc/>
        protected override DataTemplate OnSelectTemplate(object item, BindableObject container)
        {
            if (((Message)item).Type == messageType.User)
            {
                return UserTemplate;
            }

            return OtherTemplate;
        }
    }
}
Loading