Skip to content

Commit 3534129

Browse files
committed
Merge branch 'master' of https://github.com/SciSharp/LLamaSharp
2 parents 9d091c0 + 156f369 commit 3534129

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

46 files changed

+1871
-374
lines changed

.github/workflows/compile.yml

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -367,13 +367,22 @@ jobs:
367367
cp artifacts/llava-bin-osx-x64.dylib/libllava_shared.dylib deps/osx-x64/libllava_shared.dylib
368368
369369
cp artifacts/llama-bin-win-cublas-cu11.7.1-x64.dll/llama.dll deps/cu11.7.1/llama.dll
370+
cp artifacts/llava-bin-win-cublas-cu11.7.1-x64.dll/llava_shared.dll deps/cu11.7.1/llava_shared.dll
371+
370372
cp artifacts/llama-bin-linux-cublas-cu11.7.1-x64.so/libllama.so deps/cu11.7.1/libllama.so
373+
cp artifacts/llava-bin-linux-cublas-cu11.7.1-x64.so/libllava_shared.so deps/cu11.7.1/libllava_shared.so
374+
371375
cp artifacts/llama-bin-win-cublas-cu12.1.0-x64.dll/llama.dll deps/cu12.1.0/llama.dll
376+
cp artifacts/llava-bin-win-cublas-cu12.1.0-x64.dll/llava_shared.dll deps/cu12.1.0/llava_shared.dll
377+
372378
cp artifacts/llama-bin-linux-cublas-cu12.1.0-x64.so/libllama.so deps/cu12.1.0/libllama.so
379+
cp artifacts/llava-bin-linux-cublas-cu12.1.0-x64.so/libllava_shared.so deps/cu12.1.0/libllava_shared.so
373380
374381
cp artifacts/llama-bin-win-clblast-x64.dll/{llama,clblast}.dll deps/clblast/
382+
375383
cp artifacts/llama-bin-linux-clblast-x64.so/libllama.so deps/clblast/
376384
385+
377386
- name: Upload artifacts
378387
uses: actions/upload-artifact@v3
379388
with:

LLama.Examples/ExampleRunner.cs

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,10 +8,12 @@ public class ExampleRunner
88
{ "Chat Session: History", ChatSessionWithHistory.Run },
99
{ "Chat Session: Role names", ChatSessionWithRoleName.Run },
1010
{ "Chat Session: Role names stripped", ChatSessionStripRoleName.Run },
11+
{ "Chat Session: Pre-processing and reset", ChatSessionWithRestart.Run },
1112
{ "Chat Session: Coding Assistant", CodingAssistant.Run },
1213
{ "Chat Session: Automatic conversation", TalkToYourself.Run },
1314
{ "Chat Session: Chinese characters", ChatChineseGB2312.Run },
1415
{ "Executor: Interactive mode chat", InteractiveModeExecute.Run },
16+
{ "Executor: Llava Interactive mode chat", LlavaInteractiveModeExecute.Run },
1517
{ "Executor: Instruct mode chat", InstructModeExecute.Run },
1618
{ "Executor: Stateless mode chat", StatelessModeExecute.Run },
1719
{ "Save and Load: chat session", SaveAndLoadSession.Run },

LLama.Examples/Examples/BatchedExecutorFork.cs

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -31,7 +31,8 @@ public static async Task Run()
3131
Console.WriteLine($"Created executor with model: {name}");
3232

3333
// Evaluate the initial prompt to create one conversation
34-
using var start = executor.Prompt(prompt);
34+
using var start = executor.Create();
35+
start.Prompt(prompt);
3536
await executor.Infer();
3637

3738
// Create the root node of the tree
@@ -132,7 +133,7 @@ public void Display<T>(T tree, int depth = 0)
132133
var colors = new[] { "red", "green", "blue", "yellow", "white" };
133134
var color = colors[depth % colors.Length];
134135

135-
var message = _decoder.Read().ReplaceLineEndings("");
136+
var message = Markup.Escape(_decoder.Read().ReplaceLineEndings(""));
136137

137138
var n = tree.AddNode($"[{color}]{message}[/]");
138139

LLama.Examples/Examples/BatchedExecutorGuidance.cs

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -33,8 +33,10 @@ public static async Task Run()
3333
Console.WriteLine($"Created executor with model: {name}");
3434

3535
// Load the two prompts into two conversations
36-
using var guided = executor.Prompt(positivePrompt);
37-
using var guidance = executor.Prompt(negativePrompt);
36+
using var guided = executor.Create();
37+
guided.Prompt(positivePrompt);
38+
using var guidance = executor.Create();
39+
guidance.Prompt(negativePrompt);
3840

3941
// Run inference to evaluate prompts
4042
await AnsiConsole

LLama.Examples/Examples/BatchedExecutorRewind.cs

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,8 @@ public static async Task Run()
3232
Console.WriteLine($"Created executor with model: {name}");
3333

3434
// Evaluate the initial prompt to create one conversation
35-
using var conversation = executor.Prompt(prompt);
35+
using var conversation = executor.Create();
36+
conversation.Prompt(prompt);
3637

3738
// Create the start node wrapping the conversation
3839
var node = new Node(executor.Context);

LLama.Examples/Examples/ChatSessionWithHistory.cs

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -48,19 +48,31 @@ public static async Task Run()
4848

4949
Console.ForegroundColor = ConsoleColor.Yellow;
5050
Console.WriteLine("The chat session has started.");
51+
Console.WriteLine("Type 'exit' to end the chat session.");
52+
Console.WriteLine("Type 'save' to save the chat session to disk.");
53+
Console.WriteLine("Type 'load' to load the chat session from disk.");
54+
Console.WriteLine("Type 'regenerate' to regenerate the last response.");
5155

5256
// show the prompt
5357
Console.ForegroundColor = ConsoleColor.Green;
5458
string userInput = Console.ReadLine() ?? "";
5559

5660
while (userInput != "exit")
5761
{
62+
// Save the chat state to disk
5863
if (userInput == "save")
5964
{
6065
session.SaveSession("Assets/chat-with-bob");
6166
Console.ForegroundColor = ConsoleColor.Yellow;
6267
Console.WriteLine("Session saved.");
6368
}
69+
// Load the chat state from disk
70+
else if (userInput == "load")
71+
{
72+
session.LoadSession("Assets/chat-with-bob");
73+
Console.ForegroundColor = ConsoleColor.Yellow;
74+
Console.WriteLine("Session loaded.");
75+
}
6476
else if (userInput == "regenerate")
6577
{
6678
Console.ForegroundColor = ConsoleColor.Yellow;
Lines changed: 107 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,107 @@
1+
using LLama.Common;
2+
3+
namespace LLama.Examples.Examples;
4+
5+
public class ChatSessionWithRestart
6+
{
7+
public static async Task Run()
8+
{
9+
string modelPath = UserSettings.GetModelPath();
10+
11+
var parameters = new ModelParams(modelPath)
12+
{
13+
ContextSize = 1024,
14+
Seed = 1337,
15+
GpuLayerCount = 5
16+
};
17+
using var model = LLamaWeights.LoadFromFile(parameters);
18+
using var context = model.CreateContext(parameters);
19+
var executor = new InteractiveExecutor(context);
20+
21+
var chatHistoryJson = File.ReadAllText("Assets/chat-with-bob.json");
22+
ChatHistory chatHistory = ChatHistory.FromJson(chatHistoryJson) ?? new ChatHistory();
23+
ChatSession prototypeSession =
24+
await ChatSession.InitializeSessionFromHistoryAsync(executor, chatHistory);
25+
prototypeSession.WithOutputTransform(new LLamaTransforms.KeywordTextOutputStreamTransform(
26+
new string[] { "User:", "Assistant:" },
27+
redundancyLength: 8));
28+
var resetState = prototypeSession.GetSessionState();
29+
30+
ChatSession session = new ChatSession(executor);
31+
session.LoadSession(resetState);
32+
33+
InferenceParams inferenceParams = new InferenceParams()
34+
{
35+
Temperature = 0.9f,
36+
AntiPrompts = new List<string> { "User:" }
37+
};
38+
39+
Console.ForegroundColor = ConsoleColor.Yellow;
40+
Console.WriteLine("The chat session has started. Starting point saved.");
41+
Console.WriteLine("Type 'exit' to end the chat session.");
42+
Console.WriteLine("Type 'save' to save chat session state in memory.");
43+
Console.WriteLine("Type 'reset' to reset the chat session to its saved state.");
44+
Console.WriteLine("Type 'answer for assistant' to add and process provided user and assistant messages.");
45+
46+
// show the prompt
47+
Console.ForegroundColor = ConsoleColor.Green;
48+
string userInput = Console.ReadLine() ?? "";
49+
50+
while (userInput != "exit")
51+
{
52+
// Load the session state from the reset state
53+
if(userInput == "reset")
54+
{
55+
session.LoadSession(resetState);
56+
Console.WriteLine($"Reset to history:\n{session.HistoryTransform.HistoryToText(session.History)}");
57+
Console.ForegroundColor = ConsoleColor.Yellow;
58+
Console.WriteLine("Session reset.");
59+
}
60+
// Assign new reset state.
61+
else if (userInput == "save")
62+
{
63+
resetState = session.GetSessionState();
64+
Console.ForegroundColor = ConsoleColor.Yellow;
65+
Console.WriteLine("Session saved.");
66+
}
67+
// Provide user and override assistant answer with your own.
68+
else if (userInput == "answer for assistant")
69+
{
70+
Console.ForegroundColor = ConsoleColor.Yellow;
71+
Console.WriteLine("Provide user input: ");
72+
73+
Console.ForegroundColor = ConsoleColor.Green;
74+
string userInputOverride = Console.ReadLine() ?? "";
75+
76+
Console.ForegroundColor = ConsoleColor.Yellow;
77+
Console.WriteLine("Provide assistant input: ");
78+
79+
Console.ForegroundColor = ConsoleColor.Green;
80+
string assistantInputOverride = Console.ReadLine() ?? "";
81+
82+
await session.AddAndProcessUserMessage(userInputOverride);
83+
await session.AddAndProcessAssistantMessage(assistantInputOverride);
84+
85+
Console.ForegroundColor = ConsoleColor.Yellow;
86+
Console.WriteLine("User and assistant messages processed. Provide next user message:");
87+
}
88+
else
89+
{
90+
await foreach (
91+
var text
92+
in session.ChatAsync(
93+
new ChatHistory.Message(AuthorRole.User, userInput),
94+
inferenceParams))
95+
{
96+
Console.ForegroundColor = ConsoleColor.White;
97+
Console.Write(text);
98+
}
99+
}
100+
101+
Console.ForegroundColor = ConsoleColor.Green;
102+
userInput = Console.ReadLine() ?? "";
103+
104+
Console.ForegroundColor = ConsoleColor.White;
105+
}
106+
}
107+
}
Lines changed: 121 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,121 @@
using System.Text.RegularExpressions;
using LLama.Batched;
using LLama.Common;
using Spectre.Console;

namespace LLama.Examples.Examples
{
    /// <summary>
    /// Interactive llava (multi-modal) chat example. Image files are referenced
    /// in the prompt by wrapping their path in curly braces, e.g. {c:/image.jpg}.
    /// </summary>
    public class LlavaInteractiveModeExecute
    {
        public static async Task Run()
        {
            string multiModalProj = UserSettings.GetMMProjPath();
            string modelPath = UserSettings.GetModelPath();
            string modelImage = UserSettings.GetImagePath();
            const int maxTokens = 1024;

            var prompt = $"{{{modelImage}}}\nUSER:\nProvide a full description of the image.\nASSISTANT:\n";

            var parameters = new ModelParams(modelPath)
            {
                ContextSize = 4096,
                Seed = 1337,
            };
            using var model = LLamaWeights.LoadFromFile(parameters);
            using var context = model.CreateContext(parameters);

            // Llava Init
            using var clipModel = LLavaWeights.LoadFromFile(multiModalProj);

            var ex = new InteractiveExecutor(context, clipModel);

            Console.ForegroundColor = ConsoleColor.Yellow;
            Console.WriteLine("The executor has been enabled. In this example, the prompt is printed, the maximum tokens is set to {0} and the context size is {1}.", maxTokens, parameters.ContextSize);
            Console.WriteLine("To send an image, enter its filename in curly braces, like this {c:/image.jpg}.");

            var inferenceParams = new InferenceParams() { Temperature = 0.1f, AntiPrompts = new List<string> { "\nUSER:" }, MaxTokens = maxTokens };

            do
            {
                // Evaluate if we have images.
                // Run the tag regex once and reuse the matches (the original
                // scanned the prompt three times with the same pattern).
                var imageMatches = Regex.Matches(prompt, "{([^}]*)}");
                var imageCount = imageMatches.Count;
                var hasImages = imageCount > 0;

                if (hasImages)
                {
                    var imagePathsWithCurlyBraces = imageMatches.Select(m => m.Value).ToList();
                    var imagePaths = imageMatches.Select(m => m.Groups[1].Value).ToList();

                    byte[][] imageBytes;
                    try
                    {
                        imageBytes = imagePaths.Select(File.ReadAllBytes).ToArray();
                    }
                    catch (IOException exception)
                    {
                        Console.ForegroundColor = ConsoleColor.Red;
                        Console.Write(
                            $"Could not load your {(imageCount == 1 ? "image" : "images")}:");
                        Console.Write($"{exception.Message}");
                        Console.ForegroundColor = ConsoleColor.Yellow;
                        Console.WriteLine("Please try again.");
                        break;
                    }

                    int index = 0;
                    foreach (var path in imagePathsWithCurlyBraces)
                    {
                        // First image replace to tag <image, the rest of the images delete the tag
                        if (index++ == 0)
                            prompt = prompt.Replace(path, "<image>");
                        else
                            prompt = prompt.Replace(path, "");
                    }

                    Console.ForegroundColor = ConsoleColor.Yellow;
                    Console.WriteLine($"Here are the images, that are sent to the chat model in addition to your message.");
                    Console.WriteLine();

                    // imageBytes is guaranteed non-null here (assigned in the try above).
                    foreach (var consoleImage in imageBytes.Select(bytes => new CanvasImage(bytes)))
                    {
                        consoleImage.MaxWidth = 50;
                        AnsiConsole.Write(consoleImage);
                    }

                    Console.WriteLine();
                    Console.ForegroundColor = ConsoleColor.Yellow;
                    Console.WriteLine($"The images were scaled down for the console only, the model gets full versions.");
                    Console.WriteLine($"Write /exit or press Ctrl+c to return to main menu.");
                    Console.WriteLine();

                    // Initialize Images in executor
                    //
                    ex.ImagePaths = imagePaths.ToList();
                }

                Console.ForegroundColor = Color.White;
                await foreach (var text in ex.InferAsync(prompt, inferenceParams))
                {
                    Console.Write(text);
                }
                Console.Write(" ");
                Console.ForegroundColor = ConsoleColor.Green;
                // Console.ReadLine() returns null on end-of-stream; coalesce to ""
                // so the /exit comparison below cannot throw.
                prompt = Console.ReadLine() ?? "";
                Console.WriteLine();

                // let the user finish with exit
                //
                if (prompt.Equals("/exit", StringComparison.OrdinalIgnoreCase))
                    break;

            }
            while (true);
        }
    }
}

LLama.Examples/Examples/StatelessModeExecute.cs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@ public static async Task Run()
2121
Console.ForegroundColor = ConsoleColor.Yellow;
2222
Console.WriteLine("The executor has been enabled. In this example, the inference is an one-time job. That says, the previous input and response has " +
2323
"no impact on the current response. Now you can ask it questions. Note that in this example, no prompt was set for LLM and the maximum response tokens is 50. " +
24-
"It may not perform well because of lack of prompt. This is also an example that could indicate the improtance of prompt in LLM. To improve it, you can add " +
24+
"It may not perform well because of lack of prompt. This is also an example that could indicate the importance of prompt in LLM. To improve it, you can add " +
2525
"a prompt for it yourself!");
2626
Console.ForegroundColor = ConsoleColor.White;
2727

LLama.Examples/LLama.Examples.csproj

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -10,15 +10,16 @@
1010
<IncludeBuiltInRuntimes>true</IncludeBuiltInRuntimes>
1111
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
1212
<LangVersion>12</LangVersion>
13-
<NoWarn>1701;1702;8604;SKEXP0001;SKEXP0052;SKEXP0003</NoWarn>
13+
<NoWarn>1701;1702;8604;SKEXP0001;SKEXP0050;SKEXP0052;SKEXP0003</NoWarn>
1414
</PropertyGroup>
1515

1616
<ItemGroup>
1717
<PackageReference Include="Microsoft.Extensions.Logging.Console" Version="8.0.0" />
18-
<PackageReference Include="Microsoft.KernelMemory.Core" Version="0.29.240219.2" />
19-
<PackageReference Include="Microsoft.SemanticKernel" Version="1.5.0" />
20-
<PackageReference Include="Microsoft.SemanticKernel.Plugins.Memory" Version="1.1.0-alpha" />
18+
<PackageReference Include="Microsoft.KernelMemory.Core" Version="0.34.240313.1" />
19+
<PackageReference Include="Microsoft.SemanticKernel" Version="1.6.2" />
20+
<PackageReference Include="Microsoft.SemanticKernel.Plugins.Memory" Version="1.6.2-alpha" />
2121
<PackageReference Include="Spectre.Console" Version="0.48.0" />
22+
<PackageReference Include="Spectre.Console.ImageSharp" Version="0.48.0" />
2223
</ItemGroup>
2324

2425
<ItemGroup>

LLama.Examples/Program.cs

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -16,11 +16,14 @@ __ __ ____ __
1616
1717
""");
1818

19+
// Configure native library to use
1920
NativeLibraryConfig
2021
.Instance
2122
.WithCuda()
22-
.WithLogs(LLamaLogLevel.Warning);
23+
.WithLogs(LLamaLogLevel.Info);
2324

25+
// Calling this method forces loading to occur now.
2426
NativeApi.llama_empty_call();
2527

26-
await ExampleRunner.Run();
28+
await ExampleRunner.Run();
29+

0 commit comments

Comments
 (0)