2 changes: 1 addition & 1 deletion LLama.Examples/Examples/LlavaInteractiveModeExecute.cs
@@ -93,7 +93,7 @@ public static async Task Run()
Console.WriteLine($"Here are the images, that are sent to the chat model in addition to your message.");
Console.WriteLine();

- foreach (var consoleImage in imageBytes?.Select(bytes => new CanvasImage(bytes)))
+ foreach (var consoleImage in imageBytes?.Select(bytes => new CanvasImage(bytes)) ?? Array.Empty<CanvasImage>())
{
consoleImage.MaxWidth = 50;
AnsiConsole.Write(consoleImage);
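A quick note on the idiom in this hunk, since it is easy to misread: `imageBytes?.Select(...)` evaluates to null when `imageBytes` is null, and `foreach` over a null sequence throws a `NullReferenceException` at runtime even though it compiles cleanly. Coalescing to an empty collection turns the loop into a no-op instead. A minimal self-contained sketch of the same pattern (all names here are invented for illustration):

```csharp
using System;
using System.Linq;

class NullCoalescingLoopDemo
{
    static void Main()
    {
        byte[][]? imageBytes = null; // e.g. the user attached no images

        // Without "?? Enumerable.Empty<int>()" this foreach would throw
        // a NullReferenceException, because the null-conditional Select
        // yields null rather than an empty sequence.
        foreach (var length in imageBytes?.Select(b => b.Length) ?? Enumerable.Empty<int>())
            Console.WriteLine(length);

        Console.WriteLine("Loop completed safely with no images.");
    }
}
```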
14 changes: 7 additions & 7 deletions LLama.Examples/Examples/SemanticKernelHomeAutomation.cs
@@ -68,7 +68,7 @@ public static async Task Run()
}
}

- class Worker(
+ internal class Worker(
IHostApplicationLifetime hostApplicationLifetime,
[FromKeyedServices("HomeAutomationKernel")] Kernel kernel) : BackgroundService
{
@@ -92,7 +92,7 @@ protected override async Task ExecuteAsync(CancellationToken stoppingToken)
TopP = 0.1f
};

- string? input = null;
+ string? input;

while ((input = Console.ReadLine()) != null)
{
@@ -123,19 +123,19 @@ [WHICH LIGHT IS ON]
ChatMessageContent chatResult = await chatCompletionService.GetChatMessageContentAsync(chatHistory, llamaSharpPromptExecutionSettings, _kernel, stoppingToken);

FunctionResult? fres = null;
- if (chatResult.Content.Contains("[TURN ON THE LIGHT]"))
+ if (chatResult.Content!.Contains("[TURN ON THE LIGHT]"))
{
- fres = await _kernel.InvokeAsync("OfficeLight", "TurnOn");
+ fres = await _kernel.InvokeAsync("OfficeLight", "TurnOn", cancellationToken: stoppingToken);
}
else if (chatResult.Content.Contains("[TURN OFF THE LIGHT]"))
{
- fres = await _kernel.InvokeAsync("OfficeLight", "TurnOff");
+ fres = await _kernel.InvokeAsync("OfficeLight", "TurnOff", cancellationToken: stoppingToken);
}

Console.ForegroundColor = ConsoleColor.Green;
if (fres != null || chatResult.Content.Contains("[WHICH LIGHT IS ON]"))
{
- fres = await _kernel.InvokeAsync("OfficeLight", "IsTurnedOn");
+ fres = await _kernel.InvokeAsync("OfficeLight", "IsTurnedOn", cancellationToken: stoppingToken);
Console.Write($">>> Result:\n {(fres.GetValue<bool>()==true?"The light is ON.": "The light is OFF.")}\n\n> ");
}
else
@@ -154,7 +154,7 @@ [WHICH LIGHT IS ON]
/// Class that represents a controllable light.
/// </summary>
[Description("Represents a light")]
- class MyLightPlugin(bool turnedOn = false)
+ internal class MyLightPlugin(bool turnedOn = false)
{
private bool _turnedOn = turnedOn;

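The recurring change in this file threads the host's `stoppingToken` into each `Kernel.InvokeAsync` call, so in-flight plugin invocations are cancelled when the host shuts down rather than running to completion. A minimal sketch of the same idea with a plain `BackgroundService` (the `DoWorkAsync` helper is a hypothetical stand-in for `Kernel.InvokeAsync`):

```csharp
using System;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Hosting;

internal class CancellationAwareWorker : BackgroundService
{
    protected override async Task ExecuteAsync(CancellationToken stoppingToken)
    {
        while (!stoppingToken.IsCancellationRequested)
        {
            // Pass the token into every awaited call so a host shutdown
            // (Ctrl+C, SIGTERM) interrupts in-flight work promptly instead
            // of waiting for it to finish on its own.
            await DoWorkAsync(stoppingToken);
        }
    }

    // Hypothetical unit of work standing in for Kernel.InvokeAsync.
    private static Task DoWorkAsync(CancellationToken cancellationToken) =>
        Task.Delay(TimeSpan.FromSeconds(1), cancellationToken);
}
```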
1 change: 0 additions & 1 deletion LLama.Examples/Examples/SemanticKernelMemory.cs
@@ -15,7 +15,6 @@ public static async Task Run()
Console.WriteLine("This example is from: \n" +
"https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/KernelSyntaxExamples/Example14_SemanticMemory.cs");

- var seed = 1337u;
// Load weights into memory
var parameters = new ModelParams(modelPath)
{
1 change: 1 addition & 0 deletions LLama.Examples/LLama.Examples.csproj
@@ -19,6 +19,7 @@
<PackageReference Include="Microsoft.SemanticKernel" Version="1.29.0" />
<PackageReference Include="Microsoft.SemanticKernel.Plugins.Memory" Version="1.6.2-alpha" />
<PackageReference Include="NAudio" Version="2.2.1" />
<PackageReference Include="SixLabors.ImageSharp" Version="3.1.5" />
<PackageReference Include="Spectre.Console" Version="0.49.1" />
<PackageReference Include="Spectre.Console.ImageSharp" Version="0.49.1" />
<PackageReference Include="Whisper.net" Version="1.7.4" />
@@ -12,13 +12,13 @@ public class LLamaSharpChatCompletionTests

public LLamaSharpChatCompletionTests()
{
- this.mockStatelessExecutor = new Mock<ILLamaExecutor>();
+ mockStatelessExecutor = new Mock<ILLamaExecutor>();
}

private LLamaSharpChatCompletion CreateLLamaSharpChatCompletion()
{
return new LLamaSharpChatCompletion(
- this.mockStatelessExecutor.Object,
+ mockStatelessExecutor.Object,
null,
null,
null);
@@ -28,7 +28,7 @@ private LLamaSharpChatCompletion CreateLLamaSharpChatCompletion()
public async Task GetChatMessageContentsAsync_StateUnderTest_ExpectedBehavior()
{
// Arrange
- var unitUnderTest = this.CreateLLamaSharpChatCompletion();
+ var unitUnderTest = CreateLLamaSharpChatCompletion();
ChatHistory chatHistory = new ChatHistory();
PromptExecutionSettings? executionSettings = null;
Kernel? kernel = null;
@@ -51,7 +51,7 @@ public async Task GetChatMessageContentsAsync_StateUnderTest_ExpectedBehavior()
public async Task GetStreamingChatMessageContentsAsync_StateUnderTest_ExpectedBehavior()
{
// Arrange
- var unitUnderTest = this.CreateLLamaSharpChatCompletion();
+ var unitUnderTest = CreateLLamaSharpChatCompletion();
ChatHistory chatHistory = new ChatHistory();
PromptExecutionSettings? executionSettings = null;
Kernel? kernel = null;
2 changes: 1 addition & 1 deletion LLama.Web/Pages/Error.cshtml.cs
@@ -8,7 +8,7 @@ namespace LLama.Web.Pages
[IgnoreAntiforgeryToken]
public class ErrorModel : PageModel
{
- public string? RequestId { get; set; }
+ public string RequestId { get; set; }

public bool ShowRequestId => !string.IsNullOrEmpty(RequestId);

2 changes: 1 addition & 1 deletion LLama/ChatSession.cs
@@ -779,7 +779,7 @@ public static SessionState Load(string path)

return new SessionState(
contextState,
- executorState,
+ executorState!,
history,
inputTransforms.ToList(),
outputTransform,
8 changes: 4 additions & 4 deletions LLama/Common/ChatHistory.cs
@@ -64,8 +64,8 @@ public class Message
/// <param name="content">Message content</param>
public Message(AuthorRole authorRole, string content)
{
- this.AuthorRole = authorRole;
- this.Content = content;
+ AuthorRole = authorRole;
+ Content = content;
}
}

@@ -87,7 +87,7 @@ public ChatHistory() { }
/// <param name="messageHistory"></param>
public ChatHistory(Message[] messageHistory)
{
- this.Messages = messageHistory.ToList();
+ Messages = messageHistory.ToList();
}

/// <summary>
@@ -97,7 +97,7 @@ public ChatHistory(Message[] messageHistory)
/// <param name="content">Message content</param>
public void AddMessage(AuthorRole authorRole, string content)
{
- this.Messages.Add(new Message(authorRole, content));
+ Messages.Add(new Message(authorRole, content));
}

/// <summary>
2 changes: 1 addition & 1 deletion LLama/Common/PolymorphicJSONConverter.cs
@@ -45,7 +45,7 @@ internal class PolymorphicJSONConverter<T> : JsonConverter<T>
public override void Write(Utf8JsonWriter writer, T value, JsonSerializerOptions options)
{
writer.WriteStartObject();
writer.WriteString("Name", value.GetType().Name);
writer.WriteString("Name", value!.GetType().Name);
writer.WritePropertyName("Data");
JsonSerializer.Serialize(writer, value, value.GetType(), options);
writer.WriteEndObject();
2 changes: 2 additions & 0 deletions LLama/LLamaExecutorBase.cs
@@ -395,6 +395,7 @@ protected class InferStateArgs
public bool NeedToSaveSession { get; set; }
}

+ #pragma warning disable CS1591, CS8618 // Missing XML and irrelevant nullable warnings
[JsonConverter(typeof(PolymorphicJSONConverter<ExecutorBaseState>))]
public class ExecutorBaseState
{
@@ -431,5 +432,6 @@ public class ExecutorBaseState
[JsonPropertyName("mirostat_mu")]
public float? MirostatMu { get; set; }
}
+ #pragma warning restore
}
}
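For readers unfamiliar with the mechanism: a `#pragma warning disable` / `restore` pair suppresses the listed warnings only for the region between the two directives, which is why the PR brackets just the serialized-state class instead of silencing CS1591/CS8618 project-wide. A toy illustration with a different warning (CS0219, variable assigned but never used):

```csharp
class PragmaScopeDemo
{
    void Before()
    {
        var unused = 1; // CS0219 is reported here
    }

#pragma warning disable CS0219 // suppression begins
    void Inside()
    {
        var unused = 2; // no CS0219 in this region
    }
#pragma warning restore CS0219 // suppression ends

    void After()
    {
        var unused = 3; // CS0219 is reported again
    }
}
```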
22 changes: 10 additions & 12 deletions LLama/LLamaInstructExecutor.cs
@@ -25,8 +25,6 @@
private LLamaToken[] _inp_pfx;
private LLamaToken[] _inp_sfx;

- private ISamplingPipeline? _pipeline;
-
/// <summary>
///
/// </summary>
@@ -72,17 +70,17 @@
if(data is InstructExecutorState state)
{
_n_session_consumed = state.ConsumedSessionCount;
- _embed_inps = state.EmbedInps.ToList();
+ _embed_inps = state.EmbedInps!.ToList();
_is_prompt_run = state.IsPromptRun;
_consumedTokensCount = state.ConsumedTokensCount;
- _embeds = state.Embeds.ToList();
- _last_n_tokens = new FixedSizeQueue<LLamaToken>(state.LastTokensCapacity, state.LastTokens);
- _inp_pfx = state.InputPrefixTokens;
- _inp_sfx = state.InputSuffixTokens;
+ _embeds = state.Embeds!.ToList();
+ _last_n_tokens = new FixedSizeQueue<LLamaToken>(state.LastTokensCapacity, state.LastTokens!);
+ _inp_pfx = state.InputPrefixTokens!;
+ _inp_sfx = state.InputSuffixTokens!;
Comment on lines +73 to +79
I do have to say that you added a lot of suppressions just to silence the warnings, which might introduce some non-indicative NullReferenceExceptions. I believe the warnings should instead remind us to provide different behavior when the value actually is null, or at least to surface some information to the user (or a logger).

@Lyrcaxis (Contributor, Author), Mar 1, 2025
I may have misunderstood the whole "nullables" concept, but from what I understand, they exist so developers can double-check that they indeed handle a possible null value properly before building on it in a new system, and mark it with "!" once they have.

In reality, those values will NOT be null here in any of the current implementations the engine supports, and it doesn't support alternative state-loading paths in which they could be null.

Let me know if you have a different view on the nullables, or if you'd like to propose an alternative way of handling this. And if you want to try your hand at it, I'd be happy to learn how you'd handle it!

I’m thinking more long-term. If the implementation ever changes, or someone modifies the state-loading logic in the future, we might suddenly start seeing NullReferenceExceptions in unexpected places.

As for this specific comment's context: state.Embeds is of the non-nullable type LLamaToken[], so I don't think a suppression was even necessary. As for state.InputPrefixTokens, if it can't be null in the current project implementation, why not just change its type to non-nullable?

In general I agree with the in-house double-check approach you mentioned; I just hate to assume something will never happen. I made this comment because suppression was (almost) the only way this warning was handled across the PR, which made me wonder whether a "double check" was actually made. Thank you for your contribution.
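To make the reviewer's alternative concrete, here is a minimal sketch (not the PR's approach) of validating deserialized state up front and failing with a descriptive error rather than suppressing the warning with `!`. The property names mirror this diff; the guard helper itself is invented:

```csharp
using System.IO;

internal static class StateValidation
{
    // Returns the value if present; otherwise fails loudly with the field
    // name instead of deferring the crash to some later dereference.
    public static T RequireField<T>(T? value, string fieldName) where T : class
        => value ?? throw new InvalidDataException(
            $"Executor state is missing required field '{fieldName}'.");
}

// Hypothetical usage inside LoadState, mirroring the assignments above:
//   _embed_inps = StateValidation.RequireField(state.EmbedInps, nameof(state.EmbedInps)).ToList();
//   _inp_pfx    = StateValidation.RequireField(state.InputPrefixTokens, nameof(state.InputPrefixTokens));
```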

_n_matching_session_tokens = state.MatchingSessionTokensCount;
_pastTokensCount = state.PastTokensCount;
_pathSession = state.SessionFilePath;
- _session_tokens = state.SessionTokens.ToList();
+ _session_tokens = state.SessionTokens!.ToList();
}
else
{
@@ -107,7 +105,7 @@
using (var fs = new FileStream(filename, FileMode.Open, FileAccess.Read))
{
var state = await JsonSerializer.DeserializeAsync<InstructExecutorState>(fs);
- await LoadState(state);
+ await LoadState(state!);
}
}

@@ -156,11 +154,11 @@
}

/// <inheritdoc />
protected override async Task<(bool, IReadOnlyList<string>)> PostProcess(IInferenceParams inferenceParams, InferStateArgs args)

[GitHub Actions check warning on line 157 (Test: linux/windows/osx releases): "This async method lacks 'await' operators and will run synchronously. Consider using the 'await' operator to await non-blocking API calls, or 'await Task.Run(...)' to do CPU-bound work on a background thread."]
{
if (_embed_inps.Count <= _consumedTokensCount)
{
if (_last_n_tokens.TokensEndsWithAnyString(args.Antiprompts, Context.NativeHandle.ModelHandle, Context.Encoding))

[GitHub Actions check warning on line 161 (Test: linux/windows/osx releases): "'IReadOnlyListExtensions.TokensEndsWithAnyString<TTokens>(TTokens, IList<string>?, SafeLlamaModelHandle, Encoding)' is obsolete: 'Use an Antiprompt processor instead'"]
{
args.WaitForInput = true;
return (true, Array.Empty<string>());
@@ -224,7 +222,7 @@
if (!string.IsNullOrEmpty(_pathSession) && args.NeedToSaveSession)
{
args.NeedToSaveSession = false;
- SaveSessionFile(_pathSession);
+ SaveSessionFile(_pathSession!);
}

// Sample with the pipeline
@@ -266,12 +264,12 @@
/// Instruction prefix tokens.
/// </summary>
[JsonPropertyName("inp_pfx")]
- public LLamaToken[] InputPrefixTokens { get; set; }
+ public LLamaToken[]? InputPrefixTokens { get; set; }
/// <summary>
/// Instruction suffix tokens.
/// </summary>
[JsonPropertyName("inp_sfx")]
- public LLamaToken[] InputSuffixTokens { get; set; }
+ public LLamaToken[]? InputSuffixTokens { get; set; }
}
}
}
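On the recurring CS1998 warnings above ("async method lacks 'await'"): when an override must keep a `Task`-returning signature but has nothing to await, one common fix is to drop the `async` keyword and return a completed task explicitly, which keeps the contract while making the synchronous execution obvious. A minimal sketch under that assumption (the base class here is invented, not LLamaSharp's):

```csharp
using System;
using System.Collections.Generic;
using System.Threading.Tasks;

// Invented stand-in for an executor's post-processing hook.
abstract class ExecutorBase
{
    protected abstract Task<(bool, IReadOnlyList<string>)> PostProcess(string input);
}

class SyncExecutor : ExecutorBase
{
    // No 'async' keyword: nothing is awaited, so the completed task is
    // built directly and CS1998 cannot apply.
    protected override Task<(bool, IReadOnlyList<string>)> PostProcess(string input)
    {
        (bool, IReadOnlyList<string>) result = (false, Array.Empty<string>());
        return Task.FromResult(result);
    }
}
```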
37 changes: 19 additions & 18 deletions LLama/LLamaInteractExecutor.cs
@@ -27,8 +27,6 @@
private List<SafeLlavaImageEmbedHandle> _imageEmbedHandles = new List<SafeLlavaImageEmbedHandle>();
private bool _imageInPrompt = false;

- private ISamplingPipeline? _pipeline;
-
/// <summary>
///
/// </summary>
@@ -39,6 +37,12 @@
{
}

+ /// <summary>
+ ///
+ /// </summary>
+ /// <param name="context"></param>
+ /// <param name="clipModel"></param>
+ /// <param name="logger"></param>
public InteractiveExecutor(LLamaContext context, LLavaWeights clipModel, ILogger? logger = null)
: base(context, clipModel, logger)
{
@@ -69,15 +73,15 @@
if (data is InteractiveExecutorState state)
{
_n_session_consumed = state.ConsumedSessionCount;
- _embed_inps = state.EmbedInps.ToList();
+ _embed_inps = state.EmbedInps!.ToList();
_is_prompt_run = state.IsPromptRun;
_consumedTokensCount = state.ConsumedTokensCount;
- _embeds = state.Embeds.ToList();
- _last_n_tokens = new FixedSizeQueue<LLamaToken>(state.LastTokensCapacity, state.LastTokens);
+ _embeds = state.Embeds!.ToList();
+ _last_n_tokens = new FixedSizeQueue<LLamaToken>(state.LastTokensCapacity, state.LastTokens!);
_n_matching_session_tokens = state.MatchingSessionTokensCount;
_pastTokensCount = state.PastTokensCount;
_pathSession = state.SessionFilePath;
- _session_tokens = state.SessionTokens.ToList();
+ _session_tokens = state.SessionTokens!.ToList();
}
else
throw new ArgumentException("Invalid state data type.");
@@ -99,7 +103,7 @@
using (var fs = new FileStream(filename, FileMode.Open, FileAccess.Read))
{
var state = await JsonSerializer.DeserializeAsync<InteractiveExecutorState>(fs);
- await LoadState(state);
+ await LoadState(state!);
}
}

@@ -119,7 +123,7 @@
{
// When running the first input (prompt) in interactive mode, we should specially process it.
if (text == null) throw new ArgumentException("Prompt cannot be null to trigger continuation if a prompt has not been provided previously.");
- if (!this.IsMultiModal)
+ if (!IsMultiModal)
{
_embed_inps = Context.Tokenize(text, true, true).ToList();
}
Expand All @@ -138,7 +142,7 @@
text += "\n";
}

- if (!this.IsMultiModal)
+ if (!IsMultiModal)
{
var line_inp = Context.Tokenize(text, false, true);
_embed_inps.AddRange(line_inp);
Expand All @@ -156,16 +160,14 @@

/// <inheritdoc />
private Task PreprocessLlava(string text, InferStateArgs args, bool addBos = true )
{
- int usedTokens = 0;
-
// If the prompt contains the tag <image> extract this.
_imageInPrompt = text.Contains("<image>");
- if (_imageInPrompt && IsMultiModal )
+ if (_imageInPrompt && IsMultiModal)
{
foreach (var image in Images)
{
- _imageEmbedHandles.Add(SafeLlavaImageEmbedHandle.CreateFromMemory(ClipModel.NativeHandle, Context, image));
+ _imageEmbedHandles.Add(SafeLlavaImageEmbedHandle.CreateFromMemory(ClipModel!.NativeHandle, Context, image));
}

int imageIndex = text.IndexOf("<image>");
Expand All @@ -178,7 +180,6 @@
var segment2 = Context.Tokenize(postImagePrompt, false, true);
_embed_inps.AddRange(segment1);
_embed_inps.AddRange(segment2);
- usedTokens += (segment1.Length + segment2.Length);
}
else
{
Expand All @@ -195,18 +196,18 @@
}
return Task.CompletedTask;
}

/// <summary>
/// Return whether to break the generation.
/// </summary>
/// <param name="inferenceParams"></param>
/// <param name="args"></param>
/// <returns></returns>
protected override async Task<(bool, IReadOnlyList<string>)> PostProcess(IInferenceParams inferenceParams, InferStateArgs args)

[GitHub Actions check warning on line 206 (Test: linux/windows/osx releases): "This async method lacks 'await' operators and will run synchronously. Consider using the 'await' operator to await non-blocking API calls, or 'await Task.Run(...)' to do CPU-bound work on a background thread."]
{
if (_embed_inps.Count <= _consumedTokensCount)
{
if (_last_n_tokens.TokensEndsWithAnyString(args.Antiprompts, Context.NativeHandle.ModelHandle, Context.Encoding))

[GitHub Actions check warning on line 210 (Test: linux/windows/osx releases): "'IReadOnlyListExtensions.TokensEndsWithAnyString<TTokens>(TTokens, IList<string>?, SafeLlamaModelHandle, Encoding)' is obsolete: 'Use an Antiprompt processor instead'"]
args.WaitForInput = true;

if (_pastTokensCount > 0 && args.WaitForInput)
@@ -267,7 +268,7 @@

// Images
foreach( var image in _imageEmbedHandles )
- ClipModel.EvalImageEmbed(Context, image, ref _pastTokensCount);
+ ClipModel!.EvalImageEmbed(Context, image, ref _pastTokensCount);

// Post-image Tokens
end = await Context.DecodeAsync(_embeds.GetRange(_EmbedImagePosition, _embeds.Count - _EmbedImagePosition), LLamaSeqId.Zero, batch, _pastTokensCount);
@@ -301,7 +302,7 @@
if (!string.IsNullOrEmpty(_pathSession) && args.NeedToSaveSession)
{
args.NeedToSaveSession = false;
- SaveSessionFile(_pathSession);
+ SaveSessionFile(_pathSession!);
}


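The hunk at `@@ -267` above relies on a fixed multimodal evaluation order: decode the tokens before the `<image>` tag, evaluate the image embedding at that position (advancing the past-token count), then decode the remaining tokens. A schematic sketch of that flow; every type and call below is an invented placeholder, not the LLamaSharp API:

```csharp
using System;

// Schematic only: illustrates the ordering contract around an <image> tag.
class MultimodalPromptFlow
{
    static void Main()
    {
        string prompt = "Describe this picture: <image> in one sentence.";

        int tagIndex = prompt.IndexOf("<image>", StringComparison.Ordinal);
        string preImage  = prompt.Substring(0, tagIndex);
        string postImage = prompt.Substring(tagIndex + "<image>".Length);

        // 1. Decode the tokens that precede the image tag.
        Console.WriteLine($"decode(pre):  \"{preImage}\"");
        // 2. Evaluate the image embedding at the tag's position; like text
        //    tokens, this advances the running past-token count.
        Console.WriteLine("evalImageEmbed(imageHandle)");
        // 3. Decode the tokens that follow the image tag.
        Console.WriteLine($"decode(post): \"{postImage}\"");
    }
}
```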
2 changes: 2 additions & 0 deletions LLama/LLamaTransforms.cs
@@ -30,11 +30,13 @@ public class DefaultHistoryTransform : IHistoryTransform
private readonly string _unknownName;
private readonly bool _isInstructMode;

+ #pragma warning disable CS1591 // Missing XML comment for publicly visible type or member
public string UserName => _userName;
public string AssistantName => _assistantName;
public string SystemName => _systemName;
public string UnknownName => _unknownName;
public bool IsInstructMode => _isInstructMode;
+ #pragma warning restore CS1591 // Missing XML comment for publicly visible type or member

/// <summary>
///