
Commit c517cc1
Merge pull request #304 from martindevans/obsolete_attribute_eval
Added Obsolete markings to all `Eval` overloads
martindevans authored Nov 17, 2023
2 parents 18b3a55 + 16ab33b commit c517cc1
Showing 2 changed files with 6 additions and 1 deletion.
6 changes: 5 additions & 1 deletion LLama/LLamaContext.cs
@@ -340,6 +340,7 @@ public LLamaTokenDataArray ApplyPenalty(IEnumerable<llama_token> lastTokens, Dic
/// <param name="pastTokensCount"></param>
/// <returns>The updated `pastTokensCount`.</returns>
/// <exception cref="RuntimeError"></exception>
[Obsolete("use llama_decode() instead")]
public int Eval(llama_token[] tokens, int pastTokensCount)
{
return Eval(tokens.AsSpan(), pastTokensCount);
@@ -352,6 +353,7 @@ public int Eval(llama_token[] tokens, int pastTokensCount)
/// <param name="pastTokensCount"></param>
/// <returns>The updated `pastTokensCount`.</returns>
/// <exception cref="RuntimeError"></exception>
[Obsolete("use llama_decode() instead")]
public int Eval(List<llama_token> tokens, int pastTokensCount)
{
#if NET5_0_OR_GREATER
@@ -382,6 +384,7 @@ public int Eval(List<llama_token> tokens, int pastTokensCount)
/// <param name="pastTokensCount"></param>
/// <returns>The updated `pastTokensCount`.</returns>
/// <exception cref="RuntimeError"></exception>
[Obsolete("use llama_decode() instead")]
public int Eval(ReadOnlyMemory<llama_token> tokens, int pastTokensCount)
{
return Eval(tokens.Span, pastTokensCount);
@@ -394,6 +397,7 @@ public int Eval(ReadOnlyMemory<llama_token> tokens, int pastTokensCount)
/// <param name="pastTokensCount"></param>
/// <returns>The updated `pastTokensCount`.</returns>
/// <exception cref="RuntimeError"></exception>
[Obsolete("use llama_decode() instead")]
public int Eval(ReadOnlySpan<llama_token> tokens, int pastTokensCount)
{
var total = tokens.Length;
@@ -415,7 +419,7 @@ public int Eval(ReadOnlySpan<llama_token> tokens, int pastTokensCount)
}
return pastTokensCount;
}
#endregion
#endregion

/// <inheritdoc />
public void Dispose()
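With these attributes in place, existing callers of the Eval overloads still compile, but the C# compiler now reports warning CS0618 carrying the "use llama_decode() instead" message at every call site. A minimal standalone sketch of that effect, assuming only the standard library (the EvalDemo type is illustrative, not part of LLamaSharp, and uses int in place of the llama_token alias):

using System;

// Illustrative stand-in for the deprecated overloads; not LLamaSharp code.
public class EvalDemo
{
    [Obsolete("use llama_decode() instead")]
    public int Eval(int[] tokens, int pastTokensCount)
    {
        // Forwards to the span overload, mirroring the pattern in LLamaContext.
        return Eval(tokens.AsSpan(), pastTokensCount);
    }

    [Obsolete("use llama_decode() instead")]
    public int Eval(ReadOnlySpan<int> tokens, int pastTokensCount)
    {
        return pastTokensCount + tokens.Length;
    }
}

public static class Program
{
    public static void Main()
    {
        var demo = new EvalDemo();
        // Compiles, but emits:
        // warning CS0618: 'EvalDemo.Eval(int[], int)' is obsolete: 'use llama_decode() instead'
        Console.WriteLine(demo.Eval(new[] { 1, 2, 3 }, 0));
    }
}

Note that the forwarding call inside EvalDemo does not itself trigger CS0618, because the compiler suppresses obsolete warnings inside members that are themselves marked [Obsolete]; the same applies to the forwarding Eval overloads in LLamaContext above.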
1 change: 1 addition & 0 deletions LLama/Native/SafeLLamaContextHandle.cs
@@ -193,6 +193,7 @@ public int TokenToSpan(int token, Span<byte> dest)
/// <param name="tokens">The provided batch of new tokens to process</param>
/// <param name="n_past">the number of tokens to use from previous eval calls</param>
/// <returns>Returns true on success</returns>
[Obsolete("use llama_decode() instead")]
public bool Eval(ReadOnlySpan<int> tokens, int n_past)
{
unsafe
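Callers that cannot move off Eval immediately can acknowledge the deprecation explicitly instead of letting CS0618 accumulate in the build log. A sketch of that pattern, again using an illustrative stand-in type rather than SafeLLamaContextHandle itself:

using System;

public class LegacyApi
{
    // Stand-in for a deprecated member such as SafeLLamaContextHandle.Eval; hypothetical.
    [Obsolete("use llama_decode() instead")]
    public bool Eval(ReadOnlySpan<int> tokens, int n_past) => tokens.Length > 0 && n_past >= 0;
}

public static class Caller
{
    public static void Main()
    {
        var api = new LegacyApi();

#pragma warning disable CS0618 // acknowledged: migration to llama_decode() is pending
        var ok = api.Eval(new[] { 1, 2, 3 }, 0);
#pragma warning restore CS0618

        Console.WriteLine(ok);
    }
}

If the project later wants to force migration, the attribute also supports [Obsolete("...", error: true)], which turns every remaining call site into a compile error rather than a warning.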
