|
static int | ailiaLLMCreate (ref IntPtr llm) |
| Creates a LLM instance. More...
|
|
static int | ailiaLLMOpenModelFile (IntPtr llm, string path, uint n_ctx) |
| Open model file. More...
|
|
static int | ailiaLLMSetSamplingParams (IntPtr llm, uint top_k, float top_p, float temp, uint dist) |
| Set the sampling parameter. More...
|
|
static int | ailiaLLMSetPrompt (IntPtr llm, IntPtr messages, uint messages_len) |
| Set the prompt. More...
|
|
static int | ailiaLLMGenerate (IntPtr llm, ref uint done) |
| Perform generate. More...
|
|
static int | ailiaLLMGetDeltaTextSize (IntPtr llm, ref uint len) |
| Gets the size of text. (Includes the null terminator) More...
|
|
static int | ailiaLLMGetDeltaText (IntPtr llm, IntPtr text, uint len) |
| Gets the decoded text. More...
|
|
static int | ailiaLLMGetTokenCount (IntPtr llm, ref uint cnt, IntPtr text) |
| Gets the count of token. More...
|
|
static int | ailiaLLMGetPromptTokenCount (IntPtr llm, ref uint cnt) |
| Gets the count of prompt token. More...
|
|
static int | ailiaLLMGetGeneratedTokenCount (IntPtr llm, ref uint cnt) |
| Gets the count of generated token. More...
|
|
static void | ailiaLLMDestroy (IntPtr llm) |
| It destroys the LLM instance. More...
|
|
◆ ailiaLLMCreate()
static int ailiaLLM.AiliaLLM.ailiaLLMCreate |
( |
ref IntPtr |
llm | ) |
|
Creates a LLM instance.
- Parameters
-
llm | A pointer to the LLM instance pointer |
- Returns
- If this function is successful, it returns AILIA_LLM_STATUS_SUCCESS , or an error code otherwise.
Creates a LLM instance.
◆ ailiaLLMDestroy()
static void ailiaLLM.AiliaLLM.ailiaLLMDestroy |
( |
IntPtr |
llm | ) |
|
It destroys the LLM instance.
- Parameters
-
llm | A LLM instance pointer |
◆ ailiaLLMGenerate()
static int ailiaLLM.AiliaLLM.ailiaLLMGenerate |
( |
IntPtr |
llm, |
|
|
ref uint |
done |
|
) |
| |
Perform generate.
- Parameters
-
llm | A LLM instance pointer |
done | Generation complete? |
- Returns
- If this function is successful, it returns AILIA_LLM_STATUS_SUCCESS , or an error code otherwise.
The decoded result is obtained through the ailiaLLMGetDeltaText API. Each call to ailiaLLMGenerate decodes one token at a time. The value of done is 0 or 1. If done is 1, the generation is complete.
◆ ailiaLLMGetDeltaText()
static int ailiaLLM.AiliaLLM.ailiaLLMGetDeltaText |
( |
IntPtr |
llm, |
|
|
IntPtr |
text, |
|
|
uint |
len |
|
) |
| |
Gets the decoded text.
- Parameters
-
llm | A LLM instance pointer |
text | Text(UTF8) |
len | Buffer size |
- Returns
- If this function is successful, it returns AILIA_LLM_STATUS_SUCCESS , or an error code otherwise.
If ailiaLLMGenerate() is not run at all, the function returns AILIA_LLM_STATUS_INVALID_STATE .
◆ ailiaLLMGetDeltaTextSize()
static int ailiaLLM.AiliaLLM.ailiaLLMGetDeltaTextSize |
( |
IntPtr |
llm, |
|
|
ref uint |
len |
|
) |
| |
Gets the size of text. (Includes the null terminator)
- Parameters
-
llm | A LLM instance pointer |
len | The length of text |
- Returns
- If this function is successful, it returns AILIA_LLM_STATUS_SUCCESS , or an error code otherwise.
◆ ailiaLLMGetGeneratedTokenCount()
static int ailiaLLM.AiliaLLM.ailiaLLMGetGeneratedTokenCount |
( |
IntPtr |
llm, |
|
|
ref uint |
cnt |
|
) |
| |
Gets the count of generated token.
- Parameters
-
llm | A LLM instance pointer |
cnt | The count of generated token |
- Returns
- If this function is successful, it returns AILIA_LLM_STATUS_SUCCESS , or an error code otherwise.
It can be called after calling ailiaLLMGenerate.
◆ ailiaLLMGetPromptTokenCount()
static int ailiaLLM.AiliaLLM.ailiaLLMGetPromptTokenCount |
( |
IntPtr |
llm, |
|
|
ref uint |
cnt |
|
) |
| |
Gets the count of prompt token.
- Parameters
-
llm | A LLM instance pointer |
cnt | The count of prompt token |
- Returns
- If this function is successful, it returns AILIA_LLM_STATUS_SUCCESS , or an error code otherwise.
It can be called after calling ailiaLLMSetPrompt.
◆ ailiaLLMGetTokenCount()
static int ailiaLLM.AiliaLLM.ailiaLLMGetTokenCount |
( |
IntPtr |
llm, |
|
|
ref uint |
cnt, |
|
|
IntPtr |
text |
|
) |
| |
Gets the count of token.
- Parameters
-
llm | A LLM instance pointer |
cnt | The count of token |
text | Text(UTF8) |
- Returns
- If this function is successful, it returns AILIA_LLM_STATUS_SUCCESS , or an error code otherwise.
◆ ailiaLLMOpenModelFile()
static int ailiaLLM.AiliaLLM.ailiaLLMOpenModelFile |
( |
IntPtr |
llm, |
|
|
string |
path, |
|
|
uint |
n_ctx |
|
) |
| |
Open model file.
- Parameters
-
llm | A LLM instance pointer |
path | Path for GGUF |
n_ctx | Context length for model (0 is model default) |
- Returns
- If this function is successful, it returns AILIA_LLM_STATUS_SUCCESS , or an error code otherwise.
Open a model file for GGUF.
◆ ailiaLLMSetPrompt()
static int ailiaLLM.AiliaLLM.ailiaLLMSetPrompt |
( |
IntPtr |
llm, |
|
|
IntPtr |
messages, |
|
|
uint |
messages_len |
|
) |
| |
Set the prompt.
- Parameters
-
llm | A LLM instance pointer |
messages | Array of messages |
messages_len | Number of messages |
- Returns
- If this function is successful, it returns AILIA_LLM_STATUS_SUCCESS , or an error code otherwise.
Set the prompt to query the LLM. Please include ChatHistory in the message as well.
◆ ailiaLLMSetSamplingParams()
static int ailiaLLM.AiliaLLM.ailiaLLMSetSamplingParams |
( |
IntPtr |
llm, |
|
|
uint |
top_k, |
|
|
float |
top_p, |
|
|
float |
temp, |
|
|
uint |
dist |
|
) |
| |
Set the sampling parameter.
- Parameters
-
llm | A LLM instance pointer |
top_k | Sampling probability value's top number, default 40 |
top_p | Sampling probability value range, default 0.9 (0.9 to 1.0) |
temp | Temperature parameter, default 0.4 |
dist | Seed, default 1234 |
- Returns
- If this function is successful, it returns AILIA_LLM_STATUS_SUCCESS , or an error code otherwise.
Set LLM sampling parameters. Must be run before ailiaLLMSetPrompt.
◆ AILIA_LLM_STATUS_BROKEN
const int ailiaLLM.AiliaLLM.AILIA_LLM_STATUS_BROKEN = (-4) |
|
static |
◆ AILIA_LLM_STATUS_CONTEXT_FULL
const int ailiaLLM.AiliaLLM.AILIA_LLM_STATUS_CONTEXT_FULL = (-8) |
|
static |
◆ AILIA_LLM_STATUS_ERROR_FILE_API
const int ailiaLLM.AiliaLLM.AILIA_LLM_STATUS_ERROR_FILE_API = (-2) |
|
static |
◆ AILIA_LLM_STATUS_INVALID_ARGUMENT
const int ailiaLLM.AiliaLLM.AILIA_LLM_STATUS_INVALID_ARGUMENT = (-1) |
|
static |
◆ AILIA_LLM_STATUS_INVALID_STATE
const int ailiaLLM.AiliaLLM.AILIA_LLM_STATUS_INVALID_STATE = (-7) |
|
static |
◆ AILIA_LLM_STATUS_INVALID_VERSION
const int ailiaLLM.AiliaLLM.AILIA_LLM_STATUS_INVALID_VERSION = (-3) |
|
static |
◆ AILIA_LLM_STATUS_MEMORY_INSUFFICIENT
const int ailiaLLM.AiliaLLM.AILIA_LLM_STATUS_MEMORY_INSUFFICIENT = (-5) |
|
static |
◆ AILIA_LLM_STATUS_OTHER_ERROR
const int ailiaLLM.AiliaLLM.AILIA_LLM_STATUS_OTHER_ERROR = (-128) |
|
static |
◆ AILIA_LLM_STATUS_SUCCESS
const int ailiaLLM.AiliaLLM.AILIA_LLM_STATUS_SUCCESS = (0) |
|
static |
◆ AILIA_LLM_STATUS_THREAD_ERROR
const int ailiaLLM.AiliaLLM.AILIA_LLM_STATUS_THREAD_ERROR = (-6) |
|
static |
◆ AILIA_LLM_STATUS_UNIMPLEMENTED
const int ailiaLLM.AiliaLLM.AILIA_LLM_STATUS_UNIMPLEMENTED = (-15) |
|
static |
◆ LIBRARY_NAME
const String ailiaLLM.AiliaLLM.LIBRARY_NAME ="ailia_llm" |
|
static |
The documentation for this class was generated from the following file: