ailia_llm  1.3.1.0
Classes | Public Member Functions | Static Public Attributes | List of all members
ailiaLLM.AiliaLLM Class Reference

Classes

class  AILIAChatMessage
 

Public Member Functions

static int ailiaLLMCreate (ref IntPtr llm)
 Creates a LLM instance. More...
 
static int ailiaLLMOpenModelFile (IntPtr llm, string path, uint n_ctx)
 Open model file. More...
 
static int ailiaLLMSetSamplingParams (IntPtr llm, uint top_k, float top_p, float temp, uint dist)
 Set the sampling parameter. More...
 
static int ailiaLLMSetPrompt (IntPtr llm, IntPtr messages, uint messages_len)
 Set the prompt. More...
 
static int ailiaLLMGenerate (IntPtr llm, ref uint done)
 Perform generate. More...
 
static int ailiaLLMGetDeltaTextSize (IntPtr llm, ref uint len)
 Gets the size of the text. (Includes the null terminator.) More...
 
static int ailiaLLMGetDeltaText (IntPtr llm, IntPtr text, uint len)
 Gets the decoded text. More...
 
static int ailiaLLMGetTokenCount (IntPtr llm, ref uint cnt, IntPtr text)
 Gets the count of token. More...
 
static int ailiaLLMGetPromptTokenCount (IntPtr llm, ref uint cnt)
 Gets the count of prompt token. More...
 
static int ailiaLLMGetGeneratedTokenCount (IntPtr llm, ref uint cnt)
 Gets the count of generated tokens. More...
 
static void ailiaLLMDestroy (IntPtr llm)
 It destroys the LLM instance. More...
 

Static Public Attributes

const String LIBRARY_NAME ="ailia_llm"
 
const int AILIA_LLM_STATUS_SUCCESS = (0)
 
const int AILIA_LLM_STATUS_INVALID_ARGUMENT = (-1)
 
const int AILIA_LLM_STATUS_ERROR_FILE_API = (-2)
 
const int AILIA_LLM_STATUS_INVALID_VERSION = (-3)
 
const int AILIA_LLM_STATUS_BROKEN = (-4)
 
const int AILIA_LLM_STATUS_MEMORY_INSUFFICIENT = (-5)
 
const int AILIA_LLM_STATUS_THREAD_ERROR = (-6)
 
const int AILIA_LLM_STATUS_INVALID_STATE = (-7)
 
const int AILIA_LLM_STATUS_CONTEXT_FULL = (-8)
 
const int AILIA_LLM_STATUS_UNIMPLEMENTED = (-15)
 
const int AILIA_LLM_STATUS_OTHER_ERROR = (-128)
 

Member Function Documentation

◆ ailiaLLMCreate()

static int ailiaLLM.AiliaLLM.ailiaLLMCreate ( ref IntPtr  llm)

Creates a LLM instance.

Parameters
llmA pointer to the LLM instance pointer
Returns
If this function is successful, it returns AILIA_LLM_STATUS_SUCCESS , or an error code otherwise.

Creates a LLM instance.

◆ ailiaLLMDestroy()

static void ailiaLLM.AiliaLLM.ailiaLLMDestroy ( IntPtr  llm)

It destroys the LLM instance.

Parameters
llmA LLM instance pointer

◆ ailiaLLMGenerate()

static int ailiaLLM.AiliaLLM.ailiaLLMGenerate ( IntPtr  llm,
ref uint  done 
)

Perform generate.

Parameters
llmA LLM instance pointer
doneGeneration complete?
Returns
If this function is successful, it returns AILIA_LLM_STATUS_SUCCESS , or an error code otherwise.

The decoded result is obtained through the ailiaLLMGetDeltaText API. Each call to ailiaLLMGenerate decodes one token at a time. The value of done is 0 or 1. If done is 1, the generation is complete.

◆ ailiaLLMGetDeltaText()

static int ailiaLLM.AiliaLLM.ailiaLLMGetDeltaText ( IntPtr  llm,
IntPtr  text,
uint  len 
)

Gets the decoded text.

Parameters
llmA LLM instance pointer
textText(UTF8)
lenBuffer size
Returns
If this function is successful, it returns AILIA_LLM_STATUS_SUCCESS , or an error code otherwise.

If ailiaLLMGenerate() is not run at all, the function returns AILIA_LLM_STATUS_INVALID_STATE .

◆ ailiaLLMGetDeltaTextSize()

static int ailiaLLM.AiliaLLM.ailiaLLMGetDeltaTextSize ( IntPtr  llm,
ref uint  len 
)

Gets the size of the text. (Includes the null terminator.)

Parameters
llmA LLM instance pointer
lenThe length of text
Returns
If this function is successful, it returns AILIA_LLM_STATUS_SUCCESS , or an error code otherwise.

◆ ailiaLLMGetGeneratedTokenCount()

static int ailiaLLM.AiliaLLM.ailiaLLMGetGeneratedTokenCount ( IntPtr  llm,
ref uint  cnt 
)

Gets the count of generated tokens.

Parameters
llmA LLM instance pointer
cntThe count of generated token
Returns
If this function is successful, it returns AILIA_LLM_STATUS_SUCCESS , or an error code otherwise.

It can be called after calling ailiaLLMGenerate.

◆ ailiaLLMGetPromptTokenCount()

static int ailiaLLM.AiliaLLM.ailiaLLMGetPromptTokenCount ( IntPtr  llm,
ref uint  cnt 
)

Gets the count of prompt token.

Parameters
llmA LLM instance pointer
cntThe count of prompt token
Returns
If this function is successful, it returns AILIA_LLM_STATUS_SUCCESS , or an error code otherwise.

It can be called after calling ailiaLLMSetPrompt.

◆ ailiaLLMGetTokenCount()

static int ailiaLLM.AiliaLLM.ailiaLLMGetTokenCount ( IntPtr  llm,
ref uint  cnt,
IntPtr  text 
)

Gets the count of token.

Parameters
llmA LLM instance pointer
cntThe count of token
textText(UTF8)
Returns
If this function is successful, it returns AILIA_LLM_STATUS_SUCCESS , or an error code otherwise.

◆ ailiaLLMOpenModelFile()

static int ailiaLLM.AiliaLLM.ailiaLLMOpenModelFile ( IntPtr  llm,
string  path,
uint  n_ctx 
)

Open model file.

Parameters
llmA pointer to the LLM instance pointer
pathPath for GGUF
n_ctxContext length for model (0 is model default)
Returns
If this function is successful, it returns AILIA_LLM_STATUS_SUCCESS , or an error code otherwise.

Open a model file for GGUF.

◆ ailiaLLMSetPrompt()

static int ailiaLLM.AiliaLLM.ailiaLLMSetPrompt ( IntPtr  llm,
IntPtr  messages,
uint  messages_len 
)

Set the prompt.

Parameters
llmA pointer to the LLM instance pointer
messagesArray of messages
messages_lenNumber of messages
Returns
If this function is successful, it returns AILIA_LLM_STATUS_SUCCESS , or an error code otherwise.

Set the prompt to query the LLM. Please include ChatHistory in the message as well.

◆ ailiaLLMSetSamplingParams()

static int ailiaLLM.AiliaLLM.ailiaLLMSetSamplingParams ( IntPtr  llm,
uint  top_k,
float  top_p,
float  temp,
uint  dist 
)

Set the sampling parameter.

Parameters
llmA pointer to the LLM instance pointer
top_kSampling probability value's top number, default 40
top_pSampling probability value range, default 0.9 (0.9 to 1.0)
tempTemperature parameter, default 0.4
distSeed, default 1234
Returns
If this function is successful, it returns AILIA_LLM_STATUS_SUCCESS , or an error code otherwise.

Set LLM sampling parameters. Must be run before ailiaLLMSetPrompt.

Member Data Documentation

◆ AILIA_LLM_STATUS_BROKEN

const int ailiaLLM.AiliaLLM.AILIA_LLM_STATUS_BROKEN = (-4)
static

◆ AILIA_LLM_STATUS_CONTEXT_FULL

const int ailiaLLM.AiliaLLM.AILIA_LLM_STATUS_CONTEXT_FULL = (-8)
static

◆ AILIA_LLM_STATUS_ERROR_FILE_API

const int ailiaLLM.AiliaLLM.AILIA_LLM_STATUS_ERROR_FILE_API = (-2)
static

◆ AILIA_LLM_STATUS_INVALID_ARGUMENT

const int ailiaLLM.AiliaLLM.AILIA_LLM_STATUS_INVALID_ARGUMENT = (-1)
static

◆ AILIA_LLM_STATUS_INVALID_STATE

const int ailiaLLM.AiliaLLM.AILIA_LLM_STATUS_INVALID_STATE = (-7)
static

◆ AILIA_LLM_STATUS_INVALID_VERSION

const int ailiaLLM.AiliaLLM.AILIA_LLM_STATUS_INVALID_VERSION = (-3)
static

◆ AILIA_LLM_STATUS_MEMORY_INSUFFICIENT

const int ailiaLLM.AiliaLLM.AILIA_LLM_STATUS_MEMORY_INSUFFICIENT = (-5)
static

◆ AILIA_LLM_STATUS_OTHER_ERROR

const int ailiaLLM.AiliaLLM.AILIA_LLM_STATUS_OTHER_ERROR = (-128)
static

◆ AILIA_LLM_STATUS_SUCCESS

const int ailiaLLM.AiliaLLM.AILIA_LLM_STATUS_SUCCESS = (0)
static

◆ AILIA_LLM_STATUS_THREAD_ERROR

const int ailiaLLM.AiliaLLM.AILIA_LLM_STATUS_THREAD_ERROR = (-6)
static

◆ AILIA_LLM_STATUS_UNIMPLEMENTED

const int ailiaLLM.AiliaLLM.AILIA_LLM_STATUS_UNIMPLEMENTED = (-15)
static

◆ LIBRARY_NAME

const String ailiaLLM.AiliaLLM.LIBRARY_NAME ="ailia_llm"
static

The documentation for this class was generated from the following file: