Go to the documentation of this file.
15 #ifndef INCLUDED_AILIA_LLM
16 #define INCLUDED_AILIA_LLM
18 #if defined(_WIN64) || defined(_M_X64) || defined(__amd64__) || defined(__x86_64__) || defined(__APPLE__) || \
19 defined(__ANDROID__) || defined(ANDROID) || defined(__linux__) || defined(NN_NINTENDO_SDK)
22 #define AILIA_LLM_API __stdcall
38 #define AILIA_LLM_STATUS_SUCCESS (0)
50 #define AILIA_LLM_STATUS_INVALID_ARGUMENT (-1)
62 #define AILIA_LLM_STATUS_ERROR_FILE_API (-2)
74 #define AILIA_LLM_STATUS_INVALID_VERSION (-3)
86 #define AILIA_LLM_STATUS_BROKEN (-4)
98 #define AILIA_LLM_STATUS_MEMORY_INSUFFICIENT (-5)
110 #define AILIA_LLM_STATUS_THREAD_ERROR (-6)
122 #define AILIA_LLM_STATUS_INVALID_STATE (-7)
134 #define AILIA_LLM_STATUS_CONTEXT_FULL (-8)
148 #define AILIA_LLM_STATUS_UNIMPLEMENTED (-15)
160 #define AILIA_LLM_STATUS_OTHER_ERROR (-128)
477 #endif // INCLUDED_AILIA_LLM
AILIA_LLM_API int ailiaLLMGetBackendName(const char **env, unsigned int env_idx)
Gets the name of the computational environment at the specified index.
AILIA_LLM_API void ailiaLLMDestroy(struct AILIALLM *llm)
Destroys the LLM instance.
AILIA_LLM_API int ailiaLLMOpenModelFileA(struct AILIALLM *llm, const char *path, unsigned int n_ctx)
Opens the model file.
AILIA_LLM_API int ailiaLLMGenerate(struct AILIALLM *llm, unsigned int *done)
Performs generation.
AILIA_LLM_API int ailiaLLMSetPrompt(struct AILIALLM *llm, const AILIALLMChatMessage *message, unsigned int message_cnt)
Sets the prompt.
AILIA_LLM_API int ailiaLLMGetBackendCount(unsigned int *env_count)
Gets the number of available computational environments (CPU, GPU).
AILIA_LLM_API int ailiaLLMOpenModelFileW(struct AILIALLM *llm, const wchar_t *path, unsigned int n_ctx)
const char * role
Represents the role. (system, user, or assistant)
Definition: ailia_llm.h:172
Definition: ailia_llm.h:166
AILIA_LLM_API int ailiaLLMGetContextSize(struct AILIALLM *llm, unsigned int *context_size)
Gets the size of context.
AILIA_LLM_API int ailiaLLMGetGeneratedTokenCount(struct AILIALLM *llm, unsigned int *cnt)
Gets the count of generated tokens.
struct _AILIALLMChatMessage AILIALLMChatMessage
AILIA_LLM_API int ailiaLLMGetTokenCount(struct AILIALLM *llm, unsigned int *cnt, const char *text)
Gets the count of tokens in the given text.
AILIA_LLM_API int ailiaLLMSetSamplingParams(struct AILIALLM *llm, unsigned int top_k, float top_p, float temp, unsigned int dist)
Sets the sampling parameters.
#define AILIA_LLM_API
Definition: ailia_llm.h:22
AILIA_LLM_API int ailiaLLMGetPromptTokenCount(struct AILIALLM *llm, unsigned int *cnt)
Gets the count of prompt tokens.
AILIA_LLM_API int ailiaLLMGetDeltaText(struct AILIALLM *llm, char *text, unsigned int buf_size)
Gets the decoded text.
AILIA_LLM_API int ailiaLLMCreate(struct AILIALLM **llm)
Creates an LLM instance.
AILIA_LLM_API int ailiaLLMGetDeltaTextSize(struct AILIALLM *llm, unsigned int *buf_size)
Gets the size of the text, including the null terminator.
const char * content
Represents the content of the message.
Definition: ailia_llm.h:176