ailia_llm  1.3.1.0
ailia_llm.h
15 #ifndef INCLUDED_AILIA_LLM
16 #define INCLUDED_AILIA_LLM
17 
18 #if defined(_WIN64) || defined(_M_X64) || defined(__amd64__) || defined(__x86_64__) || defined(__APPLE__) || \
19  defined(__ANDROID__) || defined(ANDROID) || defined(__linux__) || defined(NN_NINTENDO_SDK)
20 #define AILIA_LLM_API
21 #else
22 #define AILIA_LLM_API __stdcall
23 #endif
24 
25 /****************************************************************
26  * Library status definitions
27  **/
28 
38 #define AILIA_LLM_STATUS_SUCCESS (0)
39 
50 #define AILIA_LLM_STATUS_INVALID_ARGUMENT (-1)
51 
62 #define AILIA_LLM_STATUS_ERROR_FILE_API (-2)
63 
74 #define AILIA_LLM_STATUS_INVALID_VERSION (-3)
75 
86 #define AILIA_LLM_STATUS_BROKEN (-4)
87 
98 #define AILIA_LLM_STATUS_MEMORY_INSUFFICIENT (-5)
99 
110 #define AILIA_LLM_STATUS_THREAD_ERROR (-6)
111 
122 #define AILIA_LLM_STATUS_INVALID_STATE (-7)
123 
134 #define AILIA_LLM_STATUS_CONTEXT_FULL (-8)
135 
148 #define AILIA_LLM_STATUS_UNIMPLEMENTED (-15)
149 
160 #define AILIA_LLM_STATUS_OTHER_ERROR (-128)
161 
162 /****************************************************************
163  * Chat message
164  **/
165 
166 typedef struct _AILIALLMChatMessage {
170  const char *role;
174  const char *content;
176 } AILIALLMChatMessage;
177 #ifdef __cplusplus
178 extern "C" {
179 #endif
180 
181 /****************************************************************
182  * Instance of the LLM object
183  **/
184 
185 struct AILIALLM;
186 
187 /****************************************************************
188  * LLM API
189  **/
190 
204 AILIA_LLM_API int ailiaLLMGetBackendCount(unsigned int* env_count);
205 
222 AILIA_LLM_API int ailiaLLMGetBackendName(const char** env, unsigned int env_idx);
223 
241 AILIA_LLM_API int ailiaLLMCreate(struct AILIALLM** llm);
242 
264 AILIA_LLM_API int ailiaLLMOpenModelFileA(struct AILIALLM* llm, const char *path, unsigned int n_ctx);
265 AILIA_LLM_API int ailiaLLMOpenModelFileW(struct AILIALLM* llm, const wchar_t *path, unsigned int n_ctx);
266 
282 AILIA_LLM_API int ailiaLLMGetContextSize(struct AILIALLM* llm, unsigned int *context_size);
283 
309 AILIA_LLM_API int ailiaLLMSetSamplingParams(struct AILIALLM* llm, unsigned int top_k, float top_p, float temp, unsigned int dist);
310 
334 AILIA_LLM_API int ailiaLLMSetPrompt(struct AILIALLM* llm, const AILIALLMChatMessage * message, unsigned int message_cnt);
335 
359 AILIA_LLM_API int ailiaLLMGenerate(struct AILIALLM* llm, unsigned int *done);
360 
376 AILIA_LLM_API int ailiaLLMGetDeltaTextSize(struct AILIALLM* llm, unsigned int *buf_size);
377 
399 AILIA_LLM_API int ailiaLLMGetDeltaText(struct AILIALLM* llm, char * text, unsigned int buf_size);
400 
418 AILIA_LLM_API int ailiaLLMGetTokenCount(struct AILIALLM* llm, unsigned int *cnt, const char* text);
419 
439 AILIA_LLM_API int ailiaLLMGetPromptTokenCount(struct AILIALLM* llm, unsigned int *cnt);
440 
460 AILIA_LLM_API int ailiaLLMGetGeneratedTokenCount(struct AILIALLM* llm, unsigned int *cnt);
461 
471 AILIA_LLM_API void ailiaLLMDestroy(struct AILIALLM* llm);
472 
473 #ifdef __cplusplus
474 }
475 #endif
476 
477 #endif // INCLUDED_AILIA_LLM
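The following is a minimal usage sketch of the API declared above, written in C. It creates an instance, opens a GGUF model file, sets a two-message chat prompt, and streams the generated text until generation completes. The model path "model.gguf" and the context size of 2048 are placeholder values, and the assumption that ailiaLLMGenerate produces one decoded chunk per call (retrieved via ailiaLLMGetDeltaTextSize / ailiaLLMGetDeltaText) is inferred from the declarations and briefs in this header rather than confirmed behavior.

#include <stdio.h>
#include <stdlib.h>
#include "ailia_llm.h"

int main(void) {
    struct AILIALLM *llm = NULL;

    /* Create the LLM instance. */
    if (ailiaLLMCreate(&llm) != AILIA_LLM_STATUS_SUCCESS) {
        return 1;
    }

    /* Open the model; "model.gguf" and n_ctx = 2048 are placeholder values. */
    if (ailiaLLMOpenModelFileA(llm, "model.gguf", 2048) != AILIA_LLM_STATUS_SUCCESS) {
        ailiaLLMDestroy(llm);
        return 1;
    }

    /* Build the chat prompt; roles follow the (system, user, assistant) convention. */
    AILIALLMChatMessage messages[2] = {
        { "system", "You are a helpful assistant." },
        { "user",   "Hello!" }
    };
    if (ailiaLLMSetPrompt(llm, messages, 2) != AILIA_LLM_STATUS_SUCCESS) {
        ailiaLLMDestroy(llm);
        return 1;
    }

    /* Generate until done becomes non-zero, printing each decoded chunk. */
    unsigned int done = 0;
    while (!done) {
        int status = ailiaLLMGenerate(llm, &done);
        if (status != AILIA_LLM_STATUS_SUCCESS) {
            break; /* includes AILIA_LLM_STATUS_CONTEXT_FULL when n_ctx is exhausted */
        }

        unsigned int buf_size = 0; /* size of the delta text, including the null terminator */
        if (ailiaLLMGetDeltaTextSize(llm, &buf_size) != AILIA_LLM_STATUS_SUCCESS) {
            break;
        }
        char *text = (char *)malloc(buf_size);
        if (text == NULL) {
            break;
        }
        if (ailiaLLMGetDeltaText(llm, text, buf_size) == AILIA_LLM_STATUS_SUCCESS) {
            printf("%s", text);
        }
        free(text);
    }

    ailiaLLMDestroy(llm);
    return 0;
}

Error handling above follows the AILIA_LLM_STATUS_* codes defined at the top of the header; AILIA_LLM_STATUS_CONTEXT_FULL in particular signals that the prompt and generated tokens have exhausted n_ctx.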
ailiaLLMGetBackendName
AILIA_LLM_API int ailiaLLMGetBackendName(const char **env, unsigned int env_idx)
Gets the name of the specified computational environment.
ailiaLLMDestroy
AILIA_LLM_API void ailiaLLMDestroy(struct AILIALLM *llm)
Destroys the LLM instance.
ailiaLLMOpenModelFileA
AILIA_LLM_API int ailiaLLMOpenModelFileA(struct AILIALLM *llm, const char *path, unsigned int n_ctx)
Opens the model file.
ailiaLLMGenerate
AILIA_LLM_API int ailiaLLMGenerate(struct AILIALLM *llm, unsigned int *done)
Performs generation.
ailiaLLMSetPrompt
AILIA_LLM_API int ailiaLLMSetPrompt(struct AILIALLM *llm, const AILIALLMChatMessage *message, unsigned int message_cnt)
Sets the prompt.
ailiaLLMGetBackendCount
AILIA_LLM_API int ailiaLLMGetBackendCount(unsigned int *env_count)
Gets the number of available computational environments (CPU, GPU).
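A short sketch of how the two backend functions above might be combined to list the available computational environments. print_backends is a hypothetical helper, and the returned name pointer is assumed to be owned by the library (the caller does not free it).

#include <stdio.h>
#include "ailia_llm.h"

/* Hypothetical helper: list every available computational environment. */
static void print_backends(void) {
    unsigned int count = 0;
    if (ailiaLLMGetBackendCount(&count) != AILIA_LLM_STATUS_SUCCESS) {
        return;
    }
    for (unsigned int i = 0; i < count; i++) {
        const char *name = NULL; /* assumed to be owned by the library */
        if (ailiaLLMGetBackendName(&name, i) == AILIA_LLM_STATUS_SUCCESS) {
            printf("backend %u: %s\n", i, name);
        }
    }
}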
ailiaLLMOpenModelFileW
AILIA_LLM_API int ailiaLLMOpenModelFileW(struct AILIALLM *llm, const wchar_t *path, unsigned int n_ctx)
Opens the model file. (wide-character path)
_AILIALLMChatMessage::role
const char * role
Represents the role (system, user, assistant).
Definition: ailia_llm.h:172
_AILIALLMChatMessage
Definition: ailia_llm.h:166
ailiaLLMGetContextSize
AILIA_LLM_API int ailiaLLMGetContextSize(struct AILIALLM *llm, unsigned int *context_size)
Gets the size of the context.
ailiaLLMGetGeneratedTokenCount
AILIA_LLM_API int ailiaLLMGetGeneratedTokenCount(struct AILIALLM *llm, unsigned int *cnt)
Gets the count of generated tokens.
AILIALLMChatMessage
struct _AILIALLMChatMessage AILIALLMChatMessage
ailiaLLMGetTokenCount
AILIA_LLM_API int ailiaLLMGetTokenCount(struct AILIALLM *llm, unsigned int *cnt, const char *text)
Gets the count of tokens.
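A small sketch using ailiaLLMGetTokenCount together with ailiaLLMGetContextSize to check whether a piece of text fits in the model context. prompt_fits is a hypothetical helper; it ignores any tokens added by the chat template, which is an assumption of this sketch.

#include <stdio.h>
#include "ailia_llm.h"

/* Hypothetical helper: check whether the text fits in the model context.
 * Tokens added by the chat template are not counted (an assumption of this sketch). */
static int prompt_fits(struct AILIALLM *llm, const char *text) {
    unsigned int tokens = 0;
    unsigned int context = 0;
    if (ailiaLLMGetTokenCount(llm, &tokens, text) != AILIA_LLM_STATUS_SUCCESS) {
        return 0;
    }
    if (ailiaLLMGetContextSize(llm, &context) != AILIA_LLM_STATUS_SUCCESS) {
        return 0;
    }
    printf("%u tokens, context size %u\n", tokens, context);
    return tokens < context;
}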
ailiaLLMSetSamplingParams
AILIA_LLM_API int ailiaLLMSetSamplingParams(struct AILIALLM *llm, unsigned int top_k, float top_p, float temp, unsigned int dist)
Sets the sampling parameters.
AILIA_LLM_API
#define AILIA_LLM_API
Definition: ailia_llm.h:22
ailiaLLMGetPromptTokenCount
AILIA_LLM_API int ailiaLLMGetPromptTokenCount(struct AILIALLM *llm, unsigned int *cnt)
Gets the count of prompt tokens.
ailiaLLMGetDeltaText
AILIA_LLM_API int ailiaLLMGetDeltaText(struct AILIALLM *llm, char *text, unsigned int buf_size)
Gets the decoded text.
ailiaLLMCreate
AILIA_LLM_API int ailiaLLMCreate(struct AILIALLM **llm)
Creates an LLM instance.
ailiaLLMGetDeltaTextSize
AILIA_LLM_API int ailiaLLMGetDeltaTextSize(struct AILIALLM *llm, unsigned int *buf_size)
Gets the size of the text, including the terminating null character.
_AILIALLMChatMessage::content
const char * content
Represents the content of the message.
Definition: ailia_llm.h:176