ailia_llm  1.3.1.0
Public Member Functions | Protected Member Functions | List of all members
ailiaLLM.AiliaLLMModel Class Reference
Inheritance diagram for ailiaLLM.AiliaLLMModel:
Inheritance graph
[legend]
Collaboration diagram for ailiaLLM.AiliaLLMModel:
Collaboration graph
[legend]

Public Member Functions

bool Create ()
 Create an instance. More...
 
bool Open (string model_path, uint n_ctx=0)
 Open a model. More...
 
virtual void Close ()
 Destroys instance. More...
 
virtual void Dispose ()
 Release resources. More...
 
bool SetSamplingParam (uint top_k, float top_p, float temp, uint dist)
 Set the sampling parameter. More...
 
bool SetPrompt (List< AiliaLLMChatMessage > messages)
 Set prompt messages. More...
 
bool Generate (ref bool done)
 Perform generation. More...
 
string GetDeltaText ()
 Get the newly generated (delta) text. More...
 
bool ContextFull ()
 Check if the context length limit has been reached. More...
 

Protected Member Functions

virtual void Dispose (bool disposing)
 

Member Function Documentation

◆ Close()

virtual void ailiaLLM.AiliaLLMModel.Close ( )
inlinevirtual

Destroys instance.

Destroys and initializes the instance.

109  {
110  if (net != IntPtr.Zero){
111  AiliaLLM.ailiaLLMDestroy(net);
112  net = IntPtr.Zero;
113  }
114  }

◆ ContextFull()

bool ailiaLLM.AiliaLLMModel.ContextFull ( )
inline

Check if the context length limit has been reached.

Returns
True if the limit is reached, false otherwise.
353  {
354  return context_full;
355  }

◆ Create()

bool ailiaLLM.AiliaLLMModel.Create ( )
inline

Create an instance.

Returns
If this function is successful, it returns true, or false otherwise.
42  {
43  if (net != IntPtr.Zero){
44  Close();
45  }
46 
47  int status = AiliaLLM.ailiaLLMCreate(ref net);
48  if (status != 0){
49  if (logging)
50  {
51  Debug.Log("ailiaLLMCreate failed " + status);
52  }
53  return false;
54  }
55 
56  return true;
57  }

◆ Dispose() [1/2]

virtual void ailiaLLM.AiliaLLMModel.Dispose ( )
inlinevirtual

Release resources.

124  {
125  Dispose(true);
126  }

◆ Dispose() [2/2]

virtual void ailiaLLM.AiliaLLMModel.Dispose ( bool  disposing)
inlineprotectedvirtual
129  {
130  if (disposing){
131  // release managed resource
132  }
133  Close(); // release unmanaged resource
134  }

◆ Generate()

bool ailiaLLM.AiliaLLMModel.Generate ( ref bool  done)
inline

Perform generation.

Parameters
done	Set to true when generation has finished, false otherwise
Returns
If this function is successful, it returns true, or false otherwise.
270  {
271  uint done_uint = 0;
272  int status = AiliaLLM.ailiaLLMGenerate(net, ref done_uint);
273  context_full = false;
274  done = (done_uint == 1);
275  if (status != 0){
276  if (logging)
277  {
278  Debug.Log("ailiaLLMGenerate failed " + status);
279  }
280  if (status == AiliaLLM.AILIA_LLM_STATUS_CONTEXT_FULL){
281  context_full = true;
282  }
283  return false;
284  }
285  return true;
286  }

◆ GetDeltaText()

string ailiaLLM.AiliaLLMModel.GetDeltaText ( )
inline

Get the newly generated (delta) text.

Returns
It returns the text generated since the previous call.
300  {
301  uint len = 0;
302  int status = AiliaLLM.ailiaLLMGetDeltaTextSize(net, ref len);
303  if (status != 0){
304  if (logging)
305  {
306  Debug.Log("ailiaLLMGetDeltaTextSize failed " + status);
307  }
308  return "";
309  }
310  byte[] text = new byte [len];
311  GCHandle handle = GCHandle.Alloc(text, GCHandleType.Pinned);
312  IntPtr output = handle.AddrOfPinnedObject();
313  status = AiliaLLM.ailiaLLMGetDeltaText(net, output, len);
314  handle.Free();
315  if (status != 0){
316  if (logging)
317  {
318  Debug.Log("ailiaLLMGetDeltaText failed " + status);
319  }
320  return "";
321  }
322 
323  byte[] new_buf = new byte [buf.Length + len - 1];
324  for (int i = 0; i < buf.Length; i++){
325  new_buf[i] = buf[i];
326  }
327  for (int i = 0; i < len - 1; i++){ // strip the trailing NUL
328  new_buf[buf.Length + i] = text[i];
329  }
330  buf = new_buf;
331 
332  string decoded_text = System.Text.Encoding.UTF8.GetString(buf); // no Unicode decode error occurs here
333  string delta_text = "";
334  if (decoded_text.Length > before_text.Length){
335  delta_text = decoded_text.Substring(before_text.Length);
336  }
337  before_text = decoded_text;
338  return delta_text;
339  }

◆ Open()

bool ailiaLLM.AiliaLLMModel.Open ( string  model_path,
uint  n_ctx = 0 
)
inline

Open a model.

Parameters
model_path	Path for model
n_ctx	Context length for model (0 is model default)
Returns
If this function is successful, it returns true, or false otherwise.
74  {
75  if (net == IntPtr.Zero){
76  return false;
77  }
78 
79  int status = 0;
80 
81  status = AiliaLLM.ailiaLLMOpenModelFile(net, model_path, n_ctx);
82  if (status != 0){
83  if (logging)
84  {
85  Debug.Log("ailiaLLMOpenModelFile failed " + status);
86  }
87  return false;
88  }
89 
90  return true;
91  }

◆ SetPrompt()

bool ailiaLLM.AiliaLLMModel.SetPrompt ( List< AiliaLLMChatMessage messages)
inline

Set prompt messages.

Parameters
messages	Prompt messages
Returns
If this function is successful, it returns true, or false otherwise.
189  {
190  List<GCHandle> handle_list = new List<GCHandle>();
191  int len = messages.Count;
192  byte[][] role_text_list = new byte [len][];
193  byte[][] conntent_text_list = new byte [len][];
194  AiliaLLM.AILIAChatMessage [] message_list = new AiliaLLM.AILIAChatMessage[len];
195  for (int i = 0; i< len; i++){
196  AiliaLLM.AILIAChatMessage message = new AiliaLLM.AILIAChatMessage();
197 
198  role_text_list[i] = System.Text.Encoding.UTF8.GetBytes(messages[i].role+"\u0000");
199  GCHandle role_handle = GCHandle.Alloc(role_text_list[i], GCHandleType.Pinned);
200  IntPtr role_input = role_handle.AddrOfPinnedObject();
201 
202  conntent_text_list[i] = System.Text.Encoding.UTF8.GetBytes(messages[i].content+"\u0000");
203  GCHandle content_handle = GCHandle.Alloc(conntent_text_list[i], GCHandleType.Pinned);
204  IntPtr content_input = content_handle.AddrOfPinnedObject();
205 
206  message.role = role_input;
207  message.content = content_input;
208  message_list[i] = message;
209 
210  handle_list.Add(role_handle);
211  handle_list.Add(content_handle);
212  }
213 
214  int size = Marshal.SizeOf(typeof(AiliaLLM.AILIAChatMessage)) * message_list.Length;
215  IntPtr ptr = Marshal.AllocHGlobal(size);
216 
217  int status = 0;
218 
219  try
220  {
221  for (int i = 0; i < message_list.Length; i++)
222  {
223  IntPtr offset = new IntPtr(ptr.ToInt64() + i * Marshal.SizeOf(typeof(AiliaLLM.AILIAChatMessage)));
224  Marshal.StructureToPtr(message_list[i], offset, false);
225  }
226 
227  status = AiliaLLM.ailiaLLMSetPrompt(net, ptr, (uint)len);
228  }
229  finally
230  {
231  Marshal.FreeHGlobal(ptr);
232  }
233 
234  for (int i = 0; i < handle_list.Count; i++){
235  handle_list[i].Free();
236  }
237 
238  context_full = false;
239  buf = new byte[0];
240  before_text = "";
241 
242  if (status != 0){
243  if (logging)
244  {
245  Debug.Log("ailiaLLMSetPrompt failed " + status);
246  }
247  if (status == AiliaLLM.AILIA_LLM_STATUS_CONTEXT_FULL){
248  context_full = true;
249  }
250  return false;
251  }
252 
253  return true;
254  }

◆ SetSamplingParam()

bool ailiaLLM.AiliaLLMModel.SetSamplingParam ( uint  top_k,
float  top_p,
float  temp,
uint  dist 
)
inline

Set the sampling parameter.

Parameters
top_k	Sampling probability value's top number, default 40
top_p	Sampling probability value range, default 0.9 (0.9 to 1.0)
temp	Temperature parameter, default 0.4
dist	Seed, default 1234
Returns
If this function is successful, it returns true, or false otherwise.
163  {
164  int status = AiliaLLM.ailiaLLMSetSamplingParams(net, top_k, top_p, temp, dist);
165  if (status != 0){
166  if (logging)
167  {
168  Debug.Log("ailiaLLMSetSamplingParams failed " + status);
169  }
170  return false;
171  }
172  return true;
173  }

The documentation for this class was generated from the following file:
ailiaLLM.AiliaLLMModel.Close
virtual void Close()
Destroys instance.
Definition: AiliaLLMModel.cs:108
ailiaLLM.AiliaLLMModel.Dispose
virtual void Dispose()
Release resources.
Definition: AiliaLLMModel.cs:123