StyledLines from testedlines.com: C# docs 1.0.1
A GPT-2-based text stylization LLM, wrapped with Llama.cpp in C# to ensure compatibility across platforms, including iOS and WebGL. The model is designed to transform generic text into stylized, game- or user-tailored dialogue.
LlamaLibrary.llama_sampling_params Class Reference

Public Member Functions

virtual void Dispose ()
 
 llama_sampling_params ()
 

Protected Attributes

bool swigCMemOwn
 

Package Functions

 llama_sampling_params (global::System.IntPtr cPtr, bool cMemoryOwn)
 

Static Package Functions

static global::System.Runtime.InteropServices.HandleRef getCPtr (llama_sampling_params obj)
 

Properties

string cfg_negative_prompt [get, set]
 
float cfg_scale [get, set]
 
float dynatemp_exponent [get, set]
 
float dynatemp_range [get, set]
 
string grammar [get, set]
 
SWIGTYPE_p_std__unordered_mapT_llama_token_float_t logit_bias [get, set]
 
int min_keep [get, set]
 
float min_p [get, set]
 
int mirostat [get, set]
 
float mirostat_eta [get, set]
 
float mirostat_tau [get, set]
 
int n_prev [get, set]
 
int n_probs [get, set]
 
bool penalize_nl [get, set]
 
float penalty_freq [get, set]
 
int penalty_last_n [get, set]
 
float penalty_present [get, set]
 
SWIGTYPE_p_std__vectorT_llama_token_t penalty_prompt_tokens [get, set]
 
float penalty_repeat [get, set]
 
SWIGTYPE_p_std__vectorT_llama_sampler_type_t samplers_sequence [get, set]
 
uint seed [get, set]
 
float temp [get, set]
 
float tfs_z [get, set]
 
int top_k [get, set]
 
float top_p [get, set]
 
float typical_p [get, set]
 
bool use_penalty_prompt_tokens [get, set]
 

Private Member Functions

 ~llama_sampling_params ()
 

Private Attributes

global::System.Runtime.InteropServices.HandleRef swigCPtr
 

Constructor & Destructor Documentation

◆ llama_sampling_params() [1/2]

LlamaLibrary.llama_sampling_params.llama_sampling_params ( global::System.IntPtr cPtr, bool cMemoryOwn )
package

◆ ~llama_sampling_params()

LlamaLibrary.llama_sampling_params.~llama_sampling_params ( )
private

References LlamaLibrary.llama_sampling_params.Dispose().

◆ llama_sampling_params() [2/2]

LlamaLibrary.llama_sampling_params.llama_sampling_params ( )
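
Usage note: of the two constructors, only the parameterless one is intended for application code; the package-visible (IntPtr, bool) overload is SWIG plumbing that wraps a pointer coming from native code. A minimal sketch, assuming the LlamaLibrary assembly is referenced; the comments describe typical SWIG ownership behaviour, not additional guarantees of this wrapper:

    using LlamaLibrary;

    // In typical SWIG-generated wrappers the parameterless constructor allocates
    // a new native sampling-parameters struct owned by the proxy (swigCMemOwn),
    // so the finalizer calls Dispose() if the object is never disposed explicitly.
    var samplingParams = new llama_sampling_params();

    // llama_sampling_params(global::System.IntPtr cPtr, bool cMemoryOwn) is used
    // internally by the generated bindings and should not normally be called
    // from user code.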

Member Function Documentation

◆ Dispose()

virtual void LlamaLibrary.llama_sampling_params.Dispose ( )
virtual

References LlamaLibrary.libllama_libPINVOKE.delete_llama_sampling_params(), LlamaLibrary.llama_sampling_params.swigCMemOwn, and LlamaLibrary.llama_sampling_params.swigCPtr.

Referenced by LlamaLibrary.llama_sampling_params.~llama_sampling_params().

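Since Dispose() frees the underlying native struct through libllama_libPINVOKE.delete_llama_sampling_params() when the proxy owns the memory, it is worth disposing deterministically rather than relying on the finalizer. A hedged sketch; whether the class also implements IDisposable (and therefore supports a using statement) depends on the SWIG configuration, so Dispose() is called explicitly here:

    var samplingParams = new llama_sampling_params();
    try
    {
        // ... configure the parameters and hand them to the sampling API ...
    }
    finally
    {
        // Releases the native struct immediately instead of waiting for the finalizer.
        samplingParams.Dispose();
    }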

◆ getCPtr()

static global::System.Runtime.InteropServices.HandleRef LlamaLibrary.llama_sampling_params.getCPtr ( llama_sampling_params obj )
static package

Member Data Documentation

◆ swigCMemOwn

bool LlamaLibrary.llama_sampling_params.swigCMemOwn
protected

◆ swigCPtr

global::System.Runtime.InteropServices.HandleRef LlamaLibrary.llama_sampling_params.swigCPtr
private

Property Documentation

◆ cfg_negative_prompt

string LlamaLibrary.llama_sampling_params.cfg_negative_prompt
getset

◆ cfg_scale

float LlamaLibrary.llama_sampling_params.cfg_scale
getset

◆ dynatemp_exponent

float LlamaLibrary.llama_sampling_params.dynatemp_exponent
getset

◆ dynatemp_range

float LlamaLibrary.llama_sampling_params.dynatemp_range
getset

◆ grammar

string LlamaLibrary.llama_sampling_params.grammar
getset

◆ logit_bias

SWIGTYPE_p_std__unordered_mapT_llama_token_float_t LlamaLibrary.llama_sampling_params.logit_bias
getset

◆ min_keep

int LlamaLibrary.llama_sampling_params.min_keep
getset

◆ min_p

float LlamaLibrary.llama_sampling_params.min_p
getset

◆ mirostat

int LlamaLibrary.llama_sampling_params.mirostat
getset

◆ mirostat_eta

float LlamaLibrary.llama_sampling_params.mirostat_eta
getset

◆ mirostat_tau

float LlamaLibrary.llama_sampling_params.mirostat_tau
getset

◆ n_prev

int LlamaLibrary.llama_sampling_params.n_prev
getset

◆ n_probs

int LlamaLibrary.llama_sampling_params.n_probs
getset

◆ penalize_nl

bool LlamaLibrary.llama_sampling_params.penalize_nl
getset

◆ penalty_freq

float LlamaLibrary.llama_sampling_params.penalty_freq
getset

◆ penalty_last_n

int LlamaLibrary.llama_sampling_params.penalty_last_n
getset

◆ penalty_present

float LlamaLibrary.llama_sampling_params.penalty_present
getset

◆ penalty_prompt_tokens

SWIGTYPE_p_std__vectorT_llama_token_t LlamaLibrary.llama_sampling_params.penalty_prompt_tokens
getset

◆ penalty_repeat

float LlamaLibrary.llama_sampling_params.penalty_repeat
getset

◆ samplers_sequence

SWIGTYPE_p_std__vectorT_llama_sampler_type_t LlamaLibrary.llama_sampling_params.samplers_sequence
getset

◆ seed

uint LlamaLibrary.llama_sampling_params.seed
getset

◆ temp

float LlamaLibrary.llama_sampling_params.temp
getset

◆ tfs_z

float LlamaLibrary.llama_sampling_params.tfs_z
getset

◆ top_k

int LlamaLibrary.llama_sampling_params.top_k
getset

◆ top_p

float LlamaLibrary.llama_sampling_params.top_p
getset

◆ typical_p

float LlamaLibrary.llama_sampling_params.typical_p
getset

◆ use_penalty_prompt_tokens

bool LlamaLibrary.llama_sampling_params.use_penalty_prompt_tokens
getset
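
The properties above mirror llama.cpp's sampling parameters. An illustrative sketch of setting a few common ones; the values are examples only, not the wrapper's defaults, and the opaque SWIGTYPE_p_std__* properties (logit_bias, penalty_prompt_tokens, samplers_sequence) are left untouched here because they require SWIG wrapper objects to populate:

    var samplingParams = new llama_sampling_params();

    samplingParams.seed = 42u;             // fixed seed for reproducible sampling
    samplingParams.temp = 0.8f;            // softmax temperature
    samplingParams.top_k = 40;             // keep only the 40 most likely tokens
    samplingParams.top_p = 0.95f;          // nucleus (top-p) threshold
    samplingParams.min_p = 0.05f;          // minimum relative token probability
    samplingParams.penalty_repeat = 1.1f;  // repetition penalty strength
    samplingParams.penalty_last_n = 64;    // how many recent tokens the penalty covers
    samplingParams.mirostat = 0;           // 0 disables Mirostat sampling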
