Class: HuggingFaceLLM
Unified language model interface
Extends
Constructors
new HuggingFaceLLM()
new HuggingFaceLLM(init?): HuggingFaceLLM
Parameters
• init?: HFLLMConfig
Returns
Overrides
Defined in
packages/providers/huggingface/dist/index.d.ts:51
Properties
contextWindow
contextWindow: number
Defined in
packages/providers/huggingface/dist/index.d.ts:48
maxTokens?
optional maxTokens: number
Defined in
packages/providers/huggingface/dist/index.d.ts:47
modelName
modelName: string
Defined in
packages/providers/huggingface/dist/index.d.ts:43
temperature
temperature: number
Defined in
packages/providers/huggingface/dist/index.d.ts:45
tokenizerName
tokenizerName: string
Defined in
packages/providers/huggingface/dist/index.d.ts:44
topP
topP: number
Defined in
packages/providers/huggingface/dist/index.d.ts:46
Accessors
metadata
get metadata(): LLMMetadata
Returns
Overrides
Defined in
packages/providers/huggingface/dist/index.d.ts:52
Methods
chat()
chat(params)
chat(params): Promise&lt;AsyncIterable&lt;ChatResponseChunk, any, any&gt;&gt;
Get a chat response from the LLM
Parameters
• params: LLMChatParamsStreaming&lt;object, object&gt;
Returns
Promise&lt;AsyncIterable&lt;ChatResponseChunk, any, any&gt;&gt;
Overrides
Defined in
packages/providers/huggingface/dist/index.d.ts:55
chat(params)
chat(params): Promise&lt;ChatResponse&lt;object&gt;&gt;
Get a chat response from the LLM
Parameters
• params: LLMChatParamsNonStreaming&lt;object, object&gt;
Returns
Promise&lt;ChatResponse&lt;object&gt;&gt;
Overrides
Defined in
packages/providers/huggingface/dist/index.d.ts:56
complete()
complete(params)
complete(params): Promise&lt;AsyncIterable&lt;CompletionResponse, any, any&gt;&gt;
Get a prompt completion from the LLM
Parameters
• params: LLMCompletionParamsStreaming
Returns
Promise&lt;AsyncIterable&lt;CompletionResponse, any, any&gt;&gt;
Inherited from
Defined in
packages/core/llms/dist/index.d.ts:168
complete(params)
complete(params): Promise&lt;CompletionResponse&gt;
Get a prompt completion from the LLM
Parameters
• params: LLMCompletionParamsNonStreaming
Returns
Promise&lt;CompletionResponse&gt;
Inherited from
Defined in
packages/core/llms/dist/index.d.ts:169
getModel()
getModel(): Promise&lt;PreTrainedModel&gt;
Returns
Promise&lt;PreTrainedModel&gt;
Defined in
packages/providers/huggingface/dist/index.d.ts:54
getTokenizer()
getTokenizer(): Promise&lt;PreTrainedTokenizer&gt;
Returns
Promise&lt;PreTrainedTokenizer&gt;
Defined in
packages/providers/huggingface/dist/index.d.ts:53
nonStreamChat()
protected nonStreamChat(params): Promise&lt;ChatResponse&lt;object&gt;&gt;
Parameters
• params: LLMChatParamsNonStreaming&lt;object, object&gt;
Returns
Promise&lt;ChatResponse&lt;object&gt;&gt;
Defined in
packages/providers/huggingface/dist/index.d.ts:57
streamChat()
protected streamChat(params): AsyncIterable&lt;ChatResponseChunk, any, any&gt;
Parameters
• params: LLMChatParamsStreaming&lt;object, object&gt;
Returns
AsyncIterable&lt;ChatResponseChunk, any, any&gt;
Defined in
packages/providers/huggingface/dist/index.d.ts:58