- 0.89.0 (latest)
- 0.87.0
- 0.86.0
- 0.85.0
- 0.84.0
- 0.83.0
- 0.81.0
- 0.79.0
- 0.78.0
- 0.75.0
- 0.74.0
- 0.73.0
- 0.71.0
- 0.70.0
- 0.69.0
- 0.68.0
- 0.67.0
- 0.66.0
- 0.65.0
- 0.64.0
- 0.63.0
- 0.62.0
- 0.60.0
- 0.59.0
- 0.58.0
- 0.57.0
- 0.56.0
- 0.55.0
- 0.54.0
- 0.53.0
- 0.52.0
- 0.51.0
- 0.50.0
- 0.48.0
- 0.47.0
- 0.46.0
- 0.45.0
- 0.44.0
- 0.43.0
- 0.42.0
- 0.41.0
- 0.40.0
- 0.39.0
- 0.38.0
- 0.35.0
- 0.34.0
- 0.33.0
- 0.32.0
- 0.31.0
- 0.30.0
- 0.29.0
- 0.28.0
- 0.27.0
- 0.26.0
- 0.25.0
- 0.24.0
- 0.23.0
- 0.22.0
- 0.20.0
- 0.19.0
- 0.18.0
- 0.17.0
- 0.16.0
- 0.15.0
- 0.14.7
- 0.13.1
- 0.12.1
- 0.11.5
public static interface LlmCall.TokenCountOrBuilder extends MessageOrBuilder

Implements: MessageOrBuilder

Methods
getConversationContextTokenCount()
public abstract long getConversationContextTokenCount()

The number of tokens used for the conversation history in the prompt.
int64 conversation_context_token_count = 3;
Returns

| Type | Description |
|---|---|
| long | The conversationContextTokenCount. |
getExampleTokenCount()
public abstract long getExampleTokenCount()

The number of tokens used for the retrieved examples in the prompt.
int64 example_token_count = 4;
Returns

| Type | Description |
|---|---|
| long | The exampleTokenCount. |
getTotalInputTokenCount()
public abstract long getTotalInputTokenCount()

The total number of tokens used for the input to the LLM call.
int64 total_input_token_count = 1;
Returns

| Type | Description |
|---|---|
| long | The totalInputTokenCount. |
getTotalOutputTokenCount()
public abstract long getTotalOutputTokenCount()

The total number of tokens used for the output of the LLM call.
int64 total_output_token_count = 5;
Returns

| Type | Description |
|---|---|
| long | The totalOutputTokenCount. |