Prompt(
prompt_data: typing.Optional[
typing.Union[
str,
vertexai.generative_models._generative_models.Image,
vertexai.generative_models._generative_models.Part,
typing.List[
typing.Union[
str,
vertexai.generative_models._generative_models.Image,
vertexai.generative_models._generative_models.Part,
]
],
]
] = None,
*,
variables: typing.Optional[
typing.List[
typing.Dict[
str,
typing.Union[
str,
vertexai.generative_models._generative_models.Image,
vertexai.generative_models._generative_models.Part,
typing.List[
typing.Union[
str,
vertexai.generative_models._generative_models.Image,
vertexai.generative_models._generative_models.Part,
]
],
],
]
]
] = None,
prompt_name: typing.Optional[str] = None,
generation_config: typing.Optional[
vertexai.generative_models._generative_models.GenerationConfig
] = None,
model_name: typing.Optional[str] = None,
safety_settings: typing.Optional[
vertexai.generative_models._generative_models.SafetySetting
] = None,
system_instruction: typing.Optional[
typing.Union[
str,
vertexai.generative_models._generative_models.Image,
vertexai.generative_models._generative_models.Part,
typing.List[
typing.Union[
str,
vertexai.generative_models._generative_models.Image,
vertexai.generative_models._generative_models.Part,
]
],
]
] = None,
tools: typing.Optional[
typing.List[vertexai.generative_models._generative_models.Tool]
] = None,
tool_config: typing.Optional[
vertexai.generative_models._generative_models.ToolConfig
] = None
)
A prompt which may be a template with variables.

The Prompt class allows users to define a template string with variables represented in curly braces, e.g. {variable}. The variable name must be a valid Python variable name (no spaces, must start with a letter). These placeholders can be replaced with specific values using the assemble_contents method, providing flexibility in generating dynamic prompts.
Usage: Generate content from a single set of variables:
```
prompt = Prompt(
    prompt_data="Hello, {name}! Today is {day}. How are you?",
    variables=[{"name": "Alice", "day": "Monday"}],
    generation_config=GenerationConfig(
        temperature=0.1,
        top_p=0.95,
        top_k=20,
        candidate_count=1,
        max_output_tokens=100,
    ),
    model_name="gemini-1.0-pro-002",
    safety_settings=[SafetySetting(
        category=SafetySetting.HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT,
        threshold=SafetySetting.HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE,
        method=SafetySetting.HarmBlockMethod.SEVERITY,
    )],
    system_instruction="Please answer in a short sentence.",
)

# Generate content using the assembled prompt.
prompt.generate_content(
    contents=prompt.assemble_contents(**prompt.variables[0])
)
```
Generate content with multiple sets of variables:
```
prompt = Prompt(
    prompt_data="Hello, {name}! Today is {day}. How are you?",
    variables=[
        {"name": "Alice", "day": "Monday"},
        {"name": "Bob", "day": "Tuesday"},
    ],
    generation_config=GenerationConfig(
        temperature=0.1,
        top_p=0.95,
        top_k=20,
        candidate_count=1,
        max_output_tokens=100,
    ),
    model_name="gemini-1.0-pro-002",
    safety_settings=[SafetySetting(
        category=SafetySetting.HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT,
        threshold=SafetySetting.HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE,
        method=SafetySetting.HarmBlockMethod.SEVERITY,
    )],
    system_instruction="Please answer in a short sentence.",
)

# Generate content using the assembled prompt for each variable set.
for i in range(len(prompt.variables)):
    prompt.generate_content(
        contents=prompt.assemble_contents(**prompt.variables[i])
    )
```
Methods
Prompt
Prompt(
prompt_data: typing.Optional[
typing.Union[
str,
vertexai.generative_models._generative_models.Image,
vertexai.generative_models._generative_models.Part,
typing.List[
typing.Union[
str,
vertexai.generative_models._generative_models.Image,
vertexai.generative_models._generative_models.Part,
]
],
]
] = None,
*,
variables: typing.Optional[
typing.List[
typing.Dict[
str,
typing.Union[
str,
vertexai.generative_models._generative_models.Image,
vertexai.generative_models._generative_models.Part,
typing.List[
typing.Union[
str,
vertexai.generative_models._generative_models.Image,
vertexai.generative_models._generative_models.Part,
]
],
],
]
]
] = None,
prompt_name: typing.Optional[str] = None,
generation_config: typing.Optional[
vertexai.generative_models._generative_models.GenerationConfig
] = None,
model_name: typing.Optional[str] = None,
safety_settings: typing.Optional[
vertexai.generative_models._generative_models.SafetySetting
] = None,
system_instruction: typing.Optional[
typing.Union[
str,
vertexai.generative_models._generative_models.Image,
vertexai.generative_models._generative_models.Part,
typing.List[
typing.Union[
str,
vertexai.generative_models._generative_models.Image,
vertexai.generative_models._generative_models.Part,
]
],
]
] = None,
tools: typing.Optional[
typing.List[vertexai.generative_models._generative_models.Tool]
] = None,
tool_config: typing.Optional[
vertexai.generative_models._generative_models.ToolConfig
] = None
)
Initializes the Prompt with the given prompt data and variables.
__repr__
__repr__() -> str
Returns a string representation of the unassembled prompt.
__str__
__str__() -> str
Returns the prompt data as a string, without any variables replaced.
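A minimal sketch of both dunder methods, assuming Prompt is imported from vertexai.preview.prompts (the exact repr formatting may differ between SDK versions):
```
from vertexai.preview.prompts import Prompt  # assumed import path

prompt = Prompt(prompt_data="Hello, {name}!")

# str() returns the raw template; the {name} placeholder is not replaced.
print(str(prompt))   # Hello, {name}!

# repr() describes the unassembled prompt and its saved configs.
print(repr(prompt))
```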
assemble_contents
assemble_contents(
**variables_dict: typing.Union[
str,
vertexai.generative_models._generative_models.Image,
vertexai.generative_models._generative_models.Part,
typing.List[
typing.Union[
str,
vertexai.generative_models._generative_models.Image,
vertexai.generative_models._generative_models.Part,
]
],
]
) -> typing.List[vertexai.generative_models._generative_models.Content]
Returns the prompt data as a List[Content], assembled with variables if applicable. The result can be passed to model.generate_content to make API calls.
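As a hedged illustration (the template and variable names here are hypothetical), keyword arguments passed to assemble_contents replace the matching placeholders in prompt_data:
```
from vertexai.preview.prompts import Prompt  # assumed import path

prompt = Prompt(prompt_data="Translate '{text}' into {language}.")

# Each keyword argument fills the placeholder with the same name.
contents = prompt.assemble_contents(text="Good morning", language="French")
# contents is a List[Content] that can be passed to generate_content.
```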
generate_content
generate_content(
contents: typing.Union[
typing.List[vertexai.generative_models._generative_models.Content],
typing.List[typing.Dict[str, typing.Any]],
str,
vertexai.generative_models._generative_models.Image,
vertexai.generative_models._generative_models.Part,
typing.List[
typing.Union[
str,
vertexai.generative_models._generative_models.Image,
vertexai.generative_models._generative_models.Part,
]
],
],
*,
generation_config: typing.Optional[
typing.Union[
vertexai.generative_models._generative_models.GenerationConfig,
typing.Dict[str, typing.Any],
]
] = None,
safety_settings: typing.Optional[
typing.Union[
typing.List[vertexai.generative_models._generative_models.SafetySetting],
typing.Dict[
google.cloud.aiplatform_v1beta1.types.content.HarmCategory,
google.cloud.aiplatform_v1beta1.types.content.SafetySetting.HarmBlockThreshold,
],
]
] = None,
model_name: typing.Optional[str] = None,
tools: typing.Optional[
typing.List[vertexai.generative_models._generative_models.Tool]
] = None,
tool_config: typing.Optional[
vertexai.generative_models._generative_models.ToolConfig
] = None,
stream: bool = False,
system_instruction: typing.Optional[
typing.Union[
str,
vertexai.generative_models._generative_models.Image,
vertexai.generative_models._generative_models.Part,
typing.List[
typing.Union[
str,
vertexai.generative_models._generative_models.Image,
vertexai.generative_models._generative_models.Part,
]
],
]
] = None
) -> typing.Union[
vertexai.generative_models._generative_models.GenerationResponse,
typing.Iterable[vertexai.generative_models._generative_models.GenerationResponse],
]
Generates content using the saved Prompt configs.
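A minimal sketch, assuming the prompt was constructed as in the usage examples above and that per-call arguments take precedence over the configs saved on the Prompt (verify this override behavior against your SDK version):
```
# Stream the response; stream=True yields an iterable of partial
# GenerationResponse chunks instead of a single response object.
responses = prompt.generate_content(
    contents=prompt.assemble_contents(**prompt.variables[0]),
    generation_config={"temperature": 0.0, "max_output_tokens": 50},
    stream=True,
)
for chunk in responses:
    print(chunk.text, end="")
```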
get_unassembled_prompt_data
get_unassembled_prompt_data() -> typing.Union[
str,
vertexai.generative_models._generative_models.Image,
vertexai.generative_models._generative_models.Part,
typing.List[
typing.Union[
str,
vertexai.generative_models._generative_models.Image,
vertexai.generative_models._generative_models.Part,
]
],
]
Returns the prompt data, without any variables replaced.
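For example (a small hypothetical sketch), the original template comes back untouched, placeholders included:
```
from vertexai.preview.prompts import Prompt  # assumed import path

prompt = Prompt(prompt_data="Hello, {name}! Today is {day}.")

# The template is returned exactly as provided; {name} and {day} remain.
template = prompt.get_unassembled_prompt_data()
print(template)  # Hello, {name}! Today is {day}.
```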