public final class ImportRagFilesConfig extends GeneratedMessageV3 implements ImportRagFilesConfigOrBuilder
Config for importing RagFiles.
Protobuf type google.cloud.aiplatform.v1beta1.ImportRagFilesConfig
Inherited Members
com.google.protobuf.GeneratedMessageV3.<ListT>makeMutableCopy(ListT)
com.google.protobuf.GeneratedMessageV3.<ListT>makeMutableCopy(ListT,int)
com.google.protobuf.GeneratedMessageV3.<T>emptyList(java.lang.Class<T>)
com.google.protobuf.GeneratedMessageV3.internalGetMapFieldReflection(int)
Static Fields
public static final int GCS_SOURCE_FIELD_NUMBER
Field Value — Type: int
public static final int GLOBAL_MAX_EMBEDDING_REQUESTS_PER_MIN_FIELD_NUMBER
Field Value — Type: int
public static final int GOOGLE_DRIVE_SOURCE_FIELD_NUMBER
Field Value — Type: int
public static final int IMPORT_RESULT_BIGQUERY_SINK_FIELD_NUMBER
Field Value — Type: int
public static final int IMPORT_RESULT_GCS_SINK_FIELD_NUMBER
Field Value — Type: int
public static final int JIRA_SOURCE_FIELD_NUMBER
Field Value — Type: int
public static final int MAX_EMBEDDING_REQUESTS_PER_MIN_FIELD_NUMBER
Field Value — Type: int
public static final int PARTIAL_FAILURE_BIGQUERY_SINK_FIELD_NUMBER
Field Value — Type: int
public static final int PARTIAL_FAILURE_GCS_SINK_FIELD_NUMBER
Field Value — Type: int
public static final int RAG_FILE_CHUNKING_CONFIG_FIELD_NUMBER
Field Value — Type: int
public static final int RAG_FILE_PARSING_CONFIG_FIELD_NUMBER
Field Value — Type: int
public static final int RAG_FILE_TRANSFORMATION_CONFIG_FIELD_NUMBER
Field Value — Type: int
public static final int REBUILD_ANN_INDEX_FIELD_NUMBER
Field Value — Type: int
public static final int SHARE_POINT_SOURCES_FIELD_NUMBER
Field Value — Type: int
public static final int SLACK_SOURCE_FIELD_NUMBER
Field Value — Type: int
Static Methods
public static ImportRagFilesConfig getDefaultInstance()
public static final Descriptors.Descriptor getDescriptor()
public static ImportRagFilesConfig.Builder newBuilder()
public static ImportRagFilesConfig.Builder newBuilder(ImportRagFilesConfig prototype)
public static ImportRagFilesConfig parseDelimitedFrom(InputStream input)
public static ImportRagFilesConfig parseDelimitedFrom(InputStream input, ExtensionRegistryLite extensionRegistry)
public static ImportRagFilesConfig parseFrom(byte[] data)
Parameter: data — Type: byte[]
public static ImportRagFilesConfig parseFrom(byte[] data, ExtensionRegistryLite extensionRegistry)
public static ImportRagFilesConfig parseFrom(ByteString data)
public static ImportRagFilesConfig parseFrom(ByteString data, ExtensionRegistryLite extensionRegistry)
public static ImportRagFilesConfig parseFrom(CodedInputStream input)
public static ImportRagFilesConfig parseFrom(CodedInputStream input, ExtensionRegistryLite extensionRegistry)
public static ImportRagFilesConfig parseFrom(InputStream input)
public static ImportRagFilesConfig parseFrom(InputStream input, ExtensionRegistryLite extensionRegistry)
public static ImportRagFilesConfig parseFrom(ByteBuffer data)
public static ImportRagFilesConfig parseFrom(ByteBuffer data, ExtensionRegistryLite extensionRegistry)
public static Parser<ImportRagFilesConfig> parser()
Methods
public boolean equals(Object obj)
Parameter: obj — Type: Object
Overrides
public ImportRagFilesConfig getDefaultInstanceForType()
public GcsSource getGcsSource()
Google Cloud Storage location. Supports importing individual files as
well as entire Google Cloud Storage directories. Sample formats:
gs://bucket_name/my_directory/object_name/my_file.txt
gs://bucket_name/my_directory
.google.cloud.aiplatform.v1beta1.GcsSource gcs_source = 2;
Returns: GcsSource — The gcsSource.
public GcsSourceOrBuilder getGcsSourceOrBuilder()
Google Cloud Storage location. Supports importing individual files as
well as entire Google Cloud Storage directories. Sample formats:
gs://bucket_name/my_directory/object_name/my_file.txt
gs://bucket_name/my_directory
.google.cloud.aiplatform.v1beta1.GcsSource gcs_source = 2;
public int getGlobalMaxEmbeddingRequestsPerMin()
Optional. The max number of queries per minute that the indexing pipeline
job is allowed to make to the embedding model specified in the project.
Please follow the quota usage guideline of the embedding model you use to
set the value properly. If this value is not specified,
max_embedding_requests_per_min will be used by indexing pipeline job as the
global limit.
int32 global_max_embedding_requests_per_min = 18 [(.google.api.field_behavior) = OPTIONAL];
Returns: int — The globalMaxEmbeddingRequestsPerMin.
public GoogleDriveSource getGoogleDriveSource()
Google Drive location. Supports importing individual files as
well as Google Drive folders.
.google.cloud.aiplatform.v1beta1.GoogleDriveSource google_drive_source = 3;
public GoogleDriveSourceOrBuilder getGoogleDriveSourceOrBuilder()
Google Drive location. Supports importing individual files as
well as Google Drive folders.
.google.cloud.aiplatform.v1beta1.GoogleDriveSource google_drive_source = 3;
public BigQueryDestination getImportResultBigquerySink()
The BigQuery destination to write import result to. It should be a
bigquery table resource name (e.g.
"bq://projectId.bqDatasetId.bqTableId"). The dataset must exist. If the
table does not exist, it will be created with the expected schema. If the
table exists, the schema will be validated and data will be added to this
existing table.
.google.cloud.aiplatform.v1beta1.BigQueryDestination import_result_bigquery_sink = 15;
public BigQueryDestinationOrBuilder getImportResultBigquerySinkOrBuilder()
The BigQuery destination to write import result to. It should be a
bigquery table resource name (e.g.
"bq://projectId.bqDatasetId.bqTableId"). The dataset must exist. If the
table does not exist, it will be created with the expected schema. If the
table exists, the schema will be validated and data will be added to this
existing table.
.google.cloud.aiplatform.v1beta1.BigQueryDestination import_result_bigquery_sink = 15;
public GcsDestination getImportResultGcsSink()
The Cloud Storage path to write import result to.
.google.cloud.aiplatform.v1beta1.GcsDestination import_result_gcs_sink = 14;
public GcsDestinationOrBuilder getImportResultGcsSinkOrBuilder()
The Cloud Storage path to write import result to.
.google.cloud.aiplatform.v1beta1.GcsDestination import_result_gcs_sink = 14;
public ImportRagFilesConfig.ImportResultSinkCase getImportResultSinkCase()
public ImportRagFilesConfig.ImportSourceCase getImportSourceCase()
public JiraSource getJiraSource()
Jira queries with their corresponding authentication.
.google.cloud.aiplatform.v1beta1.JiraSource jira_source = 7;
Returns: JiraSource — The jiraSource.
public JiraSourceOrBuilder getJiraSourceOrBuilder()
Jira queries with their corresponding authentication.
.google.cloud.aiplatform.v1beta1.JiraSource jira_source = 7;
public int getMaxEmbeddingRequestsPerMin()
Optional. The max number of queries per minute that this job is allowed to
make to the embedding model specified on the corpus. This value is specific
to this job and not shared across other import jobs. Consult the Quotas
page on the project to set an appropriate value here.
If unspecified, a default value of 1,000 QPM would be used.
int32 max_embedding_requests_per_min = 5 [(.google.api.field_behavior) = OPTIONAL];
Returns: int — The maxEmbeddingRequestsPerMin.
public Parser<ImportRagFilesConfig> getParserForType()
Overrides
public BigQueryDestination getPartialFailureBigquerySink()
Deprecated. google.cloud.aiplatform.v1beta1.ImportRagFilesConfig.partial_failure_bigquery_sink
is deprecated. See google/cloud/aiplatform/v1beta1/vertex_rag_data.proto;l=614
The BigQuery destination to write partial failures to. It should be a
bigquery table resource name (e.g.
"bq://projectId.bqDatasetId.bqTableId"). The dataset must exist. If the
table does not exist, it will be created with the expected schema. If the
table exists, the schema will be validated and data will be added to this
existing table.
Deprecated. Prefer to use import_result_bq_sink.
.google.cloud.aiplatform.v1beta1.BigQueryDestination partial_failure_bigquery_sink = 12 [deprecated = true];
public BigQueryDestinationOrBuilder getPartialFailureBigquerySinkOrBuilder()
The BigQuery destination to write partial failures to. It should be a
bigquery table resource name (e.g.
"bq://projectId.bqDatasetId.bqTableId"). The dataset must exist. If the
table does not exist, it will be created with the expected schema. If the
table exists, the schema will be validated and data will be added to this
existing table.
Deprecated. Prefer to use import_result_bq_sink.
.google.cloud.aiplatform.v1beta1.BigQueryDestination partial_failure_bigquery_sink = 12 [deprecated = true];
public GcsDestination getPartialFailureGcsSink()
Deprecated. google.cloud.aiplatform.v1beta1.ImportRagFilesConfig.partial_failure_gcs_sink is
deprecated. See google/cloud/aiplatform/v1beta1/vertex_rag_data.proto;l=605
The Cloud Storage path to write partial failures to.
Deprecated. Prefer to use import_result_gcs_sink.
.google.cloud.aiplatform.v1beta1.GcsDestination partial_failure_gcs_sink = 11 [deprecated = true];
public GcsDestinationOrBuilder getPartialFailureGcsSinkOrBuilder()
The Cloud Storage path to write partial failures to.
Deprecated. Prefer to use import_result_gcs_sink.
.google.cloud.aiplatform.v1beta1.GcsDestination partial_failure_gcs_sink = 11 [deprecated = true];
public ImportRagFilesConfig.PartialFailureSinkCase getPartialFailureSinkCase()
public RagFileChunkingConfig getRagFileChunkingConfig()
Deprecated. google.cloud.aiplatform.v1beta1.ImportRagFilesConfig.rag_file_chunking_config is
deprecated. See google/cloud/aiplatform/v1beta1/vertex_rag_data.proto;l=633
Specifies the size and overlap of chunks after importing RagFiles.
.google.cloud.aiplatform.v1beta1.RagFileChunkingConfig rag_file_chunking_config = 4 [deprecated = true];
public RagFileChunkingConfigOrBuilder getRagFileChunkingConfigOrBuilder()
Specifies the size and overlap of chunks after importing RagFiles.
.google.cloud.aiplatform.v1beta1.RagFileChunkingConfig rag_file_chunking_config = 4 [deprecated = true];
public RagFileParsingConfig getRagFileParsingConfig()
Optional. Specifies the parsing config for RagFiles.
RAG will use the default parser if this field is not set.
.google.cloud.aiplatform.v1beta1.RagFileParsingConfig rag_file_parsing_config = 8 [(.google.api.field_behavior) = OPTIONAL];
public RagFileParsingConfigOrBuilder getRagFileParsingConfigOrBuilder()
Optional. Specifies the parsing config for RagFiles.
RAG will use the default parser if this field is not set.
.google.cloud.aiplatform.v1beta1.RagFileParsingConfig rag_file_parsing_config = 8 [(.google.api.field_behavior) = OPTIONAL];
public RagFileTransformationConfig getRagFileTransformationConfig()
Specifies the transformation config for RagFiles.
.google.cloud.aiplatform.v1beta1.RagFileTransformationConfig rag_file_transformation_config = 16;
public RagFileTransformationConfigOrBuilder getRagFileTransformationConfigOrBuilder()
Specifies the transformation config for RagFiles.
.google.cloud.aiplatform.v1beta1.RagFileTransformationConfig rag_file_transformation_config = 16;
public boolean getRebuildAnnIndex()
Rebuilds the ANN index to optimize for recall on the imported data.
Only applicable for RagCorpora running on RagManagedDb with
retrieval_strategy
set to ANN
. The rebuild will be performed using the
existing ANN config set on the RagCorpus. To change the ANN config, please
use the UpdateRagCorpus API.
Default is false, i.e., index is not rebuilt.
bool rebuild_ann_index = 19;
Returns: boolean — The rebuildAnnIndex.
public int getSerializedSize()
Returns: int
Overrides
public SharePointSources getSharePointSources()
SharePoint sources.
.google.cloud.aiplatform.v1beta1.SharePointSources share_point_sources = 13;
public SharePointSourcesOrBuilder getSharePointSourcesOrBuilder()
SharePoint sources.
.google.cloud.aiplatform.v1beta1.SharePointSources share_point_sources = 13;
public SlackSource getSlackSource()
Slack channels with their corresponding access tokens.
.google.cloud.aiplatform.v1beta1.SlackSource slack_source = 6;
public SlackSourceOrBuilder getSlackSourceOrBuilder()
Slack channels with their corresponding access tokens.
.google.cloud.aiplatform.v1beta1.SlackSource slack_source = 6;
public boolean hasGcsSource()
Google Cloud Storage location. Supports importing individual files as
well as entire Google Cloud Storage directories. Sample formats:
gs://bucket_name/my_directory/object_name/my_file.txt
gs://bucket_name/my_directory
.google.cloud.aiplatform.v1beta1.GcsSource gcs_source = 2;
Returns: boolean — Whether the gcsSource field is set.
public boolean hasGoogleDriveSource()
Google Drive location. Supports importing individual files as
well as Google Drive folders.
.google.cloud.aiplatform.v1beta1.GoogleDriveSource google_drive_source = 3;
Returns: boolean — Whether the googleDriveSource field is set.
public boolean hasImportResultBigquerySink()
The BigQuery destination to write import result to. It should be a
bigquery table resource name (e.g.
"bq://projectId.bqDatasetId.bqTableId"). The dataset must exist. If the
table does not exist, it will be created with the expected schema. If the
table exists, the schema will be validated and data will be added to this
existing table.
.google.cloud.aiplatform.v1beta1.BigQueryDestination import_result_bigquery_sink = 15;
Returns: boolean — Whether the importResultBigquerySink field is set.
public boolean hasImportResultGcsSink()
The Cloud Storage path to write import result to.
.google.cloud.aiplatform.v1beta1.GcsDestination import_result_gcs_sink = 14;
Returns: boolean — Whether the importResultGcsSink field is set.
public boolean hasJiraSource()
Jira queries with their corresponding authentication.
.google.cloud.aiplatform.v1beta1.JiraSource jira_source = 7;
Returns: boolean — Whether the jiraSource field is set.
public boolean hasPartialFailureBigquerySink()
Deprecated. google.cloud.aiplatform.v1beta1.ImportRagFilesConfig.partial_failure_bigquery_sink
is deprecated. See google/cloud/aiplatform/v1beta1/vertex_rag_data.proto;l=614
The BigQuery destination to write partial failures to. It should be a
bigquery table resource name (e.g.
"bq://projectId.bqDatasetId.bqTableId"). The dataset must exist. If the
table does not exist, it will be created with the expected schema. If the
table exists, the schema will be validated and data will be added to this
existing table.
Deprecated. Prefer to use import_result_bq_sink.
.google.cloud.aiplatform.v1beta1.BigQueryDestination partial_failure_bigquery_sink = 12 [deprecated = true];
Returns: boolean — Whether the partialFailureBigquerySink field is set.
public boolean hasPartialFailureGcsSink()
Deprecated. google.cloud.aiplatform.v1beta1.ImportRagFilesConfig.partial_failure_gcs_sink is
deprecated. See google/cloud/aiplatform/v1beta1/vertex_rag_data.proto;l=605
The Cloud Storage path to write partial failures to.
Deprecated. Prefer to use import_result_gcs_sink.
.google.cloud.aiplatform.v1beta1.GcsDestination partial_failure_gcs_sink = 11 [deprecated = true];
Returns: boolean — Whether the partialFailureGcsSink field is set.
public boolean hasRagFileChunkingConfig()
Deprecated. google.cloud.aiplatform.v1beta1.ImportRagFilesConfig.rag_file_chunking_config is
deprecated. See google/cloud/aiplatform/v1beta1/vertex_rag_data.proto;l=633
Specifies the size and overlap of chunks after importing RagFiles.
.google.cloud.aiplatform.v1beta1.RagFileChunkingConfig rag_file_chunking_config = 4 [deprecated = true];
Returns: boolean — Whether the ragFileChunkingConfig field is set.
public boolean hasRagFileParsingConfig()
Optional. Specifies the parsing config for RagFiles.
RAG will use the default parser if this field is not set.
.google.cloud.aiplatform.v1beta1.RagFileParsingConfig rag_file_parsing_config = 8 [(.google.api.field_behavior) = OPTIONAL];
Returns: boolean — Whether the ragFileParsingConfig field is set.
public boolean hasRagFileTransformationConfig()
Specifies the transformation config for RagFiles.
.google.cloud.aiplatform.v1beta1.RagFileTransformationConfig rag_file_transformation_config = 16;
Returns: boolean — Whether the ragFileTransformationConfig field is set.
public boolean hasSharePointSources()
SharePoint sources.
.google.cloud.aiplatform.v1beta1.SharePointSources share_point_sources = 13;
Returns: boolean — Whether the sharePointSources field is set.
public boolean hasSlackSource()
Slack channels with their corresponding access tokens.
.google.cloud.aiplatform.v1beta1.SlackSource slack_source = 6;
Returns: boolean — Whether the slackSource field is set.
public int hashCode()
Returns: int
Overrides
protected GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable()
Overrides
public final boolean isInitialized()
Overrides
public ImportRagFilesConfig.Builder newBuilderForType()
protected ImportRagFilesConfig.Builder newBuilderForType(GeneratedMessageV3.BuilderParent parent)
Overrides
protected Object newInstance(GeneratedMessageV3.UnusedPrivateParameter unused)
Returns: Object
Overrides
public ImportRagFilesConfig.Builder toBuilder()
public void writeTo(CodedOutputStream output)
Overrides