public final class InputDataConfig extends com.google.protobuf.GeneratedMessageV3 implements InputDataConfigOrBuilder
Specifies Vertex AI owned input data to be used for training, and possibly evaluating, the Model.

Protobuf type: `google.cloud.aiplatform.v1beta1.InputDataConfig`

| Modifier and Type | Class and Description |
|---|---|
static class |
InputDataConfig.Builder
Specifies Vertex AI owned input data to be used for training, and
possibly evaluating, the Model.
|
static class |
InputDataConfig.DestinationCase |
static class |
InputDataConfig.SplitCase |
Nested classes/interfaces inherited from class com.google.protobuf.GeneratedMessageV3: com.google.protobuf.GeneratedMessageV3.BuilderParent, com.google.protobuf.GeneratedMessageV3.ExtendableBuilder<MessageType extends com.google.protobuf.GeneratedMessageV3.ExtendableMessage,BuilderType extends com.google.protobuf.GeneratedMessageV3.ExtendableBuilder<MessageType,BuilderType>>, com.google.protobuf.GeneratedMessageV3.ExtendableMessage<MessageType extends com.google.protobuf.GeneratedMessageV3.ExtendableMessage>, com.google.protobuf.GeneratedMessageV3.ExtendableMessageOrBuilder<MessageType extends com.google.protobuf.GeneratedMessageV3.ExtendableMessage>, com.google.protobuf.GeneratedMessageV3.FieldAccessorTable, com.google.protobuf.GeneratedMessageV3.UnusedPrivateParameter

| Modifier and Type | Field and Description |
|---|---|
static int |
ANNOTATION_SCHEMA_URI_FIELD_NUMBER |
static int |
ANNOTATIONS_FILTER_FIELD_NUMBER |
static int |
BIGQUERY_DESTINATION_FIELD_NUMBER |
static int |
DATASET_ID_FIELD_NUMBER |
static int |
FILTER_SPLIT_FIELD_NUMBER |
static int |
FRACTION_SPLIT_FIELD_NUMBER |
static int |
GCS_DESTINATION_FIELD_NUMBER |
static int |
PREDEFINED_SPLIT_FIELD_NUMBER |
static int |
SAVED_QUERY_ID_FIELD_NUMBER |
static int |
STRATIFIED_SPLIT_FIELD_NUMBER |
static int |
TIMESTAMP_SPLIT_FIELD_NUMBER |
| Modifier and Type | Method and Description |
|---|---|
boolean |
equals(Object obj) |
String |
getAnnotationSchemaUri()
Applicable only to custom training with Datasets that have DataItems and
Annotations.
|
com.google.protobuf.ByteString |
getAnnotationSchemaUriBytes()
Applicable only to custom training with Datasets that have DataItems and
Annotations.
|
String |
getAnnotationsFilter()
Applicable only to Datasets that have DataItems and Annotations.
|
com.google.protobuf.ByteString |
getAnnotationsFilterBytes()
Applicable only to Datasets that have DataItems and Annotations.
|
BigQueryDestination |
getBigqueryDestination()
Only applicable to custom training with tabular Dataset with BigQuery
source.
|
BigQueryDestinationOrBuilder |
getBigqueryDestinationOrBuilder()
Only applicable to custom training with tabular Dataset with BigQuery
source.
|
String |
getDatasetId()
Required.
|
com.google.protobuf.ByteString |
getDatasetIdBytes()
Required.
|
static InputDataConfig |
getDefaultInstance() |
InputDataConfig |
getDefaultInstanceForType() |
static com.google.protobuf.Descriptors.Descriptor |
getDescriptor() |
InputDataConfig.DestinationCase |
getDestinationCase() |
FilterSplit |
getFilterSplit()
Split based on the provided filters for each set.
|
FilterSplitOrBuilder |
getFilterSplitOrBuilder()
Split based on the provided filters for each set.
|
FractionSplit |
getFractionSplit()
Split based on fractions defining the size of each set.
|
FractionSplitOrBuilder |
getFractionSplitOrBuilder()
Split based on fractions defining the size of each set.
|
GcsDestination |
getGcsDestination()
The Cloud Storage location where the training data is to be
written to.
|
GcsDestinationOrBuilder |
getGcsDestinationOrBuilder()
The Cloud Storage location where the training data is to be
written to.
|
com.google.protobuf.Parser<InputDataConfig> |
getParserForType() |
PredefinedSplit |
getPredefinedSplit()
Supported only for tabular Datasets.
|
PredefinedSplitOrBuilder |
getPredefinedSplitOrBuilder()
Supported only for tabular Datasets.
|
String |
getSavedQueryId()
Only applicable to Datasets that have SavedQueries.
|
com.google.protobuf.ByteString |
getSavedQueryIdBytes()
Only applicable to Datasets that have SavedQueries.
|
int |
getSerializedSize() |
InputDataConfig.SplitCase |
getSplitCase() |
StratifiedSplit |
getStratifiedSplit()
Supported only for tabular Datasets.
|
StratifiedSplitOrBuilder |
getStratifiedSplitOrBuilder()
Supported only for tabular Datasets.
|
TimestampSplit |
getTimestampSplit()
Supported only for tabular Datasets.
|
TimestampSplitOrBuilder |
getTimestampSplitOrBuilder()
Supported only for tabular Datasets.
|
com.google.protobuf.UnknownFieldSet |
getUnknownFields() |
boolean |
hasBigqueryDestination()
Only applicable to custom training with tabular Dataset with BigQuery
source.
|
boolean |
hasFilterSplit()
Split based on the provided filters for each set.
|
boolean |
hasFractionSplit()
Split based on fractions defining the size of each set.
|
boolean |
hasGcsDestination()
The Cloud Storage location where the training data is to be
written to.
|
int |
hashCode() |
boolean |
hasPredefinedSplit()
Supported only for tabular Datasets.
|
boolean |
hasStratifiedSplit()
Supported only for tabular Datasets.
|
boolean |
hasTimestampSplit()
Supported only for tabular Datasets.
|
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable |
internalGetFieldAccessorTable() |
boolean |
isInitialized() |
static InputDataConfig.Builder |
newBuilder() |
static InputDataConfig.Builder |
newBuilder(InputDataConfig prototype) |
InputDataConfig.Builder |
newBuilderForType() |
protected InputDataConfig.Builder |
newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) |
protected Object |
newInstance(com.google.protobuf.GeneratedMessageV3.UnusedPrivateParameter unused) |
static InputDataConfig |
parseDelimitedFrom(InputStream input) |
static InputDataConfig |
parseDelimitedFrom(InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
static InputDataConfig |
parseFrom(byte[] data) |
static InputDataConfig |
parseFrom(byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
static InputDataConfig |
parseFrom(ByteBuffer data) |
static InputDataConfig |
parseFrom(ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
static InputDataConfig |
parseFrom(com.google.protobuf.ByteString data) |
static InputDataConfig |
parseFrom(com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
static InputDataConfig |
parseFrom(com.google.protobuf.CodedInputStream input) |
static InputDataConfig |
parseFrom(com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
static InputDataConfig |
parseFrom(InputStream input) |
static InputDataConfig |
parseFrom(InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
static com.google.protobuf.Parser<InputDataConfig> |
parser() |
InputDataConfig.Builder |
toBuilder() |
void |
writeTo(com.google.protobuf.CodedOutputStream output) |
Methods inherited from class com.google.protobuf.GeneratedMessageV3: canUseUnsafe, computeStringSize, computeStringSizeNoTag, emptyBooleanList, emptyDoubleList, emptyFloatList, emptyIntList, emptyLongList, getAllFields, getDescriptorForType, getField, getOneofFieldDescriptor, getRepeatedField, getRepeatedFieldCount, hasField, hasOneof, internalGetMapField, isStringEmpty, makeExtensionsImmutable, mergeFromAndMakeImmutableInternal, mutableCopy, mutableCopy, mutableCopy, mutableCopy, mutableCopy, newBooleanList, newBuilderForType, newDoubleList, newFloatList, newIntList, newLongList, parseDelimitedWithIOException, parseDelimitedWithIOException, parseUnknownField, parseUnknownFieldProto3, parseWithIOException, parseWithIOException, parseWithIOException, parseWithIOException, serializeBooleanMapTo, serializeIntegerMapTo, serializeLongMapTo, serializeStringMapTo, writeReplace, writeString, writeStringNoTag

Methods inherited from class com.google.protobuf.AbstractMessage: findInitializationErrors, getInitializationErrorString, hashBoolean, hashEnum, hashEnumList, hashFields, hashLong, toString

Methods inherited from class com.google.protobuf.AbstractMessageLite: addAll, addAll, checkByteStringIsUtf8, toByteArray, toByteString, writeDelimitedTo, writeTo

Methods inherited from class java.lang.Object: clone, finalize, getClass, notify, notifyAll, wait, wait, wait

public static final int FRACTION_SPLIT_FIELD_NUMBER
public static final int FILTER_SPLIT_FIELD_NUMBER
public static final int PREDEFINED_SPLIT_FIELD_NUMBER
public static final int TIMESTAMP_SPLIT_FIELD_NUMBER
public static final int STRATIFIED_SPLIT_FIELD_NUMBER
public static final int GCS_DESTINATION_FIELD_NUMBER
public static final int BIGQUERY_DESTINATION_FIELD_NUMBER
public static final int DATASET_ID_FIELD_NUMBER
public static final int ANNOTATIONS_FILTER_FIELD_NUMBER
public static final int ANNOTATION_SCHEMA_URI_FIELD_NUMBER
public static final int SAVED_QUERY_ID_FIELD_NUMBER
protected Object newInstance(com.google.protobuf.GeneratedMessageV3.UnusedPrivateParameter unused)
Overrides: newInstance in class com.google.protobuf.GeneratedMessageV3

public final com.google.protobuf.UnknownFieldSet getUnknownFields()
Specified by: getUnknownFields in interface com.google.protobuf.MessageOrBuilder
Overrides: getUnknownFields in class com.google.protobuf.GeneratedMessageV3

public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()

protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable()
Overrides: internalGetFieldAccessorTable in class com.google.protobuf.GeneratedMessageV3

public InputDataConfig.SplitCase getSplitCase()
Specified by: getSplitCase in interface InputDataConfigOrBuilder

public InputDataConfig.DestinationCase getDestinationCase()
Specified by: getDestinationCase in interface InputDataConfigOrBuilder

public boolean hasFractionSplit()
Split based on fractions defining the size of each set.
.google.cloud.aiplatform.v1beta1.FractionSplit fraction_split = 2;
Specified by: hasFractionSplit in interface InputDataConfigOrBuilder

public FractionSplit getFractionSplit()
Split based on fractions defining the size of each set.
.google.cloud.aiplatform.v1beta1.FractionSplit fraction_split = 2;
Specified by: getFractionSplit in interface InputDataConfigOrBuilder

public FractionSplitOrBuilder getFractionSplitOrBuilder()
Split based on fractions defining the size of each set.
.google.cloud.aiplatform.v1beta1.FractionSplit fraction_split = 2;
Specified by: getFractionSplitOrBuilder in interface InputDataConfigOrBuilder

public boolean hasFilterSplit()
Split based on the provided filters for each set.
.google.cloud.aiplatform.v1beta1.FilterSplit filter_split = 3;
Specified by: hasFilterSplit in interface InputDataConfigOrBuilder

public FilterSplit getFilterSplit()
Split based on the provided filters for each set.
.google.cloud.aiplatform.v1beta1.FilterSplit filter_split = 3;
Specified by: getFilterSplit in interface InputDataConfigOrBuilder

public FilterSplitOrBuilder getFilterSplitOrBuilder()
Split based on the provided filters for each set.
.google.cloud.aiplatform.v1beta1.FilterSplit filter_split = 3;
Specified by: getFilterSplitOrBuilder in interface InputDataConfigOrBuilder

public boolean hasPredefinedSplit()
Supported only for tabular Datasets. Split based on a predefined key.
.google.cloud.aiplatform.v1beta1.PredefinedSplit predefined_split = 4;
Specified by: hasPredefinedSplit in interface InputDataConfigOrBuilder

public PredefinedSplit getPredefinedSplit()
Supported only for tabular Datasets. Split based on a predefined key.
.google.cloud.aiplatform.v1beta1.PredefinedSplit predefined_split = 4;
Specified by: getPredefinedSplit in interface InputDataConfigOrBuilder

public PredefinedSplitOrBuilder getPredefinedSplitOrBuilder()
Supported only for tabular Datasets. Split based on a predefined key.
.google.cloud.aiplatform.v1beta1.PredefinedSplit predefined_split = 4;
Specified by: getPredefinedSplitOrBuilder in interface InputDataConfigOrBuilder

public boolean hasTimestampSplit()
Supported only for tabular Datasets. Split based on the timestamp of the input data pieces.
.google.cloud.aiplatform.v1beta1.TimestampSplit timestamp_split = 5;
Specified by: hasTimestampSplit in interface InputDataConfigOrBuilder

public TimestampSplit getTimestampSplit()
Supported only for tabular Datasets. Split based on the timestamp of the input data pieces.
.google.cloud.aiplatform.v1beta1.TimestampSplit timestamp_split = 5;
Specified by: getTimestampSplit in interface InputDataConfigOrBuilder

public TimestampSplitOrBuilder getTimestampSplitOrBuilder()
Supported only for tabular Datasets. Split based on the timestamp of the input data pieces.
.google.cloud.aiplatform.v1beta1.TimestampSplit timestamp_split = 5;
Specified by: getTimestampSplitOrBuilder in interface InputDataConfigOrBuilder

public boolean hasStratifiedSplit()
Supported only for tabular Datasets. Split based on the distribution of the specified column.
.google.cloud.aiplatform.v1beta1.StratifiedSplit stratified_split = 12;
Specified by: hasStratifiedSplit in interface InputDataConfigOrBuilder

public StratifiedSplit getStratifiedSplit()
Supported only for tabular Datasets. Split based on the distribution of the specified column.
.google.cloud.aiplatform.v1beta1.StratifiedSplit stratified_split = 12;
Specified by: getStratifiedSplit in interface InputDataConfigOrBuilder

public StratifiedSplitOrBuilder getStratifiedSplitOrBuilder()
Supported only for tabular Datasets. Split based on the distribution of the specified column.
.google.cloud.aiplatform.v1beta1.StratifiedSplit stratified_split = 12;
Specified by: getStratifiedSplitOrBuilder in interface InputDataConfigOrBuilder

public boolean hasGcsDestination()
The Cloud Storage location where the training data is to be
written to. In the given directory a new directory is created with
name:
`dataset-<dataset-id>-<annotation-type>-<timestamp-of-training-call>`
where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
All training input data is written into that directory.
The Vertex AI environment variables representing Cloud Storage
data URIs are represented in the Cloud Storage wildcard
format to support sharded data. e.g.: "gs://.../training-*.jsonl"
* AIP_DATA_FORMAT = "jsonl" for non-tabular data, "csv" for tabular data
* AIP_TRAINING_DATA_URI =
"gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/training-*.${AIP_DATA_FORMAT}"
* AIP_VALIDATION_DATA_URI =
"gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/validation-*.${AIP_DATA_FORMAT}"
* AIP_TEST_DATA_URI =
"gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/test-*.${AIP_DATA_FORMAT}"
.google.cloud.aiplatform.v1beta1.GcsDestination gcs_destination = 8;
Specified by: hasGcsDestination in interface InputDataConfigOrBuilder

public GcsDestination getGcsDestination()
The Cloud Storage location where the training data is to be
written to. In the given directory a new directory is created with
name:
`dataset-<dataset-id>-<annotation-type>-<timestamp-of-training-call>`
where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
All training input data is written into that directory.
The Vertex AI environment variables representing Cloud Storage
data URIs are represented in the Cloud Storage wildcard
format to support sharded data. e.g.: "gs://.../training-*.jsonl"
* AIP_DATA_FORMAT = "jsonl" for non-tabular data, "csv" for tabular data
* AIP_TRAINING_DATA_URI =
"gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/training-*.${AIP_DATA_FORMAT}"
* AIP_VALIDATION_DATA_URI =
"gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/validation-*.${AIP_DATA_FORMAT}"
* AIP_TEST_DATA_URI =
"gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/test-*.${AIP_DATA_FORMAT}"
.google.cloud.aiplatform.v1beta1.GcsDestination gcs_destination = 8;
Specified by: getGcsDestination in interface InputDataConfigOrBuilder

public GcsDestinationOrBuilder getGcsDestinationOrBuilder()
The Cloud Storage location where the training data is to be
written to. In the given directory a new directory is created with
name:
`dataset-<dataset-id>-<annotation-type>-<timestamp-of-training-call>`
where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
All training input data is written into that directory.
The Vertex AI environment variables representing Cloud Storage
data URIs are represented in the Cloud Storage wildcard
format to support sharded data. e.g.: "gs://.../training-*.jsonl"
* AIP_DATA_FORMAT = "jsonl" for non-tabular data, "csv" for tabular data
* AIP_TRAINING_DATA_URI =
"gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/training-*.${AIP_DATA_FORMAT}"
* AIP_VALIDATION_DATA_URI =
"gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/validation-*.${AIP_DATA_FORMAT}"
* AIP_TEST_DATA_URI =
"gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/test-*.${AIP_DATA_FORMAT}"
.google.cloud.aiplatform.v1beta1.GcsDestination gcs_destination = 8;
Specified by: getGcsDestinationOrBuilder in interface InputDataConfigOrBuilder

public boolean hasBigqueryDestination()
Only applicable to custom training with tabular Dataset with BigQuery source. The BigQuery project location where the training data is to be written to. In the given project a new dataset is created with name `dataset_<dataset-id>_<annotation-type>_<timestamp-of-training-call>` where timestamp is in YYYY_MM_DDThh_mm_ss_sssZ format. All training input data is written into that dataset. In the dataset three tables are created, `training`, `validation` and `test`. * AIP_DATA_FORMAT = "bigquery". * AIP_TRAINING_DATA_URI = "bigquery_destination.dataset_<dataset-id>_<annotation-type>_<time>.training" * AIP_VALIDATION_DATA_URI = "bigquery_destination.dataset_<dataset-id>_<annotation-type>_<time>.validation" * AIP_TEST_DATA_URI = "bigquery_destination.dataset_<dataset-id>_<annotation-type>_<time>.test"
.google.cloud.aiplatform.v1beta1.BigQueryDestination bigquery_destination = 10;
Specified by: hasBigqueryDestination in interface InputDataConfigOrBuilder

public BigQueryDestination getBigqueryDestination()
Only applicable to custom training with tabular Dataset with BigQuery source. The BigQuery project location where the training data is to be written to. In the given project a new dataset is created with name `dataset_<dataset-id>_<annotation-type>_<timestamp-of-training-call>` where timestamp is in YYYY_MM_DDThh_mm_ss_sssZ format. All training input data is written into that dataset. In the dataset three tables are created, `training`, `validation` and `test`. * AIP_DATA_FORMAT = "bigquery". * AIP_TRAINING_DATA_URI = "bigquery_destination.dataset_<dataset-id>_<annotation-type>_<time>.training" * AIP_VALIDATION_DATA_URI = "bigquery_destination.dataset_<dataset-id>_<annotation-type>_<time>.validation" * AIP_TEST_DATA_URI = "bigquery_destination.dataset_<dataset-id>_<annotation-type>_<time>.test"
.google.cloud.aiplatform.v1beta1.BigQueryDestination bigquery_destination = 10;
Specified by: getBigqueryDestination in interface InputDataConfigOrBuilder

public BigQueryDestinationOrBuilder getBigqueryDestinationOrBuilder()
Only applicable to custom training with tabular Dataset with BigQuery source. The BigQuery project location where the training data is to be written to. In the given project a new dataset is created with name `dataset_<dataset-id>_<annotation-type>_<timestamp-of-training-call>` where timestamp is in YYYY_MM_DDThh_mm_ss_sssZ format. All training input data is written into that dataset. In the dataset three tables are created, `training`, `validation` and `test`. * AIP_DATA_FORMAT = "bigquery". * AIP_TRAINING_DATA_URI = "bigquery_destination.dataset_<dataset-id>_<annotation-type>_<time>.training" * AIP_VALIDATION_DATA_URI = "bigquery_destination.dataset_<dataset-id>_<annotation-type>_<time>.validation" * AIP_TEST_DATA_URI = "bigquery_destination.dataset_<dataset-id>_<annotation-type>_<time>.test"
.google.cloud.aiplatform.v1beta1.BigQueryDestination bigquery_destination = 10;
Specified by: getBigqueryDestinationOrBuilder in interface InputDataConfigOrBuilder

public String getDatasetId()
Required. The ID of the Dataset in the same Project and Location which data will be used to train the Model. The Dataset must use schema compatible with Model being trained, and what is compatible should be described in the used TrainingPipeline's [training_task_definition] [google.cloud.aiplatform.v1beta1.TrainingPipeline.training_task_definition]. For tabular Datasets, all their data is exported to training, to pick and choose from.
string dataset_id = 1 [(.google.api.field_behavior) = REQUIRED];
Specified by: getDatasetId in interface InputDataConfigOrBuilder

public com.google.protobuf.ByteString getDatasetIdBytes()
Required. The ID of the Dataset in the same Project and Location which data will be used to train the Model. The Dataset must use schema compatible with Model being trained, and what is compatible should be described in the used TrainingPipeline's [training_task_definition] [google.cloud.aiplatform.v1beta1.TrainingPipeline.training_task_definition]. For tabular Datasets, all their data is exported to training, to pick and choose from.
string dataset_id = 1 [(.google.api.field_behavior) = REQUIRED];
Specified by: getDatasetIdBytes in interface InputDataConfigOrBuilder

public String getAnnotationsFilter()
Applicable only to Datasets that have DataItems and Annotations. A filter on Annotations of the Dataset. Only Annotations that both match this filter and belong to DataItems not ignored by the split method are used in respectively training, validation or test role, depending on the role of the DataItem they are on (for the auto-assigned that role is decided by Vertex AI). A filter with same syntax as the one used in [ListAnnotations][google.cloud.aiplatform.v1beta1.DatasetService.ListAnnotations] may be used, but note here it filters across all Annotations of the Dataset, and not just within a single DataItem.
string annotations_filter = 6;
Specified by: getAnnotationsFilter in interface InputDataConfigOrBuilder

public com.google.protobuf.ByteString getAnnotationsFilterBytes()
Applicable only to Datasets that have DataItems and Annotations. A filter on Annotations of the Dataset. Only Annotations that both match this filter and belong to DataItems not ignored by the split method are used in respectively training, validation or test role, depending on the role of the DataItem they are on (for the auto-assigned that role is decided by Vertex AI). A filter with same syntax as the one used in [ListAnnotations][google.cloud.aiplatform.v1beta1.DatasetService.ListAnnotations] may be used, but note here it filters across all Annotations of the Dataset, and not just within a single DataItem.
string annotations_filter = 6;
Specified by: getAnnotationsFilterBytes in interface InputDataConfigOrBuilder

public String getAnnotationSchemaUri()
Applicable only to custom training with Datasets that have DataItems and Annotations. Cloud Storage URI that points to a YAML file describing the annotation schema. The schema is defined as an OpenAPI 3.0.2 [Schema Object](https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#schemaObject). The schema files that can be used here are found in gs://google-cloud-aiplatform/schema/dataset/annotation/ , note that the chosen schema must be consistent with [metadata][google.cloud.aiplatform.v1beta1.Dataset.metadata_schema_uri] of the Dataset specified by [dataset_id][google.cloud.aiplatform.v1beta1.InputDataConfig.dataset_id]. Only Annotations that both match this schema and belong to DataItems not ignored by the split method are used in respectively training, validation or test role, depending on the role of the DataItem they are on. When used in conjunction with [annotations_filter][google.cloud.aiplatform.v1beta1.InputDataConfig.annotations_filter], the Annotations used for training are filtered by both [annotations_filter][google.cloud.aiplatform.v1beta1.InputDataConfig.annotations_filter] and [annotation_schema_uri][google.cloud.aiplatform.v1beta1.InputDataConfig.annotation_schema_uri].
string annotation_schema_uri = 9;
Specified by: getAnnotationSchemaUri in interface InputDataConfigOrBuilder

public com.google.protobuf.ByteString getAnnotationSchemaUriBytes()
Applicable only to custom training with Datasets that have DataItems and Annotations. Cloud Storage URI that points to a YAML file describing the annotation schema. The schema is defined as an OpenAPI 3.0.2 [Schema Object](https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#schemaObject). The schema files that can be used here are found in gs://google-cloud-aiplatform/schema/dataset/annotation/ , note that the chosen schema must be consistent with [metadata][google.cloud.aiplatform.v1beta1.Dataset.metadata_schema_uri] of the Dataset specified by [dataset_id][google.cloud.aiplatform.v1beta1.InputDataConfig.dataset_id]. Only Annotations that both match this schema and belong to DataItems not ignored by the split method are used in respectively training, validation or test role, depending on the role of the DataItem they are on. When used in conjunction with [annotations_filter][google.cloud.aiplatform.v1beta1.InputDataConfig.annotations_filter], the Annotations used for training are filtered by both [annotations_filter][google.cloud.aiplatform.v1beta1.InputDataConfig.annotations_filter] and [annotation_schema_uri][google.cloud.aiplatform.v1beta1.InputDataConfig.annotation_schema_uri].
string annotation_schema_uri = 9;
Specified by: getAnnotationSchemaUriBytes in interface InputDataConfigOrBuilder

public String getSavedQueryId()
Only applicable to Datasets that have SavedQueries. The ID of a SavedQuery (annotation set) under the Dataset specified by [dataset_id][google.cloud.aiplatform.v1beta1.InputDataConfig.dataset_id] used for filtering Annotations for training. Only Annotations that are associated with this SavedQuery are used in respectively training. When used in conjunction with [annotations_filter][google.cloud.aiplatform.v1beta1.InputDataConfig.annotations_filter], the Annotations used for training are filtered by both [saved_query_id][google.cloud.aiplatform.v1beta1.InputDataConfig.saved_query_id] and [annotations_filter][google.cloud.aiplatform.v1beta1.InputDataConfig.annotations_filter]. Only one of [saved_query_id][google.cloud.aiplatform.v1beta1.InputDataConfig.saved_query_id] and [annotation_schema_uri][google.cloud.aiplatform.v1beta1.InputDataConfig.annotation_schema_uri] should be specified as both of them represent the same thing: problem type.
string saved_query_id = 7;
Specified by: getSavedQueryId in interface InputDataConfigOrBuilder

public com.google.protobuf.ByteString getSavedQueryIdBytes()
Only applicable to Datasets that have SavedQueries. The ID of a SavedQuery (annotation set) under the Dataset specified by [dataset_id][google.cloud.aiplatform.v1beta1.InputDataConfig.dataset_id] used for filtering Annotations for training. Only Annotations that are associated with this SavedQuery are used in respectively training. When used in conjunction with [annotations_filter][google.cloud.aiplatform.v1beta1.InputDataConfig.annotations_filter], the Annotations used for training are filtered by both [saved_query_id][google.cloud.aiplatform.v1beta1.InputDataConfig.saved_query_id] and [annotations_filter][google.cloud.aiplatform.v1beta1.InputDataConfig.annotations_filter]. Only one of [saved_query_id][google.cloud.aiplatform.v1beta1.InputDataConfig.saved_query_id] and [annotation_schema_uri][google.cloud.aiplatform.v1beta1.InputDataConfig.annotation_schema_uri] should be specified as both of them represent the same thing: problem type.
string saved_query_id = 7;
Specified by: getSavedQueryIdBytes in interface InputDataConfigOrBuilder

public final boolean isInitialized()
Specified by: isInitialized in interface com.google.protobuf.MessageLiteOrBuilder
Overrides: isInitialized in class com.google.protobuf.GeneratedMessageV3

public void writeTo(com.google.protobuf.CodedOutputStream output) throws IOException
Specified by: writeTo in interface com.google.protobuf.MessageLite
Overrides: writeTo in class com.google.protobuf.GeneratedMessageV3
Throws: IOException

public int getSerializedSize()
Specified by: getSerializedSize in interface com.google.protobuf.MessageLite
Overrides: getSerializedSize in class com.google.protobuf.GeneratedMessageV3

public boolean equals(Object obj)
Specified by: equals in interface com.google.protobuf.Message
Overrides: equals in class com.google.protobuf.AbstractMessage

public int hashCode()
Specified by: hashCode in interface com.google.protobuf.Message
Overrides: hashCode in class com.google.protobuf.AbstractMessage

public static InputDataConfig parseFrom(ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException
Throws: com.google.protobuf.InvalidProtocolBufferException

public static InputDataConfig parseFrom(ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException
Throws: com.google.protobuf.InvalidProtocolBufferException

public static InputDataConfig parseFrom(com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException
Throws: com.google.protobuf.InvalidProtocolBufferException

public static InputDataConfig parseFrom(com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException
Throws: com.google.protobuf.InvalidProtocolBufferException

public static InputDataConfig parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException
Throws: com.google.protobuf.InvalidProtocolBufferException

public static InputDataConfig parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException
Throws: com.google.protobuf.InvalidProtocolBufferException

public static InputDataConfig parseFrom(InputStream input) throws IOException
Throws: IOException

public static InputDataConfig parseFrom(InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws IOException
Throws: IOException

public static InputDataConfig parseDelimitedFrom(InputStream input) throws IOException
Throws: IOException

public static InputDataConfig parseDelimitedFrom(InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws IOException
Throws: IOException

public static InputDataConfig parseFrom(com.google.protobuf.CodedInputStream input) throws IOException
Throws: IOException

public static InputDataConfig parseFrom(com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws IOException
Throws: IOException

public InputDataConfig.Builder newBuilderForType()
Specified by: newBuilderForType in interface com.google.protobuf.Message
Specified by: newBuilderForType in interface com.google.protobuf.MessageLite

public static InputDataConfig.Builder newBuilder()

public static InputDataConfig.Builder newBuilder(InputDataConfig prototype)

public InputDataConfig.Builder toBuilder()
Specified by: toBuilder in interface com.google.protobuf.Message
Specified by: toBuilder in interface com.google.protobuf.MessageLite

protected InputDataConfig.Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent)
Overrides: newBuilderForType in class com.google.protobuf.GeneratedMessageV3

public static InputDataConfig getDefaultInstance()

public static com.google.protobuf.Parser<InputDataConfig> parser()

public com.google.protobuf.Parser<InputDataConfig> getParserForType()
Specified by: getParserForType in interface com.google.protobuf.Message
Specified by: getParserForType in interface com.google.protobuf.MessageLite
Overrides: getParserForType in class com.google.protobuf.GeneratedMessageV3

public InputDataConfig getDefaultInstanceForType()
Specified by: getDefaultInstanceForType in interface com.google.protobuf.MessageLiteOrBuilder
Specified by: getDefaultInstanceForType in interface com.google.protobuf.MessageOrBuilder

Copyright © 2022 Google LLC. All rights reserved.