public interface JobOrBuilder extends MessageOrBuilder
Modifier and Type | Method and Description |
---|---|
boolean |
containsLabels(java.lang.String key)
Optional.
|
java.lang.String |
getDriverControlFilesUri()
Output-only.
|
ByteString |
getDriverControlFilesUriBytes()
Output-only.
|
java.lang.String |
getDriverOutputResourceUri()
Output-only.
|
ByteString |
getDriverOutputResourceUriBytes()
Output-only.
|
HadoopJob |
getHadoopJob()
Job is a Hadoop job.
|
HadoopJobOrBuilder |
getHadoopJobOrBuilder()
Job is a Hadoop job.
|
HiveJob |
getHiveJob()
Job is a Hive job.
|
HiveJobOrBuilder |
getHiveJobOrBuilder()
Job is a Hive job.
|
java.util.Map<java.lang.String,java.lang.String> |
getLabels()
Deprecated.
|
int |
getLabelsCount()
Optional.
|
java.util.Map<java.lang.String,java.lang.String> |
getLabelsMap()
Optional.
|
java.lang.String |
getLabelsOrDefault(java.lang.String key,
java.lang.String defaultValue)
Optional.
|
java.lang.String |
getLabelsOrThrow(java.lang.String key)
Optional.
|
PigJob |
getPigJob()
Job is a Pig job.
|
PigJobOrBuilder |
getPigJobOrBuilder()
Job is a Pig job.
|
JobPlacement |
getPlacement()
Required.
|
JobPlacementOrBuilder |
getPlacementOrBuilder()
Required.
|
PySparkJob |
getPysparkJob()
Job is a Pyspark job.
|
PySparkJobOrBuilder |
getPysparkJobOrBuilder()
Job is a Pyspark job.
|
JobReference |
getReference()
Optional.
|
JobReferenceOrBuilder |
getReferenceOrBuilder()
Optional.
|
JobScheduling |
getScheduling()
Optional.
|
JobSchedulingOrBuilder |
getSchedulingOrBuilder()
Optional.
|
SparkJob |
getSparkJob()
Job is a Spark job.
|
SparkJobOrBuilder |
getSparkJobOrBuilder()
Job is a Spark job.
|
SparkSqlJob |
getSparkSqlJob()
Job is a SparkSql job.
|
SparkSqlJobOrBuilder |
getSparkSqlJobOrBuilder()
Job is a SparkSql job.
|
JobStatus |
getStatus()
Output-only.
|
JobStatus |
getStatusHistory(int index)
Output-only.
|
int |
getStatusHistoryCount()
Output-only.
|
java.util.List<JobStatus> |
getStatusHistoryList()
Output-only.
|
JobStatusOrBuilder |
getStatusHistoryOrBuilder(int index)
Output-only.
|
java.util.List<? extends JobStatusOrBuilder> |
getStatusHistoryOrBuilderList()
Output-only.
|
JobStatusOrBuilder |
getStatusOrBuilder()
Output-only.
|
Job.TypeJobCase |
getTypeJobCase() |
YarnApplication |
getYarnApplications(int index)
Output-only.
|
int |
getYarnApplicationsCount()
Output-only.
|
java.util.List<YarnApplication> |
getYarnApplicationsList()
Output-only.
|
YarnApplicationOrBuilder |
getYarnApplicationsOrBuilder(int index)
Output-only.
|
java.util.List<? extends YarnApplicationOrBuilder> |
getYarnApplicationsOrBuilderList()
Output-only.
|
boolean |
hasHadoopJob()
Job is a Hadoop job.
|
boolean |
hasHiveJob()
Job is a Hive job.
|
boolean |
hasPigJob()
Job is a Pig job.
|
boolean |
hasPlacement()
Required.
|
boolean |
hasPysparkJob()
Job is a Pyspark job.
|
boolean |
hasReference()
Optional.
|
boolean |
hasScheduling()
Optional.
|
boolean |
hasSparkJob()
Job is a Spark job.
|
boolean |
hasSparkSqlJob()
Job is a SparkSql job.
|
boolean |
hasStatus()
Output-only.
|
findInitializationErrors, getAllFields, getDefaultInstanceForType, getDescriptorForType, getField, getInitializationErrorString, getOneofFieldDescriptor, getRepeatedField, getRepeatedFieldCount, getUnknownFields, hasField, hasOneof
isInitialized
boolean hasReference()
Optional. The fully qualified reference to the job, which can be used to obtain the equivalent REST path of the job resource. If this property is not specified when a job is created, the server generates a <code>job_id</code>.
.google.cloud.dataproc.v1.JobReference reference = 1;
JobReference getReference()
Optional. The fully qualified reference to the job, which can be used to obtain the equivalent REST path of the job resource. If this property is not specified when a job is created, the server generates a <code>job_id</code>.
.google.cloud.dataproc.v1.JobReference reference = 1;
JobReferenceOrBuilder getReferenceOrBuilder()
Optional. The fully qualified reference to the job, which can be used to obtain the equivalent REST path of the job resource. If this property is not specified when a job is created, the server generates a <code>job_id</code>.
.google.cloud.dataproc.v1.JobReference reference = 1;
boolean hasPlacement()
Required. Job information, including how, when, and where to run the job.
.google.cloud.dataproc.v1.JobPlacement placement = 2;
JobPlacement getPlacement()
Required. Job information, including how, when, and where to run the job.
.google.cloud.dataproc.v1.JobPlacement placement = 2;
JobPlacementOrBuilder getPlacementOrBuilder()
Required. Job information, including how, when, and where to run the job.
.google.cloud.dataproc.v1.JobPlacement placement = 2;
boolean hasHadoopJob()
Job is a Hadoop job.
.google.cloud.dataproc.v1.HadoopJob hadoop_job = 3;
HadoopJob getHadoopJob()
Job is a Hadoop job.
.google.cloud.dataproc.v1.HadoopJob hadoop_job = 3;
HadoopJobOrBuilder getHadoopJobOrBuilder()
Job is a Hadoop job.
.google.cloud.dataproc.v1.HadoopJob hadoop_job = 3;
boolean hasSparkJob()
Job is a Spark job.
.google.cloud.dataproc.v1.SparkJob spark_job = 4;
SparkJob getSparkJob()
Job is a Spark job.
.google.cloud.dataproc.v1.SparkJob spark_job = 4;
SparkJobOrBuilder getSparkJobOrBuilder()
Job is a Spark job.
.google.cloud.dataproc.v1.SparkJob spark_job = 4;
boolean hasPysparkJob()
Job is a Pyspark job.
.google.cloud.dataproc.v1.PySparkJob pyspark_job = 5;
PySparkJob getPysparkJob()
Job is a Pyspark job.
.google.cloud.dataproc.v1.PySparkJob pyspark_job = 5;
PySparkJobOrBuilder getPysparkJobOrBuilder()
Job is a Pyspark job.
.google.cloud.dataproc.v1.PySparkJob pyspark_job = 5;
boolean hasHiveJob()
Job is a Hive job.
.google.cloud.dataproc.v1.HiveJob hive_job = 6;
HiveJob getHiveJob()
Job is a Hive job.
.google.cloud.dataproc.v1.HiveJob hive_job = 6;
HiveJobOrBuilder getHiveJobOrBuilder()
Job is a Hive job.
.google.cloud.dataproc.v1.HiveJob hive_job = 6;
boolean hasPigJob()
Job is a Pig job.
.google.cloud.dataproc.v1.PigJob pig_job = 7;
PigJob getPigJob()
Job is a Pig job.
.google.cloud.dataproc.v1.PigJob pig_job = 7;
PigJobOrBuilder getPigJobOrBuilder()
Job is a Pig job.
.google.cloud.dataproc.v1.PigJob pig_job = 7;
boolean hasSparkSqlJob()
Job is a SparkSql job.
.google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 12;
SparkSqlJob getSparkSqlJob()
Job is a SparkSql job.
.google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 12;
SparkSqlJobOrBuilder getSparkSqlJobOrBuilder()
Job is a SparkSql job.
.google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 12;
boolean hasStatus()
Output-only. The job status. Additional application-specific status information may be contained in the <code>type_job</code> and <code>yarn_applications</code> fields.
.google.cloud.dataproc.v1.JobStatus status = 8;
JobStatus getStatus()
Output-only. The job status. Additional application-specific status information may be contained in the <code>type_job</code> and <code>yarn_applications</code> fields.
.google.cloud.dataproc.v1.JobStatus status = 8;
JobStatusOrBuilder getStatusOrBuilder()
Output-only. The job status. Additional application-specific status information may be contained in the <code>type_job</code> and <code>yarn_applications</code> fields.
.google.cloud.dataproc.v1.JobStatus status = 8;
java.util.List<JobStatus> getStatusHistoryList()
Output-only. The previous job status.
repeated .google.cloud.dataproc.v1.JobStatus status_history = 13;
JobStatus getStatusHistory(int index)
Output-only. The previous job status.
repeated .google.cloud.dataproc.v1.JobStatus status_history = 13;
int getStatusHistoryCount()
Output-only. The previous job status.
repeated .google.cloud.dataproc.v1.JobStatus status_history = 13;
java.util.List<? extends JobStatusOrBuilder> getStatusHistoryOrBuilderList()
Output-only. The previous job status.
repeated .google.cloud.dataproc.v1.JobStatus status_history = 13;
JobStatusOrBuilder getStatusHistoryOrBuilder(int index)
Output-only. The previous job status.
repeated .google.cloud.dataproc.v1.JobStatus status_history = 13;
java.util.List<YarnApplication> getYarnApplicationsList()
Output-only. The collection of YARN applications spun up by this job. **Beta** Feature: This report is available for testing purposes only. It may be changed before final release.
repeated .google.cloud.dataproc.v1.YarnApplication yarn_applications = 9;
YarnApplication getYarnApplications(int index)
Output-only. The collection of YARN applications spun up by this job. **Beta** Feature: This report is available for testing purposes only. It may be changed before final release.
repeated .google.cloud.dataproc.v1.YarnApplication yarn_applications = 9;
int getYarnApplicationsCount()
Output-only. The collection of YARN applications spun up by this job. **Beta** Feature: This report is available for testing purposes only. It may be changed before final release.
repeated .google.cloud.dataproc.v1.YarnApplication yarn_applications = 9;
java.util.List<? extends YarnApplicationOrBuilder> getYarnApplicationsOrBuilderList()
Output-only. The collection of YARN applications spun up by this job. **Beta** Feature: This report is available for testing purposes only. It may be changed before final release.
repeated .google.cloud.dataproc.v1.YarnApplication yarn_applications = 9;
YarnApplicationOrBuilder getYarnApplicationsOrBuilder(int index)
Output-only. The collection of YARN applications spun up by this job. **Beta** Feature: This report is available for testing purposes only. It may be changed before final release.
repeated .google.cloud.dataproc.v1.YarnApplication yarn_applications = 9;
java.lang.String getDriverOutputResourceUri()
Output-only. A URI pointing to the location of the stdout of the job's driver program.
string driver_output_resource_uri = 17;
ByteString getDriverOutputResourceUriBytes()
Output-only. A URI pointing to the location of the stdout of the job's driver program.
string driver_output_resource_uri = 17;
java.lang.String getDriverControlFilesUri()
Output-only. If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as `driver_output_uri`.
string driver_control_files_uri = 15;
ByteString getDriverControlFilesUriBytes()
Output-only. If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as `driver_output_uri`.
string driver_control_files_uri = 15;
int getLabelsCount()
Optional. The labels to associate with this job. Label **keys** must contain 1 to 63 characters, and must conform to [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt). Label **values** may be empty, but, if present, must contain 1 to 63 characters, and must conform to [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be associated with a job.
map<string, string> labels = 18;
boolean containsLabels(java.lang.String key)
Optional. The labels to associate with this job. Label **keys** must contain 1 to 63 characters, and must conform to [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt). Label **values** may be empty, but, if present, must contain 1 to 63 characters, and must conform to [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be associated with a job.
map<string, string> labels = 18;
@Deprecated java.util.Map<java.lang.String,java.lang.String> getLabels()
Deprecated. Use getLabelsMap() instead.
java.util.Map<java.lang.String,java.lang.String> getLabelsMap()
Optional. The labels to associate with this job. Label **keys** must contain 1 to 63 characters, and must conform to [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt). Label **values** may be empty, but, if present, must contain 1 to 63 characters, and must conform to [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be associated with a job.
map<string, string> labels = 18;
java.lang.String getLabelsOrDefault(java.lang.String key, java.lang.String defaultValue)
Optional. The labels to associate with this job. Label **keys** must contain 1 to 63 characters, and must conform to [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt). Label **values** may be empty, but, if present, must contain 1 to 63 characters, and must conform to [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be associated with a job.
map<string, string> labels = 18;
java.lang.String getLabelsOrThrow(java.lang.String key)
Optional. The labels to associate with this job. Label **keys** must contain 1 to 63 characters, and must conform to [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt). Label **values** may be empty, but, if present, must contain 1 to 63 characters, and must conform to [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be associated with a job.
map<string, string> labels = 18;
boolean hasScheduling()
Optional. Job scheduling configuration.
.google.cloud.dataproc.v1.JobScheduling scheduling = 20;
JobScheduling getScheduling()
Optional. Job scheduling configuration.
.google.cloud.dataproc.v1.JobScheduling scheduling = 20;
JobSchedulingOrBuilder getSchedulingOrBuilder()
Optional. Job scheduling configuration.
.google.cloud.dataproc.v1.JobScheduling scheduling = 20;
Job.TypeJobCase getTypeJobCase()