public static final class PySparkJob.Builder extends GeneratedMessageV3.Builder<PySparkJob.Builder> implements PySparkJobOrBuilder
A Cloud Dataproc job for running [Apache PySpark](https://spark.apache.org/docs/0.9.0/python-programming-guide.html) applications on YARN.

Protobuf type google.cloud.dataproc.v1.PySparkJob

Methods inherited from class GeneratedMessageV3.Builder: getAllFields, getField, getFieldBuilder, getOneofFieldDescriptor, getParentForChildren, getRepeatedField, getRepeatedFieldBuilder, getRepeatedFieldCount, getUnknownFields, hasField, hasOneof, isClean, markClean, newBuilderForField, onBuilt, onChanged, setUnknownFieldsProto3

Methods inherited from class AbstractMessage.Builder: findInitializationErrors, getInitializationErrorString, internalMergeFrom, mergeDelimitedFrom, mergeFrom, newUninitializedMessageException, toString

Methods inherited from class AbstractMessageLite.Builder: addAll, mergeFrom, newUninitializedMessageException

Methods inherited from class java.lang.Object: equals, finalize, getClass, hashCode, notify, notifyAll, wait

Methods inherited from interface MessageOrBuilder: findInitializationErrors, getAllFields, getField, getInitializationErrorString, getOneofFieldDescriptor, getRepeatedField, getRepeatedFieldCount, getUnknownFields, hasField, hasOneof

Methods inherited from interface MessageLite.Builder: mergeFrom

public static final Descriptors.Descriptor getDescriptor()
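Before the method-level reference, a minimal sketch of typical Builder usage; the bucket paths and property values below are placeholders, not values taken from this page:

```java
import com.google.cloud.dataproc.v1.PySparkJob;

// Assemble a PySparkJob message with the builder (placeholder URIs and values).
PySparkJob job =
    PySparkJob.newBuilder()
        .setMainPythonFileUri("gs://my-bucket/scripts/word_count.py") // required driver file
        .addArgs("gs://my-bucket/input/")                             // positional driver args
        .addArgs("gs://my-bucket/output/")
        .putProperties("spark.executor.memory", "4g")                 // optional Spark property
        .build();
```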
protected MapField internalGetMapField(int number)
Overrides: internalGetMapField in class GeneratedMessageV3.Builder<PySparkJob.Builder>

protected MapField internalGetMutableMapField(int number)
Overrides: internalGetMutableMapField in class GeneratedMessageV3.Builder<PySparkJob.Builder>

protected GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable()
Overrides: internalGetFieldAccessorTable in class GeneratedMessageV3.Builder<PySparkJob.Builder>

public PySparkJob.Builder clear()
Specified by: clear in interface Message.Builder
Specified by: clear in interface MessageLite.Builder
Overrides: clear in class GeneratedMessageV3.Builder<PySparkJob.Builder>

public Descriptors.Descriptor getDescriptorForType()
Specified by: getDescriptorForType in interface Message.Builder
Specified by: getDescriptorForType in interface MessageOrBuilder
Overrides: getDescriptorForType in class GeneratedMessageV3.Builder<PySparkJob.Builder>

public PySparkJob getDefaultInstanceForType()
Specified by: getDefaultInstanceForType in interface MessageLiteOrBuilder
Specified by: getDefaultInstanceForType in interface MessageOrBuilder

public PySparkJob build()
Specified by: build in interface Message.Builder
Specified by: build in interface MessageLite.Builder

public PySparkJob buildPartial()
Specified by: buildPartial in interface Message.Builder
Specified by: buildPartial in interface MessageLite.Builder

public PySparkJob.Builder clone()
Specified by: clone in interface Message.Builder
Specified by: clone in interface MessageLite.Builder
Overrides: clone in class GeneratedMessageV3.Builder<PySparkJob.Builder>

public PySparkJob.Builder setField(Descriptors.FieldDescriptor field, java.lang.Object value)
Specified by: setField in interface Message.Builder
Overrides: setField in class GeneratedMessageV3.Builder<PySparkJob.Builder>

public PySparkJob.Builder clearField(Descriptors.FieldDescriptor field)
Specified by: clearField in interface Message.Builder
Overrides: clearField in class GeneratedMessageV3.Builder<PySparkJob.Builder>

public PySparkJob.Builder clearOneof(Descriptors.OneofDescriptor oneof)
Specified by: clearOneof in interface Message.Builder
Overrides: clearOneof in class GeneratedMessageV3.Builder<PySparkJob.Builder>

public PySparkJob.Builder setRepeatedField(Descriptors.FieldDescriptor field, int index, java.lang.Object value)
Specified by: setRepeatedField in interface Message.Builder
Overrides: setRepeatedField in class GeneratedMessageV3.Builder<PySparkJob.Builder>

public PySparkJob.Builder addRepeatedField(Descriptors.FieldDescriptor field, java.lang.Object value)
Specified by: addRepeatedField in interface Message.Builder
Overrides: addRepeatedField in class GeneratedMessageV3.Builder<PySparkJob.Builder>

public PySparkJob.Builder mergeFrom(Message other)
Specified by: mergeFrom in interface Message.Builder
Overrides: mergeFrom in class AbstractMessage.Builder<PySparkJob.Builder>

public PySparkJob.Builder mergeFrom(PySparkJob other)

public final boolean isInitialized()
Specified by: isInitialized in interface MessageLiteOrBuilder
Overrides: isInitialized in class GeneratedMessageV3.Builder<PySparkJob.Builder>

public PySparkJob.Builder mergeFrom(CodedInputStream input, ExtensionRegistryLite extensionRegistry) throws java.io.IOException
Specified by: mergeFrom in interface Message.Builder
Specified by: mergeFrom in interface MessageLite.Builder
Overrides: mergeFrom in class AbstractMessage.Builder<PySparkJob.Builder>
Throws: java.io.IOException
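A short sketch of how these inherited builder operations compose; the field values are illustrative only:

```java
import com.google.cloud.dataproc.v1.PySparkJob;

// buildPartial() is like build() but does not complain about missing required fields.
PySparkJob defaults = PySparkJob.newBuilder()
    .putProperties("spark.executor.memory", "4g")
    .buildPartial();

// mergeFrom(PySparkJob other) copies over the fields that are set on `defaults`.
PySparkJob job = PySparkJob.newBuilder()
    .mergeFrom(defaults)
    .setMainPythonFileUri("gs://my-bucket/job.py") // placeholder URI
    .build();
```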
public java.lang.String getMainPythonFileUri()
Required. The HCFS URI of the main Python file to use as the driver. Must be a .py file.
string main_python_file_uri = 1;
Specified by: getMainPythonFileUri in interface PySparkJobOrBuilder

public ByteString getMainPythonFileUriBytes()
Required. The HCFS URI of the main Python file to use as the driver. Must be a .py file.
string main_python_file_uri = 1;
Specified by: getMainPythonFileUriBytes in interface PySparkJobOrBuilder

public PySparkJob.Builder setMainPythonFileUri(java.lang.String value)
Required. The HCFS URI of the main Python file to use as the driver. Must be a .py file.
string main_python_file_uri = 1;

public PySparkJob.Builder clearMainPythonFileUri()
Required. The HCFS URI of the main Python file to use as the driver. Must be a .py file.
string main_python_file_uri = 1;

public PySparkJob.Builder setMainPythonFileUriBytes(ByteString value)
Required. The HCFS URI of the main Python file to use as the driver. Must be a .py file.
string main_python_file_uri = 1;
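The `Bytes` accessor variants read and write the UTF-8 encoded form of the same string field; a small sketch with a placeholder URI:

```java
import com.google.cloud.dataproc.v1.PySparkJob;
import com.google.protobuf.ByteString;

PySparkJob.Builder b = PySparkJob.newBuilder();
// Write the field as raw UTF-8 bytes...
b.setMainPythonFileUriBytes(ByteString.copyFromUtf8("gs://my-bucket/job.py"));
// ...and read it back as a String.
String uri = b.getMainPythonFileUri(); // "gs://my-bucket/job.py"
```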
public ProtocolStringList getArgsList()
Optional. The arguments to pass to the driver. Do not include arguments, such as `--conf`, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
repeated string args = 2;
Specified by: getArgsList in interface PySparkJobOrBuilder

public int getArgsCount()
Optional. The arguments to pass to the driver. Do not include arguments, such as `--conf`, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
repeated string args = 2;
Specified by: getArgsCount in interface PySparkJobOrBuilder

public java.lang.String getArgs(int index)
Optional. The arguments to pass to the driver. Do not include arguments, such as `--conf`, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
repeated string args = 2;
Specified by: getArgs in interface PySparkJobOrBuilder

public ByteString getArgsBytes(int index)
Optional. The arguments to pass to the driver. Do not include arguments, such as `--conf`, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
repeated string args = 2;
Specified by: getArgsBytes in interface PySparkJobOrBuilder

public PySparkJob.Builder setArgs(int index, java.lang.String value)
Optional. The arguments to pass to the driver. Do not include arguments, such as `--conf`, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
repeated string args = 2;

public PySparkJob.Builder addArgs(java.lang.String value)
Optional. The arguments to pass to the driver. Do not include arguments, such as `--conf`, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
repeated string args = 2;

public PySparkJob.Builder addAllArgs(java.lang.Iterable<java.lang.String> values)
Optional. The arguments to pass to the driver. Do not include arguments, such as `--conf`, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
repeated string args = 2;

public PySparkJob.Builder clearArgs()
Optional. The arguments to pass to the driver. Do not include arguments, such as `--conf`, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
repeated string args = 2;

public PySparkJob.Builder addArgsBytes(ByteString value)
Optional. The arguments to pass to the driver. Do not include arguments, such as `--conf`, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
repeated string args = 2;
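A sketch of the repeated-field accessors on `args`; the same get/set/add/addAll/clear pattern applies to `python_file_uris`, `jar_file_uris`, `file_uris`, and `archive_uris` below. The values are placeholders:

```java
import com.google.cloud.dataproc.v1.PySparkJob;
import java.util.Arrays;

PySparkJob.Builder b = PySparkJob.newBuilder()
    .addArgs("--input")                                              // append one element
    .addAllArgs(Arrays.asList("gs://my-bucket/data/", "--verbose")); // append several

b.setArgs(0, "--in");         // replace the element at index 0
int count = b.getArgsCount(); // 3
b.clearArgs();                // remove all elements
```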
public ProtocolStringList getPythonFileUrisList()
Optional. HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
repeated string python_file_uris = 3;
Specified by: getPythonFileUrisList in interface PySparkJobOrBuilder

public int getPythonFileUrisCount()
Optional. HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
repeated string python_file_uris = 3;
Specified by: getPythonFileUrisCount in interface PySparkJobOrBuilder

public java.lang.String getPythonFileUris(int index)
Optional. HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
repeated string python_file_uris = 3;
Specified by: getPythonFileUris in interface PySparkJobOrBuilder

public ByteString getPythonFileUrisBytes(int index)
Optional. HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
repeated string python_file_uris = 3;
Specified by: getPythonFileUrisBytes in interface PySparkJobOrBuilder

public PySparkJob.Builder setPythonFileUris(int index, java.lang.String value)
Optional. HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
repeated string python_file_uris = 3;

public PySparkJob.Builder addPythonFileUris(java.lang.String value)
Optional. HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
repeated string python_file_uris = 3;

public PySparkJob.Builder addAllPythonFileUris(java.lang.Iterable<java.lang.String> values)
Optional. HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
repeated string python_file_uris = 3;

public PySparkJob.Builder clearPythonFileUris()
Optional. HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
repeated string python_file_uris = 3;

public PySparkJob.Builder addPythonFileUrisBytes(ByteString value)
Optional. HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
repeated string python_file_uris = 3;
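For instance, shipping extra Python modules alongside the driver (placeholder URIs):

```java
// Extra modules become importable from the main driver script.
PySparkJob.Builder b = PySparkJob.newBuilder()
    .addPythonFileUris("gs://my-bucket/libs/helpers.py")
    .addPythonFileUris("gs://my-bucket/libs/deps.zip"); // .py, .egg, and .zip are supported
```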
public ProtocolStringList getJarFileUrisList()
Optional. HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
repeated string jar_file_uris = 4;
Specified by: getJarFileUrisList in interface PySparkJobOrBuilder

public int getJarFileUrisCount()
Optional. HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
repeated string jar_file_uris = 4;
Specified by: getJarFileUrisCount in interface PySparkJobOrBuilder

public java.lang.String getJarFileUris(int index)
Optional. HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
repeated string jar_file_uris = 4;
Specified by: getJarFileUris in interface PySparkJobOrBuilder

public ByteString getJarFileUrisBytes(int index)
Optional. HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
repeated string jar_file_uris = 4;
Specified by: getJarFileUrisBytes in interface PySparkJobOrBuilder

public PySparkJob.Builder setJarFileUris(int index, java.lang.String value)
Optional. HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
repeated string jar_file_uris = 4;

public PySparkJob.Builder addJarFileUris(java.lang.String value)
Optional. HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
repeated string jar_file_uris = 4;

public PySparkJob.Builder addAllJarFileUris(java.lang.Iterable<java.lang.String> values)
Optional. HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
repeated string jar_file_uris = 4;

public PySparkJob.Builder clearJarFileUris()
Optional. HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
repeated string jar_file_uris = 4;

public PySparkJob.Builder addJarFileUrisBytes(ByteString value)
Optional. HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
repeated string jar_file_uris = 4;
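For instance, putting a JVM dependency on the driver and task classpaths (the jar name is a placeholder):

```java
// e.g. a Spark connector jar needed by the PySpark job
PySparkJob.Builder b = PySparkJob.newBuilder()
    .addJarFileUris("gs://my-bucket/jars/spark-avro.jar");
```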
public ProtocolStringList getFileUrisList()
Optional. HCFS URIs of files to be copied to the working directory of Python drivers and distributed tasks. Useful for naively parallel tasks.
repeated string file_uris = 5;
Specified by: getFileUrisList in interface PySparkJobOrBuilder

public int getFileUrisCount()
Optional. HCFS URIs of files to be copied to the working directory of Python drivers and distributed tasks. Useful for naively parallel tasks.
repeated string file_uris = 5;
Specified by: getFileUrisCount in interface PySparkJobOrBuilder

public java.lang.String getFileUris(int index)
Optional. HCFS URIs of files to be copied to the working directory of Python drivers and distributed tasks. Useful for naively parallel tasks.
repeated string file_uris = 5;
Specified by: getFileUris in interface PySparkJobOrBuilder

public ByteString getFileUrisBytes(int index)
Optional. HCFS URIs of files to be copied to the working directory of Python drivers and distributed tasks. Useful for naively parallel tasks.
repeated string file_uris = 5;
Specified by: getFileUrisBytes in interface PySparkJobOrBuilder

public PySparkJob.Builder setFileUris(int index, java.lang.String value)
Optional. HCFS URIs of files to be copied to the working directory of Python drivers and distributed tasks. Useful for naively parallel tasks.
repeated string file_uris = 5;

public PySparkJob.Builder addFileUris(java.lang.String value)
Optional. HCFS URIs of files to be copied to the working directory of Python drivers and distributed tasks. Useful for naively parallel tasks.
repeated string file_uris = 5;

public PySparkJob.Builder addAllFileUris(java.lang.Iterable<java.lang.String> values)
Optional. HCFS URIs of files to be copied to the working directory of Python drivers and distributed tasks. Useful for naively parallel tasks.
repeated string file_uris = 5;

public PySparkJob.Builder clearFileUris()
Optional. HCFS URIs of files to be copied to the working directory of Python drivers and distributed tasks. Useful for naively parallel tasks.
repeated string file_uris = 5;

public PySparkJob.Builder addFileUrisBytes(ByteString value)
Optional. HCFS URIs of files to be copied to the working directory of Python drivers and distributed tasks. Useful for naively parallel tasks.
repeated string file_uris = 5;
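For instance, staging a plain data file into each working directory (placeholder URI):

```java
// The file is copied as-is, not extracted.
PySparkJob.Builder b = PySparkJob.newBuilder()
    .addFileUris("gs://my-bucket/config/lookup_table.csv");
```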
public ProtocolStringList getArchiveUrisList()
Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
repeated string archive_uris = 6;
Specified by: getArchiveUrisList in interface PySparkJobOrBuilder

public int getArchiveUrisCount()
Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
repeated string archive_uris = 6;
Specified by: getArchiveUrisCount in interface PySparkJobOrBuilder

public java.lang.String getArchiveUris(int index)
Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
repeated string archive_uris = 6;
Specified by: getArchiveUris in interface PySparkJobOrBuilder

public ByteString getArchiveUrisBytes(int index)
Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
repeated string archive_uris = 6;
Specified by: getArchiveUrisBytes in interface PySparkJobOrBuilder

public PySparkJob.Builder setArchiveUris(int index, java.lang.String value)
Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
repeated string archive_uris = 6;

public PySparkJob.Builder addArchiveUris(java.lang.String value)
Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
repeated string archive_uris = 6;

public PySparkJob.Builder addAllArchiveUris(java.lang.Iterable<java.lang.String> values)
Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
repeated string archive_uris = 6;

public PySparkJob.Builder clearArchiveUris()
Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
repeated string archive_uris = 6;

public PySparkJob.Builder addArchiveUrisBytes(ByteString value)
Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
repeated string archive_uris = 6;
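For instance, distributing an archive that is extracted on each node (placeholder URI):

```java
// Unlike file_uris, archives are unpacked into the working directory.
PySparkJob.Builder b = PySparkJob.newBuilder()
    .addArchiveUris("gs://my-bucket/envs/py-deps.zip");
```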
public int getPropertiesCount()
Description copied from interface: PySparkJobOrBuilder
Optional. A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
map<string, string> properties = 7;
Specified by: getPropertiesCount in interface PySparkJobOrBuilder

public boolean containsProperties(java.lang.String key)
Optional. A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
map<string, string> properties = 7;
Specified by: containsProperties in interface PySparkJobOrBuilder

@Deprecated
public java.util.Map<java.lang.String,java.lang.String> getProperties()
Deprecated. Use getPropertiesMap() instead.
Specified by: getProperties in interface PySparkJobOrBuilder

public java.util.Map<java.lang.String,java.lang.String> getPropertiesMap()
Optional. A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
map<string, string> properties = 7;
Specified by: getPropertiesMap in interface PySparkJobOrBuilder

public java.lang.String getPropertiesOrDefault(java.lang.String key, java.lang.String defaultValue)
Optional. A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
map<string, string> properties = 7;
Specified by: getPropertiesOrDefault in interface PySparkJobOrBuilder

public java.lang.String getPropertiesOrThrow(java.lang.String key)
Optional. A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
map<string, string> properties = 7;
Specified by: getPropertiesOrThrow in interface PySparkJobOrBuilder

public PySparkJob.Builder clearProperties()

public PySparkJob.Builder removeProperties(java.lang.String key)
Optional. A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
map<string, string> properties = 7;

@Deprecated
public java.util.Map<java.lang.String,java.lang.String> getMutableProperties()

public PySparkJob.Builder putProperties(java.lang.String key, java.lang.String value)
Optional. A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
map<string, string> properties = 7;

public PySparkJob.Builder putAllProperties(java.util.Map<java.lang.String,java.lang.String> values)
Optional. A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
map<string, string> properties = 7;
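A sketch of the map-field accessors on `properties`; the Spark property names are standard, but the values are illustrative:

```java
import com.google.cloud.dataproc.v1.PySparkJob;
import java.util.HashMap;
import java.util.Map;

Map<String, String> extra = new HashMap<>();
extra.put("spark.executor.memory", "4g");
extra.put("spark.executor.cores", "2");

PySparkJob.Builder b = PySparkJob.newBuilder()
    .putProperties("spark.dynamicAllocation.enabled", "true") // single entry
    .putAllProperties(extra);                                 // bulk insert

String mem = b.getPropertiesOrDefault("spark.executor.memory", "2g"); // "4g"
b.removeProperties("spark.executor.cores");                   // drop one entry
```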
public boolean hasLoggingConfig()
Optional. The runtime log config for job execution.
.google.cloud.dataproc.v1.LoggingConfig logging_config = 8;
Specified by: hasLoggingConfig in interface PySparkJobOrBuilder

public LoggingConfig getLoggingConfig()
Optional. The runtime log config for job execution.
.google.cloud.dataproc.v1.LoggingConfig logging_config = 8;
Specified by: getLoggingConfig in interface PySparkJobOrBuilder

public PySparkJob.Builder setLoggingConfig(LoggingConfig value)
Optional. The runtime log config for job execution.
.google.cloud.dataproc.v1.LoggingConfig logging_config = 8;

public PySparkJob.Builder setLoggingConfig(LoggingConfig.Builder builderForValue)
Optional. The runtime log config for job execution.
.google.cloud.dataproc.v1.LoggingConfig logging_config = 8;

public PySparkJob.Builder mergeLoggingConfig(LoggingConfig value)
Optional. The runtime log config for job execution.
.google.cloud.dataproc.v1.LoggingConfig logging_config = 8;

public PySparkJob.Builder clearLoggingConfig()
Optional. The runtime log config for job execution.
.google.cloud.dataproc.v1.LoggingConfig logging_config = 8;

public LoggingConfig.Builder getLoggingConfigBuilder()
Optional. The runtime log config for job execution.
.google.cloud.dataproc.v1.LoggingConfig logging_config = 8;

public LoggingConfigOrBuilder getLoggingConfigOrBuilder()
Optional. The runtime log config for job execution.
.google.cloud.dataproc.v1.LoggingConfig logging_config = 8;
Specified by: getLoggingConfigOrBuilder in interface PySparkJobOrBuilder
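A sketch of working with the `logging_config` message field; the logger names and levels are illustrative:

```java
import com.google.cloud.dataproc.v1.LoggingConfig;
import com.google.cloud.dataproc.v1.PySparkJob;

LoggingConfig logging = LoggingConfig.newBuilder()
    .putDriverLogLevels("org.apache.spark", LoggingConfig.Level.WARN)
    .build();

PySparkJob.Builder b = PySparkJob.newBuilder()
    .setLoggingConfig(logging);  // replace the message field outright
b.mergeLoggingConfig(logging);   // or merge into any existing value
b.getLoggingConfigBuilder()      // or mutate in place via the nested builder
    .putDriverLogLevels("root", LoggingConfig.Level.INFO);
```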
public final PySparkJob.Builder setUnknownFields(UnknownFieldSet unknownFields)
Specified by: setUnknownFields in interface Message.Builder
Overrides: setUnknownFields in class GeneratedMessageV3.Builder<PySparkJob.Builder>

public final PySparkJob.Builder mergeUnknownFields(UnknownFieldSet unknownFields)
Specified by: mergeUnknownFields in interface Message.Builder
Overrides: mergeUnknownFields in class GeneratedMessageV3.Builder<PySparkJob.Builder>