

AnalyticsSynapseArtifactsModelFactory.SparkBatchJob Method

Definition

Initializes a new instance of SparkBatchJob.

public static Azure.Analytics.Synapse.Artifacts.Models.SparkBatchJob SparkBatchJob (Azure.Analytics.Synapse.Artifacts.Models.SparkBatchJobState livyInfo = default, string name = default, string workspaceName = default, string sparkPoolName = default, string submitterName = default, string submitterId = default, string artifactId = default, Azure.Analytics.Synapse.Artifacts.Models.SparkJobType? jobType = default, Azure.Analytics.Synapse.Artifacts.Models.SparkBatchJobResultType? result = default, Azure.Analytics.Synapse.Artifacts.Models.SparkScheduler scheduler = default, Azure.Analytics.Synapse.Artifacts.Models.SparkServicePlugin plugin = default, System.Collections.Generic.IEnumerable<Azure.Analytics.Synapse.Artifacts.Models.SparkServiceError> errors = default, System.Collections.Generic.IReadOnlyDictionary<string,string> tags = default, int id = 0, string appId = default, System.Collections.Generic.IReadOnlyDictionary<string,string> appInfo = default, Azure.Analytics.Synapse.Artifacts.Models.LivyStates? state = default, System.Collections.Generic.IEnumerable<string> logLines = default);
static member SparkBatchJob : Azure.Analytics.Synapse.Artifacts.Models.SparkBatchJobState * string * string * string * string * string * string * Nullable<Azure.Analytics.Synapse.Artifacts.Models.SparkJobType> * Nullable<Azure.Analytics.Synapse.Artifacts.Models.SparkBatchJobResultType> * Azure.Analytics.Synapse.Artifacts.Models.SparkScheduler * Azure.Analytics.Synapse.Artifacts.Models.SparkServicePlugin * seq<Azure.Analytics.Synapse.Artifacts.Models.SparkServiceError> * System.Collections.Generic.IReadOnlyDictionary<string, string> * int * string * System.Collections.Generic.IReadOnlyDictionary<string, string> * Nullable<Azure.Analytics.Synapse.Artifacts.Models.LivyStates> * seq<string> -> Azure.Analytics.Synapse.Artifacts.Models.SparkBatchJob
Public Shared Function SparkBatchJob (Optional livyInfo As SparkBatchJobState = Nothing, Optional name As String = Nothing, Optional workspaceName As String = Nothing, Optional sparkPoolName As String = Nothing, Optional submitterName As String = Nothing, Optional submitterId As String = Nothing, Optional artifactId As String = Nothing, Optional jobType As Nullable(Of SparkJobType) = Nothing, Optional result As Nullable(Of SparkBatchJobResultType) = Nothing, Optional scheduler As SparkScheduler = Nothing, Optional plugin As SparkServicePlugin = Nothing, Optional errors As IEnumerable(Of SparkServiceError) = Nothing, Optional tags As IReadOnlyDictionary(Of String, String) = Nothing, Optional id As Integer = 0, Optional appId As String = Nothing, Optional appInfo As IReadOnlyDictionary(Of String, String) = Nothing, Optional state As Nullable(Of LivyStates) = Nothing, Optional logLines As IEnumerable(Of String) = Nothing) As SparkBatchJob

Parameters

name
String

The batch name.

workspaceName
String

The workspace name.

sparkPoolName
String

The Spark pool name.

submitterName
String

The submitter name.

submitterId
String

The submitter identifier.

artifactId
String

The artifact identifier.

jobType
Nullable<SparkJobType>

The job type.

result
Nullable<SparkBatchJobResultType>

The Spark batch job result.

scheduler
SparkScheduler

The scheduler information.

plugin
SparkServicePlugin

The plugin information.

errors
IEnumerable<SparkServiceError>

The error information.

tags
IReadOnlyDictionary<String,String>

The tags.

id
Int32

The session ID.

appId
String

The application ID of this session.

appInfo
IReadOnlyDictionary<String,String>

The detailed application information.

state
Nullable<LivyStates>

The batch state.

logLines
IEnumerable<String>

The log lines.

Returns

A new SparkBatchJob instance for mocking.
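
The sketch below shows how this factory method might be used to build a SparkBatchJob model for a unit test that mocks a service response. It is a minimal example, not taken from this page: the argument values are hypothetical, the factory class is assumed to live in the Azure.Analytics.Synapse.Artifacts.Models namespace shown in the signatures above, and the enum members used (SparkJobType.SparkBatch, SparkBatchJobResultType.Succeeded, LivyStates.Success) should be verified against your SDK version.

// Minimal sketch: construct a SparkBatchJob model for mocking purposes.
// All literal values below ("my-workspace", "my-spark-pool", etc.) are hypothetical.
using System.Collections.Generic;
using Azure.Analytics.Synapse.Artifacts.Models;

SparkBatchJob mockJob = AnalyticsSynapseArtifactsModelFactory.SparkBatchJob(
    name: "sample-batch",                       // the batch name
    workspaceName: "my-workspace",              // the workspace name
    sparkPoolName: "my-spark-pool",             // the Spark pool name
    jobType: SparkJobType.SparkBatch,           // assumed enum member for a batch job
    result: SparkBatchJobResultType.Succeeded,  // assumed enum member for a successful result
    id: 42,                                     // the session ID
    state: LivyStates.Success,                  // assumed Livy state for a finished batch
    logLines: new List<string> { "stdout:", "job finished" });

// The resulting instance can then be returned from a mocked client method in tests.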

Applies to