public abstract class TaskContext
extends java.lang.Object
implements java.io.Serializable
Contextual information about a task which can be read or mutated during execution. To access the TaskContext for a running task, use org.apache.spark.TaskContext.get().
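For example, the following minimal sketch reads task information from inside code that Spark executes as a task (the class and helper name are illustrative, not part of the API; outside a running task there is no active context to return):

```java
import org.apache.spark.TaskContext;

public final class TaskInfoExample {
    // Illustrative helper: must be called from code that Spark runs as a
    // task (for example, inside an rdd.foreachPartition(...) function);
    // outside a running task there is no active TaskContext.
    static String describeCurrentTask() {
        TaskContext tc = TaskContext.get();
        return "stage " + tc.stageId()
                + ", partition " + tc.partitionId()
                + ", attempt " + tc.attemptNumber();
    }
}
```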
| Constructor and Description |
|---|
| TaskContext() |
| Modifier and Type | Method and Description |
|---|---|
| abstract void | addOnCompleteCallback(scala.Function0<scala.runtime.BoxedUnit> f) Adds a callback function to be executed on task completion. |
| abstract TaskContext | addTaskCompletionListener(scala.Function1<TaskContext,scala.runtime.BoxedUnit> f) Adds a listener in the form of a Scala closure to be executed on task completion. |
| abstract TaskContext | addTaskCompletionListener(TaskCompletionListener listener) Adds a (Java friendly) listener to be executed on task completion. |
| abstract long | attemptId() |
| abstract int | attemptNumber() How many times this task has been attempted. |
| static TaskContext | get() Return the currently active TaskContext. |
| abstract scala.collection.Seq<org.apache.spark.metrics.source.Source> | getMetricsSources(java.lang.String sourceName) ::DeveloperApi:: Returns all metrics sources with the given name which are associated with the instance which runs the task. |
| static int | getPartitionId() Returns the partition id of the currently active TaskContext. |
| abstract scala.collection.immutable.Map<java.lang.String,Accumulator<java.lang.Object>> | internalMetricsToAccumulators() Accumulators for tracking internal metrics, indexed by name. |
| abstract boolean | isCompleted() Returns true if the task has completed. |
| abstract boolean | isInterrupted() Returns true if the task has been killed. |
| abstract boolean | isRunningLocally() Returns true if the task is running locally in the driver program. |
| abstract int | partitionId() The ID of the RDD partition that is computed by this task. |
| abstract boolean | runningLocally() |
| protected static void | setTaskContext(TaskContext tc) Set the thread local TaskContext. |
| abstract int | stageId() The ID of the stage that this task belongs to. |
| abstract long | taskAttemptId() An ID that is unique to this task attempt (within the same SparkContext, no two task attempts will share the same attempt ID). |
| abstract org.apache.spark.executor.TaskMetrics | taskMetrics() ::DeveloperApi:: |
| protected static void | unset() Unset the thread local TaskContext. |
public static TaskContext get()

Return the currently active TaskContext.
public static int getPartitionId()

Returns the partition id of the currently active TaskContext.
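As a small illustration (the record-tagging use case and class name are hypothetical), the static accessor reads the partition id without first fetching the whole context:

```java
import org.apache.spark.TaskContext;

public final class PartitionTagExample {
    // Hypothetical: prefix an output record with the id of the RDD
    // partition that produced it, assuming this runs inside a task.
    static String tagWithPartition(String record) {
        return TaskContext.getPartitionId() + "\t" + record;
    }
}
```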
protected static void setTaskContext(TaskContext tc)

Set the thread local TaskContext.

Parameters:
tc - (undocumented)

protected static void unset()

Unset the thread local TaskContext.
public abstract boolean isCompleted()

Returns true if the task has completed.

public abstract boolean isInterrupted()

Returns true if the task has been killed.
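A long-running loop can poll this flag to cooperate with task cancellation. A minimal sketch, assuming it runs inside a task and that processOne is a hypothetical per-row operation:

```java
import java.util.Iterator;
import org.apache.spark.TaskContext;

public final class InterruptCheckExample {
    // Check the kill flag once per row so a cancelled job stops promptly
    // instead of finishing the whole partition.
    static void processPartition(Iterator<String> rows) {
        TaskContext tc = TaskContext.get();
        while (rows.hasNext()) {
            if (tc.isInterrupted()) {
                throw new RuntimeException("task was killed; stopping early");
            }
            processOne(rows.next());
        }
    }

    private static void processOne(String row) {
        // placeholder for real per-row work
    }
}
```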
public abstract boolean runningLocally()

public abstract boolean isRunningLocally()

Returns true if the task is running locally in the driver program.
public abstract TaskContext addTaskCompletionListener(TaskCompletionListener listener)

Adds a (Java friendly) listener to be executed on task completion.

Parameters:
listener - (undocumented)

public abstract TaskContext addTaskCompletionListener(scala.Function1<TaskContext,scala.runtime.BoxedUnit> f)

Adds a listener in the form of a Scala closure to be executed on task completion.

Parameters:
f - (undocumented)
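A common use for a completion listener is releasing a per-partition resource regardless of how the task ends. A minimal sketch using the Java-friendly overload (the registerCleanup helper is illustrative; only TaskContext and org.apache.spark.util.TaskCompletionListener come from the API):

```java
import java.io.Closeable;
import java.io.IOException;
import org.apache.spark.TaskContext;
import org.apache.spark.util.TaskCompletionListener;

public final class CompletionListenerExample {
    // Illustrative helper, called from code already running inside a task:
    // close the given resource when the task completes.
    static void registerCleanup(final Closeable resource) {
        TaskContext.get().addTaskCompletionListener(new TaskCompletionListener() {
            @Override
            public void onTaskCompletion(TaskContext context) {
                try {
                    resource.close();
                } catch (IOException e) {
                    // the task is already finishing; nothing useful to do
                }
            }
        });
    }
}
```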
public abstract void addOnCompleteCallback(scala.Function0<scala.runtime.BoxedUnit> f)

Adds a callback function to be executed on task completion.

Parameters:
f - Callback function.

public abstract int stageId()

The ID of the stage that this task belongs to.
public abstract int partitionId()

The ID of the RDD partition that is computed by this task.

public abstract int attemptNumber()

How many times this task has been attempted.

public abstract long attemptId()

public abstract long taskAttemptId()

An ID that is unique to this task attempt (within the same SparkContext, no two task attempts will share the same attempt ID).
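Because no two task attempts in the same SparkContext share a taskAttemptId, it works well for naming per-attempt scratch output so retried or speculative attempts never collide. A hypothetical sketch:

```java
import org.apache.spark.TaskContext;

public final class ScratchNameExample {
    // Hypothetical: build a scratch-file name that is unique per attempt,
    // combining the partition id, the retry count, and the globally
    // unique attempt id.
    static String scratchFileName() {
        TaskContext tc = TaskContext.get();
        return String.format("part-%05d-attempt-%d-%d.tmp",
                tc.partitionId(), tc.attemptNumber(), tc.taskAttemptId());
    }
}
```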
public abstract org.apache.spark.executor.TaskMetrics taskMetrics()

::DeveloperApi::
public abstract scala.collection.Seq<org.apache.spark.metrics.source.Source> getMetricsSources(java.lang.String sourceName)

::DeveloperApi::
Returns all metrics sources with the given name which are associated with the instance which runs the task. For more information, see org.apache.spark.metrics.MetricsSystem.

Parameters:
sourceName - (undocumented)

public abstract scala.collection.immutable.Map<java.lang.String,Accumulator<java.lang.Object>> internalMetricsToAccumulators()

Accumulators for tracking internal metrics, indexed by name.