class PipelineExecutionStateApi extends AnyRef
Exposes methods and properties that may be used independently of the specific transform.
- Alphabetic
- By Inheritance
- PipelineExecutionStateApi
- AnyRef
- Any
- Hide All
- Show All
- Public
- All
Instance Constructors
- new PipelineExecutionStateApi(scope: PipelineExecutionScope)
Value Members
-
final
def
!=(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
##(): Int
- Definition Classes
- AnyRef → Any
-
final
def
==(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
asInstanceOf[T0]: T0
- Definition Classes
- Any
-
def
clone(): AnyRef
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( ... ) @native() @HotSpotIntrinsicCandidate()
-
val
content: PipelineContent
The properties of the pipeline configuration.
-
final
def
eq(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
def
equals(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
getClass(): Class[_]
- Definition Classes
- AnyRef → Any
- Annotations
- @native() @HotSpotIntrinsicCandidate()
-
def
hashCode(): Int
- Definition Classes
- AnyRef → Any
- Annotations
- @native() @HotSpotIntrinsicCandidate()
-
final
def
isInstanceOf[T0]: Boolean
- Definition Classes
- Any
-
def
manualSaveDataFrameForTransform(frame: DataFrame, transform: String, part: Option[Int] = parallelPartOption): Unit
Writes a collection of data to the underlying storage and associates it with a given transform.
Writes a collection of data to the underlying storage and associates it with a given transform. This is used to take control of the writing process during transform execution rather than simply writing the return value of the transform. If using this it is recommended to return an empty data frame from the transform.
- frame
The Apache Spark data frame to write.
- transform
The id of the transform which the data belongs to.
- part
The part number to write, if relevant.
-
final
def
ne(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
final
def
notify(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native() @HotSpotIntrinsicCandidate()
-
final
def
notifyAll(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native() @HotSpotIntrinsicCandidate()
-
val
options: Options
The collection of options configured for the pipeline.
-
lazy val
parallelPart: Int
The parallel part number of the current execution.
-
lazy val
parallelPartOption: Option[Int]
The parallel part number of the current execution, or None if it does not exist.
The parallel part number of the current execution, or None if it does not exist.
-
def
shouldCancel: Boolean
Informs the runtime to cancel the current execution.
Informs the runtime to cancel the current execution. Only used for test runs.
-
val
snapshot: PipelineSnapshotInfo
The properties of the snapshot of the pipeline configuration.
-
def
state: Map[String, Map[String, Any]]
Provides access to the state maps of the pipeline, keyed by transform id.
-
final
def
synchronized[T0](arg0: ⇒ T0): T0
- Definition Classes
- AnyRef
-
val
testMode: Boolean
Whether executing in test mode.
Whether executing in test mode. Assert on this to avoid undesired side effects during test runs.
-
val
timelineDate: LocalDateTime
The timeline date of the current execution.
-
def
toString(): String
- Definition Classes
- AnyRef → Any
-
val
user: String
The currently executing user.
The currently executing user. This will be a service user for non-test execution.
-
final
def
wait(arg0: Long, arg1: Int): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... ) @native()
-
final
def
wait(): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
Deprecated Value Members
-
def
finalize(): Unit
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( classOf[java.lang.Throwable] ) @Deprecated
- Deprecated