Modifier and Type | Method and Description
Tuple<java.util.Collection<ExecutionLineageNode>,java.util.Collection<ChannelInstance>> | SparkBernoulliSampleOperator.evaluate(ChannelInstance[] inputs, ChannelInstance[] outputs, SparkExecutor sparkExecutor, OptimizationContext.OperatorContext operatorContext)
Tuple<java.util.Collection<ExecutionLineageNode>,java.util.Collection<ChannelInstance>> | SparkBroadcastOperator.evaluate(ChannelInstance[] inputs, ChannelInstance[] outputs, SparkExecutor sparkExecutor, OptimizationContext.OperatorContext operatorContext)
Tuple<java.util.Collection<ExecutionLineageNode>,java.util.Collection<ChannelInstance>> | SparkCacheOperator.evaluate(ChannelInstance[] inputs, ChannelInstance[] outputs, SparkExecutor sparkExecutor, OptimizationContext.OperatorContext operatorContext)
Tuple<java.util.Collection<ExecutionLineageNode>,java.util.Collection<ChannelInstance>> | SparkCartesianOperator.evaluate(ChannelInstance[] inputs, ChannelInstance[] outputs, SparkExecutor sparkExecutor, OptimizationContext.OperatorContext operatorContext)
Tuple<java.util.Collection<ExecutionLineageNode>,java.util.Collection<ChannelInstance>> | SparkCoGroupOperator.evaluate(ChannelInstance[] inputs, ChannelInstance[] outputs, SparkExecutor sparkExecutor, OptimizationContext.OperatorContext operatorContext)
Tuple<java.util.Collection<ExecutionLineageNode>,java.util.Collection<ChannelInstance>> | SparkCollectionSource.evaluate(ChannelInstance[] inputs, ChannelInstance[] outputs, SparkExecutor sparkExecutor, OptimizationContext.OperatorContext operatorContext)
Tuple<java.util.Collection<ExecutionLineageNode>,java.util.Collection<ChannelInstance>> | SparkCollectOperator.evaluate(ChannelInstance[] inputs, ChannelInstance[] outputs, SparkExecutor sparkExecutor, OptimizationContext.OperatorContext operatorContext)
Tuple<java.util.Collection<ExecutionLineageNode>,java.util.Collection<ChannelInstance>> | SparkCountOperator.evaluate(ChannelInstance[] inputs, ChannelInstance[] outputs, SparkExecutor sparkExecutor, OptimizationContext.OperatorContext operatorContext)
Tuple<java.util.Collection<ExecutionLineageNode>,java.util.Collection<ChannelInstance>> | SparkDistinctOperator.evaluate(ChannelInstance[] inputs, ChannelInstance[] outputs, SparkExecutor sparkExecutor, OptimizationContext.OperatorContext operatorContext)
Tuple<java.util.Collection<ExecutionLineageNode>,java.util.Collection<ChannelInstance>> | SparkDoWhileOperator.evaluate(ChannelInstance[] inputs, ChannelInstance[] outputs, SparkExecutor sparkExecutor, OptimizationContext.OperatorContext operatorContext)
Tuple<java.util.Collection<ExecutionLineageNode>,java.util.Collection<ChannelInstance>> | SparkExecutionOperator.evaluate(ChannelInstance[] inputs, ChannelInstance[] outputs, SparkExecutor sparkExecutor, OptimizationContext.OperatorContext operatorContext)
  Evaluates this operator.
Tuple<java.util.Collection<ExecutionLineageNode>,java.util.Collection<ChannelInstance>> | SparkFilterOperator.evaluate(ChannelInstance[] inputs, ChannelInstance[] outputs, SparkExecutor sparkExecutor, OptimizationContext.OperatorContext operatorContext)
Tuple<java.util.Collection<ExecutionLineageNode>,java.util.Collection<ChannelInstance>> | SparkFlatMapOperator.evaluate(ChannelInstance[] inputs, ChannelInstance[] outputs, SparkExecutor sparkExecutor, OptimizationContext.OperatorContext operatorContext)
Tuple<java.util.Collection<ExecutionLineageNode>,java.util.Collection<ChannelInstance>> | SparkGlobalMaterializedGroupOperator.evaluate(ChannelInstance[] inputs, ChannelInstance[] outputs, SparkExecutor sparkExecutor, OptimizationContext.OperatorContext operatorContext)
Tuple<java.util.Collection<ExecutionLineageNode>,java.util.Collection<ChannelInstance>> | SparkGlobalReduceOperator.evaluate(ChannelInstance[] inputs, ChannelInstance[] outputs, SparkExecutor sparkExecutor, OptimizationContext.OperatorContext operatorContext)
Tuple<java.util.Collection<ExecutionLineageNode>,java.util.Collection<ChannelInstance>> | SparkIntersectOperator.evaluate(ChannelInstance[] inputs, ChannelInstance[] outputs, SparkExecutor sparkExecutor, OptimizationContext.OperatorContext operatorContext)
Tuple<java.util.Collection<ExecutionLineageNode>,java.util.Collection<ChannelInstance>> | SparkJoinOperator.evaluate(ChannelInstance[] inputs, ChannelInstance[] outputs, SparkExecutor sparkExecutor, OptimizationContext.OperatorContext operatorContext)
Tuple<java.util.Collection<ExecutionLineageNode>,java.util.Collection<ChannelInstance>> | SparkKafkaTopicSink.evaluate(ChannelInstance[] inputs, ChannelInstance[] outputs, SparkExecutor sparkExecutor, OptimizationContext.OperatorContext operatorContext)
Tuple<java.util.Collection<ExecutionLineageNode>,java.util.Collection<ChannelInstance>> | SparkKafkaTopicSource.evaluate(ChannelInstance[] inputs, ChannelInstance[] outputs, SparkExecutor sparkExecutor, OptimizationContext.OperatorContext operatorContext)
Tuple<java.util.Collection<ExecutionLineageNode>,java.util.Collection<ChannelInstance>> | SparkLocalCallbackSink.evaluate(ChannelInstance[] inputs, ChannelInstance[] outputs, SparkExecutor sparkExecutor, OptimizationContext.OperatorContext operatorContext)
Tuple<java.util.Collection<ExecutionLineageNode>,java.util.Collection<ChannelInstance>> | SparkLoopOperator.evaluate(ChannelInstance[] inputs, ChannelInstance[] outputs, SparkExecutor sparkExecutor, OptimizationContext.OperatorContext operatorContext)
Tuple<java.util.Collection<ExecutionLineageNode>,java.util.Collection<ChannelInstance>> | SparkMapOperator.evaluate(ChannelInstance[] inputs, ChannelInstance[] outputs, SparkExecutor sparkExecutor, OptimizationContext.OperatorContext operatorContext)
Tuple<java.util.Collection<ExecutionLineageNode>,java.util.Collection<ChannelInstance>> | SparkMapPartitionsOperator.evaluate(ChannelInstance[] inputs, ChannelInstance[] outputs, SparkExecutor sparkExecutor, OptimizationContext.OperatorContext operatorContext)
Tuple<java.util.Collection<ExecutionLineageNode>,java.util.Collection<ChannelInstance>> | SparkMaterializedGroupByOperator.evaluate(ChannelInstance[] inputs, ChannelInstance[] outputs, SparkExecutor sparkExecutor, OptimizationContext.OperatorContext operatorContext)
Tuple<java.util.Collection<ExecutionLineageNode>,java.util.Collection<ChannelInstance>> | SparkObjectFileSink.evaluate(ChannelInstance[] inputs, ChannelInstance[] outputs, SparkExecutor sparkExecutor, OptimizationContext.OperatorContext operatorContext)
Tuple<java.util.Collection<ExecutionLineageNode>,java.util.Collection<ChannelInstance>> | SparkObjectFileSource.evaluate(ChannelInstance[] inputs, ChannelInstance[] outputs, SparkExecutor sparkExecutor, OptimizationContext.OperatorContext operatorContext)
Tuple<java.util.Collection<ExecutionLineageNode>,java.util.Collection<ChannelInstance>> | SparkRandomPartitionSampleOperator.evaluate(ChannelInstance[] inputs, ChannelInstance[] outputs, SparkExecutor sparkExecutor, OptimizationContext.OperatorContext operatorContext)
Tuple<java.util.Collection<ExecutionLineageNode>,java.util.Collection<ChannelInstance>> | SparkReduceByOperator.evaluate(ChannelInstance[] inputs, ChannelInstance[] outputs, SparkExecutor sparkExecutor, OptimizationContext.OperatorContext operatorContext)
Tuple<java.util.Collection<ExecutionLineageNode>,java.util.Collection<ChannelInstance>> | SparkRepeatOperator.evaluate(ChannelInstance[] inputs, ChannelInstance[] outputs, SparkExecutor sparkExecutor, OptimizationContext.OperatorContext operatorContext)
Tuple<java.util.Collection<ExecutionLineageNode>,java.util.Collection<ChannelInstance>> | SparkShufflePartitionSampleOperator.evaluate(ChannelInstance[] inputs, ChannelInstance[] outputs, SparkExecutor sparkExecutor, OptimizationContext.OperatorContext operatorContext)
Tuple<java.util.Collection<ExecutionLineageNode>,java.util.Collection<ChannelInstance>> | SparkSortOperator.evaluate(ChannelInstance[] inputs, ChannelInstance[] outputs, SparkExecutor sparkExecutor, OptimizationContext.OperatorContext operatorContext)
Tuple<java.util.Collection<ExecutionLineageNode>,java.util.Collection<ChannelInstance>> | SparkTextFileSink.evaluate(ChannelInstance[] inputs, ChannelInstance[] outputs, SparkExecutor sparkExecutor, OptimizationContext.OperatorContext operatorContext)
Tuple<java.util.Collection<ExecutionLineageNode>,java.util.Collection<ChannelInstance>> | SparkTextFileSource.evaluate(ChannelInstance[] inputs, ChannelInstance[] outputs, SparkExecutor sparkExecutor, OptimizationContext.OperatorContext operatorContext)
Tuple<java.util.Collection<ExecutionLineageNode>,java.util.Collection<ChannelInstance>> | SparkTsvFileSink.evaluate(ChannelInstance[] inputs, ChannelInstance[] outputs, SparkExecutor sparkExecutor, OptimizationContext.OperatorContext operatorContext)
Tuple<java.util.Collection<ExecutionLineageNode>,java.util.Collection<ChannelInstance>> | SparkTsvFileSource.evaluate(ChannelInstance[] inputs, ChannelInstance[] outputs, SparkExecutor sparkExecutor, OptimizationContext.OperatorContext operatorContext)
Tuple<java.util.Collection<ExecutionLineageNode>,java.util.Collection<ChannelInstance>> | SparkUnionAllOperator.evaluate(ChannelInstance[] inputs, ChannelInstance[] outputs, SparkExecutor sparkExecutor, OptimizationContext.OperatorContext operatorContext)
Tuple<java.util.Collection<ExecutionLineageNode>,java.util.Collection<ChannelInstance>> | SparkZipWithIdOperator.evaluate(ChannelInstance[] inputs, ChannelInstance[] outputs, SparkExecutor sparkExecutor, OptimizationContext.OperatorContext operatorContext)
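All of the methods above fulfil the contract declared by SparkExecutionOperator.evaluate: consume the operator's input ChannelInstances, execute the operator on the given SparkExecutor, populate the output ChannelInstances, and return the ExecutionLineageNodes together with the ChannelInstances that were actually produced. The sketch below illustrates that general shape for a hypothetical map-style operator; the RddChannel.Instance accessors, the ExecutionLineageNode constructor, and the omitted framework imports are assumptions inferred from this signature, not verbatim library code.

    // Sketch of a hypothetical map-style operator. Framework types (Tuple, ChannelInstance,
    // SparkExecutor, OptimizationContext, ExecutionLineageNode, RddChannel) are assumed to be
    // imported from the framework's own packages.
    import java.util.Arrays;
    import java.util.Collection;
    import java.util.Collections;

    import org.apache.spark.api.java.JavaRDD;

    public class UppercaseSparkOperator {

        public Tuple<Collection<ExecutionLineageNode>, Collection<ChannelInstance>> evaluate(
                ChannelInstance[] inputs,
                ChannelInstance[] outputs,
                SparkExecutor sparkExecutor,
                OptimizationContext.OperatorContext operatorContext) {

            // One input channel and one output channel; both assumed to carry RDDs.
            RddChannel.Instance input = (RddChannel.Instance) inputs[0];
            RddChannel.Instance output = (RddChannel.Instance) outputs[0];

            // Pull the RDD from the input channel and apply this operator's logic
            // (a plain map here, standing in for the operator-specific function).
            JavaRDD<String> inputRdd = input.provideRdd();
            JavaRDD<String> outputRdd = inputRdd.map(String::toUpperCase);

            // Hand the transformed RDD to the output channel for downstream operators.
            output.accept(outputRdd, sparkExecutor);

            // Report what was executed so the optimizer can attribute costs and refine
            // cardinality estimates (assumed ExecutionLineageNode constructor).
            ExecutionLineageNode lineageNode = new ExecutionLineageNode(operatorContext);

            return new Tuple<>(Collections.singleton(lineageNode), Arrays.asList(outputs));
        }
    }

Broadly speaking, the per-operator classes in the table differ only in which Spark transformation, action, or I/O call they issue in the middle step (map, filter, join, count, sampling, file or Kafka access, and so on) and in how many input and output channels they handle.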
|