public class SparkUtil extends Object
| Constructor and Description |
|---|
SparkUtil() |
| Modifier and Type | Method and Description |
|---|---|
static void |
assertPredecessorSize(List<<any>> predecessors,
PhysicalOperator physicalOperator,
int size) |
static void |
assertPredecessorSizeGreaterThan(List<<any>> predecessors,
PhysicalOperator physicalOperator,
int size) |
static void |
createIndexerSparkNode(SparkOperator baseSparkOp,
String scope,
NodeIdGenerator nig) |
static <T> <any> |
getManifest(Class<T> clazz) |
static Partitioner |
getPartitioner(String customPartitioner,
int parallelism) |
static <K,V> <any> |
getProduct2Manifest() |
static <K,V> <any> |
getTuple2Manifest() |
static org.apache.hadoop.mapred.JobConf |
newJobConf(PigContext pigContext,
PhysicalPlan physicalPlan,
SparkEngineConf sparkEngineConf) |
static <T> <any> |
toScalaSeq(List<T> list) |
public static <T> <any> getManifest(Class<T> clazz)
public static <K,V> <any> getTuple2Manifest()
public static <K,V> <any> getProduct2Manifest()
public static org.apache.hadoop.mapred.JobConf newJobConf(PigContext pigContext, PhysicalPlan physicalPlan, SparkEngineConf sparkEngineConf) throws IOException
public static <T> <any> toScalaSeq(List<T> list)
public static void assertPredecessorSize(List<<any>> predecessors, PhysicalOperator physicalOperator, int size)
public static void assertPredecessorSizeGreaterThan(List<<any>> predecessors, PhysicalOperator physicalOperator, int size)
public static Partitioner getPartitioner(String customPartitioner, int parallelism)
public static void createIndexerSparkNode(SparkOperator baseSparkOp, String scope, NodeIdGenerator nig) throws PlanException, ExecException
Copyright © 2007-2017 The Apache Software Foundation