Spark 2.2.0 in Action: Two Ways to Convert an RDD to a DataFrame (Part 2)

Spark SQL supports two different methods for converting existing RDDs into Datasets.

The previous article walked through the first method, conversion via reflection-based mapping, with an example; this article demonstrates the second method.


The second method uses a programmatic interface that lets you construct a schema and then apply it to an existing RDD. While this approach is more verbose, it lets you build a Dataset when the columns and their types are not known until runtime. It boils down to three steps: (1) create an RDD of Rows from the original RDD; (2) define the schema as a StructType whose fields match the structure of those Rows; (3) apply the schema to the Row RDD with createDataFrame. A sketch of these steps follows; the full runnable example comes after the data preparation.
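As a loose illustration (not the article's code), here is a minimal sketch of the three steps using the Spark 2.x SparkSession entry point. The class name SchemaSketch and the in-memory sample line are placeholders introduced for this sketch:

import java.util.Arrays;

import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.RowFactory;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructType;

public class SchemaSketch {
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder()
                .master("local[*]").appName("SchemaSketch").getOrCreate();
        JavaSparkContext jsc = JavaSparkContext.fromSparkContext(spark.sparkContext());

        // 1. Create an RDD of Rows (from an in-memory list here, for brevity).
        JavaRDD<Row> rows = jsc.parallelize(Arrays.asList("1001,20,zhangsan"))
                .map(line -> {
                    String[] f = line.split(",");
                    return RowFactory.create(Integer.valueOf(f[0]), Integer.valueOf(f[1]), f[2]);
                });

        // 2. Define the schema as a StructType matching the Row structure.
        StructType schema = new StructType()
                .add("id", DataTypes.IntegerType)
                .add("age", DataTypes.IntegerType)
                .add("name", DataTypes.StringType);

        // 3. Apply the schema to the RDD of Rows.
        Dataset<Row> df = spark.createDataFrame(rows, schema);
        df.show();

        spark.stop();
    }
}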

Prepare the test data file studentData.txt (each line is id,age,name):

1001,20,zhangsan
1002,17,lisi
1003,24,wangwu
1004,16,zhaogang

Code example:

package com.unicom.ljs.spark220.study;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.sql.*;
import org.apache.spark.sql.types.*;

import java.util.ArrayList;
import java.util.List;

/**
 * @author: Created By lujisen
 * @company ChinaUnicom Software JiNan
 * @date: 2020-01-21 13:42
 * @version: v1.0
 * @description: com.unicom.ljs.spark220.study
 */
public class RDD2DataFrameProgramatically {
    public static void main(String[] args) {
        SparkConf sparkConf = new SparkConf().setMaster("local[*]").setAppName("RDD2DataFrameProgramatically");
        JavaSparkContext sc = new JavaSparkContext(sparkConf);
        SQLContext sqlContext = new SQLContext(sc);

        // Step 1: load the raw text file and convert each line into a Row.
        JavaRDD<String> lineRDD = sc.textFile("C:\\Users\\Administrator\\Desktop\\studentData.txt");
        JavaRDD<Row> rowJavaRDD = lineRDD.map(new Function<String, Row>() {
            @Override
            public Row call(String line) throws Exception {
                // Each line is "id,age,name".
                String[] splitLine = line.split(",");
                return RowFactory.create(Integer.valueOf(splitLine[0]),
                        Integer.valueOf(splitLine[1]),
                        splitLine[2]);
            }
        });

        // Step 2: build the schema programmatically; the field order must
        // match the order of the values passed to RowFactory.create above.
        List<StructField> structFields = new ArrayList<StructField>();
        structFields.add(DataTypes.createStructField("id", DataTypes.IntegerType, true));
        structFields.add(DataTypes.createStructField("age", DataTypes.IntegerType, true));
        structFields.add(DataTypes.createStructField("name", DataTypes.StringType, true));
        StructType structType = DataTypes.createStructType(structFields);

        // Step 3: apply the schema to the RDD of Rows.
        Dataset<Row> dataFrame = sqlContext.createDataFrame(rowJavaRDD, structType);

        // Register a temporary table so it can be queried with SQL.
        // (In Spark 2.x, createOrReplaceTempView is the non-deprecated equivalent.)
        dataFrame.registerTempTable("studentInfo");
        Dataset<Row> resultDataSet = sqlContext.sql("select * from studentInfo where age > 17");

        List<Row> collect = resultDataSet.javaRDD().collect();
        for (Row row : collect) {
            System.out.println(row);
        }
        sc.close();
    }
}
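With the sample data above, the filter age > 17 keeps zhangsan (age 20) and wangwu (age 24), so running the program locally should print something like:

[1001,20,zhangsan]
[1003,24,wangwu]

Each printed line is simply Row's default toString rendering of the matching record.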

Key pom.xml dependencies:

<properties>
    <spark.version>2.2.0</spark.version>
    <scala.version>2.11.8</scala.version>
</properties>

<dependency>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-sql_2.11</artifactId>
    <version>${spark.version}</version>
</dependency>
<dependency>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-core_2.11</artifactId>
    <version>${spark.version}</version>
</dependency>