Today I started learning how to create DataFrames.
# coding:utf8
from pyspark.sql import SparkSession
from pyspark.sql.types import StructType, StringType, IntegerType
import pandas as pd

if __name__ == '__main__':
    spark = SparkSession.builder. \
        appName("test"). \
        master("local[*]"). \
        getOrCreate()
    sc = spark.sparkContext

    rdd = sc.textFile("../data/input/people.txt"). \
        map(lambda x: x.split(",")). \
        map(lambda x: (x[0], int(x[1])))

    # Create a DataFrame from an RDD
    # Parameter 1: the RDD to convert; parameter 2: the column names
    df = spark.createDataFrame(rdd, schema=['name', 'age'])
    df.printSchema()
    # Parameter 1: how many rows to show (default 20); parameter 2: whether to truncate long columns
    df.show(20, False)

    # Build the schema description object: a StructType
    schema = StructType().add("name", StringType(), nullable=True). \
        add("age", IntegerType(), nullable=False)
    # Convert the RDD to a DataFrame based on the StructType
    spark.createDataFrame(rdd, schema=schema)

    # toDF approach
    rdd.toDF(["name", "age"])
    rdd.toDF(schema=schema)

    # Build from a pandas DataFrame
    pdf = pd.DataFrame(
        {
            "id": [1, 2, 3],
            "name": ["张", "王", "李"],
            "age": [11, 21, 13]
        }
    )
    df = spark.createDataFrame(pdf)
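To try the StructType route without depending on people.txt, spark.createDataFrame also accepts a local Python list of tuples directly. Below is a minimal sketch I added for myself; the sample rows ("Alice", 20) and ("Bob", 25) are made-up placeholder data, not from the course material.

# coding:utf8
from pyspark.sql import SparkSession
from pyspark.sql.types import StructType, StringType, IntegerType

if __name__ == '__main__':
    spark = SparkSession.builder. \
        appName("test_local_list"). \
        master("local[*]"). \
        getOrCreate()

    # Same schema as above: name is a nullable string, age a non-null integer
    schema = StructType().add("name", StringType(), nullable=True). \
        add("age", IntegerType(), nullable=False)

    # Placeholder rows (made-up data); createDataFrame takes a local list of tuples too
    data = [("Alice", 20), ("Bob", 25)]
    df = spark.createDataFrame(data, schema=schema)

    df.printSchema()
    # root
    #  |-- name: string (nullable = true)
    #  |-- age: integer (nullable = false)
    df.show()

This avoids the file dependency and makes it easy to confirm that the column names and types come out exactly as declared in the StructType.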