SparkSQL -- Ranking Each Subject's Teachers by Visit Count

This post only records a SparkSQL implementation of ranking teachers by visit count within each subject.

For the detailed RDD-based implementation, see this post: https://blog.csdn.net/weixin_43866709/article/details/88696934

The main goal here is to practice SQL window functions.
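Each input line is assumed to be an access-log URL whose host starts with the subject and whose last path segment is the teacher's name, for example (hypothetical sample lines, not from the original data set):

http://bigdata.example.com/teacherA
http://javaee.example.com/teacherB

From these two lines, the parsing code below would produce the (subject, teacher) pairs (bigdata, teacherA) and (javaee, teacherB).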

package XXX

import java.net.URL

import org.apache.spark.sql.{DataFrame, Dataset, SparkSession}

/**
  * Created by 。。。
  *
  */
object SQLFavTeacher {
  def main(args: Array[String]): Unit = {

    //val topN = args(1).toInt

    val spark = SparkSession.builder()
      .appName("SQLFavTeacher")
      .master("local[4]")
      .getOrCreate()

    val lines: Dataset[String] = spark.read.textFile(args(0))

    import spark.implicits._
    val df: DataFrame = lines.map(line => {
      //index of the teacher name (the last path segment of the URL)
      val tIndex = line.lastIndexOf("/") + 1
      val teacher = line.substring(tIndex)
      val host = new URL(line).getHost
      //index of the subject (the first label of the host)
      val sIndex = host.indexOf(".")
      val subject = host.substring(0, sIndex)
      (subject, teacher)
    }).toDF("subject", "teacher")

    //register df as a temporary view
    df.createTempView("v_subjectAndTeacher")


    //visit count of each teacher within each subject
    val temp1: DataFrame = spark.sql("SELECT subject,teacher,count(*) counts FROM v_subjectAndTeacher GROUP BY subject,teacher")

    //find the most popular teachers within each subject
    //first register temp1 as a temporary view
    temp1.createTempView("v_temp1_subjectAndTeacher_counts")

    //per-subject ranking, using window functions
    val temp2: DataFrame = spark.sql("SELECT subject,teacher,counts,row_number() over(partition by subject order by counts desc) subject_rk,row_number() over(order by counts desc) all_rank FROM v_temp1_subjectAndTeacher_counts")

    //SQL statement for taking the top N teachers per subject
    //s"SELECT * FROM (SELECT subject,teacher,counts,row_number() over(partition by subject order by counts desc) subject_rk,row_number() over(order by counts desc) all_rank FROM v_temp1_subjectAndTeacher_counts) temp2 WHERE subject_rk <= $topN"

    //after filtering, compute the overall ranking as well
    //s"SELECT *,dense_rank() over(order by counts desc) all_rk FROM (SELECT subject,teacher,counts,row_number() over(partition by subject order by counts desc) subject_rk FROM v_temp1_subjectAndTeacher_counts) temp2 WHERE subject_rk <= $topN"
    temp2.show()

    spark.stop()


  }

}
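For comparison, the same per-subject ranking can also be written with the DataFrame API instead of SQL strings. Below is a minimal sketch, not from the original post, assuming the same df with columns subject and teacher (and topN coming from the commented-out args(1)):

import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.functions.{col, desc, row_number}

//visit count per (subject, teacher), equivalent to temp1
val counts = df.groupBy("subject", "teacher").count().withColumnRenamed("count", "counts")

//window partitioned by subject, ordered by counts descending
val w = Window.partitionBy("subject").orderBy(desc("counts"))

//per-subject rank, equivalent to subject_rk in temp2
val ranked = counts.withColumn("subject_rk", row_number().over(w))
  //.where(col("subject_rk") <= topN)  //optional top-N filter

ranked.show()

Note that row_number() assigns a unique rank within each partition even when counts are tied; if tied visit counts should share a rank, dense_rank() (as in the commented-out query above) can be used instead.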
