PySpark SQL Script Template

The PySpark template consists of two parts, a shell script and a Python script; the Spark job is submitted through the shell script.
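For reference, the submit script shown below can be invoked in two ways (the dates here are only illustrative); note that the end date is treated as exclusive by the date filters in the Python script:

# Process an explicit date range
sh test_submit.sh 2024-01-01 2024-02-01
# With no arguments, the previous 31 days are processed
sh test_submit.sh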

Shell script: test_submit.sh

#!/bin/sh
source /etc/profile
source /usr/local/ods_shell/shell_config.properties
locate /bin/hive

if [ $# -eq 2 ];then
    echo "参数个数为2,使用传入参数作为处理数据时间区间"
    data_date_begin_tmp=$1
    data_date_end_tmp=$2
    data_date_begin=`date -d "$data_date_begin_tmp" +%Y-%m-%d`
    data_date_end=`date -d "$data_date_end_tmp" +%Y-%m-%d`
elif [ $# -eq 0 ];then
    echo "参数个数为0,默认处理前31天的数据"
    cur_date=`date +%Y-%m-%d`
    data_date_begin=`date -d "31 days ago $cur_date" +%Y-%m-%d`
    data_date_end=$cur_date
else
    echo "参数个数错误"
    exit 1
fi

echo "处理日期:[${data_date_begin}, ${data_date_end}]"

# Submit the job to the Spark cluster
spark-submit \
--master yarn \
--deploy-mode client \
--driver-memory 2g \
--num-executors 2 \
--executor-memory 4g \
--executor-cores 1 \
--conf spark.dynamicAllocation.maxExecutors=5 \
--conf spark.default.parallelism=10 \
--conf spark.sql.shuffle.partitions=5 \
--conf spark.sql.adaptive.enabled=true \
--conf spark.sql.adaptive.shuffle.targetPostShuffleInputSize=134217728 \
--queue queue_name \
/path/test.py ${data_date_begin} ${data_date_end}

Note: the submit parameters above still need further study and tuning.
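One point worth checking during that tuning: spark.dynamicAllocation.maxExecutors only takes effect when dynamic allocation is actually enabled, which it is not in the command above (a fixed --num-executors 2 is used instead). A minimal sketch of the extra flags that would be needed on YARN, with illustrative values:

--conf spark.dynamicAllocation.enabled=true \
--conf spark.dynamicAllocation.minExecutors=1 \
--conf spark.dynamicAllocation.maxExecutors=5 \
--conf spark.shuffle.service.enabled=true \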

Python script: test.py

import sys
from os.path import abspath

from pyspark.sql import SparkSession
from pyspark import SparkConf

import logging

logging.basicConfig(level=logging.INFO, format=" %(message)s")
logger = logging.getLogger("sparrow_qis_product_usage")

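# Point Spark SQL at the Hive warehouse and enable Hive support so that spark.sql()
# can read from and write to the Hive tables used below.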
warehouse_location = abspath("/user/hive/warehouse")
sparkconf = SparkConf() \
    .setAppName("sparrow_qis_product_usage") \
    .set("spark.sql.warehouse.dir", warehouse_location)
spark = SparkSession \
    .builder \
    .config(conf=sparkconf) \
    .enableHiveSupport() \
    .getOrCreate()
sc = spark.sparkContext
sc.setLogLevel("WARN")

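# Non-strict dynamic partitioning lets the INSERT OVERWRITE ... PARTITION(datatime)
# statements below take the partition value from the selected datatime column.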
spark.sql("set hive.exec.dynamic.partition.mode = nonstrict")
spark.sql("set hive.exec.dynamic.partition = true")


def product_usage_download_handler(spark, data_date_start, data_date_end):
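    """Aggregate raw download events in [data_date_start, data_date_end) and load the
    results into the DWS and DM download tables (both partitioned by datatime)."""
    # Step 1: pull the raw download records for the date range and normalise
    # placeholder or unrecognised version/system/browser/geo values to 'other'.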
    query_sql = """
        select
        product_id,
        (case when version_name in ('0','None','trial','-') then 'other' else version_name end) as version_name,
        (case when agent_system in ('Windows','Android','Mac OS X','iOS','Chrome OS','Windows Phone','Linux') then agent_system else 'other' end) as agent_system,
        (case when regexp_replace(regexp_replace(agent_system_version,'[a-zA-Z]+',''),'\\\\.$|\\\\s','') in ('','-')
        then 'other' else agent_system_version end) as agent_system_version,
        (case when agent_system_type in ('mobile','pc','tablet') then agent_system_type else 'other' end) as agent_system_type,
        (case when agent_browser in ('Chrome','Firefox','Edge','Safari','IE','Chrome Mobile','Opera','Mobile Safari','QQ Browser','UC Browser','Sogou Explorer','Samsung Internet','Firefox Mobile',
        'Chrome Mobile WebView','Yandex Browser') then agent_browser else 'other' end) as agent_browser,
        (case when regexp_replace(regexp_replace(agent_browser_version,'[a-zA-Z]+',''),'\\\\.$|\\\\s','') in ('','-') then 'other' else agent_browser_version end) as agent_browser_version,
        agent_is_bot,
        (case when country_code in ('','-','None') then 'other' else country_code end) as country_code,
        (case when state_code in ('','-','None') then 'other' else state_code end) as state_code,
        (case when city in ('','-','None') then 'other' else city end) as city,
        substr(download_start,1,10) as datatime
        from sqoop_import_db.bas_product_usage_download
        where substr(download_start,1,10) >='{data_date_start}' and substr(download_start,1,10) < '{data_date_end}'
        and product_id!=0
        """.format(data_date_start=data_date_start, data_date_end=data_date_end)
    df = spark.sql(query_sql)
    df.registerTempTable("product_usage_download_tmp1")

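    # Step 2: truncate version strings to major.minor.<first patch digit>, map agent
    # values onto the dimension-table vocabulary (e.g. 'pc' -> 'desktop',
    # 'Mac OS X' -> 'Macintosh'), and count downloads per combination.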
    query_sql = """
        select
        product_id,
        (case when version_name='other' then 'other'
        when size(split(version_name,'\\\\.')) >= 3 then concat(split(version_name,'\\\\.')[0], '.', split(version_name,'\\\\.')[1], '.', substr(split(version_name,'\\\\.')[2],0,1))
        when size(split(version_name,'\\\\.')) = 2 then concat(split(version_name,'\\\\.')[0], '.', split(version_name,'\\\\.')[1], '.', '0')
        when size(split(version_name,'\\\\.')) = 1 and version_name != '' then concat(split(version_name,'\\\\.')[0], '.', '0', '.', '0')
        else 'other'
        end) as version_name,
        (case when agent_system='Mac OS X' then 'Macintosh' else agent_system end) as agent_system,
        (case when agent_system_version='other' then 'other'
        when size(split(agent_system_version,'\\\\.')) >= 3 then concat(split(agent_system_version,'\\\\.')[0], '.', split(agent_system_version,'\\\\.')[1], '.', substr(split(agent_system_version,'\\\\.')[2],0,1))
        when size(split(agent_system_version,'\\\\.')) = 2 then concat(split(agent_system_version,'\\\\.')[0], '.', split(agent_system_version,'\\\\.')[1], '.', '0')
        when size(split(agent_system_version,'\\\\.')) = 1 and agent_system_version != '' then concat(split(agent_system_version,'\\\\.')[0], '.', '0', '.', '0')
        else 'other'
        end) as agent_system_version,
        (case when agent_system_type='pc' then 'desktop' else agent_system_type end) as agent_system_type,
        (case when agent_browser in ('Chrome Mobile','Chrome Mobile WebView') then 'Chrome'
        when agent_browser='IE' then 'Internet Explorer'
        when agent_browser='Mobile Safari' then 'Safari'
        else agent_browser
        end) as agent_browser,
        (case when agent_browser_version='other' then 'other'
        when size(split(agent_browser_version,'\\\\.')) >= 3 then concat(split(agent_browser_version,'\\\\.')[0], '.', split(agent_browser_version,'\\\\.')[1], '.', substr(split(agent_browser_version,'\\\\.')[2],0,1))
        when size(split(agent_browser_version,'\\\\.')) = 2 then concat(split(agent_browser_version,'\\\\.')[0], '.', split(agent_browser_version,'\\\\.')[1], '.', '0')
        when size(split(agent_browser_version,'\\\\.')) = 1 and agent_browser_version != '' then concat(split(agent_browser_version,'\\\\.')[0], '.', '0', '.', '0')
        else 'other'
        end) as agent_browser_version,
        agent_is_bot,
        country_code,
        state_code,
        city,
        count(1) as count_download,
        datatime
        from product_usage_download_tmp1
        group by product_id,version_name,agent_system,
        agent_system_version,agent_system_type,
        agent_browser,agent_browser_version,
        agent_is_bot,country_code,state_code,city,datatime
        """
    df = spark.sql(query_sql)
    df.registerTempTable("product_usage_download_tmp2")

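    # Step 3: resolve dimension ids (country, operating system, device category,
    # browser) via the dim_* tables and overwrite the affected DWS partitions.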
    insert_sql = """
        insert overwrite table dws_sparrow_user_analyze.dws_qis_product_usage_download partition(datatime)
        select
        product_id,
        version_name,
        t4.id as operatingsystem_id,
        agent_system_version,
        t5.id as devicecategory_id,
        t6.id as browser_id,
        agent_browser_version,
        agent_is_bot,
        t3.country_en,
        t3.country_id,
        state_code,
        city,
        count_download,
        datatime
        from product_usage_download_tmp2 t1
        left join sqoop_import_dim.dim_country t2 on t1.country_code = t2.country
        left join sqoop_import_dim.dim_country_ods t3 on lower(t2.country_a2) = lower(t3.country_en)
        left join sqoop_import_dim.dim_ga_operatingsystem t4 on t1.agent_system = t4.operatingsystem
        left join sqoop_import_dim.dim_ga_devicecategory t5 on t1.agent_system_type = t5.devicecategory
        left join sqoop_import_dim.dim_ga_browser t6 on t1.agent_browser = t6.browser
        """
    spark.sql(insert_sql)

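    # Step 4: roll the DWS data up into the DM table, summing downloads across
    # product version, bot flag, state and city.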
    insert_sql = """
        insert overwrite table dm_sparrow_user_analyze.dm_qis_product_usage_download_test partition(datatime)
        select
        product_id,
        country_id,
        operatingsystem_id,
        agent_system_version,
        devicecategory_id,
        browser_id,
        agent_browser_version,
        sum(count_download) as count_download, datatime
        from dws_sparrow_user_analyze.dws_qis_product_usage_download
        where datatime >= '{data_date_start}' and datatime < '{data_date_end}'
        group by product_id, country_id, operatingsystem_id, agent_system_version,
        devicecategory_id, browser_id,agent_browser_version, datatime
        """.format(data_date_start=data_date_start, data_date_end=data_date_end)
    spark.sql(insert_sql)


def create_table(spark):
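    """Create the DWS and DM target tables if they do not already exist
    (parquet format, partitioned by datatime)."""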
    create_sql = """
        create table if not exists dws_sparrow_user_analyze.dws_qis_product_usage_download(
            product_id             string       comment 'product id',
            version_name           string       comment 'product version',
            operatingsystem_id     string       comment 'operating system',
            agent_system_version   string       comment 'operating system version',
            devicecategory_id      string       comment 'device category',
            browser_id             string       comment 'browser',
            agent_browser_version  string       comment 'browser version',
            agent_is_bot           tinyint      comment 'bot flag: 0 = no, 1 = yes',
            country_en             string       comment 'country',
            country_id             string       comment 'country id',
            state_code             string       comment 'state/province',
            city                   string       comment 'city',
            count_download         int          comment 'total downloads'
        )
        comment 'CBS download totals table'
        partitioned by (datatime string)
        stored as parquet
        """
    spark.sql(create_sql)

    create_sql = """
        create table if not exists dm_sparrow_user_analyze.dm_qis_product_usage_download_test(
            product_id             string       comment 'product id',
            country_id             string       comment 'country id',
            operatingsystem_id     string       comment 'operating system',
            agent_system_version   string       comment 'operating system version',
            devicecategory_id      string       comment 'device category',
            browser_id             string       comment 'browser',
            agent_browser_version  string       comment 'browser version',
            count_download         int          comment 'total downloads'
        )
        comment 'CBS download totals table'
        partitioned by (datatime string)
        stored as parquet
        """
    spark.sql(create_sql)


if __name__ == '__main__':
    # The start and end dates are passed in by test_submit.sh in %Y-%m-%d format.
    data_date_start = sys.argv[1]
    data_date_end = sys.argv[2]
    create_table(spark)
    product_usage_download_handler(spark, data_date_start, data_date_end)
    spark.stop()
