利用spring-boot构建spark job作业提交服务

版本依赖

spark.version:2.1.0
hadoop.version:2.6.5
springboot-mybatis.version:1.1.1
springboot:1.5.10

实现功能

通过HTTP提交job作业请求,并记录日志到数据库中
项目DAO部分使用mybatis实现,本文中不做记录

编码实现

pom

<!-- Maven POM for the Spark-job-submission Spring Boot service.
     Reconstructed: the original extraction dropped the "</" of every closing tag. -->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.test</groupId>
    <artifactId>miniSparkJobServer</artifactId>
    <version>0.0.1-SNAPSHOT</version>
    <name>miniSparkJobServer</name>
    <description>micro-server</description>
    <properties>
        <!-- mybatis-spring-boot integration version -->
        <springboot-mybatis.version>1.1.1</springboot-mybatis.version>
        <!-- logging: slf4j facade + log4j backend (spark/hadoop logging excluded below) -->
        <slf4j.version>1.7.7</slf4j.version>
        <log4j.version>1.2.17</log4j.version>
        <spark.version>2.1.0</spark.version>
        <hadoop.version>2.6.5</hadoop.version>
        <java.version>1.8</java.version>
    </properties>

    <parent>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-parent</artifactId>
        <version>1.5.10.RELEASE</version>
    </parent>
    <dependencies>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-web</artifactId>
        </dependency>
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>4.11</version>
            <scope>test</scope>
        </dependency>
        <!-- Spark core; servlet-api and logging are excluded so they do not
             clash with the versions Spring Boot brings in. -->
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-core_2.11</artifactId>
            <version>${spark.version}</version>
            <exclusions>
                <exclusion>
                    <artifactId>javax.servlet-api</artifactId>
                    <groupId>javax.servlet</groupId>
                </exclusion>
                <exclusion>
                    <artifactId>slf4j-api</artifactId>
                    <groupId>org.slf4j</groupId>
                </exclusion>
                <exclusion>
                    <artifactId>log4j</artifactId>
                    <groupId>log4j</groupId>
                </exclusion>
                <exclusion>
                    <artifactId>slf4j-log4j12</artifactId>
                    <groupId>org.slf4j</groupId>
                </exclusion>
            </exclusions>
        </dependency>

        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-client</artifactId>
            <version>${hadoop.version}</version>
            <exclusions>
                <exclusion>
                    <artifactId>servlet-api</artifactId>
                    <groupId>javax.servlet</groupId>
                </exclusion>
                <exclusion>
                    <artifactId>slf4j-api</artifactId>
                    <groupId>org.slf4j</groupId>
                </exclusion>
                <exclusion>
                    <artifactId>log4j</artifactId>
                    <groupId>log4j</groupId>
                </exclusion>
                <exclusion>
                    <artifactId>slf4j-log4j12</artifactId>
                    <groupId>org.slf4j</groupId>
                </exclusion>
            </exclusions>
        </dependency>

        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-common</artifactId>
            <version>${hadoop.version}</version>
            <exclusions>
                <exclusion>
                    <artifactId>servlet-api</artifactId>
                    <groupId>javax.servlet</groupId>
                </exclusion>
                <exclusion>
                    <artifactId>slf4j-api</artifactId>
                    <groupId>org.slf4j</groupId>
                </exclusion>
                <exclusion>
                    <artifactId>log4j</artifactId>
                    <groupId>log4j</groupId>
                </exclusion>
                <exclusion>
                    <artifactId>slf4j-log4j12</artifactId>
                    <groupId>org.slf4j</groupId>
                </exclusion>
            </exclusions>
        </dependency>

        <dependency>
            <groupId>org.mybatis.spring.boot</groupId>
            <artifactId>mybatis-spring-boot-starter</artifactId>
            <version>${springboot-mybatis.version}</version>
        </dependency>
        <dependency>
            <groupId>mysql</groupId>
            <artifactId>mysql-connector-java</artifactId>
            <version>5.1.22</version>
        </dependency>

    </dependencies>

    <!-- Build profiles select which src/main/resources/<env>/ directory is packaged. -->
    <profiles>
        <profile>
            <id>dev</id>
            <properties>
                <profiles.active>dev</profiles.active>
            </properties>
            <activation>
                <activeByDefault>true</activeByDefault>
            </activation>
        </profile>
        <profile>
            <id>test</id>
            <properties>
                <profiles.active>test</profiles.active>
            </properties>
        </profile>
        <profile>
            <id>product</id>
            <properties>
                <profiles.active>product</profiles.active>
            </properties>
        </profile>
    </profiles>


    <build>
        <resources>
            <resource>
                <directory>src/main/resources</directory>
                <!-- exclude all env-specific folders, then add back the active one -->
                <excludes>
                    <exclude>test/**</exclude>
                    <exclude>product/**</exclude>
                    <exclude>dev/**</exclude>
                </excludes>
            </resource>
            <resource>
                <directory>src/main/resources/${profiles.active}</directory>
            </resource>
        </resources>
        <finalName>miniSparkJobServer</finalName>
        <plugins>
            <plugin>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
            </plugin>
        </plugins>
    </build>

</project>

框架配置application.properties

# Package scanned by MyBatis for result-map entity aliases.
mybatis.type-aliases-package=com.test.sparkjob.entity

# MySQL datasource used to persist Spark job submission logs.
# NOTE(review): MySQL's default port is 3306 — confirm 3360 is intentional.
spring.datasource.driverClassName = com.mysql.jdbc.Driver
spring.datasource.url = jdbc:mysql://localhost:3360/spark_test?useUnicode=true&characterEncoding=utf-8
spring.datasource.username = root
# NOTE(review): plaintext credentials — move to an externalized/encrypted config for production.
spring.datasource.password = 123456

SparkJobApplication

package com.test;

import org.mybatis.spring.annotation.MapperScan;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

/**
 * Entry point of the Spark job submission service.
 *
 * <p>Bootstraps the Spring context and registers the MyBatis mapper
 * interfaces found under {@code com.test.sparkjob.dao}.
 */
@MapperScan("com.test.sparkjob.dao")
@SpringBootApplication
public class SparkJobApplication {
    public static void main(String[] args) throws Exception {
        // Instance form of the static SpringApplication.run(...) shortcut.
        new SpringApplication(SparkJobApplication.class).run(args);
    }
}

controller

package com.test.sparkjob.controller; // fixed typo: was "com.tesst", inconsistent with the rest of the project

import java.util.Date;

import javax.annotation.Resource;

import org.apache.log4j.Logger;
import org.apache.spark.deploy.SparkSubmit;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;

import com.test.sparkjob.entity.SparkJobLog;
import com.test.sparkjob.pojo.RetMsg;
import com.test.sparkjob.service.ISparkJobLogService;

/**
 * HTTP endpoint that submits a Spark job jar to a cluster and persists an
 * execution log row for every attempt (success or failure).
 */
@Controller
public class SparkJobController {
    private static final Logger log = Logger.getLogger(SparkJobController.class);

    /** Base directory on this host where submittable job jars are stored. */
    private static final String JAR_DIR = "/usr/job/";

    @Resource
    private ISparkJobLogService sparkJobLogService;

    /**
     * Submits the jar identified by {@code jarId} to the Spark master at
     * {@code sparkUri} and records the outcome in the job-log table.
     *
     * @param jarId    file name of a jar under {@value #JAR_DIR}; must not
     *                 contain path separators or ".." (untrusted HTTP input)
     * @param sparkUri Spark master URI passed as {@code --master}
     * @return a {@link RetMsg} whose code is 1 on failure, with the error text
     */
    @RequestMapping(value = "/job/executeJob", method = RequestMethod.GET)
    @ResponseBody
    RetMsg executeSparkJob(@RequestParam("jarId") String jarId, @RequestParam("sparkUri") String sparkUri) {
        RetMsg ret = new RetMsg();
        // StringBuilder (not StringBuffer): local to this request, no sharing.
        StringBuilder msg = new StringBuilder(jarId + ":" + sparkUri);
        ret.setMsg(msg.toString());
        SparkJobLog jobLog = new SparkJobLog();
        jobLog.setExecTime(new Date());

        // jarId comes straight from the request and is concatenated into a
        // filesystem path — reject traversal/absolute-path attempts up front.
        if (jarId.contains("..") || jarId.contains("/") || jarId.contains("\\")) {
            log.error("illegal jarId rejected: " + jarId);
            ret.setCode(1);
            ret.setMsg("illegal jarId: " + jarId);
            msg.append(" rejected: illegal jarId");
            jobLog.setMsg(msg.toString());
            sparkJobLogService.insertLog(jobLog);
            return ret;
        }

        String[] submitArgs = new String[] {
                JAR_DIR + jarId,
                "--master", sparkUri,
                "--name", "web polling",
                "--executor-memory", "1G"
        };
        log.info("提交作业...");
        try {
            // NOTE(review): SparkSubmit.main runs in-process and on some failure
            // paths may call System.exit, killing this server — consider
            // org.apache.spark.launcher.SparkLauncher for out-of-process submits.
            SparkSubmit.main(submitArgs);
        } catch (Exception e) {
            // Log at ERROR (was info) so failures stand out, with full stack trace.
            log.error("出错了!", e);
            ret.setCode(1);
            // getMessage() can be null (e.g. bare NPE); fall back to toString().
            String err = e.getMessage() != null ? e.getMessage() : e.toString();
            ret.setMsg(err);
            msg.append(err);
        }
        jobLog.setMsg(msg.toString());
        sparkJobLogService.insertLog(jobLog);
        return ret;
    }
}

启动执行

发起调用

利用spring-boot构建spark job作业提交服务_第1张图片

你可能感兴趣的:(j2ee)