
Xunjie (迅捷) development environment: Spark test case submission
The example has to be run on the Xunjie development environment; the submit command is given in the class comment in the source.

lilb3, 6 years ago
commit 6e3344d7ca

ipu-spark-example/pom.xml (+2 -14)

@@ -21,6 +21,7 @@
 		<ipu>3.1-SNAPSHOT</ipu>
 		<jdk>1.8</jdk>
 		<junit>4.12</junit>
+		<hadoop>2.7.3</hadoop>
 		<spark>2.4.1</spark>
 		<slf4j-api>1.7.16</slf4j-api>
 	</properties>
@@ -42,15 +43,9 @@
 		<dependency>
 			<groupId>org.apache.hadoop</groupId>
 			<artifactId>hadoop-client</artifactId>
-			<version>2.7.3</version>
+			<version>${hadoop}</version>
 		</dependency>
 
-		<!--<dependency>
-			<groupId>io.netty</groupId>
-			<artifactId>netty-all</artifactId>
-			<version>4.1.17.Final</version>
-		</dependency>-->
-
 		<dependency>
 			<groupId>org.apache.spark</groupId>
 			<artifactId>spark-core_2.11</artifactId>
@@ -63,13 +58,6 @@
 			<artifactId>slf4j-log4j12</artifactId>
 			<version>${slf4j-api}</version>
 		</dependency>
-
-		<!-- Failed to set setXIncludeAware(true) for parser -->
-		<!--<dependency>
-			<groupId>xerces</groupId>
-			<artifactId>xercesImpl</artifactId>
-			<version>2.9.1</version>
-		</dependency>-->
 	</dependencies>
 
 	<build>

ipu-spark-example/src/main/java/com/ai/ipu/example/spark/SparkConfig.java (+0 -6)

@@ -16,8 +16,6 @@
     private static String hdfsUri;
     /* Test file */
    private static String testFileName;
-    /* Project jar path */
-    private static String jarFilePath;
 
     public static String getSparkMaster() {
         return sparkMaster;
@@ -31,10 +29,6 @@
         return testFileName;
     }
 
-    public static String getJarFilePath() {
-        return jarFilePath;
-    }
-
     /* Load the configuration file */
     static {
         try {
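
After this change SparkConfig reduces to a static holder for the remaining settings. The following is only a minimal sketch reconstructed from the diff context above, assuming a plain java.util.Properties loader; the key name spark.master and the getter for hdfsUri are not visible in the diff and are assumptions, not the committed code.

import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;

public class SparkConfig {
    private static String sparkMaster;
    private static String hdfsUri;
    /* Test file */
    private static String testFileName;

    public static String getSparkMaster() {
        return sparkMaster;
    }

    public static String getHdfsUri() {
        return hdfsUri;
    }

    public static String getTestFileName() {
        return testFileName;
    }

    /* Load the configuration file once, when the class is initialized. */
    static {
        try (InputStream in = SparkConfig.class.getClassLoader()
                .getResourceAsStream("spark.properties")) {
            if (in == null) {
                throw new IOException("spark.properties not found on classpath");
            }
            Properties props = new Properties();
            props.load(in);
            sparkMaster = props.getProperty("spark.master"); // assumed key, not shown in this diff
            hdfsUri = props.getProperty("hdfs.uri");
            testFileName = props.getProperty("test.file.name");
        } catch (IOException e) {
            throw new ExceptionInInitializerError(e);
        }
    }
}

The keys hdfs.uri and test.file.name match the spark.properties change further down.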

ipu-spark-example/src/main/java/com/ai/ipu/example/spark/SparkExample.java (+3 -6)

@@ -19,11 +19,8 @@
  *
  * @author lilb3@asiainfo.com
  * @since 2019-05-24 17:10
- * Server deployment options
- * a) Spring Boot packaged deployment: the pom must set startClass to com.ai.ipu.example.spark.SparkExample
- * $SPARK_HOME/bin/spark-submit --class org.springframework.boot.loader.JarLauncher --master local[4] /home/mysql/test/comp-example-1.0.jar iZm5e5xe1w25avi0io1f5aZ
- * b) Plain jar deployment: upload the original jar and its dependency jars (e.g. the framework logging jar ipu-basic-3.1-SNAPSHOT.jar); separate multiple jars with commas
- * $SPARK_HOME/bin/spark-submit --class com.ai.ipu.example.spark.SparkExample --master local[4] --jars /home/mysql/test/ipu-basic-3.1-SNAPSHOT.jar /home/mysql/test/comp-example-1.0.jar.original iZm5e5xe1w25avi0io1f5aZ
+ * Run from a shell on the Xunjie development environment:
+ * ~/software/spark-2.4.1-bin-hadoop2.7/bin/spark-submit --jars /home/mysql/test/ipu-basic-3.1-SNAPSHOT.jar --class com.ai.ipu.example.spark.SparkExample /home/mysql/test/ipu-spark-example-1.0.jar
  **/
 public class SparkExample {
     public static void main(String[] args) {
@@ -35,7 +32,7 @@
          */
         SparkConf sparkConf = new SparkConf().setAppName("SparkExample").setMaster(SparkConfig.getSparkMaster());
         /*java.lang.ClassCastException: cannot assign instance of java.lang.invoke.SerializedLambda to field org.apache.spark.api.java.JavaRDDLike$$anonfun$fn$1$1.f$3 of type org.apache.spark.api.java.function.FlatMapFunction in instance of org.apache.spark.api.java.JavaRDDLike$$anonfun$fn$1$1*/
-        sparkConf.setJars(new String[]{SparkConfig.getJarFilePath()});
+        //sparkConf.setJars(new String[]{SparkConfig.getJarFilePath()});
 
         JavaSparkContext jsc = new JavaSparkContext(sparkConf);
 
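
Why setJars() can be commented out: when a driver started outside spark-submit (for example from an IDE) connects to a remote master, the executors do not have the application jar, and lambda-based functions fail to deserialize with exactly the SerializedLambda ClassCastException quoted above; setJars() ships the jar to avoid that. Under spark-submit, the jar given on the command line is distributed to the executors automatically, so the call is redundant. A sketch of the two launch modes; the jar path literal is a placeholder, not a path from this commit:

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;

public class LaunchModeSketch {
    public static void main(String[] args) {
        SparkConf sparkConf = new SparkConf()
                .setAppName("SparkExample")
                .setMaster(SparkConfig.getSparkMaster());

        // IDE-to-remote-master mode: ship the application jar yourself,
        // otherwise the executors cannot deserialize the job's lambdas.
        // sparkConf.setJars(new String[]{"/path/to/ipu-spark-example-1.0.jar"});

        // spark-submit mode (what this commit switches to): the jar on the
        // command line is already distributed, so setJars() is unnecessary.
        JavaSparkContext jsc = new JavaSparkContext(sparkConf);
        jsc.close();
    }
}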

ipu-spark-example/src/main/resources/spark.properties (+1 -3)

@@ -3,6 +3,4 @@
 #Hadoop address
 hdfs.uri=hdfs://iZm5e5xe1w25avi0io1f5aZ:9000
 #Test file
-test.file.name=aaa.txt
-#Project jar path
-jar.file.path=D:\\ideaws\\rest\\code-example\\ipu-spark-example\\target\\ipu-spark-example-1.0.jar
+test.file.name=aaa.txt
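
With jar.file.path removed, spark.properties now only locates the input data. Purely as a hypothetical illustration of how the two remaining keys combine; the real job body of SparkExample is not part of this diff, and the getters are the ones sketched for SparkConfig above:

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

public class PropertiesUsageSketch {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf()
                .setAppName("SparkExample")
                .setMaster(SparkConfig.getSparkMaster());
        // JavaSparkContext is Closeable, so try-with-resources stops it cleanly.
        try (JavaSparkContext jsc = new JavaSparkContext(conf)) {
            // Resolves to hdfs://iZm5e5xe1w25avi0io1f5aZ:9000/aaa.txt with the
            // values from spark.properties.
            String path = SparkConfig.getHdfsUri() + "/" + SparkConfig.getTestFileName();
            JavaRDD<String> lines = jsc.textFile(path);
            System.out.println("line count: " + lines.count());
        }
    }
}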