运行时,使用包含第三方依赖的spark应用jar包 spark-day2-1.0-SNAPSHOT-jar-with-dependencies.jar
<!-- Maven assembly plugin: builds the original jar and a fat jar that bundles all third-party dependencies -->
<plugins>
  <plugin>
    <artifactId>maven-assembly-plugin</artifactId>
    <configuration>
      <archive>
        <manifest>
          <!-- Replace with the fully qualified name of the class containing the jar's main method -->
          <mainClass>com.baizhi.RDDDataSourceWithHBase</mainClass>
        </manifest>
        <manifestEntries>
          <Class-Path>.</Class-Path>
        </manifestEntries>
      </archive>
      <descriptorRefs>
        <descriptorRef>jar-with-dependencies</descriptorRef>
      </descriptorRefs>
    </configuration>
    <executions>
      <execution>
        <id>make-assembly</id> <!-- this is used for inheritance merges -->
        <phase>package</phase> <!-- bind the jar-merge (assembly) step to the package phase -->
        <goals>
          <goal>single</goal>
        </goals>
      </execution>
    </executions>
  </plugin>
</plugins>
上传jar包linux运行命令(spark)
bin/spark-submit --master spark://Spark:7077 --class 运行类的全限定名 --total-executor-cores 4 /root/要运行的jar包