<?xml version="1.0" encoding="UTF-8"?>
<!--
  ~ Licensed to the Apache Software Foundation (ASF) under one or more
  ~ contributor license agreements.  See the NOTICE file distributed with
  ~ this work for additional information regarding copyright ownership.
  ~ The ASF licenses this file to You under the Apache License, Version 2.0
  ~ (the "License"); you may not use this file except in compliance with
  ~ the License.  You may obtain a copy of the License at
  ~
  ~    http://www.apache.org/licenses/LICENSE-2.0
  ~
  ~ Unless required by applicable law or agreed to in writing, software
  ~ distributed under the License is distributed on an "AS IS" BASIS,
  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  ~ See the License for the specific language governing permissions and
  ~ limitations under the License.
  -->

<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <parent>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-parent_2.12</artifactId>
    <version>3.0.0</version>
    <relativePath>../pom.xml</relativePath>
  </parent>

  <artifactId>spark-assembly_2.12</artifactId>
  <name>Spark Project Assembly</name>
  <url>http://spark.apache.org/</url>
  <packaging>pom</packaging>

  <properties>
    <sbt.project.name>assembly</sbt.project.name>
    <build.testJarPhase>none</build.testJarPhase>
    <build.copyDependenciesPhase>package</build.copyDependenciesPhase>
  </properties>

  <dependencies>
    <!-- Prevent our dummy JAR from being included in Spark distributions or uploaded to YARN -->
    <dependency>
      <groupId>org.spark-project.spark</groupId>
      <artifactId>unused</artifactId>
      <version>1.0.0</version>
      <scope>provided</scope>
    </dependency>
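    <!--
      Sketch, not part of this POM (assumption about the parent build): the "provided" scope
      above is what keeps this placeholder artifact out of the copied runtime dependencies,
      because the parent's copy-dependencies step presumably filters on runtime scope, roughly

        <configuration>
          <includeScope>runtime</includeScope>
        </configuration>

      so provided-scope artifacts such as this one are never staged into the distribution.
    -->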
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-core_${scala.binary.version}</artifactId>
      <version>${project.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-mllib_${scala.binary.version}</artifactId>
      <version>${project.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-streaming_${scala.binary.version}</artifactId>
      <version>${project.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-graphx_${scala.binary.version}</artifactId>
      <version>${project.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-sql_${scala.binary.version}</artifactId>
      <version>${project.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-repl_${scala.binary.version}</artifactId>
      <version>${project.version}</version>
    </dependency>

    <!--
      Because we don't shade dependencies anymore, we need to restore Guava to compile scope so
      that the libraries Spark depends on have it available. We'll package the version that Spark
      uses (14.0.1), which is not the same version the Hadoop dependencies use, but it works.
    -->
    <dependency>
      <groupId>com.google.guava</groupId>
      <artifactId>guava</artifactId>
      <scope>${hadoop.deps.scope}</scope>
    </dependency>
  </dependencies>
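  <!--
    Sketch (assumption about the parent POM, which is not shown here): hadoop.deps.scope is
    expected to default to "compile" in spark-parent, e.g.

      <properties>
        <hadoop.deps.scope>compile</hadoop.deps.scope>
      </properties>

    The hadoop-provided profile declared further down overrides it to "provided", which keeps the
    Guava dependency above, along with the Hadoop JARs, out of the packaged distribution.
  -->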

  <build>
    <plugins>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-deploy-plugin</artifactId>
        <configuration>
          <skip>true</skip>
        </configuration>
      </plugin>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-install-plugin</artifactId>
        <configuration>
          <skip>true</skip>
        </configuration>
      </plugin>
      <!-- Zip the pyspark files so that Python applications can be run in YARN mode -->
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-antrun-plugin</artifactId>
        <executions>
          <execution>
            <phase>package</phase>
            <goals>
              <goal>run</goal>
            </goals>
          </execution>
        </executions>
        <configuration>
          <target>
            <delete file="${basedir}/../python/lib/pyspark.zip"/>
            <zip destfile="${basedir}/../python/lib/pyspark.zip">
              <fileset dir="${basedir}/../python/" includes="pyspark/**/*"/>
            </zip>
          </target>
        </configuration>
      </plugin>
    </plugins>
  </build>
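  <!--
    Sketch (assumption, for illustration only): the zip created above ends up in python/lib/
    alongside the py4j archive and is the file that YARN mode distributes with Python
    applications. Its contents can be checked with a plain zip listing, e.g.

      unzip -l python/lib/pyspark.zip

    which should show the pyspark/ package tree matched by the fileset above.
  -->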

  <profiles>
    <profile>
      <id>yarn</id>
      <dependencies>
        <dependency>
          <groupId>org.apache.spark</groupId>
          <artifactId>spark-yarn_${scala.binary.version}</artifactId>
          <version>${project.version}</version>
        </dependency>
      </dependencies>
    </profile>
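    <!--
      Sketch (not part of the original file): these cluster-manager profiles are opt-in and are
      typically enabled on the Maven command line, for example

        ./build/mvn -Pyarn -Pkubernetes -DskipTests package

      Each profile simply adds the matching resource-manager module to this assembly's
      dependency set.
    -->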
    <profile>
      <id>mesos</id>
      <dependencies>
        <dependency>
          <groupId>org.apache.spark</groupId>
          <artifactId>spark-mesos_${scala.binary.version}</artifactId>
          <version>${project.version}</version>
        </dependency>
      </dependencies>
    </profile>
    <profile>
      <id>kubernetes</id>
      <dependencies>
        <dependency>
          <groupId>org.apache.spark</groupId>
          <artifactId>spark-kubernetes_${scala.binary.version}</artifactId>
          <version>${project.version}</version>
        </dependency>
      </dependencies>
    </profile>
    <profile>
      <id>hive</id>
      <dependencies>
        <dependency>
          <groupId>org.apache.spark</groupId>
          <artifactId>spark-hive_${scala.binary.version}</artifactId>
          <version>${project.version}</version>
        </dependency>
      </dependencies>
    </profile>
    <profile>
      <id>hive-thriftserver</id>
      <dependencies>
        <dependency>
          <groupId>org.apache.spark</groupId>
          <artifactId>spark-hive-thriftserver_${scala.binary.version}</artifactId>
          <version>${project.version}</version>
        </dependency>
      </dependencies>
    </profile>
    <profile>
      <id>spark-ganglia-lgpl</id>
      <dependencies>
        <dependency>
          <groupId>org.apache.spark</groupId>
          <artifactId>spark-ganglia-lgpl_${scala.binary.version}</artifactId>
          <version>${project.version}</version>
        </dependency>
      </dependencies>
    </profile>
    <profile>
      <id>bigtop-dist</id>
      <!-- This profile uses the assembly plugin to create a special "dist" package for BigTop
           that contains Spark but not the Hadoop JARs it depends on. -->
      <build>
        <plugins>
          <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-assembly-plugin</artifactId>
            <version>3.1.0</version>
            <executions>
              <execution>
                <id>dist</id>
                <phase>package</phase>
                <goals>
                  <goal>single</goal>
                </goals>
                <configuration>
                  <descriptors>
                    <descriptor>src/main/assembly/assembly.xml</descriptor>
                  </descriptors>
                </configuration>
              </execution>
            </executions>
          </plugin>
        </plugins>
      </build>
    </profile>
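    <!--
      Sketch (assumption; the real descriptor at src/main/assembly/assembly.xml is not shown
      here): an assembly descriptor for a "dist" package of this kind would typically declare an
      output format and exclude the Hadoop artifacts, roughly

        <assembly>
          <id>dist</id>
          <formats>
            <format>tar.gz</format>
          </formats>
          <dependencySets>
            <dependencySet>
              <excludes>
                <exclude>org.apache.hadoop:*</exclude>
              </excludes>
            </dependencySet>
          </dependencySets>
        </assembly>

      The profile is activated with "mvn -Pbigtop-dist package", and the assembly runs in the
      package phase as configured above.
    -->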

    <!-- Profiles that disable inclusion of certain dependencies. -->
    <profile>
      <id>hadoop-provided</id>
      <properties>
        <hadoop.deps.scope>provided</hadoop.deps.scope>
      </properties>
    </profile>
    <profile>
      <id>hive-provided</id>
      <properties>
        <hive.deps.scope>provided</hive.deps.scope>
      </properties>
    </profile>
    <profile>
      <id>orc-provided</id>
      <properties>
        <orc.deps.scope>provided</orc.deps.scope>
      </properties>
    </profile>
    <profile>
      <id>parquet-provided</id>
      <properties>
        <parquet.deps.scope>provided</parquet.deps.scope>
      </properties>
    </profile>
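    <!--
      Sketch (not part of the original file): each *-provided profile flips the corresponding
      *.deps.scope property to "provided", so those JARs are expected on the cluster's classpath
      rather than inside the Spark package. A typical build that leaves Hadoop out of the
      distribution, for example:

        ./build/mvn -Phadoop-provided -Pyarn -DskipTests package
    -->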

    <!--
      Pull in spark-hadoop-cloud and its associated JARs.
    -->
    <profile>
      <id>hadoop-cloud</id>
      <dependencies>
        <dependency>
          <groupId>org.apache.spark</groupId>
          <artifactId>spark-hadoop-cloud_${scala.binary.version}</artifactId>
          <version>${project.version}</version>
        </dependency>
        <!--
        Redeclare this dependency to force it into the distribution.
        -->
        <dependency>
          <groupId>org.eclipse.jetty</groupId>
          <artifactId>jetty-util</artifactId>
          <scope>${hadoop.deps.scope}</scope>
        </dependency>
      </dependencies>
    </profile>
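    <!--
      Sketch (not part of the original file): enabling this profile, e.g. with
      "./build/mvn -Phadoop-cloud package", is what pulls the spark-hadoop-cloud module and the
      object-store connector JARs it brings in into the assembled distribution.
    -->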
  </profiles>
</project>