/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.api.resource;

import java.util.Optional;

import org.apache.spark.annotation.DeveloperApi;
import org.apache.spark.SparkConf;
import org.apache.spark.resource.ResourceInformation;
import org.apache.spark.resource.ResourceRequest;

/**
 * :: DeveloperApi ::
 * A plugin that can be dynamically loaded into a Spark application to control how custom
 * resources are discovered. Plugins can be chained to allow different plugins to handle
 * different resource types.
 * <p>
 * Plugins must implement the function discoverResource.
 *
 * @since 3.0.0
 */
@DeveloperApi
public interface ResourceDiscoveryPlugin {
  /**
   * Discover the addresses of the requested resource.
   * <p>
   * This method is called early in the initialization of the Spark Executor/Driver/Worker.
   * This function is responsible for discovering the addresses of the resource, which Spark will
   * then use for scheduling and eventually provide to the user.
   * Depending on the deployment mode and the configuration of custom resources, this could be
   * called by the Spark Driver, the Spark Executors, in standalone mode the Workers, or all of
   * them. The ResourceRequest has a ResourceID component that can be used to distinguish which
   * component it is called from and what resource it is being called for.
   * This will get called once for each resource type requested, and it is the responsibility of
   * this function to return enough addresses of that resource based on the request. If
   * the addresses do not meet the requested amount, Spark will fail.
   * If this plugin doesn't handle a particular resource, it should return an empty Optional,
   * and Spark will try other plugins and then, as a last resort, fall back to the default
   * discovery script plugin.
   *
   * @param request The ResourceRequest to be discovered.
   * @param sparkConf The SparkConf of the application.
   * @return An {@link Optional} containing a {@link ResourceInformation} object with
   * the resource name and the addresses of the resource. If it returns {@link Optional#empty()},
   * other plugins will be called.
   */
  Optional<ResourceInformation> discoverResource(ResourceRequest request, SparkConf sparkConf);
}
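
// ---------------------------------------------------------------------------
// Illustrative sketch only (a separate, hypothetical file -- not part of the
// Spark source): one way a plugin could implement this interface. It assumes
// the accessors ResourceRequest#id() and ResourceID#resourceName(), the
// ResourceInformation(String, String[]) constructor, and a made-up
// GPU_ADDRESSES environment variable holding a comma-separated address list.
// ---------------------------------------------------------------------------
package org.example.resource;

import java.util.Optional;

import org.apache.spark.SparkConf;
import org.apache.spark.api.resource.ResourceDiscoveryPlugin;
import org.apache.spark.resource.ResourceInformation;
import org.apache.spark.resource.ResourceRequest;

public class EnvGpuDiscoveryPlugin implements ResourceDiscoveryPlugin {

  @Override
  public Optional<ResourceInformation> discoverResource(ResourceRequest request, SparkConf sparkConf) {
    String resourceName = request.id().resourceName();

    // Only handle GPUs; returning an empty Optional lets Spark try the next
    // plugin in the chain and finally the default discovery script plugin.
    if (!"gpu".equals(resourceName)) {
      return Optional.empty();
    }

    // Hypothetical source of addresses, e.g. GPU_ADDRESSES="0,1,2,3".
    String addresses = System.getenv("GPU_ADDRESSES");
    if (addresses == null || addresses.isEmpty()) {
      return Optional.empty();
    }

    // Spark checks that enough addresses were returned for the request;
    // if they fall short of the requested amount, the application fails.
    return Optional.of(new ResourceInformation(resourceName, addresses.split(",")));
  }
}
//
// A plugin like this would typically be registered through the
// spark.resources.discoveryPlugin configuration (comma-separated class names),
// which Spark tries in order before the default discovery script plugin.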