|
||||
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.api.plugin;

import java.io.IOException;
import java.util.Map;

import com.codahale.metrics.MetricRegistry;

import org.apache.spark.SparkConf;
import org.apache.spark.annotation.DeveloperApi;
import org.apache.spark.resource.ResourceInformation;

/**
 * :: DeveloperApi ::
 * Context information and operations for plugins loaded by Spark.
 * <p>
 * An instance of this class is provided to plugins in their initialization method. It is safe
 * for plugins to keep a reference to the instance for later use (for example, to send messages
 * to the plugin's driver component).
 * <p>
 * Context instances are plugin-specific, so metrics and messages are tied to each plugin. It is
 * not possible for a plugin to directly interact with other plugins.
 *
 * @since 3.0.0
 */
@DeveloperApi
public interface PluginContext {

  /**
   * Returns the registry where metrics published by the plugin associated with this context
   * should be registered.
   */
  MetricRegistry metricRegistry();

  /** Configuration of the Spark application. */
  SparkConf conf();

  /** Executor ID of the process. On the driver, this will identify the driver. */
  String executorID();

  /** The host name which is being used by the Spark process for communication. */
  String hostname();

  /** The custom resources (GPUs, FPGAs, etc) allocated to driver or executor. */
  Map<String, ResourceInformation> resources();

  /**
   * Send a message to the plugin's driver-side component.
   * <p>
   * This method sends a message to the driver-side component of the plugin, without expecting
   * a reply. It returns as soon as the message is enqueued for sending.
   * <p>
   * The message must be serializable.
   *
   * @param message Message to be sent.
   * @throws IOException If an error occurs while enqueueing the message for sending.
   */
  void send(Object message) throws IOException;

  /**
   * Send an RPC to the plugin's driver-side component.
   * <p>
   * This method sends a message to the driver-side component of the plugin, and blocks until a
   * reply arrives, or the configured RPC ask timeout (<code>spark.rpc.askTimeout</code>) elapses.
   * <p>
   * If the driver replies with an error, an exception with the corresponding error will be thrown.
   * <p>
   * The message must be serializable.
   *
   * @param message Message to be sent.
   * @return The reply from the driver-side component.
   * @throws Exception If the driver-side component replies with an error, or if the RPC fails
   *                   (for example, when the ask timeout elapses before a reply arrives).
   */
  Object ask(Object message) throws Exception;

}
[ Source navigation ] | [ Diff markup ] | [ Identifier search ] | [ general search ] |
This page was automatically generated by the 2.1.0 LXR engine. The LXR team |