|
||||
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.sql.connector.catalog;

import org.apache.spark.annotation.Evolving;
import org.apache.spark.sql.util.CaseInsensitiveStringMap;

/**
 * An API to extend the Spark built-in session catalog. Implementations can get the built-in
 * session catalog from {@link #setDelegateCatalog(CatalogPlugin)}, implement catalog functions
 * with some custom logic and call the built-in session catalog at the end. For example, they can
 * implement {@code createTable} with extra validation or bookkeeping and then delegate to
 * {@code createTable} of the built-in session catalog.
 *
 * @since 3.0.0
 */
@Evolving
public interface CatalogExtension extends TableCatalog, SupportsNamespaces {

  /**
   * This will be called only once by Spark to pass in the Spark built-in session catalog, after
   * {@link #initialize(String, CaseInsensitiveStringMap)} is called.
   *
   * @param delegate the Spark built-in session catalog; implementations should keep a reference
   *                 to it so catalog operations can be forwarded to it
   */
  void setDelegateCatalog(CatalogPlugin delegate);
}
[ Source navigation ] | [ Diff markup ] | [ Identifier search ] | [ general search ] |
This page was automatically generated by the 2.1.0 LXR engine. The LXR team |