Dremio Ignite Integration

I am trying to fetch Ignite tables from Dremio using the JDBC interface provided by Ignite. For that, I have created a custom ARP plugin.

IgniteConf.java

package com.dremio.exec.store.jdbc.conf;

import com.dremio.exec.catalog.conf.DisplayMetadata;
import com.dremio.exec.catalog.conf.NotMetadataImpacting;
import com.dremio.exec.catalog.conf.SourceType;
import com.dremio.exec.server.SabotContext;
import com.dremio.exec.store.jdbc.CloseableDataSource;
import com.dremio.exec.store.jdbc.DataSources;
import com.dremio.exec.store.jdbc.JdbcStoragePlugin;
import com.dremio.exec.store.jdbc.JdbcStoragePlugin.Config;
import com.dremio.exec.store.jdbc.dialect.arp.ArpDialect;
import com.google.common.annotations.VisibleForTesting;
import io.protostuff.Tag;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static com.google.common.base.Preconditions.checkNotNull;

/**
 * Source configuration for Apache Ignite, connected through the Ignite
 * thin JDBC driver and described by an ARP dialect definition.
 */
@SourceType(value = "Ignite", label = "Ignite")
public class IgniteConf extends AbstractArpConf<IgniteConf> {
    private static final Logger logger = LoggerFactory.getLogger(IgniteConf.class);

    // ARP file describing which SQL constructs may be pushed down to Ignite.
    private static final String ARP_FILENAME = "arp/implementation/Ignite-arp.yaml";
    private static final ArpDialect ARP_DIALECT = AbstractArpConf.loadArpFile(ARP_FILENAME, ArpDialect::new);
    // Fully qualified class name of the Ignite "thin" JDBC driver.
    private static final String DRIVER = "org.apache.ignite.IgniteJdbcThinDriver";

    @Tag(1)
    @DisplayMetadata(label = "ServerAddress")
    public String serverAddress;

    @Tag(2)
    @DisplayMetadata(label = "Record fetch size")
    @NotMetadataImpacting
    public int fetchSize = 200;

    /**
     * Builds the thin-driver connection URL, e.g. {@code jdbc:ignite:thin://host:port}.
     *
     * @throws NullPointerException if no server address has been configured
     */
    @VisibleForTesting
    public String toJdbcConnectionString() {
        final String address = checkNotNull(this.serverAddress, "Missing serverAddress.");
        return "jdbc:ignite:thin://" + address;
    }

    /** Assembles the JDBC storage plugin configuration for this source. */
    @Override
    @VisibleForTesting
    public Config toPluginConfig(SabotContext context) {
        return JdbcStoragePlugin.Config.newBuilder()
                .withDialect(getDialect())
                .withFetchSize(fetchSize)
                .withDatasourceFactory(this::newDataSource)
                .build();
    }

    /** Creates a pooled data source; credentials are not used by this connector. */
    private CloseableDataSource newDataSource() {
        return DataSources.newGenericConnectionPoolDataSource(
                DRIVER, toJdbcConnectionString(), null, null, null,
                DataSources.CommitMode.DRIVER_SPECIFIED_COMMIT_MODE);
    }

    @Override
    public ArpDialect getDialect() {
        // Loaded once during class initialization and shared across queries.
        return ARP_DIALECT;
    }

}

Ignite-arp.yaml

#
# Copyright (C) 2017-2018 Dremio Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

metadata:
  # Manually Configured Metadata Section.
  name: Ignite
  apiname: Ignite
  spec_version: '1'

syntax:
  # Manually Configured Syntax Section.
  identifier_quote: '"'
  identifier_length_limit: 128
  allows_boolean_literal: false
  map_boolean_literal_to_bit: false
  # Catalogs are disabled: Ignite does not accept catalog-qualified
  # identifiers in queries (tables are addressed as "SCHEMA"."TABLE").
  supports_catalogs: false
  supports_schemas: true
data_types:
  mappings:
    # Manually Configured Data Types Mappings Section.
    # NOTE(review): only INTEGER, REAL and VARCHAR are mapped; presumably other
    # Ignite JDBC types (BIGINT, TIMESTAMP, ...) need entries here too — verify
    # against the ARP framework documentation.
    - source:
        name: "INTEGER"
      dremio:
        name: "integer"
      required_cast_arguments: "none"
    - source:
        name: "REAL"
      dremio:
        name: "double"
      required_cast_arguments: "none"
    - source:
        name: "VARCHAR"
      dremio:
        name: "varchar"
      required_cast_arguments: "none"
# Relational operations that may be pushed down to the Ignite source.
relational_algebra:
  aggregation:
    enable: true
    group_by_ordinal: false
    distinct: true
    count_functions:
      count_star:
        enable: true
      count:
        enable: true
      count_distinct:
        enable: true
    functions:
      - names:
          - "avg"
        signatures:
          - args:
              - "double"
            return: "double"
          - args:
              - "integer"
            return: "double"
      - names:
          - "max"
          - "min"
        signatures:
          - args:
              - "integer"
            return: "integer"
          - args:
              - "double"
            return: "double"
          - args:
              - "varchar"
            return: "varchar"
      - names:
          - "sum"
        signatures:
          - args:
              - "double"
            return: "double"
          - args:
              - "integer"
            return: "bigint"

  except:
    enable: false
  project:
    enable: true
  join:
    enable: true
    cross:
      enable: true
    inner:
      enable: true
      inequality: true
    left:
      enable: true
      inequality: true
    right:
      enable: false
      inequality: false
    full:
      enable: false
      inequality: false
  sort:
    enable: true
    order_by:
      enable: true
      default_nulls_ordering: high
    fetch_offset:
      # In the format templates below, {0} is the offset and {1} is the fetch count.
      offset_fetch:
        enable: true
        format: 'LIMIT {1} OFFSET {0}'
      offset_only:
        enable: false
      fetch_only:
        enable: true
        format: 'LIMIT {0}'
  union:
    enable: false
  union_all:
    enable: false
  values:
    enable: false
    method: values
# Describe the set of function signatures that are internally supported.
expressions:
  subqueries:
    correlated: true
    scalar: true
    in_clause: true
  supports_case: true
  supports_over: false
  operators:
    # Comparison operators, enumerated for each supported argument-type pair.
    - names:
        - "="
        - "!="
        - "<>"
        - ">"
        - ">="
        - "<"
        - "<="
      signatures:
        - args:
            - "double"
            - "double"
          return: "boolean"
        - args:
            - "double"
            - "integer"
          return: "boolean"
        - args:
            - "double"
            - "varchar"
          return: "boolean"
        - args:
            - "integer"
            - "double"
          return: "boolean"
        - args:
            - "integer"
            - "integer"
          return: "boolean"
        - args:
            - "integer"
            - "varchar"
          return: "boolean"
        - args:
            - "varchar"
            - "varchar"
          return: "boolean"
    - names:
        - "not"
      signatures:
        - args:
            - "boolean"
          return: "boolean"
    # Example scalar function
    - names:
        - "sign"
      signatures:
        - args:
            - "double"
          return: "double"
        - args:
            - "integer"
          return: "integer"
          # Example rewrite (although this is not necessary here as the default is the same)
          rewrite: "SIGN({0})"

  # Boolean connectives accepting any number of arguments.
  variable_length_operators:
    - names:
        - and
      variable_signatures:
        - return: boolean
          arg_type: boolean
    - names:
        - or
      variable_signatures:
        - return: boolean
          arg_type: boolean
Using it I can get details of schemas, tables, and columns, but when I try to fetch data it fails. The generated query includes a catalog name, which is not supported by Ignite. How should I avoid passing the catalog name in the query?

Query generated by dremio
SELECT "DHCPEVENTS"."TS", "DHCPEVENTS"."ACTION", "DHCPEVENTS"."MAC", "DHCPEVENTS"."IP1", "DHCPEVENTS"."IP2", "DHCPEVENTS"."TSOTHER" FROM "IGNITE"."PUBLIC"."DHCPEVENTS" LIMIT 2000 OFFSET 0

Query Supported by Ignite
SELECT "DHCPEVENTS"."TS", "DHCPEVENTS"."ACTION", "DHCPEVENTS"."MAC", "DHCPEVENTS"."IP1", "DHCPEVENTS"."IP2", "DHCPEVENTS"."TSOTHER" FROM "PUBLIC"."DHCPEVENTS" LIMIT 2000 OFFSET 0

Hi @jenil.shah135,

The issue here looks to be that the Ignite JDBC driver is reporting a catalog during getTables(), but doesn’t allow catalogs to be used in queries, and the ARP framework isn’t checking whether catalogs can be used in queries. The supports_catalogs flag is used only when building filters for the getTables() and getColumns() calls.

We’ve tracked this in an internal ticket in the ARP framework.

A workaround that can be done is to:

  1. Create your own subclass of ArpDialect, that overrides getSchemaFetcher().
  2. Implement your own ArpSchemaFetcher subclass that overrides getTableHandles(). This method should be changed to return your own DatasetHandleListing implementation that returns identifiers of the following format [Dremio source name].[schema name].[table name]

Hi,

Thanks for the quick response. Yes, Ignite doesn’t support a catalog name in queries, but it does return a catalog name via the database driver’s metadata API. I have tried your suggestion, but after this I am not getting column names and am not able to run SQL queries from the Dremio web app.

IgniteConf.java

package com.dremio.exec.store.jdbc.conf;


import com.dremio.common.AutoCloseables;
import com.dremio.connector.metadata.DatasetHandle;
import com.dremio.connector.metadata.DatasetHandleListing;
import com.dremio.connector.metadata.EmptyDatasetHandleListing;
import com.dremio.connector.metadata.EntityPath;
import com.dremio.exec.catalog.conf.DisplayMetadata;
import com.dremio.exec.catalog.conf.NotMetadataImpacting;
import com.dremio.exec.catalog.conf.SourceType;
import com.dremio.exec.server.SabotContext;
import com.dremio.exec.store.jdbc.CloseableDataSource;
import com.dremio.exec.store.jdbc.DataSources;
import com.dremio.exec.store.jdbc.JdbcSchemaFetcher;
import com.dremio.exec.store.jdbc.JdbcStoragePlugin;
import com.dremio.exec.store.jdbc.JdbcStoragePlugin.Config;
import com.dremio.exec.store.jdbc.dialect.arp.ArpDialect;
import com.dremio.exec.store.jdbc.dialect.arp.ArpYaml;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Strings;
import com.google.common.collect.AbstractIterator;
import io.protostuff.Tag;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;

import static com.google.common.base.Preconditions.checkNotNull;

/**
 * Source configuration for Apache Ignite over the thin JDBC driver.
 *
 * <p>Uses a custom schema fetcher so that the catalog name the Ignite driver
 * reports in its metadata is dropped from dataset paths: Ignite does not
 * accept catalog-qualified table names in queries.
 */
@SourceType(value = "Ignite", label = "Ignite")
public class IgniteConf extends AbstractArpConf<IgniteConf> {
    private static final Logger logger = LoggerFactory.getLogger(IgniteConf.class);
    // ARP file describing which SQL constructs can be pushed down to Ignite.
    private static final String ARP_FILENAME = "arp/implementation/Ignite-arp.yaml";
    private static final ArpDialect ARP_DIALECT = AbstractArpConf.loadArpFile(ARP_FILENAME, (IgniteDialect::new));
    // Fully qualified class name of the Ignite "thin" JDBC driver.
    private static final String DRIVER = "org.apache.ignite.IgniteJdbcThinDriver";

    @Tag(1)
    @DisplayMetadata(label = "ServerAddress")
    public String serverAddress;

    @Tag(2)
    @DisplayMetadata(label = "Record fetch size")
    @NotMetadataImpacting
    public int fetchSize = 200;

    /**
     * Schema fetcher that lists tables itself, producing dataset paths of the
     * form [source, schema, table] with no catalog component.
     */
    static class IgniteSchemaFetcher extends ArpDialect.ArpSchemaFetcher {

        // Table-listing SQL handed in by the dialect.
        private final String query;

        public IgniteSchemaFetcher(String query, String name, DataSource dataSource, int timeout, Config config) {
            super(query, name, dataSource, timeout, config);
            this.query = query;
        }

        // Returns a listing over all tables; empty when discovery is disabled or fails.
        public DatasetHandleListing getTableHandles() {
            if (this.config.shouldSkipSchemaDiscovery()) {
                logger.debug("Skip schema discovery enabled, skipping getting tables '{}'", this.storagePluginName);
                return new EmptyDatasetHandleListing();
            } else {
                logger.debug("Getting all tables for plugin '{}'", this.storagePluginName);
                try {
                    // NOTE(review): if filterQuery() throws, this connection is never
                    // closed — it is normally released by the iterator's close(). Confirm
                    // and guard with a close on the SQLException path.
                    Connection connection = this.dataSource.getConnection();
                    return new JdbcSchemaFetcher.JdbcIteratorListing(new IgniteJdbcDatasetMetadataIterable(this.storagePluginName, connection, this.filterQuery(this.query, connection.getMetaData())));
                } catch (SQLException ex) {
                    // Deliberate best-effort: an unreachable source simply lists no datasets.
                    return new EmptyDatasetHandleListing();
                }
            }
        }

        /**
         * Iterates the rows of the table-listing query, turning each row into a
         * DatasetHandle. Owns the connection, statement, and result set, and
         * releases them in close().
         */
        protected static class IgniteJdbcDatasetMetadataIterable extends AbstractIterator<DatasetHandle> implements AutoCloseable {
            private final String storagePluginName;
            private final Connection connection;
            private Statement statement;
            private ResultSet tablesResult;

            protected IgniteJdbcDatasetMetadataIterable(String storagePluginName, Connection connection, String query) {
                this.storagePluginName = storagePluginName;
                this.connection = connection;

                try {
                    this.statement = connection.createStatement();
                    this.tablesResult = this.statement.executeQuery(query);
                } catch (SQLException var5) {
                    // tablesResult stays null; computeNext() then ends iteration immediately.
                    logger.error(String.format("Error retrieving all tables for %s", storagePluginName), var5);
                }

            }

            // Builds the next handle from result columns (1=catalog, 2=schema, 3=table).
            // The catalog column is intentionally skipped: Ignite rejects it in queries.
            public DatasetHandle computeNext() {
                try {
                    if (this.tablesResult != null && this.tablesResult.next()) {
                        List<String> path = new ArrayList(3);
                        path.add(this.storagePluginName);
//                    String currCatalog = this.tablesResult.getString(1);
//                    if (!Strings.isNullOrEmpty(currCatalog)) {
//                        path.add(currCatalog);
//                    }

                        String currSchema = this.tablesResult.getString(2);
                        if (!Strings.isNullOrEmpty(currSchema)) {
                            path.add(currSchema);
                        }

                        path.add(this.tablesResult.getString(3));
                        return new IgniteJdbcDatasetHandle(new EntityPath(path));
                    } else {
                        logger.debug("Done fetching all schema and tables for '{}'.", this.storagePluginName);
                        return this.endOfData();
                    }
                } catch (SQLException var4) {
                    logger.error(String.format("Error listing datasets for '%s'", this.storagePluginName), var4);
                    return this.endOfData();
                }
            }

            // Releases the result set, statement, and connection, in that order.
            public void close() {
                try {
                    AutoCloseables.close(new AutoCloseable[]{this.tablesResult, this.statement, this.connection});
                } catch (Exception var2) {
                    logger.warn("Error closing connection when listing JDBC datasets.", var2);
                }

            }

            /** Handle whose identifiers are exactly the path components (no catalog). */
            protected static class IgniteJdbcDatasetHandle extends JdbcSchemaFetcher.JdbcDatasetHandle {
                private final EntityPath entityPath;

                public IgniteJdbcDatasetHandle(EntityPath entityPath) {
                    super(entityPath);
                    this.entityPath = entityPath;
                }

                public List<String> getIdentifiers() {
                    return this.entityPath.getComponents();
                }
            }
        }
    }

    /** Dialect that plugs in the catalog-free schema fetcher above. */
    static class IgniteDialect extends ArpDialect {

        public IgniteDialect(ArpYaml yaml) {
            super(yaml);
        }

        public JdbcSchemaFetcher getSchemaFetcher(String name, DataSource dataSource, int timeout, JdbcStoragePlugin.Config config) {
            String tableQuery = "SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_TYPE='TABLE'";
            return new IgniteSchemaFetcher(tableQuery, name, dataSource, timeout, config);
        }

    }


    /**
     * Builds the thin-driver connection URL, e.g. {@code jdbc:ignite:thin://host:port}.
     */
    @VisibleForTesting
    public String toJdbcConnectionString() {
        final String serverAddress = checkNotNull(this.serverAddress, "Missing serverAddress.");

        return String.format("jdbc:ignite:thin://%s", serverAddress);
    }

    @Override
    @VisibleForTesting
    public Config toPluginConfig(SabotContext context) {
        return JdbcStoragePlugin.Config.newBuilder()
            .withDialect(getDialect())
            .withFetchSize(fetchSize)
            .withDatasourceFactory(this::newDataSource)
            .build();
    }

    /** Creates a pooled data source; no credentials are passed. */
    private CloseableDataSource newDataSource() {
        return DataSources.newGenericConnectionPoolDataSource(DRIVER,
            toJdbcConnectionString(), null, null, null, DataSources.CommitMode.DRIVER_SPECIFIED_COMMIT_MODE);
    }

    @Override
    public ArpDialect getDialect() {
        return ARP_DIALECT;
    }

}

Am I missing something?

The code you have looks correct. However from the error I see that the %22 sequence is wrapping ignite-app. %22 is the URI escape sequence for double-quote – is your source-name wrapped in double-quotes (eg “ignite-app”). Could also debug the schema fetcher and check if when getSchemaFetcher is called, the name parameter has double-quotes.

If the source name has double-quotes, I’d try renaming it to a name with no quote characters.

Hi,

No, the source name doesn’t have double quotes. I have also added logs in the schema fetcher to verify the same; I didn’t see quotes in the logs. But after some debugging I learned that, because of the dash in the name, the quoting is added by the platform itself. So I tried a simple name, and after that I got a different error.

Looks like there’s another API call you need to override to handle the fact that the catalog should be omitted.

It’d be JdbcSchemaFetcher.getTableHandle(), which takes in a path to a table (source name, schema name, table name).

Actually, a possible way we can do this with a lot less code change — and no need to write your own schema fetcher — is a simple override of ArpDialect instead:
Override getSchemaFetcher() so that instead of running

SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_TYPE=‘TABLE’

as above, run a SQL query on INFORMATION_SCHEMA.TABLES where we explicitly set the first column (the catalog name) to NULL.

SELECT NULL AS CAT, TABLE_SCHEMA AS SCH, TABLE_NAME AS NME from information_schema.tables WHERE 1 = 1

(based on https://apacheignite.readme.io/docs/tables).

The WHERE 1 = 1 and field names CAT, SCH, and NME are there just because some of the other logic in ArpSchemaFetcher ANDs additional clauses to the query that depend on those names.

Hello,

Thanks for your help. I had tried the same in the code I shared above. After your suggestion I changed the code accordingly, but it is still not working: listing works, but I can’t query tables.

package com.dremio.exec.store.jdbc.conf;


import com.dremio.exec.catalog.conf.DisplayMetadata;
import com.dremio.exec.catalog.conf.NotMetadataImpacting;
import com.dremio.exec.catalog.conf.SourceType;
import com.dremio.exec.server.SabotContext;
import com.dremio.exec.store.jdbc.CloseableDataSource;
import com.dremio.exec.store.jdbc.DataSources;
import com.dremio.exec.store.jdbc.JdbcSchemaFetcher;
import com.dremio.exec.store.jdbc.JdbcStoragePlugin;
import com.dremio.exec.store.jdbc.JdbcStoragePlugin.Config;
import com.dremio.exec.store.jdbc.dialect.arp.ArpDialect;
import com.dremio.exec.store.jdbc.dialect.arp.ArpYaml;
import com.google.common.annotations.VisibleForTesting;
import io.protostuff.Tag;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.sql.DataSource;

import static com.google.common.base.Preconditions.checkNotNull;

/**
 * Source configuration for Apache Ignite, reached through the Ignite thin
 * JDBC driver.
 *
 * <p>Table listing is done with an explicit query against INFORMATION_SCHEMA
 * that projects NULL as the catalog column, because Ignite advertises a
 * catalog in its JDBC metadata but does not accept catalog-qualified names
 * in queries.
 */
@SourceType(value = "Ignite", label = "Ignite")
public class IgniteConf extends AbstractArpConf<IgniteConf> {
    private static final Logger logger = LoggerFactory.getLogger(IgniteConf.class);
    // ARP file describing which SQL constructs may be pushed down to Ignite.
    private static final String ARP_FILENAME = "arp/implementation/Ignite-arp.yaml";
    private static final ArpDialect ARP_DIALECT = AbstractArpConf.loadArpFile(ARP_FILENAME, IgniteDialect::new);
    // Fully qualified class name of the Ignite "thin" JDBC driver.
    private static final String DRIVER = "org.apache.ignite.IgniteJdbcThinDriver";

    @Tag(1)
    @DisplayMetadata(label = "ServerAddress")
    public String serverAddress;

    @Tag(2)
    @DisplayMetadata(label = "Record fetch size")
    @NotMetadataImpacting
    public int fetchSize = 200;

    /** Ignite-specific dialect: overrides only how the table list is fetched. */
    static class IgniteDialect extends ArpDialect {

        // Catalog column forced to NULL; the CAT/SCH/NME aliases are relied on by
        // filter clauses that ArpSchemaFetcher may append to this query.
        private static final String TABLE_QUERY =
            "SELECT NULL AS CAT, TABLE_SCHEMA AS SCH, TABLE_NAME AS NME from information_schema.tables WHERE 1 = 1";

        public IgniteDialect(ArpYaml yaml) {
            super(yaml);
        }

        public JdbcSchemaFetcher getSchemaFetcher(String name, DataSource dataSource, int timeout, JdbcStoragePlugin.Config config) {
            return new ArpDialect.ArpSchemaFetcher(TABLE_QUERY, name, dataSource, timeout, config);
        }

    }

    /**
     * Builds the thin-driver connection URL, e.g. {@code jdbc:ignite:thin://host:port}.
     *
     * @throws NullPointerException if no server address has been configured
     */
    @VisibleForTesting
    public String toJdbcConnectionString() {
        final String address = checkNotNull(this.serverAddress, "Missing serverAddress.");
        return "jdbc:ignite:thin://" + address;
    }

    /** Assembles the JDBC storage plugin configuration for this source. */
    @Override
    @VisibleForTesting
    public Config toPluginConfig(SabotContext context) {
        return JdbcStoragePlugin.Config.newBuilder()
            .withDialect(getDialect())
            .withFetchSize(fetchSize)
            .withDatasourceFactory(this::newDataSource)
            .build();
    }

    /** Creates a pooled data source; credentials are not used by this connector. */
    private CloseableDataSource newDataSource() {
        final String url = toJdbcConnectionString();
        return DataSources.newGenericConnectionPoolDataSource(DRIVER, url, null, null, null,
            DataSources.CommitMode.DRIVER_SPECIFIED_COMMIT_MODE);
    }

    @Override
    public ArpDialect getDialect() {
        // Loaded once during class initialization and shared across queries.
        return ARP_DIALECT;
    }

}


@jenil.shah135, would it be possible to post a log? Can you post a profile as well (in the job summary click Download Profile next to the narwhal).

Hello,

I have attached the profile file. I got the below error in the logs at that time.

2020-09-17 09:11:19,125 [209cd7c9-1899-c4bb-cc9b-ad82924ca900/0:foreman-planning] ERROR o.a.calcite.runtime.CalciteException - org.apache.calcite.sql.validate.SqlValidatorException: Table 'ignite1.PUBLIC.DHCPEVENTSTAGE1' not found
2020-09-17 09:11:19,128 [209cd7c9-1899-c4bb-cc9b-ad82924ca900/0:foreman-planning] ERROR o.a.calcite.runtime.CalciteException - org.apache.calcite.runtime.CalciteContextException: From line 1, column 15 to line 1, column 21: Table 'ignite1.PUBLIC.DHCPEVENTSTAGE1' not found
2020-09-17 09:11:19,135 [209cd7c9-1899-c4bb-cc9b-ad82924ca900/0:foreman-planning] ERROR c.d.s.commandpool.CommandWrapper - command 209cd7c9-1899-c4bb-cc9b-ad82924ca900/0:foreman-planning failed
com.dremio.common.exceptions.UserException: Table 'ignite1.PUBLIC.DHCPEVENTSTAGE1' not found
	at com.dremio.common.exceptions.UserException$Builder.build(UserException.java:802)
	at com.dremio.exec.planner.sql.SqlExceptionHelper.coerceException(SqlExceptionHelper.java:114)
	at com.dremio.exec.planner.sql.handlers.query.NormalHandler.getPlan(NormalHandler.java:60)
	at com.dremio.exec.planner.sql.handlers.commands.HandlerToExec.plan(HandlerToExec.java:59)
	at com.dremio.exec.work.foreman.AttemptManager.plan(AttemptManager.java:422)
	at com.dremio.exec.work.foreman.AttemptManager.lambda$run$1(AttemptManager.java:331)
	at com.dremio.service.commandpool.CommandWrapper.run(CommandWrapper.java:62)
	at com.dremio.context.RequestContext.run(RequestContext.java:95)
	at com.dremio.common.concurrent.ContextMigratingExecutorService.lambda$decorate$3(ContextMigratingExecutorService.java:199)
	at com.dremio.common.concurrent.ContextMigratingExecutorService$ComparableRunnable.run(ContextMigratingExecutorService.java:180)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
Caused by: org.apache.calcite.runtime.CalciteContextException: From line 1, column 15 to line 1, column 21: Table 'ignite1.PUBLIC.DHCPEVENTSTAGE1' not found
	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
	at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
	at org.apache.calcite.runtime.Resources$ExInstWithCause.ex(Resources.java:463)
	at org.apache.calcite.sql.SqlUtil.newContextException(SqlUtil.java:803)
	at org.apache.calcite.sql.SqlUtil.newContextException(SqlUtil.java:788)
	at org.apache.calcite.sql.validate.SqlValidatorImpl.newValidationError(SqlValidatorImpl.java:4470)
	at org.apache.calcite.sql.validate.IdentifierNamespace.resolveImpl(IdentifierNamespace.java:104)
	at org.apache.calcite.sql.validate.IdentifierNamespace.validateImpl(IdentifierNamespace.java:120)
	at org.apache.calcite.sql.validate.AbstractNamespace.validate(AbstractNamespace.java:84)
	at org.apache.calcite.sql.validate.SqlValidatorImpl.validateNamespace(SqlValidatorImpl.java:943)
	at org.apache.calcite.sql.validate.SqlValidatorImpl.validateQuery(SqlValidatorImpl.java:924)
	at org.apache.calcite.sql.validate.SqlValidatorImpl.validateFrom(SqlValidatorImpl.java:2971)
	at org.apache.calcite.sql.validate.SqlValidatorImpl.validateFrom(SqlValidatorImpl.java:2956)
	at org.apache.calcite.sql.validate.SqlValidatorImpl.validateSelect(SqlValidatorImpl.java:3197)
	at org.apache.calcite.sql.validate.SelectNamespace.validateImpl(SelectNamespace.java:60)
	at org.apache.calcite.sql.validate.AbstractNamespace.validate(AbstractNamespace.java:84)
	at org.apache.calcite.sql.validate.SqlValidatorImpl.validateNamespace(SqlValidatorImpl.java:943)
	at org.apache.calcite.sql.validate.SqlValidatorImpl.validateQuery(SqlValidatorImpl.java:924)
	at org.apache.calcite.sql.SqlSelect.validate(SqlSelect.java:226)
	at org.apache.calcite.sql.validate.SqlValidatorImpl.validateScopedExpression(SqlValidatorImpl.java:899)
	at org.apache.calcite.sql.validate.SqlValidatorImpl.validate(SqlValidatorImpl.java:609)
	at com.dremio.exec.planner.sql.SqlConverter.validate(SqlConverter.java:229)
	at com.dremio.exec.planner.sql.handlers.PrelTransformer.validateNode(PrelTransformer.java:196)
	at com.dremio.exec.planner.sql.handlers.PrelTransformer.validateAndConvert(PrelTransformer.java:181)
	at com.dremio.exec.planner.sql.handlers.PrelTransformer.validateAndConvert(PrelTransformer.java:177)
	at com.dremio.exec.planner.sql.handlers.query.NormalHandler.getPlan(NormalHandler.java:43)
	... 10 common frames omitted
Caused by: org.apache.calcite.sql.validate.SqlValidatorException: Table 'ignite1.PUBLIC.DHCPEVENTSTAGE1' not found
	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
	at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
	at org.apache.calcite.runtime.Resources$ExInstWithCause.ex(Resources.java:463)
	at org.apache.calcite.runtime.Resources$ExInst.ex(Resources.java:572)
	... 33 common frames omitted
2020-09-17 09:11:19,464 [out-of-band-observer] INFO  query.logger - {"queryId":"209cd7c9-1899-c4bb-cc9b-ad82924ca900","schema":"[ignite1]","queryText":"SELECT * FROM ignite1.PUBLIC.DHCPEVENTSTAGE1","start":1600333878524,"finish":1600333879138,"outcome":"FAILED","outcomeReason":"Table 'ignite1.PUBLIC.DHCPEVENTSTAGE1' not found","username":"jenil"}
172.18.0.1 - - [17/Sep/2020:09:11:19 +0000] "POST /apiv2/datasets/new_untitled_sql_and_run?newVersion=0003680276443398 HTTP/1.1" 400 6147 "http://localhost:9047/new_query?context=%22ignite1%22" "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:80.0) Gecko/20100101 Firefox/80.0"

20741c11-2a82-499f-b67a-7ca7bfda2ef9.zip (5.9 KB)

So currently what’s happening is that we’re listing tables using the query on information_schema that you’ve passed in, but we’re trying to validate the table exists and get metadata for the table using getTables and getColumns respectively, but we are no longer supplying the catalog.

Let’s try change the call to getSchemaFetcher to use a different constructor for ArpSchemaFetcher:

return new ArpDialect.ArpSchemaFetcher(tableQuery, name, dataSource, timeout, config, true, true);

Hello,

I have changed constructor according to your suggestion. Still not able to list columns but now I got different error in log.

2020-09-17 10:09:40,770 [qtp1406632624-124] WARN  c.d.e.store.jdbc.JdbcSchemaFetcher - Failed to fetch schema for [ignite1, PUBLIC, DHCPEVENTSTAGE1].
2020-09-17 10:09:40,774 [qtp1406632624-124] WARN  c.d.e.store.jdbc.JdbcSchemaFetcher - Failed to fetch schema for [ignite1, PUBLIC, DHCPEVENTSTAGE1].
172.18.0.1 - - [17/Sep/2020:10:09:40 +0000] "POST /apiv2/datasets/new_untitled/?parentDataset=ignite1.PUBLIC.DHCPEVENTSTAGE1&newVersion=0007373347503817&limit=0 HTTP/1.1" 404 222 "http://localhost:9047/source/ignite1/PUBLIC.DHCPEVENTSTAGE1" "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:80.0) Gecko/20100101 Firefox/80.0"
172.18.0.1 - - [17/Sep/2020:10:09:41 +0000] "GET /vs/base/worker/workerMain.js HTTP/1.1" 200 151535 "http://localhost:9047/source/ignite1/PUBLIC.DHCPEVENTSTAGE1" "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:80.0) Gecko/20100101 Firefox/80.0"

It sounds like there’s an issue getting column metadata now. Is it possible to see what queries Ignite is receiving?

Hello,

I don’t see any query received by ignite so I have changed some code to get that error and got to know that Ignite JDBC driver doesn’t support fetching meta data using prepare statement.

2020-09-17 10:56:59,436 [qtp556812393-123] WARN  c.d.exec.store.jdbc.conf.IgniteConf - Failed to fetch schema for {}.
java.sql.SQLFeatureNotSupportedException: Meta data for prepared statement is not supported.
	at org.apache.ignite.internal.jdbc.thin.JdbcThinPreparedStatement.getMetaData(JdbcThinPreparedStatement.java:331)
	at org.apache.commons.dbcp2.DelegatingPreparedStatement.getMetaData(DelegatingPreparedStatement.java:230)
	at org.apache.commons.dbcp2.DelegatingPreparedStatement.getMetaData(DelegatingPreparedStatement.java:230)
	at com.dremio.exec.store.jdbc.conf.IgniteConf$IgniteSchemaFetcher.getTableHandleViaPrepare(IgniteConf.java:116)
	at com.dremio.exec.store.jdbc.conf.IgniteConf$IgniteSchemaFetcher.getTableHandle(IgniteConf.java:169)
	at com.dremio.exec.store.jdbc.JdbcStoragePlugin.getDatasetHandle(JdbcStoragePlugin.java:333)
	at com.dremio.exec.catalog.ManagedStoragePlugin.getDatasetHandle(ManagedStoragePlugin.java:792)
	at com.dremio.exec.catalog.DatasetManager.getTableFromPlugin(DatasetManager.java:308)
	at com.dremio.exec.catalog.DatasetManager.getTable(DatasetManager.java:207)
	at com.dremio.exec.catalog.CatalogImpl.getTable(CatalogImpl.java:181)
	at com.dremio.exec.catalog.SourceAccessChecker.lambda$getTable$3(SourceAccessChecker.java:126)
	at com.dremio.exec.catalog.SourceAccessChecker.getIfVisible(SourceAccessChecker.java:90)
	at com.dremio.exec.catalog.SourceAccessChecker.getTable(SourceAccessChecker.java:126)
	at com.dremio.exec.catalog.DelegatingCatalog.getTable(DelegatingCatalog.java:88)
	at com.dremio.exec.catalog.CachingCatalog.getTable(CachingCatalog.java:93)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.glassfish.hk2.utilities.reflection.ReflectionHelper.invoke(ReflectionHelper.java:1268)
	at org.jvnet.hk2.internal.MethodInterceptorImpl.internalInvoke(MethodInterceptorImpl.java:85)
	at org.jvnet.hk2.internal.MethodInterceptorImpl.invoke(MethodInterceptorImpl.java:101)
	at org.jvnet.hk2.internal.MethodInterceptorInvocationHandler.invoke(MethodInterceptorInvocationHandler.java:39)
	at com.sun.proxy.$Proxy112.getTable(Unknown Source)
	at com.dremio.dac.explore.DatasetsResource.getDatasetSummary(DatasetsResource.java:268)
	at com.dremio.dac.explore.DatasetsResource.newUntitled(DatasetsResource.java:144)
	at com.dremio.dac.explore.DatasetsResource.newUntitledFromParent(DatasetsResource.java:210)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.glassfish.jersey.server.model.internal.ResourceMethodInvocationHandlerFactory.lambda$static$0(ResourceMethodInvocationHandlerFactory.java:52)
	at org.glassfish.jersey.server.model.internal.AbstractJavaResourceMethodDispatcher$1.run(AbstractJavaResourceMethodDispatcher.java:124)
	at org.glassfish.jersey.server.model.internal.AbstractJavaResourceMethodDispatcher.invoke(AbstractJavaResourceMethodDispatcher.java:167)
	at org.glassfish.jersey.server.model.internal.JavaResourceMethodDispatcherProvider$TypeOutInvoker.doDispatch(JavaResourceMethodDispatcherProvider.java:219)
	at org.glassfish.jersey.server.model.internal.AbstractJavaResourceMethodDispatcher.dispatch(AbstractJavaResourceMethodDispatcher.java:79)
	at org.glassfish.jersey.server.model.ResourceMethodInvoker.invoke(ResourceMethodInvoker.java:469)
	at org.glassfish.jersey.server.model.ResourceMethodInvoker.apply(ResourceMethodInvoker.java:391)
	at org.glassfish.jersey.server.model.ResourceMethodInvoker.apply(ResourceMethodInvoker.java:80)
	at org.glassfish.jersey.server.ServerRuntime$1.run(ServerRuntime.java:253)
	at org.glassfish.jersey.internal.Errors$1.call(Errors.java:248)
	at org.glassfish.jersey.internal.Errors$1.call(Errors.java:244)
	at org.glassfish.jersey.internal.Errors.process(Errors.java:292)
	at org.glassfish.jersey.internal.Errors.process(Errors.java:274)
	at org.glassfish.jersey.internal.Errors.process(Errors.java:244)
	at org.glassfish.jersey.process.internal.RequestScope.runInScope(RequestScope.java:265)
	at org.glassfish.jersey.server.ServerRuntime.process(ServerRuntime.java:232)
	at org.glassfish.jersey.server.ApplicationHandler.handle(ApplicationHandler.java:680)
	at org.glassfish.jersey.servlet.WebComponent.serviceImpl(WebComponent.java:394)
	at org.glassfish.jersey.servlet.WebComponent.service(WebComponent.java:346)
	at org.glassfish.jersey.servlet.ServletContainer.service(ServletContainer.java:366)
	at org.glassfish.jersey.servlet.ServletContainer.service(ServletContainer.java:319)
	at org.glassfish.jersey.servlet.ServletContainer.service(ServletContainer.java:205)
	at org.eclipse.jetty.servlet.ServletHolder.handle(ServletHolder.java:755)
	at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1617)
	at com.dremio.dac.server.tracing.SpanFinishingFilter.doFilter(SpanFinishingFilter.java:46)
	at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1604)
	at com.dremio.dac.server.SecurityHeadersFilter.doFilter(SecurityHeadersFilter.java:52)
	at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1604)
	at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:545)
	at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:233)
	at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1297)
	at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:188)
	at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:485)
	at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:186)
	at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1212)
	at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:141)
	at org.eclipse.jetty.server.handler.gzip.GzipHandler.handle(GzipHandler.java:717)
	at org.eclipse.jetty.server.handler.RequestLogHandler.handle(RequestLogHandler.java:54)
	at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127)
	at org.eclipse.jetty.server.Server.handle(Server.java:500)
	at org.eclipse.jetty.server.HttpChannel.lambda$handle$1(HttpChannel.java:383)
	at org.eclipse.jetty.server.HttpChannel.dispatch(HttpChannel.java:547)
	at org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:375)
	at org.eclipse.jetty.server.HttpConnection.onFillable(HttpConnection.java:270)
	at org.eclipse.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:311)
	at org.eclipse.jetty.io.FillInterest.fillable(FillInterest.java:103)
	at org.eclipse.jetty.io.ChannelEndPoint$2.run(ChannelEndPoint.java:117)
	at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.runTask(EatWhatYouKill.java:336)
	at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:313)
	at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:171)
	at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.run(EatWhatYouKill.java:129)
	at org.eclipse.jetty.util.thread.ReservedThreadExecutor$ReservedThread.run(ReservedThreadExecutor.java:388)
	at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:806)
	at org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:938)
	at java.lang.Thread.run(Thread.java:748)

After that I tried setting usePrepareForGetTables to false, but with that setting I don’t get any columns listed, and there is no error in the logs.

It looks like making this work will be a bit more complicated if Ignite doesn’t support metadata at prepare.

We previously fixed the method to list datasets so that the datasets returned no longer include the Ignite catalog that can’t be used in queries.

JdbcSchemaFetcher has a function to get a specific table handle that we’re running into here – getTableHandle. It takes in a Dremio path (including the source name, and schema + table name from Ignite).

Its current implementation will either use getTables() with filters or use prepare to:

  1. check if the path returned is valid.
  2. construct a JdbcDatasetHandle (basically a container for a path again).

The getTables path fails because the existing code passes in an empty string for the catalog field, expecting it to get ignored; however, Ignite does not ignore it. It’s likely that passing in null instead would work.

The prepare path fails because Ignite doesn’t support ResultSetMetaData from prepared statements, so we fail there.

You can write your own implementation of getTableHandle that conforms to how the Ignite driver works (pass in null to getTables or instead of preparing, execute a limit zero query on the table perhaps).

However this still won’t be enough to get column metadata properly.

You’ll also need to override JdbcSchemaFetcher’s getTableMetadata and listPartitionChunks methods. These return JdbcDatasetMetadata objects, which will eventually call either getColumns or again try to get metadata by using prepare. You’ll have to write your own subclass of this that gets column metadata in an Ignite-friendly way.

Hello @jduong,

Thanks for help.
I got it working. I have removed all the custom changes for dialect and jdbc schema fetcher in IgniteConf file. And instead of overriding from dremio side,I have overrided Ignite JDBC Meta API to not return catelog in meta api’s method call(getSchemas,getTables,getColumns,getIndexInfo,getPrimaryKeys).

If possible, please add support in the ARP dialect to honor supports_catalogs in all the relevant code areas. That would solve this issue.

public Optional JdbcSchemaFetcher.getTableHandle(final List tableSchemaPath)

Overriding this method can fix this bug; Ignite only supports null or “IGNITE” as the catalog name.