Group ID: io.snappydata
Artifact ID: snappy-spark-hive_2.11
Version: 2.0.1-3
Last Modified: 2019-11-01 21:07:27
Packaging: jar
Name: SnappyData
Description: SnappyData distributed data store and execution engine
URL: http://www.snappydata.io
Size: 987.32KB
Maven dependency snippet:

<dependency>
  <groupId>io.snappydata</groupId>
  <artifactId>snappy-spark-hive_2.11</artifactId>
  <version>2.0.1-3</version>
</dependency>
Gradle dependency snippet:

io.snappydata:snappy-spark-hive_2.11:2.0.1-3
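For an sbt build, the same coordinate can be declared as below. This is a minimal sketch and an assumption (the page only shows the Maven and Gradle forms); the plain % is deliberate, since the artifact name already carries the _2.11 Scala-version suffix:

// build.sbt (hypothetical): equivalent of the Maven/Gradle coordinates above.
// Plain "%" is used because the Scala-version suffix is already in the name;
// "%%" would append it a second time.
libraryDependencies += "io.snappydata" % "snappy-spark-hive_2.11" % "2.0.1-3"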
POM file contents:
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <groupId>io.snappydata</groupId>
  <artifactId>snappy-spark-hive_2.11</artifactId>
  <version>2.0.1-3</version>
  <name>SnappyData</name>
  <description>SnappyData distributed data store and execution engine</description>
  <url>http://www.snappydata.io</url>
  <licenses>
    <license>
      <name>The Apache License, Version 2.0</name>
      <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
    </license>
  </licenses>
  <developers>
    <developer>
      <id>smenon</id>
      <name>Sudhir Menon</name>
      <email>smenon@snappydata.io</email>
    </developer>
  </developers>
  <scm>
    <connection>scm:git:https://github.com/SnappyDataInc/snappydata.git</connection>
    <developerConnection>scm:git:https://github.com/SnappyDataInc/snappydata.git</developerConnection>
    <url>https://github.com/SnappyDataInc/snappydata</url>
  </scm>
  <dependencies>
    <dependency>
      <groupId>org.scalatest</groupId>
      <artifactId>scalatest_2.11</artifactId>
      <version>2.2.6</version>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.mockito</groupId>
      <artifactId>mockito-core</artifactId>
      <version>1.10.19</version>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>joda-time</groupId>
      <artifactId>joda-time</artifactId>
      <version>2.9.4</version>
      <scope>compile</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.avro</groupId>
      <artifactId>avro-ipc</artifactId>
      <version>1.7.7</version>
      <scope>compile</scope>
      <exclusions>
        <exclusion>
          <artifactId>servlet-api</artifactId>
          <groupId>org.mortbay.jetty</groupId>
        </exclusion>
        <exclusion>
          <artifactId>netty</artifactId>
          <groupId>org.jboss.netty</groupId>
        </exclusion>
        <exclusion>
          <artifactId>jetty-util</artifactId>
          <groupId>org.mortbay.jetty</groupId>
        </exclusion>
        <exclusion>
          <artifactId>jetty</artifactId>
          <groupId>org.mortbay.jetty</groupId>
        </exclusion>
        <exclusion>
          <artifactId>velocity</artifactId>
          <groupId>org.apache.velocity</groupId>
        </exclusion>
      </exclusions>
    </dependency>
    <dependency>
      <groupId>org.apache.avro</groupId>
      <artifactId>avro-ipc</artifactId>
      <version>1.7.7</version>
      <classifier>tests</classifier>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.slf4j</groupId>
      <artifactId>slf4j-log4j12</artifactId>
      <version>1.7.21</version>
      <scope>compile</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.calcite</groupId>
      <artifactId>calcite-avatica</artifactId>
      <version>1.2.0-incubating</version>
      <scope>compile</scope>
      <exclusions>
        <exclusion>
          <artifactId>jackson-annotations</artifactId>
          <groupId>com.fasterxml.jackson.core</groupId>
        </exclusion>
        <exclusion>
          <artifactId>jackson-core</artifactId>
          <groupId>com.fasterxml.jackson.core</groupId>
        </exclusion>
        <exclusion>
          <artifactId>jackson-databind</artifactId>
          <groupId>com.fasterxml.jackson.core</groupId>
        </exclusion>
      </exclusions>
    </dependency>
    <dependency>
      <groupId>org.apache.httpcomponents</groupId>
      <artifactId>httpclient</artifactId>
      <version>4.5.2</version>
      <scope>compile</scope>
    </dependency>
    <dependency>
      <groupId>io.snappydata</groupId>
      <artifactId>snappy-spark-catalyst_2.11</artifactId>
      <version>2.0.1-3</version>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.thrift</groupId>
      <artifactId>libthrift</artifactId>
      <version>0.9.3</version>
      <scope>compile</scope>
      <exclusions>
        <exclusion>
          <artifactId>slf4j-api</artifactId>
          <groupId>org.slf4j</groupId>
        </exclusion>
      </exclusions>
    </dependency>
    <dependency>
      <groupId>commons-httpclient</groupId>
      <artifactId>commons-httpclient</artifactId>
      <version>3.1</version>
      <scope>compile</scope>
    </dependency>
    <dependency>
      <groupId>org.spark-project.hive</groupId>
      <artifactId>hive-exec</artifactId>
      <version>1.2.1.spark2</version>
      <scope>compile</scope>
      <exclusions>
        <exclusion>
          <artifactId>libfb303</artifactId>
          <groupId>org.apache.thrift</groupId>
        </exclusion>
        <exclusion>
          <artifactId>curator-client</artifactId>
          <groupId>org.apache.curator</groupId>
        </exclusion>
        <exclusion>
          <artifactId>curator-framework</artifactId>
          <groupId>org.apache.curator</groupId>
        </exclusion>
        <exclusion>
          <artifactId>spark-client</artifactId>
          <groupId>org.spark-project.hive</groupId>
        </exclusion>
        <exclusion>
          <artifactId>log4j</artifactId>
          <groupId>log4j</groupId>
        </exclusion>
        <exclusion>
          <artifactId>libthrift</artifactId>
          <groupId>org.apache.thrift</groupId>
        </exclusion>
        <exclusion>
          <artifactId>slf4j-api</artifactId>
          <groupId>org.slf4j</groupId>
        </exclusion>
        <exclusion>
          <artifactId>apache-curator</artifactId>
          <groupId>org.apache.curator</groupId>
        </exclusion>
        <exclusion>
          <artifactId>groovy-all</artifactId>
          <groupId>org.codehaus.groovy</groupId>
        </exclusion>
        <exclusion>
          <artifactId>avro-mapred</artifactId>
          <groupId>org.apache.avro</groupId>
        </exclusion>
        <exclusion>
          <artifactId>hive-ant</artifactId>
          <groupId>org.spark-project.hive</groupId>
        </exclusion>
        <exclusion>
          <artifactId>commons-logging</artifactId>
          <groupId>commons-logging</groupId>
        </exclusion>
        <exclusion>
          <artifactId>zookeeper</artifactId>
          <groupId>org.apache.zookeeper</groupId>
        </exclusion>
        <exclusion>
          <artifactId>ant</artifactId>
          <groupId>org.apache.ant</groupId>
        </exclusion>
        <exclusion>
          <artifactId>hive-metastore</artifactId>
          <groupId>org.spark-project.hive</groupId>
        </exclusion>
        <exclusion>
          <artifactId>calcite-core</artifactId>
          <groupId>org.apache.calcite</groupId>
        </exclusion>
        <exclusion>
          <artifactId>commons-codec</artifactId>
          <groupId>commons-codec</groupId>
        </exclusion>
        <exclusion>
          <artifactId>kryo</artifactId>
          <groupId>com.esotericsoftware.kryo</groupId>
        </exclusion>
        <exclusion>
          <artifactId>hive-shims</artifactId>
          <groupId>org.spark-project.hive</groupId>
        </exclusion>
        <exclusion>
          <artifactId>jline</artifactId>
          <groupId>jline</groupId>
        </exclusion>
        <exclusion>
          <artifactId>slf4j-log4j12</artifactId>
          <groupId>org.slf4j</groupId>
        </exclusion>
        <exclusion>
          <artifactId>commons-httpclient</artifactId>
          <groupId>commons-httpclient</groupId>
        </exclusion>
      </exclusions>
    </dependency>
    <dependency>
      <groupId>org.apache.avro</groupId>
      <artifactId>avro</artifactId>
      <version>1.7.7</version>
      <scope>compile</scope>
    </dependency>
    <dependency>
      <groupId>org.codehaus.jackson</groupId>
      <artifactId>jackson-mapper-asl</artifactId>
      <version>1.9.13</version>
      <scope>compile</scope>
    </dependency>
    <dependency>
      <groupId>commons-codec</groupId>
      <artifactId>commons-codec</artifactId>
      <version>1.10</version>
      <scope>compile</scope>
    </dependency>
    <dependency>
      <groupId>junit</groupId>
      <artifactId>junit</artifactId>
      <version>4.12</version>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>com.twitter</groupId>
      <artifactId>parquet-hadoop-bundle</artifactId>
      <version>1.6.0</version>
      <scope>compile</scope>
    </dependency>
    <dependency>
      <groupId>org.pegdown</groupId>
      <artifactId>pegdown</artifactId>
      <version>1.6.0</version>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.jodd</groupId>
      <artifactId>jodd-core</artifactId>
      <version>3.5.2</version>
      <scope>compile</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.thrift</groupId>
      <artifactId>libfb303</artifactId>
      <version>0.9.3</version>
      <scope>compile</scope>
      <exclusions>
        <exclusion>
          <artifactId>slf4j-api</artifactId>
          <groupId>org.slf4j</groupId>
        </exclusion>
      </exclusions>
    </dependency>
    <dependency>
      <groupId>org.slf4j</groupId>
      <artifactId>slf4j-api</artifactId>
      <version>1.7.21</version>
      <scope>compile</scope>
    </dependency>
    <dependency>
      <groupId>org.scala-lang</groupId>
      <artifactId>scala-reflect</artifactId>
      <version>2.11.8</version>
      <scope>compile</scope>
    </dependency>
    <dependency>
      <groupId>io.snappydata</groupId>
      <artifactId>snappy-spark-tags_2.11</artifactId>
      <version>2.0.1-3</version>
      <scope>compile</scope>
    </dependency>
    <dependency>
      <groupId>log4j</groupId>
      <artifactId>log4j</artifactId>
      <version>1.2.17</version>
      <scope>compile</scope>
    </dependency>
    <dependency>
      <groupId>com.novocode</groupId>
      <artifactId>junit-interface</artifactId>
      <version>0.11</version>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.scala-lang</groupId>
      <artifactId>scala-library</artifactId>
      <version>2.11.8</version>
      <scope>compile</scope>
    </dependency>
    <dependency>
      <groupId>io.snappydata</groupId>
      <artifactId>snappy-spark-sql_2.11</artifactId>
      <version>2.0.1-3</version>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.scalacheck</groupId>
      <artifactId>scalacheck_2.11</artifactId>
      <version>1.12.5</version>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.datanucleus</groupId>
      <artifactId>datanucleus-core</artifactId>
      <version>3.2.10</version>
      <scope>compile</scope>
    </dependency>
    <dependency>
      <groupId>io.snappydata</groupId>
      <artifactId>snappy-spark-sql_2.11</artifactId>
      <version>2.0.1-3</version>
      <scope>compile</scope>
    </dependency>
    <dependency>
      <groupId>com.google.code.findbugs</groupId>
      <artifactId>jsr305</artifactId>
      <version>3.0.1</version>
      <scope>compile</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.derby</groupId>
      <artifactId>derby</artifactId>
      <version>10.12.1.1</version>
      <scope>compile</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.calcite</groupId>
      <artifactId>calcite-core</artifactId>
      <version>1.2.0-incubating</version>
      <scope>compile</scope>
      <exclusions>
        <exclusion>
          <artifactId>guava</artifactId>
          <groupId>com.google.guava</groupId>
        </exclusion>
        <exclusion>
          <artifactId>pentaho-aggdesigner-algorithm</artifactId>
          <groupId>org.pentaho</groupId>
        </exclusion>
        <exclusion>
          <artifactId>jackson-annotations</artifactId>
          <groupId>com.fasterxml.jackson.core</groupId>
        </exclusion>
        <exclusion>
          <artifactId>hsqldb</artifactId>
          <groupId>org.hsqldb</groupId>
        </exclusion>
        <exclusion>
          <artifactId>jackson-core</artifactId>
          <groupId>com.fasterxml.jackson.core</groupId>
        </exclusion>
        <exclusion>
          <artifactId>jsr305</artifactId>
          <groupId>com.google.code.findbugs</groupId>
        </exclusion>
        <exclusion>
          <artifactId>jackson-databind</artifactId>
          <groupId>com.fasterxml.jackson.core</groupId>
        </exclusion>
        <exclusion>
          <artifactId>janino</artifactId>
          <groupId>org.codehaus.janino</groupId>
        </exclusion>
      </exclusions>
    </dependency>
    <dependency>
      <groupId>io.snappydata</groupId>
      <artifactId>snappy-spark-core_2.11</artifactId>
      <version>2.0.1-3</version>
      <scope>compile</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.avro</groupId>
      <artifactId>avro-mapred</artifactId>
      <version>1.7.7</version>
      <classifier>hadoop2</classifier>
      <scope>compile</scope>
      <exclusions>
        <exclusion>
          <artifactId>servlet-api</artifactId>
          <groupId>org.mortbay.jetty</groupId>
        </exclusion>
        <exclusion>
          <artifactId>netty</artifactId>
          <groupId>org.jboss.netty</groupId>
        </exclusion>
        <exclusion>
          <artifactId>avro-ipc</artifactId>
          <groupId>org.apache.avro</groupId>
        </exclusion>
        <exclusion>
          <artifactId>jetty-util</artifactId>
          <groupId>org.mortbay.jetty</groupId>
        </exclusion>
        <exclusion>
          <artifactId>jetty</artifactId>
          <groupId>org.mortbay.jetty</groupId>
        </exclusion>
        <exclusion>
          <artifactId>velocity</artifactId>
          <groupId>org.apache.velocity</groupId>
        </exclusion>
      </exclusions>
    </dependency>
    <dependency>
      <groupId>org.spark-project.hive</groupId>
      <artifactId>hive-metastore</artifactId>
      <version>1.2.1.spark2</version>
      <scope>compile</scope>
      <exclusions>
        <exclusion>
          <artifactId>guava</artifactId>
          <groupId>com.google.guava</groupId>
        </exclusion>
        <exclusion>
          <artifactId>libfb303</artifactId>
          <groupId>org.apache.thrift</groupId>
        </exclusion>
        <exclusion>
          <artifactId>libthrift</artifactId>
          <groupId>org.apache.thrift</groupId>
        </exclusion>
        <exclusion>
          <artifactId>hive-shims</artifactId>
          <groupId>org.spark-project.hive</groupId>
        </exclusion>
        <exclusion>
          <artifactId>derby</artifactId>
          <groupId>org.apache.derby</groupId>
        </exclusion>
        <exclusion>
          <artifactId>slf4j-api</artifactId>
          <groupId>org.slf4j</groupId>
        </exclusion>
        <exclusion>
          <artifactId>servlet-api</artifactId>
          <groupId>javax.servlet</groupId>
        </exclusion>
        <exclusion>
          <artifactId>slf4j-log4j12</artifactId>
          <groupId>org.slf4j</groupId>
        </exclusion>
        <exclusion>
          <artifactId>hive-serde</artifactId>
          <groupId>org.spark-project.hive</groupId>
        </exclusion>
      </exclusions>
    </dependency>
    <dependency>
      <groupId>io.snappydata</groupId>
      <artifactId>snappy-spark-core_2.11</artifactId>
      <version>2.0.1-3</version>
      <scope>test</scope>
    </dependency>
  </dependencies>
</project>
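The POM shows this artifact is SnappyData's build of Spark 2.0's Hive integration module: it depends on hive-exec and hive-metastore (1.2.1.spark2) plus the sibling snappy-spark-sql/core/tags artifacts, and the class listing below is the org.apache.spark.sql.hive package tree. A minimal usage sketch in Scala, assuming the standard Spark 2.0.x SparkSession API with this jar and its dependencies on the classpath (nothing here is SnappyData-specific, and names like HiveQuickStart are illustrative):

import org.apache.spark.sql.SparkSession

object HiveQuickStart {
  def main(args: Array[String]): Unit = {
    // enableHiveSupport() switches the catalog implementation to Hive;
    // it fails at startup if this module's classes are not on the classpath.
    val spark = SparkSession.builder()
      .appName("snappy-spark-hive quick start")
      .master("local[*]")
      .enableHiveSupport()
      .getOrCreate()

    // DDL and queries run through the Hive support classes packaged in
    // this jar (HiveExternalCatalog, HiveTableScanExec, and so on).
    spark.sql("CREATE TABLE IF NOT EXISTS src (key INT, value STRING)")
    spark.sql("SHOW TABLES").show()

    spark.stop()
  }
}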
Jar contents:
META-INF/MANIFEST.MF
org.apache.hadoop.hive.ql.io.orc.SparkOrcNewRecordReader.class
org.apache.spark.sql.hive.HiveStrategies$HiveTableScans$$anonfun$2.class
org.apache.spark.sql.hive.HiveShim$$anonfun$prepareWritable$1.class
org.apache.spark.sql.hive.HiveInspectors$$anonfun$wrapperFor$1.class
org.apache.spark.sql.hive.HadoopTableReader$$anonfun$3.class
org.apache.spark.sql.hive.HadoopTableReader$$anonfun$14$$anonfun$apply$4.class
org.apache.spark.sql.hive.HiveInspectors$$anonfun$3.class
org.apache.spark.sql.hive.execution.CreateHiveTableAsSelectCommand$$anonfun$3.class
org.apache.spark.sql.hive.execution.ScriptTransformation$$anon$1$$anonfun$next$1.class
org.apache.spark.sql.hive.execution.ScriptTransformationWriterThread$$anonfun$run$1.class
org.apache.spark.sql.hive.execution.HiveTableScanExec$$anonfun$addColumnMetadataToConf$2.class
org.apache.spark.sql.hive.execution.ScriptTransformationWriterThread$$anonfun$run$1$$anonfun$apply$mcV$sp$1.class
org.apache.spark.sql.hive.execution.InsertIntoHiveTable$$anonfun$2.class
org.apache.spark.sql.hive.execution.HiveScriptIOSchema$$anonfun$11.class
org.apache.spark.sql.hive.execution.HiveTableScanExec$$anonfun$5.class
org.apache.spark.sql.hive.execution.HiveScriptIOSchema$$anonfun$recordReader$1.class
org.apache.spark.sql.hive.execution.HiveTableScanExec$.class
org.apache.spark.sql.hive.execution.InsertIntoHiveTable$$anonfun$sideEffectResult$1.class
org.apache.spark.sql.hive.execution.HiveTableScanExec$$anonfun$10.class
org.apache.spark.sql.hive.execution.HiveTableScanExec$$anonfun$doExecute$1$$anonfun$apply$2.class
org.apache.spark.sql.hive.execution.HiveScriptIOSchema$$anonfun$6.class
org.apache.spark.sql.hive.execution.HiveScriptIOSchema$$anonfun$9.class
org.apache.spark.sql.hive.execution.ScriptTransformation$$anonfun$1.class
org.apache.spark.sql.hive.execution.InsertIntoHiveTable$$anonfun$3.class
org.apache.spark.sql.hive.execution.ScriptTransformation$$anon$1$$anonfun$4.class
org.apache.spark.sql.hive.execution.InsertIntoHiveTable$$anonfun$sideEffectResult$3.class
org.apache.spark.sql.hive.execution.HiveTableScanExec$$anonfun$2.class
org.apache.spark.sql.hive.execution.HiveScriptIOSchema$$anonfun$10.class
org.apache.spark.sql.hive.execution.HiveScriptIOSchema$$anonfun$initInputSerDe$1$$anonfun$8.class
org.apache.spark.sql.hive.execution.InsertIntoHiveTable$.class
org.apache.spark.sql.hive.execution.HiveTableScanExec$$anonfun$prunePartitions$1.class
org.apache.spark.sql.hive.execution.InsertIntoHiveTable$$anonfun$saveAsHiveFile$1.class
org.apache.spark.sql.hive.execution.HiveTableScanExec$$anonfun$1.class
org.apache.spark.sql.hive.execution.HiveTableScanExec$$anonfun$prunePartitions$1$$anonfun$8.class
org.apache.spark.sql.hive.execution.ScriptTransformation$$anon$1.class
org.apache.spark.sql.hive.execution.CreateHiveTableAsSelectCommand$.class
org.apache.spark.sql.hive.execution.HiveTableScanExec$$anonfun$producedAttributes$1.class
org.apache.spark.sql.hive.execution.CreateHiveTableAsSelectCommand$$anonfun$2.class
org.apache.spark.sql.hive.execution.HiveTableScanExec$$anonfun$2$$anonfun$apply$1.class
org.apache.spark.sql.hive.execution.HiveTableScanExec$$anonfun$doExecute$1.class
org.apache.spark.sql.hive.execution.HiveTableScanExec$$anonfun$prunePartitions$1$$anonfun$7.class
org.apache.spark.sql.hive.execution.ScriptTransformation$$anon$1$$anonfun$next$2.class
org.apache.spark.sql.hive.execution.CreateHiveTableAsSelectCommand.class
org.apache.spark.sql.hive.execution.HiveScriptIOSchema$$anonfun$initInputSerDe$1.class
org.apache.spark.sql.hive.execution.InsertIntoHiveTable$$anonfun$sideEffectResult$2.class
org.apache.spark.sql.hive.execution.HiveScriptIOSchema$$anonfun$7.class
org.apache.spark.sql.hive.execution.HiveTableScanExec$$anonfun$4.class
org.apache.spark.sql.hive.execution.HiveTableScanExec$$anonfun$3.class
org.apache.spark.sql.hive.execution.CreateHiveTableAsSelectCommand$$anonfun$4.class
org.apache.spark.sql.hive.execution.InsertIntoHiveTable$$anonfun$6.class
org.apache.spark.sql.hive.execution.InsertIntoHiveTable.class
org.apache.spark.sql.hive.execution.HiveScriptIOSchema$$anonfun$recordWriter$1.class
org.apache.spark.sql.hive.execution.CreateHiveTableAsSelectCommand$$anonfun$1.class
org.apache.spark.sql.hive.execution.HiveTableScanExec$$anonfun$6.class
org.apache.spark.sql.hive.execution.HiveScriptIOSchema.class
org.apache.spark.sql.hive.execution.HiveTableScanExec$$anonfun$9.class
org.apache.spark.sql.hive.execution.ScriptTransformation$.class
org.apache.spark.sql.hive.execution.InsertIntoHiveTable$$anonfun$getStagingDir$1.class
org.apache.spark.sql.hive.execution.InsertIntoHiveTable$$anonfun$sideEffectResult$3$$anonfun$apply$1.class
org.apache.spark.sql.hive.execution.InsertIntoHiveTable$$anonfun$saveAsHiveFile$3.class
org.apache.spark.sql.hive.execution.ScriptTransformation.class
org.apache.spark.sql.hive.execution.HiveTableScanExec.class
org.apache.spark.sql.hive.execution.ScriptTransformationWriterThread$$anonfun$run$1$$anonfun$apply$mcV$sp$3.class
org.apache.spark.sql.hive.execution.ScriptTransformation$$anonfun$3.class
org.apache.spark.sql.hive.execution.ScriptTransformationWriterThread$$anonfun$run$1$$anonfun$apply$mcV$sp$2.class
org.apache.spark.sql.hive.execution.InsertIntoHiveTable$$anonfun$1.class
org.apache.spark.sql.hive.execution.HiveScriptIOSchema$.class
org.apache.spark.sql.hive.execution.HiveTableScanExec$$anonfun$addColumnMetadataToConf$1.class
org.apache.spark.sql.hive.execution.ScriptTransformationWriterThread.class
org.apache.spark.sql.hive.execution.ScriptTransformation$$anonfun$2.class
org.apache.spark.sql.hive.execution.InsertIntoHiveTable$$anonfun$5.class
org.apache.spark.sql.hive.execution.ScriptTransformation$$anon$1$$anonfun$checkFailureAndPropagate$1.class
org.apache.spark.sql.hive.execution.HiveScriptIOSchema$$anonfun$initOutputSerDe$1.class
org.apache.spark.sql.hive.execution.ScriptTransformation$$anonfun$5.class
org.apache.spark.sql.hive.execution.InsertIntoHiveTable$$anonfun$saveAsHiveFile$2.class
org.apache.spark.sql.hive.execution.InsertIntoHiveTable$$anonfun$4.class
org.apache.spark.sql.hive.HiveMetastoreCatalog$OrcConversions$$anonfun$apply$2$$anonfun$applyOrElse$2.class
org.apache.spark.sql.hive.SparkHiveWriterContainer$$anonfun$writeToFile$1.class
org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$org$apache$spark$sql$hive$HiveExternalCatalog$$requireTableExists$1.class
org.apache.spark.sql.hive.HiveMetastoreCatalog$$anonfun$11.class
org.apache.spark.sql.hive.HiveUtils$$anonfun$1.class
org.apache.spark.sql.hive.HiveInspectors$$anonfun$wrapperFor$3.class
org.apache.spark.sql.hive.MetastoreRelation$SchemaAttribute.class
org.apache.spark.sql.hive.MetastoreRelation$$anonfun$getHiveQlPartitions$1$$anonfun$apply$7.class
org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$org$apache$spark$sql$hive$HiveExternalCatalog$$requireDbMatches$1.class
org.apache.spark.sql.hive.SparkHiveWriterContainer.class
org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$createFunction$1.class
org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$loadPartition$1.class
org.apache.spark.sql.hive.HiveInspectors$$anonfun$unwrapperFor$6.class
org.apache.spark.sql.hive.HiveMetastoreCatalog$$anonfun$getQualifiedTableName$1.class
org.apache.spark.sql.hive.HiveSessionState.class
org.apache.spark.sql.hive.HiveUtils$$anonfun$toHiveString$3.class
org.apache.spark.sql.hive.HiveMetastoreCatalog$$anon$1$$anonfun$org$apache$spark$sql$hive$HiveMetastoreCatalog$$anon$$schemaStringFromParts$1$1$$anonfun$1.class
org.apache.spark.sql.hive.MetastoreRelation$$anonfun$1.class
org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$dropTable$1.class
org.apache.spark.sql.hive.HiveMetastoreCatalog$ParquetConversions$$anonfun$apply$1.class
org.apache.spark.sql.hive.HiveSessionCatalog$$anonfun$makeFunctionBuilder$1.class
org.apache.spark.sql.hive.InsertIntoHiveTable$$anonfun$16.class
org.apache.spark.sql.hive.HiveSimpleUDF$$anonfun$method$1.class
org.apache.spark.sql.hive.HiveShim$.class
org.apache.spark.sql.hive.HiveUtils$$anonfun$3.class
org.apache.spark.sql.hive.HiveShim$HiveFunctionWrapper.class
org.apache.spark.sql.hive.HiveInspectors$$anonfun$toInspector$4.class
org.apache.spark.sql.hive.HadoopTableReader$$anonfun$verifyPartitionPath$1$1$$anonfun$updateExistPathSetByPathPattern$1$1.class
org.apache.spark.sql.hive.HiveSessionCatalog$$anonfun$lookupRelation$1.class
org.apache.spark.sql.hive.package.class
org.apache.spark.sql.hive.HiveMetastoreCatalog$$anon$1$$anonfun$org$apache$spark$sql$hive$HiveMetastoreCatalog$$anon$$getColumnNames$1$1$$anonfun$apply$4.class
org.apache.spark.sql.hive.HiveInspectors$$anonfun$unwrapperFor$7.class
org.apache.spark.sql.hive.HiveInspectors$$anonfun$unwrapperFor$8.class
org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$alterTable$1.class
org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$tableExists$1.class
org.apache.spark.sql.hive.MetastoreRelation$$anonfun$getHiveQlPartitions$1$$anonfun$apply$5.class
org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$createDatabase$1.class
org.apache.spark.sql.hive.HiveGenericUDF$$anonfun$argumentInspectors$1.class
org.apache.spark.sql.hive.MetastoreRelation$$anonfun$6.class
org.apache.spark.sql.hive.HiveUDAFFunction.class
org.apache.spark.sql.hive.MetaStorePartitionedTableFileCatalog$.class
org.apache.spark.sql.hive.HiveInspectors$$anonfun$wrapperFor$2.class
org.apache.spark.sql.hive.HiveMetastoreCatalog$$anonfun$12.class
org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$listFunctions$1.class
org.apache.spark.sql.hive.HiveUtils$$anonfun$3$$anonfun$apply$2.class
org.apache.spark.sql.hive.HiveGenericUDTF$$anonfun$inputDataTypes$2.class
org.apache.spark.sql.hive.HiveSessionCatalog$$anonfun$2.class
org.apache.spark.sql.hive.HadoopTableReader$$anonfun$14$$anonfun$apply$6.class
org.apache.spark.sql.hive.HadoopTableReader$$anonfun$14$$anonfun$apply$8.class
org.apache.spark.sql.hive.HiveSessionCatalog$$anonfun$lookupRelation$2.class
org.apache.spark.sql.hive.HiveUtils$$anonfun$toHiveStructString$4.class
org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$loadTable$1.class
org.apache.spark.sql.hive.MetastoreRelation$$anonfun$getHiveQlPartitions$1$$anonfun$apply$3.class
org.apache.spark.sql.hive.HiveStrategies$DataSinks$.class
org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$alterPartitions$1.class
org.apache.spark.sql.hive.HiveInspectors$$anonfun$unwrap$1$$anonfun$4.class
org.apache.spark.sql.hive.HiveStrategies$HiveTableScans$.class
org.apache.spark.sql.hive.HadoopTableReader$$anonfun$12.class
org.apache.spark.sql.hive.MetastoreRelation$$anonfun$getHiveQlPartitions$1$$anonfun$apply$4.class
org.apache.spark.sql.hive.HiveMetastoreCatalog$$anonfun$org$apache$spark$sql$hive$HiveMetastoreCatalog$$getQualifiedTableName$1.class
org.apache.spark.sql.hive.HadoopTableReader$$anonfun$14$$anonfun$apply$13.class
org.apache.spark.sql.hive.HiveMetastoreCatalog$$anonfun$lookupRelation$2.class
org.apache.spark.sql.hive.MetastoreRelation$$anonfun$10.class
org.apache.spark.sql.hive.HiveGenericUDTF$$anonfun$elementSchema$1.class
org.apache.spark.sql.hive.HiveSharedState.class
org.apache.spark.sql.hive.HiveMetastoreCatalog$$anonfun$9$$anonfun$10.class
org.apache.spark.sql.hive.HiveInspectors$$anonfun$wrapperFor$6$$anonfun$apply$1.class
org.apache.spark.sql.hive.MetastoreRelation$$anonfun$2.class
org.apache.spark.sql.hive.MetastoreRelation$$anonfun$7.class
org.apache.spark.sql.hive.MetastoreRelation$$anonfun$inputFiles$1.class
org.apache.spark.sql.hive.HadoopTableReader$$anonfun$14.class
org.apache.spark.sql.hive.HiveInspectors$$anonfun$wrap$1.class
org.apache.spark.sql.hive.HiveSessionCatalog$$anonfun$1.class
org.apache.spark.sql.hive.HadoopTableReader$$anonfun$5$$anonfun$fillPartitionKeys$1$1.class
org.apache.spark.sql.hive.MetastoreRelation$$anonfun$getHiveQlPartitions$1.class
org.apache.spark.sql.hive.HiveInspectors$$anonfun$unwrap$1.class
org.apache.spark.sql.hive.HiveSimpleUDF.class
org.apache.spark.sql.hive.SparkHiveWriterContainer$$anonfun$1.class
org.apache.spark.sql.hive.HiveInspectors$$anonfun$2.class
org.apache.spark.sql.hive.HiveUtils$$anonfun$toHiveString$1.class
org.apache.spark.sql.hive.HiveSessionCatalog$$anonfun$5.class
org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$listDatabases$1.class
org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$renameFunction$1.class
org.apache.spark.sql.hive.HiveContext.class
org.apache.spark.sql.hive.MetastoreRelation$$anonfun$getHiveQlPartitions$1$$anonfun$apply$6.class
org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$renameTable$1.class
org.apache.spark.sql.hive.HiveSimpleUDF$$anonfun$1.class
org.apache.spark.sql.hive.HiveMetastoreCatalog$$anon$1$$anonfun$4.class
org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$getPartition$1.class
org.apache.spark.sql.hive.HiveMetastoreCatalog$ParquetConversions$.class
org.apache.spark.sql.hive.HiveInspectors$$anonfun$1.class
org.apache.spark.sql.hive.HiveInspectors$$anonfun$unwrapperFor$1.class
org.apache.spark.sql.hive.HiveMetastoreCatalog$$anonfun$12$$anonfun$14.class
org.apache.spark.sql.hive.HiveMetastoreCatalog$OrcConversions$.class
org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$dropDatabase$1.class
org.apache.spark.sql.hive.HiveUtils$$anonfun$toHiveStructString$3.class
org.apache.spark.sql.hive.HiveGenericUDF$.class
org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$functionExists$1.class
org.apache.spark.sql.hive.HiveUDAFFunction$$anonfun$inputTypes$1.class
org.apache.spark.sql.hive.HiveExternalCatalog.class
org.apache.spark.sql.hive.HiveMetastoreCatalog$$anon$1$$anonfun$load$1.class
org.apache.spark.sql.hive.HiveMetastoreCatalog$$anonfun$lookupRelation$1.class
org.apache.spark.sql.hive.HiveInspectors$$anonfun$6.class
org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$createTable$1.class
org.apache.spark.sql.hive.TableReader.class
org.apache.spark.sql.hive.MetastoreRelation$$anonfun$14.class
org.apache.spark.sql.hive.HiveInspectors$$anonfun$wrapperFor$9.class
org.apache.spark.sql.hive.HiveUDAFFunction$$anonfun$inputDataTypes$3.class
org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.class
org.apache.spark.sql.hive.HadoopTableReader$$anonfun$1.class
org.apache.spark.sql.hive.package$.class
org.apache.spark.sql.hive.HadoopTableReader$$anonfun$5$$anonfun$10$$anonfun$apply$1.class
org.apache.spark.sql.hive.MetastoreRelation.class
org.apache.spark.sql.hive.MetastoreRelation$SchemaAttribute$$anonfun$9.class
org.apache.spark.sql.hive.HadoopTableReader$$anonfun$14$$anonfun$apply$5.class
org.apache.spark.sql.hive.HadoopTableReader$$anonfun$14$$anonfun$apply$12.class
org.apache.spark.sql.hive.InsertIntoHiveTable$.class
org.apache.spark.sql.hive.HiveGenericUDF.class
org.apache.spark.sql.hive.HiveUtils$$anonfun$hiveClientConfigurations$1.class
org.apache.spark.sql.hive.HiveStrategies$HiveTableScans$$anonfun$1.class
org.apache.spark.sql.hive.HadoopTableReader$$anonfun$5$$anonfun$9.class
org.apache.spark.sql.hive.HadoopTableReader$$anonfun$14$$anonfun$apply$7.class
org.apache.spark.sql.hive.SparkHiveWriterContainer$$anonfun$3.class
org.apache.spark.sql.hive.HadoopTableReader$$anonfun$5$$anonfun$7.class
org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$alterDatabase$1$$anonfun$apply$mcV$sp$1.class
org.apache.spark.sql.hive.SparkHiveDynamicPartitionWriterContainer$$anonfun$5.class
org.apache.spark.sql.hive.HiveMetastoreCatalog$$anonfun$9.class
org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$alterDatabase$1.class
org.apache.spark.sql.hive.HiveInspectors$$anonfun$wrap$2.class
org.apache.spark.sql.hive.HiveGenericUDTF$.class
org.apache.spark.sql.hive.HadoopTableReader$$anonfun$verifyPartitionPath$1$1.class
org.apache.spark.sql.hive.HiveMetastoreCatalog$$anonfun$6.class
org.apache.spark.sql.hive.HadoopTableReader$$anonfun$5.class
org.apache.spark.sql.hive.MetastoreRelation$$anonfun$getHiveQlPartitions$1$$anonfun$apply$2.class
org.apache.spark.sql.hive.MetastoreRelation$$anonfun$13.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$alterPartitions$1$$anonfun$apply$mcV$sp$6.class
org.apache.spark.sql.hive.client.Shim_v0_13$$anonfun$7.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$createPartitions$1.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$listDatabases$1.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$renamePartitions$1$$anonfun$apply$mcV$sp$5.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getTableOption$1$$anonfun$apply$10$$anonfun$13.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$org$apache$spark$sql$hive$client$HiveClientImpl$$toHivePartition$2.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$loadPartition$1.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$dropPartitions$1$$anonfun$14.class
org.apache.spark.sql.hive.client.Shim_v0_13$$anonfun$createPartitions$2.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$alterDatabase$1.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getTableOption$1$$anonfun$apply$10$$anonfun$12.class
org.apache.spark.sql.hive.client.HiveClient$class.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getTableOption$1$$anonfun$apply$10$$anonfun$6.class
org.apache.spark.sql.hive.client.IsolatedClientLoader$$anonfun$3.class
org.apache.spark.sql.hive.client.IsolatedClientLoader$$anon$1$$anonfun$doLoadClass$2.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$2$$anonfun$apply$1.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$org$apache$spark$sql$hive$client$HiveClientImpl$$fromHivePartition$1.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$org$apache$spark$sql$hive$client$HiveClientImpl$$toHivePartition$4.class
org.apache.spark.sql.hive.client.Shim_v0_12.class
org.apache.spark.sql.hive.client.package.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getPartitions$1$$anonfun$apply$14.class
org.apache.spark.sql.hive.client.HiveClient$$anonfun$getPartition$1.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$dropPartitions$1$$anonfun$14$$anonfun$apply$11.class
org.apache.spark.sql.hive.client.Shim_v0_12$$anonfun$getPartitionsByFilter$1.class
org.apache.spark.sql.hive.client.package$HiveVersion$.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$4.class
org.apache.spark.sql.hive.client.Shim_v1_2.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$reset$1$$anonfun$apply$mcV$sp$7$$anonfun$apply$21.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$4$$anonfun$apply$5.class
org.apache.spark.sql.hive.client.Shim_v0_12$$anonfun$createPartitions$1$$anonfun$1.class
org.apache.spark.sql.hive.client.package$hive$v1_0$.class
org.apache.spark.sql.hive.client.Shim_v1_1.class
org.apache.spark.sql.hive.client.IsolatedClientLoader$$anonfun$downloadVersion$2.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getFunctionOption$1.class
org.apache.spark.sql.hive.client.IsolatedClientLoader$.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$2$$anonfun$apply$2.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$dropTable$1.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$4$$anonfun$apply$6.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$renameFunction$1.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$org$apache$spark$sql$hive$client$HiveClientImpl$$toHiveTable$1.class
org.apache.spark.sql.hive.client.Shim_v0_13$$anonfun$5.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$org$apache$spark$sql$hive$client$HiveClientImpl$$fromHivePartition$2.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$renamePartitions$1$$anonfun$apply$mcV$sp$4.class
org.apache.spark.sql.hive.client.Shim_v0_13.class
org.apache.spark.sql.hive.client.package$hive$v13$.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$org$apache$spark$sql$hive$client$HiveClientImpl$$toHiveTable$3.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$loadTable$1.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$listTables$1.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$liftedTree1$1$1.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$org$apache$spark$sql$hive$client$HiveClientImpl$$toHiveTable$5.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$runHive$1$$anonfun$apply$19.class
org.apache.spark.sql.hive.client.package$.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getPartitions$1$$anonfun$apply$15.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$org$apache$spark$sql$hive$client$HiveClientImpl$$fromHivePartition$3.class
org.apache.spark.sql.hive.client.Shim_v0_13$$anonfun$getDriverResults$1.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$org$apache$spark$sql$hive$client$HiveClientImpl$$toHiveTable$2.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$19.class
org.apache.spark.sql.hive.client.IsolatedClientLoader$$anonfun$liftedTree1$1$1.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$org$apache$spark$sql$hive$client$HiveClientImpl$$toHiveTable$6.class
org.apache.spark.sql.hive.client.HiveClientImpl.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$org$apache$spark$sql$hive$client$HiveClientImpl$$toHiveTable$8.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$org$apache$spark$sql$hive$client$HiveClientImpl$$toHiveTable$10.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$alterDatabase$1$$anonfun$apply$mcV$sp$2.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getDatabaseOption$1.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getTableOption$1$$anonfun$apply$10$$anonfun$11.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getPartitions$1.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$org$apache$spark$sql$hive$client$HiveClientImpl$$toHiveTable$4.class
org.apache.spark.sql.hive.client.IsolatedClientLoader$$anon$1.class
org.apache.spark.sql.hive.client.HiveClient.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$3$$anonfun$apply$4.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$dropDatabase$1.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$3$$anonfun$apply$3.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$runHive$1$$anonfun$apply$18.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$org$apache$spark$sql$hive$client$HiveClientImpl$$toHiveTable$7.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$alterFunction$1.class
org.apache.spark.sql.hive.client.IsolatedClientLoader$$anon$1$$anonfun$doLoadClass$1.class
org.apache.spark.sql.hive.client.Shim_v0_13$$anonfun$3.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getTableOption$1.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getDatabaseOption$1$$anonfun$apply$7.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$19$$anonfun$apply$23.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getTableOption$1$$anonfun$apply$10$$anonfun$9.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$reset$1$$anonfun$apply$mcV$sp$8.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$alterPartitions$1.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$reset$1$$anonfun$apply$mcV$sp$7.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$dropFunction$1.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getDatabaseOption$1$$anonfun$apply$7$$anonfun$apply$8.class
org.apache.spark.sql.hive.client.HiveClient$$anonfun$getFunction$1.class
org.apache.spark.sql.hive.client.Shim_v0_13$$anonfun$6.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$org$apache$spark$sql$hive$client$HiveClientImpl$$toHiveTable$11.class
org.apache.spark.sql.hive.client.package$hive$v1_2$.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$18.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$reset$1.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$loadDynamicPartitions$1.class
org.apache.spark.sql.hive.client.package$hive$v14$.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$createTable$1.class
org.apache.spark.sql.hive.client.IsolatedClientLoader$$anon$1$$anonfun$doLoadClass$3.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$setError$1.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$3.class
org.apache.spark.sql.hive.client.Shim_v0_13$$anonfun$2.class
org.apache.spark.sql.hive.client.Shim_v0_12$$anonfun$getDataLocation$1.class
org.apache.spark.sql.hive.client.Shim_v0_13$$anonfun$4.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$dropPartitions$1.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$retryLocked$1.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$setCurrentDatabase$1.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$reset$1$$anonfun$apply$mcV$sp$7$$anonfun$apply$20.class
org.apache.spark.sql.hive.client.Shim_v1_0.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$reset$1$$anonfun$apply$mcV$sp$9.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getTableOption$1$$anonfun$apply$10$$anonfun$10.class
org.apache.spark.sql.hive.client.IsolatedClientLoader$$anonfun$isSharedClass$1.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$runHive$1$$anonfun$apply$17.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$reset$1$$anonfun$apply$mcV$sp$9$$anonfun$apply$22.class
org.apache.spark.sql.hive.client.package$hive$v1_1$.class
org.apache.spark.sql.hive.client.package$hive$.class
org.apache.spark.sql.hive.client.IsolatedClientLoader$$anonfun$isBarrierClass$1.class
org.apache.spark.sql.hive.client.Shim$$anonfun$findStaticMethod$1.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getPartitionOption$1.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$alterTable$1.class
org.apache.spark.sql.hive.client.Shim_v0_12$$anonfun$createPartitions$1.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$createFunction$1.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$dropPartitions$1$$anonfun$apply$mcV$sp$3.class
org.apache.spark.sql.hive.client.Shim_v0_13$$anonfun$getFunctionOption$1.class
org.apache.spark.sql.hive.client.HiveClient$$anonfun$getDatabase$1.class
org.apache.spark.sql.hive.client.Shim_v0_13$$anonfun$convertFilters$1.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$createDatabase$1$$anonfun$apply$mcV$sp$1.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$listFunctions$1.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$1.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$createDatabase$1.class
org.apache.spark.sql.hive.client.IsolatedClientLoader$$anonfun$4.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$setInfo$1.class
org.apache.spark.sql.hive.client.package$HiveVersion.class
org.apache.spark.sql.hive.client.IsolatedClientLoader$$anonfun$createClient$1.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$5.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getPartitionsByFilter$1.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$org$apache$spark$sql$hive$client$HiveClientImpl$$toHivePartition$1.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$listTables$2.class
org.apache.spark.sql.hive.client.IsolatedClientLoader$$anonfun$downloadVersion$1.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getTableOption$1$$anonfun$apply$10$$anonfun$8.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$dropPartitions$1$$anonfun$apply$mcV$sp$3$$anonfun$apply$12.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$setOut$1.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$renamePartitions$1.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$runHive$1.class
org.apache.spark.sql.hive.client.IsolatedClientLoader$$anonfun$1.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getTableOption$1$$anonfun$apply$10$$anonfun$7.class
org.apache.spark.sql.hive.client.package$hive$v12$.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$withHiveState$1.class
org.apache.spark.sql.hive.client.IsolatedClientLoader.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$renamePartitions$1$$anonfun$apply$mcV$sp$5$$anonfun$16.class
org.apache.spark.sql.hive.client.Shim.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$org$apache$spark$sql$hive$client$HiveClientImpl$$toHiveTable$9.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$2.class
org.apache.spark.sql.hive.client.IsolatedClientLoader$$anonfun$2.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getTableOption$1$$anonfun$apply$10.class
org.apache.spark.sql.hive.client.HiveClient$$anonfun$getTable$1.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getTableOption$1$$anonfun$apply$9.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$renamePartitions$1$$anonfun$apply$mcV$sp$5$$anonfun$15.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getPartitionsByFilter$1$$anonfun$apply$16.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$17.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getPartitionOption$1$$anonfun$apply$13.class
org.apache.spark.sql.hive.client.Shim_v0_14.class
org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$org$apache$spark$sql$hive$client$HiveClientImpl$$toHivePartition$3.class
org.apache.spark.sql.hive.HadoopTableReader$$anonfun$org$apache$spark$sql$hive$HadoopTableReader$$createHadoopRdd$1.class
org.apache.spark.sql.hive.HiveGenericUDF$$anonfun$deferredObjects$1.class
org.apache.spark.sql.hive.MetastoreRelation$$anonfun$liftedTree1$1$1.class
org.apache.spark.sql.hive.HiveShim$ShimFileSinkDesc.class
org.apache.spark.sql.hive.HiveUtils$$anonfun$toHiveStructString$2.class
org.apache.spark.sql.hive.HiveMetastoreCatalog.class
org.apache.spark.sql.hive.HiveMetastoreCatalog$$anon$1.class
org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$getTableOption$1.class
org.apache.spark.sql.hive.SparkHiveDynamicPartitionWriterContainer.class
org.apache.spark.sql.hive.HiveSimpleUDF$$anonfun$foldable$1.class
org.apache.spark.sql.hive.HadoopTableReader$$anonfun$verifyPartitionPath$1$1$$anonfun$getPathPatternByPath$1$1.class
org.apache.spark.sql.hive.HiveSimpleUDF$$anonfun$arguments$1.class
org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$listPartitions$1.class
org.apache.spark.sql.hive.HiveInspectors$typeInfoConversions$$anonfun$toTypeInfo$1.class
org.apache.spark.sql.hive.HiveUtils$$anonfun$3$$anonfun$apply$1.class
org.apache.spark.sql.hive.HiveSessionCatalog$$anonfun$4.class
org.apache.spark.sql.hive.MetaStorePartitionedTableFileCatalog$$anonfun$getPaths$1.class
org.apache.spark.sql.hive.HiveUDAFFunction$.class
org.apache.spark.sql.hive.HiveMetastoreCatalog$$anonfun$8.class
org.apache.spark.sql.hive.HadoopTableReader$$anonfun$5$$anonfun$10.class
org.apache.spark.sql.hive.HiveInspectors$$anonfun$unwrapperFor$5.class
org.apache.spark.sql.hive.MetastoreRelation$.class
org.apache.spark.sql.hive.HadoopTableReader.class
org.apache.spark.sql.hive.HiveInspectors$$anonfun$unwrapperFor$4.class
org.apache.spark.sql.hive.HiveMetastoreCatalog$ParquetConversions$$anonfun$apply$1$$anonfun$applyOrElse$1.class
org.apache.spark.sql.hive.HiveStrategies$class.class
org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$renamePartitions$1.class
org.apache.spark.sql.hive.HiveShim$$anonfun$appendReadColumnNames$1.class
org.apache.spark.sql.hive.HiveInspectors$$anonfun$wrapperFor$4.class
org.apache.spark.sql.hive.HiveSimpleUDF$.class
org.apache.spark.sql.hive.HadoopTableReader$$anonfun$makeRDDForTable$1.class
org.apache.spark.sql.hive.HiveUtils$$anonfun$5.class
org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$getFunction$1.class
org.apache.spark.sql.hive.test.TestHiveSparkSession$$anonfun$6.class
org.apache.spark.sql.hive.test.TestHiveSparkSession$$anonfun$reset$2.class
org.apache.spark.sql.hive.test.TestHiveContext.class
org.apache.spark.sql.hive.test.TestHiveSessionState$$anon$1.class
org.apache.spark.sql.hive.test.TestHiveSparkSession$TestTable$.class
org.apache.spark.sql.hive.test.TestHiveSparkSession$TestTable.class
org.apache.spark.sql.hive.test.TestHiveFunctionRegistry$$anonfun$unregisterFunction$1.class
org.apache.spark.sql.hive.test.TestHiveSparkSession.class
org.apache.spark.sql.hive.test.TestHiveSparkSession$$anonfun$2.class
org.apache.spark.sql.hive.test.TestHiveSparkSession$$anonfun$reset$1.class
org.apache.spark.sql.hive.test.TestHiveSparkSession$$anonfun$1$$anonfun$apply$mcV$sp$1$$anonfun$apply$1.class
org.apache.spark.sql.hive.test.TestHiveSparkSession$$anonfun$2$$anonfun$apply$mcV$sp$2.class
org.apache.spark.sql.hive.test.TestHive.class
org.apache.spark.sql.hive.test.TestHiveSparkSession$$anonfun$7.class
org.apache.spark.sql.hive.test.TestHiveSparkSession$$anonfun$5.class
org.apache.spark.sql.hive.test.TestHiveSparkSession$$anonfun$loadTestTable$2.class
org.apache.spark.sql.hive.test.TestHiveSparkSession$$anonfun$envVarToFile$1.class
org.apache.spark.sql.hive.test.TestHiveSparkSession$$anonfun$loadTestTable$1.class
org.apache.spark.sql.hive.test.TestHiveSparkSession$$anonfun$reset$4.class
org.apache.spark.sql.hive.test.TestHiveQueryExecution$$anonfun$8.class
org.apache.spark.sql.hive.test.TestHiveSessionState.class
org.apache.spark.sql.hive.test.TestHiveSparkSession$$anonfun$3.class
org.apache.spark.sql.hive.test.TestHiveQueryExecution$$anonfun$analyzed$2.class
org.apache.spark.sql.hive.test.TestHiveFunctionRegistry.class
org.apache.spark.sql.hive.test.TestHiveQueryExecution.class
org.apache.spark.sql.hive.test.TestHiveQueryExecution$$anonfun$4.class
org.apache.spark.sql.hive.test.TestHiveSessionState$$anon$1$$anonfun$clear$1.class
org.apache.spark.sql.hive.test.TestHiveContext$.class
org.apache.spark.sql.hive.test.TestHiveSparkSession$$anonfun$reset$3.class
org.apache.spark.sql.hive.test.TestHiveFunctionRegistry$$anonfun$restore$1.class
org.apache.spark.sql.hive.test.TestHive$.class
org.apache.spark.sql.hive.test.TestHiveSparkSession$SqlCmd$$anonfun$cmd$1.class
org.apache.spark.sql.hive.test.TestHiveSharedState.class
org.apache.spark.sql.hive.test.TestHiveSparkSession$$anonfun$1.class
org.apache.spark.sql.hive.test.TestHiveSparkSession$SqlCmd.class
org.apache.spark.sql.hive.test.TestHiveSparkSession$$anonfun$1$$anonfun$apply$mcV$sp$1.class
org.apache.spark.sql.hive.test.TestHiveSparkSession$$anonfun$sharedState$1.class
org.apache.spark.sql.hive.test.TestHiveSparkSession$$anonfun$2$$anonfun$apply$mcV$sp$2$$anonfun$apply$2.class
org.apache.spark.sql.hive.test.TestHiveQueryExecution$$anonfun$analyzed$1.class
org.apache.spark.sql.hive.test.TestHiveSessionState$$anonfun$functionRegistry$1.class
org.apache.spark.sql.hive.HiveSessionState$$anon$1.class
org.apache.spark.sql.hive.HiveUtils$$anonfun$toHiveStructString$1.class
org.apache.spark.sql.hive.SparkHiveWriterContainer$$anonfun$abortTask$1.class
org.apache.spark.sql.hive.HiveMetastoreCatalog$CreateTables$.class
org.apache.spark.sql.hive.MetastoreRelation$$anonfun$4.class
org.apache.spark.sql.hive.HiveInspectors$$anonfun$wrapperFor$5.class
org.apache.spark.sql.hive.HiveMetastoreCatalog$QualifiedTableName.class
org.apache.spark.sql.hive.HiveMetastoreCatalog$QualifiedTableName$.class
org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$dropPartitions$1.class
org.apache.spark.sql.hive.HiveInspectors$$anonfun$wrapperFor$8$$anonfun$apply$3.class
org.apache.spark.sql.hive.HiveMetastoreCatalog$$anonfun$15.class
org.apache.spark.sql.hive.HiveUtils$$anonfun$2.class
org.apache.spark.sql.hive.HiveInspectors$class.class
org.apache.spark.sql.hive.HiveInspectors$$anonfun$7.class
org.apache.spark.sql.hive.InsertIntoHiveTable.class
org.apache.spark.sql.hive.HiveMetastoreCatalog$$anon$1$$anonfun$2.class
org.apache.spark.sql.hive.HiveStrategies.class
org.apache.spark.sql.hive.HiveMetastoreCatalog$$anon$1$$anonfun$org$apache$spark$sql$hive$HiveMetastoreCatalog$$anon$$getColumnNames$1$1.class
org.apache.spark.sql.hive.HadoopTableReader$$anonfun$14$$anonfun$apply$2.class
org.apache.spark.sql.hive.HiveInspectors$typeInfoConversions.class
org.apache.spark.sql.hive.HiveGenericUDF$$anonfun$eval$1.class
org.apache.spark.sql.hive.InsertIntoHiveTable$$anonfun$resolved$1.class
org.apache.spark.sql.hive.HiveUtils$$anonfun$newClientForExecution$1.class
org.apache.spark.sql.hive.HiveMetastoreCatalog$$anonfun$getCached$1.class
org.apache.spark.sql.hive.HiveTableUtil$.class
org.apache.spark.sql.hive.orc.OrcFileFormat$$anonfun$inferSchema$1.class
org.apache.spark.sql.hive.orc.OrcOutputWriter$$anonfun$4.class
org.apache.spark.sql.hive.orc.OrcTableScan.class
org.apache.spark.sql.hive.orc.OrcFileOperator$$anonfun$getFileReader$2.class
org.apache.spark.sql.hive.orc.OrcFilters$$anonfun$org$apache$spark$sql$hive$orc$OrcFilters$$buildSearchArgument$2$$anonfun$apply$6.class
org.apache.spark.sql.hive.orc.OrcFileOperator$$anonfun$readSchema$1.class
org.apache.spark.sql.hive.orc.OrcFilters$$anonfun$2.class
org.apache.spark.sql.hive.orc.OrcFileOperator$$anonfun$1.class
org.apache.spark.sql.hive.orc.OrcFileOperator$$anonfun$org$apache$spark$sql$hive$orc$OrcFileOperator$$isWithNonEmptySchema$1$1.class
org.apache.spark.sql.hive.orc.OrcFilters$$anonfun$org$apache$spark$sql$hive$orc$OrcFilters$$buildSearchArgument$2$$anonfun$apply$6$$anonfun$apply$7$$anonfun$apply$8.class
org.apache.spark.sql.hive.orc.OrcFilters$$anonfun$org$apache$spark$sql$hive$orc$OrcFilters$$buildSearchArgument$1$$anonfun$apply$3.class
org.apache.spark.sql.hive.orc.OrcRelation$$anonfun$unwrapOrcStructs$1.class
org.apache.spark.sql.hive.orc.OrcTableScan$$anonfun$execute$3.class
org.apache.spark.sql.hive.orc.OrcFilters$$anonfun$org$apache$spark$sql$hive$orc$OrcFilters$$buildSearchArgument$4.class
org.apache.spark.sql.hive.orc.OrcSerializer.class
org.apache.spark.sql.hive.orc.OrcFileFormat$$anonfun$buildReader$1.class
org.apache.spark.sql.hive.orc.OrcOutputWriter.class
org.apache.spark.sql.hive.orc.OrcOutputWriter$$anonfun$3.class
org.apache.spark.sql.hive.orc.OrcRelation$$anonfun$unwrapOrcStructs$2.class
org.apache.spark.sql.hive.orc.OrcOptions$.class
org.apache.spark.sql.hive.orc.OrcRelation$$anonfun$8.class
org.apache.spark.sql.hive.orc.OrcFileOperator$$anonfun$3.class
org.apache.spark.sql.hive.orc.OrcFilters$$anonfun$org$apache$spark$sql$hive$orc$OrcFilters$$buildSearchArgument$2$$anonfun$apply$6$$anonfun$apply$7.class
… (class listing truncated; not all entries are shown)
Dependency Jars:
scalatest_2.11-2.2.6.jar  (/org.scalatest/scalatest_2.11/2.2.6)
mockito-core-1.10.19.jar  (/org.mockito/mockito-core/1.10.19)
joda-time-2.9.4.jar  (/joda-time/joda-time/2.9.4)
avro-ipc-1.7.7.jar  (/org.apache.avro/avro-ipc/1.7.7)
avro-ipc-1.7.7.jar  (/org.apache.avro/avro-ipc/1.7.7, tests classifier per the POM)
slf4j-log4j12-1.7.21.jar  (/org.slf4j/slf4j-log4j12/1.7.21)
calcite-avatica-1.2.0-incubating.jar  (/org.apache.calcite/calcite-avatica/1.2.0-incubating)
httpclient-4.5.2.jar  (/org.apache.httpcomponents/httpclient/4.5.2)
snappy-spark-catalyst_2.11-2.0.1-3.jar  (/io.snappydata/snappy-spark-catalyst_2.11/2.0.1-3)
libthrift-0.9.3.jar  (/org.apache.thrift/libthrift/0.9.3)
commons-httpclient-3.1.jar  (/commons-httpclient/commons-httpclient/3.1)
hive-exec-1.2.1.spark2.jar  (/org.spark-project.hive/hive-exec/1.2.1.spark2)
avro-1.7.7.jar  (/org.apache.avro/avro/1.7.7)
jackson-mapper-asl-1.9.13.jar  (/org.codehaus.jackson/jackson-mapper-asl/1.9.13)
commons-codec-1.10.jar  (/commons-codec/commons-codec/1.10)
junit-4.12.jar  (/junit/junit/4.12)
parquet-hadoop-bundle-1.6.0.jar  (/com.twitter/parquet-hadoop-bundle/1.6.0)
pegdown-1.6.0.jar  (/org.pegdown/pegdown/1.6.0)
jodd-core-3.5.2.jar  (/org.jodd/jodd-core/3.5.2)
libfb303-0.9.3.jar  (/org.apache.thrift/libfb303/0.9.3)
slf4j-api-1.7.21.jar  (/org.slf4j/slf4j-api/1.7.21)
scala-reflect-2.11.8.jar  (/org.scala-lang/scala-reflect/2.11.8)
snappy-spark-tags_2.11-2.0.1-3.jar  (/io.snappydata/snappy-spark-tags_2.11/2.0.1-3)
log4j-1.2.17.jar  (/log4j/log4j/1.2.17)
junit-interface-0.11.jar  (/com.novocode/junit-interface/0.11)
scala-library-2.11.8.jar  (/org.scala-lang/scala-library/2.11.8)
snappy-spark-sql_2.11-2.0.1-3.jar  (/io.snappydata/snappy-spark-sql_2.11/2.0.1-3, test scope per the POM)
scalacheck_2.11-1.12.5.jar  (/org.scalacheck/scalacheck_2.11/1.12.5)
datanucleus-core-3.2.10.jar  (/org.datanucleus/datanucleus-core/3.2.10)
snappy-spark-sql_2.11-2.0.1-3.jar  (/io.snappydata/snappy-spark-sql_2.11/2.0.1-3, compile scope per the POM)
jsr305-3.0.1.jar  (/com.google.code.findbugs/jsr305/3.0.1)
derby-10.12.1.1.jar  (/org.apache.derby/derby/10.12.1.1)
calcite-core-1.2.0-incubating.jar  (/org.apache.calcite/calcite-core/1.2.0-incubating)
snappy-spark-core_2.11-2.0.1-3.jar  (/io.snappydata/snappy-spark-core_2.11/2.0.1-3, compile scope per the POM)
avro-mapred-1.7.7.jar  (/org.apache.avro/avro-mapred/1.7.7, hadoop2 classifier per the POM)
hive-metastore-1.2.1.spark2.jar  (/org.spark-project.hive/hive-metastore/1.2.1.spark2)
snappy-spark-core_2.11-2.0.1-3.jar  (/io.snappydata/snappy-spark-core_2.11/2.0.1-3, test scope per the POM)