| 组织ID: |
org.spark-project.hive |
| 项目ID: |
hive-contrib |
| 版本: |
0.12.0-protobuf |
| 最后修改时间: |
2019-10-25 04:59:51 |
| 包类型: |
jar |
| 标题: |
Apache Hive 0.12 |
| 描述: |
Hive is a data warehouse infrastructure built on top of Hadoop; see
http://wiki.apache.org/hadoop/Hive |
| 相关URL: |
http://hive.apache.org |
| 大小: |
110.16KB |
|
|
| Maven引入代码: |
<dependency>
<groupId>org.spark-project.hive</groupId>
<artifactId>hive-contrib</artifactId>
<version>0.12.0-protobuf</version>
</dependency>
|
| Gradle引入代码: |
org.spark-project.hive:hive-contrib:0.12.0-protobuf
|
| 下载Jar包: |
|
| POM文件内容: |
<?xml version="1.0" encoding="UTF-8"?>
<!--
Apache Maven 2 POM generated by Apache Ivy
http://ant.apache.org/ivy/
Apache Ivy version: 2.3.0 20130110142753
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.spark-project.hive</groupId>
<artifactId>hive-contrib</artifactId>
<packaging>jar</packaging>
<version>0.12.0-protobuf</version>
<url>http://hive.apache.org</url>
<name>Apache Hive 0.12</name>
<description>Hive is a data warehouse infrastructure built on top of Hadoop see
http://wiki.apache.org/hadoop/Hive </description>
<developers>
<developer>
<id>pwendell</id>
<name>Patrick Wendell</name>
<url>pwendell.com</url>
</developer>
</developers>
<licenses>
<license>
<name>The Apache Software License, Version 2.0</name>
<url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
<distribution>repo</distribution>
</license>
</licenses>
<scm>
<url>http://svn.apache.org/repos/asf/hive/</url>
</scm>
<dependencies>
<dependency>
<groupId>org.spark-project.hive</groupId>
<artifactId>hive-exec</artifactId>
<version>0.12.0-protobuf</version>
<scope>compile</scope>
</dependency>
</dependencies>
</project>
|
| Jar包内容: |
META-INF/MANIFEST.MF
org.apache.hadoop.hive.contrib.fileformat.base64.Base64TextInputFormat$Base64LineRecordReader.class
org.apache.hadoop.hive.contrib.fileformat.base64.Base64TextInputFormat.class
org.apache.hadoop.hive.contrib.fileformat.base64.Base64TextOutputFormat$Base64RecordWriter.class
org.apache.hadoop.hive.contrib.fileformat.base64.Base64TextOutputFormat.class
org.apache.hadoop.hive.contrib.genericudf.example.GenericUDFDBOutput.class
org.apache.hadoop.hive.contrib.metastore.hooks.TestURLHook.class
org.apache.hadoop.hive.contrib.mr.GenericMR$1.class
org.apache.hadoop.hive.contrib.mr.GenericMR$2.class
org.apache.hadoop.hive.contrib.mr.GenericMR$KeyRecordIterator.class
org.apache.hadoop.hive.contrib.mr.GenericMR$OutputStreamOutput.class
org.apache.hadoop.hive.contrib.mr.GenericMR$RecordProcessor.class
org.apache.hadoop.hive.contrib.mr.GenericMR$RecordReader.class
org.apache.hadoop.hive.contrib.mr.GenericMR.class
org.apache.hadoop.hive.contrib.mr.Mapper.class
org.apache.hadoop.hive.contrib.mr.Output.class
org.apache.hadoop.hive.contrib.mr.Reducer.class
org.apache.hadoop.hive.contrib.mr.example.IdentityMapper$1.class
org.apache.hadoop.hive.contrib.mr.example.IdentityMapper.class
org.apache.hadoop.hive.contrib.mr.example.WordCountReduce$1.class
org.apache.hadoop.hive.contrib.mr.example.WordCountReduce.class
org.apache.hadoop.hive.contrib.serde2.RegexSerDe.class
org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe$1.class
org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe.class
org.apache.hadoop.hive.contrib.serde2.s3.S3LogDeserializer.class
org.apache.hadoop.hive.contrib.serde2.s3.S3LogStruct.class
org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleAvg$UDAFAvgState.class
org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleAvg$UDAFExampleAvgEvaluator.class
org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleAvg.class
org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleGroupConcat$UDAFExampleGroupConcatEvaluator.class
org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleGroupConcat.class
org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleMax$MaxDoubleEvaluator.class
org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleMax$MaxFloatEvaluator.class
org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleMax$MaxIntEvaluator.class
org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleMax$MaxLongEvaluator.class
org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleMax$MaxShortEvaluator.class
org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleMax$MaxStringEvaluator.class
org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleMax.class
org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleMaxMinNUtil$1.class
org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleMaxMinNUtil$2.class
org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleMaxMinNUtil$Evaluator.class
org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleMaxMinNUtil$State.class
org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleMaxMinNUtil.class
org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleMaxN$UDAFMaxNEvaluator.class
org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleMaxN.class
org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleMin$MinDoubleEvaluator.class
org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleMin$MinFloatEvaluator.class
org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleMin$MinIntEvaluator.class
org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleMin$MinLongEvaluator.class
org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleMin$MinShortEvaluator.class
org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleMin$MinStringEvaluator.class
org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleMin.class
org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleMinN$UDAFMinNEvaluator.class
org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleMinN.class
org.apache.hadoop.hive.contrib.udf.UDFRowSequence.class
org.apache.hadoop.hive.contrib.udf.example.UDFExampleAdd.class
org.apache.hadoop.hive.contrib.udf.example.UDFExampleArraySum.class
org.apache.hadoop.hive.contrib.udf.example.UDFExampleFormat.class
org.apache.hadoop.hive.contrib.udf.example.UDFExampleMapConcat.class
org.apache.hadoop.hive.contrib.udf.example.UDFExampleStructPrint.class
org.apache.hadoop.hive.contrib.udtf.example.GenericUDTFCount2.class
org.apache.hadoop.hive.contrib.udtf.example.GenericUDTFExplode2.class
org.apache.hadoop.hive.contrib.util.typedbytes.Type.class
org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesInput$1.class
org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesInput.class
org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesOutput$1.class
org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesOutput.class
#内容未全部加载,请点击展开加载全部代码(NowJava.com)
|
| 依赖Jar: |
hive-exec-0.12.0-protobuf.jar
/org.spark-project.hive/hive-exec/0.12.0-protobuf
查看hive-exec所有版本文件
|