From 1ffa236252e6423b1cc30f45d8f2836cecedbaba Mon Sep 17 00:00:00 2001
From: Robert Kruszewski
Date: Fri, 12 May 2017 14:50:22 -0400
Subject: [PATCH 1/3] Bump artifact versions to latest

---
 .../spark/network/crypto/TransportCipher.java | 50 +++++++++++++----
 .../network/protocol/MessageWithHeader.java   | 37 ++++++++++---
 .../spark/network/sasl/SaslEncryption.java    | 39 +++++++++++---
 .../spark/network/server/TransportServer.java |  9 ++--
 .../apache/spark/network/ProtocolSuite.java   | 16 +++---
 .../protocol/MessageWithHeaderSuite.java      | 42 ++++++++++++---
 .../network/yarn/YarnShuffleService.java      |  1 -
 .../yarn/YarnShuffleServiceMetrics.java       |  3 +-
 .../org/apache/spark/storage/DiskStore.scala  | 22 ++++++--
 dev/deps/spark-deps-hadoop-palantir           | 54 +++++++++----------
 pom.xml                                       | 41 ++++++++++----
 project/SparkBuild.scala                      |  1 +
 .../spark/sql/catalyst/parser/SqlBase.g4      |  6 ++-
 .../sql/catalyst/parser/AstBuilder.scala      |  5 ++
 .../sql/catalyst/parser/ParseDriver.scala     |  2 +-
 .../SpecificParquetRecordReaderBase.java      |  3 +-
 16 files changed, 240 insertions(+), 91 deletions(-)

diff --git a/common/network-common/src/main/java/org/apache/spark/network/crypto/TransportCipher.java b/common/network-common/src/main/java/org/apache/spark/network/crypto/TransportCipher.java
index 7376d1ddc4818..82f8220a7ecaa 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/crypto/TransportCipher.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/crypto/TransportCipher.java
@@ -17,23 +17,26 @@
 package org.apache.spark.network.crypto;
 
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+import io.netty.buffer.ByteBuf;
+import io.netty.buffer.Unpooled;
+import io.netty.channel.Channel;
+import io.netty.channel.ChannelHandlerContext;
+import io.netty.channel.ChannelInboundHandlerAdapter;
+import io.netty.channel.ChannelOutboundHandlerAdapter;
+import io.netty.channel.ChannelPromise;
+import io.netty.channel.FileRegion;
+import io.netty.util.AbstractReferenceCounted;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.nio.channels.ReadableByteChannel;
 import java.nio.channels.WritableByteChannel;
 import java.util.Properties;
-import javax.crypto.spec.SecretKeySpec;
 import javax.crypto.spec.IvParameterSpec;
-
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Preconditions;
-import io.netty.buffer.ByteBuf;
-import io.netty.buffer.Unpooled;
-import io.netty.channel.*;
-import io.netty.util.AbstractReferenceCounted;
+import javax.crypto.spec.SecretKeySpec;
 import org.apache.commons.crypto.stream.CryptoInputStream;
 import org.apache.commons.crypto.stream.CryptoOutputStream;
-
 import org.apache.spark.network.util.ByteArrayReadableChannel;
 import org.apache.spark.network.util.ByteArrayWritableChannel;
@@ -203,6 +206,11 @@ public long transfered() {
     return transferred;
   }
 
+  @Override
+  public long transferred() {
+    return transferred;
+  }
+
   @Override
   public long transferTo(WritableByteChannel target, long position) throws IOException {
     Preconditions.checkArgument(position == transfered(), "Invalid position.");
@@ -232,7 +240,7 @@ private void encryptMore() throws IOException {
       int copied = byteRawChannel.write(buf.nioBuffer());
       buf.skipBytes(copied);
     } else {
-      region.transferTo(byteRawChannel, region.transfered());
+      region.transferTo(byteRawChannel, region.transferred());
     }
     cos.write(byteRawChannel.getData(), 0, byteRawChannel.length());
     cos.flush();
@@ -241,6 +249,28 @@ private void
encryptMore() throws IOException { 0, byteEncChannel.length()); } + @Override + public FileRegion retain() { + super.retain(); + return this; + } + + @Override + public FileRegion retain(int increment) { + super.retain(increment); + return this; + } + + @Override + public FileRegion touch() { + return this; + } + + @Override + public FileRegion touch(Object o) { + return this; + } + @Override protected void deallocate() { byteRawChannel.reset(); diff --git a/common/network-common/src/main/java/org/apache/spark/network/protocol/MessageWithHeader.java b/common/network-common/src/main/java/org/apache/spark/network/protocol/MessageWithHeader.java index 4f8781b42a0e4..7c509e5e9dbed 100644 --- a/common/network-common/src/main/java/org/apache/spark/network/protocol/MessageWithHeader.java +++ b/common/network-common/src/main/java/org/apache/spark/network/protocol/MessageWithHeader.java @@ -17,17 +17,15 @@ package org.apache.spark.network.protocol; -import java.io.IOException; -import java.nio.ByteBuffer; -import java.nio.channels.WritableByteChannel; -import javax.annotation.Nullable; - import com.google.common.base.Preconditions; import io.netty.buffer.ByteBuf; import io.netty.channel.FileRegion; import io.netty.util.AbstractReferenceCounted; import io.netty.util.ReferenceCountUtil; - +import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.channels.WritableByteChannel; +import javax.annotation.Nullable; import org.apache.spark.network.buffer.ManagedBuffer; /** @@ -95,6 +93,11 @@ public long transfered() { return totalBytesTransferred; } + @Override + public long transferred() { + return totalBytesTransferred; + } + /** * This code is more complicated than you would think because we might require multiple * transferTo invocations in order to transfer a single MessageWithHeader to avoid busy waiting. 
@@ -127,6 +130,28 @@ public long transferTo(final WritableByteChannel target, final long position) th return writtenHeader + writtenBody; } + @Override + public FileRegion touch(Object msg) { + return this; + } + + @Override + public FileRegion retain() { + super.retain(); + return this; + } + + @Override + public FileRegion retain(int increment) { + super.retain(increment); + return this; + } + + @Override + public FileRegion touch() { + return this; + } + @Override protected void deallocate() { header.release(); diff --git a/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslEncryption.java b/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslEncryption.java index 3d71ebaa7ea0c..681cc13025b26 100644 --- a/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslEncryption.java +++ b/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslEncryption.java @@ -17,11 +17,6 @@ package org.apache.spark.network.sasl; -import java.io.IOException; -import java.nio.ByteBuffer; -import java.nio.channels.WritableByteChannel; -import java.util.List; - import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import io.netty.buffer.ByteBuf; @@ -33,7 +28,10 @@ import io.netty.channel.FileRegion; import io.netty.handler.codec.MessageToMessageDecoder; import io.netty.util.AbstractReferenceCounted; - +import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.channels.WritableByteChannel; +import java.util.List; import org.apache.spark.network.util.ByteArrayWritableChannel; import org.apache.spark.network.util.NettyUtils; @@ -187,6 +185,11 @@ public long transfered() { return transferred; } + @Override + public long transferred() { + return transferred; + } + /** * Transfers data from the original message to the channel, encrypting it in the process. 
* @@ -262,7 +265,7 @@ private void nextChunk() throws IOException { int copied = byteChannel.write(buf.nioBuffer()); buf.skipBytes(copied); } else { - region.transferTo(byteChannel, region.transfered()); + region.transferTo(byteChannel, region.transferred()); } byte[] encrypted = backend.wrap(byteChannel.getData(), 0, byteChannel.length()); @@ -272,6 +275,28 @@ private void nextChunk() throws IOException { this.unencryptedChunkSize = byteChannel.length(); } + @Override + public FileRegion touch(Object o) { + return this; + } + + @Override + public FileRegion retain() { + super.retain(); + return this; + } + + @Override + public FileRegion retain(int increment) { + super.retain(increment); + return this; + } + + @Override + public FileRegion touch() { + return this; + } + @Override protected void deallocate() { if (currentHeader != null) { diff --git a/common/network-common/src/main/java/org/apache/spark/network/server/TransportServer.java b/common/network-common/src/main/java/org/apache/spark/network/server/TransportServer.java index 047c5f3f1f094..744e3bdb1b962 100644 --- a/common/network-common/src/main/java/org/apache/spark/network/server/TransportServer.java +++ b/common/network-common/src/main/java/org/apache/spark/network/server/TransportServer.java @@ -140,11 +140,12 @@ public void close() { channelFuture.channel().close().awaitUninterruptibly(10, TimeUnit.SECONDS); channelFuture = null; } - if (bootstrap != null && bootstrap.group() != null) { - bootstrap.group().shutdownGracefully(); + if (bootstrap != null && bootstrap.config() != null && bootstrap.config().group() != null) { + bootstrap.config().group().shutdownGracefully(); } - if (bootstrap != null && bootstrap.childGroup() != null) { - bootstrap.childGroup().shutdownGracefully(); + if (bootstrap != null && bootstrap.config() != null + && bootstrap.config().childGroup() != null) { + bootstrap.config().childGroup().shutdownGracefully(); } bootstrap = null; } diff --git a/common/network-common/src/test/java/org/apache/spark/network/ProtocolSuite.java b/common/network-common/src/test/java/org/apache/spark/network/ProtocolSuite.java index bb1c40c4b0e06..3fa785502aa20 100644 --- a/common/network-common/src/test/java/org/apache/spark/network/ProtocolSuite.java +++ b/common/network-common/src/test/java/org/apache/spark/network/ProtocolSuite.java @@ -17,7 +17,7 @@ package org.apache.spark.network; -import java.util.List; +import static org.junit.Assert.assertEquals; import com.google.common.primitives.Ints; import io.netty.buffer.Unpooled; @@ -25,10 +25,7 @@ import io.netty.channel.FileRegion; import io.netty.channel.embedded.EmbeddedChannel; import io.netty.handler.codec.MessageToMessageEncoder; -import org.junit.Test; - -import static org.junit.Assert.assertEquals; - +import java.util.List; import org.apache.spark.network.protocol.ChunkFetchFailure; import org.apache.spark.network.protocol.ChunkFetchRequest; import org.apache.spark.network.protocol.ChunkFetchSuccess; @@ -45,6 +42,7 @@ import org.apache.spark.network.protocol.StreamResponse; import org.apache.spark.network.util.ByteArrayWritableChannel; import org.apache.spark.network.util.NettyUtils; +import org.junit.Test; public class ProtocolSuite { private void testServerToClient(Message msg) { @@ -56,7 +54,7 @@ private void testServerToClient(Message msg) { NettyUtils.createFrameDecoder(), MessageDecoder.INSTANCE); while (!serverChannel.outboundMessages().isEmpty()) { - clientChannel.writeInbound(serverChannel.readOutbound()); + 
clientChannel.writeOneInbound(serverChannel.readOutbound()); } assertEquals(1, clientChannel.inboundMessages().size()); @@ -72,7 +70,7 @@ private void testClientToServer(Message msg) { NettyUtils.createFrameDecoder(), MessageDecoder.INSTANCE); while (!clientChannel.outboundMessages().isEmpty()) { - serverChannel.writeInbound(clientChannel.readOutbound()); + serverChannel.writeOneInbound(clientChannel.readOutbound()); } assertEquals(1, serverChannel.inboundMessages().size()); @@ -116,8 +114,8 @@ public void encode(ChannelHandlerContext ctx, FileRegion in, List out) throws Exception { ByteArrayWritableChannel channel = new ByteArrayWritableChannel(Ints.checkedCast(in.count())); - while (in.transfered() < in.count()) { - in.transferTo(channel, in.transfered()); + while (in.transferred() < in.count()) { + in.transferTo(channel, in.transferred()); } out.add(Unpooled.wrappedBuffer(channel.getData())); } diff --git a/common/network-common/src/test/java/org/apache/spark/network/protocol/MessageWithHeaderSuite.java b/common/network-common/src/test/java/org/apache/spark/network/protocol/MessageWithHeaderSuite.java index b341c5681e00c..dbff6e71efb9b 100644 --- a/common/network-common/src/test/java/org/apache/spark/network/protocol/MessageWithHeaderSuite.java +++ b/common/network-common/src/test/java/org/apache/spark/network/protocol/MessageWithHeaderSuite.java @@ -17,23 +17,22 @@ package org.apache.spark.network.protocol; -import java.io.IOException; -import java.nio.ByteBuffer; -import java.nio.channels.WritableByteChannel; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; import io.netty.buffer.ByteBuf; import io.netty.buffer.Unpooled; import io.netty.channel.FileRegion; import io.netty.util.AbstractReferenceCounted; -import org.junit.Test; -import org.mockito.Mockito; - -import static org.junit.Assert.*; - +import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.channels.WritableByteChannel; import org.apache.spark.network.TestManagedBuffer; import org.apache.spark.network.buffer.ManagedBuffer; import org.apache.spark.network.buffer.NettyManagedBuffer; import org.apache.spark.network.util.ByteArrayWritableChannel; +import org.junit.Test; +import org.mockito.Mockito; public class MessageWithHeaderSuite { @@ -134,6 +133,11 @@ public long transfered() { return 8 * written; } + @Override + public long transferred() { + return 8 * written; + } + @Override public long transferTo(WritableByteChannel target, long position) throws IOException { for (int i = 0; i < writesPerCall; i++) { @@ -148,6 +152,28 @@ public long transferTo(WritableByteChannel target, long position) throws IOExcep return 8 * writesPerCall; } + @Override + public FileRegion retain() { + super.retain(); + return this; + } + + @Override + public FileRegion retain(int increment) { + super.retain(increment); + return this; + } + + @Override + public FileRegion touch(Object o) { + return this; + } + + @Override + public FileRegion touch() { + return this; + } + @Override protected void deallocate() { } diff --git a/common/network-yarn/src/main/java/org/apache/spark/network/yarn/YarnShuffleService.java b/common/network-yarn/src/main/java/org/apache/spark/network/yarn/YarnShuffleService.java index cfbf6a8a9a61b..39cc7336051bc 100644 --- a/common/network-yarn/src/main/java/org/apache/spark/network/yarn/YarnShuffleService.java +++ b/common/network-yarn/src/main/java/org/apache/spark/network/yarn/YarnShuffleService.java @@ -22,7 +22,6 @@ import java.lang.reflect.Method; import 
java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; -import java.nio.file.Files; import java.util.List; import java.util.Map; diff --git a/common/network-yarn/src/main/java/org/apache/spark/network/yarn/YarnShuffleServiceMetrics.java b/common/network-yarn/src/main/java/org/apache/spark/network/yarn/YarnShuffleServiceMetrics.java index 86cb07ae711ac..d2e18531bf480 100644 --- a/common/network-yarn/src/main/java/org/apache/spark/network/yarn/YarnShuffleServiceMetrics.java +++ b/common/network-yarn/src/main/java/org/apache/spark/network/yarn/YarnShuffleServiceMetrics.java @@ -53,7 +53,8 @@ public void getMetrics(MetricsCollector collector, boolean all) { } @VisibleForTesting - public static void collectMetric(MetricsRecordBuilder metricsRecordBuilder, String name, Metric metric) { + public static void collectMetric( + MetricsRecordBuilder metricsRecordBuilder, String name, Metric metric) { // The metric types used in ExternalShuffleBlockHandler.ShuffleMetrics if (metric instanceof Timer) { diff --git a/core/src/main/scala/org/apache/spark/storage/DiskStore.scala b/core/src/main/scala/org/apache/spark/storage/DiskStore.scala index c6656341fcd15..322b2a97c0704 100644 --- a/core/src/main/scala/org/apache/spark/storage/DiskStore.scala +++ b/core/src/main/scala/org/apache/spark/storage/DiskStore.scala @@ -21,21 +21,19 @@ import java.io._ import java.nio.ByteBuffer import java.nio.channels.{Channels, ReadableByteChannel, WritableByteChannel} import java.nio.channels.FileChannel.MapMode -import java.nio.charset.StandardCharsets.UTF_8 import java.util.concurrent.ConcurrentHashMap import scala.collection.mutable.ListBuffer -import com.google.common.io.{ByteStreams, Closeables, Files} +import com.google.common.io.Closeables import io.netty.channel.FileRegion import io.netty.util.AbstractReferenceCounted import org.apache.spark.{SecurityManager, SparkConf} import org.apache.spark.internal.Logging -import org.apache.spark.network.buffer.ManagedBuffer import org.apache.spark.network.util.JavaUtils import org.apache.spark.security.CryptoStreamUtils -import org.apache.spark.util.{ByteBufferInputStream, Utils} +import org.apache.spark.util.Utils import org.apache.spark.util.io.ChunkedByteBuffer /** @@ -266,6 +264,22 @@ private class ReadableChannelFileRegion(source: ReadableByteChannel, blockSize: } override def deallocate(): Unit = source.close() + + override def transferred(): Long = _transferred + + override def touch(o: scala.Any): FileRegion = this + + override def retain(): FileRegion = { + super.retain() + this + } + + override def retain(increment: Int): FileRegion = { + super.retain(increment) + this + } + + override def touch(): FileRegion = this } private class CountingWritableChannel(sink: WritableByteChannel) extends WritableByteChannel { diff --git a/dev/deps/spark-deps-hadoop-palantir b/dev/deps/spark-deps-hadoop-palantir index 33ae78ff3bee5..dc05fb8fb9133 100644 --- a/dev/deps/spark-deps-hadoop-palantir +++ b/dev/deps/spark-deps-hadoop-palantir @@ -1,13 +1,13 @@ JavaEWAH-0.3.2.jar -RoaringBitmap-0.5.11.jar +RoaringBitmap-0.6.43.jar ST4-4.0.4.jar activation-1.1.1.jar animal-sniffer-annotation-1.0.jar antlr-2.7.7.jar antlr-runtime-3.4.jar -antlr4-runtime-4.5.3.jar +antlr4-runtime-4.7.jar aopalliance-1.0.jar -aopalliance-repackaged-2.4.0-b34.jar +aopalliance-repackaged-2.5.0-b32.jar apache-log4j-extras-1.2.17.jar apacheds-i18n-2.0.0-M15.jar apacheds-kerberos-codec-2.0.0-M15.jar @@ -89,12 +89,12 @@ hadoop-yarn-client-2.8.0-palantir3.jar hadoop-yarn-common-2.8.0-palantir3.jar 
hadoop-yarn-server-common-2.8.0-palantir3.jar hadoop-yarn-server-web-proxy-2.8.0-palantir3.jar -hk2-api-2.4.0-b34.jar -hk2-locator-2.4.0-b34.jar -hk2-utils-2.4.0-b34.jar +hk2-api-2.5.0-b32.jar +hk2-locator-2.5.0-b32.jar +hk2-utils-2.5.0-b32.jar htrace-core4-4.0.1-incubating.jar -httpclient-4.5.2.jar -httpcore-4.4.4.jar +httpclient-4.5.3.jar +httpcore-4.4.6.jar ion-java-1.0.1.jar ivy-2.4.0.jar jackson-annotations-2.6.5.jar @@ -113,24 +113,24 @@ jackson-module-scala_2.11-2.6.5.jar jackson-xc-1.9.13.jar janino-3.0.0.jar java-xmlbuilder-1.0.jar -javassist-3.18.1-GA.jar +javassist-3.20.0-GA.jar javax.annotation-api-1.2.jar javax.inject-1.jar -javax.inject-2.4.0-b34.jar +javax.inject-2.5.0-b32.jar javax.servlet-api-3.1.0.jar javax.ws.rs-api-2.0.1.jar javolution-5.5.1.jar jaxb-api-2.2.2.jar jcip-annotations-1.0.jar -jcl-over-slf4j-1.7.16.jar +jcl-over-slf4j-1.7.25.jar jdo-api-3.0.1.jar -jersey-client-2.22.2.jar -jersey-common-2.22.2.jar -jersey-container-servlet-2.22.2.jar -jersey-container-servlet-core-2.22.2.jar -jersey-guava-2.22.2.jar -jersey-media-jaxb-2.22.2.jar -jersey-server-2.22.2.jar +jersey-client-2.25.1.jar +jersey-common-2.25.1.jar +jersey-container-servlet-2.25.1.jar +jersey-container-servlet-core-2.25.1.jar +jersey-guava-2.25.1.jar +jersey-media-jaxb-2.25.1.jar +jersey-server-2.25.1.jar jets3t-0.9.3.jar jetty-6.1.26.jar jetty-sslengine-6.1.26.jar @@ -144,10 +144,10 @@ json4s-ast_2.11-3.2.11.jar json4s-core_2.11-3.2.11.jar json4s-jackson_2.11-3.2.11.jar jsp-api-2.1.jar -jsr305-1.3.9.jar +jsr305-3.0.1.jar jta-1.1.jar jtransforms-2.4.0.jar -jul-to-slf4j-1.7.16.jar +jul-to-slf4j-1.7.25.jar kryo-shaded-3.0.3.jar kubernetes-client-2.2.13.jar kubernetes-model-1.0.67.jar @@ -166,13 +166,13 @@ metrics-json-3.1.2.jar metrics-jvm-3.1.2.jar minlog-1.3.0.jar mx4j-3.0.2.jar -netty-3.9.9.Final.jar -netty-all-4.0.43.Final.jar +netty-3.10.6.Final.jar +netty-all-4.1.9.Final.jar nimbus-jose-jwt-3.9.jar objenesis-2.1.jar -okhttp-2.4.0.jar -okhttp-3.6.0.jar -okio-1.11.0.jar +okhttp-2.7.5.jar +okhttp-3.7.0.jar +okio-1.12.0.jar opencsv-2.3.jar oro-2.0.8.jar osgi-resource-locator-1.0.1.jar @@ -196,8 +196,8 @@ scala-reflect-2.11.8.jar scala-xml_2.11-1.0.2.jar scalap-2.11.8.jar shapeless_2.11-2.3.2.jar -slf4j-api-1.7.16.jar -slf4j-log4j12-1.7.16.jar +slf4j-api-1.7.25.jar +slf4j-log4j12-1.7.25.jar snakeyaml-1.15.jar snappy-0.2.jar snappy-java-1.1.2.6.jar diff --git a/pom.xml b/pom.xml index c0547d3695779..55e3acce98de9 100644 --- a/pom.xml +++ b/pom.xml @@ -116,9 +116,9 @@ UTF-8 UTF-8 1.8 - 3.3.9 + 3.5.0 spark - 1.7.16 + 1.7.25 1.2.17 2.6.5 2.5.0 @@ -134,8 +134,11 @@ 10.12.1.1 1.9.1-palantir1 8.18.0 + 3.7.0 + 2.7.5 + 1.12.0 1.54 - 9.3.11.v20160721 + 9.4.5.v20170502 3.1.0 0.8.0 2.4.0 @@ -150,8 +153,8 @@ 0.10.2 - 4.5.2 - 4.4.4 + 4.5.3 + 4.4.6 3.1 3.4.1 @@ -172,12 +175,12 @@ 3.5 3.2.10 3.0.0 - 2.22.2 + 2.25.1 2.9.9 3.5.2 - 1.3.9 + 3.0.1 0.9.3 - 4.5.3 + 4.7 1.1 2.52.0 2.6 @@ -325,6 +328,22 @@ ${chill.version} + + com.squareup.okio + okio + ${okio.version} + + + com.squareup.okhttp3 + okhttp + ${okhttp3.version} + + + com.squareup.okhttp + okhttp + ${okhttp.version} + + com.netflix.feign feign-core @@ -594,7 +613,7 @@ org.roaringbitmap RoaringBitmap - 0.5.11 + 0.6.43 commons-net @@ -604,12 +623,12 @@ io.netty netty-all - 4.0.43.Final + 4.1.9.Final io.netty netty - 3.9.9.Final + 3.10.6.Final org.apache.derby diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala index 057cdf98737ed..04ec67a4f448a 100644 --- a/project/SparkBuild.scala +++ b/project/SparkBuild.scala @@ -491,6 +491,7 @@ object OldDeps { 
 
 object Catalyst {
   lazy val settings = antlr4Settings ++ Seq(
+    antlr4Version in Antlr4 := "4.7",
     antlr4PackageName in Antlr4 := Some("org.apache.spark.sql.catalyst.parser"),
     antlr4GenListener in Antlr4 := true,
     antlr4GenVisitor in Antlr4 := true
diff --git a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
index ed5450b494ccd..a2ccd995bec57 100644
--- a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
+++ b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
@@ -64,6 +64,10 @@ singleDataType
     : dataType EOF
     ;
 
+standaloneColTypeList
+    : colTypeList EOF
+    ;
+
 statement
     : query                                                            #statementDefault
     | USE db=identifier                                                #use
@@ -968,7 +972,7 @@ CURRENT_TIMESTAMP: 'CURRENT_TIMESTAMP';
 
 STRING
     : '\'' ( ~('\''|'\\') | ('\\' .) )* '\''
-    | '\"' ( ~('\"'|'\\') | ('\\' .) )* '\"'
+    | '"' ( ~('"'|'\\') | ('\\' .) )* '"'
     ;
 
 BIGINT_LITERAL
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
index 046ea65d454a1..3e9ad1367df5e 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
@@ -86,6 +86,11 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with Logging {
     visitSparkDataType(ctx.dataType)
   }
 
+  override def visitStandaloneColTypeList(ctx: StandaloneColTypeListContext): Seq[StructField] =
+    withOrigin(ctx) {
+      visitColTypeList(ctx.colTypeList)
+    }
+
   /* ********************************************************************************************
    * Plan parsing
    * ******************************************************************************************** */
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/ParseDriver.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/ParseDriver.scala
index dcccbd0ed8d6b..71b199bdf9219 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/ParseDriver.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/ParseDriver.scala
@@ -60,7 +60,7 @@ abstract class AbstractSqlParser extends ParserInterface with Logging {
    * definitions which will preserve the correct Hive metadata.
    */
   override def parseTableSchema(sqlText: String): StructType = parse(sqlText) { parser =>
-    StructType(astBuilder.visitColTypeList(parser.colTypeList()))
+    StructType(astBuilder.visitStandaloneColTypeList(parser.standaloneColTypeList()))
   }
 
   /** Creates LogicalPlan for a given SQL string.
*/ diff --git a/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/SpecificParquetRecordReaderBase.java b/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/SpecificParquetRecordReaderBase.java index 9851f8b62bd2f..2d4b72aea8414 100644 --- a/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/SpecificParquetRecordReaderBase.java +++ b/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/SpecificParquetRecordReaderBase.java @@ -154,7 +154,8 @@ public void initialize(InputSplit inputSplit, TaskAttemptContext taskAttemptCont ReadSupport.ReadContext readContext = readSupport.init(new InitContext( taskAttemptContext.getConfiguration(), toSetMultiMap(fileMetadata), fileSchema)); this.requestedSchema = readContext.getRequestedSchema(); - this.reader = ParquetFileReader.open(configuration, file, new ParquetMetadata(footer.getFileMetaData(), blocks)); + this.reader = ParquetFileReader.open( + configuration, file, new ParquetMetadata(footer.getFileMetaData(), blocks)); this.reader.setRequestedSchema(requestedSchema); String sparkRequestedSchemaString = configuration.get(ParquetReadSupport$.MODULE$.SPARK_ROW_REQUESTED_SCHEMA()); From f03a92b38cde4e8a8086b8d70d1f500b78a045ea Mon Sep 17 00:00:00 2001 From: Robert Kruszewski Date: Sun, 14 May 2017 00:40:46 -0400 Subject: [PATCH 2/3] revert import changes --- .../spark/network/crypto/TransportCipher.java | 21 ++++++++----------- .../network/protocol/MessageWithHeader.java | 10 +++++---- .../spark/network/sasl/SaslEncryption.java | 10 +++++---- .../apache/spark/network/ProtocolSuite.java | 8 ++++--- .../protocol/MessageWithHeaderSuite.java | 15 ++++++------- .../org/apache/spark/storage/DiskStore.scala | 6 ++++-- 6 files changed, 38 insertions(+), 32 deletions(-) diff --git a/common/network-common/src/main/java/org/apache/spark/network/crypto/TransportCipher.java b/common/network-common/src/main/java/org/apache/spark/network/crypto/TransportCipher.java index 82f8220a7ecaa..06a8e1740d6ad 100644 --- a/common/network-common/src/main/java/org/apache/spark/network/crypto/TransportCipher.java +++ b/common/network-common/src/main/java/org/apache/spark/network/crypto/TransportCipher.java @@ -17,26 +17,23 @@ package org.apache.spark.network.crypto; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Preconditions; -import io.netty.buffer.ByteBuf; -import io.netty.buffer.Unpooled; -import io.netty.channel.Channel; -import io.netty.channel.ChannelHandlerContext; -import io.netty.channel.ChannelInboundHandlerAdapter; -import io.netty.channel.ChannelOutboundHandlerAdapter; -import io.netty.channel.ChannelPromise; -import io.netty.channel.FileRegion; -import io.netty.util.AbstractReferenceCounted; import java.io.IOException; import java.nio.ByteBuffer; import java.nio.channels.ReadableByteChannel; import java.nio.channels.WritableByteChannel; import java.util.Properties; -import javax.crypto.spec.IvParameterSpec; import javax.crypto.spec.SecretKeySpec; +import javax.crypto.spec.IvParameterSpec; + +import com.google.common.annotations.VisibleForTesting; +import com.google.common.base.Preconditions; +import io.netty.buffer.ByteBuf; +import io.netty.buffer.Unpooled; +import io.netty.channel.*; +import io.netty.util.AbstractReferenceCounted; import org.apache.commons.crypto.stream.CryptoInputStream; import org.apache.commons.crypto.stream.CryptoOutputStream; + import org.apache.spark.network.util.ByteArrayReadableChannel; import 
org.apache.spark.network.util.ByteArrayWritableChannel; diff --git a/common/network-common/src/main/java/org/apache/spark/network/protocol/MessageWithHeader.java b/common/network-common/src/main/java/org/apache/spark/network/protocol/MessageWithHeader.java index 7c509e5e9dbed..7e76a07cac93e 100644 --- a/common/network-common/src/main/java/org/apache/spark/network/protocol/MessageWithHeader.java +++ b/common/network-common/src/main/java/org/apache/spark/network/protocol/MessageWithHeader.java @@ -17,15 +17,17 @@ package org.apache.spark.network.protocol; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.channels.WritableByteChannel; +import javax.annotation.Nullable; + import com.google.common.base.Preconditions; import io.netty.buffer.ByteBuf; import io.netty.channel.FileRegion; import io.netty.util.AbstractReferenceCounted; import io.netty.util.ReferenceCountUtil; -import java.io.IOException; -import java.nio.ByteBuffer; -import java.nio.channels.WritableByteChannel; -import javax.annotation.Nullable; + import org.apache.spark.network.buffer.ManagedBuffer; /** diff --git a/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslEncryption.java b/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslEncryption.java index 681cc13025b26..ec3829e88232f 100644 --- a/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslEncryption.java +++ b/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslEncryption.java @@ -17,6 +17,11 @@ package org.apache.spark.network.sasl; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.channels.WritableByteChannel; +import java.util.List; + import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import io.netty.buffer.ByteBuf; @@ -28,10 +33,7 @@ import io.netty.channel.FileRegion; import io.netty.handler.codec.MessageToMessageDecoder; import io.netty.util.AbstractReferenceCounted; -import java.io.IOException; -import java.nio.ByteBuffer; -import java.nio.channels.WritableByteChannel; -import java.util.List; + import org.apache.spark.network.util.ByteArrayWritableChannel; import org.apache.spark.network.util.NettyUtils; diff --git a/common/network-common/src/test/java/org/apache/spark/network/ProtocolSuite.java b/common/network-common/src/test/java/org/apache/spark/network/ProtocolSuite.java index 3fa785502aa20..6fb44fea8c5a4 100644 --- a/common/network-common/src/test/java/org/apache/spark/network/ProtocolSuite.java +++ b/common/network-common/src/test/java/org/apache/spark/network/ProtocolSuite.java @@ -17,7 +17,7 @@ package org.apache.spark.network; -import static org.junit.Assert.assertEquals; +import java.util.List; import com.google.common.primitives.Ints; import io.netty.buffer.Unpooled; @@ -25,7 +25,10 @@ import io.netty.channel.FileRegion; import io.netty.channel.embedded.EmbeddedChannel; import io.netty.handler.codec.MessageToMessageEncoder; -import java.util.List; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; + import org.apache.spark.network.protocol.ChunkFetchFailure; import org.apache.spark.network.protocol.ChunkFetchRequest; import org.apache.spark.network.protocol.ChunkFetchSuccess; @@ -42,7 +45,6 @@ import org.apache.spark.network.protocol.StreamResponse; import org.apache.spark.network.util.ByteArrayWritableChannel; import org.apache.spark.network.util.NettyUtils; -import org.junit.Test; public class ProtocolSuite { private void testServerToClient(Message msg) { diff 
--git a/common/network-common/src/test/java/org/apache/spark/network/protocol/MessageWithHeaderSuite.java b/common/network-common/src/test/java/org/apache/spark/network/protocol/MessageWithHeaderSuite.java index dbff6e71efb9b..04b9ab62f55b6 100644 --- a/common/network-common/src/test/java/org/apache/spark/network/protocol/MessageWithHeaderSuite.java +++ b/common/network-common/src/test/java/org/apache/spark/network/protocol/MessageWithHeaderSuite.java @@ -17,22 +17,23 @@ package org.apache.spark.network.protocol; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.channels.WritableByteChannel; import io.netty.buffer.ByteBuf; import io.netty.buffer.Unpooled; import io.netty.channel.FileRegion; import io.netty.util.AbstractReferenceCounted; -import java.io.IOException; -import java.nio.ByteBuffer; -import java.nio.channels.WritableByteChannel; +import org.junit.Test; +import org.mockito.Mockito; + +import static org.junit.Assert.*; + import org.apache.spark.network.TestManagedBuffer; import org.apache.spark.network.buffer.ManagedBuffer; import org.apache.spark.network.buffer.NettyManagedBuffer; import org.apache.spark.network.util.ByteArrayWritableChannel; -import org.junit.Test; -import org.mockito.Mockito; public class MessageWithHeaderSuite { diff --git a/core/src/main/scala/org/apache/spark/storage/DiskStore.scala b/core/src/main/scala/org/apache/spark/storage/DiskStore.scala index 322b2a97c0704..160d1d62f9b64 100644 --- a/core/src/main/scala/org/apache/spark/storage/DiskStore.scala +++ b/core/src/main/scala/org/apache/spark/storage/DiskStore.scala @@ -21,19 +21,21 @@ import java.io._ import java.nio.ByteBuffer import java.nio.channels.{Channels, ReadableByteChannel, WritableByteChannel} import java.nio.channels.FileChannel.MapMode +import java.nio.charset.StandardCharsets.UTF_8 import java.util.concurrent.ConcurrentHashMap import scala.collection.mutable.ListBuffer -import com.google.common.io.Closeables +import com.google.common.io.{ByteStreams, Closeables, Files} import io.netty.channel.FileRegion import io.netty.util.AbstractReferenceCounted import org.apache.spark.{SecurityManager, SparkConf} import org.apache.spark.internal.Logging +import org.apache.spark.network.buffer.ManagedBuffer import org.apache.spark.network.util.JavaUtils import org.apache.spark.security.CryptoStreamUtils -import org.apache.spark.util.Utils +import org.apache.spark.util.{ByteBufferInputStream, Utils} import org.apache.spark.util.io.ChunkedByteBuffer /** From f53ec7a06ce59028117f149bfee0863df40b9ec5 Mon Sep 17 00:00:00 2001 From: Robert Kruszewski Date: Sun, 14 May 2017 13:48:00 -0400 Subject: [PATCH 3/3] fix exception messages --- .../apache/spark/sql/catalyst/parser/PlanParserSuite.scala | 2 +- .../src/test/resources/sql-tests/results/show-tables.sql.out | 4 ++-- .../test/scala/org/apache/spark/sql/jdbc/JDBCWriteSuite.scala | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/PlanParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/PlanParserSuite.scala index cf137cfdf96e4..4d26b9d773483 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/PlanParserSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/PlanParserSuite.scala @@ -507,7 +507,7 @@ class PlanParserSuite extends PlanTest { val m = intercept[ParseException] { 
parsePlan("SELECT /*+ HINT() */ * FROM t") }.getMessage - assert(m.contains("no viable alternative at input")) + assert(m.contains("mismatched input ')' expecting")) // Hive compatibility: No database. val m2 = intercept[ParseException] { diff --git a/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out b/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out index 8f2a54f7c24e2..992f39cb5fd85 100644 --- a/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out @@ -161,7 +161,7 @@ struct<> -- !query 13 output org.apache.spark.sql.catalyst.parser.ParseException -mismatched input '' expecting 'LIKE'(line 1, pos 19) +mismatched input '' expecting {'FROM', 'IN', 'LIKE'}(line 1, pos 19) == SQL == SHOW TABLE EXTENDED @@ -184,7 +184,7 @@ struct<> -- !query 15 output org.apache.spark.sql.catalyst.parser.ParseException -mismatched input 'PARTITION' expecting 'LIKE'(line 1, pos 20) +mismatched input 'PARTITION' expecting {'FROM', 'IN', 'LIKE'}(line 1, pos 20) == SQL == SHOW TABLE EXTENDED PARTITION(c='Us', d=1) diff --git a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCWriteSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCWriteSuite.scala index bf1fd160704fa..b317bc5b52641 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCWriteSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCWriteSuite.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql.jdbc -import java.sql.{Date, DriverManager, Timestamp} +import java.sql.DriverManager import java.util.Properties import scala.collection.JavaConverters.propertiesAsScalaMapConverter @@ -466,7 +466,7 @@ class JDBCWriteSuite extends SharedSQLContext with BeforeAndAfter { .option("createTableColumnTypes", "`name char(20)") // incorrectly quoted column .jdbc(url1, "TEST.USERDBTYPETEST", properties) }.getMessage() - assert(msg.contains("no viable alternative at input")) + assert(msg.contains("extraneous input '`' expecting")) } test("SPARK-10849: jdbc CreateTableColumnTypes duplicate columns") {