I get the following error when running my Kafka consumer:
ERROR receiver.BlockGenerator: Error in block pushing thread
java.io.NotSerializableException: org.jnetpcap.packet.PcapPacket
at java.io.ObjectOutputStream.writeObject0(ObjectOutputStream.java:1183)
at java.io.ObjectOutputStream.defaultWriteFields(ObjectOutputStream.java:1547)
at java.io.ObjectOutputStream.writeSerialData(ObjectOutputStream.java:1508)
at java.io.ObjectOutputStream.writeOrdinaryObject(ObjectOutputStream.java:1431)
at java.io.ObjectOutputStream.writeObject0(ObjectOutputStream.java:1177)
at java.io.ObjectOutputStream.writeObject(ObjectOutputStream.java:347)
at org.apache.spark.serializer.JavaSerializationStream.writeObject(JavaSerializer.scala:42)
at org.apache.spark.serializer.SerializationStream$class.writeAll(Serializer.scala:102)
at org.apache.spark.serializer.JavaSerializationStream.writeAll(JavaSerializer.scala:30)
at org.apache.spark.storage.BlockManager.dataSerializeStream(BlockManager.scala:996)
at org.apache.spark.storage.BlockManager.dataSerialize(BlockManager.scala:1005)
at org.apache.spark.storage.MemoryStore.putValues(MemoryStore.scala:79)
at org.apache.spark.storage.BlockManager.doPut(BlockManager.scala:663)
at org.apache.spark.storage.BlockManager.put(BlockManager.scala:574)
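The trace shows the failure in the receiver's block pushing thread (BlockGenerator -> MemoryStore.putValues -> dataSerialize), i.e. while Spark Java-serializes the received messages into a storage block. For reference, a setup along these lines reproduces this kind of trace; PcapPacketDecoder, the topic name, and the Kafka parameters below are illustrative placeholders, not exact code:

import kafka.serializer.{Decoder, StringDecoder}
import org.apache.spark.SparkConf
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.kafka.KafkaUtils
import org.jnetpcap.packet.PcapPacket

// Placeholder decoder: turns each Kafka message into a PcapPacket.
class PcapPacketDecoder extends Decoder[PcapPacket] {
  def fromBytes(bytes: Array[Byte]): PcapPacket = ??? // build packet from raw bytes
}

val ssc = new StreamingContext(new SparkConf().setAppName("testpacket"), Seconds(2))

// Because the stream's value type is PcapPacket, the receiver stores
// PcapPacket objects, and the block pushing thread must Java-serialize
// them when writing blocks with a *_SER storage level -- which fails,
// since PcapPacket does not implement java.io.Serializable.
val packets = KafkaUtils.createStream[String, PcapPacket, StringDecoder, PcapPacketDecoder](
  ssc,
  Map("zookeeper.connect" -> "localhost:2181", "group.id" -> "pcap"),
  Map("pcap-topic" -> 1),
  StorageLevel.MEMORY_ONLY_SER)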
My build.sbt file:
name := "testpacket"
version := "1.0"
scalaVersion := "2.10.3"
libraryDependencies += "org.apache.spark" % "spark-core_2.10" % "1.0.2
libraryDependencies += "org.apache.spark" % "spark-streaming_2.10" % "1.0.2"
libraryDependencies += "org.apache.spark" % "spark-streaming-kafka_2.10" % "1.0.2"
libraryDependencies += "javax.servlet" % "javax.servlet-api" % "3.0.1"
resolvers += "Akka Repository" at "http://repo.akka.io/releases/"
What could be causing this error?
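In case it sharpens the question: my working theory is that PcapPacket simply is not java.io.Serializable, so it cannot survive any path that serializes data (a *_SER storage level, shuffling, or checkpointing). A minimal sketch of the workaround I am considering, using a hypothetical PacketData class (the field choice is illustrative), is to copy the needed data into a plain serializable value before it ever reaches Spark:

import org.jnetpcap.packet.PcapPacket

// Hypothetical serializable stand-in for PcapPacket; keep whichever
// fields the job actually needs.
case class PacketData(timestampMillis: Long, bytes: Array[Byte])

def toPacketData(p: PcapPacket): PacketData =
  PacketData(
    p.getCaptureHeader.timestampInMillis(), // capture timestamp
    p.getByteArray(0, p.size()))            // raw packet bytes

With this, only PacketData instances would reach the block manager, so the default JavaSerializer never sees a PcapPacket. Is that the right direction, or is there a cleaner fix?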