2. Configuration in spark-defaults.conf on every Spark node in the cluster (with Phoenix 4.7 and later, use phoenix-4.7.0-HBase-1.1-client-spark.jar; with earlier versions, use phoenix-4.7.0-HBase-1.1-client.jar).
Check that the two jars below were deployed correctly and that their file sizes are what they should be.

spark.driver.extraClassPath /spark/phoenix-client/lib/phoenix-4.7.0-HBase-1.1-client-spark.jar:/spark/phoenix-client/lib/libthrift-0.9.0.jar

spark.executor.extraClassPath /spark/phoenix-client/lib/phoenix-4.7.0-HBase-1.1-client-spark.jar:/spark/phoenix-client/lib/libthrift-0.9.0.jar
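
With these two jars on the classpath of every node, a Phoenix table can be read through the phoenix-spark data source. A minimal Spark 1.x sketch for reference (the object name, table name, and ZooKeeper quorum are placeholders, not taken from the original job):

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

object PhoenixReadExample {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(new SparkConf().setAppName("PhoenixReadExample"))
    val sqlContext = new SQLContext(sc)

    // "table" and "zkUrl" values are placeholders -- point them at your own
    // Phoenix table and HBase ZooKeeper quorum.
    val df = sqlContext.read
      .format("org.apache.phoenix.spark")
      .options(Map("table" -> "MY_TABLE", "zkUrl" -> "zk1,zk2,zk3:2181"))
      .load()

    df.show(10)
    sc.stop()
  }
}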

The error below appears when phoenix-4.7.0-HBase-1.1-client-spark.jar is only 70 KB, i.e. a wrong or truncated jar was deployed:

Exception in thread "main" java.sql.SQLException: java.lang.IllegalAccessError: class com.google.protobuf.HBaseZeroCopyByteString cannot access its superclass com.google.protobuf.LiteralByteString
at org.apache.phoenix.query.ConnectionQueryServicesImpl.metaDataCoprocessorExec(ConnectionQueryServicesImpl.java:1215)
at org.apache.phoenix.query.ConnectionQueryServicesImpl.metaDataCoprocessorExec(ConnectionQueryServicesImpl.java:1176)
at org.apache.phoenix.query.ConnectionQueryServicesImpl.getTable(ConnectionQueryServicesImpl.java:1434)
at org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:491)
at org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:414)
at org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:406)
at org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:402)
at org.apache.phoenix.util.PhoenixRuntime.getTable(PhoenixRuntime.java:383)
at org.apache.phoenix.util.PhoenixRuntime.generateColumnInfo(PhoenixRuntime.java:405)
at org.apache.phoenix.mapreduce.util.PhoenixConfigurationUtil.getSelectColumnMetadataList(PhoenixConfigurationUtil.java:279)
at org.apache.phoenix.spark.PhoenixRDD.toDataFrame(PhoenixRDD.scala:105)
at org.apache.phoenix.spark.PhoenixRelation.schema(PhoenixRelation.scala:57)
at org.apache.spark.sql.execution.datasources.LogicalRelation.<init>(LogicalRelation.scala:37)
at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:125)
at com.donews.data.hbaseuser.UserOverview$.main(UserOverview.scala:61)
at com.donews.data.hbaseuser.UserOverview.main(UserOverview.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:497)
at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:731)
at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:181)
at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:206)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:121)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Caused by: java.lang.IllegalAccessError: class com.google.protobuf.HBaseZeroCopyByteString cannot access its superclass com.google.protobuf.LiteralByteString
at java.lang.ClassLoader.defineClass1(Native Method)
at java.lang.ClassLoader.defineClass(ClassLoader.java:760)
at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:142)
at java.net.URLClassLoader.defineClass(URLClassLoader.java:467)
at java.net.URLClassLoader.access$100(URLClassLoader.java:73)
at java.net.URLClassLoader$1.run(URLClassLoader.java:368)
at java.net.URLClassLoader$1.run(URLClassLoader.java:362)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:361)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
at org.apache.phoenix.schema.PTableImpl.createFromProto(PTableImpl.java:1034)
at org.apache.phoenix.coprocessor.MetaDataProtocol$MetaDataMutationResult.constructFromProto(MetaDataProtocol.java:192)
at org.apache.phoenix.query.ConnectionQueryServicesImpl.metaDataCoprocessorExec(ConnectionQueryServicesImpl.java:1207)
... 24 more
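
The IllegalAccessError above means com.google.protobuf.HBaseZeroCopyByteString and its package-private superclass com.google.protobuf.LiteralByteString did not end up in the same runtime package (same jar, same classloader), which is consistent with the truncated 70 KB client jar described above. To confirm where the two classes are actually loaded from, a small diagnostic like the following can be run on the same classpath (an illustrative sketch, not part of the original job):

object WhichJar {
  // Prints the code source (jar) each class was loaded from. Package-private
  // superclass access only works when both classes share the same jar and
  // classloader.
  def locationOf(name: String): String =
    try {
      val src = Class.forName(name).getProtectionDomain.getCodeSource
      if (src == null) "<bootstrap or unknown>" else src.getLocation.toString
    } catch {
      case _: ClassNotFoundException => "<not on classpath>"
    }

  def main(args: Array[String]): Unit = {
    Seq("com.google.protobuf.LiteralByteString",
        "com.google.protobuf.HBaseZeroCopyByteString")
      .foreach(n => println(s"$n -> ${locationOf(n)}"))
  }
}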

