I'm new to Spark and I'm using PySpark 2.3.1 to read a CSV file into a dataframe. I can read the file and print values in a Jupyter notebook running inside an Anaconda environment. This is the code I'm using:
# Start session
from pyspark import sql
from pyspark.sql import SparkSession
from pyspark.sql.types import StructType, StructField, IntegerType, StringType

spark = SparkSession \
    .builder \
    .appName("Embedding Models") \
    .config('spark.ui.showConsoleProgress', 'true') \
    .config("spark.master", "local[2]") \
    .getOrCreate()

sqlContext = sql.SQLContext(spark)

schema = StructType([
    StructField("Index", IntegerType(), True),
    StructField("title", StringType(), True),
    StructField("body", StringType(), True)])

df = sqlContext.read.csv("../data/faq_data.csv",
                         header=True,
                         mode="DROPMALFORMED",
                         schema=schema)
Output:
df.show()

+-----+--------------------+--------------------+
|Index|               title|                body|
+-----+--------------------+--------------------+
|    0|What does “quantu...|Quantum theory is...|
|    1|What is a quantum...|A quantum compute...|
However, when I call the .count() method on the dataframe, it throws the following error:
---------------------------------------------------------------------------
Py4JJavaError                             Traceback (most recent call last)
<ipython-input-29-913a2f9eb5fc> in <module>()
----> 1 df.count()
~/anaconda3/envs/Community/lib/python3.6/site-packages/pyspark/sql/dataframe.py in count(self)
    453         2
    454         """
--> 455         return int(self._jdf.count())
    456
    457     @ignore_unicode_prefix
~/anaconda3/envs/Community/lib/python3.6/site-packages/py4j/java_gateway.py in __call__(self, *args)
   1255         answer = self.gateway_client.send_command(command)
   1256         return_value = get_return_value(
-> 1257             answer, self.gateway_client, self.target_id, self.name)
   1258
   1259         for temp_arg in temp_args:
~/anaconda3/envs/Community/lib/python3.6/site-packages/pyspark/sql/utils.py in deco(*a, **kw)
     61     def deco(*a, **kw):
     62         try:
---> 63             return f(*a, **kw)
     64         except py4j.protocol.Py4JJavaError as e:
     65             s = e.java_exception.toString()
~/anaconda3/envs/Community/lib/python3.6/site-packages/py4j/protocol.py in get_return_value(answer, gateway_client, target_id, name)
    326                 raise Py4JJavaError(
    327                     "An error occurred while calling {0}{1}{2}.\n".
--> 328                     format(target_id, ".", name), value)
    329             else:
    330                 raise Py4JError(
Py4JJavaError: An error occurred while calling o655.count.
: java.lang.IllegalArgumentException
    at org.apache.xbean.asm5.ClassReader.<init>(Unknown Source)
    at org.apache.xbean.asm5.ClassReader.<init>(Unknown Source)
    at org.apache.xbean.asm5.ClassReader.<init>(Unknown Source)
    at org.apache.spark.util.ClosureCleaner$.getClassReader(ClosureCleaner.scala:46)
    at org.apache.spark.util.FieldAccessFinder$$anon$3$$anonfun$visitMethodInsn$2.apply(ClosureCleaner.scala:449)
    at org.apache.spark.util.FieldAccessFinder$$anon$3$$anonfun$visitMethodInsn$2.apply(ClosureCleaner.scala:432)
    at scala.collection.TraversableLike$WithFilter$$anonfun$foreach$1.apply(TraversableLike.scala:733)
    at scala.collection.mutable.HashMap$$anon$1$$anonfun$foreach$2.apply(HashMap.scala:103)
    at scala.collection.mutable.HashMap$$anon$1$$anonfun$foreach$2.apply(HashMap.scala:103)
    at scala.collection.mutable.HashTable$class.foreachEntry(HashTable.scala:230)
    at scala.collection.mutable.HashMap.foreachEntry(HashMap.scala:40)
    at scala.collection.mutable.HashMap$$anon$1.foreach(HashMap.scala:103)
    at scala.collection.TraversableLike$WithFilter.foreach(TraversableLike.scala:732)
    at org.apache.spark.util.FieldAccessFinder$$anon$3.visitMethodInsn(ClosureCleaner.scala:432)
    at org.apache.xbean.asm5.ClassReader.a(Unknown Source)
    at org.apache.xbean.asm5.ClassReader.b(Unknown Source)
    at org.apache.xbean.asm5.ClassReader.accept(Unknown Source)
    at org.apache.xbean.asm5.ClassReader.accept(Unknown Source)
    at org.apache.spark.util.ClosureCleaner$$anonfun$org$apache$spark$util$ClosureCleaner$$clean$14.apply(ClosureCleaner.scala:262)
    at org.apache.spark.util.ClosureCleaner$$anonfun$org$apache$spark$util$ClosureCleaner$$clean$14.apply(ClosureCleaner.scala:261)
    at scala.collection.immutable.List.foreach(List.scala:381)
    at org.apache.spark.util.ClosureCleaner$.org$apache$spark$util$ClosureCleaner$$clean(ClosureCleaner.scala:261)
    at org.apache.spark.util.ClosureCleaner$.clean(ClosureCleaner.scala:159)
    at org.apache.spark.SparkContext.clean(SparkContext.scala:2299)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2073)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2099)
    at org.apache.spark.rdd.RDD$$anonfun$collect$1.apply(RDD.scala:939)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
    at org.apache.spark.rdd.RDD.withScope(RDD.scala:363)
    at org.apache.spark.rdd.RDD.collect(RDD.scala:938)
    at org.apache.spark.sql.execution.SparkPlan.executeCollect(SparkPlan.scala:297)
    at org.apache.spark.sql.Dataset$$anonfun$count$1.apply(Dataset.scala:2770)
    at org.apache.spark.sql.Dataset$$anonfun$count$1.apply(Dataset.scala:2769)
    at org.apache.spark.sql.Dataset$$anonfun$52.apply(Dataset.scala:3254)
    at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:77)
    at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3253)
    at org.apache.spark.sql.Dataset.count(Dataset.scala:2769)
    at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.base/java.lang.reflect.Method.invoke(Method.java:564)
    at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
    at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
    at py4j.Gateway.invoke(Gateway.java:282)
    at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
    at py4j.commands.CallCommand.execute(CallCommand.java:79)
    at py4j.GatewayConnection.run(GatewayConnection.java:238)
    at java.base/java.lang.Thread.run(Thread.java:844)
I'm using Python 3.6.5.
What Java version do you have on your machine? The problem is most likely related to Java 9. If you switch to Java 8, the exception goes away. If you already have Java 8 installed, point JAVA_HOME at that installation.
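For example, a minimal sketch of pointing the notebook at a Java 8 install is to set JAVA_HOME before the session is created; the path below is only an assumed example, so substitute wherever Java 8 actually lives on your machine:

import os

# Assumed Java 8 location; adjust to your own install path.
os.environ["JAVA_HOME"] = "/usr/lib/jvm/java-8-openjdk-amd64"
os.environ["PATH"] = os.environ["JAVA_HOME"] + "/bin:" + os.environ["PATH"]

from pyspark.sql import SparkSession

# Restart the kernel first so no JVM started with the old Java version is still attached.
spark = SparkSession.builder \
    .appName("Embedding Models") \
    .config("spark.master", "local[2]") \
    .getOrCreate()

# Check which Java the driver JVM actually picked up (uses a py4j internal handle).
print(spark.sparkContext._jvm.System.getProperty("java.version"))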
Could you try df.repartition(1).count() and len(df.toPandas())? If those work, the problem is probably in your Spark configuration.
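For reference, the two checks from the comment above look like this, assuming df is the dataframe built in the question:

# Count after forcing everything into a single partition.
df.repartition(1).count()

# Pull the rows to the driver and count them in pandas instead of in the JVM.
len(df.toPandas())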
On Linux, installing Java 8 as follows helps:

sudo apt install openjdk-8-jdk

Then set the default Java to version 8:

sudo update-alternatives --config java

When it asks for a selection, type the number that corresponds to Java 8 (2 in my case) and press Enter.
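If you want to confirm the switch from inside Python before restarting the notebook, a small check like this should now report a 1.8.x build ("java -version" writes to stderr, so that stream is captured):

import subprocess

# Capture the stderr output of "java -version"; it should show 1.8.x after the switch.
result = subprocess.run(["java", "-version"], stderr=subprocess.PIPE, universal_newlines=True)
print(result.stderr)

After restarting the kernel and recreating the SparkSession, df.count() should run without the IllegalArgumentException.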