Created
July 20, 2017 06:37
-
-
Save romeokienzler/48320fd93d6973336aeb13a5f000e638 to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Py4JJavaErrorTraceback (most recent call last)
<ipython-input-1-51c68bbcc36e> in <module>()
2
3 # ***1. Loading dataframe from Cloudant db
----> 4 df = spark.read.load("openspace", "org.apache.bahir.cloudant")
5 df.cache()
6 df.printSchema()
/usr/local/src/spark21master/spark/python/pyspark/sql/readwriter.py in load(self, path, format, schema, **options)
147 self.options(**options)
148 if isinstance(path, basestring):
--> 149 return self._df(self._jreader.load(path))
150 elif path is not None:
151 if type(path) != list:
/usr/local/src/spark21master/spark/python/lib/py4j-0.10.4-src.zip/py4j/java_gateway.py in __call__(self, *args)
1131 answer = self.gateway_client.send_command(command)
1132 return_value = get_return_value(
-> 1133 answer, self.gateway_client, self.target_id, self.name)
1134
1135 for temp_arg in temp_args:
/usr/local/src/spark21master/spark/python/pyspark/sql/utils.py in deco(*a, **kw)
61 def deco(*a, **kw):
62 try:
---> 63 return f(*a, **kw)
64 except py4j.protocol.Py4JJavaError as e:
65 s = e.java_exception.toString()
/usr/local/src/spark21master/spark/python/lib/py4j-0.10.4-src.zip/py4j/protocol.py in get_return_value(answer, gateway_client, target_id, name)
317 raise Py4JJavaError(
318 "An error occurred while calling {0}{1}{2}.\n".
--> 319 format(target_id, ".", name), value)
320 else:
321 raise Py4JError(
Py4JJavaError: An error occurred while calling o98.load.
: java.lang.ClassNotFoundException: Failed to find data source: org.apache.bahir.cloudant. Please find packages at http://spark.apache.org/third-party-projects.html
at org.apache.spark.sql.execution.datasources.DataSource$.lookupDataSource(DataSource.scala:569)
at org.apache.spark.sql.execution.datasources.DataSource.providingClass$lzycompute(DataSource.scala:86)
at org.apache.spark.sql.execution.datasources.DataSource.providingClass(DataSource.scala:86)
at org.apache.spark.sql.execution.datasources.DataSource.resolveRelation(DataSource.scala:325)
at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:152)
at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:135)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:95)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:55)
at java.lang.reflect.Method.invoke(Method.java:507)
at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
at py4j.Gateway.invoke(Gateway.java:280)
at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
at py4j.commands.CallCommand.execute(CallCommand.java:79)
at py4j.GatewayConnection.run(GatewayConnection.java:214)
at java.lang.Thread.run(Thread.java:785)
Caused by: java.lang.ClassNotFoundException: org.apache.bahir.cloudant.DefaultSource
at java.net.URLClassLoader.findClass(URLClassLoader.java:607)
at java.lang.ClassLoader.loadClassHelper(ClassLoader.java:844)
at java.lang.ClassLoader.loadClass(ClassLoader.java:823)
at java.lang.ClassLoader.loadClass(ClassLoader.java:803)
at org.apache.spark.sql.execution.datasources.DataSource$$anonfun$25$$anonfun$apply$13.apply(DataSource.scala:554)
at org.apache.spark.sql.execution.datasources.DataSource$$anonfun$25$$anonfun$apply$13.apply(DataSource.scala:554)
at scala.util.Try$.apply(Try.scala:192)
at org.apache.spark.sql.execution.datasources.DataSource$$anonfun$25.apply(DataSource.scala:554)
at org.apache.spark.sql.execution.datasources.DataSource$$anonfun$25.apply(DataSource.scala:554)
at scala.util.Try.orElse(Try.scala:84)
at org.apache.spark.sql.execution.datasources.DataSource$.lookupDataSource(DataSource.scala:554)
... 16 more
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment