You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
---------------------------------------------------------------------------
Py4JJavaError Traceback (most recent call last)
<ipython-input-2-45d611a9e008> in <module>()
----> 1 ts = td.series.frombinary('s3://test/thunder-ts', engine=sc)
/home/ec2-user/anaconda2/lib/python2.7/site-packages/thunder/series/readers.pyc in frombinary(path, ext, conf, dtype, shape, skip, index, labels, engine, credentials)
    296
    297     if spark and isinstance(engine, spark):
--> 298         lines = engine.binaryRecords(path, recordsize)
    299         raw = lines.map(lambda x: frombuffer(buffer(x), offset=0, count=nelements, dtype=dtype)[skip:])
300
/home/ec2-user/spark/python/pyspark/context.pyc in binaryRecords(self, path, recordLength)
    548         :param recordLength: The length at which to split the records
    549         """
--> 550         return RDD(self._jsc.binaryRecords(path, recordLength), self, NoOpSerializer())
    551
    552     def _dictToJavaMap(self, d):
/home/ec2-user/spark/python/lib/py4j-0.9-src.zip/py4j/java_gateway.py in __call__(self, *args)
    811     answer = self.gateway_client.send_command(command)
    812     return_value = get_return_value(
--> 813         answer, self.gateway_client, self.target_id, self.name)
    814
    815     for temp_arg in temp_args:
/home/ec2-user/spark/python/pyspark/sql/utils.pyc in deco(*a, **kw)
    43  def deco(*a, **kw):
    44      try:
--> 45          return f(*a, **kw)
    46      except py4j.protocol.Py4JJavaError as e:
    47          s = e.java_exception.toString()
/home/ec2-user/spark/python/lib/py4j-0.9-src.zip/py4j/protocol.py in get_return_value(answer, gateway_client, target_id, name)
    306             raise Py4JJavaError(
    307                 "An error occurred while calling {0}{1}{2}.\n".
--> 308                 format(target_id, ".", name), value)
    309         else:
    310             raise Py4JError(
Py4JJavaError: An error occurred while calling o9.binaryRecords.
: java.io.IOException: No FileSystem for scheme: s3
at org.apache.hadoop.fs.FileSystem.getFileSystemClass(FileSystem.java:2584)
at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2591)
at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:91)
at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2630)
at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2612)
at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:370)
at org.apache.hadoop.fs.Path.getFileSystem(Path.java:296)
at org.apache.hadoop.mapreduce.lib.input.FileInputFormat.setInputPaths(FileInputFormat.java:498)
at org.apache.hadoop.mapreduce.lib.input.FileInputFormat.setInputPaths(FileInputFormat.java:467)
at org.apache.spark.SparkContext$$anonfun$newAPIHadoopFile$2.apply(SparkContext.scala:1102)
at org.apache.spark.SparkContext$$anonfun$newAPIHadoopFile$2.apply(SparkContext.scala:1095)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:111)
at org.apache.spark.SparkContext.withScope(SparkContext.scala:714)
at org.apache.spark.SparkContext.newAPIHadoopFile(SparkContext.scala:1095)
at org.apache.spark.SparkContext$$anonfun$binaryRecords$1.apply(SparkContext.scala:953)
at org.apache.spark.SparkContext$$anonfun$binaryRecords$1.apply(SparkContext.scala:950)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:111)
at org.apache.spark.SparkContext.withScope(SparkContext.scala:714)
at org.apache.spark.SparkContext.binaryRecords(SparkContext.scala:950)
at org.apache.spark.api.java.JavaSparkContext.binaryRecords(JavaSparkContext.scala:304)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:231)
at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:381)
at py4j.Gateway.invoke(Gateway.java:259)
at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:133)
at py4j.commands.CallCommand.execute(CallCommand.java:79)
at py4j.GatewayConnection.run(GatewayConnection.java:209)
at java.lang.Thread.run(Thread.java:745)
The text was updated successfully, but these errors were encountered:
Trying to load a Series from S3 using `ts = td.series.frombinary('s3://test/thunder-ts', engine=sc)` throws the error shown above: `java.io.IOException: No FileSystem for scheme: s3` (the code snippets originally embedded in this report were lost during extraction; the command and exception are reconstructed from the traceback).
The text was updated successfully, but these errors were encountered: