diff --git a/README.md b/README.md
index 97a1f4a..95e128c 100644
--- a/README.md
+++ b/README.md
@@ -10,7 +10,7 @@ Example Application Deployment: Download the files of 5 datasets as given in the
 ## Current State
 **beta**:
-most of the times it should produce expected results for compression and RDF conversion. Please expect some code refactoring and fluctuation. There will be an open-source licence, either GPL or Apache.
+most of the time it should produce the expected results for compression and RDF format conversion. Please expect some code refactoring and fluctuation. There will be an open-source licence, either GPL or Apache.
 
 ## Concept
 
@@ -28,7 +28,7 @@ The databus-client is designed to unify and convert data on the client-side in s
 * Level 1: all features finished, testing required
 * Level 2: using Apache Compress library covers most of the compression formats, more testing required
 * Level 3: Scalable RDF libraries from [SANSA-Stack](http://sansa-stack.net/) and [Databus Derive](https://github.com/dbpedia/databus-derive). Step by step, extension for all (quasi-)isomorphic [IANA mediatypes](https://www.iana.org/assignments/media-types/media-types.xhtml).
-* Level 4: In addition, we plan to provide a plugin mechanism to incorporate more sophisticated mapping engines as [Tarql](https://tarql.github.io/) (already implemented), [RML](http://rml.io), R2RML, [R2R](http://wifo5-03.informatik.uni-mannheim.de/bizer/r2r/) (for owl:equivalence translation) and XSLT.
+* Level 4: In addition, we plan to provide a plugin mechanism to incorporate more sophisticated format-mapping engines such as [Tarql](https://tarql.github.io/) (already implemented), [RML](http://rml.io), R2RML, [R2R](http://wifo5-03.informatik.uni-mannheim.de/bizer/r2r/) (for owl:equivalence translation) and XSLT.
 
 ## Usage
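As background for the Level 2 item above: Apache Commons Compress can auto-detect the compression format of an input stream via `CompressorStreamFactory`. The following is only a minimal single-file sketch of that idea, not the Databus client's actual code path; the object name and file names are placeholders:

```scala
import java.io.{BufferedInputStream, FileInputStream, FileOutputStream}
import org.apache.commons.compress.compressors.CompressorStreamFactory
import org.apache.commons.compress.utils.IOUtils

// Sketch: auto-detect the compression format of a file and decompress it.
// CompressorStreamFactory requires a mark-supported stream, hence the
// BufferedInputStream wrapper. All file names are hypothetical.
object DecompressSketch {
  def main(args: Array[String]): Unit = {
    val in  = new BufferedInputStream(new FileInputStream("dataset.ttl.bz2")) // placeholder input
    val out = new FileOutputStream("dataset.ttl")                             // placeholder output
    val decompressed = new CompressorStreamFactory().createCompressorInputStream(in)
    try IOUtils.copy(decompressed, out)
    finally { decompressed.close(); out.close() }
  }
}
```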
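Likewise, the Level 3 RDF format conversion can be pictured at small scale with plain Apache Jena RIOT. The client itself relies on SANSA-Stack and Databus Derive for scalable conversion, so this is only an illustrative single-machine sketch with placeholder file names:

```scala
import java.io.FileOutputStream
import org.apache.jena.riot.{RDFDataMgr, RDFFormat}

// Sketch: parse one RDF serialization and re-serialize it as another.
// RDFDataMgr infers the input format from the file extension.
object ConvertSketch {
  def main(args: Array[String]): Unit = {
    val model = RDFDataMgr.loadModel("dataset.ttl") // placeholder input
    val out   = new FileOutputStream("dataset.nt")  // placeholder output
    try RDFDataMgr.write(out, model, RDFFormat.NTRIPLES)
    finally out.close()
  }
}
```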
diff --git a/errorLog.log b/errorLog.log
index 71344ae..fd56901 100644
--- a/errorLog.log
+++ b/errorLog.log
@@ -745,3 +745,4600 @@ org.apache.jena.riot.RiotException: [line: 1, col: 1 ] Expected BNode or IRI: Go
 4365 [main] ERROR org.apache.jena.sparql.expr.nodevalue.NodeFunctions - Bad IRI: Code: 4/UNWISE_CHARACTER in PATH: The character matches no grammar rules of URIs/IRIs. These characters are permitted in RDF URI References, XML system identifiers, and XML Schema anyURIs.: http://data.rli.de/ontology/13"
 4838 [dispatcher-event-loop-5] WARN org.apache.spark.scheduler.TaskSetManager - Stage 0 contains a task of very large size (168 KB). The maximum recommended task size is 100 KB.
 5156 [dispatcher-event-loop-1] WARN org.apache.spark.scheduler.TaskSetManager - Stage 1 contains a task of very large size (168 KB). The maximum recommended task size is 100 KB.
+0 [main] WARN org.apache.spark.util.Utils - Your hostname, pop-os resolves to a loopback address: 127.0.1.1; using 192.168.0.105 instead (on interface enp0s25)
+1 [main] WARN org.apache.spark.util.Utils - Set SPARK_LOCAL_IP if you need to bind to another address
+33 [main] INFO org.apache.spark.SparkContext - Running Spark version 2.4.0
+236 [main] WARN org.apache.hadoop.util.NativeCodeLoader - Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+336 [main] INFO org.apache.spark.SparkContext - Submitted application: Databus Client Converter
+393 [main] INFO org.apache.spark.SecurityManager - Changing view acls to: eisenbahnplatte
+394 [main] INFO org.apache.spark.SecurityManager - Changing modify acls to: eisenbahnplatte
+394 [main] INFO org.apache.spark.SecurityManager - Changing view acls groups to:
+395 [main] INFO org.apache.spark.SecurityManager - Changing modify acls groups to:
+395 [main] INFO org.apache.spark.SecurityManager - SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(eisenbahnplatte); groups with view permissions: Set(); users with modify permissions: Set(eisenbahnplatte); groups with modify permissions: Set()
+719 [main] INFO org.apache.spark.util.Utils - Successfully started service 'sparkDriver' on port 39393.
+746 [main] INFO org.apache.spark.SparkEnv - Registering MapOutputTracker
+767 [main] INFO org.apache.spark.SparkEnv - Registering BlockManagerMaster
+770 [main] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information
+770 [main] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - BlockManagerMasterEndpoint up
+781 [main] INFO org.apache.spark.storage.DiskBlockManager - Created local directory at /tmp/blockmgr-31cc126f-1ceb-45a1-aaad-950c22c4fb9a
+802 [main] INFO org.apache.spark.storage.memory.MemoryStore - MemoryStore started with capacity 1946.1 MB
+814 [main] INFO org.apache.spark.SparkEnv - Registering OutputCommitCoordinator
+887 [main] INFO org.spark_project.jetty.util.log - Logging initialized @2805ms
+946 [main] INFO org.spark_project.jetty.server.Server - jetty-9.3.z-SNAPSHOT, build timestamp: unknown, git hash: unknown
+960 [main] INFO org.spark_project.jetty.server.Server - Started @2880ms
+977 [main] INFO org.spark_project.jetty.server.AbstractConnector - Started ServerConnector@65eabaab{HTTP/1.1,[http/1.1]}{0.0.0.0:4040}
+977 [main] INFO org.apache.spark.util.Utils - Successfully started service 'SparkUI' on port 4040.
+994 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@19058533{/jobs,null,AVAILABLE,@Spark}
+995 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@3491e86e{/jobs/json,null,AVAILABLE,@Spark}
+995 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@68f32020{/jobs/job,null,AVAILABLE,@Spark}
+996 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@19b047fe{/jobs/job/json,null,AVAILABLE,@Spark}
+997 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@22590e3e{/stages,null,AVAILABLE,@Spark}
+997 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@53dad875{/stages/json,null,AVAILABLE,@Spark}
+998 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@5f780a86{/stages/stage,null,AVAILABLE,@Spark}
+999 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@2b329bbd{/stages/stage/json,null,AVAILABLE,@Spark}
+1000 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@34819867{/stages/pool,null,AVAILABLE,@Spark}
+1000 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@118102ee{/stages/pool/json,null,AVAILABLE,@Spark}
+1001 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@48cd9a2c{/storage,null,AVAILABLE,@Spark}
+1001 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@771d1ffb{/storage/json,null,AVAILABLE,@Spark}
+1002 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@4f67e3df{/storage/rdd,null,AVAILABLE,@Spark}
+1003 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@56681eaf{/storage/rdd/json,null,AVAILABLE,@Spark}
+1003 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@72d0f2b4{/environment,null,AVAILABLE,@Spark}
+1004 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@6d2dc9d2{/environment/json,null,AVAILABLE,@Spark}
+1004 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@1da4b6b3{/executors,null,AVAILABLE,@Spark}
+1005 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@b2f4ece{/executors/json,null,AVAILABLE,@Spark}
+1006 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7e1f584d{/executors/threadDump,null,AVAILABLE,@Spark}
+1006 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7dff6d05{/executors/threadDump/json,null,AVAILABLE,@Spark}
+1012 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@45d64d27{/static,null,AVAILABLE,@Spark}
+1012 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@1816e24a{/,null,AVAILABLE,@Spark}
+1013 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@6940f685{/api,null,AVAILABLE,@Spark}
+1014 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@783115d9{/jobs/job/kill,null,AVAILABLE,@Spark}
+1015 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@3402b4c9{/stages/stage/kill,null,AVAILABLE,@Spark}
+1016 [main] INFO org.apache.spark.ui.SparkUI - Bound SparkUI to 0.0.0.0, and started at http://192.168.0.105:4040
+1087 [main] INFO org.apache.spark.executor.Executor - Starting executor ID driver on host localhost
+1198 [main] INFO org.apache.spark.util.Utils - Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 42739.
+1199 [main] INFO org.apache.spark.network.netty.NettyBlockTransferService - Server created on 192.168.0.105:42739
+1200 [main] INFO org.apache.spark.storage.BlockManager - Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy
+1221 [main] INFO org.apache.spark.storage.BlockManagerMaster - Registering BlockManager BlockManagerId(driver, 192.168.0.105, 42739, None)
+1224 [dispatcher-event-loop-2] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Registering block manager 192.168.0.105:42739 with 1946.1 MB RAM, BlockManagerId(driver, 192.168.0.105, 42739, None)
+1226 [main] INFO org.apache.spark.storage.BlockManagerMaster - Registered BlockManager BlockManagerId(driver, 192.168.0.105, 42739, None)
+1226 [main] INFO org.apache.spark.storage.BlockManager - Initialized BlockManager: BlockManagerId(driver, 192.168.0.105, 42739, None)
+1355 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@43e9089{/metrics/json,null,AVAILABLE,@Spark}
+0 [main] ERROR File Format Logger - Output file format rdf is not supported.
using builtin-java classes where applicable +351 [main] INFO org.apache.spark.SparkContext - Submitted application: Databus Client Converter +398 [main] INFO org.apache.spark.SecurityManager - Changing view acls to: eisenbahnplatte +398 [main] INFO org.apache.spark.SecurityManager - Changing modify acls to: eisenbahnplatte +398 [main] INFO org.apache.spark.SecurityManager - Changing view acls groups to: +399 [main] INFO org.apache.spark.SecurityManager - Changing modify acls groups to: +399 [main] INFO org.apache.spark.SecurityManager - SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(eisenbahnplatte); groups with view permissions: Set(); users with modify permissions: Set(eisenbahnplatte); groups with modify permissions: Set() +685 [main] INFO org.apache.spark.util.Utils - Successfully started service 'sparkDriver' on port 39933. +703 [main] INFO org.apache.spark.SparkEnv - Registering MapOutputTracker +717 [main] INFO org.apache.spark.SparkEnv - Registering BlockManagerMaster +720 [main] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information +720 [main] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - BlockManagerMasterEndpoint up +727 [main] INFO org.apache.spark.storage.DiskBlockManager - Created local directory at /tmp/blockmgr-fdf9af45-65ae-4738-9e64-20eaa43840ee +741 [main] INFO org.apache.spark.storage.memory.MemoryStore - MemoryStore started with capacity 1946.1 MB +750 [main] INFO org.apache.spark.SparkEnv - Registering OutputCommitCoordinator +808 [main] INFO org.spark_project.jetty.util.log - Logging initialized @2434ms +858 [main] INFO org.spark_project.jetty.server.Server - jetty-9.3.z-SNAPSHOT, build timestamp: unknown, git hash: unknown +870 [main] INFO org.spark_project.jetty.server.Server - Started @2497ms +886 [main] INFO org.spark_project.jetty.server.AbstractConnector - Started ServerConnector@65eabaab{HTTP/1.1,[http/1.1]}{0.0.0.0:4040} +886 [main] INFO org.apache.spark.util.Utils - Successfully started service 'SparkUI' on port 4040. 
+903 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@19058533{/jobs,null,AVAILABLE,@Spark} +904 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@3491e86e{/jobs/json,null,AVAILABLE,@Spark} +904 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@68f32020{/jobs/job,null,AVAILABLE,@Spark} +905 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@19b047fe{/jobs/job/json,null,AVAILABLE,@Spark} +906 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@22590e3e{/stages,null,AVAILABLE,@Spark} +906 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@53dad875{/stages/json,null,AVAILABLE,@Spark} +907 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@5f780a86{/stages/stage,null,AVAILABLE,@Spark} +908 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@2b329bbd{/stages/stage/json,null,AVAILABLE,@Spark} +908 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@34819867{/stages/pool,null,AVAILABLE,@Spark} +909 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@118102ee{/stages/pool/json,null,AVAILABLE,@Spark} +910 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@48cd9a2c{/storage,null,AVAILABLE,@Spark} +910 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@771d1ffb{/storage/json,null,AVAILABLE,@Spark} +911 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@4f67e3df{/storage/rdd,null,AVAILABLE,@Spark} +911 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@56681eaf{/storage/rdd/json,null,AVAILABLE,@Spark} +912 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@72d0f2b4{/environment,null,AVAILABLE,@Spark} +912 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@6d2dc9d2{/environment/json,null,AVAILABLE,@Spark} +913 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@1da4b6b3{/executors,null,AVAILABLE,@Spark} +914 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@b2f4ece{/executors/json,null,AVAILABLE,@Spark} +914 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7e1f584d{/executors/threadDump,null,AVAILABLE,@Spark} +915 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7dff6d05{/executors/threadDump/json,null,AVAILABLE,@Spark} +921 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@45d64d27{/static,null,AVAILABLE,@Spark} +922 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@1816e24a{/,null,AVAILABLE,@Spark} +923 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started 
o.s.j.s.ServletContextHandler@6940f685{/api,null,AVAILABLE,@Spark} +924 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@783115d9{/jobs/job/kill,null,AVAILABLE,@Spark} +924 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@3402b4c9{/stages/stage/kill,null,AVAILABLE,@Spark} +926 [main] INFO org.apache.spark.ui.SparkUI - Bound SparkUI to 0.0.0.0, and started at http://192.168.0.105:4040 +989 [main] INFO org.apache.spark.executor.Executor - Starting executor ID driver on host localhost +1085 [main] INFO org.apache.spark.util.Utils - Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 39045. +1085 [main] INFO org.apache.spark.network.netty.NettyBlockTransferService - Server created on 192.168.0.105:39045 +1086 [main] INFO org.apache.spark.storage.BlockManager - Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy +1105 [main] INFO org.apache.spark.storage.BlockManagerMaster - Registering BlockManager BlockManagerId(driver, 192.168.0.105, 39045, None) +1108 [dispatcher-event-loop-2] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Registering block manager 192.168.0.105:39045 with 1946.1 MB RAM, BlockManagerId(driver, 192.168.0.105, 39045, None) +1110 [main] INFO org.apache.spark.storage.BlockManagerMaster - Registered BlockManager BlockManagerId(driver, 192.168.0.105, 39045, None) +1110 [main] INFO org.apache.spark.storage.BlockManager - Initialized BlockManager: BlockManagerId(driver, 192.168.0.105, 39045, None) +1231 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@43e9089{/metrics/json,null,AVAILABLE,@Spark} +0 [main] WARN org.apache.spark.util.Utils - Your hostname, pop-os resolves to a loopback address: 127.0.1.1; using 192.168.0.105 instead (on interface enp0s25) +2 [main] WARN org.apache.spark.util.Utils - Set SPARK_LOCAL_IP if you need to bind to another address +31 [main] INFO org.apache.spark.SparkContext - Running Spark version 2.4.0 +218 [main] WARN org.apache.hadoop.util.NativeCodeLoader - Unable to load native-hadoop library for your platform... using builtin-java classes where applicable +330 [main] INFO org.apache.spark.SparkContext - Submitted application: Databus Client Converter +383 [main] INFO org.apache.spark.SecurityManager - Changing view acls to: eisenbahnplatte +383 [main] INFO org.apache.spark.SecurityManager - Changing modify acls to: eisenbahnplatte +383 [main] INFO org.apache.spark.SecurityManager - Changing view acls groups to: +384 [main] INFO org.apache.spark.SecurityManager - Changing modify acls groups to: +384 [main] INFO org.apache.spark.SecurityManager - SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(eisenbahnplatte); groups with view permissions: Set(); users with modify permissions: Set(eisenbahnplatte); groups with modify permissions: Set() +683 [main] INFO org.apache.spark.util.Utils - Successfully started service 'sparkDriver' on port 35827. 
+702 [main] INFO org.apache.spark.SparkEnv - Registering MapOutputTracker +718 [main] INFO org.apache.spark.SparkEnv - Registering BlockManagerMaster +720 [main] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information +721 [main] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - BlockManagerMasterEndpoint up +728 [main] INFO org.apache.spark.storage.DiskBlockManager - Created local directory at /tmp/blockmgr-28775958-355f-409b-91a4-5c1960f64026 +743 [main] INFO org.apache.spark.storage.memory.MemoryStore - MemoryStore started with capacity 1946.1 MB +754 [main] INFO org.apache.spark.SparkEnv - Registering OutputCommitCoordinator +815 [main] INFO org.spark_project.jetty.util.log - Logging initialized @2458ms +867 [main] INFO org.spark_project.jetty.server.Server - jetty-9.3.z-SNAPSHOT, build timestamp: unknown, git hash: unknown +880 [main] INFO org.spark_project.jetty.server.Server - Started @2524ms +897 [main] INFO org.spark_project.jetty.server.AbstractConnector - Started ServerConnector@65eabaab{HTTP/1.1,[http/1.1]}{0.0.0.0:4040} +897 [main] INFO org.apache.spark.util.Utils - Successfully started service 'SparkUI' on port 4040. +916 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@19058533{/jobs,null,AVAILABLE,@Spark} +917 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@3491e86e{/jobs/json,null,AVAILABLE,@Spark} +917 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@68f32020{/jobs/job,null,AVAILABLE,@Spark} +918 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@19b047fe{/jobs/job/json,null,AVAILABLE,@Spark} +919 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@22590e3e{/stages,null,AVAILABLE,@Spark} +920 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@53dad875{/stages/json,null,AVAILABLE,@Spark} +920 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@5f780a86{/stages/stage,null,AVAILABLE,@Spark} +922 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@2b329bbd{/stages/stage/json,null,AVAILABLE,@Spark} +923 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@34819867{/stages/pool,null,AVAILABLE,@Spark} +924 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@118102ee{/stages/pool/json,null,AVAILABLE,@Spark} +925 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@48cd9a2c{/storage,null,AVAILABLE,@Spark} +926 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@771d1ffb{/storage/json,null,AVAILABLE,@Spark} +926 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@4f67e3df{/storage/rdd,null,AVAILABLE,@Spark} +927 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@56681eaf{/storage/rdd/json,null,AVAILABLE,@Spark} +928 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started 
o.s.j.s.ServletContextHandler@72d0f2b4{/environment,null,AVAILABLE,@Spark} +929 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@6d2dc9d2{/environment/json,null,AVAILABLE,@Spark} +929 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@1da4b6b3{/executors,null,AVAILABLE,@Spark} +930 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@b2f4ece{/executors/json,null,AVAILABLE,@Spark} +931 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7e1f584d{/executors/threadDump,null,AVAILABLE,@Spark} +932 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7dff6d05{/executors/threadDump/json,null,AVAILABLE,@Spark} +937 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@45d64d27{/static,null,AVAILABLE,@Spark} +938 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@1816e24a{/,null,AVAILABLE,@Spark} +940 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@6940f685{/api,null,AVAILABLE,@Spark} +941 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@783115d9{/jobs/job/kill,null,AVAILABLE,@Spark} +941 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@3402b4c9{/stages/stage/kill,null,AVAILABLE,@Spark} +943 [main] INFO org.apache.spark.ui.SparkUI - Bound SparkUI to 0.0.0.0, and started at http://192.168.0.105:4040 +1019 [main] INFO org.apache.spark.executor.Executor - Starting executor ID driver on host localhost +1118 [main] INFO org.apache.spark.util.Utils - Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 45745. +1119 [main] INFO org.apache.spark.network.netty.NettyBlockTransferService - Server created on 192.168.0.105:45745 +1120 [main] INFO org.apache.spark.storage.BlockManager - Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy +1139 [main] INFO org.apache.spark.storage.BlockManagerMaster - Registering BlockManager BlockManagerId(driver, 192.168.0.105, 45745, None) +1142 [dispatcher-event-loop-2] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Registering block manager 192.168.0.105:45745 with 1946.1 MB RAM, BlockManagerId(driver, 192.168.0.105, 45745, None) +1143 [main] INFO org.apache.spark.storage.BlockManagerMaster - Registered BlockManager BlockManagerId(driver, 192.168.0.105, 45745, None) +1144 [main] INFO org.apache.spark.storage.BlockManager - Initialized BlockManager: BlockManagerId(driver, 192.168.0.105, 45745, None) +1270 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@43e9089{/metrics/json,null,AVAILABLE,@Spark} +0 [main] WARN org.apache.spark.util.Utils - Your hostname, pop-os resolves to a loopback address: 127.0.1.1; using 192.168.0.105 instead (on interface enp0s25) +1 [main] WARN org.apache.spark.util.Utils - Set SPARK_LOCAL_IP if you need to bind to another address +28 [main] INFO org.apache.spark.SparkContext - Running Spark version 2.4.0 +195 [main] WARN org.apache.hadoop.util.NativeCodeLoader - Unable to load native-hadoop library for your platform... 
using builtin-java classes where applicable +288 [main] INFO org.apache.spark.SparkContext - Submitted application: Databus Client Converter +338 [main] INFO org.apache.spark.SecurityManager - Changing view acls to: eisenbahnplatte +338 [main] INFO org.apache.spark.SecurityManager - Changing modify acls to: eisenbahnplatte +339 [main] INFO org.apache.spark.SecurityManager - Changing view acls groups to: +339 [main] INFO org.apache.spark.SecurityManager - Changing modify acls groups to: +339 [main] INFO org.apache.spark.SecurityManager - SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(eisenbahnplatte); groups with view permissions: Set(); users with modify permissions: Set(eisenbahnplatte); groups with modify permissions: Set() +628 [main] INFO org.apache.spark.util.Utils - Successfully started service 'sparkDriver' on port 34539. +648 [main] INFO org.apache.spark.SparkEnv - Registering MapOutputTracker +663 [main] INFO org.apache.spark.SparkEnv - Registering BlockManagerMaster +666 [main] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information +666 [main] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - BlockManagerMasterEndpoint up +674 [main] INFO org.apache.spark.storage.DiskBlockManager - Created local directory at /tmp/blockmgr-c5c95375-7f40-4a91-b109-d3ff54a13b99 +693 [main] INFO org.apache.spark.storage.memory.MemoryStore - MemoryStore started with capacity 1946.1 MB +703 [main] INFO org.apache.spark.SparkEnv - Registering OutputCommitCoordinator +767 [main] INFO org.spark_project.jetty.util.log - Logging initialized @2409ms +817 [main] INFO org.spark_project.jetty.server.Server - jetty-9.3.z-SNAPSHOT, build timestamp: unknown, git hash: unknown +832 [main] INFO org.spark_project.jetty.server.Server - Started @2473ms +846 [main] INFO org.spark_project.jetty.server.AbstractConnector - Started ServerConnector@1de9d54{HTTP/1.1,[http/1.1]}{0.0.0.0:4040} +846 [main] INFO org.apache.spark.util.Utils - Successfully started service 'SparkUI' on port 4040. 
+864 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@42210be1{/jobs,null,AVAILABLE,@Spark} +865 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@409986fe{/jobs/json,null,AVAILABLE,@Spark} +865 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@19b047fe{/jobs/job,null,AVAILABLE,@Spark} +866 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@53dad875{/jobs/job/json,null,AVAILABLE,@Spark} +867 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@5f780a86{/stages,null,AVAILABLE,@Spark} +868 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@446c3920{/stages/json,null,AVAILABLE,@Spark} +868 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@2eaef76d{/stages/stage,null,AVAILABLE,@Spark} +870 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@118102ee{/stages/stage/json,null,AVAILABLE,@Spark} +871 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@48cd9a2c{/stages/pool,null,AVAILABLE,@Spark} +872 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@771d1ffb{/stages/pool/json,null,AVAILABLE,@Spark} +872 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@4f67e3df{/storage,null,AVAILABLE,@Spark} +873 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@56681eaf{/storage/json,null,AVAILABLE,@Spark} +874 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@72d0f2b4{/storage/rdd,null,AVAILABLE,@Spark} +874 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@6d2dc9d2{/storage/rdd/json,null,AVAILABLE,@Spark} +875 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@1da4b6b3{/environment,null,AVAILABLE,@Spark} +876 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@b2f4ece{/environment/json,null,AVAILABLE,@Spark} +876 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7e1f584d{/executors,null,AVAILABLE,@Spark} +877 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7dff6d05{/executors/json,null,AVAILABLE,@Spark} +878 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@45d64d27{/executors/threadDump,null,AVAILABLE,@Spark} +878 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@34fe326d{/executors/threadDump/json,null,AVAILABLE,@Spark} +884 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@30a7c98f{/static,null,AVAILABLE,@Spark} +885 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@47b179d7{/,null,AVAILABLE,@Spark} +886 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started 
o.s.j.s.ServletContextHandler@39008c9f{/api,null,AVAILABLE,@Spark} +886 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@95bb2a2{/jobs/job/kill,null,AVAILABLE,@Spark} +887 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@46d567cb{/stages/stage/kill,null,AVAILABLE,@Spark} +888 [main] INFO org.apache.spark.ui.SparkUI - Bound SparkUI to 0.0.0.0, and started at http://192.168.0.105:4040 +954 [main] INFO org.apache.spark.executor.Executor - Starting executor ID driver on host localhost +1053 [main] INFO org.apache.spark.util.Utils - Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 39321. +1053 [main] INFO org.apache.spark.network.netty.NettyBlockTransferService - Server created on 192.168.0.105:39321 +1054 [main] INFO org.apache.spark.storage.BlockManager - Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy +1074 [main] INFO org.apache.spark.storage.BlockManagerMaster - Registering BlockManager BlockManagerId(driver, 192.168.0.105, 39321, None) +1077 [dispatcher-event-loop-2] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Registering block manager 192.168.0.105:39321 with 1946.1 MB RAM, BlockManagerId(driver, 192.168.0.105, 39321, None) +1079 [main] INFO org.apache.spark.storage.BlockManagerMaster - Registered BlockManager BlockManagerId(driver, 192.168.0.105, 39321, None) +1079 [main] INFO org.apache.spark.storage.BlockManager - Initialized BlockManager: BlockManagerId(driver, 192.168.0.105, 39321, None) +1205 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@352c44a8{/metrics/json,null,AVAILABLE,@Spark} +0 [main] WARN org.apache.spark.util.Utils - Your hostname, pop-os resolves to a loopback address: 127.0.1.1; using 192.168.0.105 instead (on interface enp0s25) +1 [main] WARN org.apache.spark.util.Utils - Set SPARK_LOCAL_IP if you need to bind to another address +28 [main] INFO org.apache.spark.SparkContext - Running Spark version 2.4.0 +194 [main] WARN org.apache.hadoop.util.NativeCodeLoader - Unable to load native-hadoop library for your platform... using builtin-java classes where applicable +312 [main] INFO org.apache.spark.SparkContext - Submitted application: Databus Client Converter +359 [main] INFO org.apache.spark.SecurityManager - Changing view acls to: eisenbahnplatte +360 [main] INFO org.apache.spark.SecurityManager - Changing modify acls to: eisenbahnplatte +360 [main] INFO org.apache.spark.SecurityManager - Changing view acls groups to: +360 [main] INFO org.apache.spark.SecurityManager - Changing modify acls groups to: +361 [main] INFO org.apache.spark.SecurityManager - SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(eisenbahnplatte); groups with view permissions: Set(); users with modify permissions: Set(eisenbahnplatte); groups with modify permissions: Set() +683 [main] INFO org.apache.spark.util.Utils - Successfully started service 'sparkDriver' on port 44363. 
+704 [main] INFO org.apache.spark.SparkEnv - Registering MapOutputTracker +719 [main] INFO org.apache.spark.SparkEnv - Registering BlockManagerMaster +721 [main] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information +721 [main] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - BlockManagerMasterEndpoint up +729 [main] INFO org.apache.spark.storage.DiskBlockManager - Created local directory at /tmp/blockmgr-10a55731-713c-4b09-99e5-78eb7f5bfe14 +743 [main] INFO org.apache.spark.storage.memory.MemoryStore - MemoryStore started with capacity 1946.1 MB +752 [main] INFO org.apache.spark.SparkEnv - Registering OutputCommitCoordinator +816 [main] INFO org.spark_project.jetty.util.log - Logging initialized @2435ms +868 [main] INFO org.spark_project.jetty.server.Server - jetty-9.3.z-SNAPSHOT, build timestamp: unknown, git hash: unknown +880 [main] INFO org.spark_project.jetty.server.Server - Started @2500ms +896 [main] INFO org.spark_project.jetty.server.AbstractConnector - Started ServerConnector@1de9d54{HTTP/1.1,[http/1.1]}{0.0.0.0:4040} +896 [main] INFO org.apache.spark.util.Utils - Successfully started service 'SparkUI' on port 4040. +915 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@42210be1{/jobs,null,AVAILABLE,@Spark} +915 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@409986fe{/jobs/json,null,AVAILABLE,@Spark} +916 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@19b047fe{/jobs/job,null,AVAILABLE,@Spark} +917 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@53dad875{/jobs/job/json,null,AVAILABLE,@Spark} +918 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@5f780a86{/stages,null,AVAILABLE,@Spark} +919 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@446c3920{/stages/json,null,AVAILABLE,@Spark} +919 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@2eaef76d{/stages/stage,null,AVAILABLE,@Spark} +921 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@118102ee{/stages/stage/json,null,AVAILABLE,@Spark} +922 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@48cd9a2c{/stages/pool,null,AVAILABLE,@Spark} +923 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@771d1ffb{/stages/pool/json,null,AVAILABLE,@Spark} +923 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@4f67e3df{/storage,null,AVAILABLE,@Spark} +924 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@56681eaf{/storage/json,null,AVAILABLE,@Spark} +925 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@72d0f2b4{/storage/rdd,null,AVAILABLE,@Spark} +926 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@6d2dc9d2{/storage/rdd/json,null,AVAILABLE,@Spark} +927 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started 
o.s.j.s.ServletContextHandler@1da4b6b3{/environment,null,AVAILABLE,@Spark} +927 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@b2f4ece{/environment/json,null,AVAILABLE,@Spark} +928 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7e1f584d{/executors,null,AVAILABLE,@Spark} +929 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7dff6d05{/executors/json,null,AVAILABLE,@Spark} +930 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@45d64d27{/executors/threadDump,null,AVAILABLE,@Spark} +931 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@34fe326d{/executors/threadDump/json,null,AVAILABLE,@Spark} +939 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@30a7c98f{/static,null,AVAILABLE,@Spark} +940 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@47b179d7{/,null,AVAILABLE,@Spark} +941 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@39008c9f{/api,null,AVAILABLE,@Spark} +942 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@95bb2a2{/jobs/job/kill,null,AVAILABLE,@Spark} +943 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@46d567cb{/stages/stage/kill,null,AVAILABLE,@Spark} +945 [main] INFO org.apache.spark.ui.SparkUI - Bound SparkUI to 0.0.0.0, and started at http://192.168.0.105:4040 +1017 [main] INFO org.apache.spark.executor.Executor - Starting executor ID driver on host localhost +1113 [main] INFO org.apache.spark.util.Utils - Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 37189. +1113 [main] INFO org.apache.spark.network.netty.NettyBlockTransferService - Server created on 192.168.0.105:37189 +1114 [main] INFO org.apache.spark.storage.BlockManager - Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy +1133 [main] INFO org.apache.spark.storage.BlockManagerMaster - Registering BlockManager BlockManagerId(driver, 192.168.0.105, 37189, None) +1135 [dispatcher-event-loop-2] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Registering block manager 192.168.0.105:37189 with 1946.1 MB RAM, BlockManagerId(driver, 192.168.0.105, 37189, None) +1137 [main] INFO org.apache.spark.storage.BlockManagerMaster - Registered BlockManager BlockManagerId(driver, 192.168.0.105, 37189, None) +1138 [main] INFO org.apache.spark.storage.BlockManager - Initialized BlockManager: BlockManagerId(driver, 192.168.0.105, 37189, None) +1258 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@352c44a8{/metrics/json,null,AVAILABLE,@Spark} +0 [main] WARN org.apache.spark.util.Utils - Your hostname, pop-os resolves to a loopback address: 127.0.1.1; using 192.168.0.105 instead (on interface enp0s25) +1 [main] WARN org.apache.spark.util.Utils - Set SPARK_LOCAL_IP if you need to bind to another address +29 [main] INFO org.apache.spark.SparkContext - Running Spark version 2.4.0 +218 [main] WARN org.apache.hadoop.util.NativeCodeLoader - Unable to load native-hadoop library for your platform... 
using builtin-java classes where applicable +339 [main] INFO org.apache.spark.SparkContext - Submitted application: Databus Client Converter +385 [main] INFO org.apache.spark.SecurityManager - Changing view acls to: eisenbahnplatte +386 [main] INFO org.apache.spark.SecurityManager - Changing modify acls to: eisenbahnplatte +386 [main] INFO org.apache.spark.SecurityManager - Changing view acls groups to: +386 [main] INFO org.apache.spark.SecurityManager - Changing modify acls groups to: +387 [main] INFO org.apache.spark.SecurityManager - SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(eisenbahnplatte); groups with view permissions: Set(); users with modify permissions: Set(eisenbahnplatte); groups with modify permissions: Set() +698 [main] INFO org.apache.spark.util.Utils - Successfully started service 'sparkDriver' on port 39021. +716 [main] INFO org.apache.spark.SparkEnv - Registering MapOutputTracker +731 [main] INFO org.apache.spark.SparkEnv - Registering BlockManagerMaster +733 [main] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information +734 [main] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - BlockManagerMasterEndpoint up +741 [main] INFO org.apache.spark.storage.DiskBlockManager - Created local directory at /tmp/blockmgr-e833f4a1-e6ca-4f4a-b8ee-bb49242102d0 +757 [main] INFO org.apache.spark.storage.memory.MemoryStore - MemoryStore started with capacity 1946.1 MB +767 [main] INFO org.apache.spark.SparkEnv - Registering OutputCommitCoordinator +828 [main] INFO org.spark_project.jetty.util.log - Logging initialized @2474ms +879 [main] INFO org.spark_project.jetty.server.Server - jetty-9.3.z-SNAPSHOT, build timestamp: unknown, git hash: unknown +894 [main] INFO org.spark_project.jetty.server.Server - Started @2538ms +909 [main] INFO org.spark_project.jetty.server.AbstractConnector - Started ServerConnector@65eabaab{HTTP/1.1,[http/1.1]}{0.0.0.0:4040} +909 [main] INFO org.apache.spark.util.Utils - Successfully started service 'SparkUI' on port 4040. 
+934 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@19058533{/jobs,null,AVAILABLE,@Spark} +935 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@3491e86e{/jobs/json,null,AVAILABLE,@Spark} +935 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@68f32020{/jobs/job,null,AVAILABLE,@Spark} +937 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@19b047fe{/jobs/job/json,null,AVAILABLE,@Spark} +937 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@22590e3e{/stages,null,AVAILABLE,@Spark} +938 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@53dad875{/stages/json,null,AVAILABLE,@Spark} +939 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@5f780a86{/stages/stage,null,AVAILABLE,@Spark} +940 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@2b329bbd{/stages/stage/json,null,AVAILABLE,@Spark} +941 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@34819867{/stages/pool,null,AVAILABLE,@Spark} +942 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@118102ee{/stages/pool/json,null,AVAILABLE,@Spark} +943 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@48cd9a2c{/storage,null,AVAILABLE,@Spark} +943 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@771d1ffb{/storage/json,null,AVAILABLE,@Spark} +944 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@4f67e3df{/storage/rdd,null,AVAILABLE,@Spark} +945 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@56681eaf{/storage/rdd/json,null,AVAILABLE,@Spark} +946 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@72d0f2b4{/environment,null,AVAILABLE,@Spark} +947 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@6d2dc9d2{/environment/json,null,AVAILABLE,@Spark} +947 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@1da4b6b3{/executors,null,AVAILABLE,@Spark} +948 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@b2f4ece{/executors/json,null,AVAILABLE,@Spark} +949 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7e1f584d{/executors/threadDump,null,AVAILABLE,@Spark} +950 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7dff6d05{/executors/threadDump/json,null,AVAILABLE,@Spark} +957 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@45d64d27{/static,null,AVAILABLE,@Spark} +958 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@1816e24a{/,null,AVAILABLE,@Spark} +960 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started 
o.s.j.s.ServletContextHandler@6940f685{/api,null,AVAILABLE,@Spark} +961 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@783115d9{/jobs/job/kill,null,AVAILABLE,@Spark} +962 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@3402b4c9{/stages/stage/kill,null,AVAILABLE,@Spark} +964 [main] INFO org.apache.spark.ui.SparkUI - Bound SparkUI to 0.0.0.0, and started at http://192.168.0.105:4040 +1045 [main] INFO org.apache.spark.executor.Executor - Starting executor ID driver on host localhost +1160 [main] INFO org.apache.spark.util.Utils - Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 34491. +1160 [main] INFO org.apache.spark.network.netty.NettyBlockTransferService - Server created on 192.168.0.105:34491 +1161 [main] INFO org.apache.spark.storage.BlockManager - Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy +1181 [main] INFO org.apache.spark.storage.BlockManagerMaster - Registering BlockManager BlockManagerId(driver, 192.168.0.105, 34491, None) +1183 [dispatcher-event-loop-2] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Registering block manager 192.168.0.105:34491 with 1946.1 MB RAM, BlockManagerId(driver, 192.168.0.105, 34491, None) +1185 [main] INFO org.apache.spark.storage.BlockManagerMaster - Registered BlockManager BlockManagerId(driver, 192.168.0.105, 34491, None) +1185 [main] INFO org.apache.spark.storage.BlockManager - Initialized BlockManager: BlockManagerId(driver, 192.168.0.105, 34491, None) +1303 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@43e9089{/metrics/json,null,AVAILABLE,@Spark} +0 [main] WARN org.apache.spark.util.Utils - Your hostname, pop-os resolves to a loopback address: 127.0.1.1; using 192.168.0.105 instead (on interface enp0s25) +1 [main] WARN org.apache.spark.util.Utils - Set SPARK_LOCAL_IP if you need to bind to another address +30 [main] INFO org.apache.spark.SparkContext - Running Spark version 2.4.0 +218 [main] WARN org.apache.hadoop.util.NativeCodeLoader - Unable to load native-hadoop library for your platform... using builtin-java classes where applicable +335 [main] INFO org.apache.spark.SparkContext - Submitted application: Databus Client Converter +384 [main] INFO org.apache.spark.SecurityManager - Changing view acls to: eisenbahnplatte +384 [main] INFO org.apache.spark.SecurityManager - Changing modify acls to: eisenbahnplatte +384 [main] INFO org.apache.spark.SecurityManager - Changing view acls groups to: +385 [main] INFO org.apache.spark.SecurityManager - Changing modify acls groups to: +385 [main] INFO org.apache.spark.SecurityManager - SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(eisenbahnplatte); groups with view permissions: Set(); users with modify permissions: Set(eisenbahnplatte); groups with modify permissions: Set() +760 [main] INFO org.apache.spark.util.Utils - Successfully started service 'sparkDriver' on port 34055. 
+778 [main] INFO org.apache.spark.SparkEnv - Registering MapOutputTracker +792 [main] INFO org.apache.spark.SparkEnv - Registering BlockManagerMaster +795 [main] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information +795 [main] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - BlockManagerMasterEndpoint up +802 [main] INFO org.apache.spark.storage.DiskBlockManager - Created local directory at /tmp/blockmgr-fab2d2c2-cafc-4cf1-ac36-e51b6b7d8b92 +817 [main] INFO org.apache.spark.storage.memory.MemoryStore - MemoryStore started with capacity 1946.1 MB +828 [main] INFO org.apache.spark.SparkEnv - Registering OutputCommitCoordinator +894 [main] INFO org.spark_project.jetty.util.log - Logging initialized @2655ms +957 [main] INFO org.spark_project.jetty.server.Server - jetty-9.3.z-SNAPSHOT, build timestamp: unknown, git hash: unknown +970 [main] INFO org.spark_project.jetty.server.Server - Started @2733ms +991 [main] INFO org.spark_project.jetty.server.AbstractConnector - Started ServerConnector@65eabaab{HTTP/1.1,[http/1.1]}{0.0.0.0:4040} +991 [main] INFO org.apache.spark.util.Utils - Successfully started service 'SparkUI' on port 4040. +1016 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@19058533{/jobs,null,AVAILABLE,@Spark} +1017 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@3491e86e{/jobs/json,null,AVAILABLE,@Spark} +1017 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@68f32020{/jobs/job,null,AVAILABLE,@Spark} +1019 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@19b047fe{/jobs/job/json,null,AVAILABLE,@Spark} +1020 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@22590e3e{/stages,null,AVAILABLE,@Spark} +1021 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@53dad875{/stages/json,null,AVAILABLE,@Spark} +1023 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@5f780a86{/stages/stage,null,AVAILABLE,@Spark} +1025 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@2b329bbd{/stages/stage/json,null,AVAILABLE,@Spark} +1026 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@34819867{/stages/pool,null,AVAILABLE,@Spark} +1027 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@118102ee{/stages/pool/json,null,AVAILABLE,@Spark} +1027 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@48cd9a2c{/storage,null,AVAILABLE,@Spark} +1028 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@771d1ffb{/storage/json,null,AVAILABLE,@Spark} +1029 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@4f67e3df{/storage/rdd,null,AVAILABLE,@Spark} +1029 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@56681eaf{/storage/rdd/json,null,AVAILABLE,@Spark} +1030 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started 
o.s.j.s.ServletContextHandler@72d0f2b4{/environment,null,AVAILABLE,@Spark} +1031 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@6d2dc9d2{/environment/json,null,AVAILABLE,@Spark} +1031 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@1da4b6b3{/executors,null,AVAILABLE,@Spark} +1032 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@b2f4ece{/executors/json,null,AVAILABLE,@Spark} +1033 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7e1f584d{/executors/threadDump,null,AVAILABLE,@Spark} +1034 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7dff6d05{/executors/threadDump/json,null,AVAILABLE,@Spark} +1042 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@45d64d27{/static,null,AVAILABLE,@Spark} +1043 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@1816e24a{/,null,AVAILABLE,@Spark} +1044 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@6940f685{/api,null,AVAILABLE,@Spark} +1045 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@783115d9{/jobs/job/kill,null,AVAILABLE,@Spark} +1045 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@3402b4c9{/stages/stage/kill,null,AVAILABLE,@Spark} +1047 [main] INFO org.apache.spark.ui.SparkUI - Bound SparkUI to 0.0.0.0, and started at http://192.168.0.105:4040 +1142 [main] INFO org.apache.spark.executor.Executor - Starting executor ID driver on host localhost +1271 [main] INFO org.apache.spark.util.Utils - Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 45171. +1272 [main] INFO org.apache.spark.network.netty.NettyBlockTransferService - Server created on 192.168.0.105:45171 +1273 [main] INFO org.apache.spark.storage.BlockManager - Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy +1294 [main] INFO org.apache.spark.storage.BlockManagerMaster - Registering BlockManager BlockManagerId(driver, 192.168.0.105, 45171, None) +1296 [dispatcher-event-loop-2] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Registering block manager 192.168.0.105:45171 with 1946.1 MB RAM, BlockManagerId(driver, 192.168.0.105, 45171, None) +1298 [main] INFO org.apache.spark.storage.BlockManagerMaster - Registered BlockManager BlockManagerId(driver, 192.168.0.105, 45171, None) +1298 [main] INFO org.apache.spark.storage.BlockManager - Initialized BlockManager: BlockManagerId(driver, 192.168.0.105, 45171, None) +1426 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@43e9089{/metrics/json,null,AVAILABLE,@Spark} +0 [main] WARN org.apache.spark.util.Utils - Your hostname, pop-os resolves to a loopback address: 127.0.1.1; using 192.168.0.105 instead (on interface enp0s25) +1 [main] WARN org.apache.spark.util.Utils - Set SPARK_LOCAL_IP if you need to bind to another address +31 [main] INFO org.apache.spark.SparkContext - Running Spark version 2.4.0 +239 [main] WARN org.apache.hadoop.util.NativeCodeLoader - Unable to load native-hadoop library for your platform... 
using builtin-java classes where applicable +341 [main] INFO org.apache.spark.SparkContext - Submitted application: Databus Client Converter +391 [main] INFO org.apache.spark.SecurityManager - Changing view acls to: eisenbahnplatte +392 [main] INFO org.apache.spark.SecurityManager - Changing modify acls to: eisenbahnplatte +392 [main] INFO org.apache.spark.SecurityManager - Changing view acls groups to: +392 [main] INFO org.apache.spark.SecurityManager - Changing modify acls groups to: +393 [main] INFO org.apache.spark.SecurityManager - SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(eisenbahnplatte); groups with view permissions: Set(); users with modify permissions: Set(eisenbahnplatte); groups with modify permissions: Set() +702 [main] INFO org.apache.spark.util.Utils - Successfully started service 'sparkDriver' on port 36983. +721 [main] INFO org.apache.spark.SparkEnv - Registering MapOutputTracker +735 [main] INFO org.apache.spark.SparkEnv - Registering BlockManagerMaster +737 [main] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information +738 [main] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - BlockManagerMasterEndpoint up +745 [main] INFO org.apache.spark.storage.DiskBlockManager - Created local directory at /tmp/blockmgr-9cdd50d5-faf9-4391-91a3-5add6a4214c6 +761 [main] INFO org.apache.spark.storage.memory.MemoryStore - MemoryStore started with capacity 1946.1 MB +771 [main] INFO org.apache.spark.SparkEnv - Registering OutputCommitCoordinator +831 [main] INFO org.spark_project.jetty.util.log - Logging initialized @2486ms +883 [main] INFO org.spark_project.jetty.server.Server - jetty-9.3.z-SNAPSHOT, build timestamp: unknown, git hash: unknown +898 [main] INFO org.spark_project.jetty.server.Server - Started @2551ms +915 [main] INFO org.spark_project.jetty.server.AbstractConnector - Started ServerConnector@1de9d54{HTTP/1.1,[http/1.1]}{0.0.0.0:4040} +915 [main] INFO org.apache.spark.util.Utils - Successfully started service 'SparkUI' on port 4040. 
+933 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@42210be1{/jobs,null,AVAILABLE,@Spark} +934 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@409986fe{/jobs/json,null,AVAILABLE,@Spark} +934 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@19b047fe{/jobs/job,null,AVAILABLE,@Spark} +936 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@53dad875{/jobs/job/json,null,AVAILABLE,@Spark} +936 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@5f780a86{/stages,null,AVAILABLE,@Spark} +937 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@446c3920{/stages/json,null,AVAILABLE,@Spark} +938 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@2eaef76d{/stages/stage,null,AVAILABLE,@Spark} +940 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@118102ee{/stages/stage/json,null,AVAILABLE,@Spark} +940 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@48cd9a2c{/stages/pool,null,AVAILABLE,@Spark} +941 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@771d1ffb{/stages/pool/json,null,AVAILABLE,@Spark} +942 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@4f67e3df{/storage,null,AVAILABLE,@Spark} +943 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@56681eaf{/storage/json,null,AVAILABLE,@Spark} +943 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@72d0f2b4{/storage/rdd,null,AVAILABLE,@Spark} +944 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@6d2dc9d2{/storage/rdd/json,null,AVAILABLE,@Spark} +945 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@1da4b6b3{/environment,null,AVAILABLE,@Spark} +946 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@b2f4ece{/environment/json,null,AVAILABLE,@Spark} +948 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7e1f584d{/executors,null,AVAILABLE,@Spark} +949 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7dff6d05{/executors/json,null,AVAILABLE,@Spark} +949 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@45d64d27{/executors/threadDump,null,AVAILABLE,@Spark} +950 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@34fe326d{/executors/threadDump/json,null,AVAILABLE,@Spark} +958 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@30a7c98f{/static,null,AVAILABLE,@Spark} +959 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@47b179d7{/,null,AVAILABLE,@Spark} +960 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started 
o.s.j.s.ServletContextHandler@39008c9f{/api,null,AVAILABLE,@Spark} +961 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@95bb2a2{/jobs/job/kill,null,AVAILABLE,@Spark} +962 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@46d567cb{/stages/stage/kill,null,AVAILABLE,@Spark} +964 [main] INFO org.apache.spark.ui.SparkUI - Bound SparkUI to 0.0.0.0, and started at http://192.168.0.105:4040 +1043 [main] INFO org.apache.spark.executor.Executor - Starting executor ID driver on host localhost +1139 [main] INFO org.apache.spark.util.Utils - Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 41259. +1139 [main] INFO org.apache.spark.network.netty.NettyBlockTransferService - Server created on 192.168.0.105:41259 +1140 [main] INFO org.apache.spark.storage.BlockManager - Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy +1160 [main] INFO org.apache.spark.storage.BlockManagerMaster - Registering BlockManager BlockManagerId(driver, 192.168.0.105, 41259, None) +1163 [dispatcher-event-loop-2] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Registering block manager 192.168.0.105:41259 with 1946.1 MB RAM, BlockManagerId(driver, 192.168.0.105, 41259, None) +1165 [main] INFO org.apache.spark.storage.BlockManagerMaster - Registered BlockManager BlockManagerId(driver, 192.168.0.105, 41259, None) +1166 [main] INFO org.apache.spark.storage.BlockManager - Initialized BlockManager: BlockManagerId(driver, 192.168.0.105, 41259, None) +1284 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@352c44a8{/metrics/json,null,AVAILABLE,@Spark} +0 [main] WARN org.apache.spark.util.Utils - Your hostname, pop-os resolves to a loopback address: 127.0.1.1; using 192.168.0.105 instead (on interface enp0s25) +1 [main] WARN org.apache.spark.util.Utils - Set SPARK_LOCAL_IP if you need to bind to another address +30 [main] INFO org.apache.spark.SparkContext - Running Spark version 2.4.0 +213 [main] WARN org.apache.hadoop.util.NativeCodeLoader - Unable to load native-hadoop library for your platform... using builtin-java classes where applicable +327 [main] INFO org.apache.spark.SparkContext - Submitted application: Databus Client Converter +379 [main] INFO org.apache.spark.SecurityManager - Changing view acls to: eisenbahnplatte +379 [main] INFO org.apache.spark.SecurityManager - Changing modify acls to: eisenbahnplatte +380 [main] INFO org.apache.spark.SecurityManager - Changing view acls groups to: +380 [main] INFO org.apache.spark.SecurityManager - Changing modify acls groups to: +380 [main] INFO org.apache.spark.SecurityManager - SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(eisenbahnplatte); groups with view permissions: Set(); users with modify permissions: Set(eisenbahnplatte); groups with modify permissions: Set() +693 [main] INFO org.apache.spark.util.Utils - Successfully started service 'sparkDriver' on port 43603. 
+712 [main] INFO org.apache.spark.SparkEnv - Registering MapOutputTracker +727 [main] INFO org.apache.spark.SparkEnv - Registering BlockManagerMaster +730 [main] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information +730 [main] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - BlockManagerMasterEndpoint up +738 [main] INFO org.apache.spark.storage.DiskBlockManager - Created local directory at /tmp/blockmgr-201d9784-35a8-4b52-bca3-2e258618b1be +753 [main] INFO org.apache.spark.storage.memory.MemoryStore - MemoryStore started with capacity 1946.1 MB +762 [main] INFO org.apache.spark.SparkEnv - Registering OutputCommitCoordinator +819 [main] INFO org.spark_project.jetty.util.log - Logging initialized @2471ms +869 [main] INFO org.spark_project.jetty.server.Server - jetty-9.3.z-SNAPSHOT, build timestamp: unknown, git hash: unknown +881 [main] INFO org.spark_project.jetty.server.Server - Started @2534ms +895 [main] INFO org.spark_project.jetty.server.AbstractConnector - Started ServerConnector@65eabaab{HTTP/1.1,[http/1.1]}{0.0.0.0:4040} +895 [main] INFO org.apache.spark.util.Utils - Successfully started service 'SparkUI' on port 4040. +913 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@19058533{/jobs,null,AVAILABLE,@Spark} +914 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@3491e86e{/jobs/json,null,AVAILABLE,@Spark} +914 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@68f32020{/jobs/job,null,AVAILABLE,@Spark} +915 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@19b047fe{/jobs/job/json,null,AVAILABLE,@Spark} +916 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@22590e3e{/stages,null,AVAILABLE,@Spark} +917 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@53dad875{/stages/json,null,AVAILABLE,@Spark} +917 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@5f780a86{/stages/stage,null,AVAILABLE,@Spark} +919 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@2b329bbd{/stages/stage/json,null,AVAILABLE,@Spark} +920 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@34819867{/stages/pool,null,AVAILABLE,@Spark} +920 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@118102ee{/stages/pool/json,null,AVAILABLE,@Spark} +921 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@48cd9a2c{/storage,null,AVAILABLE,@Spark} +921 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@771d1ffb{/storage/json,null,AVAILABLE,@Spark} +922 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@4f67e3df{/storage/rdd,null,AVAILABLE,@Spark} +922 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@56681eaf{/storage/rdd/json,null,AVAILABLE,@Spark} +923 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started 
o.s.j.s.ServletContextHandler@72d0f2b4{/environment,null,AVAILABLE,@Spark} +924 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@6d2dc9d2{/environment/json,null,AVAILABLE,@Spark} +924 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@1da4b6b3{/executors,null,AVAILABLE,@Spark} +925 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@b2f4ece{/executors/json,null,AVAILABLE,@Spark} +925 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7e1f584d{/executors/threadDump,null,AVAILABLE,@Spark} +926 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7dff6d05{/executors/threadDump/json,null,AVAILABLE,@Spark} +932 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@45d64d27{/static,null,AVAILABLE,@Spark} +933 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@1816e24a{/,null,AVAILABLE,@Spark} +934 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@6940f685{/api,null,AVAILABLE,@Spark} +934 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@783115d9{/jobs/job/kill,null,AVAILABLE,@Spark} +935 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@3402b4c9{/stages/stage/kill,null,AVAILABLE,@Spark} +937 [main] INFO org.apache.spark.ui.SparkUI - Bound SparkUI to 0.0.0.0, and started at http://192.168.0.105:4040 +1008 [main] INFO org.apache.spark.executor.Executor - Starting executor ID driver on host localhost +1107 [main] INFO org.apache.spark.util.Utils - Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 38437. +1107 [main] INFO org.apache.spark.network.netty.NettyBlockTransferService - Server created on 192.168.0.105:38437 +1108 [main] INFO org.apache.spark.storage.BlockManager - Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy +1128 [main] INFO org.apache.spark.storage.BlockManagerMaster - Registering BlockManager BlockManagerId(driver, 192.168.0.105, 38437, None) +1130 [dispatcher-event-loop-2] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Registering block manager 192.168.0.105:38437 with 1946.1 MB RAM, BlockManagerId(driver, 192.168.0.105, 38437, None) +1132 [main] INFO org.apache.spark.storage.BlockManagerMaster - Registered BlockManager BlockManagerId(driver, 192.168.0.105, 38437, None) +1132 [main] INFO org.apache.spark.storage.BlockManager - Initialized BlockManager: BlockManagerId(driver, 192.168.0.105, 38437, None) +1259 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@43e9089{/metrics/json,null,AVAILABLE,@Spark} +0 [main] WARN org.apache.spark.util.Utils - Your hostname, pop-os resolves to a loopback address: 127.0.1.1; using 192.168.0.105 instead (on interface enp0s25) +1 [main] WARN org.apache.spark.util.Utils - Set SPARK_LOCAL_IP if you need to bind to another address +30 [main] INFO org.apache.spark.SparkContext - Running Spark version 2.4.0 +229 [main] WARN org.apache.hadoop.util.NativeCodeLoader - Unable to load native-hadoop library for your platform... 
using builtin-java classes where applicable +342 [main] INFO org.apache.spark.SparkContext - Submitted application: Databus Client Converter +390 [main] INFO org.apache.spark.SecurityManager - Changing view acls to: eisenbahnplatte +390 [main] INFO org.apache.spark.SecurityManager - Changing modify acls to: eisenbahnplatte +391 [main] INFO org.apache.spark.SecurityManager - Changing view acls groups to: +391 [main] INFO org.apache.spark.SecurityManager - Changing modify acls groups to: +391 [main] INFO org.apache.spark.SecurityManager - SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(eisenbahnplatte); groups with view permissions: Set(); users with modify permissions: Set(eisenbahnplatte); groups with modify permissions: Set() +690 [main] INFO org.apache.spark.util.Utils - Successfully started service 'sparkDriver' on port 37249. +711 [main] INFO org.apache.spark.SparkEnv - Registering MapOutputTracker +725 [main] INFO org.apache.spark.SparkEnv - Registering BlockManagerMaster +728 [main] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information +728 [main] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - BlockManagerMasterEndpoint up +735 [main] INFO org.apache.spark.storage.DiskBlockManager - Created local directory at /tmp/blockmgr-7754e936-f097-4a6c-a6c8-5049a2aed848 +750 [main] INFO org.apache.spark.storage.memory.MemoryStore - MemoryStore started with capacity 1946.1 MB +760 [main] INFO org.apache.spark.SparkEnv - Registering OutputCommitCoordinator +822 [main] INFO org.spark_project.jetty.util.log - Logging initialized @2495ms +871 [main] INFO org.spark_project.jetty.server.Server - jetty-9.3.z-SNAPSHOT, build timestamp: unknown, git hash: unknown +884 [main] INFO org.spark_project.jetty.server.Server - Started @2557ms +897 [main] INFO org.spark_project.jetty.server.AbstractConnector - Started ServerConnector@65eabaab{HTTP/1.1,[http/1.1]}{0.0.0.0:4040} +897 [main] INFO org.apache.spark.util.Utils - Successfully started service 'SparkUI' on port 4040. 
+916 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@19058533{/jobs,null,AVAILABLE,@Spark} +916 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@3491e86e{/jobs/json,null,AVAILABLE,@Spark} +917 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@68f32020{/jobs/job,null,AVAILABLE,@Spark} +918 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@19b047fe{/jobs/job/json,null,AVAILABLE,@Spark} +919 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@22590e3e{/stages,null,AVAILABLE,@Spark} +920 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@53dad875{/stages/json,null,AVAILABLE,@Spark} +921 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@5f780a86{/stages/stage,null,AVAILABLE,@Spark} +922 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@2b329bbd{/stages/stage/json,null,AVAILABLE,@Spark} +923 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@34819867{/stages/pool,null,AVAILABLE,@Spark} +924 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@118102ee{/stages/pool/json,null,AVAILABLE,@Spark} +924 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@48cd9a2c{/storage,null,AVAILABLE,@Spark} +925 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@771d1ffb{/storage/json,null,AVAILABLE,@Spark} +926 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@4f67e3df{/storage/rdd,null,AVAILABLE,@Spark} +927 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@56681eaf{/storage/rdd/json,null,AVAILABLE,@Spark} +928 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@72d0f2b4{/environment,null,AVAILABLE,@Spark} +928 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@6d2dc9d2{/environment/json,null,AVAILABLE,@Spark} +929 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@1da4b6b3{/executors,null,AVAILABLE,@Spark} +930 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@b2f4ece{/executors/json,null,AVAILABLE,@Spark} +931 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7e1f584d{/executors/threadDump,null,AVAILABLE,@Spark} +932 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7dff6d05{/executors/threadDump/json,null,AVAILABLE,@Spark} +939 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@45d64d27{/static,null,AVAILABLE,@Spark} +940 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@1816e24a{/,null,AVAILABLE,@Spark} +941 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started 
o.s.j.s.ServletContextHandler@6940f685{/api,null,AVAILABLE,@Spark} +942 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@783115d9{/jobs/job/kill,null,AVAILABLE,@Spark} +943 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@3402b4c9{/stages/stage/kill,null,AVAILABLE,@Spark} +945 [main] INFO org.apache.spark.ui.SparkUI - Bound SparkUI to 0.0.0.0, and started at http://192.168.0.105:4040 +1028 [main] INFO org.apache.spark.executor.Executor - Starting executor ID driver on host localhost +1127 [main] INFO org.apache.spark.util.Utils - Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 37515. +1128 [main] INFO org.apache.spark.network.netty.NettyBlockTransferService - Server created on 192.168.0.105:37515 +1129 [main] INFO org.apache.spark.storage.BlockManager - Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy +1148 [main] INFO org.apache.spark.storage.BlockManagerMaster - Registering BlockManager BlockManagerId(driver, 192.168.0.105, 37515, None) +1150 [dispatcher-event-loop-2] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Registering block manager 192.168.0.105:37515 with 1946.1 MB RAM, BlockManagerId(driver, 192.168.0.105, 37515, None) +1152 [main] INFO org.apache.spark.storage.BlockManagerMaster - Registered BlockManager BlockManagerId(driver, 192.168.0.105, 37515, None) +1152 [main] INFO org.apache.spark.storage.BlockManager - Initialized BlockManager: BlockManagerId(driver, 192.168.0.105, 37515, None) +1270 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@43e9089{/metrics/json,null,AVAILABLE,@Spark} +5766 [Executor task launch worker for task 72] ERROR org.apache.spark.util.Utils - Aborting task +java.lang.UnsupportedOperationException: 8428428870bcba005c45c2db48d5979d is not a URI node + at org.apache.jena.graph.Node.getURI(Node.java:166) + at org.dbpedia.databus.client.filehandling.convert.format.mapping.RDF_Triples_Mapper$.convertAllTriplesOfSubjectToTSV(RDF_Triples_Mapper.scala:124) + at org.dbpedia.databus.client.filehandling.convert.format.mapping.RDF_Triples_Mapper$$anonfun$8.apply(RDF_Triples_Mapper.scala:67) + at org.dbpedia.databus.client.filehandling.convert.format.mapping.RDF_Triples_Mapper$$anonfun$8.apply(RDF_Triples_Mapper.scala:66) + at scala.collection.Iterator$$anon$11.next(Iterator.scala:410) + at scala.collection.Iterator$$anon$11.next(Iterator.scala:410) + at scala.collection.Iterator$$anon$11.next(Iterator.scala:410) + at scala.collection.Iterator$$anon$12.next(Iterator.scala:445) + at org.apache.spark.sql.execution.datasources.FileFormatWriter$$anonfun$org$apache$spark$sql$execution$datasources$FileFormatWriter$$executeTask$3.apply(FileFormatWriter.scala:242) + at org.apache.spark.sql.execution.datasources.FileFormatWriter$$anonfun$org$apache$spark$sql$execution$datasources$FileFormatWriter$$executeTask$3.apply(FileFormatWriter.scala:239) + at org.apache.spark.util.Utils$.tryWithSafeFinallyAndFailureCallbacks(Utils.scala:1394) + at org.apache.spark.sql.execution.datasources.FileFormatWriter$.org$apache$spark$sql$execution$datasources$FileFormatWriter$$executeTask(FileFormatWriter.scala:245) + at org.apache.spark.sql.execution.datasources.FileFormatWriter$$anonfun$write$1.apply(FileFormatWriter.scala:169) + at 
org.apache.spark.sql.execution.datasources.FileFormatWriter$$anonfun$write$1.apply(FileFormatWriter.scala:168) + at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90) + at org.apache.spark.scheduler.Task.run(Task.scala:121) + at org.apache.spark.executor.Executor$TaskRunner$$anonfun$10.apply(Executor.scala:402) + at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360) + at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:408) + at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) + at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) + at java.lang.Thread.run(Thread.java:748) +5774 [Executor task launch worker for task 72] ERROR org.apache.spark.sql.execution.datasources.FileFormatWriter - Job job_20210825194055_0009 aborted. +5775 [Executor task launch worker for task 72] ERROR org.apache.spark.executor.Executor - Exception in task 0.0 in stage 9.0 (TID 72) +org.apache.spark.SparkException: Task failed while writing rows. + at org.apache.spark.sql.execution.datasources.FileFormatWriter$.org$apache$spark$sql$execution$datasources$FileFormatWriter$$executeTask(FileFormatWriter.scala:254) + at org.apache.spark.sql.execution.datasources.FileFormatWriter$$anonfun$write$1.apply(FileFormatWriter.scala:169) + at org.apache.spark.sql.execution.datasources.FileFormatWriter$$anonfun$write$1.apply(FileFormatWriter.scala:168) + at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90) + at org.apache.spark.scheduler.Task.run(Task.scala:121) + at org.apache.spark.executor.Executor$TaskRunner$$anonfun$10.apply(Executor.scala:402) + at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360) + at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:408) + at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) + at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) + at java.lang.Thread.run(Thread.java:748) +Caused by: java.lang.UnsupportedOperationException: 8428428870bcba005c45c2db48d5979d is not a URI node + at org.apache.jena.graph.Node.getURI(Node.java:166) + at org.dbpedia.databus.client.filehandling.convert.format.mapping.RDF_Triples_Mapper$.convertAllTriplesOfSubjectToTSV(RDF_Triples_Mapper.scala:124) + at org.dbpedia.databus.client.filehandling.convert.format.mapping.RDF_Triples_Mapper$$anonfun$8.apply(RDF_Triples_Mapper.scala:67) + at org.dbpedia.databus.client.filehandling.convert.format.mapping.RDF_Triples_Mapper$$anonfun$8.apply(RDF_Triples_Mapper.scala:66) + at scala.collection.Iterator$$anon$11.next(Iterator.scala:410) + at scala.collection.Iterator$$anon$11.next(Iterator.scala:410) + at scala.collection.Iterator$$anon$11.next(Iterator.scala:410) + at scala.collection.Iterator$$anon$12.next(Iterator.scala:445) + at org.apache.spark.sql.execution.datasources.FileFormatWriter$$anonfun$org$apache$spark$sql$execution$datasources$FileFormatWriter$$executeTask$3.apply(FileFormatWriter.scala:242) + at org.apache.spark.sql.execution.datasources.FileFormatWriter$$anonfun$org$apache$spark$sql$execution$datasources$FileFormatWriter$$executeTask$3.apply(FileFormatWriter.scala:239) + at org.apache.spark.util.Utils$.tryWithSafeFinallyAndFailureCallbacks(Utils.scala:1394) + at org.apache.spark.sql.execution.datasources.FileFormatWriter$.org$apache$spark$sql$execution$datasources$FileFormatWriter$$executeTask(FileFormatWriter.scala:245) + ... 
10 more +5792 [task-result-getter-2] WARN org.apache.spark.scheduler.TaskSetManager - Lost task 0.0 in stage 9.0 (TID 72, localhost, executor driver): org.apache.spark.SparkException: Task failed while writing rows. + at org.apache.spark.sql.execution.datasources.FileFormatWriter$.org$apache$spark$sql$execution$datasources$FileFormatWriter$$executeTask(FileFormatWriter.scala:254) + at org.apache.spark.sql.execution.datasources.FileFormatWriter$$anonfun$write$1.apply(FileFormatWriter.scala:169) + at org.apache.spark.sql.execution.datasources.FileFormatWriter$$anonfun$write$1.apply(FileFormatWriter.scala:168) + at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90) + at org.apache.spark.scheduler.Task.run(Task.scala:121) + at org.apache.spark.executor.Executor$TaskRunner$$anonfun$10.apply(Executor.scala:402) + at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360) + at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:408) + at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) + at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) + at java.lang.Thread.run(Thread.java:748) +Caused by: java.lang.UnsupportedOperationException: 8428428870bcba005c45c2db48d5979d is not a URI node + at org.apache.jena.graph.Node.getURI(Node.java:166) + at org.dbpedia.databus.client.filehandling.convert.format.mapping.RDF_Triples_Mapper$.convertAllTriplesOfSubjectToTSV(RDF_Triples_Mapper.scala:124) + at org.dbpedia.databus.client.filehandling.convert.format.mapping.RDF_Triples_Mapper$$anonfun$8.apply(RDF_Triples_Mapper.scala:67) + at org.dbpedia.databus.client.filehandling.convert.format.mapping.RDF_Triples_Mapper$$anonfun$8.apply(RDF_Triples_Mapper.scala:66) + at scala.collection.Iterator$$anon$11.next(Iterator.scala:410) + at scala.collection.Iterator$$anon$11.next(Iterator.scala:410) + at scala.collection.Iterator$$anon$11.next(Iterator.scala:410) + at scala.collection.Iterator$$anon$12.next(Iterator.scala:445) + at org.apache.spark.sql.execution.datasources.FileFormatWriter$$anonfun$org$apache$spark$sql$execution$datasources$FileFormatWriter$$executeTask$3.apply(FileFormatWriter.scala:242) + at org.apache.spark.sql.execution.datasources.FileFormatWriter$$anonfun$org$apache$spark$sql$execution$datasources$FileFormatWriter$$executeTask$3.apply(FileFormatWriter.scala:239) + at org.apache.spark.util.Utils$.tryWithSafeFinallyAndFailureCallbacks(Utils.scala:1394) + at org.apache.spark.sql.execution.datasources.FileFormatWriter$.org$apache$spark$sql$execution$datasources$FileFormatWriter$$executeTask(FileFormatWriter.scala:245) + ... 10 more + +5793 [task-result-getter-2] ERROR org.apache.spark.scheduler.TaskSetManager - Task 0 in stage 9.0 failed 1 times; aborting job +5799 [main] ERROR org.apache.spark.sql.execution.datasources.FileFormatWriter - Aborting job 40f0efd4-2fe4-4c45-bc0d-186f0140fc77. +org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 9.0 failed 1 times, most recent failure: Lost task 0.0 in stage 9.0 (TID 72, localhost, executor driver): org.apache.spark.SparkException: Task failed while writing rows. 
+ at org.apache.spark.sql.execution.datasources.FileFormatWriter$.org$apache$spark$sql$execution$datasources$FileFormatWriter$$executeTask(FileFormatWriter.scala:254) + at org.apache.spark.sql.execution.datasources.FileFormatWriter$$anonfun$write$1.apply(FileFormatWriter.scala:169) + at org.apache.spark.sql.execution.datasources.FileFormatWriter$$anonfun$write$1.apply(FileFormatWriter.scala:168) + at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90) + at org.apache.spark.scheduler.Task.run(Task.scala:121) + at org.apache.spark.executor.Executor$TaskRunner$$anonfun$10.apply(Executor.scala:402) + at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360) + at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:408) + at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) + at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) + at java.lang.Thread.run(Thread.java:748) +Caused by: java.lang.UnsupportedOperationException: 8428428870bcba005c45c2db48d5979d is not a URI node + at org.apache.jena.graph.Node.getURI(Node.java:166) + at org.dbpedia.databus.client.filehandling.convert.format.mapping.RDF_Triples_Mapper$.convertAllTriplesOfSubjectToTSV(RDF_Triples_Mapper.scala:124) + at org.dbpedia.databus.client.filehandling.convert.format.mapping.RDF_Triples_Mapper$$anonfun$8.apply(RDF_Triples_Mapper.scala:67) + at org.dbpedia.databus.client.filehandling.convert.format.mapping.RDF_Triples_Mapper$$anonfun$8.apply(RDF_Triples_Mapper.scala:66) + at scala.collection.Iterator$$anon$11.next(Iterator.scala:410) + at scala.collection.Iterator$$anon$11.next(Iterator.scala:410) + at scala.collection.Iterator$$anon$11.next(Iterator.scala:410) + at scala.collection.Iterator$$anon$12.next(Iterator.scala:445) + at org.apache.spark.sql.execution.datasources.FileFormatWriter$$anonfun$org$apache$spark$sql$execution$datasources$FileFormatWriter$$executeTask$3.apply(FileFormatWriter.scala:242) + at org.apache.spark.sql.execution.datasources.FileFormatWriter$$anonfun$org$apache$spark$sql$execution$datasources$FileFormatWriter$$executeTask$3.apply(FileFormatWriter.scala:239) + at org.apache.spark.util.Utils$.tryWithSafeFinallyAndFailureCallbacks(Utils.scala:1394) + at org.apache.spark.sql.execution.datasources.FileFormatWriter$.org$apache$spark$sql$execution$datasources$FileFormatWriter$$executeTask(FileFormatWriter.scala:245) + ... 
10 more + +Driver stacktrace: + at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1887) + at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1875) + at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1874) + at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59) + at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48) + at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1874) + at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:926) + at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:926) + at scala.Option.foreach(Option.scala:257) + at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:926) + at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:2108) + at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2057) + at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2046) + at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:49) + at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:737) + at org.apache.spark.SparkContext.runJob(SparkContext.scala:2061) + at org.apache.spark.sql.execution.datasources.FileFormatWriter$.write(FileFormatWriter.scala:166) + at org.apache.spark.sql.execution.datasources.InsertIntoHadoopFsRelationCommand.run(InsertIntoHadoopFsRelationCommand.scala:159) + at org.apache.spark.sql.execution.command.DataWritingCommandExec.sideEffectResult$lzycompute(commands.scala:104) + at org.apache.spark.sql.execution.command.DataWritingCommandExec.sideEffectResult(commands.scala:102) + at org.apache.spark.sql.execution.command.DataWritingCommandExec.doExecute(commands.scala:122) + at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131) + at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127) + at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155) + at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151) + at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152) + at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127) + at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:80) + at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:80) + at org.apache.spark.sql.DataFrameWriter$$anonfun$runCommand$1.apply(DataFrameWriter.scala:668) + at org.apache.spark.sql.DataFrameWriter$$anonfun$runCommand$1.apply(DataFrameWriter.scala:668) + at org.apache.spark.sql.execution.SQLExecution$$anonfun$withNewExecutionId$1.apply(SQLExecution.scala:78) + at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:125) + at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:73) + at org.apache.spark.sql.DataFrameWriter.runCommand(DataFrameWriter.scala:668) + at org.apache.spark.sql.DataFrameWriter.saveToV1Source(DataFrameWriter.scala:276) + at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:270) + at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:228) + at 
org.apache.spark.sql.DataFrameWriter.csv(DataFrameWriter.scala:656) + at org.dbpedia.databus.client.filehandling.convert.format.tsd.format.CSV.write(CSV.scala:26) + at org.dbpedia.databus.client.filehandling.convert.format.tsd.format.TSV.write(TSV.scala:17) + at org.dbpedia.databus.client.filehandling.convert.format.tsd.TSDHandler.write(TSDHandler.scala:36) + at org.dbpedia.databus.client.filehandling.convert.FormatConverter$$anonfun$convert$2.apply(FormatConverter.scala:120) + at org.dbpedia.databus.client.filehandling.convert.FormatConverter$$anonfun$convert$2.apply(FormatConverter.scala:119) + at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33) + at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186) + at org.dbpedia.databus.client.filehandling.convert.FormatConverter$.convert(FormatConverter.scala:119) + at org.dbpedia.databus.client.filehandling.FileHandler.handleFile(FileHandler.scala:75) + at org.dbpedia.databus.client.filehandling.SourceHandler.handleSource(SourceHandler.scala:64) + at org.dbpedia.databus.client.filehandling.SourceHandler.execute(SourceHandler.scala:33) + at org.dbpedia.databus.client.main.Main$.main(Main.scala:18) + at org.dbpedia.databus.client.main.Main.main(Main.scala) +Caused by: org.apache.spark.SparkException: Task failed while writing rows. + at org.apache.spark.sql.execution.datasources.FileFormatWriter$.org$apache$spark$sql$execution$datasources$FileFormatWriter$$executeTask(FileFormatWriter.scala:254) + at org.apache.spark.sql.execution.datasources.FileFormatWriter$$anonfun$write$1.apply(FileFormatWriter.scala:169) + at org.apache.spark.sql.execution.datasources.FileFormatWriter$$anonfun$write$1.apply(FileFormatWriter.scala:168) + at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90) + at org.apache.spark.scheduler.Task.run(Task.scala:121) + at org.apache.spark.executor.Executor$TaskRunner$$anonfun$10.apply(Executor.scala:402) + at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360) + at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:408) + at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) + at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) + at java.lang.Thread.run(Thread.java:748) +Caused by: java.lang.UnsupportedOperationException: 8428428870bcba005c45c2db48d5979d is not a URI node + at org.apache.jena.graph.Node.getURI(Node.java:166) + at org.dbpedia.databus.client.filehandling.convert.format.mapping.RDF_Triples_Mapper$.convertAllTriplesOfSubjectToTSV(RDF_Triples_Mapper.scala:124) + at org.dbpedia.databus.client.filehandling.convert.format.mapping.RDF_Triples_Mapper$$anonfun$8.apply(RDF_Triples_Mapper.scala:67) + at org.dbpedia.databus.client.filehandling.convert.format.mapping.RDF_Triples_Mapper$$anonfun$8.apply(RDF_Triples_Mapper.scala:66) + at scala.collection.Iterator$$anon$11.next(Iterator.scala:410) + at scala.collection.Iterator$$anon$11.next(Iterator.scala:410) + at scala.collection.Iterator$$anon$11.next(Iterator.scala:410) + at scala.collection.Iterator$$anon$12.next(Iterator.scala:445) + at org.apache.spark.sql.execution.datasources.FileFormatWriter$$anonfun$org$apache$spark$sql$execution$datasources$FileFormatWriter$$executeTask$3.apply(FileFormatWriter.scala:242) + at org.apache.spark.sql.execution.datasources.FileFormatWriter$$anonfun$org$apache$spark$sql$execution$datasources$FileFormatWriter$$executeTask$3.apply(FileFormatWriter.scala:239) + at 
org.apache.spark.util.Utils$.tryWithSafeFinallyAndFailureCallbacks(Utils.scala:1394) + at org.apache.spark.sql.execution.datasources.FileFormatWriter$.org$apache$spark$sql$execution$datasources$FileFormatWriter$$executeTask(FileFormatWriter.scala:245) + ... 10 more +0 [main] WARN org.apache.spark.util.Utils - Your hostname, pop-os resolves to a loopback address: 127.0.1.1; using 192.168.0.105 instead (on interface enp0s25) +1 [main] WARN org.apache.spark.util.Utils - Set SPARK_LOCAL_IP if you need to bind to another address +30 [main] INFO org.apache.spark.SparkContext - Running Spark version 2.4.0 +202 [main] WARN org.apache.hadoop.util.NativeCodeLoader - Unable to load native-hadoop library for your platform... using builtin-java classes where applicable +319 [main] INFO org.apache.spark.SparkContext - Submitted application: Databus Client Converter +367 [main] INFO org.apache.spark.SecurityManager - Changing view acls to: eisenbahnplatte +368 [main] INFO org.apache.spark.SecurityManager - Changing modify acls to: eisenbahnplatte +368 [main] INFO org.apache.spark.SecurityManager - Changing view acls groups to: +368 [main] INFO org.apache.spark.SecurityManager - Changing modify acls groups to: +369 [main] INFO org.apache.spark.SecurityManager - SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(eisenbahnplatte); groups with view permissions: Set(); users with modify permissions: Set(eisenbahnplatte); groups with modify permissions: Set() +661 [main] INFO org.apache.spark.util.Utils - Successfully started service 'sparkDriver' on port 44823. +680 [main] INFO org.apache.spark.SparkEnv - Registering MapOutputTracker +694 [main] INFO org.apache.spark.SparkEnv - Registering BlockManagerMaster +696 [main] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information +696 [main] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - BlockManagerMasterEndpoint up +704 [main] INFO org.apache.spark.storage.DiskBlockManager - Created local directory at /tmp/blockmgr-f89f6828-a8b9-47f6-b3ae-6cb02a3649b2 +719 [main] INFO org.apache.spark.storage.memory.MemoryStore - MemoryStore started with capacity 1946.1 MB +728 [main] INFO org.apache.spark.SparkEnv - Registering OutputCommitCoordinator +786 [main] INFO org.spark_project.jetty.util.log - Logging initialized @2417ms +835 [main] INFO org.spark_project.jetty.server.Server - jetty-9.3.z-SNAPSHOT, build timestamp: unknown, git hash: unknown +850 [main] INFO org.spark_project.jetty.server.Server - Started @2479ms +863 [main] INFO org.spark_project.jetty.server.AbstractConnector - Started ServerConnector@54d2286e{HTTP/1.1,[http/1.1]}{0.0.0.0:4040} +863 [main] INFO org.apache.spark.util.Utils - Successfully started service 'SparkUI' on port 4040. 
+881 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@4315e9af{/jobs,null,AVAILABLE,@Spark} +881 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@68f32020{/jobs/json,null,AVAILABLE,@Spark} +882 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@409986fe{/jobs/job,null,AVAILABLE,@Spark} +883 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@22590e3e{/jobs/job/json,null,AVAILABLE,@Spark} +883 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@53dad875{/stages,null,AVAILABLE,@Spark} +884 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@5f780a86{/stages/json,null,AVAILABLE,@Spark} +884 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@446c3920{/stages/stage,null,AVAILABLE,@Spark} +885 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@34819867{/stages/stage/json,null,AVAILABLE,@Spark} +886 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@118102ee{/stages/pool,null,AVAILABLE,@Spark} +886 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@48cd9a2c{/stages/pool/json,null,AVAILABLE,@Spark} +887 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@771d1ffb{/storage,null,AVAILABLE,@Spark} +887 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@4f67e3df{/storage/json,null,AVAILABLE,@Spark} +888 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@56681eaf{/storage/rdd,null,AVAILABLE,@Spark} +888 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@72d0f2b4{/storage/rdd/json,null,AVAILABLE,@Spark} +889 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@6d2dc9d2{/environment,null,AVAILABLE,@Spark} +890 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@1da4b6b3{/environment/json,null,AVAILABLE,@Spark} +891 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@b2f4ece{/executors,null,AVAILABLE,@Spark} +891 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7e1f584d{/executors/json,null,AVAILABLE,@Spark} +892 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7dff6d05{/executors/threadDump,null,AVAILABLE,@Spark} +893 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@45d64d27{/executors/threadDump/json,null,AVAILABLE,@Spark} +901 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@34fe326d{/static,null,AVAILABLE,@Spark} +902 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@6940f685{/,null,AVAILABLE,@Spark} +903 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started 
o.s.j.s.ServletContextHandler@47b179d7{/api,null,AVAILABLE,@Spark} +903 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@3402b4c9{/jobs/job/kill,null,AVAILABLE,@Spark} +904 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@95bb2a2{/stages/stage/kill,null,AVAILABLE,@Spark} +905 [main] INFO org.apache.spark.ui.SparkUI - Bound SparkUI to 0.0.0.0, and started at http://192.168.0.105:4040 +970 [main] INFO org.apache.spark.executor.Executor - Starting executor ID driver on host localhost +1078 [main] INFO org.apache.spark.util.Utils - Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 44895. +1078 [main] INFO org.apache.spark.network.netty.NettyBlockTransferService - Server created on 192.168.0.105:44895 +1080 [main] INFO org.apache.spark.storage.BlockManager - Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy +1099 [main] INFO org.apache.spark.storage.BlockManagerMaster - Registering BlockManager BlockManagerId(driver, 192.168.0.105, 44895, None) +1101 [dispatcher-event-loop-2] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Registering block manager 192.168.0.105:44895 with 1946.1 MB RAM, BlockManagerId(driver, 192.168.0.105, 44895, None) +1103 [main] INFO org.apache.spark.storage.BlockManagerMaster - Registered BlockManager BlockManagerId(driver, 192.168.0.105, 44895, None) +1103 [main] INFO org.apache.spark.storage.BlockManager - Initialized BlockManager: BlockManagerId(driver, 192.168.0.105, 44895, None) +1219 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@3c5dbdf8{/metrics/json,null,AVAILABLE,@Spark} +5220 [Executor task launch worker for task 72] ERROR org.apache.spark.executor.Executor - Exception in task 0.0 in stage 9.0 (TID 72) +java.lang.UnsupportedOperationException: 65d730fb921f540564ecd68d213527cf is not a URI node + at org.apache.jena.graph.Node.getURI(Node.java:166) + at org.dbpedia.databus.client.filehandling.convert.format.mapping.RDF_Triples_Mapper$.convertAllTriplesOfSubjectToTSV(RDF_Triples_Mapper.scala:124) + at org.dbpedia.databus.client.filehandling.convert.format.mapping.RDF_Triples_Mapper$$anonfun$8.apply(RDF_Triples_Mapper.scala:67) + at org.dbpedia.databus.client.filehandling.convert.format.mapping.RDF_Triples_Mapper$$anonfun$8.apply(RDF_Triples_Mapper.scala:66) + at scala.collection.Iterator$$anon$11.next(Iterator.scala:410) + at scala.collection.Iterator$$anon$11.next(Iterator.scala:410) + at scala.collection.Iterator$$anon$11.next(Iterator.scala:410) + at org.apache.spark.sql.execution.SparkPlan$$anonfun$2.apply(SparkPlan.scala:256) + at org.apache.spark.sql.execution.SparkPlan$$anonfun$2.apply(SparkPlan.scala:247) + at org.apache.spark.rdd.RDD$$anonfun$mapPartitionsInternal$1$$anonfun$apply$24.apply(RDD.scala:836) + at org.apache.spark.rdd.RDD$$anonfun$mapPartitionsInternal$1$$anonfun$apply$24.apply(RDD.scala:836) + at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52) + at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324) + at org.apache.spark.rdd.RDD.iterator(RDD.scala:288) + at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52) + at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324) + at org.apache.spark.rdd.RDD.iterator(RDD.scala:288) + at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90) + at 
org.apache.spark.scheduler.Task.run(Task.scala:121) + at org.apache.spark.executor.Executor$TaskRunner$$anonfun$10.apply(Executor.scala:402) + at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360) + at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:408) + at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) + at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) + at java.lang.Thread.run(Thread.java:748) +5240 [task-result-getter-2] WARN org.apache.spark.scheduler.TaskSetManager - Lost task 0.0 in stage 9.0 (TID 72, localhost, executor driver): java.lang.UnsupportedOperationException: 65d730fb921f540564ecd68d213527cf is not a URI node + at org.apache.jena.graph.Node.getURI(Node.java:166) + at org.dbpedia.databus.client.filehandling.convert.format.mapping.RDF_Triples_Mapper$.convertAllTriplesOfSubjectToTSV(RDF_Triples_Mapper.scala:124) + at org.dbpedia.databus.client.filehandling.convert.format.mapping.RDF_Triples_Mapper$$anonfun$8.apply(RDF_Triples_Mapper.scala:67) + at org.dbpedia.databus.client.filehandling.convert.format.mapping.RDF_Triples_Mapper$$anonfun$8.apply(RDF_Triples_Mapper.scala:66) + at scala.collection.Iterator$$anon$11.next(Iterator.scala:410) + at scala.collection.Iterator$$anon$11.next(Iterator.scala:410) + at scala.collection.Iterator$$anon$11.next(Iterator.scala:410) + at org.apache.spark.sql.execution.SparkPlan$$anonfun$2.apply(SparkPlan.scala:256) + at org.apache.spark.sql.execution.SparkPlan$$anonfun$2.apply(SparkPlan.scala:247) + at org.apache.spark.rdd.RDD$$anonfun$mapPartitionsInternal$1$$anonfun$apply$24.apply(RDD.scala:836) + at org.apache.spark.rdd.RDD$$anonfun$mapPartitionsInternal$1$$anonfun$apply$24.apply(RDD.scala:836) + at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52) + at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324) + at org.apache.spark.rdd.RDD.iterator(RDD.scala:288) + at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52) + at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324) + at org.apache.spark.rdd.RDD.iterator(RDD.scala:288) + at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90) + at org.apache.spark.scheduler.Task.run(Task.scala:121) + at org.apache.spark.executor.Executor$TaskRunner$$anonfun$10.apply(Executor.scala:402) + at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360) + at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:408) + at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) + at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) + at java.lang.Thread.run(Thread.java:748) + +5241 [task-result-getter-2] ERROR org.apache.spark.scheduler.TaskSetManager - Task 0 in stage 9.0 failed 1 times; aborting job
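Note on the two aborted jobs above: both stack traces fail in RDF_Triples_Mapper.convertAllTriplesOfSubjectToTSV when org.apache.jena.graph.Node.getURI is called on a blank node. The hex strings 8428428870bcba005c45c2db48d5979d and 65d730fb921f540564ecd68d213527cf are blank-node labels, not IRIs, and Jena only permits getURI on URI nodes. Below is a minimal sketch of the kind of guard that avoids the exception; the helper name nodeToTsvValue is hypothetical and not taken from the client's code.

```scala
import org.apache.jena.graph.{Node, NodeFactory}

// Hypothetical helper (not the project's actual code): render any Jena Node
// as a TSV-safe string instead of calling getURI unconditionally.
// Node.getURI throws UnsupportedOperationException for blank nodes and
// literals, which is exactly the failure logged above.
def nodeToTsvValue(node: Node): String =
  if (node.isURI) node.getURI
  else if (node.isBlank) "_:" + node.getBlankNodeLabel
  else node.getLiteralLexicalForm

// A blank-node subject now maps to "_:<label>" instead of aborting the task.
println(nodeToTsvValue(NodeFactory.createBlankNode()))
```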
+0 [main] WARN org.apache.spark.util.Utils - Your hostname, pop-os resolves to a loopback address: 127.0.1.1; using 192.168.0.105 instead (on interface enp0s25) +2 [main] WARN org.apache.spark.util.Utils - Set SPARK_LOCAL_IP if you need to bind to another address +33 [main] INFO org.apache.spark.SparkContext - Running Spark version 2.4.0 +229 [main] WARN org.apache.hadoop.util.NativeCodeLoader - Unable to load native-hadoop library for your platform... using builtin-java classes where applicable +331 [main] INFO org.apache.spark.SparkContext - Submitted application: Databus Client Converter +390 [main] INFO org.apache.spark.SecurityManager - Changing view acls to: eisenbahnplatte +391 [main] INFO org.apache.spark.SecurityManager - Changing modify acls to: eisenbahnplatte +391 [main] INFO org.apache.spark.SecurityManager - Changing view acls groups to: +391 [main] INFO org.apache.spark.SecurityManager - Changing modify acls groups to: +392 [main] INFO org.apache.spark.SecurityManager - SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(eisenbahnplatte); groups with view permissions: Set(); users with modify permissions: Set(eisenbahnplatte); groups with modify permissions: Set() +729 [main] INFO org.apache.spark.util.Utils - Successfully started service 'sparkDriver' on port 40375.
+750 [main] INFO org.apache.spark.SparkEnv - Registering MapOutputTracker +767 [main] INFO org.apache.spark.SparkEnv - Registering BlockManagerMaster +770 [main] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information +771 [main] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - BlockManagerMasterEndpoint up +779 [main] INFO org.apache.spark.storage.DiskBlockManager - Created local directory at /tmp/blockmgr-93616f95-27ea-432e-8a7f-4553674a7658 +797 [main] INFO org.apache.spark.storage.memory.MemoryStore - MemoryStore started with capacity 1946.1 MB +810 [main] INFO org.apache.spark.SparkEnv - Registering OutputCommitCoordinator +884 [main] INFO org.spark_project.jetty.util.log - Logging initialized @2807ms +942 [main] INFO org.spark_project.jetty.server.Server - jetty-9.3.z-SNAPSHOT, build timestamp: unknown, git hash: unknown +956 [main] INFO org.spark_project.jetty.server.Server - Started @2880ms +974 [main] INFO org.spark_project.jetty.server.AbstractConnector - Started ServerConnector@13eaf8d{HTTP/1.1,[http/1.1]}{0.0.0.0:4040} +974 [main] INFO org.apache.spark.util.Utils - Successfully started service 'SparkUI' on port 4040. +991 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@4315e9af{/jobs,null,AVAILABLE,@Spark} +992 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@68f32020{/jobs/json,null,AVAILABLE,@Spark} +993 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@409986fe{/jobs/job,null,AVAILABLE,@Spark} +994 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@22590e3e{/jobs/job/json,null,AVAILABLE,@Spark} +995 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@53dad875{/stages,null,AVAILABLE,@Spark} +995 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@5f780a86{/stages/json,null,AVAILABLE,@Spark} +996 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@446c3920{/stages/stage,null,AVAILABLE,@Spark} +998 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@34819867{/stages/stage/json,null,AVAILABLE,@Spark} +998 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@118102ee{/stages/pool,null,AVAILABLE,@Spark} +999 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@48cd9a2c{/stages/pool/json,null,AVAILABLE,@Spark} +1000 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@771d1ffb{/storage,null,AVAILABLE,@Spark} +1001 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@4f67e3df{/storage/json,null,AVAILABLE,@Spark} +1002 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@56681eaf{/storage/rdd,null,AVAILABLE,@Spark} +1002 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@72d0f2b4{/storage/rdd/json,null,AVAILABLE,@Spark} +1003 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started 
o.s.j.s.ServletContextHandler@6d2dc9d2{/environment,null,AVAILABLE,@Spark} +1004 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@1da4b6b3{/environment/json,null,AVAILABLE,@Spark} +1005 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@b2f4ece{/executors,null,AVAILABLE,@Spark} +1005 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7e1f584d{/executors/json,null,AVAILABLE,@Spark} +1006 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7dff6d05{/executors/threadDump,null,AVAILABLE,@Spark} +1007 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@45d64d27{/executors/threadDump/json,null,AVAILABLE,@Spark} +1015 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@34fe326d{/static,null,AVAILABLE,@Spark} +1016 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@6940f685{/,null,AVAILABLE,@Spark} +1017 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@47b179d7{/api,null,AVAILABLE,@Spark} +1018 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@3402b4c9{/jobs/job/kill,null,AVAILABLE,@Spark} +1018 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@95bb2a2{/stages/stage/kill,null,AVAILABLE,@Spark} +1020 [main] INFO org.apache.spark.ui.SparkUI - Bound SparkUI to 0.0.0.0, and started at http://192.168.0.105:4040 +1093 [main] INFO org.apache.spark.executor.Executor - Starting executor ID driver on host localhost +1204 [main] INFO org.apache.spark.util.Utils - Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 46451. +1205 [main] INFO org.apache.spark.network.netty.NettyBlockTransferService - Server created on 192.168.0.105:46451 +1206 [main] INFO org.apache.spark.storage.BlockManager - Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy +1227 [main] INFO org.apache.spark.storage.BlockManagerMaster - Registering BlockManager BlockManagerId(driver, 192.168.0.105, 46451, None) +1229 [dispatcher-event-loop-2] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Registering block manager 192.168.0.105:46451 with 1946.1 MB RAM, BlockManagerId(driver, 192.168.0.105, 46451, None) +1231 [main] INFO org.apache.spark.storage.BlockManagerMaster - Registered BlockManager BlockManagerId(driver, 192.168.0.105, 46451, None) +1231 [main] INFO org.apache.spark.storage.BlockManager - Initialized BlockManager: BlockManagerId(driver, 192.168.0.105, 46451, None) +1360 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@3c5dbdf8{/metrics/json,null,AVAILABLE,@Spark} +6842 [main] WARN org.apache.spark.sql.Column - Constructing trivially true equals predicate, 'graph#19 = graph#19'. Perhaps you need to use aliases. 
+[... third run elided: same startup sequence, differing only in timestamps, ports, and object hashes ...]
+6675 [main] WARN org.apache.spark.sql.Column - Constructing trivially true equals predicate, 'graph#19 = graph#19'. Perhaps you need to use aliases.
+[... fourth run elided: same startup sequence ...]
+6614 [main] WARN org.apache.spark.sql.Column - Constructing trivially true equals predicate, 'resource#3 = resource#3'. Perhaps you need to use aliases.
+0 [main] WARN org.apache.spark.util.Utils - Your hostname, pop-os resolves to a loopback address: 127.0.1.1; using 192.168.0.105 instead (on interface enp0s25) +1 [main] WARN org.apache.spark.util.Utils - Set SPARK_LOCAL_IP if you need to bind to another address +29 [main] INFO org.apache.spark.SparkContext - Running Spark version 2.4.0 +216 [main] WARN org.apache.hadoop.util.NativeCodeLoader - Unable to load native-hadoop library for your platform... using builtin-java classes where applicable +337 [main] INFO org.apache.spark.SparkContext - Submitted application: Databus Client Converter +385 [main] INFO org.apache.spark.SecurityManager - Changing view acls to: eisenbahnplatte +385 [main] INFO org.apache.spark.SecurityManager - Changing modify acls to: eisenbahnplatte +385 [main] INFO org.apache.spark.SecurityManager - Changing view acls groups to: +386 [main] INFO org.apache.spark.SecurityManager - Changing modify acls groups to: +386 [main] INFO org.apache.spark.SecurityManager - SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(eisenbahnplatte); groups with view permissions: Set(); users with modify permissions: Set(eisenbahnplatte); groups with modify permissions: Set() +692 [main] INFO org.apache.spark.util.Utils - Successfully started service 'sparkDriver' on port 41713. +712 [main] INFO org.apache.spark.SparkEnv - Registering MapOutputTracker +725 [main] INFO org.apache.spark.SparkEnv - Registering BlockManagerMaster +728 [main] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information +728 [main] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - BlockManagerMasterEndpoint up +734 [main] INFO org.apache.spark.storage.DiskBlockManager - Created local directory at /tmp/blockmgr-e4e68924-1c3a-4951-b8ec-95e3ca401dc2 +748 [main] INFO org.apache.spark.storage.memory.MemoryStore - MemoryStore started with capacity 1946.1 MB +757 [main] INFO org.apache.spark.SparkEnv - Registering OutputCommitCoordinator +813 [main] INFO org.spark_project.jetty.util.log - Logging initialized @2545ms +862 [main] INFO org.spark_project.jetty.server.Server - jetty-9.3.z-SNAPSHOT, build timestamp: unknown, git hash: unknown +876 [main] INFO org.spark_project.jetty.server.Server - Started @2610ms +894 [main] INFO org.spark_project.jetty.server.AbstractConnector - Started ServerConnector@65eabaab{HTTP/1.1,[http/1.1]}{0.0.0.0:4040} +894 [main] INFO org.apache.spark.util.Utils - Successfully started service 'SparkUI' on port 4040. 
+915 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@19058533{/jobs,null,AVAILABLE,@Spark} +916 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@3491e86e{/jobs/json,null,AVAILABLE,@Spark} +916 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@68f32020{/jobs/job,null,AVAILABLE,@Spark} +917 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@19b047fe{/jobs/job/json,null,AVAILABLE,@Spark} +918 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@22590e3e{/stages,null,AVAILABLE,@Spark} +919 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@53dad875{/stages/json,null,AVAILABLE,@Spark} +919 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@5f780a86{/stages/stage,null,AVAILABLE,@Spark} +921 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@2b329bbd{/stages/stage/json,null,AVAILABLE,@Spark} +922 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@34819867{/stages/pool,null,AVAILABLE,@Spark} +923 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@118102ee{/stages/pool/json,null,AVAILABLE,@Spark} +923 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@48cd9a2c{/storage,null,AVAILABLE,@Spark} +924 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@771d1ffb{/storage/json,null,AVAILABLE,@Spark} +925 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@4f67e3df{/storage/rdd,null,AVAILABLE,@Spark} +925 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@56681eaf{/storage/rdd/json,null,AVAILABLE,@Spark} +926 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@72d0f2b4{/environment,null,AVAILABLE,@Spark} +926 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@6d2dc9d2{/environment/json,null,AVAILABLE,@Spark} +927 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@1da4b6b3{/executors,null,AVAILABLE,@Spark} +928 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@b2f4ece{/executors/json,null,AVAILABLE,@Spark} +929 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7e1f584d{/executors/threadDump,null,AVAILABLE,@Spark} +930 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7dff6d05{/executors/threadDump/json,null,AVAILABLE,@Spark} +936 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@45d64d27{/static,null,AVAILABLE,@Spark} +937 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@1816e24a{/,null,AVAILABLE,@Spark} +938 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started 
o.s.j.s.ServletContextHandler@6940f685{/api,null,AVAILABLE,@Spark} +939 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@783115d9{/jobs/job/kill,null,AVAILABLE,@Spark} +940 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@3402b4c9{/stages/stage/kill,null,AVAILABLE,@Spark} +942 [main] INFO org.apache.spark.ui.SparkUI - Bound SparkUI to 0.0.0.0, and started at http://192.168.0.105:4040 +1017 [main] INFO org.apache.spark.executor.Executor - Starting executor ID driver on host localhost +1128 [main] INFO org.apache.spark.util.Utils - Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 43625. +1129 [main] INFO org.apache.spark.network.netty.NettyBlockTransferService - Server created on 192.168.0.105:43625 +1130 [main] INFO org.apache.spark.storage.BlockManager - Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy +1150 [main] INFO org.apache.spark.storage.BlockManagerMaster - Registering BlockManager BlockManagerId(driver, 192.168.0.105, 43625, None) +1152 [dispatcher-event-loop-2] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Registering block manager 192.168.0.105:43625 with 1946.1 MB RAM, BlockManagerId(driver, 192.168.0.105, 43625, None) +1154 [main] INFO org.apache.spark.storage.BlockManagerMaster - Registered BlockManager BlockManagerId(driver, 192.168.0.105, 43625, None) +1155 [main] INFO org.apache.spark.storage.BlockManager - Initialized BlockManager: BlockManagerId(driver, 192.168.0.105, 43625, None) +1281 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@43e9089{/metrics/json,null,AVAILABLE,@Spark} +0 [main] WARN org.apache.spark.util.Utils - Your hostname, pop-os resolves to a loopback address: 127.0.1.1; using 192.168.0.105 instead (on interface enp0s25) +1 [main] WARN org.apache.spark.util.Utils - Set SPARK_LOCAL_IP if you need to bind to another address +33 [main] INFO org.apache.spark.SparkContext - Running Spark version 2.4.0 +227 [main] WARN org.apache.hadoop.util.NativeCodeLoader - Unable to load native-hadoop library for your platform... using builtin-java classes where applicable +334 [main] INFO org.apache.spark.SparkContext - Submitted application: Databus Client Converter +381 [main] INFO org.apache.spark.SecurityManager - Changing view acls to: eisenbahnplatte +381 [main] INFO org.apache.spark.SecurityManager - Changing modify acls to: eisenbahnplatte +381 [main] INFO org.apache.spark.SecurityManager - Changing view acls groups to: +382 [main] INFO org.apache.spark.SecurityManager - Changing modify acls groups to: +382 [main] INFO org.apache.spark.SecurityManager - SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(eisenbahnplatte); groups with view permissions: Set(); users with modify permissions: Set(eisenbahnplatte); groups with modify permissions: Set() +679 [main] INFO org.apache.spark.util.Utils - Successfully started service 'sparkDriver' on port 35825. 
+698 [main] INFO org.apache.spark.SparkEnv - Registering MapOutputTracker +714 [main] INFO org.apache.spark.SparkEnv - Registering BlockManagerMaster +716 [main] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information +717 [main] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - BlockManagerMasterEndpoint up +724 [main] INFO org.apache.spark.storage.DiskBlockManager - Created local directory at /tmp/blockmgr-0e9d244e-e7bb-47bb-871a-4d1ea49ae5ee +739 [main] INFO org.apache.spark.storage.memory.MemoryStore - MemoryStore started with capacity 1946.1 MB +749 [main] INFO org.apache.spark.SparkEnv - Registering OutputCommitCoordinator +812 [main] INFO org.spark_project.jetty.util.log - Logging initialized @2459ms +861 [main] INFO org.spark_project.jetty.server.Server - jetty-9.3.z-SNAPSHOT, build timestamp: unknown, git hash: unknown +875 [main] INFO org.spark_project.jetty.server.Server - Started @2523ms +892 [main] INFO org.spark_project.jetty.server.AbstractConnector - Started ServerConnector@65eabaab{HTTP/1.1,[http/1.1]}{0.0.0.0:4040} +892 [main] INFO org.apache.spark.util.Utils - Successfully started service 'SparkUI' on port 4040. +909 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@19058533{/jobs,null,AVAILABLE,@Spark} +910 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@3491e86e{/jobs/json,null,AVAILABLE,@Spark} +910 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@68f32020{/jobs/job,null,AVAILABLE,@Spark} +911 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@19b047fe{/jobs/job/json,null,AVAILABLE,@Spark} +912 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@22590e3e{/stages,null,AVAILABLE,@Spark} +912 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@53dad875{/stages/json,null,AVAILABLE,@Spark} +913 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@5f780a86{/stages/stage,null,AVAILABLE,@Spark} +914 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@2b329bbd{/stages/stage/json,null,AVAILABLE,@Spark} +914 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@34819867{/stages/pool,null,AVAILABLE,@Spark} +915 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@118102ee{/stages/pool/json,null,AVAILABLE,@Spark} +915 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@48cd9a2c{/storage,null,AVAILABLE,@Spark} +916 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@771d1ffb{/storage/json,null,AVAILABLE,@Spark} +916 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@4f67e3df{/storage/rdd,null,AVAILABLE,@Spark} +917 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@56681eaf{/storage/rdd/json,null,AVAILABLE,@Spark} +918 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started 
o.s.j.s.ServletContextHandler@72d0f2b4{/environment,null,AVAILABLE,@Spark} +918 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@6d2dc9d2{/environment/json,null,AVAILABLE,@Spark} +919 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@1da4b6b3{/executors,null,AVAILABLE,@Spark} +919 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@b2f4ece{/executors/json,null,AVAILABLE,@Spark} +920 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7e1f584d{/executors/threadDump,null,AVAILABLE,@Spark} +920 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7dff6d05{/executors/threadDump/json,null,AVAILABLE,@Spark} +926 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@45d64d27{/static,null,AVAILABLE,@Spark} +927 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@1816e24a{/,null,AVAILABLE,@Spark} +928 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@6940f685{/api,null,AVAILABLE,@Spark} +929 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@783115d9{/jobs/job/kill,null,AVAILABLE,@Spark} +930 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@3402b4c9{/stages/stage/kill,null,AVAILABLE,@Spark} +932 [main] INFO org.apache.spark.ui.SparkUI - Bound SparkUI to 0.0.0.0, and started at http://192.168.0.105:4040 +994 [main] INFO org.apache.spark.executor.Executor - Starting executor ID driver on host localhost +1092 [main] INFO org.apache.spark.util.Utils - Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 38149. +1093 [main] INFO org.apache.spark.network.netty.NettyBlockTransferService - Server created on 192.168.0.105:38149 +1093 [main] INFO org.apache.spark.storage.BlockManager - Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy +1113 [main] INFO org.apache.spark.storage.BlockManagerMaster - Registering BlockManager BlockManagerId(driver, 192.168.0.105, 38149, None) +1116 [dispatcher-event-loop-2] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Registering block manager 192.168.0.105:38149 with 1946.1 MB RAM, BlockManagerId(driver, 192.168.0.105, 38149, None) +1118 [main] INFO org.apache.spark.storage.BlockManagerMaster - Registered BlockManager BlockManagerId(driver, 192.168.0.105, 38149, None) +1118 [main] INFO org.apache.spark.storage.BlockManager - Initialized BlockManager: BlockManagerId(driver, 192.168.0.105, 38149, None) +1233 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@43e9089{/metrics/json,null,AVAILABLE,@Spark} +0 [main] WARN org.apache.spark.util.Utils - Your hostname, pop-os resolves to a loopback address: 127.0.1.1; using 192.168.0.105 instead (on interface enp0s25) +1 [main] WARN org.apache.spark.util.Utils - Set SPARK_LOCAL_IP if you need to bind to another address +28 [main] INFO org.apache.spark.SparkContext - Running Spark version 2.4.0 +211 [main] WARN org.apache.hadoop.util.NativeCodeLoader - Unable to load native-hadoop library for your platform... 
using builtin-java classes where applicable +337 [main] INFO org.apache.spark.SparkContext - Submitted application: Databus Client Converter +383 [main] INFO org.apache.spark.SecurityManager - Changing view acls to: eisenbahnplatte +384 [main] INFO org.apache.spark.SecurityManager - Changing modify acls to: eisenbahnplatte +384 [main] INFO org.apache.spark.SecurityManager - Changing view acls groups to: +384 [main] INFO org.apache.spark.SecurityManager - Changing modify acls groups to: +385 [main] INFO org.apache.spark.SecurityManager - SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(eisenbahnplatte); groups with view permissions: Set(); users with modify permissions: Set(eisenbahnplatte); groups with modify permissions: Set() +682 [main] INFO org.apache.spark.util.Utils - Successfully started service 'sparkDriver' on port 37873. +700 [main] INFO org.apache.spark.SparkEnv - Registering MapOutputTracker +715 [main] INFO org.apache.spark.SparkEnv - Registering BlockManagerMaster +717 [main] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information +718 [main] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - BlockManagerMasterEndpoint up +725 [main] INFO org.apache.spark.storage.DiskBlockManager - Created local directory at /tmp/blockmgr-ce166dcb-8ade-44d7-98af-5ce7e6c2beb3 +740 [main] INFO org.apache.spark.storage.memory.MemoryStore - MemoryStore started with capacity 1946.1 MB +750 [main] INFO org.apache.spark.SparkEnv - Registering OutputCommitCoordinator +814 [main] INFO org.spark_project.jetty.util.log - Logging initialized @2453ms +863 [main] INFO org.spark_project.jetty.server.Server - jetty-9.3.z-SNAPSHOT, build timestamp: unknown, git hash: unknown +876 [main] INFO org.spark_project.jetty.server.Server - Started @2516ms +890 [main] INFO org.spark_project.jetty.server.AbstractConnector - Started ServerConnector@65eabaab{HTTP/1.1,[http/1.1]}{0.0.0.0:4040} +891 [main] INFO org.apache.spark.util.Utils - Successfully started service 'SparkUI' on port 4040. 
+908 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@19058533{/jobs,null,AVAILABLE,@Spark} +909 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@3491e86e{/jobs/json,null,AVAILABLE,@Spark} +909 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@68f32020{/jobs/job,null,AVAILABLE,@Spark} +910 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@19b047fe{/jobs/job/json,null,AVAILABLE,@Spark} +911 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@22590e3e{/stages,null,AVAILABLE,@Spark} +912 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@53dad875{/stages/json,null,AVAILABLE,@Spark} +912 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@5f780a86{/stages/stage,null,AVAILABLE,@Spark} +914 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@2b329bbd{/stages/stage/json,null,AVAILABLE,@Spark} +915 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@34819867{/stages/pool,null,AVAILABLE,@Spark} +915 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@118102ee{/stages/pool/json,null,AVAILABLE,@Spark} +916 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@48cd9a2c{/storage,null,AVAILABLE,@Spark} +917 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@771d1ffb{/storage/json,null,AVAILABLE,@Spark} +918 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@4f67e3df{/storage/rdd,null,AVAILABLE,@Spark} +918 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@56681eaf{/storage/rdd/json,null,AVAILABLE,@Spark} +919 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@72d0f2b4{/environment,null,AVAILABLE,@Spark} +919 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@6d2dc9d2{/environment/json,null,AVAILABLE,@Spark} +920 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@1da4b6b3{/executors,null,AVAILABLE,@Spark} +920 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@b2f4ece{/executors/json,null,AVAILABLE,@Spark} +921 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7e1f584d{/executors/threadDump,null,AVAILABLE,@Spark} +921 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7dff6d05{/executors/threadDump/json,null,AVAILABLE,@Spark} +927 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@45d64d27{/static,null,AVAILABLE,@Spark} +928 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@1816e24a{/,null,AVAILABLE,@Spark} +929 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started 
o.s.j.s.ServletContextHandler@6940f685{/api,null,AVAILABLE,@Spark} +929 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@783115d9{/jobs/job/kill,null,AVAILABLE,@Spark} +930 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@3402b4c9{/stages/stage/kill,null,AVAILABLE,@Spark} +932 [main] INFO org.apache.spark.ui.SparkUI - Bound SparkUI to 0.0.0.0, and started at http://192.168.0.105:4040 +997 [main] INFO org.apache.spark.executor.Executor - Starting executor ID driver on host localhost +1098 [main] INFO org.apache.spark.util.Utils - Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 37005. +1099 [main] INFO org.apache.spark.network.netty.NettyBlockTransferService - Server created on 192.168.0.105:37005 +1100 [main] INFO org.apache.spark.storage.BlockManager - Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy +1119 [main] INFO org.apache.spark.storage.BlockManagerMaster - Registering BlockManager BlockManagerId(driver, 192.168.0.105, 37005, None) +1121 [dispatcher-event-loop-2] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Registering block manager 192.168.0.105:37005 with 1946.1 MB RAM, BlockManagerId(driver, 192.168.0.105, 37005, None) +1123 [main] INFO org.apache.spark.storage.BlockManagerMaster - Registered BlockManager BlockManagerId(driver, 192.168.0.105, 37005, None) +1123 [main] INFO org.apache.spark.storage.BlockManager - Initialized BlockManager: BlockManagerId(driver, 192.168.0.105, 37005, None) +1241 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@43e9089{/metrics/json,null,AVAILABLE,@Spark} +0 [main] WARN org.apache.spark.util.Utils - Your hostname, pop-os resolves to a loopback address: 127.0.1.1; using 192.168.0.105 instead (on interface enp0s25) +1 [main] WARN org.apache.spark.util.Utils - Set SPARK_LOCAL_IP if you need to bind to another address +29 [main] INFO org.apache.spark.SparkContext - Running Spark version 2.4.0 +192 [main] WARN org.apache.hadoop.util.NativeCodeLoader - Unable to load native-hadoop library for your platform... using builtin-java classes where applicable +300 [main] INFO org.apache.spark.SparkContext - Submitted application: Databus Client Converter +350 [main] INFO org.apache.spark.SecurityManager - Changing view acls to: eisenbahnplatte +350 [main] INFO org.apache.spark.SecurityManager - Changing modify acls to: eisenbahnplatte +351 [main] INFO org.apache.spark.SecurityManager - Changing view acls groups to: +351 [main] INFO org.apache.spark.SecurityManager - Changing modify acls groups to: +351 [main] INFO org.apache.spark.SecurityManager - SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(eisenbahnplatte); groups with view permissions: Set(); users with modify permissions: Set(eisenbahnplatte); groups with modify permissions: Set() +653 [main] INFO org.apache.spark.util.Utils - Successfully started service 'sparkDriver' on port 43923. 
+672 [main] INFO org.apache.spark.SparkEnv - Registering MapOutputTracker +687 [main] INFO org.apache.spark.SparkEnv - Registering BlockManagerMaster +689 [main] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information +689 [main] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - BlockManagerMasterEndpoint up +696 [main] INFO org.apache.spark.storage.DiskBlockManager - Created local directory at /tmp/blockmgr-4e500ad6-e875-4201-ad54-3c2cfab87469 +710 [main] INFO org.apache.spark.storage.memory.MemoryStore - MemoryStore started with capacity 1946.1 MB +720 [main] INFO org.apache.spark.SparkEnv - Registering OutputCommitCoordinator +780 [main] INFO org.spark_project.jetty.util.log - Logging initialized @2484ms +832 [main] INFO org.spark_project.jetty.server.Server - jetty-9.3.z-SNAPSHOT, build timestamp: unknown, git hash: unknown +845 [main] INFO org.spark_project.jetty.server.Server - Started @2550ms +861 [main] INFO org.spark_project.jetty.server.AbstractConnector - Started ServerConnector@65eabaab{HTTP/1.1,[http/1.1]}{0.0.0.0:4040} +861 [main] INFO org.apache.spark.util.Utils - Successfully started service 'SparkUI' on port 4040. +880 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@19058533{/jobs,null,AVAILABLE,@Spark} +881 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@3491e86e{/jobs/json,null,AVAILABLE,@Spark} +882 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@68f32020{/jobs/job,null,AVAILABLE,@Spark} +883 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@19b047fe{/jobs/job/json,null,AVAILABLE,@Spark} +884 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@22590e3e{/stages,null,AVAILABLE,@Spark} +884 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@53dad875{/stages/json,null,AVAILABLE,@Spark} +885 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@5f780a86{/stages/stage,null,AVAILABLE,@Spark} +887 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@2b329bbd{/stages/stage/json,null,AVAILABLE,@Spark} +887 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@34819867{/stages/pool,null,AVAILABLE,@Spark} +888 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@118102ee{/stages/pool/json,null,AVAILABLE,@Spark} +888 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@48cd9a2c{/storage,null,AVAILABLE,@Spark} +889 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@771d1ffb{/storage/json,null,AVAILABLE,@Spark} +889 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@4f67e3df{/storage/rdd,null,AVAILABLE,@Spark} +890 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@56681eaf{/storage/rdd/json,null,AVAILABLE,@Spark} +890 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started 
o.s.j.s.ServletContextHandler@72d0f2b4{/environment,null,AVAILABLE,@Spark} +891 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@6d2dc9d2{/environment/json,null,AVAILABLE,@Spark} +892 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@1da4b6b3{/executors,null,AVAILABLE,@Spark} +892 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@b2f4ece{/executors/json,null,AVAILABLE,@Spark} +893 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7e1f584d{/executors/threadDump,null,AVAILABLE,@Spark} +893 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7dff6d05{/executors/threadDump/json,null,AVAILABLE,@Spark} +898 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@45d64d27{/static,null,AVAILABLE,@Spark} +899 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@1816e24a{/,null,AVAILABLE,@Spark} +900 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@6940f685{/api,null,AVAILABLE,@Spark} +901 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@783115d9{/jobs/job/kill,null,AVAILABLE,@Spark} +901 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@3402b4c9{/stages/stage/kill,null,AVAILABLE,@Spark} +903 [main] INFO org.apache.spark.ui.SparkUI - Bound SparkUI to 0.0.0.0, and started at http://192.168.0.105:4040 +964 [main] INFO org.apache.spark.executor.Executor - Starting executor ID driver on host localhost +1076 [main] INFO org.apache.spark.util.Utils - Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 44705. +1077 [main] INFO org.apache.spark.network.netty.NettyBlockTransferService - Server created on 192.168.0.105:44705 +1078 [main] INFO org.apache.spark.storage.BlockManager - Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy +1097 [main] INFO org.apache.spark.storage.BlockManagerMaster - Registering BlockManager BlockManagerId(driver, 192.168.0.105, 44705, None) +1100 [dispatcher-event-loop-2] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Registering block manager 192.168.0.105:44705 with 1946.1 MB RAM, BlockManagerId(driver, 192.168.0.105, 44705, None) +1101 [main] INFO org.apache.spark.storage.BlockManagerMaster - Registered BlockManager BlockManagerId(driver, 192.168.0.105, 44705, None) +1102 [main] INFO org.apache.spark.storage.BlockManager - Initialized BlockManager: BlockManagerId(driver, 192.168.0.105, 44705, None) +1227 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@43e9089{/metrics/json,null,AVAILABLE,@Spark} +0 [main] WARN org.apache.spark.util.Utils - Your hostname, pop-os resolves to a loopback address: 127.0.1.1; using 192.168.0.105 instead (on interface enp0s25) +1 [main] WARN org.apache.spark.util.Utils - Set SPARK_LOCAL_IP if you need to bind to another address +32 [main] INFO org.apache.spark.SparkContext - Running Spark version 2.4.0 +242 [main] WARN org.apache.hadoop.util.NativeCodeLoader - Unable to load native-hadoop library for your platform... 
o.s.j.s.ServletContextHandler@72d0f2b4{/environment,null,AVAILABLE,@Spark} +915 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@6d2dc9d2{/environment/json,null,AVAILABLE,@Spark} +916 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@1da4b6b3{/executors,null,AVAILABLE,@Spark} +916 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@b2f4ece{/executors/json,null,AVAILABLE,@Spark} +917 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7e1f584d{/executors/threadDump,null,AVAILABLE,@Spark} +918 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7dff6d05{/executors/threadDump/json,null,AVAILABLE,@Spark} +923 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@45d64d27{/static,null,AVAILABLE,@Spark} +924 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@1816e24a{/,null,AVAILABLE,@Spark} +925 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@6940f685{/api,null,AVAILABLE,@Spark} +926 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@783115d9{/jobs/job/kill,null,AVAILABLE,@Spark} +926 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@3402b4c9{/stages/stage/kill,null,AVAILABLE,@Spark} +928 [main] INFO org.apache.spark.ui.SparkUI - Bound SparkUI to 0.0.0.0, and started at http://192.168.0.105:4040 +993 [main] INFO org.apache.spark.executor.Executor - Starting executor ID driver on host localhost +1094 [main] INFO org.apache.spark.util.Utils - Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 43923. +1095 [main] INFO org.apache.spark.network.netty.NettyBlockTransferService - Server created on 192.168.0.105:43923 +1096 [main] INFO org.apache.spark.storage.BlockManager - Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy +1116 [main] INFO org.apache.spark.storage.BlockManagerMaster - Registering BlockManager BlockManagerId(driver, 192.168.0.105, 43923, None) +1119 [dispatcher-event-loop-2] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Registering block manager 192.168.0.105:43923 with 1946.1 MB RAM, BlockManagerId(driver, 192.168.0.105, 43923, None) +1121 [main] INFO org.apache.spark.storage.BlockManagerMaster - Registered BlockManager BlockManagerId(driver, 192.168.0.105, 43923, None) +1121 [main] INFO org.apache.spark.storage.BlockManager - Initialized BlockManager: BlockManagerId(driver, 192.168.0.105, 43923, None) +1241 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@43e9089{/metrics/json,null,AVAILABLE,@Spark} +0 [main] WARN org.apache.spark.util.Utils - Your hostname, pop-os resolves to a loopback address: 127.0.1.1; using 192.168.0.105 instead (on interface enp0s25) +1 [main] WARN org.apache.spark.util.Utils - Set SPARK_LOCAL_IP if you need to bind to another address +33 [main] INFO org.apache.spark.SparkContext - Running Spark version 2.4.0 +243 [main] WARN org.apache.hadoop.util.NativeCodeLoader - Unable to load native-hadoop library for your platform... 
using builtin-java classes where applicable +365 [main] INFO org.apache.spark.SparkContext - Submitted application: Databus Client Converter +413 [main] INFO org.apache.spark.SecurityManager - Changing view acls to: eisenbahnplatte +413 [main] INFO org.apache.spark.SecurityManager - Changing modify acls to: eisenbahnplatte +413 [main] INFO org.apache.spark.SecurityManager - Changing view acls groups to: +414 [main] INFO org.apache.spark.SecurityManager - Changing modify acls groups to: +414 [main] INFO org.apache.spark.SecurityManager - SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(eisenbahnplatte); groups with view permissions: Set(); users with modify permissions: Set(eisenbahnplatte); groups with modify permissions: Set() +717 [main] INFO org.apache.spark.util.Utils - Successfully started service 'sparkDriver' on port 43979. +736 [main] INFO org.apache.spark.SparkEnv - Registering MapOutputTracker +750 [main] INFO org.apache.spark.SparkEnv - Registering BlockManagerMaster +752 [main] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information +753 [main] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - BlockManagerMasterEndpoint up +759 [main] INFO org.apache.spark.storage.DiskBlockManager - Created local directory at /tmp/blockmgr-57d8cd19-073d-440b-b701-9686db7d62a7 +774 [main] INFO org.apache.spark.storage.memory.MemoryStore - MemoryStore started with capacity 1946.1 MB +784 [main] INFO org.apache.spark.SparkEnv - Registering OutputCommitCoordinator +843 [main] INFO org.spark_project.jetty.util.log - Logging initialized @2598ms +893 [main] INFO org.spark_project.jetty.server.Server - jetty-9.3.z-SNAPSHOT, build timestamp: unknown, git hash: unknown +905 [main] INFO org.spark_project.jetty.server.Server - Started @2661ms +925 [main] INFO org.spark_project.jetty.server.AbstractConnector - Started ServerConnector@1de9d54{HTTP/1.1,[http/1.1]}{0.0.0.0:4040} +925 [main] INFO org.apache.spark.util.Utils - Successfully started service 'SparkUI' on port 4040. 
+943 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@42210be1{/jobs,null,AVAILABLE,@Spark} +944 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@409986fe{/jobs/json,null,AVAILABLE,@Spark} +944 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@19b047fe{/jobs/job,null,AVAILABLE,@Spark} +945 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@53dad875{/jobs/job/json,null,AVAILABLE,@Spark} +946 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@5f780a86{/stages,null,AVAILABLE,@Spark} +947 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@446c3920{/stages/json,null,AVAILABLE,@Spark} +948 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@2eaef76d{/stages/stage,null,AVAILABLE,@Spark} +950 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@118102ee{/stages/stage/json,null,AVAILABLE,@Spark} +950 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@48cd9a2c{/stages/pool,null,AVAILABLE,@Spark} +951 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@771d1ffb{/stages/pool/json,null,AVAILABLE,@Spark} +952 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@4f67e3df{/storage,null,AVAILABLE,@Spark} +953 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@56681eaf{/storage/json,null,AVAILABLE,@Spark} +953 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@72d0f2b4{/storage/rdd,null,AVAILABLE,@Spark} +954 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@6d2dc9d2{/storage/rdd/json,null,AVAILABLE,@Spark} +955 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@1da4b6b3{/environment,null,AVAILABLE,@Spark} +956 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@b2f4ece{/environment/json,null,AVAILABLE,@Spark} +956 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7e1f584d{/executors,null,AVAILABLE,@Spark} +957 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7dff6d05{/executors/json,null,AVAILABLE,@Spark} +958 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@45d64d27{/executors/threadDump,null,AVAILABLE,@Spark} +959 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@34fe326d{/executors/threadDump/json,null,AVAILABLE,@Spark} +965 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@30a7c98f{/static,null,AVAILABLE,@Spark} +965 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@47b179d7{/,null,AVAILABLE,@Spark} +967 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started 
o.s.j.s.ServletContextHandler@39008c9f{/api,null,AVAILABLE,@Spark} +968 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@95bb2a2{/jobs/job/kill,null,AVAILABLE,@Spark} +968 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@46d567cb{/stages/stage/kill,null,AVAILABLE,@Spark} +970 [main] INFO org.apache.spark.ui.SparkUI - Bound SparkUI to 0.0.0.0, and started at http://192.168.0.105:4040 +1039 [main] INFO org.apache.spark.executor.Executor - Starting executor ID driver on host localhost +1155 [main] INFO org.apache.spark.util.Utils - Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 35691. +1156 [main] INFO org.apache.spark.network.netty.NettyBlockTransferService - Server created on 192.168.0.105:35691 +1157 [main] INFO org.apache.spark.storage.BlockManager - Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy +1177 [main] INFO org.apache.spark.storage.BlockManagerMaster - Registering BlockManager BlockManagerId(driver, 192.168.0.105, 35691, None) +1179 [dispatcher-event-loop-2] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Registering block manager 192.168.0.105:35691 with 1946.1 MB RAM, BlockManagerId(driver, 192.168.0.105, 35691, None) +1180 [main] INFO org.apache.spark.storage.BlockManagerMaster - Registered BlockManager BlockManagerId(driver, 192.168.0.105, 35691, None) +1181 [main] INFO org.apache.spark.storage.BlockManager - Initialized BlockManager: BlockManagerId(driver, 192.168.0.105, 35691, None) +1302 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@352c44a8{/metrics/json,null,AVAILABLE,@Spark} +0 [main] WARN org.apache.spark.util.Utils - Your hostname, pop-os resolves to a loopback address: 127.0.1.1; using 192.168.0.105 instead (on interface enp0s25) +1 [main] WARN org.apache.spark.util.Utils - Set SPARK_LOCAL_IP if you need to bind to another address +32 [main] INFO org.apache.spark.SparkContext - Running Spark version 2.4.0 +232 [main] WARN org.apache.hadoop.util.NativeCodeLoader - Unable to load native-hadoop library for your platform... using builtin-java classes where applicable +359 [main] INFO org.apache.spark.SparkContext - Submitted application: Databus Client Converter +414 [main] INFO org.apache.spark.SecurityManager - Changing view acls to: eisenbahnplatte +414 [main] INFO org.apache.spark.SecurityManager - Changing modify acls to: eisenbahnplatte +415 [main] INFO org.apache.spark.SecurityManager - Changing view acls groups to: +415 [main] INFO org.apache.spark.SecurityManager - Changing modify acls groups to: +415 [main] INFO org.apache.spark.SecurityManager - SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(eisenbahnplatte); groups with view permissions: Set(); users with modify permissions: Set(eisenbahnplatte); groups with modify permissions: Set() +707 [main] INFO org.apache.spark.util.Utils - Successfully started service 'sparkDriver' on port 37123. 
+726 [main] INFO org.apache.spark.SparkEnv - Registering MapOutputTracker +740 [main] INFO org.apache.spark.SparkEnv - Registering BlockManagerMaster +742 [main] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information +742 [main] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - BlockManagerMasterEndpoint up +749 [main] INFO org.apache.spark.storage.DiskBlockManager - Created local directory at /tmp/blockmgr-5ffe957f-a54c-4d65-8222-2ef08ceb1bec +765 [main] INFO org.apache.spark.storage.memory.MemoryStore - MemoryStore started with capacity 1946.1 MB +774 [main] INFO org.apache.spark.SparkEnv - Registering OutputCommitCoordinator +834 [main] INFO org.spark_project.jetty.util.log - Logging initialized @2509ms +883 [main] INFO org.spark_project.jetty.server.Server - jetty-9.3.z-SNAPSHOT, build timestamp: unknown, git hash: unknown +896 [main] INFO org.spark_project.jetty.server.Server - Started @2571ms +912 [main] INFO org.spark_project.jetty.server.AbstractConnector - Started ServerConnector@1de9d54{HTTP/1.1,[http/1.1]}{0.0.0.0:4040} +912 [main] INFO org.apache.spark.util.Utils - Successfully started service 'SparkUI' on port 4040. +929 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@42210be1{/jobs,null,AVAILABLE,@Spark} +930 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@409986fe{/jobs/json,null,AVAILABLE,@Spark} +930 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@19b047fe{/jobs/job,null,AVAILABLE,@Spark} +931 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@53dad875{/jobs/job/json,null,AVAILABLE,@Spark} +932 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@5f780a86{/stages,null,AVAILABLE,@Spark} +932 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@446c3920{/stages/json,null,AVAILABLE,@Spark} +933 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@2eaef76d{/stages/stage,null,AVAILABLE,@Spark} +934 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@118102ee{/stages/stage/json,null,AVAILABLE,@Spark} +934 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@48cd9a2c{/stages/pool,null,AVAILABLE,@Spark} +935 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@771d1ffb{/stages/pool/json,null,AVAILABLE,@Spark} +935 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@4f67e3df{/storage,null,AVAILABLE,@Spark} +936 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@56681eaf{/storage/json,null,AVAILABLE,@Spark} +937 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@72d0f2b4{/storage/rdd,null,AVAILABLE,@Spark} +937 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@6d2dc9d2{/storage/rdd/json,null,AVAILABLE,@Spark} +938 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started 
o.s.j.s.ServletContextHandler@1da4b6b3{/environment,null,AVAILABLE,@Spark} +938 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@b2f4ece{/environment/json,null,AVAILABLE,@Spark} +939 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7e1f584d{/executors,null,AVAILABLE,@Spark} +940 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7dff6d05{/executors/json,null,AVAILABLE,@Spark} +941 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@45d64d27{/executors/threadDump,null,AVAILABLE,@Spark} +941 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@34fe326d{/executors/threadDump/json,null,AVAILABLE,@Spark} +947 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@30a7c98f{/static,null,AVAILABLE,@Spark} +948 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@47b179d7{/,null,AVAILABLE,@Spark} +949 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@39008c9f{/api,null,AVAILABLE,@Spark} +950 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@95bb2a2{/jobs/job/kill,null,AVAILABLE,@Spark} +951 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@46d567cb{/stages/stage/kill,null,AVAILABLE,@Spark} +953 [main] INFO org.apache.spark.ui.SparkUI - Bound SparkUI to 0.0.0.0, and started at http://192.168.0.105:4040 +1025 [main] INFO org.apache.spark.executor.Executor - Starting executor ID driver on host localhost +1125 [main] INFO org.apache.spark.util.Utils - Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 37521. +1126 [main] INFO org.apache.spark.network.netty.NettyBlockTransferService - Server created on 192.168.0.105:37521 +1127 [main] INFO org.apache.spark.storage.BlockManager - Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy +1146 [main] INFO org.apache.spark.storage.BlockManagerMaster - Registering BlockManager BlockManagerId(driver, 192.168.0.105, 37521, None) +1148 [dispatcher-event-loop-2] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Registering block manager 192.168.0.105:37521 with 1946.1 MB RAM, BlockManagerId(driver, 192.168.0.105, 37521, None) +1150 [main] INFO org.apache.spark.storage.BlockManagerMaster - Registered BlockManager BlockManagerId(driver, 192.168.0.105, 37521, None) +1150 [main] INFO org.apache.spark.storage.BlockManager - Initialized BlockManager: BlockManagerId(driver, 192.168.0.105, 37521, None) +1268 [main] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@352c44a8{/metrics/json,null,AVAILABLE,@Spark} +0 [ScalaTest-run] WARN org.apache.spark.util.Utils - Your hostname, pop-os resolves to a loopback address: 127.0.1.1; using 192.168.0.105 instead (on interface enp0s25) +2 [ScalaTest-run] WARN org.apache.spark.util.Utils - Set SPARK_LOCAL_IP if you need to bind to another address +62 [ScalaTest-run] INFO org.apache.spark.SparkContext - Running Spark version 2.4.0 +350 [ScalaTest-run] WARN org.apache.hadoop.util.NativeCodeLoader - Unable to load native-hadoop library for your platform... 
using builtin-java classes where applicable +466 [ScalaTest-run] INFO org.apache.spark.SparkContext - Submitted application: Triple reader +528 [ScalaTest-run] INFO org.apache.spark.SecurityManager - Changing view acls to: eisenbahnplatte +528 [ScalaTest-run] INFO org.apache.spark.SecurityManager - Changing modify acls to: eisenbahnplatte +528 [ScalaTest-run] INFO org.apache.spark.SecurityManager - Changing view acls groups to: +529 [ScalaTest-run] INFO org.apache.spark.SecurityManager - Changing modify acls groups to: +529 [ScalaTest-run] INFO org.apache.spark.SecurityManager - SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(eisenbahnplatte); groups with view permissions: Set(); users with modify permissions: Set(eisenbahnplatte); groups with modify permissions: Set() +855 [ScalaTest-run] INFO org.apache.spark.util.Utils - Successfully started service 'sparkDriver' on port 42145. +875 [ScalaTest-run] INFO org.apache.spark.SparkEnv - Registering MapOutputTracker +891 [ScalaTest-run] INFO org.apache.spark.SparkEnv - Registering BlockManagerMaster +894 [ScalaTest-run] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information +894 [ScalaTest-run] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - BlockManagerMasterEndpoint up +929 [ScalaTest-run] INFO org.apache.spark.storage.DiskBlockManager - Created local directory at /tmp/blockmgr-b7d6d2ba-37c6-45c9-893b-bc4f7bdce53a +946 [ScalaTest-run] INFO org.apache.spark.storage.memory.MemoryStore - MemoryStore started with capacity 1946.1 MB +958 [ScalaTest-run] INFO org.apache.spark.SparkEnv - Registering OutputCommitCoordinator +1037 [ScalaTest-run] INFO org.spark_project.jetty.util.log - Logging initialized @2123ms +1088 [ScalaTest-run] INFO org.spark_project.jetty.server.Server - jetty-9.3.z-SNAPSHOT, build timestamp: unknown, git hash: unknown +1102 [ScalaTest-run] INFO org.spark_project.jetty.server.Server - Started @2189ms +1116 [ScalaTest-run] INFO org.spark_project.jetty.server.AbstractConnector - Started ServerConnector@82c57b3{HTTP/1.1,[http/1.1]}{0.0.0.0:4040} +1116 [ScalaTest-run] INFO org.apache.spark.util.Utils - Successfully started service 'SparkUI' on port 4040. 
[... Jetty ServletContextHandler startup entries (/jobs, /stages, /storage, /environment, /executors, /static, /, /api, and the kill endpoints) elided; identical to the earlier runs except for object hashes ...]
+1167 [ScalaTest-run] INFO org.apache.spark.ui.SparkUI - Bound SparkUI to 0.0.0.0, and started at http://192.168.0.105:4040
+1234 [ScalaTest-run] INFO org.apache.spark.executor.Executor - Starting executor ID driver on host localhost
+1308 [ScalaTest-run] INFO org.apache.spark.util.Utils - Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 39427.
+1308 [ScalaTest-run] INFO org.apache.spark.network.netty.NettyBlockTransferService - Server created on 192.168.0.105:39427
+1309 [ScalaTest-run] INFO org.apache.spark.storage.BlockManager - Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy
+1343 [ScalaTest-run] INFO org.apache.spark.storage.BlockManagerMaster - Registering BlockManager BlockManagerId(driver, 192.168.0.105, 39427, None)
+1346 [dispatcher-event-loop-2] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Registering block manager 192.168.0.105:39427 with 1946.1 MB RAM, BlockManagerId(driver, 192.168.0.105, 39427, None)
+1348 [ScalaTest-run] INFO org.apache.spark.storage.BlockManagerMaster - Registered BlockManager BlockManagerId(driver, 192.168.0.105, 39427, None)
+1349 [ScalaTest-run] INFO org.apache.spark.storage.BlockManager - Initialized BlockManager: BlockManagerId(driver, 192.168.0.105, 39427, None)
+1485 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@21719a0{/metrics/json,null,AVAILABLE,@Spark}
+2279 [ScalaTest-run-running-roundTripTests] WARN org.apache.spark.sql.SparkSession$Builder - Using an existing SparkSession; some configuration may not take effect.
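The "Using an existing SparkSession; some configuration may not take effect" warning above is normal when several ScalaTest suites share one JVM: `SparkSession.builder().getOrCreate()` returns the session created by an earlier suite and silently ignores newly passed settings. A minimal sketch of that mechanism (names and options here are illustrative, not taken from the Databus Client code):

```scala
import org.apache.spark.sql.SparkSession

object SessionReuseSketch {
  def main(args: Array[String]): Unit = {
    // The first builder call in the JVM actually creates the session.
    val first = SparkSession.builder()
      .appName("Triple reader")
      .master("local[*]")
      .getOrCreate()

    // A later builder call returns the existing session; the new appName is
    // ignored, which is what the SparkSession$Builder warning reports.
    val second = SparkSession.builder()
      .appName("another suite")
      .getOrCreate()

    println(first eq second) // prints: true (same session instance)
    first.stop()
  }
}
```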
+3481 [Executor task launch worker for task 0] ERROR org.apache.jena.riot - [line: 1, col: 1 ] Expected BNode or IRI: Got: [DIRECTIVE:prefix]
+3487 [Executor task launch worker for task 0] ERROR org.apache.spark.executor.Executor - Exception in task 0.0 in stage 0.0 (TID 0)
+org.apache.jena.riot.RiotException: [line: 1, col: 1 ] Expected BNode or IRI: Got: [DIRECTIVE:prefix]
+ at org.apache.jena.riot.system.ErrorHandlerFactory$ErrorHandlerStd.fatal(ErrorHandlerFactory.java:147)
+ at org.apache.jena.riot.lang.LangEngine.raiseException(LangEngine.java:148)
+ at org.apache.jena.riot.lang.LangEngine.exceptionDirect(LangEngine.java:143)
+ at org.apache.jena.riot.lang.LangEngine.exception(LangEngine.java:137)
+ at org.apache.jena.riot.lang.LangNTuple.checkIRIOrBNode(LangNTuple.java:89)
+ at org.apache.jena.riot.lang.LangNTriples.parseOne(LangNTriples.java:74)
+ at org.apache.jena.riot.lang.LangNTriples.parseOne(LangNTriples.java:38)
+ at org.apache.jena.riot.lang.LangNTuple.next(LangNTuple.java:67)
+ at org.apache.jena.atlas.iterator.IteratorResourceClosing.next(IteratorResourceClosing.java:77)
+ at scala.collection.convert.Wrappers$JIteratorWrapper.next(Wrappers.scala:43)
+ at scala.collection.Iterator$$anon$10.next(Iterator.scala:394)
+ at scala.collection.Iterator$class.foreach(Iterator.scala:891)
+ at scala.collection.AbstractIterator.foreach(Iterator.scala:1334)
+ at scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:59)
+ at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:104)
+ at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:48)
+ at scala.collection.TraversableOnce$class.to(TraversableOnce.scala:310)
+ at scala.collection.AbstractIterator.to(Iterator.scala:1334)
+ at scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:302)
+ at scala.collection.AbstractIterator.toBuffer(Iterator.scala:1334)
+ at scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:289)
+ at scala.collection.AbstractIterator.toArray(Iterator.scala:1334)
+ at org.apache.spark.rdd.RDD$$anonfun$take$1$$anonfun$29.apply(RDD.scala:1364)
+ at org.apache.spark.rdd.RDD$$anonfun$take$1$$anonfun$29.apply(RDD.scala:1364)
+ at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:2101)
+ at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:2101)
+ at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
+ at org.apache.spark.scheduler.Task.run(Task.scala:121)
+ at org.apache.spark.executor.Executor$TaskRunner$$anonfun$10.apply(Executor.scala:402)
+ at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360)
+ at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:408)
+ at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
+ at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
+ at java.lang.Thread.run(Thread.java:748)
+3520 [task-result-getter-0] WARN org.apache.spark.scheduler.TaskSetManager - Lost task 0.0 in stage 0.0 (TID 0, localhost, executor driver): org.apache.jena.riot.RiotException: [line: 1, col: 1 ] Expected BNode or IRI: Got: [DIRECTIVE:prefix]
[... stack trace identical to the one above elided ...]
+
+3522 [task-result-getter-0] ERROR org.apache.spark.scheduler.TaskSetManager - Task 0 in stage 0.0 failed 1 times; aborting job
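The recurring RiotException pinpoints the problem: Jena's line-based N-Triples parser (`LangNTriples`) hit a Turtle `@prefix` directive at line 1, column 1, i.e. a Turtle file was fed to an N-Triples reader. A minimal sketch of the mismatch and of parsing with the matching language instead, assuming a hypothetical local file `example.ttl` (this is not the Databus Client's actual reader code):

```scala
import org.apache.jena.rdf.model.ModelFactory
import org.apache.jena.riot.{Lang, RDFDataMgr}

object TurtleVsNTriplesSketch {
  def main(args: Array[String]): Unit = {
    val path = "example.ttl" // hypothetical file whose first line is "@prefix ..."

    // Forcing the N-Triples parser onto Turtle input reproduces the error
    // in the log: "Expected BNode or IRI: Got: [DIRECTIVE:prefix]".
    // RDFDataMgr.read(ModelFactory.createDefaultModel(), path, Lang.NTRIPLES)

    // Reading with the matching language accepts the @prefix directive.
    val model = ModelFactory.createDefaultModel()
    RDFDataMgr.read(model, path, Lang.TURTLE)
    println(s"parsed ${model.size()} triples")
  }
}
```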
[... two further "Triple reader" test runs elided: each repeats the same Spark startup sequence and fails with the identical org.apache.jena.riot.RiotException ([line: 1, col: 1 ] Expected BNode or IRI: Got: [DIRECTIVE:prefix]), ending with "Task 0 in stage 0.0 failed 1 times; aborting job" ...]
+0 [ScalaTest-run] WARN org.apache.spark.util.Utils - Your hostname, pop-os resolves to a loopback address: 127.0.1.1; using 192.168.0.105 instead (on interface enp0s25)
+1 [ScalaTest-run] WARN org.apache.spark.util.Utils - Set SPARK_LOCAL_IP if you need to bind to another address
+76 [ScalaTest-run] INFO org.apache.spark.SparkContext - Running Spark version 2.4.0
+369 [ScalaTest-run] WARN org.apache.hadoop.util.NativeCodeLoader - Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
[... four further ScalaTest runs of the "Triple reader" test application elided as verbatim duplicates: each repeats the same Spark startup sequence (new ports, otherwise identical) and fails with the identical RiotException ([line: 1, col: 1 ] Expected BNode or IRI: Got: [DIRECTIVE:prefix]), aborting the job after task 0 ...]
org.apache.jena.riot.lang.LangNTuple.checkIRIOrBNode(LangNTuple.java:89) + at org.apache.jena.riot.lang.LangNTriples.parseOne(LangNTriples.java:74) + at org.apache.jena.riot.lang.LangNTriples.parseOne(LangNTriples.java:38) + at org.apache.jena.riot.lang.LangNTuple.next(LangNTuple.java:67) + at org.apache.jena.atlas.iterator.IteratorResourceClosing.next(IteratorResourceClosing.java:77) + at scala.collection.convert.Wrappers$JIteratorWrapper.next(Wrappers.scala:43) + at scala.collection.Iterator$$anon$10.next(Iterator.scala:394) + at scala.collection.Iterator$class.foreach(Iterator.scala:891) + at scala.collection.AbstractIterator.foreach(Iterator.scala:1334) + at scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:59) + at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:104) + at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:48) + at scala.collection.TraversableOnce$class.to(TraversableOnce.scala:310) + at scala.collection.AbstractIterator.to(Iterator.scala:1334) + at scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:302) + at scala.collection.AbstractIterator.toBuffer(Iterator.scala:1334) + at scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:289) + at scala.collection.AbstractIterator.toArray(Iterator.scala:1334) + at org.apache.spark.rdd.RDD$$anonfun$take$1$$anonfun$29.apply(RDD.scala:1364) + at org.apache.spark.rdd.RDD$$anonfun$take$1$$anonfun$29.apply(RDD.scala:1364) + at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:2101) + at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:2101) + at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90) + at org.apache.spark.scheduler.Task.run(Task.scala:121) + at org.apache.spark.executor.Executor$TaskRunner$$anonfun$10.apply(Executor.scala:402) + at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360) + at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:408) + at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) + at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) + at java.lang.Thread.run(Thread.java:748) + +3486 [task-result-getter-0] ERROR org.apache.spark.scheduler.TaskSetManager - Task 0 in stage 0.0 failed 1 times; aborting job +5343 [Executor task launch worker for task 56] ERROR org.apache.jena.riot - [line: 1, col: 49] Expected IRI: Got: [KEYWORD:a] +5344 [Executor task launch worker for task 56] ERROR org.apache.spark.executor.Executor - Exception in task 0.0 in stage 8.0 (TID 56) +org.apache.jena.riot.RiotException: [line: 1, col: 49] Expected IRI: Got: [KEYWORD:a] + at org.apache.jena.riot.system.ErrorHandlerFactory$ErrorHandlerStd.fatal(ErrorHandlerFactory.java:147) + at org.apache.jena.riot.lang.LangEngine.raiseException(LangEngine.java:148) + at org.apache.jena.riot.lang.LangEngine.exceptionDirect(LangEngine.java:143) + at org.apache.jena.riot.lang.LangEngine.exception(LangEngine.java:137) + at org.apache.jena.riot.lang.LangNTuple.checkIRI(LangNTuple.java:95) + at org.apache.jena.riot.lang.LangNTriples.parseOne(LangNTriples.java:75) + at org.apache.jena.riot.lang.LangNTriples.parseOne(LangNTriples.java:38) + at org.apache.jena.riot.lang.LangNTuple.next(LangNTuple.java:67) + at org.apache.jena.atlas.iterator.IteratorResourceClosing.next(IteratorResourceClosing.java:77) + at scala.collection.convert.Wrappers$JIteratorWrapper.next(Wrappers.scala:43) + at 
scala.collection.Iterator$$anon$10.next(Iterator.scala:394) + at scala.collection.Iterator$class.foreach(Iterator.scala:891) + at scala.collection.AbstractIterator.foreach(Iterator.scala:1334) + at scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:59) + at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:104) + at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:48) + at scala.collection.TraversableOnce$class.to(TraversableOnce.scala:310) + at scala.collection.AbstractIterator.to(Iterator.scala:1334) + at scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:302) + at scala.collection.AbstractIterator.toBuffer(Iterator.scala:1334) + at scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:289) + at scala.collection.AbstractIterator.toArray(Iterator.scala:1334) + at org.apache.spark.rdd.RDD$$anonfun$take$1$$anonfun$29.apply(RDD.scala:1364) + at org.apache.spark.rdd.RDD$$anonfun$take$1$$anonfun$29.apply(RDD.scala:1364) + at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:2101) + at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:2101) + at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90) + at org.apache.spark.scheduler.Task.run(Task.scala:121) + at org.apache.spark.executor.Executor$TaskRunner$$anonfun$10.apply(Executor.scala:402) + at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360) + at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:408) + at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) + at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) + at java.lang.Thread.run(Thread.java:748) +5346 [task-result-getter-3] WARN org.apache.spark.scheduler.TaskSetManager - Lost task 0.0 in stage 8.0 (TID 56, localhost, executor driver): org.apache.jena.riot.RiotException: [line: 1, col: 49] Expected IRI: Got: [KEYWORD:a] + at org.apache.jena.riot.system.ErrorHandlerFactory$ErrorHandlerStd.fatal(ErrorHandlerFactory.java:147) + at org.apache.jena.riot.lang.LangEngine.raiseException(LangEngine.java:148) + at org.apache.jena.riot.lang.LangEngine.exceptionDirect(LangEngine.java:143) + at org.apache.jena.riot.lang.LangEngine.exception(LangEngine.java:137) + at org.apache.jena.riot.lang.LangNTuple.checkIRI(LangNTuple.java:95) + at org.apache.jena.riot.lang.LangNTriples.parseOne(LangNTriples.java:75) + at org.apache.jena.riot.lang.LangNTriples.parseOne(LangNTriples.java:38) + at org.apache.jena.riot.lang.LangNTuple.next(LangNTuple.java:67) + at org.apache.jena.atlas.iterator.IteratorResourceClosing.next(IteratorResourceClosing.java:77) + at scala.collection.convert.Wrappers$JIteratorWrapper.next(Wrappers.scala:43) + at scala.collection.Iterator$$anon$10.next(Iterator.scala:394) + at scala.collection.Iterator$class.foreach(Iterator.scala:891) + at scala.collection.AbstractIterator.foreach(Iterator.scala:1334) + at scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:59) + at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:104) + at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:48) + at scala.collection.TraversableOnce$class.to(TraversableOnce.scala:310) + at scala.collection.AbstractIterator.to(Iterator.scala:1334) + at scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:302) + at scala.collection.AbstractIterator.toBuffer(Iterator.scala:1334) + at 
scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:289) + at scala.collection.AbstractIterator.toArray(Iterator.scala:1334) + at org.apache.spark.rdd.RDD$$anonfun$take$1$$anonfun$29.apply(RDD.scala:1364) + at org.apache.spark.rdd.RDD$$anonfun$take$1$$anonfun$29.apply(RDD.scala:1364) + at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:2101) + at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:2101) + at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90) + at org.apache.spark.scheduler.Task.run(Task.scala:121) + at org.apache.spark.executor.Executor$TaskRunner$$anonfun$10.apply(Executor.scala:402) + at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360) + at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:408) + at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) + at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) + at java.lang.Thread.run(Thread.java:748) + +5347 [task-result-getter-3] ERROR org.apache.spark.scheduler.TaskSetManager - Task 0 in stage 8.0 failed 1 times; aborting job +0 [ScalaTest-run] WARN org.apache.spark.util.Utils - Your hostname, pop-os resolves to a loopback address: 127.0.1.1; using 192.168.0.105 instead (on interface enp0s25) +2 [ScalaTest-run] WARN org.apache.spark.util.Utils - Set SPARK_LOCAL_IP if you need to bind to another address +42 [ScalaTest-run] INFO org.apache.spark.SparkContext - Running Spark version 2.4.0 +336 [ScalaTest-run] WARN org.apache.hadoop.util.NativeCodeLoader - Unable to load native-hadoop library for your platform... using builtin-java classes where applicable +435 [ScalaTest-run] INFO org.apache.spark.SparkContext - Submitted application: Triple reader +511 [ScalaTest-run] INFO org.apache.spark.SecurityManager - Changing view acls to: eisenbahnplatte +511 [ScalaTest-run] INFO org.apache.spark.SecurityManager - Changing modify acls to: eisenbahnplatte +511 [ScalaTest-run] INFO org.apache.spark.SecurityManager - Changing view acls groups to: +512 [ScalaTest-run] INFO org.apache.spark.SecurityManager - Changing modify acls groups to: +512 [ScalaTest-run] INFO org.apache.spark.SecurityManager - SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(eisenbahnplatte); groups with view permissions: Set(); users with modify permissions: Set(eisenbahnplatte); groups with modify permissions: Set() +840 [ScalaTest-run] INFO org.apache.spark.util.Utils - Successfully started service 'sparkDriver' on port 43307. 
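Both aborts above come from one mismatch: the input is Turtle syntax (an `@prefix` directive at line 1, col 1, and the keyword `a`, Turtle's shorthand for `rdf:type`, at col 49), but the stack traces show Jena's line-based N-Triples reader (`LangNTriples`) consuming it. A minimal, self-contained sketch of that mismatch follows; it is independent of the Databus client code, and the object name and example data are purely illustrative:

```scala
import java.io.ByteArrayInputStream
import java.nio.charset.StandardCharsets
import org.apache.jena.rdf.model.ModelFactory
import org.apache.jena.riot.{Lang, RDFDataMgr}

object RiotErrorRepro {
  // Legal Turtle, but illegal N-Triples: "@prefix" produces
  // "Expected BNode or IRI: Got: [DIRECTIVE:prefix]" and the
  // keyword "a" produces "Expected IRI: Got: [KEYWORD:a]"
  // when the content is parsed line-by-line as N-Triples.
  val turtle: String =
    """@prefix ex: <http://example.org/> .
      |<http://example.org/thing> a ex:Thing .
      |""".stripMargin

  def parse(lang: Lang): Unit = {
    val model = ModelFactory.createDefaultModel()
    val in = new ByteArrayInputStream(turtle.getBytes(StandardCharsets.UTF_8))
    RDFDataMgr.read(model, in, lang) // throws RiotException for Lang.NTRIPLES
    println(s"$lang: parsed ${model.size()} triples")
  }

  def main(args: Array[String]): Unit = {
    parse(Lang.TURTLE)   // succeeds: one triple
    parse(Lang.NTRIPLES) // fails exactly like the log entries above
  }
}
```

Selecting the parser from the file name, e.g. via Jena's `RDFLanguages.filenameToLang`, avoids this class of failure whenever the file extension can be trusted.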
+0 [ScalaTest-run] WARN org.apache.spark.util.Utils - Your hostname, pop-os resolves to a loopback address: 127.0.1.1; using 192.168.0.105 instead (on interface enp0s25)
+2 [ScalaTest-run] WARN org.apache.spark.util.Utils - Set SPARK_LOCAL_IP if you need to bind to another address
+42 [ScalaTest-run] INFO org.apache.spark.SparkContext - Running Spark version 2.4.0
+435 [ScalaTest-run] INFO org.apache.spark.SparkContext - Submitted application: Triple reader
+840 [ScalaTest-run] INFO org.apache.spark.util.Utils - Successfully started service 'sparkDriver' on port 43307.
+1301 [ScalaTest-run] INFO org.apache.spark.storage.BlockManager - Initialized BlockManager: BlockManagerId(driver, 192.168.0.105, 37079, None)
+2105 [ScalaTest-run-running-roundTripTests] WARN org.apache.spark.sql.SparkSession$Builder - Using an existing SparkSession; some configuration may not take effect.
+3326 [Executor task launch worker for task 0] ERROR org.apache.jena.riot - [line: 1, col: 1 ] Expected BNode or IRI: Got: [DIRECTIVE:prefix]
+3334 [Executor task launch worker for task 0] ERROR org.apache.spark.executor.Executor - Exception in task 0.0 in stage 0.0 (TID 0)
+3360 [task-result-getter-0] ERROR org.apache.spark.scheduler.TaskSetManager - Task 0 in stage 0.0 failed 1 times; aborting job
+5222 [Executor task launch worker for task 56] ERROR org.apache.jena.riot - [line: 1, col: 49] Expected IRI: Got: [KEYWORD:a]
+5223 [Executor task launch worker for task 56] ERROR org.apache.spark.executor.Executor - Exception in task 0.0 in stage 8.0 (TID 56)
+5225 [task-result-getter-3] ERROR org.apache.spark.scheduler.TaskSetManager - Task 0 in stage 8.0 failed 1 times; aborting job
using builtin-java classes where applicable +478 [ScalaTest-run] INFO org.apache.spark.SparkContext - Submitted application: Triple reader +537 [ScalaTest-run] INFO org.apache.spark.SecurityManager - Changing view acls to: eisenbahnplatte +537 [ScalaTest-run] INFO org.apache.spark.SecurityManager - Changing modify acls to: eisenbahnplatte +537 [ScalaTest-run] INFO org.apache.spark.SecurityManager - Changing view acls groups to: +538 [ScalaTest-run] INFO org.apache.spark.SecurityManager - Changing modify acls groups to: +538 [ScalaTest-run] INFO org.apache.spark.SecurityManager - SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(eisenbahnplatte); groups with view permissions: Set(); users with modify permissions: Set(eisenbahnplatte); groups with modify permissions: Set() +857 [ScalaTest-run] INFO org.apache.spark.util.Utils - Successfully started service 'sparkDriver' on port 35433. +876 [ScalaTest-run] INFO org.apache.spark.SparkEnv - Registering MapOutputTracker +890 [ScalaTest-run] INFO org.apache.spark.SparkEnv - Registering BlockManagerMaster +892 [ScalaTest-run] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information +893 [ScalaTest-run] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - BlockManagerMasterEndpoint up +927 [ScalaTest-run] INFO org.apache.spark.storage.DiskBlockManager - Created local directory at /tmp/blockmgr-0967c162-d961-45be-8b30-fae1d1b72871 +942 [ScalaTest-run] INFO org.apache.spark.storage.memory.MemoryStore - MemoryStore started with capacity 1946.1 MB +953 [ScalaTest-run] INFO org.apache.spark.SparkEnv - Registering OutputCommitCoordinator +1022 [ScalaTest-run] INFO org.spark_project.jetty.util.log - Logging initialized @1974ms +1070 [ScalaTest-run] INFO org.spark_project.jetty.server.Server - jetty-9.3.z-SNAPSHOT, build timestamp: unknown, git hash: unknown +1083 [ScalaTest-run] INFO org.spark_project.jetty.server.Server - Started @2036ms +1097 [ScalaTest-run] INFO org.spark_project.jetty.server.AbstractConnector - Started ServerConnector@82c57b3{HTTP/1.1,[http/1.1]}{0.0.0.0:4040} +1097 [ScalaTest-run] INFO org.apache.spark.util.Utils - Successfully started service 'SparkUI' on port 4040. 
+1116 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@6cc0bcf6{/jobs,null,AVAILABLE,@Spark} +1117 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@324dcd31{/jobs/json,null,AVAILABLE,@Spark} +1118 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@503d56b5{/jobs/job,null,AVAILABLE,@Spark} +1120 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@433ffad1{/jobs/job/json,null,AVAILABLE,@Spark} +1120 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@1fc793c2{/stages,null,AVAILABLE,@Spark} +1121 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@2575f671{/stages/json,null,AVAILABLE,@Spark} +1122 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@329a1243{/stages/stage,null,AVAILABLE,@Spark} +1124 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@27f9e982{/stages/stage/json,null,AVAILABLE,@Spark} +1125 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@4593ff34{/stages/pool,null,AVAILABLE,@Spark} +1125 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@37d3d232{/stages/pool/json,null,AVAILABLE,@Spark} +1126 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@30c0ccff{/storage,null,AVAILABLE,@Spark} +1127 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@581d969c{/storage/json,null,AVAILABLE,@Spark} +1128 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@22db8f4{/storage/rdd,null,AVAILABLE,@Spark} +1128 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@2b46a8c1{/storage/rdd/json,null,AVAILABLE,@Spark} +1129 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@1d572e62{/environment,null,AVAILABLE,@Spark} +1130 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@29caf222{/environment/json,null,AVAILABLE,@Spark} +1131 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@46cf05f7{/executors,null,AVAILABLE,@Spark} +1132 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@5851bd4f{/executors/json,null,AVAILABLE,@Spark} +1133 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7cd1ac19{/executors/threadDump,null,AVAILABLE,@Spark} +1133 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@2f40a43{/executors/threadDump/json,null,AVAILABLE,@Spark} +1139 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@3caa4757{/static,null,AVAILABLE,@Spark} +1139 [ScalaTest-run] INFO 
org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@6e0ff644{/,null,AVAILABLE,@Spark} +1141 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@58dea0a5{/api,null,AVAILABLE,@Spark} +1141 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@733037{/jobs/job/kill,null,AVAILABLE,@Spark} +1142 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7728643a{/stages/stage/kill,null,AVAILABLE,@Spark} +1143 [ScalaTest-run] INFO org.apache.spark.ui.SparkUI - Bound SparkUI to 0.0.0.0, and started at http://192.168.0.105:4040 +1204 [ScalaTest-run] INFO org.apache.spark.executor.Executor - Starting executor ID driver on host localhost +1272 [ScalaTest-run] INFO org.apache.spark.util.Utils - Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 35627. +1273 [ScalaTest-run] INFO org.apache.spark.network.netty.NettyBlockTransferService - Server created on 192.168.0.105:35627 +1274 [ScalaTest-run] INFO org.apache.spark.storage.BlockManager - Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy +1300 [ScalaTest-run] INFO org.apache.spark.storage.BlockManagerMaster - Registering BlockManager BlockManagerId(driver, 192.168.0.105, 35627, None) +1303 [dispatcher-event-loop-2] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Registering block manager 192.168.0.105:35627 with 1946.1 MB RAM, BlockManagerId(driver, 192.168.0.105, 35627, None) +1305 [ScalaTest-run] INFO org.apache.spark.storage.BlockManagerMaster - Registered BlockManager BlockManagerId(driver, 192.168.0.105, 35627, None) +1306 [ScalaTest-run] INFO org.apache.spark.storage.BlockManager - Initialized BlockManager: BlockManagerId(driver, 192.168.0.105, 35627, None) +1468 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@21719a0{/metrics/json,null,AVAILABLE,@Spark} +0 [ScalaTest-run] WARN org.apache.spark.util.Utils - Your hostname, pop-os resolves to a loopback address: 127.0.1.1; using 192.168.0.105 instead (on interface enp0s25) +2 [ScalaTest-run] WARN org.apache.spark.util.Utils - Set SPARK_LOCAL_IP if you need to bind to another address +38 [ScalaTest-run] INFO org.apache.spark.SparkContext - Running Spark version 2.4.0 +354 [ScalaTest-run] WARN org.apache.hadoop.util.NativeCodeLoader - Unable to load native-hadoop library for your platform... using builtin-java classes where applicable +492 [ScalaTest-run] INFO org.apache.spark.SparkContext - Submitted application: Triple reader +568 [ScalaTest-run] INFO org.apache.spark.SecurityManager - Changing view acls to: eisenbahnplatte +568 [ScalaTest-run] INFO org.apache.spark.SecurityManager - Changing modify acls to: eisenbahnplatte +569 [ScalaTest-run] INFO org.apache.spark.SecurityManager - Changing view acls groups to: +569 [ScalaTest-run] INFO org.apache.spark.SecurityManager - Changing modify acls groups to: +570 [ScalaTest-run] INFO org.apache.spark.SecurityManager - SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(eisenbahnplatte); groups with view permissions: Set(); users with modify permissions: Set(eisenbahnplatte); groups with modify permissions: Set() +915 [ScalaTest-run] INFO org.apache.spark.util.Utils - Successfully started service 'sparkDriver' on port 45775. 
+935 [ScalaTest-run] INFO org.apache.spark.SparkEnv - Registering MapOutputTracker +949 [ScalaTest-run] INFO org.apache.spark.SparkEnv - Registering BlockManagerMaster +951 [ScalaTest-run] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information +951 [ScalaTest-run] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - BlockManagerMasterEndpoint up +990 [ScalaTest-run] INFO org.apache.spark.storage.DiskBlockManager - Created local directory at /tmp/blockmgr-e0ada493-c96a-4ca5-98ff-97074387f7ba +1006 [ScalaTest-run] INFO org.apache.spark.storage.memory.MemoryStore - MemoryStore started with capacity 1946.1 MB +1016 [ScalaTest-run] INFO org.apache.spark.SparkEnv - Registering OutputCommitCoordinator +1087 [ScalaTest-run] INFO org.spark_project.jetty.util.log - Logging initialized @2023ms +1136 [ScalaTest-run] INFO org.spark_project.jetty.server.Server - jetty-9.3.z-SNAPSHOT, build timestamp: unknown, git hash: unknown +1149 [ScalaTest-run] INFO org.spark_project.jetty.server.Server - Started @2086ms +1163 [ScalaTest-run] INFO org.spark_project.jetty.server.AbstractConnector - Started ServerConnector@82c57b3{HTTP/1.1,[http/1.1]}{0.0.0.0:4040} +1163 [ScalaTest-run] INFO org.apache.spark.util.Utils - Successfully started service 'SparkUI' on port 4040. +1187 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@6cc0bcf6{/jobs,null,AVAILABLE,@Spark} +1188 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@324dcd31{/jobs/json,null,AVAILABLE,@Spark} +1189 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@503d56b5{/jobs/job,null,AVAILABLE,@Spark} +1190 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@433ffad1{/jobs/job/json,null,AVAILABLE,@Spark} +1191 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@1fc793c2{/stages,null,AVAILABLE,@Spark} +1192 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@2575f671{/stages/json,null,AVAILABLE,@Spark} +1192 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@329a1243{/stages/stage,null,AVAILABLE,@Spark} +1194 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@27f9e982{/stages/stage/json,null,AVAILABLE,@Spark} +1195 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@4593ff34{/stages/pool,null,AVAILABLE,@Spark} +1196 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@37d3d232{/stages/pool/json,null,AVAILABLE,@Spark} +1197 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@30c0ccff{/storage,null,AVAILABLE,@Spark} +1198 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@581d969c{/storage/json,null,AVAILABLE,@Spark} +1199 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@22db8f4{/storage/rdd,null,AVAILABLE,@Spark} +1199 [ScalaTest-run] INFO 
org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@2b46a8c1{/storage/rdd/json,null,AVAILABLE,@Spark} +1200 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@1d572e62{/environment,null,AVAILABLE,@Spark} +1201 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@29caf222{/environment/json,null,AVAILABLE,@Spark} +1202 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@46cf05f7{/executors,null,AVAILABLE,@Spark} +1203 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@5851bd4f{/executors/json,null,AVAILABLE,@Spark} +1204 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7cd1ac19{/executors/threadDump,null,AVAILABLE,@Spark} +1205 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@2f40a43{/executors/threadDump/json,null,AVAILABLE,@Spark} +1210 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@3caa4757{/static,null,AVAILABLE,@Spark} +1211 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@6e0ff644{/,null,AVAILABLE,@Spark} +1212 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@58dea0a5{/api,null,AVAILABLE,@Spark} +1212 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@733037{/jobs/job/kill,null,AVAILABLE,@Spark} +1213 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7728643a{/stages/stage/kill,null,AVAILABLE,@Spark} +1214 [ScalaTest-run] INFO org.apache.spark.ui.SparkUI - Bound SparkUI to 0.0.0.0, and started at http://192.168.0.105:4040 +1274 [ScalaTest-run] INFO org.apache.spark.executor.Executor - Starting executor ID driver on host localhost +1342 [ScalaTest-run] INFO org.apache.spark.util.Utils - Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 36273. 
+1343 [ScalaTest-run] INFO org.apache.spark.network.netty.NettyBlockTransferService - Server created on 192.168.0.105:36273 +1344 [ScalaTest-run] INFO org.apache.spark.storage.BlockManager - Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy +1371 [ScalaTest-run] INFO org.apache.spark.storage.BlockManagerMaster - Registering BlockManager BlockManagerId(driver, 192.168.0.105, 36273, None) +1374 [dispatcher-event-loop-2] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Registering block manager 192.168.0.105:36273 with 1946.1 MB RAM, BlockManagerId(driver, 192.168.0.105, 36273, None) +1375 [ScalaTest-run] INFO org.apache.spark.storage.BlockManagerMaster - Registered BlockManager BlockManagerId(driver, 192.168.0.105, 36273, None) +1376 [ScalaTest-run] INFO org.apache.spark.storage.BlockManager - Initialized BlockManager: BlockManagerId(driver, 192.168.0.105, 36273, None) +1503 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@21719a0{/metrics/json,null,AVAILABLE,@Spark} +0 [ScalaTest-run] WARN org.apache.spark.util.Utils - Your hostname, pop-os resolves to a loopback address: 127.0.1.1; using 192.168.0.105 instead (on interface enp0s25) +2 [ScalaTest-run] WARN org.apache.spark.util.Utils - Set SPARK_LOCAL_IP if you need to bind to another address +50 [ScalaTest-run] INFO org.apache.spark.SparkContext - Running Spark version 2.4.0 +331 [ScalaTest-run] WARN org.apache.hadoop.util.NativeCodeLoader - Unable to load native-hadoop library for your platform... using builtin-java classes where applicable +450 [ScalaTest-run] INFO org.apache.spark.SparkContext - Submitted application: Triple reader +514 [ScalaTest-run] INFO org.apache.spark.SecurityManager - Changing view acls to: eisenbahnplatte +515 [ScalaTest-run] INFO org.apache.spark.SecurityManager - Changing modify acls to: eisenbahnplatte +515 [ScalaTest-run] INFO org.apache.spark.SecurityManager - Changing view acls groups to: +515 [ScalaTest-run] INFO org.apache.spark.SecurityManager - Changing modify acls groups to: +516 [ScalaTest-run] INFO org.apache.spark.SecurityManager - SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(eisenbahnplatte); groups with view permissions: Set(); users with modify permissions: Set(eisenbahnplatte); groups with modify permissions: Set() +891 [ScalaTest-run] INFO org.apache.spark.util.Utils - Successfully started service 'sparkDriver' on port 41733. 
+915 [ScalaTest-run] INFO org.apache.spark.SparkEnv - Registering MapOutputTracker +930 [ScalaTest-run] INFO org.apache.spark.SparkEnv - Registering BlockManagerMaster +933 [ScalaTest-run] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information +933 [ScalaTest-run] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - BlockManagerMasterEndpoint up +967 [ScalaTest-run] INFO org.apache.spark.storage.DiskBlockManager - Created local directory at /tmp/blockmgr-c216880f-9144-4fe7-80ac-f75bdc144b01 +984 [ScalaTest-run] INFO org.apache.spark.storage.memory.MemoryStore - MemoryStore started with capacity 1946.1 MB +997 [ScalaTest-run] INFO org.apache.spark.SparkEnv - Registering OutputCommitCoordinator +1075 [ScalaTest-run] INFO org.spark_project.jetty.util.log - Logging initialized @1955ms +1126 [ScalaTest-run] INFO org.spark_project.jetty.server.Server - jetty-9.3.z-SNAPSHOT, build timestamp: unknown, git hash: unknown +1139 [ScalaTest-run] INFO org.spark_project.jetty.server.Server - Started @2021ms +1154 [ScalaTest-run] INFO org.spark_project.jetty.server.AbstractConnector - Started ServerConnector@600b0b7{HTTP/1.1,[http/1.1]}{0.0.0.0:4040} +1154 [ScalaTest-run] INFO org.apache.spark.util.Utils - Successfully started service 'SparkUI' on port 4040. +1180 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@32f61a31{/jobs,null,AVAILABLE,@Spark} +1181 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@72bca894{/jobs/json,null,AVAILABLE,@Spark} +1182 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@433ffad1{/jobs/job,null,AVAILABLE,@Spark} +1183 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@2575f671{/jobs/job/json,null,AVAILABLE,@Spark} +1184 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@329a1243{/stages,null,AVAILABLE,@Spark} +1185 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@ecf9fb3{/stages/json,null,AVAILABLE,@Spark} +1185 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@2d35442b{/stages/stage,null,AVAILABLE,@Spark} +1187 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@37d3d232{/stages/stage/json,null,AVAILABLE,@Spark} +1188 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@30c0ccff{/stages/pool,null,AVAILABLE,@Spark} +1188 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@581d969c{/stages/pool/json,null,AVAILABLE,@Spark} +1189 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@22db8f4{/storage,null,AVAILABLE,@Spark} +1190 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@2b46a8c1{/storage/json,null,AVAILABLE,@Spark} +1190 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@1d572e62{/storage/rdd,null,AVAILABLE,@Spark} +1191 [ScalaTest-run] INFO 
org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@29caf222{/storage/rdd/json,null,AVAILABLE,@Spark} +1191 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@46cf05f7{/environment,null,AVAILABLE,@Spark} +1192 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@5851bd4f{/environment/json,null,AVAILABLE,@Spark} +1193 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7cd1ac19{/executors,null,AVAILABLE,@Spark} +1194 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@2f40a43{/executors/json,null,AVAILABLE,@Spark} +1194 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@3caa4757{/executors/threadDump,null,AVAILABLE,@Spark} +1195 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@69c43e48{/executors/threadDump/json,null,AVAILABLE,@Spark} +1201 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@1804f60d{/static,null,AVAILABLE,@Spark} +1202 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@2a2bb0eb{/,null,AVAILABLE,@Spark} +1203 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@3c291aad{/api,null,AVAILABLE,@Spark} +1204 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@320e400{/jobs/job/kill,null,AVAILABLE,@Spark} +1204 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@5167268{/stages/stage/kill,null,AVAILABLE,@Spark} +1206 [ScalaTest-run] INFO org.apache.spark.ui.SparkUI - Bound SparkUI to 0.0.0.0, and started at http://192.168.0.105:4040 +1266 [ScalaTest-run] INFO org.apache.spark.executor.Executor - Starting executor ID driver on host localhost +1336 [ScalaTest-run] INFO org.apache.spark.util.Utils - Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 36573. 
+1336 [ScalaTest-run] INFO org.apache.spark.network.netty.NettyBlockTransferService - Server created on 192.168.0.105:36573
+1337 [ScalaTest-run] INFO org.apache.spark.storage.BlockManager - Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy
+1356 [ScalaTest-run] INFO org.apache.spark.storage.BlockManagerMaster - Registering BlockManager BlockManagerId(driver, 192.168.0.105, 36573, None)
+1365 [dispatcher-event-loop-2] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Registering block manager 192.168.0.105:36573 with 1946.1 MB RAM, BlockManagerId(driver, 192.168.0.105, 36573, None)
+1368 [ScalaTest-run] INFO org.apache.spark.storage.BlockManagerMaster - Registered BlockManager BlockManagerId(driver, 192.168.0.105, 36573, None)
+1368 [ScalaTest-run] INFO org.apache.spark.storage.BlockManager - Initialized BlockManager: BlockManagerId(driver, 192.168.0.105, 36573, None)
+1495 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@77ee25f1{/metrics/json,null,AVAILABLE,@Spark}
+0 [ScalaTest-run] WARN org.apache.spark.util.Utils - Your hostname, pop-os resolves to a loopback address: 127.0.1.1; using 192.168.0.105 instead (on interface enp0s25)
+1 [ScalaTest-run] WARN org.apache.spark.util.Utils - Set SPARK_LOCAL_IP if you need to bind to another address
+38 [ScalaTest-run] INFO org.apache.spark.SparkContext - Running Spark version 2.4.0
+304 [ScalaTest-run] WARN org.apache.hadoop.util.NativeCodeLoader - Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+419 [ScalaTest-run] INFO org.apache.spark.SparkContext - Submitted application: Triple reader
+482 [ScalaTest-run] INFO org.apache.spark.SecurityManager - Changing view acls to: eisenbahnplatte
+483 [ScalaTest-run] INFO org.apache.spark.SecurityManager - Changing modify acls to: eisenbahnplatte
+483 [ScalaTest-run] INFO org.apache.spark.SecurityManager - Changing view acls groups to:
+483 [ScalaTest-run] INFO org.apache.spark.SecurityManager - Changing modify acls groups to:
+484 [ScalaTest-run] INFO org.apache.spark.SecurityManager - SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(eisenbahnplatte); groups with view permissions: Set(); users with modify permissions: Set(eisenbahnplatte); groups with modify permissions: Set()
+822 [ScalaTest-run] INFO org.apache.spark.util.Utils - Successfully started service 'sparkDriver' on port 44411.
+843 [ScalaTest-run] INFO org.apache.spark.SparkEnv - Registering MapOutputTracker
+859 [ScalaTest-run] INFO org.apache.spark.SparkEnv - Registering BlockManagerMaster
+862 [ScalaTest-run] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information
+862 [ScalaTest-run] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - BlockManagerMasterEndpoint up
+905 [ScalaTest-run] INFO org.apache.spark.storage.DiskBlockManager - Created local directory at /tmp/blockmgr-e9b42bf2-e7b7-49c8-8dae-95933ed05a7e
+921 [ScalaTest-run] INFO org.apache.spark.storage.memory.MemoryStore - MemoryStore started with capacity 1946.1 MB
+931 [ScalaTest-run] INFO org.apache.spark.SparkEnv - Registering OutputCommitCoordinator
+1007 [ScalaTest-run] INFO org.spark_project.jetty.util.log - Logging initialized @1952ms
+1055 [ScalaTest-run] INFO org.spark_project.jetty.server.Server - jetty-9.3.z-SNAPSHOT, build timestamp: unknown, git hash: unknown
+1069 [ScalaTest-run] INFO org.spark_project.jetty.server.Server - Started @2016ms
+1083 [ScalaTest-run] INFO org.spark_project.jetty.server.AbstractConnector - Started ServerConnector@82c57b3{HTTP/1.1,[http/1.1]}{0.0.0.0:4040}
+1084 [ScalaTest-run] INFO org.apache.spark.util.Utils - Successfully started service 'SparkUI' on port 4040.
+1106 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@6cc0bcf6{/jobs,null,AVAILABLE,@Spark}
+1107 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@324dcd31{/jobs/json,null,AVAILABLE,@Spark}
+1108 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@503d56b5{/jobs/job,null,AVAILABLE,@Spark}
+1110 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@433ffad1{/jobs/job/json,null,AVAILABLE,@Spark}
+1111 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@1fc793c2{/stages,null,AVAILABLE,@Spark}
+1111 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@2575f671{/stages/json,null,AVAILABLE,@Spark}
+1112 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@329a1243{/stages/stage,null,AVAILABLE,@Spark}
+1114 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@27f9e982{/stages/stage/json,null,AVAILABLE,@Spark}
+1115 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@4593ff34{/stages/pool,null,AVAILABLE,@Spark}
+1116 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@37d3d232{/stages/pool/json,null,AVAILABLE,@Spark}
+1117 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@30c0ccff{/storage,null,AVAILABLE,@Spark}
+1118 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@581d969c{/storage/json,null,AVAILABLE,@Spark}
+1119 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@22db8f4{/storage/rdd,null,AVAILABLE,@Spark}
+1119 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@2b46a8c1{/storage/rdd/json,null,AVAILABLE,@Spark}
+1120 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@1d572e62{/environment,null,AVAILABLE,@Spark}
+1121 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@29caf222{/environment/json,null,AVAILABLE,@Spark}
+1122 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@46cf05f7{/executors,null,AVAILABLE,@Spark}
+1123 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@5851bd4f{/executors/json,null,AVAILABLE,@Spark}
+1124 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7cd1ac19{/executors/threadDump,null,AVAILABLE,@Spark}
+1125 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@2f40a43{/executors/threadDump/json,null,AVAILABLE,@Spark}
+1130 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@3caa4757{/static,null,AVAILABLE,@Spark}
+1131 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@6e0ff644{/,null,AVAILABLE,@Spark}
+1132 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@58dea0a5{/api,null,AVAILABLE,@Spark}
+1133 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@733037{/jobs/job/kill,null,AVAILABLE,@Spark}
+1134 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7728643a{/stages/stage/kill,null,AVAILABLE,@Spark}
+1136 [ScalaTest-run] INFO org.apache.spark.ui.SparkUI - Bound SparkUI to 0.0.0.0, and started at http://192.168.0.105:4040
+1197 [ScalaTest-run] INFO org.apache.spark.executor.Executor - Starting executor ID driver on host localhost
+1265 [ScalaTest-run] INFO org.apache.spark.util.Utils - Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 35963.
+1266 [ScalaTest-run] INFO org.apache.spark.network.netty.NettyBlockTransferService - Server created on 192.168.0.105:35963
+1267 [ScalaTest-run] INFO org.apache.spark.storage.BlockManager - Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy
+1292 [ScalaTest-run] INFO org.apache.spark.storage.BlockManagerMaster - Registering BlockManager BlockManagerId(driver, 192.168.0.105, 35963, None)
+1295 [dispatcher-event-loop-2] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Registering block manager 192.168.0.105:35963 with 1946.1 MB RAM, BlockManagerId(driver, 192.168.0.105, 35963, None)
+1297 [ScalaTest-run] INFO org.apache.spark.storage.BlockManagerMaster - Registered BlockManager BlockManagerId(driver, 192.168.0.105, 35963, None)
+1298 [ScalaTest-run] INFO org.apache.spark.storage.BlockManager - Initialized BlockManager: BlockManagerId(driver, 192.168.0.105, 35963, None)
+1436 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@21719a0{/metrics/json,null,AVAILABLE,@Spark}
+2147 [ScalaTest-run-running-roundTripTests] WARN org.apache.spark.sql.SparkSession$Builder - Using an existing SparkSession; some configuration may not take effect.
+1305 [ScalaTest-run] INFO org.apache.spark.network.netty.NettyBlockTransferService - Server created on 192.168.0.105:34957 +1306 [ScalaTest-run] INFO org.apache.spark.storage.BlockManager - Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy +1332 [ScalaTest-run] INFO org.apache.spark.storage.BlockManagerMaster - Registering BlockManager BlockManagerId(driver, 192.168.0.105, 34957, None) +1334 [dispatcher-event-loop-2] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Registering block manager 192.168.0.105:34957 with 1946.1 MB RAM, BlockManagerId(driver, 192.168.0.105, 34957, None) +1337 [ScalaTest-run] INFO org.apache.spark.storage.BlockManagerMaster - Registered BlockManager BlockManagerId(driver, 192.168.0.105, 34957, None) +1338 [ScalaTest-run] INFO org.apache.spark.storage.BlockManager - Initialized BlockManager: BlockManagerId(driver, 192.168.0.105, 34957, None) +1481 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@21719a0{/metrics/json,null,AVAILABLE,@Spark} +2199 [ScalaTest-run-running-roundTripTests] WARN org.apache.spark.sql.SparkSession$Builder - Using an existing SparkSession; some configuration may not take effect. +3959 [ScalaTest-run-running-roundTripTests] WARN org.apache.jena.riot - Only triples or default graph data expected : named graph data ignored +3965 [ScalaTest-run-running-roundTripTests] WARN org.apache.jena.riot - Only triples or default graph data expected : named graph data ignored +0 [ScalaTest-run] WARN org.apache.spark.util.Utils - Your hostname, pop-os resolves to a loopback address: 127.0.1.1; using 192.168.0.105 instead (on interface enp0s25) +1 [ScalaTest-run] WARN org.apache.spark.util.Utils - Set SPARK_LOCAL_IP if you need to bind to another address +38 [ScalaTest-run] INFO org.apache.spark.SparkContext - Running Spark version 2.4.0 +320 [ScalaTest-run] WARN org.apache.hadoop.util.NativeCodeLoader - Unable to load native-hadoop library for your platform... using builtin-java classes where applicable +442 [ScalaTest-run] INFO org.apache.spark.SparkContext - Submitted application: Triple reader +523 [ScalaTest-run] INFO org.apache.spark.SecurityManager - Changing view acls to: eisenbahnplatte +525 [ScalaTest-run] INFO org.apache.spark.SecurityManager - Changing modify acls to: eisenbahnplatte +525 [ScalaTest-run] INFO org.apache.spark.SecurityManager - Changing view acls groups to: +526 [ScalaTest-run] INFO org.apache.spark.SecurityManager - Changing modify acls groups to: +527 [ScalaTest-run] INFO org.apache.spark.SecurityManager - SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(eisenbahnplatte); groups with view permissions: Set(); users with modify permissions: Set(eisenbahnplatte); groups with modify permissions: Set() +987 [ScalaTest-run] INFO org.apache.spark.util.Utils - Successfully started service 'sparkDriver' on port 34153. 
+1012 [ScalaTest-run] INFO org.apache.spark.SparkEnv - Registering MapOutputTracker +1032 [ScalaTest-run] INFO org.apache.spark.SparkEnv - Registering BlockManagerMaster +1035 [ScalaTest-run] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information +1035 [ScalaTest-run] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - BlockManagerMasterEndpoint up +1079 [ScalaTest-run] INFO org.apache.spark.storage.DiskBlockManager - Created local directory at /tmp/blockmgr-94f65474-ead2-4464-bf72-abf2466d90ef +1101 [ScalaTest-run] INFO org.apache.spark.storage.memory.MemoryStore - MemoryStore started with capacity 1946.1 MB +1112 [ScalaTest-run] INFO org.apache.spark.SparkEnv - Registering OutputCommitCoordinator +1190 [ScalaTest-run] INFO org.spark_project.jetty.util.log - Logging initialized @2066ms +1244 [ScalaTest-run] INFO org.spark_project.jetty.server.Server - jetty-9.3.z-SNAPSHOT, build timestamp: unknown, git hash: unknown +1260 [ScalaTest-run] INFO org.spark_project.jetty.server.Server - Started @2137ms +1275 [ScalaTest-run] INFO org.spark_project.jetty.server.AbstractConnector - Started ServerConnector@600b0b7{HTTP/1.1,[http/1.1]}{0.0.0.0:4040} +1275 [ScalaTest-run] INFO org.apache.spark.util.Utils - Successfully started service 'SparkUI' on port 4040. +1298 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@32f61a31{/jobs,null,AVAILABLE,@Spark} +1299 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@72bca894{/jobs/json,null,AVAILABLE,@Spark} +1300 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@433ffad1{/jobs/job,null,AVAILABLE,@Spark} +1301 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@2575f671{/jobs/job/json,null,AVAILABLE,@Spark} +1301 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@329a1243{/stages,null,AVAILABLE,@Spark} +1302 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@ecf9fb3{/stages/json,null,AVAILABLE,@Spark} +1302 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@2d35442b{/stages/stage,null,AVAILABLE,@Spark} +1304 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@37d3d232{/stages/stage/json,null,AVAILABLE,@Spark} +1305 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@30c0ccff{/stages/pool,null,AVAILABLE,@Spark} +1306 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@581d969c{/stages/pool/json,null,AVAILABLE,@Spark} +1307 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@22db8f4{/storage,null,AVAILABLE,@Spark} +1308 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@2b46a8c1{/storage/json,null,AVAILABLE,@Spark} +1309 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@1d572e62{/storage/rdd,null,AVAILABLE,@Spark} +1310 [ScalaTest-run] INFO 
org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@29caf222{/storage/rdd/json,null,AVAILABLE,@Spark} +1311 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@46cf05f7{/environment,null,AVAILABLE,@Spark} +1312 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@5851bd4f{/environment/json,null,AVAILABLE,@Spark} +1313 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7cd1ac19{/executors,null,AVAILABLE,@Spark} +1314 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@2f40a43{/executors/json,null,AVAILABLE,@Spark} +1315 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@3caa4757{/executors/threadDump,null,AVAILABLE,@Spark} +1316 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@69c43e48{/executors/threadDump/json,null,AVAILABLE,@Spark} +1322 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@1804f60d{/static,null,AVAILABLE,@Spark} +1322 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@2a2bb0eb{/,null,AVAILABLE,@Spark} +1324 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@3c291aad{/api,null,AVAILABLE,@Spark} +1325 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@320e400{/jobs/job/kill,null,AVAILABLE,@Spark} +1326 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@5167268{/stages/stage/kill,null,AVAILABLE,@Spark} +1327 [ScalaTest-run] INFO org.apache.spark.ui.SparkUI - Bound SparkUI to 0.0.0.0, and started at http://192.168.0.105:4040 +1391 [ScalaTest-run] INFO org.apache.spark.executor.Executor - Starting executor ID driver on host localhost +1459 [ScalaTest-run] INFO org.apache.spark.util.Utils - Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 43283. 
+1460 [ScalaTest-run] INFO org.apache.spark.network.netty.NettyBlockTransferService - Server created on 192.168.0.105:43283 +1461 [ScalaTest-run] INFO org.apache.spark.storage.BlockManager - Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy +1489 [ScalaTest-run] INFO org.apache.spark.storage.BlockManagerMaster - Registering BlockManager BlockManagerId(driver, 192.168.0.105, 43283, None) +1492 [dispatcher-event-loop-2] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Registering block manager 192.168.0.105:43283 with 1946.1 MB RAM, BlockManagerId(driver, 192.168.0.105, 43283, None) +1495 [ScalaTest-run] INFO org.apache.spark.storage.BlockManagerMaster - Registered BlockManager BlockManagerId(driver, 192.168.0.105, 43283, None) +1495 [ScalaTest-run] INFO org.apache.spark.storage.BlockManager - Initialized BlockManager: BlockManagerId(driver, 192.168.0.105, 43283, None) +1620 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@77ee25f1{/metrics/json,null,AVAILABLE,@Spark} +2343 [ScalaTest-run-running-roundTripTests] WARN org.apache.spark.sql.SparkSession$Builder - Using an existing SparkSession; some configuration may not take effect. +4297 [ScalaTest-run-running-roundTripTests] WARN org.apache.jena.riot - Only triples or default graph data expected : named graph data ignored +4302 [ScalaTest-run-running-roundTripTests] WARN org.apache.jena.riot - Only triples or default graph data expected : named graph data ignored +0 [ScalaTest-run] WARN org.apache.spark.util.Utils - Your hostname, pop-os resolves to a loopback address: 127.0.1.1; using 192.168.0.105 instead (on interface enp0s25) +2 [ScalaTest-run] WARN org.apache.spark.util.Utils - Set SPARK_LOCAL_IP if you need to bind to another address +43 [ScalaTest-run] INFO org.apache.spark.SparkContext - Running Spark version 2.4.0 +332 [ScalaTest-run] WARN org.apache.hadoop.util.NativeCodeLoader - Unable to load native-hadoop library for your platform... using builtin-java classes where applicable +443 [ScalaTest-run] INFO org.apache.spark.SparkContext - Submitted application: Triple reader +518 [ScalaTest-run] INFO org.apache.spark.SecurityManager - Changing view acls to: eisenbahnplatte +518 [ScalaTest-run] INFO org.apache.spark.SecurityManager - Changing modify acls to: eisenbahnplatte +518 [ScalaTest-run] INFO org.apache.spark.SecurityManager - Changing view acls groups to: +519 [ScalaTest-run] INFO org.apache.spark.SecurityManager - Changing modify acls groups to: +519 [ScalaTest-run] INFO org.apache.spark.SecurityManager - SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(eisenbahnplatte); groups with view permissions: Set(); users with modify permissions: Set(eisenbahnplatte); groups with modify permissions: Set() +845 [ScalaTest-run] INFO org.apache.spark.util.Utils - Successfully started service 'sparkDriver' on port 34401. 
+864 [ScalaTest-run] INFO org.apache.spark.SparkEnv - Registering MapOutputTracker +879 [ScalaTest-run] INFO org.apache.spark.SparkEnv - Registering BlockManagerMaster +881 [ScalaTest-run] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information +882 [ScalaTest-run] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - BlockManagerMasterEndpoint up +917 [ScalaTest-run] INFO org.apache.spark.storage.DiskBlockManager - Created local directory at /tmp/blockmgr-d1b5153a-e75b-4ba8-859a-9606170bd095 +933 [ScalaTest-run] INFO org.apache.spark.storage.memory.MemoryStore - MemoryStore started with capacity 1946.1 MB +944 [ScalaTest-run] INFO org.apache.spark.SparkEnv - Registering OutputCommitCoordinator +1016 [ScalaTest-run] INFO org.spark_project.jetty.util.log - Logging initialized @1905ms +1067 [ScalaTest-run] INFO org.spark_project.jetty.server.Server - jetty-9.3.z-SNAPSHOT, build timestamp: unknown, git hash: unknown +1080 [ScalaTest-run] INFO org.spark_project.jetty.server.Server - Started @1970ms +1094 [ScalaTest-run] INFO org.spark_project.jetty.server.AbstractConnector - Started ServerConnector@82c57b3{HTTP/1.1,[http/1.1]}{0.0.0.0:4040} +1094 [ScalaTest-run] INFO org.apache.spark.util.Utils - Successfully started service 'SparkUI' on port 4040. +1116 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@6cc0bcf6{/jobs,null,AVAILABLE,@Spark} +1117 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@324dcd31{/jobs/json,null,AVAILABLE,@Spark} +1118 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@503d56b5{/jobs/job,null,AVAILABLE,@Spark} +1119 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@433ffad1{/jobs/job/json,null,AVAILABLE,@Spark} +1119 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@1fc793c2{/stages,null,AVAILABLE,@Spark} +1120 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@2575f671{/stages/json,null,AVAILABLE,@Spark} +1120 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@329a1243{/stages/stage,null,AVAILABLE,@Spark} +1122 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@27f9e982{/stages/stage/json,null,AVAILABLE,@Spark} +1122 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@4593ff34{/stages/pool,null,AVAILABLE,@Spark} +1123 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@37d3d232{/stages/pool/json,null,AVAILABLE,@Spark} +1124 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@30c0ccff{/storage,null,AVAILABLE,@Spark} +1124 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@581d969c{/storage/json,null,AVAILABLE,@Spark} +1125 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@22db8f4{/storage/rdd,null,AVAILABLE,@Spark} +1125 [ScalaTest-run] INFO 
org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@2b46a8c1{/storage/rdd/json,null,AVAILABLE,@Spark} +1126 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@1d572e62{/environment,null,AVAILABLE,@Spark} +1127 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@29caf222{/environment/json,null,AVAILABLE,@Spark} +1127 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@46cf05f7{/executors,null,AVAILABLE,@Spark} +1128 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@5851bd4f{/executors/json,null,AVAILABLE,@Spark} +1129 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7cd1ac19{/executors/threadDump,null,AVAILABLE,@Spark} +1129 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@2f40a43{/executors/threadDump/json,null,AVAILABLE,@Spark} +1135 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@3caa4757{/static,null,AVAILABLE,@Spark} +1136 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@6e0ff644{/,null,AVAILABLE,@Spark} +1137 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@58dea0a5{/api,null,AVAILABLE,@Spark} +1137 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@733037{/jobs/job/kill,null,AVAILABLE,@Spark} +1138 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7728643a{/stages/stage/kill,null,AVAILABLE,@Spark} +1139 [ScalaTest-run] INFO org.apache.spark.ui.SparkUI - Bound SparkUI to 0.0.0.0, and started at http://192.168.0.105:4040 +1197 [ScalaTest-run] INFO org.apache.spark.executor.Executor - Starting executor ID driver on host localhost +1269 [ScalaTest-run] INFO org.apache.spark.util.Utils - Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 40267. 
+1270 [ScalaTest-run] INFO org.apache.spark.network.netty.NettyBlockTransferService - Server created on 192.168.0.105:40267 +1271 [ScalaTest-run] INFO org.apache.spark.storage.BlockManager - Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy +1296 [ScalaTest-run] INFO org.apache.spark.storage.BlockManagerMaster - Registering BlockManager BlockManagerId(driver, 192.168.0.105, 40267, None) +1299 [dispatcher-event-loop-2] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Registering block manager 192.168.0.105:40267 with 1946.1 MB RAM, BlockManagerId(driver, 192.168.0.105, 40267, None) +1300 [ScalaTest-run] INFO org.apache.spark.storage.BlockManagerMaster - Registered BlockManager BlockManagerId(driver, 192.168.0.105, 40267, None) +1301 [ScalaTest-run] INFO org.apache.spark.storage.BlockManager - Initialized BlockManager: BlockManagerId(driver, 192.168.0.105, 40267, None) +1428 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@21719a0{/metrics/json,null,AVAILABLE,@Spark} +2101 [ScalaTest-run-running-roundTripTests] WARN org.apache.spark.sql.SparkSession$Builder - Using an existing SparkSession; some configuration may not take effect. +3848 [ScalaTest-run-running-roundTripTests] WARN org.apache.jena.riot - Only triples or default graph data expected : named graph data ignored +3854 [ScalaTest-run-running-roundTripTests] WARN org.apache.jena.riot - Only triples or default graph data expected : named graph data ignored +5052 [ScalaTest-run-running-roundTripTests] WARN org.apache.jena.riot - Only triples or default graph data expected : named graph data ignored +5052 [ScalaTest-run-running-roundTripTests] WARN org.apache.jena.riot - Only triples or default graph data expected : named graph data ignored +5793 [ScalaTest-run-running-roundTripTests] WARN org.apache.jena.riot - Only triples or default graph data expected : named graph data ignored +5797 [ScalaTest-run-running-roundTripTests] WARN org.apache.jena.riot - Only triples or default graph data expected : named graph data ignored +0 [ScalaTest-run] WARN org.apache.spark.util.Utils - Your hostname, pop-os resolves to a loopback address: 127.0.1.1; using 192.168.0.105 instead (on interface enp0s25) +1 [ScalaTest-run] WARN org.apache.spark.util.Utils - Set SPARK_LOCAL_IP if you need to bind to another address +38 [ScalaTest-run] INFO org.apache.spark.SparkContext - Running Spark version 2.4.0 +324 [ScalaTest-run] WARN org.apache.hadoop.util.NativeCodeLoader - Unable to load native-hadoop library for your platform... 
using builtin-java classes where applicable +438 [ScalaTest-run] INFO org.apache.spark.SparkContext - Submitted application: Triple reader +504 [ScalaTest-run] INFO org.apache.spark.SecurityManager - Changing view acls to: eisenbahnplatte +504 [ScalaTest-run] INFO org.apache.spark.SecurityManager - Changing modify acls to: eisenbahnplatte +505 [ScalaTest-run] INFO org.apache.spark.SecurityManager - Changing view acls groups to: +505 [ScalaTest-run] INFO org.apache.spark.SecurityManager - Changing modify acls groups to: +506 [ScalaTest-run] INFO org.apache.spark.SecurityManager - SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(eisenbahnplatte); groups with view permissions: Set(); users with modify permissions: Set(eisenbahnplatte); groups with modify permissions: Set() +867 [ScalaTest-run] INFO org.apache.spark.util.Utils - Successfully started service 'sparkDriver' on port 41263. +888 [ScalaTest-run] INFO org.apache.spark.SparkEnv - Registering MapOutputTracker +905 [ScalaTest-run] INFO org.apache.spark.SparkEnv - Registering BlockManagerMaster +908 [ScalaTest-run] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information +908 [ScalaTest-run] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - BlockManagerMasterEndpoint up +941 [ScalaTest-run] INFO org.apache.spark.storage.DiskBlockManager - Created local directory at /tmp/blockmgr-713bf253-c15a-4608-bd67-11f89154aee1 +958 [ScalaTest-run] INFO org.apache.spark.storage.memory.MemoryStore - MemoryStore started with capacity 1946.1 MB +969 [ScalaTest-run] INFO org.apache.spark.SparkEnv - Registering OutputCommitCoordinator +1047 [ScalaTest-run] INFO org.spark_project.jetty.util.log - Logging initialized @1958ms +1104 [ScalaTest-run] INFO org.spark_project.jetty.server.Server - jetty-9.3.z-SNAPSHOT, build timestamp: unknown, git hash: unknown +1123 [ScalaTest-run] INFO org.spark_project.jetty.server.Server - Started @2035ms +1141 [ScalaTest-run] INFO org.spark_project.jetty.server.AbstractConnector - Started ServerConnector@82c57b3{HTTP/1.1,[http/1.1]}{0.0.0.0:4040} +1141 [ScalaTest-run] INFO org.apache.spark.util.Utils - Successfully started service 'SparkUI' on port 4040. 
+1164 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@6cc0bcf6{/jobs,null,AVAILABLE,@Spark} +1165 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@324dcd31{/jobs/json,null,AVAILABLE,@Spark} +1166 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@503d56b5{/jobs/job,null,AVAILABLE,@Spark} +1168 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@433ffad1{/jobs/job/json,null,AVAILABLE,@Spark} +1169 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@1fc793c2{/stages,null,AVAILABLE,@Spark} +1169 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@2575f671{/stages/json,null,AVAILABLE,@Spark} +1170 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@329a1243{/stages/stage,null,AVAILABLE,@Spark} +1172 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@27f9e982{/stages/stage/json,null,AVAILABLE,@Spark} +1173 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@4593ff34{/stages/pool,null,AVAILABLE,@Spark} +1174 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@37d3d232{/stages/pool/json,null,AVAILABLE,@Spark} +1175 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@30c0ccff{/storage,null,AVAILABLE,@Spark} +1176 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@581d969c{/storage/json,null,AVAILABLE,@Spark} +1177 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@22db8f4{/storage/rdd,null,AVAILABLE,@Spark} +1177 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@2b46a8c1{/storage/rdd/json,null,AVAILABLE,@Spark} +1178 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@1d572e62{/environment,null,AVAILABLE,@Spark} +1179 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@29caf222{/environment/json,null,AVAILABLE,@Spark} +1180 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@46cf05f7{/executors,null,AVAILABLE,@Spark} +1181 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@5851bd4f{/executors/json,null,AVAILABLE,@Spark} +1182 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7cd1ac19{/executors/threadDump,null,AVAILABLE,@Spark} +1183 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@2f40a43{/executors/threadDump/json,null,AVAILABLE,@Spark} +1188 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@3caa4757{/static,null,AVAILABLE,@Spark} +1189 [ScalaTest-run] INFO 
org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@6e0ff644{/,null,AVAILABLE,@Spark} +1190 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@58dea0a5{/api,null,AVAILABLE,@Spark} +1191 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@733037{/jobs/job/kill,null,AVAILABLE,@Spark} +1192 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7728643a{/stages/stage/kill,null,AVAILABLE,@Spark} +1194 [ScalaTest-run] INFO org.apache.spark.ui.SparkUI - Bound SparkUI to 0.0.0.0, and started at http://192.168.0.105:4040 +1255 [ScalaTest-run] INFO org.apache.spark.executor.Executor - Starting executor ID driver on host localhost +1332 [ScalaTest-run] INFO org.apache.spark.util.Utils - Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 40841. +1332 [ScalaTest-run] INFO org.apache.spark.network.netty.NettyBlockTransferService - Server created on 192.168.0.105:40841 +1333 [ScalaTest-run] INFO org.apache.spark.storage.BlockManager - Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy +1361 [ScalaTest-run] INFO org.apache.spark.storage.BlockManagerMaster - Registering BlockManager BlockManagerId(driver, 192.168.0.105, 40841, None) +1363 [dispatcher-event-loop-2] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Registering block manager 192.168.0.105:40841 with 1946.1 MB RAM, BlockManagerId(driver, 192.168.0.105, 40841, None) +1365 [ScalaTest-run] INFO org.apache.spark.storage.BlockManagerMaster - Registered BlockManager BlockManagerId(driver, 192.168.0.105, 40841, None) +1366 [ScalaTest-run] INFO org.apache.spark.storage.BlockManager - Initialized BlockManager: BlockManagerId(driver, 192.168.0.105, 40841, None) +1500 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@21719a0{/metrics/json,null,AVAILABLE,@Spark} +2186 [ScalaTest-run-running-roundTripTests] WARN org.apache.spark.sql.SparkSession$Builder - Using an existing SparkSession; some configuration may not take effect. 
+3931 [ScalaTest-run-running-roundTripTests] WARN org.apache.jena.riot - Only triples or default graph data expected : named graph data ignored +3938 [ScalaTest-run-running-roundTripTests] WARN org.apache.jena.riot - Only triples or default graph data expected : named graph data ignored +5128 [ScalaTest-run-running-roundTripTests] WARN org.apache.jena.riot - Only triples or default graph data expected : named graph data ignored +5129 [ScalaTest-run-running-roundTripTests] WARN org.apache.jena.riot - Only triples or default graph data expected : named graph data ignored +5612 [ScalaTest-run-running-roundTripTests] WARN org.apache.jena.riot - Only triples or default graph data expected : named graph data ignored +5615 [ScalaTest-run-running-roundTripTests] WARN org.apache.jena.riot - Only triples or default graph data expected : named graph data ignored +0 [ScalaTest-run] WARN org.apache.spark.util.Utils - Your hostname, pop-os resolves to a loopback address: 127.0.1.1; using 192.168.0.105 instead (on interface enp0s25) +1 [ScalaTest-run] WARN org.apache.spark.util.Utils - Set SPARK_LOCAL_IP if you need to bind to another address +43 [ScalaTest-run] INFO org.apache.spark.SparkContext - Running Spark version 2.4.0 +314 [ScalaTest-run] WARN org.apache.hadoop.util.NativeCodeLoader - Unable to load native-hadoop library for your platform... using builtin-java classes where applicable +428 [ScalaTest-run] INFO org.apache.spark.SparkContext - Submitted application: Triple reader +491 [ScalaTest-run] INFO org.apache.spark.SecurityManager - Changing view acls to: eisenbahnplatte +491 [ScalaTest-run] INFO org.apache.spark.SecurityManager - Changing modify acls to: eisenbahnplatte +492 [ScalaTest-run] INFO org.apache.spark.SecurityManager - Changing view acls groups to: +492 [ScalaTest-run] INFO org.apache.spark.SecurityManager - Changing modify acls groups to: +493 [ScalaTest-run] INFO org.apache.spark.SecurityManager - SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(eisenbahnplatte); groups with view permissions: Set(); users with modify permissions: Set(eisenbahnplatte); groups with modify permissions: Set() +816 [ScalaTest-run] INFO org.apache.spark.util.Utils - Successfully started service 'sparkDriver' on port 37963. 
+837 [ScalaTest-run] INFO org.apache.spark.SparkEnv - Registering MapOutputTracker +851 [ScalaTest-run] INFO org.apache.spark.SparkEnv - Registering BlockManagerMaster +853 [ScalaTest-run] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information +853 [ScalaTest-run] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - BlockManagerMasterEndpoint up +891 [ScalaTest-run] INFO org.apache.spark.storage.DiskBlockManager - Created local directory at /tmp/blockmgr-61387ee7-6b97-46ad-b4fb-1b2373ad6c3d +911 [ScalaTest-run] INFO org.apache.spark.storage.memory.MemoryStore - MemoryStore started with capacity 1946.1 MB +922 [ScalaTest-run] INFO org.apache.spark.SparkEnv - Registering OutputCommitCoordinator +995 [ScalaTest-run] INFO org.spark_project.jetty.util.log - Logging initialized @1908ms +1049 [ScalaTest-run] INFO org.spark_project.jetty.server.Server - jetty-9.3.z-SNAPSHOT, build timestamp: unknown, git hash: unknown +1062 [ScalaTest-run] INFO org.spark_project.jetty.server.Server - Started @1977ms +1077 [ScalaTest-run] INFO org.spark_project.jetty.server.AbstractConnector - Started ServerConnector@82c57b3{HTTP/1.1,[http/1.1]}{0.0.0.0:4040} +1077 [ScalaTest-run] INFO org.apache.spark.util.Utils - Successfully started service 'SparkUI' on port 4040. +1098 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@6cc0bcf6{/jobs,null,AVAILABLE,@Spark} +1099 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@324dcd31{/jobs/json,null,AVAILABLE,@Spark} +1100 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@503d56b5{/jobs/job,null,AVAILABLE,@Spark} +1101 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@433ffad1{/jobs/job/json,null,AVAILABLE,@Spark} +1102 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@1fc793c2{/stages,null,AVAILABLE,@Spark} +1103 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@2575f671{/stages/json,null,AVAILABLE,@Spark} +1104 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@329a1243{/stages/stage,null,AVAILABLE,@Spark} +1106 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@27f9e982{/stages/stage/json,null,AVAILABLE,@Spark} +1106 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@4593ff34{/stages/pool,null,AVAILABLE,@Spark} +1107 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@37d3d232{/stages/pool/json,null,AVAILABLE,@Spark} +1108 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@30c0ccff{/storage,null,AVAILABLE,@Spark} +1109 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@581d969c{/storage/json,null,AVAILABLE,@Spark} +1109 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@22db8f4{/storage/rdd,null,AVAILABLE,@Spark} +1110 [ScalaTest-run] INFO 
org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@2b46a8c1{/storage/rdd/json,null,AVAILABLE,@Spark} +1111 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@1d572e62{/environment,null,AVAILABLE,@Spark} +1112 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@29caf222{/environment/json,null,AVAILABLE,@Spark} +1113 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@46cf05f7{/executors,null,AVAILABLE,@Spark} +1114 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@5851bd4f{/executors/json,null,AVAILABLE,@Spark} +1115 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7cd1ac19{/executors/threadDump,null,AVAILABLE,@Spark} +1115 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@2f40a43{/executors/threadDump/json,null,AVAILABLE,@Spark} +1122 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@3caa4757{/static,null,AVAILABLE,@Spark} +1123 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@6e0ff644{/,null,AVAILABLE,@Spark} +1124 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@58dea0a5{/api,null,AVAILABLE,@Spark} +1125 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@733037{/jobs/job/kill,null,AVAILABLE,@Spark} +1126 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@7728643a{/stages/stage/kill,null,AVAILABLE,@Spark} +1127 [ScalaTest-run] INFO org.apache.spark.ui.SparkUI - Bound SparkUI to 0.0.0.0, and started at http://192.168.0.105:4040 +1187 [ScalaTest-run] INFO org.apache.spark.executor.Executor - Starting executor ID driver on host localhost +1255 [ScalaTest-run] INFO org.apache.spark.util.Utils - Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 42899. 
+1255 [ScalaTest-run] INFO org.apache.spark.network.netty.NettyBlockTransferService - Server created on 192.168.0.105:42899 +1256 [ScalaTest-run] INFO org.apache.spark.storage.BlockManager - Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy +1275 [ScalaTest-run] INFO org.apache.spark.storage.BlockManagerMaster - Registering BlockManager BlockManagerId(driver, 192.168.0.105, 42899, None) +1284 [dispatcher-event-loop-2] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Registering block manager 192.168.0.105:42899 with 1946.1 MB RAM, BlockManagerId(driver, 192.168.0.105, 42899, None) +1286 [ScalaTest-run] INFO org.apache.spark.storage.BlockManagerMaster - Registered BlockManager BlockManagerId(driver, 192.168.0.105, 42899, None) +1286 [ScalaTest-run] INFO org.apache.spark.storage.BlockManager - Initialized BlockManager: BlockManagerId(driver, 192.168.0.105, 42899, None) +1411 [ScalaTest-run] INFO org.spark_project.jetty.server.handler.ContextHandler - Started o.s.j.s.ServletContextHandler@21719a0{/metrics/json,null,AVAILABLE,@Spark} +2159 [ScalaTest-run-running-roundTripTests] WARN org.apache.spark.sql.SparkSession$Builder - Using an existing SparkSession; some configuration may not take effect. +3383 [Executor task launch worker for task 0] ERROR org.apache.jena.riot - [line: 1, col: 1 ] Expected BNode or IRI: Got: [DIRECTIVE:prefix] +3390 [Executor task launch worker for task 0] ERROR org.apache.spark.executor.Executor - Exception in task 0.0 in stage 0.0 (TID 0) +org.apache.jena.riot.RiotException: [line: 1, col: 1 ] Expected BNode or IRI: Got: [DIRECTIVE:prefix] + at org.apache.jena.riot.system.ErrorHandlerFactory$ErrorHandlerStd.fatal(ErrorHandlerFactory.java:147) + at org.apache.jena.riot.lang.LangEngine.raiseException(LangEngine.java:148) + at org.apache.jena.riot.lang.LangEngine.exceptionDirect(LangEngine.java:143) + at org.apache.jena.riot.lang.LangEngine.exception(LangEngine.java:137) + at org.apache.jena.riot.lang.LangNTuple.checkIRIOrBNode(LangNTuple.java:89) + at org.apache.jena.riot.lang.LangNTriples.parseOne(LangNTriples.java:74) + at org.apache.jena.riot.lang.LangNTriples.parseOne(LangNTriples.java:38) + at org.apache.jena.riot.lang.LangNTuple.next(LangNTuple.java:67) + at org.apache.jena.atlas.iterator.IteratorResourceClosing.next(IteratorResourceClosing.java:77) + at scala.collection.convert.Wrappers$JIteratorWrapper.next(Wrappers.scala:43) + at scala.collection.Iterator$$anon$10.next(Iterator.scala:394) + at scala.collection.Iterator$class.foreach(Iterator.scala:891) + at scala.collection.AbstractIterator.foreach(Iterator.scala:1334) + at scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:59) + at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:104) + at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:48) + at scala.collection.TraversableOnce$class.to(TraversableOnce.scala:310) + at scala.collection.AbstractIterator.to(Iterator.scala:1334) + at scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:302) + at scala.collection.AbstractIterator.toBuffer(Iterator.scala:1334) + at scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:289) + at scala.collection.AbstractIterator.toArray(Iterator.scala:1334) + at org.apache.spark.rdd.RDD$$anonfun$take$1$$anonfun$29.apply(RDD.scala:1364) + at org.apache.spark.rdd.RDD$$anonfun$take$1$$anonfun$29.apply(RDD.scala:1364) + at 
org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:2101)
+ at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:2101)
+ at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
+ at org.apache.spark.scheduler.Task.run(Task.scala:121)
+ at org.apache.spark.executor.Executor$TaskRunner$$anonfun$10.apply(Executor.scala:402)
+ at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360)
+ at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:408)
+ at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
+ at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
+ at java.lang.Thread.run(Thread.java:748)
+3416 [task-result-getter-0] WARN org.apache.spark.scheduler.TaskSetManager - Lost task 0.0 in stage 0.0 (TID 0, localhost, executor driver): org.apache.jena.riot.RiotException: [line: 1, col: 1 ] Expected BNode or IRI: Got: [DIRECTIVE:prefix]
+ [stack trace identical to the one above]
+3418 [task-result-getter-0] ERROR org.apache.spark.scheduler.TaskSetManager - Task 0 in stage 0.0 failed 1 times; aborting job
+5181 [Executor task launch worker for task 56] ERROR org.apache.jena.riot - [line: 1, col: 49] Expected IRI: Got: [KEYWORD:a]
+5182 [Executor task launch worker for task 56] ERROR org.apache.spark.executor.Executor - Exception in task 0.0 in stage 8.0 (TID 56)
+org.apache.jena.riot.RiotException: [line: 1, col: 49] Expected IRI: Got: [KEYWORD:a]
+ at org.apache.jena.riot.system.ErrorHandlerFactory$ErrorHandlerStd.fatal(ErrorHandlerFactory.java:147)
+ at org.apache.jena.riot.lang.LangEngine.raiseException(LangEngine.java:148)
+ at org.apache.jena.riot.lang.LangEngine.exceptionDirect(LangEngine.java:143)
+ at org.apache.jena.riot.lang.LangEngine.exception(LangEngine.java:137)
+ at org.apache.jena.riot.lang.LangNTuple.checkIRI(LangNTuple.java:95)
+ at org.apache.jena.riot.lang.LangNTriples.parseOne(LangNTriples.java:75)
+ [remaining frames identical to the trace above]
+5186 [task-result-getter-3] WARN org.apache.spark.scheduler.TaskSetManager - Lost task 0.0 in stage 8.0 (TID 56, localhost, executor driver): org.apache.jena.riot.RiotException: [line: 1, col: 49] Expected IRI: Got: [KEYWORD:a]
+ [stack trace identical to the one above]
+5187 [task-result-getter-3] ERROR org.apache.spark.scheduler.TaskSetManager - Task 0 in stage 8.0 failed 1 times; aborting job
+6145 [ScalaTest-run-running-roundTripTests] WARN org.apache.jena.riot - Only triples or default graph data expected : named graph data ignored
+6146 [ScalaTest-run-running-roundTripTests] WARN org.apache.jena.riot - Only triples or default graph data expected : named graph data ignored
+6989 [ScalaTest-run-running-roundTripTests] WARN org.apache.jena.riot - Only triples or default graph data expected : named graph data ignored
+6989 [ScalaTest-run-running-roundTripTests] WARN org.apache.jena.riot - Only triples or default graph data expected : named graph data ignored
+7477 [ScalaTest-run-running-roundTripTests] WARN org.apache.jena.riot - Only triples or default graph data expected : named graph data ignored
+7479 [ScalaTest-run-running-roundTripTests] WARN org.apache.jena.riot - Only triples or default graph data expected : named graph data ignored
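Before the code changes below, the two fatal RiotExceptions in the log are worth decoding: "Expected BNode or IRI: Got: [DIRECTIVE:prefix]" and "Expected IRI: Got: [KEYWORD:a]" are the typical symptoms of feeding Turtle syntax (an @prefix directive, the a keyword) to Jena's line-based N-Triples parser, i.e. of a misdetected input format. A minimal sketch, not part of the patch, that reproduces the first error with plain Jena:

// sketch only: parsing Turtle content with the N-Triples parser raises the
// same fatal RiotException that appears in the log above
import org.apache.jena.rdf.model.ModelFactory
import org.apache.jena.riot.{RDFDataMgr, RDFLanguages, RiotException}
import java.io.ByteArrayInputStream

object RiotErrorSketch {
  def main(args: Array[String]): Unit = {
    val turtle = "@prefix ex: <http://example.org/> . ex:s a ex:Thing ."
    val in = new ByteArrayInputStream(turtle.getBytes("UTF-8"))
    try {
      // forcing the N-Triples Lang on Turtle input fails at line 1, col 1
      RDFDataMgr.read(ModelFactory.createDefaultModel(), in, RDFLanguages.NTRIPLES)
    } catch {
      case e: RiotException => println(e.getMessage) // "Expected BNode or IRI: Got: [DIRECTIVE:prefix]"
    }
  }
}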
diff --git a/pom.xml b/pom.xml
index ca9c8f8..830f818 100644
--- a/pom.xml
+++ b/pom.xml
@@ -201,7 +201,7 @@
 scalatest-maven-plugin
 1.0
-conversionTests.mapping.roundTripTests, conversionTests.conversion.roundTripTests
+archived.format.mapping.roundTripTests, format.conversion.roundTripTests
 ${project.build.directory}/surefire-reports
 .
 WDF TestSuite.txt
diff --git a/src/main/scala/org/dbpedia/databus/client/filehandling/FileUtil.scala b/src/main/scala/org/dbpedia/databus/client/filehandling/FileUtil.scala
index d3c1938..25fe096 100644
--- a/src/main/scala/org/dbpedia/databus/client/filehandling/FileUtil.scala
+++ b/src/main/scala/org/dbpedia/databus/client/filehandling/FileUtil.scala
@@ -191,7 +191,7 @@ object FileUtil {
    * @return format
    */
  def getFormatType(inputFile: File, compressionInputFile: String): String = {
-    {
+    val format = {
      try {
        if (!(getFormatTypeWithDataID(inputFile) == "")) {
          getFormatTypeWithDataID(inputFile)
@@ -202,6 +202,23 @@
        case _: FileNotFoundException => getFormatTypeWithoutDataID(inputFile, compressionInputFile)
      }
    }
+
+    if (format == "rdf") "rdfxml"
+    else format
+  }
+
+  /**
+   * read a query file as string
+   *
+   * @param file query file
+   * @return query string
+   */
+  def readQueryFile(file: File): String = {
+    var queryString: String = ""
+    for (line <- file.lineIterator) {
+      queryString = queryString.concat(line).concat("\n")
+    }
+    queryString
  }

  /**
diff --git a/src/main/scala/org/dbpedia/databus/client/filehandling/SourceHandler.scala b/src/main/scala/org/dbpedia/databus/client/filehandling/SourceHandler.scala
index a100ae5..ac752c6 100644
--- a/src/main/scala/org/dbpedia/databus/client/filehandling/SourceHandler.scala
+++ b/src/main/scala/org/dbpedia/databus/client/filehandling/SourceHandler.scala
@@ -26,7 +26,7 @@ class SourceHandler(conf:CLI_Config) {
    val sourceFile: File = File(conf.source())
    if (sourceFile.hasExtension && sourceFile.extension.get.matches(".sparql|.query")) { // conf.source() is a query file
-      val queryString = readQueryFile(sourceFile)
+      val queryString = FileUtil.readQueryFile(sourceFile)
      handleQuery(queryString)
    } else { // conf.source() is an already existing file or directory
@@ -173,20 +173,6 @@ class SourceHandler(conf:CLI_Config) {
    handler.handleResponse(response)
  }
-  /**
-   * read a query file as string
-   *
-   * @param file query file
-   * @return query string
-   */
-  def readQueryFile(file: File): String = {
-    var queryString: String = ""
-    for (line <- file.lineIterator) {
-      queryString = queryString.concat(line).concat("\n")
-    }
-    queryString
-  }
-
  def printTask(sourceType: String, source: String, target: String):Unit = {
    val str =
      s"""
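Moving readQueryFile from SourceHandler into FileUtil removes the duplication; the loop body itself could shrink further. A sketch of an equivalent one-liner over better-files' lineIterator (same result as the var/concat loop above, including the newline appended to every line):

// sketch: behaviourally equivalent to the var/concat loop above
import better.files.File

def readQueryFile(file: File): String =
  file.lineIterator.map(_ + "\n").mkString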
diff --git a/src/main/scala/org/dbpedia/databus/client/filehandling/convert/FormatConverter.scala b/src/main/scala/org/dbpedia/databus/client/filehandling/convert/FormatConverter.scala
index d4db849..a2faf02 100644
--- a/src/main/scala/org/dbpedia/databus/client/filehandling/convert/FormatConverter.scala
+++ b/src/main/scala/org/dbpedia/databus/client/filehandling/convert/FormatConverter.scala
@@ -14,6 +14,7 @@
 import org.slf4j.LoggerFactory
 import scala.util.control.Breaks.{break, breakable}
 import org.apache.jena.graph.Triple
+import org.apache.jena.sparql.core.Quad
 import org.apache.spark.rdd.RDD
 import java.net.URLEncoder
@@ -90,7 +91,8 @@ object FormatConverter {
     //read process
     val quads = {
       if (RDF_QUADS.contains(conf.inputFormat)) quadsHandler.read(file.pathAsString, conf.inputFormat)
-      else RDF_Triples_Mapper.map_to_quads(new TripleHandler().read(file.pathAsString, conf.inputFormat), conf.graphURI)
+      else if (RDF_TRIPLES.contains(conf.inputFormat)) RDF_Triples_Mapper.map_to_quads(new TripleHandler().read(file.pathAsString, conf.inputFormat), conf.graphURI)
+      else Spark.context.emptyRDD[Quad]
     }
     //write process
@@ -113,19 +115,8 @@
       }
     }
+    //write process
     tsdHandler.write(data, conf.outputFormat)
   }
 }
-//    FileUtil.unionFiles(tempDir, targetFile)
-//    if (mappingFile.exists && mappingFile != File("")) {
-//      val mapDir = File("./mappings/")
-//      mapDir.createDirectoryIfNotExists()
-//      mappingFile.moveTo(mapDir / FileUtil.getSha256(targetFile), overwrite = true)
-//    }
-//}
-//catch {
-//  case _: RuntimeException => LoggerFactory.getLogger("UnionFilesLogger").error(s"File $targetFile already exists") //deleteAndRestart(inputFile, inputFormat, outputFormat, targetFile: File)
-//}
-//
-//  targetFile
 }
diff --git a/src/main/scala/org/dbpedia/databus/client/filehandling/convert/format/rdf/quads/QuadsHandler.scala b/src/main/scala/org/dbpedia/databus/client/filehandling/convert/format/rdf/quads/QuadsHandler.scala
index 286261c..de78630 100644
--- a/src/main/scala/org/dbpedia/databus/client/filehandling/convert/format/rdf/quads/QuadsHandler.scala
+++ b/src/main/scala/org/dbpedia/databus/client/filehandling/convert/format/rdf/quads/QuadsHandler.scala
@@ -6,7 +6,7 @@
 import org.apache.spark.SparkContext
 import org.apache.spark.rdd.RDD
 import org.dbpedia.databus.client.filehandling.FileUtil
 import org.dbpedia.databus.client.filehandling.convert.format.EquivalenceClassHandler
-import org.dbpedia.databus.client.filehandling.convert.format.rdf.quads.format.{NQuads, Trig, Trix}
+import org.dbpedia.databus.client.filehandling.convert.format.rdf.quads.format.{JsonLD, NQuads, Trig, Trix}
 class QuadsHandler extends EquivalenceClassHandler[RDD[Quad]]{
@@ -23,6 +23,7 @@
       case "nq" => new NQuads().read(source)
       case "trig" => new Trig().read(source)
       case "trix" => new Trix().read(source)
+      case "jsonld" => new JsonLD().read(source)
     }
   }
@@ -38,6 +39,7 @@
       case "nq" => new NQuads().write(data)
       case "trig" => new Trig().write(data)
       case "trix" => new Trix().write(data)
+      case "jsonld" => new JsonLD().write(data)
     }
   }
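The new "jsonld" cases route JSON-LD through the quads equivalence class. For orientation, a standalone sketch, independent of the client's Trix helper and with an invented file name, of how plain Jena reads a JSON-LD document as quads:

// sketch: load a JSON-LD file and iterate its quads with plain Jena
import org.apache.jena.riot.{Lang, RDFDataMgr}
import org.apache.jena.sparql.core.Quad
import scala.collection.JavaConverters._

object JsonLdSketch {
  def main(args: Array[String]): Unit = {
    // "data.jsonld" is a placeholder path
    val dataset = RDFDataMgr.loadDataset("data.jsonld", Lang.JSONLD)
    val quads: Iterator[Quad] = dataset.asDatasetGraph().find().asScala
    quads.foreach(println)
  }
}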
+// +//import better.files.File +//import org.apache.jena.graph.{NodeFactory, Triple} +//import org.apache.jena.rdf.model.{ModelFactory, ResourceFactory} +//import org.apache.jena.riot.{RDFDataMgr, RDFFormat} +//import org.apache.jena.sparql.core.Quad +//import org.apache.spark.SparkContext +//import org.apache.spark.rdd.RDD +//import org.apache.spark.sql.SparkSession +//import org.dbpedia.databus.client.filehandling.convert.format.Format +// +//import java.io.{ByteArrayInputStream, ByteArrayOutputStream} +//import scala.io.{Codec, Source} //class JsonLD extends Format[RDD[Quad]] { // //// def readJSONL(spark: SparkSession, inputFile: File): RDD[Triple] = { -//// val sc = spark.sparkContext -//// val data = sc.textFile(inputFile.pathAsString) +// val sc = spark.sparkContext +// val data = sc.textFile(inputFile.pathAsString) //// var tripleRDD = sc.emptyRDD[Triple] //// //// // data.foreach(println(_)) diff --git a/src/main/scala/org/dbpedia/databus/client/filehandling/convert/format/rdf/triples/TripleHandler.scala b/src/main/scala/org/dbpedia/databus/client/filehandling/convert/format/rdf/triples/TripleHandler.scala index 7672697..8cb3e2c 100644 --- a/src/main/scala/org/dbpedia/databus/client/filehandling/convert/format/rdf/triples/TripleHandler.scala +++ b/src/main/scala/org/dbpedia/databus/client/filehandling/convert/format/rdf/triples/TripleHandler.scala @@ -23,7 +23,7 @@ class TripleHandler extends EquivalenceClassHandler[RDD[Triple]] { inputFormat match { case "nt" => new NTriples().read(source) - case "rdf" => new RDFXML().read(source) + case "rdfxml" => new RDFXML().read(source) case "ttl" => //how can this be done better? try { diff --git a/src/main/scala/org/dbpedia/databus/client/filehandling/convert/mapping/RDF_Quads_Mapper.scala b/src/main/scala/org/dbpedia/databus/client/filehandling/convert/mapping/RDF_Quads_Mapper.scala index fb2315f..ae0d088 100644 --- a/src/main/scala/org/dbpedia/databus/client/filehandling/convert/mapping/RDF_Quads_Mapper.scala +++ b/src/main/scala/org/dbpedia/databus/client/filehandling/convert/mapping/RDF_Quads_Mapper.scala @@ -3,7 +3,7 @@ package org.dbpedia.databus.client.filehandling.convert.mapping import org.apache.spark.rdd.RDD import org.apache.jena.graph.Triple import org.apache.jena.sparql.core.Quad -import org.apache.spark.sql.functions.lit +import org.apache.spark.sql.functions.{col, lit} import org.apache.spark.sql.{Column, DataFrame} import org.dbpedia.databus.client.filehandling.convert.Spark import org.dbpedia.databus.client.filehandling.convert.mapping.util.TriplesResult @@ -31,18 +31,29 @@ object RDF_Quads_Mapper { } def map_to_tsd(data:RDD[Quad], createMapping:Boolean):DataFrame={ + //calculate partial results per graph val triplesData = map_to_triples(data) val dataFrameForEachGraph = triplesData.map(triplesResult => { val dataFrame = RDF_Triples_Mapper.map_to_tsd(triplesResult.graph, createMapping) - dataFrame.show() dataFrame.withColumn("graph", lit(triplesResult.graphName)) }) - val resultDataFrame = dataFrameForEachGraph.head - dataFrameForEachGraph.foreach() - df1.join(df2, df1.col("column").equalTo(df2("column"))) - dataFrameForEachGraph.reduce(_ join _) + //join the partial results on their shared columns + var resultDataFrame = dataFrameForEachGraph.head + dataFrameForEachGraph.tail.foreach(df => { + var columns = Seq.empty[String] + resultDataFrame.columns.foreach(col => { + if (df.columns.contains(col)) columns = columns :+ col + }) + resultDataFrame = resultDataFrame.join(df, columns, "outer") + }) + + //sort DataFrame + val columns = resultDataFrame.columns + val graphColIndex = columns.indexOf("graph") + val cols = columns.updated(graphColIndex, columns.head).updated(0, "graph").toSeq + resultDataFrame.select(cols.map(x => col(x)): _*).sort("graph") } }
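The rewritten map_to_tsd merges the per-graph DataFrames by outer-joining each one on the columns it shares with the accumulated result. A compact, self-contained Spark illustration of that merge step (data and column names are invented for the example):

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.lit

object GraphJoinSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().appName("graph join sketch").master("local[*]").getOrCreate()
    import spark.implicits._

    // two per-graph tables that only share the "resource" and "graph" columns
    val g1 = Seq(("bob", "Person")).toDF("resource", "type").withColumn("graph", lit("g1"))
    val g2 = Seq(("bob", "Jamaica")).toDF("resource", "birthPlace").withColumn("graph", lit("g2"))

    // join on the shared columns; cells missing in one graph become null
    val shared = g1.columns.filter(g2.columns.contains(_)).toSeq
    val merged = g1.join(g2, shared, "outer")
    merged.show()
    spark.stop()
  }
}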
diff --git a/src/main/scala/org/dbpedia/databus/client/filehandling/convert/mapping/RDF_Triples_Mapper.scala b/src/main/scala/org/dbpedia/databus/client/filehandling/convert/mapping/RDF_Triples_Mapper.scala index 0314293..f5a971f 100644 --- a/src/main/scala/org/dbpedia/databus/client/filehandling/convert/mapping/RDF_Triples_Mapper.scala +++ b/src/main/scala/org/dbpedia/databus/client/filehandling/convert/mapping/RDF_Triples_Mapper.scala @@ -40,7 +40,7 @@ object RDF_Triples_Mapper { * converts RDF data (RDD[Triple]) to TSD data (DataFrame) * * @param inData RDF input data * @param createMappingFile create a mapping file for conversion back to RDF * @return tabular structured data */ def triplesToTSD(inData: RDD[Triple], createMappingFile: Boolean): Seq[DataFrame] = { @@ -48,7 +48,7 @@ val triplesGroupedBySubject = inData.groupBy(triple ⇒ triple.getSubject).map(_._2) val allPredicates = inData.groupBy(triple => triple.getPredicate.getURI).map(_._1) val prefixPre = "xxx" //for mapping file val mappedPredicates = Seq(Seq("resource")) ++ allPredicates.map( diff --git a/src/main/scala/org/dbpedia/databus/client/filehandling/convert/mapping/TSD_Mapper.scala b/src/main/scala/org/dbpedia/databus/client/filehandling/convert/mapping/TSD_Mapper.scala index 22651cb..503a887 100644 --- a/src/main/scala/org/dbpedia/databus/client/filehandling/convert/mapping/TSD_Mapper.scala +++ b/src/main/scala/org/dbpedia/databus/client/filehandling/convert/mapping/TSD_Mapper.scala @@ -88,7 +88,7 @@ object TSD_Mapper { } - def map_to_quads()={ - - } +// def map_to_quads()={ +// +// } } diff --git a/src/main/scala/org/dbpedia/databus/client/main/CLI_Config.scala b/src/main/scala/org/dbpedia/databus/client/main/CLI_Config.scala index f2e0a1f..6061d26 100644 --- a/src/main/scala/org/dbpedia/databus/client/main/CLI_Config.scala +++ b/src/main/scala/org/dbpedia/databus/client/main/CLI_Config.scala @@ -31,11 +31,11 @@ For usage of parameters see below: val target: ScallopOption[String] = opt[String](default = Some("./files/"), descr = "set the target directory for converted files") val overwrite: ScallopOption[Boolean] = opt[Boolean](default = Some(false), descr = "true -> overwrite files in cache, false -> use cache") val clear: ScallopOption[Boolean] = opt[Boolean](default = Some(false), noshort= true, descr = "true -> clear Cache") val mapping: ScallopOption[String] = opt[String](default = Some(""), descr = "set the mapping file for conversion to a different format equivalence class") val delimiter: ScallopOption[String] = opt[String](default = Some(","), descr = "set the delimiter (only for some formats)") val quotation: ScallopOption[String] = opt[String](default = Some("\""), descr = "set the quotation (only for some formats)") val createMapping: ScallopOption[Boolean] = opt[Boolean](default = Some(false), descr = "Do you want to create mapping files for mapped sources?") val graphURI: ScallopOption[String] = opt[String](default = Some(""), descr = "set the graph URI for mapping from RDF triples to RDF quads") verify() }
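For orientation, a sketch of how these Scallop options might be consumed; the flag spellings assume Scallop's default name derivation for the single-word options, and all paths are placeholders:

import org.dbpedia.databus.client.main.CLI_Config

object CliConfigSketch {
  def main(args: Array[String]): Unit = {
    // hypothetical invocation: convert a local file, writing results to ./converted/
    val conf = new CLI_Config(Array(
      "--source", "./files/example.ttl",        // file or query to process
      "--target", "./converted/",               // output directory
      "--mapping", "./mappings/example.sparql"  // optional mapping file
    ))
    println(s"source=${conf.source()} target=${conf.target()}")
  }
}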
diff --git a/src/main/scala/org/dbpedia/databus/client/sparql/queries/DatabusQueries.scala b/src/main/scala/org/dbpedia/databus/client/sparql/queries/DatabusQueries.scala index 147e670..4913ca3 100644 --- a/src/main/scala/org/dbpedia/databus/client/sparql/queries/DatabusQueries.scala +++ b/src/main/scala/org/dbpedia/databus/client/sparql/queries/DatabusQueries.scala @@ -45,13 +45,13 @@ object DatabusQueries { |PREFIX dataid: |PREFIX dcat: |PREFIX xsd: |PREFIX mapping: < | |SELECT DISTINCT ?mapping |WHERE { | ?dataIdElement dataid:sha256sum "$sha"^^xsd:string . | ?dataIdElement dataid:file ?file . | ?mapping ?file . |} |""".stripMargin @@ -66,7 +66,7 @@ object DatabusQueries { |PREFIX dataid: |PREFIX map: |PREFIX xsd: | |SELECT DISTINCT ?mapping |WHERE { | ?dataIdElement dataid:sha256sum "$sha"^^xsd:string ; | dataid:file ?file ; @@ -105,7 +105,7 @@ | FILTER (?from <= ?version && ?until >= ?version) | | | BIND( coalesce(?mapping1, ?mapping2, ?mapping3) as ?mapping) |} |""".stripMargin diff --git a/src/main/scala/org/dbpedia/databus/client/sparql/queries/MappingQueries.scala b/src/main/scala/org/dbpedia/databus/client/sparql/queries/MappingQueries.scala index fdc6ca6..5df4ea8 100644 --- a/src/main/scala/org/dbpedia/databus/client/sparql/queries/MappingQueries.scala +++ b/src/main/scala/org/dbpedia/databus/client/sparql/queries/MappingQueries.scala @@ -6,10 +6,10 @@ object MappingQueries { s""" |PREFIX map: | |SELECT DISTINCT ?mapping |WHERE { |?mapping a map:MappingFile . |<$mappingInfoFile> map:hasMappingFile ?mapping . |} |""".stripMargin @@ -19,11 +19,11 @@ | |SELECT DISTINCT * |WHERE { |?mapping a map:MappingFile . | |<$mappingInfoFile> map:hasDelimiter ?delimiter ; | map:hasQuotation ?quotation ; | map:hasMappingFile ?mapping . |} |""".stripMargin } diff --git a/src/test/resources/roundTripTestFiles/conversion/dbpedia-mappings.tib.eu/databus-repo/eisenbahnplatte/databus-client-testbed/format-testbed/2019.08.30/dataid.ttl b/src/test/resources/roundTripTestFiles/conversion/dbpedia-mappings.tib.eu/databus-repo/eisenbahnplatte/databus-client-testbed/format-testbed/2019.08.30/dataid.ttl deleted file mode 100644 index 87ee490..0000000 --- a/src/test/resources/roundTripTestFiles/conversion/dbpedia-mappings.tib.eu/databus-repo/eisenbahnplatte/databus-client-testbed/format-testbed/2019.08.30/dataid.ttl +++ /dev/null @@ -1,243 +0,0 @@ - - -###### -# # ## ##### ## ##### # # #### -# # # # # # # # # # # # -# # # # # # # ##### # # #### -# # ###### # ###### # # # # # -# # # # # # # # # # # # # -###### # # # # # ##### #### #### - -# Plugin version 1.3-SNAPSHOT - https://github.com/dbpedia/databus-maven-plugin - - -@prefix databus: . -@prefix dataid-mt: . -@prefix dct: . -@prefix dataid: . -@prefix rdf: . -@prefix dataid-cv: . -@prefix xsd: .
-@prefix dataid-pl: . -@prefix rdfs: . -@prefix dcat: . -@prefix prov: . - - - a dataid:SingleFile ; - dataid:associatedAgent ; - dataid:compression "None" ; - dataid:contentVariant "bob5" ; - dataid:duplicates "0"^^xsd:decimal ; - dataid:file ; - dataid:formatExtension "rdf" ; - dataid:isDistributionOf ; - dataid:nonEmptyLines "24"^^xsd:decimal ; - dataid:preview "\n\n \n Jamaica" ; - dataid:sha256sum "48940a6f9d0a4be8192f316a01d4a31bf6f1d4fc5e0088ec5d4ccafc62bb2b95" ; - dataid:signature "rghvljYQhOy6dmCi9MElYOku5D/6MgmJQ4e8Bcx/8Xqs7bG0XyuTMu6e4LK5s9YYweFHDG7/GGsEjA8H/B7JvEYcB16m8jCg1OWwtG6WyYI3etaXmOruTlF6R1IO5g3VByTn3C4U2LUezRnLDET0TpWR4gxNMmdq8OnX8wdTc5o9nFpNZIXq323KOi/4mhm8WF7/VAX54Vx3BdI8bBnK2LHMbk2zPG+VcrLD8R8QtqeIlYrEDtrb6d8lVNH6IXl9kg9uoHo2bRKGV0VilLNWBZyRVXO63RIL4VuxQfSlahtYo8dnZdquLU3v8tXnKwIZBZwVpiAkw0vu55g+ue0BYA==" ; - dataid:sorted false ; - dataid:uncompressedByteSize "1201"^^xsd:decimal ; - dataid-cv:tag "bob5" ; - dct:conformsTo "http://dataid.dbpedia.org/ns/core#" ; - dct:hasVersion "2019.08.30" ; - dct:issued "2019-08-30T00:00:00Z"^^xsd:dateTime ; - dct:license ; - dct:modified "2019-09-06T14:27:51Z"^^xsd:dateTime ; - dct:publisher ; - dcat:byteSize "1201"^^xsd:decimal ; - dcat:downloadURL ; - dcat:mediaType dataid-mt:ApplicationRDFXML . - -dataid-mt:ApplicationRDFXML - a dataid-mt:MediaType ; - dataid:mimetype "application/rdf+xml" . - -dataid-cv:tag rdfs:subPropertyOf dataid:contentVariant . - - - a dataid:SingleFile ; - dataid:associatedAgent ; - dataid:compression "None" ; - dataid:contentVariant "bob3" ; - dataid:duplicates "0"^^xsd:decimal ; - dataid:file ; - dataid:formatExtension "tsv" ; - dataid:isDistributionOf ; - dataid:nonEmptyLines "3"^^xsd:decimal ; - dataid:preview "resource\thttp://www.w3.org/1999/02/22-rdf-syntax-ns#type\thttp://dbpedia.org/ontology/birthPlace\thttp://www.w3.org/2000/01/rdf-schema#seeAlso\thttp://www.w3.org/2003/01/geo/wgs84_pos#lat\thttp://www.w3.org/2003/01/geo/wgs84_pos#long\thttp://www.w3.org/2000/01/rdf-schema#label\thttp://xmlns.com/foaf/0.1/homepage\nhttp://dbpedia.org/resource/Bob_Marley\thttp://xmlns.com/foaf/0.1/Person\thttp://dbpedia.org/resource/Jamaica\thttp://dbpedia.org/resource/Rastafari\t\t\tBob Marley\t\nhttp://dbpedia.org/resource/Jamaica\thttp://schema.org/Country\t\t\t17.9833\t-76.8\tGiamaica\thttp://jis.gov.jm/" ; - dataid:sha256sum "7af3a7cef890966c88dfb3d076551eb5a7be0e478081a4922f693beec0fab9e2" ; - dataid:signature "uvYx/ontTecFU1jRMBK8JJu2Bxjo/GMeSSXcOWjoYgUIYwlGlfamrvk5tJnOd5pQixx+UNSTEFeO9EkESJMFr8pU9OWMYYvWvXDfBKxdh5EAlxDbvskSyE1+mhZlIwrCtbV/VrjU+A+BY0MaPVq16p8q/L6bRAVSbqQ5tdrQqPx7NXDWOebPr2lSNzKLYb9/nEg/5KtPauRwYyroCNd5sDOUIz7s3TTb+40reJExKMmofORS4Np6+pPpRHsV7FlBl6hYQmMDsv8oA7YYxcfPLH4ywd+xmK/XJQPf4POlnqzQJlT8HixsnuhIQVFbTySbLBY/lsONNlYR3T6qgwUgAQ==" ; - dataid:sorted false ; - dataid:uncompressedByteSize "574"^^xsd:decimal ; - dataid-cv:tag "bob3" ; - dct:conformsTo "http://dataid.dbpedia.org/ns/core#" ; - dct:hasVersion "2019.08.30" ; - dct:issued "2019-08-30T00:00:00Z"^^xsd:dateTime ; - dct:license ; - dct:modified "2019-09-06T14:27:51Z"^^xsd:dateTime ; - dct:publisher ; - dcat:byteSize "574"^^xsd:decimal ; - dcat:downloadURL ; - dcat:mediaType dataid-mt:TextTabSeparatedValues . - -dataid-mt:TextTabSeparatedValues - a dataid-mt:MediaType ; - dataid:mimetype "text/tab-separated-values" . - - - a dataid:Version . - -dataid-mt:UNKNOWN a dataid-mt:MediaType ; - dataid:mimetype "UNKNOWN" . - - - a dataid:Dataset ; - rdfs:comment "Contains several files with information about Bob Marley. 
They all have the same data, but are written in different file formats."@en ; - rdfs:label "Testbed to test the format conversion ability of the Databus-Client."@en ; - dataid:account databus:fabian ; - dataid:artifact ; - dataid:associatedAgent ; - dataid:group ; - dataid:groupdocu "## Origin\nThis dataset was created by Eisenbahnplatte" ; - dataid:version ; - - ; - - ; - - ; - dct:conformsTo "http://dataid.dbpedia.org/ns/core#" ; - dct:description "" ; - dct:hasVersion "2019.08.30" ; - dct:issued "2019-08-30T00:00:00Z"^^xsd:dateTime ; - dct:license ; - dct:publisher ; - dct:title "Testbed to test the format conversion ability of the Databus-Client."@en ; - dcat:distribution , , , , , . - - - a dataid:SingleFile ; - dataid:associatedAgent ; - dataid:compression "None" ; - dataid:contentVariant "bob2" ; - dataid:duplicates "0"^^xsd:decimal ; - dataid:file ; - dataid:formatExtension "nt" ; - dataid:isDistributionOf ; - dataid:nonEmptyLines "11"^^xsd:decimal ; - dataid:preview " .\n \"Bob Marley\"@en .\n \"Bob Marley\"@fr .\n .\n .\n .\n \"Jamaica\"@en .\n \"Giamaica\"@it .\n \"17.9833\"^^ .\n \"-76.8\"^^ ." ; - dataid:sha256sum "9a39a1bac821a66bde87e3c986ced948ef235aa219645f5c952afc75d54f54f1" ; - dataid:signature "jPmMeHEywa983y+U2tL8T487Y1nPPeWHs0SPfcK1QAWuVBeV1pTcZFaXrMe7YMtim6EIlWfo4Bo4EIIy2vmaTHaN4+rj+Snge2dpSFTG6Uh/mN6DP/szI4MDceoMIs5MlFL/MOEJwBf9WpYxMoD1a4bE4dFt7Bvd5WBmlVFq/tM/v0E++U2HMNV/PNofOomkiyLynV+xO6Q8SSavdzF96wAuKH6rC7TmmOfaGcVBBHyHAW+fXZqr92ruiaU17gliXz9ip2BHCYHlIZMBc3Zs7xxlB86KsuDPPELi1nq8AHr2r1ZP4nIoMBF4gOigSq8V/TOfmmrZY8jHSjdBsWz/nQ==" ; - dataid:sorted false ; - dataid:uncompressedByteSize "1277"^^xsd:decimal ; - dataid-cv:tag "bob2" ; - dct:conformsTo "http://dataid.dbpedia.org/ns/core#" ; - dct:hasVersion "2019.08.30" ; - dct:issued "2019-08-30T00:00:00Z"^^xsd:dateTime ; - dct:license ; - dct:modified "2019-09-06T14:27:51Z"^^xsd:dateTime ; - dct:publisher ; - dcat:byteSize "1277"^^xsd:decimal ; - dcat:downloadURL ; - dcat:mediaType dataid-mt:ApplicationNTriples . - - - a dataid:SingleFile ; - dataid:associatedAgent ; - dataid:compression "None" ; - dataid:contentVariant "bob" ; - dataid:duplicates "0"^^xsd:decimal ; - dataid:file ; - dataid:formatExtension "" ; - dataid:isDistributionOf ; - dataid:nonEmptyLines "57"^^xsd:decimal ; - dataid:preview "{\n \"@context\": {\n \"dbpedia\": \"http://dbpedia.org/resource/\",\n \"foaf\": \"http://xmlns.com/foaf/0.1/\",\n \"rdf\": \"http://www.w3.org/1999/02/22-rdf-syntax-ns#\",\n \"rdfs\": \"http://www.w3.org/2000/01/rdf-schema#\",\n \"schema\": \"http://schema.org/\",\n \"xsd\": \"http://www.w3.org/2001/XMLSchema#\"\n },\n \"@graph\": [" ; - dataid:sha256sum "b88dd8c15c9321b1441694f5683dee0adce8e8bd13795c5246b687c9248a49ec" ; - dataid:signature "k03qBH7c4NeSGc7V/4qkwBFmne4H5vG8Q/dxrL3+wUTXo42H9rxSvDHVuuDvKUl/1Q/SQLNjH9DwoRmsIf14aKrgmXl/+uzTVSLMupXg1dLIot2GOyVRl/aL1yOUvm8I1JDxdS+pfkRb9S/ub3olsNK6OEE/uaZ2WxTqiZQmsWaQ/2EID/FAiAe/aqk9mw+2jA2r9yHdPRp0rI34bE1NpLP0PqirvEkKZvfXRH4t8EgqRQkvHdH2DC31tE8A8iaPgJbAHsXUF8h2g3EySmPoz8R1Rl+RH+hhld00I/unXZU5tBYx4pPxwlW3gsobHr+IGjLbZgdvJVgh4MMaHWD3Og==" ; - dataid:sorted false ; - dataid:uncompressedByteSize "1397"^^xsd:decimal ; - dataid-cv:tag "bob" ; - dct:conformsTo "http://dataid.dbpedia.org/ns/core#" ; - dct:hasVersion "2019.08.30" ; - dct:issued "2019-08-30T00:00:00Z"^^xsd:dateTime ; - dct:license ; - dct:modified "2019-09-06T14:27:51Z"^^xsd:dateTime ; - dct:publisher ; - dcat:byteSize "1397"^^xsd:decimal ; - dcat:downloadURL ; - dcat:mediaType dataid-mt:UNKNOWN . 
- - - a dataid:SingleFile ; - dataid:associatedAgent ; - dataid:compression "None" ; - dataid:contentVariant "bobcompact" ; - dataid:duplicates "0"^^xsd:decimal ; - dataid:file ; - dataid:formatExtension "" ; - dataid:isDistributionOf ; - dataid:nonEmptyLines "1"^^xsd:decimal ; - dataid:preview "[{\"@id\":\"http://dbpedia.org/resource/Bob_Marley\",\"@type\":[\"http://xmlns.com/foaf/0.1/Person\"],\"http://www.w3.org/2000/01/rdf-schema#label\":[{\"@value\":\"Bob Marley\",\"@language\":\"en\"},{\"@value\":\"Bob Marley\",\"@language\":\"fr\"}],\"http://www.w3.org/2000/01/rdf-schema#seeAlso\":[{\"@id\":\"http://dbpedia.org/resource/Rastafari\"}],\"http://dbpedia.org/ontology/birthPlace\":[{\"@id\":\"http://dbpedia.org/resource/Jamaica\"}]},{\"@id\":\"http://dbpedia.org/resource/Jamaica\",\"@type\":[\"http://schema.org/Country\"],\"http://www.w3.org/2000/01/rdf-schema#label\":[{\"@value\":\"Jamaica\",\"@language\":\"en\"},{\"@value\":\"Giamaica\",\"@language\":\"it\"}],\"http://www.w3.org/2003/01/geo/wgs84_pos#lat\":[{\"@value\":\"17.9833\",\"@type\":\"http://www.w3.org/2001/XMLSchema#float\"}],\"http://www.w3.org/2003/01/geo/wgs84_pos#long\":[{\"@value\":\"-76.8\",\"@type\":\"http://www.w3.org/2001/XMLSchema#float\"}],\"http://xmlns.com/foaf/0.1/homepage\":[{\"@id\":\"http://jis.gov.jm/\"}]},{\"@id\":\"http://dbpedia.org/resource/Rastafari\"},{\"@id\":\"http://jis.gov.jm/\"},{\"@id\":\"http://schema.org/Country\"},{\"@id\":\"http://xmlns.com/foaf/0.1/Person\"}] " ; - dataid:sha256sum "3867bf70c58fb826038c433432e095c86865a903d9e667f08a3a4d147e919fba" ; - dataid:signature "ahFXKyZXqT6XCkCpnFuVvy2ZBGYXnkwDmeTQLmUFd7lEMu68H56OFN9HDjTtrJxawVq2p+7co4fE+aZsgx+x3cVdGZYxuzcApO/KjinUaDYmfw4zyNZLteDYKWL3Zb6dSNTLFY1ZAmw31LHKYm5l1hhXWXbIU5aBGbqjPsEifT1UBSZhUnkSAZdfAwMS9zxF5/hbyvLYEJYs7sQN7/zaoPOhMznhDGHoWLALjgKLwYbBsn8ie4xfM1qs1vyeIoqpb3FHpi7Tl0j27L2usyHUfCq0DHLXxbQ8gzKEdcvkVE7RHZAXGQqS4CzOhYMhlw/IucudzjzAK9+694nr2CkD4w==" ; - dataid:sorted true ; - dataid:uncompressedByteSize "1079"^^xsd:decimal ; - dataid-cv:tag "bobcompact" ; - dct:conformsTo "http://dataid.dbpedia.org/ns/core#" ; - dct:hasVersion "2019.08.30" ; - dct:issued "2019-08-30T00:00:00Z"^^xsd:dateTime ; - dct:license ; - dct:modified "2019-09-06T14:38:57Z"^^xsd:dateTime ; - dct:publisher ; - dcat:byteSize "1079"^^xsd:decimal ; - dcat:downloadURL ; - dcat:mediaType dataid-mt:UNKNOWN . - - - a dataid:Artifact . - - - a dataid:Group . - -dataid-mt:ApplicationNTriples - a dataid-mt:MediaType ; - dataid:mimetype "application/n-triples" . - - - a dataid:DataId ; - rdfs:comment "Metadata created by the DBpedia Databus Maven Plugin: https://github.com/dbpedia/databus-maven-plugin (Version 1.3-SNAPSHOT)\nThe DataID ontology is a metadata omnibus, which can be extended to be interoperable with all metadata formats\nNote that the metadata (the dataid.ttl file) is always CC-0, the files are licensed individually\nMetadata created by https://eisenbahnplatte.github.io/webid.ttl#this" ; - rdfs:label "DataID metadata for databus-client-testbed/format-testbed"@en ; - dataid:associatedAgent ; - dct:conformsTo "http://dataid.dbpedia.org/ns/core#" ; - dct:hasVersion "1.3-SNAPSHOT" ; - dct:issued "2019-09-06T15:35:22Z"^^xsd:dateTime ; - dct:license ; - dct:publisher ; - dct:title "DataID metadata for databus-client-testbed/format-testbed"@en . 
- - - a dataid:SingleFile ; - dataid:associatedAgent ; - dataid:compression "None" ; - dataid:contentVariant "bob4" ; - dataid:duplicates "0"^^xsd:decimal ; - dataid:file ; - dataid:formatExtension "ttl" ; - dataid:isDistributionOf ; - dataid:nonEmptyLines "17"^^xsd:decimal ; - dataid:preview "@prefix foaf: .\n@prefix rdfs: .\n@prefix ns0: .\n@prefix schema: .\n@prefix geo: .\n@prefix xsd: .\n\n\n a foaf:Person ;\n rdfs:label \"Bob Marley\"@en, \"Bob Marley\"@fr ;" ; - dataid:sha256sum "d884a13b5c2b5c0e7f9239c817ad6b4c5058e1293d76381405d70306c734b210" ; - dataid:signature "kLj/leeSEfnN9DBor6vGSDAxe3oXFfg9t9h3dMYqiDSIN9HkdFK+gerk7VAdo0w7WG40n7vNUDHyr37oLW0qPO1XjN+J4aecYEhOLXMY1g94K2c9ff8GaT+IaC6rf3SIKAExFckBUlO/Sbu3DwaknQNQs86K9JOuahgHDle4hRa8Ag5pJs1lQjLJ5jrl23CNlZrLi3ja4mxLW2/hkr6qb1zhYEYxUvurDMWanpCyAgKBNIDqigOq2ZqcS5q0PnUsYFKUUOmCm3+WxN/3MIfY1IXBZmy+Kk2QusANReQBXDSlHv/t8LOdzExpd/oHnlgGJiPVp6E5Dbyzgfv90P6+HA==" ; - dataid:sorted false ; - dataid:uncompressedByteSize "725"^^xsd:decimal ; - dataid-cv:tag "bob4" ; - dct:conformsTo "http://dataid.dbpedia.org/ns/core#" ; - dct:hasVersion "2019.08.30" ; - dct:issued "2019-08-30T00:00:00Z"^^xsd:dateTime ; - dct:license ; - dct:modified "2019-09-06T14:27:51Z"^^xsd:dateTime ; - dct:publisher ; - dcat:byteSize "725"^^xsd:decimal ; - dcat:downloadURL ; - dcat:mediaType dataid-mt:ApplicationNTriples . diff --git a/src/test/resources/roundTripTestFiles/conversion/dbpedia-mappings.tib.eu/databus-repo/eisenbahnplatte/databus-client-testbed/format-testbed/2019.08.30/format-testbed_bob.jsonld b/src/test/resources/roundTripTestFiles/conversion/dbpedia-mappings.tib.eu/databus-repo/eisenbahnplatte/databus-client-testbed/format-testbed/2019.08.30/format-testbed_bob.jsonld deleted file mode 100644 index 4bcff07..0000000 --- a/src/test/resources/roundTripTestFiles/conversion/dbpedia-mappings.tib.eu/databus-repo/eisenbahnplatte/databus-client-testbed/format-testbed/2019.08.30/format-testbed_bob.jsonld +++ /dev/null @@ -1,57 +0,0 @@ -{ - "@context": { - "dbpedia": "http://dbpedia.org/resource/", - "foaf": "http://xmlns.com/foaf/0.1/", - "rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#", - "rdfs": "http://www.w3.org/2000/01/rdf-schema#", - "schema": "http://schema.org/", - "xsd": "http://www.w3.org/2001/XMLSchema#" - }, - "@graph": [ - { - "@id": "dbpedia:Jamaica", - "@type": "schema:Country", - "foaf:homepage": { - "@id": "http://jis.gov.jm/" - }, - "http://www.w3.org/2003/01/geo/wgs84_pos#lat": { - "@type": "xsd:float", - "@value": "17.9833" - }, - "http://www.w3.org/2003/01/geo/wgs84_pos#long": { - "@type": "xsd:float", - "@value": "-76.8" - }, - "rdfs:label": [ - { - "@language": "en", - "@value": "Jamaica" - }, - { - "@language": "it", - "@value": "Giamaica" - } - ] - }, - { - "@id": "dbpedia:Bob_Marley", - "@type": "foaf:Person", - "http://dbpedia.org/ontology/birthPlace": { - "@id": "dbpedia:Jamaica" - }, - "rdfs:label": [ - { - "@language": "en", - "@value": "Bob Marley" - }, - { - "@language": "fr", - "@value": "Bob Marley" - } - ], - "rdfs:seeAlso": { - "@id": "dbpedia:Rastafari" - } - } - ] -} diff --git a/src/test/resources/roundTripTestFiles/conversion/dbpedia-mappings.tib.eu/databus-repo/eisenbahnplatte/databus-client-testbed/format-testbed/2019.08.30/format-testbed_bob2.nt b/src/test/resources/roundTripTestFiles/conversion/dbpedia-mappings.tib.eu/databus-repo/eisenbahnplatte/databus-client-testbed/format-testbed/2019.08.30/format-testbed_bob2.nt deleted file mode 100644 index ed43546..0000000 --- 
a/src/test/resources/roundTripTestFiles/conversion/dbpedia-mappings.tib.eu/databus-repo/eisenbahnplatte/databus-client-testbed/format-testbed/2019.08.30/format-testbed_bob2.nt +++ /dev/null @@ -1,11 +0,0 @@ - . - "Bob Marley"@en . - "Bob Marley"@fr . - . - . - . - "Jamaica"@en . - "Giamaica"@it . - "17.9833"^^ . - "-76.8"^^ . - . diff --git a/src/test/resources/roundTripTestFiles/conversion/dbpedia-mappings.tib.eu/databus-repo/eisenbahnplatte/databus-client-testbed/format-testbed/2019.08.30/format-testbed_bob3.tsv b/src/test/resources/roundTripTestFiles/conversion/dbpedia-mappings.tib.eu/databus-repo/eisenbahnplatte/databus-client-testbed/format-testbed/2019.08.30/format-testbed_bob3.tsv deleted file mode 100644 index 53c2e8e..0000000 --- a/src/test/resources/roundTripTestFiles/conversion/dbpedia-mappings.tib.eu/databus-repo/eisenbahnplatte/databus-client-testbed/format-testbed/2019.08.30/format-testbed_bob3.tsv +++ /dev/null @@ -1,3 +0,0 @@ -resource http://www.w3.org/1999/02/22-rdf-syntax-ns#type http://dbpedia.org/ontology/birthPlace http://www.w3.org/2000/01/rdf-schema#seeAlso http://www.w3.org/2003/01/geo/wgs84_pos#lat http://www.w3.org/2003/01/geo/wgs84_pos#long http://www.w3.org/2000/01/rdf-schema#label http://xmlns.com/foaf/0.1/homepage -http://dbpedia.org/resource/Bob_Marley http://xmlns.com/foaf/0.1/Person http://dbpedia.org/resource/Jamaica http://dbpedia.org/resource/Rastafari Bob Marley -http://dbpedia.org/resource/Jamaica http://schema.org/Country 17.9833 -76.8 Giamaica http://jis.gov.jm/ diff --git a/src/test/resources/roundTripTestFiles/conversion/dbpedia-mappings.tib.eu/databus-repo/eisenbahnplatte/databus-client-testbed/format-testbed/2019.08.30/format-testbed_bob4.ttl b/src/test/resources/roundTripTestFiles/conversion/dbpedia-mappings.tib.eu/databus-repo/eisenbahnplatte/databus-client-testbed/format-testbed/2019.08.30/format-testbed_bob4.ttl deleted file mode 100644 index f47156e..0000000 --- a/src/test/resources/roundTripTestFiles/conversion/dbpedia-mappings.tib.eu/databus-repo/eisenbahnplatte/databus-client-testbed/format-testbed/2019.08.30/format-testbed_bob4.ttl +++ /dev/null @@ -1,19 +0,0 @@ -@prefix foaf: . -@prefix rdfs: . -@prefix ns0: . -@prefix schema: . -@prefix geo: . -@prefix xsd: . - - - a foaf:Person ; - rdfs:label "Bob Marley"@en, "Bob Marley"@fr ; - rdfs:seeAlso ; - ns0:birthPlace . - - - a schema:Country ; - rdfs:label "Jamaica"@en, "Giamaica"@it ; - geo:lat "17.9833"^^xsd:float ; - geo:long "-76.8"^^xsd:float ; - foaf:homepage . 
diff --git a/src/test/resources/roundTripTestFiles/conversion/dbpedia-mappings.tib.eu/databus-repo/eisenbahnplatte/databus-client-testbed/format-testbed/2019.08.30/format-testbed_bob5.rdf b/src/test/resources/roundTripTestFiles/conversion/dbpedia-mappings.tib.eu/databus-repo/eisenbahnplatte/databus-client-testbed/format-testbed/2019.08.30/format-testbed_bob5.rdf deleted file mode 100644 index f373633..0000000 --- a/src/test/resources/roundTripTestFiles/conversion/dbpedia-mappings.tib.eu/databus-repo/eisenbahnplatte/databus-client-testbed/format-testbed/2019.08.30/format-testbed_bob5.rdf +++ /dev/null @@ -1,24 +0,0 @@ - - - - Jamaica - - -76.8 - - Giamaica - 17.9833 - - - Bob Marley - - Bob Marley - - - - diff --git a/src/test/resources/roundTripTestFiles/conversion/dbpedia-mappings.tib.eu/databus-repo/eisenbahnplatte/databus-client-testbed/format-testbed/2019.08.30/format-testbed_bobcompact.jsonld b/src/test/resources/roundTripTestFiles/conversion/dbpedia-mappings.tib.eu/databus-repo/eisenbahnplatte/databus-client-testbed/format-testbed/2019.08.30/format-testbed_bobcompact.jsonld deleted file mode 100644 index 9986d17..0000000 --- a/src/test/resources/roundTripTestFiles/conversion/dbpedia-mappings.tib.eu/databus-repo/eisenbahnplatte/databus-client-testbed/format-testbed/2019.08.30/format-testbed_bobcompact.jsonld +++ /dev/null @@ -1 +0,0 @@ -[{"@id":"http://dbpedia.org/resource/Bob_Marley","@type":["http://xmlns.com/foaf/0.1/Person"],"http://www.w3.org/2000/01/rdf-schema#label":[{"@value":"Bob Marley","@language":"en"},{"@value":"Bob Marley","@language":"fr"}],"http://www.w3.org/2000/01/rdf-schema#seeAlso":[{"@id":"http://dbpedia.org/resource/Rastafari"}],"http://dbpedia.org/ontology/birthPlace":[{"@id":"http://dbpedia.org/resource/Jamaica"}]},{"@id":"http://dbpedia.org/resource/Jamaica","@type":["http://schema.org/Country"],"http://www.w3.org/2000/01/rdf-schema#label":[{"@value":"Jamaica","@language":"en"},{"@value":"Giamaica","@language":"it"}],"http://www.w3.org/2003/01/geo/wgs84_pos#lat":[{"@value":"17.9833","@type":"http://www.w3.org/2001/XMLSchema#float"}],"http://www.w3.org/2003/01/geo/wgs84_pos#long":[{"@value":"-76.8","@type":"http://www.w3.org/2001/XMLSchema#float"}],"http://xmlns.com/foaf/0.1/homepage":[{"@id":"http://jis.gov.jm/"}]},{"@id":"http://dbpedia.org/resource/Rastafari"},{"@id":"http://jis.gov.jm/"},{"@id":"http://schema.org/Country"},{"@id":"http://xmlns.com/foaf/0.1/Person"}] diff --git a/src/test/resources/roundTripTestFiles/conversion/output/format-testbed_bob.jsonld b/src/test/resources/roundTripTestFiles/conversion/output/format-testbed_bob.jsonld deleted file mode 100644 index 092f828..0000000 --- a/src/test/resources/roundTripTestFiles/conversion/output/format-testbed_bob.jsonld +++ /dev/null @@ -1,51 +0,0 @@ -{ - "@graph" : [ { - "@id" : "http://dbpedia.org/resource/Bob_Marley", - "@type" : "http://xmlns.com/foaf/0.1/Person", - "birthPlace" : "http://dbpedia.org/resource/Jamaica", - "label" : [ { - "@language" : "fr", - "@value" : "Bob Marley" - }, { - "@language" : "en", - "@value" : "Bob Marley" - } ], - "seeAlso" : "http://dbpedia.org/resource/Rastafari" - }, { - "@id" : "http://dbpedia.org/resource/Jamaica", - "@type" : "http://schema.org/Country", - "label" : [ { - "@language" : "it", - "@value" : "Giamaica" - }, { - "@language" : "en", - "@value" : "Jamaica" - } ], - "lat" : "17.9833", - "long" : "-76.8", - "homepage" : "http://jis.gov.jm/" - } ], - "@context" : { - "seeAlso" : { - "@id" : "http://www.w3.org/2000/01/rdf-schema#seeAlso", - "@type" : "@id" 
- }, - "label" : "http://www.w3.org/2000/01/rdf-schema#label", - "birthPlace" : { - "@id" : "http://dbpedia.org/ontology/birthPlace", - "@type" : "@id" - }, - "homepage" : { - "@id" : "http://xmlns.com/foaf/0.1/homepage", - "@type" : "@id" - }, - "long" : { - "@id" : "http://www.w3.org/2003/01/geo/wgs84_pos#long", - "@type" : "http://www.w3.org/2001/XMLSchema#float" - }, - "lat" : { - "@id" : "http://www.w3.org/2003/01/geo/wgs84_pos#lat", - "@type" : "http://www.w3.org/2001/XMLSchema#float" - } - } -} diff --git a/src/test/resources/roundTripTestFiles/conversion/output/format-testbed_bob2.nt b/src/test/resources/roundTripTestFiles/conversion/output/format-testbed_bob2.nt deleted file mode 100644 index bc8545b..0000000 --- a/src/test/resources/roundTripTestFiles/conversion/output/format-testbed_bob2.nt +++ /dev/null @@ -1,11 +0,0 @@ - . - . - . - . - . - "17.9833"^^ . - "Bob Marley"@fr . - "Jamaica"@en . - "-76.8"^^ . - "Giamaica"@it . - "Bob Marley"@en . diff --git a/src/test/resources/roundTripTestFiles/conversion/output/format-testbed_bob3.tsv b/src/test/resources/roundTripTestFiles/conversion/output/format-testbed_bob3.tsv deleted file mode 100644 index 53c2e8e..0000000 --- a/src/test/resources/roundTripTestFiles/conversion/output/format-testbed_bob3.tsv +++ /dev/null @@ -1,3 +0,0 @@ -resource http://www.w3.org/1999/02/22-rdf-syntax-ns#type http://dbpedia.org/ontology/birthPlace http://www.w3.org/2000/01/rdf-schema#seeAlso http://www.w3.org/2003/01/geo/wgs84_pos#lat http://www.w3.org/2003/01/geo/wgs84_pos#long http://www.w3.org/2000/01/rdf-schema#label http://xmlns.com/foaf/0.1/homepage -http://dbpedia.org/resource/Bob_Marley http://xmlns.com/foaf/0.1/Person http://dbpedia.org/resource/Jamaica http://dbpedia.org/resource/Rastafari Bob Marley -http://dbpedia.org/resource/Jamaica http://schema.org/Country 17.9833 -76.8 Giamaica http://jis.gov.jm/ diff --git a/src/test/resources/roundTripTestFiles/conversion/output/format-testbed_bob4.ttl b/src/test/resources/roundTripTestFiles/conversion/output/format-testbed_bob4.ttl deleted file mode 100644 index 1bd9298..0000000 --- a/src/test/resources/roundTripTestFiles/conversion/output/format-testbed_bob4.ttl +++ /dev/null @@ -1,20 +0,0 @@ - - a ; - - "Giamaica"@it , "Jamaica"@en ; - - "17.9833"^^ ; - - "-76.8"^^ ; - - . - - - a ; - - "Bob Marley"@fr , "Bob Marley"@en ; - - ; - - . 
- diff --git a/src/test/resources/roundTripTestFiles/conversion/output/format-testbed_bob5.rdf b/src/test/resources/roundTripTestFiles/conversion/output/format-testbed_bob5.rdf deleted file mode 100644 index 2cc8c8d..0000000 --- a/src/test/resources/roundTripTestFiles/conversion/output/format-testbed_bob5.rdf +++ /dev/null @@ -1,24 +0,0 @@ - - - - - - 17.9833 - Giamaica - -76.8 - - Jamaica - - - Bob Marley - Bob Marley - - diff --git a/src/test/resources/roundTripTestFiles/conversion/output/format-testbed_bobcompact.jsonld b/src/test/resources/roundTripTestFiles/conversion/output/format-testbed_bobcompact.jsonld deleted file mode 100644 index 092f828..0000000 --- a/src/test/resources/roundTripTestFiles/conversion/output/format-testbed_bobcompact.jsonld +++ /dev/null @@ -1,51 +0,0 @@ -{ - "@graph" : [ { - "@id" : "http://dbpedia.org/resource/Bob_Marley", - "@type" : "http://xmlns.com/foaf/0.1/Person", - "birthPlace" : "http://dbpedia.org/resource/Jamaica", - "label" : [ { - "@language" : "fr", - "@value" : "Bob Marley" - }, { - "@language" : "en", - "@value" : "Bob Marley" - } ], - "seeAlso" : "http://dbpedia.org/resource/Rastafari" - }, { - "@id" : "http://dbpedia.org/resource/Jamaica", - "@type" : "http://schema.org/Country", - "label" : [ { - "@language" : "it", - "@value" : "Giamaica" - }, { - "@language" : "en", - "@value" : "Jamaica" - } ], - "lat" : "17.9833", - "long" : "-76.8", - "homepage" : "http://jis.gov.jm/" - } ], - "@context" : { - "seeAlso" : { - "@id" : "http://www.w3.org/2000/01/rdf-schema#seeAlso", - "@type" : "@id" - }, - "label" : "http://www.w3.org/2000/01/rdf-schema#label", - "birthPlace" : { - "@id" : "http://dbpedia.org/ontology/birthPlace", - "@type" : "@id" - }, - "homepage" : { - "@id" : "http://xmlns.com/foaf/0.1/homepage", - "@type" : "@id" - }, - "long" : { - "@id" : "http://www.w3.org/2003/01/geo/wgs84_pos#long", - "@type" : "http://www.w3.org/2001/XMLSchema#float" - }, - "lat" : { - "@id" : "http://www.w3.org/2003/01/geo/wgs84_pos#lat", - "@type" : "http://www.w3.org/2001/XMLSchema#float" - } - } -} diff --git a/src/test/resources/roundTripTestFiles/conversion/shas.txt b/src/test/resources/roundTripTestFiles/conversion/shas.txt deleted file mode 100644 index 81e1bad..0000000 --- a/src/test/resources/roundTripTestFiles/conversion/shas.txt +++ /dev/null @@ -1,6 +0,0 @@ -b88dd8c15c9321b1441694f5683dee0adce8e8bd13795c5246b687c9248a49ec /home/eisenbahnplatte/git/DBpedia/databus-client/src/test/resources/roundTripTestFiles/conversion/dbpedia-mappings.tib.eu/databus-repo/eisenbahnplatte/databus-client-testbed/format-testbed/2019.08.30/format-testbed_bob.jsonld -9a39a1bac821a66bde87e3c986ced948ef235aa219645f5c952afc75d54f54f1 /home/eisenbahnplatte/git/DBpedia/databus-client/src/test/resources/roundTripTestFiles/conversion/dbpedia-mappings.tib.eu/databus-repo/eisenbahnplatte/databus-client-testbed/format-testbed/2019.08.30/format-testbed_bob2.nt -7af3a7cef890966c88dfb3d076551eb5a7be0e478081a4922f693beec0fab9e2 /home/eisenbahnplatte/git/DBpedia/databus-client/src/test/resources/roundTripTestFiles/conversion/dbpedia-mappings.tib.eu/databus-repo/eisenbahnplatte/databus-client-testbed/format-testbed/2019.08.30/format-testbed_bob3.tsv -d884a13b5c2b5c0e7f9239c817ad6b4c5058e1293d76381405d70306c734b210 /home/eisenbahnplatte/git/DBpedia/databus-client/src/test/resources/roundTripTestFiles/conversion/dbpedia-mappings.tib.eu/databus-repo/eisenbahnplatte/databus-client-testbed/format-testbed/2019.08.30/format-testbed_bob4.ttl 
-48940a6f9d0a4be8192f316a01d4a31bf6f1d4fc5e0088ec5d4ccafc62bb2b95 /home/eisenbahnplatte/git/DBpedia/databus-client/src/test/resources/roundTripTestFiles/conversion/dbpedia-mappings.tib.eu/databus-repo/eisenbahnplatte/databus-client-testbed/format-testbed/2019.08.30/format-testbed_bob5.rdf -3867bf70c58fb826038c433432e095c86865a903d9e667f08a3a4d147e919fba /home/eisenbahnplatte/git/DBpedia/databus-client/src/test/resources/roundTripTestFiles/conversion/dbpedia-mappings.tib.eu/databus-repo/eisenbahnplatte/databus-client-testbed/format-testbed/2019.08.30/format-testbed_bobcompact.jsonld diff --git a/src/test/resources/roundTripTestFiles/mapping/mappings/bb5ee80be67e2239a6b2fe869ea67ddedc2daacda6e2c6169d5125739b4954b9 b/src/test/resources/roundTripTestFiles/mapping/mappings/bb5ee80be67e2239a6b2fe869ea67ddedc2daacda6e2c6169d5125739b4954b9 deleted file mode 100644 index b8056e6..0000000 --- a/src/test/resources/roundTripTestFiles/mapping/mappings/bb5ee80be67e2239a6b2fe869ea67ddedc2daacda6e2c6169d5125739b4954b9 +++ /dev/null @@ -1,27 +0,0 @@ -PREFIX xxxlong: -PREFIX xsd: -PREFIX xxxhomepage: -PREFIX xxxseeAlso: -PREFIX xxxlat: -PREFIX xxxbirthPlace: -PREFIX xxxlabel: - -CONSTRUCT { -?resourcebinded a ?typebinded; - xxxhomepage:homepage ?homepagebinded; - xxxlat:lat ?latbinded; - xxxbirthPlace:birthPlace ?birthPlacebinded; - xxxlong:long ?longbinded; - xxxlabel:label ?label; - xxxseeAlso:seeAlso ?seeAlsobinded; - } -WHERE { - BIND(xsd:float(?lat) AS ?latbinded) - BIND(xsd:float(?long) AS ?longbinded) - BIND(URI(?birthPlace) AS ?birthPlacebinded) - BIND(URI(?type) AS ?typebinded) - BIND(URI(?seeAlso) AS ?seeAlsobinded) - BIND(URI(?homepage) AS ?homepagebinded) - BIND(URI(?resource) AS ?resourcebinded) -} - \ No newline at end of file diff --git a/src/test/resources/roundTripTestFiles/mapping/ntriples.nt b/src/test/resources/roundTripTestFiles/mapping/ntriples.nt deleted file mode 100644 index ed43546..0000000 --- a/src/test/resources/roundTripTestFiles/mapping/ntriples.nt +++ /dev/null @@ -1,11 +0,0 @@ - . - "Bob Marley"@en . - "Bob Marley"@fr . - . - . - . - "Jamaica"@en . - "Giamaica"@it . - "17.9833"^^ . - "-76.8"^^ . - . diff --git a/src/test/resources/roundTripTestFiles/mapping/ntriples.tsv b/src/test/resources/roundTripTestFiles/mapping/ntriples.tsv deleted file mode 100644 index 6f24b49..0000000 --- a/src/test/resources/roundTripTestFiles/mapping/ntriples.tsv +++ /dev/null @@ -1,3 +0,0 @@ -resource birthPlace homepage label lat long seeAlso type -http://dbpedia.org/resource/Bob_Marley http://dbpedia.org/resource/Jamaica Bob Marley http://dbpedia.org/resource/Rastafari http://xmlns.com/foaf/0.1/Person -http://dbpedia.org/resource/Jamaica http://jis.gov.jm/ Jamaica 17.9833 -76.8 http://schema.org/Country diff --git a/src/test/resources/roundTripTestFiles/mapping/ntriples_out.nt b/src/test/resources/roundTripTestFiles/mapping/ntriples_out.nt deleted file mode 100644 index 22c76a8..0000000 --- a/src/test/resources/roundTripTestFiles/mapping/ntriples_out.nt +++ /dev/null @@ -1,9 +0,0 @@ - . - . - "-76.8"^^ . - "Jamaica" . - . - . - . - "17.9833"^^ . - "Bob Marley" . 
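The deleted mapping resource above (bb5ee80b…) is an ordinary SPARQL CONSTRUCT query: Tarql binds each TSV column header as a variable (?resource, ?lat, …), and the BIND clauses rebuild IRIs and typed literals from the cell values. A self-contained sketch that parses such a mapping with Jena ARQ; the query text is a trimmed, illustrative variant, not the file itself:

import org.apache.jena.query.QueryFactory

object MappingFileParseSketch {
  def main(args: Array[String]): Unit = {
    // trimmed variant of the round-trip mapping file: one column -> one typed triple
    val mapping =
      """PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
        |CONSTRUCT {
        |  ?resourcebinded <http://www.w3.org/2003/01/geo/wgs84_pos#lat> ?latbinded .
        |}
        |WHERE {
        |  BIND(xsd:float(?lat) AS ?latbinded)
        |  BIND(URI(?resource) AS ?resourcebinded)
        |}""".stripMargin

    val query = QueryFactory.create(mapping)
    println(query.isConstructType) // true: the client can hand it to a Tarql engine
  }
}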
diff --git a/src/test/scala/LoggerTest.scala b/src/test/scala/LoggerTest.scala deleted file mode 100644 index b2cb5f1..0000000 --- a/src/test/scala/LoggerTest.scala +++ /dev/null @@ -1,28 +0,0 @@ -import java.nio.file.NoSuchFileException - -import better.files.File -import org.scalatest.FlatSpec -import org.slf4j.LoggerFactory - -class LoggerTest extends FlatSpec { - - info("starting...") - - "logger" should "print a File" in { - val logger = LoggerFactory.getLogger("Logger") - - - logger.debug("whahwhaw") - logger.info("Temperature has risen above 50 degrees.") - - } - - "logger" should "print something" in { - try { - File("hallo").delete() - } - catch { - case _: NoSuchFileException => LoggerFactory.getLogger("test").error("File does not exist") //deleteAndRestart(inputFile, inputFormat, outputFormat, targetFile: File) - } - } -} diff --git a/src/test/scala/apitest/apitest.scala b/src/test/scala/apitest/apitest.scala index 4bfa830..50e173f 100644 --- a/src/test/scala/apitest/apitest.scala +++ b/src/test/scala/apitest/apitest.scala @@ -17,7 +17,7 @@ class apitest extends FlatSpec { "DatabusObject" should "execute the Databus Main_Convert function in the right way" in { DatabusClient .source("/home/eisenbahnplatte/git/databus-client/src/resources/databus-client-testbed/format-testbed/2019.08.30/format-conversion-testbed_bob4.ttl") .compression(Compression.bz2) .format(Format.nt) .target("./test/") @@ -33,7 +33,7 @@ class apitest extends FlatSpec { "DatabusObject" should "convert files downloaded with query and already existing files, too" in { DatabusClient .source("./src/query/query3.query") .source("/home/eisenbahnplatte/git/databus-client/src/resources/databus-client-testbed/format-testbed/2019.08.30/format-conversion-testbed_bob4.ttl") .format(Format.nt) .config("overwrite", "true") .compression(Compression.bz2)
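The archived ConverterTest below checks that triples survive a write/read cycle. The same round-trip idea in a few lines of plain Jena, independent of Spark (a sketch, not the project's test code):

import java.io.{ByteArrayInputStream, ByteArrayOutputStream}
import org.apache.jena.rdf.model.ModelFactory
import org.apache.jena.riot.{Lang, RDFDataMgr}

object RoundTripSketch {
  def main(args: Array[String]): Unit = {
    // build a one-triple model, serialise it, parse it back, compare
    val model = ModelFactory.createDefaultModel()
    model.add(
      model.createResource("http://dbpedia.org/resource/Bob_Marley"),
      model.createProperty("http://www.w3.org/2000/01/rdf-schema#label"),
      model.createLiteral("Bob Marley", "en"))

    val out = new ByteArrayOutputStream()
    RDFDataMgr.write(out, model, Lang.NTRIPLES)

    val back = ModelFactory.createDefaultModel()
    RDFDataMgr.read(back, new ByteArrayInputStream(out.toByteArray), Lang.NTRIPLES)

    println(model.isIsomorphicWith(back)) // expected: true
  }
}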
diff --git a/src/test/scala/conversionTests/conversion/ConverterTest.scala b/src/test/scala/archived/ConverterTest.scala similarity index 98% rename from src/test/scala/conversionTests/conversion/ConverterTest.scala rename to src/test/scala/archived/ConverterTest.scala index f6609ee..dca137d 100644 --- a/src/test/scala/conversionTests/conversion/ConverterTest.scala +++ b/src/test/scala/archived/ConverterTest.scala @@ -1,6 +1,5 @@ -package conversionTests.conversion +package archived -import java.io._ import better.files.File import org.apache.jena.atlas.iterator.IteratorResourceClosing import org.apache.jena.graph.Triple @@ -18,6 +17,7 @@ import org.dbpedia.databus.client.filehandling.convert.format.rdf.triples.Triple import org.dbpedia.databus.client.filehandling.convert.format.rdf.triples.format.{RDFXML, Turtle} import org.scalatest.FlatSpec +import java.io._ import scala.collection.JavaConverters._ import scala.reflect.ClassTag @@ -33,7 +33,7 @@ class ConverterTest extends FlatSpec { "RDD" should "not contain empty Lines" in { val file = File("/home/eisenbahnplatte/git/databus-client/src/resources/databus-client-testbed/format-testbed/2019.08.30/format-conversion-testbed_bob4.ttl") readTriples(file).foreach(println(_)) @@ -63,7 +63,7 @@ class ConverterTest extends FlatSpec { "Written File" should "not contain empty Lines" in { val file = File("/home/eisenbahnplatte/git/databus-client/src/resources/databus-client-testbed/format-testbed/2019.08.30/format-conversion-testbed_bob4.ttl") val triples = readTriples(file) diff --git a/src/test/scala/conversionTests/mapFromExternalSource/gitHubTarqlMapTests.scala b/src/test/scala/archived/mapFromExternalSource/gitHubTarqlMapTests.scala similarity index 97% rename from src/test/scala/conversionTests/mapFromExternalSource/gitHubTarqlMapTests.scala rename to src/test/scala/archived/mapFromExternalSource/gitHubTarqlMapTests.scala index 9630b7b..e2c7d37 100644 --- a/src/test/scala/conversionTests/mapFromExternalSource/gitHubTarqlMapTests.scala +++ b/src/test/scala/archived/mapFromExternalSource/gitHubTarqlMapTests.scala @@ -1,4 +1,4 @@ -package conversionTests.mapFromExternalSource +package archived.mapFromExternalSource import org.apache.jena.query.{Query, QueryExecution, QueryExecutionFactory, QueryFactory, ResultSet} import org.apache.jena.rdf.model.{ModelFactory, ResourceFactory} @@ -6,7 +6,7 @@ import org.apache.jena.riot.{Lang, RDFDataMgr} import org.scalatest.FlatSpec class gitHubTarqlMapTests extends FlatSpec{ "Sparql Query" should "return necessary data for mapping" in { val url = "https://raw.githubusercontent.com/dbpedia/format-mappings/master/tarql/1.ttl" val model = RDFDataMgr.loadModel(url,Lang.TTL) diff --git a/src/test/scala/conversionTests/mapping/getMappingTest.scala b/src/test/scala/archived/mapping/getMappingTest.scala similarity index 91% rename from src/test/scala/conversionTests/mapping/getMappingTest.scala rename to src/test/scala/archived/mapping/getMappingTest.scala index fab86e3..92acd49 100644 --- a/src/test/scala/conversionTests/mapping/getMappingTest.scala +++ b/src/test/scala/archived/mapping/getMappingTest.scala @@ -1,4 +1,4 @@ -package conversionTests.mapping +package archived.mapping import better.files.File import org.apache.jena.query._ @@ -109,14 +109,14 @@ class getMappingTest extends FlatSpec{ assert(model.isEmpty) } // "conversionTests/mapping" should "be returned" in { // val path = "/home/eisenbahnplatte/git/databus-client/src/resources/mappingTests/getMapping/bnetza-mastr_rli_type=hydro.csv.bz2" // val sha = FileUtil.getSha256(File(path)) // println(sha) // println(QueryHandler.getMapping(sha)) // } "mappingInfo" should "return mapping" in { val mappingInfo = "https://raw.githubusercontent.com/dbpedia/format-mappings/master/tarql/1.ttl#this" val model = RDFDataMgr.loadModel(mappingInfo) @@ -129,10 +129,10 @@ s""" |PREFIX tmp: | |SELECT DISTINCT ?mapping |WHERE { |?mapping a tmp:MappingFile . |<$mappingInfo> tmp:hasMappingFile ?mapping . |} |""".stripMargin
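The query above looks up the mapping file attached to a mapping-info document. A runnable sketch of the same lookup against an in-memory model follows; note that "." is not a legal character inside a SPARQL variable name, so the variable must be plain ?mapping. The tmp: namespace IRI is elided in this excerpt, so a clearly labelled placeholder is used here:

import org.apache.jena.query.{QueryExecutionFactory, QueryFactory}
import org.apache.jena.rdf.model.ModelFactory

object MappingLookupSketch {
  def main(args: Array[String]): Unit = {
    val ns = "http://example.org/tmp#" // placeholder namespace: the real IRI is elided in the excerpt
    val model = ModelFactory.createDefaultModel()
    model.add(
      model.createResource(ns + "info"),
      model.createProperty(ns + "hasMappingFile"),
      model.createResource(ns + "mapping1"))

    val query = QueryFactory.create(
      s"""PREFIX tmp: <$ns>
         |SELECT DISTINCT ?mapping
         |WHERE { <${ns}info> tmp:hasMappingFile ?mapping . }""".stripMargin)

    // execute the lookup and print every mapping file found
    val exec = QueryExecutionFactory.create(query, model)
    val results = exec.execSelect()
    while (results.hasNext) println(results.next().get("mapping"))
    exec.close()
  }
}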
@@ -151,10 +151,10 @@ | |SELECT DISTINCT * |WHERE { |?mapping a tmp:MappingFile ; | tmp:hasDelimiter ?delimiter ; | tmp:hasQuotation ?quotation . |<$mappingInfoFile> tmp:hasMappingFile ?mapping . |} |""".stripMargin diff --git a/src/test/scala/conversionTests/mapping/multipleConstructs.scala b/src/test/scala/archived/mapping/multipleConstructs.scala similarity index 99% rename from src/test/scala/conversionTests/mapping/multipleConstructs.scala rename to src/test/scala/archived/mapping/multipleConstructs.scala index 6273815..e567ccd 100644 --- a/src/test/scala/conversionTests/mapping/multipleConstructs.scala +++ b/src/test/scala/archived/mapping/multipleConstructs.scala @@ -1,4 +1,4 @@ -package conversionTests.mapping +package archived.mapping import java.io.StringReader diff --git a/src/test/scala/conversionTests/mapping/roundTripTests.scala b/src/test/scala/archived/mapping/roundTripTests.scala similarity index 97% rename from src/test/scala/conversionTests/mapping/roundTripTests.scala rename to src/test/scala/archived/mapping/roundTripTests.scala index f13cc5d..e480de9 100644 --- a/src/test/scala/conversionTests/mapping/roundTripTests.scala +++ b/src/test/scala/archived/mapping/roundTripTests.scala @@ -1,4 +1,4 @@ -//package conversionTests.mapping +//package archived.mapping // //import java.net.URL //import better.files.File @@ -9,7 +9,7 @@ //import org.dbpedia.databus.client.filehandling.convert.format.tsd.TSDHandler //import org.dbpedia.databus.client.filehandling.convert.format.rdf.triples.TripleHandler //import org.dbpedia.databus.client.filehandling.download.Downloader //import org.dbpedia.databus.client.filehandling.convert.mapping.util.MappingInfo //import org.dbpedia.databus.client.filehandling.{FileHandler, FileUtil} //import org.scalatest.FlatSpec // @@ -26,7 +26,7 @@ // implicit val sparkContext: SparkContext = spark.sparkContext // sparkContext.setLogLevel("WARN") // // val testFileDir:File = File("./src/test/resources/roundTripTestFiles/mapping/") // val tempDir:File = testFileDir / "tempDir" // // diff --git a/src/test/scala/conversionTests/mapping/test_convertToTSVandBackToTTL.scala b/src/test/scala/archived/mapping/test_convertToTSVandBackToTTL.scala similarity index 98% rename from src/test/scala/conversionTests/mapping/test_convertToTSVandBackToTTL.scala rename to src/test/scala/archived/mapping/test_convertToTSVandBackToTTL.scala index 1174846..7bbbf6c 100644 --- a/src/test/scala/conversionTests/mapping/test_convertToTSVandBackToTTL.scala +++ b/src/test/scala/archived/mapping/test_convertToTSVandBackToTTL.scala @@ -1,4 +1,4 @@ -package conversionTests.mapping +package archived.mapping import java.io.PrintWriter @@ -47,7 +47,7 @@ class test_convertToTSVandBackToTTL extends FlatSpec{ } // "databus-client" should "convert created tsv and mapping file back to ttl" in { // //// CONVERT TO TSV // println("CONVERT TO TSV")
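The archived round-trip tests reload the written TSV to compare it against the input. A minimal sketch of that reload step, mirroring the readTSDasDF helper in the deleted roundTripTests further below (the input path is a placeholder):

import org.apache.spark.sql.SparkSession

object TsvReadSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().appName("tsv sketch").master("local[*]").getOrCreate()
    // read a tab-separated file back into a DataFrame, inferring column types
    val df = spark.read.format("csv")
      .option("sep", "\t")
      .option("inferSchema", "true")
      .option("header", "true")
      .load("./files/example.tsv") // placeholder path
    df.show()
    spark.stop()
  }
}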
diff --git a/src/test/scala/conversionTests/mapping/tsv_writer_tests.scala b/src/test/scala/archived/mapping/tsv_writer_tests.scala similarity index 99% rename from src/test/scala/conversionTests/mapping/tsv_writer_tests.scala rename to src/test/scala/archived/mapping/tsv_writer_tests.scala index ce798f0..a3aaaa6 100644 --- a/src/test/scala/conversionTests/mapping/tsv_writer_tests.scala +++ b/src/test/scala/archived/mapping/tsv_writer_tests.scala @@ -1,4 +1,4 @@ -//package conversionTests.mapping +//package archived.mapping // //import java.io.PrintWriter //import better.files.File @@ -190,7 +190,7 @@ // TTLWriter2.convertToTSV(triplesRDD, spark, targetFile) // } // // "DataFrame with right header" should "be created from testBob.ttl and saved as csv with corresponding mapping file" in { // // val inputFile = File(s"${testDir}testBob.ttl") // val targetFile: File = inputFile.parent / inputFile.nameWithoutExtension.concat(".tsv") diff --git a/src/test/scala/conversionTests/mapping/tsvmapperTests.scala b/src/test/scala/archived/mapping/tsvmapperTests.scala similarity index 96% rename from src/test/scala/conversionTests/mapping/tsvmapperTests.scala rename to src/test/scala/archived/mapping/tsvmapperTests.scala index 801429a..1b63a92 100644 --- a/src/test/scala/conversionTests/mapping/tsvmapperTests.scala +++ b/src/test/scala/archived/mapping/tsvmapperTests.scala @@ -1,4 +1,4 @@ -//package conversionTests.mapping +//package archived.mapping // //import better.files.File //import org.apache.jena.graph.Triple @@ -145,9 +145,9 @@ // // "databus-client" should "convert tsv to ttlTest" in { // // val inputFilePath = "/home/eisenbahnplatte/git/databus-client/src/resources/databus-client-testbed/format-testbed/2019.08.30/format-conversion-testbed_bob3.tsv" // val mappingFilePath = "/home/eisenbahnplatte/git/databus-client/src/resources/databus-client-testbed/format-testbed/2019.08.30/format-conversion-testbed_bob4_mapping.sparql" // val outputFile= File("/home/eisenbahnplatte/git/databus-client/files/NoDataID/src/resources/databus-client-testbed/format-testbed/2019.08.30/format-conversion-testbed_bob3.ttl") // val tempDir = File(s"${testDir}tempDir") // if (tempDir.exists) tempDir.delete() // diff --git a/src/test/scala/cliTest.scala b/src/test/scala/cliTest.scala deleted file mode 100644 index db1d9e5..0000000 --- a/src/test/scala/cliTest.scala +++ /dev/null @@ -1,10 +0,0 @@ -import org.scalatest.FlatSpec - -class cliTest extends FlatSpec { - -// "DatabusClient" should "handle not set params of Cli right" in { -// val conf = new CLIConf(Array[String]()) -// -// if (!conf.query.isDefined) println("query is not set") -// } -} diff --git a/src/test/scala/conversionTests/conversion/getOutPutFileTest.scala b/src/test/scala/conversionTests/conversion/getOutPutFileTest.scala deleted file mode 100644 index 1e2f959..0000000 --- a/src/test/scala/conversionTests/conversion/getOutPutFileTest.scala +++ /dev/null @@ -1,113 +0,0 @@ -package conversionTests.conversion - -import better.files.File
-import org.scalatest.FlatSpec - -class getOutPutFileTest extends FlatSpec{ - - "Object" should "give right outFile to inFile" in { - val inFile = File("/home/eisenbahnplatte/git/databus-client/cache_dir/dbpedia-mappings.tib.eu/release/mappings/geo-coordinates-mappingbased/2019.04.20/geo-coordinates-mappingbased_lang=ca.ttl.bz2") - val outFormat = "nt" - val outComp = "gz" - val src_dir = File("./cache_dir") - val dest_dir = File("./testTarget/") - -// val outFile = OutPutFileGetter.getOutputFile(inFile, outFormat, outComp, src_dir, dest_dir) - - val comparisonFile = File("./testTarget/marvin/mappings/geo-coordinates-mappingbased/2019.04.20/geo-coordinates-mappingbased_lang=ca.nt.gz") -// assert( outFile == comparisonFile) - } - - -// "Object" should "give right outFile to inFile with new Method" in { -// val inFile = File("/home/eisenbahnplatte/git/databus-client/cache_dir/dbpedia-mappings.tib.eu/release/mappings/geo-coordinates-mappingbased/2019.04.20/geo-coordinates-mappingbased_lang=ca.ttl.bz2") -// val outFormat = "nt" -// val outComp = "gz" -// val dest_dir = File("./testTarget/") -// -// val outFile = OutPutFileGetter.newGetOutputFile(inFile, outFormat, outComp, dest_dir) -// -// val comparisonFile = File("./testTarget/marvin/mappings/geo-coordinates-mappingbased/2019.04.20/geo-coordinates-mappingbased_lang=ca.nt.gz") -// assert( outFile == comparisonFile) -// } -} - -object OutPutFileGetter { -// def getOutputFile(inputFile: File, outputFormat: String, outputCompression: String, src_dir: File, dest_dir: File): File = { -// -// val nameWithoutExtension = inputFile.nameWithoutExtension -// val name = inputFile.name -// var filepath_new = "" -// val dataIdFile = inputFile.parent / "dataid.ttl" -// -// val newOutputFormat = { -// if (outputFormat == "rdfxml") "rdf" -// else outputFormat -// } -// -// if (dataIdFile.exists) { -// val dir_structure: List[String] = QueryHandler.executeDataIdQuery(dataIdFile) -// filepath_new = dest_dir.pathAsString.concat("/") -// dir_structure.foreach(dir => filepath_new = filepath_new.concat(dir).concat("/")) -// filepath_new = filepath_new.concat(nameWithoutExtension) -// } -// else { -// // changeExtensionTo() funktioniert nicht bei noch nicht existierendem File, deswegen ausweichen über Stringmanipulation -// // filepath_new = inputFile.pathAsString.replace(src_dir.pathAsString, dest_dir.pathAsString.concat("/NoDataID")) -// filepath_new = dest_dir.pathAsString.concat("/NoDataID").concat(inputFile.pathAsString.replace(File(".").pathAsString, "")) //.concat(nameWithoutExtension) -// -// filepath_new = filepath_new.replaceAll(name, nameWithoutExtension) -// } -// -// if (outputCompression.isEmpty) { -// filepath_new = filepath_new.concat(".").concat(newOutputFormat) -// } -// else { -// filepath_new = filepath_new.concat(".").concat(newOutputFormat).concat(".").concat(outputCompression) -// } -// -// val outputFile = File(filepath_new) -// //create necessary parent directories to write the outputfile there, later -// outputFile.parent.createDirectoryIfNotExists(createParents = true) -// -// println(s"converted file:\t${outputFile.pathAsString}\n") -// -// outputFile -// } - -// def newGetOutputFile(inputFile: File, outputFormat: String, outputCompression: String, dest_dir: File): File = { -// -// val nameWithoutExtension = inputFile.nameWithoutExtension -// -// val dataIdFile = inputFile.parent / "dataid.ttl" -// -// val newOutputFormat = { -// if (outputFormat == "rdfxml") "rdf" -// else outputFormat -// } -// -// val outputDir = { -// if 
-//      else
-//        File(dest_dir.pathAsString.concat("/NoDataID")
-//          .concat(inputFile.pathAsString.splitAt(inputFile.pathAsString.lastIndexOf("/"))._1
-//            .replace(File(".").pathAsString, "")
-//          )
-//        )
-//    }
-//
-//    val newName = {
-//      if (outputCompression.isEmpty) s"$nameWithoutExtension.$newOutputFormat"
-//      else s"$nameWithoutExtension.$newOutputFormat.$outputCompression"
-//    }
-//
-//    val outputFile = outputDir / newName
-//
-//    //create necessary parent directories to write the outputfile there, later
-//    outputFile.parent.createDirectoryIfNotExists(createParents = true)
-//
-//    println(s"converted file:\t${outputFile.pathAsString}\n")
-//
-//    outputFile
-//  }
-}
diff --git a/src/test/scala/conversionTests/conversion/roundTripTests.scala b/src/test/scala/conversionTests/conversion/roundTripTests.scala
deleted file mode 100644
index 8e8afeb..0000000
--- a/src/test/scala/conversionTests/conversion/roundTripTests.scala
+++ /dev/null
@@ -1,252 +0,0 @@
-//package conversionTests.conversion
-//
-//import better.files.File
-//import org.apache.jena.riot.RDFDataMgr
-//import org.apache.spark.SparkContext
-//import org.apache.spark.sql.{DataFrame, SparkSession}
-//import org.dbpedia.databus.client.filehandling.convert.format.tsd.TSDHandler
-//import org.dbpedia.databus.client.filehandling.convert.format.rdf.quads.QuadsHandler
-//import org.dbpedia.databus.client.filehandling.convert.format.rdf.triples.TripleHandler
-//import org.dbpedia.databus.client.filehandling.download.Downloader
-//import org.dbpedia.databus.client.filehandling.{FileHandler, FileUtil}
-//import org.scalatest.FlatSpec
-//import org.scalatest.Matchers._
-//
-//import scala.collection.mutable.ListBuffer
-//class roundTripTests extends FlatSpec{
-//
-//  val spark: SparkSession = SparkSession.builder()
-//    .appName(s"Triple reader")
-//    .master("local[*]")
-//    .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
-//    .getOrCreate()
-//
-//  implicit val sparkContext: SparkContext = spark.sparkContext
-//  sparkContext.setLogLevel("WARN")
-//
-//
-//  val testFileDir:File = downloadFiles(File("./src/test/resources/roundTripTestFiles/conversion"))
-////  val testFileDir = File("./src/test/resources/roundTripTestFiles")
-//  val outDir:File = testFileDir / "output"
-//  val tempDir:File = outDir / "tempDir"
-//
-//  def downloadFiles(testFileDir:File): File ={
-//
-//    val queryTestFiles=
-//      """
-//        |PREFIX dataid: <http://dataid.dbpedia.org/ns/core#>
-//        |PREFIX dataid-cv: <http://dataid.dbpedia.org/ns/cv#>
-//        |PREFIX dct: <http://purl.org/dc/terms/>
-//        |PREFIX dcat: <http://www.w3.org/ns/dcat#>
-//        |
-//        |# Get all files
-//        |SELECT DISTINCT ?file WHERE {
-//        | ?dataset dataid:artifact .
-//        | ?dataset dcat:distribution ?distribution .
-//        | {
-//        |   ?distribution dct:hasVersion ?latestVersion
-//        |   {
-//        |     SELECT (?version as ?latestVersion) WHERE {
-//        |       ?dataset dataid:artifact .
-//        |       ?dataset dct:hasVersion ?version .
-//        |     } ORDER BY DESC (?version) LIMIT 1
-//        |   }
-//        | }
-//        | ?distribution dcat:downloadURL ?file .
-//        |}
-//        |""".stripMargin
-//
-//    Downloader.downloadWithQuery(queryTestFiles, testFileDir)
-//
-//    testFileDir
-//  }
-//
-//  def readAndWriteTriples(inputFile:File, tempDir:File): File = {
-//    val format = FileUtil.getFormatType(inputFile,"")
-//    val triples = new TripleHandler().read(inputFile.pathAsString, format)
-//
-//    if(format=="rdf") new TripleHandler().write(triples, "rdfxml")
-//    else new TripleHandler().write(triples, format)
-//
-//    val targetFile = tempDir.parent / inputFile.name
-//
-//    try {
-//      FileUtil.unionFiles(tempDir, targetFile)
-//      tempDir.delete()
-//    }
-//    catch {
-//      case _: RuntimeException => "File $targetFile already exists"
-//    }
-//
-//    targetFile
-//  }
-//
-//  def readAndWriteTSD(inputFile:File, tempDir:File, spark:SparkSession):File={
-//    val format = FileHandler.getFormatType(inputFile,"")
-//    val delimiter = {
-//      if(format=="csv") ','
-//      else '\t'
-//    }
-//    val dataFrame = new TSDHandler().read(inputFile.pathAsString, format, delimiter)
-//
-//    new TSDHandler().write(dataFrame, format, delimiter)
-//
-//    val targetFile = tempDir.parent / inputFile.name
-//
-//    try {
-//      FileUtil.unionFiles(tempDir, targetFile)
-//      tempDir.delete()
-//    }
-//    catch {
-//      case _: RuntimeException => "File $targetFile already exists"
-//    }
-//
-//    targetFile
-//  }
-//
-//  def readTSDasDF(tsdFile:File):DataFrame ={
-//    val format = FileHandler.getFormatType(tsdFile,"")
-//    val delimiter = {
-//      if (format == "csv") ","
-//      else "\t"
-//    }
-//
-//    spark.read.format("csv")
-//      .option("sep", delimiter.toString)
-//      .option("inferSchema", "true")
-//      .option("header", "true")
-//      .load(tsdFile.pathAsString)
-//
-//  }
-//
-//  def checkDFEquality(df1:DataFrame, df2:DataFrame):Boolean ={
-//    if (df1.columns.deep == df2.columns.deep) {
-//      val rowsExist:ListBuffer[Boolean] = ListBuffer()
-//      val array1 = df1.collect()
-//      val array2 = df2.collect()
-//
-//      array1.foreach(
-//        row => {
-////          println(row)
-//          var rowExists = false
-//          array2.foreach(
-//            rowArray2 => {
-//              if(row.equals(rowArray2)) rowExists=true
-//            }
-//          )
-//          rowsExist += rowExists
-//        }
-//      )
-//      if (rowsExist.contains(false)) false
-//      else true
-//    }
-//    else false
-//  }
-//
-//  "roundtriptest" should "succeed for all RDF formats" in {
-//
-//
-//    val rdfFiles = testFileDir
-//      .listRecursively
-//      .filter(file =>
-//        !(file.nameWithoutExtension(true) matches "dataid")
-//      )
-//      .filter(file =>
-//        file.extension().getOrElse("") matches ".rdf|.nt|.ttl|.jsonld"
-//      )
-//
-//    val successList: ListBuffer[Seq[String]] = ListBuffer()
-//
-//    while (rdfFiles.hasNext) {
-//      val inputFile = rdfFiles.next()
-//      val outputFile = readAndWriteTriples(inputFile, tempDir, spark)
-//
-//      println(inputFile.pathAsString)
-//
-//      val statementsInput = RDFDataMgr.loadModel(inputFile.pathAsString).listStatements().toList
-//      val statementsOutput = RDFDataMgr.loadModel(outputFile.pathAsString).listStatements().toList
-//
-//      if (statementsInput.containsAll(statementsOutput) && statementsOutput.containsAll(statementsInput)) successList += Seq(inputFile.pathAsString, "succeed")
-//      else successList += Seq(inputFile.pathAsString, "error")
-//    }
-//
-//
-//    println(successList.isEmpty)
-//    var success = {
-//      if (successList.isEmpty) false
-//      else true
-//    }
-//
-//    successList.foreach(conversion => {
-//      if (conversion(1) == "error") {
-//        success = false
-//        println(s"${conversion.head} did not convert properly")
-//      }
-//    })
-//
-//    success shouldBe true
-//  }
-//
-//  "roundtriptest" should "succeed for all TSD formats" in {
-//
-//    val tsdFiles = testFileDir
-//      .listRecursively
-//      .filter(file =>
-//        !(file.nameWithoutExtension(true) matches "dataid")
-//      )
-//      .filter(file =>
-//        file.extension().getOrElse("") matches ".tsv|.csv"
-//      )
-//
-//    val successList: ListBuffer[Seq[String]] = ListBuffer()
-//
-//    while (tsdFiles.hasNext) {
-//      val inputFile = tsdFiles.next()
-//      val outputFile = readAndWriteTSD(inputFile, tempDir, spark)
-//
-//      println(inputFile.pathAsString)
-//
-//
-//      val dataInput = readTSDasDF(inputFile).sort()
-//      val dataOutput = readTSDasDF(outputFile).sort()
-//
-////      dataInput.show()
-////      dataOutput.show()
-//
-//      if (checkDFEquality(dataInput,dataOutput) && checkDFEquality(dataOutput,dataInput)) successList += Seq(inputFile.pathAsString, "succeed")
-//      else successList += Seq(inputFile.pathAsString, "error")
-//    }
-//
-//    var success = {
-//      if (successList.isEmpty) false
-//      else true
-//    }
-//
-//    successList.foreach(conversion => {
-//      println(s"${conversion.head},${conversion(1)}")
-//      if (conversion(1) == "error") {
-//        success = false
-//        println(s"${conversion.head} did not convert properly")
-//      }
-//    })
-//
-//    success shouldBe true
-//  }
-//
-//  "roundtriptest" should "succeed for all RDF Quad formats" in {
-//    val quads = QuadsHandler.read("src/test/resources/roundTripTestFiles/conversion/quads/trig.trig","trig")
-//    QuadsHandler.write(quads, "trig")
-//
-//    val targetFile = tempDir.parent / "nq.nq"
-//
-//    try {
-//      FileUtil.unionFiles(tempDir, targetFile)
-//      tempDir.delete()
-//    }
-//    catch {
-//      case _: RuntimeException => "File $targetFile already exists"
-//    }
-//
-//    targetFile
-//  }
-//}
diff --git a/src/test/scala/download/DownloadTest.scala b/src/test/scala/download/DownloadTest.scala
new file mode 100644
index 0000000..16f469e
--- /dev/null
+++ b/src/test/scala/download/DownloadTest.scala
@@ -0,0 +1,20 @@
+package download
+
+import better.files.File
+import org.dbpedia.databus.client.filehandling.FileUtil
+import org.dbpedia.databus.client.filehandling.download.Downloader
+import org.scalatest.FlatSpec
+
+class DownloadTest extends FlatSpec{
+
+  val testDir = File("./src/test/resources/queries")
+  val outDir = testDir.parent / "output"
+
+  "downloader" should "download with query" in {
+    val queryFile = testDir / "query1.sparql"
+
+    val queryString = FileUtil.readQueryFile(queryFile)
+
+    Downloader.downloadWithQuery(queryString, outDir)
+  }
+}
diff --git a/src/test/scala/downloaderTests/queryHandlerTests/QueryTest.scala b/src/test/scala/download/QueryTest.scala
similarity index 98%
rename from src/test/scala/downloaderTests/queryHandlerTests/QueryTest.scala
rename to src/test/scala/download/QueryTest.scala
index 1247dd9..8230cdf 100644
--- a/src/test/scala/downloaderTests/queryHandlerTests/QueryTest.scala
+++ b/src/test/scala/download/QueryTest.scala
@@ -1,4 +1,4 @@
-package downloaderTests.queryHandlerTests
+package download
 
 import org.apache.jena.query._
 import org.dbpedia.databus.client.sparql.QueryHandler
diff --git a/src/test/scala/downloaderTests/downloaderTest.scala b/src/test/scala/downloaderTests/downloaderTest.scala
deleted file mode 100644
index eee0aa4..0000000
--- a/src/test/scala/downloaderTests/downloaderTest.scala
+++ /dev/null
@@ -1,27 +0,0 @@
-package downloaderTests
-
-import java.net.URL
-
-import better.files.File
-import org.dbpedia.databus.client.filehandling.download.Downloader
-import org.scalatest.FlatSpec
-
-class downloaderTest extends FlatSpec{
-
-
-  "urls" should "be split right" in {
-
-    val url1 = "http://dbpedia-mappings.tib.eu/release/mappings/geo-coordinates-mappingbased/2019.04.20/geo-coordinates-mappingbased_lang=ca.ttl.bz2"
"http://dbpedia-mappings.tib.eu/release/mappings/geo-coordinates-mappingbased/2019.04.20/geo-coordinates-mappingbased_lang=ca.ttl.bz2" - val url2 = "https://dbpedia-mappings.tib.eu/release/mappings/geo-coordinates-mappingbased/2019.04.20/geo-coordinates-mappingbased_lang=cs.ttl.bz2" - - val split = url1.split("http[s]?://").last - println(split) - url1.split("http://|https://").map(_.trim).last - } - - "downloader" should "not interupt when getting bad uri" in { - val url = "http://downloads.dbpedia.org/repo/dbpedia/spotlight/spotlight-wikistats/2020.03.11/spotlight-wikistats_type=sfAndTotalCounts_lang=zh.tsv.bz2" - - Downloader.downloadUrlToFile(new URL(url), File("./test/")) - } -} diff --git a/src/test/scala/downloaderTests/getRequestTest.scala b/src/test/scala/downloaderTests/getRequestTest.scala deleted file mode 100644 index 8c5c038..0000000 --- a/src/test/scala/downloaderTests/getRequestTest.scala +++ /dev/null @@ -1,28 +0,0 @@ -package downloaderTests - -import org.apache.http.HttpHeaders -import org.apache.http.client.ResponseHandler -import org.apache.http.client.methods.HttpGet -import org.apache.http.impl.client.{BasicResponseHandler, HttpClientBuilder} -import org.scalatest.FlatSpec - -class getRequestTest extends FlatSpec { - - def getQueryOfCollection(uri: String): String = { - val client = HttpClientBuilder.create().build() - - val httpGet = new HttpGet(uri) - httpGet.addHeader(HttpHeaders.ACCEPT, "text/sparql") - - val response = client.execute(httpGet) - val handler: ResponseHandler[String] = new BasicResponseHandler() - - handler.handleResponse(response) - } - - "collectionSTR" should "return Query" in { - println(getQueryOfCollection("https://databus.dbpedia.org/jfrey/collections/id-management_links")) - - - } -} diff --git a/src/test/scala/downloaderTests/queryHandlerTests/QueryTests.scala b/src/test/scala/downloaderTests/queryHandlerTests/QueryTests.scala deleted file mode 100644 index eebc571..0000000 --- a/src/test/scala/downloaderTests/queryHandlerTests/QueryTests.scala +++ /dev/null @@ -1,30 +0,0 @@ -package downloaderTests.queryHandlerTests - -import better.files.File -import org.dbpedia.databus.client.sparql.QueryHandler -import org.scalatest.FlatSpec - -class QueryTests extends FlatSpec{ - -// "QueryHandler" should "return right TargetDir" in { -// val dataID= File("dataId_test.ttl") -// val dest_dir = File("test") -// QueryHandler.downloadDataIdFile("https://databus.dbpedia.org/data/databus/databus-data/2019.11.10/databus-data.nt.bz2", dataID) -// val targetDir = QueryHandler.getTargetDir(dataID, dest_dir) -// -// val comparisonDir = File("test") / "dbpedia" / "databus" / "databus-data" / "2019.11.10" -// assert(comparisonDir == targetDir) -// } - - "QueryHandler" should "return right dirList" in { - val dataID= File("dataId_test") - QueryHandler.downloadDataIdFile("https://databus.dbpedia.org/data/databus/databus-data/2019.11.10/databus-data.nt.bz2", dataID) -// val targetDir = QueryHandler.executeDataIdQuery(dataID) - -// targetDir.foreach(println(_)) - - val comparisonDir = File("test") / "dbpedia" / "databus" / "databus-data" / "2019.11.10" -// assert(comparisonDir == targetDir) - } - -} diff --git a/src/test/scala/format/conversion/RoundTripTests.scala b/src/test/scala/format/conversion/RoundTripTests.scala new file mode 100644 index 0000000..ca9e18b --- /dev/null +++ b/src/test/scala/format/conversion/RoundTripTests.scala @@ -0,0 +1,208 @@ +package conversionTests.format.conversion + +import better.files.File +import org.apache.jena.riot.RDFDataMgr 
+import org.apache.spark.SparkContext
+import org.apache.spark.sql.{DataFrame, SparkSession}
+import org.dbpedia.databus.client.filehandling.convert.format.tsd.TSDHandler
+import org.dbpedia.databus.client.filehandling.convert.format.rdf.quads.QuadsHandler
+import org.dbpedia.databus.client.filehandling.convert.format.rdf.triples.TripleHandler
+import org.dbpedia.databus.client.filehandling.download.Downloader
+import org.dbpedia.databus.client.filehandling.FileUtil
+import org.scalatest.FlatSpec
+import org.scalatest.Matchers._
+import scala.collection.mutable.ListBuffer
+
+class RoundTripTests extends FlatSpec{
+
+  val spark: SparkSession = SparkSession.builder()
+    .appName(s"Triple reader")
+    .master("local[*]")
+    .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
+    .getOrCreate()
+
+  implicit val sparkContext: SparkContext = spark.sparkContext
+  sparkContext.setLogLevel("WARN")
+
+  val testFileDir:File = File("./src/test/resources/conversion")
+  val outDir:File = testFileDir / "output"
+  val tempDir:File = outDir / "tempDir"
+
+  outDir.createDirectoryIfNotExists().clear()
+
+  "roundtriptest" should "succeed for all RDF formats" in {
+
+    println("Test Files:")
+    val rdfFiles = (testFileDir / "rdfTriples").listRecursively
+    val errorList: ListBuffer[String] = ListBuffer()
+
+    while (rdfFiles.hasNext) {
+      val inputFile = rdfFiles.next()
+      println(inputFile.pathAsString)
+
+      //read and write process
+      val format = FileUtil.getFormatType(inputFile,"")
+      val triples = new TripleHandler().read(inputFile.pathAsString, format)
+      val outputFile = new TripleHandler()
+        .write(triples, format)
+        .moveTo(outDir / inputFile.name)
+
+      //read in input and output
+      val statementsInput = RDFDataMgr.loadModel(inputFile.pathAsString).listStatements().toList
+      val statementsOutput = RDFDataMgr.loadModel(outputFile.pathAsString).listStatements().toList
+
+      //compare both
+      if (!statementsInput.containsAll(statementsOutput) || !statementsOutput.containsAll(statementsInput)) errorList += inputFile.pathAsString
+    }
+
+    //Result
+    val success = {
+      if (errorList.isEmpty) true
+      else {
+        println("ERRORS:")
+        errorList.foreach(file => {
+          println(s"$file did not convert properly")
+        })
+        false
+      }
+    }
+
+    success shouldBe true
+  }
+
+  "roundtriptest" should "succeed for all RDF Quad formats" in {
+
+    println("Test Files:")
+    val quadFiles = (testFileDir / "rdfQuads").listRecursively
+
+    val quadsHandler = new QuadsHandler()
+    val errorList: ListBuffer[String] = ListBuffer()
+
+    while (quadFiles.hasNext) {
+      val inputFile = quadFiles.next()
+      println(inputFile.pathAsString)
+
+      //read in and write out the quads
+      val format = FileUtil.getFormatType(inputFile,"")
+      val quads = quadsHandler.read(inputFile.pathAsString, format)
+      val outputFile = quadsHandler.write(quads, format).moveTo(outDir / inputFile.name)
+
+      //read in input and output
+      val statementsInput = RDFDataMgr.loadModel(inputFile.pathAsString).listStatements().toList
+      val statementsOutput = RDFDataMgr.loadModel(outputFile.pathAsString).listStatements().toList
+
+      //compare both
+      if (!statementsInput.containsAll(statementsOutput) || !statementsOutput.containsAll(statementsInput)) errorList += inputFile.pathAsString
+    }
+
+    //Result
+    val success = {
+      if (errorList.isEmpty) true
+      else {
+        println("ERRORS:")
+        errorList.foreach(file => {
+          println(s"$file did not convert properly")
+        })
+        false
+      }
+    }
+
+    success shouldBe true
+  }
+
+  "roundtriptest" should "succeed for all TSD formats" in {
+    println("Test Files:")
Files:") + val tsdFiles = (testFileDir / "tsd").listRecursively + + val errorList: ListBuffer[String] = ListBuffer() + + val tsdHandler = new TSDHandler() + + while (tsdFiles.hasNext) { + val inputFile = tsdFiles.next() + println(inputFile.pathAsString) + + //read in and write out to tsd file + val format = FileUtil.getFormatType(inputFile,"") + val dataFrame = tsdHandler.read(inputFile.pathAsString, format) + val outputFile = tsdHandler.write(dataFrame, format).moveTo(outDir / inputFile.name) + + //read in input and output data + val dataInput = tsdHandler.read(inputFile.pathAsString, format).sort() + val dataOutput = tsdHandler.read(outputFile.pathAsString, format).sort() + + //compare both + if (!checkDFEquality(dataInput,dataOutput) || !checkDFEquality(dataOutput,dataInput)) errorList += inputFile.pathAsString + } + + //Result + val success = { + if (errorList.isEmpty) true + else { + println("ERRORS:") + errorList.foreach(file => { + println(s"$file did not convert properly") + }) + false + } + } + + success shouldBe true + } + + def checkDFEquality(df1:DataFrame, df2:DataFrame):Boolean ={ + if (df1.columns.deep == df2.columns.deep) { + val rowsExist:ListBuffer[Boolean] = ListBuffer() + val array1 = df1.collect() + val array2 = df2.collect() + + array1.foreach( + row => { + // println(row) + var rowExists = false + array2.foreach( + rowArray2 => { + if(row.equals(rowArray2)) rowExists=true + } + ) + rowsExist += rowExists + } + ) + if (rowsExist.contains(false)) false + else true + } + else false + } +} + + +// def downloadFiles(testFileDir:File): File ={ +// +// val queryTestFiles= +// """ +// |PREFIX dataid: +// |PREFIX dataid-cv: +// |PREFIX dct: +// |PREFIX dcat: +// | +// |# Get all files +// |SELECT DISTINCT ?file WHERE { +// | ?dataset dataid:artifact . +// | ?dataset dcat:distribution ?distribution . +// | { +// | ?distribution dct:hasVersion ?latestVersion +// | { +// | SELECT (?version as ?latestVersion) WHERE { +// | ?dataset dataid:artifact . +// | ?dataset dct:hasVersion ?version . +// | } ORDER BY DESC (?version) LIMIT 1 +// | } +// | } +// | ?distribution dcat:downloadURL ?file . +// |} +// |""".stripMargin +// +// Downloader.downloadWithQuery(queryTestFiles, testFileDir) +// +// testFileDir +// } \ No newline at end of file diff --git a/src/test/scala/patternmatchTest.scala b/src/test/scala/patternmatchTest.scala deleted file mode 100644 index 6bf12fd..0000000 --- a/src/test/scala/patternmatchTest.scala +++ /dev/null @@ -1,10 +0,0 @@ -import org.scalatest.FlatSpec - -class patternmatchTest extends FlatSpec{ - - ".query" should "match" in { - val str = ".query" - - if(str matches(".sparql|.query")) println("matches") - } -}