From 891e37cd371350906035cfbf93c618035a4293b1 Mon Sep 17 00:00:00 2001
From: Imran Rashid
Date: Mon, 3 Dec 2018 14:29:04 -0600
Subject: [PATCH] add error handling on mkdirs

---
 .../main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala  | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala b/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
index d911827d78c53..7bb38ccf6f995 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
@@ -473,7 +473,9 @@ object SparkHadoopUtil {
     val builderMethod = fs.getClass().getMethod("createFile", classOf[Path])
     // the builder api does not resolve relative paths, nor does it create parent dirs, while
     // the old api does.
-    fs.mkdirs(path.getParent())
+    if (!fs.mkdirs(path.getParent())) {
+      throw new IOException(s"Failed to create parents of $path")
+    }
     val qualifiedPath = fs.makeQualified(path)
     val builder = builderMethod.invoke(fs, qualifiedPath)
     val builderCls = builder.getClass()
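
Note (not part of the patch): below is a minimal standalone sketch of the behavior this
change enforces, assuming Hadoop's FileSystem API is on the classpath. Hadoop's
FileSystem#mkdirs can report some failures by returning false rather than throwing, so
ignoring the return value can turn a missing parent directory into a harder-to-diagnose
failure later in the write path. The object and helper names below are illustrative only.

    import java.io.IOException

    import org.apache.hadoop.conf.Configuration
    import org.apache.hadoop.fs.{FileSystem, Path}

    // Hypothetical helper mirroring the patched code path: create the parent
    // directories of `path` and fail fast if the filesystem reports a failure.
    object MkdirsCheckSketch {
      def ensureParentDirs(fs: FileSystem, path: Path): Unit = {
        // mkdirs returns true if the directory already exists or was created
        if (!fs.mkdirs(path.getParent())) {
          throw new IOException(s"Failed to create parents of $path")
        }
      }

      def main(args: Array[String]): Unit = {
        val fs = FileSystem.getLocal(new Configuration())
        ensureParentDirs(fs, new Path("/tmp/example/output/part-00000"))
      }
    }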