Skip to content

Commit

Permalink
Ensure condaBinaryPath is executable when setting up CondaEnvironment…
Browse files Browse the repository at this point in the history
…Manager (+test) (apache#190)
  • Loading branch information
dansanduleac authored and robert3005 committed May 22, 2017
1 parent 7f39dff commit 8d5ed79
Show file tree
Hide file tree
Showing 3 changed files with 97 additions and 1 deletion.
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ package org.apache.spark.api.conda
import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.Paths
import java.nio.file.attribute.PosixFilePermission

import scala.collection.JavaConverters._
import scala.sys.process.BasicIO
Expand All @@ -29,6 +30,7 @@ import scala.sys.process.ProcessLogger

import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory
import com.google.common.collect.ImmutableSet
import org.json4s.JsonAST.JValue
import org.json4s.jackson.Json4sScalaModule
import org.json4s.jackson.JsonMethods
Expand All @@ -47,6 +49,8 @@ final class CondaEnvironmentManager(condaBinaryPath: String,

require(verbosity >= 0 && verbosity <= 3, "Verbosity must be between 0 and 3 inclusively")

CondaEnvironmentManager.ensureExecutable(condaBinaryPath)

lazy val defaultInfo: Map[String, JValue] = {
logInfo("Retrieving the conda installation's info")
val command = Process(List(condaBinaryPath, "info", "--json"), None)
Expand Down Expand Up @@ -175,7 +179,19 @@ final class CondaEnvironmentManager(condaBinaryPath: String,
}
}

object CondaEnvironmentManager {
object CondaEnvironmentManager extends Logging {
/**
 * Ensures that the file at `filePath` is executable, granting the owner-execute
 * POSIX permission if it is not already executable.
 *
 * @param filePath path of the file to check (typically the conda binary)
 * @throws IllegalArgumentException if the file is still not executable after
 *         the permission update (e.g. the filesystem ignores POSIX permissions)
 */
def ensureExecutable(filePath: String): Unit = {
  val path = Paths.get(filePath)
  if (!Files.isExecutable(path)) {
    logInfo(s"Attempting to make file '$filePath' executable")
    // Preserve the existing permissions and add owner-execute; an immutable
    // Scala set converted back with asJava avoids the Guava ImmutableSet dance.
    val currentPerms = Files.getPosixFilePermissions(path).asScala.toSet
    val newPerms = (currentPerms + PosixFilePermission.OWNER_EXECUTE).asJava
    Files.setPosixFilePermissions(path, newPerms)
    require(Files.isExecutable(path), s"File '$filePath' still not executable")
  }
}

/** Returns true when a conda binary path has been configured on the given [[SparkConf]]. */
def isConfigured(sparkConf: SparkConf): Boolean = sparkConf.contains(CONDA_BINARY_PATH)
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.api.conda

import java.nio.file.Files

import org.apache.spark.util.TempDirectory

/** Verifies that [[CondaEnvironmentManager.ensureExecutable]] grants the execute bit. */
class CondaEnvironmentManagerTest extends org.apache.spark.SparkFunSuite with TempDirectory {
  test("CondaEnvironmentManager.ensureExecutable") {
    // A freshly created file carries no execute permission by default.
    val target = tempDir.toPath.resolve("myfile")
    Files.createFile(target)
    assert(!Files.isExecutable(target), "File shouldn't be executable initially")

    CondaEnvironmentManager.ensureExecutable(target.toString)
    assert(Files.isExecutable(target), "File should now be executable")
  }
}
49 changes: 49 additions & 0 deletions core/src/test/scala/org/apache/spark/util/TempDirectory.scala
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.spark.util

import java.io.File

import org.scalatest.BeforeAndAfterEach
import org.scalatest.Suite

/**
 * Mixin that provisions a fresh temporary directory before every test case and
 * removes it again once the test has finished.
 */
trait TempDirectory extends BeforeAndAfterEach { self: Suite =>

  // Backing storage for the per-test directory; set in beforeEach.
  private var currentDir: File = _

  /** The temporary directory backing the currently running test. */
  protected def tempDir: File = currentDir

  override def beforeEach(): Unit = {
    super.beforeEach()
    currentDir = Utils.createTempDir(namePrefix = this.getClass.getName)
  }

  override def afterEach(): Unit = {
    // Always run the super hook, even if deletion throws.
    try Utils.deleteRecursively(currentDir)
    finally super.afterEach()
  }
}

0 comments on commit 8d5ed79

Please sign in to comment.