From a37cbdebbf655a4311e21f815e6e34688f2a6556 Mon Sep 17 00:00:00 2001
From: Anton Parkhomenko
Date: Thu, 24 Mar 2016 16:55:46 +0700
Subject: [PATCH] Added force flag (closes #141)

---
 README.md                                 | 12 +++-
 .../schemaguru/Common.scala               |  4 +-
 .../schemaguru/cli/CommandContainer.scala | 13 +++++
 .../schemaguru/cli/DdlCommand.scala       | 10 ++--
 .../schemaguru/cli/Parser.scala           |  4 ++
 .../schemaguru/utils/FileUtils.scala      | 57 ++++++++++++++++---
 6 files changed, 85 insertions(+), 15 deletions(-)

diff --git a/README.md b/README.md
index b0e8780..346f9df 100644
--- a/README.md
+++ b/README.md
@@ -141,7 +141,7 @@ The most embarrassing part of shifting from dynamic-typed world to static-typed
 How to represent them in SQL DDL? It's a taught question and we think there's no ideal solution. Thus we provide you two options.
 By default product types will be transformed as most general ``VARCHAR(4096)``.
 But there's another way - you can split column with product types into separate ones with it's types as postfix, for example property ``model`` with type ``["string", "integer"]`` will be transformed into two columns ``mode_string`` and ``model_integer``.
-This behaviour can be achieved with ``--split-product-types``.
+This behavior can be achieved with ``--split-product-types``.
 Another thing everyone need to consider is default VARCHAR size. If there's no clues about it (like ``maxLength``) 4096 will be used.
 You can also specify this default value:
 
@@ -156,6 +156,16 @@ You can also specify Redshift Schema for your table. For non-raw mode ``atomic``
 $ ./schema-guru-0.5.0 ddl --raw --schema business {{input}}
 ```
+Some users do not fully rely on Schema Guru's JSON Schema derivation or DDL generation and edit their DDLs manually.
+By default, Schema Guru will not overwrite your files (both DDLs and migrations) if you have made any significant changes to them (comments and whitespace are not significant).
+Instead, Schema Guru will just warn you that the file has been changed manually.
+To change this behavior you may specify the ``--force`` flag.
+
+```bash
+$ ./schema-guru-0.6.0 ddl --force {{input}}
+```
+
+
 ### Web UI
 You can access our hosted demo of the Schema Guru web UI at [schemaguru.snplowanalytics.com] [webui-hosted].
 To run it locally:
diff --git a/src/main/scala/com.snowplowanalytics/schemaguru/Common.scala b/src/main/scala/com.snowplowanalytics/schemaguru/Common.scala
index cb76bd1..8b1f5f2 100644
--- a/src/main/scala/com.snowplowanalytics/schemaguru/Common.scala
+++ b/src/main/scala/com.snowplowanalytics/schemaguru/Common.scala
@@ -173,8 +173,8 @@ object Common {
      *
      * @return validation with success or error message
      */
-    def write: Validation[String, String] =
-      writeToFile(file.getName, file.getParentFile.getAbsolutePath, content)
+    def write(force: Boolean = false): Validation[String, String] =
+      writeToFile(file.getName, file.getParentFile.getAbsolutePath, content, force)
   }
 
   /**
diff --git a/src/main/scala/com.snowplowanalytics/schemaguru/cli/CommandContainer.scala b/src/main/scala/com.snowplowanalytics/schemaguru/cli/CommandContainer.scala
index fea5706..2d4cb39 100644
--- a/src/main/scala/com.snowplowanalytics/schemaguru/cli/CommandContainer.scala
+++ b/src/main/scala/com.snowplowanalytics/schemaguru/cli/CommandContainer.scala
@@ -24,6 +24,13 @@ import Common.SchemaVer
  */
 trait SchemaGuruCommand
 
+/**
+ * Command container was created due to scopt's inability to create
+ * subcommands with different case class configurations.
+ * It also contains shorthand methods.
+ *
+ * @param command one of the required subcommands
+ */
 case class CommandContainer(command: Option[SchemaGuruCommand] = None) {
 
   // Common
@@ -132,4 +139,10 @@ case class CommandContainer(command: Option[SchemaGuruCommand] = None) {
       case Some(ddl: DdlCommand) => Some(ddl.copy(noHeader = flag))
       case other => other
     }
+
+  def setForce(flag: Boolean): Option[SchemaGuruCommand] =
+    command match {
+      case Some(ddl: DdlCommand) => Some(ddl.copy(force = flag))
+      case other => other
+    }
 }
diff --git a/src/main/scala/com.snowplowanalytics/schemaguru/cli/DdlCommand.scala b/src/main/scala/com.snowplowanalytics/schemaguru/cli/DdlCommand.scala
index af47ffb..3ac9fe7 100644
--- a/src/main/scala/com.snowplowanalytics/schemaguru/cli/DdlCommand.scala
+++ b/src/main/scala/com.snowplowanalytics/schemaguru/cli/DdlCommand.scala
@@ -13,6 +13,7 @@ package com.snowplowanalytics.schemaguru
 package cli
 
+// Scala
 import scala.language.implicitConversions
 
 // Scalaz
@@ -45,7 +46,8 @@ case class DdlCommand private[cli] (
   schema: Option[String] = None, // empty for raw, "atomic" for non-raw
   varcharSize: Int = 4096,
   splitProduct: Boolean = false,
-  noHeader: Boolean = false) extends SchemaGuruCommand {
+  noHeader: Boolean = false,
+  force: Boolean = false) extends SchemaGuruCommand {
 
   import DdlCommand._
 
@@ -250,17 +252,17 @@ case class DdlCommand private[cli] (
     result.ddls
       .map(_.setBasePath("sql"))
       .map(_.setBasePath(output.getAbsolutePath))
-      .map(_.write).map(printMessage)
+      .map(_.write(force)).map(printMessage)
 
     result.jsonPaths
       .map(_.setBasePath("jsonpaths"))
       .map(_.setBasePath(output.getAbsolutePath))
-      .map(_.write).map(printMessage)
+      .map(_.write(force)).map(printMessage)
 
     result.migrations
       .map(_.setBasePath("sql"))
       .map(_.setBasePath(output.getAbsolutePath))
-      .map(_.write).map(printMessage)
+      .map(_.write(force)).map(printMessage)
 
     result.warnings.map(printMessage)
   }
diff --git a/src/main/scala/com.snowplowanalytics/schemaguru/cli/Parser.scala b/src/main/scala/com.snowplowanalytics/schemaguru/cli/Parser.scala
index 491a7e6..7dd5fb1 100644
--- a/src/main/scala/com.snowplowanalytics/schemaguru/cli/Parser.scala
+++ b/src/main/scala/com.snowplowanalytics/schemaguru/cli/Parser.scala
@@ -170,6 +170,10 @@ object Parser {
       action { (_, c) => c.copy(command = c.setNoHeader(true)) }
       text "Do not place header comments into output DDL",
 
+    opt[Unit]("force")
+      action { (_, c) => c.copy(command = c.setForce(true)) }
+      text "Force overwrite of existing manually-edited files",
+
     checkConfig {
       case CommandContainer(Some(command: DdlCommand)) if !command.input.exists() =>
diff --git a/src/main/scala/com.snowplowanalytics/schemaguru/utils/FileUtils.scala b/src/main/scala/com.snowplowanalytics/schemaguru/utils/FileUtils.scala
index 28678b7..cada278 100644
--- a/src/main/scala/com.snowplowanalytics/schemaguru/utils/FileUtils.scala
+++ b/src/main/scala/com.snowplowanalytics/schemaguru/utils/FileUtils.scala
@@ -14,7 +14,10 @@ package com.snowplowanalytics.schemaguru
 package utils
 
 // Java
-import java.io.{ PrintWriter, File }
+import java.io.{ IOException, PrintWriter, File }
+
+// Scala
+import scala.io.Source
 
 // Scalaz
 import scalaz._
@@ -24,6 +27,38 @@ import Scalaz._
  * Utilities for printing and reading to/from files
  */
 object FileUtils {
+  /**
+   * Check whether the file's content has changed.
+   * Lines starting with -- (SQL comments) and blank lines
+   * are ignored.
+   *
+   * @param file existing file to check
+   * @param content new content
+   * @return true if the file has different content or is unavailable
+   */
+  def isNewContent(file: File, content: String): Boolean = {
+    try {
+      val oldContent = Source.fromFile(file)
+        .getLines()
+        .map(_.trim)
+        .filterNot(_.isEmpty)
+        .filterNot(_.startsWith("--"))
+        .toList
+
+      val newContent = content
+        .split("\n")
+        .map(_.trim)
+        .filterNot(_.isEmpty)
+        .filterNot(_.startsWith("--"))
+        .toList
+
+      oldContent != newContent
+
+    } catch {
+      case e: IOException => true
+    }
+  }
+
   /**
    * Creates a new file with the contents of the list inside.
    *
@@ -32,19 +67,25 @@ object FileUtils {
    * @param content Content of file
    * @return a success or failure string about the process
    */
-  def writeToFile(fileName: String, fileDir: String, content: String): Validation[String, String] = {
+  def writeToFile(fileName: String, fileDir: String, content: String, force: Boolean = false): Validation[String, String] = {
     val path = fileDir + "/" + fileName
     try {
       makeDir(fileDir) match {
         case true => {
           // Attempt to open the file...
           val file = new File(path)
-
-          // Print the contents of the list to the new file...
-          printToFile(file) { _.println(content) }
-
-          // Output a success message
-          s"File [${file.getAbsolutePath}] was written successfully!".success
+          lazy val contentChanged = isNewContent(file, content)
+          if (!file.exists()) {
+            printToFile(file)(_.println(content))
+            s"File [${file.getAbsolutePath}] was written successfully!".success
+          } else if (contentChanged && !force) {
+            s"File [${file.getAbsolutePath}] already exists and was probably modified manually. You can use --force to overwrite it".failure
+          } else if (force) {
+            printToFile(file)(_.println(content))
+            s"File [${file.getAbsolutePath}] was overwritten successfully!".success
+          } else {
+            s"File [${file.getAbsolutePath}] was not modified".success
+          }
         }
         case false => s"Could not make new directory to store files in - Check write permissions".failure
       }
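
For reference, the core of this change is the "significant content" check in `isNewContent`: a regenerated file only counts as manually modified when something other than `--` comments or blank lines differs. Below is a minimal, standalone Scala sketch of that comparison; the object name, method names and sample SQL are made up for illustration and are not taken from the codebase.

```scala
// Illustrative sketch of the comparison behind isNewContent (names and
// sample SQL are invented for this example, not part of the patch).
object SignificantContentSketch {

  // Keep only the lines that matter: drop blank lines and "--" SQL comments
  private def significantLines(text: String): List[String] =
    text.split("\n")
      .map(_.trim)
      .filterNot(_.isEmpty)
      .filterNot(_.startsWith("--"))
      .toList

  // True if the two texts differ once comments and blank lines are ignored
  def contentChanged(oldText: String, newText: String): Boolean =
    significantLines(oldText) != significantLines(newText)

  def main(args: Array[String]): Unit = {
    val generated = "-- AUTO-GENERATED BY schema-guru\n\nCREATE TABLE atomic.events ();"
    val commented = "CREATE TABLE atomic.events ();\n-- reviewed, looks fine"
    val reworked  = "CREATE TABLE atomic.events (event_id INT);"

    println(contentChanged(generated, commented)) // false: only comments/blank lines differ
    println(contentChanged(generated, reworked))  // true: the DDL itself was edited
  }
}
```

Because the comparison works on trimmed, comment-free lines, files whose only manual edits are comments or whitespace can still be regenerated without ``--force``, while any change to the DDL itself triggers the warning and requires the flag.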